ob-metaflow-stubs 6.0.6.2__py2.py3-none-any.whl → 6.0.7.0__py2.py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (261)
  1. metaflow-stubs/__init__.pyi +972 -972
  2. metaflow-stubs/cards.pyi +1 -1
  3. metaflow-stubs/cli.pyi +1 -1
  4. metaflow-stubs/cli_components/__init__.pyi +1 -1
  5. metaflow-stubs/cli_components/utils.pyi +1 -1
  6. metaflow-stubs/client/__init__.pyi +1 -1
  7. metaflow-stubs/client/core.pyi +3 -3
  8. metaflow-stubs/client/filecache.pyi +2 -2
  9. metaflow-stubs/events.pyi +1 -1
  10. metaflow-stubs/exception.pyi +1 -1
  11. metaflow-stubs/flowspec.pyi +2 -2
  12. metaflow-stubs/generated_for.txt +1 -1
  13. metaflow-stubs/includefile.pyi +2 -2
  14. metaflow-stubs/meta_files.pyi +1 -1
  15. metaflow-stubs/metadata_provider/__init__.pyi +1 -1
  16. metaflow-stubs/metadata_provider/heartbeat.pyi +1 -1
  17. metaflow-stubs/metadata_provider/metadata.pyi +1 -1
  18. metaflow-stubs/metadata_provider/util.pyi +1 -1
  19. metaflow-stubs/metaflow_config.pyi +1 -1
  20. metaflow-stubs/metaflow_current.pyi +53 -53
  21. metaflow-stubs/metaflow_git.pyi +1 -1
  22. metaflow-stubs/mf_extensions/__init__.pyi +1 -1
  23. metaflow-stubs/mf_extensions/obcheckpoint/__init__.pyi +1 -1
  24. metaflow-stubs/mf_extensions/obcheckpoint/plugins/__init__.pyi +1 -1
  25. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/__init__.pyi +1 -1
  26. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/__init__.pyi +1 -1
  27. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/async_cards.pyi +2 -2
  28. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/deco_injection_mixin.pyi +1 -1
  29. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/extra_components.pyi +1 -1
  30. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/__init__.pyi +1 -1
  31. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/cards/__init__.pyi +1 -1
  32. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/cards/checkpoint_lister.pyi +2 -2
  33. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/cards/lineage_card.pyi +1 -1
  34. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/checkpoint_storage.pyi +2 -2
  35. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/constructors.pyi +1 -1
  36. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/core.pyi +2 -2
  37. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/decorator.pyi +3 -3
  38. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/exceptions.pyi +1 -1
  39. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/final_api.pyi +1 -1
  40. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/lineage.pyi +1 -1
  41. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/__init__.pyi +1 -1
  42. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/context.pyi +2 -2
  43. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/core.pyi +2 -2
  44. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/decorator.pyi +1 -1
  45. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/exceptions.pyi +1 -1
  46. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/task_utils.pyi +2 -2
  47. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/utils.pyi +1 -1
  48. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastructures.pyi +1 -1
  49. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/exceptions.pyi +1 -1
  50. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/hf_hub/__init__.pyi +1 -1
  51. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/hf_hub/decorator.pyi +2 -2
  52. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/__init__.pyi +1 -1
  53. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/core.pyi +2 -2
  54. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/exceptions.pyi +1 -1
  55. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/model_storage.pyi +2 -2
  56. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/__init__.pyi +1 -1
  57. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/flowspec_utils.pyi +1 -1
  58. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/general.pyi +1 -1
  59. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/identity_utils.pyi +2 -2
  60. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/serialization_handler/__init__.pyi +1 -1
  61. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/serialization_handler/base.pyi +1 -1
  62. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/serialization_handler/tar.pyi +1 -1
  63. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/tar_utils.pyi +1 -1
  64. metaflow-stubs/mf_extensions/outerbounds/__init__.pyi +1 -1
  65. metaflow-stubs/mf_extensions/outerbounds/plugins/__init__.pyi +1 -1
  66. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/__init__.pyi +1 -1
  67. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/__init__.pyi +1 -1
  68. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/_state_machine.pyi +1 -1
  69. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/_vendor/__init__.pyi +1 -1
  70. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/_vendor/spinner/__init__.pyi +1 -1
  71. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/_vendor/spinner/spinners.pyi +1 -1
  72. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/app_cli.pyi +1 -1
  73. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/app_config.pyi +2 -2
  74. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/capsule.pyi +3 -3
  75. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/click_importer.pyi +1 -1
  76. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/code_package/__init__.pyi +1 -1
  77. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/code_package/code_packager.pyi +2 -2
  78. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/__init__.pyi +1 -1
  79. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/cli_generator.pyi +1 -1
  80. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/config_utils.pyi +2 -2
  81. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/schema_export.pyi +1 -1
  82. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/typed_configs.pyi +2 -2
  83. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/unified_config.pyi +1 -1
  84. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/dependencies.pyi +2 -2
  85. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/deployer.pyi +3 -3
  86. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/experimental/__init__.pyi +1 -1
  87. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/perimeters.pyi +1 -1
  88. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/utils.pyi +3 -3
  89. metaflow-stubs/mf_extensions/outerbounds/plugins/aws/__init__.pyi +1 -1
  90. metaflow-stubs/mf_extensions/outerbounds/plugins/aws/assume_role_decorator.pyi +2 -2
  91. metaflow-stubs/mf_extensions/outerbounds/plugins/card_utilities/__init__.pyi +1 -1
  92. metaflow-stubs/mf_extensions/outerbounds/plugins/card_utilities/async_cards.pyi +1 -1
  93. metaflow-stubs/mf_extensions/outerbounds/plugins/card_utilities/injector.pyi +1 -1
  94. metaflow-stubs/mf_extensions/outerbounds/plugins/checkpoint_datastores/__init__.pyi +1 -1
  95. metaflow-stubs/mf_extensions/outerbounds/plugins/checkpoint_datastores/coreweave.pyi +2 -2
  96. metaflow-stubs/mf_extensions/outerbounds/plugins/checkpoint_datastores/nebius.pyi +2 -2
  97. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/__init__.pyi +1 -1
  98. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/baker.pyi +2 -2
  99. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/docker_environment.pyi +2 -2
  100. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/fast_bakery.pyi +1 -1
  101. metaflow-stubs/mf_extensions/outerbounds/plugins/kubernetes/__init__.pyi +1 -1
  102. metaflow-stubs/mf_extensions/outerbounds/plugins/kubernetes/pod_killer.pyi +1 -1
  103. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/__init__.pyi +1 -1
  104. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/constants.pyi +1 -1
  105. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/exceptions.pyi +1 -1
  106. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/ollama.pyi +1 -1
  107. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/status_card.pyi +1 -1
  108. metaflow-stubs/mf_extensions/outerbounds/plugins/snowflake/__init__.pyi +1 -1
  109. metaflow-stubs/mf_extensions/outerbounds/plugins/snowflake/snowflake.pyi +1 -1
  110. metaflow-stubs/mf_extensions/outerbounds/profilers/__init__.pyi +1 -1
  111. metaflow-stubs/mf_extensions/outerbounds/profilers/gpu.pyi +1 -1
  112. metaflow-stubs/mf_extensions/outerbounds/remote_config.pyi +2 -2
  113. metaflow-stubs/mf_extensions/outerbounds/toplevel/__init__.pyi +1 -1
  114. metaflow-stubs/mf_extensions/outerbounds/toplevel/global_aliases_for_metaflow_package.pyi +1 -1
  115. metaflow-stubs/mf_extensions/outerbounds/toplevel/s3_proxy.pyi +1 -1
  116. metaflow-stubs/multicore_utils.pyi +1 -1
  117. metaflow-stubs/ob_internal.pyi +1 -1
  118. metaflow-stubs/packaging_sys/__init__.pyi +3 -3
  119. metaflow-stubs/packaging_sys/backend.pyi +3 -3
  120. metaflow-stubs/packaging_sys/distribution_support.pyi +4 -4
  121. metaflow-stubs/packaging_sys/tar_backend.pyi +4 -4
  122. metaflow-stubs/packaging_sys/utils.pyi +1 -1
  123. metaflow-stubs/packaging_sys/v1.pyi +2 -2
  124. metaflow-stubs/parameters.pyi +2 -2
  125. metaflow-stubs/plugins/__init__.pyi +11 -11
  126. metaflow-stubs/plugins/airflow/__init__.pyi +1 -1
  127. metaflow-stubs/plugins/airflow/airflow_utils.pyi +1 -1
  128. metaflow-stubs/plugins/airflow/exception.pyi +1 -1
  129. metaflow-stubs/plugins/airflow/sensors/__init__.pyi +1 -1
  130. metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +1 -1
  131. metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +1 -1
  132. metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +1 -1
  133. metaflow-stubs/plugins/argo/__init__.pyi +1 -1
  134. metaflow-stubs/plugins/argo/argo_client.pyi +1 -1
  135. metaflow-stubs/plugins/argo/argo_events.pyi +1 -1
  136. metaflow-stubs/plugins/argo/argo_workflows.pyi +1 -1
  137. metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +2 -2
  138. metaflow-stubs/plugins/argo/argo_workflows_deployer.pyi +2 -2
  139. metaflow-stubs/plugins/argo/argo_workflows_deployer_objects.pyi +1 -1
  140. metaflow-stubs/plugins/argo/exit_hooks.pyi +1 -1
  141. metaflow-stubs/plugins/aws/__init__.pyi +1 -1
  142. metaflow-stubs/plugins/aws/aws_client.pyi +1 -1
  143. metaflow-stubs/plugins/aws/aws_utils.pyi +1 -1
  144. metaflow-stubs/plugins/aws/batch/__init__.pyi +1 -1
  145. metaflow-stubs/plugins/aws/batch/batch.pyi +1 -1
  146. metaflow-stubs/plugins/aws/batch/batch_client.pyi +1 -1
  147. metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +1 -1
  148. metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +1 -1
  149. metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +2 -2
  150. metaflow-stubs/plugins/aws/step_functions/__init__.pyi +1 -1
  151. metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +1 -1
  152. metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +1 -1
  153. metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +1 -1
  154. metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +1 -1
  155. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer.pyi +2 -2
  156. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer_objects.pyi +2 -2
  157. metaflow-stubs/plugins/azure/__init__.pyi +1 -1
  158. metaflow-stubs/plugins/azure/azure_credential.pyi +1 -1
  159. metaflow-stubs/plugins/azure/azure_exceptions.pyi +1 -1
  160. metaflow-stubs/plugins/azure/azure_secret_manager_secrets_provider.pyi +2 -2
  161. metaflow-stubs/plugins/azure/azure_utils.pyi +1 -1
  162. metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +1 -1
  163. metaflow-stubs/plugins/azure/includefile_support.pyi +1 -1
  164. metaflow-stubs/plugins/cards/__init__.pyi +1 -1
  165. metaflow-stubs/plugins/cards/card_client.pyi +1 -1
  166. metaflow-stubs/plugins/cards/card_creator.pyi +1 -1
  167. metaflow-stubs/plugins/cards/card_datastore.pyi +1 -1
  168. metaflow-stubs/plugins/cards/card_decorator.pyi +1 -1
  169. metaflow-stubs/plugins/cards/card_modules/__init__.pyi +1 -1
  170. metaflow-stubs/plugins/cards/card_modules/basic.pyi +1 -1
  171. metaflow-stubs/plugins/cards/card_modules/card.pyi +1 -1
  172. metaflow-stubs/plugins/cards/card_modules/components.pyi +2 -2
  173. metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +1 -1
  174. metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +1 -1
  175. metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +1 -1
  176. metaflow-stubs/plugins/cards/card_resolver.pyi +1 -1
  177. metaflow-stubs/plugins/cards/component_serializer.pyi +1 -1
  178. metaflow-stubs/plugins/cards/exception.pyi +1 -1
  179. metaflow-stubs/plugins/catch_decorator.pyi +1 -1
  180. metaflow-stubs/plugins/datatools/__init__.pyi +1 -1
  181. metaflow-stubs/plugins/datatools/local.pyi +1 -1
  182. metaflow-stubs/plugins/datatools/s3/__init__.pyi +1 -1
  183. metaflow-stubs/plugins/datatools/s3/s3.pyi +2 -2
  184. metaflow-stubs/plugins/datatools/s3/s3tail.pyi +1 -1
  185. metaflow-stubs/plugins/datatools/s3/s3util.pyi +1 -1
  186. metaflow-stubs/plugins/debug_logger.pyi +1 -1
  187. metaflow-stubs/plugins/debug_monitor.pyi +1 -1
  188. metaflow-stubs/plugins/environment_decorator.pyi +1 -1
  189. metaflow-stubs/plugins/events_decorator.pyi +1 -1
  190. metaflow-stubs/plugins/exit_hook/__init__.pyi +1 -1
  191. metaflow-stubs/plugins/exit_hook/exit_hook_decorator.pyi +1 -1
  192. metaflow-stubs/plugins/frameworks/__init__.pyi +1 -1
  193. metaflow-stubs/plugins/frameworks/pytorch.pyi +1 -1
  194. metaflow-stubs/plugins/gcp/__init__.pyi +1 -1
  195. metaflow-stubs/plugins/gcp/gcp_secret_manager_secrets_provider.pyi +2 -2
  196. metaflow-stubs/plugins/gcp/gs_exceptions.pyi +1 -1
  197. metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +1 -1
  198. metaflow-stubs/plugins/gcp/gs_utils.pyi +1 -1
  199. metaflow-stubs/plugins/gcp/includefile_support.pyi +1 -1
  200. metaflow-stubs/plugins/kubernetes/__init__.pyi +1 -1
  201. metaflow-stubs/plugins/kubernetes/kube_utils.pyi +1 -1
  202. metaflow-stubs/plugins/kubernetes/kubernetes.pyi +1 -1
  203. metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +1 -1
  204. metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +1 -1
  205. metaflow-stubs/plugins/kubernetes/kubernetes_jobsets.pyi +1 -1
  206. metaflow-stubs/plugins/kubernetes/spot_monitor_sidecar.pyi +1 -1
  207. metaflow-stubs/plugins/ollama/__init__.pyi +1 -1
  208. metaflow-stubs/plugins/parallel_decorator.pyi +1 -1
  209. metaflow-stubs/plugins/perimeters.pyi +1 -1
  210. metaflow-stubs/plugins/project_decorator.pyi +1 -1
  211. metaflow-stubs/plugins/pypi/__init__.pyi +2 -2
  212. metaflow-stubs/plugins/pypi/conda_decorator.pyi +1 -1
  213. metaflow-stubs/plugins/pypi/conda_environment.pyi +4 -4
  214. metaflow-stubs/plugins/pypi/parsers.pyi +1 -1
  215. metaflow-stubs/plugins/pypi/pypi_decorator.pyi +1 -1
  216. metaflow-stubs/plugins/pypi/pypi_environment.pyi +1 -1
  217. metaflow-stubs/plugins/pypi/utils.pyi +1 -1
  218. metaflow-stubs/plugins/resources_decorator.pyi +1 -1
  219. metaflow-stubs/plugins/retry_decorator.pyi +1 -1
  220. metaflow-stubs/plugins/secrets/__init__.pyi +2 -2
  221. metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +2 -2
  222. metaflow-stubs/plugins/secrets/secrets_decorator.pyi +1 -1
  223. metaflow-stubs/plugins/secrets/secrets_func.pyi +1 -1
  224. metaflow-stubs/plugins/secrets/secrets_spec.pyi +1 -1
  225. metaflow-stubs/plugins/secrets/utils.pyi +1 -1
  226. metaflow-stubs/plugins/snowflake/__init__.pyi +1 -1
  227. metaflow-stubs/plugins/storage_executor.pyi +1 -1
  228. metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +2 -2
  229. metaflow-stubs/plugins/timeout_decorator.pyi +1 -1
  230. metaflow-stubs/plugins/torchtune/__init__.pyi +1 -1
  231. metaflow-stubs/plugins/uv/__init__.pyi +1 -1
  232. metaflow-stubs/plugins/uv/uv_environment.pyi +2 -2
  233. metaflow-stubs/profilers/__init__.pyi +1 -1
  234. metaflow-stubs/pylint_wrapper.pyi +1 -1
  235. metaflow-stubs/runner/__init__.pyi +1 -1
  236. metaflow-stubs/runner/deployer.pyi +4 -4
  237. metaflow-stubs/runner/deployer_impl.pyi +2 -2
  238. metaflow-stubs/runner/metaflow_runner.pyi +2 -2
  239. metaflow-stubs/runner/nbdeploy.pyi +1 -1
  240. metaflow-stubs/runner/nbrun.pyi +1 -1
  241. metaflow-stubs/runner/subprocess_manager.pyi +1 -1
  242. metaflow-stubs/runner/utils.pyi +2 -2
  243. metaflow-stubs/system/__init__.pyi +1 -1
  244. metaflow-stubs/system/system_logger.pyi +1 -1
  245. metaflow-stubs/system/system_monitor.pyi +1 -1
  246. metaflow-stubs/tagging_util.pyi +1 -1
  247. metaflow-stubs/tuple_util.pyi +1 -1
  248. metaflow-stubs/user_configs/__init__.pyi +1 -1
  249. metaflow-stubs/user_configs/config_options.pyi +1 -1
  250. metaflow-stubs/user_configs/config_parameters.pyi +4 -4
  251. metaflow-stubs/user_decorators/__init__.pyi +1 -1
  252. metaflow-stubs/user_decorators/common.pyi +1 -1
  253. metaflow-stubs/user_decorators/mutable_flow.pyi +2 -2
  254. metaflow-stubs/user_decorators/mutable_step.pyi +3 -3
  255. metaflow-stubs/user_decorators/user_flow_decorator.pyi +3 -3
  256. metaflow-stubs/user_decorators/user_step_decorator.pyi +3 -3
  257. {ob_metaflow_stubs-6.0.6.2.dist-info → ob_metaflow_stubs-6.0.7.0.dist-info}/METADATA +1 -1
  258. ob_metaflow_stubs-6.0.7.0.dist-info/RECORD +261 -0
  259. ob_metaflow_stubs-6.0.6.2.dist-info/RECORD +0 -261
  260. {ob_metaflow_stubs-6.0.6.2.dist-info → ob_metaflow_stubs-6.0.7.0.dist-info}/WHEEL +0 -0
  261. {ob_metaflow_stubs-6.0.6.2.dist-info → ob_metaflow_stubs-6.0.7.0.dist-info}/top_level.txt +0 -0
@@ -1,15 +1,15 @@
  ######################################################################################################
  # Auto-generated Metaflow stub file #
  # MF version: 2.17.1.0+obcheckpoint(0.2.4);ob(v1) #
- # Generated on 2025-08-11T21:26:28.605682 #
+ # Generated on 2025-08-18T05:26:58.140419 #
  ######################################################################################################
 
  from __future__ import annotations
 
  import typing
  if typing.TYPE_CHECKING:
- import typing
  import datetime
+ import typing
  FlowSpecDerived = typing.TypeVar("FlowSpecDerived", bound="FlowSpec", contravariant=False, covariant=False)
  StepFlag = typing.NewType("StepFlag", bool)
 
@@ -39,9 +39,9 @@ from .user_decorators.user_step_decorator import UserStepDecorator as UserStepDe
  from .user_decorators.user_step_decorator import StepMutator as StepMutator
  from .user_decorators.user_step_decorator import user_step_decorator as user_step_decorator
  from .user_decorators.user_flow_decorator import FlowMutator as FlowMutator
- from . import metaflow_git as metaflow_git
  from . import tuple_util as tuple_util
  from . import cards as cards
+ from . import metaflow_git as metaflow_git
  from . import events as events
  from . import runner as runner
  from . import plugins as plugins
@@ -168,77 +168,207 @@ def step(f: typing.Union[typing.Callable[[FlowSpecDerived], None], typing.Callab
  ...
 
  @typing.overload
- def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def model(*, load: typing.Union[typing.List[str], str, typing.List[typing.Tuple[str, typing.Optional[str]]]] = None, temp_dir_root: str = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Decorator prototype for all step decorators. This function gets specialized
- and imported for all decorators types by _import_plugin_decorators().
+ Enables loading / saving of models within a step.
+
+ > Examples
+ - Saving Models
+ ```python
+ @model
+ @step
+ def train(self):
+ # current.model.save returns a dictionary reference to the model saved
+ self.my_model = current.model.save(
+ path_to_my_model,
+ label="my_model",
+ metadata={
+ "epochs": 10,
+ "batch-size": 32,
+ "learning-rate": 0.001,
+ }
+ )
+ self.next(self.test)
+
+ @model(load="my_model")
+ @step
+ def test(self):
+ # `current.model.loaded` returns a dictionary of the loaded models
+ # where the key is the name of the artifact and the value is the path to the model
+ print(os.listdir(current.model.loaded["my_model"]))
+ self.next(self.end)
+ ```
+
+ - Loading models
+ ```python
+ @step
+ def train(self):
+ # current.model.load returns the path to the model loaded
+ checkpoint_path = current.model.load(
+ self.checkpoint_key,
+ )
+ model_path = current.model.load(
+ self.model,
+ )
+ self.next(self.test)
+ ```
+
+
+ Parameters
+ ----------
+ load : Union[List[str],str,List[Tuple[str,Union[str,None]]]], default: None
+ Artifact name/s referencing the models/checkpoints to load. Artifact names refer to the names of the instance variables set to `self`.
+ These artifact names give to `load` be reference objects or reference `key` string's from objects created by `current.checkpoint` / `current.model` / `current.huggingface_hub`.
+ If a list of tuples is provided, the first element is the artifact name and the second element is the path the artifact needs be unpacked on
+ the local filesystem. If the second element is None, the artifact will be unpacked in the current working directory.
+ If a string is provided, then the artifact corresponding to that name will be loaded in the current working directory.
+
+ temp_dir_root : str, default: None
+ The root directory under which `current.model.loaded` will store loaded models
  """
  ...
 
  @typing.overload
- def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def model(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...
 
- def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
+ @typing.overload
+ def model(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def model(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, load: typing.Union[typing.List[str], str, typing.List[typing.Tuple[str, typing.Optional[str]]]] = None, temp_dir_root: str = None):
  """
- Decorator prototype for all step decorators. This function gets specialized
- and imported for all decorators types by _import_plugin_decorators().
+ Enables loading / saving of models within a step.
+
+ > Examples
+ - Saving Models
+ ```python
+ @model
+ @step
+ def train(self):
+ # current.model.save returns a dictionary reference to the model saved
+ self.my_model = current.model.save(
+ path_to_my_model,
+ label="my_model",
+ metadata={
+ "epochs": 10,
+ "batch-size": 32,
+ "learning-rate": 0.001,
+ }
+ )
+ self.next(self.test)
+
+ @model(load="my_model")
+ @step
+ def test(self):
+ # `current.model.loaded` returns a dictionary of the loaded models
+ # where the key is the name of the artifact and the value is the path to the model
+ print(os.listdir(current.model.loaded["my_model"]))
+ self.next(self.end)
+ ```
+
+ - Loading models
+ ```python
+ @step
+ def train(self):
+ # current.model.load returns the path to the model loaded
+ checkpoint_path = current.model.load(
+ self.checkpoint_key,
+ )
+ model_path = current.model.load(
+ self.model,
+ )
+ self.next(self.test)
+ ```
+
+
+ Parameters
+ ----------
+ load : Union[List[str],str,List[Tuple[str,Union[str,None]]]], default: None
+ Artifact name/s referencing the models/checkpoints to load. Artifact names refer to the names of the instance variables set to `self`.
+ These artifact names give to `load` be reference objects or reference `key` string's from objects created by `current.checkpoint` / `current.model` / `current.huggingface_hub`.
+ If a list of tuples is provided, the first element is the artifact name and the second element is the path the artifact needs be unpacked on
+ the local filesystem. If the second element is None, the artifact will be unpacked in the current working directory.
+ If a string is provided, then the artifact corresponding to that name will be loaded in the current working directory.
+
+ temp_dir_root : str, default: None
+ The root directory under which `current.model.loaded` will store loaded models
+ """
+ ...
+
+ def nvidia(*, gpu: int, gpu_type: str, queue_timeout: int) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Specifies that this step should execute on DGX cloud.
+
+
+ Parameters
+ ----------
+ gpu : int
+ Number of GPUs to use.
+ gpu_type : str
+ Type of Nvidia GPU to use.
+ queue_timeout : int
+ Time to keep the job in NVCF's queue.
  """
  ...
 
  @typing.overload
- def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies the PyPI packages for the step.
+ Specifies the number of times the task corresponding
+ to a step needs to be retried.
 
- Information in this decorator will augment any
- attributes set in the `@pyi_base` flow-level decorator. Hence,
- you can use `@pypi_base` to set packages required by all
- steps and use `@pypi` to specify step-specific overrides.
+ This decorator is useful for handling transient errors, such as networking issues.
+ If your task contains operations that can't be retried safely, e.g. database updates,
+ it is advisable to annotate it with `@retry(times=0)`.
+
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
+ decorator will execute a no-op task after all retries have been exhausted,
+ ensuring that the flow execution can continue.
 
 
  Parameters
  ----------
- packages : Dict[str, str], default: {}
- Packages to use for this step. The key is the name of the package
- and the value is the version to use.
- python : str, optional, default: None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
+ times : int, default 3
+ Number of times to retry this task.
+ minutes_between_retries : int, default 2
+ Number of minutes between retries.
  """
  ...
 
  @typing.overload
- def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...
 
  @typing.overload
- def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...
 
- def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
+ def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
  """
- Specifies the PyPI packages for the step.
+ Specifies the number of times the task corresponding
+ to a step needs to be retried.
 
- Information in this decorator will augment any
- attributes set in the `@pyi_base` flow-level decorator. Hence,
- you can use `@pypi_base` to set packages required by all
- steps and use `@pypi` to specify step-specific overrides.
+ This decorator is useful for handling transient errors, such as networking issues.
+ If your task contains operations that can't be retried safely, e.g. database updates,
+ it is advisable to annotate it with `@retry(times=0)`.
+
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
+ decorator will execute a no-op task after all retries have been exhausted,
+ ensuring that the flow execution can continue.
 
 
  Parameters
  ----------
- packages : Dict[str, str], default: {}
- Packages to use for this step. The key is the name of the package
- and the value is the version to use.
- python : str, optional, default: None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
+ times : int, default 3
+ Number of times to retry this task.
+ minutes_between_retries : int, default 2
+ Number of minutes between retries.
  """
  ...
 
  @typing.overload
- def app_deploy(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  """
  Decorator prototype for all step decorators. This function gets specialized
  and imported for all decorators types by _import_plugin_decorators().
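The new `@retry` stub above notes that retries are intended for transient failures and that the decorator pairs with `@catch`, which records the exception and lets the flow continue once all retries are exhausted. The following is an illustrative sketch only (not part of the diff) of a step combining the two; `download_dataset` is a hypothetical helper used purely for the example:

```python
from metaflow import FlowSpec, step, retry, catch


class TransientErrorFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.fetch)

    # Retry transient failures up to 3 times, 2 minutes apart (the defaults in the stub);
    # if every retry fails, @catch stores the exception in `self.fetch_error` and the
    # flow continues instead of aborting.
    @catch(var="fetch_error")
    @retry(times=3, minutes_between_retries=2)
    @step
    def fetch(self):
        self.data = download_dataset()  # hypothetical helper, for illustration only
        self.next(self.end)

    @step
    def end(self):
        if getattr(self, "fetch_error", None):
            print("fetch failed after retries:", self.fetch_error)


if __name__ == "__main__":
    TransientErrorFlow()
```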
@@ -246,353 +376,222 @@ def app_deploy(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.
246
376
  ...
247
377
 
248
378
  @typing.overload
249
- def app_deploy(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
379
+ def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
250
380
  ...
251
381
 
252
- def app_deploy(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
382
+ def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
253
383
  """
254
384
  Decorator prototype for all step decorators. This function gets specialized
255
385
  and imported for all decorators types by _import_plugin_decorators().
256
386
  """
257
387
  ...
258
388
 
259
- @typing.overload
260
- def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
389
+ def vllm(*, model: str, backend: str, openai_api_server: bool, debug: bool, card_refresh_interval: int, max_retries: int, retry_alert_frequency: int, engine_args: dict) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
261
390
  """
262
- Internal decorator to support Fast bakery
391
+ This decorator is used to run vllm APIs as Metaflow task sidecars.
392
+
393
+ User code call
394
+ --------------
395
+ @vllm(
396
+ model="...",
397
+ ...
398
+ )
399
+
400
+ Valid backend options
401
+ ---------------------
402
+ - 'local': Run as a separate process on the local task machine.
403
+
404
+ Valid model options
405
+ -------------------
406
+ Any HuggingFace model identifier, e.g. 'meta-llama/Llama-3.2-1B'
407
+
408
+ NOTE: vLLM's OpenAI-compatible server serves ONE model per server instance.
409
+ If you need multiple models, you must create multiple @vllm decorators.
410
+
411
+
412
+ Parameters
413
+ ----------
414
+ model: str
415
+ HuggingFace model identifier to be served by vLLM.
416
+ backend: str
417
+ Determines where and how to run the vLLM process.
418
+ openai_api_server: bool
419
+ Whether to use OpenAI-compatible API server mode (subprocess) instead of native engine.
420
+ Default is False (uses native engine).
421
+ Set to True for backward compatibility with existing code.
422
+ debug: bool
423
+ Whether to turn on verbose debugging logs.
424
+ card_refresh_interval: int
425
+ Interval in seconds for refreshing the vLLM status card.
426
+ Only used when openai_api_server=True.
427
+ max_retries: int
428
+ Maximum number of retries checking for vLLM server startup.
429
+ Only used when openai_api_server=True.
430
+ retry_alert_frequency: int
431
+ Frequency of alert logs for vLLM server startup retries.
432
+ Only used when openai_api_server=True.
433
+ engine_args : dict
434
+ Additional keyword arguments to pass to the vLLM engine.
435
+ For example, `tensor_parallel_size=2`.
263
436
  """
264
437
  ...
265
438
 
266
439
  @typing.overload
267
- def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
268
- ...
269
-
270
- def fast_bakery_internal(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
271
- """
272
- Internal decorator to support Fast bakery
273
- """
274
- ...
275
-
276
- @typing.overload
277
- def checkpoint(*, load_policy: str = 'fresh', temp_dir_root: str = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
440
+ def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
278
441
  """
279
- Enables checkpointing for a step.
280
-
281
- > Examples
282
-
283
- - Saving Checkpoints
284
-
285
- ```python
286
- @checkpoint
287
- @step
288
- def train(self):
289
- model = create_model(self.parameters, checkpoint_path = None)
290
- for i in range(self.epochs):
291
- # some training logic
292
- loss = model.train(self.dataset)
293
- if i % 10 == 0:
294
- model.save(
295
- current.checkpoint.directory,
296
- )
297
- # saves the contents of the `current.checkpoint.directory` as a checkpoint
298
- # and returns a reference dictionary to the checkpoint saved in the datastore
299
- self.latest_checkpoint = current.checkpoint.save(
300
- name="epoch_checkpoint",
301
- metadata={
302
- "epoch": i,
303
- "loss": loss,
304
- }
305
- )
306
- ```
307
-
308
- - Using Loaded Checkpoints
309
-
310
- ```python
311
- @retry(times=3)
312
- @checkpoint
313
- @step
314
- def train(self):
315
- # Assume that the task has restarted and the previous attempt of the task
316
- # saved a checkpoint
317
- checkpoint_path = None
318
- if current.checkpoint.is_loaded: # Check if a checkpoint is loaded
319
- print("Loaded checkpoint from the previous attempt")
320
- checkpoint_path = current.checkpoint.directory
442
+ Specifies the Conda environment for the step.
321
443
 
322
- model = create_model(self.parameters, checkpoint_path = checkpoint_path)
323
- for i in range(self.epochs):
324
- ...
325
- ```
444
+ Information in this decorator will augment any
445
+ attributes set in the `@conda_base` flow-level decorator. Hence,
446
+ you can use `@conda_base` to set packages required by all
447
+ steps and use `@conda` to specify step-specific overrides.
326
448
 
327
449
 
328
450
  Parameters
329
451
  ----------
330
- load_policy : str, default: "fresh"
331
- The policy for loading the checkpoint. The following policies are supported:
332
- - "eager": Loads the the latest available checkpoint within the namespace.
333
- With this mode, the latest checkpoint written by any previous task (can be even a different run) of the step
334
- will be loaded at the start of the task.
335
- - "none": Do not load any checkpoint
336
- - "fresh": Loads the lastest checkpoint created within the running Task.
337
- This mode helps loading checkpoints across various retry attempts of the same task.
338
- With this mode, no checkpoint will be loaded at the start of a task but any checkpoints
339
- created within the task will be loaded when the task is retries execution on failure.
340
-
341
- temp_dir_root : str, default: None
342
- The root directory under which `current.checkpoint.directory` will be created.
452
+ packages : Dict[str, str], default {}
453
+ Packages to use for this step. The key is the name of the package
454
+ and the value is the version to use.
455
+ libraries : Dict[str, str], default {}
456
+ Supported for backward compatibility. When used with packages, packages will take precedence.
457
+ python : str, optional, default None
458
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
459
+ that the version used will correspond to the version of the Python interpreter used to start the run.
460
+ disabled : bool, default False
461
+ If set to True, disables @conda.
343
462
  """
344
463
  ...
345
464
 
346
465
  @typing.overload
347
- def checkpoint(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
466
+ def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
348
467
  ...
349
468
 
350
469
  @typing.overload
351
- def checkpoint(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
352
- ...
353
-
354
- def checkpoint(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, load_policy: str = 'fresh', temp_dir_root: str = None):
355
- """
356
- Enables checkpointing for a step.
357
-
358
- > Examples
359
-
360
- - Saving Checkpoints
361
-
362
- ```python
363
- @checkpoint
364
- @step
365
- def train(self):
366
- model = create_model(self.parameters, checkpoint_path = None)
367
- for i in range(self.epochs):
368
- # some training logic
369
- loss = model.train(self.dataset)
370
- if i % 10 == 0:
371
- model.save(
372
- current.checkpoint.directory,
373
- )
374
- # saves the contents of the `current.checkpoint.directory` as a checkpoint
375
- # and returns a reference dictionary to the checkpoint saved in the datastore
376
- self.latest_checkpoint = current.checkpoint.save(
377
- name="epoch_checkpoint",
378
- metadata={
379
- "epoch": i,
380
- "loss": loss,
381
- }
382
- )
383
- ```
384
-
385
- - Using Loaded Checkpoints
386
-
387
- ```python
388
- @retry(times=3)
389
- @checkpoint
390
- @step
391
- def train(self):
392
- # Assume that the task has restarted and the previous attempt of the task
393
- # saved a checkpoint
394
- checkpoint_path = None
395
- if current.checkpoint.is_loaded: # Check if a checkpoint is loaded
396
- print("Loaded checkpoint from the previous attempt")
397
- checkpoint_path = current.checkpoint.directory
398
-
399
- model = create_model(self.parameters, checkpoint_path = checkpoint_path)
400
- for i in range(self.epochs):
401
- ...
402
- ```
403
-
404
-
405
- Parameters
406
- ----------
407
- load_policy : str, default: "fresh"
408
- The policy for loading the checkpoint. The following policies are supported:
409
- - "eager": Loads the the latest available checkpoint within the namespace.
410
- With this mode, the latest checkpoint written by any previous task (can be even a different run) of the step
411
- will be loaded at the start of the task.
412
- - "none": Do not load any checkpoint
413
- - "fresh": Loads the lastest checkpoint created within the running Task.
414
- This mode helps loading checkpoints across various retry attempts of the same task.
415
- With this mode, no checkpoint will be loaded at the start of a task but any checkpoints
416
- created within the task will be loaded when the task is retries execution on failure.
417
-
418
- temp_dir_root : str, default: None
419
- The root directory under which `current.checkpoint.directory` will be created.
420
- """
421
- ...
422
-
423
- def s3_proxy(*, integration_name: typing.Optional[str] = None, write_mode: typing.Optional[str] = None, debug: typing.Optional[bool] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
424
- """
425
- S3 Proxy decorator for routing S3 requests through a local proxy service.
426
-
427
-
428
- Parameters
429
- ----------
430
- integration_name : str, optional
431
- Name of the S3 proxy integration. If not specified, will use the only
432
- available S3 proxy integration in the namespace (fails if multiple exist).
433
- write_mode : str, optional
434
- The desired behavior during write operations to target (origin) S3 bucket.
435
- allowed options are:
436
- "origin-and-cache" -> write to both the target S3 bucket and local object
437
- storage
438
- "origin" -> only write to the target S3 bucket
439
- "cache" -> only write to the object storage service used for caching
440
- debug : bool, optional
441
- Enable debug logging for proxy operations.
442
- """
470
+ def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
443
471
  ...
444
472
 
445
- def vllm(*, model: str, backend: str, openai_api_server: bool, debug: bool, card_refresh_interval: int, max_retries: int, retry_alert_frequency: int, engine_args: dict) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
473
+ def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
446
474
  """
447
- This decorator is used to run vllm APIs as Metaflow task sidecars.
448
-
449
- User code call
450
- --------------
451
- @vllm(
452
- model="...",
453
- ...
454
- )
455
-
456
- Valid backend options
457
- ---------------------
458
- - 'local': Run as a separate process on the local task machine.
459
-
460
- Valid model options
461
- -------------------
462
- Any HuggingFace model identifier, e.g. 'meta-llama/Llama-3.2-1B'
475
+ Specifies the Conda environment for the step.
463
476
 
464
- NOTE: vLLM's OpenAI-compatible server serves ONE model per server instance.
465
- If you need multiple models, you must create multiple @vllm decorators.
477
+ Information in this decorator will augment any
478
+ attributes set in the `@conda_base` flow-level decorator. Hence,
479
+ you can use `@conda_base` to set packages required by all
480
+ steps and use `@conda` to specify step-specific overrides.
466
481
 
467
482
 
468
483
  Parameters
469
484
  ----------
470
- model: str
471
- HuggingFace model identifier to be served by vLLM.
472
- backend: str
473
- Determines where and how to run the vLLM process.
474
- openai_api_server: bool
475
- Whether to use OpenAI-compatible API server mode (subprocess) instead of native engine.
476
- Default is False (uses native engine).
477
- Set to True for backward compatibility with existing code.
478
- debug: bool
479
- Whether to turn on verbose debugging logs.
480
- card_refresh_interval: int
481
- Interval in seconds for refreshing the vLLM status card.
482
- Only used when openai_api_server=True.
483
- max_retries: int
484
- Maximum number of retries checking for vLLM server startup.
485
- Only used when openai_api_server=True.
486
- retry_alert_frequency: int
487
- Frequency of alert logs for vLLM server startup retries.
488
- Only used when openai_api_server=True.
489
- engine_args : dict
490
- Additional keyword arguments to pass to the vLLM engine.
491
- For example, `tensor_parallel_size=2`.
485
+ packages : Dict[str, str], default {}
486
+ Packages to use for this step. The key is the name of the package
487
+ and the value is the version to use.
488
+ libraries : Dict[str, str], default {}
489
+ Supported for backward compatibility. When used with packages, packages will take precedence.
490
+ python : str, optional, default None
491
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
492
+ that the version used will correspond to the version of the Python interpreter used to start the run.
493
+ disabled : bool, default False
494
+ If set to True, disables @conda.
492
495
  """
493
496
  ...
494
497
 
495
498
  @typing.overload
496
- def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
499
+ def card(*, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
497
500
  """
498
- Specifies that the step will success under all circumstances.
501
+ Creates a human-readable report, a Metaflow Card, after this step completes.
499
502
 
500
- The decorator will create an optional artifact, specified by `var`, which
501
- contains the exception raised. You can use it to detect the presence
502
- of errors, indicating that all happy-path artifacts produced by the step
503
- are missing.
503
+ Note that you may add multiple `@card` decorators in a step with different parameters.
504
504
 
505
505
 
506
506
  Parameters
507
507
  ----------
508
- var : str, optional, default None
509
- Name of the artifact in which to store the caught exception.
510
- If not specified, the exception is not stored.
511
- print_exception : bool, default True
512
- Determines whether or not the exception is printed to
513
- stdout when caught.
508
+ type : str, default 'default'
509
+ Card type.
510
+ id : str, optional, default None
511
+ If multiple cards are present, use this id to identify this card.
512
+ options : Dict[str, Any], default {}
513
+ Options passed to the card. The contents depend on the card type.
514
+ timeout : int, default 45
515
+ Interrupt reporting if it takes more than this many seconds.
514
516
  """
515
517
  ...
516
518
 
517
519
  @typing.overload
518
- def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
520
+ def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
519
521
  ...
520
522
 
521
523
  @typing.overload
522
- def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
524
+ def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
523
525
  ...
524
526
 
525
- def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
527
+ def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
526
528
  """
527
- Specifies that the step will success under all circumstances.
529
+ Creates a human-readable report, a Metaflow Card, after this step completes.
528
530
 
529
- The decorator will create an optional artifact, specified by `var`, which
530
- contains the exception raised. You can use it to detect the presence
531
- of errors, indicating that all happy-path artifacts produced by the step
532
- are missing.
531
+ Note that you may add multiple `@card` decorators in a step with different parameters.
533
532
 
534
533
 
535
534
  Parameters
536
535
  ----------
537
- var : str, optional, default None
538
- Name of the artifact in which to store the caught exception.
539
- If not specified, the exception is not stored.
540
- print_exception : bool, default True
541
- Determines whether or not the exception is printed to
542
- stdout when caught.
536
+ type : str, default 'default'
537
+ Card type.
538
+ id : str, optional, default None
539
+ If multiple cards are present, use this id to identify this card.
540
+ options : Dict[str, Any], default {}
541
+ Options passed to the card. The contents depend on the card type.
542
+ timeout : int, default 45
543
+ Interrupt reporting if it takes more than this many seconds.
543
544
  """
544
545
  ...
545
546
 
546
547
  @typing.overload
547
- def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
548
+ def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
548
549
  """
549
- Specifies secrets to be retrieved and injected as environment variables prior to
550
- the execution of a step.
550
+ Specifies the PyPI packages for the step.
551
+
552
+ Information in this decorator will augment any
553
+ attributes set in the `@pyi_base` flow-level decorator. Hence,
554
+ you can use `@pypi_base` to set packages required by all
555
+ steps and use `@pypi` to specify step-specific overrides.
551
556
 
552
557
 
553
558
  Parameters
554
559
  ----------
555
- sources : List[Union[str, Dict[str, Any]]], default: []
556
- List of secret specs, defining how the secrets are to be retrieved
557
- role : str, optional, default: None
558
- Role to use for fetching secrets
560
+ packages : Dict[str, str], default: {}
561
+ Packages to use for this step. The key is the name of the package
562
+ and the value is the version to use.
563
+ python : str, optional, default: None
564
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
565
+ that the version used will correspond to the version of the Python interpreter used to start the run.
559
566
  """
560
567
  ...
561
568
 
562
569
  @typing.overload
563
- def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
570
+ def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
564
571
  ...
565
572
 
566
573
  @typing.overload
567
- def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
574
+ def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
568
575
  ...
569
576
 
570
- def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None):
577
+ def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
571
578
  """
572
- Specifies secrets to be retrieved and injected as environment variables prior to
573
- the execution of a step.
574
-
579
+ Specifies the PyPI packages for the step.
575
580
 
576
- Parameters
577
- ----------
578
- sources : List[Union[str, Dict[str, Any]]], default: []
579
- List of secret specs, defining how the secrets are to be retrieved
580
- role : str, optional, default: None
581
- Role to use for fetching secrets
582
- """
583
- ...
584
-
585
- def nvct(*, gpu: int, gpu_type: str) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
586
- """
587
- Specifies that this step should execute on DGX cloud.
581
+ Information in this decorator will augment any
582
+ attributes set in the `@pypi_base` flow-level decorator. Hence,
583
+ you can use `@pypi_base` to set packages required by all
584
+ steps and use `@pypi` to specify step-specific overrides.
588
585
 
589
586
 
590
587
  Parameters
591
588
  ----------
592
- gpu : int
593
- Number of GPUs to use.
594
- gpu_type : str
595
- Type of Nvidia GPU to use.
589
+ packages : Dict[str, str], default: {}
590
+ Packages to use for this step. The key is the name of the package
591
+ and the value is the version to use.
592
+ python : str, optional, default: None
593
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
594
+ that the version used will correspond to the version of the Python interpreter used to start the run.
596
595
  """
597
596
  ...
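A minimal sketch of the `@pypi_base` / `@pypi` combination described above; the package names and versions are illustrative only:

```python
from metaflow import FlowSpec, pypi, pypi_base, step


@pypi_base(packages={"requests": "2.31.0"}, python="3.11.5")
class PypiDemoFlow(FlowSpec):

    @step
    def start(self):
        import requests  # resolved from the flow-level @pypi_base environment
        self.status = requests.codes.ok
        self.next(self.train)

    # step-specific override augments the flow-level packages
    @pypi(packages={"numpy": "1.26.4"})
    @step
    def train(self):
        import numpy as np
        self.mean = float(np.mean([1, 2, 3]))
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    PypiDemoFlow()
```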
598
597
 
@@ -639,374 +638,267 @@ def ollama(*, models: list, backend: str, force_pull: bool, cache_update_policy:
639
638
  """
640
639
  ...
641
640
 
642
- @typing.overload
643
- def card(*, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
641
+ def nvct(*, gpu: int, gpu_type: str) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
644
642
  """
645
- Creates a human-readable report, a Metaflow Card, after this step completes.
646
-
647
- Note that you may add multiple `@card` decorators in a step with different parameters.
643
+ Specifies that this step should execute on DGX cloud.
648
644
 
649
645
 
650
646
  Parameters
651
647
  ----------
652
- type : str, default 'default'
653
- Card type.
654
- id : str, optional, default None
655
- If multiple cards are present, use this id to identify this card.
656
- options : Dict[str, Any], default {}
657
- Options passed to the card. The contents depend on the card type.
658
- timeout : int, default 45
659
- Interrupt reporting if it takes more than this many seconds.
648
+ gpu : int
649
+ Number of GPUs to use.
650
+ gpu_type : str
651
+ Type of Nvidia GPU to use.
660
652
  """
661
653
  ...
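A hedged sketch of `@nvct` usage, assuming the decorator is exposed at the top level of this distribution as the stub suggests; the GPU type string is a placeholder, so use a value supported by your DGX Cloud setup:

```python
from metaflow import FlowSpec, nvct, step


class NvctDemoFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.train)

    # "H100" is a placeholder GPU type, not a documented default
    @nvct(gpu=1, gpu_type="H100")
    @step
    def train(self):
        self.trained = True
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    NvctDemoFlow()
```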
662
654
 
663
655
  @typing.overload
664
- def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
656
+ def nebius_s3_proxy(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
657
+ """
658
+ Nebius-specific S3 Proxy decorator for routing S3 requests through a local proxy service.
659
+ It exists to make it easier for users to know that this decorator should only be used with
660
+ a Neo Cloud like Nebius.
661
+ """
665
662
  ...
666
663
 
667
664
  @typing.overload
668
- def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
665
+ def nebius_s3_proxy(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
669
666
  ...
670
667
 
671
- def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
668
+ def nebius_s3_proxy(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
672
669
  """
673
- Creates a human-readable report, a Metaflow Card, after this step completes.
674
-
675
- Note that you may add multiple `@card` decorators in a step with different parameters.
676
-
677
-
678
- Parameters
679
- ----------
680
- type : str, default 'default'
681
- Card type.
682
- id : str, optional, default None
683
- If multiple cards are present, use this id to identify this card.
684
- options : Dict[str, Any], default {}
685
- Options passed to the card. The contents depend on the card type.
686
- timeout : int, default 45
687
- Interrupt reporting if it takes more than this many seconds.
670
+ Nebius-specific S3 Proxy decorator for routing S3 requests through a local proxy service.
671
+ It exists to make it easier for users to know that this decorator should only be used with
672
+ a Neo Cloud like Nebius.
673
+ """
674
+ ...
675
+
676
+ @typing.overload
677
+ def coreweave_s3_proxy(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
678
+ """
679
+ CoreWeave-specific S3 Proxy decorator for routing S3 requests through a local proxy service.
680
+ It exists to make it easier for users to know that this decorator should only be used with
681
+ a Neo Cloud like CoreWeave.
682
+ """
683
+ ...
684
+
685
+ @typing.overload
686
+ def coreweave_s3_proxy(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
687
+ ...
688
+
689
+ def coreweave_s3_proxy(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
690
+ """
691
+ CoreWeave-specific S3 Proxy decorator for routing S3 requests through a local proxy service.
692
+ It exists to make it easier for users to know that this decorator should only be used with
693
+ a Neo Cloud like CoreWeave.
688
694
  """
689
695
  ...
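A hedged sketch of applying one of the cloud-specific proxy decorators to a step, assuming top-level exposure as in the stub; the bucket path is a placeholder, pairing with `@kubernetes` is an assumption about the typical deployment, and the decorator is only meaningful on the corresponding Neo Cloud:

```python
from metaflow import S3, FlowSpec, kubernetes, nebius_s3_proxy, step


class NebiusProxyFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.fetch)

    # only meaningful on a Neo Cloud such as Nebius; bucket path is a placeholder
    @nebius_s3_proxy
    @kubernetes
    @step
    def fetch(self):
        with S3(s3root="s3://my-bucket/data/") as s3:
            self.n_objects = len(s3.list_paths())
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    NebiusProxyFlow()
```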
690
696
 
691
697
  @typing.overload
692
- def resources(*, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
698
+ def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
693
699
  """
694
- Specifies the resources needed when executing this step.
695
-
696
- Use `@resources` to specify the resource requirements
697
- independently of the specific compute layer (`@batch`, `@kubernetes`).
698
-
699
- You can choose the compute layer on the command line by executing e.g.
700
- ```
701
- python myflow.py run --with batch
702
- ```
703
- or
704
- ```
705
- python myflow.py run --with kubernetes
706
- ```
707
- which executes the flow on the desired system using the
708
- requirements specified in `@resources`.
700
+ Specifies environment variables to be set prior to the execution of a step.
709
701
 
710
702
 
711
703
  Parameters
712
704
  ----------
713
- cpu : int, default 1
714
- Number of CPUs required for this step.
715
- gpu : int, optional, default None
716
- Number of GPUs required for this step.
717
- disk : int, optional, default None
718
- Disk size (in MB) required for this step. Only applies on Kubernetes.
719
- memory : int, default 4096
720
- Memory size (in MB) required for this step.
721
- shared_memory : int, optional, default None
722
- The value for the size (in MiB) of the /dev/shm volume for this step.
723
- This parameter maps to the `--shm-size` option in Docker.
705
+ vars : Dict[str, str], default {}
706
+ Dictionary of environment variables to set.
724
707
  """
725
708
  ...
726
709
 
727
710
  @typing.overload
728
- def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
711
+ def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
729
712
  ...
730
713
 
731
714
  @typing.overload
732
- def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
715
+ def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
733
716
  ...
734
717
 
735
- def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
718
+ def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
736
719
  """
737
- Specifies the resources needed when executing this step.
738
-
739
- Use `@resources` to specify the resource requirements
740
- independently of the specific compute layer (`@batch`, `@kubernetes`).
741
-
742
- You can choose the compute layer on the command line by executing e.g.
743
- ```
744
- python myflow.py run --with batch
745
- ```
746
- or
747
- ```
748
- python myflow.py run --with kubernetes
749
- ```
750
- which executes the flow on the desired system using the
751
- requirements specified in `@resources`.
720
+ Specifies environment variables to be set prior to the execution of a step.
752
721
 
753
722
 
754
723
  Parameters
755
724
  ----------
756
- cpu : int, default 1
757
- Number of CPUs required for this step.
758
- gpu : int, optional, default None
759
- Number of GPUs required for this step.
760
- disk : int, optional, default None
761
- Disk size (in MB) required for this step. Only applies on Kubernetes.
762
- memory : int, default 4096
763
- Memory size (in MB) required for this step.
764
- shared_memory : int, optional, default None
765
- The value for the size (in MiB) of the /dev/shm volume for this step.
766
- This parameter maps to the `--shm-size` option in Docker.
725
+ vars : Dict[str, str], default {}
726
+ Dictionary of environment variables to set.
767
727
  """
768
728
  ...
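A minimal sketch of `@environment`; the variable name and value are illustrative:

```python
import os

from metaflow import FlowSpec, environment, step


class EnvDemoFlow(FlowSpec):

    # the variable name and value are illustrative
    @environment(vars={"TOKENIZERS_PARALLELISM": "false"})
    @step
    def start(self):
        self.setting = os.environ["TOKENIZERS_PARALLELISM"]
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    EnvDemoFlow()
```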
769
729
 
770
730
  @typing.overload
771
- def test_append_card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
731
+ def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
772
732
  """
773
- A simple decorator that demonstrates using CardDecoratorInjector
774
- to inject a card and render simple markdown content.
733
+ Specifies a timeout for your step.
734
+
735
+ This decorator is useful if this step may hang indefinitely.
736
+
737
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
738
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
739
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.
740
+
741
+ Note that all the values specified in parameters are added together so if you specify
742
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
743
+
744
+
745
+ Parameters
746
+ ----------
747
+ seconds : int, default 0
748
+ Number of seconds to wait prior to timing out.
749
+ minutes : int, default 0
750
+ Number of minutes to wait prior to timing out.
751
+ hours : int, default 0
752
+ Number of hours to wait prior to timing out.
775
753
  """
776
754
  ...
777
755
 
778
756
  @typing.overload
779
- def test_append_card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
757
+ def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
780
758
  ...
781
759
 
782
- def test_append_card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
783
- """
784
- A simple decorator that demonstrates using CardDecoratorInjector
785
- to inject a card and render simple markdown content.
786
- """
760
+ @typing.overload
761
+ def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
787
762
  ...
788
763
 
789
- @typing.overload
790
- def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
764
+ def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
791
765
  """
792
- Specifies the number of times the task corresponding
793
- to a step needs to be retried.
766
+ Specifies a timeout for your step.
794
767
 
795
- This decorator is useful for handling transient errors, such as networking issues.
796
- If your task contains operations that can't be retried safely, e.g. database updates,
797
- it is advisable to annotate it with `@retry(times=0)`.
768
+ This decorator is useful if this step may hang indefinitely.
798
769
 
799
- This can be used in conjunction with the `@catch` decorator. The `@catch`
800
- decorator will execute a no-op task after all retries have been exhausted,
801
- ensuring that the flow execution can continue.
770
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
771
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
772
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.
773
+
774
+ Note that all the values specified in parameters are added together so if you specify
775
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
802
776
 
803
777
 
804
778
  Parameters
805
779
  ----------
806
- times : int, default 3
807
- Number of times to retry this task.
808
- minutes_between_retries : int, default 2
809
- Number of minutes between retries.
780
+ seconds : int, default 0
781
+ Number of seconds to wait prior to timing out.
782
+ minutes : int, default 0
783
+ Number of minutes to wait prior to timing out.
784
+ hours : int, default 0
785
+ Number of hours to wait prior to timing out.
810
786
  """
811
787
  ...
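A minimal sketch combining `@timeout` with `@retry` as described above; the durations and workload are illustrative:

```python
from metaflow import FlowSpec, retry, step, timeout


class TimeoutDemoFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.crunch)

    # 1 hour and 30 minutes are added together: effective timeout of 90 minutes.
    # A timeout surfaces as an exception, so @retry re-runs the task if it trips.
    @retry(times=2)
    @timeout(hours=1, minutes=30)
    @step
    def crunch(self):
        self.result = sum(range(10_000))
        self.next(self.end)

    @step
    def end(self):
        print("result:", self.result)


if __name__ == "__main__":
    TimeoutDemoFlow()
```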
812
788
 
813
789
  @typing.overload
814
- def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
790
+ def app_deploy(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
791
+ """
792
+ Decorator prototype for all step decorators. This function gets specialized
793
+ and imported for all decorator types by _import_plugin_decorators().
794
+ """
815
795
  ...
816
796
 
817
797
  @typing.overload
818
- def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
798
+ def app_deploy(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
819
799
  ...
820
800
 
821
- def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
801
+ def app_deploy(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
822
802
  """
823
- Specifies the number of times the task corresponding
824
- to a step needs to be retried.
825
-
826
- This decorator is useful for handling transient errors, such as networking issues.
827
- If your task contains operations that can't be retried safely, e.g. database updates,
828
- it is advisable to annotate it with `@retry(times=0)`.
829
-
830
- This can be used in conjunction with the `@catch` decorator. The `@catch`
831
- decorator will execute a no-op task after all retries have been exhausted,
832
- ensuring that the flow execution can continue.
833
-
834
-
835
- Parameters
836
- ----------
837
- times : int, default 3
838
- Number of times to retry this task.
839
- minutes_between_retries : int, default 2
840
- Number of minutes between retries.
803
+ Decorator prototype for all step decorators. This function gets specialized
804
+ and imported for all decorator types by _import_plugin_decorators().
841
805
  """
842
806
  ...
843
807
 
844
- def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = 'KUBERNETES_IMAGE_PULL_POLICY', image_pull_secrets: typing.List[str] = [], service_account: str = 'METAFLOW_KUBERNETES_SERVICE_ACCOUNT', secrets: typing.Optional[typing.List[str]] = None, node_selector: typing.Union[typing.Dict[str, str], str, None] = None, namespace: str = 'METAFLOW_KUBERNETES_NAMESPACE', gpu: typing.Optional[int] = None, gpu_vendor: str = 'KUBERNETES_GPU_VENDOR', tolerations: typing.List[typing.Dict[str, str]] = [], labels: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_LABELS', annotations: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_ANNOTATIONS', use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = '/metaflow_temp', persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None, hostname_resolution_timeout: int = 600, qos: str = 'Burstable', security_context: typing.Optional[typing.Dict[str, typing.Any]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
808
+ @typing.overload
809
+ def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
845
810
  """
846
- Specifies that this step should execute on Kubernetes.
811
+ Specifies secrets to be retrieved and injected as environment variables prior to
812
+ the execution of a step.
847
813
 
848
814
 
849
815
  Parameters
850
816
  ----------
851
- cpu : int, default 1
852
- Number of CPUs required for this step. If `@resources` is
853
- also present, the maximum value from all decorators is used.
854
- memory : int, default 4096
855
- Memory size (in MB) required for this step. If
856
- `@resources` is also present, the maximum value from all decorators is
857
- used.
858
- disk : int, default 10240
859
- Disk size (in MB) required for this step. If
860
- `@resources` is also present, the maximum value from all decorators is
861
- used.
862
- image : str, optional, default None
863
- Docker image to use when launching on Kubernetes. If not specified, and
864
- METAFLOW_KUBERNETES_CONTAINER_IMAGE is specified, that image is used. If
865
- not, a default Docker image mapping to the current version of Python is used.
866
- image_pull_policy: str, default KUBERNETES_IMAGE_PULL_POLICY
867
- If given, the imagePullPolicy to be applied to the Docker image of the step.
868
- image_pull_secrets: List[str], default []
869
- The default is extracted from METAFLOW_KUBERNETES_IMAGE_PULL_SECRETS.
870
- Kubernetes image pull secrets to use when pulling container images
871
- in Kubernetes.
872
- service_account : str, default METAFLOW_KUBERNETES_SERVICE_ACCOUNT
873
- Kubernetes service account to use when launching pod in Kubernetes.
874
- secrets : List[str], optional, default None
875
- Kubernetes secrets to use when launching pod in Kubernetes. These
876
- secrets are in addition to the ones defined in `METAFLOW_KUBERNETES_SECRETS`
877
- in Metaflow configuration.
878
- node_selector: Union[Dict[str,str], str], optional, default None
879
- Kubernetes node selector(s) to apply to the pod running the task.
880
- Can be passed in as a comma separated string of values e.g.
881
- 'kubernetes.io/os=linux,kubernetes.io/arch=amd64' or as a dictionary
882
- {'kubernetes.io/os': 'linux', 'kubernetes.io/arch': 'amd64'}
883
- namespace : str, default METAFLOW_KUBERNETES_NAMESPACE
884
- Kubernetes namespace to use when launching pod in Kubernetes.
885
- gpu : int, optional, default None
886
- Number of GPUs required for this step. A value of zero implies that
887
- the scheduled node should not have GPUs.
888
- gpu_vendor : str, default KUBERNETES_GPU_VENDOR
889
- The vendor of the GPUs to be used for this step.
890
- tolerations : List[Dict[str,str]], default []
891
- The default is extracted from METAFLOW_KUBERNETES_TOLERATIONS.
892
- Kubernetes tolerations to use when launching pod in Kubernetes.
893
- labels: Dict[str, str], default: METAFLOW_KUBERNETES_LABELS
894
- Kubernetes labels to use when launching pod in Kubernetes.
895
- annotations: Dict[str, str], default: METAFLOW_KUBERNETES_ANNOTATIONS
896
- Kubernetes annotations to use when launching pod in Kubernetes.
897
- use_tmpfs : bool, default False
898
- This enables an explicit tmpfs mount for this step.
899
- tmpfs_tempdir : bool, default True
900
- sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
901
- tmpfs_size : int, optional, default: None
902
- The value for the size (in MiB) of the tmpfs mount for this step.
903
- This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
904
- memory allocated for this step.
905
- tmpfs_path : str, optional, default /metaflow_temp
906
- Path to tmpfs mount for this step.
907
- persistent_volume_claims : Dict[str, str], optional, default None
908
- A map (dictionary) of persistent volumes to be mounted to the pod for this step. The map is from persistent
909
- volumes to the path to which the volume is to be mounted, e.g., `{'pvc-name': '/path/to/mount/on'}`.
910
- shared_memory: int, optional
911
- Shared memory size (in MiB) required for this step
912
- port: int, optional
913
- Port number to specify in the Kubernetes job object
914
- compute_pool : str, optional, default None
915
- Compute pool to be used for for this step.
916
- If not specified, any accessible compute pool within the perimeter is used.
917
- hostname_resolution_timeout: int, default 10 * 60
918
- Timeout in seconds for the workers tasks in the gang scheduled cluster to resolve the hostname of control task.
919
- Only applicable when @parallel is used.
920
- qos: str, default: Burstable
921
- Quality of Service class to assign to the pod. Supported values are: Guaranteed, Burstable, BestEffort
922
-
923
- security_context: Dict[str, Any], optional, default None
924
- Container security context. Applies to the task container. Allows the following keys:
925
- - privileged: bool, optional, default None
926
- - allow_privilege_escalation: bool, optional, default None
927
- - run_as_user: int, optional, default None
928
- - run_as_group: int, optional, default None
929
- - run_as_non_root: bool, optional, default None
817
+ sources : List[Union[str, Dict[str, Any]]], default: []
818
+ List of secret specs, defining how the secrets are to be retrieved
819
+ role : str, optional, default: None
820
+ Role to use for fetching secrets
930
821
  """
931
822
  ...
932
823
 
933
824
  @typing.overload
934
- def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
825
+ def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
826
+ ...
827
+
828
+ @typing.overload
829
+ def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
830
+ ...
831
+
832
+ def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None):
935
833
  """
936
- Specifies the Conda environment for the step.
834
+ Specifies secrets to be retrieved and injected as environment variables prior to
835
+ the execution of a step.
937
836
 
938
- Information in this decorator will augment any
939
- attributes set in the `@conda_base` flow-level decorator. Hence,
940
- you can use `@conda_base` to set packages required by all
941
- steps and use `@conda` to specify step-specific overrides.
837
+
838
+ Parameters
839
+ ----------
840
+ sources : List[Union[str, Dict[str, Any]]], default: []
841
+ List of secret specs, defining how the secrets are to be retrieved
842
+ role : str, optional, default: None
843
+ Role to use for fetching secrets
844
+ """
845
+ ...
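A hedged sketch of `@secrets`; the secret source and the injected environment variable name are placeholders that depend on how the secret is defined in the configured secrets backend:

```python
import os

from metaflow import FlowSpec, secrets, step


class SecretsDemoFlow(FlowSpec):

    # "db-credentials" is a placeholder secret spec; the injected variable
    # names depend on the contents of the secret in the backend
    @secrets(sources=["db-credentials"])
    @step
    def start(self):
        self.user = os.environ.get("DB_USER")
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    SecretsDemoFlow()
```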
846
+
847
+ def s3_proxy(*, integration_name: typing.Optional[str] = None, write_mode: typing.Optional[str] = None, debug: typing.Optional[bool] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
848
+ """
849
+ S3 Proxy decorator for routing S3 requests through a local proxy service.
942
850
 
943
851
 
944
852
  Parameters
945
853
  ----------
946
- packages : Dict[str, str], default {}
947
- Packages to use for this step. The key is the name of the package
948
- and the value is the version to use.
949
- libraries : Dict[str, str], default {}
950
- Supported for backward compatibility. When used with packages, packages will take precedence.
951
- python : str, optional, default None
952
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
953
- that the version used will correspond to the version of the Python interpreter used to start the run.
954
- disabled : bool, default False
955
- If set to True, disables @conda.
854
+ integration_name : str, optional
855
+ Name of the S3 proxy integration. If not specified, will use the only
856
+ available S3 proxy integration in the namespace (fails if multiple exist).
857
+ write_mode : str, optional
858
+ The desired behavior during write operations to target (origin) S3 bucket.
859
+ Allowed options are:
860
+ "origin-and-cache" -> write to both the target S3 bucket and local object
861
+ storage
862
+ "origin" -> only write to the target S3 bucket
863
+ "cache" -> only write to the object storage service used for caching
864
+ debug : bool, optional
865
+ Enable debug logging for proxy operations.
956
866
  """
957
867
  ...
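A hedged sketch of `@s3_proxy`, assuming top-level exposure as in the stub; the integration name and bucket path are placeholders, pairing with `@kubernetes` is an assumption about the typical deployment, and `write_mode` uses one of the documented values:

```python
from metaflow import S3, FlowSpec, kubernetes, s3_proxy, step


class S3ProxyDemoFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.load)

    # integration name and bucket path are placeholders; write_mode is one of
    # "origin-and-cache", "origin", or "cache" as documented above
    @s3_proxy(integration_name="my-s3-proxy", write_mode="origin-and-cache", debug=False)
    @kubernetes
    @step
    def load(self):
        with S3(s3root="s3://my-bucket/datasets/") as s3:
            self.paths = [obj.url for obj in s3.list_paths()]
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    S3ProxyDemoFlow()
```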
958
868
 
959
869
  @typing.overload
960
- def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
870
+ def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
871
+ """
872
+ Internal decorator to support Fast bakery
873
+ """
961
874
  ...
962
875
 
963
876
  @typing.overload
964
- def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
877
+ def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
965
878
  ...
966
879
 
967
- def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
880
+ def fast_bakery_internal(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
968
881
  """
969
- Specifies the Conda environment for the step.
970
-
971
- Information in this decorator will augment any
972
- attributes set in the `@conda_base` flow-level decorator. Hence,
973
- you can use `@conda_base` to set packages required by all
974
- steps and use `@conda` to specify step-specific overrides.
975
-
976
-
977
- Parameters
978
- ----------
979
- packages : Dict[str, str], default {}
980
- Packages to use for this step. The key is the name of the package
981
- and the value is the version to use.
982
- libraries : Dict[str, str], default {}
983
- Supported for backward compatibility. When used with packages, packages will take precedence.
984
- python : str, optional, default None
985
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
986
- that the version used will correspond to the version of the Python interpreter used to start the run.
987
- disabled : bool, default False
988
- If set to True, disables @conda.
882
+ Internal decorator to support Fast bakery
989
883
  """
990
884
  ...
991
885
 
992
886
  @typing.overload
993
- def nebius_s3_proxy(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
887
+ def test_append_card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
994
888
  """
995
- Nebius-specific S3 Proxy decorator for routing S3 requests through a local proxy service.
996
- It exists to make it easier for users to know that this decorator should only be used with
997
- a Neo Cloud like Nebius.
889
+ A simple decorator that demonstrates using CardDecoratorInjector
890
+ to inject a card and render simple markdown content.
998
891
  """
999
892
  ...
1000
893
 
1001
894
  @typing.overload
1002
- def nebius_s3_proxy(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
895
+ def test_append_card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1003
896
  ...
1004
897
 
1005
- def nebius_s3_proxy(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
898
+ def test_append_card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
1006
899
  """
1007
- Nebius-specific S3 Proxy decorator for routing S3 requests through a local proxy service.
1008
- It exists to make it easier for users to know that this decorator should only be used with
1009
- a Neo Cloud like Nebius.
900
+ A simple decorator that demonstrates using CardDecoratorInjector
901
+ to inject a card and render simple markdown content.
1010
902
  """
1011
903
  ...
1012
904
 
@@ -1090,296 +982,463 @@ def huggingface_hub(*, temp_dir_root: typing.Optional[str] = None, load: typing.
1090
982
  """
1091
983
  ...
1092
984
 
1093
- def nvidia(*, gpu: int, gpu_type: str, queue_timeout: int) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1094
- """
1095
- Specifies that this step should execute on DGX cloud.
1096
-
1097
-
1098
- Parameters
1099
- ----------
1100
- gpu : int
1101
- Number of GPUs to use.
1102
- gpu_type : str
1103
- Type of Nvidia GPU to use.
1104
- queue_timeout : int
1105
- Time to keep the job in NVCF's queue.
1106
- """
1107
- ...
1108
-
1109
985
  @typing.overload
1110
- def model(*, load: typing.Union[typing.List[str], str, typing.List[typing.Tuple[str, typing.Optional[str]]]] = None, temp_dir_root: str = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
986
+ def resources(*, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1111
987
  """
1112
- Enables loading / saving of models within a step.
988
+ Specifies the resources needed when executing this step.
1113
989
 
1114
- > Examples
1115
- - Saving Models
1116
- ```python
1117
- @model
1118
- @step
1119
- def train(self):
1120
- # current.model.save returns a dictionary reference to the model saved
1121
- self.my_model = current.model.save(
1122
- path_to_my_model,
1123
- label="my_model",
1124
- metadata={
1125
- "epochs": 10,
1126
- "batch-size": 32,
1127
- "learning-rate": 0.001,
1128
- }
1129
- )
1130
- self.next(self.test)
990
+ Use `@resources` to specify the resource requirements
991
+ independently of the specific compute layer (`@batch`, `@kubernetes`).
1131
992
 
1132
- @model(load="my_model")
1133
- @step
1134
- def test(self):
1135
- # `current.model.loaded` returns a dictionary of the loaded models
1136
- # where the key is the name of the artifact and the value is the path to the model
1137
- print(os.listdir(current.model.loaded["my_model"]))
1138
- self.next(self.end)
993
+ You can choose the compute layer on the command line by executing e.g.
1139
994
  ```
1140
-
1141
- - Loading models
1142
- ```python
1143
- @step
1144
- def train(self):
1145
- # current.model.load returns the path to the model loaded
1146
- checkpoint_path = current.model.load(
1147
- self.checkpoint_key,
1148
- )
1149
- model_path = current.model.load(
1150
- self.model,
1151
- )
1152
- self.next(self.test)
995
+ python myflow.py run --with batch
996
+ ```
997
+ or
998
+ ```
999
+ python myflow.py run --with kubernetes
1153
1000
  ```
1001
+ which executes the flow on the desired system using the
1002
+ requirements specified in `@resources`.
1154
1003
 
1155
1004
 
1156
1005
  Parameters
1157
1006
  ----------
1158
- load : Union[List[str],str,List[Tuple[str,Union[str,None]]]], default: None
1159
- Artifact name/s referencing the models/checkpoints to load. Artifact names refer to the names of the instance variables set to `self`.
1160
- These artifact names give to `load` be reference objects or reference `key` string's from objects created by `current.checkpoint` / `current.model` / `current.huggingface_hub`.
1161
- If a list of tuples is provided, the first element is the artifact name and the second element is the path the artifact needs be unpacked on
1162
- the local filesystem. If the second element is None, the artifact will be unpacked in the current working directory.
1163
- If a string is provided, then the artifact corresponding to that name will be loaded in the current working directory.
1164
-
1165
- temp_dir_root : str, default: None
1166
- The root directory under which `current.model.loaded` will store loaded models
1007
+ cpu : int, default 1
1008
+ Number of CPUs required for this step.
1009
+ gpu : int, optional, default None
1010
+ Number of GPUs required for this step.
1011
+ disk : int, optional, default None
1012
+ Disk size (in MB) required for this step. Only applies on Kubernetes.
1013
+ memory : int, default 4096
1014
+ Memory size (in MB) required for this step.
1015
+ shared_memory : int, optional, default None
1016
+ The value for the size (in MiB) of the /dev/shm volume for this step.
1017
+ This parameter maps to the `--shm-size` option in Docker.
1167
1018
  """
1168
1019
  ...
1169
1020
 
1170
1021
  @typing.overload
1171
- def model(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1022
+ def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1172
1023
  ...
1173
1024
 
1174
1025
  @typing.overload
1175
- def model(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1026
+ def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1176
1027
  ...
1177
1028
 
1178
- def model(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, load: typing.Union[typing.List[str], str, typing.List[typing.Tuple[str, typing.Optional[str]]]] = None, temp_dir_root: str = None):
1029
+ def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
1179
1030
  """
1180
- Enables loading / saving of models within a step.
1031
+ Specifies the resources needed when executing this step.
1181
1032
 
1182
- > Examples
1183
- - Saving Models
1184
- ```python
1185
- @model
1186
- @step
1187
- def train(self):
1188
- # current.model.save returns a dictionary reference to the model saved
1189
- self.my_model = current.model.save(
1190
- path_to_my_model,
1191
- label="my_model",
1192
- metadata={
1193
- "epochs": 10,
1194
- "batch-size": 32,
1195
- "learning-rate": 0.001,
1196
- }
1197
- )
1198
- self.next(self.test)
1033
+ Use `@resources` to specify the resource requirements
1034
+ independently of the specific compute layer (`@batch`, `@kubernetes`).
1199
1035
 
1200
- @model(load="my_model")
1201
- @step
1202
- def test(self):
1203
- # `current.model.loaded` returns a dictionary of the loaded models
1204
- # where the key is the name of the artifact and the value is the path to the model
1205
- print(os.listdir(current.model.loaded["my_model"]))
1206
- self.next(self.end)
1036
+ You can choose the compute layer on the command line by executing e.g.
1207
1037
  ```
1208
-
1209
- - Loading models
1210
- ```python
1211
- @step
1212
- def train(self):
1213
- # current.model.load returns the path to the model loaded
1214
- checkpoint_path = current.model.load(
1215
- self.checkpoint_key,
1216
- )
1217
- model_path = current.model.load(
1218
- self.model,
1219
- )
1220
- self.next(self.test)
1038
+ python myflow.py run --with batch
1039
+ ```
1040
+ or
1221
1041
  ```
1042
+ python myflow.py run --with kubernetes
1043
+ ```
1044
+ which executes the flow on the desired system using the
1045
+ requirements specified in `@resources`.
1222
1046
 
1223
1047
 
1224
1048
  Parameters
1225
1049
  ----------
1226
- load : Union[List[str],str,List[Tuple[str,Union[str,None]]]], default: None
1227
- Artifact name/s referencing the models/checkpoints to load. Artifact names refer to the names of the instance variables set to `self`.
1228
- These artifact names give to `load` be reference objects or reference `key` string's from objects created by `current.checkpoint` / `current.model` / `current.huggingface_hub`.
1229
- If a list of tuples is provided, the first element is the artifact name and the second element is the path the artifact needs be unpacked on
1230
- the local filesystem. If the second element is None, the artifact will be unpacked in the current working directory.
1231
- If a string is provided, then the artifact corresponding to that name will be loaded in the current working directory.
1232
-
1233
- temp_dir_root : str, default: None
1234
- The root directory under which `current.model.loaded` will store loaded models
1050
+ cpu : int, default 1
1051
+ Number of CPUs required for this step.
1052
+ gpu : int, optional, default None
1053
+ Number of GPUs required for this step.
1054
+ disk : int, optional, default None
1055
+ Disk size (in MB) required for this step. Only applies on Kubernetes.
1056
+ memory : int, default 4096
1057
+ Memory size (in MB) required for this step.
1058
+ shared_memory : int, optional, default None
1059
+ The value for the size (in MiB) of the /dev/shm volume for this step.
1060
+ This parameter maps to the `--shm-size` option in Docker.
1235
1061
  """
1236
1062
  ...
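A minimal sketch of `@resources`; the requested amounts are illustrative and the compute layer is chosen at run time as described above:

```python
from metaflow import FlowSpec, resources, step


class ResourcesDemoFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.train)

    # resource request is independent of the compute layer; pick the layer at
    # run time, e.g. `python resources_demo.py run --with kubernetes`
    @resources(cpu=4, memory=16000, gpu=1)
    @step
    def train(self):
        self.done = True
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    ResourcesDemoFlow()
```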
1237
1063
 
1238
1064
  @typing.overload
1239
- def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1065
+ def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1240
1066
  """
1241
- Specifies environment variables to be set prior to the execution of a step.
1067
+ Specifies that the step will succeed under all circumstances.
1068
+
1069
+ The decorator will create an optional artifact, specified by `var`, which
1070
+ contains the exception raised. You can use it to detect the presence
1071
+ of errors, indicating that all happy-path artifacts produced by the step
1072
+ are missing.
1242
1073
 
1243
1074
 
1244
1075
  Parameters
1245
1076
  ----------
1246
- vars : Dict[str, str], default {}
1247
- Dictionary of environment variables to set.
1077
+ var : str, optional, default None
1078
+ Name of the artifact in which to store the caught exception.
1079
+ If not specified, the exception is not stored.
1080
+ print_exception : bool, default True
1081
+ Determines whether or not the exception is printed to
1082
+ stdout when caught.
1248
1083
  """
1249
1084
  ...
1250
1085
 
1251
1086
  @typing.overload
1252
- def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1087
+ def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1253
1088
  ...
1254
1089
 
1255
1090
  @typing.overload
1256
- def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1091
+ def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1257
1092
  ...
1258
1093
 
1259
- def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
1094
+ def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
1260
1095
  """
1261
- Specifies environment variables to be set prior to the execution of a step.
1096
+ Specifies that the step will succeed under all circumstances.
1097
+
1098
+ The decorator will create an optional artifact, specified by `var`, which
1099
+ contains the exception raised. You can use it to detect the presence
1100
+ of errors, indicating that all happy-path artifacts produced by the step
1101
+ are missing.
1262
1102
 
1263
1103
 
1264
1104
  Parameters
1265
1105
  ----------
1266
- vars : Dict[str, str], default {}
1267
- Dictionary of environment variables to set.
1106
+ var : str, optional, default None
1107
+ Name of the artifact in which to store the caught exception.
1108
+ If not specified, the exception is not stored.
1109
+ print_exception : bool, default True
1110
+ Determines whether or not the exception is printed to
1111
+ stdout when caught.
1112
+ """
1113
+ ...
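A minimal sketch of `@catch` together with `@retry`, following the behavior described above; the simulated failure is illustrative:

```python
from metaflow import FlowSpec, catch, retry, step


class CatchDemoFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.flaky)

    # after retries are exhausted, @catch stores the exception in
    # `self.flaky_error` and lets the flow continue instead of failing
    @catch(var="flaky_error")
    @retry(times=2)
    @step
    def flaky(self):
        raise RuntimeError("simulated transient failure")
        self.next(self.end)

    @step
    def end(self):
        if getattr(self, "flaky_error", None) is not None:
            print("step failed with:", self.flaky_error)


if __name__ == "__main__":
    CatchDemoFlow()
```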
1114
+
1115
+ def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = 'KUBERNETES_IMAGE_PULL_POLICY', image_pull_secrets: typing.List[str] = [], service_account: str = 'METAFLOW_KUBERNETES_SERVICE_ACCOUNT', secrets: typing.Optional[typing.List[str]] = None, node_selector: typing.Union[typing.Dict[str, str], str, None] = None, namespace: str = 'METAFLOW_KUBERNETES_NAMESPACE', gpu: typing.Optional[int] = None, gpu_vendor: str = 'KUBERNETES_GPU_VENDOR', tolerations: typing.List[typing.Dict[str, str]] = [], labels: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_LABELS', annotations: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_ANNOTATIONS', use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = '/metaflow_temp', persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None, hostname_resolution_timeout: int = 600, qos: str = 'Burstable', security_context: typing.Optional[typing.Dict[str, typing.Any]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1116
+ """
1117
+ Specifies that this step should execute on Kubernetes.
1118
+
1119
+
1120
+ Parameters
1121
+ ----------
1122
+ cpu : int, default 1
1123
+ Number of CPUs required for this step. If `@resources` is
1124
+ also present, the maximum value from all decorators is used.
1125
+ memory : int, default 4096
1126
+ Memory size (in MB) required for this step. If
1127
+ `@resources` is also present, the maximum value from all decorators is
1128
+ used.
1129
+ disk : int, default 10240
1130
+ Disk size (in MB) required for this step. If
1131
+ `@resources` is also present, the maximum value from all decorators is
1132
+ used.
1133
+ image : str, optional, default None
1134
+ Docker image to use when launching on Kubernetes. If not specified, and
1135
+ METAFLOW_KUBERNETES_CONTAINER_IMAGE is specified, that image is used. If
1136
+ not, a default Docker image mapping to the current version of Python is used.
1137
+ image_pull_policy: str, default KUBERNETES_IMAGE_PULL_POLICY
1138
+ If given, the imagePullPolicy to be applied to the Docker image of the step.
1139
+ image_pull_secrets: List[str], default []
1140
+ The default is extracted from METAFLOW_KUBERNETES_IMAGE_PULL_SECRETS.
1141
+ Kubernetes image pull secrets to use when pulling container images
1142
+ in Kubernetes.
1143
+ service_account : str, default METAFLOW_KUBERNETES_SERVICE_ACCOUNT
1144
+ Kubernetes service account to use when launching pod in Kubernetes.
1145
+ secrets : List[str], optional, default None
1146
+ Kubernetes secrets to use when launching pod in Kubernetes. These
1147
+ secrets are in addition to the ones defined in `METAFLOW_KUBERNETES_SECRETS`
1148
+ in Metaflow configuration.
1149
+ node_selector: Union[Dict[str,str], str], optional, default None
1150
+ Kubernetes node selector(s) to apply to the pod running the task.
1151
+ Can be passed in as a comma separated string of values e.g.
1152
+ 'kubernetes.io/os=linux,kubernetes.io/arch=amd64' or as a dictionary
1153
+ {'kubernetes.io/os': 'linux', 'kubernetes.io/arch': 'amd64'}
1154
+ namespace : str, default METAFLOW_KUBERNETES_NAMESPACE
1155
+ Kubernetes namespace to use when launching pod in Kubernetes.
1156
+ gpu : int, optional, default None
1157
+ Number of GPUs required for this step. A value of zero implies that
1158
+ the scheduled node should not have GPUs.
1159
+ gpu_vendor : str, default KUBERNETES_GPU_VENDOR
1160
+ The vendor of the GPUs to be used for this step.
1161
+ tolerations : List[Dict[str,str]], default []
1162
+ The default is extracted from METAFLOW_KUBERNETES_TOLERATIONS.
1163
+ Kubernetes tolerations to use when launching pod in Kubernetes.
1164
+ labels: Dict[str, str], default: METAFLOW_KUBERNETES_LABELS
1165
+ Kubernetes labels to use when launching pod in Kubernetes.
1166
+ annotations: Dict[str, str], default: METAFLOW_KUBERNETES_ANNOTATIONS
1167
+ Kubernetes annotations to use when launching pod in Kubernetes.
1168
+ use_tmpfs : bool, default False
1169
+ This enables an explicit tmpfs mount for this step.
1170
+ tmpfs_tempdir : bool, default True
1171
+ Sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
1172
+ tmpfs_size : int, optional, default: None
1173
+ The value for the size (in MiB) of the tmpfs mount for this step.
1174
+ This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
1175
+ memory allocated for this step.
1176
+ tmpfs_path : str, optional, default /metaflow_temp
1177
+ Path to tmpfs mount for this step.
1178
+ persistent_volume_claims : Dict[str, str], optional, default None
1179
+ A map (dictionary) of persistent volumes to be mounted to the pod for this step. The map is from persistent
1180
+ volumes to the path to which the volume is to be mounted, e.g., `{'pvc-name': '/path/to/mount/on'}`.
1181
+ shared_memory: int, optional
1182
+ Shared memory size (in MiB) required for this step
1183
+ port: int, optional
1184
+ Port number to specify in the Kubernetes job object
1185
+ compute_pool : str, optional, default None
1186
+ Compute pool to be used for this step.
1187
+ If not specified, any accessible compute pool within the perimeter is used.
1188
+ hostname_resolution_timeout: int, default 10 * 60
1189
+ Timeout in seconds for the worker tasks in the gang scheduled cluster to resolve the hostname of the control task.
1190
+ Only applicable when @parallel is used.
1191
+ qos: str, default: Burstable
1192
+ Quality of Service class to assign to the pod. Supported values are: Guaranteed, Burstable, BestEffort
1193
+
1194
+ security_context: Dict[str, Any], optional, default None
1195
+ Container security context. Applies to the task container. Allows the following keys:
1196
+ - privileged: bool, optional, default None
1197
+ - allow_privilege_escalation: bool, optional, default None
1198
+ - run_as_user: int, optional, default None
1199
+ - run_as_group: int, optional, default None
1200
+ - run_as_non_root: bool, optional, default None
1268
1201
  """
1269
1202
  ...
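A hedged sketch of `@kubernetes`; the image and node selector are placeholders, and unspecified arguments fall back to the METAFLOW_KUBERNETES_* configuration defaults noted above:

```python
from metaflow import FlowSpec, kubernetes, step


class K8sDemoFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.train)

    # image and node selector are placeholders; everything not set here falls
    # back to the METAFLOW_KUBERNETES_* defaults described above
    @kubernetes(cpu=2, memory=8192, disk=20480,
                image="python:3.11-slim",
                node_selector={"kubernetes.io/arch": "amd64"})
    @step
    def train(self):
        self.done = True
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    K8sDemoFlow()
```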
1270
1203
 
1271
1204
  @typing.overload
1272
- def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1205
+ def checkpoint(*, load_policy: str = 'fresh', temp_dir_root: str = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1273
1206
  """
1274
- Specifies a timeout for your step.
1207
+ Enables checkpointing for a step.
1275
1208
 
1276
- This decorator is useful if this step may hang indefinitely.
1209
+ > Examples
1277
1210
 
1278
- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
1279
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
1280
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
1211
+ - Saving Checkpoints
1281
1212
 
1282
- Note that all the values specified in parameters are added together so if you specify
1283
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
1213
+ ```python
1214
+ @checkpoint
1215
+ @step
1216
+ def train(self):
1217
+ model = create_model(self.parameters, checkpoint_path = None)
1218
+ for i in range(self.epochs):
1219
+ # some training logic
1220
+ loss = model.train(self.dataset)
1221
+ if i % 10 == 0:
1222
+ model.save(
1223
+ current.checkpoint.directory,
1224
+ )
1225
+ # saves the contents of the `current.checkpoint.directory` as a checkpoint
1226
+ # and returns a reference dictionary to the checkpoint saved in the datastore
1227
+ self.latest_checkpoint = current.checkpoint.save(
1228
+ name="epoch_checkpoint",
1229
+ metadata={
1230
+ "epoch": i,
1231
+ "loss": loss,
1232
+ }
1233
+ )
1234
+ ```
1235
+
1236
+ - Using Loaded Checkpoints
1237
+
1238
+ ```python
1239
+ @retry(times=3)
1240
+ @checkpoint
1241
+ @step
1242
+ def train(self):
1243
+ # Assume that the task has restarted and the previous attempt of the task
1244
+ # saved a checkpoint
1245
+ checkpoint_path = None
1246
+ if current.checkpoint.is_loaded: # Check if a checkpoint is loaded
1247
+ print("Loaded checkpoint from the previous attempt")
1248
+ checkpoint_path = current.checkpoint.directory
1249
+
1250
+ model = create_model(self.parameters, checkpoint_path = checkpoint_path)
1251
+ for i in range(self.epochs):
1252
+ ...
1253
+ ```
1284
1254
 
1285
1255
 
1286
1256
  Parameters
1287
1257
  ----------
1288
- seconds : int, default 0
1289
- Number of seconds to wait prior to timing out.
1290
- minutes : int, default 0
1291
- Number of minutes to wait prior to timing out.
1292
- hours : int, default 0
1293
- Number of hours to wait prior to timing out.
1258
+ load_policy : str, default: "fresh"
1259
+ The policy for loading the checkpoint. The following policies are supported:
1260
+ - "eager": Loads the latest available checkpoint within the namespace.
1261
+ With this mode, the latest checkpoint written by any previous task of the step (even from a different run)
1262
+ will be loaded at the start of the task.
1263
+ - "none": Do not load any checkpoint
1264
+ - "fresh": Loads the latest checkpoint created within the running Task.
1265
+ This mode helps load checkpoints across retry attempts of the same task.
1266
+ With this mode, no checkpoint will be loaded at the start of a task but any checkpoints
1267
+ created within the task will be loaded when the task is retries execution on failure.
1268
+
1269
+ temp_dir_root : str, default: None
1270
+ The root directory under which `current.checkpoint.directory` will be created.
1294
1271
  """
1295
1272
  ...
1296
1273
 
1297
1274
  @typing.overload
1298
- def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1275
+ def checkpoint(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1299
1276
  ...
1300
1277
 
1301
1278
  @typing.overload
1302
- def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1279
+ def checkpoint(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1303
1280
  ...
1304
1281
 
1305
- def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
1282
+ def checkpoint(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, load_policy: str = 'fresh', temp_dir_root: str = None):
1306
1283
  """
1307
- Specifies a timeout for your step.
1284
+ Enables checkpointing for a step.
1308
1285
 
1309
- This decorator is useful if this step may hang indefinitely.
1286
+ > Examples
1310
1287
 
1311
- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
1312
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
1313
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
1288
+ - Saving Checkpoints
1314
1289
 
1315
- Note that all the values specified in parameters are added together so if you specify
1316
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
1290
+ ```python
1291
+ @checkpoint
1292
+ @step
1293
+ def train(self):
1294
+ model = create_model(self.parameters, checkpoint_path = None)
1295
+ for i in range(self.epochs):
1296
+ # some training logic
1297
+ loss = model.train(self.dataset)
1298
+ if i % 10 == 0:
1299
+ model.save(
1300
+ current.checkpoint.directory,
1301
+ )
1302
+ # saves the contents of the `current.checkpoint.directory` as a checkpoint
1303
+ # and returns a reference dictionary to the checkpoint saved in the datastore
1304
+ self.latest_checkpoint = current.checkpoint.save(
1305
+ name="epoch_checkpoint",
1306
+ metadata={
1307
+ "epoch": i,
1308
+ "loss": loss,
1309
+ }
1310
+ )
1311
+ ```
1312
+
1313
+ - Using Loaded Checkpoints
1314
+
1315
+ ```python
1316
+ @retry(times=3)
1317
+ @checkpoint
1318
+ @step
1319
+ def train(self):
1320
+ # Assume that the task has restarted and the previous attempt of the task
1321
+ # saved a checkpoint
1322
+ checkpoint_path = None
1323
+ if current.checkpoint.is_loaded: # Check if a checkpoint is loaded
1324
+ print("Loaded checkpoint from the previous attempt")
1325
+ checkpoint_path = current.checkpoint.directory
1326
+
1327
+ model = create_model(self.parameters, checkpoint_path = checkpoint_path)
1328
+ for i in range(self.epochs):
1329
+ ...
1330
+ ```
1317
1331
 
1318
1332
 
1319
1333
  Parameters
1320
1334
  ----------
1321
- seconds : int, default 0
1322
- Number of seconds to wait prior to timing out.
1323
- minutes : int, default 0
1324
- Number of minutes to wait prior to timing out.
1325
- hours : int, default 0
1326
- Number of hours to wait prior to timing out.
1335
+ load_policy : str, default: "fresh"
1336
+ The policy for loading the checkpoint. The following policies are supported:
1337
+ - "eager": Loads the the latest available checkpoint within the namespace.
1338
+ With this mode, the latest checkpoint written by any previous task (can be even a different run) of the step
1339
+ will be loaded at the start of the task.
1340
+ - "none": Do not load any checkpoint
1341
+ - "fresh": Loads the lastest checkpoint created within the running Task.
1342
+ This mode helps loading checkpoints across various retry attempts of the same task.
1343
+ With this mode, no checkpoint will be loaded at the start of a task but any checkpoints
1344
+ created within the task will be loaded when the task is retries execution on failure.
1345
+
1346
+ temp_dir_root : str, default: None
1347
+ The root directory under which `current.checkpoint.directory` will be created.
1327
1348
  """
1328
1349
  ...
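A minimal flow-level sketch (not part of the published stub) of the `@checkpoint` decorator documented above, paired with `@retry` so the `"fresh"` load policy has failed attempts to resume from. It assumes `checkpoint` is importable from the top-level `metaflow` package, as this stub suggests; `ResumableTrainFlow`, `save_state`, and `load_state` are hypothetical names.

```python
from metaflow import FlowSpec, step, current, retry, checkpoint

def save_state(directory, epoch):
    # hypothetical helper: write minimal training state into the checkpoint directory
    with open(f"{directory}/state.txt", "w") as f:
        f.write(str(epoch))

def load_state(directory):
    # hypothetical helper: read the state written by a previous attempt
    with open(f"{directory}/state.txt") as f:
        return int(f.read())

class ResumableTrainFlow(FlowSpec):  # hypothetical flow

    @retry(times=2)
    @checkpoint(load_policy="fresh")  # only reload checkpoints written by earlier attempts of this task
    @step
    def start(self):
        first_epoch = load_state(current.checkpoint.directory) + 1 if current.checkpoint.is_loaded else 0
        for epoch in range(first_epoch, 5):
            save_state(current.checkpoint.directory, epoch)
            current.checkpoint.save(name="epoch_checkpoint", metadata={"epoch": epoch})
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    ResumableTrainFlow()
```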

  @typing.overload
- def coreweave_s3_proxy(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
- CoreWeave-specific S3 Proxy decorator for routing S3 requests through a local proxy service.
- It exists to make it easier for users to know that this decorator should only be used with
- a Neo Cloud like CoreWeave.
+ Specifies the Conda environment for all steps of the flow.
+
+ Use `@conda_base` to set common libraries required by all
+ steps and use `@conda` to specify step-specific additions.
+
+
+ Parameters
+ ----------
+ packages : Dict[str, str], default {}
+ Packages to use for this flow. The key is the name of the package
+ and the value is the version to use.
+ libraries : Dict[str, str], default {}
+ Supported for backward compatibility. When used with packages, packages will take precedence.
+ python : str, optional, default None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ disabled : bool, default False
+ If set to True, disables Conda.
  """
  ...

  @typing.overload
- def coreweave_s3_proxy(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
  ...

- def coreweave_s3_proxy(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
+ def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
  """
- CoreWeave-specific S3 Proxy decorator for routing S3 requests through a local proxy service.
- It exists to make it easier for users to know that this decorator should only be used with
- a Neo Cloud like CoreWeave.
+ Specifies the Conda environment for all steps of the flow.
+
+ Use `@conda_base` to set common libraries required by all
+ steps and use `@conda` to specify step-specific additions.
+
+
+ Parameters
+ ----------
+ packages : Dict[str, str], default {}
+ Packages to use for this flow. The key is the name of the package
+ and the value is the version to use.
+ libraries : Dict[str, str], default {}
+ Supported for backward compatibility. When used with packages, packages will take precedence.
+ python : str, optional, default None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ disabled : bool, default False
+ If set to True, disables Conda.
  """
  ...
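For comparison with the docstring above, a short sketch (not part of the published stub) of `@conda_base` combined with a step-level `@conda` addition; the flow name and the pinned versions are illustrative only.

```python
from metaflow import FlowSpec, step, conda, conda_base

@conda_base(python="3.10.4", packages={"numpy": "1.26.4"})  # shared by all steps (illustrative versions)
class CondaBaseFlow(FlowSpec):  # hypothetical flow name

    @step
    def start(self):
        import numpy as np
        self.total = float(np.arange(10).sum())
        self.next(self.end)

    @conda(packages={"pandas": "2.2.2"})  # step-specific addition (illustrative version)
    @step
    def end(self):
        import pandas as pd
        print(pd.DataFrame({"total": [self.total]}))

if __name__ == "__main__":
    CondaBaseFlow()
```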

- def project(*, name: str, branch: typing.Optional[str] = None, production: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
- Specifies what flows belong to the same project.
-
- A project-specific namespace is created for all flows that
- use the same `@project(name)`.
+ The `@airflow_external_task_sensor` decorator attaches an Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
+ This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as flow decorators. Adding more than one decorator will ensure that the `start` step starts only after all sensors finish.


  Parameters
  ----------
+ timeout : int
+ Time, in seconds, before the task times out and fails. (Default: 3600)
+ poke_interval : int
+ Time in seconds that the job should wait in between each try. (Default: 60)
+ mode : str
+ How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
+ exponential_backoff : bool
+ Allow progressively longer waits between pokes by using an exponential backoff algorithm. (Default: True)
+ pool : str
+ The slot pool this task should run in;
+ slot pools are a way to limit concurrency for certain tasks. (Default: None)
+ soft_fail : bool
+ Set to True to mark the task as SKIPPED on failure. (Default: False)
  name : str
- Project name. Make sure that the name is unique amongst all
- projects that use the same production scheduler. The name may
- contain only lowercase alphanumeric characters and underscores.
-
- branch : Optional[str], default None
- The branch to use. If not specified, the branch is set to
- `user.<username>` unless `production` is set to `True`. This can
- also be set on the command line using `--branch` as a top-level option.
- It is an error to specify `branch` in the decorator and on the command line.
-
- production : bool, default False
- Whether or not the branch is the production branch. This can also be set on the
- command line using `--production` as a top-level option. It is an error to specify
- `production` in the decorator and on the command line.
- The project branch name will be:
- - if `branch` is specified:
- - if `production` is True: `prod.<branch>`
- - if `production` is False: `test.<branch>`
- - if `branch` is not specified:
- - if `production` is True: `prod`
- - if `production` is False: `user.<username>`
+ Name of the sensor on Airflow
+ description : str
+ Description of sensor in the Airflow UI
+ external_dag_id : str
+ The dag_id that contains the task you want to wait for.
+ external_task_ids : List[str]
+ The list of task_ids that you want to wait for.
+ If None (default value) the sensor waits for the DAG. (Default: None)
+ allowed_states : List[str]
+ Iterable of allowed states. (Default: ['success'])
+ failed_states : List[str]
+ Iterable of failed or dis-allowed states. (Default: None)
+ execution_delta : datetime.timedelta
+ Time difference with the previous execution to look at;
+ the default is the same logical date as the current task or DAG. (Default: None)
+ check_existence: bool
+ Set to True to check if the external task exists or check if
+ the DAG to wait for exists. (Default: True)
  """
  ...
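A hedged usage sketch (not part of the published stub) of the sensor documented above. It assumes the documented defaults cover the omitted parameters; the upstream DAG id, task id, and flow name are placeholders, and the decorator only takes effect once the flow is compiled with `airflow create`.

```python
from metaflow import FlowSpec, step, airflow_external_task_sensor

# Wait for a task in another Airflow DAG before `start` runs (ids are hypothetical).
@airflow_external_task_sensor(
    name="wait_for_upstream_etl",
    external_dag_id="nightly_etl",
    external_task_ids=["publish_partition"],
    timeout=3600,
    poke_interval=60,
    mode="reschedule",
)
class SensorGatedFlow(FlowSpec):  # hypothetical flow name

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    SensorGatedFlow()
```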

@@ -1434,10 +1493,13 @@ def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly:
  """
  ...

- def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
- The `@airflow_external_task_sensor` decorator attaches a Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
- This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as a flow decorators. Adding more than one decorator will ensure that `start` step starts only after all sensors finish.
+ The `@airflow_s3_key_sensor` decorator attaches an Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
+ before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
+ and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
+ added as flow decorators. Adding more than one decorator will ensure that the `start` step
+ starts only after all sensors finish.


  Parameters
@@ -1459,21 +1521,59 @@ def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str,
  Name of the sensor on Airflow
  description : str
  Description of sensor in the Airflow UI
- external_dag_id : str
- The dag_id that contains the task you want to wait for.
- external_task_ids : List[str]
- The list of task_ids that you want to wait for.
- If None (default value) the sensor waits for the DAG. (Default: None)
- allowed_states : List[str]
- Iterable of allowed states, (Default: ['success'])
- failed_states : List[str]
- Iterable of failed or dis-allowed states. (Default: None)
- execution_delta : datetime.timedelta
- time difference with the previous execution to look at,
- the default is the same logical date as the current task or DAG. (Default: None)
- check_existence: bool
- Set to True to check if the external task exists or check if
- the DAG to wait for exists. (Default: True)
+ bucket_key : Union[str, List[str]]
+ The key(s) being waited on. Supports full s3:// style url or relative path from root level.
+ When it's specified as a full s3:// url, please leave `bucket_name` as None
+ bucket_name : str
+ Name of the S3 bucket. Only needed when bucket_key is not provided as a full s3:// url.
+ When specified, all the keys passed to bucket_key refer to this bucket. (Default: None)
+ wildcard_match : bool
+ Whether the bucket_key should be interpreted as a Unix wildcard pattern. (Default: False)
+ aws_conn_id : str
+ A reference to the S3 connection on Airflow. (Default: None)
+ verify : bool
+ Whether or not to verify SSL certificates for S3 connection. (Default: None)
+ """
+ ...
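A hedged sketch (not part of the published stub) of gating a flow on an S3 object with the sensor above; the bucket, key, and flow name are placeholders, `bucket_name` is left unset because the key is given in full s3:// form, and the documented defaults are assumed for the omitted parameters.

```python
from metaflow import FlowSpec, step, airflow_s3_key_sensor

# Block `start` until a marker object lands in S3 (bucket and key are hypothetical).
@airflow_s3_key_sensor(
    name="wait_for_daily_export",
    bucket_key="s3://example-bucket/exports/_SUCCESS",
    timeout=7200,
    poke_interval=300,
    mode="reschedule",
)
class S3GatedFlow(FlowSpec):  # hypothetical flow name

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    S3GatedFlow()
```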
+
+ @typing.overload
+ def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ """
+ Specifies the PyPI packages for all steps of the flow.
+
+ Use `@pypi_base` to set common packages required by all
+ steps and use `@pypi` to specify step-specific overrides.
+
+ Parameters
+ ----------
+ packages : Dict[str, str], default: {}
+ Packages to use for this flow. The key is the name of the package
+ and the value is the version to use.
+ python : str, optional, default: None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ """
+ ...
+
+ @typing.overload
+ def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ ...
+
+ def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
+ """
+ Specifies the PyPI packages for all steps of the flow.
+
+ Use `@pypi_base` to set common packages required by all
+ steps and use `@pypi` to specify step-specific overrides.
+
+ Parameters
+ ----------
+ packages : Dict[str, str], default: {}
+ Packages to use for this flow. The key is the name of the package
+ and the value is the version to use.
+ python : str, optional, default: None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
  """
  ...
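A short sketch (not part of the published stub) of `@pypi_base` with a step-level `@pypi` addition; the flow name and pinned versions are illustrative only.

```python
from metaflow import FlowSpec, step, pypi, pypi_base

@pypi_base(python="3.11.5", packages={"requests": "2.32.3"})  # shared by all steps (illustrative versions)
class PypiBaseFlow(FlowSpec):  # hypothetical flow name

    @step
    def start(self):
        import requests
        self.requests_version = requests.__version__
        self.next(self.end)

    @pypi(packages={"rich": "13.7.1"})  # step-specific addition (illustrative version)
    @step
    def end(self):
        from rich import print as rprint
        rprint({"requests": self.requests_version})

if __name__ == "__main__":
    PypiBaseFlow()
```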

@@ -1571,94 +1671,138 @@ def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: t
  ...

  @typing.overload
- def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ def trigger_on_finish(*, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
- Specifies the Conda environment for all steps of the flow.
+ Specifies the flow(s) that this flow depends on.

- Use `@conda_base` to set common libraries required by all
- steps and use `@conda` to specify step-specific additions.
+ ```
+ @trigger_on_finish(flow='FooFlow')
+ ```
+ or
+ ```
+ @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
+ ```
+ This decorator respects the @project decorator and triggers the flow
+ when upstream runs within the same namespace complete successfully.
+
+ Additionally, you can specify project-aware upstream flow dependencies
+ by specifying the fully qualified project_flow_name.
+ ```
+ @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
+ ```
+ or
+ ```
+ @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
+ ```
+
+ You can also specify just the project or project branch (other values will be
+ inferred from the current project or project branch):
+ ```
+ @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
+ ```
+
+ Note that `branch` is typically one of:
+ - `prod`
+ - `user.bob`
+ - `test.my_experiment`
+ - `prod.staging`


  Parameters
  ----------
- packages : Dict[str, str], default {}
- Packages to use for this flow. The key is the name of the package
- and the value is the version to use.
- libraries : Dict[str, str], default {}
- Supported for backward compatibility. When used with packages, packages will take precedence.
- python : str, optional, default None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
- disabled : bool, default False
- If set to True, disables Conda.
+ flow : Union[str, Dict[str, str]], optional, default None
+ Upstream flow dependency for this flow.
+ flows : List[Union[str, Dict[str, str]]], default []
+ Upstream flow dependencies for this flow.
+ options : Dict[str, Any], default {}
+ Backend-specific configuration for tuning eventing behavior.
  """
  ...

  @typing.overload
- def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
  ...

- def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
+ def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
  """
- Specifies the Conda environment for all steps of the flow.
+ Specifies the flow(s) that this flow depends on.

- Use `@conda_base` to set common libraries required by all
- steps and use `@conda` to specify step-specific additions.
+ ```
+ @trigger_on_finish(flow='FooFlow')
+ ```
+ or
+ ```
+ @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
+ ```
+ This decorator respects the @project decorator and triggers the flow
+ when upstream runs within the same namespace complete successfully.

+ Additionally, you can specify project-aware upstream flow dependencies
+ by specifying the fully qualified project_flow_name.
+ ```
+ @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
+ ```
+ or
+ ```
+ @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
+ ```

- Parameters
- ----------
- packages : Dict[str, str], default {}
- Packages to use for this flow. The key is the name of the package
- and the value is the version to use.
- libraries : Dict[str, str], default {}
- Supported for backward compatibility. When used with packages, packages will take precedence.
- python : str, optional, default None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
- disabled : bool, default False
- If set to True, disables Conda.
- """
- ...
-
- @typing.overload
- def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
- """
- Specifies the PyPI packages for all steps of the flow.
+ You can also specify just the project or project branch (other values will be
+ inferred from the current project or project branch):
+ ```
+ @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
+ ```
+
+ Note that `branch` is typically one of:
+ - `prod`
+ - `user.bob`
+ - `test.my_experiment`
+ - `prod.staging`

- Use `@pypi_base` to set common packages required by all
- steps and use `@pypi` to specify step-specific overrides.

  Parameters
  ----------
- packages : Dict[str, str], default: {}
- Packages to use for this flow. The key is the name of the package
- and the value is the version to use.
- python : str, optional, default: None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
+ flow : Union[str, Dict[str, str]], optional, default None
+ Upstream flow dependency for this flow.
+ flows : List[Union[str, Dict[str, str]]], default []
+ Upstream flow dependencies for this flow.
+ options : Dict[str, Any], default {}
+ Backend-specific configuration for tuning eventing behavior.
  """
  ...
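A minimal sketch (not part of the published stub) of the `@trigger_on_finish` decorator documented above; `FooFlow` and `DownstreamFlow` are hypothetical flow names, and the trigger only fires once both flows are deployed to a production orchestrator such as Argo Workflows.

```python
from metaflow import FlowSpec, step, trigger_on_finish

# Run this flow whenever a run of the (hypothetical) upstream FooFlow finishes successfully.
@trigger_on_finish(flow="FooFlow")
class DownstreamFlow(FlowSpec):  # hypothetical flow name

    @step
    def start(self):
        print("Triggered by an upstream run of FooFlow")
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    DownstreamFlow()
```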

- @typing.overload
- def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
- ...
-
- def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
+ def project(*, name: str, branch: typing.Optional[str] = None, production: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
- Specifies the PyPI packages for all steps of the flow.
+ Specifies what flows belong to the same project.
+
+ A project-specific namespace is created for all flows that
+ use the same `@project(name)`.

- Use `@pypi_base` to set common packages required by all
- steps and use `@pypi` to specify step-specific overrides.

  Parameters
  ----------
- packages : Dict[str, str], default: {}
- Packages to use for this flow. The key is the name of the package
- and the value is the version to use.
- python : str, optional, default: None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
+ name : str
+ Project name. Make sure that the name is unique amongst all
+ projects that use the same production scheduler. The name may
+ contain only lowercase alphanumeric characters and underscores.
+
+ branch : Optional[str], default None
+ The branch to use. If not specified, the branch is set to
+ `user.<username>` unless `production` is set to `True`. This can
+ also be set on the command line using `--branch` as a top-level option.
+ It is an error to specify `branch` in the decorator and on the command line.
+
+ production : bool, default False
+ Whether or not the branch is the production branch. This can also be set on the
+ command line using `--production` as a top-level option. It is an error to specify
+ `production` in the decorator and on the command line.
+ The project branch name will be:
+ - if `branch` is specified:
+ - if `production` is True: `prod.<branch>`
+ - if `production` is False: `test.<branch>`
+ - if `branch` is not specified:
+ - if `production` is True: `prod`
+ - if `production` is False: `user.<username>`
  """
  ...
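A minimal sketch (not part of the published stub) of the `@project` decorator documented above; the project name and flow name are placeholders. Any other flow decorated with the same project name shares the branch-aware namespace described in the docstring.

```python
from metaflow import FlowSpec, step, project

# All flows sharing @project(name="demo_project") run in an isolated, branch-aware namespace.
@project(name="demo_project")  # hypothetical project name
class ProjectScopedFlow(FlowSpec):  # hypothetical flow name

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    ProjectScopedFlow()
```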

@@ -1776,149 +1920,5 @@ def with_artifact_store(f: typing.Optional[typing.Type[FlowSpecDerived]] = None)
  """
  ...

- @typing.overload
- def trigger_on_finish(*, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
- """
- Specifies the flow(s) that this flow depends on.
-
- ```
- @trigger_on_finish(flow='FooFlow')
- ```
- or
- ```
- @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
- ```
- This decorator respects the @project decorator and triggers the flow
- when upstream runs within the same namespace complete successfully
-
- Additionally, you can specify project aware upstream flow dependencies
- by specifying the fully qualified project_flow_name.
- ```
- @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
- ```
- or
- ```
- @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
- ```
-
- You can also specify just the project or project branch (other values will be
- inferred from the current project or project branch):
- ```
- @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
- ```
-
- Note that `branch` is typically one of:
- - `prod`
- - `user.bob`
- - `test.my_experiment`
- - `prod.staging`
-
-
- Parameters
- ----------
- flow : Union[str, Dict[str, str]], optional, default None
- Upstream flow dependency for this flow.
- flows : List[Union[str, Dict[str, str]]], default []
- Upstream flow dependencies for this flow.
- options : Dict[str, Any], default {}
- Backend-specific configuration for tuning eventing behavior.
- """
- ...
-
- @typing.overload
- def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
- ...
-
- def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
- """
- Specifies the flow(s) that this flow depends on.
-
- ```
- @trigger_on_finish(flow='FooFlow')
- ```
- or
- ```
- @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
- ```
- This decorator respects the @project decorator and triggers the flow
- when upstream runs within the same namespace complete successfully
-
- Additionally, you can specify project aware upstream flow dependencies
- by specifying the fully qualified project_flow_name.
- ```
- @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
- ```
- or
- ```
- @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
- ```
-
- You can also specify just the project or project branch (other values will be
- inferred from the current project or project branch):
- ```
- @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
- ```
-
- Note that `branch` is typically one of:
- - `prod`
- - `user.bob`
- - `test.my_experiment`
- - `prod.staging`
-
-
- Parameters
- ----------
- flow : Union[str, Dict[str, str]], optional, default None
- Upstream flow dependency for this flow.
- flows : List[Union[str, Dict[str, str]]], default []
- Upstream flow dependencies for this flow.
- options : Dict[str, Any], default {}
- Backend-specific configuration for tuning eventing behavior.
- """
- ...
-
- def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
- """
- The `@airflow_s3_key_sensor` decorator attaches a Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
- before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
- and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
- added as a flow decorators. Adding more than one decorator will ensure that `start` step
- starts only after all sensors finish.
-
-
- Parameters
- ----------
- timeout : int
- Time, in seconds before the task times out and fails. (Default: 3600)
- poke_interval : int
- Time in seconds that the job should wait in between each try. (Default: 60)
- mode : str
- How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
- exponential_backoff : bool
- allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
- pool : str
- the slot pool this task should run in,
- slot pools are a way to limit concurrency for certain tasks. (Default:None)
- soft_fail : bool
- Set to true to mark the task as SKIPPED on failure. (Default: False)
- name : str
- Name of the sensor on Airflow
- description : str
- Description of sensor in the Airflow UI
- bucket_key : Union[str, List[str]]
- The key(s) being waited on. Supports full s3:// style url or relative path from root level.
- When it's specified as a full s3:// url, please leave `bucket_name` as None
- bucket_name : str
- Name of the S3 bucket. Only needed when bucket_key is not provided as a full s3:// url.
- When specified, all the keys passed to bucket_key refers to this bucket. (Default:None)
- wildcard_match : bool
- whether the bucket_key should be interpreted as a Unix wildcard pattern. (Default: False)
- aws_conn_id : str
- a reference to the s3 connection on Airflow. (Default: None)
- verify : bool
- Whether or not to verify SSL certificates for S3 connection. (Default: None)
- """
- ...
-
  pkg_name: str