ob-metaflow-stubs 6.0.9.3__py2.py3-none-any.whl → 6.0.9.4__py2.py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of ob-metaflow-stubs might be problematic. Click here for more details.

Files changed (262) hide show
  1. metaflow-stubs/__init__.pyi +967 -967
  2. metaflow-stubs/cards.pyi +2 -2
  3. metaflow-stubs/cli.pyi +2 -2
  4. metaflow-stubs/cli_components/__init__.pyi +2 -2
  5. metaflow-stubs/cli_components/utils.pyi +2 -2
  6. metaflow-stubs/client/__init__.pyi +2 -2
  7. metaflow-stubs/client/core.pyi +5 -5
  8. metaflow-stubs/client/filecache.pyi +3 -3
  9. metaflow-stubs/events.pyi +2 -2
  10. metaflow-stubs/exception.pyi +2 -2
  11. metaflow-stubs/flowspec.pyi +4 -4
  12. metaflow-stubs/generated_for.txt +1 -1
  13. metaflow-stubs/includefile.pyi +3 -3
  14. metaflow-stubs/meta_files.pyi +2 -2
  15. metaflow-stubs/metadata_provider/__init__.pyi +2 -2
  16. metaflow-stubs/metadata_provider/heartbeat.pyi +2 -2
  17. metaflow-stubs/metadata_provider/metadata.pyi +2 -2
  18. metaflow-stubs/metadata_provider/util.pyi +2 -2
  19. metaflow-stubs/metaflow_config.pyi +2 -2
  20. metaflow-stubs/metaflow_current.pyi +36 -36
  21. metaflow-stubs/metaflow_git.pyi +2 -2
  22. metaflow-stubs/mf_extensions/__init__.pyi +2 -2
  23. metaflow-stubs/mf_extensions/obcheckpoint/__init__.pyi +2 -2
  24. metaflow-stubs/mf_extensions/obcheckpoint/plugins/__init__.pyi +2 -2
  25. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/__init__.pyi +2 -2
  26. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/__init__.pyi +2 -2
  27. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/async_cards.pyi +3 -3
  28. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/deco_injection_mixin.pyi +2 -2
  29. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/extra_components.pyi +3 -3
  30. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/__init__.pyi +2 -2
  31. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/cards/__init__.pyi +2 -2
  32. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/cards/checkpoint_lister.pyi +3 -3
  33. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/cards/lineage_card.pyi +2 -2
  34. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/checkpoint_storage.pyi +4 -4
  35. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/constructors.pyi +2 -2
  36. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/core.pyi +4 -4
  37. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/decorator.pyi +4 -4
  38. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/exceptions.pyi +2 -2
  39. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/final_api.pyi +3 -3
  40. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/lineage.pyi +2 -2
  41. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/__init__.pyi +2 -2
  42. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/context.pyi +3 -3
  43. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/core.pyi +2 -2
  44. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/decorator.pyi +2 -2
  45. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/exceptions.pyi +2 -2
  46. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/task_utils.pyi +3 -3
  47. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/utils.pyi +2 -2
  48. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastructures.pyi +2 -2
  49. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/exceptions.pyi +2 -2
  50. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/hf_hub/__init__.pyi +2 -2
  51. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/hf_hub/decorator.pyi +2 -2
  52. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/__init__.pyi +2 -2
  53. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/core.pyi +3 -3
  54. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/exceptions.pyi +2 -2
  55. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/model_storage.pyi +3 -3
  56. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/__init__.pyi +2 -2
  57. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/flowspec_utils.pyi +2 -2
  58. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/general.pyi +2 -2
  59. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/identity_utils.pyi +3 -3
  60. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/serialization_handler/__init__.pyi +2 -2
  61. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/serialization_handler/base.pyi +2 -2
  62. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/serialization_handler/tar.pyi +2 -2
  63. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/tar_utils.pyi +2 -2
  64. metaflow-stubs/mf_extensions/outerbounds/__init__.pyi +2 -2
  65. metaflow-stubs/mf_extensions/outerbounds/plugins/__init__.pyi +2 -2
  66. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/__init__.pyi +2 -2
  67. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/__init__.pyi +2 -2
  68. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/_state_machine.pyi +2 -2
  69. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/_vendor/__init__.pyi +2 -2
  70. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/_vendor/spinner/__init__.pyi +2 -2
  71. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/_vendor/spinner/spinners.pyi +2 -2
  72. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/app_cli.pyi +3 -3
  73. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/app_config.pyi +3 -3
  74. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/capsule.pyi +3 -3
  75. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/click_importer.pyi +2 -2
  76. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/code_package/__init__.pyi +2 -2
  77. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/code_package/code_packager.pyi +2 -2
  78. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/__init__.pyi +2 -2
  79. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/cli_generator.pyi +2 -2
  80. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/config_utils.pyi +3 -3
  81. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/schema_export.pyi +2 -2
  82. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/typed_configs.pyi +4 -4
  83. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/unified_config.pyi +2 -2
  84. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/dependencies.pyi +4 -4
  85. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/deployer.pyi +5 -5
  86. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/experimental/__init__.pyi +2 -2
  87. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/perimeters.pyi +2 -2
  88. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/utils.pyi +3 -3
  89. metaflow-stubs/mf_extensions/outerbounds/plugins/aws/__init__.pyi +2 -2
  90. metaflow-stubs/mf_extensions/outerbounds/plugins/aws/assume_role_decorator.pyi +3 -3
  91. metaflow-stubs/mf_extensions/outerbounds/plugins/card_utilities/__init__.pyi +2 -2
  92. metaflow-stubs/mf_extensions/outerbounds/plugins/card_utilities/async_cards.pyi +2 -2
  93. metaflow-stubs/mf_extensions/outerbounds/plugins/card_utilities/injector.pyi +2 -2
  94. metaflow-stubs/mf_extensions/outerbounds/plugins/checkpoint_datastores/__init__.pyi +2 -2
  95. metaflow-stubs/mf_extensions/outerbounds/plugins/checkpoint_datastores/coreweave.pyi +3 -3
  96. metaflow-stubs/mf_extensions/outerbounds/plugins/checkpoint_datastores/nebius.pyi +3 -3
  97. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/__init__.pyi +2 -2
  98. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/baker.pyi +3 -3
  99. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/docker_environment.pyi +3 -3
  100. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/fast_bakery.pyi +2 -2
  101. metaflow-stubs/mf_extensions/outerbounds/plugins/kubernetes/__init__.pyi +2 -2
  102. metaflow-stubs/mf_extensions/outerbounds/plugins/kubernetes/pod_killer.pyi +2 -2
  103. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/__init__.pyi +2 -2
  104. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/constants.pyi +2 -2
  105. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/exceptions.pyi +2 -2
  106. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/ollama.pyi +2 -2
  107. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/status_card.pyi +2 -2
  108. metaflow-stubs/mf_extensions/outerbounds/plugins/snowflake/__init__.pyi +2 -2
  109. metaflow-stubs/mf_extensions/outerbounds/plugins/snowflake/snowflake.pyi +2 -2
  110. metaflow-stubs/mf_extensions/outerbounds/profilers/__init__.pyi +2 -2
  111. metaflow-stubs/mf_extensions/outerbounds/profilers/gpu.pyi +2 -2
  112. metaflow-stubs/mf_extensions/outerbounds/remote_config.pyi +3 -3
  113. metaflow-stubs/mf_extensions/outerbounds/toplevel/__init__.pyi +2 -2
  114. metaflow-stubs/mf_extensions/outerbounds/toplevel/global_aliases_for_metaflow_package.pyi +2 -2
  115. metaflow-stubs/mf_extensions/outerbounds/toplevel/s3_proxy.pyi +2 -2
  116. metaflow-stubs/multicore_utils.pyi +2 -2
  117. metaflow-stubs/ob_internal.pyi +2 -2
  118. metaflow-stubs/packaging_sys/__init__.pyi +6 -6
  119. metaflow-stubs/packaging_sys/backend.pyi +2 -2
  120. metaflow-stubs/packaging_sys/distribution_support.pyi +4 -4
  121. metaflow-stubs/packaging_sys/tar_backend.pyi +5 -5
  122. metaflow-stubs/packaging_sys/utils.pyi +2 -2
  123. metaflow-stubs/packaging_sys/v1.pyi +3 -3
  124. metaflow-stubs/parameters.pyi +3 -3
  125. metaflow-stubs/plugins/__init__.pyi +15 -15
  126. metaflow-stubs/plugins/airflow/__init__.pyi +2 -2
  127. metaflow-stubs/plugins/airflow/airflow_utils.pyi +2 -2
  128. metaflow-stubs/plugins/airflow/exception.pyi +2 -2
  129. metaflow-stubs/plugins/airflow/sensors/__init__.pyi +2 -2
  130. metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +2 -2
  131. metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +2 -2
  132. metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +2 -2
  133. metaflow-stubs/plugins/argo/__init__.pyi +2 -2
  134. metaflow-stubs/plugins/argo/argo_client.pyi +2 -2
  135. metaflow-stubs/plugins/argo/argo_events.pyi +2 -2
  136. metaflow-stubs/plugins/argo/argo_workflows.pyi +3 -3
  137. metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +4 -4
  138. metaflow-stubs/plugins/argo/argo_workflows_deployer.pyi +4 -4
  139. metaflow-stubs/plugins/argo/argo_workflows_deployer_objects.pyi +2 -2
  140. metaflow-stubs/plugins/argo/exit_hooks.pyi +2 -2
  141. metaflow-stubs/plugins/aws/__init__.pyi +2 -2
  142. metaflow-stubs/plugins/aws/aws_client.pyi +2 -2
  143. metaflow-stubs/plugins/aws/aws_utils.pyi +2 -2
  144. metaflow-stubs/plugins/aws/batch/__init__.pyi +2 -2
  145. metaflow-stubs/plugins/aws/batch/batch.pyi +2 -2
  146. metaflow-stubs/plugins/aws/batch/batch_client.pyi +2 -2
  147. metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +2 -2
  148. metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +2 -2
  149. metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +3 -3
  150. metaflow-stubs/plugins/aws/step_functions/__init__.pyi +2 -2
  151. metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +2 -2
  152. metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +2 -2
  153. metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +2 -2
  154. metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +2 -2
  155. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer.pyi +3 -3
  156. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer_objects.pyi +3 -3
  157. metaflow-stubs/plugins/azure/__init__.pyi +2 -2
  158. metaflow-stubs/plugins/azure/azure_credential.pyi +2 -2
  159. metaflow-stubs/plugins/azure/azure_exceptions.pyi +2 -2
  160. metaflow-stubs/plugins/azure/azure_secret_manager_secrets_provider.pyi +3 -3
  161. metaflow-stubs/plugins/azure/azure_utils.pyi +2 -2
  162. metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +2 -2
  163. metaflow-stubs/plugins/azure/includefile_support.pyi +2 -2
  164. metaflow-stubs/plugins/cards/__init__.pyi +2 -2
  165. metaflow-stubs/plugins/cards/card_client.pyi +2 -2
  166. metaflow-stubs/plugins/cards/card_creator.pyi +2 -2
  167. metaflow-stubs/plugins/cards/card_datastore.pyi +2 -2
  168. metaflow-stubs/plugins/cards/card_decorator.pyi +2 -2
  169. metaflow-stubs/plugins/cards/card_modules/__init__.pyi +2 -2
  170. metaflow-stubs/plugins/cards/card_modules/basic.pyi +2 -2
  171. metaflow-stubs/plugins/cards/card_modules/card.pyi +2 -2
  172. metaflow-stubs/plugins/cards/card_modules/components.pyi +4 -4
  173. metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +2 -2
  174. metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +2 -2
  175. metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +2 -2
  176. metaflow-stubs/plugins/cards/card_resolver.pyi +2 -2
  177. metaflow-stubs/plugins/cards/component_serializer.pyi +2 -2
  178. metaflow-stubs/plugins/cards/exception.pyi +2 -2
  179. metaflow-stubs/plugins/catch_decorator.pyi +3 -3
  180. metaflow-stubs/plugins/datatools/__init__.pyi +2 -2
  181. metaflow-stubs/plugins/datatools/local.pyi +2 -2
  182. metaflow-stubs/plugins/datatools/s3/__init__.pyi +2 -2
  183. metaflow-stubs/plugins/datatools/s3/s3.pyi +3 -3
  184. metaflow-stubs/plugins/datatools/s3/s3tail.pyi +2 -2
  185. metaflow-stubs/plugins/datatools/s3/s3util.pyi +2 -2
  186. metaflow-stubs/plugins/debug_logger.pyi +2 -2
  187. metaflow-stubs/plugins/debug_monitor.pyi +2 -2
  188. metaflow-stubs/plugins/environment_decorator.pyi +2 -2
  189. metaflow-stubs/plugins/events_decorator.pyi +2 -2
  190. metaflow-stubs/plugins/exit_hook/__init__.pyi +2 -2
  191. metaflow-stubs/plugins/exit_hook/exit_hook_decorator.pyi +2 -2
  192. metaflow-stubs/plugins/frameworks/__init__.pyi +2 -2
  193. metaflow-stubs/plugins/frameworks/pytorch.pyi +2 -2
  194. metaflow-stubs/plugins/gcp/__init__.pyi +2 -2
  195. metaflow-stubs/plugins/gcp/gcp_secret_manager_secrets_provider.pyi +3 -3
  196. metaflow-stubs/plugins/gcp/gs_exceptions.pyi +2 -2
  197. metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +2 -2
  198. metaflow-stubs/plugins/gcp/gs_utils.pyi +2 -2
  199. metaflow-stubs/plugins/gcp/includefile_support.pyi +2 -2
  200. metaflow-stubs/plugins/kubernetes/__init__.pyi +2 -2
  201. metaflow-stubs/plugins/kubernetes/kube_utils.pyi +2 -2
  202. metaflow-stubs/plugins/kubernetes/kubernetes.pyi +2 -2
  203. metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +2 -2
  204. metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +2 -2
  205. metaflow-stubs/plugins/kubernetes/kubernetes_jobsets.pyi +2 -2
  206. metaflow-stubs/plugins/kubernetes/spot_monitor_sidecar.pyi +2 -2
  207. metaflow-stubs/plugins/ollama/__init__.pyi +3 -3
  208. metaflow-stubs/plugins/optuna/__init__.pyi +2 -2
  209. metaflow-stubs/plugins/parallel_decorator.pyi +2 -2
  210. metaflow-stubs/plugins/perimeters.pyi +2 -2
  211. metaflow-stubs/plugins/project_decorator.pyi +2 -2
  212. metaflow-stubs/plugins/pypi/__init__.pyi +3 -3
  213. metaflow-stubs/plugins/pypi/conda_decorator.pyi +2 -2
  214. metaflow-stubs/plugins/pypi/conda_environment.pyi +4 -4
  215. metaflow-stubs/plugins/pypi/parsers.pyi +2 -2
  216. metaflow-stubs/plugins/pypi/pypi_decorator.pyi +2 -2
  217. metaflow-stubs/plugins/pypi/pypi_environment.pyi +2 -2
  218. metaflow-stubs/plugins/pypi/utils.pyi +2 -2
  219. metaflow-stubs/plugins/resources_decorator.pyi +2 -2
  220. metaflow-stubs/plugins/retry_decorator.pyi +2 -2
  221. metaflow-stubs/plugins/secrets/__init__.pyi +2 -2
  222. metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +2 -2
  223. metaflow-stubs/plugins/secrets/secrets_decorator.pyi +2 -2
  224. metaflow-stubs/plugins/secrets/secrets_func.pyi +2 -2
  225. metaflow-stubs/plugins/secrets/secrets_spec.pyi +2 -2
  226. metaflow-stubs/plugins/secrets/utils.pyi +2 -2
  227. metaflow-stubs/plugins/snowflake/__init__.pyi +2 -2
  228. metaflow-stubs/plugins/storage_executor.pyi +2 -2
  229. metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +2 -2
  230. metaflow-stubs/plugins/timeout_decorator.pyi +3 -3
  231. metaflow-stubs/plugins/torchtune/__init__.pyi +2 -2
  232. metaflow-stubs/plugins/uv/__init__.pyi +2 -2
  233. metaflow-stubs/plugins/uv/uv_environment.pyi +3 -3
  234. metaflow-stubs/profilers/__init__.pyi +2 -2
  235. metaflow-stubs/pylint_wrapper.pyi +2 -2
  236. metaflow-stubs/runner/__init__.pyi +2 -2
  237. metaflow-stubs/runner/deployer.pyi +33 -33
  238. metaflow-stubs/runner/deployer_impl.pyi +3 -3
  239. metaflow-stubs/runner/metaflow_runner.pyi +3 -3
  240. metaflow-stubs/runner/nbdeploy.pyi +2 -2
  241. metaflow-stubs/runner/nbrun.pyi +2 -2
  242. metaflow-stubs/runner/subprocess_manager.pyi +2 -2
  243. metaflow-stubs/runner/utils.pyi +3 -3
  244. metaflow-stubs/system/__init__.pyi +2 -2
  245. metaflow-stubs/system/system_logger.pyi +3 -3
  246. metaflow-stubs/system/system_monitor.pyi +2 -2
  247. metaflow-stubs/tagging_util.pyi +2 -2
  248. metaflow-stubs/tuple_util.pyi +2 -2
  249. metaflow-stubs/user_configs/__init__.pyi +2 -2
  250. metaflow-stubs/user_configs/config_options.pyi +3 -3
  251. metaflow-stubs/user_configs/config_parameters.pyi +7 -7
  252. metaflow-stubs/user_decorators/__init__.pyi +2 -2
  253. metaflow-stubs/user_decorators/common.pyi +2 -2
  254. metaflow-stubs/user_decorators/mutable_flow.pyi +5 -5
  255. metaflow-stubs/user_decorators/mutable_step.pyi +4 -4
  256. metaflow-stubs/user_decorators/user_flow_decorator.pyi +4 -4
  257. metaflow-stubs/user_decorators/user_step_decorator.pyi +4 -4
  258. {ob_metaflow_stubs-6.0.9.3.dist-info → ob_metaflow_stubs-6.0.9.4.dist-info}/METADATA +1 -1
  259. ob_metaflow_stubs-6.0.9.4.dist-info/RECORD +262 -0
  260. ob_metaflow_stubs-6.0.9.3.dist-info/RECORD +0 -262
  261. {ob_metaflow_stubs-6.0.9.3.dist-info → ob_metaflow_stubs-6.0.9.4.dist-info}/WHEEL +0 -0
  262. {ob_metaflow_stubs-6.0.9.3.dist-info → ob_metaflow_stubs-6.0.9.4.dist-info}/top_level.txt +0 -0
@@ -1,15 +1,15 @@
1
1
  ######################################################################################################
2
2
  # Auto-generated Metaflow stub file #
3
- # MF version: 2.18.1.1+obcheckpoint(0.2.4);ob(v1) #
4
- # Generated on 2025-09-02T19:19:25.341768 #
3
+ # MF version: 2.18.2.1+obcheckpoint(0.2.4);ob(v1) #
4
+ # Generated on 2025-09-03T10:45:51.965005 #
5
5
  ######################################################################################################
6
6
 
7
7
  from __future__ import annotations
8
8
 
9
9
  import typing
10
10
  if typing.TYPE_CHECKING:
11
- import typing
12
11
  import datetime
12
+ import typing
13
13
  FlowSpecDerived = typing.TypeVar("FlowSpecDerived", bound="FlowSpec", contravariant=False, covariant=False)
14
14
  StepFlag = typing.NewType("StepFlag", bool)
15
15
 
@@ -39,17 +39,17 @@ from .user_decorators.user_step_decorator import UserStepDecorator as UserStepDe
39
39
  from .user_decorators.user_step_decorator import StepMutator as StepMutator
40
40
  from .user_decorators.user_step_decorator import user_step_decorator as user_step_decorator
41
41
  from .user_decorators.user_flow_decorator import FlowMutator as FlowMutator
42
+ from . import tuple_util as tuple_util
43
+ from . import cards as cards
42
44
  from . import metaflow_git as metaflow_git
43
45
  from . import events as events
44
- from . import cards as cards
45
- from . import tuple_util as tuple_util
46
46
  from . import runner as runner
47
47
  from . import plugins as plugins
48
48
  from .mf_extensions.outerbounds.toplevel.global_aliases_for_metaflow_package import S3 as S3
49
49
  from . import includefile as includefile
50
50
  from .includefile import IncludeFile as IncludeFile
51
- from .plugins.pypi.parsers import conda_environment_yml_parser as conda_environment_yml_parser
52
51
  from .plugins.pypi.parsers import pyproject_toml_parser as pyproject_toml_parser
52
+ from .plugins.pypi.parsers import conda_environment_yml_parser as conda_environment_yml_parser
53
53
  from .plugins.pypi.parsers import requirements_txt_parser as requirements_txt_parser
54
54
  from . import client as client
55
55
  from .client.core import namespace as namespace
@@ -168,78 +168,70 @@ def step(f: typing.Union[typing.Callable[[FlowSpecDerived], None], typing.Callab
168
168
  ...
169
169
 
170
170
  @typing.overload
171
- def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
171
+ def app_deploy(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
172
172
  """
173
- Specifies the Conda environment for the step.
174
-
175
- Information in this decorator will augment any
176
- attributes set in the `@conda_base` flow-level decorator. Hence,
177
- you can use `@conda_base` to set packages required by all
178
- steps and use `@conda` to specify step-specific overrides.
179
-
180
-
181
- Parameters
182
- ----------
183
- packages : Dict[str, str], default {}
184
- Packages to use for this step. The key is the name of the package
185
- and the value is the version to use.
186
- libraries : Dict[str, str], default {}
187
- Supported for backward compatibility. When used with packages, packages will take precedence.
188
- python : str, optional, default None
189
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
190
- that the version used will correspond to the version of the Python interpreter used to start the run.
191
- disabled : bool, default False
192
- If set to True, disables @conda.
173
+ Decorator prototype for all step decorators. This function gets specialized
174
+ and imported for all decorators types by _import_plugin_decorators().
193
175
  """
194
176
  ...
195
177
 
196
178
  @typing.overload
197
- def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
179
+ def app_deploy(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
198
180
  ...
199
181
 
200
- @typing.overload
201
- def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
182
+ def app_deploy(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
183
+ """
184
+ Decorator prototype for all step decorators. This function gets specialized
185
+ and imported for all decorators types by _import_plugin_decorators().
186
+ """
202
187
  ...
203
188
 
204
- def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
189
+ @typing.overload
190
+ def card(*, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
205
191
  """
206
- Specifies the Conda environment for the step.
192
+ Creates a human-readable report, a Metaflow Card, after this step completes.
207
193
 
208
- Information in this decorator will augment any
209
- attributes set in the `@conda_base` flow-level decorator. Hence,
210
- you can use `@conda_base` to set packages required by all
211
- steps and use `@conda` to specify step-specific overrides.
194
+ Note that you may add multiple `@card` decorators in a step with different parameters.
212
195
 
213
196
 
214
197
  Parameters
215
198
  ----------
216
- packages : Dict[str, str], default {}
217
- Packages to use for this step. The key is the name of the package
218
- and the value is the version to use.
219
- libraries : Dict[str, str], default {}
220
- Supported for backward compatibility. When used with packages, packages will take precedence.
221
- python : str, optional, default None
222
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
223
- that the version used will correspond to the version of the Python interpreter used to start the run.
224
- disabled : bool, default False
225
- If set to True, disables @conda.
199
+ type : str, default 'default'
200
+ Card type.
201
+ id : str, optional, default None
202
+ If multiple cards are present, use this id to identify this card.
203
+ options : Dict[str, Any], default {}
204
+ Options passed to the card. The contents depend on the card type.
205
+ timeout : int, default 45
206
+ Interrupt reporting if it takes more than this many seconds.
226
207
  """
227
208
  ...
228
209
 
229
210
  @typing.overload
230
- def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
231
- """
232
- Internal decorator to support Fast bakery
233
- """
211
+ def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
234
212
  ...
235
213
 
236
214
  @typing.overload
237
- def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
215
+ def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
238
216
  ...
239
217
 
240
- def fast_bakery_internal(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
218
+ def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
241
219
  """
242
- Internal decorator to support Fast bakery
220
+ Creates a human-readable report, a Metaflow Card, after this step completes.
221
+
222
+ Note that you may add multiple `@card` decorators in a step with different parameters.
223
+
224
+
225
+ Parameters
226
+ ----------
227
+ type : str, default 'default'
228
+ Card type.
229
+ id : str, optional, default None
230
+ If multiple cards are present, use this id to identify this card.
231
+ options : Dict[str, Any], default {}
232
+ Options passed to the card. The contents depend on the card type.
233
+ timeout : int, default 45
234
+ Interrupt reporting if it takes more than this many seconds.
243
235
  """
244
236
  ...
245
237
 
@@ -295,538 +287,594 @@ def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
295
287
  ...
296
288
 
297
289
  @typing.overload
298
- def card(*, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
290
+ def checkpoint(*, load_policy: str = 'fresh', temp_dir_root: str = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
299
291
  """
300
- Creates a human-readable report, a Metaflow Card, after this step completes.
292
+ Enables checkpointing for a step.
301
293
 
302
- Note that you may add multiple `@card` decorators in a step with different parameters.
294
+ > Examples
295
+
296
+ - Saving Checkpoints
297
+
298
+ ```python
299
+ @checkpoint
300
+ @step
301
+ def train(self):
302
+ model = create_model(self.parameters, checkpoint_path = None)
303
+ for i in range(self.epochs):
304
+ # some training logic
305
+ loss = model.train(self.dataset)
306
+ if i % 10 == 0:
307
+ model.save(
308
+ current.checkpoint.directory,
309
+ )
310
+ # saves the contents of the `current.checkpoint.directory` as a checkpoint
311
+ # and returns a reference dictionary to the checkpoint saved in the datastore
312
+ self.latest_checkpoint = current.checkpoint.save(
313
+ name="epoch_checkpoint",
314
+ metadata={
315
+ "epoch": i,
316
+ "loss": loss,
317
+ }
318
+ )
319
+ ```
320
+
321
+ - Using Loaded Checkpoints
322
+
323
+ ```python
324
+ @retry(times=3)
325
+ @checkpoint
326
+ @step
327
+ def train(self):
328
+ # Assume that the task has restarted and the previous attempt of the task
329
+ # saved a checkpoint
330
+ checkpoint_path = None
331
+ if current.checkpoint.is_loaded: # Check if a checkpoint is loaded
332
+ print("Loaded checkpoint from the previous attempt")
333
+ checkpoint_path = current.checkpoint.directory
334
+
335
+ model = create_model(self.parameters, checkpoint_path = checkpoint_path)
336
+ for i in range(self.epochs):
337
+ ...
338
+ ```
303
339
 
304
340
 
305
341
  Parameters
306
342
  ----------
307
- type : str, default 'default'
308
- Card type.
309
- id : str, optional, default None
310
- If multiple cards are present, use this id to identify this card.
311
- options : Dict[str, Any], default {}
312
- Options passed to the card. The contents depend on the card type.
313
- timeout : int, default 45
314
- Interrupt reporting if it takes more than this many seconds.
343
+ load_policy : str, default: "fresh"
344
+ The policy for loading the checkpoint. The following policies are supported:
345
+ - "eager": Loads the the latest available checkpoint within the namespace.
346
+ With this mode, the latest checkpoint written by any previous task (can be even a different run) of the step
347
+ will be loaded at the start of the task.
348
+ - "none": Do not load any checkpoint
349
+ - "fresh": Loads the lastest checkpoint created within the running Task.
350
+ This mode helps loading checkpoints across various retry attempts of the same task.
351
+ With this mode, no checkpoint will be loaded at the start of a task but any checkpoints
352
+ created within the task will be loaded when the task is retries execution on failure.
353
+
354
+ temp_dir_root : str, default: None
355
+ The root directory under which `current.checkpoint.directory` will be created.
315
356
  """
316
357
  ...
317
358
 
318
359
  @typing.overload
319
- def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
360
+ def checkpoint(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
320
361
  ...
321
362
 
322
363
  @typing.overload
323
- def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
364
+ def checkpoint(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
324
365
  ...
325
366
 
326
- def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
367
+ def checkpoint(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, load_policy: str = 'fresh', temp_dir_root: str = None):
327
368
  """
328
- Creates a human-readable report, a Metaflow Card, after this step completes.
369
+ Enables checkpointing for a step.
329
370
 
330
- Note that you may add multiple `@card` decorators in a step with different parameters.
371
+ > Examples
372
+
373
+ - Saving Checkpoints
374
+
375
+ ```python
376
+ @checkpoint
377
+ @step
378
+ def train(self):
379
+ model = create_model(self.parameters, checkpoint_path = None)
380
+ for i in range(self.epochs):
381
+ # some training logic
382
+ loss = model.train(self.dataset)
383
+ if i % 10 == 0:
384
+ model.save(
385
+ current.checkpoint.directory,
386
+ )
387
+ # saves the contents of the `current.checkpoint.directory` as a checkpoint
388
+ # and returns a reference dictionary to the checkpoint saved in the datastore
389
+ self.latest_checkpoint = current.checkpoint.save(
390
+ name="epoch_checkpoint",
391
+ metadata={
392
+ "epoch": i,
393
+ "loss": loss,
394
+ }
395
+ )
396
+ ```
397
+
398
+ - Using Loaded Checkpoints
399
+
400
+ ```python
401
+ @retry(times=3)
402
+ @checkpoint
403
+ @step
404
+ def train(self):
405
+ # Assume that the task has restarted and the previous attempt of the task
406
+ # saved a checkpoint
407
+ checkpoint_path = None
408
+ if current.checkpoint.is_loaded: # Check if a checkpoint is loaded
409
+ print("Loaded checkpoint from the previous attempt")
410
+ checkpoint_path = current.checkpoint.directory
411
+
412
+ model = create_model(self.parameters, checkpoint_path = checkpoint_path)
413
+ for i in range(self.epochs):
414
+ ...
415
+ ```
331
416
 
332
417
 
333
418
  Parameters
334
419
  ----------
335
- type : str, default 'default'
336
- Card type.
337
- id : str, optional, default None
338
- If multiple cards are present, use this id to identify this card.
339
- options : Dict[str, Any], default {}
340
- Options passed to the card. The contents depend on the card type.
341
- timeout : int, default 45
342
- Interrupt reporting if it takes more than this many seconds.
420
+ load_policy : str, default: "fresh"
421
+ The policy for loading the checkpoint. The following policies are supported:
422
+ - "eager": Loads the the latest available checkpoint within the namespace.
423
+ With this mode, the latest checkpoint written by any previous task (can be even a different run) of the step
424
+ will be loaded at the start of the task.
425
+ - "none": Do not load any checkpoint
426
+ - "fresh": Loads the lastest checkpoint created within the running Task.
427
+ This mode helps loading checkpoints across various retry attempts of the same task.
428
+ With this mode, no checkpoint will be loaded at the start of a task but any checkpoints
429
+ created within the task will be loaded when the task is retries execution on failure.
430
+
431
+ temp_dir_root : str, default: None
432
+ The root directory under which `current.checkpoint.directory` will be created.
343
433
  """
344
434
  ...
345
435
 
346
- @typing.overload
347
- def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
436
+ def nvidia(*, gpu: int, gpu_type: str, queue_timeout: int) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
348
437
  """
349
- Specifies the number of times the task corresponding
350
- to a step needs to be retried.
351
-
352
- This decorator is useful for handling transient errors, such as networking issues.
353
- If your task contains operations that can't be retried safely, e.g. database updates,
354
- it is advisable to annotate it with `@retry(times=0)`.
355
-
356
- This can be used in conjunction with the `@catch` decorator. The `@catch`
357
- decorator will execute a no-op task after all retries have been exhausted,
358
- ensuring that the flow execution can continue.
438
+ Specifies that this step should execute on DGX cloud.
359
439
 
360
440
 
361
441
  Parameters
362
442
  ----------
363
- times : int, default 3
364
- Number of times to retry this task.
365
- minutes_between_retries : int, default 2
366
- Number of minutes between retries.
443
+ gpu : int
444
+ Number of GPUs to use.
445
+ gpu_type : str
446
+ Type of Nvidia GPU to use.
447
+ queue_timeout : int
448
+ Time to keep the job in NVCF's queue.
367
449
  """
368
450
  ...
369
451
 
370
452
  @typing.overload
371
- def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
453
+ def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
454
+ """
455
+ Specifies the PyPI packages for the step.
456
+
457
+ Information in this decorator will augment any
458
+ attributes set in the `@pyi_base` flow-level decorator. Hence,
459
+ you can use `@pypi_base` to set packages required by all
460
+ steps and use `@pypi` to specify step-specific overrides.
461
+
462
+
463
+ Parameters
464
+ ----------
465
+ packages : Dict[str, str], default: {}
466
+ Packages to use for this step. The key is the name of the package
467
+ and the value is the version to use.
468
+ python : str, optional, default: None
469
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
470
+ that the version used will correspond to the version of the Python interpreter used to start the run.
471
+ """
372
472
  ...
373
473
 
374
474
  @typing.overload
375
- def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
475
+ def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
376
476
  ...
377
477
 
378
- def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
478
+ @typing.overload
479
+ def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
480
+ ...
481
+
482
+ def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
379
483
  """
380
- Specifies the number of times the task corresponding
381
- to a step needs to be retried.
382
-
383
- This decorator is useful for handling transient errors, such as networking issues.
384
- If your task contains operations that can't be retried safely, e.g. database updates,
385
- it is advisable to annotate it with `@retry(times=0)`.
484
+ Specifies the PyPI packages for the step.
386
485
 
387
- This can be used in conjunction with the `@catch` decorator. The `@catch`
388
- decorator will execute a no-op task after all retries have been exhausted,
389
- ensuring that the flow execution can continue.
486
+ Information in this decorator will augment any
487
+ attributes set in the `@pyi_base` flow-level decorator. Hence,
488
+ you can use `@pypi_base` to set packages required by all
489
+ steps and use `@pypi` to specify step-specific overrides.
390
490
 
391
491
 
392
492
  Parameters
393
493
  ----------
394
- times : int, default 3
395
- Number of times to retry this task.
396
- minutes_between_retries : int, default 2
397
- Number of minutes between retries.
494
+ packages : Dict[str, str], default: {}
495
+ Packages to use for this step. The key is the name of the package
496
+ and the value is the version to use.
497
+ python : str, optional, default: None
498
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
499
+ that the version used will correspond to the version of the Python interpreter used to start the run.
398
500
  """
399
501
  ...
400
502
 
401
- def nvidia(*, gpu: int, gpu_type: str, queue_timeout: int) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
503
+ @typing.overload
504
+ def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
402
505
  """
403
- Specifies that this step should execute on DGX cloud.
404
-
405
-
406
- Parameters
407
- ----------
408
- gpu : int
409
- Number of GPUs to use.
410
- gpu_type : str
411
- Type of Nvidia GPU to use.
412
- queue_timeout : int
413
- Time to keep the job in NVCF's queue.
506
+ Decorator prototype for all step decorators. This function gets specialized
507
+ and imported for all decorators types by _import_plugin_decorators().
414
508
  """
415
509
  ...
416
510
 
417
511
  @typing.overload
418
- def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
512
+ def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
513
+ ...
514
+
515
+ def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
419
516
  """
420
- Specifies environment variables to be set prior to the execution of a step.
421
-
422
-
423
- Parameters
424
- ----------
425
- vars : Dict[str, str], default {}
426
- Dictionary of environment variables to set.
517
+ Decorator prototype for all step decorators. This function gets specialized
518
+ and imported for all decorators types by _import_plugin_decorators().
427
519
  """
428
520
  ...
429
521
 
430
522
  @typing.overload
431
- def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
523
+ def coreweave_s3_proxy(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
524
+ """
525
+ CoreWeave-specific S3 Proxy decorator for routing S3 requests through a local proxy service.
526
+ It exists to make it easier for users to know that this decorator should only be used with
527
+ a Neo Cloud like CoreWeave.
528
+ """
432
529
  ...
433
530
 
434
531
  @typing.overload
435
- def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
532
+ def coreweave_s3_proxy(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
436
533
  ...
437
534
 
438
- def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
535
+ def coreweave_s3_proxy(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
439
536
  """
440
- Specifies environment variables to be set prior to the execution of a step.
441
-
442
-
443
- Parameters
444
- ----------
445
- vars : Dict[str, str], default {}
446
- Dictionary of environment variables to set.
537
+ CoreWeave-specific S3 Proxy decorator for routing S3 requests through a local proxy service.
538
+ It exists to make it easier for users to know that this decorator should only be used with
539
+ a Neo Cloud like CoreWeave.
447
540
  """
448
541
  ...
449
542
 
450
- def nvct(*, gpu: int, gpu_type: str) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
543
+ @typing.overload
544
+ def nebius_s3_proxy(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
451
545
  """
452
- Specifies that this step should execute on DGX cloud.
453
-
454
-
455
- Parameters
456
- ----------
457
- gpu : int
458
- Number of GPUs to use.
459
- gpu_type : str
460
- Type of Nvidia GPU to use.
546
+ Nebius-specific S3 Proxy decorator for routing S3 requests through a local proxy service.
547
+ It exists to make it easier for users to know that this decorator should only be used with
548
+ a Neo Cloud like Nebius.
461
549
  """
462
550
  ...
463
551
 
464
552
  @typing.overload
465
- def resources(*, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
553
+ def nebius_s3_proxy(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
554
+ ...
555
+
556
+ def nebius_s3_proxy(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
466
557
  """
467
- Specifies the resources needed when executing this step.
558
+ Nebius-specific S3 Proxy decorator for routing S3 requests through a local proxy service.
559
+ It exists to make it easier for users to know that this decorator should only be used with
560
+ a Neo Cloud like Nebius.
561
+ """
562
+ ...
563
+
564
+ @typing.overload
565
+ def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
566
+ """
567
+ Specifies the number of times the task corresponding
568
+ to a step needs to be retried.
468
569
 
469
- Use `@resources` to specify the resource requirements
470
- independently of the specific compute layer (`@batch`, `@kubernetes`).
570
+ This decorator is useful for handling transient errors, such as networking issues.
571
+ If your task contains operations that can't be retried safely, e.g. database updates,
572
+ it is advisable to annotate it with `@retry(times=0)`.
471
573
 
472
- You can choose the compute layer on the command line by executing e.g.
473
- ```
474
- python myflow.py run --with batch
475
- ```
476
- or
477
- ```
478
- python myflow.py run --with kubernetes
479
- ```
480
- which executes the flow on the desired system using the
481
- requirements specified in `@resources`.
574
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
575
+ decorator will execute a no-op task after all retries have been exhausted,
576
+ ensuring that the flow execution can continue.
482
577
 
483
578
 
484
579
  Parameters
485
580
  ----------
486
- cpu : int, default 1
487
- Number of CPUs required for this step.
488
- gpu : int, optional, default None
489
- Number of GPUs required for this step.
490
- disk : int, optional, default None
491
- Disk size (in MB) required for this step. Only applies on Kubernetes.
492
- memory : int, default 4096
493
- Memory size (in MB) required for this step.
494
- shared_memory : int, optional, default None
495
- The value for the size (in MiB) of the /dev/shm volume for this step.
496
- This parameter maps to the `--shm-size` option in Docker.
581
+ times : int, default 3
582
+ Number of times to retry this task.
583
+ minutes_between_retries : int, default 2
584
+ Number of minutes between retries.
497
585
  """
498
586
  ...
499
587
 
500
588
  @typing.overload
501
- def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
589
+ def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
502
590
  ...
503
591
 
504
592
  @typing.overload
505
- def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
593
+ def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
506
594
  ...
507
595
 
508
- def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
596
+ def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
509
597
  """
510
- Specifies the resources needed when executing this step.
511
-
512
- Use `@resources` to specify the resource requirements
513
- independently of the specific compute layer (`@batch`, `@kubernetes`).
514
-
515
- You can choose the compute layer on the command line by executing e.g.
516
- ```
517
- python myflow.py run --with batch
518
- ```
519
- or
520
- ```
521
- python myflow.py run --with kubernetes
522
- ```
523
- which executes the flow on the desired system using the
524
- requirements specified in `@resources`.
598
+ Specifies the number of times the task corresponding
599
+ to a step needs to be retried.
525
600
 
601
+ This decorator is useful for handling transient errors, such as networking issues.
602
+ If your task contains operations that can't be retried safely, e.g. database updates,
603
+ it is advisable to annotate it with `@retry(times=0)`.
526
604
 
527
- Parameters
528
- ----------
529
- cpu : int, default 1
530
- Number of CPUs required for this step.
531
- gpu : int, optional, default None
532
- Number of GPUs required for this step.
533
- disk : int, optional, default None
534
- Disk size (in MB) required for this step. Only applies on Kubernetes.
535
- memory : int, default 4096
536
- Memory size (in MB) required for this step.
537
- shared_memory : int, optional, default None
538
- The value for the size (in MiB) of the /dev/shm volume for this step.
539
- This parameter maps to the `--shm-size` option in Docker.
540
- """
541
- ...
542
-
543
- def s3_proxy(*, integration_name: typing.Optional[str] = None, write_mode: typing.Optional[str] = None, debug: typing.Optional[bool] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
544
- """
545
- S3 Proxy decorator for routing S3 requests through a local proxy service.
605
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
606
+ decorator will execute a no-op task after all retries have been exhausted,
607
+ ensuring that the flow execution can continue.
546
608
 
547
609
 
548
610
  Parameters
549
611
  ----------
550
- integration_name : str, optional
551
- Name of the S3 proxy integration. If not specified, will use the only
552
- available S3 proxy integration in the namespace (fails if multiple exist).
553
- write_mode : str, optional
554
- The desired behavior during write operations to target (origin) S3 bucket.
555
- allowed options are:
556
- "origin-and-cache" -> write to both the target S3 bucket and local object
557
- storage
558
- "origin" -> only write to the target S3 bucket
559
- "cache" -> only write to the object storage service used for caching
560
- debug : bool, optional
561
- Enable debug logging for proxy operations.
612
+ times : int, default 3
613
+ Number of times to retry this task.
614
+ minutes_between_retries : int, default 2
615
+ Number of minutes between retries.
562
616
  """
563
617
  ...
564
618
 
565
619
  @typing.overload
566
- def checkpoint(*, load_policy: str = 'fresh', temp_dir_root: str = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
620
+ def model(*, load: typing.Union[typing.List[str], str, typing.List[typing.Tuple[str, typing.Optional[str]]]] = None, temp_dir_root: str = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
567
621
  """
568
- Enables checkpointing for a step.
622
+ Enables loading / saving of models within a step.
569
623
 
570
624
  > Examples
571
-
572
- - Saving Checkpoints
573
-
625
+ - Saving Models
574
626
  ```python
575
- @checkpoint
627
+ @model
576
628
  @step
577
629
  def train(self):
578
- model = create_model(self.parameters, checkpoint_path = None)
579
- for i in range(self.epochs):
580
- # some training logic
581
- loss = model.train(self.dataset)
582
- if i % 10 == 0:
583
- model.save(
584
- current.checkpoint.directory,
585
- )
586
- # saves the contents of the `current.checkpoint.directory` as a checkpoint
587
- # and returns a reference dictionary to the checkpoint saved in the datastore
588
- self.latest_checkpoint = current.checkpoint.save(
589
- name="epoch_checkpoint",
590
- metadata={
591
- "epoch": i,
592
- "loss": loss,
593
- }
594
- )
595
- ```
630
+ # current.model.save returns a dictionary reference to the model saved
631
+ self.my_model = current.model.save(
632
+ path_to_my_model,
633
+ label="my_model",
634
+ metadata={
635
+ "epochs": 10,
636
+ "batch-size": 32,
637
+ "learning-rate": 0.001,
638
+ }
639
+ )
640
+ self.next(self.test)
596
641
 
597
- - Using Loaded Checkpoints
642
+ @model(load="my_model")
643
+ @step
644
+ def test(self):
645
+ # `current.model.loaded` returns a dictionary of the loaded models
646
+ # where the key is the name of the artifact and the value is the path to the model
647
+ print(os.listdir(current.model.loaded["my_model"]))
648
+ self.next(self.end)
649
+ ```
598
650
 
651
+ - Loading models
599
652
  ```python
600
- @retry(times=3)
601
- @checkpoint
602
653
  @step
603
654
  def train(self):
604
- # Assume that the task has restarted and the previous attempt of the task
605
- # saved a checkpoint
606
- checkpoint_path = None
607
- if current.checkpoint.is_loaded: # Check if a checkpoint is loaded
608
- print("Loaded checkpoint from the previous attempt")
609
- checkpoint_path = current.checkpoint.directory
610
-
611
- model = create_model(self.parameters, checkpoint_path = checkpoint_path)
612
- for i in range(self.epochs):
613
- ...
655
+ # current.model.load returns the path to the model loaded
656
+ checkpoint_path = current.model.load(
657
+ self.checkpoint_key,
658
+ )
659
+ model_path = current.model.load(
660
+ self.model,
661
+ )
662
+ self.next(self.test)
614
663
  ```
615
664
 
616
665
 
617
666
  Parameters
618
667
  ----------
619
- load_policy : str, default: "fresh"
620
- The policy for loading the checkpoint. The following policies are supported:
621
- - "eager": Loads the the latest available checkpoint within the namespace.
622
- With this mode, the latest checkpoint written by any previous task (can be even a different run) of the step
623
- will be loaded at the start of the task.
624
- - "none": Do not load any checkpoint
625
- - "fresh": Loads the lastest checkpoint created within the running Task.
626
- This mode helps loading checkpoints across various retry attempts of the same task.
627
- With this mode, no checkpoint will be loaded at the start of a task but any checkpoints
628
- created within the task will be loaded when the task is retries execution on failure.
668
+ load : Union[List[str],str,List[Tuple[str,Union[str,None]]]], default: None
669
+ Artifact name/s referencing the models/checkpoints to load. Artifact names refer to the names of the instance variables set to `self`.
670
+ These artifact names give to `load` be reference objects or reference `key` string's from objects created by `current.checkpoint` / `current.model` / `current.huggingface_hub`.
671
+ If a list of tuples is provided, the first element is the artifact name and the second element is the path the artifact needs be unpacked on
672
+ the local filesystem. If the second element is None, the artifact will be unpacked in the current working directory.
673
+ If a string is provided, then the artifact corresponding to that name will be loaded in the current working directory.
629
674
 
630
675
  temp_dir_root : str, default: None
631
- The root directory under which `current.checkpoint.directory` will be created.
676
+ The root directory under which `current.model.loaded` will store loaded models
632
677
  """
633
678
  ...
634
679
 
635
680
  @typing.overload
636
- def checkpoint(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
681
+ def model(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
637
682
  ...
638
683
 
639
684
  @typing.overload
640
- def checkpoint(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
685
+ def model(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
641
686
  ...
642
687
 
643
- def checkpoint(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, load_policy: str = 'fresh', temp_dir_root: str = None):
688
+ def model(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, load: typing.Union[typing.List[str], str, typing.List[typing.Tuple[str, typing.Optional[str]]]] = None, temp_dir_root: str = None):
644
689
  """
645
- Enables checkpointing for a step.
690
+ Enables loading / saving of models within a step.
646
691
 
647
692
  > Examples
648
-
649
- - Saving Checkpoints
650
-
693
+ - Saving Models
651
694
  ```python
652
- @checkpoint
695
+ @model
653
696
  @step
654
697
  def train(self):
655
- model = create_model(self.parameters, checkpoint_path = None)
656
- for i in range(self.epochs):
657
- # some training logic
658
- loss = model.train(self.dataset)
659
- if i % 10 == 0:
660
- model.save(
661
- current.checkpoint.directory,
662
- )
663
- # saves the contents of the `current.checkpoint.directory` as a checkpoint
664
- # and returns a reference dictionary to the checkpoint saved in the datastore
665
- self.latest_checkpoint = current.checkpoint.save(
666
- name="epoch_checkpoint",
667
- metadata={
668
- "epoch": i,
669
- "loss": loss,
670
- }
671
- )
672
- ```
698
+ # current.model.save returns a dictionary reference to the model saved
699
+ self.my_model = current.model.save(
700
+ path_to_my_model,
701
+ label="my_model",
702
+ metadata={
703
+ "epochs": 10,
704
+ "batch-size": 32,
705
+ "learning-rate": 0.001,
706
+ }
707
+ )
708
+ self.next(self.test)
673
709
 
674
- - Using Loaded Checkpoints
710
+ @model(load="my_model")
711
+ @step
712
+ def test(self):
713
+ # `current.model.loaded` returns a dictionary of the loaded models
714
+ # where the key is the name of the artifact and the value is the path to the model
715
+ print(os.listdir(current.model.loaded["my_model"]))
716
+ self.next(self.end)
717
+ ```
675
718
 
719
+ - Loading models
676
720
  ```python
677
- @retry(times=3)
678
- @checkpoint
679
721
  @step
680
722
  def train(self):
681
- # Assume that the task has restarted and the previous attempt of the task
682
- # saved a checkpoint
683
- checkpoint_path = None
684
- if current.checkpoint.is_loaded: # Check if a checkpoint is loaded
685
- print("Loaded checkpoint from the previous attempt")
686
- checkpoint_path = current.checkpoint.directory
687
-
688
- model = create_model(self.parameters, checkpoint_path = checkpoint_path)
689
- for i in range(self.epochs):
690
- ...
723
+ # current.model.load returns the path to the model loaded
724
+ checkpoint_path = current.model.load(
725
+ self.checkpoint_key,
726
+ )
727
+ model_path = current.model.load(
728
+ self.model,
729
+ )
730
+ self.next(self.test)
691
731
  ```
692
732
 
693
733
 
694
734
  Parameters
695
735
  ----------
696
- load_policy : str, default: "fresh"
697
- The policy for loading the checkpoint. The following policies are supported:
698
- - "eager": Loads the the latest available checkpoint within the namespace.
699
- With this mode, the latest checkpoint written by any previous task (can be even a different run) of the step
700
- will be loaded at the start of the task.
701
- - "none": Do not load any checkpoint
702
- - "fresh": Loads the lastest checkpoint created within the running Task.
703
- This mode helps loading checkpoints across various retry attempts of the same task.
704
- With this mode, no checkpoint will be loaded at the start of a task but any checkpoints
705
- created within the task will be loaded when the task is retries execution on failure.
736
+ load : Union[List[str],str,List[Tuple[str,Union[str,None]]]], default: None
737
+ Artifact name/s referencing the models/checkpoints to load. Artifact names refer to the names of the instance variables set to `self`.
738
+ These artifact names given to `load` can be reference objects or reference `key` strings from objects created by `current.checkpoint` / `current.model` / `current.huggingface_hub`.
739
+ If a list of tuples is provided, the first element is the artifact name and the second element is the path the artifact needs to be unpacked on
740
+ the local filesystem. If the second element is None, the artifact will be unpacked in the current working directory.
741
+ If a string is provided, then the artifact corresponding to that name will be loaded in the current working directory.
706
742
 
707
743
  temp_dir_root : str, default: None
708
- The root directory under which `current.checkpoint.directory` will be created.
744
+ The root directory under which `current.model.loaded` will store loaded models
709
745
  """
710
746
  ...
711
747
 
712
- @typing.overload
713
- def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
748
+ def huggingface_hub(*, temp_dir_root: typing.Optional[str] = None, load: typing.Union[typing.List[str], typing.List[typing.Tuple[typing.Dict, str]], typing.List[typing.Tuple[str, str]], typing.List[typing.Dict], None]) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
714
749
  """
715
- Specifies a timeout for your step.
750
+ Decorator that helps cache, version and store models/datasets from huggingface hub.
716
751
 
717
- This decorator is useful if this step may hang indefinitely.
752
+ > Examples
718
753
 
719
- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
720
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
721
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
754
+ **Usage: creating references of models from huggingface that may be loaded in downstream steps**
755
+ ```python
756
+ @huggingface_hub
757
+ @step
758
+ def pull_model_from_huggingface(self):
759
+ # `current.huggingface_hub.snapshot_download` downloads the model from the Hugging Face Hub
760
+ # and saves it in the backend storage based on the model's `repo_id`. If there exists a model
761
+ # with the same `repo_id` in the backend storage, it will not download the model again. The return
762
+ # value of the function is a reference to the model in the backend storage.
763
+ # This reference can be used to load the model in the subsequent steps via `@model(load=["llama_model"])`
722
764
 
723
- Note that all the values specified in parameters are added together so if you specify
724
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
765
+ self.model_id = "mistralai/Mistral-7B-Instruct-v0.1"
766
+ self.llama_model = current.huggingface_hub.snapshot_download(
767
+ repo_id=self.model_id,
768
+ allow_patterns=["*.safetensors", "*.json", "tokenizer.*"],
769
+ )
770
+ self.next(self.train)
771
+ ```
772
+
773
+ **Usage: loading models directly from huggingface hub or from cache (from metaflow's datastore)**
774
+ ```python
775
+ @huggingface_hub(load=["mistralai/Mistral-7B-Instruct-v0.1"])
776
+ @step
777
+ def pull_model_from_huggingface(self):
778
+ path_to_model = current.huggingface_hub.loaded["mistralai/Mistral-7B-Instruct-v0.1"]
779
+ ```
780
+
781
+ ```python
782
+ @huggingface_hub(load=[("mistralai/Mistral-7B-Instruct-v0.1", "/my-directory"), ("myorg/mistral-lora", "/my-lora-directory")])
783
+ @step
784
+ def finetune_model(self):
785
+ path_to_model = current.huggingface_hub.loaded["mistralai/Mistral-7B-Instruct-v0.1"]
786
+ # path_to_model will be /my-directory
787
+ ```
788
+
789
+ ```python
790
+ # Takes all the arguments passed to `snapshot_download`
791
+ # except for `local_dir`
792
+ @huggingface_hub(load=[
793
+ {
794
+ "repo_id": "mistralai/Mistral-7B-Instruct-v0.1",
795
+ },
796
+ {
797
+ "repo_id": "myorg/mistral-lora",
798
+ "repo_type": "model",
799
+ },
800
+ ])
801
+ @step
802
+ def finetune_model(self):
803
+ path_to_model = current.huggingface_hub.loaded["mistralai/Mistral-7B-Instruct-v0.1"]
804
+ # path_to_model will be a temporary directory, since the dict form cannot specify `local_dir`
805
+ ```
725
806
 
726
807
 
727
808
  Parameters
728
809
  ----------
729
- seconds : int, default 0
730
- Number of seconds to wait prior to timing out.
731
- minutes : int, default 0
732
- Number of minutes to wait prior to timing out.
733
- hours : int, default 0
734
- Number of hours to wait prior to timing out.
810
+ temp_dir_root : str, optional
811
+ The root directory that will hold the temporary directory where objects will be downloaded.
812
+
813
+ load: Union[List[str], List[Tuple[Dict, str]], List[Tuple[str, str]], List[Dict], None]
814
+ The list of repos (models/datasets) to load.
815
+
816
+ Loaded repos can be accessed via `current.huggingface_hub.loaded`. If load is set, then the following happens:
817
+
818
+ - If repo (model/dataset) is not found in the datastore:
819
+ - Downloads the repo from Hugging Face Hub to a temporary directory (or uses specified path) for local access
820
+ - Stores it in Metaflow's datastore (s3/gcs/azure etc.) with a unique name based on repo_type/repo_id
821
+ - All HF models loaded for a `@step` will be cached separately under flow/step/namespace.
822
+
823
+ - If repo is found in the datastore:
824
+ - Loads it directly from datastore to local path (can be temporary directory or specified path)
735
825
  """
736
826
  ...
737
827
 
738
- @typing.overload
739
- def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
740
- ...
741
-
742
- @typing.overload
743
- def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
744
- ...
745
-
746
- def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
828
+ def nvct(*, gpu: int, gpu_type: str) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
747
829
  """
748
- Specifies a timeout for your step.
749
-
750
- This decorator is useful if this step may hang indefinitely.
751
-
752
- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
753
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
754
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
755
-
756
- Note that all the values specified in parameters are added together so if you specify
757
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
830
+ Specifies that this step should execute on DGX cloud.
758
831
 
759
832
 
760
833
  Parameters
761
834
  ----------
762
- seconds : int, default 0
763
- Number of seconds to wait prior to timing out.
764
- minutes : int, default 0
765
- Number of minutes to wait prior to timing out.
766
- hours : int, default 0
767
- Number of hours to wait prior to timing out.
835
+ gpu : int
836
+ Number of GPUs to use.
837
+ gpu_type : str
838
+ Type of Nvidia GPU to use.
768
839
  """
769
840
  ...
770
841
 
771
- def ollama(*, models: list, backend: str, force_pull: bool, cache_update_policy: str, force_cache_update: bool, debug: bool, circuit_breaker_config: dict, timeout_config: dict) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
842
+ @typing.overload
843
+ def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
772
844
  """
773
- This decorator is used to run Ollama APIs as Metaflow task sidecars.
774
-
775
- User code call
776
- --------------
777
- @ollama(
778
- models=[...],
779
- ...
780
- )
781
-
782
- Valid backend options
783
- ---------------------
784
- - 'local': Run as a separate process on the local task machine.
785
- - (TODO) 'managed': Outerbounds hosts and selects compute provider.
786
- - (TODO) 'remote': Spin up separate instance to serve Ollama models.
787
-
788
- Valid model options
789
- -------------------
790
- Any model here https://ollama.com/search, e.g. 'llama3.2', 'llama3.3'
845
+ Specifies secrets to be retrieved and injected as environment variables prior to
846
+ the execution of a step.
791
847
 
792
848
 
793
849
  Parameters
794
850
  ----------
795
- models: list[str]
796
- List of Ollama containers running models in sidecars.
797
- backend: str
798
- Determines where and how to run the Ollama process.
799
- force_pull: bool
800
- Whether to run `ollama pull` no matter what, or first check the remote cache in Metaflow datastore for this model key.
801
- cache_update_policy: str
802
- Cache update policy: "auto", "force", or "never".
803
- force_cache_update: bool
804
- Simple override for "force" cache update policy.
805
- debug: bool
806
- Whether to turn on verbose debugging logs.
807
- circuit_breaker_config: dict
808
- Configuration for circuit breaker protection. Keys: failure_threshold, recovery_timeout, reset_timeout.
809
- timeout_config: dict
810
- Configuration for various operation timeouts. Keys: pull, stop, health_check, install, server_startup.
851
+ sources : List[Union[str, Dict[str, Any]]], default: []
852
+ List of secret specs, defining how the secrets are to be retrieved
853
+ role : str, optional, default: None
854
+ Role to use for fetching secrets
811
855
  """
812
856
  ...
813
857
 
814
858
  @typing.overload
815
- def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
816
- """
817
- Decorator prototype for all step decorators. This function gets specialized
818
- and imported for all decorators types by _import_plugin_decorators().
819
- """
859
+ def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
820
860
  ...
821
861
 
822
862
  @typing.overload
823
- def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
863
+ def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
824
864
  ...
825
865
 
826
- def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
866
+ def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None):
827
867
  """
828
- Decorator prototype for all step decorators. This function gets specialized
829
- and imported for all decorators types by _import_plugin_decorators().
868
+ Specifies secrets to be retrieved and injected as environment variables prior to
869
+ the execution of a step.
870
+
871
+
872
+ Parameters
873
+ ----------
874
+ sources : List[Union[str, Dict[str, Any]]], default: []
875
+ List of secret specs, defining how the secrets are to be retrieved
876
+ role : str, optional, default: None
877
+ Role to use for fetching secrets
830
878
  """
831
879
  ...
832
880
 
@@ -919,483 +967,419 @@ def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: ty
919
967
  """
920
968
  ...
921
969
 
922
- def huggingface_hub(*, temp_dir_root: typing.Optional[str] = None, load: typing.Union[typing.List[str], typing.List[typing.Tuple[typing.Dict, str]], typing.List[typing.Tuple[str, str]], typing.List[typing.Dict], None]) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
970
+ def ollama(*, models: list, backend: str, force_pull: bool, cache_update_policy: str, force_cache_update: bool, debug: bool, circuit_breaker_config: dict, timeout_config: dict) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
923
971
  """
924
- Decorator that helps cache, version and store models/datasets from huggingface hub.
925
-
926
- > Examples
927
-
928
- **Usage: creating references of models from huggingface that may be loaded in downstream steps**
929
- ```python
930
- @huggingface_hub
931
- @step
932
- def pull_model_from_huggingface(self):
933
- # `current.huggingface_hub.snapshot_download` downloads the model from the Hugging Face Hub
934
- # and saves it in the backend storage based on the model's `repo_id`. If there exists a model
935
- # with the same `repo_id` in the backend storage, it will not download the model again. The return
936
- # value of the function is a reference to the model in the backend storage.
937
- # This reference can be used to load the model in the subsequent steps via `@model(load=["llama_model"])`
938
-
939
- self.model_id = "mistralai/Mistral-7B-Instruct-v0.1"
940
- self.llama_model = current.huggingface_hub.snapshot_download(
941
- repo_id=self.model_id,
942
- allow_patterns=["*.safetensors", "*.json", "tokenizer.*"],
943
- )
944
- self.next(self.train)
945
- ```
972
+ This decorator is used to run Ollama APIs as Metaflow task sidecars.
946
973
 
947
- **Usage: loading models directly from huggingface hub or from cache (from metaflow's datastore)**
948
- ```python
949
- @huggingface_hub(load=["mistralai/Mistral-7B-Instruct-v0.1"])
950
- @step
951
- def pull_model_from_huggingface(self):
952
- path_to_model = current.huggingface_hub.loaded["mistralai/Mistral-7B-Instruct-v0.1"]
953
- ```
974
+ User code call
975
+ --------------
976
+ @ollama(
977
+ models=[...],
978
+ ...
979
+ )
954
980
 
955
- ```python
956
- @huggingface_hub(load=[("mistralai/Mistral-7B-Instruct-v0.1", "/my-directory"), ("myorg/mistral-lora, "/my-lora-directory")])
957
- @step
958
- def finetune_model(self):
959
- path_to_model = current.huggingface_hub.loaded["mistralai/Mistral-7B-Instruct-v0.1"]
960
- # path_to_model will be /my-directory
961
- ```
981
+ Valid backend options
982
+ ---------------------
983
+ - 'local': Run as a separate process on the local task machine.
984
+ - (TODO) 'managed': Outerbounds hosts and selects compute provider.
985
+ - (TODO) 'remote': Spin up separate instance to serve Ollama models.
962
986
 
963
- ```python
964
- # Takes all the arguments passed to `snapshot_download`
965
- # except for `local_dir`
966
- @huggingface_hub(load=[
967
- {
968
- "repo_id": "mistralai/Mistral-7B-Instruct-v0.1",
969
- },
970
- {
971
- "repo_id": "myorg/mistral-lora",
972
- "repo_type": "model",
973
- },
974
- ])
975
- @step
976
- def finetune_model(self):
977
- path_to_model = current.huggingface_hub.loaded["mistralai/Mistral-7B-Instruct-v0.1"]
978
- # path_to_model will be /my-directory
979
- ```
987
+ Valid model options
988
+ -------------------
989
+ Any model here https://ollama.com/search, e.g. 'llama3.2', 'llama3.3'
980
990
 
981
991
 
982
992
  Parameters
983
993
  ----------
984
- temp_dir_root : str, optional
985
- The root directory that will hold the temporary directory where objects will be downloaded.
986
-
987
- load: Union[List[str], List[Tuple[Dict, str]], List[Tuple[str, str]], List[Dict], None]
988
- The list of repos (models/datasets) to load.
989
-
990
- Loaded repos can be accessed via `current.huggingface_hub.loaded`. If load is set, then the following happens:
991
-
992
- - If repo (model/dataset) is not found in the datastore:
993
- - Downloads the repo from Hugging Face Hub to a temporary directory (or uses specified path) for local access
994
- - Stores it in Metaflow's datastore (s3/gcs/azure etc.) with a unique name based on repo_type/repo_id
995
- - All HF models loaded for a `@step` will be cached separately under flow/step/namespace.
996
-
997
- - If repo is found in the datastore:
998
- - Loads it directly from datastore to local path (can be temporary directory or specified path)
999
- """
1000
- ...
1001
-
1002
- @typing.overload
1003
- def coreweave_s3_proxy(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1004
- """
1005
- CoreWeave-specific S3 Proxy decorator for routing S3 requests through a local proxy service.
1006
- It exists to make it easier for users to know that this decorator should only be used with
1007
- a Neo Cloud like CoreWeave.
1008
- """
1009
- ...
1010
-
1011
- @typing.overload
1012
- def coreweave_s3_proxy(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1013
- ...
1014
-
1015
- def coreweave_s3_proxy(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
1016
- """
1017
- CoreWeave-specific S3 Proxy decorator for routing S3 requests through a local proxy service.
1018
- It exists to make it easier for users to know that this decorator should only be used with
1019
- a Neo Cloud like CoreWeave.
994
+ models: list[str]
995
+ List of Ollama containers running models in sidecars.
996
+ backend: str
997
+ Determines where and how to run the Ollama process.
998
+ force_pull: bool
999
+ Whether to run `ollama pull` no matter what, or first check the remote cache in Metaflow datastore for this model key.
1000
+ cache_update_policy: str
1001
+ Cache update policy: "auto", "force", or "never".
1002
+ force_cache_update: bool
1003
+ Simple override for "force" cache update policy.
1004
+ debug: bool
1005
+ Whether to turn on verbose debugging logs.
1006
+ circuit_breaker_config: dict
1007
+ Configuration for circuit breaker protection. Keys: failure_threshold, recovery_timeout, reset_timeout.
1008
+ timeout_config: dict
1009
+ Configuration for various operation timeouts. Keys: pull, stop, health_check, install, server_startup.
1020
1010
  """
1021
1011
  ...
1022
1012
 
1023
1013
  @typing.overload
1024
- def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1014
+ def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1025
1015
  """
1026
- Specifies secrets to be retrieved and injected as environment variables prior to
1027
- the execution of a step.
1016
+ Specifies environment variables to be set prior to the execution of a step.
1028
1017
 
1029
1018
 
1030
1019
  Parameters
1031
1020
  ----------
1032
- sources : List[Union[str, Dict[str, Any]]], default: []
1033
- List of secret specs, defining how the secrets are to be retrieved
1034
- role : str, optional, default: None
1035
- Role to use for fetching secrets
1021
+ vars : Dict[str, str], default {}
1022
+ Dictionary of environment variables to set.
1036
1023
  """
1037
1024
  ...
1038
1025
 
1039
1026
  @typing.overload
1040
- def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1027
+ def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1041
1028
  ...
1042
1029
 
1043
1030
  @typing.overload
1044
- def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1031
+ def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1045
1032
  ...
1046
1033
 
1047
- def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None):
1034
+ def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
1048
1035
  """
1049
- Specifies secrets to be retrieved and injected as environment variables prior to
1050
- the execution of a step.
1036
+ Specifies environment variables to be set prior to the execution of a step.
1051
1037
 
1052
1038
 
1053
1039
  Parameters
1054
1040
  ----------
1055
- sources : List[Union[str, Dict[str, Any]]], default: []
1056
- List of secret specs, defining how the secrets are to be retrieved
1057
- role : str, optional, default: None
1058
- Role to use for fetching secrets
1041
+ vars : Dict[str, str], default {}
1042
+ Dictionary of environment variables to set.
1059
1043
  """
1060
1044
  ...
1061
1045
 
1062
- @typing.overload
1063
- def model(*, load: typing.Union[typing.List[str], str, typing.List[typing.Tuple[str, typing.Optional[str]]]] = None, temp_dir_root: str = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1046
+ def vllm(*, model: str, backend: str, openai_api_server: bool, debug: bool, card_refresh_interval: int, max_retries: int, retry_alert_frequency: int, engine_args: dict) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1064
1047
  """
1065
- Enables loading / saving of models within a step.
1048
+ This decorator is used to run vllm APIs as Metaflow task sidecars.
1066
1049
 
1067
- > Examples
1068
- - Saving Models
1069
- ```python
1070
- @model
1071
- @step
1072
- def train(self):
1073
- # current.model.save returns a dictionary reference to the model saved
1074
- self.my_model = current.model.save(
1075
- path_to_my_model,
1076
- label="my_model",
1077
- metadata={
1078
- "epochs": 10,
1079
- "batch-size": 32,
1080
- "learning-rate": 0.001,
1081
- }
1082
- )
1083
- self.next(self.test)
1050
+ User code call
1051
+ --------------
1052
+ @vllm(
1053
+ model="...",
1054
+ ...
1055
+ )
1084
1056
 
1085
- @model(load="my_model")
1086
- @step
1087
- def test(self):
1088
- # `current.model.loaded` returns a dictionary of the loaded models
1089
- # where the key is the name of the artifact and the value is the path to the model
1090
- print(os.listdir(current.model.loaded["my_model"]))
1091
- self.next(self.end)
1092
- ```
1057
+ Valid backend options
1058
+ ---------------------
1059
+ - 'local': Run as a separate process on the local task machine.
1093
1060
 
1094
- - Loading models
1095
- ```python
1096
- @step
1097
- def train(self):
1098
- # current.model.load returns the path to the model loaded
1099
- checkpoint_path = current.model.load(
1100
- self.checkpoint_key,
1101
- )
1102
- model_path = current.model.load(
1103
- self.model,
1104
- )
1105
- self.next(self.test)
1106
- ```
1061
+ Valid model options
1062
+ -------------------
1063
+ Any HuggingFace model identifier, e.g. 'meta-llama/Llama-3.2-1B'
1064
+
1065
+ NOTE: vLLM's OpenAI-compatible server serves ONE model per server instance.
1066
+ If you need multiple models, you must create multiple @vllm decorators.
1107
1067
 
1108
1068
 
1109
1069
  Parameters
1110
1070
  ----------
1111
- load : Union[List[str],str,List[Tuple[str,Union[str,None]]]], default: None
1112
- Artifact name/s referencing the models/checkpoints to load. Artifact names refer to the names of the instance variables set to `self`.
1113
- These artifact names give to `load` be reference objects or reference `key` string's from objects created by `current.checkpoint` / `current.model` / `current.huggingface_hub`.
1114
- If a list of tuples is provided, the first element is the artifact name and the second element is the path the artifact needs be unpacked on
1115
- the local filesystem. If the second element is None, the artifact will be unpacked in the current working directory.
1116
- If a string is provided, then the artifact corresponding to that name will be loaded in the current working directory.
1117
-
1118
- temp_dir_root : str, default: None
1119
- The root directory under which `current.model.loaded` will store loaded models
1071
+ model: str
1072
+ HuggingFace model identifier to be served by vLLM.
1073
+ backend: str
1074
+ Determines where and how to run the vLLM process.
1075
+ openai_api_server: bool
1076
+ Whether to use OpenAI-compatible API server mode (subprocess) instead of native engine.
1077
+ Default is False (uses native engine).
1078
+ Set to True for backward compatibility with existing code.
1079
+ debug: bool
1080
+ Whether to turn on verbose debugging logs.
1081
+ card_refresh_interval: int
1082
+ Interval in seconds for refreshing the vLLM status card.
1083
+ Only used when openai_api_server=True.
1084
+ max_retries: int
1085
+ Maximum number of retries checking for vLLM server startup.
1086
+ Only used when openai_api_server=True.
1087
+ retry_alert_frequency: int
1088
+ Frequency of alert logs for vLLM server startup retries.
1089
+ Only used when openai_api_server=True.
1090
+ engine_args : dict
1091
+ Additional keyword arguments to pass to the vLLM engine.
1092
+ For example, `tensor_parallel_size=2`.
1120
1093
  """
1121
1094
  ...
1122
1095
 
1123
1096
  @typing.overload
1124
- def model(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1125
- ...
1126
-
1127
- @typing.overload
1128
- def model(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1129
- ...
1130
-
1131
- def model(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, load: typing.Union[typing.List[str], str, typing.List[typing.Tuple[str, typing.Optional[str]]]] = None, temp_dir_root: str = None):
1097
+ def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1132
1098
  """
1133
- Enables loading / saving of models within a step.
1134
-
1135
- > Examples
1136
- - Saving Models
1137
- ```python
1138
- @model
1139
- @step
1140
- def train(self):
1141
- # current.model.save returns a dictionary reference to the model saved
1142
- self.my_model = current.model.save(
1143
- path_to_my_model,
1144
- label="my_model",
1145
- metadata={
1146
- "epochs": 10,
1147
- "batch-size": 32,
1148
- "learning-rate": 0.001,
1149
- }
1150
- )
1151
- self.next(self.test)
1152
-
1153
- @model(load="my_model")
1154
- @step
1155
- def test(self):
1156
- # `current.model.loaded` returns a dictionary of the loaded models
1157
- # where the key is the name of the artifact and the value is the path to the model
1158
- print(os.listdir(current.model.loaded["my_model"]))
1159
- self.next(self.end)
1160
- ```
1099
+ Specifies the Conda environment for the step.
1161
1100
 
1162
- - Loading models
1163
- ```python
1164
- @step
1165
- def train(self):
1166
- # current.model.load returns the path to the model loaded
1167
- checkpoint_path = current.model.load(
1168
- self.checkpoint_key,
1169
- )
1170
- model_path = current.model.load(
1171
- self.model,
1172
- )
1173
- self.next(self.test)
1174
- ```
1101
+ Information in this decorator will augment any
1102
+ attributes set in the `@conda_base` flow-level decorator. Hence,
1103
+ you can use `@conda_base` to set packages required by all
1104
+ steps and use `@conda` to specify step-specific overrides.
1175
1105
 
1176
1106
 
1177
1107
  Parameters
1178
1108
  ----------
1179
- load : Union[List[str],str,List[Tuple[str,Union[str,None]]]], default: None
1180
- Artifact name/s referencing the models/checkpoints to load. Artifact names refer to the names of the instance variables set to `self`.
1181
- These artifact names give to `load` be reference objects or reference `key` string's from objects created by `current.checkpoint` / `current.model` / `current.huggingface_hub`.
1182
- If a list of tuples is provided, the first element is the artifact name and the second element is the path the artifact needs be unpacked on
1183
- the local filesystem. If the second element is None, the artifact will be unpacked in the current working directory.
1184
- If a string is provided, then the artifact corresponding to that name will be loaded in the current working directory.
1185
-
1186
- temp_dir_root : str, default: None
1187
- The root directory under which `current.model.loaded` will store loaded models
1109
+ packages : Dict[str, str], default {}
1110
+ Packages to use for this step. The key is the name of the package
1111
+ and the value is the version to use.
1112
+ libraries : Dict[str, str], default {}
1113
+ Supported for backward compatibility. When used with packages, packages will take precedence.
1114
+ python : str, optional, default None
1115
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1116
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1117
+ disabled : bool, default False
1118
+ If set to True, disables @conda.
1188
1119
  """
1189
1120
  ...
1190
1121
 
1191
1122
  @typing.overload
1192
- def app_deploy(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1193
- """
1194
- Decorator prototype for all step decorators. This function gets specialized
1195
- and imported for all decorators types by _import_plugin_decorators().
1196
- """
1123
+ def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1197
1124
  ...
1198
1125
 
1199
1126
  @typing.overload
1200
- def app_deploy(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1201
- ...
1202
-
1203
- def app_deploy(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
1204
- """
1205
- Decorator prototype for all step decorators. This function gets specialized
1206
- and imported for all decorators types by _import_plugin_decorators().
1207
- """
1127
+ def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1208
1128
  ...
1209
1129
 
1210
- @typing.overload
1211
- def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1130
+ def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
1212
1131
  """
1213
- Specifies the PyPI packages for the step.
1132
+ Specifies the Conda environment for the step.
1214
1133
 
1215
1134
  Information in this decorator will augment any
1216
- attributes set in the `@pyi_base` flow-level decorator. Hence,
1217
- you can use `@pypi_base` to set packages required by all
1218
- steps and use `@pypi` to specify step-specific overrides.
1135
+ attributes set in the `@conda_base` flow-level decorator. Hence,
1136
+ you can use `@conda_base` to set packages required by all
1137
+ steps and use `@conda` to specify step-specific overrides.
1219
1138
 
1220
1139
 
1221
1140
  Parameters
1222
1141
  ----------
1223
- packages : Dict[str, str], default: {}
1142
+ packages : Dict[str, str], default {}
1224
1143
  Packages to use for this step. The key is the name of the package
1225
1144
  and the value is the version to use.
1226
- python : str, optional, default: None
1145
+ libraries : Dict[str, str], default {}
1146
+ Supported for backward compatibility. When used with packages, packages will take precedence.
1147
+ python : str, optional, default None
1227
1148
  Version of Python to use, e.g. '3.7.4'. A default value of None implies
1228
1149
  that the version used will correspond to the version of the Python interpreter used to start the run.
1150
+ disabled : bool, default False
1151
+ If set to True, disables @conda.
1229
1152
  """
1230
1153
  ...
1231
1154
 
1232
1155
  @typing.overload
1233
- def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1156
+ def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1157
+ """
1158
+ Specifies a timeout for your step.
1159
+
1160
+ This decorator is useful if this step may hang indefinitely.
1161
+
1162
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
1163
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
1164
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.
1165
+
1166
+ Note that all the values specified in parameters are added together so if you specify
1167
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
1168
+
1169
+
1170
+ Parameters
1171
+ ----------
1172
+ seconds : int, default 0
1173
+ Number of seconds to wait prior to timing out.
1174
+ minutes : int, default 0
1175
+ Number of minutes to wait prior to timing out.
1176
+ hours : int, default 0
1177
+ Number of hours to wait prior to timing out.
1178
+ """
1234
1179
  ...
1235
1180
 
1236
1181
  @typing.overload
1237
- def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1182
+ def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1238
1183
  ...
1239
1184
 
1240
- def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
1185
+ @typing.overload
1186
+ def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1187
+ ...
1188
+
1189
+ def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
1241
1190
  """
1242
- Specifies the PyPI packages for the step.
1191
+ Specifies a timeout for your step.
1243
1192
 
1244
- Information in this decorator will augment any
1245
- attributes set in the `@pyi_base` flow-level decorator. Hence,
1246
- you can use `@pypi_base` to set packages required by all
1247
- steps and use `@pypi` to specify step-specific overrides.
1193
+ This decorator is useful if this step may hang indefinitely.
1194
+
1195
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
1196
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
1197
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.
1198
+
1199
+ Note that all the values specified in parameters are added together so if you specify
1200
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
1248
1201
 
1249
1202
 
1250
1203
  Parameters
1251
1204
  ----------
1252
- packages : Dict[str, str], default: {}
1253
- Packages to use for this step. The key is the name of the package
1254
- and the value is the version to use.
1255
- python : str, optional, default: None
1256
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1257
- that the version used will correspond to the version of the Python interpreter used to start the run.
1205
+ seconds : int, default 0
1206
+ Number of seconds to wait prior to timing out.
1207
+ minutes : int, default 0
1208
+ Number of minutes to wait prior to timing out.
1209
+ hours : int, default 0
1210
+ Number of hours to wait prior to timing out.
1211
+ """
1212
+ ...
1213
+
1214
+ def s3_proxy(*, integration_name: typing.Optional[str] = None, write_mode: typing.Optional[str] = None, debug: typing.Optional[bool] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1215
+ """
1216
+ S3 Proxy decorator for routing S3 requests through a local proxy service.
1217
+
1218
+
1219
+ Parameters
1220
+ ----------
1221
+ integration_name : str, optional
1222
+ Name of the S3 proxy integration. If not specified, will use the only
1223
+ available S3 proxy integration in the namespace (fails if multiple exist).
1224
+ write_mode : str, optional
1225
+ The desired behavior during write operations to target (origin) S3 bucket.
1226
+ allowed options are:
1227
+ "origin-and-cache" -> write to both the target S3 bucket and local object
1228
+ storage
1229
+ "origin" -> only write to the target S3 bucket
1230
+ "cache" -> only write to the object storage service used for caching
1231
+ debug : bool, optional
1232
+ Enable debug logging for proxy operations.
1258
1233
  """
1259
1234
  ...
1260
1235
 
1261
1236
  @typing.overload
1262
- def test_append_card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1237
+ def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1263
1238
  """
1264
- A simple decorator that demonstrates using CardDecoratorInjector
1265
- to inject a card and render simple markdown content.
1239
+ Internal decorator to support Fast bakery
1266
1240
  """
1267
1241
  ...
1268
1242
 
1269
1243
  @typing.overload
1270
- def test_append_card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1244
+ def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1271
1245
  ...
1272
1246
 
1273
- def test_append_card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
1247
+ def fast_bakery_internal(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
1274
1248
  """
1275
- A simple decorator that demonstrates using CardDecoratorInjector
1276
- to inject a card and render simple markdown content.
1249
+ Internal decorator to support Fast bakery
1277
1250
  """
1278
1251
  ...
1279
1252
 
1280
1253
  @typing.overload
1281
- def nebius_s3_proxy(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1254
+ def test_append_card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1282
1255
  """
1283
- Nebius-specific S3 Proxy decorator for routing S3 requests through a local proxy service.
1284
- It exists to make it easier for users to know that this decorator should only be used with
1285
- a Neo Cloud like Nebius.
1256
+ A simple decorator that demonstrates using CardDecoratorInjector
1257
+ to inject a card and render simple markdown content.
1286
1258
  """
1287
1259
  ...
1288
1260
 
1289
1261
  @typing.overload
1290
- def nebius_s3_proxy(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1262
+ def test_append_card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1291
1263
  ...
1292
1264
 
1293
- def nebius_s3_proxy(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
1265
+ def test_append_card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
1294
1266
  """
1295
- Nebius-specific S3 Proxy decorator for routing S3 requests through a local proxy service.
1296
- It exists to make it easier for users to know that this decorator should only be used with
1297
- a Neo Cloud like Nebius.
1267
+ A simple decorator that demonstrates using CardDecoratorInjector
1268
+ to inject a card and render simple markdown content.
1298
1269
  """
1299
1270
  ...
1300
1271
 
1301
- def vllm(*, model: str, backend: str, openai_api_server: bool, debug: bool, card_refresh_interval: int, max_retries: int, retry_alert_frequency: int, engine_args: dict) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1272
+ @typing.overload
1273
+ def resources(*, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1302
1274
  """
1303
- This decorator is used to run vllm APIs as Metaflow task sidecars.
1304
-
1305
- User code call
1306
- --------------
1307
- @vllm(
1308
- model="...",
1309
- ...
1310
- )
1311
-
1312
- Valid backend options
1313
- ---------------------
1314
- - 'local': Run as a separate process on the local task machine.
1275
+ Specifies the resources needed when executing this step.
1315
1276
 
1316
- Valid model options
1317
- -------------------
1318
- Any HuggingFace model identifier, e.g. 'meta-llama/Llama-3.2-1B'
1277
+ Use `@resources` to specify the resource requirements
1278
+ independently of the specific compute layer (`@batch`, `@kubernetes`).
1319
1279
 
1320
- NOTE: vLLM's OpenAI-compatible server serves ONE model per server instance.
1321
- If you need multiple models, you must create multiple @vllm decorators.
1280
+ You can choose the compute layer on the command line by executing e.g.
1281
+ ```
1282
+ python myflow.py run --with batch
1283
+ ```
1284
+ or
1285
+ ```
1286
+ python myflow.py run --with kubernetes
1287
+ ```
1288
+ which executes the flow on the desired system using the
1289
+ requirements specified in `@resources`.
1322
1290
 
1323
1291
 
1324
1292
  Parameters
1325
1293
  ----------
1326
- model: str
1327
- HuggingFace model identifier to be served by vLLM.
1328
- backend: str
1329
- Determines where and how to run the vLLM process.
1330
- openai_api_server: bool
1331
- Whether to use OpenAI-compatible API server mode (subprocess) instead of native engine.
1332
- Default is False (uses native engine).
1333
- Set to True for backward compatibility with existing code.
1334
- debug: bool
1335
- Whether to turn on verbose debugging logs.
1336
- card_refresh_interval: int
1337
- Interval in seconds for refreshing the vLLM status card.
1338
- Only used when openai_api_server=True.
1339
- max_retries: int
1340
- Maximum number of retries checking for vLLM server startup.
1341
- Only used when openai_api_server=True.
1342
- retry_alert_frequency: int
1343
- Frequency of alert logs for vLLM server startup retries.
1344
- Only used when openai_api_server=True.
1345
- engine_args : dict
1346
- Additional keyword arguments to pass to the vLLM engine.
1347
- For example, `tensor_parallel_size=2`.
1294
+ cpu : int, default 1
1295
+ Number of CPUs required for this step.
1296
+ gpu : int, optional, default None
1297
+ Number of GPUs required for this step.
1298
+ disk : int, optional, default None
1299
+ Disk size (in MB) required for this step. Only applies on Kubernetes.
1300
+ memory : int, default 4096
1301
+ Memory size (in MB) required for this step.
1302
+ shared_memory : int, optional, default None
1303
+ The value for the size (in MiB) of the /dev/shm volume for this step.
1304
+ This parameter maps to the `--shm-size` option in Docker.
1348
1305
  """
1349
1306
  ...
1350
1307
 
1351
1308
  @typing.overload
1352
- def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1309
+ def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1310
+ ...
1311
+
1312
+ @typing.overload
1313
+ def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1314
+ ...
1315
+
1316
+ def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
1353
1317
  """
1354
- Specifies the Conda environment for all steps of the flow.
1318
+ Specifies the resources needed when executing this step.
1355
1319
 
1356
- Use `@conda_base` to set common libraries required by all
1357
- steps and use `@conda` to specify step-specific additions.
1320
+ Use `@resources` to specify the resource requirements
1321
+ independently of the specific compute layer (`@batch`, `@kubernetes`).
1322
+
1323
+ You can choose the compute layer on the command line by executing e.g.
1324
+ ```
1325
+ python myflow.py run --with batch
1326
+ ```
1327
+ or
1328
+ ```
1329
+ python myflow.py run --with kubernetes
1330
+ ```
1331
+ which executes the flow on the desired system using the
1332
+ requirements specified in `@resources`.
1358
1333
 
1359
1334
 
1360
1335
  Parameters
1361
1336
  ----------
1362
- packages : Dict[str, str], default {}
1363
- Packages to use for this flow. The key is the name of the package
1364
- and the value is the version to use.
1365
- libraries : Dict[str, str], default {}
1366
- Supported for backward compatibility. When used with packages, packages will take precedence.
1367
- python : str, optional, default None
1368
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1369
- that the version used will correspond to the version of the Python interpreter used to start the run.
1370
- disabled : bool, default False
1371
- If set to True, disables Conda.
1337
+ cpu : int, default 1
1338
+ Number of CPUs required for this step.
1339
+ gpu : int, optional, default None
1340
+ Number of GPUs required for this step.
1341
+ disk : int, optional, default None
1342
+ Disk size (in MB) required for this step. Only applies on Kubernetes.
1343
+ memory : int, default 4096
1344
+ Memory size (in MB) required for this step.
1345
+ shared_memory : int, optional, default None
1346
+ The value for the size (in MiB) of the /dev/shm volume for this step.
1347
+ This parameter maps to the `--shm-size` option in Docker.
1372
1348
  """
1373
1349
  ...
1374
1350
 
1375
- @typing.overload
1376
- def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1377
- ...
1378
-
1379
- def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
1351
+ def project(*, name: str, branch: typing.Optional[str] = None, production: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1380
1352
  """
1381
- Specifies the Conda environment for all steps of the flow.
1353
+ Specifies what flows belong to the same project.
1382
1354
 
1383
- Use `@conda_base` to set common libraries required by all
1384
- steps and use `@conda` to specify step-specific additions.
1355
+ A project-specific namespace is created for all flows that
1356
+ use the same `@project(name)`.
1385
1357
 
1386
1358
 
1387
1359
  Parameters
1388
1360
  ----------
1389
- packages : Dict[str, str], default {}
1390
- Packages to use for this flow. The key is the name of the package
1391
- and the value is the version to use.
1392
- libraries : Dict[str, str], default {}
1393
- Supported for backward compatibility. When used with packages, packages will take precedence.
1394
- python : str, optional, default None
1395
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1396
- that the version used will correspond to the version of the Python interpreter used to start the run.
1397
- disabled : bool, default False
1398
- If set to True, disables Conda.
1361
+ name : str
1362
+ Project name. Make sure that the name is unique amongst all
1363
+ projects that use the same production scheduler. The name may
1364
+ contain only lowercase alphanumeric characters and underscores.
1365
+
1366
+ branch : Optional[str], default None
1367
+ The branch to use. If not specified, the branch is set to
1368
+ `user.<username>` unless `production` is set to `True`. This can
1369
+ also be set on the command line using `--branch` as a top-level option.
1370
+ It is an error to specify `branch` in the decorator and on the command line.
1371
+
1372
+ production : bool, default False
1373
+ Whether or not the branch is the production branch. This can also be set on the
1374
+ command line using `--production` as a top-level option. It is an error to specify
1375
+ `production` in the decorator and on the command line.
1376
+ The project branch name will be:
1377
+ - if `branch` is specified:
1378
+ - if `production` is True: `prod.<branch>`
1379
+ - if `production` is False: `test.<branch>`
1380
+ - if `branch` is not specified:
1381
+ - if `production` is True: `prod`
1382
+ - if `production` is False: `user.<username>`
1399
1383
  """
1400
1384
  ...
1401
1385
 
@@ -1483,47 +1467,256 @@ def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: t
1483
1467
 
1484
1468
  Parameters
1485
1469
  ----------
1486
- event : Union[str, Dict[str, Any]], optional, default None
1487
- Event dependency for this flow.
1488
- events : List[Union[str, Dict[str, Any]]], default []
1489
- Events dependency for this flow.
1490
- options : Dict[str, Any], default {}
1491
- Backend-specific configuration for tuning eventing behavior.
1470
+ event : Union[str, Dict[str, Any]], optional, default None
1471
+ Event dependency for this flow.
1472
+ events : List[Union[str, Dict[str, Any]]], default []
1473
+ Events dependency for this flow.
1474
+ options : Dict[str, Any], default {}
1475
+ Backend-specific configuration for tuning eventing behavior.
1476
+ """
1477
+ ...
1478
+
1479
+ @typing.overload
1480
+ def trigger_on_finish(*, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1481
+ """
1482
+ Specifies the flow(s) that this flow depends on.
1483
+
1484
+ ```
1485
+ @trigger_on_finish(flow='FooFlow')
1486
+ ```
1487
+ or
1488
+ ```
1489
+ @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
1490
+ ```
1491
+ This decorator respects the @project decorator and triggers the flow
1492
+ when upstream runs within the same namespace complete successfully
1493
+
1494
+ Additionally, you can specify project aware upstream flow dependencies
1495
+ by specifying the fully qualified project_flow_name.
1496
+ ```
1497
+ @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
1498
+ ```
1499
+ or
1500
+ ```
1501
+ @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
1502
+ ```
1503
+
1504
+ You can also specify just the project or project branch (other values will be
1505
+ inferred from the current project or project branch):
1506
+ ```
1507
+ @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1508
+ ```
1509
+
1510
+ Note that `branch` is typically one of:
1511
+ - `prod`
1512
+ - `user.bob`
1513
+ - `test.my_experiment`
1514
+ - `prod.staging`
1515
+
1516
+
1517
+ Parameters
1518
+ ----------
1519
+ flow : Union[str, Dict[str, str]], optional, default None
1520
+ Upstream flow dependency for this flow.
1521
+ flows : List[Union[str, Dict[str, str]]], default []
1522
+ Upstream flow dependencies for this flow.
1523
+ options : Dict[str, Any], default {}
1524
+ Backend-specific configuration for tuning eventing behavior.
1525
+ """
1526
+ ...
1527
+
1528
+ @typing.overload
1529
+ def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1530
+ ...
1531
+
1532
+ def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
1533
+ """
1534
+ Specifies the flow(s) that this flow depends on.
1535
+
1536
+ ```
1537
+ @trigger_on_finish(flow='FooFlow')
1538
+ ```
1539
+ or
1540
+ ```
1541
+ @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
1542
+ ```
1543
+ This decorator respects the @project decorator and triggers the flow
1544
+ when upstream runs within the same namespace complete successfully
1545
+
1546
+ Additionally, you can specify project aware upstream flow dependencies
1547
+ by specifying the fully qualified project_flow_name.
1548
+ ```
1549
+ @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
1550
+ ```
1551
+ or
1552
+ ```
1553
+ @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
1554
+ ```
1555
+
1556
+ You can also specify just the project or project branch (other values will be
1557
+ inferred from the current project or project branch):
1558
+ ```
1559
+ @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1560
+ ```
1561
+
1562
+ Note that `branch` is typically one of:
1563
+ - `prod`
1564
+ - `user.bob`
1565
+ - `test.my_experiment`
1566
+ - `prod.staging`
1567
+
1568
+
1569
+ Parameters
1570
+ ----------
1571
+ flow : Union[str, Dict[str, str]], optional, default None
1572
+ Upstream flow dependency for this flow.
1573
+ flows : List[Union[str, Dict[str, str]]], default []
1574
+ Upstream flow dependencies for this flow.
1575
+ options : Dict[str, Any], default {}
1576
+ Backend-specific configuration for tuning eventing behavior.
1577
+ """
1578
+ ...
1579
+
1580
+ @typing.overload
1581
+ def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1582
+ """
1583
+ Specifies the times when the flow should be run when running on a
1584
+ production scheduler.
1585
+
1586
+
1587
+ Parameters
1588
+ ----------
1589
+ hourly : bool, default False
1590
+ Run the workflow hourly.
1591
+ daily : bool, default True
1592
+ Run the workflow daily.
1593
+ weekly : bool, default False
1594
+ Run the workflow weekly.
1595
+ cron : str, optional, default None
1596
+ Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1597
+ specified by this expression.
1598
+ timezone : str, optional, default None
1599
+ Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1600
+ which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1601
+ """
1602
+ ...
1603
+
1604
+ @typing.overload
1605
+ def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1606
+ ...
1607
+
1608
+ def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
1609
+ """
1610
+ Specifies the times when the flow should be run when running on a
1611
+ production scheduler.
1612
+
1613
+
1614
+ Parameters
1615
+ ----------
1616
+ hourly : bool, default False
1617
+ Run the workflow hourly.
1618
+ daily : bool, default True
1619
+ Run the workflow daily.
1620
+ weekly : bool, default False
1621
+ Run the workflow weekly.
1622
+ cron : str, optional, default None
1623
+ Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1624
+ specified by this expression.
1625
+ timezone : str, optional, default None
1626
+ Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1627
+ which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1628
+ """
1629
+ ...
1630
+
1631
+ @typing.overload
1632
+ def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1633
+ """
1634
+ Specifies the PyPI packages for all steps of the flow.
1635
+
1636
+ Use `@pypi_base` to set common packages required by all
1637
+ steps and use `@pypi` to specify step-specific overrides.
1638
+
1639
+ Parameters
1640
+ ----------
1641
+ packages : Dict[str, str], default: {}
1642
+ Packages to use for this flow. The key is the name of the package
1643
+ and the value is the version to use.
1644
+ python : str, optional, default: None
1645
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1646
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1647
+ """
1648
+ ...
1649
+
1650
+ @typing.overload
1651
+ def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1652
+ ...
1653
+
1654
+ def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
1655
+ """
1656
+ Specifies the PyPI packages for all steps of the flow.
1657
+
1658
+ Use `@pypi_base` to set common packages required by all
1659
+ steps and use `@pypi` to specify step-specific overrides.
1660
+
1661
+ Parameters
1662
+ ----------
1663
+ packages : Dict[str, str], default: {}
1664
+ Packages to use for this flow. The key is the name of the package
1665
+ and the value is the version to use.
1666
+ python : str, optional, default: None
1667
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1668
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1669
+ """
1670
+ ...
1671
+
1672
+ @typing.overload
1673
+ def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1674
+ """
1675
+ Specifies the Conda environment for all steps of the flow.
1676
+
1677
+ Use `@conda_base` to set common libraries required by all
1678
+ steps and use `@conda` to specify step-specific additions.
1679
+
1680
+
1681
+ Parameters
1682
+ ----------
1683
+ packages : Dict[str, str], default {}
1684
+ Packages to use for this flow. The key is the name of the package
1685
+ and the value is the version to use.
1686
+ libraries : Dict[str, str], default {}
1687
+ Supported for backward compatibility. When used with packages, packages will take precedence.
1688
+ python : str, optional, default None
1689
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1690
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1691
+ disabled : bool, default False
1692
+ If set to True, disables Conda.
1492
1693
  """
1493
1694
  ...
1494
1695
 
1495
- def project(*, name: str, branch: typing.Optional[str] = None, production: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1696
+ @typing.overload
1697
+ def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1698
+ ...
1699
+
1700
+ def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
1496
1701
  """
1497
- Specifies what flows belong to the same project.
1702
+ Specifies the Conda environment for all steps of the flow.
1498
1703
 
1499
- A project-specific namespace is created for all flows that
1500
- use the same `@project(name)`.
1704
+ Use `@conda_base` to set common libraries required by all
1705
+ steps and use `@conda` to specify step-specific additions.
1501
1706
 
1502
1707
 
1503
1708
  Parameters
1504
1709
  ----------
1505
- name : str
1506
- Project name. Make sure that the name is unique amongst all
1507
- projects that use the same production scheduler. The name may
1508
- contain only lowercase alphanumeric characters and underscores.
1509
-
1510
- branch : Optional[str], default None
1511
- The branch to use. If not specified, the branch is set to
1512
- `user.<username>` unless `production` is set to `True`. This can
1513
- also be set on the command line using `--branch` as a top-level option.
1514
- It is an error to specify `branch` in the decorator and on the command line.
1515
-
1516
- production : bool, default False
1517
- Whether or not the branch is the production branch. This can also be set on the
1518
- command line using `--production` as a top-level option. It is an error to specify
1519
- `production` in the decorator and on the command line.
1520
- The project branch name will be:
1521
- - if `branch` is specified:
1522
- - if `production` is True: `prod.<branch>`
1523
- - if `production` is False: `test.<branch>`
1524
- - if `branch` is not specified:
1525
- - if `production` is True: `prod`
1526
- - if `production` is False: `user.<username>`
1710
+ packages : Dict[str, str], default {}
1711
+ Packages to use for this flow. The key is the name of the package
1712
+ and the value is the version to use.
1713
+ libraries : Dict[str, str], default {}
1714
+ Supported for backward compatibility. When used with packages, packages will take precedence.
1715
+ python : str, optional, default None
1716
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1717
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1718
+ disabled : bool, default False
1719
+ If set to True, disables Conda.
1527
1720
  """
1528
1721
  ...
1529
1722
 
@@ -1727,198 +1920,5 @@ def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, expone
1727
1920
  """
1728
1921
  ...
1729
1922
 
1730
- @typing.overload
1731
- def trigger_on_finish(*, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1732
- """
1733
- Specifies the flow(s) that this flow depends on.
1734
-
1735
- ```
1736
- @trigger_on_finish(flow='FooFlow')
1737
- ```
1738
- or
1739
- ```
1740
- @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
1741
- ```
1742
- This decorator respects the @project decorator and triggers the flow
1743
- when upstream runs within the same namespace complete successfully
1744
-
1745
- Additionally, you can specify project aware upstream flow dependencies
1746
- by specifying the fully qualified project_flow_name.
1747
- ```
1748
- @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
1749
- ```
1750
- or
1751
- ```
1752
- @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
1753
- ```
1754
-
1755
- You can also specify just the project or project branch (other values will be
1756
- inferred from the current project or project branch):
1757
- ```
1758
- @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1759
- ```
1760
-
1761
- Note that `branch` is typically one of:
1762
- - `prod`
1763
- - `user.bob`
1764
- - `test.my_experiment`
1765
- - `prod.staging`
1766
-
1767
-
1768
- Parameters
1769
- ----------
1770
- flow : Union[str, Dict[str, str]], optional, default None
1771
- Upstream flow dependency for this flow.
1772
- flows : List[Union[str, Dict[str, str]]], default []
1773
- Upstream flow dependencies for this flow.
1774
- options : Dict[str, Any], default {}
1775
- Backend-specific configuration for tuning eventing behavior.
1776
- """
1777
- ...
1778
-
1779
- @typing.overload
1780
- def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1781
- ...
1782
-
1783
- def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
1784
- """
1785
- Specifies the flow(s) that this flow depends on.
1786
-
1787
- ```
1788
- @trigger_on_finish(flow='FooFlow')
1789
- ```
1790
- or
1791
- ```
1792
- @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
1793
- ```
1794
- This decorator respects the @project decorator and triggers the flow
1795
- when upstream runs within the same namespace complete successfully
1796
-
1797
- Additionally, you can specify project aware upstream flow dependencies
1798
- by specifying the fully qualified project_flow_name.
1799
- ```
1800
- @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
1801
- ```
1802
- or
1803
- ```
1804
- @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
1805
- ```
1806
-
1807
- You can also specify just the project or project branch (other values will be
1808
- inferred from the current project or project branch):
1809
- ```
1810
- @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1811
- ```
1812
-
1813
- Note that `branch` is typically one of:
1814
- - `prod`
1815
- - `user.bob`
1816
- - `test.my_experiment`
1817
- - `prod.staging`
1818
-
1819
-
1820
- Parameters
1821
- ----------
1822
- flow : Union[str, Dict[str, str]], optional, default None
1823
- Upstream flow dependency for this flow.
1824
- flows : List[Union[str, Dict[str, str]]], default []
1825
- Upstream flow dependencies for this flow.
1826
- options : Dict[str, Any], default {}
1827
- Backend-specific configuration for tuning eventing behavior.
1828
- """
1829
- ...
1830
-
1831
- @typing.overload
1832
- def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1833
- """
1834
- Specifies the PyPI packages for all steps of the flow.
1835
-
1836
- Use `@pypi_base` to set common packages required by all
1837
- steps and use `@pypi` to specify step-specific overrides.
1838
-
1839
- Parameters
1840
- ----------
1841
- packages : Dict[str, str], default: {}
1842
- Packages to use for this flow. The key is the name of the package
1843
- and the value is the version to use.
1844
- python : str, optional, default: None
1845
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1846
- that the version used will correspond to the version of the Python interpreter used to start the run.
1847
- """
1848
- ...
1849
-
1850
- @typing.overload
1851
- def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1852
- ...
1853
-
1854
- def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
1855
- """
1856
- Specifies the PyPI packages for all steps of the flow.
1857
-
1858
- Use `@pypi_base` to set common packages required by all
1859
- steps and use `@pypi` to specify step-specific overrides.
1860
-
1861
- Parameters
1862
- ----------
1863
- packages : Dict[str, str], default: {}
1864
- Packages to use for this flow. The key is the name of the package
1865
- and the value is the version to use.
1866
- python : str, optional, default: None
1867
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1868
- that the version used will correspond to the version of the Python interpreter used to start the run.
1869
- """
1870
- ...
1871
-
1872
- @typing.overload
1873
- def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1874
- """
1875
- Specifies the times when the flow should be run when running on a
1876
- production scheduler.
1877
-
1878
-
1879
- Parameters
1880
- ----------
1881
- hourly : bool, default False
1882
- Run the workflow hourly.
1883
- daily : bool, default True
1884
- Run the workflow daily.
1885
- weekly : bool, default False
1886
- Run the workflow weekly.
1887
- cron : str, optional, default None
1888
- Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1889
- specified by this expression.
1890
- timezone : str, optional, default None
1891
- Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1892
- which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1893
- """
1894
- ...
1895
-
1896
- @typing.overload
1897
- def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1898
- ...
1899
-
1900
- def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
1901
- """
1902
- Specifies the times when the flow should be run when running on a
1903
- production scheduler.
1904
-
1905
-
1906
- Parameters
1907
- ----------
1908
- hourly : bool, default False
1909
- Run the workflow hourly.
1910
- daily : bool, default True
1911
- Run the workflow daily.
1912
- weekly : bool, default False
1913
- Run the workflow weekly.
1914
- cron : str, optional, default None
1915
- Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1916
- specified by this expression.
1917
- timezone : str, optional, default None
1918
- Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1919
- which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1920
- """
1921
- ...
1922
-
1923
1923
  pkg_name: str
1924
1924