ob-metaflow-stubs 6.0.5.0__py2.py3-none-any.whl → 6.0.5.1__py2.py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of ob-metaflow-stubs might be problematic.

Files changed (261)
  1. metaflow-stubs/__init__.pyi +935 -866
  2. metaflow-stubs/cards.pyi +1 -1
  3. metaflow-stubs/cli.pyi +1 -1
  4. metaflow-stubs/cli_components/__init__.pyi +1 -1
  5. metaflow-stubs/cli_components/utils.pyi +1 -1
  6. metaflow-stubs/client/__init__.pyi +1 -1
  7. metaflow-stubs/client/core.pyi +5 -5
  8. metaflow-stubs/client/filecache.pyi +1 -1
  9. metaflow-stubs/events.pyi +1 -1
  10. metaflow-stubs/exception.pyi +1 -1
  11. metaflow-stubs/flowspec.pyi +4 -4
  12. metaflow-stubs/generated_for.txt +1 -1
  13. metaflow-stubs/includefile.pyi +4 -4
  14. metaflow-stubs/meta_files.pyi +1 -1
  15. metaflow-stubs/metadata_provider/__init__.pyi +1 -1
  16. metaflow-stubs/metadata_provider/heartbeat.pyi +1 -1
  17. metaflow-stubs/metadata_provider/metadata.pyi +2 -2
  18. metaflow-stubs/metadata_provider/util.pyi +1 -1
  19. metaflow-stubs/metaflow_config.pyi +1 -1
  20. metaflow-stubs/metaflow_current.pyi +42 -42
  21. metaflow-stubs/metaflow_git.pyi +1 -1
  22. metaflow-stubs/mf_extensions/__init__.pyi +1 -1
  23. metaflow-stubs/mf_extensions/obcheckpoint/__init__.pyi +1 -1
  24. metaflow-stubs/mf_extensions/obcheckpoint/plugins/__init__.pyi +1 -1
  25. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/__init__.pyi +1 -1
  26. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/__init__.pyi +1 -1
  27. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/async_cards.pyi +1 -1
  28. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/deco_injection_mixin.pyi +1 -1
  29. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/extra_components.pyi +2 -2
  30. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/__init__.pyi +1 -1
  31. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/cards/__init__.pyi +1 -1
  32. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/cards/checkpoint_lister.pyi +2 -2
  33. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/cards/lineage_card.pyi +1 -1
  34. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/checkpoint_storage.pyi +3 -3
  35. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/constructors.pyi +1 -1
  36. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/core.pyi +3 -3
  37. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/decorator.pyi +3 -3
  38. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/exceptions.pyi +1 -1
  39. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/final_api.pyi +2 -2
  40. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/lineage.pyi +1 -1
  41. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/__init__.pyi +1 -1
  42. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/context.pyi +3 -3
  43. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/core.pyi +2 -2
  44. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/decorator.pyi +1 -1
  45. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/exceptions.pyi +1 -1
  46. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/task_utils.pyi +3 -3
  47. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/utils.pyi +1 -1
  48. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastructures.pyi +2 -2
  49. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/exceptions.pyi +1 -1
  50. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/hf_hub/__init__.pyi +1 -1
  51. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/hf_hub/decorator.pyi +2 -2
  52. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/__init__.pyi +1 -1
  53. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/core.pyi +2 -2
  54. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/exceptions.pyi +1 -1
  55. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/model_storage.pyi +3 -3
  56. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/__init__.pyi +1 -1
  57. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/flowspec_utils.pyi +1 -1
  58. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/general.pyi +1 -1
  59. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/identity_utils.pyi +2 -2
  60. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/serialization_handler/__init__.pyi +1 -1
  61. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/serialization_handler/base.pyi +1 -1
  62. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/serialization_handler/tar.pyi +2 -2
  63. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/tar_utils.pyi +2 -2
  64. metaflow-stubs/mf_extensions/outerbounds/__init__.pyi +1 -1
  65. metaflow-stubs/mf_extensions/outerbounds/plugins/__init__.pyi +1 -1
  66. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/__init__.pyi +1 -1
  67. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/__init__.pyi +1 -1
  68. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/_state_machine.pyi +5 -5
  69. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/_vendor/__init__.pyi +1 -1
  70. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/_vendor/spinner/__init__.pyi +1 -1
  71. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/_vendor/spinner/spinners.pyi +1 -1
  72. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/app_cli.pyi +2 -2
  73. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/app_config.pyi +2 -2
  74. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/capsule.pyi +7 -7
  75. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/click_importer.pyi +1 -1
  76. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/code_package/__init__.pyi +1 -1
  77. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/code_package/code_packager.pyi +1 -1
  78. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/__init__.pyi +1 -1
  79. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/cli_generator.pyi +1 -1
  80. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/config_utils.pyi +3 -3
  81. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/schema_export.pyi +1 -1
  82. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/typed_configs.pyi +2 -2
  83. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/unified_config.pyi +2 -2
  84. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/dependencies.pyi +2 -2
  85. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/deployer.pyi +4 -4
  86. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/experimental/__init__.pyi +1 -1
  87. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/perimeters.pyi +1 -1
  88. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/utils.pyi +5 -5
  89. metaflow-stubs/mf_extensions/outerbounds/plugins/aws/__init__.pyi +1 -1
  90. metaflow-stubs/mf_extensions/outerbounds/plugins/aws/assume_role_decorator.pyi +1 -1
  91. metaflow-stubs/mf_extensions/outerbounds/plugins/card_utilities/__init__.pyi +1 -1
  92. metaflow-stubs/mf_extensions/outerbounds/plugins/card_utilities/async_cards.pyi +1 -1
  93. metaflow-stubs/mf_extensions/outerbounds/plugins/card_utilities/injector.pyi +1 -1
  94. metaflow-stubs/mf_extensions/outerbounds/plugins/checkpoint_datastores/__init__.pyi +1 -1
  95. metaflow-stubs/mf_extensions/outerbounds/plugins/checkpoint_datastores/coreweave.pyi +2 -2
  96. metaflow-stubs/mf_extensions/outerbounds/plugins/checkpoint_datastores/nebius.pyi +2 -2
  97. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/__init__.pyi +1 -1
  98. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/baker.pyi +3 -3
  99. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/docker_environment.pyi +2 -2
  100. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/fast_bakery.pyi +1 -1
  101. metaflow-stubs/mf_extensions/outerbounds/plugins/kubernetes/__init__.pyi +1 -1
  102. metaflow-stubs/mf_extensions/outerbounds/plugins/kubernetes/pod_killer.pyi +1 -1
  103. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/__init__.pyi +1 -1
  104. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/constants.pyi +1 -1
  105. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/exceptions.pyi +1 -1
  106. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/ollama.pyi +1 -1
  107. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/status_card.pyi +1 -1
  108. metaflow-stubs/mf_extensions/outerbounds/plugins/snowflake/__init__.pyi +1 -1
  109. metaflow-stubs/mf_extensions/outerbounds/plugins/snowflake/snowflake.pyi +1 -1
  110. metaflow-stubs/mf_extensions/outerbounds/profilers/__init__.pyi +1 -1
  111. metaflow-stubs/mf_extensions/outerbounds/profilers/gpu.pyi +1 -1
  112. metaflow-stubs/mf_extensions/outerbounds/remote_config.pyi +1 -1
  113. metaflow-stubs/mf_extensions/outerbounds/toplevel/__init__.pyi +1 -1
  114. metaflow-stubs/mf_extensions/outerbounds/toplevel/global_aliases_for_metaflow_package.pyi +12 -1
  115. metaflow-stubs/mf_extensions/outerbounds/toplevel/s3_proxy.pyi +24 -0
  116. metaflow-stubs/multicore_utils.pyi +1 -1
  117. metaflow-stubs/ob_internal.pyi +1 -1
  118. metaflow-stubs/packaging_sys/__init__.pyi +5 -5
  119. metaflow-stubs/packaging_sys/backend.pyi +3 -3
  120. metaflow-stubs/packaging_sys/distribution_support.pyi +3 -3
  121. metaflow-stubs/packaging_sys/tar_backend.pyi +4 -4
  122. metaflow-stubs/packaging_sys/utils.pyi +1 -1
  123. metaflow-stubs/packaging_sys/v1.pyi +2 -2
  124. metaflow-stubs/parameters.pyi +4 -4
  125. metaflow-stubs/plugins/__init__.pyi +9 -9
  126. metaflow-stubs/plugins/airflow/__init__.pyi +1 -1
  127. metaflow-stubs/plugins/airflow/airflow_utils.pyi +1 -1
  128. metaflow-stubs/plugins/airflow/exception.pyi +1 -1
  129. metaflow-stubs/plugins/airflow/sensors/__init__.pyi +1 -1
  130. metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +1 -1
  131. metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +1 -1
  132. metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +1 -1
  133. metaflow-stubs/plugins/argo/__init__.pyi +1 -1
  134. metaflow-stubs/plugins/argo/argo_client.pyi +1 -1
  135. metaflow-stubs/plugins/argo/argo_events.pyi +1 -1
  136. metaflow-stubs/plugins/argo/argo_workflows.pyi +2 -2
  137. metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +2 -2
  138. metaflow-stubs/plugins/argo/argo_workflows_deployer.pyi +3 -3
  139. metaflow-stubs/plugins/argo/argo_workflows_deployer_objects.pyi +2 -2
  140. metaflow-stubs/plugins/argo/exit_hooks.pyi +2 -2
  141. metaflow-stubs/plugins/aws/__init__.pyi +1 -1
  142. metaflow-stubs/plugins/aws/aws_client.pyi +1 -1
  143. metaflow-stubs/plugins/aws/aws_utils.pyi +1 -1
  144. metaflow-stubs/plugins/aws/batch/__init__.pyi +1 -1
  145. metaflow-stubs/plugins/aws/batch/batch.pyi +1 -1
  146. metaflow-stubs/plugins/aws/batch/batch_client.pyi +1 -1
  147. metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +1 -1
  148. metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +1 -1
  149. metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +3 -3
  150. metaflow-stubs/plugins/aws/step_functions/__init__.pyi +1 -1
  151. metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +1 -1
  152. metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +1 -1
  153. metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +1 -1
  154. metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +1 -1
  155. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer.pyi +3 -3
  156. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer_objects.pyi +3 -3
  157. metaflow-stubs/plugins/azure/__init__.pyi +1 -1
  158. metaflow-stubs/plugins/azure/azure_credential.pyi +1 -1
  159. metaflow-stubs/plugins/azure/azure_exceptions.pyi +1 -1
  160. metaflow-stubs/plugins/azure/azure_secret_manager_secrets_provider.pyi +3 -3
  161. metaflow-stubs/plugins/azure/azure_utils.pyi +1 -1
  162. metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +1 -1
  163. metaflow-stubs/plugins/azure/includefile_support.pyi +1 -1
  164. metaflow-stubs/plugins/cards/__init__.pyi +5 -5
  165. metaflow-stubs/plugins/cards/card_client.pyi +1 -1
  166. metaflow-stubs/plugins/cards/card_creator.pyi +1 -1
  167. metaflow-stubs/plugins/cards/card_datastore.pyi +1 -1
  168. metaflow-stubs/plugins/cards/card_decorator.pyi +2 -2
  169. metaflow-stubs/plugins/cards/card_modules/__init__.pyi +1 -1
  170. metaflow-stubs/plugins/cards/card_modules/basic.pyi +2 -2
  171. metaflow-stubs/plugins/cards/card_modules/card.pyi +1 -1
  172. metaflow-stubs/plugins/cards/card_modules/components.pyi +3 -3
  173. metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +1 -1
  174. metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +1 -1
  175. metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +1 -1
  176. metaflow-stubs/plugins/cards/card_resolver.pyi +1 -1
  177. metaflow-stubs/plugins/cards/component_serializer.pyi +1 -1
  178. metaflow-stubs/plugins/cards/exception.pyi +1 -1
  179. metaflow-stubs/plugins/catch_decorator.pyi +2 -2
  180. metaflow-stubs/plugins/datatools/__init__.pyi +1 -1
  181. metaflow-stubs/plugins/datatools/local.pyi +1 -1
  182. metaflow-stubs/plugins/datatools/s3/__init__.pyi +1 -1
  183. metaflow-stubs/plugins/datatools/s3/s3.pyi +2 -2
  184. metaflow-stubs/plugins/datatools/s3/s3tail.pyi +1 -1
  185. metaflow-stubs/plugins/datatools/s3/s3util.pyi +1 -1
  186. metaflow-stubs/plugins/debug_logger.pyi +1 -1
  187. metaflow-stubs/plugins/debug_monitor.pyi +1 -1
  188. metaflow-stubs/plugins/environment_decorator.pyi +1 -1
  189. metaflow-stubs/plugins/events_decorator.pyi +1 -1
  190. metaflow-stubs/plugins/exit_hook/__init__.pyi +1 -1
  191. metaflow-stubs/plugins/exit_hook/exit_hook_decorator.pyi +1 -1
  192. metaflow-stubs/plugins/frameworks/__init__.pyi +1 -1
  193. metaflow-stubs/plugins/frameworks/pytorch.pyi +1 -1
  194. metaflow-stubs/plugins/gcp/__init__.pyi +1 -1
  195. metaflow-stubs/plugins/gcp/gcp_secret_manager_secrets_provider.pyi +3 -3
  196. metaflow-stubs/plugins/gcp/gs_exceptions.pyi +1 -1
  197. metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +1 -1
  198. metaflow-stubs/plugins/gcp/gs_utils.pyi +1 -1
  199. metaflow-stubs/plugins/gcp/includefile_support.pyi +1 -1
  200. metaflow-stubs/plugins/kubernetes/__init__.pyi +1 -1
  201. metaflow-stubs/plugins/kubernetes/kube_utils.pyi +2 -2
  202. metaflow-stubs/plugins/kubernetes/kubernetes.pyi +1 -1
  203. metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +1 -1
  204. metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +1 -1
  205. metaflow-stubs/plugins/kubernetes/kubernetes_jobsets.pyi +1 -1
  206. metaflow-stubs/plugins/kubernetes/spot_monitor_sidecar.pyi +1 -1
  207. metaflow-stubs/plugins/ollama/__init__.pyi +1 -1
  208. metaflow-stubs/plugins/parallel_decorator.pyi +1 -1
  209. metaflow-stubs/plugins/perimeters.pyi +1 -1
  210. metaflow-stubs/plugins/project_decorator.pyi +1 -1
  211. metaflow-stubs/plugins/pypi/__init__.pyi +2 -2
  212. metaflow-stubs/plugins/pypi/conda_decorator.pyi +1 -1
  213. metaflow-stubs/plugins/pypi/conda_environment.pyi +4 -4
  214. metaflow-stubs/plugins/pypi/parsers.pyi +1 -1
  215. metaflow-stubs/plugins/pypi/pypi_decorator.pyi +1 -1
  216. metaflow-stubs/plugins/pypi/pypi_environment.pyi +1 -1
  217. metaflow-stubs/plugins/pypi/utils.pyi +1 -1
  218. metaflow-stubs/plugins/resources_decorator.pyi +1 -1
  219. metaflow-stubs/plugins/retry_decorator.pyi +1 -1
  220. metaflow-stubs/plugins/secrets/__init__.pyi +2 -2
  221. metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +2 -2
  222. metaflow-stubs/plugins/secrets/secrets_decorator.pyi +1 -1
  223. metaflow-stubs/plugins/secrets/secrets_func.pyi +1 -1
  224. metaflow-stubs/plugins/secrets/secrets_spec.pyi +1 -1
  225. metaflow-stubs/plugins/secrets/utils.pyi +1 -1
  226. metaflow-stubs/plugins/snowflake/__init__.pyi +1 -1
  227. metaflow-stubs/plugins/storage_executor.pyi +1 -1
  228. metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +1 -1
  229. metaflow-stubs/plugins/timeout_decorator.pyi +2 -2
  230. metaflow-stubs/plugins/torchtune/__init__.pyi +1 -1
  231. metaflow-stubs/plugins/uv/__init__.pyi +1 -1
  232. metaflow-stubs/plugins/uv/uv_environment.pyi +2 -2
  233. metaflow-stubs/profilers/__init__.pyi +1 -1
  234. metaflow-stubs/pylint_wrapper.pyi +1 -1
  235. metaflow-stubs/runner/__init__.pyi +1 -1
  236. metaflow-stubs/runner/deployer.pyi +34 -34
  237. metaflow-stubs/runner/deployer_impl.pyi +2 -2
  238. metaflow-stubs/runner/metaflow_runner.pyi +3 -3
  239. metaflow-stubs/runner/nbdeploy.pyi +1 -1
  240. metaflow-stubs/runner/nbrun.pyi +1 -1
  241. metaflow-stubs/runner/subprocess_manager.pyi +1 -1
  242. metaflow-stubs/runner/utils.pyi +3 -3
  243. metaflow-stubs/system/__init__.pyi +1 -1
  244. metaflow-stubs/system/system_logger.pyi +1 -1
  245. metaflow-stubs/system/system_monitor.pyi +1 -1
  246. metaflow-stubs/tagging_util.pyi +1 -1
  247. metaflow-stubs/tuple_util.pyi +1 -1
  248. metaflow-stubs/user_configs/__init__.pyi +1 -1
  249. metaflow-stubs/user_configs/config_options.pyi +3 -3
  250. metaflow-stubs/user_configs/config_parameters.pyi +5 -5
  251. metaflow-stubs/user_decorators/__init__.pyi +1 -1
  252. metaflow-stubs/user_decorators/common.pyi +1 -1
  253. metaflow-stubs/user_decorators/mutable_flow.pyi +4 -4
  254. metaflow-stubs/user_decorators/mutable_step.pyi +5 -5
  255. metaflow-stubs/user_decorators/user_flow_decorator.pyi +3 -3
  256. metaflow-stubs/user_decorators/user_step_decorator.pyi +6 -6
  257. {ob_metaflow_stubs-6.0.5.0.dist-info → ob_metaflow_stubs-6.0.5.1.dist-info}/METADATA +1 -1
  258. ob_metaflow_stubs-6.0.5.1.dist-info/RECORD +261 -0
  259. ob_metaflow_stubs-6.0.5.0.dist-info/RECORD +0 -260
  260. {ob_metaflow_stubs-6.0.5.0.dist-info → ob_metaflow_stubs-6.0.5.1.dist-info}/WHEEL +0 -0
  261. {ob_metaflow_stubs-6.0.5.0.dist-info → ob_metaflow_stubs-6.0.5.1.dist-info}/top_level.txt +0 -0
@@ -1,7 +1,7 @@
  ######################################################################################################
  # Auto-generated Metaflow stub file #
  # MF version: 2.16.8.1+obcheckpoint(0.2.4);ob(v1) #
- # Generated on 2025-07-31T17:05:42.725448 #
+ # Generated on 2025-08-01T20:12:28.874985 #
  ######################################################################################################

  from __future__ import annotations
@@ -39,17 +39,17 @@ from .user_decorators.user_step_decorator import UserStepDecorator as UserStepDe
  from .user_decorators.user_step_decorator import StepMutator as StepMutator
  from .user_decorators.user_step_decorator import user_step_decorator as user_step_decorator
  from .user_decorators.user_flow_decorator import FlowMutator as FlowMutator
- from . import events as events
- from . import tuple_util as tuple_util
  from . import cards as cards
  from . import metaflow_git as metaflow_git
+ from . import tuple_util as tuple_util
+ from . import events as events
  from . import runner as runner
  from . import plugins as plugins
  from .mf_extensions.outerbounds.toplevel.global_aliases_for_metaflow_package import S3 as S3
  from . import includefile as includefile
  from .includefile import IncludeFile as IncludeFile
- from .plugins.pypi.parsers import pyproject_toml_parser as pyproject_toml_parser
  from .plugins.pypi.parsers import conda_environment_yml_parser as conda_environment_yml_parser
+ from .plugins.pypi.parsers import pyproject_toml_parser as pyproject_toml_parser
  from .plugins.pypi.parsers import requirements_txt_parser as requirements_txt_parser
  from . import client as client
  from .client.core import namespace as namespace
@@ -72,6 +72,11 @@ from .runner.nbdeploy import NBDeployer as NBDeployer
  from .mf_extensions.obcheckpoint.plugins.machine_learning_utilities.checkpoints.final_api import Checkpoint as Checkpoint
  from .mf_extensions.obcheckpoint.plugins.machine_learning_utilities.datastructures import load_model as load_model
  from .mf_extensions.obcheckpoint.plugins.machine_learning_utilities.datastore.context import artifact_store_from as artifact_store_from
+ from .mf_extensions.outerbounds.toplevel.s3_proxy import get_aws_client_with_s3_proxy as get_aws_client_with_s3_proxy
+ from .mf_extensions.outerbounds.toplevel.s3_proxy import get_S3_with_s3_proxy as get_S3_with_s3_proxy
+ from .mf_extensions.outerbounds.toplevel.global_aliases_for_metaflow_package import set_s3_proxy_config as set_s3_proxy_config
+ from .mf_extensions.outerbounds.toplevel.global_aliases_for_metaflow_package import clear_s3_proxy_config as clear_s3_proxy_config
+ from .mf_extensions.outerbounds.toplevel.global_aliases_for_metaflow_package import get_s3_proxy_config as get_s3_proxy_config
  from .mf_extensions.outerbounds.toplevel.global_aliases_for_metaflow_package import get_aws_client as get_aws_client
  from .mf_extensions.outerbounds.plugins.snowflake.snowflake import Snowflake as Snowflake
  from .mf_extensions.outerbounds.plugins.checkpoint_datastores.nebius import nebius_checkpoints as nebius_checkpoints
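The five `+` lines above are the functional change in 6.0.5.1: the S3-proxy helpers from `mf_extensions.outerbounds.toplevel.s3_proxy` and `global_aliases_for_metaflow_package` are now re-exported at the top level of the `metaflow` namespace. The stubs only reveal the names, not the signatures, so the sketch below is a rough guess at how they might fit together; the configuration dict, its keys, and the call shapes are assumptions, not something documented in this diff.

```python
# Hypothetical usage of the new top-level S3-proxy exports (only the names are
# confirmed by the diff above; every argument and the config shape are assumptions).
from metaflow import (
    set_s3_proxy_config,
    get_s3_proxy_config,
    clear_s3_proxy_config,
    get_S3_with_s3_proxy,
)

# Assumed: point Metaflow's S3 access at a local proxy endpoint.
set_s3_proxy_config({"endpoint_url": "http://localhost:8080"})
print(get_s3_proxy_config())          # inspect whatever configuration was stored

S3Proxy = get_S3_with_s3_proxy()      # assumed: returns an S3-like class wired to the proxy
with S3Proxy() as s3:                 # assumed: same context-manager API as metaflow.S3
    objs = s3.list_paths(["s3://my-bucket/data/"])

clear_s3_proxy_config()               # assumed: revert to direct S3 access
```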
@@ -163,51 +168,229 @@ def step(f: typing.Union[typing.Callable[[FlowSpecDerived], None], typing.Callab
  ...

  @typing.overload
- def card(*, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Creates a human-readable report, a Metaflow Card, after this step completes.
+ Specifies that the step will success under all circumstances.

- Note that you may add multiple `@card` decorators in a step with different parameters.
+ The decorator will create an optional artifact, specified by `var`, which
+ contains the exception raised. You can use it to detect the presence
+ of errors, indicating that all happy-path artifacts produced by the step
+ are missing.


  Parameters
  ----------
- type : str, default 'default'
- Card type.
- id : str, optional, default None
- If multiple cards are present, use this id to identify this card.
- options : Dict[str, Any], default {}
- Options passed to the card. The contents depend on the card type.
- timeout : int, default 45
- Interrupt reporting if it takes more than this many seconds.
+ var : str, optional, default None
+ Name of the artifact in which to store the caught exception.
+ If not specified, the exception is not stored.
+ print_exception : bool, default True
+ Determines whether or not the exception is printed to
+ stdout when caught.
  """
  ...

  @typing.overload
- def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
+ def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
  """
- Creates a human-readable report, a Metaflow Card, after this step completes.
+ Specifies that the step will success under all circumstances.

- Note that you may add multiple `@card` decorators in a step with different parameters.
+ The decorator will create an optional artifact, specified by `var`, which
+ contains the exception raised. You can use it to detect the presence
+ of errors, indicating that all happy-path artifacts produced by the step
+ are missing.


  Parameters
  ----------
- type : str, default 'default'
- Card type.
- id : str, optional, default None
- If multiple cards are present, use this id to identify this card.
- options : Dict[str, Any], default {}
- Options passed to the card. The contents depend on the card type.
- timeout : int, default 45
- Interrupt reporting if it takes more than this many seconds.
+ var : str, optional, default None
+ Name of the artifact in which to store the caught exception.
+ If not specified, the exception is not stored.
+ print_exception : bool, default True
+ Determines whether or not the exception is printed to
+ stdout when caught.
+ """
+ ...
+
+ @typing.overload
+ def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Specifies secrets to be retrieved and injected as environment variables prior to
+ the execution of a step.
+
+
+ Parameters
+ ----------
+ sources : List[Union[str, Dict[str, Any]]], default: []
+ List of secret specs, defining how the secrets are to be retrieved
+ role : str, optional, default: None
+ Role to use for fetching secrets
+ """
+ ...
+
+ @typing.overload
+ def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ ...
+
+ @typing.overload
+ def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None):
+ """
+ Specifies secrets to be retrieved and injected as environment variables prior to
+ the execution of a step.
+
+
+ Parameters
+ ----------
+ sources : List[Union[str, Dict[str, Any]]], default: []
+ List of secret specs, defining how the secrets are to be retrieved
+ role : str, optional, default: None
+ Role to use for fetching secrets
+ """
+ ...
+
+ @typing.overload
+ def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ """
+ Internal decorator to support Fast bakery
+ """
+ ...
+
+ @typing.overload
+ def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def fast_bakery_internal(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
+ """
+ Internal decorator to support Fast bakery
+ """
+ ...
+
+ @typing.overload
+ def test_append_card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ """
+ A simple decorator that demonstrates using CardDecoratorInjector
+ to inject a card and render simple markdown content.
+ """
+ ...
+
+ @typing.overload
+ def test_append_card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def test_append_card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
+ """
+ A simple decorator that demonstrates using CardDecoratorInjector
+ to inject a card and render simple markdown content.
+ """
+ ...
+
+ @typing.overload
+ def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Specifies the PyPI packages for the step.
+
+ Information in this decorator will augment any
+ attributes set in the `@pyi_base` flow-level decorator. Hence,
+ you can use `@pypi_base` to set packages required by all
+ steps and use `@pypi` to specify step-specific overrides.
+
+
+ Parameters
+ ----------
+ packages : Dict[str, str], default: {}
+ Packages to use for this step. The key is the name of the package
+ and the value is the version to use.
+ python : str, optional, default: None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ """
+ ...
+
+ @typing.overload
+ def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ ...
+
+ @typing.overload
+ def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
+ """
+ Specifies the PyPI packages for the step.
+
+ Information in this decorator will augment any
+ attributes set in the `@pyi_base` flow-level decorator. Hence,
+ you can use `@pypi_base` to set packages required by all
+ steps and use `@pypi` to specify step-specific overrides.
+
+
+ Parameters
+ ----------
+ packages : Dict[str, str], default: {}
+ Packages to use for this step. The key is the name of the package
+ and the value is the version to use.
+ python : str, optional, default: None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ """
+ ...
+
+ def vllm(*, model: str, backend: str, openai_api_server: bool, debug: bool, card_refresh_interval: int, max_retries: int, retry_alert_frequency: int, engine_args: dict) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ This decorator is used to run vllm APIs as Metaflow task sidecars.
+
+ User code call
+ --------------
+ @vllm(
+ model="...",
+ ...
+ )
+
+ Valid backend options
+ ---------------------
+ - 'local': Run as a separate process on the local task machine.
+
+ Valid model options
+ -------------------
+ Any HuggingFace model identifier, e.g. 'meta-llama/Llama-3.2-1B'
+
+ NOTE: vLLM's OpenAI-compatible server serves ONE model per server instance.
+ If you need multiple models, you must create multiple @vllm decorators.
+
+
+ Parameters
+ ----------
+ model: str
+ HuggingFace model identifier to be served by vLLM.
+ backend: str
+ Determines where and how to run the vLLM process.
+ openai_api_server: bool
+ Whether to use OpenAI-compatible API server mode (subprocess) instead of native engine.
+ Default is False (uses native engine).
+ Set to True for backward compatibility with existing code.
+ debug: bool
+ Whether to turn on verbose debugging logs.
+ card_refresh_interval: int
+ Interval in seconds for refreshing the vLLM status card.
+ Only used when openai_api_server=True.
+ max_retries: int
+ Maximum number of retries checking for vLLM server startup.
+ Only used when openai_api_server=True.
+ retry_alert_frequency: int
+ Frequency of alert logs for vLLM server startup retries.
+ Only used when openai_api_server=True.
+ engine_args : dict
+ Additional keyword arguments to pass to the vLLM engine.
+ For example, `tensor_parallel_size=2`.
  """
  ...

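The hunk above adds the `@catch`, `@secrets`, `@fast_bakery_internal`, `@test_append_card`, `@pypi` and `@vllm` stubs at this position in the regenerated file. As a quick orientation, here is a minimal illustrative flow combining two of the decorators documented above; the flow itself is made up, but the parameter names (`var`, `print_exception`, `packages`, `python`) are the ones shown in the stub docstrings.

```python
# Illustrative flow (not part of the package) using decorators documented above.
from metaflow import FlowSpec, step, catch, pypi


class StubExampleFlow(FlowSpec):

    @pypi(packages={"pandas": "2.2.2"}, python="3.11.9")  # step-specific PyPI environment
    @catch(var="compute_error", print_exception=True)     # caught exception lands in self.compute_error
    @step
    def start(self):
        import pandas as pd
        self.row_count = len(pd.DataFrame({"x": [1, 2, 3]}))
        self.next(self.end)

    @step
    def end(self):
        # If start failed, `compute_error` exists and the happy-path artifact does not.
        print(getattr(self, "compute_error", None), getattr(self, "row_count", None))


if __name__ == "__main__":
    StubExampleFlow()
```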
@@ -340,95 +523,11 @@ def model(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
  """
  ...

- def nvct(*, gpu: int, gpu_type: str) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def huggingface_hub(*, temp_dir_root: typing.Optional[str] = None, load: typing.Union[typing.List[str], typing.List[typing.Tuple[typing.Dict, str]], typing.List[typing.Tuple[str, str]], typing.List[typing.Dict], None]) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies that this step should execute on DGX cloud.
+ Decorator that helps cache, version and store models/datasets from huggingface hub.

-
- Parameters
- ----------
- gpu : int
- Number of GPUs to use.
- gpu_type : str
- Type of Nvidia GPU to use.
- """
- ...
-
- @typing.overload
- def app_deploy(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- """
- Decorator prototype for all step decorators. This function gets specialized
- and imported for all decorators types by _import_plugin_decorators().
- """
- ...
-
- @typing.overload
- def app_deploy(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def app_deploy(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
- """
- Decorator prototype for all step decorators. This function gets specialized
- and imported for all decorators types by _import_plugin_decorators().
- """
- ...
-
- @typing.overload
- def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Specifies that the step will success under all circumstances.
-
- The decorator will create an optional artifact, specified by `var`, which
- contains the exception raised. You can use it to detect the presence
- of errors, indicating that all happy-path artifacts produced by the step
- are missing.
-
-
- Parameters
- ----------
- var : str, optional, default None
- Name of the artifact in which to store the caught exception.
- If not specified, the exception is not stored.
- print_exception : bool, default True
- Determines whether or not the exception is printed to
- stdout when caught.
- """
- ...
-
- @typing.overload
- def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
- """
- Specifies that the step will success under all circumstances.
-
- The decorator will create an optional artifact, specified by `var`, which
- contains the exception raised. You can use it to detect the presence
- of errors, indicating that all happy-path artifacts produced by the step
- are missing.
-
-
- Parameters
- ----------
- var : str, optional, default None
- Name of the artifact in which to store the caught exception.
- If not specified, the exception is not stored.
- print_exception : bool, default True
- Determines whether or not the exception is printed to
- stdout when caught.
- """
- ...
-
- def huggingface_hub(*, temp_dir_root: typing.Optional[str] = None, load: typing.Union[typing.List[str], typing.List[typing.Tuple[typing.Dict, str]], typing.List[typing.Tuple[str, str]], typing.List[typing.Dict], None]) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Decorator that helps cache, version and store models/datasets from huggingface hub.
-
- > Examples
+ > Examples

  **Usage: creating references of models from huggingface that may be loaded in downstream steps**
  ```python
@@ -504,100 +603,6 @@ def huggingface_hub(*, temp_dir_root: typing.Optional[str] = None, load: typing.
  """
  ...

- @typing.overload
- def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Specifies secrets to be retrieved and injected as environment variables prior to
- the execution of a step.
-
-
- Parameters
- ----------
- sources : List[Union[str, Dict[str, Any]]], default: []
- List of secret specs, defining how the secrets are to be retrieved
- role : str, optional, default: None
- Role to use for fetching secrets
- """
- ...
-
- @typing.overload
- def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None):
- """
- Specifies secrets to be retrieved and injected as environment variables prior to
- the execution of a step.
-
-
- Parameters
- ----------
- sources : List[Union[str, Dict[str, Any]]], default: []
- List of secret specs, defining how the secrets are to be retrieved
- role : str, optional, default: None
- Role to use for fetching secrets
- """
- ...
-
- @typing.overload
- def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Specifies the number of times the task corresponding
- to a step needs to be retried.
-
- This decorator is useful for handling transient errors, such as networking issues.
- If your task contains operations that can't be retried safely, e.g. database updates,
- it is advisable to annotate it with `@retry(times=0)`.
-
- This can be used in conjunction with the `@catch` decorator. The `@catch`
- decorator will execute a no-op task after all retries have been exhausted,
- ensuring that the flow execution can continue.
-
-
- Parameters
- ----------
- times : int, default 3
- Number of times to retry this task.
- minutes_between_retries : int, default 2
- Number of minutes between retries.
- """
- ...
-
- @typing.overload
- def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
- """
- Specifies the number of times the task corresponding
- to a step needs to be retried.
-
- This decorator is useful for handling transient errors, such as networking issues.
- If your task contains operations that can't be retried safely, e.g. database updates,
- it is advisable to annotate it with `@retry(times=0)`.
-
- This can be used in conjunction with the `@catch` decorator. The `@catch`
- decorator will execute a no-op task after all retries have been exhausted,
- ensuring that the flow execution can continue.
-
-
- Parameters
- ----------
- times : int, default 3
- Number of times to retry this task.
- minutes_between_retries : int, default 2
- Number of minutes between retries.
- """
- ...
-
  def nvidia(*, gpu: int, gpu_type: str, queue_timeout: int) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
  Specifies that this step should execute on DGX cloud.
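The `@secrets` and `@retry` blocks removed above simply move elsewhere in the regenerated stub; their docstrings are unchanged. For reference, a small illustrative sketch of the retry-plus-catch pattern that the `@retry` docstring describes (the flow and the flaky helper are made up; the parameters `times`, `minutes_between_retries` and `var` are the documented ones):

```python
# Illustrative only: @retry handles transient failures, @catch is the final safety net.
import random

from metaflow import FlowSpec, step, retry, catch


def fetch_from_flaky_service():
    # Stand-in for a call that sometimes fails transiently (hypothetical helper).
    if random.random() < 0.5:
        raise ConnectionError("transient network error")
    return {"ok": True}


class FlakyFlow(FlowSpec):

    @catch(var="fetch_error")                   # runs a no-op once retries are exhausted
    @retry(times=3, minutes_between_retries=2)  # retry transient failures
    @step
    def start(self):
        self.payload = fetch_from_flaky_service()
        self.next(self.end)

    @step
    def end(self):
        if getattr(self, "fetch_error", None):
            print("gave up after retries:", self.fetch_error)
        else:
            print("payload:", self.payload)


if __name__ == "__main__":
    FlakyFlow()
```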
@@ -615,667 +620,731 @@ def nvidia(*, gpu: int, gpu_type: str, queue_timeout: int) -> typing.Callable[[t
615
620
  ...
616
621
 
617
622
  @typing.overload
618
- def resources(*, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
623
+ def checkpoint(*, load_policy: str = 'fresh', temp_dir_root: str = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
619
624
  """
620
- Specifies the resources needed when executing this step.
625
+ Enables checkpointing for a step.
621
626
 
622
- Use `@resources` to specify the resource requirements
623
- independently of the specific compute layer (`@batch`, `@kubernetes`).
627
+ > Examples
624
628
 
625
- You can choose the compute layer on the command line by executing e.g.
626
- ```
627
- python myflow.py run --with batch
628
- ```
629
- or
629
+ - Saving Checkpoints
630
+
631
+ ```python
632
+ @checkpoint
633
+ @step
634
+ def train(self):
635
+ model = create_model(self.parameters, checkpoint_path = None)
636
+ for i in range(self.epochs):
637
+ # some training logic
638
+ loss = model.train(self.dataset)
639
+ if i % 10 == 0:
640
+ model.save(
641
+ current.checkpoint.directory,
642
+ )
643
+ # saves the contents of the `current.checkpoint.directory` as a checkpoint
644
+ # and returns a reference dictionary to the checkpoint saved in the datastore
645
+ self.latest_checkpoint = current.checkpoint.save(
646
+ name="epoch_checkpoint",
647
+ metadata={
648
+ "epoch": i,
649
+ "loss": loss,
650
+ }
651
+ )
630
652
  ```
631
- python myflow.py run --with kubernetes
653
+
654
+ - Using Loaded Checkpoints
655
+
656
+ ```python
657
+ @retry(times=3)
658
+ @checkpoint
659
+ @step
660
+ def train(self):
661
+ # Assume that the task has restarted and the previous attempt of the task
662
+ # saved a checkpoint
663
+ checkpoint_path = None
664
+ if current.checkpoint.is_loaded: # Check if a checkpoint is loaded
665
+ print("Loaded checkpoint from the previous attempt")
666
+ checkpoint_path = current.checkpoint.directory
667
+
668
+ model = create_model(self.parameters, checkpoint_path = checkpoint_path)
669
+ for i in range(self.epochs):
670
+ ...
632
671
  ```
633
- which executes the flow on the desired system using the
634
- requirements specified in `@resources`.
635
672
 
636
673
 
637
674
  Parameters
638
675
  ----------
639
- cpu : int, default 1
640
- Number of CPUs required for this step.
641
- gpu : int, optional, default None
642
- Number of GPUs required for this step.
643
- disk : int, optional, default None
644
- Disk size (in MB) required for this step. Only applies on Kubernetes.
645
- memory : int, default 4096
646
- Memory size (in MB) required for this step.
647
- shared_memory : int, optional, default None
648
- The value for the size (in MiB) of the /dev/shm volume for this step.
649
- This parameter maps to the `--shm-size` option in Docker.
676
+ load_policy : str, default: "fresh"
677
+ The policy for loading the checkpoint. The following policies are supported:
678
+ - "eager": Loads the the latest available checkpoint within the namespace.
679
+ With this mode, the latest checkpoint written by any previous task (can be even a different run) of the step
680
+ will be loaded at the start of the task.
681
+ - "none": Do not load any checkpoint
682
+ - "fresh": Loads the lastest checkpoint created within the running Task.
683
+ This mode helps loading checkpoints across various retry attempts of the same task.
684
+ With this mode, no checkpoint will be loaded at the start of a task but any checkpoints
685
+ created within the task will be loaded when the task is retries execution on failure.
686
+
687
+ temp_dir_root : str, default: None
688
+ The root directory under which `current.checkpoint.directory` will be created.
650
689
  """
651
690
  ...
652
691
 
653
692
  @typing.overload
654
- def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
693
+ def checkpoint(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
655
694
  ...
656
695
 
657
696
  @typing.overload
658
- def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
697
+ def checkpoint(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
659
698
  ...
660
699
 
661
- def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
700
+ def checkpoint(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, load_policy: str = 'fresh', temp_dir_root: str = None):
662
701
  """
663
- Specifies the resources needed when executing this step.
702
+ Enables checkpointing for a step.
664
703
 
665
- Use `@resources` to specify the resource requirements
666
- independently of the specific compute layer (`@batch`, `@kubernetes`).
704
+ > Examples
667
705
 
668
- You can choose the compute layer on the command line by executing e.g.
669
- ```
670
- python myflow.py run --with batch
671
- ```
672
- or
706
+ - Saving Checkpoints
707
+
708
+ ```python
709
+ @checkpoint
710
+ @step
711
+ def train(self):
712
+ model = create_model(self.parameters, checkpoint_path = None)
713
+ for i in range(self.epochs):
714
+ # some training logic
715
+ loss = model.train(self.dataset)
716
+ if i % 10 == 0:
717
+ model.save(
718
+ current.checkpoint.directory,
719
+ )
720
+ # saves the contents of the `current.checkpoint.directory` as a checkpoint
721
+ # and returns a reference dictionary to the checkpoint saved in the datastore
722
+ self.latest_checkpoint = current.checkpoint.save(
723
+ name="epoch_checkpoint",
724
+ metadata={
725
+ "epoch": i,
726
+ "loss": loss,
727
+ }
728
+ )
673
729
  ```
674
- python myflow.py run --with kubernetes
730
+
731
+ - Using Loaded Checkpoints
732
+
733
+ ```python
734
+ @retry(times=3)
735
+ @checkpoint
736
+ @step
737
+ def train(self):
738
+ # Assume that the task has restarted and the previous attempt of the task
739
+ # saved a checkpoint
740
+ checkpoint_path = None
741
+ if current.checkpoint.is_loaded: # Check if a checkpoint is loaded
742
+ print("Loaded checkpoint from the previous attempt")
743
+ checkpoint_path = current.checkpoint.directory
744
+
745
+ model = create_model(self.parameters, checkpoint_path = checkpoint_path)
746
+ for i in range(self.epochs):
747
+ ...
675
748
  ```
676
- which executes the flow on the desired system using the
677
- requirements specified in `@resources`.
678
749
 
679
750
 
680
751
  Parameters
681
752
  ----------
682
- cpu : int, default 1
683
- Number of CPUs required for this step.
684
- gpu : int, optional, default None
685
- Number of GPUs required for this step.
686
- disk : int, optional, default None
687
- Disk size (in MB) required for this step. Only applies on Kubernetes.
688
- memory : int, default 4096
689
- Memory size (in MB) required for this step.
690
- shared_memory : int, optional, default None
691
- The value for the size (in MiB) of the /dev/shm volume for this step.
692
- This parameter maps to the `--shm-size` option in Docker.
753
+ load_policy : str, default: "fresh"
754
+ The policy for loading the checkpoint. The following policies are supported:
755
+ - "eager": Loads the the latest available checkpoint within the namespace.
756
+ With this mode, the latest checkpoint written by any previous task (can be even a different run) of the step
757
+ will be loaded at the start of the task.
758
+ - "none": Do not load any checkpoint
759
+ - "fresh": Loads the lastest checkpoint created within the running Task.
760
+ This mode helps loading checkpoints across various retry attempts of the same task.
761
+ With this mode, no checkpoint will be loaded at the start of a task but any checkpoints
762
+ created within the task will be loaded when the task is retries execution on failure.
763
+
764
+ temp_dir_root : str, default: None
765
+ The root directory under which `current.checkpoint.directory` will be created.
693
766
  """
694
767
  ...
695
768
 
696
769
  @typing.overload
697
- def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
770
+ def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
698
771
  """
699
- Specifies the Conda environment for the step.
700
-
701
- Information in this decorator will augment any
702
- attributes set in the `@conda_base` flow-level decorator. Hence,
703
- you can use `@conda_base` to set packages required by all
704
- steps and use `@conda` to specify step-specific overrides.
772
+ Decorator prototype for all step decorators. This function gets specialized
773
+ and imported for all decorators types by _import_plugin_decorators().
774
+ """
775
+ ...
776
+
777
+ @typing.overload
778
+ def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
779
+ ...
780
+
781
+ def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
782
+ """
783
+ Decorator prototype for all step decorators. This function gets specialized
784
+ and imported for all decorators types by _import_plugin_decorators().
785
+ """
786
+ ...
787
+
788
+ @typing.overload
789
+ def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
790
+ """
791
+ Specifies environment variables to be set prior to the execution of a step.
705
792
 
706
793
 
707
794
  Parameters
708
795
  ----------
709
- packages : Dict[str, str], default {}
710
- Packages to use for this step. The key is the name of the package
711
- and the value is the version to use.
712
- libraries : Dict[str, str], default {}
713
- Supported for backward compatibility. When used with packages, packages will take precedence.
714
- python : str, optional, default None
715
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
716
- that the version used will correspond to the version of the Python interpreter used to start the run.
717
- disabled : bool, default False
718
- If set to True, disables @conda.
796
+ vars : Dict[str, str], default {}
797
+ Dictionary of environment variables to set.
719
798
  """
720
799
  ...
721
800
 
722
801
  @typing.overload
723
- def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
802
+ def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
724
803
  ...
725
804
 
726
805
  @typing.overload
727
- def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
806
+ def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
728
807
  ...
729
808
 
730
- def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
809
+ def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
731
810
  """
732
- Specifies the Conda environment for the step.
811
+ Specifies environment variables to be set prior to the execution of a step.
733
812
 
734
- Information in this decorator will augment any
735
- attributes set in the `@conda_base` flow-level decorator. Hence,
736
- you can use `@conda_base` to set packages required by all
737
- steps and use `@conda` to specify step-specific overrides.
813
+
814
+ Parameters
815
+ ----------
816
+ vars : Dict[str, str], default {}
817
+ Dictionary of environment variables to set.
818
+ """
819
+ ...
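
As a quick, hedged illustration of `@environment`, the variable names and values below are made up:

```python
import os

from metaflow import FlowSpec, step, environment


class EnvDemoFlow(FlowSpec):

    @environment(vars={"MODEL_NAME": "demo", "LOG_LEVEL": "DEBUG"})  # hypothetical values
    @step
    def start(self):
        # The variables are set before the step body executes.
        print(os.environ["MODEL_NAME"], os.environ["LOG_LEVEL"])
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    EnvDemoFlow()
```
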
820
+
821
+ def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = 'KUBERNETES_IMAGE_PULL_POLICY', image_pull_secrets: typing.List[str] = [], service_account: str = 'METAFLOW_KUBERNETES_SERVICE_ACCOUNT', secrets: typing.Optional[typing.List[str]] = None, node_selector: typing.Union[typing.Dict[str, str], str, None] = None, namespace: str = 'METAFLOW_KUBERNETES_NAMESPACE', gpu: typing.Optional[int] = None, gpu_vendor: str = 'KUBERNETES_GPU_VENDOR', tolerations: typing.List[typing.Dict[str, str]] = [], labels: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_LABELS', annotations: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_ANNOTATIONS', use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = '/metaflow_temp', persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None, hostname_resolution_timeout: int = 600, qos: str = 'Burstable', security_context: typing.Optional[typing.Dict[str, typing.Any]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
822
+ """
823
+ Specifies that this step should execute on Kubernetes.
738
824
 
739
825
 
740
826
  Parameters
741
827
  ----------
742
- packages : Dict[str, str], default {}
743
- Packages to use for this step. The key is the name of the package
744
- and the value is the version to use.
745
- libraries : Dict[str, str], default {}
746
- Supported for backward compatibility. When used with packages, packages will take precedence.
747
- python : str, optional, default None
748
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
749
- that the version used will correspond to the version of the Python interpreter used to start the run.
750
- disabled : bool, default False
751
- If set to True, disables @conda.
828
+ cpu : int, default 1
829
+ Number of CPUs required for this step. If `@resources` is
830
+ also present, the maximum value from all decorators is used.
831
+ memory : int, default 4096
832
+ Memory size (in MB) required for this step. If
833
+ `@resources` is also present, the maximum value from all decorators is
834
+ used.
835
+ disk : int, default 10240
836
+ Disk size (in MB) required for this step. If
837
+ `@resources` is also present, the maximum value from all decorators is
838
+ used.
839
+ image : str, optional, default None
840
+ Docker image to use when launching on Kubernetes. If not specified, and
841
+ METAFLOW_KUBERNETES_CONTAINER_IMAGE is specified, that image is used. If
842
+ not, a default Docker image mapping to the current version of Python is used.
843
+ image_pull_policy: str, default KUBERNETES_IMAGE_PULL_POLICY
844
+ If given, the imagePullPolicy to be applied to the Docker image of the step.
845
+ image_pull_secrets: List[str], default []
846
+ The default is extracted from METAFLOW_KUBERNETES_IMAGE_PULL_SECRETS.
847
+ Kubernetes image pull secrets to use when pulling container images
848
+ in Kubernetes.
849
+ service_account : str, default METAFLOW_KUBERNETES_SERVICE_ACCOUNT
850
+ Kubernetes service account to use when launching pod in Kubernetes.
851
+ secrets : List[str], optional, default None
852
+ Kubernetes secrets to use when launching pod in Kubernetes. These
853
+ secrets are in addition to the ones defined in `METAFLOW_KUBERNETES_SECRETS`
854
+ in Metaflow configuration.
855
+ node_selector: Union[Dict[str,str], str], optional, default None
856
+ Kubernetes node selector(s) to apply to the pod running the task.
857
+ Can be passed in as a comma separated string of values e.g.
858
+ 'kubernetes.io/os=linux,kubernetes.io/arch=amd64' or as a dictionary
859
+ {'kubernetes.io/os': 'linux', 'kubernetes.io/arch': 'amd64'}
860
+ namespace : str, default METAFLOW_KUBERNETES_NAMESPACE
861
+ Kubernetes namespace to use when launching pod in Kubernetes.
862
+ gpu : int, optional, default None
863
+ Number of GPUs required for this step. A value of zero implies that
864
+ the scheduled node should not have GPUs.
865
+ gpu_vendor : str, default KUBERNETES_GPU_VENDOR
866
+ The vendor of the GPUs to be used for this step.
867
+ tolerations : List[Dict[str,str]], default []
868
+ The default is extracted from METAFLOW_KUBERNETES_TOLERATIONS.
869
+ Kubernetes tolerations to use when launching pod in Kubernetes.
870
+ labels: Dict[str, str], default: METAFLOW_KUBERNETES_LABELS
871
+ Kubernetes labels to use when launching pod in Kubernetes.
872
+ annotations: Dict[str, str], default: METAFLOW_KUBERNETES_ANNOTATIONS
873
+ Kubernetes annotations to use when launching pod in Kubernetes.
874
+ use_tmpfs : bool, default False
875
+ This enables an explicit tmpfs mount for this step.
876
+ tmpfs_tempdir : bool, default True
877
+ Sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
878
+ tmpfs_size : int, optional, default: None
879
+ The value for the size (in MiB) of the tmpfs mount for this step.
880
+ This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
881
+ memory allocated for this step.
882
+ tmpfs_path : str, optional, default /metaflow_temp
883
+ Path to tmpfs mount for this step.
884
+ persistent_volume_claims : Dict[str, str], optional, default None
885
+ A map (dictionary) of persistent volumes to be mounted to the pod for this step. The map is from persistent
886
+ volumes to the path to which the volume is to be mounted, e.g., `{'pvc-name': '/path/to/mount/on'}`.
887
+ shared_memory: int, optional
888
+ Shared memory size (in MiB) required for this step.
889
+ port: int, optional
890
+ Port number to specify in the Kubernetes job object.
891
+ compute_pool : str, optional, default None
892
+ Compute pool to be used for this step.
893
+ If not specified, any accessible compute pool within the perimeter is used.
894
+ hostname_resolution_timeout: int, default 10 * 60
895
+ Timeout in seconds for the worker tasks in the gang-scheduled cluster to resolve the hostname of the control task.
896
+ Only applicable when @parallel is used.
897
+ qos: str, default: Burstable
898
+ Quality of Service class to assign to the pod. Supported values are: Guaranteed, Burstable, BestEffort
899
+
900
+ security_context: Dict[str, Any], optional, default None
901
+ Container security context. Applies to the task container. Allows the following keys:
902
+ - privileged: bool, optional, default None
903
+ - allow_privilege_escalation: bool, optional, default None
904
+ - run_as_user: int, optional, default None
905
+ - run_as_group: int, optional, default None
906
+ - run_as_non_root: bool, optional, default None
752
907
  """
753
908
  ...
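
For orientation, a hedged sketch of a step requesting Kubernetes resources follows; the sizes and image are illustrative values, not defaults taken from this stub:

```python
from metaflow import FlowSpec, step, kubernetes


class K8sDemoFlow(FlowSpec):

    @kubernetes(cpu=2, memory=8192, disk=20480, image="python:3.11")  # illustrative values
    @step
    def start(self):
        print("running inside a Kubernetes pod")
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    K8sDemoFlow()
```
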
754
909
 
755
910
  @typing.overload
756
- def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
911
+ def coreweave_s3_proxy(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
757
912
  """
758
- Internal decorator to support Fast bakery
913
+ CoreWeave-specific S3 Proxy decorator for routing S3 requests through a local proxy service.
914
+ It exists to make it easier for users to know that this decorator should only be used with
915
+ a Neo Cloud like CoreWeave.
759
916
  """
760
917
  ...
761
918
 
762
919
  @typing.overload
763
- def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
920
+ def coreweave_s3_proxy(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
764
921
  ...
765
922
 
766
- def fast_bakery_internal(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
923
+ def coreweave_s3_proxy(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
767
924
  """
768
- Internal decorator to support Fast bakery
925
+ CoreWeave-specific S3 Proxy decorator for routing S3 requests through a local proxy service.
926
+ It exists to make it easier for users to know that this decorator should only be used with
927
+ a Neo Cloud like CoreWeave.
769
928
  """
770
929
  ...
771
930
 
772
- def vllm(*, model: str, backend: str, openai_api_server: bool, debug: bool, card_refresh_interval: int, max_retries: int, retry_alert_frequency: int, engine_args: dict) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
931
+ @typing.overload
932
+ def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
773
933
  """
774
- This decorator is used to run vllm APIs as Metaflow task sidecars.
775
-
776
- User code call
777
- --------------
778
- @vllm(
779
- model="...",
780
- ...
781
- )
782
-
783
- Valid backend options
784
- ---------------------
785
- - 'local': Run as a separate process on the local task machine.
934
+ Specifies the number of times the task corresponding
935
+ to a step needs to be retried.
786
936
 
787
- Valid model options
788
- -------------------
789
- Any HuggingFace model identifier, e.g. 'meta-llama/Llama-3.2-1B'
937
+ This decorator is useful for handling transient errors, such as networking issues.
938
+ If your task contains operations that can't be retried safely, e.g. database updates,
939
+ it is advisable to annotate it with `@retry(times=0)`.
790
940
 
791
- NOTE: vLLM's OpenAI-compatible server serves ONE model per server instance.
792
- If you need multiple models, you must create multiple @vllm decorators.
941
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
942
+ decorator will execute a no-op task after all retries have been exhausted,
943
+ ensuring that the flow execution can continue.
793
944
 
794
945
 
795
946
  Parameters
796
947
  ----------
797
- model: str
798
- HuggingFace model identifier to be served by vLLM.
799
- backend: str
800
- Determines where and how to run the vLLM process.
801
- openai_api_server: bool
802
- Whether to use OpenAI-compatible API server mode (subprocess) instead of native engine.
803
- Default is False (uses native engine).
804
- Set to True for backward compatibility with existing code.
805
- debug: bool
806
- Whether to turn on verbose debugging logs.
807
- card_refresh_interval: int
808
- Interval in seconds for refreshing the vLLM status card.
809
- Only used when openai_api_server=True.
810
- max_retries: int
811
- Maximum number of retries checking for vLLM server startup.
812
- Only used when openai_api_server=True.
813
- retry_alert_frequency: int
814
- Frequency of alert logs for vLLM server startup retries.
815
- Only used when openai_api_server=True.
816
- engine_args : dict
817
- Additional keyword arguments to pass to the vLLM engine.
818
- For example, `tensor_parallel_size=2`.
948
+ times : int, default 3
949
+ Number of times to retry this task.
950
+ minutes_between_retries : int, default 2
951
+ Number of minutes between retries.
819
952
  """
820
953
  ...
821
954
 
822
955
  @typing.overload
823
- def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
956
+ def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
957
+ ...
958
+
959
+ @typing.overload
960
+ def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
961
+ ...
962
+
963
+ def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
824
964
  """
825
- Specifies the PyPI packages for the step.
965
+ Specifies the number of times the task corresponding
966
+ to a step needs to be retried.
826
967
 
827
- Information in this decorator will augment any
828
- attributes set in the `@pyi_base` flow-level decorator. Hence,
829
- you can use `@pypi_base` to set packages required by all
830
- steps and use `@pypi` to specify step-specific overrides.
968
+ This decorator is useful for handling transient errors, such as networking issues.
969
+ If your task contains operations that can't be retried safely, e.g. database updates,
970
+ it is advisable to annotate it with `@retry(times=0)`.
971
+
972
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
973
+ decorator will execute a no-op task after all retries have been exhausted,
974
+ ensuring that the flow execution can continue.
831
975
 
832
976
 
833
977
  Parameters
834
978
  ----------
835
- packages : Dict[str, str], default: {}
836
- Packages to use for this step. The key is the name of the package
837
- and the value is the version to use.
838
- python : str, optional, default: None
839
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
840
- that the version used will correspond to the version of the Python interpreter used to start the run.
979
+ times : int, default 3
980
+ Number of times to retry this task.
981
+ minutes_between_retries : int, default 2
982
+ Number of minutes between retries.
841
983
  """
842
984
  ...
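
A small sketch of `@retry` absorbing a transient failure; the simulated error is a stand-in for real flaky work such as a network call:

```python
import random

from metaflow import FlowSpec, step, retry


class RetryDemoFlow(FlowSpec):

    @retry(times=4, minutes_between_retries=1)
    @step
    def start(self):
        # Simulate a transient failure; a later attempt is likely to succeed.
        if random.random() < 0.5:
            raise RuntimeError("transient error, will be retried")
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    RetryDemoFlow()
```
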
843
985
 
844
- @typing.overload
845
- def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
846
- ...
847
-
848
- @typing.overload
849
- def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
850
- ...
851
-
852
- def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
986
+ def nvct(*, gpu: int, gpu_type: str) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
853
987
  """
854
- Specifies the PyPI packages for the step.
855
-
856
- Information in this decorator will augment any
857
- attributes set in the `@pyi_base` flow-level decorator. Hence,
858
- you can use `@pypi_base` to set packages required by all
859
- steps and use `@pypi` to specify step-specific overrides.
988
+ Specifies that this step should execute on DGX cloud.
860
989
 
861
990
 
862
991
  Parameters
863
992
  ----------
864
- packages : Dict[str, str], default: {}
865
- Packages to use for this step. The key is the name of the package
866
- and the value is the version to use.
867
- python : str, optional, default: None
868
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
869
- that the version used will correspond to the version of the Python interpreter used to start the run.
993
+ gpu : int
994
+ Number of GPUs to use.
995
+ gpu_type : str
996
+ Type of Nvidia GPU to use.
870
997
  """
871
998
  ...
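
A minimal sketch of `@nvct`; the GPU type string is a hypothetical example and should match whatever your DGX Cloud account actually exposes:

```python
from metaflow import FlowSpec, step, nvct


class NvctDemoFlow(FlowSpec):

    @nvct(gpu=1, gpu_type="H100")  # gpu_type value is illustrative
    @step
    def start(self):
        print("running on DGX Cloud")
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    NvctDemoFlow()
```
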
872
999
 
873
1000
  @typing.overload
874
- def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1001
+ def card(*, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
875
1002
  """
876
- Decorator prototype for all step decorators. This function gets specialized
877
- and imported for all decorators types by _import_plugin_decorators().
1003
+ Creates a human-readable report, a Metaflow Card, after this step completes.
1004
+
1005
+ Note that you may add multiple `@card` decorators in a step with different parameters.
1006
+
1007
+
1008
+ Parameters
1009
+ ----------
1010
+ type : str, default 'default'
1011
+ Card type.
1012
+ id : str, optional, default None
1013
+ If multiple cards are present, use this id to identify this card.
1014
+ options : Dict[str, Any], default {}
1015
+ Options passed to the card. The contents depend on the card type.
1016
+ timeout : int, default 45
1017
+ Interrupt reporting if it takes more than this many seconds.
878
1018
  """
879
1019
  ...
880
1020
 
881
1021
  @typing.overload
882
- def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1022
+ def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
883
1023
  ...
884
1024
 
885
- def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
886
- """
887
- Decorator prototype for all step decorators. This function gets specialized
888
- and imported for all decorators types by _import_plugin_decorators().
889
- """
1025
+ @typing.overload
1026
+ def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
890
1027
  ...
891
1028
 
892
- def ollama(*, models: list, backend: str, force_pull: bool, cache_update_policy: str, force_cache_update: bool, debug: bool, circuit_breaker_config: dict, timeout_config: dict) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1029
+ def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
893
1030
  """
894
- This decorator is used to run Ollama APIs as Metaflow task sidecars.
895
-
896
- User code call
897
- --------------
898
- @ollama(
899
- models=[...],
900
- ...
901
- )
902
-
903
- Valid backend options
904
- ---------------------
905
- - 'local': Run as a separate process on the local task machine.
906
- - (TODO) 'managed': Outerbounds hosts and selects compute provider.
907
- - (TODO) 'remote': Spin up separate instance to serve Ollama models.
1031
+ Creates a human-readable report, a Metaflow Card, after this step completes.
908
1032
 
909
- Valid model options
910
- -------------------
911
- Any model here https://ollama.com/search, e.g. 'llama3.2', 'llama3.3'
1033
+ Note that you may add multiple `@card` decorators in a step with different parameters.
912
1034
 
913
1035
 
914
1036
  Parameters
915
1037
  ----------
916
- models: list[str]
917
- List of Ollama containers running models in sidecars.
918
- backend: str
919
- Determines where and how to run the Ollama process.
920
- force_pull: bool
921
- Whether to run `ollama pull` no matter what, or first check the remote cache in Metaflow datastore for this model key.
922
- cache_update_policy: str
923
- Cache update policy: "auto", "force", or "never".
924
- force_cache_update: bool
925
- Simple override for "force" cache update policy.
926
- debug: bool
927
- Whether to turn on verbose debugging logs.
928
- circuit_breaker_config: dict
929
- Configuration for circuit breaker protection. Keys: failure_threshold, recovery_timeout, reset_timeout.
930
- timeout_config: dict
931
- Configuration for various operation timeouts. Keys: pull, stop, health_check, install, server_startup.
1038
+ type : str, default 'default'
1039
+ Card type.
1040
+ id : str, optional, default None
1041
+ If multiple cards are present, use this id to identify this card.
1042
+ options : Dict[str, Any], default {}
1043
+ Options passed to the card. The contents depend on the card type.
1044
+ timeout : int, default 45
1045
+ Interrupt reporting if it takes more than this many seconds.
932
1046
  """
933
1047
  ...
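
A sketch of attaching a card and appending content to it at runtime; it assumes the standard Metaflow card API (`current.card.append` plus the `Markdown` component stubbed in `metaflow-stubs/cards.pyi`):

```python
from metaflow import FlowSpec, card, current, step
from metaflow.cards import Markdown


class CardDemoFlow(FlowSpec):

    @card(type="default", timeout=45)
    @step
    def start(self):
        self.accuracy = 0.93  # hypothetical metric
        current.card.append(Markdown(f"## Training report\n- accuracy: {self.accuracy}"))
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    CardDemoFlow()
```
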
934
1048
 
935
1049
  @typing.overload
936
- def checkpoint(*, load_policy: str = 'fresh', temp_dir_root: str = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1050
+ def nebius_s3_proxy(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
937
1051
  """
938
- Enables checkpointing for a step.
939
-
940
- > Examples
941
-
942
- - Saving Checkpoints
943
-
944
- ```python
945
- @checkpoint
946
- @step
947
- def train(self):
948
- model = create_model(self.parameters, checkpoint_path = None)
949
- for i in range(self.epochs):
950
- # some training logic
951
- loss = model.train(self.dataset)
952
- if i % 10 == 0:
953
- model.save(
954
- current.checkpoint.directory,
955
- )
956
- # saves the contents of the `current.checkpoint.directory` as a checkpoint
957
- # and returns a reference dictionary to the checkpoint saved in the datastore
958
- self.latest_checkpoint = current.checkpoint.save(
959
- name="epoch_checkpoint",
960
- metadata={
961
- "epoch": i,
962
- "loss": loss,
963
- }
964
- )
965
- ```
966
-
967
- - Using Loaded Checkpoints
968
-
969
- ```python
970
- @retry(times=3)
971
- @checkpoint
972
- @step
973
- def train(self):
974
- # Assume that the task has restarted and the previous attempt of the task
975
- # saved a checkpoint
976
- checkpoint_path = None
977
- if current.checkpoint.is_loaded: # Check if a checkpoint is loaded
978
- print("Loaded checkpoint from the previous attempt")
979
- checkpoint_path = current.checkpoint.directory
980
-
981
- model = create_model(self.parameters, checkpoint_path = checkpoint_path)
982
- for i in range(self.epochs):
983
- ...
984
- ```
985
-
986
-
987
- Parameters
988
- ----------
989
- load_policy : str, default: "fresh"
990
- The policy for loading the checkpoint. The following policies are supported:
991
- - "eager": Loads the the latest available checkpoint within the namespace.
992
- With this mode, the latest checkpoint written by any previous task (can be even a different run) of the step
993
- will be loaded at the start of the task.
994
- - "none": Do not load any checkpoint
995
- - "fresh": Loads the lastest checkpoint created within the running Task.
996
- This mode helps loading checkpoints across various retry attempts of the same task.
997
- With this mode, no checkpoint will be loaded at the start of a task but any checkpoints
998
- created within the task will be loaded when the task is retries execution on failure.
999
-
1000
- temp_dir_root : str, default: None
1001
- The root directory under which `current.checkpoint.directory` will be created.
1052
+ Nebius-specific S3 Proxy decorator for routing S3 requests through a local proxy service.
1053
+ It exists to make it easier for users to know that this decorator should only be used with
1054
+ a Neo Cloud like Nebius.
1002
1055
  """
1003
1056
  ...
1004
1057
 
1005
1058
  @typing.overload
1006
- def checkpoint(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1059
+ def nebius_s3_proxy(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1007
1060
  ...
1008
1061
 
1009
- @typing.overload
1010
- def checkpoint(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1062
+ def nebius_s3_proxy(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
1063
+ """
1064
+ Nebius-specific S3 Proxy decorator for routing S3 requests through a local proxy service.
1065
+ It exists to make it easier for users to know that this decorator should only be used with
1066
+ a Neo Cloud like Nebius.
1067
+ """
1011
1068
  ...
1012
1069
 
1013
- def checkpoint(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, load_policy: str = 'fresh', temp_dir_root: str = None):
1070
+ @typing.overload
1071
+ def resources(*, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1014
1072
  """
1015
- Enables checkpointing for a step.
1016
-
1017
- > Examples
1073
+ Specifies the resources needed when executing this step.
1018
1074
 
1019
- - Saving Checkpoints
1075
+ Use `@resources` to specify the resource requirements
1076
+ independently of the specific compute layer (`@batch`, `@kubernetes`).
1020
1077
 
1021
- ```python
1022
- @checkpoint
1023
- @step
1024
- def train(self):
1025
- model = create_model(self.parameters, checkpoint_path = None)
1026
- for i in range(self.epochs):
1027
- # some training logic
1028
- loss = model.train(self.dataset)
1029
- if i % 10 == 0:
1030
- model.save(
1031
- current.checkpoint.directory,
1032
- )
1033
- # saves the contents of the `current.checkpoint.directory` as a checkpoint
1034
- # and returns a reference dictionary to the checkpoint saved in the datastore
1035
- self.latest_checkpoint = current.checkpoint.save(
1036
- name="epoch_checkpoint",
1037
- metadata={
1038
- "epoch": i,
1039
- "loss": loss,
1040
- }
1041
- )
1078
+ You can choose the compute layer on the command line by executing e.g.
1042
1079
  ```
1043
-
1044
- - Using Loaded Checkpoints
1045
-
1046
- ```python
1047
- @retry(times=3)
1048
- @checkpoint
1049
- @step
1050
- def train(self):
1051
- # Assume that the task has restarted and the previous attempt of the task
1052
- # saved a checkpoint
1053
- checkpoint_path = None
1054
- if current.checkpoint.is_loaded: # Check if a checkpoint is loaded
1055
- print("Loaded checkpoint from the previous attempt")
1056
- checkpoint_path = current.checkpoint.directory
1057
-
1058
- model = create_model(self.parameters, checkpoint_path = checkpoint_path)
1059
- for i in range(self.epochs):
1060
- ...
1080
+ python myflow.py run --with batch
1081
+ ```
1082
+ or
1061
1083
  ```
1084
+ python myflow.py run --with kubernetes
1085
+ ```
1086
+ which executes the flow on the desired system using the
1087
+ requirements specified in `@resources`.
1062
1088
 
1063
1089
 
1064
1090
  Parameters
1065
1091
  ----------
1066
- load_policy : str, default: "fresh"
1067
- The policy for loading the checkpoint. The following policies are supported:
1068
- - "eager": Loads the the latest available checkpoint within the namespace.
1069
- With this mode, the latest checkpoint written by any previous task (can be even a different run) of the step
1070
- will be loaded at the start of the task.
1071
- - "none": Do not load any checkpoint
1072
- - "fresh": Loads the lastest checkpoint created within the running Task.
1073
- This mode helps loading checkpoints across various retry attempts of the same task.
1074
- With this mode, no checkpoint will be loaded at the start of a task but any checkpoints
1075
- created within the task will be loaded when the task is retries execution on failure.
1076
-
1077
- temp_dir_root : str, default: None
1078
- The root directory under which `current.checkpoint.directory` will be created.
1092
+ cpu : int, default 1
1093
+ Number of CPUs required for this step.
1094
+ gpu : int, optional, default None
1095
+ Number of GPUs required for this step.
1096
+ disk : int, optional, default None
1097
+ Disk size (in MB) required for this step. Only applies on Kubernetes.
1098
+ memory : int, default 4096
1099
+ Memory size (in MB) required for this step.
1100
+ shared_memory : int, optional, default None
1101
+ The value for the size (in MiB) of the /dev/shm volume for this step.
1102
+ This parameter maps to the `--shm-size` option in Docker.
1079
1103
  """
1080
1104
  ...
1081
1105
 
1082
1106
  @typing.overload
1083
- def test_append_card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1084
- """
1085
- A simple decorator that demonstrates using CardDecoratorInjector
1086
- to inject a card and render simple markdown content.
1087
- """
1107
+ def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1088
1108
  ...
1089
1109
 
1090
1110
  @typing.overload
1091
- def test_append_card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1111
+ def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1092
1112
  ...
1093
1113
 
1094
- def test_append_card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
1114
+ def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
1095
1115
  """
1096
- A simple decorator that demonstrates using CardDecoratorInjector
1097
- to inject a card and render simple markdown content.
1116
+ Specifies the resources needed when executing this step.
1117
+
1118
+ Use `@resources` to specify the resource requirements
1119
+ independently of the specific compute layer (`@batch`, `@kubernetes`).
1120
+
1121
+ You can choose the compute layer on the command line by executing e.g.
1122
+ ```
1123
+ python myflow.py run --with batch
1124
+ ```
1125
+ or
1126
+ ```
1127
+ python myflow.py run --with kubernetes
1128
+ ```
1129
+ which executes the flow on the desired system using the
1130
+ requirements specified in `@resources`.
1131
+
1132
+
1133
+ Parameters
1134
+ ----------
1135
+ cpu : int, default 1
1136
+ Number of CPUs required for this step.
1137
+ gpu : int, optional, default None
1138
+ Number of GPUs required for this step.
1139
+ disk : int, optional, default None
1140
+ Disk size (in MB) required for this step. Only applies on Kubernetes.
1141
+ memory : int, default 4096
1142
+ Memory size (in MB) required for this step.
1143
+ shared_memory : int, optional, default None
1144
+ The value for the size (in MiB) of the /dev/shm volume for this step.
1145
+ This parameter maps to the `--shm-size` option in Docker.
1098
1146
  """
1099
1147
  ...
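
A compact sketch of declaring requirements once with `@resources` and picking the compute layer at launch time, as described above; the sizes are illustrative:

```python
from metaflow import FlowSpec, resources, step


class ResourcesDemoFlow(FlowSpec):

    @resources(cpu=4, memory=16384, gpu=1)  # illustrative sizes
    @step
    def start(self):
        # These requirements are honored by whichever compute layer is chosen,
        # e.g. `python <this file> run --with kubernetes`.
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    ResourcesDemoFlow()
```
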
1100
1148
 
1101
1149
  @typing.overload
1102
- def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1150
+ def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1103
1151
  """
1104
- Specifies environment variables to be set prior to the execution of a step.
1152
+ Specifies a timeout for your step.
1153
+
1154
+ This decorator is useful if this step may hang indefinitely.
1155
+
1156
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
1157
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
1158
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.
1159
+
1160
+ Note that all the values specified in parameters are added together so if you specify
1161
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
1105
1162
 
1106
1163
 
1107
1164
  Parameters
1108
1165
  ----------
1109
- vars : Dict[str, str], default {}
1110
- Dictionary of environment variables to set.
1166
+ seconds : int, default 0
1167
+ Number of seconds to wait prior to timing out.
1168
+ minutes : int, default 0
1169
+ Number of minutes to wait prior to timing out.
1170
+ hours : int, default 0
1171
+ Number of hours to wait prior to timing out.
1111
1172
  """
1112
1173
  ...
1113
1174
 
1114
1175
  @typing.overload
1115
- def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1176
+ def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1116
1177
  ...
1117
1178
 
1118
1179
  @typing.overload
1119
- def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1180
+ def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1120
1181
  ...
1121
1182
 
1122
- def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
1183
+ def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
1123
1184
  """
1124
- Specifies environment variables to be set prior to the execution of a step.
1185
+ Specifies a timeout for your step.
1186
+
1187
+ This decorator is useful if this step may hang indefinitely.
1188
+
1189
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
1190
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
1191
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.
1192
+
1193
+ Note that all the values specified in parameters are added together so if you specify
1194
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
1125
1195
 
1126
1196
 
1127
1197
  Parameters
1128
1198
  ----------
1129
- vars : Dict[str, str], default {}
1130
- Dictionary of environment variables to set.
1199
+ seconds : int, default 0
1200
+ Number of seconds to wait prior to timing out.
1201
+ minutes : int, default 0
1202
+ Number of minutes to wait prior to timing out.
1203
+ hours : int, default 0
1204
+ Number of hours to wait prior to timing out.
1131
1205
  """
1132
1206
  ...
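
A sketch combining `@timeout` with `@retry`, per the note above that a timeout surfaces as an exception and can therefore trigger a retry:

```python
import time

from metaflow import FlowSpec, retry, step, timeout


class TimeoutDemoFlow(FlowSpec):

    @retry(times=2)
    @timeout(minutes=30)  # 30-minute budget per attempt
    @step
    def start(self):
        time.sleep(1)  # stand-in for work that might otherwise hang
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    TimeoutDemoFlow()
```
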
1133
1207
 
1134
- def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = 'KUBERNETES_IMAGE_PULL_POLICY', image_pull_secrets: typing.List[str] = [], service_account: str = 'METAFLOW_KUBERNETES_SERVICE_ACCOUNT', secrets: typing.Optional[typing.List[str]] = None, node_selector: typing.Union[typing.Dict[str, str], str, None] = None, namespace: str = 'METAFLOW_KUBERNETES_NAMESPACE', gpu: typing.Optional[int] = None, gpu_vendor: str = 'KUBERNETES_GPU_VENDOR', tolerations: typing.List[typing.Dict[str, str]] = [], labels: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_LABELS', annotations: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_ANNOTATIONS', use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = '/metaflow_temp', persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None, hostname_resolution_timeout: int = 600, qos: str = 'Burstable', security_context: typing.Optional[typing.Dict[str, typing.Any]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1135
- """
1136
- Specifies that this step should execute on Kubernetes.
1137
-
1138
-
1139
- Parameters
1140
- ----------
1141
- cpu : int, default 1
1142
- Number of CPUs required for this step. If `@resources` is
1143
- also present, the maximum value from all decorators is used.
1144
- memory : int, default 4096
1145
- Memory size (in MB) required for this step. If
1146
- `@resources` is also present, the maximum value from all decorators is
1147
- used.
1148
- disk : int, default 10240
1149
- Disk size (in MB) required for this step. If
1150
- `@resources` is also present, the maximum value from all decorators is
1151
- used.
1152
- image : str, optional, default None
1153
- Docker image to use when launching on Kubernetes. If not specified, and
1154
- METAFLOW_KUBERNETES_CONTAINER_IMAGE is specified, that image is used. If
1155
- not, a default Docker image mapping to the current version of Python is used.
1156
- image_pull_policy: str, default KUBERNETES_IMAGE_PULL_POLICY
1157
- If given, the imagePullPolicy to be applied to the Docker image of the step.
1158
- image_pull_secrets: List[str], default []
1159
- The default is extracted from METAFLOW_KUBERNETES_IMAGE_PULL_SECRETS.
1160
- Kubernetes image pull secrets to use when pulling container images
1161
- in Kubernetes.
1162
- service_account : str, default METAFLOW_KUBERNETES_SERVICE_ACCOUNT
1163
- Kubernetes service account to use when launching pod in Kubernetes.
1164
- secrets : List[str], optional, default None
1165
- Kubernetes secrets to use when launching pod in Kubernetes. These
1166
- secrets are in addition to the ones defined in `METAFLOW_KUBERNETES_SECRETS`
1167
- in Metaflow configuration.
1168
- node_selector: Union[Dict[str,str], str], optional, default None
1169
- Kubernetes node selector(s) to apply to the pod running the task.
1170
- Can be passed in as a comma separated string of values e.g.
1171
- 'kubernetes.io/os=linux,kubernetes.io/arch=amd64' or as a dictionary
1172
- {'kubernetes.io/os': 'linux', 'kubernetes.io/arch': 'amd64'}
1173
- namespace : str, default METAFLOW_KUBERNETES_NAMESPACE
1174
- Kubernetes namespace to use when launching pod in Kubernetes.
1175
- gpu : int, optional, default None
1176
- Number of GPUs required for this step. A value of zero implies that
1177
- the scheduled node should not have GPUs.
1178
- gpu_vendor : str, default KUBERNETES_GPU_VENDOR
1179
- The vendor of the GPUs to be used for this step.
1180
- tolerations : List[Dict[str,str]], default []
1181
- The default is extracted from METAFLOW_KUBERNETES_TOLERATIONS.
1182
- Kubernetes tolerations to use when launching pod in Kubernetes.
1183
- labels: Dict[str, str], default: METAFLOW_KUBERNETES_LABELS
1184
- Kubernetes labels to use when launching pod in Kubernetes.
1185
- annotations: Dict[str, str], default: METAFLOW_KUBERNETES_ANNOTATIONS
1186
- Kubernetes annotations to use when launching pod in Kubernetes.
1187
- use_tmpfs : bool, default False
1188
- This enables an explicit tmpfs mount for this step.
1189
- tmpfs_tempdir : bool, default True
1190
- sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
1191
- tmpfs_size : int, optional, default: None
1192
- The value for the size (in MiB) of the tmpfs mount for this step.
1193
- This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
1194
- memory allocated for this step.
1195
- tmpfs_path : str, optional, default /metaflow_temp
1196
- Path to tmpfs mount for this step.
1197
- persistent_volume_claims : Dict[str, str], optional, default None
1198
- A map (dictionary) of persistent volumes to be mounted to the pod for this step. The map is from persistent
1199
- volumes to the path to which the volume is to be mounted, e.g., `{'pvc-name': '/path/to/mount/on'}`.
1200
- shared_memory: int, optional
1201
- Shared memory size (in MiB) required for this step
1202
- port: int, optional
1203
- Port number to specify in the Kubernetes job object
1204
- compute_pool : str, optional, default None
1205
- Compute pool to be used for for this step.
1206
- If not specified, any accessible compute pool within the perimeter is used.
1207
- hostname_resolution_timeout: int, default 10 * 60
1208
- Timeout in seconds for the workers tasks in the gang scheduled cluster to resolve the hostname of control task.
1209
- Only applicable when @parallel is used.
1210
- qos: str, default: Burstable
1211
- Quality of Service class to assign to the pod. Supported values are: Guaranteed, Burstable, BestEffort
1212
-
1213
- security_context: Dict[str, Any], optional, default None
1214
- Container security context. Applies to the task container. Allows the following keys:
1215
- - privileged: bool, optional, default None
1216
- - allow_privilege_escalation: bool, optional, default None
1217
- - run_as_user: int, optional, default None
1218
- - run_as_group: int, optional, default None
1219
- - run_as_non_root: bool, optional, default None
1208
+ @typing.overload
1209
+ def app_deploy(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1210
+ """
1211
+ Decorator prototype for all step decorators. This function gets specialized
1212
+ and imported for all decorator types by _import_plugin_decorators().
1220
1213
  """
1221
1214
  ...
1222
1215
 
1223
1216
  @typing.overload
1224
- def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1217
+ def app_deploy(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1218
+ ...
1219
+
1220
+ def app_deploy(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
1225
1221
  """
1226
- Specifies a timeout for your step.
1222
+ Decorator prototype for all step decorators. This function gets specialized
1223
+ and imported for all decorator types by _import_plugin_decorators().
1224
+ """
1225
+ ...
1226
+
1227
+ def s3_proxy(*, integration_name: typing.Optional[str] = None, write_mode: typing.Optional[str] = None, debug: typing.Optional[bool] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1228
+ """
1229
+ S3 Proxy decorator for routing S3 requests through a local proxy service.
1227
1230
 
1228
- This decorator is useful if this step may hang indefinitely.
1229
1231
 
1230
- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
1231
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
1232
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
1232
+ Parameters
1233
+ ----------
1234
+ integration_name : str, optional
1235
+ Name of the S3 proxy integration. If not specified, will use the only
1236
+ available S3 proxy integration in the namespace (fails if multiple exist).
1237
+ write_mode : str, optional
1238
+ The desired behavior during write operations to the target (origin) S3 bucket.
1239
+ Allowed options are:
1240
+ "origin-and-cache" -> write to both the target S3 bucket and local object
1241
+ storage
1242
+ "origin" -> only write to the target S3 bucket
1243
+ "cache" -> only write to the object storage service used for caching
1244
+ debug : bool, optional
1245
+ Enable debug logging for proxy operations.
1246
+ """
1247
+ ...
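
To make the `write_mode` options concrete, here is a hedged sketch of `@s3_proxy`; the integration name is a placeholder for whatever S3 proxy integration exists in your namespace:

```python
from metaflow import FlowSpec, s3_proxy, step


class S3ProxyDemoFlow(FlowSpec):

    @s3_proxy(integration_name="my-s3-proxy", write_mode="origin-and-cache")  # name is hypothetical
    @step
    def start(self):
        # S3 reads and writes issued inside this step are routed through the local proxy.
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    S3ProxyDemoFlow()
```
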
1248
+
1249
+ @typing.overload
1250
+ def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1251
+ """
1252
+ Specifies the Conda environment for the step.
1233
1253
 
1234
- Note that all the values specified in parameters are added together so if you specify
1235
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
1254
+ Information in this decorator will augment any
1255
+ attributes set in the `@conda_base` flow-level decorator. Hence,
1256
+ you can use `@conda_base` to set packages required by all
1257
+ steps and use `@conda` to specify step-specific overrides.
1236
1258
 
1237
1259
 
1238
1260
  Parameters
1239
1261
  ----------
1240
- seconds : int, default 0
1241
- Number of seconds to wait prior to timing out.
1242
- minutes : int, default 0
1243
- Number of minutes to wait prior to timing out.
1244
- hours : int, default 0
1245
- Number of hours to wait prior to timing out.
1262
+ packages : Dict[str, str], default {}
1263
+ Packages to use for this step. The key is the name of the package
1264
+ and the value is the version to use.
1265
+ libraries : Dict[str, str], default {}
1266
+ Supported for backward compatibility. When used with packages, packages will take precedence.
1267
+ python : str, optional, default None
1268
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1269
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1270
+ disabled : bool, default False
1271
+ If set to True, disables @conda.
1246
1272
  """
1247
1273
  ...
1248
1274
 
1249
1275
  @typing.overload
1250
- def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1276
+ def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1251
1277
  ...
1252
1278
 
1253
1279
  @typing.overload
1254
- def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1280
+ def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1255
1281
  ...
1256
1282
 
1257
- def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
1283
+ def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
1258
1284
  """
1259
- Specifies a timeout for your step.
1285
+ Specifies the Conda environment for the step.
1260
1286
 
1261
- This decorator is useful if this step may hang indefinitely.
1287
+ Information in this decorator will augment any
1288
+ attributes set in the `@conda_base` flow-level decorator. Hence,
1289
+ you can use `@conda_base` to set packages required by all
1290
+ steps and use `@conda` to specify step-specific overrides.
1262
1291
 
1263
- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
1264
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
1265
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
1266
1292
 
1267
- Note that all the values specified in parameters are added together so if you specify
1268
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
1293
+ Parameters
1294
+ ----------
1295
+ packages : Dict[str, str], default {}
1296
+ Packages to use for this step. The key is the name of the package
1297
+ and the value is the version to use.
1298
+ libraries : Dict[str, str], default {}
1299
+ Supported for backward compatibility. When used with packages, packages will take precedence.
1300
+ python : str, optional, default None
1301
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1302
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1303
+ disabled : bool, default False
1304
+ If set to True, disables @conda.
1305
+ """
1306
+ ...
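
A short sketch of step-level `@conda`; the package pin and Python version are illustrative, not recommendations:

```python
from metaflow import FlowSpec, conda, step


class CondaDemoFlow(FlowSpec):

    @conda(packages={"pandas": "2.2.2"}, python="3.11")  # illustrative pins
    @step
    def start(self):
        import pandas as pd  # resolved from the step's Conda environment

        self.df = pd.DataFrame({"x": [1, 2, 3]})
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    CondaDemoFlow()
```
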
1307
+
1308
+ def ollama(*, models: list, backend: str, force_pull: bool, cache_update_policy: str, force_cache_update: bool, debug: bool, circuit_breaker_config: dict, timeout_config: dict) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1309
+ """
1310
+ This decorator is used to run Ollama APIs as Metaflow task sidecars.
1311
+
1312
+ User code call
1313
+ --------------
1314
+ @ollama(
1315
+ models=[...],
1316
+ ...
1317
+ )
1318
+
1319
+ Valid backend options
1320
+ ---------------------
1321
+ - 'local': Run as a separate process on the local task machine.
1322
+ - (TODO) 'managed': Outerbounds hosts and selects compute provider.
1323
+ - (TODO) 'remote': Spin up separate instance to serve Ollama models.
1324
+
1325
+ Valid model options
1326
+ -------------------
1327
+ Any model here https://ollama.com/search, e.g. 'llama3.2', 'llama3.3'
1269
1328
 
1270
1329
 
1271
1330
  Parameters
1272
1331
  ----------
1273
- seconds : int, default 0
1274
- Number of seconds to wait prior to timing out.
1275
- minutes : int, default 0
1276
- Number of minutes to wait prior to timing out.
1277
- hours : int, default 0
1278
- Number of hours to wait prior to timing out.
1332
+ models: list[str]
1333
+ List of Ollama containers running models in sidecars.
1334
+ backend: str
1335
+ Determines where and how to run the Ollama process.
1336
+ force_pull: bool
1337
+ Whether to run `ollama pull` no matter what, or first check the remote cache in Metaflow datastore for this model key.
1338
+ cache_update_policy: str
1339
+ Cache update policy: "auto", "force", or "never".
1340
+ force_cache_update: bool
1341
+ Simple override for "force" cache update policy.
1342
+ debug: bool
1343
+ Whether to turn on verbose debugging logs.
1344
+ circuit_breaker_config: dict
1345
+ Configuration for circuit breaker protection. Keys: failure_threshold, recovery_timeout, reset_timeout.
1346
+ timeout_config: dict
1347
+ Configuration for various operation timeouts. Keys: pull, stop, health_check, install, server_startup.
1279
1348
  """
1280
1349
  ...
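
A hedged sketch of running an Ollama sidecar next to a step. Note that the stub signature above lists every argument as required; this sketch passes only `models` and `backend` and assumes the runtime decorator supplies defaults for the rest:

```python
from metaflow import FlowSpec, ollama, step


class OllamaDemoFlow(FlowSpec):

    @ollama(models=["llama3.2"], backend="local")  # remaining arguments assumed to default at runtime
    @step
    def start(self):
        # The Ollama sidecar serves the listed model(s) for the duration of this
        # step; how you query it (HTTP client, SDK, ...) is up to your code.
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    OllamaDemoFlow()
```
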
1281
1350
 
@@ -1330,44 +1399,97 @@ def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packa
1330
1399
  """
1331
1400
  ...
1332
1401
 
1402
+ def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1403
+ """
1404
+ The `@airflow_external_task_sensor` decorator attaches an Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
1405
+ This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as flow decorators. Adding more than one ensures that the `start` step starts only after all sensors finish.
1406
+
1407
+
1408
+ Parameters
1409
+ ----------
1410
+ timeout : int
1411
+ Time, in seconds before the task times out and fails. (Default: 3600)
1412
+ poke_interval : int
1413
+ Time in seconds that the job should wait in between each try. (Default: 60)
1414
+ mode : str
1415
+ How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
1416
+ exponential_backoff : bool
1417
+ allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
1418
+ pool : str
1419
+ the slot pool this task should run in,
1420
+ slot pools are a way to limit concurrency for certain tasks. (Default:None)
1421
+ soft_fail : bool
1422
+ Set to true to mark the task as SKIPPED on failure. (Default: False)
1423
+ name : str
1424
+ Name of the sensor on Airflow
1425
+ description : str
1426
+ Description of sensor in the Airflow UI
1427
+ external_dag_id : str
1428
+ The dag_id that contains the task you want to wait for.
1429
+ external_task_ids : List[str]
1430
+ The list of task_ids that you want to wait for.
1431
+ If None (default value) the sensor waits for the DAG. (Default: None)
1432
+ allowed_states : List[str]
1433
+ Iterable of allowed states, (Default: ['success'])
1434
+ failed_states : List[str]
1435
+ Iterable of failed or dis-allowed states. (Default: None)
1436
+ execution_delta : datetime.timedelta
1437
+ time difference with the previous execution to look at,
1438
+ the default is the same logical date as the current task or DAG. (Default: None)
1439
+ check_existence: bool
1440
+ Set to True to check if the external task exists or check if
1441
+ the DAG to wait for exists. (Default: True)
1442
+ """
1443
+ ...
1444
+
1333
1445
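An editorial sketch of the flow-level sensor above, assuming the decorator is importable from `metaflow` as the stub indicates. The DAG and task ids are hypothetical, and the sensor only takes effect when the flow is deployed with `airflow create`.

from metaflow import FlowSpec, airflow_external_task_sensor, step

# DAG/task ids are hypothetical; documented defaults are spelled out for clarity.
@airflow_external_task_sensor(
    name="wait_for_upstream_etl",
    external_dag_id="upstream_etl",
    external_task_ids=["export_table"],
    allowed_states=["success"],
    timeout=3600,
    poke_interval=60,
    mode="poke",
)
class SensorGatedFlow(FlowSpec):

    @step
    def start(self):
        # Only reached after the upstream Airflow task succeeds.
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    SensorGatedFlow()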
  @typing.overload
- def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
- Specifies the PyPI packages for all steps of the flow.
+ Specifies the times when the flow should be run when running on a
+ production scheduler.

- Use `@pypi_base` to set common packages required by all
- steps and use `@pypi` to specify step-specific overrides.

  Parameters
  ----------
- packages : Dict[str, str], default: {}
- Packages to use for this flow. The key is the name of the package
- and the value is the version to use.
- python : str, optional, default: None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
+ hourly : bool, default False
+ Run the workflow hourly.
+ daily : bool, default True
+ Run the workflow daily.
+ weekly : bool, default False
+ Run the workflow weekly.
+ cron : str, optional, default None
+ Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
+ specified by this expression.
+ timezone : str, optional, default None
+ Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
+ which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
  """
  ...

  @typing.overload
- def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
  ...

- def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
+ def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
  """
- Specifies the PyPI packages for all steps of the flow.
+ Specifies the times when the flow should be run when running on a
+ production scheduler.

- Use `@pypi_base` to set common packages required by all
- steps and use `@pypi` to specify step-specific overrides.

  Parameters
  ----------
- packages : Dict[str, str], default: {}
- Packages to use for this flow. The key is the name of the package
- and the value is the version to use.
- python : str, optional, default: None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
+ hourly : bool, default False
+ Run the workflow hourly.
+ daily : bool, default True
+ Run the workflow daily.
+ weekly : bool, default False
+ Run the workflow weekly.
+ cron : str, optional, default None
+ Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
+ specified by this expression.
+ timezone : str, optional, default None
+ Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
+ which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
  """
  ...
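For reference, a minimal `@schedule` usage sketch matching the parameters documented above; the daily cadence is arbitrary and the schedule only applies once the flow is deployed to a production scheduler.

from metaflow import FlowSpec, schedule, step

# Runs once a day when deployed (e.g. via `argo-workflows create` or `step-functions create`);
# a cron expression could be passed instead of daily=True.
@schedule(daily=True)
class NightlyFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    NightlyFlow()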
 
@@ -1485,6 +1607,49 @@ def with_artifact_store(f: typing.Optional[typing.Type[FlowSpecDerived]] = None)
  """
  ...

+ def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ """
+ The `@airflow_s3_key_sensor` decorator attaches a Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
+ before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
+ and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
+ added as a flow decorators. Adding more than one decorator will ensure that `start` step
+ starts only after all sensors finish.
+
+
+ Parameters
+ ----------
+ timeout : int
+ Time, in seconds before the task times out and fails. (Default: 3600)
+ poke_interval : int
+ Time in seconds that the job should wait in between each try. (Default: 60)
+ mode : str
+ How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
+ exponential_backoff : bool
+ allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
+ pool : str
+ the slot pool this task should run in,
+ slot pools are a way to limit concurrency for certain tasks. (Default:None)
+ soft_fail : bool
+ Set to true to mark the task as SKIPPED on failure. (Default: False)
+ name : str
+ Name of the sensor on Airflow
+ description : str
+ Description of sensor in the Airflow UI
+ bucket_key : Union[str, List[str]]
+ The key(s) being waited on. Supports full s3:// style url or relative path from root level.
+ When it's specified as a full s3:// url, please leave `bucket_name` as None
+ bucket_name : str
+ Name of the S3 bucket. Only needed when bucket_key is not provided as a full s3:// url.
+ When specified, all the keys passed to bucket_key refers to this bucket. (Default:None)
+ wildcard_match : bool
+ whether the bucket_key should be interpreted as a Unix wildcard pattern. (Default: False)
+ aws_conn_id : str
+ a reference to the s3 connection on Airflow. (Default: None)
+ verify : bool
+ Whether or not to verify SSL certificates for S3 connection. (Default: None)
+ """
+ ...
+
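A similar editorial sketch for `@airflow_s3_key_sensor`, again assuming the decorator is importable from `metaflow`. The bucket and key are placeholders, and `bucket_name` is omitted because a full s3:// URL is used, as the docstring recommends.

from metaflow import FlowSpec, airflow_s3_key_sensor, step

# Placeholder S3 location; the start step waits until this key exists when deployed via `airflow create`.
@airflow_s3_key_sensor(
    name="wait_for_daily_export",
    bucket_key="s3://example-bucket/exports/daily/_SUCCESS",
    timeout=3600,
    poke_interval=60,
    mode="poke",
)
class S3GatedFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    S3GatedFlow()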
  @typing.overload
  def trigger_on_finish(*, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
@@ -1586,178 +1751,6 @@ def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *
  """
  ...

- def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
- """
- The `@airflow_external_task_sensor` decorator attaches a Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
- This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as a flow decorators. Adding more than one decorator will ensure that `start` step starts only after all sensors finish.
-
-
- Parameters
- ----------
- timeout : int
- Time, in seconds before the task times out and fails. (Default: 3600)
- poke_interval : int
- Time in seconds that the job should wait in between each try. (Default: 60)
- mode : str
- How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
- exponential_backoff : bool
- allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
- pool : str
- the slot pool this task should run in,
- slot pools are a way to limit concurrency for certain tasks. (Default:None)
- soft_fail : bool
- Set to true to mark the task as SKIPPED on failure. (Default: False)
- name : str
- Name of the sensor on Airflow
- description : str
- Description of sensor in the Airflow UI
- external_dag_id : str
- The dag_id that contains the task you want to wait for.
- external_task_ids : List[str]
- The list of task_ids that you want to wait for.
- If None (default value) the sensor waits for the DAG. (Default: None)
- allowed_states : List[str]
- Iterable of allowed states, (Default: ['success'])
- failed_states : List[str]
- Iterable of failed or dis-allowed states. (Default: None)
- execution_delta : datetime.timedelta
- time difference with the previous execution to look at,
- the default is the same logical date as the current task or DAG. (Default: None)
- check_existence: bool
- Set to True to check if the external task exists or check if
- the DAG to wait for exists. (Default: True)
- """
- ...
-
- @typing.overload
- def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
- """
- Specifies the times when the flow should be run when running on a
- production scheduler.
-
-
- Parameters
- ----------
- hourly : bool, default False
- Run the workflow hourly.
- daily : bool, default True
- Run the workflow daily.
- weekly : bool, default False
- Run the workflow weekly.
- cron : str, optional, default None
- Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
- specified by this expression.
- timezone : str, optional, default None
- Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
- which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
- """
- ...
-
- @typing.overload
- def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
- ...
-
- def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
- """
- Specifies the times when the flow should be run when running on a
- production scheduler.
-
-
- Parameters
- ----------
- hourly : bool, default False
- Run the workflow hourly.
- daily : bool, default True
- Run the workflow daily.
- weekly : bool, default False
- Run the workflow weekly.
- cron : str, optional, default None
- Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
- specified by this expression.
- timezone : str, optional, default None
- Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
- which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
- """
- ...
-
- def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
- """
- The `@airflow_s3_key_sensor` decorator attaches a Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
- before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
- and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
- added as a flow decorators. Adding more than one decorator will ensure that `start` step
- starts only after all sensors finish.
-
-
- Parameters
- ----------
- timeout : int
- Time, in seconds before the task times out and fails. (Default: 3600)
- poke_interval : int
- Time in seconds that the job should wait in between each try. (Default: 60)
- mode : str
- How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
- exponential_backoff : bool
- allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
- pool : str
- the slot pool this task should run in,
- slot pools are a way to limit concurrency for certain tasks. (Default:None)
- soft_fail : bool
- Set to true to mark the task as SKIPPED on failure. (Default: False)
- name : str
- Name of the sensor on Airflow
- description : str
- Description of sensor in the Airflow UI
- bucket_key : Union[str, List[str]]
- The key(s) being waited on. Supports full s3:// style url or relative path from root level.
- When it's specified as a full s3:// url, please leave `bucket_name` as None
- bucket_name : str
- Name of the S3 bucket. Only needed when bucket_key is not provided as a full s3:// url.
- When specified, all the keys passed to bucket_key refers to this bucket. (Default:None)
- wildcard_match : bool
- whether the bucket_key should be interpreted as a Unix wildcard pattern. (Default: False)
- aws_conn_id : str
- a reference to the s3 connection on Airflow. (Default: None)
- verify : bool
- Whether or not to verify SSL certificates for S3 connection. (Default: None)
- """
- ...
-
- def project(*, name: str, branch: typing.Optional[str] = None, production: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
- """
- Specifies what flows belong to the same project.
-
- A project-specific namespace is created for all flows that
- use the same `@project(name)`.
-
-
- Parameters
- ----------
- name : str
- Project name. Make sure that the name is unique amongst all
- projects that use the same production scheduler. The name may
- contain only lowercase alphanumeric characters and underscores.
-
- branch : Optional[str], default None
- The branch to use. If not specified, the branch is set to
- `user.<username>` unless `production` is set to `True`. This can
- also be set on the command line using `--branch` as a top-level option.
- It is an error to specify `branch` in the decorator and on the command line.
-
- production : bool, default False
- Whether or not the branch is the production branch. This can also be set on the
- command line using `--production` as a top-level option. It is an error to specify
- `production` in the decorator and on the command line.
- The project branch name will be:
- - if `branch` is specified:
- - if `production` is True: `prod.<branch>`
- - if `production` is False: `test.<branch>`
- - if `branch` is not specified:
- - if `production` is True: `prod`
- - if `production` is False: `user.<username>`
- """
- ...
-
  @typing.overload
  def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
@@ -1851,5 +1844,81 @@ def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: t
  """
  ...

+ @typing.overload
+ def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ """
+ Specifies the PyPI packages for all steps of the flow.
+
+ Use `@pypi_base` to set common packages required by all
+ steps and use `@pypi` to specify step-specific overrides.
+
+ Parameters
+ ----------
+ packages : Dict[str, str], default: {}
+ Packages to use for this flow. The key is the name of the package
+ and the value is the version to use.
+ python : str, optional, default: None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ """
+ ...
+
+ @typing.overload
+ def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ ...
+
+ def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
+ """
+ Specifies the PyPI packages for all steps of the flow.
+
+ Use `@pypi_base` to set common packages required by all
+ steps and use `@pypi` to specify step-specific overrides.
+
+ Parameters
+ ----------
+ packages : Dict[str, str], default: {}
+ Packages to use for this flow. The key is the name of the package
+ and the value is the version to use.
+ python : str, optional, default: None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ """
+ ...
+
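A minimal `@pypi_base` sketch matching the docstring above; the package pin and Python version are illustrative.

from metaflow import FlowSpec, pypi_base, step

# Flow-wide PyPI dependencies; individual steps could still override them with @pypi.
@pypi_base(python="3.11.5", packages={"requests": "2.31.0"})
class PypiBaseFlow(FlowSpec):

    @step
    def start(self):
        import requests  # resolved from the flow-level @pypi_base environment
        print("requests", requests.__version__)
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    PypiBaseFlow()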
+ def project(*, name: str, branch: typing.Optional[str] = None, production: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ """
+ Specifies what flows belong to the same project.
+
+ A project-specific namespace is created for all flows that
+ use the same `@project(name)`.
+
+
+ Parameters
+ ----------
+ name : str
+ Project name. Make sure that the name is unique amongst all
+ projects that use the same production scheduler. The name may
+ contain only lowercase alphanumeric characters and underscores.
+
+ branch : Optional[str], default None
+ The branch to use. If not specified, the branch is set to
+ `user.<username>` unless `production` is set to `True`. This can
+ also be set on the command line using `--branch` as a top-level option.
+ It is an error to specify `branch` in the decorator and on the command line.
+
+ production : bool, default False
+ Whether or not the branch is the production branch. This can also be set on the
+ command line using `--production` as a top-level option. It is an error to specify
+ `production` in the decorator and on the command line.
+ The project branch name will be:
+ - if `branch` is specified:
+ - if `production` is True: `prod.<branch>`
+ - if `production` is False: `test.<branch>`
+ - if `branch` is not specified:
+ - if `production` is True: `prod`
+ - if `production` is False: `user.<username>`
+ """
+ ...
+
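Finally, an editorial sketch of `@project` scoping as documented above; the project name is illustrative.

from metaflow import FlowSpec, project, step

# Deployments of this flow are namespaced under the 'demo_recsys' project; branch selection
# follows the rules in the docstring (user.<username>, test.<branch>, prod.<branch>, prod).
@project(name="demo_recsys")
class ProjectScopedFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    ProjectScopedFlow()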
  pkg_name: str