ob-metaflow-stubs 6.0.10.4__py2.py3-none-any.whl → 6.0.10.5__py2.py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (262)
  1. metaflow-stubs/__init__.pyi +863 -863
  2. metaflow-stubs/cards.pyi +2 -2
  3. metaflow-stubs/cli.pyi +2 -2
  4. metaflow-stubs/cli_components/__init__.pyi +2 -2
  5. metaflow-stubs/cli_components/utils.pyi +2 -2
  6. metaflow-stubs/client/__init__.pyi +2 -2
  7. metaflow-stubs/client/core.pyi +4 -4
  8. metaflow-stubs/client/filecache.pyi +2 -2
  9. metaflow-stubs/events.pyi +2 -2
  10. metaflow-stubs/exception.pyi +2 -2
  11. metaflow-stubs/flowspec.pyi +4 -4
  12. metaflow-stubs/generated_for.txt +1 -1
  13. metaflow-stubs/includefile.pyi +4 -4
  14. metaflow-stubs/meta_files.pyi +2 -2
  15. metaflow-stubs/metadata_provider/__init__.pyi +2 -2
  16. metaflow-stubs/metadata_provider/heartbeat.pyi +2 -2
  17. metaflow-stubs/metadata_provider/metadata.pyi +2 -2
  18. metaflow-stubs/metadata_provider/util.pyi +2 -2
  19. metaflow-stubs/metaflow_config.pyi +2 -2
  20. metaflow-stubs/metaflow_current.pyi +58 -58
  21. metaflow-stubs/metaflow_git.pyi +2 -2
  22. metaflow-stubs/mf_extensions/__init__.pyi +2 -2
  23. metaflow-stubs/mf_extensions/obcheckpoint/__init__.pyi +2 -2
  24. metaflow-stubs/mf_extensions/obcheckpoint/plugins/__init__.pyi +2 -2
  25. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/__init__.pyi +2 -2
  26. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/__init__.pyi +2 -2
  27. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/async_cards.pyi +2 -2
  28. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/deco_injection_mixin.pyi +2 -2
  29. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/extra_components.pyi +3 -3
  30. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/__init__.pyi +2 -2
  31. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/cards/__init__.pyi +2 -2
  32. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/cards/checkpoint_lister.pyi +5 -5
  33. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/cards/lineage_card.pyi +2 -2
  34. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/checkpoint_storage.pyi +4 -4
  35. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/constructors.pyi +2 -2
  36. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/core.pyi +2 -2
  37. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/decorator.pyi +4 -4
  38. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/exceptions.pyi +2 -2
  39. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/final_api.pyi +2 -2
  40. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/lineage.pyi +2 -2
  41. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/__init__.pyi +2 -2
  42. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/context.pyi +3 -3
  43. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/core.pyi +3 -3
  44. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/decorator.pyi +2 -2
  45. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/exceptions.pyi +2 -2
  46. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/task_utils.pyi +2 -2
  47. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/utils.pyi +2 -2
  48. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastructures.pyi +2 -2
  49. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/exceptions.pyi +2 -2
  50. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/hf_hub/__init__.pyi +2 -2
  51. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/hf_hub/decorator.pyi +3 -3
  52. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/__init__.pyi +2 -2
  53. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/core.pyi +3 -3
  54. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/exceptions.pyi +2 -2
  55. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/model_storage.pyi +4 -4
  56. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/__init__.pyi +2 -2
  57. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/flowspec_utils.pyi +2 -2
  58. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/general.pyi +2 -2
  59. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/identity_utils.pyi +2 -2
  60. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/serialization_handler/__init__.pyi +2 -2
  61. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/serialization_handler/base.pyi +2 -2
  62. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/serialization_handler/tar.pyi +2 -2
  63. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/tar_utils.pyi +2 -2
  64. metaflow-stubs/mf_extensions/outerbounds/__init__.pyi +2 -2
  65. metaflow-stubs/mf_extensions/outerbounds/plugins/__init__.pyi +2 -2
  66. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/__init__.pyi +2 -2
  67. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/__init__.pyi +2 -2
  68. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/_state_machine.pyi +2 -2
  69. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/_vendor/__init__.pyi +2 -2
  70. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/_vendor/spinner/__init__.pyi +2 -2
  71. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/_vendor/spinner/spinners.pyi +2 -2
  72. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/app_cli.pyi +3 -3
  73. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/app_config.pyi +3 -3
  74. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/capsule.pyi +4 -4
  75. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/click_importer.pyi +2 -2
  76. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/code_package/__init__.pyi +2 -2
  77. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/code_package/code_packager.pyi +3 -3
  78. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/__init__.pyi +2 -2
  79. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/cli_generator.pyi +2 -2
  80. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/config_utils.pyi +3 -3
  81. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/schema_export.pyi +2 -2
  82. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/typed_configs.pyi +3 -3
  83. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/unified_config.pyi +3 -3
  84. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/dependencies.pyi +3 -3
  85. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/deployer.pyi +4 -4
  86. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/experimental/__init__.pyi +2 -2
  87. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/perimeters.pyi +2 -2
  88. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/utils.pyi +3 -3
  89. metaflow-stubs/mf_extensions/outerbounds/plugins/aws/__init__.pyi +2 -2
  90. metaflow-stubs/mf_extensions/outerbounds/plugins/aws/assume_role_decorator.pyi +2 -2
  91. metaflow-stubs/mf_extensions/outerbounds/plugins/card_utilities/__init__.pyi +2 -2
  92. metaflow-stubs/mf_extensions/outerbounds/plugins/card_utilities/async_cards.pyi +2 -2
  93. metaflow-stubs/mf_extensions/outerbounds/plugins/card_utilities/injector.pyi +2 -2
  94. metaflow-stubs/mf_extensions/outerbounds/plugins/checkpoint_datastores/__init__.pyi +2 -2
  95. metaflow-stubs/mf_extensions/outerbounds/plugins/checkpoint_datastores/coreweave.pyi +2 -2
  96. metaflow-stubs/mf_extensions/outerbounds/plugins/checkpoint_datastores/nebius.pyi +2 -2
  97. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/__init__.pyi +2 -2
  98. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/baker.pyi +3 -3
  99. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/docker_environment.pyi +2 -2
  100. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/fast_bakery.pyi +2 -2
  101. metaflow-stubs/mf_extensions/outerbounds/plugins/kubernetes/__init__.pyi +2 -2
  102. metaflow-stubs/mf_extensions/outerbounds/plugins/kubernetes/pod_killer.pyi +2 -2
  103. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/__init__.pyi +2 -2
  104. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/constants.pyi +2 -2
  105. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/exceptions.pyi +2 -2
  106. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/ollama.pyi +2 -2
  107. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/status_card.pyi +2 -2
  108. metaflow-stubs/mf_extensions/outerbounds/plugins/snowflake/__init__.pyi +2 -2
  109. metaflow-stubs/mf_extensions/outerbounds/plugins/snowflake/snowflake.pyi +2 -2
  110. metaflow-stubs/mf_extensions/outerbounds/profilers/__init__.pyi +2 -2
  111. metaflow-stubs/mf_extensions/outerbounds/profilers/gpu.pyi +2 -2
  112. metaflow-stubs/mf_extensions/outerbounds/remote_config.pyi +2 -2
  113. metaflow-stubs/mf_extensions/outerbounds/toplevel/__init__.pyi +2 -2
  114. metaflow-stubs/mf_extensions/outerbounds/toplevel/global_aliases_for_metaflow_package.pyi +2 -2
  115. metaflow-stubs/mf_extensions/outerbounds/toplevel/s3_proxy.pyi +2 -2
  116. metaflow-stubs/multicore_utils.pyi +2 -2
  117. metaflow-stubs/ob_internal.pyi +2 -2
  118. metaflow-stubs/packaging_sys/__init__.pyi +7 -7
  119. metaflow-stubs/packaging_sys/backend.pyi +4 -4
  120. metaflow-stubs/packaging_sys/distribution_support.pyi +4 -4
  121. metaflow-stubs/packaging_sys/tar_backend.pyi +6 -6
  122. metaflow-stubs/packaging_sys/utils.pyi +2 -2
  123. metaflow-stubs/packaging_sys/v1.pyi +2 -2
  124. metaflow-stubs/parameters.pyi +4 -4
  125. metaflow-stubs/plugins/__init__.pyi +11 -11
  126. metaflow-stubs/plugins/airflow/__init__.pyi +2 -2
  127. metaflow-stubs/plugins/airflow/airflow_utils.pyi +2 -2
  128. metaflow-stubs/plugins/airflow/exception.pyi +2 -2
  129. metaflow-stubs/plugins/airflow/sensors/__init__.pyi +2 -2
  130. metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +2 -2
  131. metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +2 -2
  132. metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +2 -2
  133. metaflow-stubs/plugins/argo/__init__.pyi +2 -2
  134. metaflow-stubs/plugins/argo/argo_client.pyi +2 -2
  135. metaflow-stubs/plugins/argo/argo_events.pyi +2 -2
  136. metaflow-stubs/plugins/argo/argo_workflows.pyi +2 -2
  137. metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +4 -4
  138. metaflow-stubs/plugins/argo/argo_workflows_deployer.pyi +3 -3
  139. metaflow-stubs/plugins/argo/argo_workflows_deployer_objects.pyi +3 -3
  140. metaflow-stubs/plugins/argo/exit_hooks.pyi +2 -2
  141. metaflow-stubs/plugins/aws/__init__.pyi +2 -2
  142. metaflow-stubs/plugins/aws/aws_client.pyi +2 -2
  143. metaflow-stubs/plugins/aws/aws_utils.pyi +2 -2
  144. metaflow-stubs/plugins/aws/batch/__init__.pyi +2 -2
  145. metaflow-stubs/plugins/aws/batch/batch.pyi +2 -2
  146. metaflow-stubs/plugins/aws/batch/batch_client.pyi +2 -2
  147. metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +2 -2
  148. metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +2 -2
  149. metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +2 -2
  150. metaflow-stubs/plugins/aws/step_functions/__init__.pyi +2 -2
  151. metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +2 -2
  152. metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +2 -2
  153. metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +2 -2
  154. metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +2 -2
  155. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer.pyi +3 -3
  156. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer_objects.pyi +3 -3
  157. metaflow-stubs/plugins/azure/__init__.pyi +2 -2
  158. metaflow-stubs/plugins/azure/azure_credential.pyi +2 -2
  159. metaflow-stubs/plugins/azure/azure_exceptions.pyi +2 -2
  160. metaflow-stubs/plugins/azure/azure_secret_manager_secrets_provider.pyi +2 -2
  161. metaflow-stubs/plugins/azure/azure_utils.pyi +2 -2
  162. metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +2 -2
  163. metaflow-stubs/plugins/azure/includefile_support.pyi +2 -2
  164. metaflow-stubs/plugins/cards/__init__.pyi +2 -2
  165. metaflow-stubs/plugins/cards/card_client.pyi +2 -2
  166. metaflow-stubs/plugins/cards/card_creator.pyi +2 -2
  167. metaflow-stubs/plugins/cards/card_datastore.pyi +2 -2
  168. metaflow-stubs/plugins/cards/card_decorator.pyi +2 -2
  169. metaflow-stubs/plugins/cards/card_modules/__init__.pyi +2 -2
  170. metaflow-stubs/plugins/cards/card_modules/basic.pyi +3 -3
  171. metaflow-stubs/plugins/cards/card_modules/card.pyi +2 -2
  172. metaflow-stubs/plugins/cards/card_modules/components.pyi +3 -3
  173. metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +2 -2
  174. metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +2 -2
  175. metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +2 -2
  176. metaflow-stubs/plugins/cards/card_resolver.pyi +2 -2
  177. metaflow-stubs/plugins/cards/component_serializer.pyi +2 -2
  178. metaflow-stubs/plugins/cards/exception.pyi +2 -2
  179. metaflow-stubs/plugins/catch_decorator.pyi +2 -2
  180. metaflow-stubs/plugins/datatools/__init__.pyi +2 -2
  181. metaflow-stubs/plugins/datatools/local.pyi +2 -2
  182. metaflow-stubs/plugins/datatools/s3/__init__.pyi +2 -2
  183. metaflow-stubs/plugins/datatools/s3/s3.pyi +4 -4
  184. metaflow-stubs/plugins/datatools/s3/s3tail.pyi +2 -2
  185. metaflow-stubs/plugins/datatools/s3/s3util.pyi +2 -2
  186. metaflow-stubs/plugins/debug_logger.pyi +2 -2
  187. metaflow-stubs/plugins/debug_monitor.pyi +2 -2
  188. metaflow-stubs/plugins/environment_decorator.pyi +2 -2
  189. metaflow-stubs/plugins/events_decorator.pyi +45 -4
  190. metaflow-stubs/plugins/exit_hook/__init__.pyi +2 -2
  191. metaflow-stubs/plugins/exit_hook/exit_hook_decorator.pyi +2 -2
  192. metaflow-stubs/plugins/frameworks/__init__.pyi +2 -2
  193. metaflow-stubs/plugins/frameworks/pytorch.pyi +2 -2
  194. metaflow-stubs/plugins/gcp/__init__.pyi +2 -2
  195. metaflow-stubs/plugins/gcp/gcp_secret_manager_secrets_provider.pyi +2 -2
  196. metaflow-stubs/plugins/gcp/gs_exceptions.pyi +2 -2
  197. metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +2 -2
  198. metaflow-stubs/plugins/gcp/gs_utils.pyi +2 -2
  199. metaflow-stubs/plugins/gcp/includefile_support.pyi +2 -2
  200. metaflow-stubs/plugins/kubernetes/__init__.pyi +2 -2
  201. metaflow-stubs/plugins/kubernetes/kube_utils.pyi +2 -2
  202. metaflow-stubs/plugins/kubernetes/kubernetes.pyi +2 -2
  203. metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +2 -2
  204. metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +2 -2
  205. metaflow-stubs/plugins/kubernetes/kubernetes_jobsets.pyi +2 -2
  206. metaflow-stubs/plugins/kubernetes/spot_monitor_sidecar.pyi +2 -2
  207. metaflow-stubs/plugins/ollama/__init__.pyi +3 -3
  208. metaflow-stubs/plugins/optuna/__init__.pyi +2 -2
  209. metaflow-stubs/plugins/parallel_decorator.pyi +2 -2
  210. metaflow-stubs/plugins/perimeters.pyi +2 -2
  211. metaflow-stubs/plugins/project_decorator.pyi +2 -2
  212. metaflow-stubs/plugins/pypi/__init__.pyi +2 -2
  213. metaflow-stubs/plugins/pypi/conda_decorator.pyi +2 -2
  214. metaflow-stubs/plugins/pypi/conda_environment.pyi +3 -3
  215. metaflow-stubs/plugins/pypi/parsers.pyi +2 -2
  216. metaflow-stubs/plugins/pypi/pypi_decorator.pyi +2 -2
  217. metaflow-stubs/plugins/pypi/pypi_environment.pyi +2 -2
  218. metaflow-stubs/plugins/pypi/utils.pyi +2 -2
  219. metaflow-stubs/plugins/resources_decorator.pyi +2 -2
  220. metaflow-stubs/plugins/retry_decorator.pyi +2 -2
  221. metaflow-stubs/plugins/secrets/__init__.pyi +3 -3
  222. metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +2 -2
  223. metaflow-stubs/plugins/secrets/secrets_decorator.pyi +2 -2
  224. metaflow-stubs/plugins/secrets/secrets_func.pyi +2 -2
  225. metaflow-stubs/plugins/secrets/secrets_spec.pyi +2 -2
  226. metaflow-stubs/plugins/secrets/utils.pyi +2 -2
  227. metaflow-stubs/plugins/snowflake/__init__.pyi +2 -2
  228. metaflow-stubs/plugins/storage_executor.pyi +2 -2
  229. metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +3 -3
  230. metaflow-stubs/plugins/timeout_decorator.pyi +2 -2
  231. metaflow-stubs/plugins/torchtune/__init__.pyi +2 -2
  232. metaflow-stubs/plugins/uv/__init__.pyi +2 -2
  233. metaflow-stubs/plugins/uv/uv_environment.pyi +2 -2
  234. metaflow-stubs/profilers/__init__.pyi +2 -2
  235. metaflow-stubs/pylint_wrapper.pyi +2 -2
  236. metaflow-stubs/runner/__init__.pyi +2 -2
  237. metaflow-stubs/runner/deployer.pyi +33 -33
  238. metaflow-stubs/runner/deployer_impl.pyi +2 -2
  239. metaflow-stubs/runner/metaflow_runner.pyi +3 -3
  240. metaflow-stubs/runner/nbdeploy.pyi +2 -2
  241. metaflow-stubs/runner/nbrun.pyi +2 -2
  242. metaflow-stubs/runner/subprocess_manager.pyi +2 -2
  243. metaflow-stubs/runner/utils.pyi +3 -3
  244. metaflow-stubs/system/__init__.pyi +2 -2
  245. metaflow-stubs/system/system_logger.pyi +2 -2
  246. metaflow-stubs/system/system_monitor.pyi +2 -2
  247. metaflow-stubs/tagging_util.pyi +2 -2
  248. metaflow-stubs/tuple_util.pyi +2 -2
  249. metaflow-stubs/user_configs/__init__.pyi +2 -2
  250. metaflow-stubs/user_configs/config_options.pyi +3 -3
  251. metaflow-stubs/user_configs/config_parameters.pyi +5 -5
  252. metaflow-stubs/user_decorators/__init__.pyi +2 -2
  253. metaflow-stubs/user_decorators/common.pyi +2 -2
  254. metaflow-stubs/user_decorators/mutable_flow.pyi +6 -6
  255. metaflow-stubs/user_decorators/mutable_step.pyi +6 -6
  256. metaflow-stubs/user_decorators/user_flow_decorator.pyi +3 -3
  257. metaflow-stubs/user_decorators/user_step_decorator.pyi +5 -5
  258. {ob_metaflow_stubs-6.0.10.4.dist-info → ob_metaflow_stubs-6.0.10.5.dist-info}/METADATA +1 -1
  259. ob_metaflow_stubs-6.0.10.5.dist-info/RECORD +262 -0
  260. ob_metaflow_stubs-6.0.10.4.dist-info/RECORD +0 -262
  261. {ob_metaflow_stubs-6.0.10.4.dist-info → ob_metaflow_stubs-6.0.10.5.dist-info}/WHEEL +0 -0
  262. {ob_metaflow_stubs-6.0.10.4.dist-info → ob_metaflow_stubs-6.0.10.5.dist-info}/top_level.txt +0 -0
@@ -1,7 +1,7 @@
  ######################################################################################################
  # Auto-generated Metaflow stub file #
- # MF version: 2.18.5.1+obcheckpoint(0.2.6);ob(v1) #
- # Generated on 2025-09-16T23:23:08.891416 #
+ # MF version: 2.18.7.1+obcheckpoint(0.2.6);ob(v1) #
+ # Generated on 2025-09-19T08:41:35.349888 #
  ######################################################################################################

  from __future__ import annotations
@@ -39,8 +39,8 @@ from .user_decorators.user_step_decorator import UserStepDecorator as UserStepDe
  from .user_decorators.user_step_decorator import StepMutator as StepMutator
  from .user_decorators.user_step_decorator import user_step_decorator as user_step_decorator
  from .user_decorators.user_flow_decorator import FlowMutator as FlowMutator
- from . import cards as cards
  from . import tuple_util as tuple_util
+ from . import cards as cards
  from . import metaflow_git as metaflow_git
  from . import events as events
  from . import runner as runner
@@ -49,8 +49,8 @@ from .mf_extensions.outerbounds.toplevel.global_aliases_for_metaflow_package imp
  from . import includefile as includefile
  from .includefile import IncludeFile as IncludeFile
  from .plugins.pypi.parsers import conda_environment_yml_parser as conda_environment_yml_parser
- from .plugins.pypi.parsers import requirements_txt_parser as requirements_txt_parser
  from .plugins.pypi.parsers import pyproject_toml_parser as pyproject_toml_parser
+ from .plugins.pypi.parsers import requirements_txt_parser as requirements_txt_parser
  from . import client as client
  from .client.core import namespace as namespace
  from .client.core import get_namespace as get_namespace
@@ -168,163 +168,353 @@ def step(f: typing.Union[typing.Callable[[FlowSpecDerived], None], typing.Callab
  ...

  @typing.overload
- def coreweave_s3_proxy(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def model(*, load: typing.Union[typing.List[str], str, typing.List[typing.Tuple[str, typing.Optional[str]]]] = None, temp_dir_root: str = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- CoreWeave-specific S3 Proxy decorator for routing S3 requests through a local proxy service.
- It exists to make it easier for users to know that this decorator should only be used with
- a Neo Cloud like CoreWeave.
+ Enables loading / saving of models within a step.
+
+ > Examples
+ - Saving Models
+ ```python
+ @model
+ @step
+ def train(self):
+ # current.model.save returns a dictionary reference to the model saved
+ self.my_model = current.model.save(
+ path_to_my_model,
+ label="my_model",
+ metadata={
+ "epochs": 10,
+ "batch-size": 32,
+ "learning-rate": 0.001,
+ }
+ )
+ self.next(self.test)
+
+ @model(load="my_model")
+ @step
+ def test(self):
+ # `current.model.loaded` returns a dictionary of the loaded models
+ # where the key is the name of the artifact and the value is the path to the model
+ print(os.listdir(current.model.loaded["my_model"]))
+ self.next(self.end)
+ ```
+
+ - Loading models
+ ```python
+ @step
+ def train(self):
+ # current.model.load returns the path to the model loaded
+ checkpoint_path = current.model.load(
+ self.checkpoint_key,
+ )
+ model_path = current.model.load(
+ self.model,
+ )
+ self.next(self.test)
+ ```
+
+
+ Parameters
+ ----------
+ load : Union[List[str],str,List[Tuple[str,Union[str,None]]]], default: None
+ Artifact name/s referencing the models/checkpoints to load. Artifact names refer to the names of the instance variables set to `self`.
+ These artifact names give to `load` be reference objects or reference `key` string's from objects created by `current.checkpoint` / `current.model` / `current.huggingface_hub`.
+ If a list of tuples is provided, the first element is the artifact name and the second element is the path the artifact needs be unpacked on
+ the local filesystem. If the second element is None, the artifact will be unpacked in the current working directory.
+ If a string is provided, then the artifact corresponding to that name will be loaded in the current working directory.
+
+ temp_dir_root : str, default: None
+ The root directory under which `current.model.loaded` will store loaded models
  """
  ...

  @typing.overload
- def coreweave_s3_proxy(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def model(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

- def coreweave_s3_proxy(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
- """
- CoreWeave-specific S3 Proxy decorator for routing S3 requests through a local proxy service.
- It exists to make it easier for users to know that this decorator should only be used with
- a Neo Cloud like CoreWeave.
- """
+ @typing.overload
+ def model(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- @typing.overload
- def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def model(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, load: typing.Union[typing.List[str], str, typing.List[typing.Tuple[str, typing.Optional[str]]]] = None, temp_dir_root: str = None):
  """
- Internal decorator to support Fast bakery
+ Enables loading / saving of models within a step.
+
+ > Examples
+ - Saving Models
+ ```python
+ @model
+ @step
+ def train(self):
+ # current.model.save returns a dictionary reference to the model saved
+ self.my_model = current.model.save(
+ path_to_my_model,
+ label="my_model",
+ metadata={
+ "epochs": 10,
+ "batch-size": 32,
+ "learning-rate": 0.001,
+ }
+ )
+ self.next(self.test)
+
+ @model(load="my_model")
+ @step
+ def test(self):
+ # `current.model.loaded` returns a dictionary of the loaded models
+ # where the key is the name of the artifact and the value is the path to the model
+ print(os.listdir(current.model.loaded["my_model"]))
+ self.next(self.end)
+ ```
+
+ - Loading models
+ ```python
+ @step
+ def train(self):
+ # current.model.load returns the path to the model loaded
+ checkpoint_path = current.model.load(
+ self.checkpoint_key,
+ )
+ model_path = current.model.load(
+ self.model,
+ )
+ self.next(self.test)
+ ```
+
+
+ Parameters
+ ----------
+ load : Union[List[str],str,List[Tuple[str,Union[str,None]]]], default: None
+ Artifact name/s referencing the models/checkpoints to load. Artifact names refer to the names of the instance variables set to `self`.
+ These artifact names give to `load` be reference objects or reference `key` string's from objects created by `current.checkpoint` / `current.model` / `current.huggingface_hub`.
+ If a list of tuples is provided, the first element is the artifact name and the second element is the path the artifact needs be unpacked on
+ the local filesystem. If the second element is None, the artifact will be unpacked in the current working directory.
+ If a string is provided, then the artifact corresponding to that name will be loaded in the current working directory.
+
+ temp_dir_root : str, default: None
+ The root directory under which `current.model.loaded` will store loaded models
  """
  ...
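Read as a complete file, the two docstring snippets above combine into a flow like the following minimal sketch. The flow name, the `model.pt` file, and the placeholder bytes are illustrative only; the `@model`, `current.model.save`, and `current.model.loaded` usage follows the docstring added in this release.

```python
# Minimal sketch of a flow using the @model decorator documented above.
from metaflow import FlowSpec, step, current, model
import os

class ModelDemoFlow(FlowSpec):

    @model
    @step
    def start(self):
        # Write a model file locally, then register it; current.model.save
        # returns a reference dict that is stored as a regular flow artifact.
        with open("model.pt", "wb") as f:
            f.write(b"\x00")  # stand-in for real serialized weights
        self.my_model = current.model.save(
            "model.pt",
            label="my_model",
            metadata={"epochs": 10},
        )
        self.next(self.end)

    @model(load="my_model")
    @step
    def end(self):
        # current.model.loaded maps artifact names to local directories.
        print(os.listdir(current.model.loaded["my_model"]))

if __name__ == "__main__":
    ModelDemoFlow()
```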
 
- @typing.overload
- def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def fast_bakery_internal(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
+ def vllm(*, model: str, backend: str, openai_api_server: bool, debug: bool, card_refresh_interval: int, max_retries: int, retry_alert_frequency: int, engine_args: dict) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Internal decorator to support Fast bakery
+ This decorator is used to run vllm APIs as Metaflow task sidecars.
+
+ User code call
+ --------------
+ @vllm(
+ model="...",
+ ...
+ )
+
+ Valid backend options
+ ---------------------
+ - 'local': Run as a separate process on the local task machine.
+
+ Valid model options
+ -------------------
+ Any HuggingFace model identifier, e.g. 'meta-llama/Llama-3.2-1B'
+
+ NOTE: vLLM's OpenAI-compatible server serves ONE model per server instance.
+ If you need multiple models, you must create multiple @vllm decorators.
+
+
+ Parameters
+ ----------
+ model: str
+ HuggingFace model identifier to be served by vLLM.
+ backend: str
+ Determines where and how to run the vLLM process.
+ openai_api_server: bool
+ Whether to use OpenAI-compatible API server mode (subprocess) instead of native engine.
+ Default is False (uses native engine).
+ Set to True for backward compatibility with existing code.
+ debug: bool
+ Whether to turn on verbose debugging logs.
+ card_refresh_interval: int
+ Interval in seconds for refreshing the vLLM status card.
+ Only used when openai_api_server=True.
+ max_retries: int
+ Maximum number of retries checking for vLLM server startup.
+ Only used when openai_api_server=True.
+ retry_alert_frequency: int
+ Frequency of alert logs for vLLM server startup retries.
+ Only used when openai_api_server=True.
+ engine_args : dict
+ Additional keyword arguments to pass to the vLLM engine.
+ For example, `tensor_parallel_size=2`.
  """
  ...
208
349
  @typing.overload
209
- def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
350
+ def nebius_s3_proxy(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
210
351
  """
211
- Decorator prototype for all step decorators. This function gets specialized
212
- and imported for all decorators types by _import_plugin_decorators().
352
+ Nebius-specific S3 Proxy decorator for routing S3 requests through a local proxy service.
353
+ It exists to make it easier for users to know that this decorator should only be used with
354
+ a Neo Cloud like Nebius.
213
355
  """
214
356
  ...
215
357
 
216
358
  @typing.overload
217
- def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
359
+ def nebius_s3_proxy(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
218
360
  ...
219
361
 
220
- def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
362
+ def nebius_s3_proxy(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
221
363
  """
222
- Decorator prototype for all step decorators. This function gets specialized
223
- and imported for all decorators types by _import_plugin_decorators().
364
+ Nebius-specific S3 Proxy decorator for routing S3 requests through a local proxy service.
365
+ It exists to make it easier for users to know that this decorator should only be used with
366
+ a Neo Cloud like Nebius.
224
367
  """
225
368
  ...
226
369
 
227
370
  @typing.overload
228
- def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
371
+ def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
229
372
  """
230
- Specifies the number of times the task corresponding
231
- to a step needs to be retried.
232
-
233
- This decorator is useful for handling transient errors, such as networking issues.
234
- If your task contains operations that can't be retried safely, e.g. database updates,
235
- it is advisable to annotate it with `@retry(times=0)`.
373
+ Specifies the PyPI packages for the step.
236
374
 
237
- This can be used in conjunction with the `@catch` decorator. The `@catch`
238
- decorator will execute a no-op task after all retries have been exhausted,
239
- ensuring that the flow execution can continue.
375
+ Information in this decorator will augment any
376
+ attributes set in the `@pyi_base` flow-level decorator. Hence,
377
+ you can use `@pypi_base` to set packages required by all
378
+ steps and use `@pypi` to specify step-specific overrides.
240
379
 
241
380
 
242
381
  Parameters
243
382
  ----------
244
- times : int, default 3
245
- Number of times to retry this task.
246
- minutes_between_retries : int, default 2
247
- Number of minutes between retries.
383
+ packages : Dict[str, str], default: {}
384
+ Packages to use for this step. The key is the name of the package
385
+ and the value is the version to use.
386
+ python : str, optional, default: None
387
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
388
+ that the version used will correspond to the version of the Python interpreter used to start the run.
248
389
  """
249
390
  ...
250
391
 
251
392
  @typing.overload
252
- def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
393
+ def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
253
394
  ...
254
395
 
255
396
  @typing.overload
256
- def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
397
+ def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
257
398
  ...
258
399
 
259
- def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
400
+ def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
260
401
  """
261
- Specifies the number of times the task corresponding
262
- to a step needs to be retried.
263
-
264
- This decorator is useful for handling transient errors, such as networking issues.
265
- If your task contains operations that can't be retried safely, e.g. database updates,
266
- it is advisable to annotate it with `@retry(times=0)`.
402
+ Specifies the PyPI packages for the step.
267
403
 
268
- This can be used in conjunction with the `@catch` decorator. The `@catch`
269
- decorator will execute a no-op task after all retries have been exhausted,
270
- ensuring that the flow execution can continue.
404
+ Information in this decorator will augment any
405
+ attributes set in the `@pyi_base` flow-level decorator. Hence,
406
+ you can use `@pypi_base` to set packages required by all
407
+ steps and use `@pypi` to specify step-specific overrides.
271
408
 
272
409
 
273
410
  Parameters
274
411
  ----------
275
- times : int, default 3
276
- Number of times to retry this task.
277
- minutes_between_retries : int, default 2
278
- Number of minutes between retries.
279
- """
280
- ...
281
-
282
- @typing.overload
283
- def card(*, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
412
+ packages : Dict[str, str], default: {}
413
+ Packages to use for this step. The key is the name of the package
414
+ and the value is the version to use.
415
+ python : str, optional, default: None
416
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
417
+ that the version used will correspond to the version of the Python interpreter used to start the run.
284
418
  """
285
- Creates a human-readable report, a Metaflow Card, after this step completes.
419
+ ...
420
+
421
+ @typing.overload
422
+ def coreweave_s3_proxy(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
423
+ """
424
+ CoreWeave-specific S3 Proxy decorator for routing S3 requests through a local proxy service.
425
+ It exists to make it easier for users to know that this decorator should only be used with
426
+ a Neo Cloud like CoreWeave.
427
+ """
428
+ ...
429
+
430
+ @typing.overload
431
+ def coreweave_s3_proxy(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
432
+ ...
433
+
434
+ def coreweave_s3_proxy(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
435
+ """
436
+ CoreWeave-specific S3 Proxy decorator for routing S3 requests through a local proxy service.
437
+ It exists to make it easier for users to know that this decorator should only be used with
438
+ a Neo Cloud like CoreWeave.
439
+ """
440
+ ...
441
+
442
+ @typing.overload
443
+ def resources(*, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
444
+ """
445
+ Specifies the resources needed when executing this step.
286
446
 
287
- Note that you may add multiple `@card` decorators in a step with different parameters.
447
+ Use `@resources` to specify the resource requirements
448
+ independently of the specific compute layer (`@batch`, `@kubernetes`).
449
+
450
+ You can choose the compute layer on the command line by executing e.g.
451
+ ```
452
+ python myflow.py run --with batch
453
+ ```
454
+ or
455
+ ```
456
+ python myflow.py run --with kubernetes
457
+ ```
458
+ which executes the flow on the desired system using the
459
+ requirements specified in `@resources`.
288
460
 
289
461
 
290
462
  Parameters
291
463
  ----------
292
- type : str, default 'default'
293
- Card type.
294
- id : str, optional, default None
295
- If multiple cards are present, use this id to identify this card.
296
- options : Dict[str, Any], default {}
297
- Options passed to the card. The contents depend on the card type.
298
- timeout : int, default 45
299
- Interrupt reporting if it takes more than this many seconds.
464
+ cpu : int, default 1
465
+ Number of CPUs required for this step.
466
+ gpu : int, optional, default None
467
+ Number of GPUs required for this step.
468
+ disk : int, optional, default None
469
+ Disk size (in MB) required for this step. Only applies on Kubernetes.
470
+ memory : int, default 4096
471
+ Memory size (in MB) required for this step.
472
+ shared_memory : int, optional, default None
473
+ The value for the size (in MiB) of the /dev/shm volume for this step.
474
+ This parameter maps to the `--shm-size` option in Docker.
300
475
  """
301
476
  ...
302
477
 
303
478
  @typing.overload
304
- def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
479
+ def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
305
480
  ...
306
481
 
307
482
  @typing.overload
308
- def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
483
+ def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
309
484
  ...
310
485
 
311
- def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
486
+ def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
312
487
  """
313
- Creates a human-readable report, a Metaflow Card, after this step completes.
488
+ Specifies the resources needed when executing this step.
314
489
 
315
- Note that you may add multiple `@card` decorators in a step with different parameters.
490
+ Use `@resources` to specify the resource requirements
491
+ independently of the specific compute layer (`@batch`, `@kubernetes`).
492
+
493
+ You can choose the compute layer on the command line by executing e.g.
494
+ ```
495
+ python myflow.py run --with batch
496
+ ```
497
+ or
498
+ ```
499
+ python myflow.py run --with kubernetes
500
+ ```
501
+ which executes the flow on the desired system using the
502
+ requirements specified in `@resources`.
316
503
 
317
504
 
318
505
  Parameters
319
506
  ----------
320
- type : str, default 'default'
321
- Card type.
322
- id : str, optional, default None
323
- If multiple cards are present, use this id to identify this card.
324
- options : Dict[str, Any], default {}
325
- Options passed to the card. The contents depend on the card type.
326
- timeout : int, default 45
327
- Interrupt reporting if it takes more than this many seconds.
507
+ cpu : int, default 1
508
+ Number of CPUs required for this step.
509
+ gpu : int, optional, default None
510
+ Number of GPUs required for this step.
511
+ disk : int, optional, default None
512
+ Disk size (in MB) required for this step. Only applies on Kubernetes.
513
+ memory : int, default 4096
514
+ Memory size (in MB) required for this step.
515
+ shared_memory : int, optional, default None
516
+ The value for the size (in MiB) of the /dev/shm volume for this step.
517
+ This parameter maps to the `--shm-size` option in Docker.
328
518
  """
329
519
  ...
330
520
 
@@ -455,171 +645,147 @@ def huggingface_hub(*, temp_dir_root: typing.Optional[str] = None, cache_scope:
  """
  ...

- def vllm(*, model: str, backend: str, openai_api_server: bool, debug: bool, card_refresh_interval: int, max_retries: int, retry_alert_frequency: int, engine_args: dict) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ @typing.overload
+ def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- This decorator is used to run vllm APIs as Metaflow task sidecars.
-
- User code call
- --------------
- @vllm(
- model="...",
- ...
- )
-
- Valid backend options
- ---------------------
- - 'local': Run as a separate process on the local task machine.
-
- Valid model options
- -------------------
- Any HuggingFace model identifier, e.g. 'meta-llama/Llama-3.2-1B'
-
- NOTE: vLLM's OpenAI-compatible server serves ONE model per server instance.
- If you need multiple models, you must create multiple @vllm decorators.
+ Specifies the number of times the task corresponding
+ to a step needs to be retried.

+ This decorator is useful for handling transient errors, such as networking issues.
+ If your task contains operations that can't be retried safely, e.g. database updates,
+ it is advisable to annotate it with `@retry(times=0)`.

- Parameters
- ----------
- model: str
- HuggingFace model identifier to be served by vLLM.
- backend: str
- Determines where and how to run the vLLM process.
- openai_api_server: bool
- Whether to use OpenAI-compatible API server mode (subprocess) instead of native engine.
- Default is False (uses native engine).
- Set to True for backward compatibility with existing code.
- debug: bool
- Whether to turn on verbose debugging logs.
- card_refresh_interval: int
- Interval in seconds for refreshing the vLLM status card.
- Only used when openai_api_server=True.
- max_retries: int
- Maximum number of retries checking for vLLM server startup.
- Only used when openai_api_server=True.
- retry_alert_frequency: int
- Frequency of alert logs for vLLM server startup retries.
- Only used when openai_api_server=True.
- engine_args : dict
- Additional keyword arguments to pass to the vLLM engine.
- For example, `tensor_parallel_size=2`.
- """
- ...
-
- @typing.overload
- def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Specifies secrets to be retrieved and injected as environment variables prior to
- the execution of a step.
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
+ decorator will execute a no-op task after all retries have been exhausted,
+ ensuring that the flow execution can continue.


  Parameters
  ----------
- sources : List[Union[str, Dict[str, Any]]], default: []
- List of secret specs, defining how the secrets are to be retrieved
- role : str, optional, default: None
- Role to use for fetching secrets
+ times : int, default 3
+ Number of times to retry this task.
+ minutes_between_retries : int, default 2
+ Number of minutes between retries.
  """
  ...

  @typing.overload
- def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None):
+ def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
  """
- Specifies secrets to be retrieved and injected as environment variables prior to
- the execution of a step.
+ Specifies the number of times the task corresponding
+ to a step needs to be retried.

+ This decorator is useful for handling transient errors, such as networking issues.
+ If your task contains operations that can't be retried safely, e.g. database updates,
+ it is advisable to annotate it with `@retry(times=0)`.

- Parameters
- ----------
- sources : List[Union[str, Dict[str, Any]]], default: []
- List of secret specs, defining how the secrets are to be retrieved
- role : str, optional, default: None
- Role to use for fetching secrets
- """
- ...
-
- def s3_proxy(*, integration_name: typing.Optional[str] = None, write_mode: typing.Optional[str] = None, debug: typing.Optional[bool] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- S3 Proxy decorator for routing S3 requests through a local proxy service.
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
+ decorator will execute a no-op task after all retries have been exhausted,
+ ensuring that the flow execution can continue.


  Parameters
  ----------
- integration_name : str, optional
- Name of the S3 proxy integration. If not specified, will use the only
- available S3 proxy integration in the namespace (fails if multiple exist).
- write_mode : str, optional
- The desired behavior during write operations to target (origin) S3 bucket.
- allowed options are:
- "origin-and-cache" -> write to both the target S3 bucket and local object
- storage
- "origin" -> only write to the target S3 bucket
- "cache" -> only write to the object storage service used for caching
- debug : bool, optional
- Enable debug logging for proxy operations.
+ times : int, default 3
+ Number of times to retry this task.
+ minutes_between_retries : int, default 2
+ Number of minutes between retries.
  """
  ...
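A small sketch of the `@retry` / `@catch` interplay described in the docstring above: the flaky step is retried, and if every attempt fails, `@catch` records the exception so the flow can still reach `end`. The flow name and the simulated failure are illustrative.

```python
# Sketch: transient-error handling with @retry backed by @catch.
import random
from metaflow import FlowSpec, step, retry, catch

class FlakyFlow(FlowSpec):

    @catch(var="failure")
    @retry(times=2, minutes_between_retries=0)
    @step
    def start(self):
        self.failure = None
        if random.random() < 0.5:  # stand-in for a transient error
            raise RuntimeError("transient failure")
        self.next(self.end)

    @step
    def end(self):
        # If every retry failed, @catch stored the wrapped exception here.
        if self.failure:
            print("start ultimately failed:", self.failure)

if __name__ == "__main__":
    FlakyFlow()
```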
 
569
- def ollama(*, models: list, backend: str, force_pull: bool, cache_update_policy: str, force_cache_update: bool, debug: bool, circuit_breaker_config: dict, timeout_config: dict) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
703
+ def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = 'KUBERNETES_IMAGE_PULL_POLICY', image_pull_secrets: typing.List[str] = [], service_account: str = 'METAFLOW_KUBERNETES_SERVICE_ACCOUNT', secrets: typing.Optional[typing.List[str]] = None, node_selector: typing.Union[typing.Dict[str, str], str, None] = None, namespace: str = 'METAFLOW_KUBERNETES_NAMESPACE', gpu: typing.Optional[int] = None, gpu_vendor: str = 'KUBERNETES_GPU_VENDOR', tolerations: typing.List[typing.Dict[str, str]] = [], labels: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_LABELS', annotations: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_ANNOTATIONS', use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = '/metaflow_temp', persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None, hostname_resolution_timeout: int = 600, qos: str = 'Burstable', security_context: typing.Optional[typing.Dict[str, typing.Any]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
570
704
  """
571
- This decorator is used to run Ollama APIs as Metaflow task sidecars.
572
-
573
- User code call
574
- --------------
575
- @ollama(
576
- models=[...],
577
- ...
578
- )
579
-
580
- Valid backend options
581
- ---------------------
582
- - 'local': Run as a separate process on the local task machine.
583
- - (TODO) 'managed': Outerbounds hosts and selects compute provider.
584
- - (TODO) 'remote': Spin up separate instance to serve Ollama models.
585
-
586
- Valid model options
587
- -------------------
588
- Any model here https://ollama.com/search, e.g. 'llama3.2', 'llama3.3'
705
+ Specifies that this step should execute on Kubernetes.
589
706
 
590
707
 
591
708
  Parameters
592
709
  ----------
593
- models: list[str]
594
- List of Ollama containers running models in sidecars.
595
- backend: str
596
- Determines where and how to run the Ollama process.
597
- force_pull: bool
598
- Whether to run `ollama pull` no matter what, or first check the remote cache in Metaflow datastore for this model key.
599
- cache_update_policy: str
600
- Cache update policy: "auto", "force", or "never".
601
- force_cache_update: bool
602
- Simple override for "force" cache update policy.
603
- debug: bool
604
- Whether to turn on verbose debugging logs.
605
- circuit_breaker_config: dict
606
- Configuration for circuit breaker protection. Keys: failure_threshold, recovery_timeout, reset_timeout.
607
- timeout_config: dict
608
- Configuration for various operation timeouts. Keys: pull, stop, health_check, install, server_startup.
609
- """
610
- ...
611
-
612
- def nvct(*, gpu: int, gpu_type: str) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
613
- """
614
- Specifies that this step should execute on DGX cloud.
615
-
710
+ cpu : int, default 1
711
+ Number of CPUs required for this step. If `@resources` is
712
+ also present, the maximum value from all decorators is used.
713
+ memory : int, default 4096
714
+ Memory size (in MB) required for this step. If
715
+ `@resources` is also present, the maximum value from all decorators is
716
+ used.
717
+ disk : int, default 10240
718
+ Disk size (in MB) required for this step. If
719
+ `@resources` is also present, the maximum value from all decorators is
720
+ used.
721
+ image : str, optional, default None
722
+ Docker image to use when launching on Kubernetes. If not specified, and
723
+ METAFLOW_KUBERNETES_CONTAINER_IMAGE is specified, that image is used. If
724
+ not, a default Docker image mapping to the current version of Python is used.
725
+ image_pull_policy: str, default KUBERNETES_IMAGE_PULL_POLICY
726
+ If given, the imagePullPolicy to be applied to the Docker image of the step.
727
+ image_pull_secrets: List[str], default []
728
+ The default is extracted from METAFLOW_KUBERNETES_IMAGE_PULL_SECRETS.
729
+ Kubernetes image pull secrets to use when pulling container images
730
+ in Kubernetes.
731
+ service_account : str, default METAFLOW_KUBERNETES_SERVICE_ACCOUNT
732
+ Kubernetes service account to use when launching pod in Kubernetes.
733
+ secrets : List[str], optional, default None
734
+ Kubernetes secrets to use when launching pod in Kubernetes. These
735
+ secrets are in addition to the ones defined in `METAFLOW_KUBERNETES_SECRETS`
736
+ in Metaflow configuration.
737
+ node_selector: Union[Dict[str,str], str], optional, default None
738
+ Kubernetes node selector(s) to apply to the pod running the task.
739
+ Can be passed in as a comma separated string of values e.g.
740
+ 'kubernetes.io/os=linux,kubernetes.io/arch=amd64' or as a dictionary
741
+ {'kubernetes.io/os': 'linux', 'kubernetes.io/arch': 'amd64'}
742
+ namespace : str, default METAFLOW_KUBERNETES_NAMESPACE
743
+ Kubernetes namespace to use when launching pod in Kubernetes.
744
+ gpu : int, optional, default None
745
+ Number of GPUs required for this step. A value of zero implies that
746
+ the scheduled node should not have GPUs.
747
+ gpu_vendor : str, default KUBERNETES_GPU_VENDOR
748
+ The vendor of the GPUs to be used for this step.
749
+ tolerations : List[Dict[str,str]], default []
750
+ The default is extracted from METAFLOW_KUBERNETES_TOLERATIONS.
751
+ Kubernetes tolerations to use when launching pod in Kubernetes.
752
+ labels: Dict[str, str], default: METAFLOW_KUBERNETES_LABELS
753
+ Kubernetes labels to use when launching pod in Kubernetes.
754
+ annotations: Dict[str, str], default: METAFLOW_KUBERNETES_ANNOTATIONS
755
+ Kubernetes annotations to use when launching pod in Kubernetes.
756
+ use_tmpfs : bool, default False
757
+ This enables an explicit tmpfs mount for this step.
758
+ tmpfs_tempdir : bool, default True
759
+ Sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
760
+ tmpfs_size : int, optional, default: None
761
+ The value for the size (in MiB) of the tmpfs mount for this step.
762
+ This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
763
+ memory allocated for this step.
764
+ tmpfs_path : str, optional, default /metaflow_temp
765
+ Path to tmpfs mount for this step.
766
+ persistent_volume_claims : Dict[str, str], optional, default None
767
+ A map (dictionary) of persistent volumes to be mounted to the pod for this step. The map is from persistent
768
+ volumes to the path to which the volume is to be mounted, e.g., `{'pvc-name': '/path/to/mount/on'}`.
769
+ shared_memory: int, optional
770
+ Shared memory size (in MiB) required for this step
771
+ port: int, optional
772
+ Port number to specify in the Kubernetes job object
773
+ compute_pool : str, optional, default None
774
+ Compute pool to be used for this step.
775
+ If not specified, any accessible compute pool within the perimeter is used.
776
+ hostname_resolution_timeout: int, default 10 * 60
777
+ Timeout in seconds for the worker tasks in the gang-scheduled cluster to resolve the hostname of the control task.
778
+ Only applicable when @parallel is used.
779
+ qos: str, default: Burstable
780
+ Quality of Service class to assign to the pod. Supported values are: Guaranteed, Burstable, BestEffort
616
781
 
617
- Parameters
618
- ----------
619
- gpu : int
620
- Number of GPUs to use.
621
- gpu_type : str
622
- Type of Nvidia GPU to use.
782
+ security_context: Dict[str, Any], optional, default None
783
+ Container security context. Applies to the task container. Allows the following keys:
784
+ - privileged: bool, optional, default None
785
+ - allow_privilege_escalation: bool, optional, default None
786
+ - run_as_user: int, optional, default None
787
+ - run_as_group: int, optional, default None
788
+ - run_as_non_root: bool, optional, default None
623
789
  """
624
790
  ...
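As a usage sketch for `@kubernetes` (not part of the stub itself): the flow name and resource values below are illustrative assumptions, and every option not passed explicitly falls back to the documented defaults.

```python
from metaflow import FlowSpec, step, kubernetes

class TrainFlow(FlowSpec):

    # Request 2 CPUs and 8 GB of memory for this step on Kubernetes;
    # all other @kubernetes options keep their documented defaults.
    @kubernetes(cpu=2, memory=8192)
    @step
    def start(self):
        self.message = "ran on Kubernetes"
        self.next(self.end)

    @step
    def end(self):
        print(self.message)

if __name__ == "__main__":
    TrainFlow()
```

Running `python train_flow.py run` would then schedule the decorated step as a pod, assuming Metaflow is configured with a Kubernetes compute backend.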
625
791
 
@@ -642,75 +808,156 @@ def app_deploy(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None
642
808
  """
643
809
  ...
644
810
 
811
+ def s3_proxy(*, integration_name: typing.Optional[str] = None, write_mode: typing.Optional[str] = None, debug: typing.Optional[bool] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
812
+ """
813
+ S3 Proxy decorator for routing S3 requests through a local proxy service.
814
+
815
+
816
+ Parameters
817
+ ----------
818
+ integration_name : str, optional
819
+ Name of the S3 proxy integration. If not specified, the only
820
+ available S3 proxy integration in the namespace is used (this fails if multiple exist).
821
+ write_mode : str, optional
822
+ The desired behavior during write operations to target (origin) S3 bucket.
823
+ Allowed options are:
824
+ "origin-and-cache" -> write to both the target S3 bucket and local object
825
+ storage
826
+ "origin" -> only write to the target S3 bucket
827
+ "cache" -> only write to the object storage service used for caching
828
+ debug : bool, optional
829
+ Enable debug logging for proxy operations.
830
+ """
831
+ ...
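A minimal sketch of how `@s3_proxy` might be applied. The decorator ships with the Outerbounds extensions; its availability as a top-level `metaflow` import and the flow name are assumptions here, while the `write_mode` value comes from the options listed above.

```python
from metaflow import FlowSpec, step, s3_proxy  # import location assumed

class ProxyFlow(FlowSpec):

    # Route this step's S3 traffic through the local proxy service and
    # write to both the origin bucket and the cache ("origin-and-cache").
    # integration_name is omitted, so the single configured integration
    # in the namespace is used.
    @s3_proxy(write_mode="origin-and-cache", debug=True)
    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    ProxyFlow()
```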
832
+
645
833
  @typing.overload
646
- def nebius_s3_proxy(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
834
+ def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
647
835
  """
648
- Nebius-specific S3 Proxy decorator for routing S3 requests through a local proxy service.
649
- It exists to make it easier for users to know that this decorator should only be used with
650
- a Neo Cloud like Nebius.
836
+ Specifies secrets to be retrieved and injected as environment variables prior to
837
+ the execution of a step.
838
+
839
+
840
+ Parameters
841
+ ----------
842
+ sources : List[Union[str, Dict[str, Any]]], default: []
843
+ List of secret specs, defining how the secrets are to be retrieved
844
+ role : str, optional, default: None
845
+ Role to use for fetching secrets
651
846
  """
652
847
  ...
653
848
 
654
849
  @typing.overload
655
- def nebius_s3_proxy(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
850
+ def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
656
851
  ...
657
852
 
658
- def nebius_s3_proxy(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
853
+ @typing.overload
854
+ def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
855
+ ...
856
+
857
+ def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None):
659
858
  """
660
- Nebius-specific S3 Proxy decorator for routing S3 requests through a local proxy service.
661
- It exists to make it easier for users to know that this decorator should only be used with
662
- a Neo Cloud like Nebius.
859
+ Specifies secrets to be retrieved and injected as environment variables prior to
860
+ the execution of a step.
861
+
862
+
863
+ Parameters
864
+ ----------
865
+ sources : List[Union[str, Dict[str, Any]]], default: []
866
+ List of secret specs, defining how the secrets are to be retrieved
867
+ role : str, optional, default: None
868
+ Role to use for fetching secrets
663
869
  """
664
870
  ...
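A short `@secrets` sketch: the secret name and its keys below are placeholders; with the configured secrets backend, the resolved key/value pairs are injected as environment variables before the step body runs.

```python
import os
from metaflow import FlowSpec, step, secrets

class SecretFlow(FlowSpec):

    # "my-db-credentials" is a placeholder secret spec understood by the
    # configured secrets backend.
    @secrets(sources=["my-db-credentials"])
    @step
    def start(self):
        # Assumes the secret defines a DB_PASSWORD key.
        print("DB_PASSWORD present:", "DB_PASSWORD" in os.environ)
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    SecretFlow()
```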
665
871
 
666
872
  @typing.overload
667
- def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
873
+ def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
668
874
  """
669
- Specifies the PyPI packages for the step.
875
+ Specifies environment variables to be set prior to the execution of a step.
670
876
 
671
- Information in this decorator will augment any
672
- attributes set in the `@pypi_base` flow-level decorator. Hence,
673
- you can use `@pypi_base` to set packages required by all
674
- steps and use `@pypi` to specify step-specific overrides.
877
+
878
+ Parameters
879
+ ----------
880
+ vars : Dict[str, str], default {}
881
+ Dictionary of environment variables to set.
882
+ """
883
+ ...
884
+
885
+ @typing.overload
886
+ def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
887
+ ...
888
+
889
+ @typing.overload
890
+ def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
891
+ ...
892
+
893
+ def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
894
+ """
895
+ Specifies environment variables to be set prior to the execution of a step.
675
896
 
676
897
 
677
898
  Parameters
678
899
  ----------
679
- packages : Dict[str, str], default: {}
680
- Packages to use for this step. The key is the name of the package
681
- and the value is the version to use.
682
- python : str, optional, default: None
683
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
684
- that the version used will correspond to the version of the Python interpreter used to start the run.
900
+ vars : Dict[str, str], default {}
901
+ Dictionary of environment variables to set.
685
902
  """
686
903
  ...
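For illustration, a sketch of `@environment`; the variable names and values are arbitrary.

```python
import os
from metaflow import FlowSpec, step, environment

class EnvFlow(FlowSpec):

    # Both variables are set in the task's environment before user code runs.
    @environment(vars={"MY_FLAG": "1", "LOG_LEVEL": "debug"})
    @step
    def start(self):
        print(os.environ["MY_FLAG"], os.environ["LOG_LEVEL"])
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    EnvFlow()
```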
687
904
 
688
905
  @typing.overload
689
- def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
906
+ def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
907
+ """
908
+ Specifies a timeout for your step.
909
+
910
+ This decorator is useful if this step may hang indefinitely.
911
+
912
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
913
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
914
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.
915
+
916
+ Note that all the values specified in parameters are added together so if you specify
917
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
918
+
919
+
920
+ Parameters
921
+ ----------
922
+ seconds : int, default 0
923
+ Number of seconds to wait prior to timing out.
924
+ minutes : int, default 0
925
+ Number of minutes to wait prior to timing out.
926
+ hours : int, default 0
927
+ Number of hours to wait prior to timing out.
928
+ """
690
929
  ...
691
930
 
692
931
  @typing.overload
693
- def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
932
+ def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
694
933
  ...
695
934
 
696
- def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
935
+ @typing.overload
936
+ def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
937
+ ...
938
+
939
+ def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
697
940
  """
698
- Specifies the PyPI packages for the step.
941
+ Specifies a timeout for your step.
699
942
 
700
- Information in this decorator will augment any
701
- attributes set in the `@pypi_base` flow-level decorator. Hence,
702
- you can use `@pypi_base` to set packages required by all
703
- steps and use `@pypi` to specify step-specific overrides.
943
+ This decorator is useful if this step may hang indefinitely.
944
+
945
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
946
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
947
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.
948
+
949
+ Note that all the values specified in parameters are added together so if you specify
950
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
704
951
 
705
952
 
706
953
  Parameters
707
954
  ----------
708
- packages : Dict[str, str], default: {}
709
- Packages to use for this step. The key is the name of the package
710
- and the value is the version to use.
711
- python : str, optional, default: None
712
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
713
- that the version used will correspond to the version of the Python interpreter used to start the run.
955
+ seconds : int, default 0
956
+ Number of seconds to wait prior to timing out.
957
+ minutes : int, default 0
958
+ Number of minutes to wait prior to timing out.
959
+ hours : int, default 0
960
+ Number of hours to wait prior to timing out.
714
961
  """
715
962
  ...
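A sketch combining `@timeout` with `@retry` and `@catch` as described above; the durations and the artifact name are illustrative. Since the values are additive, this step gets a 1 hour 30 minute budget.

```python
from metaflow import FlowSpec, step, timeout, retry, catch

class TimeoutFlow(FlowSpec):

    # Effective limit = hours + minutes + seconds = 1h30m. A timeout raises
    # inside the step, @retry re-runs it up to 2 more times, and @catch
    # records the final exception in self.train_error instead of failing.
    @catch(var="train_error")
    @retry(times=2)
    @timeout(hours=1, minutes=30)
    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    TimeoutFlow()
```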
716
963
 
@@ -862,61 +1109,53 @@ def checkpoint(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None
862
1109
  ...
863
1110
 
864
1111
  @typing.overload
865
- def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1112
+ def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
866
1113
  """
867
- Specifies a timeout for your step.
1114
+ Specifies that the step will succeed under all circumstances.
868
1115
 
869
- This decorator is useful if this step may hang indefinitely.
870
-
871
- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
872
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
873
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
874
-
875
- Note that all the values specified in parameters are added together so if you specify
876
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
1116
+ The decorator will create an optional artifact, specified by `var`, which
1117
+ contains the exception raised. You can use it to detect the presence
1118
+ of errors, indicating that all happy-path artifacts produced by the step
1119
+ are missing.
877
1120
 
878
1121
 
879
1122
  Parameters
880
1123
  ----------
881
- seconds : int, default 0
882
- Number of seconds to wait prior to timing out.
883
- minutes : int, default 0
884
- Number of minutes to wait prior to timing out.
885
- hours : int, default 0
886
- Number of hours to wait prior to timing out.
1124
+ var : str, optional, default None
1125
+ Name of the artifact in which to store the caught exception.
1126
+ If not specified, the exception is not stored.
1127
+ print_exception : bool, default True
1128
+ Determines whether or not the exception is printed to
1129
+ stdout when caught.
887
1130
  """
888
1131
  ...
889
1132
 
890
1133
  @typing.overload
891
- def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1134
+ def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
892
1135
  ...
893
1136
 
894
1137
  @typing.overload
895
- def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1138
+ def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
896
1139
  ...
897
1140
 
898
- def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
1141
+ def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
899
1142
  """
900
- Specifies a timeout for your step.
901
-
902
- This decorator is useful if this step may hang indefinitely.
903
-
904
- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
905
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
906
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
1143
+ Specifies that the step will succeed under all circumstances.
907
1144
 
908
- Note that all the values specified in parameters are added together so if you specify
909
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
1145
+ The decorator will create an optional artifact, specified by `var`, which
1146
+ contains the exception raised. You can use it to detect the presence
1147
+ of errors, indicating that all happy-path artifacts produced by the step
1148
+ are missing.
910
1149
 
911
1150
 
912
1151
  Parameters
913
1152
  ----------
914
- seconds : int, default 0
915
- Number of seconds to wait prior to timing out.
916
- minutes : int, default 0
917
- Number of minutes to wait prior to timing out.
918
- hours : int, default 0
919
- Number of hours to wait prior to timing out.
1153
+ var : str, optional, default None
1154
+ Name of the artifact in which to store the caught exception.
1155
+ If not specified, the exception is not stored.
1156
+ print_exception : bool, default True
1157
+ Determines whether or not the exception is printed to
1158
+ stdout when caught.
920
1159
  """
921
1160
  ...
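A sketch of the `@catch` pattern: the artifact name is arbitrary, and the downstream step checks it to detect that the happy-path artifacts are missing.

```python
from metaflow import FlowSpec, step, catch

class CatchFlow(FlowSpec):

    # If the division fails, the exception is stored in self.compute_error
    # and the step still counts as successful.
    @catch(var="compute_error", print_exception=True)
    @step
    def start(self):
        self.result = 1 / 0  # deliberately raises
        self.next(self.end)

    @step
    def end(self):
        if getattr(self, "compute_error", None):
            print("start failed:", self.compute_error)
        else:
            print("result:", self.result)

if __name__ == "__main__":
    CatchFlow()
```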
922
1161
 
@@ -940,600 +1179,318 @@ def test_append_card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag]
940
1179
  ...
941
1180
 
942
1181
  @typing.overload
943
- def resources(*, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1182
+ def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
944
1183
  """
945
- Specifies the resources needed when executing this step.
946
-
947
- Use `@resources` to specify the resource requirements
948
- independently of the specific compute layer (`@batch`, `@kubernetes`).
949
-
950
- You can choose the compute layer on the command line by executing e.g.
951
- ```
952
- python myflow.py run --with batch
953
- ```
954
- or
955
- ```
956
- python myflow.py run --with kubernetes
957
- ```
958
- which executes the flow on the desired system using the
959
- requirements specified in `@resources`.
960
-
961
-
962
- Parameters
963
- ----------
964
- cpu : int, default 1
965
- Number of CPUs required for this step.
966
- gpu : int, optional, default None
967
- Number of GPUs required for this step.
968
- disk : int, optional, default None
969
- Disk size (in MB) required for this step. Only applies on Kubernetes.
970
- memory : int, default 4096
971
- Memory size (in MB) required for this step.
972
- shared_memory : int, optional, default None
973
- The value for the size (in MiB) of the /dev/shm volume for this step.
974
- This parameter maps to the `--shm-size` option in Docker.
1184
+ Internal decorator to support Fast bakery
975
1185
  """
976
1186
  ...
977
1187
 
978
1188
  @typing.overload
979
- def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
980
- ...
981
-
982
- @typing.overload
983
- def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1189
+ def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
984
1190
  ...
985
1191
 
986
- def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
1192
+ def fast_bakery_internal(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
987
1193
  """
988
- Specifies the resources needed when executing this step.
989
-
990
- Use `@resources` to specify the resource requirements
991
- independently of the specific compute layer (`@batch`, `@kubernetes`).
992
-
993
- You can choose the compute layer on the command line by executing e.g.
994
- ```
995
- python myflow.py run --with batch
996
- ```
997
- or
998
- ```
999
- python myflow.py run --with kubernetes
1000
- ```
1001
- which executes the flow on the desired system using the
1002
- requirements specified in `@resources`.
1003
-
1004
-
1005
- Parameters
1006
- ----------
1007
- cpu : int, default 1
1008
- Number of CPUs required for this step.
1009
- gpu : int, optional, default None
1010
- Number of GPUs required for this step.
1011
- disk : int, optional, default None
1012
- Disk size (in MB) required for this step. Only applies on Kubernetes.
1013
- memory : int, default 4096
1014
- Memory size (in MB) required for this step.
1015
- shared_memory : int, optional, default None
1016
- The value for the size (in MiB) of the /dev/shm volume for this step.
1017
- This parameter maps to the `--shm-size` option in Docker.
1194
+ Internal decorator to support Fast bakery
1018
1195
  """
1019
1196
  ...
1020
1197
 
1021
1198
  @typing.overload
1022
- def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1199
+ def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1023
1200
  """
1024
- Specifies that the step will succeed under all circumstances.
1201
+ Specifies the Conda environment for the step.
1025
1202
 
1026
- The decorator will create an optional artifact, specified by `var`, which
1027
- contains the exception raised. You can use it to detect the presence
1028
- of errors, indicating that all happy-path artifacts produced by the step
1029
- are missing.
1203
+ Information in this decorator will augment any
1204
+ attributes set in the `@conda_base` flow-level decorator. Hence,
1205
+ you can use `@conda_base` to set packages required by all
1206
+ steps and use `@conda` to specify step-specific overrides.
1030
1207
 
1031
1208
 
1032
1209
  Parameters
1033
1210
  ----------
1034
- var : str, optional, default None
1035
- Name of the artifact in which to store the caught exception.
1036
- If not specified, the exception is not stored.
1037
- print_exception : bool, default True
1038
- Determines whether or not the exception is printed to
1039
- stdout when caught.
1211
+ packages : Dict[str, str], default {}
1212
+ Packages to use for this step. The key is the name of the package
1213
+ and the value is the version to use.
1214
+ libraries : Dict[str, str], default {}
1215
+ Supported for backward compatibility. When used with packages, packages will take precedence.
1216
+ python : str, optional, default None
1217
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1218
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1219
+ disabled : bool, default False
1220
+ If set to True, disables @conda.
1040
1221
  """
1041
1222
  ...
1042
1223
 
1043
1224
  @typing.overload
1044
- def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1225
+ def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1045
1226
  ...
1046
1227
 
1047
1228
  @typing.overload
1048
- def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1049
- ...
1050
-
1051
- def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
1052
- """
1053
- Specifies that the step will succeed under all circumstances.
1054
-
1055
- The decorator will create an optional artifact, specified by `var`, which
1056
- contains the exception raised. You can use it to detect the presence
1057
- of errors, indicating that all happy-path artifacts produced by the step
1058
- are missing.
1059
-
1060
-
1061
- Parameters
1062
- ----------
1063
- var : str, optional, default None
1064
- Name of the artifact in which to store the caught exception.
1065
- If not specified, the exception is not stored.
1066
- print_exception : bool, default True
1067
- Determines whether or not the exception is printed to
1068
- stdout when caught.
1069
- """
1070
- ...
1071
-
1072
- def nvidia(*, gpu: int, gpu_type: str, queue_timeout: int) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1073
- """
1074
- Specifies that this step should execute on DGX cloud.
1075
-
1076
-
1077
- Parameters
1078
- ----------
1079
- gpu : int
1080
- Number of GPUs to use.
1081
- gpu_type : str
1082
- Type of Nvidia GPU to use.
1083
- queue_timeout : int
1084
- Time to keep the job in NVCF's queue.
1085
- """
1229
+ def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1086
1230
  ...
1087
1231
 
1088
- @typing.overload
1089
- def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1232
+ def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
1090
1233
  """
1091
- Specifies environment variables to be set prior to the execution of a step.
1092
-
1234
+ Specifies the Conda environment for the step.
1093
1235
 
1094
- Parameters
1095
- ----------
1096
- vars : Dict[str, str], default {}
1097
- Dictionary of environment variables to set.
1098
- """
1099
- ...
1100
-
1101
- @typing.overload
1102
- def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1103
- ...
1104
-
1105
- @typing.overload
1106
- def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1107
- ...
1108
-
1109
- def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
1110
- """
1111
- Specifies environment variables to be set prior to the execution of a step.
1236
+ Information in this decorator will augment any
1237
+ attributes set in the `@conda_base` flow-level decorator. Hence,
1238
+ you can use `@conda_base` to set packages required by all
1239
+ steps and use `@conda` to specify step-specific overrides.
1112
1240
 
1113
1241
 
1114
1242
  Parameters
1115
1243
  ----------
1116
- vars : Dict[str, str], default {}
1117
- Dictionary of environment variables to set.
1244
+ packages : Dict[str, str], default {}
1245
+ Packages to use for this step. The key is the name of the package
1246
+ and the value is the version to use.
1247
+ libraries : Dict[str, str], default {}
1248
+ Supported for backward compatibility. When used with packages, packages will take precedence.
1249
+ python : str, optional, default None
1250
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1251
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1252
+ disabled : bool, default False
1253
+ If set to True, disables @conda.
1118
1254
  """
1119
1255
  ...
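A sketch of the `@conda_base` / `@conda` split described above; the package pins and Python version are illustrative only.

```python
from metaflow import FlowSpec, step, conda, conda_base

# Flow-wide packages; the step-level @conda below layers an extra,
# step-specific dependency on top of them.
@conda_base(python="3.11.0", packages={"pandas": "2.2.2"})
class CondaFlow(FlowSpec):

    @conda(packages={"scikit-learn": "1.5.0"})
    @step
    def start(self):
        import sklearn  # resolved inside this step's Conda environment
        print(sklearn.__version__)
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    CondaFlow()
```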
1120
1256
 
1121
1257
  @typing.overload
1122
- def model(*, load: typing.Union[typing.List[str], str, typing.List[typing.Tuple[str, typing.Optional[str]]]] = None, temp_dir_root: str = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1258
+ def card(*, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1123
1259
  """
1124
- Enables loading / saving of models within a step.
1125
-
1126
- > Examples
1127
- - Saving Models
1128
- ```python
1129
- @model
1130
- @step
1131
- def train(self):
1132
- # current.model.save returns a dictionary reference to the model saved
1133
- self.my_model = current.model.save(
1134
- path_to_my_model,
1135
- label="my_model",
1136
- metadata={
1137
- "epochs": 10,
1138
- "batch-size": 32,
1139
- "learning-rate": 0.001,
1140
- }
1141
- )
1142
- self.next(self.test)
1143
-
1144
- @model(load="my_model")
1145
- @step
1146
- def test(self):
1147
- # `current.model.loaded` returns a dictionary of the loaded models
1148
- # where the key is the name of the artifact and the value is the path to the model
1149
- print(os.listdir(current.model.loaded["my_model"]))
1150
- self.next(self.end)
1151
- ```
1260
+ Creates a human-readable report, a Metaflow Card, after this step completes.
1152
1261
 
1153
- - Loading models
1154
- ```python
1155
- @step
1156
- def train(self):
1157
- # current.model.load returns the path to the model loaded
1158
- checkpoint_path = current.model.load(
1159
- self.checkpoint_key,
1160
- )
1161
- model_path = current.model.load(
1162
- self.model,
1163
- )
1164
- self.next(self.test)
1165
- ```
1262
+ Note that you may add multiple `@card` decorators in a step with different parameters.
1166
1263
 
1167
1264
 
1168
1265
  Parameters
1169
1266
  ----------
1170
- load : Union[List[str],str,List[Tuple[str,Union[str,None]]]], default: None
1171
- Artifact name/s referencing the models/checkpoints to load. Artifact names refer to the names of the instance variables set to `self`.
1172
- The artifact names given to `load` can be reference objects or reference `key` strings from objects created by `current.checkpoint` / `current.model` / `current.huggingface_hub`.
1173
- If a list of tuples is provided, the first element is the artifact name and the second element is the path where the artifact needs to be unpacked on
1174
- the local filesystem. If the second element is None, the artifact will be unpacked in the current working directory.
1175
- If a string is provided, then the artifact corresponding to that name will be loaded in the current working directory.
1176
-
1177
- temp_dir_root : str, default: None
1178
- The root directory under which `current.model.loaded` will store loaded models
1267
+ type : str, default 'default'
1268
+ Card type.
1269
+ id : str, optional, default None
1270
+ If multiple cards are present, use this id to identify this card.
1271
+ options : Dict[str, Any], default {}
1272
+ Options passed to the card. The contents depend on the card type.
1273
+ timeout : int, default 45
1274
+ Interrupt reporting if it takes more than this many seconds.
1179
1275
  """
1180
1276
  ...
1181
1277
 
1182
1278
  @typing.overload
1183
- def model(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1279
+ def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1184
1280
  ...
1185
1281
 
1186
1282
  @typing.overload
1187
- def model(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1283
+ def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1188
1284
  ...
1189
1285
 
1190
- def model(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, load: typing.Union[typing.List[str], str, typing.List[typing.Tuple[str, typing.Optional[str]]]] = None, temp_dir_root: str = None):
1286
+ def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
1191
1287
  """
1192
- Enables loading / saving of models within a step.
1193
-
1194
- > Examples
1195
- - Saving Models
1196
- ```python
1197
- @model
1198
- @step
1199
- def train(self):
1200
- # current.model.save returns a dictionary reference to the model saved
1201
- self.my_model = current.model.save(
1202
- path_to_my_model,
1203
- label="my_model",
1204
- metadata={
1205
- "epochs": 10,
1206
- "batch-size": 32,
1207
- "learning-rate": 0.001,
1208
- }
1209
- )
1210
- self.next(self.test)
1211
-
1212
- @model(load="my_model")
1213
- @step
1214
- def test(self):
1215
- # `current.model.loaded` returns a dictionary of the loaded models
1216
- # where the key is the name of the artifact and the value is the path to the model
1217
- print(os.listdir(current.model.loaded["my_model"]))
1218
- self.next(self.end)
1219
- ```
1288
+ Creates a human-readable report, a Metaflow Card, after this step completes.
1220
1289
 
1221
- - Loading models
1222
- ```python
1223
- @step
1224
- def train(self):
1225
- # current.model.load returns the path to the model loaded
1226
- checkpoint_path = current.model.load(
1227
- self.checkpoint_key,
1228
- )
1229
- model_path = current.model.load(
1230
- self.model,
1231
- )
1232
- self.next(self.test)
1233
- ```
1290
+ Note that you may add multiple `@card` decorators in a step with different parameters.
1234
1291
 
1235
1292
 
1236
1293
  Parameters
1237
1294
  ----------
1238
- load : Union[List[str],str,List[Tuple[str,Union[str,None]]]], default: None
1239
- Artifact name/s referencing the models/checkpoints to load. Artifact names refer to the names of the instance variables set to `self`.
1240
- The artifact names given to `load` can be reference objects or reference `key` strings from objects created by `current.checkpoint` / `current.model` / `current.huggingface_hub`.
1241
- If a list of tuples is provided, the first element is the artifact name and the second element is the path where the artifact needs to be unpacked on
1242
- the local filesystem. If the second element is None, the artifact will be unpacked in the current working directory.
1243
- If a string is provided, then the artifact corresponding to that name will be loaded in the current working directory.
1244
-
1245
- temp_dir_root : str, default: None
1246
- The root directory under which `current.model.loaded` will store loaded models
1295
+ type : str, default 'default'
1296
+ Card type.
1297
+ id : str, optional, default None
1298
+ If multiple cards are present, use this id to identify this card.
1299
+ options : Dict[str, Any], default {}
1300
+ Options passed to the card. The contents depend on the card type.
1301
+ timeout : int, default 45
1302
+ Interrupt reporting if it takes more than this many seconds.
1247
1303
  """
1248
1304
  ...
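A sketch of `@card` with the default card type; appending a component through `current.card` is standard Metaflow card usage, while the metric value is invented for the example.

```python
from metaflow import FlowSpec, step, card, current
from metaflow.cards import Markdown

class CardFlow(FlowSpec):

    # Renders the default card for this task; appended components are
    # included in the same report. Reporting is cut off after 60 seconds.
    @card(type="default", timeout=60)
    @step
    def start(self):
        self.accuracy = 0.93  # illustrative metric
        current.card.append(Markdown("# Training summary"))
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    CardFlow()
```

The rendered card can then be inspected with Metaflow's card CLI or UI.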
1249
1305
 
1250
- def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = 'KUBERNETES_IMAGE_PULL_POLICY', image_pull_secrets: typing.List[str] = [], service_account: str = 'METAFLOW_KUBERNETES_SERVICE_ACCOUNT', secrets: typing.Optional[typing.List[str]] = None, node_selector: typing.Union[typing.Dict[str, str], str, None] = None, namespace: str = 'METAFLOW_KUBERNETES_NAMESPACE', gpu: typing.Optional[int] = None, gpu_vendor: str = 'KUBERNETES_GPU_VENDOR', tolerations: typing.List[typing.Dict[str, str]] = [], labels: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_LABELS', annotations: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_ANNOTATIONS', use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = '/metaflow_temp', persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None, hostname_resolution_timeout: int = 600, qos: str = 'Burstable', security_context: typing.Optional[typing.Dict[str, typing.Any]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1306
+ def nvct(*, gpu: int, gpu_type: str) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1251
1307
  """
1252
- Specifies that this step should execute on Kubernetes.
1308
+ Specifies that this step should execute on DGX cloud.
1253
1309
 
1254
1310
 
1255
1311
  Parameters
1256
1312
  ----------
1257
- cpu : int, default 1
1258
- Number of CPUs required for this step. If `@resources` is
1259
- also present, the maximum value from all decorators is used.
1260
- memory : int, default 4096
1261
- Memory size (in MB) required for this step. If
1262
- `@resources` is also present, the maximum value from all decorators is
1263
- used.
1264
- disk : int, default 10240
1265
- Disk size (in MB) required for this step. If
1266
- `@resources` is also present, the maximum value from all decorators is
1267
- used.
1268
- image : str, optional, default None
1269
- Docker image to use when launching on Kubernetes. If not specified, and
1270
- METAFLOW_KUBERNETES_CONTAINER_IMAGE is specified, that image is used. If
1271
- not, a default Docker image mapping to the current version of Python is used.
1272
- image_pull_policy: str, default KUBERNETES_IMAGE_PULL_POLICY
1273
- If given, the imagePullPolicy to be applied to the Docker image of the step.
1274
- image_pull_secrets: List[str], default []
1275
- The default is extracted from METAFLOW_KUBERNETES_IMAGE_PULL_SECRETS.
1276
- Kubernetes image pull secrets to use when pulling container images
1277
- in Kubernetes.
1278
- service_account : str, default METAFLOW_KUBERNETES_SERVICE_ACCOUNT
1279
- Kubernetes service account to use when launching pod in Kubernetes.
1280
- secrets : List[str], optional, default None
1281
- Kubernetes secrets to use when launching pod in Kubernetes. These
1282
- secrets are in addition to the ones defined in `METAFLOW_KUBERNETES_SECRETS`
1283
- in Metaflow configuration.
1284
- node_selector: Union[Dict[str,str], str], optional, default None
1285
- Kubernetes node selector(s) to apply to the pod running the task.
1286
- Can be passed in as a comma separated string of values e.g.
1287
- 'kubernetes.io/os=linux,kubernetes.io/arch=amd64' or as a dictionary
1288
- {'kubernetes.io/os': 'linux', 'kubernetes.io/arch': 'amd64'}
1289
- namespace : str, default METAFLOW_KUBERNETES_NAMESPACE
1290
- Kubernetes namespace to use when launching pod in Kubernetes.
1291
- gpu : int, optional, default None
1292
- Number of GPUs required for this step. A value of zero implies that
1293
- the scheduled node should not have GPUs.
1294
- gpu_vendor : str, default KUBERNETES_GPU_VENDOR
1295
- The vendor of the GPUs to be used for this step.
1296
- tolerations : List[Dict[str,str]], default []
1297
- The default is extracted from METAFLOW_KUBERNETES_TOLERATIONS.
1298
- Kubernetes tolerations to use when launching pod in Kubernetes.
1299
- labels: Dict[str, str], default: METAFLOW_KUBERNETES_LABELS
1300
- Kubernetes labels to use when launching pod in Kubernetes.
1301
- annotations: Dict[str, str], default: METAFLOW_KUBERNETES_ANNOTATIONS
1302
- Kubernetes annotations to use when launching pod in Kubernetes.
1303
- use_tmpfs : bool, default False
1304
- This enables an explicit tmpfs mount for this step.
1305
- tmpfs_tempdir : bool, default True
1306
- sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
1307
- tmpfs_size : int, optional, default: None
1308
- The value for the size (in MiB) of the tmpfs mount for this step.
1309
- This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
1310
- memory allocated for this step.
1311
- tmpfs_path : str, optional, default /metaflow_temp
1312
- Path to tmpfs mount for this step.
1313
- persistent_volume_claims : Dict[str, str], optional, default None
1314
- A map (dictionary) of persistent volumes to be mounted to the pod for this step. The map is from persistent
1315
- volumes to the path to which the volume is to be mounted, e.g., `{'pvc-name': '/path/to/mount/on'}`.
1316
- shared_memory: int, optional
1317
- Shared memory size (in MiB) required for this step
1318
- port: int, optional
1319
- Port number to specify in the Kubernetes job object
1320
- compute_pool : str, optional, default None
1321
- Compute pool to be used for for this step.
1322
- If not specified, any accessible compute pool within the perimeter is used.
1323
- hostname_resolution_timeout: int, default 10 * 60
1324
- Timeout in seconds for the workers tasks in the gang scheduled cluster to resolve the hostname of control task.
1325
- Only applicable when @parallel is used.
1326
- qos: str, default: Burstable
1327
- Quality of Service class to assign to the pod. Supported values are: Guaranteed, Burstable, BestEffort
1328
-
1329
- security_context: Dict[str, Any], optional, default None
1330
- Container security context. Applies to the task container. Allows the following keys:
1331
- - privileged: bool, optional, default None
1332
- - allow_privilege_escalation: bool, optional, default None
1333
- - run_as_user: int, optional, default None
1334
- - run_as_group: int, optional, default None
1335
- - run_as_non_root: bool, optional, default None
1313
+ gpu : int
1314
+ Number of GPUs to use.
1315
+ gpu_type : str
1316
+ Type of Nvidia GPU to use.
1336
1317
  """
1337
1318
  ...
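A sketch of `@nvct`; like the other Outerbounds decorators in this stub, it is assumed to be importable from the top-level `metaflow` namespace, and the `gpu_type` string is a placeholder for whatever the DGX Cloud account exposes.

```python
from metaflow import FlowSpec, step, nvct  # import location assumed

class DgxFlow(FlowSpec):

    # "H100" is an illustrative gpu_type; use a type available to your account.
    @nvct(gpu=1, gpu_type="H100")
    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    DgxFlow()
```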
1338
1319
 
1339
- @typing.overload
1340
- def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1320
+ def nvidia(*, gpu: int, gpu_type: str, queue_timeout: int) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1341
1321
  """
1342
- Specifies the Conda environment for the step.
1343
-
1344
- Information in this decorator will augment any
1345
- attributes set in the `@conda_base` flow-level decorator. Hence,
1346
- you can use `@conda_base` to set packages required by all
1347
- steps and use `@conda` to specify step-specific overrides.
1322
+ Specifies that this step should execute on DGX cloud.
1348
1323
 
1349
1324
 
1350
1325
  Parameters
1351
1326
  ----------
1352
- packages : Dict[str, str], default {}
1353
- Packages to use for this step. The key is the name of the package
1354
- and the value is the version to use.
1355
- libraries : Dict[str, str], default {}
1356
- Supported for backward compatibility. When used with packages, packages will take precedence.
1357
- python : str, optional, default None
1358
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1359
- that the version used will correspond to the version of the Python interpreter used to start the run.
1360
- disabled : bool, default False
1361
- If set to True, disables @conda.
1327
+ gpu : int
1328
+ Number of GPUs to use.
1329
+ gpu_type : str
1330
+ Type of Nvidia GPU to use.
1331
+ queue_timeout : int
1332
+ Time to keep the job in NVCF's queue.
1362
1333
  """
1363
1334
  ...
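`@nvidia` takes the same GPU arguments plus `queue_timeout`; a sketch follows. The stub does not state the timeout unit, so the value below (treated as seconds) and the `gpu_type` are assumptions.

```python
from metaflow import FlowSpec, step, nvidia  # import location assumed

class NvcfFlow(FlowSpec):

    # Keep the job queued on NVCF for up to queue_timeout (assumed to be
    # seconds here) before giving up.
    @nvidia(gpu=1, gpu_type="A100", queue_timeout=3600)
    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    NvcfFlow()
```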
1364
1335
 
1365
- @typing.overload
1366
- def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1367
- ...
1368
-
1369
- @typing.overload
1370
- def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1371
- ...
1372
-
1373
- def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
1336
+ def ollama(*, models: list, backend: str, force_pull: bool, cache_update_policy: str, force_cache_update: bool, debug: bool, circuit_breaker_config: dict, timeout_config: dict) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1374
1337
  """
1375
- Specifies the Conda environment for the step.
1338
+ This decorator is used to run Ollama APIs as Metaflow task sidecars.
1376
1339
 
1377
- Information in this decorator will augment any
1378
- attributes set in the `@conda_base` flow-level decorator. Hence,
1379
- you can use `@conda_base` to set packages required by all
1380
- steps and use `@conda` to specify step-specific overrides.
1340
+ User code call
1341
+ --------------
1342
+ @ollama(
1343
+ models=[...],
1344
+ ...
1345
+ )
1346
+
1347
+ Valid backend options
1348
+ ---------------------
1349
+ - 'local': Run as a separate process on the local task machine.
1350
+ - (TODO) 'managed': Outerbounds hosts and selects compute provider.
1351
+ - (TODO) 'remote': Spin up separate instance to serve Ollama models.
1352
+
1353
+ Valid model options
1354
+ -------------------
1355
+ Any model here https://ollama.com/search, e.g. 'llama3.2', 'llama3.3'
1381
1356
 
1382
1357
 
1383
1358
  Parameters
1384
1359
  ----------
1385
- packages : Dict[str, str], default {}
1386
- Packages to use for this step. The key is the name of the package
1387
- and the value is the version to use.
1388
- libraries : Dict[str, str], default {}
1389
- Supported for backward compatibility. When used with packages, packages will take precedence.
1390
- python : str, optional, default None
1391
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1392
- that the version used will correspond to the version of the Python interpreter used to start the run.
1393
- disabled : bool, default False
1394
- If set to True, disables @conda.
1360
+ models: list[str]
1361
+ List of Ollama containers running models in sidecars.
1362
+ backend: str
1363
+ Determines where and how to run the Ollama process.
1364
+ force_pull: bool
1365
+ Whether to always run `ollama pull`, or to first check the remote cache in the Metaflow datastore for this model key.
1366
+ cache_update_policy: str
1367
+ Cache update policy: "auto", "force", or "never".
1368
+ force_cache_update: bool
1369
+ Simple override for "force" cache update policy.
1370
+ debug: bool
1371
+ Whether to turn on verbose debugging logs.
1372
+ circuit_breaker_config: dict
1373
+ Configuration for circuit breaker protection. Keys: failure_threshold, recovery_timeout, reset_timeout.
1374
+ timeout_config: dict
1375
+ Configuration for various operation timeouts. Keys: pull, stop, health_check, install, server_startup.
1395
1376
  """
1396
1377
  ...
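A sketch of `@ollama` with the 'local' backend. It assumes the remaining parameters have workable defaults, that `requests` is available in the task image, and that the sidecar serves the standard Ollama HTTP API on its default port (11434); none of that is stated by the stub itself.

```python
import requests  # assumed to be available in the task image
from metaflow import FlowSpec, step, ollama  # import location assumed

class OllamaFlow(FlowSpec):

    # Starts an Ollama sidecar for llama3.2 next to this task; the model is
    # pulled or restored from the datastore cache per the cache policy.
    @ollama(models=["llama3.2"], backend="local")
    @step
    def start(self):
        resp = requests.post(
            "http://localhost:11434/api/generate",  # default Ollama port (assumption)
            json={"model": "llama3.2", "prompt": "Say hello", "stream": False},
        )
        print(resp.json().get("response"))
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    OllamaFlow()
```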
1397
1378
 
1398
1379
  @typing.overload
1399
- def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1380
+ def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1400
1381
  """
1401
- Specifies the Conda environment for all steps of the flow.
1402
-
1403
- Use `@conda_base` to set common libraries required by all
1404
- steps and use `@conda` to specify step-specific additions.
1405
-
1406
-
1407
- Parameters
1408
- ----------
1409
- packages : Dict[str, str], default {}
1410
- Packages to use for this flow. The key is the name of the package
1411
- and the value is the version to use.
1412
- libraries : Dict[str, str], default {}
1413
- Supported for backward compatibility. When used with packages, packages will take precedence.
1414
- python : str, optional, default None
1415
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1416
- that the version used will correspond to the version of the Python interpreter used to start the run.
1417
- disabled : bool, default False
1418
- If set to True, disables Conda.
1382
+ Decorator prototype for all step decorators. This function gets specialized
1383
+ and imported for all decorator types by _import_plugin_decorators().
1419
1384
  """
1420
1385
  ...
1421
1386
 
1422
1387
  @typing.overload
1423
- def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1388
+ def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1424
1389
  ...
1425
1390
 
1426
- def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
1391
+ def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
1427
1392
  """
1428
- Specifies the Conda environment for all steps of the flow.
1429
-
1430
- Use `@conda_base` to set common libraries required by all
1431
- steps and use `@conda` to specify step-specific additions.
1432
-
1433
-
1434
- Parameters
1435
- ----------
1436
- packages : Dict[str, str], default {}
1437
- Packages to use for this flow. The key is the name of the package
1438
- and the value is the version to use.
1439
- libraries : Dict[str, str], default {}
1440
- Supported for backward compatibility. When used with packages, packages will take precedence.
1441
- python : str, optional, default None
1442
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1443
- that the version used will correspond to the version of the Python interpreter used to start the run.
1444
- disabled : bool, default False
1445
- If set to True, disables Conda.
1393
+ Decorator prototype for all step decorators. This function gets specialized
1394
+ and imported for all decorator types by _import_plugin_decorators().
1446
1395
  """
1447
1396
  ...
1448
1397
 
1449
1398
  @typing.overload
1450
- def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1399
+ def trigger_on_finish(*, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1451
1400
  """
1452
- Specifies the event(s) that this flow depends on.
1401
+ Specifies the flow(s) that this flow depends on.
1453
1402
 
1454
1403
  ```
1455
- @trigger(event='foo')
1404
+ @trigger_on_finish(flow='FooFlow')
1456
1405
  ```
1457
1406
  or
1458
1407
  ```
1459
- @trigger(events=['foo', 'bar'])
1408
+ @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
1460
1409
  ```
1410
+ This decorator respects the @project decorator and triggers the flow
1411
+ when upstream runs within the same namespace complete successfully.
1461
1412
 
1462
- Additionally, you can specify the parameter mappings
1463
- to map event payload to Metaflow parameters for the flow.
1413
+ Additionally, you can specify project-aware upstream flow dependencies
1414
+ by specifying the fully qualified project_flow_name.
1464
1415
  ```
1465
- @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
1416
+ @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
1466
1417
  ```
1467
1418
  or
1468
1419
  ```
1469
- @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
1470
- {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
1420
+ @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
1471
1421
  ```
1472
1422
 
1473
- 'parameters' can also be a list of strings and tuples like so:
1474
- ```
1475
- @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
1476
- ```
1477
- This is equivalent to:
1423
+ You can also specify just the project or project branch (other values will be
1424
+ inferred from the current project or project branch):
1478
1425
  ```
1479
- @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
1426
+ @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1480
1427
  ```
1481
1428
 
1429
+ Note that `branch` is typically one of:
1430
+ - `prod`
1431
+ - `user.bob`
1432
+ - `test.my_experiment`
1433
+ - `prod.staging`
1434
+
1482
1435
 
1483
1436
  Parameters
1484
1437
  ----------
1485
- event : Union[str, Dict[str, Any]], optional, default None
1486
- Event dependency for this flow.
1487
- events : List[Union[str, Dict[str, Any]]], default []
1488
- Events dependency for this flow.
1438
+ flow : Union[str, Dict[str, str]], optional, default None
1439
+ Upstream flow dependency for this flow.
1440
+ flows : List[Union[str, Dict[str, str]]], default []
1441
+ Upstream flow dependencies for this flow.
1489
1442
  options : Dict[str, Any], default {}
1490
1443
  Backend-specific configuration for tuning eventing behavior.
1491
1444
  """
1492
1445
  ...
1493
1446
 
1494
1447
  @typing.overload
1495
- def trigger(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1448
+ def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1496
1449
  ...
1497
1450
 
1498
- def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}):
1451
+ def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
1499
1452
  """
1500
- Specifies the event(s) that this flow depends on.
1453
+ Specifies the flow(s) that this flow depends on.
1501
1454
 
1502
1455
  ```
1503
- @trigger(event='foo')
1456
+ @trigger_on_finish(flow='FooFlow')
1504
1457
  ```
1505
1458
  or
1506
1459
  ```
1507
- @trigger(events=['foo', 'bar'])
1460
+ @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
1508
1461
  ```
1462
+ This decorator respects the @project decorator and triggers the flow
1463
+ when upstream runs within the same namespace complete successfully.
1509
1464
 
1510
- Additionally, you can specify the parameter mappings
1511
- to map event payload to Metaflow parameters for the flow.
1465
+ Additionally, you can specify project-aware upstream flow dependencies
1466
+ by specifying the fully qualified project_flow_name.
1512
1467
  ```
1513
- @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
1468
+ @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
1514
1469
  ```
1515
1470
  or
1516
1471
  ```
1517
- @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
1518
- {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
1472
+ @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
1519
1473
  ```
1520
1474
 
1521
- 'parameters' can also be a list of strings and tuples like so:
1522
- ```
1523
- @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
1524
- ```
1525
- This is equivalent to:
1475
+ You can also specify just the project or project branch (other values will be
1476
+ inferred from the current project or project branch):
1526
1477
  ```
1527
- @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
1478
+ @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1528
1479
  ```
1529
1480
 
1481
+ Note that `branch` is typically one of:
1482
+ - `prod`
1483
+ - `user.bob`
1484
+ - `test.my_experiment`
1485
+ - `prod.staging`
1486
+
1530
1487
 
1531
1488
  Parameters
1532
1489
  ----------
1533
- event : Union[str, Dict[str, Any]], optional, default None
1534
- Event dependency for this flow.
1535
- events : List[Union[str, Dict[str, Any]]], default []
1536
- Events dependency for this flow.
1490
+ flow : Union[str, Dict[str, str]], optional, default None
1491
+ Upstream flow dependency for this flow.
1492
+ flows : List[Union[str, Dict[str, str]]], default []
1493
+ Upstream flow dependencies for this flow.
1537
1494
  options : Dict[str, Any], default {}
1538
1495
  Backend-specific configuration for tuning eventing behavior.
1539
1496
  """
@@ -1582,38 +1539,44 @@ def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str,
1582
1539
  """
1583
1540
  ...
1584
1541
 
1585
- def project(*, name: str, branch: typing.Optional[str] = None, production: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1542
+ @typing.overload
1543
+ def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1586
1544
  """
1587
- Specifies what flows belong to the same project.
1588
-
1589
- A project-specific namespace is created for all flows that
1590
- use the same `@project(name)`.
1545
+ Specifies the PyPI packages for all steps of the flow.
1591
1546
 
1547
+ Use `@pypi_base` to set common packages required by all
1548
+ steps and use `@pypi` to specify step-specific overrides.
1592
1549
 
1593
1550
  Parameters
1594
1551
  ----------
1595
- name : str
1596
- Project name. Make sure that the name is unique amongst all
1597
- projects that use the same production scheduler. The name may
1598
- contain only lowercase alphanumeric characters and underscores.
1552
+ packages : Dict[str, str], default: {}
1553
+ Packages to use for this flow. The key is the name of the package
1554
+ and the value is the version to use.
1555
+ python : str, optional, default: None
1556
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1557
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1558
+ """
1559
+ ...
1560
+
1561
+ @typing.overload
1562
+ def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1563
+ ...
1564
+
1565
+ def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
1566
+ """
1567
+ Specifies the PyPI packages for all steps of the flow.
1599
1568
 
1600
- branch : Optional[str], default None
1601
- The branch to use. If not specified, the branch is set to
1602
- `user.<username>` unless `production` is set to `True`. This can
1603
- also be set on the command line using `--branch` as a top-level option.
1604
- It is an error to specify `branch` in the decorator and on the command line.
1569
+ Use `@pypi_base` to set common packages required by all
1570
+ steps and use `@pypi` to specify step-specific overrides.
1605
1571
 
1606
- production : bool, default False
1607
- Whether or not the branch is the production branch. This can also be set on the
1608
- command line using `--production` as a top-level option. It is an error to specify
1609
- `production` in the decorator and on the command line.
1610
- The project branch name will be:
1611
- - if `branch` is specified:
1612
- - if `production` is True: `prod.<branch>`
1613
- - if `production` is False: `test.<branch>`
1614
- - if `branch` is not specified:
1615
- - if `production` is True: `prod`
1616
- - if `production` is False: `user.<username>`
1572
+ Parameters
1573
+ ----------
1574
+ packages : Dict[str, str], default: {}
1575
+ Packages to use for this flow. The key is the name of the package
1576
+ and the value is the version to use.
1577
+ python : str, optional, default: None
1578
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1579
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1617
1580
  """
1618
1581
  ...
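To show how the flow-level decorator above combines with the step-level `@pypi` it refers to, here is a minimal sketch; the package pins and Python version are arbitrary examples, not requirements of this stub package.
```
from metaflow import FlowSpec, pypi, pypi_base, step

@pypi_base(packages={'requests': '2.31.0'}, python='3.11.0')
class PypiBaseFlow(FlowSpec):

    @step
    def start(self):
        import requests  # available in every step via the flow-level @pypi_base
        self.next(self.train)

    # Step-specific addition/override, as the docstring suggests.
    @pypi(packages={'pandas': '2.2.2'})
    @step
    def train(self):
        import pandas as pd
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    PypiBaseFlow()
```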
1619
1582
 
@@ -1783,103 +1746,53 @@ def with_artifact_store(f: typing.Optional[typing.Type[FlowSpecDerived]] = None)
1783
1746
  ...
1784
1747
 
1785
1748
  @typing.overload
1786
- def trigger_on_finish(*, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1749
+ def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1787
1750
  """
1788
- Specifies the flow(s) that this flow depends on.
1789
-
1790
- ```
1791
- @trigger_on_finish(flow='FooFlow')
1792
- ```
1793
- or
1794
- ```
1795
- @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
1796
- ```
1797
- This decorator respects the @project decorator and triggers the flow
1798
- when upstream runs within the same namespace complete successfully
1799
-
1800
- Additionally, you can specify project aware upstream flow dependencies
1801
- by specifying the fully qualified project_flow_name.
1802
- ```
1803
- @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
1804
- ```
1805
- or
1806
- ```
1807
- @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
1808
- ```
1809
-
1810
- You can also specify just the project or project branch (other values will be
1811
- inferred from the current project or project branch):
1812
- ```
1813
- @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1814
- ```
1751
+ Specifies the Conda environment for all steps of the flow.
1815
1752
 
1816
- Note that `branch` is typically one of:
1817
- - `prod`
1818
- - `user.bob`
1819
- - `test.my_experiment`
1820
- - `prod.staging`
1753
+ Use `@conda_base` to set common libraries required by all
1754
+ steps and use `@conda` to specify step-specific additions.
1821
1755
 
1822
1756
 
1823
1757
  Parameters
1824
1758
  ----------
1825
- flow : Union[str, Dict[str, str]], optional, default None
1826
- Upstream flow dependency for this flow.
1827
- flows : List[Union[str, Dict[str, str]]], default []
1828
- Upstream flow dependencies for this flow.
1829
- options : Dict[str, Any], default {}
1830
- Backend-specific configuration for tuning eventing behavior.
1759
+ packages : Dict[str, str], default {}
1760
+ Packages to use for this flow. The key is the name of the package
1761
+ and the value is the version to use.
1762
+ libraries : Dict[str, str], default {}
1763
+ Supported for backward compatibility. When used with packages, packages will take precedence.
1764
+ python : str, optional, default None
1765
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1766
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1767
+ disabled : bool, default False
1768
+ If set to True, disables Conda.
1831
1769
  """
1832
1770
  ...
1833
1771
 
1834
1772
  @typing.overload
1835
- def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1773
+ def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1836
1774
  ...
1837
1775
 
1838
- def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
1776
+ def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
1839
1777
  """
1840
- Specifies the flow(s) that this flow depends on.
1841
-
1842
- ```
1843
- @trigger_on_finish(flow='FooFlow')
1844
- ```
1845
- or
1846
- ```
1847
- @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
1848
- ```
1849
- This decorator respects the @project decorator and triggers the flow
1850
- when upstream runs within the same namespace complete successfully
1851
-
1852
- Additionally, you can specify project aware upstream flow dependencies
1853
- by specifying the fully qualified project_flow_name.
1854
- ```
1855
- @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
1856
- ```
1857
- or
1858
- ```
1859
- @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
1860
- ```
1861
-
1862
- You can also specify just the project or project branch (other values will be
1863
- inferred from the current project or project branch):
1864
- ```
1865
- @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1866
- ```
1778
+ Specifies the Conda environment for all steps of the flow.
1867
1779
 
1868
- Note that `branch` is typically one of:
1869
- - `prod`
1870
- - `user.bob`
1871
- - `test.my_experiment`
1872
- - `prod.staging`
1780
+ Use `@conda_base` to set common libraries required by all
1781
+ steps and use `@conda` to specify step-specific additions.
1873
1782
 
1874
1783
 
1875
1784
  Parameters
1876
1785
  ----------
1877
- flow : Union[str, Dict[str, str]], optional, default None
1878
- Upstream flow dependency for this flow.
1879
- flows : List[Union[str, Dict[str, str]]], default []
1880
- Upstream flow dependencies for this flow.
1881
- options : Dict[str, Any], default {}
1882
- Backend-specific configuration for tuning eventing behavior.
1786
+ packages : Dict[str, str], default {}
1787
+ Packages to use for this flow. The key is the name of the package
1788
+ and the value is the version to use.
1789
+ libraries : Dict[str, str], default {}
1790
+ Supported for backward compatibility. When used with packages, packages will take precedence.
1791
+ python : str, optional, default None
1792
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1793
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1794
+ disabled : bool, default False
1795
+ If set to True, disables Conda.
1883
1796
  """
1884
1797
  ...
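Analogously, a hedged sketch of combining `@conda_base` with the step-level `@conda` addition mentioned in the docstring; the package names and versions are illustrative only.
```
from metaflow import FlowSpec, conda, conda_base, step

@conda_base(packages={'numpy': '1.26.4'}, python='3.10.12')
class CondaBaseFlow(FlowSpec):

    @step
    def start(self):
        import numpy as np  # resolved from the flow-level Conda environment
        self.next(self.train)

    # Step-specific addition on top of the shared environment.
    @conda(packages={'scikit-learn': '1.4.2'})
    @step
    def train(self):
        import sklearn
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    CondaBaseFlow()
```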
1885
1798
 
@@ -1926,44 +1839,131 @@ def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, expone
1926
1839
  """
1927
1840
  ...
1928
1841
 
1842
+ def project(*, name: str, branch: typing.Optional[str] = None, production: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1843
+ """
1844
+ Specifies what flows belong to the same project.
1845
+
1846
+ A project-specific namespace is created for all flows that
1847
+ use the same `@project(name)`.
1848
+
1849
+
1850
+ Parameters
1851
+ ----------
1852
+ name : str
1853
+ Project name. Make sure that the name is unique amongst all
1854
+ projects that use the same production scheduler. The name may
1855
+ contain only lowercase alphanumeric characters and underscores.
1856
+
1857
+ branch : Optional[str], default None
1858
+ The branch to use. If not specified, the branch is set to
1859
+ `user.<username>` unless `production` is set to `True`. This can
1860
+ also be set on the command line using `--branch` as a top-level option.
1861
+ It is an error to specify `branch` in the decorator and on the command line.
1862
+
1863
+ production : bool, default False
1864
+ Whether or not the branch is the production branch. This can also be set on the
1865
+ command line using `--production` as a top-level option. It is an error to specify
1866
+ `production` in the decorator and on the command line.
1867
+ The project branch name will be:
1868
+ - if `branch` is specified:
1869
+ - if `production` is True: `prod.<branch>`
1870
+ - if `production` is False: `test.<branch>`
1871
+ - if `branch` is not specified:
1872
+ - if `production` is True: `prod`
1873
+ - if `production` is False: `user.<username>`
1874
+ """
1875
+ ...
1876
+
1929
1877
  @typing.overload
1930
- def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1878
+ def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1931
1879
  """
1932
- Specifies the PyPI packages for all steps of the flow.
1880
+ Specifies the event(s) that this flow depends on.
1881
+
1882
+ ```
1883
+ @trigger(event='foo')
1884
+ ```
1885
+ or
1886
+ ```
1887
+ @trigger(events=['foo', 'bar'])
1888
+ ```
1889
+
1890
+ Additionally, you can specify the parameter mappings
1891
+ to map event payload to Metaflow parameters for the flow.
1892
+ ```
1893
+ @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
1894
+ ```
1895
+ or
1896
+ ```
1897
+ @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'}},
1898
+ {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}}])
1899
+ ```
1900
+
1901
+ 'parameters' can also be a list of strings and tuples like so:
1902
+ ```
1903
+ @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
1904
+ ```
1905
+ This is equivalent to:
1906
+ ```
1907
+ @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
1908
+ ```
1933
1909
 
1934
- Use `@pypi_base` to set common packages required by all
1935
- steps and use `@pypi` to specify step-specific overrides.
1936
1910
 
1937
1911
  Parameters
1938
1912
  ----------
1939
- packages : Dict[str, str], default: {}
1940
- Packages to use for this flow. The key is the name of the package
1941
- and the value is the version to use.
1942
- python : str, optional, default: None
1943
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1944
- that the version used will correspond to the version of the Python interpreter used to start the run.
1913
+ event : Union[str, Dict[str, Any]], optional, default None
1914
+ Event dependency for this flow.
1915
+ events : List[Union[str, Dict[str, Any]]], default []
1916
+ Events dependency for this flow.
1917
+ options : Dict[str, Any], default {}
1918
+ Backend-specific configuration for tuning eventing behavior.
1945
1919
  """
1946
1920
  ...
1947
1921
 
1948
1922
  @typing.overload
1949
- def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1923
+ def trigger(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1950
1924
  ...
1951
1925
 
1952
- def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
1926
+ def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}):
1953
1927
  """
1954
- Specifies the PyPI packages for all steps of the flow.
1928
+ Specifies the event(s) that this flow depends on.
1929
+
1930
+ ```
1931
+ @trigger(event='foo')
1932
+ ```
1933
+ or
1934
+ ```
1935
+ @trigger(events=['foo', 'bar'])
1936
+ ```
1937
+
1938
+ Additionally, you can specify the parameter mappings
1939
+ to map event payload to Metaflow parameters for the flow.
1940
+ ```
1941
+ @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
1942
+ ```
1943
+ or
1944
+ ```
1945
+ @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
1946
+ {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
1947
+ ```
1948
+
1949
+ 'parameters' can also be a list of strings and tuples like so:
1950
+ ```
1951
+ @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
1952
+ ```
1953
+ This is equivalent to:
1954
+ ```
1955
+ @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
1956
+ ```
1955
1957
 
1956
- Use `@pypi_base` to set common packages required by all
1957
- steps and use `@pypi` to specify step-specific overrides.
1958
1958
 
1959
1959
  Parameters
1960
1960
  ----------
1961
- packages : Dict[str, str], default: {}
1962
- Packages to use for this flow. The key is the name of the package
1963
- and the value is the version to use.
1964
- python : str, optional, default: None
1965
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1966
- that the version used will correspond to the version of the Python interpreter used to start the run.
1961
+ event : Union[str, Dict[str, Any]], optional, default None
1962
+ Event dependency for this flow.
1963
+ events : List[Union[str, Dict[str, Any]]], default []
1964
+ Events dependency for this flow.
1965
+ options : Dict[str, Any], default {}
1966
+ Backend-specific configuration for tuning eventing behavior.
1967
1967
  """
1968
1968
  ...
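Finally, a hedged sketch of the event-triggered pattern described above, including a parameter mapping from the event payload; the event name `data_ready`, its `path` field, and the parameter default are illustrative assumptions, not part of this diff.
```
from metaflow import FlowSpec, Parameter, step, trigger

@trigger(event={'name': 'data_ready', 'parameters': {'input_path': 'path'}})
class EventDrivenFlow(FlowSpec):

    # Populated from the 'path' field of the 'data_ready' event payload when
    # the deployed flow is triggered; falls back to the default otherwise.
    input_path = Parameter('input_path', default='s3://example-bucket/data.csv')

    @step
    def start(self):
        print('triggered with input_path =', self.input_path)
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    EventDrivenFlow()
```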
1969
1969