ob-metaflow-stubs 6.0.10.5__py2.py3-none-any.whl → 6.0.10.7__py2.py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of ob-metaflow-stubs might be problematic; see the release details on the registry page for more information.

Files changed (262)
  1. metaflow-stubs/__init__.pyi +917 -917
  2. metaflow-stubs/cards.pyi +2 -2
  3. metaflow-stubs/cli.pyi +2 -2
  4. metaflow-stubs/cli_components/__init__.pyi +2 -2
  5. metaflow-stubs/cli_components/utils.pyi +2 -2
  6. metaflow-stubs/client/__init__.pyi +2 -2
  7. metaflow-stubs/client/core.pyi +6 -6
  8. metaflow-stubs/client/filecache.pyi +2 -2
  9. metaflow-stubs/events.pyi +2 -2
  10. metaflow-stubs/exception.pyi +2 -2
  11. metaflow-stubs/flowspec.pyi +5 -5
  12. metaflow-stubs/generated_for.txt +1 -1
  13. metaflow-stubs/includefile.pyi +4 -4
  14. metaflow-stubs/meta_files.pyi +2 -2
  15. metaflow-stubs/metadata_provider/__init__.pyi +2 -2
  16. metaflow-stubs/metadata_provider/heartbeat.pyi +2 -2
  17. metaflow-stubs/metadata_provider/metadata.pyi +2 -2
  18. metaflow-stubs/metadata_provider/util.pyi +2 -2
  19. metaflow-stubs/metaflow_config.pyi +2 -2
  20. metaflow-stubs/metaflow_current.pyi +60 -60
  21. metaflow-stubs/metaflow_git.pyi +2 -2
  22. metaflow-stubs/mf_extensions/__init__.pyi +2 -2
  23. metaflow-stubs/mf_extensions/obcheckpoint/__init__.pyi +2 -2
  24. metaflow-stubs/mf_extensions/obcheckpoint/plugins/__init__.pyi +2 -2
  25. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/__init__.pyi +2 -2
  26. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/__init__.pyi +2 -2
  27. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/async_cards.pyi +2 -2
  28. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/deco_injection_mixin.pyi +2 -2
  29. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/extra_components.pyi +2 -2
  30. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/__init__.pyi +2 -2
  31. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/cards/__init__.pyi +2 -2
  32. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/cards/checkpoint_lister.pyi +3 -3
  33. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/cards/lineage_card.pyi +2 -2
  34. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/checkpoint_storage.pyi +3 -3
  35. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/constructors.pyi +2 -2
  36. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/core.pyi +3 -3
  37. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/decorator.pyi +5 -5
  38. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/exceptions.pyi +2 -2
  39. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/final_api.pyi +3 -3
  40. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/lineage.pyi +2 -2
  41. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/__init__.pyi +2 -2
  42. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/context.pyi +3 -3
  43. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/core.pyi +2 -2
  44. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/decorator.pyi +2 -2
  45. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/exceptions.pyi +2 -2
  46. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/task_utils.pyi +2 -2
  47. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/utils.pyi +2 -2
  48. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastructures.pyi +3 -3
  49. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/exceptions.pyi +2 -2
  50. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/hf_hub/__init__.pyi +2 -2
  51. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/hf_hub/decorator.pyi +2 -2
  52. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/__init__.pyi +2 -2
  53. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/core.pyi +3 -3
  54. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/exceptions.pyi +2 -2
  55. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/model_storage.pyi +3 -3
  56. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/__init__.pyi +2 -2
  57. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/flowspec_utils.pyi +2 -2
  58. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/general.pyi +2 -2
  59. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/identity_utils.pyi +2 -2
  60. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/serialization_handler/__init__.pyi +2 -2
  61. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/serialization_handler/base.pyi +2 -2
  62. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/serialization_handler/tar.pyi +2 -2
  63. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/tar_utils.pyi +2 -2
  64. metaflow-stubs/mf_extensions/outerbounds/__init__.pyi +2 -2
  65. metaflow-stubs/mf_extensions/outerbounds/plugins/__init__.pyi +2 -2
  66. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/__init__.pyi +2 -2
  67. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/__init__.pyi +2 -2
  68. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/_state_machine.pyi +2 -2
  69. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/_vendor/__init__.pyi +2 -2
  70. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/_vendor/spinner/__init__.pyi +2 -2
  71. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/_vendor/spinner/spinners.pyi +2 -2
  72. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/app_cli.pyi +2 -2
  73. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/app_config.pyi +3 -3
  74. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/capsule.pyi +4 -4
  75. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/click_importer.pyi +2 -2
  76. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/code_package/__init__.pyi +2 -2
  77. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/code_package/code_packager.pyi +3 -3
  78. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/__init__.pyi +2 -2
  79. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/cli_generator.pyi +2 -2
  80. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/config_utils.pyi +3 -3
  81. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/schema_export.pyi +2 -2
  82. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/typed_configs.pyi +4 -4
  83. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/config/unified_config.pyi +3 -3
  84. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/dependencies.pyi +3 -3
  85. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/deployer.pyi +4 -4
  86. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/experimental/__init__.pyi +2 -2
  87. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/perimeters.pyi +2 -2
  88. metaflow-stubs/mf_extensions/outerbounds/plugins/apps/core/utils.pyi +3 -3
  89. metaflow-stubs/mf_extensions/outerbounds/plugins/aws/__init__.pyi +2 -2
  90. metaflow-stubs/mf_extensions/outerbounds/plugins/aws/assume_role_decorator.pyi +3 -3
  91. metaflow-stubs/mf_extensions/outerbounds/plugins/card_utilities/__init__.pyi +2 -2
  92. metaflow-stubs/mf_extensions/outerbounds/plugins/card_utilities/async_cards.pyi +2 -2
  93. metaflow-stubs/mf_extensions/outerbounds/plugins/card_utilities/injector.pyi +2 -2
  94. metaflow-stubs/mf_extensions/outerbounds/plugins/checkpoint_datastores/__init__.pyi +2 -2
  95. metaflow-stubs/mf_extensions/outerbounds/plugins/checkpoint_datastores/coreweave.pyi +3 -3
  96. metaflow-stubs/mf_extensions/outerbounds/plugins/checkpoint_datastores/nebius.pyi +3 -3
  97. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/__init__.pyi +2 -2
  98. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/baker.pyi +3 -3
  99. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/docker_environment.pyi +2 -2
  100. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/fast_bakery.pyi +2 -2
  101. metaflow-stubs/mf_extensions/outerbounds/plugins/kubernetes/__init__.pyi +2 -2
  102. metaflow-stubs/mf_extensions/outerbounds/plugins/kubernetes/pod_killer.pyi +2 -2
  103. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/__init__.pyi +2 -2
  104. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/constants.pyi +2 -2
  105. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/exceptions.pyi +2 -2
  106. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/ollama.pyi +2 -2
  107. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/status_card.pyi +2 -2
  108. metaflow-stubs/mf_extensions/outerbounds/plugins/snowflake/__init__.pyi +2 -2
  109. metaflow-stubs/mf_extensions/outerbounds/plugins/snowflake/snowflake.pyi +2 -2
  110. metaflow-stubs/mf_extensions/outerbounds/profilers/__init__.pyi +2 -2
  111. metaflow-stubs/mf_extensions/outerbounds/profilers/gpu.pyi +2 -2
  112. metaflow-stubs/mf_extensions/outerbounds/remote_config.pyi +2 -2
  113. metaflow-stubs/mf_extensions/outerbounds/toplevel/__init__.pyi +2 -2
  114. metaflow-stubs/mf_extensions/outerbounds/toplevel/global_aliases_for_metaflow_package.pyi +2 -2
  115. metaflow-stubs/mf_extensions/outerbounds/toplevel/s3_proxy.pyi +2 -2
  116. metaflow-stubs/multicore_utils.pyi +2 -2
  117. metaflow-stubs/ob_internal.pyi +2 -2
  118. metaflow-stubs/packaging_sys/__init__.pyi +6 -6
  119. metaflow-stubs/packaging_sys/backend.pyi +3 -3
  120. metaflow-stubs/packaging_sys/distribution_support.pyi +4 -4
  121. metaflow-stubs/packaging_sys/tar_backend.pyi +5 -5
  122. metaflow-stubs/packaging_sys/utils.pyi +2 -2
  123. metaflow-stubs/packaging_sys/v1.pyi +2 -2
  124. metaflow-stubs/parameters.pyi +4 -4
  125. metaflow-stubs/plugins/__init__.pyi +12 -12
  126. metaflow-stubs/plugins/airflow/__init__.pyi +2 -2
  127. metaflow-stubs/plugins/airflow/airflow_utils.pyi +2 -2
  128. metaflow-stubs/plugins/airflow/exception.pyi +2 -2
  129. metaflow-stubs/plugins/airflow/sensors/__init__.pyi +2 -2
  130. metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +2 -2
  131. metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +2 -2
  132. metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +2 -2
  133. metaflow-stubs/plugins/argo/__init__.pyi +2 -2
  134. metaflow-stubs/plugins/argo/argo_client.pyi +2 -2
  135. metaflow-stubs/plugins/argo/argo_events.pyi +2 -2
  136. metaflow-stubs/plugins/argo/argo_workflows.pyi +3 -3
  137. metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +4 -4
  138. metaflow-stubs/plugins/argo/argo_workflows_deployer.pyi +4 -4
  139. metaflow-stubs/plugins/argo/argo_workflows_deployer_objects.pyi +2 -2
  140. metaflow-stubs/plugins/argo/exit_hooks.pyi +3 -3
  141. metaflow-stubs/plugins/aws/__init__.pyi +2 -2
  142. metaflow-stubs/plugins/aws/aws_client.pyi +2 -2
  143. metaflow-stubs/plugins/aws/aws_utils.pyi +2 -2
  144. metaflow-stubs/plugins/aws/batch/__init__.pyi +2 -2
  145. metaflow-stubs/plugins/aws/batch/batch.pyi +2 -2
  146. metaflow-stubs/plugins/aws/batch/batch_client.pyi +2 -2
  147. metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +2 -2
  148. metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +2 -2
  149. metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +2 -2
  150. metaflow-stubs/plugins/aws/step_functions/__init__.pyi +2 -2
  151. metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +2 -2
  152. metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +2 -2
  153. metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +2 -2
  154. metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +2 -2
  155. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer.pyi +3 -3
  156. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer_objects.pyi +2 -2
  157. metaflow-stubs/plugins/azure/__init__.pyi +2 -2
  158. metaflow-stubs/plugins/azure/azure_credential.pyi +2 -2
  159. metaflow-stubs/plugins/azure/azure_exceptions.pyi +2 -2
  160. metaflow-stubs/plugins/azure/azure_secret_manager_secrets_provider.pyi +2 -2
  161. metaflow-stubs/plugins/azure/azure_utils.pyi +2 -2
  162. metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +2 -2
  163. metaflow-stubs/plugins/azure/includefile_support.pyi +2 -2
  164. metaflow-stubs/plugins/cards/__init__.pyi +2 -2
  165. metaflow-stubs/plugins/cards/card_client.pyi +2 -2
  166. metaflow-stubs/plugins/cards/card_creator.pyi +2 -2
  167. metaflow-stubs/plugins/cards/card_datastore.pyi +2 -2
  168. metaflow-stubs/plugins/cards/card_decorator.pyi +3 -3
  169. metaflow-stubs/plugins/cards/card_modules/__init__.pyi +2 -2
  170. metaflow-stubs/plugins/cards/card_modules/basic.pyi +3 -3
  171. metaflow-stubs/plugins/cards/card_modules/card.pyi +2 -2
  172. metaflow-stubs/plugins/cards/card_modules/components.pyi +3 -3
  173. metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +2 -2
  174. metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +2 -2
  175. metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +2 -2
  176. metaflow-stubs/plugins/cards/card_resolver.pyi +2 -2
  177. metaflow-stubs/plugins/cards/component_serializer.pyi +2 -2
  178. metaflow-stubs/plugins/cards/exception.pyi +2 -2
  179. metaflow-stubs/plugins/catch_decorator.pyi +2 -2
  180. metaflow-stubs/plugins/datatools/__init__.pyi +2 -2
  181. metaflow-stubs/plugins/datatools/local.pyi +2 -2
  182. metaflow-stubs/plugins/datatools/s3/__init__.pyi +2 -2
  183. metaflow-stubs/plugins/datatools/s3/s3.pyi +4 -4
  184. metaflow-stubs/plugins/datatools/s3/s3tail.pyi +2 -2
  185. metaflow-stubs/plugins/datatools/s3/s3util.pyi +2 -2
  186. metaflow-stubs/plugins/debug_logger.pyi +2 -2
  187. metaflow-stubs/plugins/debug_monitor.pyi +2 -2
  188. metaflow-stubs/plugins/environment_decorator.pyi +2 -2
  189. metaflow-stubs/plugins/events_decorator.pyi +2 -2
  190. metaflow-stubs/plugins/exit_hook/__init__.pyi +2 -2
  191. metaflow-stubs/plugins/exit_hook/exit_hook_decorator.pyi +2 -2
  192. metaflow-stubs/plugins/frameworks/__init__.pyi +2 -2
  193. metaflow-stubs/plugins/frameworks/pytorch.pyi +2 -2
  194. metaflow-stubs/plugins/gcp/__init__.pyi +2 -2
  195. metaflow-stubs/plugins/gcp/gcp_secret_manager_secrets_provider.pyi +2 -2
  196. metaflow-stubs/plugins/gcp/gs_exceptions.pyi +2 -2
  197. metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +2 -2
  198. metaflow-stubs/plugins/gcp/gs_utils.pyi +2 -2
  199. metaflow-stubs/plugins/gcp/includefile_support.pyi +2 -2
  200. metaflow-stubs/plugins/kubernetes/__init__.pyi +2 -2
  201. metaflow-stubs/plugins/kubernetes/kube_utils.pyi +2 -2
  202. metaflow-stubs/plugins/kubernetes/kubernetes.pyi +2 -2
  203. metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +2 -2
  204. metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +2 -2
  205. metaflow-stubs/plugins/kubernetes/kubernetes_jobsets.pyi +2 -2
  206. metaflow-stubs/plugins/kubernetes/spot_monitor_sidecar.pyi +2 -2
  207. metaflow-stubs/plugins/ollama/__init__.pyi +3 -3
  208. metaflow-stubs/plugins/optuna/__init__.pyi +2 -2
  209. metaflow-stubs/plugins/parallel_decorator.pyi +2 -2
  210. metaflow-stubs/plugins/perimeters.pyi +2 -2
  211. metaflow-stubs/plugins/project_decorator.pyi +2 -2
  212. metaflow-stubs/plugins/pypi/__init__.pyi +2 -2
  213. metaflow-stubs/plugins/pypi/conda_decorator.pyi +2 -2
  214. metaflow-stubs/plugins/pypi/conda_environment.pyi +4 -4
  215. metaflow-stubs/plugins/pypi/parsers.pyi +2 -2
  216. metaflow-stubs/plugins/pypi/pypi_decorator.pyi +2 -2
  217. metaflow-stubs/plugins/pypi/pypi_environment.pyi +2 -2
  218. metaflow-stubs/plugins/pypi/utils.pyi +2 -2
  219. metaflow-stubs/plugins/resources_decorator.pyi +2 -2
  220. metaflow-stubs/plugins/retry_decorator.pyi +2 -2
  221. metaflow-stubs/plugins/secrets/__init__.pyi +2 -2
  222. metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +2 -2
  223. metaflow-stubs/plugins/secrets/secrets_decorator.pyi +2 -2
  224. metaflow-stubs/plugins/secrets/secrets_func.pyi +2 -2
  225. metaflow-stubs/plugins/secrets/secrets_spec.pyi +2 -2
  226. metaflow-stubs/plugins/secrets/utils.pyi +2 -2
  227. metaflow-stubs/plugins/snowflake/__init__.pyi +2 -2
  228. metaflow-stubs/plugins/storage_executor.pyi +2 -2
  229. metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +3 -3
  230. metaflow-stubs/plugins/timeout_decorator.pyi +2 -2
  231. metaflow-stubs/plugins/torchtune/__init__.pyi +2 -2
  232. metaflow-stubs/plugins/uv/__init__.pyi +2 -2
  233. metaflow-stubs/plugins/uv/uv_environment.pyi +2 -2
  234. metaflow-stubs/profilers/__init__.pyi +2 -2
  235. metaflow-stubs/pylint_wrapper.pyi +2 -2
  236. metaflow-stubs/runner/__init__.pyi +2 -2
  237. metaflow-stubs/runner/deployer.pyi +7 -7
  238. metaflow-stubs/runner/deployer_impl.pyi +2 -2
  239. metaflow-stubs/runner/metaflow_runner.pyi +5 -5
  240. metaflow-stubs/runner/nbdeploy.pyi +2 -2
  241. metaflow-stubs/runner/nbrun.pyi +2 -2
  242. metaflow-stubs/runner/subprocess_manager.pyi +2 -2
  243. metaflow-stubs/runner/utils.pyi +3 -3
  244. metaflow-stubs/system/__init__.pyi +2 -2
  245. metaflow-stubs/system/system_logger.pyi +3 -3
  246. metaflow-stubs/system/system_monitor.pyi +2 -2
  247. metaflow-stubs/tagging_util.pyi +2 -2
  248. metaflow-stubs/tuple_util.pyi +2 -2
  249. metaflow-stubs/user_configs/__init__.pyi +2 -2
  250. metaflow-stubs/user_configs/config_options.pyi +2 -2
  251. metaflow-stubs/user_configs/config_parameters.pyi +5 -5
  252. metaflow-stubs/user_decorators/__init__.pyi +2 -2
  253. metaflow-stubs/user_decorators/common.pyi +2 -2
  254. metaflow-stubs/user_decorators/mutable_flow.pyi +5 -5
  255. metaflow-stubs/user_decorators/mutable_step.pyi +5 -5
  256. metaflow-stubs/user_decorators/user_flow_decorator.pyi +5 -5
  257. metaflow-stubs/user_decorators/user_step_decorator.pyi +7 -7
  258. {ob_metaflow_stubs-6.0.10.5.dist-info → ob_metaflow_stubs-6.0.10.7.dist-info}/METADATA +1 -1
  259. ob_metaflow_stubs-6.0.10.7.dist-info/RECORD +262 -0
  260. ob_metaflow_stubs-6.0.10.5.dist-info/RECORD +0 -262
  261. {ob_metaflow_stubs-6.0.10.5.dist-info → ob_metaflow_stubs-6.0.10.7.dist-info}/WHEEL +0 -0
  262. {ob_metaflow_stubs-6.0.10.5.dist-info → ob_metaflow_stubs-6.0.10.7.dist-info}/top_level.txt +0 -0
@@ -1,7 +1,7 @@
1
1
  ######################################################################################################
2
2
  # Auto-generated Metaflow stub file #
3
- # MF version: 2.18.7.1+obcheckpoint(0.2.6);ob(v1) #
4
- # Generated on 2025-09-19T08:41:35.349888 #
3
+ # MF version: 2.18.7.2+obcheckpoint(0.2.6);ob(v1) #
4
+ # Generated on 2025-09-19T18:41:10.791351 #
5
5
  ######################################################################################################
6
6
 
7
7
  from __future__ import annotations
@@ -39,18 +39,18 @@ from .user_decorators.user_step_decorator import UserStepDecorator as UserStepDe
39
39
  from .user_decorators.user_step_decorator import StepMutator as StepMutator
40
40
  from .user_decorators.user_step_decorator import user_step_decorator as user_step_decorator
41
41
  from .user_decorators.user_flow_decorator import FlowMutator as FlowMutator
42
- from . import tuple_util as tuple_util
43
42
  from . import cards as cards
44
43
  from . import metaflow_git as metaflow_git
45
44
  from . import events as events
45
+ from . import tuple_util as tuple_util
46
46
  from . import runner as runner
47
47
  from . import plugins as plugins
48
48
  from .mf_extensions.outerbounds.toplevel.global_aliases_for_metaflow_package import S3 as S3
49
49
  from . import includefile as includefile
50
50
  from .includefile import IncludeFile as IncludeFile
51
- from .plugins.pypi.parsers import conda_environment_yml_parser as conda_environment_yml_parser
52
- from .plugins.pypi.parsers import pyproject_toml_parser as pyproject_toml_parser
53
51
  from .plugins.pypi.parsers import requirements_txt_parser as requirements_txt_parser
52
+ from .plugins.pypi.parsers import pyproject_toml_parser as pyproject_toml_parser
53
+ from .plugins.pypi.parsers import conda_environment_yml_parser as conda_environment_yml_parser
54
54
  from . import client as client
55
55
  from .client.core import namespace as namespace
56
56
  from .client.core import get_namespace as get_namespace
@@ -168,131 +168,57 @@ def step(f: typing.Union[typing.Callable[[FlowSpecDerived], None], typing.Callab
168
168
  ...
169
169
 
170
170
  @typing.overload
171
- def model(*, load: typing.Union[typing.List[str], str, typing.List[typing.Tuple[str, typing.Optional[str]]]] = None, temp_dir_root: str = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
171
+ def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
172
172
  """
173
- Enables loading / saving of models within a step.
174
-
175
- > Examples
176
- - Saving Models
177
- ```python
178
- @model
179
- @step
180
- def train(self):
181
- # current.model.save returns a dictionary reference to the model saved
182
- self.my_model = current.model.save(
183
- path_to_my_model,
184
- label="my_model",
185
- metadata={
186
- "epochs": 10,
187
- "batch-size": 32,
188
- "learning-rate": 0.001,
189
- }
190
- )
191
- self.next(self.test)
173
+ Specifies the number of times the task corresponding
174
+ to a step needs to be retried.
192
175
 
193
- @model(load="my_model")
194
- @step
195
- def test(self):
196
- # `current.model.loaded` returns a dictionary of the loaded models
197
- # where the key is the name of the artifact and the value is the path to the model
198
- print(os.listdir(current.model.loaded["my_model"]))
199
- self.next(self.end)
200
- ```
176
+ This decorator is useful for handling transient errors, such as networking issues.
177
+ If your task contains operations that can't be retried safely, e.g. database updates,
178
+ it is advisable to annotate it with `@retry(times=0)`.
201
179
 
202
- - Loading models
203
- ```python
204
- @step
205
- def train(self):
206
- # current.model.load returns the path to the model loaded
207
- checkpoint_path = current.model.load(
208
- self.checkpoint_key,
209
- )
210
- model_path = current.model.load(
211
- self.model,
212
- )
213
- self.next(self.test)
214
- ```
180
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
181
+ decorator will execute a no-op task after all retries have been exhausted,
182
+ ensuring that the flow execution can continue.
215
183
 
216
184
 
217
185
  Parameters
218
186
  ----------
219
- load : Union[List[str],str,List[Tuple[str,Union[str,None]]]], default: None
220
- Artifact name/s referencing the models/checkpoints to load. Artifact names refer to the names of the instance variables set to `self`.
221
- These artifact names give to `load` be reference objects or reference `key` string's from objects created by `current.checkpoint` / `current.model` / `current.huggingface_hub`.
222
- If a list of tuples is provided, the first element is the artifact name and the second element is the path the artifact needs be unpacked on
223
- the local filesystem. If the second element is None, the artifact will be unpacked in the current working directory.
224
- If a string is provided, then the artifact corresponding to that name will be loaded in the current working directory.
225
-
226
- temp_dir_root : str, default: None
227
- The root directory under which `current.model.loaded` will store loaded models
187
+ times : int, default 3
188
+ Number of times to retry this task.
189
+ minutes_between_retries : int, default 2
190
+ Number of minutes between retries.
228
191
  """
229
192
  ...
230
193
 
231
194
  @typing.overload
232
- def model(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
195
+ def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
233
196
  ...
234
197
 
235
198
  @typing.overload
236
- def model(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
199
+ def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
237
200
  ...
238
201
 
239
- def model(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, load: typing.Union[typing.List[str], str, typing.List[typing.Tuple[str, typing.Optional[str]]]] = None, temp_dir_root: str = None):
202
+ def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
240
203
  """
241
- Enables loading / saving of models within a step.
242
-
243
- > Examples
244
- - Saving Models
245
- ```python
246
- @model
247
- @step
248
- def train(self):
249
- # current.model.save returns a dictionary reference to the model saved
250
- self.my_model = current.model.save(
251
- path_to_my_model,
252
- label="my_model",
253
- metadata={
254
- "epochs": 10,
255
- "batch-size": 32,
256
- "learning-rate": 0.001,
257
- }
258
- )
259
- self.next(self.test)
204
+ Specifies the number of times the task corresponding
205
+ to a step needs to be retried.
260
206
 
261
- @model(load="my_model")
262
- @step
263
- def test(self):
264
- # `current.model.loaded` returns a dictionary of the loaded models
265
- # where the key is the name of the artifact and the value is the path to the model
266
- print(os.listdir(current.model.loaded["my_model"]))
267
- self.next(self.end)
268
- ```
207
+ This decorator is useful for handling transient errors, such as networking issues.
208
+ If your task contains operations that can't be retried safely, e.g. database updates,
209
+ it is advisable to annotate it with `@retry(times=0)`.
269
210
 
270
- - Loading models
271
- ```python
272
- @step
273
- def train(self):
274
- # current.model.load returns the path to the model loaded
275
- checkpoint_path = current.model.load(
276
- self.checkpoint_key,
277
- )
278
- model_path = current.model.load(
279
- self.model,
280
- )
281
- self.next(self.test)
282
- ```
211
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
212
+ decorator will execute a no-op task after all retries have been exhausted,
213
+ ensuring that the flow execution can continue.
283
214
 
284
215
 
285
216
  Parameters
286
217
  ----------
287
- load : Union[List[str],str,List[Tuple[str,Union[str,None]]]], default: None
288
- Artifact name/s referencing the models/checkpoints to load. Artifact names refer to the names of the instance variables set to `self`.
289
- These artifact names give to `load` be reference objects or reference `key` string's from objects created by `current.checkpoint` / `current.model` / `current.huggingface_hub`.
290
- If a list of tuples is provided, the first element is the artifact name and the second element is the path the artifact needs be unpacked on
291
- the local filesystem. If the second element is None, the artifact will be unpacked in the current working directory.
292
- If a string is provided, then the artifact corresponding to that name will be loaded in the current working directory.
293
-
294
- temp_dir_root : str, default: None
295
- The root directory under which `current.model.loaded` will store loaded models
218
+ times : int, default 3
219
+ Number of times to retry this task.
220
+ minutes_between_retries : int, default 2
221
+ Number of minutes between retries.
296
222
  """
297
223
  ...
298
224
 
@@ -346,24 +272,85 @@ def vllm(*, model: str, backend: str, openai_api_server: bool, debug: bool, card
346
272
  """
347
273
  ...
348
274
 
275
+ def ollama(*, models: list, backend: str, force_pull: bool, cache_update_policy: str, force_cache_update: bool, debug: bool, circuit_breaker_config: dict, timeout_config: dict) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
276
+ """
277
+ This decorator is used to run Ollama APIs as Metaflow task sidecars.
278
+
279
+ User code call
280
+ --------------
281
+ @ollama(
282
+ models=[...],
283
+ ...
284
+ )
285
+
286
+ Valid backend options
287
+ ---------------------
288
+ - 'local': Run as a separate process on the local task machine.
289
+ - (TODO) 'managed': Outerbounds hosts and selects compute provider.
290
+ - (TODO) 'remote': Spin up separate instance to serve Ollama models.
291
+
292
+ Valid model options
293
+ -------------------
294
+ Any model here https://ollama.com/search, e.g. 'llama3.2', 'llama3.3'
295
+
296
+
297
+ Parameters
298
+ ----------
299
+ models: list[str]
300
+ List of Ollama containers running models in sidecars.
301
+ backend: str
302
+ Determines where and how to run the Ollama process.
303
+ force_pull: bool
304
+ Whether to run `ollama pull` no matter what, or first check the remote cache in Metaflow datastore for this model key.
305
+ cache_update_policy: str
306
+ Cache update policy: "auto", "force", or "never".
307
+ force_cache_update: bool
308
+ Simple override for "force" cache update policy.
309
+ debug: bool
310
+ Whether to turn on verbose debugging logs.
311
+ circuit_breaker_config: dict
312
+ Configuration for circuit breaker protection. Keys: failure_threshold, recovery_timeout, reset_timeout.
313
+ timeout_config: dict
314
+ Configuration for various operation timeouts. Keys: pull, stop, health_check, install, server_startup.
315
+ """
316
+ ...
317
+
349
318
  @typing.overload
350
- def nebius_s3_proxy(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
319
+ def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
351
320
  """
352
- Nebius-specific S3 Proxy decorator for routing S3 requests through a local proxy service.
353
- It exists to make it easier for users to know that this decorator should only be used with
354
- a Neo Cloud like Nebius.
321
+ Specifies secrets to be retrieved and injected as environment variables prior to
322
+ the execution of a step.
323
+
324
+
325
+ Parameters
326
+ ----------
327
+ sources : List[Union[str, Dict[str, Any]]], default: []
328
+ List of secret specs, defining how the secrets are to be retrieved
329
+ role : str, optional, default: None
330
+ Role to use for fetching secrets
355
331
  """
356
332
  ...
357
333
 
358
334
  @typing.overload
359
- def nebius_s3_proxy(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
335
+ def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
360
336
  ...
361
337
 
362
- def nebius_s3_proxy(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
338
+ @typing.overload
339
+ def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
340
+ ...
341
+
342
+ def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None):
363
343
  """
364
- Nebius-specific S3 Proxy decorator for routing S3 requests through a local proxy service.
365
- It exists to make it easier for users to know that this decorator should only be used with
366
- a Neo Cloud like Nebius.
344
+ Specifies secrets to be retrieved and injected as environment variables prior to
345
+ the execution of a step.
346
+
347
+
348
+ Parameters
349
+ ----------
350
+ sources : List[Union[str, Dict[str, Any]]], default: []
351
+ List of secret specs, defining how the secrets are to be retrieved
352
+ role : str, optional, default: None
353
+ Role to use for fetching secrets
367
354
  """
368
355
  ...
369
356
 
@@ -419,56 +406,284 @@ def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typ
419
406
  ...
420
407
 
421
408
  @typing.overload
422
- def coreweave_s3_proxy(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
409
+ def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
423
410
  """
424
- CoreWeave-specific S3 Proxy decorator for routing S3 requests through a local proxy service.
425
- It exists to make it easier for users to know that this decorator should only be used with
426
- a Neo Cloud like CoreWeave.
411
+ Internal decorator to support Fast bakery
427
412
  """
428
413
  ...
429
414
 
430
415
  @typing.overload
431
- def coreweave_s3_proxy(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
416
+ def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
432
417
  ...
433
418
 
434
- def coreweave_s3_proxy(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
419
+ def fast_bakery_internal(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
435
420
  """
436
- CoreWeave-specific S3 Proxy decorator for routing S3 requests through a local proxy service.
437
- It exists to make it easier for users to know that this decorator should only be used with
438
- a Neo Cloud like CoreWeave.
421
+ Internal decorator to support Fast bakery
439
422
  """
440
423
  ...
441
424
 
442
- @typing.overload
443
- def resources(*, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
425
+ def nvct(*, gpu: int, gpu_type: str) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
444
426
  """
445
- Specifies the resources needed when executing this step.
446
-
447
- Use `@resources` to specify the resource requirements
448
- independently of the specific compute layer (`@batch`, `@kubernetes`).
449
-
450
- You can choose the compute layer on the command line by executing e.g.
451
- ```
452
- python myflow.py run --with batch
453
- ```
454
- or
455
- ```
456
- python myflow.py run --with kubernetes
457
- ```
458
- which executes the flow on the desired system using the
459
- requirements specified in `@resources`.
427
+ Specifies that this step should execute on DGX cloud.
460
428
 
461
429
 
462
430
  Parameters
463
431
  ----------
464
- cpu : int, default 1
465
- Number of CPUs required for this step.
466
- gpu : int, optional, default None
467
- Number of GPUs required for this step.
468
- disk : int, optional, default None
469
- Disk size (in MB) required for this step. Only applies on Kubernetes.
470
- memory : int, default 4096
471
- Memory size (in MB) required for this step.
432
+ gpu : int
433
+ Number of GPUs to use.
434
+ gpu_type : str
435
+ Type of Nvidia GPU to use.
436
+ """
437
+ ...
438
+
439
+ @typing.overload
440
+ def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
441
+ """
442
+ Decorator prototype for all step decorators. This function gets specialized
443
+ and imported for all decorators types by _import_plugin_decorators().
444
+ """
445
+ ...
446
+
447
+ @typing.overload
448
+ def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
449
+ ...
450
+
451
+ def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
452
+ """
453
+ Decorator prototype for all step decorators. This function gets specialized
454
+ and imported for all decorators types by _import_plugin_decorators().
455
+ """
456
+ ...
457
+
458
+ @typing.overload
459
+ def test_append_card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
460
+ """
461
+ A simple decorator that demonstrates using CardDecoratorInjector
462
+ to inject a card and render simple markdown content.
463
+ """
464
+ ...
465
+
466
+ @typing.overload
467
+ def test_append_card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
468
+ ...
469
+
470
+ def test_append_card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
471
+ """
472
+ A simple decorator that demonstrates using CardDecoratorInjector
473
+ to inject a card and render simple markdown content.
474
+ """
475
+ ...
476
+
477
+ @typing.overload
478
+ def checkpoint(*, load_policy: str = 'fresh', temp_dir_root: str = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
479
+ """
480
+ Enables checkpointing for a step.
481
+
482
+ > Examples
483
+
484
+ - Saving Checkpoints
485
+
486
+ ```python
487
+ @checkpoint
488
+ @step
489
+ def train(self):
490
+ model = create_model(self.parameters, checkpoint_path = None)
491
+ for i in range(self.epochs):
492
+ # some training logic
493
+ loss = model.train(self.dataset)
494
+ if i % 10 == 0:
495
+ model.save(
496
+ current.checkpoint.directory,
497
+ )
498
+ # saves the contents of the `current.checkpoint.directory` as a checkpoint
499
+ # and returns a reference dictionary to the checkpoint saved in the datastore
500
+ self.latest_checkpoint = current.checkpoint.save(
501
+ name="epoch_checkpoint",
502
+ metadata={
503
+ "epoch": i,
504
+ "loss": loss,
505
+ }
506
+ )
507
+ ```
508
+
509
+ - Using Loaded Checkpoints
510
+
511
+ ```python
512
+ @retry(times=3)
513
+ @checkpoint
514
+ @step
515
+ def train(self):
516
+ # Assume that the task has restarted and the previous attempt of the task
517
+ # saved a checkpoint
518
+ checkpoint_path = None
519
+ if current.checkpoint.is_loaded: # Check if a checkpoint is loaded
520
+ print("Loaded checkpoint from the previous attempt")
521
+ checkpoint_path = current.checkpoint.directory
522
+
523
+ model = create_model(self.parameters, checkpoint_path = checkpoint_path)
524
+ for i in range(self.epochs):
525
+ ...
526
+ ```
527
+
528
+
529
+ Parameters
530
+ ----------
531
+ load_policy : str, default: "fresh"
532
+ The policy for loading the checkpoint. The following policies are supported:
533
+ - "eager": Loads the the latest available checkpoint within the namespace.
534
+ With this mode, the latest checkpoint written by any previous task (can be even a different run) of the step
535
+ will be loaded at the start of the task.
536
+ - "none": Do not load any checkpoint
537
+ - "fresh": Loads the lastest checkpoint created within the running Task.
538
+ This mode helps loading checkpoints across various retry attempts of the same task.
539
+ With this mode, no checkpoint will be loaded at the start of a task but any checkpoints
540
+ created within the task will be loaded when the task is retries execution on failure.
541
+
542
+ temp_dir_root : str, default: None
543
+ The root directory under which `current.checkpoint.directory` will be created.
544
+ """
545
+ ...
546
+
547
+ @typing.overload
548
+ def checkpoint(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
549
+ ...
550
+
551
+ @typing.overload
552
+ def checkpoint(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
553
+ ...
554
+
555
+ def checkpoint(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, load_policy: str = 'fresh', temp_dir_root: str = None):
556
+ """
557
+ Enables checkpointing for a step.
558
+
559
+ > Examples
560
+
561
+ - Saving Checkpoints
562
+
563
+ ```python
564
+ @checkpoint
565
+ @step
566
+ def train(self):
567
+ model = create_model(self.parameters, checkpoint_path = None)
568
+ for i in range(self.epochs):
569
+ # some training logic
570
+ loss = model.train(self.dataset)
571
+ if i % 10 == 0:
572
+ model.save(
573
+ current.checkpoint.directory,
574
+ )
575
+ # saves the contents of the `current.checkpoint.directory` as a checkpoint
576
+ # and returns a reference dictionary to the checkpoint saved in the datastore
577
+ self.latest_checkpoint = current.checkpoint.save(
578
+ name="epoch_checkpoint",
579
+ metadata={
580
+ "epoch": i,
581
+ "loss": loss,
582
+ }
583
+ )
584
+ ```
585
+
586
+ - Using Loaded Checkpoints
587
+
588
+ ```python
589
+ @retry(times=3)
590
+ @checkpoint
591
+ @step
592
+ def train(self):
593
+ # Assume that the task has restarted and the previous attempt of the task
594
+ # saved a checkpoint
595
+ checkpoint_path = None
596
+ if current.checkpoint.is_loaded: # Check if a checkpoint is loaded
597
+ print("Loaded checkpoint from the previous attempt")
598
+ checkpoint_path = current.checkpoint.directory
599
+
600
+ model = create_model(self.parameters, checkpoint_path = checkpoint_path)
601
+ for i in range(self.epochs):
602
+ ...
603
+ ```
604
+
605
+
606
+ Parameters
607
+ ----------
608
+ load_policy : str, default: "fresh"
609
+ The policy for loading the checkpoint. The following policies are supported:
610
+ - "eager": Loads the the latest available checkpoint within the namespace.
611
+ With this mode, the latest checkpoint written by any previous task (can be even a different run) of the step
612
+ will be loaded at the start of the task.
613
+ - "none": Do not load any checkpoint
614
+ - "fresh": Loads the lastest checkpoint created within the running Task.
615
+ This mode helps loading checkpoints across various retry attempts of the same task.
616
+ With this mode, no checkpoint will be loaded at the start of a task but any checkpoints
617
+ created within the task will be loaded when the task is retries execution on failure.
618
+
619
+ temp_dir_root : str, default: None
620
+ The root directory under which `current.checkpoint.directory` will be created.
621
+ """
622
+ ...
623
+
624
+ @typing.overload
625
+ def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
626
+ """
627
+ Specifies environment variables to be set prior to the execution of a step.
628
+
629
+
630
+ Parameters
631
+ ----------
632
+ vars : Dict[str, str], default {}
633
+ Dictionary of environment variables to set.
634
+ """
635
+ ...
636
+
637
+ @typing.overload
638
+ def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
639
+ ...
640
+
641
+ @typing.overload
642
+ def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
643
+ ...
644
+
645
+ def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
646
+ """
647
+ Specifies environment variables to be set prior to the execution of a step.
648
+
649
+
650
+ Parameters
651
+ ----------
652
+ vars : Dict[str, str], default {}
653
+ Dictionary of environment variables to set.
654
+ """
655
+ ...
656
+
657
+ @typing.overload
658
+ def resources(*, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
659
+ """
660
+ Specifies the resources needed when executing this step.
661
+
662
+ Use `@resources` to specify the resource requirements
663
+ independently of the specific compute layer (`@batch`, `@kubernetes`).
664
+
665
+ You can choose the compute layer on the command line by executing e.g.
666
+ ```
667
+ python myflow.py run --with batch
668
+ ```
669
+ or
670
+ ```
671
+ python myflow.py run --with kubernetes
672
+ ```
673
+ which executes the flow on the desired system using the
674
+ requirements specified in `@resources`.
675
+
676
+
677
+ Parameters
678
+ ----------
679
+ cpu : int, default 1
680
+ Number of CPUs required for this step.
681
+ gpu : int, optional, default None
682
+ Number of GPUs required for this step.
683
+ disk : int, optional, default None
684
+ Disk size (in MB) required for this step. Only applies on Kubernetes.
685
+ memory : int, default 4096
686
+ Memory size (in MB) required for this step.
472
687
  shared_memory : int, optional, default None
473
688
  The value for the size (in MiB) of the /dev/shm volume for this step.
474
689
  This parameter maps to the `--shm-size` option in Docker.
@@ -518,7 +733,66 @@ def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None]
518
733
  """
519
734
  ...
520
735
 
521
- def huggingface_hub(*, temp_dir_root: typing.Optional[str] = None, cache_scope: typing.Optional[str] = None, load: typing.Union[typing.List[str], typing.List[typing.Tuple[typing.Dict, str]], typing.List[typing.Tuple[str, str]], typing.List[typing.Dict], None]) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
736
+ @typing.overload
737
+ def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
738
+ """
739
+ Specifies the Conda environment for the step.
740
+
741
+ Information in this decorator will augment any
742
+ attributes set in the `@conda_base` flow-level decorator. Hence,
743
+ you can use `@conda_base` to set packages required by all
744
+ steps and use `@conda` to specify step-specific overrides.
745
+
746
+
747
+ Parameters
748
+ ----------
749
+ packages : Dict[str, str], default {}
750
+ Packages to use for this step. The key is the name of the package
751
+ and the value is the version to use.
752
+ libraries : Dict[str, str], default {}
753
+ Supported for backward compatibility. When used with packages, packages will take precedence.
754
+ python : str, optional, default None
755
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
756
+ that the version used will correspond to the version of the Python interpreter used to start the run.
757
+ disabled : bool, default False
758
+ If set to True, disables @conda.
759
+ """
760
+ ...
761
+
762
+ @typing.overload
763
+ def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
764
+ ...
765
+
766
+ @typing.overload
767
+ def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
768
+ ...
769
+
770
+ def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
771
+ """
772
+ Specifies the Conda environment for the step.
773
+
774
+ Information in this decorator will augment any
775
+ attributes set in the `@conda_base` flow-level decorator. Hence,
776
+ you can use `@conda_base` to set packages required by all
777
+ steps and use `@conda` to specify step-specific overrides.
778
+
779
+
780
+ Parameters
781
+ ----------
782
+ packages : Dict[str, str], default {}
783
+ Packages to use for this step. The key is the name of the package
784
+ and the value is the version to use.
785
+ libraries : Dict[str, str], default {}
786
+ Supported for backward compatibility. When used with packages, packages will take precedence.
787
+ python : str, optional, default None
788
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
789
+ that the version used will correspond to the version of the Python interpreter used to start the run.
790
+ disabled : bool, default False
791
+ If set to True, disables @conda.
792
+ """
793
+ ...
794
+
795
+ def huggingface_hub(*, temp_dir_root: typing.Optional[str] = None, cache_scope: typing.Optional[str] = None, load: typing.Union[typing.List[str], typing.List[typing.Tuple[typing.Dict, str]], typing.List[typing.Tuple[str, str]], typing.List[typing.Dict], None]) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
522
796
  """
523
797
  Decorator that helps cache, version, and store models/datasets from the Hugging Face Hub.
524
798
 
@@ -646,57 +920,45 @@ def huggingface_hub(*, temp_dir_root: typing.Optional[str] = None, cache_scope:
646
920
  ...
647
921
 
648
922
  @typing.overload
649
- def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
923
+ def coreweave_s3_proxy(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
650
924
  """
651
- Specifies the number of times the task corresponding
652
- to a step needs to be retried.
653
-
654
- This decorator is useful for handling transient errors, such as networking issues.
655
- If your task contains operations that can't be retried safely, e.g. database updates,
656
- it is advisable to annotate it with `@retry(times=0)`.
657
-
658
- This can be used in conjunction with the `@catch` decorator. The `@catch`
659
- decorator will execute a no-op task after all retries have been exhausted,
660
- ensuring that the flow execution can continue.
661
-
662
-
663
- Parameters
664
- ----------
665
- times : int, default 3
666
- Number of times to retry this task.
667
- minutes_between_retries : int, default 2
668
- Number of minutes between retries.
925
+ CoreWeave-specific S3 Proxy decorator for routing S3 requests through a local proxy service.
926
+ It exists to make it easier for users to know that this decorator should only be used with
927
+ a Neo Cloud like CoreWeave.
669
928
  """
670
929
  ...
671
930
 
672
931
  @typing.overload
673
- def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
932
+ def coreweave_s3_proxy(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
674
933
  ...
675
934
 
676
- @typing.overload
677
- def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
935
+ def coreweave_s3_proxy(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
936
+ """
937
+ CoreWeave-specific S3 Proxy decorator for routing S3 requests through a local proxy service.
938
+ It exists to make it easier for users to know that this decorator should only be used with
939
+ a Neo Cloud like CoreWeave.
940
+ """
678
941
  ...
679
942
 
680
- def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
943
+ def s3_proxy(*, integration_name: typing.Optional[str] = None, write_mode: typing.Optional[str] = None, debug: typing.Optional[bool] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
681
944
  """
682
- Specifies the number of times the task corresponding
683
- to a step needs to be retried.
684
-
685
- This decorator is useful for handling transient errors, such as networking issues.
686
- If your task contains operations that can't be retried safely, e.g. database updates,
687
- it is advisable to annotate it with `@retry(times=0)`.
688
-
689
- This can be used in conjunction with the `@catch` decorator. The `@catch`
690
- decorator will execute a no-op task after all retries have been exhausted,
691
- ensuring that the flow execution can continue.
945
+ S3 Proxy decorator for routing S3 requests through a local proxy service.
692
946
 
693
947
 
694
948
  Parameters
695
949
  ----------
696
- times : int, default 3
697
- Number of times to retry this task.
698
- minutes_between_retries : int, default 2
699
- Number of minutes between retries.
950
+ integration_name : str, optional
951
+ Name of the S3 proxy integration. If not specified, will use the only
952
+ available S3 proxy integration in the namespace (fails if multiple exist).
953
+ write_mode : str, optional
954
+ The desired behavior during write operations to target (origin) S3 bucket.
955
+ allowed options are:
956
+ "origin-and-cache" -> write to both the target S3 bucket and local object
957
+ storage
958
+ "origin" -> only write to the target S3 bucket
959
+ "cache" -> only write to the object storage service used for caching
960
+ debug : bool, optional
961
+ Enable debug logging for proxy operations.
700
962
  """
701
963
  ...
702
964
 
@@ -764,493 +1026,87 @@ def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: ty
764
1026
  tmpfs_path : str, optional, default /metaflow_temp
765
1027
  Path to tmpfs mount for this step.
766
1028
  persistent_volume_claims : Dict[str, str], optional, default None
767
- A map (dictionary) of persistent volumes to be mounted to the pod for this step. The map is from persistent
768
- volumes to the path to which the volume is to be mounted, e.g., `{'pvc-name': '/path/to/mount/on'}`.
769
- shared_memory: int, optional
770
- Shared memory size (in MiB) required for this step
771
- port: int, optional
772
- Port number to specify in the Kubernetes job object
773
- compute_pool : str, optional, default None
774
- Compute pool to be used for for this step.
775
- If not specified, any accessible compute pool within the perimeter is used.
776
- hostname_resolution_timeout: int, default 10 * 60
777
- Timeout in seconds for the workers tasks in the gang scheduled cluster to resolve the hostname of control task.
778
- Only applicable when @parallel is used.
779
- qos: str, default: Burstable
780
- Quality of Service class to assign to the pod. Supported values are: Guaranteed, Burstable, BestEffort
781
-
782
- security_context: Dict[str, Any], optional, default None
783
- Container security context. Applies to the task container. Allows the following keys:
784
- - privileged: bool, optional, default None
785
- - allow_privilege_escalation: bool, optional, default None
786
- - run_as_user: int, optional, default None
787
- - run_as_group: int, optional, default None
788
- - run_as_non_root: bool, optional, default None
789
- """
790
- ...
791
-
792
- @typing.overload
793
- def app_deploy(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
794
- """
795
- Decorator prototype for all step decorators. This function gets specialized
796
- and imported for all decorators types by _import_plugin_decorators().
797
- """
798
- ...
799
-
800
- @typing.overload
801
- def app_deploy(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
802
- ...
803
-
804
- def app_deploy(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
805
- """
806
- Decorator prototype for all step decorators. This function gets specialized
807
- and imported for all decorators types by _import_plugin_decorators().
808
- """
809
- ...
810
-
811
- def s3_proxy(*, integration_name: typing.Optional[str] = None, write_mode: typing.Optional[str] = None, debug: typing.Optional[bool] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
812
- """
813
- S3 Proxy decorator for routing S3 requests through a local proxy service.
814
-
815
-
816
- Parameters
817
- ----------
818
- integration_name : str, optional
819
- Name of the S3 proxy integration. If not specified, will use the only
820
- available S3 proxy integration in the namespace (fails if multiple exist).
821
- write_mode : str, optional
822
- The desired behavior during write operations to target (origin) S3 bucket.
823
- allowed options are:
824
- "origin-and-cache" -> write to both the target S3 bucket and local object
825
- storage
826
- "origin" -> only write to the target S3 bucket
827
- "cache" -> only write to the object storage service used for caching
828
- debug : bool, optional
829
- Enable debug logging for proxy operations.
830
- """
831
- ...
832
-
833
- @typing.overload
834
- def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
835
- """
836
- Specifies secrets to be retrieved and injected as environment variables prior to
837
- the execution of a step.
838
-
839
-
840
- Parameters
841
- ----------
842
- sources : List[Union[str, Dict[str, Any]]], default: []
843
- List of secret specs, defining how the secrets are to be retrieved
844
- role : str, optional, default: None
845
- Role to use for fetching secrets
846
- """
847
- ...
848
-
849
- @typing.overload
850
- def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
851
- ...
852
-
853
- @typing.overload
854
- def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
855
- ...
856
-
857
- def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None):
858
- """
859
- Specifies secrets to be retrieved and injected as environment variables prior to
860
- the execution of a step.
861
-
862
-
863
- Parameters
864
- ----------
865
- sources : List[Union[str, Dict[str, Any]]], default: []
866
- List of secret specs, defining how the secrets are to be retrieved
867
- role : str, optional, default: None
868
- Role to use for fetching secrets
869
- """
870
- ...
871
-
872
- @typing.overload
873
- def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
874
- """
875
- Specifies environment variables to be set prior to the execution of a step.
876
-
877
-
878
- Parameters
879
- ----------
880
- vars : Dict[str, str], default {}
881
- Dictionary of environment variables to set.
882
- """
883
- ...
884
-
885
- @typing.overload
886
- def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
887
- ...
888
-
889
- @typing.overload
890
- def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
891
- ...
892
-
893
- def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
894
- """
895
- Specifies environment variables to be set prior to the execution of a step.
896
-
897
-
898
- Parameters
899
- ----------
900
- vars : Dict[str, str], default {}
901
- Dictionary of environment variables to set.
902
- """
903
- ...
904
-
905
- @typing.overload
906
- def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
907
- """
908
- Specifies a timeout for your step.
909
-
910
- This decorator is useful if this step may hang indefinitely.
911
-
912
- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
913
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
914
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
915
-
916
- Note that all the values specified in parameters are added together so if you specify
917
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
918
-
919
-
920
- Parameters
921
- ----------
922
- seconds : int, default 0
923
- Number of seconds to wait prior to timing out.
924
- minutes : int, default 0
925
- Number of minutes to wait prior to timing out.
926
- hours : int, default 0
927
- Number of hours to wait prior to timing out.
928
- """
929
- ...
930
-
931
- @typing.overload
932
- def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
933
- ...
934
-
935
- @typing.overload
936
- def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
937
- ...
938
-
939
- def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
940
- """
941
- Specifies a timeout for your step.
942
-
943
- This decorator is useful if this step may hang indefinitely.
944
-
945
- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
946
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
947
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
948
-
949
- Note that all the values specified in parameters are added together so if you specify
950
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
951
-
952
-
953
- Parameters
954
- ----------
955
- seconds : int, default 0
956
- Number of seconds to wait prior to timing out.
957
- minutes : int, default 0
958
- Number of minutes to wait prior to timing out.
959
- hours : int, default 0
960
- Number of hours to wait prior to timing out.
961
- """
962
- ...
963
-
964
- @typing.overload
965
- def checkpoint(*, load_policy: str = 'fresh', temp_dir_root: str = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
966
- """
967
- Enables checkpointing for a step.
968
-
969
- > Examples
970
-
971
- - Saving Checkpoints
972
-
973
- ```python
974
- @checkpoint
975
- @step
976
- def train(self):
977
- model = create_model(self.parameters, checkpoint_path = None)
978
- for i in range(self.epochs):
979
- # some training logic
980
- loss = model.train(self.dataset)
981
- if i % 10 == 0:
982
- model.save(
983
- current.checkpoint.directory,
984
- )
985
- # saves the contents of the `current.checkpoint.directory` as a checkpoint
986
- # and returns a reference dictionary to the checkpoint saved in the datastore
987
- self.latest_checkpoint = current.checkpoint.save(
988
- name="epoch_checkpoint",
989
- metadata={
990
- "epoch": i,
991
- "loss": loss,
992
- }
993
- )
994
- ```
995
-
996
- - Using Loaded Checkpoints
997
-
998
- ```python
999
- @retry(times=3)
1000
- @checkpoint
1001
- @step
1002
- def train(self):
1003
- # Assume that the task has restarted and the previous attempt of the task
1004
- # saved a checkpoint
1005
- checkpoint_path = None
1006
- if current.checkpoint.is_loaded: # Check if a checkpoint is loaded
1007
- print("Loaded checkpoint from the previous attempt")
1008
- checkpoint_path = current.checkpoint.directory
1009
-
1010
- model = create_model(self.parameters, checkpoint_path = checkpoint_path)
1011
- for i in range(self.epochs):
1012
- ...
1013
- ```
1014
-
1015
-
1016
- Parameters
1017
- ----------
1018
- load_policy : str, default: "fresh"
1019
- The policy for loading the checkpoint. The following policies are supported:
1020
- - "eager": Loads the the latest available checkpoint within the namespace.
1021
- With this mode, the latest checkpoint written by any previous task (can be even a different run) of the step
1022
- will be loaded at the start of the task.
1023
- - "none": Do not load any checkpoint
1024
- - "fresh": Loads the lastest checkpoint created within the running Task.
1025
- This mode helps loading checkpoints across various retry attempts of the same task.
1026
- With this mode, no checkpoint will be loaded at the start of a task but any checkpoints
1027
- created within the task will be loaded when the task is retries execution on failure.
1028
-
1029
- temp_dir_root : str, default: None
1030
- The root directory under which `current.checkpoint.directory` will be created.
1031
- """
1032
- ...
1033
-
1034
- @typing.overload
1035
- def checkpoint(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1036
- ...
1037
-
1038
- @typing.overload
1039
- def checkpoint(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1040
- ...
1041
-
1042
- def checkpoint(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, load_policy: str = 'fresh', temp_dir_root: str = None):
1043
- """
1044
- Enables checkpointing for a step.
1045
-
1046
- > Examples
1047
-
1048
- - Saving Checkpoints
1049
-
1050
- ```python
1051
- @checkpoint
1052
- @step
1053
- def train(self):
1054
- model = create_model(self.parameters, checkpoint_path = None)
1055
- for i in range(self.epochs):
1056
- # some training logic
1057
- loss = model.train(self.dataset)
1058
- if i % 10 == 0:
1059
- model.save(
1060
- current.checkpoint.directory,
1061
- )
1062
- # saves the contents of the `current.checkpoint.directory` as a checkpoint
1063
- # and returns a reference dictionary to the checkpoint saved in the datastore
1064
- self.latest_checkpoint = current.checkpoint.save(
1065
- name="epoch_checkpoint",
1066
- metadata={
1067
- "epoch": i,
1068
- "loss": loss,
1069
- }
1070
- )
1071
- ```
1072
-
1073
- - Using Loaded Checkpoints
1074
-
1075
- ```python
1076
- @retry(times=3)
1077
- @checkpoint
1078
- @step
1079
- def train(self):
1080
- # Assume that the task has restarted and the previous attempt of the task
1081
- # saved a checkpoint
1082
- checkpoint_path = None
1083
- if current.checkpoint.is_loaded: # Check if a checkpoint is loaded
1084
- print("Loaded checkpoint from the previous attempt")
1085
- checkpoint_path = current.checkpoint.directory
1086
-
1087
- model = create_model(self.parameters, checkpoint_path = checkpoint_path)
1088
- for i in range(self.epochs):
1089
- ...
1090
- ```
1091
-
1092
-
1093
- Parameters
1094
- ----------
1095
- load_policy : str, default: "fresh"
1096
- The policy for loading the checkpoint. The following policies are supported:
1097
- - "eager": Loads the the latest available checkpoint within the namespace.
1098
- With this mode, the latest checkpoint written by any previous task (can be even a different run) of the step
1099
- will be loaded at the start of the task.
1100
- - "none": Do not load any checkpoint
1101
- - "fresh": Loads the lastest checkpoint created within the running Task.
1102
- This mode helps loading checkpoints across various retry attempts of the same task.
1103
- With this mode, no checkpoint will be loaded at the start of a task but any checkpoints
1104
- created within the task will be loaded when the task is retries execution on failure.
1105
-
1106
- temp_dir_root : str, default: None
1107
- The root directory under which `current.checkpoint.directory` will be created.
1108
- """
1109
- ...
1110
-
1111
- @typing.overload
1112
- def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1113
- """
1114
- Specifies that the step will success under all circumstances.
1115
-
1116
- The decorator will create an optional artifact, specified by `var`, which
1117
- contains the exception raised. You can use it to detect the presence
1118
- of errors, indicating that all happy-path artifacts produced by the step
1119
- are missing.
1120
-
1121
-
1122
- Parameters
1123
- ----------
1124
- var : str, optional, default None
1125
- Name of the artifact in which to store the caught exception.
1126
- If not specified, the exception is not stored.
1127
- print_exception : bool, default True
1128
- Determines whether or not the exception is printed to
1129
- stdout when caught.
1130
- """
1131
- ...
1132
-
1133
- @typing.overload
1134
- def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1135
- ...
1136
-
1137
- @typing.overload
1138
- def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1139
- ...
1140
-
1141
- def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
1142
- """
1143
- Specifies that the step will success under all circumstances.
1144
-
1145
- The decorator will create an optional artifact, specified by `var`, which
1146
- contains the exception raised. You can use it to detect the presence
1147
- of errors, indicating that all happy-path artifacts produced by the step
1148
- are missing.
1149
-
1150
-
1151
- Parameters
1152
- ----------
1153
- var : str, optional, default None
1154
- Name of the artifact in which to store the caught exception.
1155
- If not specified, the exception is not stored.
1156
- print_exception : bool, default True
1157
- Determines whether or not the exception is printed to
1158
- stdout when caught.
1159
- """
1160
- ...
1161
-
1162
- @typing.overload
1163
- def test_append_card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1164
- """
1165
- A simple decorator that demonstrates using CardDecoratorInjector
1166
- to inject a card and render simple markdown content.
1167
- """
1168
- ...
1169
-
1170
- @typing.overload
1171
- def test_append_card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1172
- ...
1173
-
1174
- def test_append_card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
1175
- """
1176
- A simple decorator that demonstrates using CardDecoratorInjector
1177
- to inject a card and render simple markdown content.
1178
- """
1179
- ...
1180
-
1181
- @typing.overload
1182
- def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1183
- """
1184
- Internal decorator to support Fast bakery
1185
- """
1186
- ...
1187
-
1188
- @typing.overload
1189
- def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1190
- ...
1191
-
1192
- def fast_bakery_internal(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
1193
- """
1194
- Internal decorator to support Fast bakery
1029
+ A map (dictionary) of persistent volumes to be mounted to the pod for this step. The map is from persistent
1030
+ volumes to the path to which the volume is to be mounted, e.g., `{'pvc-name': '/path/to/mount/on'}`.
1031
+ shared_memory: int, optional
1032
+ Shared memory size (in MiB) required for this step
1033
+ port: int, optional
1034
+ Port number to specify in the Kubernetes job object
1035
+ compute_pool : str, optional, default None
1036
+ Compute pool to be used for for this step.
1037
+ If not specified, any accessible compute pool within the perimeter is used.
1038
+ hostname_resolution_timeout: int, default 10 * 60
1039
+ Timeout in seconds for the workers tasks in the gang scheduled cluster to resolve the hostname of control task.
1040
+ Only applicable when @parallel is used.
1041
+ qos: str, default: Burstable
1042
+ Quality of Service class to assign to the pod. Supported values are: Guaranteed, Burstable, BestEffort
1043
+
1044
+ security_context: Dict[str, Any], optional, default None
1045
+ Container security context. Applies to the task container. Allows the following keys:
1046
+ - privileged: bool, optional, default None
1047
+ - allow_privilege_escalation: bool, optional, default None
1048
+ - run_as_user: int, optional, default None
1049
+ - run_as_group: int, optional, default None
1050
+ - run_as_non_root: bool, optional, default None
1195
1051
  """
1196
1052
  ...
1197
1053
 
1198
1054
  @typing.overload
1199
- def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1055
+ def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1200
1056
  """
1201
- Specifies the Conda environment for the step.
1057
+ Specifies a timeout for your step.
1202
1058
 
1203
- Information in this decorator will augment any
1204
- attributes set in the `@conda_base` flow-level decorator. Hence,
1205
- you can use `@conda_base` to set packages required by all
1206
- steps and use `@conda` to specify step-specific overrides.
1059
+ This decorator is useful if this step may hang indefinitely.
1060
+
1061
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
1062
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
1063
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.
1064
+
1065
+ Note that all the values specified in parameters are added together so if you specify
1066
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
1207
1067
 
1208
1068
 
1209
1069
  Parameters
1210
1070
  ----------
1211
- packages : Dict[str, str], default {}
1212
- Packages to use for this step. The key is the name of the package
1213
- and the value is the version to use.
1214
- libraries : Dict[str, str], default {}
1215
- Supported for backward compatibility. When used with packages, packages will take precedence.
1216
- python : str, optional, default None
1217
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1218
- that the version used will correspond to the version of the Python interpreter used to start the run.
1219
- disabled : bool, default False
1220
- If set to True, disables @conda.
1071
+ seconds : int, default 0
1072
+ Number of seconds to wait prior to timing out.
1073
+ minutes : int, default 0
1074
+ Number of minutes to wait prior to timing out.
1075
+ hours : int, default 0
1076
+ Number of hours to wait prior to timing out.
1221
1077
  """
1222
1078
  ...
1223
1079
 
1224
1080
  @typing.overload
1225
- def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1081
+ def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1226
1082
  ...
1227
1083
 
1228
1084
  @typing.overload
1229
- def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1085
+ def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1230
1086
  ...
1231
1087
 
1232
- def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
1088
+ def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
1233
1089
  """
1234
- Specifies the Conda environment for the step.
1090
+ Specifies a timeout for your step.
1235
1091
 
1236
- Information in this decorator will augment any
1237
- attributes set in the `@conda_base` flow-level decorator. Hence,
1238
- you can use `@conda_base` to set packages required by all
1239
- steps and use `@conda` to specify step-specific overrides.
1092
+ This decorator is useful if this step may hang indefinitely.
1093
+
1094
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
1095
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
1096
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.
1097
+
1098
+ Note that all the values specified in parameters are added together so if you specify
1099
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
1240
1100
 
1241
1101
 
1242
1102
  Parameters
1243
1103
  ----------
1244
- packages : Dict[str, str], default {}
1245
- Packages to use for this step. The key is the name of the package
1246
- and the value is the version to use.
1247
- libraries : Dict[str, str], default {}
1248
- Supported for backward compatibility. When used with packages, packages will take precedence.
1249
- python : str, optional, default None
1250
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1251
- that the version used will correspond to the version of the Python interpreter used to start the run.
1252
- disabled : bool, default False
1253
- If set to True, disables @conda.
1104
+ seconds : int, default 0
1105
+ Number of seconds to wait prior to timing out.
1106
+ minutes : int, default 0
1107
+ Number of minutes to wait prior to timing out.
1108
+ hours : int, default 0
1109
+ Number of hours to wait prior to timing out.
1254
1110
  """
1255
1111
  ...
1256
1112
 
@@ -1303,7 +1159,7 @@ def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typ
1303
1159
  """
1304
1160
  ...
1305
1161
 
1306
- def nvct(*, gpu: int, gpu_type: str) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1162
+ def nvidia(*, gpu: int, gpu_type: str, queue_timeout: int) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1307
1163
  """
1308
1164
  Specifies that this step should execute on DGX cloud.
1309
1165
 
@@ -1314,84 +1170,228 @@ def nvct(*, gpu: int, gpu_type: str) -> typing.Callable[[typing.Union[typing.Cal
1314
1170
  Number of GPUs to use.
1315
1171
  gpu_type : str
1316
1172
  Type of Nvidia GPU to use.
1173
+ queue_timeout : int
1174
+ Time to keep the job in NVCF's queue.
1175
+ """
1176
+ ...
1177
+
1178
+ @typing.overload
1179
+ def model(*, load: typing.Union[typing.List[str], str, typing.List[typing.Tuple[str, typing.Optional[str]]]] = None, temp_dir_root: str = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1180
+ """
1181
+ Enables loading / saving of models within a step.
1182
+
1183
+ > Examples
1184
+ - Saving Models
1185
+ ```python
1186
+ @model
1187
+ @step
1188
+ def train(self):
1189
+ # current.model.save returns a dictionary reference to the model saved
1190
+ self.my_model = current.model.save(
1191
+ path_to_my_model,
1192
+ label="my_model",
1193
+ metadata={
1194
+ "epochs": 10,
1195
+ "batch-size": 32,
1196
+ "learning-rate": 0.001,
1197
+ }
1198
+ )
1199
+ self.next(self.test)
1200
+
1201
+ @model(load="my_model")
1202
+ @step
1203
+ def test(self):
1204
+ # `current.model.loaded` returns a dictionary of the loaded models
1205
+ # where the key is the name of the artifact and the value is the path to the model
1206
+ print(os.listdir(current.model.loaded["my_model"]))
1207
+ self.next(self.end)
1208
+ ```
1209
+
1210
+ - Loading models
1211
+ ```python
1212
+ @step
1213
+ def train(self):
1214
+ # current.model.load returns the path to the model loaded
1215
+ checkpoint_path = current.model.load(
1216
+ self.checkpoint_key,
1217
+ )
1218
+ model_path = current.model.load(
1219
+ self.model,
1220
+ )
1221
+ self.next(self.test)
1222
+ ```
1223
+
1224
+
1225
+ Parameters
1226
+ ----------
1227
+ load : Union[List[str],str,List[Tuple[str,Union[str,None]]]], default: None
1228
+ Artifact name/s referencing the models/checkpoints to load. Artifact names refer to the names of the instance variables set to `self`.
1229
+ These artifact names give to `load` be reference objects or reference `key` string's from objects created by `current.checkpoint` / `current.model` / `current.huggingface_hub`.
1230
+ If a list of tuples is provided, the first element is the artifact name and the second element is the path the artifact needs be unpacked on
1231
+ the local filesystem. If the second element is None, the artifact will be unpacked in the current working directory.
1232
+ If a string is provided, then the artifact corresponding to that name will be loaded in the current working directory.
1233
+
1234
+ temp_dir_root : str, default: None
1235
+ The root directory under which `current.model.loaded` will store loaded models
1236
+ """
1237
+ ...
1238
+
1239
+ @typing.overload
1240
+ def model(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1241
+ ...
1242
+
1243
+ @typing.overload
1244
+ def model(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1245
+ ...
1246
+
1247
+ def model(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, load: typing.Union[typing.List[str], str, typing.List[typing.Tuple[str, typing.Optional[str]]]] = None, temp_dir_root: str = None):
1248
+ """
1249
+ Enables loading / saving of models within a step.
1250
+
1251
+ > Examples
1252
+ - Saving Models
1253
+ ```python
1254
+ @model
1255
+ @step
1256
+ def train(self):
1257
+ # current.model.save returns a dictionary reference to the model saved
1258
+ self.my_model = current.model.save(
1259
+ path_to_my_model,
1260
+ label="my_model",
1261
+ metadata={
1262
+ "epochs": 10,
1263
+ "batch-size": 32,
1264
+ "learning-rate": 0.001,
1265
+ }
1266
+ )
1267
+ self.next(self.test)
1268
+
1269
+ @model(load="my_model")
1270
+ @step
1271
+ def test(self):
1272
+ # `current.model.loaded` returns a dictionary of the loaded models
1273
+ # where the key is the name of the artifact and the value is the path to the model
1274
+ print(os.listdir(current.model.loaded["my_model"]))
1275
+ self.next(self.end)
1276
+ ```
1277
+
1278
+ - Loading models
1279
+ ```python
1280
+ @step
1281
+ def train(self):
1282
+ # current.model.load returns the path to the model loaded
1283
+ checkpoint_path = current.model.load(
1284
+ self.checkpoint_key,
1285
+ )
1286
+ model_path = current.model.load(
1287
+ self.model,
1288
+ )
1289
+ self.next(self.test)
1290
+ ```
1291
+
1292
+
1293
+ Parameters
1294
+ ----------
1295
+ load : Union[List[str],str,List[Tuple[str,Union[str,None]]]], default: None
1296
+ Artifact name/s referencing the models/checkpoints to load. Artifact names refer to the names of the instance variables set to `self`.
1297
+ These artifact names give to `load` be reference objects or reference `key` string's from objects created by `current.checkpoint` / `current.model` / `current.huggingface_hub`.
1298
+ If a list of tuples is provided, the first element is the artifact name and the second element is the path the artifact needs be unpacked on
1299
+ the local filesystem. If the second element is None, the artifact will be unpacked in the current working directory.
1300
+ If a string is provided, then the artifact corresponding to that name will be loaded in the current working directory.
1301
+
1302
+ temp_dir_root : str, default: None
1303
+ The root directory under which `current.model.loaded` will store loaded models
1304
+ """
1305
+ ...
1306
+
1307
+ @typing.overload
1308
+ def nebius_s3_proxy(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1309
+ """
1310
+ Nebius-specific S3 Proxy decorator for routing S3 requests through a local proxy service.
1311
+ It exists to make it easier for users to know that this decorator should only be used with
1312
+ a Neo Cloud like Nebius.
1313
+ """
1314
+ ...
1315
+
1316
+ @typing.overload
1317
+ def nebius_s3_proxy(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1318
+ ...
1319
+
1320
+ def nebius_s3_proxy(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
1317
1321
  """
1322
+ Nebius-specific S3 Proxy decorator for routing S3 requests through a local proxy service.
1323
+ It exists to make it easier for users to know that this decorator should only be used with
1324
+ a Neo Cloud like Nebius.
1325
+ """
1326
+ ...
1327
+
1328
+ @typing.overload
1329
+ def app_deploy(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1330
+ """
1331
+ Decorator prototype for all step decorators. This function gets specialized
1332
+ and imported for all decorators types by _import_plugin_decorators().
1333
+ """
1334
+ ...
1335
+
1336
+ @typing.overload
1337
+ def app_deploy(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1318
1338
  ...
1319
1339
 
1320
- def nvidia(*, gpu: int, gpu_type: str, queue_timeout: int) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1340
+ def app_deploy(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
1321
1341
  """
1322
- Specifies that this step should execute on DGX cloud.
1323
-
1324
-
1325
- Parameters
1326
- ----------
1327
- gpu : int
1328
- Number of GPUs to use.
1329
- gpu_type : str
1330
- Type of Nvidia GPU to use.
1331
- queue_timeout : int
1332
- Time to keep the job in NVCF's queue.
1342
+ Decorator prototype for all step decorators. This function gets specialized
1343
+ and imported for all decorators types by _import_plugin_decorators().
1333
1344
  """
1334
1345
  ...
1335
1346
 
1336
- def ollama(*, models: list, backend: str, force_pull: bool, cache_update_policy: str, force_cache_update: bool, debug: bool, circuit_breaker_config: dict, timeout_config: dict) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1347
+ @typing.overload
1348
+ def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1337
1349
  """
1338
- This decorator is used to run Ollama APIs as Metaflow task sidecars.
1339
-
1340
- User code call
1341
- --------------
1342
- @ollama(
1343
- models=[...],
1344
- ...
1345
- )
1346
-
1347
- Valid backend options
1348
- ---------------------
1349
- - 'local': Run as a separate process on the local task machine.
1350
- - (TODO) 'managed': Outerbounds hosts and selects compute provider.
1351
- - (TODO) 'remote': Spin up separate instance to serve Ollama models.
1350
+ Specifies that the step will success under all circumstances.
1352
1351
 
1353
- Valid model options
1354
- -------------------
1355
- Any model here https://ollama.com/search, e.g. 'llama3.2', 'llama3.3'
1352
+ The decorator will create an optional artifact, specified by `var`, which
1353
+ contains the exception raised. You can use it to detect the presence
1354
+ of errors, indicating that all happy-path artifacts produced by the step
1355
+ are missing.
1356
1356
 
1357
1357
 
1358
1358
  Parameters
1359
1359
  ----------
1360
- models: list[str]
1361
- List of Ollama containers running models in sidecars.
1362
- backend: str
1363
- Determines where and how to run the Ollama process.
1364
- force_pull: bool
1365
- Whether to run `ollama pull` no matter what, or first check the remote cache in Metaflow datastore for this model key.
1366
- cache_update_policy: str
1367
- Cache update policy: "auto", "force", or "never".
1368
- force_cache_update: bool
1369
- Simple override for "force" cache update policy.
1370
- debug: bool
1371
- Whether to turn on verbose debugging logs.
1372
- circuit_breaker_config: dict
1373
- Configuration for circuit breaker protection. Keys: failure_threshold, recovery_timeout, reset_timeout.
1374
- timeout_config: dict
1375
- Configuration for various operation timeouts. Keys: pull, stop, health_check, install, server_startup.
1360
+ var : str, optional, default None
1361
+ Name of the artifact in which to store the caught exception.
1362
+ If not specified, the exception is not stored.
1363
+ print_exception : bool, default True
1364
+ Determines whether or not the exception is printed to
1365
+ stdout when caught.
1376
1366
  """
1377
1367
  ...
1378
1368
 
1379
1369
  @typing.overload
1380
- def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1381
- """
1382
- Decorator prototype for all step decorators. This function gets specialized
1383
- and imported for all decorators types by _import_plugin_decorators().
1384
- """
1370
+ def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1385
1371
  ...
1386
1372
 
1387
1373
  @typing.overload
1388
- def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1374
+ def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1389
1375
  ...
1390
1376
 
1391
- def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
1377
+ def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
1392
1378
  """
1393
- Decorator prototype for all step decorators. This function gets specialized
1394
- and imported for all decorators types by _import_plugin_decorators().
1379
+ Specifies that the step will success under all circumstances.
1380
+
1381
+ The decorator will create an optional artifact, specified by `var`, which
1382
+ contains the exception raised. You can use it to detect the presence
1383
+ of errors, indicating that all happy-path artifacts produced by the step
1384
+ are missing.
1385
+
1386
+
1387
+ Parameters
1388
+ ----------
1389
+ var : str, optional, default None
1390
+ Name of the artifact in which to store the caught exception.
1391
+ If not specified, the exception is not stored.
1392
+ print_exception : bool, default True
1393
+ Determines whether or not the exception is printed to
1394
+ stdout when caught.
1395
1395
  """
1396
1396
  ...
1397
1397
 
@@ -1496,10 +1496,13 @@ def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *
1496
1496
  """
1497
1497
  ...
1498
1498
 
1499
- def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1499
+ def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1500
1500
  """
1501
- The `@airflow_external_task_sensor` decorator attaches a Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
1502
- This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as a flow decorators. Adding more than one decorator will ensure that `start` step starts only after all sensors finish.
1501
+ The `@airflow_s3_key_sensor` decorator attaches a Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
1502
+ before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
1503
+ and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
1504
+ added as a flow decorators. Adding more than one decorator will ensure that `start` step
1505
+ starts only after all sensors finish.
1503
1506
 
1504
1507
 
1505
1508
  Parameters
@@ -1521,21 +1524,162 @@ def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str,
1521
1524
  Name of the sensor on Airflow
1522
1525
  description : str
1523
1526
  Description of sensor in the Airflow UI
1524
- external_dag_id : str
1525
- The dag_id that contains the task you want to wait for.
1526
- external_task_ids : List[str]
1527
- The list of task_ids that you want to wait for.
1528
- If None (default value) the sensor waits for the DAG. (Default: None)
1529
- allowed_states : List[str]
1530
- Iterable of allowed states, (Default: ['success'])
1531
- failed_states : List[str]
1532
- Iterable of failed or dis-allowed states. (Default: None)
1533
- execution_delta : datetime.timedelta
1534
- time difference with the previous execution to look at,
1535
- the default is the same logical date as the current task or DAG. (Default: None)
1536
- check_existence: bool
1537
- Set to True to check if the external task exists or check if
1538
- the DAG to wait for exists. (Default: True)
1527
+ bucket_key : Union[str, List[str]]
1528
+ The key(s) being waited on. Supports full s3:// style url or relative path from root level.
1529
+ When it's specified as a full s3:// url, please leave `bucket_name` as None
1530
+ bucket_name : str
1531
+ Name of the S3 bucket. Only needed when bucket_key is not provided as a full s3:// url.
1532
+ When specified, all the keys passed to bucket_key refers to this bucket. (Default:None)
1533
+ wildcard_match : bool
1534
+ whether the bucket_key should be interpreted as a Unix wildcard pattern. (Default: False)
1535
+ aws_conn_id : str
1536
+ a reference to the s3 connection on Airflow. (Default: None)
1537
+ verify : bool
1538
+ Whether or not to verify SSL certificates for S3 connection. (Default: None)
1539
+ """
1540
+ ...
1541
+
1542
+ @typing.overload
1543
+ def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1544
+ """
1545
+ Specifies the event(s) that this flow depends on.
1546
+
1547
+ ```
1548
+ @trigger(event='foo')
1549
+ ```
1550
+ or
1551
+ ```
1552
+ @trigger(events=['foo', 'bar'])
1553
+ ```
1554
+
1555
+ Additionally, you can specify the parameter mappings
1556
+ to map event payload to Metaflow parameters for the flow.
1557
+ ```
1558
+ @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
1559
+ ```
1560
+ or
1561
+ ```
1562
+ @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
1563
+ {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
1564
+ ```
1565
+
1566
+ 'parameters' can also be a list of strings and tuples like so:
1567
+ ```
1568
+ @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
1569
+ ```
1570
+ This is equivalent to:
1571
+ ```
1572
+ @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
1573
+ ```
1574
+
1575
+
1576
+ Parameters
1577
+ ----------
1578
+ event : Union[str, Dict[str, Any]], optional, default None
1579
+ Event dependency for this flow.
1580
+ events : List[Union[str, Dict[str, Any]]], default []
1581
+ Events dependency for this flow.
1582
+ options : Dict[str, Any], default {}
1583
+ Backend-specific configuration for tuning eventing behavior.
1584
+ """
1585
+ ...
1586
+
1587
+ @typing.overload
1588
+ def trigger(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1589
+ ...
1590
+
1591
+ def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}):
1592
+ """
1593
+ Specifies the event(s) that this flow depends on.
1594
+
1595
+ ```
1596
+ @trigger(event='foo')
1597
+ ```
1598
+ or
1599
+ ```
1600
+ @trigger(events=['foo', 'bar'])
1601
+ ```
1602
+
1603
+ Additionally, you can specify the parameter mappings
1604
+ to map event payload to Metaflow parameters for the flow.
1605
+ ```
1606
+ @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
1607
+ ```
1608
+ or
1609
+ ```
1610
+ @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
1611
+ {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
1612
+ ```
1613
+
1614
+ 'parameters' can also be a list of strings and tuples like so:
1615
+ ```
1616
+ @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
1617
+ ```
1618
+ This is equivalent to:
1619
+ ```
1620
+ @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
1621
+ ```
1622
+
1623
+
1624
+ Parameters
1625
+ ----------
1626
+ event : Union[str, Dict[str, Any]], optional, default None
1627
+ Event dependency for this flow.
1628
+ events : List[Union[str, Dict[str, Any]]], default []
1629
+ Events dependency for this flow.
1630
+ options : Dict[str, Any], default {}
1631
+ Backend-specific configuration for tuning eventing behavior.
1632
+ """
1633
+ ...
1634
+
1635
+ @typing.overload
1636
+ def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1637
+ """
1638
+ Specifies the Conda environment for all steps of the flow.
1639
+
1640
+ Use `@conda_base` to set common libraries required by all
1641
+ steps and use `@conda` to specify step-specific additions.
1642
+
1643
+
1644
+ Parameters
1645
+ ----------
1646
+ packages : Dict[str, str], default {}
1647
+ Packages to use for this flow. The key is the name of the package
1648
+ and the value is the version to use.
1649
+ libraries : Dict[str, str], default {}
1650
+ Supported for backward compatibility. When used with packages, packages will take precedence.
1651
+ python : str, optional, default None
1652
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1653
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1654
+ disabled : bool, default False
1655
+ If set to True, disables Conda.
1656
+ """
1657
+ ...
1658
+
1659
+ @typing.overload
1660
+ def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1661
+ ...
1662
+
1663
+ def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
1664
+ """
1665
+ Specifies the Conda environment for all steps of the flow.
1666
+
1667
+ Use `@conda_base` to set common libraries required by all
1668
+ steps and use `@conda` to specify step-specific additions.
1669
+
1670
+
1671
+ Parameters
1672
+ ----------
1673
+ packages : Dict[str, str], default {}
1674
+ Packages to use for this flow. The key is the name of the package
1675
+ and the value is the version to use.
1676
+ libraries : Dict[str, str], default {}
1677
+ Supported for backward compatibility. When used with packages, packages will take precedence.
1678
+ python : str, optional, default None
1679
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1680
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1681
+ disabled : bool, default False
1682
+ If set to True, disables Conda.
1539
1683
  """
1540
1684
  ...
1541
1685
 
@@ -1580,57 +1724,6 @@ def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packag
1580
1724
  """
1581
1725
  ...
1582
1726
 
1583
- @typing.overload
1584
- def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1585
- """
1586
- Specifies the times when the flow should be run when running on a
1587
- production scheduler.
1588
-
1589
-
1590
- Parameters
1591
- ----------
1592
- hourly : bool, default False
1593
- Run the workflow hourly.
1594
- daily : bool, default True
1595
- Run the workflow daily.
1596
- weekly : bool, default False
1597
- Run the workflow weekly.
1598
- cron : str, optional, default None
1599
- Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1600
- specified by this expression.
1601
- timezone : str, optional, default None
1602
- Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1603
- which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1604
- """
1605
- ...
1606
-
1607
- @typing.overload
1608
- def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1609
- ...
1610
-
1611
- def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
1612
- """
1613
- Specifies the times when the flow should be run when running on a
1614
- production scheduler.
1615
-
1616
-
1617
- Parameters
1618
- ----------
1619
- hourly : bool, default False
1620
- Run the workflow hourly.
1621
- daily : bool, default True
1622
- Run the workflow daily.
1623
- weekly : bool, default False
1624
- Run the workflow weekly.
1625
- cron : str, optional, default None
1626
- Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1627
- specified by this expression.
1628
- timezone : str, optional, default None
1629
- Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1630
- which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1631
- """
1632
- ...
1633
-
1634
1727
  def with_artifact_store(f: typing.Optional[typing.Type[FlowSpecDerived]] = None):
1635
1728
  """
1636
1729
  Allows setting external datastores to save data for the
@@ -1745,64 +1838,10 @@ def with_artifact_store(f: typing.Optional[typing.Type[FlowSpecDerived]] = None)
1745
1838
  """
1746
1839
  ...
1747
1840
 
1748
- @typing.overload
1749
- def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1750
- """
1751
- Specifies the Conda environment for all steps of the flow.
1752
-
1753
- Use `@conda_base` to set common libraries required by all
1754
- steps and use `@conda` to specify step-specific additions.
1755
-
1756
-
1757
- Parameters
1758
- ----------
1759
- packages : Dict[str, str], default {}
1760
- Packages to use for this flow. The key is the name of the package
1761
- and the value is the version to use.
1762
- libraries : Dict[str, str], default {}
1763
- Supported for backward compatibility. When used with packages, packages will take precedence.
1764
- python : str, optional, default None
1765
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1766
- that the version used will correspond to the version of the Python interpreter used to start the run.
1767
- disabled : bool, default False
1768
- If set to True, disables Conda.
1769
- """
1770
- ...
1771
-
1772
- @typing.overload
1773
- def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1774
- ...
1775
-
1776
- def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
1777
- """
1778
- Specifies the Conda environment for all steps of the flow.
1779
-
1780
- Use `@conda_base` to set common libraries required by all
1781
- steps and use `@conda` to specify step-specific additions.
1782
-
1783
-
1784
- Parameters
1785
- ----------
1786
- packages : Dict[str, str], default {}
1787
- Packages to use for this flow. The key is the name of the package
1788
- and the value is the version to use.
1789
- libraries : Dict[str, str], default {}
1790
- Supported for backward compatibility. When used with packages, packages will take precedence.
1791
- python : str, optional, default None
1792
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1793
- that the version used will correspond to the version of the Python interpreter used to start the run.
1794
- disabled : bool, default False
1795
- If set to True, disables Conda.
1796
- """
1797
- ...
1798
-
1799
- def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1841
+ def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1800
1842
  """
1801
- The `@airflow_s3_key_sensor` decorator attaches a Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
1802
- before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
1803
- and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
1804
- added as a flow decorators. Adding more than one decorator will ensure that `start` step
1805
- starts only after all sensors finish.
1843
+ The `@airflow_external_task_sensor` decorator attaches a Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
1844
+ This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as a flow decorators. Adding more than one decorator will ensure that `start` step starts only after all sensors finish.
1806
1845
 
1807
1846
 
1808
1847
  Parameters
@@ -1824,18 +1863,21 @@ def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, expone
1824
1863
  Name of the sensor on Airflow
1825
1864
  description : str
1826
1865
  Description of sensor in the Airflow UI
1827
- bucket_key : Union[str, List[str]]
1828
- The key(s) being waited on. Supports full s3:// style url or relative path from root level.
1829
- When it's specified as a full s3:// url, please leave `bucket_name` as None
1830
- bucket_name : str
1831
- Name of the S3 bucket. Only needed when bucket_key is not provided as a full s3:// url.
1832
- When specified, all the keys passed to bucket_key refers to this bucket. (Default:None)
1833
- wildcard_match : bool
1834
- whether the bucket_key should be interpreted as a Unix wildcard pattern. (Default: False)
1835
- aws_conn_id : str
1836
- a reference to the s3 connection on Airflow. (Default: None)
1837
- verify : bool
1838
- Whether or not to verify SSL certificates for S3 connection. (Default: None)
1866
+ external_dag_id : str
1867
+ The dag_id that contains the task you want to wait for.
1868
+ external_task_ids : List[str]
1869
+ The list of task_ids that you want to wait for.
1870
+ If None (default value) the sensor waits for the DAG. (Default: None)
1871
+ allowed_states : List[str]
1872
+ Iterable of allowed states, (Default: ['success'])
1873
+ failed_states : List[str]
1874
+ Iterable of failed or dis-allowed states. (Default: None)
1875
+ execution_delta : datetime.timedelta
1876
+ time difference with the previous execution to look at,
1877
+ the default is the same logical date as the current task or DAG. (Default: None)
1878
+ check_existence: bool
1879
+ Set to True to check if the external task exists or check if
1880
+ the DAG to wait for exists. (Default: True)
1839
1881
  """
1840
1882
  ...
1841
1883
 
@@ -1875,95 +1917,53 @@ def project(*, name: str, branch: typing.Optional[str] = None, production: bool
1875
1917
  ...
1876
1918
 
1877
1919
  @typing.overload
1878
- def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1920
+ def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1879
1921
  """
1880
- Specifies the event(s) that this flow depends on.
1881
-
1882
- ```
1883
- @trigger(event='foo')
1884
- ```
1885
- or
1886
- ```
1887
- @trigger(events=['foo', 'bar'])
1888
- ```
1889
-
1890
- Additionally, you can specify the parameter mappings
1891
- to map event payload to Metaflow parameters for the flow.
1892
- ```
1893
- @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
1894
- ```
1895
- or
1896
- ```
1897
- @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
1898
- {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
1899
- ```
1900
-
1901
- 'parameters' can also be a list of strings and tuples like so:
1902
- ```
1903
- @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
1904
- ```
1905
- This is equivalent to:
1906
- ```
1907
- @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
1908
- ```
1922
+ Specifies the times when the flow should be run when running on a
1923
+ production scheduler.
1909
1924
 
1910
1925
 
1911
1926
  Parameters
1912
1927
  ----------
1913
- event : Union[str, Dict[str, Any]], optional, default None
1914
- Event dependency for this flow.
1915
- events : List[Union[str, Dict[str, Any]]], default []
1916
- Events dependency for this flow.
1917
- options : Dict[str, Any], default {}
1918
- Backend-specific configuration for tuning eventing behavior.
1928
+ hourly : bool, default False
1929
+ Run the workflow hourly.
1930
+ daily : bool, default True
1931
+ Run the workflow daily.
1932
+ weekly : bool, default False
1933
+ Run the workflow weekly.
1934
+ cron : str, optional, default None
1935
+ Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1936
+ specified by this expression.
1937
+ timezone : str, optional, default None
1938
+ Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1939
+ which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1919
1940
  """
1920
1941
  ...
1921
1942
 
1922
1943
  @typing.overload
1923
- def trigger(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1944
+ def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1924
1945
  ...
1925
1946
 
1926
- def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}):
1947
+ def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
1927
1948
  """
1928
- Specifies the event(s) that this flow depends on.
1929
-
1930
- ```
1931
- @trigger(event='foo')
1932
- ```
1933
- or
1934
- ```
1935
- @trigger(events=['foo', 'bar'])
1936
- ```
1937
-
1938
- Additionally, you can specify the parameter mappings
1939
- to map event payload to Metaflow parameters for the flow.
1940
- ```
1941
- @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
1942
- ```
1943
- or
1944
- ```
1945
- @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
1946
- {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
1947
- ```
1948
-
1949
- 'parameters' can also be a list of strings and tuples like so:
1950
- ```
1951
- @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
1952
- ```
1953
- This is equivalent to:
1954
- ```
1955
- @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
1956
- ```
1949
+ Specifies the times when the flow should be run when running on a
1950
+ production scheduler.
1957
1951
 
1958
1952
 
1959
1953
  Parameters
1960
1954
  ----------
1961
- event : Union[str, Dict[str, Any]], optional, default None
1962
- Event dependency for this flow.
1963
- events : List[Union[str, Dict[str, Any]]], default []
1964
- Events dependency for this flow.
1965
- options : Dict[str, Any], default {}
1966
- Backend-specific configuration for tuning eventing behavior.
1955
+ hourly : bool, default False
1956
+ Run the workflow hourly.
1957
+ daily : bool, default True
1958
+ Run the workflow daily.
1959
+ weekly : bool, default False
1960
+ Run the workflow weekly.
1961
+ cron : str, optional, default None
1962
+ Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1963
+ specified by this expression.
1964
+ timezone : str, optional, default None
1965
+ Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1966
+ which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1967
1967
  """
1968
1968
  ...
1969
1969