ob-metaflow-stubs 6.0.3.179rc1__py2.py3-none-any.whl → 6.0.3.179rc3__py2.py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (220)
  1. metaflow-stubs/__init__.pyi +679 -679
  2. metaflow-stubs/cards.pyi +1 -1
  3. metaflow-stubs/cli.pyi +1 -1
  4. metaflow-stubs/cli_components/__init__.pyi +1 -1
  5. metaflow-stubs/cli_components/utils.pyi +1 -1
  6. metaflow-stubs/client/__init__.pyi +1 -1
  7. metaflow-stubs/client/core.pyi +6 -6
  8. metaflow-stubs/client/filecache.pyi +2 -2
  9. metaflow-stubs/events.pyi +1 -1
  10. metaflow-stubs/exception.pyi +1 -1
  11. metaflow-stubs/flowspec.pyi +4 -4
  12. metaflow-stubs/generated_for.txt +1 -1
  13. metaflow-stubs/includefile.pyi +3 -3
  14. metaflow-stubs/info_file.pyi +1 -1
  15. metaflow-stubs/metadata_provider/__init__.pyi +1 -1
  16. metaflow-stubs/metadata_provider/heartbeat.pyi +1 -1
  17. metaflow-stubs/metadata_provider/metadata.pyi +2 -2
  18. metaflow-stubs/metadata_provider/util.pyi +1 -1
  19. metaflow-stubs/metaflow_config.pyi +1 -1
  20. metaflow-stubs/metaflow_current.pyi +6 -6
  21. metaflow-stubs/metaflow_git.pyi +1 -1
  22. metaflow-stubs/mf_extensions/__init__.pyi +1 -1
  23. metaflow-stubs/mf_extensions/obcheckpoint/__init__.pyi +1 -1
  24. metaflow-stubs/mf_extensions/obcheckpoint/plugins/__init__.pyi +1 -1
  25. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/__init__.pyi +1 -1
  26. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/__init__.pyi +1 -1
  27. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/async_cards.pyi +2 -2
  28. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/deco_injection_mixin.pyi +1 -1
  29. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/extra_components.pyi +3 -3
  30. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/__init__.pyi +1 -1
  31. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/cards/__init__.pyi +1 -1
  32. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/cards/checkpoint_lister.pyi +4 -4
  33. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/cards/lineage_card.pyi +1 -1
  34. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/checkpoint_storage.pyi +3 -3
  35. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/constructors.pyi +1 -1
  36. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/core.pyi +2 -2
  37. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/decorator.pyi +3 -3
  38. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/exceptions.pyi +1 -1
  39. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/final_api.pyi +1 -1
  40. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/lineage.pyi +1 -1
  41. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/__init__.pyi +1 -1
  42. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/context.pyi +2 -2
  43. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/core.pyi +1 -1
  44. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/decorator.pyi +1 -1
  45. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/exceptions.pyi +1 -1
  46. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/task_utils.pyi +3 -3
  47. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/utils.pyi +1 -1
  48. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastructures.pyi +2 -2
  49. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/exceptions.pyi +1 -1
  50. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/hf_hub/__init__.pyi +1 -1
  51. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/hf_hub/decorator.pyi +1 -1
  52. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/__init__.pyi +1 -1
  53. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/core.pyi +2 -2
  54. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/exceptions.pyi +1 -1
  55. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/model_storage.pyi +3 -3
  56. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/__init__.pyi +1 -1
  57. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/flowspec_utils.pyi +1 -1
  58. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/general.pyi +1 -1
  59. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/identity_utils.pyi +2 -2
  60. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/serialization_handler/__init__.pyi +1 -1
  61. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/serialization_handler/base.pyi +1 -1
  62. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/serialization_handler/tar.pyi +2 -2
  63. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/tar_utils.pyi +2 -2
  64. metaflow-stubs/mf_extensions/outerbounds/__init__.pyi +1 -1
  65. metaflow-stubs/mf_extensions/outerbounds/plugins/__init__.pyi +1 -1
  66. metaflow-stubs/mf_extensions/outerbounds/plugins/aws/__init__.pyi +1 -1
  67. metaflow-stubs/mf_extensions/outerbounds/plugins/aws/assume_role_decorator.pyi +1 -1
  68. metaflow-stubs/mf_extensions/outerbounds/plugins/card_utilities/__init__.pyi +1 -1
  69. metaflow-stubs/mf_extensions/outerbounds/plugins/card_utilities/async_cards.pyi +1 -1
  70. metaflow-stubs/mf_extensions/outerbounds/plugins/card_utilities/injector.pyi +1 -1
  71. metaflow-stubs/mf_extensions/outerbounds/plugins/checkpoint_datastores/__init__.pyi +1 -1
  72. metaflow-stubs/mf_extensions/outerbounds/plugins/checkpoint_datastores/coreweave.pyi +1 -1
  73. metaflow-stubs/mf_extensions/outerbounds/plugins/checkpoint_datastores/nebius.pyi +1 -1
  74. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/__init__.pyi +1 -1
  75. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/baker.pyi +3 -3
  76. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/docker_environment.pyi +2 -2
  77. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/fast_bakery.pyi +1 -1
  78. metaflow-stubs/mf_extensions/outerbounds/plugins/kubernetes/__init__.pyi +1 -1
  79. metaflow-stubs/mf_extensions/outerbounds/plugins/kubernetes/pod_killer.pyi +1 -1
  80. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/__init__.pyi +1 -1
  81. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/constants.pyi +1 -1
  82. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/exceptions.pyi +1 -1
  83. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/ollama.pyi +1 -1
  84. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/status_card.pyi +1 -1
  85. metaflow-stubs/mf_extensions/outerbounds/plugins/snowflake/__init__.pyi +1 -1
  86. metaflow-stubs/mf_extensions/outerbounds/plugins/snowflake/snowflake.pyi +1 -1
  87. metaflow-stubs/mf_extensions/outerbounds/profilers/__init__.pyi +1 -1
  88. metaflow-stubs/mf_extensions/outerbounds/profilers/gpu.pyi +1 -1
  89. metaflow-stubs/mf_extensions/outerbounds/remote_config.pyi +1 -1
  90. metaflow-stubs/mf_extensions/outerbounds/toplevel/__init__.pyi +1 -1
  91. metaflow-stubs/mf_extensions/outerbounds/toplevel/global_aliases_for_metaflow_package.pyi +1 -1
  92. metaflow-stubs/multicore_utils.pyi +1 -1
  93. metaflow-stubs/ob_internal.pyi +1 -1
  94. metaflow-stubs/parameters.pyi +3 -3
  95. metaflow-stubs/plugins/__init__.pyi +11 -11
  96. metaflow-stubs/plugins/airflow/__init__.pyi +1 -1
  97. metaflow-stubs/plugins/airflow/airflow_utils.pyi +1 -1
  98. metaflow-stubs/plugins/airflow/exception.pyi +1 -1
  99. metaflow-stubs/plugins/airflow/sensors/__init__.pyi +1 -1
  100. metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +1 -1
  101. metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +1 -1
  102. metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +1 -1
  103. metaflow-stubs/plugins/argo/__init__.pyi +1 -1
  104. metaflow-stubs/plugins/argo/argo_client.pyi +1 -1
  105. metaflow-stubs/plugins/argo/argo_events.pyi +1 -1
  106. metaflow-stubs/plugins/argo/argo_workflows.pyi +3 -3
  107. metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +2 -2
  108. metaflow-stubs/plugins/argo/argo_workflows_deployer.pyi +3 -3
  109. metaflow-stubs/plugins/argo/argo_workflows_deployer_objects.pyi +3 -3
  110. metaflow-stubs/plugins/aws/__init__.pyi +1 -1
  111. metaflow-stubs/plugins/aws/aws_client.pyi +1 -1
  112. metaflow-stubs/plugins/aws/aws_utils.pyi +1 -1
  113. metaflow-stubs/plugins/aws/batch/__init__.pyi +1 -1
  114. metaflow-stubs/plugins/aws/batch/batch.pyi +1 -1
  115. metaflow-stubs/plugins/aws/batch/batch_client.pyi +1 -1
  116. metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +1 -1
  117. metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +1 -1
  118. metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +3 -3
  119. metaflow-stubs/plugins/aws/step_functions/__init__.pyi +1 -1
  120. metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +1 -1
  121. metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +1 -1
  122. metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +1 -1
  123. metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +1 -1
  124. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer.pyi +3 -3
  125. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer_objects.pyi +2 -2
  126. metaflow-stubs/plugins/azure/__init__.pyi +1 -1
  127. metaflow-stubs/plugins/azure/azure_credential.pyi +1 -1
  128. metaflow-stubs/plugins/azure/azure_exceptions.pyi +1 -1
  129. metaflow-stubs/plugins/azure/azure_secret_manager_secrets_provider.pyi +3 -3
  130. metaflow-stubs/plugins/azure/azure_utils.pyi +1 -1
  131. metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +1 -1
  132. metaflow-stubs/plugins/azure/includefile_support.pyi +1 -1
  133. metaflow-stubs/plugins/cards/__init__.pyi +1 -1
  134. metaflow-stubs/plugins/cards/card_client.pyi +1 -1
  135. metaflow-stubs/plugins/cards/card_creator.pyi +1 -1
  136. metaflow-stubs/plugins/cards/card_datastore.pyi +1 -1
  137. metaflow-stubs/plugins/cards/card_decorator.pyi +1 -1
  138. metaflow-stubs/plugins/cards/card_modules/__init__.pyi +1 -1
  139. metaflow-stubs/plugins/cards/card_modules/basic.pyi +2 -2
  140. metaflow-stubs/plugins/cards/card_modules/card.pyi +1 -1
  141. metaflow-stubs/plugins/cards/card_modules/components.pyi +4 -4
  142. metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +1 -1
  143. metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +1 -1
  144. metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +1 -1
  145. metaflow-stubs/plugins/cards/card_resolver.pyi +1 -1
  146. metaflow-stubs/plugins/cards/component_serializer.pyi +1 -1
  147. metaflow-stubs/plugins/cards/exception.pyi +1 -1
  148. metaflow-stubs/plugins/catch_decorator.pyi +1 -1
  149. metaflow-stubs/plugins/datatools/__init__.pyi +1 -1
  150. metaflow-stubs/plugins/datatools/local.pyi +1 -1
  151. metaflow-stubs/plugins/datatools/s3/__init__.pyi +1 -1
  152. metaflow-stubs/plugins/datatools/s3/s3.pyi +3 -3
  153. metaflow-stubs/plugins/datatools/s3/s3tail.pyi +1 -1
  154. metaflow-stubs/plugins/datatools/s3/s3util.pyi +1 -1
  155. metaflow-stubs/plugins/debug_logger.pyi +1 -1
  156. metaflow-stubs/plugins/debug_monitor.pyi +1 -1
  157. metaflow-stubs/plugins/environment_decorator.pyi +1 -1
  158. metaflow-stubs/plugins/events_decorator.pyi +1 -1
  159. metaflow-stubs/plugins/frameworks/__init__.pyi +1 -1
  160. metaflow-stubs/plugins/frameworks/pytorch.pyi +1 -1
  161. metaflow-stubs/plugins/gcp/__init__.pyi +1 -1
  162. metaflow-stubs/plugins/gcp/gcp_secret_manager_secrets_provider.pyi +3 -3
  163. metaflow-stubs/plugins/gcp/gs_exceptions.pyi +1 -1
  164. metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +1 -1
  165. metaflow-stubs/plugins/gcp/gs_utils.pyi +1 -1
  166. metaflow-stubs/plugins/gcp/includefile_support.pyi +1 -1
  167. metaflow-stubs/plugins/kubernetes/__init__.pyi +1 -1
  168. metaflow-stubs/plugins/kubernetes/kube_utils.pyi +2 -2
  169. metaflow-stubs/plugins/kubernetes/kubernetes.pyi +1 -1
  170. metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +1 -1
  171. metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +1 -1
  172. metaflow-stubs/plugins/kubernetes/kubernetes_jobsets.pyi +1 -1
  173. metaflow-stubs/plugins/kubernetes/spot_monitor_sidecar.pyi +1 -1
  174. metaflow-stubs/plugins/ollama/__init__.pyi +2 -2
  175. metaflow-stubs/plugins/parallel_decorator.pyi +1 -1
  176. metaflow-stubs/plugins/perimeters.pyi +1 -1
  177. metaflow-stubs/plugins/project_decorator.pyi +1 -1
  178. metaflow-stubs/plugins/pypi/__init__.pyi +1 -1
  179. metaflow-stubs/plugins/pypi/conda_decorator.pyi +1 -1
  180. metaflow-stubs/plugins/pypi/conda_environment.pyi +2 -2
  181. metaflow-stubs/plugins/pypi/parsers.pyi +1 -1
  182. metaflow-stubs/plugins/pypi/pypi_decorator.pyi +1 -1
  183. metaflow-stubs/plugins/pypi/pypi_environment.pyi +1 -1
  184. metaflow-stubs/plugins/pypi/utils.pyi +1 -1
  185. metaflow-stubs/plugins/resources_decorator.pyi +1 -1
  186. metaflow-stubs/plugins/retry_decorator.pyi +1 -1
  187. metaflow-stubs/plugins/secrets/__init__.pyi +2 -2
  188. metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +2 -2
  189. metaflow-stubs/plugins/secrets/secrets_decorator.pyi +1 -1
  190. metaflow-stubs/plugins/snowflake/__init__.pyi +1 -1
  191. metaflow-stubs/plugins/storage_executor.pyi +1 -1
  192. metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +1 -1
  193. metaflow-stubs/plugins/timeout_decorator.pyi +1 -1
  194. metaflow-stubs/plugins/torchtune/__init__.pyi +1 -1
  195. metaflow-stubs/plugins/uv/__init__.pyi +1 -1
  196. metaflow-stubs/plugins/uv/uv_environment.pyi +2 -2
  197. metaflow-stubs/profilers/__init__.pyi +1 -1
  198. metaflow-stubs/pylint_wrapper.pyi +1 -1
  199. metaflow-stubs/runner/__init__.pyi +1 -1
  200. metaflow-stubs/runner/deployer.pyi +29 -29
  201. metaflow-stubs/runner/deployer_impl.pyi +2 -2
  202. metaflow-stubs/runner/metaflow_runner.pyi +3 -3
  203. metaflow-stubs/runner/nbdeploy.pyi +1 -1
  204. metaflow-stubs/runner/nbrun.pyi +1 -1
  205. metaflow-stubs/runner/subprocess_manager.pyi +1 -1
  206. metaflow-stubs/runner/utils.pyi +1 -1
  207. metaflow-stubs/system/__init__.pyi +1 -1
  208. metaflow-stubs/system/system_logger.pyi +1 -1
  209. metaflow-stubs/system/system_monitor.pyi +1 -1
  210. metaflow-stubs/tagging_util.pyi +1 -1
  211. metaflow-stubs/tuple_util.pyi +1 -1
  212. metaflow-stubs/user_configs/__init__.pyi +1 -1
  213. metaflow-stubs/user_configs/config_decorators.pyi +5 -5
  214. metaflow-stubs/user_configs/config_options.pyi +2 -2
  215. metaflow-stubs/user_configs/config_parameters.pyi +5 -5
  216. {ob_metaflow_stubs-6.0.3.179rc1.dist-info → ob_metaflow_stubs-6.0.3.179rc3.dist-info}/METADATA +1 -1
  217. ob_metaflow_stubs-6.0.3.179rc3.dist-info/RECORD +220 -0
  218. ob_metaflow_stubs-6.0.3.179rc1.dist-info/RECORD +0 -220
  219. {ob_metaflow_stubs-6.0.3.179rc1.dist-info → ob_metaflow_stubs-6.0.3.179rc3.dist-info}/WHEEL +0 -0
  220. {ob_metaflow_stubs-6.0.3.179rc1.dist-info → ob_metaflow_stubs-6.0.3.179rc3.dist-info}/top_level.txt +0 -0
@@ -1,7 +1,7 @@
  ######################################################################################################
  # Auto-generated Metaflow stub file #
  # MF version: 2.15.17.1+obcheckpoint(0.2.1);ob(v1) #
- # Generated on 2025-06-13T20:01:40.043002 #
+ # Generated on 2025-06-13T20:43:51.355806 #
  ######################################################################################################

  from __future__ import annotations
@@ -37,16 +37,16 @@ from .user_configs.config_decorators import CustomFlowDecorator as CustomFlowDec
  from .user_configs.config_decorators import CustomStepDecorator as CustomStepDecorator
  from . import tuple_util as tuple_util
  from . import cards as cards
- from . import metaflow_git as metaflow_git
  from . import events as events
+ from . import metaflow_git as metaflow_git
  from . import runner as runner
  from . import plugins as plugins
  from .mf_extensions.outerbounds.toplevel.global_aliases_for_metaflow_package import S3 as S3
  from . import includefile as includefile
  from .includefile import IncludeFile as IncludeFile
  from .plugins.pypi.parsers import pyproject_toml_parser as pyproject_toml_parser
- from .plugins.pypi.parsers import requirements_txt_parser as requirements_txt_parser
  from .plugins.pypi.parsers import conda_environment_yml_parser as conda_environment_yml_parser
+ from .plugins.pypi.parsers import requirements_txt_parser as requirements_txt_parser
  from . import client as client
  from .client.core import namespace as namespace
  from .client.core import get_namespace as get_namespace
@@ -156,78 +156,59 @@ def step(f: typing.Union[typing.Callable[[FlowSpecDerived], None], typing.Callab
  ...

  @typing.overload
- def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- """
- Internal decorator to support Fast bakery
- """
- ...
-
- @typing.overload
- def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def fast_bakery_internal(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
- """
- Internal decorator to support Fast bakery
- """
- ...
-
- @typing.overload
- def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def checkpoint(*, load_policy: str = 'fresh', temp_dir_root: str = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies a timeout for your step.
-
- This decorator is useful if this step may hang indefinitely.
-
- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
+ Enables checkpointing for a step.

- Note that all the values specified in parameters are added together so if you specify
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.


  Parameters
  ----------
- seconds : int, default 0
- Number of seconds to wait prior to timing out.
- minutes : int, default 0
- Number of minutes to wait prior to timing out.
- hours : int, default 0
- Number of hours to wait prior to timing out.
+ load_policy : str, default: "fresh"
+ The policy for loading the checkpoint. The following policies are supported:
+ - "eager": Loads the latest available checkpoint within the namespace.
+ With this mode, the latest checkpoint written by any previous task (can be even a different run) of the step
+ will be loaded at the start of the task.
+ - "none": Do not load any checkpoint
+ - "fresh": Loads the latest checkpoint created within the running Task.
+ This mode helps loading checkpoints across various retry attempts of the same task.
+ With this mode, no checkpoint will be loaded at the start of a task but any checkpoints
+ created within the task will be loaded when the task retries execution on failure.
+
+ temp_dir_root : str, default: None
+ The root directory under which `current.checkpoint.directory` will be created.
  """
  ...

  @typing.overload
- def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def checkpoint(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def checkpoint(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
+ def checkpoint(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, load_policy: str = 'fresh', temp_dir_root: str = None):
  """
- Specifies a timeout for your step.
-
- This decorator is useful if this step may hang indefinitely.
-
- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
+ Enables checkpointing for a step.

- Note that all the values specified in parameters are added together so if you specify
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.


  Parameters
  ----------
- seconds : int, default 0
- Number of seconds to wait prior to timing out.
- minutes : int, default 0
- Number of minutes to wait prior to timing out.
- hours : int, default 0
- Number of hours to wait prior to timing out.
+ load_policy : str, default: "fresh"
+ The policy for loading the checkpoint. The following policies are supported:
+ - "eager": Loads the latest available checkpoint within the namespace.
+ With this mode, the latest checkpoint written by any previous task (can be even a different run) of the step
+ will be loaded at the start of the task.
+ - "none": Do not load any checkpoint
+ - "fresh": Loads the latest checkpoint created within the running Task.
+ This mode helps loading checkpoints across various retry attempts of the same task.
+ With this mode, no checkpoint will be loaded at the start of a task but any checkpoints
+ created within the task will be loaded when the task retries execution on failure.
+
+ temp_dir_root : str, default: None
+ The root directory under which `current.checkpoint.directory` will be created.
  """
  ...
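The hunk above moves the `@checkpoint` stub (from the obcheckpoint extension) into the position previously held by `@timeout`. For context, a minimal sketch of how the documented signature is typically used, pairing `load_policy="fresh"` with `@retry` as the docstring suggests; the flow name and step bodies are hypothetical, and only `current.checkpoint.directory` from the docstring above is assumed:

```python
from metaflow import FlowSpec, step, retry, checkpoint, current


class CheckpointDemoFlow(FlowSpec):  # hypothetical flow name

    @retry(times=2)                   # retries are what make load_policy="fresh" useful
    @checkpoint(load_policy="fresh")  # reload checkpoints written by earlier attempts of this task
    @step
    def start(self):
        # Per the docstring above, checkpoints live under a directory rooted
        # at temp_dir_root and exposed as `current.checkpoint.directory`.
        print("checkpoint directory:", current.checkpoint.directory)
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    CheckpointDemoFlow()
```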

@@ -286,119 +267,261 @@ def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
  """
  ...

- @typing.overload
- def checkpoint(*, load_policy: str = 'fresh', temp_dir_root: str = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def nvct(*, gpu: int, gpu_type: str) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Enables checkpointing for a step.
-
+ Specifies that this step should execute on DGX cloud.


  Parameters
  ----------
- load_policy : str, default: "fresh"
- The policy for loading the checkpoint. The following policies are supported:
- - "eager": Loads the latest available checkpoint within the namespace.
- With this mode, the latest checkpoint written by any previous task (can be even a different run) of the step
- will be loaded at the start of the task.
- - "none": Do not load any checkpoint
- - "fresh": Loads the latest checkpoint created within the running Task.
- This mode helps loading checkpoints across various retry attempts of the same task.
- With this mode, no checkpoint will be loaded at the start of a task but any checkpoints
- created within the task will be loaded when the task retries execution on failure.
+ gpu : int
+ Number of GPUs to use.
+ gpu_type : str
+ Type of Nvidia GPU to use.
  """
  ...

- @typing.overload
- def checkpoint(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def checkpoint(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def checkpoint(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, load_policy: str = 'fresh', temp_dir_root: str = None):
+ def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = 'KUBERNETES_IMAGE_PULL_POLICY', image_pull_secrets: typing.List[str] = [], service_account: str = 'METAFLOW_KUBERNETES_SERVICE_ACCOUNT', secrets: typing.Optional[typing.List[str]] = None, node_selector: typing.Union[typing.Dict[str, str], str, None] = None, namespace: str = 'METAFLOW_KUBERNETES_NAMESPACE', gpu: typing.Optional[int] = None, gpu_vendor: str = 'KUBERNETES_GPU_VENDOR', tolerations: typing.List[str] = [], labels: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_LABELS', annotations: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_ANNOTATIONS', use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = '/metaflow_temp', persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None, hostname_resolution_timeout: int = 600, qos: str = 'Burstable', security_context: typing.Optional[typing.Dict[str, typing.Any]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Enables checkpointing for a step.
-
+ Specifies that this step should execute on Kubernetes.


  Parameters
  ----------
- load_policy : str, default: "fresh"
- The policy for loading the checkpoint. The following policies are supported:
- - "eager": Loads the latest available checkpoint within the namespace.
- With this mode, the latest checkpoint written by any previous task (can be even a different run) of the step
- will be loaded at the start of the task.
- - "none": Do not load any checkpoint
- - "fresh": Loads the latest checkpoint created within the running Task.
- This mode helps loading checkpoints across various retry attempts of the same task.
- With this mode, no checkpoint will be loaded at the start of a task but any checkpoints
- created within the task will be loaded when the task retries execution on failure.
+ cpu : int, default 1
+ Number of CPUs required for this step. If `@resources` is
+ also present, the maximum value from all decorators is used.
+ memory : int, default 4096
+ Memory size (in MB) required for this step. If
+ `@resources` is also present, the maximum value from all decorators is
+ used.
+ disk : int, default 10240
+ Disk size (in MB) required for this step. If
+ `@resources` is also present, the maximum value from all decorators is
+ used.
+ image : str, optional, default None
+ Docker image to use when launching on Kubernetes. If not specified, and
+ METAFLOW_KUBERNETES_CONTAINER_IMAGE is specified, that image is used. If
+ not, a default Docker image mapping to the current version of Python is used.
+ image_pull_policy: str, default KUBERNETES_IMAGE_PULL_POLICY
+ If given, the imagePullPolicy to be applied to the Docker image of the step.
+ image_pull_secrets: List[str], default []
+ The default is extracted from METAFLOW_KUBERNETES_IMAGE_PULL_SECRETS.
+ Kubernetes image pull secrets to use when pulling container images
+ in Kubernetes.
+ service_account : str, default METAFLOW_KUBERNETES_SERVICE_ACCOUNT
+ Kubernetes service account to use when launching pod in Kubernetes.
+ secrets : List[str], optional, default None
+ Kubernetes secrets to use when launching pod in Kubernetes. These
+ secrets are in addition to the ones defined in `METAFLOW_KUBERNETES_SECRETS`
+ in Metaflow configuration.
+ node_selector: Union[Dict[str,str], str], optional, default None
+ Kubernetes node selector(s) to apply to the pod running the task.
+ Can be passed in as a comma separated string of values e.g.
+ 'kubernetes.io/os=linux,kubernetes.io/arch=amd64' or as a dictionary
+ {'kubernetes.io/os': 'linux', 'kubernetes.io/arch': 'amd64'}
+ namespace : str, default METAFLOW_KUBERNETES_NAMESPACE
+ Kubernetes namespace to use when launching pod in Kubernetes.
+ gpu : int, optional, default None
+ Number of GPUs required for this step. A value of zero implies that
+ the scheduled node should not have GPUs.
+ gpu_vendor : str, default KUBERNETES_GPU_VENDOR
+ The vendor of the GPUs to be used for this step.
+ tolerations : List[str], default []
+ The default is extracted from METAFLOW_KUBERNETES_TOLERATIONS.
+ Kubernetes tolerations to use when launching pod in Kubernetes.
+ labels: Dict[str, str], default: METAFLOW_KUBERNETES_LABELS
+ Kubernetes labels to use when launching pod in Kubernetes.
+ annotations: Dict[str, str], default: METAFLOW_KUBERNETES_ANNOTATIONS
+ Kubernetes annotations to use when launching pod in Kubernetes.
+ use_tmpfs : bool, default False
+ This enables an explicit tmpfs mount for this step.
+ tmpfs_tempdir : bool, default True
+ sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
+ tmpfs_size : int, optional, default: None
+ The value for the size (in MiB) of the tmpfs mount for this step.
+ This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
+ memory allocated for this step.
+ tmpfs_path : str, optional, default /metaflow_temp
+ Path to tmpfs mount for this step.
+ persistent_volume_claims : Dict[str, str], optional, default None
+ A map (dictionary) of persistent volumes to be mounted to the pod for this step. The map is from persistent
+ volumes to the path to which the volume is to be mounted, e.g., `{'pvc-name': '/path/to/mount/on'}`.
+ shared_memory: int, optional
+ Shared memory size (in MiB) required for this step
+ port: int, optional
+ Port number to specify in the Kubernetes job object
+ compute_pool : str, optional, default None
+ Compute pool to be used for this step.
+ If not specified, any accessible compute pool within the perimeter is used.
+ hostname_resolution_timeout: int, default 10 * 60
+ Timeout in seconds for the worker tasks in the gang scheduled cluster to resolve the hostname of control task.
+ Only applicable when @parallel is used.
+ qos: str, default: Burstable
+ Quality of Service class to assign to the pod. Supported values are: Guaranteed, Burstable, BestEffort

- temp_dir_root : str, default: None
- The root directory under which `current.checkpoint.directory` will be created.
+ security_context: Dict[str, Any], optional, default None
+ Container security context. Applies to the task container. Allows the following keys:
+ - privileged: bool, optional, default None
+ - allow_privilege_escalation: bool, optional, default None
+ - run_as_user: int, optional, default None
+ - run_as_group: int, optional, default None
+ - run_as_non_root: bool, optional, default None
  """
  ...

- @typing.overload
- def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def ollama(*, models: list, backend: str, force_pull: bool, cache_update_policy: str, force_cache_update: bool, debug: bool, circuit_breaker_config: dict, timeout_config: dict) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies the Conda environment for the step.
+ This decorator is used to run Ollama APIs as Metaflow task sidecars.

- Information in this decorator will augment any
- attributes set in the `@conda_base` flow-level decorator. Hence,
- you can use `@conda_base` to set packages required by all
- steps and use `@conda` to specify step-specific overrides.
+ User code call
+ --------------
+ @ollama(
+ models=[...],
+ ...
+ )
+
+ Valid backend options
+ ---------------------
+ - 'local': Run as a separate process on the local task machine.
+ - (TODO) 'managed': Outerbounds hosts and selects compute provider.
+ - (TODO) 'remote': Spin up separate instance to serve Ollama models.
+
+ Valid model options
+ -------------------
+ Any model here https://ollama.com/search, e.g. 'llama3.2', 'llama3.3'


  Parameters
  ----------
- packages : Dict[str, str], default {}
- Packages to use for this step. The key is the name of the package
- and the value is the version to use.
- libraries : Dict[str, str], default {}
- Supported for backward compatibility. When used with packages, packages will take precedence.
- python : str, optional, default None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
- disabled : bool, default False
- If set to True, disables @conda.
- """
- ...
-
- @typing.overload
- def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ models: list[str]
+ List of Ollama containers running models in sidecars.
+ backend: str
+ Determines where and how to run the Ollama process.
+ force_pull: bool
+ Whether to run `ollama pull` no matter what, or first check the remote cache in Metaflow datastore for this model key.
+ cache_update_policy: str
+ Cache update policy: "auto", "force", or "never".
+ force_cache_update: bool
+ Simple override for "force" cache update policy.
+ debug: bool
+ Whether to turn on verbose debugging logs.
+ circuit_breaker_config: dict
+ Configuration for circuit breaker protection. Keys: failure_threshold, recovery_timeout, reset_timeout.
+ timeout_config: dict
+ Configuration for various operation timeouts. Keys: pull, stop, health_check, install, server_startup.
+ """
  ...

- def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
+ def nvidia(*, gpu: int, gpu_type: str, queue_timeout: int) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies the Conda environment for the step.
+ Specifies that this step should execute on DGX cloud.

- Information in this decorator will augment any
- attributes set in the `@conda_base` flow-level decorator. Hence,
- you can use `@conda_base` to set packages required by all
- steps and use `@conda` to specify step-specific overrides.
+
+ Parameters
+ ----------
+ gpu : int
+ Number of GPUs to use.
+ gpu_type : str
+ Type of Nvidia GPU to use.
+ queue_timeout : int
+ Time to keep the job in NVCF's queue.
+ """
+ ...
+
+ @typing.overload
+ def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ """
+ Internal decorator to support Fast bakery
+ """
+ ...
+
+ @typing.overload
+ def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def fast_bakery_internal(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
+ """
+ Internal decorator to support Fast bakery
+ """
+ ...
+
+ @typing.overload
+ def resources(*, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Specifies the resources needed when executing this step.
+
+ Use `@resources` to specify the resource requirements
+ independently of the specific compute layer (`@batch`, `@kubernetes`).
+
+ You can choose the compute layer on the command line by executing e.g.
+ ```
+ python myflow.py run --with batch
+ ```
+ or
+ ```
+ python myflow.py run --with kubernetes
+ ```
+ which executes the flow on the desired system using the
+ requirements specified in `@resources`.


  Parameters
  ----------
- packages : Dict[str, str], default {}
- Packages to use for this step. The key is the name of the package
- and the value is the version to use.
- libraries : Dict[str, str], default {}
- Supported for backward compatibility. When used with packages, packages will take precedence.
- python : str, optional, default None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
- disabled : bool, default False
- If set to True, disables @conda.
+ cpu : int, default 1
+ Number of CPUs required for this step.
+ gpu : int, optional, default None
+ Number of GPUs required for this step.
+ disk : int, optional, default None
+ Disk size (in MB) required for this step. Only applies on Kubernetes.
+ memory : int, default 4096
+ Memory size (in MB) required for this step.
+ shared_memory : int, optional, default None
+ The value for the size (in MiB) of the /dev/shm volume for this step.
+ This parameter maps to the `--shm-size` option in Docker.
+ """
+ ...
+
+ @typing.overload
+ def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ ...
+
+ @typing.overload
+ def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
+ """
+ Specifies the resources needed when executing this step.
+
+ Use `@resources` to specify the resource requirements
+ independently of the specific compute layer (`@batch`, `@kubernetes`).
+
+ You can choose the compute layer on the command line by executing e.g.
+ ```
+ python myflow.py run --with batch
+ ```
+ or
+ ```
+ python myflow.py run --with kubernetes
+ ```
+ which executes the flow on the desired system using the
+ requirements specified in `@resources`.
+
+
+ Parameters
+ ----------
+ cpu : int, default 1
+ Number of CPUs required for this step.
+ gpu : int, optional, default None
+ Number of GPUs required for this step.
+ disk : int, optional, default None
+ Disk size (in MB) required for this step. Only applies on Kubernetes.
+ memory : int, default 4096
+ Memory size (in MB) required for this step.
+ shared_memory : int, optional, default None
+ The value for the size (in MiB) of the /dev/shm volume for this step.
+ This parameter maps to the `--shm-size` option in Docker.
  """
  ...
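Both the `@kubernetes` and `@resources` docstrings above note that when the two decorators are present on the same step, the maximum value from all decorators is used. A hedged sketch of that interaction (the flow name and resource values are hypothetical):

```python
from metaflow import FlowSpec, step, resources, kubernetes


class ResourceDemoFlow(FlowSpec):  # hypothetical flow name

    # Per the docstrings above, the maximum across decorators wins:
    # this step effectively requests 2 CPUs and 16384 MB of memory.
    @resources(cpu=2, memory=8192)
    @kubernetes(memory=16384)
    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    ResourceDemoFlow()
```

Alternatively, as the `@resources` docstring shows, a step annotated only with `@resources` can be bound to a compute layer at launch time with `python myflow.py run --with kubernetes`.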

@@ -504,67 +627,61 @@ def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None],
  ...

  @typing.overload
- def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies that the step will succeed under all circumstances.
+ Specifies a timeout for your step.

- The decorator will create an optional artifact, specified by `var`, which
- contains the exception raised. You can use it to detect the presence
- of errors, indicating that all happy-path artifacts produced by the step
- are missing.
+ This decorator is useful if this step may hang indefinitely.
+
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.
+
+ Note that all the values specified in parameters are added together so if you specify
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.


  Parameters
  ----------
- var : str, optional, default None
- Name of the artifact in which to store the caught exception.
- If not specified, the exception is not stored.
- print_exception : bool, default True
- Determines whether or not the exception is printed to
- stdout when caught.
+ seconds : int, default 0
+ Number of seconds to wait prior to timing out.
+ minutes : int, default 0
+ Number of minutes to wait prior to timing out.
+ hours : int, default 0
+ Number of hours to wait prior to timing out.
  """
  ...

  @typing.overload
- def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
+ def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
  """
- Specifies that the step will succeed under all circumstances.
+ Specifies a timeout for your step.

- The decorator will create an optional artifact, specified by `var`, which
- contains the exception raised. You can use it to detect the presence
- of errors, indicating that all happy-path artifacts produced by the step
- are missing.
+ This decorator is useful if this step may hang indefinitely.

+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.

- Parameters
- ----------
- var : str, optional, default None
- Name of the artifact in which to store the caught exception.
- If not specified, the exception is not stored.
- print_exception : bool, default True
- Determines whether or not the exception is printed to
- stdout when caught.
- """
- ...
-
- def nvct(*, gpu: int, gpu_type: str) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Specifies that this step should execute on DGX cloud.
+ Note that all the values specified in parameters are added together so if you specify
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.


  Parameters
  ----------
- gpu : int
- Number of GPUs to use.
- gpu_type : str
- Type of Nvidia GPU to use.
+ seconds : int, default 0
+ Number of seconds to wait prior to timing out.
+ minutes : int, default 0
+ Number of minutes to wait prior to timing out.
+ hours : int, default 0
+ Number of hours to wait prior to timing out.
  """
  ...
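The `@timeout` docstring above states that the duration arguments are additive and that a timeout surfaces as a step exception, so it composes with `@retry` and `@catch`. A minimal sketch (the flow name and durations are hypothetical):

```python
from metaflow import FlowSpec, step, timeout, retry


class TimeoutDemoFlow(FlowSpec):  # hypothetical flow name

    @retry(times=1)                # the timeout exception triggers a retry, per the docstring
    @timeout(hours=1, seconds=60)  # additive: effective limit is 1 hour and 1 minute
    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    TimeoutDemoFlow()
```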
570
687
 
@@ -601,128 +718,113 @@ def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], Non
601
718
  """
602
719
  ...
603
720
 
604
- def app_deploy(*, app_port: int, app_name: str) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
605
- """
606
- Specifies that this step is used to deploy an instance of the app.
607
- Requires that self.app_name, self.app_port, self.entrypoint and self.deployDir is set.
608
-
609
-
610
- Parameters
611
- ----------
612
- app_port : int
613
- Number of GPUs to use.
614
- app_name : str
615
- Name of the app to deploy.
616
- """
617
- ...
618
-
619
721
  @typing.overload
620
- def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
722
+ def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
621
723
  """
622
- Specifies the PyPI packages for the step.
724
+ Specifies the Conda environment for the step.
623
725
 
624
726
  Information in this decorator will augment any
625
- attributes set in the `@pyi_base` flow-level decorator. Hence,
626
- you can use `@pypi_base` to set packages required by all
627
- steps and use `@pypi` to specify step-specific overrides.
727
+ attributes set in the `@conda_base` flow-level decorator. Hence,
728
+ you can use `@conda_base` to set packages required by all
729
+ steps and use `@conda` to specify step-specific overrides.
628
730
 
629
731
 
630
732
  Parameters
631
733
  ----------
632
- packages : Dict[str, str], default: {}
734
+ packages : Dict[str, str], default {}
633
735
  Packages to use for this step. The key is the name of the package
634
736
  and the value is the version to use.
635
- python : str, optional, default: None
737
+ libraries : Dict[str, str], default {}
738
+ Supported for backward compatibility. When used with packages, packages will take precedence.
739
+ python : str, optional, default None
636
740
  Version of Python to use, e.g. '3.7.4'. A default value of None implies
637
741
  that the version used will correspond to the version of the Python interpreter used to start the run.
742
+ disabled : bool, default False
743
+ If set to True, disables @conda.
638
744
  """
639
745
  ...
640
746
 
641
747
  @typing.overload
642
- def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
748
+ def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
643
749
  ...
644
750
 
645
751
  @typing.overload
646
- def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
752
+ def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
647
753
  ...
648
754
 
649
- def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
755
+ def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
650
756
  """
651
- Specifies the PyPI packages for the step.
757
+ Specifies the Conda environment for the step.
652
758
 
653
759
  Information in this decorator will augment any
654
- attributes set in the `@pyi_base` flow-level decorator. Hence,
655
- you can use `@pypi_base` to set packages required by all
656
- steps and use `@pypi` to specify step-specific overrides.
760
+ attributes set in the `@conda_base` flow-level decorator. Hence,
761
+ you can use `@conda_base` to set packages required by all
762
+ steps and use `@conda` to specify step-specific overrides.
657
763
 
658
764
 
659
765
  Parameters
660
766
  ----------
661
- packages : Dict[str, str], default: {}
767
+ packages : Dict[str, str], default {}
662
768
  Packages to use for this step. The key is the name of the package
663
769
  and the value is the version to use.
664
- python : str, optional, default: None
770
+ libraries : Dict[str, str], default {}
771
+ Supported for backward compatibility. When used with packages, packages will take precedence.
772
+ python : str, optional, default None
665
773
  Version of Python to use, e.g. '3.7.4'. A default value of None implies
666
774
  that the version used will correspond to the version of the Python interpreter used to start the run.
775
+ disabled : bool, default False
776
+ If set to True, disables @conda.
667
777
  """
668
778
  ...
669
779
 
670
- def ollama(*, models: list, backend: str, force_pull: bool, cache_update_policy: str, force_cache_update: bool, debug: bool, circuit_breaker_config: dict, timeout_config: dict) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ @typing.overload
+ def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- This decorator is used to run Ollama APIs as Metaflow task sidecars.
-
- User code call
- --------------
- @ollama(
- models=[...],
- ...
- )
-
- Valid backend options
- ---------------------
- - 'local': Run as a separate process on the local task machine.
- - (TODO) 'managed': Outerbounds hosts and selects compute provider.
- - (TODO) 'remote': Spin up separate instance to serve Ollama models.
+ Specifies that the step will succeed under all circumstances.
 
- Valid model options
- -------------------
- Any model here https://ollama.com/search, e.g. 'llama3.2', 'llama3.3'
+ The decorator will create an optional artifact, specified by `var`, which
+ contains the exception raised. You can use it to detect the presence
+ of errors, indicating that all happy-path artifacts produced by the step
+ are missing.
 
 
  Parameters
  ----------
- models: list[str]
- List of Ollama containers running models in sidecars.
- backend: str
- Determines where and how to run the Ollama process.
- force_pull: bool
- Whether to run `ollama pull` no matter what, or first check the remote cache in Metaflow datastore for this model key.
- cache_update_policy: str
- Cache update policy: "auto", "force", or "never".
- force_cache_update: bool
- Simple override for "force" cache update policy.
- debug: bool
- Whether to turn on verbose debugging logs.
- circuit_breaker_config: dict
- Configuration for circuit breaker protection. Keys: failure_threshold, recovery_timeout, reset_timeout.
- timeout_config: dict
- Configuration for various operation timeouts. Keys: pull, stop, health_check, install, server_startup.
+ var : str, optional, default None
+ Name of the artifact in which to store the caught exception.
+ If not specified, the exception is not stored.
+ print_exception : bool, default True
+ Determines whether or not the exception is printed to
+ stdout when caught.
  """
  ...
 
- def nvidia(*, gpu: int, gpu_type: str, queue_timeout: int) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ @typing.overload
+ def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ ...
+
+ @typing.overload
+ def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
  """
- Specifies that this step should execute on DGX cloud.
+ Specifies that the step will succeed under all circumstances.
+
+ The decorator will create an optional artifact, specified by `var`, which
+ contains the exception raised. You can use it to detect the presence
+ of errors, indicating that all happy-path artifacts produced by the step
+ are missing.
 
 
  Parameters
  ----------
- gpu : int
- Number of GPUs to use.
- gpu_type : str
- Type of Nvidia GPU to use.
- queue_timeout : int
- Time to keep the job in NVCF's queue.
+ var : str, optional, default None
+ Name of the artifact in which to store the caught exception.
+ If not specified, the exception is not stored.
+ print_exception : bool, default True
+ Determines whether or not the exception is printed to
+ stdout when caught.
  """
  ...
 
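A minimal sketch of the `@catch` decorator documented above; the artifact name `compute_failed` is an illustrative choice:

```python
from metaflow import FlowSpec, catch, step

class CatchExampleFlow(FlowSpec):

    @catch(var='compute_failed', print_exception=True)
    @step
    def start(self):
        self.result = 1 / 0  # raises ZeroDivisionError; @catch swallows it
        self.next(self.end)

    @step
    def end(self):
        # The presence of the artifact signals that the happy-path
        # artifacts (here, self.result) are missing.
        if getattr(self, 'compute_failed', None):
            print('start failed with:', self.compute_failed)

if __name__ == '__main__':
    CatchExampleFlow()
```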
@@ -761,6 +863,72 @@ def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None],
  """
  ...
 
+ @typing.overload
+ def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Specifies the PyPI packages for the step.
+
+ Information in this decorator will augment any
+ attributes set in the `@pypi_base` flow-level decorator. Hence,
+ you can use `@pypi_base` to set packages required by all
+ steps and use `@pypi` to specify step-specific overrides.
+
+
+ Parameters
+ ----------
+ packages : Dict[str, str], default: {}
+ Packages to use for this step. The key is the name of the package
+ and the value is the version to use.
+ python : str, optional, default: None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ """
+ ...
+
+ @typing.overload
+ def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ ...
+
+ @typing.overload
+ def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
+ """
+ Specifies the PyPI packages for the step.
+
+ Information in this decorator will augment any
+ attributes set in the `@pypi_base` flow-level decorator. Hence,
+ you can use `@pypi_base` to set packages required by all
+ steps and use `@pypi` to specify step-specific overrides.
+
+
+ Parameters
+ ----------
+ packages : Dict[str, str], default: {}
+ Packages to use for this step. The key is the name of the package
+ and the value is the version to use.
+ python : str, optional, default: None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ """
+ ...
+
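For reference, a short sketch combining the step-level `@pypi` decorator above with the flow-level `@pypi_base` that appears later in this diff; the packages and versions are illustrative:

```python
from metaflow import FlowSpec, pypi, pypi_base, step

@pypi_base(python='3.11.5', packages={'requests': '2.32.3'})  # common to all steps
class PypiExampleFlow(FlowSpec):

    @step
    def start(self):
        import requests  # from the flow-level environment
        self.status = requests.get('https://example.com').status_code
        self.next(self.parse)

    @pypi(packages={'beautifulsoup4': '4.12.3'})  # step-specific override
    @step
    def parse(self):
        import bs4  # available only in this step's environment
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == '__main__':
    PypiExampleFlow()
```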
+ def app_deploy(*, app_port: int, app_name: str) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Specifies that this step is used to deploy an instance of the app.
+ Requires that self.app_name, self.app_port, self.entrypoint and self.deployDir are set.
+
+
+ Parameters
+ ----------
+ app_port : int
+ Port on which the deployed app listens.
+ app_name : str
+ Name of the app to deploy.
+ """
+ ...
+
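A hedged sketch of an `app_deploy` step, assuming only what the docstring above states (the step must set `self.app_name`, `self.app_port`, `self.entrypoint` and `self.deployDir`); every attribute value here is invented for illustration:

```python
from metaflow import FlowSpec, app_deploy, step

class AppDeployFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.deploy)

    @app_deploy(app_port=8080, app_name='demo-app')
    @step
    def deploy(self):
        # The docstring requires these attributes to be set by the step.
        self.app_name = 'demo-app'    # illustrative value
        self.app_port = 8080          # illustrative value
        self.entrypoint = 'serve.py'  # illustrative value
        self.deployDir = './app'      # illustrative value
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == '__main__':
    AppDeployFlow()
```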
  @typing.overload
  def card(*, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
@@ -810,255 +978,96 @@ def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typ
  """
  ...
 
- def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = 'KUBERNETES_IMAGE_PULL_POLICY', image_pull_secrets: typing.List[str] = [], service_account: str = 'METAFLOW_KUBERNETES_SERVICE_ACCOUNT', secrets: typing.Optional[typing.List[str]] = None, node_selector: typing.Union[typing.Dict[str, str], str, None] = None, namespace: str = 'METAFLOW_KUBERNETES_NAMESPACE', gpu: typing.Optional[int] = None, gpu_vendor: str = 'KUBERNETES_GPU_VENDOR', tolerations: typing.List[str] = [], labels: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_LABELS', annotations: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_ANNOTATIONS', use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = '/metaflow_temp', persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None, hostname_resolution_timeout: int = 600, qos: str = 'Burstable', security_context: typing.Optional[typing.Dict[str, typing.Any]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Specifies that this step should execute on Kubernetes.
-
-
- Parameters
- ----------
- cpu : int, default 1
- Number of CPUs required for this step. If `@resources` is
- also present, the maximum value from all decorators is used.
- memory : int, default 4096
- Memory size (in MB) required for this step. If
- `@resources` is also present, the maximum value from all decorators is
- used.
- disk : int, default 10240
- Disk size (in MB) required for this step. If
- `@resources` is also present, the maximum value from all decorators is
- used.
- image : str, optional, default None
- Docker image to use when launching on Kubernetes. If not specified, and
- METAFLOW_KUBERNETES_CONTAINER_IMAGE is specified, that image is used. If
- not, a default Docker image mapping to the current version of Python is used.
- image_pull_policy: str, default KUBERNETES_IMAGE_PULL_POLICY
- If given, the imagePullPolicy to be applied to the Docker image of the step.
- image_pull_secrets: List[str], default []
- The default is extracted from METAFLOW_KUBERNETES_IMAGE_PULL_SECRETS.
- Kubernetes image pull secrets to use when pulling container images
- in Kubernetes.
- service_account : str, default METAFLOW_KUBERNETES_SERVICE_ACCOUNT
- Kubernetes service account to use when launching pod in Kubernetes.
- secrets : List[str], optional, default None
- Kubernetes secrets to use when launching pod in Kubernetes. These
- secrets are in addition to the ones defined in `METAFLOW_KUBERNETES_SECRETS`
- in Metaflow configuration.
- node_selector: Union[Dict[str,str], str], optional, default None
- Kubernetes node selector(s) to apply to the pod running the task.
- Can be passed in as a comma separated string of values e.g.
- 'kubernetes.io/os=linux,kubernetes.io/arch=amd64' or as a dictionary
- {'kubernetes.io/os': 'linux', 'kubernetes.io/arch': 'amd64'}
- namespace : str, default METAFLOW_KUBERNETES_NAMESPACE
- Kubernetes namespace to use when launching pod in Kubernetes.
- gpu : int, optional, default None
- Number of GPUs required for this step. A value of zero implies that
- the scheduled node should not have GPUs.
- gpu_vendor : str, default KUBERNETES_GPU_VENDOR
- The vendor of the GPUs to be used for this step.
- tolerations : List[str], default []
- The default is extracted from METAFLOW_KUBERNETES_TOLERATIONS.
- Kubernetes tolerations to use when launching pod in Kubernetes.
- labels: Dict[str, str], default: METAFLOW_KUBERNETES_LABELS
- Kubernetes labels to use when launching pod in Kubernetes.
- annotations: Dict[str, str], default: METAFLOW_KUBERNETES_ANNOTATIONS
- Kubernetes annotations to use when launching pod in Kubernetes.
- use_tmpfs : bool, default False
- This enables an explicit tmpfs mount for this step.
- tmpfs_tempdir : bool, default True
- sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
- tmpfs_size : int, optional, default: None
- The value for the size (in MiB) of the tmpfs mount for this step.
- This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
- memory allocated for this step.
- tmpfs_path : str, optional, default /metaflow_temp
- Path to tmpfs mount for this step.
- persistent_volume_claims : Dict[str, str], optional, default None
- A map (dictionary) of persistent volumes to be mounted to the pod for this step. The map is from persistent
- volumes to the path to which the volume is to be mounted, e.g., `{'pvc-name': '/path/to/mount/on'}`.
- shared_memory: int, optional
- Shared memory size (in MiB) required for this step
- port: int, optional
- Port number to specify in the Kubernetes job object
- compute_pool : str, optional, default None
- Compute pool to be used for for this step.
- If not specified, any accessible compute pool within the perimeter is used.
- hostname_resolution_timeout: int, default 10 * 60
- Timeout in seconds for the workers tasks in the gang scheduled cluster to resolve the hostname of control task.
- Only applicable when @parallel is used.
- qos: str, default: Burstable
- Quality of Service class to assign to the pod. Supported values are: Guaranteed, Burstable, BestEffort
-
- security_context: Dict[str, Any], optional, default None
- Container security context. Applies to the task container. Allows the following keys:
- - privileged: bool, optional, default None
- - allow_privilege_escalation: bool, optional, default None
- - run_as_user: int, optional, default None
- - run_as_group: int, optional, default None
- - run_as_non_root: bool, optional, default None
- """
- ...
-
  @typing.overload
- def resources(*, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
- Specifies the resources needed when executing this step.
-
- Use `@resources` to specify the resource requirements
- independently of the specific compute layer (`@batch`, `@kubernetes`).
+ Specifies the event(s) that this flow depends on.
 
- You can choose the compute layer on the command line by executing e.g.
  ```
- python myflow.py run --with batch
+ @trigger(event='foo')
  ```
  or
  ```
- python myflow.py run --with kubernetes
+ @trigger(events=['foo', 'bar'])
  ```
- which executes the flow on the desired system using the
- requirements specified in `@resources`.
-
 
- Parameters
- ----------
- cpu : int, default 1
- Number of CPUs required for this step.
- gpu : int, optional, default None
- Number of GPUs required for this step.
- disk : int, optional, default None
- Disk size (in MB) required for this step. Only applies on Kubernetes.
- memory : int, default 4096
- Memory size (in MB) required for this step.
- shared_memory : int, optional, default None
- The value for the size (in MiB) of the /dev/shm volume for this step.
- This parameter maps to the `--shm-size` option in Docker.
- """
- ...
-
- @typing.overload
- def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
- """
- Specifies the resources needed when executing this step.
-
- Use `@resources` to specify the resource requirements
- independently of the specific compute layer (`@batch`, `@kubernetes`).
-
- You can choose the compute layer on the command line by executing e.g.
+ Additionally, you can specify the parameter mappings
+ to map event payload to Metaflow parameters for the flow.
  ```
- python myflow.py run --with batch
+ @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
  ```
  or
  ```
- python myflow.py run --with kubernetes
+ @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'}},
+ {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}}])
  ```
- which executes the flow on the desired system using the
- requirements specified in `@resources`.
-
-
- Parameters
- ----------
- cpu : int, default 1
- Number of CPUs required for this step.
- gpu : int, optional, default None
- Number of GPUs required for this step.
- disk : int, optional, default None
- Disk size (in MB) required for this step. Only applies on Kubernetes.
- memory : int, default 4096
- Memory size (in MB) required for this step.
- shared_memory : int, optional, default None
- The value for the size (in MiB) of the /dev/shm volume for this step.
- This parameter maps to the `--shm-size` option in Docker.
- """
- ...
-
- def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
- """
- The `@airflow_external_task_sensor` decorator attaches a Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
- This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as a flow decorators. Adding more than one decorator will ensure that `start` step starts only after all sensors finish.
-
 
- Parameters
- ----------
- timeout : int
- Time, in seconds before the task times out and fails. (Default: 3600)
- poke_interval : int
- Time in seconds that the job should wait in between each try. (Default: 60)
- mode : str
- How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
- exponential_backoff : bool
- allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
- pool : str
- the slot pool this task should run in,
- slot pools are a way to limit concurrency for certain tasks. (Default:None)
- soft_fail : bool
- Set to true to mark the task as SKIPPED on failure. (Default: False)
- name : str
- Name of the sensor on Airflow
- description : str
- Description of sensor in the Airflow UI
- external_dag_id : str
- The dag_id that contains the task you want to wait for.
- external_task_ids : List[str]
- The list of task_ids that you want to wait for.
- If None (default value) the sensor waits for the DAG. (Default: None)
- allowed_states : List[str]
- Iterable of allowed states, (Default: ['success'])
- failed_states : List[str]
- Iterable of failed or dis-allowed states. (Default: None)
- execution_delta : datetime.timedelta
- time difference with the previous execution to look at,
- the default is the same logical date as the current task or DAG. (Default: None)
- check_existence: bool
- Set to True to check if the external task exists or check if
- the DAG to wait for exists. (Default: True)
- """
- ...
-
- @typing.overload
- def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
- """
- Specifies the PyPI packages for all steps of the flow.
+ 'parameters' can also be a list of strings and tuples like so:
+ ```
+ @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
+ ```
+ This is equivalent to:
+ ```
+ @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
+ ```
 
- Use `@pypi_base` to set common packages required by all
- steps and use `@pypi` to specify step-specific overrides.
 
  Parameters
  ----------
- packages : Dict[str, str], default: {}
- Packages to use for this flow. The key is the name of the package
- and the value is the version to use.
- python : str, optional, default: None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
+ event : Union[str, Dict[str, Any]], optional, default None
+ Event dependency for this flow.
+ events : List[Union[str, Dict[str, Any]]], default []
+ Events dependency for this flow.
+ options : Dict[str, Any], default {}
+ Backend-specific configuration for tuning eventing behavior.
  """
  ...
 
  @typing.overload
- def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ def trigger(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
  ...
 
- def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
+ def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}):
  """
- Specifies the PyPI packages for all steps of the flow.
+ Specifies the event(s) that this flow depends on.
+
+ ```
+ @trigger(event='foo')
+ ```
+ or
+ ```
+ @trigger(events=['foo', 'bar'])
+ ```
+
+ Additionally, you can specify the parameter mappings
+ to map event payload to Metaflow parameters for the flow.
+ ```
+ @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
+ ```
+ or
+ ```
+ @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'}},
+ {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}}])
+ ```
+
+ 'parameters' can also be a list of strings and tuples like so:
+ ```
+ @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
+ ```
+ This is equivalent to:
+ ```
+ @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
+ ```
 
- Use `@pypi_base` to set common packages required by all
- steps and use `@pypi` to specify step-specific overrides.
 
  Parameters
  ----------
- packages : Dict[str, str], default: {}
- Packages to use for this flow. The key is the name of the package
- and the value is the version to use.
- python : str, optional, default: None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
+ event : Union[str, Dict[str, Any]], optional, default None
+ Event dependency for this flow.
+ events : List[Union[str, Dict[str, Any]]], default []
+ Events dependency for this flow.
+ options : Dict[str, Any], default {}
+ Backend-specific configuration for tuning eventing behavior.
  """
  ...
 
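A minimal sketch of an event-triggered flow using the `@trigger` decorator added above, including a parameter mapping; the event name and payload field are illustrative:

```python
from metaflow import FlowSpec, Parameter, step, trigger

# Deployed to a production scheduler, this flow runs whenever the
# 'data_updated' event fires; the 'table' parameter is filled from
# the event payload field 'table_name' (both names are illustrative).
@trigger(event={'name': 'data_updated', 'parameters': {'table': 'table_name'}})
class TriggeredFlow(FlowSpec):
    table = Parameter('table', default='raw_events')

    @step
    def start(self):
        print('refreshing', self.table)  # populated from the event payload
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == '__main__':
    TriggeredFlow()
```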
@@ -1114,96 +1123,155 @@ def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packa
  ...
 
  @typing.overload
- def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ def trigger_on_finish(*, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
- Specifies the event(s) that this flow depends on.
+ Specifies the flow(s) that this flow depends on.
 
  ```
- @trigger(event='foo')
+ @trigger_on_finish(flow='FooFlow')
  ```
  or
  ```
- @trigger(events=['foo', 'bar'])
+ @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
  ```
+ This decorator respects the @project decorator and triggers the flow
+ when upstream runs within the same namespace complete successfully.
 
- Additionally, you can specify the parameter mappings
- to map event payload to Metaflow parameters for the flow.
+ Additionally, you can specify project-aware upstream flow dependencies
+ by specifying the fully qualified project_flow_name.
  ```
- @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
+ @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
  ```
  or
  ```
- @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
- {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
+ @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
  ```
 
- 'parameters' can also be a list of strings and tuples like so:
- ```
- @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
- ```
- This is equivalent to:
+ You can also specify just the project or project branch (other values will be
+ inferred from the current project or project branch):
  ```
- @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
+ @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
  ```
 
+ Note that `branch` is typically one of:
+ - `prod`
+ - `user.bob`
+ - `test.my_experiment`
+ - `prod.staging`
+
 
  Parameters
  ----------
- event : Union[str, Dict[str, Any]], optional, default None
- Event dependency for this flow.
- events : List[Union[str, Dict[str, Any]]], default []
- Events dependency for this flow.
+ flow : Union[str, Dict[str, str]], optional, default None
+ Upstream flow dependency for this flow.
+ flows : List[Union[str, Dict[str, str]]], default []
+ Upstream flow dependencies for this flow.
  options : Dict[str, Any], default {}
  Backend-specific configuration for tuning eventing behavior.
  """
  ...
 
  @typing.overload
- def trigger(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
  ...
 
- def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}):
+ def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
  """
- Specifies the event(s) that this flow depends on.
+ Specifies the flow(s) that this flow depends on.
 
  ```
- @trigger(event='foo')
+ @trigger_on_finish(flow='FooFlow')
  ```
  or
  ```
- @trigger(events=['foo', 'bar'])
+ @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
  ```
+ This decorator respects the @project decorator and triggers the flow
+ when upstream runs within the same namespace complete successfully.
 
- Additionally, you can specify the parameter mappings
- to map event payload to Metaflow parameters for the flow.
+ Additionally, you can specify project-aware upstream flow dependencies
+ by specifying the fully qualified project_flow_name.
  ```
- @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
+ @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
  ```
  or
  ```
- @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
- {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
+ @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
  ```
 
- 'parameters' can also be a list of strings and tuples like so:
- ```
- @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
- ```
- This is equivalent to:
+ You can also specify just the project or project branch (other values will be
+ inferred from the current project or project branch):
  ```
- @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
+ @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
  ```
 
+ Note that `branch` is typically one of:
+ - `prod`
+ - `user.bob`
+ - `test.my_experiment`
+ - `prod.staging`
+
 
  Parameters
  ----------
- event : Union[str, Dict[str, Any]], optional, default None
- Event dependency for this flow.
- events : List[Union[str, Dict[str, Any]]], default []
- Events dependency for this flow.
- options : Dict[str, Any], default {}
- Backend-specific configuration for tuning eventing behavior.
- """
+ flow : Union[str, Dict[str, str]], optional, default None
+ Upstream flow dependency for this flow.
+ flows : List[Union[str, Dict[str, str]]], default []
+ Upstream flow dependencies for this flow.
+ options : Dict[str, Any], default {}
+ Backend-specific configuration for tuning eventing behavior.
+ """
+ ...
+
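A minimal sketch of `@trigger_on_finish` as documented above; `UpstreamFlow` is an illustrative flow name:

```python
from metaflow import FlowSpec, step, trigger_on_finish

# Runs whenever a deployed UpstreamFlow in the same namespace
# completes successfully.
@trigger_on_finish(flow='UpstreamFlow')
class DownstreamReportFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == '__main__':
    DownstreamReportFlow()
```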
+ @typing.overload
+ def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ """
+ Specifies the times when the flow should be run when running on a
+ production scheduler.
+
+
+ Parameters
+ ----------
+ hourly : bool, default False
+ Run the workflow hourly.
+ daily : bool, default True
+ Run the workflow daily.
+ weekly : bool, default False
+ Run the workflow weekly.
+ cron : str, optional, default None
+ Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
+ specified by this expression.
+ timezone : str, optional, default None
+ Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
+ which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
+ """
+ ...
+
+ @typing.overload
+ def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ ...
+
+ def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
+ """
+ Specifies the times when the flow should be run when running on a
+ production scheduler.
+
+
+ Parameters
+ ----------
+ hourly : bool, default False
+ Run the workflow hourly.
+ daily : bool, default True
+ Run the workflow daily.
+ weekly : bool, default False
+ Run the workflow weekly.
+ cron : str, optional, default None
+ Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
+ specified by this expression.
+ timezone : str, optional, default None
+ Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
+ which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
+ """
  ...
 
  def with_artifact_store(f: typing.Optional[typing.Type[FlowSpecDerived]] = None):
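A minimal sketch of the `@schedule` decorator added above, using a custom cron expression; the schedule itself is illustrative:

```python
from metaflow import FlowSpec, schedule, step

# When deployed to a production scheduler, runs at 08:00 every weekday;
# the cron string and timezone are illustrative choices.
@schedule(cron='0 8 * * 1-5', timezone='UTC')
class MorningRefreshFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == '__main__':
    MorningRefreshFlow()
```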
@@ -1320,46 +1388,44 @@ def with_artifact_store(f: typing.Optional[typing.Type[FlowSpecDerived]] = None)
  """
  ...
 
- def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ @typing.overload
+ def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
- The `@airflow_s3_key_sensor` decorator attaches a Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
- before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
- and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
- added as a flow decorators. Adding more than one decorator will ensure that `start` step
- starts only after all sensors finish.
+ Specifies the PyPI packages for all steps of the flow.
 
+ Use `@pypi_base` to set common packages required by all
+ steps and use `@pypi` to specify step-specific overrides.
 
  Parameters
  ----------
- timeout : int
- Time, in seconds before the task times out and fails. (Default: 3600)
- poke_interval : int
- Time in seconds that the job should wait in between each try. (Default: 60)
- mode : str
- How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
- exponential_backoff : bool
- allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
- pool : str
- the slot pool this task should run in,
- slot pools are a way to limit concurrency for certain tasks. (Default:None)
- soft_fail : bool
- Set to true to mark the task as SKIPPED on failure. (Default: False)
- name : str
- Name of the sensor on Airflow
- description : str
- Description of sensor in the Airflow UI
- bucket_key : Union[str, List[str]]
- The key(s) being waited on. Supports full s3:// style url or relative path from root level.
- When it's specified as a full s3:// url, please leave `bucket_name` as None
- bucket_name : str
- Name of the S3 bucket. Only needed when bucket_key is not provided as a full s3:// url.
- When specified, all the keys passed to bucket_key refers to this bucket. (Default:None)
- wildcard_match : bool
- whether the bucket_key should be interpreted as a Unix wildcard pattern. (Default: False)
- aws_conn_id : str
- a reference to the s3 connection on Airflow. (Default: None)
- verify : bool
- Whether or not to verify SSL certificates for S3 connection. (Default: None)
+ packages : Dict[str, str], default: {}
+ Packages to use for this flow. The key is the name of the package
+ and the value is the version to use.
+ python : str, optional, default: None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ """
+ ...
+
+ @typing.overload
+ def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ ...
+
+ def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
+ """
+ Specifies the PyPI packages for all steps of the flow.
+
+ Use `@pypi_base` to set common packages required by all
+ steps and use `@pypi` to specify step-specific overrides.
+
+ Parameters
+ ----------
+ packages : Dict[str, str], default: {}
+ Packages to use for this flow. The key is the name of the package
+ and the value is the version to use.
+ python : str, optional, default: None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
  """
  ...
 
@@ -1398,155 +1464,89 @@ def project(*, name: str, branch: typing.Optional[str] = None, production: bool
  """
  ...
 
- @typing.overload
- def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
- """
- Specifies the times when the flow should be run when running on a
- production scheduler.
-
-
- Parameters
- ----------
- hourly : bool, default False
- Run the workflow hourly.
- daily : bool, default True
- Run the workflow daily.
- weekly : bool, default False
- Run the workflow weekly.
- cron : str, optional, default None
- Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
- specified by this expression.
- timezone : str, optional, default None
- Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
- which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
- """
- ...
-
- @typing.overload
- def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
- ...
-
- def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
- """
- Specifies the times when the flow should be run when running on a
- production scheduler.
-
-
- Parameters
- ----------
- hourly : bool, default False
- Run the workflow hourly.
- daily : bool, default True
- Run the workflow daily.
- weekly : bool, default False
- Run the workflow weekly.
- cron : str, optional, default None
- Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
- specified by this expression.
- timezone : str, optional, default None
- Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
- which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
- """
- ...
-
- @typing.overload
- def trigger_on_finish(*, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
- Specifies the flow(s) that this flow depends on.
-
- ```
- @trigger_on_finish(flow='FooFlow')
- ```
- or
- ```
- @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
- ```
- This decorator respects the @project decorator and triggers the flow
- when upstream runs within the same namespace complete successfully
-
- Additionally, you can specify project aware upstream flow dependencies
- by specifying the fully qualified project_flow_name.
- ```
- @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
- ```
- or
- ```
- @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
- ```
-
- You can also specify just the project or project branch (other values will be
- inferred from the current project or project branch):
- ```
- @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
- ```
-
- Note that `branch` is typically one of:
- - `prod`
- - `user.bob`
- - `test.my_experiment`
- - `prod.staging`
+ The `@airflow_external_task_sensor` decorator attaches an Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
+ This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as flow decorators. Adding more than one decorator will ensure that the `start` step starts only after all sensors finish.
 
 
  Parameters
  ----------
- flow : Union[str, Dict[str, str]], optional, default None
- Upstream flow dependency for this flow.
- flows : List[Union[str, Dict[str, str]]], default []
- Upstream flow dependencies for this flow.
- options : Dict[str, Any], default {}
- Backend-specific configuration for tuning eventing behavior.
+ timeout : int
+ Time, in seconds before the task times out and fails. (Default: 3600)
+ poke_interval : int
+ Time in seconds that the job should wait in between each try. (Default: 60)
+ mode : str
+ How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
+ exponential_backoff : bool
+ allow progressively longer waits between pokes by using an exponential backoff algorithm. (Default: True)
+ pool : str
+ the slot pool this task should run in,
+ slot pools are a way to limit concurrency for certain tasks. (Default: None)
+ soft_fail : bool
+ Set to true to mark the task as SKIPPED on failure. (Default: False)
+ name : str
+ Name of the sensor on Airflow
+ description : str
+ Description of sensor in the Airflow UI
+ external_dag_id : str
+ The dag_id that contains the task you want to wait for.
+ external_task_ids : List[str]
+ The list of task_ids that you want to wait for.
+ If None (default value) the sensor waits for the DAG. (Default: None)
+ allowed_states : List[str]
+ Iterable of allowed states. (Default: ['success'])
+ failed_states : List[str]
+ Iterable of failed or disallowed states. (Default: None)
+ execution_delta : datetime.timedelta
+ time difference with the previous execution to look at,
+ the default is the same logical date as the current task or DAG. (Default: None)
+ check_existence: bool
+ Set to True to check if the external task exists or check if
+ the DAG to wait for exists. (Default: True)
  """
  ...
 
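A hedged sketch of attaching the sensor decorator added above to a flow, with arguments mirroring the documented defaults; the DAG id and sensor name are illustrative:

```python
from metaflow import FlowSpec, airflow_external_task_sensor, step

# Compiled with `airflow create`, this blocks `start` until the
# (illustrative) 'upstream_dag' DAG has completed successfully.
@airflow_external_task_sensor(
    timeout=3600,
    poke_interval=60,
    mode='poke',
    exponential_backoff=True,
    pool=None,
    soft_fail=False,
    name='wait_for_upstream',
    description='Block until the upstream DAG finishes',
    external_dag_id='upstream_dag',  # illustrative DAG id
    external_task_ids=None,          # None: wait for the whole DAG
    allowed_states=['success'],
    failed_states=None,
    execution_delta=None,
    check_existence=True,
)
class DownstreamAirflowFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == '__main__':
    DownstreamAirflowFlow()
```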
- @typing.overload
- def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
- ...
-
- def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
+ def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
- Specifies the flow(s) that this flow depends on.
-
- ```
- @trigger_on_finish(flow='FooFlow')
- ```
- or
- ```
- @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
- ```
- This decorator respects the @project decorator and triggers the flow
- when upstream runs within the same namespace complete successfully
-
- Additionally, you can specify project aware upstream flow dependencies
- by specifying the fully qualified project_flow_name.
- ```
- @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
- ```
- or
- ```
- @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
- ```
-
- You can also specify just the project or project branch (other values will be
- inferred from the current project or project branch):
- ```
- @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
- ```
-
- Note that `branch` is typically one of:
- - `prod`
- - `user.bob`
- - `test.my_experiment`
- - `prod.staging`
+ The `@airflow_s3_key_sensor` decorator attaches an Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
+ before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
+ and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
+ added as flow decorators. Adding more than one decorator will ensure that the `start` step
+ starts only after all sensors finish.
 
 
  Parameters
  ----------
- flow : Union[str, Dict[str, str]], optional, default None
- Upstream flow dependency for this flow.
- flows : List[Union[str, Dict[str, str]]], default []
- Upstream flow dependencies for this flow.
- options : Dict[str, Any], default {}
- Backend-specific configuration for tuning eventing behavior.
+ timeout : int
+ Time, in seconds before the task times out and fails. (Default: 3600)
+ poke_interval : int
+ Time in seconds that the job should wait in between each try. (Default: 60)
+ mode : str
+ How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
+ exponential_backoff : bool
+ allow progressively longer waits between pokes by using an exponential backoff algorithm. (Default: True)
+ pool : str
+ the slot pool this task should run in,
+ slot pools are a way to limit concurrency for certain tasks. (Default: None)
+ soft_fail : bool
+ Set to true to mark the task as SKIPPED on failure. (Default: False)
+ name : str
+ Name of the sensor on Airflow
+ description : str
+ Description of sensor in the Airflow UI
+ bucket_key : Union[str, List[str]]
+ The key(s) being waited on. Supports full s3:// style url or relative path from root level.
+ When it's specified as a full s3:// url, please leave `bucket_name` as None
+ bucket_name : str
+ Name of the S3 bucket. Only needed when bucket_key is not provided as a full s3:// url.
+ When specified, all the keys passed to bucket_key refer to this bucket. (Default: None)
+ wildcard_match : bool
+ whether the bucket_key should be interpreted as a Unix wildcard pattern. (Default: False)
+ aws_conn_id : str
+ a reference to the s3 connection on Airflow. (Default: None)
+ verify : bool
+ Whether or not to verify SSL certificates for S3 connection. (Default: None)
  """
  ...
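Likewise, a hedged sketch of `@airflow_s3_key_sensor`; the bucket and key are illustrative, and since `bucket_key` is given as a full s3:// url, `bucket_name` is left as None per the docstring:

```python
from metaflow import FlowSpec, airflow_s3_key_sensor, step

# Compiled with `airflow create`, this blocks `start` until the
# (illustrative) input object appears in S3.
@airflow_s3_key_sensor(
    timeout=3600,
    poke_interval=60,
    mode='poke',
    exponential_backoff=True,
    pool=None,
    soft_fail=False,
    name='wait_for_input_file',
    description='Block until the daily input file arrives',
    bucket_key='s3://my-bucket/input/daily.csv',  # full s3:// url
    bucket_name=None,                             # must stay None with a full url
    wildcard_match=False,
    aws_conn_id=None,
    verify=None,
)
class S3DrivenFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == '__main__':
    S3DrivenFlow()
```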