ob-metaflow-stubs 6.0.3.171__py2.py3-none-any.whl → 6.0.3.173rc0__py2.py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (214)
  1. metaflow-stubs/__init__.pyi +708 -707
  2. metaflow-stubs/cards.pyi +1 -1
  3. metaflow-stubs/cli.pyi +1 -1
  4. metaflow-stubs/cli_components/__init__.pyi +1 -1
  5. metaflow-stubs/cli_components/utils.pyi +1 -1
  6. metaflow-stubs/client/__init__.pyi +1 -1
  7. metaflow-stubs/client/core.pyi +4 -4
  8. metaflow-stubs/client/filecache.pyi +1 -1
  9. metaflow-stubs/events.pyi +2 -2
  10. metaflow-stubs/exception.pyi +1 -1
  11. metaflow-stubs/flowspec.pyi +4 -4
  12. metaflow-stubs/generated_for.txt +1 -1
  13. metaflow-stubs/includefile.pyi +3 -3
  14. metaflow-stubs/info_file.pyi +1 -1
  15. metaflow-stubs/metadata_provider/__init__.pyi +1 -1
  16. metaflow-stubs/metadata_provider/heartbeat.pyi +1 -1
  17. metaflow-stubs/metadata_provider/metadata.pyi +2 -2
  18. metaflow-stubs/metadata_provider/util.pyi +1 -1
  19. metaflow-stubs/metaflow_config.pyi +1 -1
  20. metaflow-stubs/metaflow_current.pyi +123 -123
  21. metaflow-stubs/metaflow_git.pyi +1 -1
  22. metaflow-stubs/mf_extensions/__init__.pyi +1 -1
  23. metaflow-stubs/mf_extensions/obcheckpoint/__init__.pyi +1 -1
  24. metaflow-stubs/mf_extensions/obcheckpoint/plugins/__init__.pyi +1 -1
  25. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/__init__.pyi +1 -1
  26. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/__init__.pyi +1 -1
  27. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/async_cards.pyi +1 -1
  28. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/deco_injection_mixin.pyi +1 -1
  29. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/extra_components.pyi +2 -2
  30. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/__init__.pyi +1 -1
  31. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/cards/__init__.pyi +1 -1
  32. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/cards/checkpoint_lister.pyi +4 -4
  33. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/cards/lineage_card.pyi +1 -1
  34. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/checkpoint_storage.pyi +4 -4
  35. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/constructors.pyi +1 -1
  36. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/core.pyi +3 -3
  37. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/decorator.pyi +3 -3
  38. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/exceptions.pyi +1 -1
  39. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/final_api.pyi +1 -1
  40. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/lineage.pyi +1 -1
  41. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/__init__.pyi +1 -1
  42. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/context.pyi +2 -2
  43. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/core.pyi +2 -2
  44. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/decorator.pyi +1 -1
  45. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/exceptions.pyi +1 -1
  46. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/task_utils.pyi +3 -3
  47. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/utils.pyi +1 -1
  48. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastructures.pyi +2 -2
  49. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/exceptions.pyi +1 -1
  50. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/hf_hub/__init__.pyi +1 -1
  51. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/hf_hub/decorator.pyi +1 -1
  52. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/__init__.pyi +1 -1
  53. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/core.pyi +1 -1
  54. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/exceptions.pyi +1 -1
  55. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/model_storage.pyi +4 -4
  56. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/__init__.pyi +1 -1
  57. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/flowspec_utils.pyi +1 -1
  58. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/general.pyi +1 -1
  59. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/identity_utils.pyi +2 -2
  60. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/serialization_handler/__init__.pyi +1 -1
  61. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/serialization_handler/base.pyi +1 -1
  62. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/serialization_handler/tar.pyi +2 -2
  63. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/tar_utils.pyi +2 -2
  64. metaflow-stubs/mf_extensions/outerbounds/__init__.pyi +1 -1
  65. metaflow-stubs/mf_extensions/outerbounds/plugins/__init__.pyi +1 -1
  66. metaflow-stubs/mf_extensions/outerbounds/plugins/card_utilities/__init__.pyi +1 -1
  67. metaflow-stubs/mf_extensions/outerbounds/plugins/card_utilities/injector.pyi +1 -1
  68. metaflow-stubs/mf_extensions/outerbounds/plugins/checkpoint_datastores/__init__.pyi +1 -1
  69. metaflow-stubs/mf_extensions/outerbounds/plugins/checkpoint_datastores/coreweave.pyi +1 -1
  70. metaflow-stubs/mf_extensions/outerbounds/plugins/checkpoint_datastores/nebius.pyi +1 -1
  71. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/__init__.pyi +6 -0
  72. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/baker.pyi +51 -0
  73. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/docker_environment.pyi +65 -0
  74. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/fast_bakery.pyi +74 -0
  75. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/__init__.pyi +1 -1
  76. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/constants.pyi +1 -1
  77. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/exceptions.pyi +1 -1
  78. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/ollama.pyi +1 -1
  79. metaflow-stubs/mf_extensions/outerbounds/plugins/snowflake/__init__.pyi +1 -1
  80. metaflow-stubs/mf_extensions/outerbounds/plugins/snowflake/snowflake.pyi +1 -1
  81. metaflow-stubs/mf_extensions/outerbounds/profilers/__init__.pyi +1 -1
  82. metaflow-stubs/mf_extensions/outerbounds/profilers/gpu.pyi +1 -1
  83. metaflow-stubs/mf_extensions/outerbounds/remote_config.pyi +1 -1
  84. metaflow-stubs/mf_extensions/outerbounds/toplevel/__init__.pyi +1 -1
  85. metaflow-stubs/mf_extensions/outerbounds/toplevel/global_aliases_for_metaflow_package.pyi +2 -1
  86. metaflow-stubs/multicore_utils.pyi +1 -1
  87. metaflow-stubs/ob_internal.pyi +11 -0
  88. metaflow-stubs/parameters.pyi +3 -3
  89. metaflow-stubs/plugins/__init__.pyi +12 -11
  90. metaflow-stubs/plugins/airflow/__init__.pyi +1 -1
  91. metaflow-stubs/plugins/airflow/airflow_utils.pyi +1 -1
  92. metaflow-stubs/plugins/airflow/exception.pyi +1 -1
  93. metaflow-stubs/plugins/airflow/sensors/__init__.pyi +1 -1
  94. metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +1 -1
  95. metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +1 -1
  96. metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +1 -1
  97. metaflow-stubs/plugins/argo/__init__.pyi +1 -1
  98. metaflow-stubs/plugins/argo/argo_client.pyi +1 -1
  99. metaflow-stubs/plugins/argo/argo_events.pyi +1 -1
  100. metaflow-stubs/plugins/argo/argo_workflows.pyi +3 -3
  101. metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +2 -2
  102. metaflow-stubs/plugins/argo/argo_workflows_deployer.pyi +3 -3
  103. metaflow-stubs/plugins/argo/argo_workflows_deployer_objects.pyi +2 -2
  104. metaflow-stubs/plugins/aws/__init__.pyi +1 -1
  105. metaflow-stubs/plugins/aws/aws_client.pyi +1 -1
  106. metaflow-stubs/plugins/aws/aws_utils.pyi +1 -1
  107. metaflow-stubs/plugins/aws/batch/__init__.pyi +1 -1
  108. metaflow-stubs/plugins/aws/batch/batch.pyi +1 -1
  109. metaflow-stubs/plugins/aws/batch/batch_client.pyi +1 -1
  110. metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +1 -1
  111. metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +1 -1
  112. metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +2 -2
  113. metaflow-stubs/plugins/aws/step_functions/__init__.pyi +1 -1
  114. metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +1 -1
  115. metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +1 -1
  116. metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +1 -1
  117. metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +1 -1
  118. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer.pyi +3 -3
  119. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer_objects.pyi +2 -2
  120. metaflow-stubs/plugins/azure/__init__.pyi +1 -1
  121. metaflow-stubs/plugins/azure/azure_credential.pyi +1 -1
  122. metaflow-stubs/plugins/azure/azure_exceptions.pyi +1 -1
  123. metaflow-stubs/plugins/azure/azure_secret_manager_secrets_provider.pyi +2 -2
  124. metaflow-stubs/plugins/azure/azure_utils.pyi +1 -1
  125. metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +1 -1
  126. metaflow-stubs/plugins/azure/includefile_support.pyi +1 -1
  127. metaflow-stubs/plugins/cards/__init__.pyi +5 -5
  128. metaflow-stubs/plugins/cards/card_client.pyi +2 -2
  129. metaflow-stubs/plugins/cards/card_creator.pyi +1 -1
  130. metaflow-stubs/plugins/cards/card_datastore.pyi +1 -1
  131. metaflow-stubs/plugins/cards/card_decorator.pyi +1 -1
  132. metaflow-stubs/plugins/cards/card_modules/__init__.pyi +1 -1
  133. metaflow-stubs/plugins/cards/card_modules/basic.pyi +1 -1
  134. metaflow-stubs/plugins/cards/card_modules/card.pyi +1 -1
  135. metaflow-stubs/plugins/cards/card_modules/components.pyi +3 -3
  136. metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +1 -1
  137. metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +1 -1
  138. metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +1 -1
  139. metaflow-stubs/plugins/cards/card_resolver.pyi +1 -1
  140. metaflow-stubs/plugins/cards/component_serializer.pyi +1 -1
  141. metaflow-stubs/plugins/cards/exception.pyi +1 -1
  142. metaflow-stubs/plugins/catch_decorator.pyi +1 -1
  143. metaflow-stubs/plugins/datatools/__init__.pyi +1 -1
  144. metaflow-stubs/plugins/datatools/local.pyi +1 -1
  145. metaflow-stubs/plugins/datatools/s3/__init__.pyi +1 -1
  146. metaflow-stubs/plugins/datatools/s3/s3.pyi +3 -3
  147. metaflow-stubs/plugins/datatools/s3/s3tail.pyi +1 -1
  148. metaflow-stubs/plugins/datatools/s3/s3util.pyi +1 -1
  149. metaflow-stubs/plugins/debug_logger.pyi +1 -1
  150. metaflow-stubs/plugins/debug_monitor.pyi +1 -1
  151. metaflow-stubs/plugins/environment_decorator.pyi +1 -1
  152. metaflow-stubs/plugins/events_decorator.pyi +1 -1
  153. metaflow-stubs/plugins/frameworks/__init__.pyi +1 -1
  154. metaflow-stubs/plugins/frameworks/pytorch.pyi +1 -1
  155. metaflow-stubs/plugins/gcp/__init__.pyi +1 -1
  156. metaflow-stubs/plugins/gcp/gcp_secret_manager_secrets_provider.pyi +2 -2
  157. metaflow-stubs/plugins/gcp/gs_exceptions.pyi +1 -1
  158. metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +1 -1
  159. metaflow-stubs/plugins/gcp/gs_utils.pyi +1 -1
  160. metaflow-stubs/plugins/gcp/includefile_support.pyi +1 -1
  161. metaflow-stubs/plugins/kubernetes/__init__.pyi +1 -1
  162. metaflow-stubs/plugins/kubernetes/kube_utils.pyi +2 -2
  163. metaflow-stubs/plugins/kubernetes/kubernetes.pyi +1 -1
  164. metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +1 -1
  165. metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +1 -1
  166. metaflow-stubs/plugins/kubernetes/kubernetes_jobsets.pyi +1 -1
  167. metaflow-stubs/plugins/kubernetes/spot_monitor_sidecar.pyi +1 -1
  168. metaflow-stubs/plugins/ollama/__init__.pyi +2 -2
  169. metaflow-stubs/plugins/parallel_decorator.pyi +1 -1
  170. metaflow-stubs/plugins/perimeters.pyi +1 -1
  171. metaflow-stubs/plugins/project_decorator.pyi +1 -1
  172. metaflow-stubs/plugins/pypi/__init__.pyi +2 -2
  173. metaflow-stubs/plugins/pypi/conda_decorator.pyi +1 -1
  174. metaflow-stubs/plugins/pypi/conda_environment.pyi +2 -2
  175. metaflow-stubs/plugins/pypi/parsers.pyi +1 -1
  176. metaflow-stubs/plugins/pypi/pypi_decorator.pyi +1 -1
  177. metaflow-stubs/plugins/pypi/pypi_environment.pyi +1 -1
  178. metaflow-stubs/plugins/pypi/utils.pyi +1 -1
  179. metaflow-stubs/plugins/resources_decorator.pyi +1 -1
  180. metaflow-stubs/plugins/retry_decorator.pyi +1 -1
  181. metaflow-stubs/plugins/secrets/__init__.pyi +1 -1
  182. metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +1 -1
  183. metaflow-stubs/plugins/secrets/secrets_decorator.pyi +1 -1
  184. metaflow-stubs/plugins/snowflake/__init__.pyi +1 -1
  185. metaflow-stubs/plugins/storage_executor.pyi +1 -1
  186. metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +2 -2
  187. metaflow-stubs/plugins/timeout_decorator.pyi +1 -1
  188. metaflow-stubs/plugins/torchtune/__init__.pyi +39 -0
  189. metaflow-stubs/plugins/uv/__init__.pyi +1 -1
  190. metaflow-stubs/plugins/uv/uv_environment.pyi +2 -2
  191. metaflow-stubs/profilers/__init__.pyi +1 -1
  192. metaflow-stubs/pylint_wrapper.pyi +1 -1
  193. metaflow-stubs/runner/__init__.pyi +1 -1
  194. metaflow-stubs/runner/deployer.pyi +27 -27
  195. metaflow-stubs/runner/deployer_impl.pyi +1 -1
  196. metaflow-stubs/runner/metaflow_runner.pyi +2 -2
  197. metaflow-stubs/runner/nbdeploy.pyi +1 -1
  198. metaflow-stubs/runner/nbrun.pyi +1 -1
  199. metaflow-stubs/runner/subprocess_manager.pyi +1 -1
  200. metaflow-stubs/runner/utils.pyi +3 -3
  201. metaflow-stubs/system/__init__.pyi +1 -1
  202. metaflow-stubs/system/system_logger.pyi +1 -1
  203. metaflow-stubs/system/system_monitor.pyi +1 -1
  204. metaflow-stubs/tagging_util.pyi +1 -1
  205. metaflow-stubs/tuple_util.pyi +1 -1
  206. metaflow-stubs/user_configs/__init__.pyi +1 -1
  207. metaflow-stubs/user_configs/config_decorators.pyi +4 -4
  208. metaflow-stubs/user_configs/config_options.pyi +3 -3
  209. metaflow-stubs/user_configs/config_parameters.pyi +6 -6
  210. {ob_metaflow_stubs-6.0.3.171.dist-info → ob_metaflow_stubs-6.0.3.173rc0.dist-info}/METADATA +1 -1
  211. ob_metaflow_stubs-6.0.3.173rc0.dist-info/RECORD +214 -0
  212. ob_metaflow_stubs-6.0.3.171.dist-info/RECORD +0 -208
  213. {ob_metaflow_stubs-6.0.3.171.dist-info → ob_metaflow_stubs-6.0.3.173rc0.dist-info}/WHEEL +0 -0
  214. {ob_metaflow_stubs-6.0.3.171.dist-info → ob_metaflow_stubs-6.0.3.173rc0.dist-info}/top_level.txt +0 -0
metaflow-stubs/__init__.pyi
@@ -1,7 +1,7 @@
  ######################################################################################################
  # Auto-generated Metaflow stub file #
  # MF version: 2.15.14.1+obcheckpoint(0.2.1);ob(v1) #
- # Generated on 2025-05-27T00:40:42.234892 #
+ # Generated on 2025-05-28T01:05:51.019297 #
  ######################################################################################################

  from __future__ import annotations
@@ -35,18 +35,18 @@ from .user_configs.config_parameters import ConfigValue as ConfigValue
  from .user_configs.config_parameters import config_expr as config_expr
  from .user_configs.config_decorators import CustomFlowDecorator as CustomFlowDecorator
  from .user_configs.config_decorators import CustomStepDecorator as CustomStepDecorator
+ from . import metaflow_git as metaflow_git
+ from . import events as events
  from . import tuple_util as tuple_util
  from . import cards as cards
- from . import events as events
- from . import metaflow_git as metaflow_git
  from . import runner as runner
  from . import plugins as plugins
  from .mf_extensions.outerbounds.toplevel.global_aliases_for_metaflow_package import S3 as S3
  from . import includefile as includefile
  from .includefile import IncludeFile as IncludeFile
- from .plugins.pypi.parsers import requirements_txt_parser as requirements_txt_parser
- from .plugins.pypi.parsers import pyproject_toml_parser as pyproject_toml_parser
  from .plugins.pypi.parsers import conda_environment_yml_parser as conda_environment_yml_parser
+ from .plugins.pypi.parsers import pyproject_toml_parser as pyproject_toml_parser
+ from .plugins.pypi.parsers import requirements_txt_parser as requirements_txt_parser
  from . import client as client
  from .client.core import namespace as namespace
  from .client.core import get_namespace as get_namespace
@@ -77,6 +77,7 @@ from . import system as system
  from . import pylint_wrapper as pylint_wrapper
  from . import cli as cli
  from . import profilers as profilers
+ from . import ob_internal as ob_internal

  EXT_PKG: str

@@ -153,511 +154,325 @@ def step(f: typing.Union[typing.Callable[[FlowSpecDerived], None], typing.Callab
  """
  ...

- @typing.overload
- def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def nvidia(*, gpu: int, gpu_type: str, queue_timeout: int) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies the number of times the task corresponding
- to a step needs to be retried.
-
- This decorator is useful for handling transient errors, such as networking issues.
- If your task contains operations that can't be retried safely, e.g. database updates,
- it is advisable to annotate it with `@retry(times=0)`.
-
- This can be used in conjunction with the `@catch` decorator. The `@catch`
- decorator will execute a no-op task after all retries have been exhausted,
- ensuring that the flow execution can continue.
+ Specifies that this step should execute on DGX cloud.


  Parameters
  ----------
- times : int, default 3
- Number of times to retry this task.
- minutes_between_retries : int, default 2
- Number of minutes between retries.
+ gpu : int
+ Number of GPUs to use.
+ gpu_type : str
+ Type of Nvidia GPU to use.
+ queue_timeout : int
+ Time to keep the job in NVCF's queue.
  """
  ...

  @typing.overload
- def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ """
+ Decorator prototype for all step decorators. This function gets specialized
+ and imported for all decorators types by _import_plugin_decorators().
+ """
  ...

  @typing.overload
- def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
+ def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
  """
- Specifies the number of times the task corresponding
- to a step needs to be retried.
-
- This decorator is useful for handling transient errors, such as networking issues.
- If your task contains operations that can't be retried safely, e.g. database updates,
- it is advisable to annotate it with `@retry(times=0)`.
-
- This can be used in conjunction with the `@catch` decorator. The `@catch`
- decorator will execute a no-op task after all retries have been exhausted,
- ensuring that the flow execution can continue.
-
-
- Parameters
- ----------
- times : int, default 3
- Number of times to retry this task.
- minutes_between_retries : int, default 2
- Number of minutes between retries.
+ Decorator prototype for all step decorators. This function gets specialized
+ and imported for all decorators types by _import_plugin_decorators().
  """
  ...

  @typing.overload
- def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def card(*, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies secrets to be retrieved and injected as environment variables prior to
- the execution of a step.
+ Creates a human-readable report, a Metaflow Card, after this step completes.
+
+ Note that you may add multiple `@card` decorators in a step with different parameters.


  Parameters
  ----------
- sources : List[Union[str, Dict[str, Any]]], default: []
- List of secret specs, defining how the secrets are to be retrieved
+ type : str, default 'default'
+ Card type.
+ id : str, optional, default None
+ If multiple cards are present, use this id to identify this card.
+ options : Dict[str, Any], default {}
+ Options passed to the card. The contents depend on the card type.
+ timeout : int, default 45
+ Interrupt reporting if it takes more than this many seconds.
  """
  ...

  @typing.overload
- def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []):
+ def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
  """
- Specifies secrets to be retrieved and injected as environment variables prior to
- the execution of a step.
+ Creates a human-readable report, a Metaflow Card, after this step completes.
+
+ Note that you may add multiple `@card` decorators in a step with different parameters.


  Parameters
  ----------
- sources : List[Union[str, Dict[str, Any]]], default: []
- List of secret specs, defining how the secrets are to be retrieved
+ type : str, default 'default'
+ Card type.
+ id : str, optional, default None
+ If multiple cards are present, use this id to identify this card.
+ options : Dict[str, Any], default {}
+ Options passed to the card. The contents depend on the card type.
+ timeout : int, default 45
+ Interrupt reporting if it takes more than this many seconds.
  """
  ...

  @typing.overload
- def resources(*, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies the resources needed when executing this step.
-
- Use `@resources` to specify the resource requirements
- independently of the specific compute layer (`@batch`, `@kubernetes`).
-
- You can choose the compute layer on the command line by executing e.g.
- ```
- python myflow.py run --with batch
- ```
- or
- ```
- python myflow.py run --with kubernetes
- ```
- which executes the flow on the desired system using the
- requirements specified in `@resources`.
+ Specifies environment variables to be set prior to the execution of a step.


  Parameters
  ----------
- cpu : int, default 1
- Number of CPUs required for this step.
- gpu : int, optional, default None
- Number of GPUs required for this step.
- disk : int, optional, default None
- Disk size (in MB) required for this step. Only applies on Kubernetes.
- memory : int, default 4096
- Memory size (in MB) required for this step.
- shared_memory : int, optional, default None
- The value for the size (in MiB) of the /dev/shm volume for this step.
- This parameter maps to the `--shm-size` option in Docker.
+ vars : Dict[str, str], default {}
+ Dictionary of environment variables to set.
  """
  ...

  @typing.overload
- def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
+ def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
  """
- Specifies the resources needed when executing this step.
-
- Use `@resources` to specify the resource requirements
- independently of the specific compute layer (`@batch`, `@kubernetes`).
-
- You can choose the compute layer on the command line by executing e.g.
- ```
- python myflow.py run --with batch
- ```
- or
- ```
- python myflow.py run --with kubernetes
- ```
- which executes the flow on the desired system using the
- requirements specified in `@resources`.
+ Specifies environment variables to be set prior to the execution of a step.


  Parameters
  ----------
- cpu : int, default 1
- Number of CPUs required for this step.
- gpu : int, optional, default None
- Number of GPUs required for this step.
- disk : int, optional, default None
- Disk size (in MB) required for this step. Only applies on Kubernetes.
- memory : int, default 4096
- Memory size (in MB) required for this step.
- shared_memory : int, optional, default None
- The value for the size (in MiB) of the /dev/shm volume for this step.
- This parameter maps to the `--shm-size` option in Docker.
+ vars : Dict[str, str], default {}
+ Dictionary of environment variables to set.
  """
  ...

  @typing.overload
- def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies a timeout for your step.
-
- This decorator is useful if this step may hang indefinitely.
-
- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
-
- Note that all the values specified in parameters are added together so if you specify
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
+ Specifies secrets to be retrieved and injected as environment variables prior to
+ the execution of a step.


  Parameters
  ----------
- seconds : int, default 0
- Number of seconds to wait prior to timing out.
- minutes : int, default 0
- Number of minutes to wait prior to timing out.
- hours : int, default 0
- Number of hours to wait prior to timing out.
+ sources : List[Union[str, Dict[str, Any]]], default: []
+ List of secret specs, defining how the secrets are to be retrieved
  """
  ...

  @typing.overload
- def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
+ def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []):
  """
- Specifies a timeout for your step.
-
- This decorator is useful if this step may hang indefinitely.
-
- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
-
- Note that all the values specified in parameters are added together so if you specify
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
+ Specifies secrets to be retrieved and injected as environment variables prior to
+ the execution of a step.


  Parameters
  ----------
- seconds : int, default 0
- Number of seconds to wait prior to timing out.
- minutes : int, default 0
- Number of minutes to wait prior to timing out.
- hours : int, default 0
- Number of hours to wait prior to timing out.
+ sources : List[Union[str, Dict[str, Any]]], default: []
+ List of secret specs, defining how the secrets are to be retrieved
  """
  ...

- def huggingface_hub(*, temp_dir_root: typing.Optional[str] = None, load: typing.Union[typing.List[str], typing.List[typing.Tuple[typing.Dict, str]], typing.List[typing.Tuple[str, str]], typing.List[typing.Dict], None]) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ @typing.overload
+ def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  """
- Decorator that helps cache, version and store models/datasets from huggingface hub.
-
-
- Parameters
- ----------
- temp_dir_root : str, optional
- The root directory that will hold the temporary directory where objects will be downloaded.
-
- load: Union[List[str], List[Tuple[Dict, str]], List[Tuple[str, str]], List[Dict], None]
- The list of repos (models/datasets) to load.
-
- Loaded repos can be accessed via `current.huggingface_hub.loaded`. If load is set, then the following happens:
-
- - If repo (model/dataset) is not found in the datastore:
- - Downloads the repo from Hugging Face Hub to a temporary directory (or uses specified path) for local access
- - Stores it in Metaflow's datastore (s3/gcs/azure etc.) with a unique name based on repo_type/repo_id
- - All HF models loaded for a `@step` will be cached separately under flow/step/namespace.
-
- - If repo is found in the datastore:
- - Loads it directly from datastore to local path (can be temporary directory or specified path)
+ Internal decorator to support Fast bakery
  """
  ...

- def app_deploy(*, app_port: int, app_name: str) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ @typing.overload
+ def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def fast_bakery_internal(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
  """
- Specifies that this step is used to deploy an instance of the app.
- Requires that self.app_name, self.app_port, self.entrypoint and self.deployDir is set.
-
-
- Parameters
- ----------
- app_port : int
- Number of GPUs to use.
- app_name : str
- Name of the app to deploy.
- """
- ...
-
- def ollama(*, models: list, backend: str, force_pull: bool, skip_push_check: bool, debug: bool) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- This decorator is used to run Ollama APIs as Metaflow task sidecars.
-
- User code call
- --------------
- @ollama(
- models=[...],
- ...
- )
-
- Valid backend options
- ---------------------
- - 'local': Run as a separate process on the local task machine.
- - (TODO) 'managed': Outerbounds hosts and selects compute provider.
- - (TODO) 'remote': Spin up separate instance to serve Ollama models.
-
- Valid model options
- -------------------
- Any model here https://ollama.com/search, e.g. 'llama3.2', 'llama3.3'
-
-
- Parameters
- ----------
- models: list[str]
- List of Ollama containers running models in sidecars.
- backend: str
- Determines where and how to run the Ollama process.
- force_pull: bool
- Whether to run `ollama pull` no matter what, or first check the remote cache in Metaflow datastore for this model key.
- skip_push_check: bool
- Whether to skip the check that populates/overwrites remote cache on terminating an ollama model.
- debug: bool
- Whether to turn on verbose debugging logs.
- """
- ...
-
- @typing.overload
- def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Specifies environment variables to be set prior to the execution of a step.
-
-
- Parameters
- ----------
- vars : Dict[str, str], default {}
- Dictionary of environment variables to set.
+ Internal decorator to support Fast bakery
  """
  ...

  @typing.overload
- def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
- """
- Specifies environment variables to be set prior to the execution of a step.
-
-
- Parameters
- ----------
- vars : Dict[str, str], default {}
- Dictionary of environment variables to set.
- """
- ...
-
- @typing.overload
- def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies the PyPI packages for the step.
+ Specifies the Conda environment for the step.

  Information in this decorator will augment any
- attributes set in the `@pyi_base` flow-level decorator. Hence,
- you can use `@pypi_base` to set packages required by all
- steps and use `@pypi` to specify step-specific overrides.
+ attributes set in the `@conda_base` flow-level decorator. Hence,
+ you can use `@conda_base` to set packages required by all
+ steps and use `@conda` to specify step-specific overrides.


  Parameters
  ----------
- packages : Dict[str, str], default: {}
+ packages : Dict[str, str], default {}
  Packages to use for this step. The key is the name of the package
  and the value is the version to use.
- python : str, optional, default: None
+ libraries : Dict[str, str], default {}
+ Supported for backward compatibility. When used with packages, packages will take precedence.
+ python : str, optional, default None
  Version of Python to use, e.g. '3.7.4'. A default value of None implies
  that the version used will correspond to the version of the Python interpreter used to start the run.
+ disabled : bool, default False
+ If set to True, disables @conda.
  """
  ...

  @typing.overload
- def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
+ def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
  """
- Specifies the PyPI packages for the step.
+ Specifies the Conda environment for the step.

  Information in this decorator will augment any
- attributes set in the `@pyi_base` flow-level decorator. Hence,
- you can use `@pypi_base` to set packages required by all
- steps and use `@pypi` to specify step-specific overrides.
+ attributes set in the `@conda_base` flow-level decorator. Hence,
+ you can use `@conda_base` to set packages required by all
+ steps and use `@conda` to specify step-specific overrides.


  Parameters
  ----------
- packages : Dict[str, str], default: {}
+ packages : Dict[str, str], default {}
  Packages to use for this step. The key is the name of the package
  and the value is the version to use.
- python : str, optional, default: None
+ libraries : Dict[str, str], default {}
+ Supported for backward compatibility. When used with packages, packages will take precedence.
+ python : str, optional, default None
  Version of Python to use, e.g. '3.7.4'. A default value of None implies
  that the version used will correspond to the version of the Python interpreter used to start the run.
+ disabled : bool, default False
+ If set to True, disables @conda.
  """
  ...

  @typing.overload
- def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def checkpoint(*, load_policy: str = 'fresh', temp_dir_root: str = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies that the step will success under all circumstances.
+ Enables checkpointing for a step.

- The decorator will create an optional artifact, specified by `var`, which
- contains the exception raised. You can use it to detect the presence
- of errors, indicating that all happy-path artifacts produced by the step
- are missing.


  Parameters
  ----------
- var : str, optional, default None
- Name of the artifact in which to store the caught exception.
- If not specified, the exception is not stored.
- print_exception : bool, default True
- Determines whether or not the exception is printed to
- stdout when caught.
+ load_policy : str, default: "fresh"
+ The policy for loading the checkpoint. The following policies are supported:
+ - "eager": Loads the the latest available checkpoint within the namespace.
+ With this mode, the latest checkpoint written by any previous task (can be even a different run) of the step
+ will be loaded at the start of the task.
+ - "none": Do not load any checkpoint
+ - "fresh": Loads the lastest checkpoint created within the running Task.
+ This mode helps loading checkpoints across various retry attempts of the same task.
+ With this mode, no checkpoint will be loaded at the start of a task but any checkpoints
+ created within the task will be loaded when the task is retries execution on failure.
+
+ temp_dir_root : str, default: None
+ The root directory under which `current.checkpoint.directory` will be created.
  """
  ...

  @typing.overload
- def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def checkpoint(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def checkpoint(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
+ def checkpoint(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, load_policy: str = 'fresh', temp_dir_root: str = None):
  """
- Specifies that the step will success under all circumstances.
+ Enables checkpointing for a step.

- The decorator will create an optional artifact, specified by `var`, which
- contains the exception raised. You can use it to detect the presence
- of errors, indicating that all happy-path artifacts produced by the step
- are missing.


  Parameters
  ----------
- var : str, optional, default None
- Name of the artifact in which to store the caught exception.
- If not specified, the exception is not stored.
- print_exception : bool, default True
- Determines whether or not the exception is printed to
- stdout when caught.
- """
- ...
-
- @typing.overload
- def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- """
- Decorator prototype for all step decorators. This function gets specialized
- and imported for all decorators types by _import_plugin_decorators().
- """
- ...
-
- @typing.overload
- def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
- """
- Decorator prototype for all step decorators. This function gets specialized
- and imported for all decorators types by _import_plugin_decorators().
+ load_policy : str, default: "fresh"
+ The policy for loading the checkpoint. The following policies are supported:
+ - "eager": Loads the the latest available checkpoint within the namespace.
+ With this mode, the latest checkpoint written by any previous task (can be even a different run) of the step
+ will be loaded at the start of the task.
+ - "none": Do not load any checkpoint
+ - "fresh": Loads the lastest checkpoint created within the running Task.
+ This mode helps loading checkpoints across various retry attempts of the same task.
+ With this mode, no checkpoint will be loaded at the start of a task but any checkpoints
+ created within the task will be loaded when the task is retries execution on failure.
+
+ temp_dir_root : str, default: None
+ The root directory under which `current.checkpoint.directory` will be created.
  """
  ...

- @typing.overload
- def card(*, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def ollama(*, models: list, backend: str, force_pull: bool, skip_push_check: bool, debug: bool) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Creates a human-readable report, a Metaflow Card, after this step completes.
-
- Note that you may add multiple `@card` decorators in a step with different parameters.
+ This decorator is used to run Ollama APIs as Metaflow task sidecars.

+ User code call
+ --------------
+ @ollama(
+ models=[...],
+ ...
+ )

- Parameters
- ----------
- type : str, default 'default'
- Card type.
- id : str, optional, default None
- If multiple cards are present, use this id to identify this card.
- options : Dict[str, Any], default {}
- Options passed to the card. The contents depend on the card type.
- timeout : int, default 45
- Interrupt reporting if it takes more than this many seconds.
- """
- ...
-
- @typing.overload
- def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
- """
- Creates a human-readable report, a Metaflow Card, after this step completes.
+ Valid backend options
+ ---------------------
+ - 'local': Run as a separate process on the local task machine.
+ - (TODO) 'managed': Outerbounds hosts and selects compute provider.
+ - (TODO) 'remote': Spin up separate instance to serve Ollama models.

- Note that you may add multiple `@card` decorators in a step with different parameters.
+ Valid model options
+ -------------------
+ Any model here https://ollama.com/search, e.g. 'llama3.2', 'llama3.3'


  Parameters
  ----------
- type : str, default 'default'
- Card type.
- id : str, optional, default None
- If multiple cards are present, use this id to identify this card.
- options : Dict[str, Any], default {}
- Options passed to the card. The contents depend on the card type.
- timeout : int, default 45
- Interrupt reporting if it takes more than this many seconds.
+ models: list[str]
+ List of Ollama containers running models in sidecars.
+ backend: str
+ Determines where and how to run the Ollama process.
+ force_pull: bool
+ Whether to run `ollama pull` no matter what, or first check the remote cache in Metaflow datastore for this model key.
+ skip_push_check: bool
+ Whether to skip the check that populates/overwrites remote cache on terminating an ollama model.
+ debug: bool
+ Whether to turn on verbose debugging logs.
  """
  ...

@@ -718,76 +533,151 @@ def model(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
  """
  ...

- def nvct(*, gpu: int, gpu_type: str) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ @typing.overload
+ def resources(*, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies that this step should execute on DGX cloud.
+ Specifies the resources needed when executing this step.
+
+ Use `@resources` to specify the resource requirements
+ independently of the specific compute layer (`@batch`, `@kubernetes`).
+
+ You can choose the compute layer on the command line by executing e.g.
+ ```
+ python myflow.py run --with batch
+ ```
+ or
+ ```
+ python myflow.py run --with kubernetes
+ ```
+ which executes the flow on the desired system using the
+ requirements specified in `@resources`.


  Parameters
  ----------
- gpu : int
- Number of GPUs to use.
- gpu_type : str
- Type of Nvidia GPU to use.
+ cpu : int, default 1
+ Number of CPUs required for this step.
+ gpu : int, optional, default None
+ Number of GPUs required for this step.
+ disk : int, optional, default None
+ Disk size (in MB) required for this step. Only applies on Kubernetes.
+ memory : int, default 4096
+ Memory size (in MB) required for this step.
+ shared_memory : int, optional, default None
+ The value for the size (in MiB) of the /dev/shm volume for this step.
+ This parameter maps to the `--shm-size` option in Docker.
  """
  ...

  @typing.overload
- def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ ...
+
+ @typing.overload
+ def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
  """
- Specifies the Conda environment for the step.
+ Specifies the resources needed when executing this step.

- Information in this decorator will augment any
- attributes set in the `@conda_base` flow-level decorator. Hence,
- you can use `@conda_base` to set packages required by all
- steps and use `@conda` to specify step-specific overrides.
+ Use `@resources` to specify the resource requirements
+ independently of the specific compute layer (`@batch`, `@kubernetes`).
+
+ You can choose the compute layer on the command line by executing e.g.
+ ```
+ python myflow.py run --with batch
+ ```
+ or
+ ```
+ python myflow.py run --with kubernetes
+ ```
+ which executes the flow on the desired system using the
+ requirements specified in `@resources`.


  Parameters
  ----------
- packages : Dict[str, str], default {}
- Packages to use for this step. The key is the name of the package
- and the value is the version to use.
- libraries : Dict[str, str], default {}
752
- Supported for backward compatibility. When used with packages, packages will take precedence.
753
- python : str, optional, default None
754
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
755
- that the version used will correspond to the version of the Python interpreter used to start the run.
756
- disabled : bool, default False
757
- If set to True, disables @conda.
601
+ cpu : int, default 1
602
+ Number of CPUs required for this step.
603
+ gpu : int, optional, default None
604
+ Number of GPUs required for this step.
605
+ disk : int, optional, default None
606
+ Disk size (in MB) required for this step. Only applies on Kubernetes.
607
+ memory : int, default 4096
608
+ Memory size (in MB) required for this step.
609
+ shared_memory : int, optional, default None
610
+ The value for the size (in MiB) of the /dev/shm volume for this step.
611
+ This parameter maps to the `--shm-size` option in Docker.
758
612
  """
759
613
  ...
760
614
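A minimal sketch of the pattern described above, with an illustrative flow name and arbitrary resource numbers; the compute layer is still chosen at run time with `--with batch` or `--with kubernetes`:
```
from metaflow import FlowSpec, step, resources

class ResourceDemoFlow(FlowSpec):

    # Request 4 CPUs and 16 GB of memory regardless of the compute layer.
    @resources(cpu=4, memory=16000)
    @step
    def start(self):
        self.total = sum(range(10_000_000))
        self.next(self.end)

    @step
    def end(self):
        print(self.total)

if __name__ == '__main__':
    ResourceDemoFlow()
```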
 
761
615
  @typing.overload
762
- def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
616
+ def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
617
+ """
618
+ Specifies the number of times the task corresponding
619
+ to a step needs to be retried.
620
+
621
+ This decorator is useful for handling transient errors, such as networking issues.
622
+ If your task contains operations that can't be retried safely, e.g. database updates,
623
+ it is advisable to annotate it with `@retry(times=0)`.
624
+
625
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
626
+ decorator will execute a no-op task after all retries have been exhausted,
627
+ ensuring that the flow execution can continue.
628
+
629
+
630
+ Parameters
631
+ ----------
632
+ times : int, default 3
633
+ Number of times to retry this task.
634
+ minutes_between_retries : int, default 2
635
+ Number of minutes between retries.
636
+ """
763
637
  ...
764
638
 
765
639
  @typing.overload
766
- def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
640
+ def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
767
641
  ...
768
642
 
769
- def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
643
+ @typing.overload
644
+ def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
645
+ ...
646
+
647
+ def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
770
648
  """
771
- Specifies the Conda environment for the step.
649
+ Specifies the number of times the task corresponding
650
+ to a step needs to be retried.
772
651
 
773
- Information in this decorator will augment any
774
- attributes set in the `@conda_base` flow-level decorator. Hence,
775
- you can use `@conda_base` to set packages required by all
776
- steps and use `@conda` to specify step-specific overrides.
652
+ This decorator is useful for handling transient errors, such as networking issues.
653
+ If your task contains operations that can't be retried safely, e.g. database updates,
654
+ it is advisable to annotate it with `@retry(times=0)`.
655
+
656
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
657
+ decorator will execute a no-op task after all retries have been exhausted,
658
+ ensuring that the flow execution can continue.
659
+
660
+
661
+ Parameters
662
+ ----------
663
+ times : int, default 3
664
+ Number of times to retry this task.
665
+ minutes_between_retries : int, default 2
666
+ Number of minutes between retries.
667
+ """
668
+ ...
669
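A minimal sketch of a step guarded by `@retry`; the simulated failure is illustrative:
```
import random
from metaflow import FlowSpec, step, retry

class RetryDemoFlow(FlowSpec):

    # Retry up to 2 extra times with a 1-minute pause between attempts.
    # Steps with unsafe side effects (e.g. database updates) should use @retry(times=0).
    @retry(times=2, minutes_between_retries=1)
    @step
    def start(self):
        if random.random() < 0.3:  # stand-in for a transient error such as a network hiccup
            raise RuntimeError('transient failure')
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == '__main__':
    RetryDemoFlow()
```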
+
670
+ def nvct(*, gpu: int, gpu_type: str) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
671
+ """
672
+ Specifies that this step should execute on DGX cloud.
777
673
 
778
674
 
779
675
  Parameters
780
676
  ----------
781
- packages : Dict[str, str], default {}
782
- Packages to use for this step. The key is the name of the package
783
- and the value is the version to use.
784
- libraries : Dict[str, str], default {}
785
- Supported for backward compatibility. When used with packages, packages will take precedence.
786
- python : str, optional, default None
787
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
788
- that the version used will correspond to the version of the Python interpreter used to start the run.
789
- disabled : bool, default False
790
- If set to True, disables @conda.
677
+ gpu : int
678
+ Number of GPUs to use.
679
+ gpu_type : str
680
+ Type of Nvidia GPU to use.
791
681
  """
792
682
  ...
793
683
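A minimal sketch of a step pinned to DGX cloud via `@nvct`; the GPU type string is illustrative:
```
from metaflow import FlowSpec, step, nvct

class DgxStepFlow(FlowSpec):

    # Ask for one GPU on DGX cloud; 'H100' is only an example value for gpu_type.
    @nvct(gpu=1, gpu_type='H100')
    @step
    def start(self):
        import subprocess
        print(subprocess.run(['nvidia-smi'], capture_output=True, text=True).stdout)
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == '__main__':
    DgxStepFlow()
```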
 
@@ -876,136 +766,296 @@ def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: ty
876
766
  """
877
767
  ...
878
768
 
769
+ def huggingface_hub(*, temp_dir_root: typing.Optional[str] = None, load: typing.Union[typing.List[str], typing.List[typing.Tuple[typing.Dict, str]], typing.List[typing.Tuple[str, str]], typing.List[typing.Dict], None]) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
770
+ """
771
+ Decorator that helps cache, version, and store models/datasets from the Hugging Face Hub.
772
+
773
+
774
+ Parameters
775
+ ----------
776
+ temp_dir_root : str, optional
777
+ The root directory that will hold the temporary directory where objects will be downloaded.
778
+
779
+ load: Union[List[str], List[Tuple[Dict, str]], List[Tuple[str, str]], List[Dict], None]
780
+ The list of repos (models/datasets) to load.
781
+
782
+ Loaded repos can be accessed via `current.huggingface_hub.loaded`. If load is set, then the following happens:
783
+
784
+ - If repo (model/dataset) is not found in the datastore:
785
+ - Downloads the repo from Hugging Face Hub to a temporary directory (or uses specified path) for local access
786
+ - Stores it in Metaflow's datastore (s3/gcs/azure etc.) with a unique name based on repo_type/repo_id
787
+ - All HF models loaded for a `@step` will be cached separately under flow/step/namespace.
788
+
789
+ - If repo is found in the datastore:
790
+ - Loads it directly from datastore to local path (can be temporary directory or specified path)
791
+ """
792
+ ...
793
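A minimal sketch of loading a cached repo; the repo id is illustrative, and indexing `current.huggingface_hub.loaded` by repo id to obtain the local path is an assumption based on the description above:
```
from metaflow import FlowSpec, step, current, huggingface_hub

class HfModelFlow(FlowSpec):

    @huggingface_hub(load=['bert-base-uncased'])
    @step
    def start(self):
        # Assumption: `loaded` maps the repo id to the local download path.
        path = current.huggingface_hub.loaded['bert-base-uncased']
        print('model available at', path)
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == '__main__':
    HfModelFlow()
```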
+
794
+ def app_deploy(*, app_port: int, app_name: str) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
795
+ """
796
+ Specifies that this step is used to deploy an instance of the app.
797
+ Requires that self.app_name, self.app_port, self.entrypoint and self.deployDir are set.
798
+
799
+
800
+ Parameters
801
+ ----------
802
+ app_port : int
803
+ Port on which the deployed app listens.
804
+ app_name : str
805
+ Name of the app to deploy.
806
+ """
807
+ ...
808
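A minimal sketch of a deploy step; the attribute values are illustrative and simply satisfy the requirements listed above:
```
from metaflow import FlowSpec, step, app_deploy

class AppDeployFlow(FlowSpec):

    @step
    def start(self):
        # Attributes the deploy step expects, per the description above (values illustrative).
        self.app_name = 'my-app'
        self.app_port = 8080
        self.entrypoint = 'serve.py'
        self.deployDir = 'deploy'
        self.next(self.deploy)

    @app_deploy(app_port=8080, app_name='my-app')
    @step
    def deploy(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == '__main__':
    AppDeployFlow()
```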
+
879
809
  @typing.overload
880
- def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
810
+ def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
881
811
  """
882
- Internal decorator to support Fast bakery
812
+ Specifies the PyPI packages for the step.
813
+
814
+ Information in this decorator will augment any
815
+ attributes set in the `@pypi_base` flow-level decorator. Hence,
816
+ you can use `@pypi_base` to set packages required by all
817
+ steps and use `@pypi` to specify step-specific overrides.
818
+
819
+
820
+ Parameters
821
+ ----------
822
+ packages : Dict[str, str], default: {}
823
+ Packages to use for this step. The key is the name of the package
824
+ and the value is the version to use.
825
+ python : str, optional, default: None
826
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
827
+ that the version used will correspond to the version of the Python interpreter used to start the run.
883
828
  """
884
829
  ...
885
830
 
886
831
  @typing.overload
887
- def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
832
+ def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
888
833
  ...
889
834
 
890
- def fast_bakery_internal(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
835
+ @typing.overload
836
+ def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
837
+ ...
838
+
839
+ def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
891
840
  """
892
- Internal decorator to support Fast bakery
841
+ Specifies the PyPI packages for the step.
842
+
843
+ Information in this decorator will augment any
844
+ attributes set in the `@pypi_base` flow-level decorator. Hence,
845
+ you can use `@pypi_base` to set packages required by all
846
+ steps and use `@pypi` to specify step-specific overrides.
847
+
848
+
849
+ Parameters
850
+ ----------
851
+ packages : Dict[str, str], default: {}
852
+ Packages to use for this step. The key is the name of the package
853
+ and the value is the version to use.
854
+ python : str, optional, default: None
855
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
856
+ that the version used will correspond to the version of the Python interpreter used to start the run.
893
857
  """
894
858
  ...
895
859
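A minimal sketch of step-level PyPI dependencies; the package versions are illustrative:
```
from metaflow import FlowSpec, step, pypi

class PypiStepFlow(FlowSpec):

    # pandas is installed only for this step, in an isolated environment.
    @pypi(packages={'pandas': '2.2.2'}, python='3.11.9')
    @step
    def start(self):
        import pandas as pd
        self.rows = len(pd.DataFrame({'x': [1, 2, 3]}))
        self.next(self.end)

    @step
    def end(self):
        print(self.rows)

if __name__ == '__main__':
    PypiStepFlow()
```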
 
896
860
  @typing.overload
897
- def checkpoint(*, load_policy: str = 'fresh', temp_dir_root: str = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
861
+ def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
898
862
  """
899
- Enables checkpointing for a step.
863
+ Specifies that the step will succeed under all circumstances.
900
864
 
865
+ The decorator will create an optional artifact, specified by `var`, which
866
+ contains the exception raised. You can use it to detect the presence
867
+ of errors, indicating that all happy-path artifacts produced by the step
868
+ are missing.
901
869
 
902
870
 
903
871
  Parameters
904
872
  ----------
905
- load_policy : str, default: "fresh"
906
- The policy for loading the checkpoint. The following policies are supported:
907
- - "eager": Loads the the latest available checkpoint within the namespace.
908
- With this mode, the latest checkpoint written by any previous task (can be even a different run) of the step
909
- will be loaded at the start of the task.
910
- - "none": Do not load any checkpoint
911
- - "fresh": Loads the lastest checkpoint created within the running Task.
912
- This mode helps loading checkpoints across various retry attempts of the same task.
913
- With this mode, no checkpoint will be loaded at the start of a task but any checkpoints
914
- created within the task will be loaded when the task is retries execution on failure.
873
+ var : str, optional, default None
874
+ Name of the artifact in which to store the caught exception.
875
+ If not specified, the exception is not stored.
876
+ print_exception : bool, default True
877
+ Determines whether or not the exception is printed to
878
+ stdout when caught.
879
+ """
880
+ ...
881
+
882
+ @typing.overload
883
+ def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
884
+ ...
885
+
886
+ @typing.overload
887
+ def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
888
+ ...
889
+
890
+ def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
891
+ """
892
+ Specifies that the step will succeed under all circumstances.
915
893
 
916
- temp_dir_root : str, default: None
917
- The root directory under which `current.checkpoint.directory` will be created.
894
+ The decorator will create an optional artifact, specified by `var`, which
895
+ contains the exception raised. You can use it to detect the presence
896
+ of errors, indicating that all happy-path artifacts produced by the step
897
+ are missing.
898
+
899
+
900
+ Parameters
901
+ ----------
902
+ var : str, optional, default None
903
+ Name of the artifact in which to store the caught exception.
904
+ If not specified, the exception is not stored.
905
+ print_exception : bool, default True
906
+ Determines whether or not the exception is printed to
907
+ stdout when caught.
918
908
  """
919
909
  ...
920
910
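A minimal sketch of the `var` pattern described above; the artifact name is illustrative:
```
from metaflow import FlowSpec, step, catch

class CatchDemoFlow(FlowSpec):

    # The step never fails: any exception is stored in the `compute_failed` artifact.
    @catch(var='compute_failed')
    @step
    def start(self):
        self.value = 1 / 0  # raises ZeroDivisionError, which @catch captures
        self.next(self.end)

    @step
    def end(self):
        if self.compute_failed:
            print('start failed with:', self.compute_failed)

if __name__ == '__main__':
    CatchDemoFlow()
```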
 
921
911
  @typing.overload
922
- def checkpoint(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
912
+ def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
913
+ """
914
+ Specifies a timeout for your step.
915
+
916
+ This decorator is useful if this step may hang indefinitely.
917
+
918
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
919
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
920
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.
921
+
922
+ Note that all the values specified in parameters are added together so if you specify
923
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
924
+
925
+
926
+ Parameters
927
+ ----------
928
+ seconds : int, default 0
929
+ Number of seconds to wait prior to timing out.
930
+ minutes : int, default 0
931
+ Number of minutes to wait prior to timing out.
932
+ hours : int, default 0
933
+ Number of hours to wait prior to timing out.
934
+ """
923
935
  ...
924
936
 
925
937
  @typing.overload
926
- def checkpoint(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
938
+ def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
927
939
  ...
928
940
 
929
- def checkpoint(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, load_policy: str = 'fresh', temp_dir_root: str = None):
941
+ @typing.overload
942
+ def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
943
+ ...
944
+
945
+ def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
930
946
  """
931
- Enables checkpointing for a step.
947
+ Specifies a timeout for your step.
948
+
949
+ This decorator is useful if this step may hang indefinitely.
950
+
951
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
952
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
953
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.
932
954
 
955
+ Note that all the values specified in parameters are added together so if you specify
956
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
933
957
 
934
958
 
935
959
  Parameters
936
960
  ----------
937
- load_policy : str, default: "fresh"
938
- The policy for loading the checkpoint. The following policies are supported:
939
- - "eager": Loads the the latest available checkpoint within the namespace.
940
- With this mode, the latest checkpoint written by any previous task (can be even a different run) of the step
941
- will be loaded at the start of the task.
942
- - "none": Do not load any checkpoint
943
- - "fresh": Loads the lastest checkpoint created within the running Task.
944
- This mode helps loading checkpoints across various retry attempts of the same task.
945
- With this mode, no checkpoint will be loaded at the start of a task but any checkpoints
946
- created within the task will be loaded when the task is retries execution on failure.
961
+ seconds : int, default 0
962
+ Number of seconds to wait prior to timing out.
963
+ minutes : int, default 0
964
+ Number of minutes to wait prior to timing out.
965
+ hours : int, default 0
966
+ Number of hours to wait prior to timing out.
967
+ """
968
+ ...
969
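A minimal sketch combining `@timeout` with `@retry`, as suggested above; the durations are illustrative:
```
import time
from metaflow import FlowSpec, step, retry, timeout

class TimeoutDemoFlow(FlowSpec):

    # The values add up: the effective timeout is 1 hour and 1 minute.
    # A timed-out attempt raises an exception, so @retry gives it one more try.
    @retry(times=1)
    @timeout(hours=1, seconds=60)
    @step
    def start(self):
        time.sleep(5)  # stand-in for potentially long-running work
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == '__main__':
    TimeoutDemoFlow()
```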
+
970
+ @typing.overload
971
+ def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
972
+ """
973
+ Specifies the PyPI packages for all steps of the flow.
947
974
 
948
- temp_dir_root : str, default: None
949
- The root directory under which `current.checkpoint.directory` will be created.
975
+ Use `@pypi_base` to set common packages required by all
976
+ steps and use `@pypi` to specify step-specific overrides.
977
+
978
+ Parameters
979
+ ----------
980
+ packages : Dict[str, str], default: {}
981
+ Packages to use for this flow. The key is the name of the package
982
+ and the value is the version to use.
983
+ python : str, optional, default: None
984
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
985
+ that the version used will correspond to the version of the Python interpreter used to start the run.
950
986
  """
951
987
  ...
952
988
 
953
- def nvidia(*, gpu: int, gpu_type: str, queue_timeout: int) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
989
+ @typing.overload
990
+ def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
991
+ ...
992
+
993
+ def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
994
+ """
995
+ Specifies the PyPI packages for all steps of the flow.
996
+
997
+ Use `@pypi_base` to set common packages required by all
998
+ steps and use `@pypi` to specify step-specific overrides.
999
+
1000
+ Parameters
1001
+ ----------
1002
+ packages : Dict[str, str], default: {}
1003
+ Packages to use for this flow. The key is the name of the package
1004
+ and the value is the version to use.
1005
+ python : str, optional, default: None
1006
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1007
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1008
+ """
1009
+ ...
1010
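A minimal sketch of flow-wide PyPI dependencies; the package version is illustrative:
```
from metaflow import FlowSpec, step, pypi_base

# Every step of the flow gets requests installed in its environment.
@pypi_base(packages={'requests': '2.32.3'}, python='3.11.9')
class PypiBaseFlow(FlowSpec):

    @step
    def start(self):
        import requests
        self.requests_version = requests.__version__
        self.next(self.end)

    @step
    def end(self):
        print(self.requests_version)

if __name__ == '__main__':
    PypiBaseFlow()
```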
+
1011
+ @typing.overload
1012
+ def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
954
1013
  """
955
- Specifies that this step should execute on DGX cloud.
1014
+ Specifies the times when the flow should be run when running on a
1015
+ production scheduler.
956
1016
 
957
1017
 
958
1018
  Parameters
959
1019
  ----------
960
- gpu : int
961
- Number of GPUs to use.
962
- gpu_type : str
963
- Type of Nvidia GPU to use.
964
- queue_timeout : int
965
- Time to keep the job in NVCF's queue.
1020
+ hourly : bool, default False
1021
+ Run the workflow hourly.
1022
+ daily : bool, default True
1023
+ Run the workflow daily.
1024
+ weekly : bool, default False
1025
+ Run the workflow weekly.
1026
+ cron : str, optional, default None
1027
+ Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1028
+ specified by this expression.
1029
+ timezone : str, optional, default None
1030
+ Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1031
+ which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
966
1032
  """
967
1033
  ...
968
1034
 
969
- def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1035
+ @typing.overload
1036
+ def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1037
+ ...
1038
+
1039
+ def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
970
1040
  """
971
- The `@airflow_external_task_sensor` decorator attaches a Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
972
- This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as a flow decorators. Adding more than one decorator will ensure that `start` step starts only after all sensors finish.
1041
+ Specifies the times when the flow should be run when running on a
1042
+ production scheduler.
973
1043
 
974
1044
 
975
1045
  Parameters
976
1046
  ----------
977
- timeout : int
978
- Time, in seconds before the task times out and fails. (Default: 3600)
979
- poke_interval : int
980
- Time in seconds that the job should wait in between each try. (Default: 60)
981
- mode : str
982
- How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
983
- exponential_backoff : bool
984
- allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
985
- pool : str
986
- the slot pool this task should run in,
987
- slot pools are a way to limit concurrency for certain tasks. (Default:None)
988
- soft_fail : bool
989
- Set to true to mark the task as SKIPPED on failure. (Default: False)
990
- name : str
991
- Name of the sensor on Airflow
992
- description : str
993
- Description of sensor in the Airflow UI
994
- external_dag_id : str
995
- The dag_id that contains the task you want to wait for.
996
- external_task_ids : List[str]
997
- The list of task_ids that you want to wait for.
998
- If None (default value) the sensor waits for the DAG. (Default: None)
999
- allowed_states : List[str]
1000
- Iterable of allowed states, (Default: ['success'])
1001
- failed_states : List[str]
1002
- Iterable of failed or dis-allowed states. (Default: None)
1003
- execution_delta : datetime.timedelta
1004
- time difference with the previous execution to look at,
1005
- the default is the same logical date as the current task or DAG. (Default: None)
1006
- check_existence: bool
1007
- Set to True to check if the external task exists or check if
1008
- the DAG to wait for exists. (Default: True)
1047
+ hourly : bool, default False
1048
+ Run the workflow hourly.
1049
+ daily : bool, default True
1050
+ Run the workflow daily.
1051
+ weekly : bool, default False
1052
+ Run the workflow weekly.
1053
+ cron : str, optional, default None
1054
+ Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1055
+ specified by this expression.
1056
+ timezone : str, optional, default None
1057
+ Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1058
+ which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1009
1059
  """
1010
1060
  ...
1011
1061
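A minimal sketch of a scheduled flow; the schedule only takes effect once the flow is deployed to a production scheduler:
```
from metaflow import FlowSpec, step, schedule

# Runs once per hour after the flow is deployed to a production scheduler.
@schedule(hourly=True)
class HourlyFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == '__main__':
    HourlyFlow()
```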
 
@@ -1044,49 +1094,6 @@ def project(*, name: str, branch: typing.Optional[str] = None, production: bool
1044
1094
  """
1045
1095
  ...
1046
1096
 
1047
- def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1048
- """
1049
- The `@airflow_s3_key_sensor` decorator attaches a Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
1050
- before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
1051
- and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
1052
- added as a flow decorators. Adding more than one decorator will ensure that `start` step
1053
- starts only after all sensors finish.
1054
-
1055
-
1056
- Parameters
1057
- ----------
1058
- timeout : int
1059
- Time, in seconds before the task times out and fails. (Default: 3600)
1060
- poke_interval : int
1061
- Time in seconds that the job should wait in between each try. (Default: 60)
1062
- mode : str
1063
- How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
1064
- exponential_backoff : bool
1065
- allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
1066
- pool : str
1067
- the slot pool this task should run in,
1068
- slot pools are a way to limit concurrency for certain tasks. (Default:None)
1069
- soft_fail : bool
1070
- Set to true to mark the task as SKIPPED on failure. (Default: False)
1071
- name : str
1072
- Name of the sensor on Airflow
1073
- description : str
1074
- Description of sensor in the Airflow UI
1075
- bucket_key : Union[str, List[str]]
1076
- The key(s) being waited on. Supports full s3:// style url or relative path from root level.
1077
- When it's specified as a full s3:// url, please leave `bucket_name` as None
1078
- bucket_name : str
1079
- Name of the S3 bucket. Only needed when bucket_key is not provided as a full s3:// url.
1080
- When specified, all the keys passed to bucket_key refers to this bucket. (Default:None)
1081
- wildcard_match : bool
1082
- whether the bucket_key should be interpreted as a Unix wildcard pattern. (Default: False)
1083
- aws_conn_id : str
1084
- a reference to the s3 connection on Airflow. (Default: None)
1085
- verify : bool
1086
- Whether or not to verify SSL certificates for S3 connection. (Default: None)
1087
- """
1088
- ...
1089
-
1090
1097
  @typing.overload
1091
1098
  def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1092
1099
  """
@@ -1171,12 +1178,199 @@ def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: t
1171
1178
 
1172
1179
  Parameters
1173
1180
  ----------
1174
- event : Union[str, Dict[str, Any]], optional, default None
1175
- Event dependency for this flow.
1176
- events : List[Union[str, Dict[str, Any]]], default []
1177
- Events dependency for this flow.
1178
- options : Dict[str, Any], default {}
1179
- Backend-specific configuration for tuning eventing behavior.
1181
+ event : Union[str, Dict[str, Any]], optional, default None
1182
+ Event dependency for this flow.
1183
+ events : List[Union[str, Dict[str, Any]]], default []
1184
+ Events dependency for this flow.
1185
+ options : Dict[str, Any], default {}
1186
+ Backend-specific configuration for tuning eventing behavior.
1187
+ """
1188
+ ...
1189
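A minimal sketch of an event-triggered flow; the event name is illustrative:
```
from metaflow import FlowSpec, step, trigger

# Deployed to a production scheduler, a run starts whenever 'data_updated' is published.
@trigger(event='data_updated')
class EventDrivenFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == '__main__':
    EventDrivenFlow()
```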
+
1190
+ @typing.overload
1191
+ def trigger_on_finish(*, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1192
+ """
1193
+ Specifies the flow(s) that this flow depends on.
1194
+
1195
+ ```
1196
+ @trigger_on_finish(flow='FooFlow')
1197
+ ```
1198
+ or
1199
+ ```
1200
+ @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
1201
+ ```
1202
+ This decorator respects the @project decorator and triggers the flow
1203
+ when upstream runs within the same namespace complete successfully.
1204
+
1205
+ Additionally, you can specify project aware upstream flow dependencies
1206
+ by specifying the fully qualified project_flow_name.
1207
+ ```
1208
+ @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
1209
+ ```
1210
+ or
1211
+ ```
1212
+ @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
1213
+ ```
1214
+
1215
+ You can also specify just the project or project branch (other values will be
1216
+ inferred from the current project or project branch):
1217
+ ```
1218
+ @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1219
+ ```
1220
+
1221
+ Note that `branch` is typically one of:
1222
+ - `prod`
1223
+ - `user.bob`
1224
+ - `test.my_experiment`
1225
+ - `prod.staging`
1226
+
1227
+
1228
+ Parameters
1229
+ ----------
1230
+ flow : Union[str, Dict[str, str]], optional, default None
1231
+ Upstream flow dependency for this flow.
1232
+ flows : List[Union[str, Dict[str, str]]], default []
1233
+ Upstream flow dependencies for this flow.
1234
+ options : Dict[str, Any], default {}
1235
+ Backend-specific configuration for tuning eventing behavior.
1236
+ """
1237
+ ...
1238
+
1239
+ @typing.overload
1240
+ def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1241
+ ...
1242
+
1243
+ def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
1244
+ """
1245
+ Specifies the flow(s) that this flow depends on.
1246
+
1247
+ ```
1248
+ @trigger_on_finish(flow='FooFlow')
1249
+ ```
1250
+ or
1251
+ ```
1252
+ @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
1253
+ ```
1254
+ This decorator respects the @project decorator and triggers the flow
1255
+ when upstream runs within the same namespace complete successfully.
1256
+
1257
+ Additionally, you can specify project aware upstream flow dependencies
1258
+ by specifying the fully qualified project_flow_name.
1259
+ ```
1260
+ @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
1261
+ ```
1262
+ or
1263
+ ```
1264
+ @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
1265
+ ```
1266
+
1267
+ You can also specify just the project or project branch (other values will be
1268
+ inferred from the current project or project branch):
1269
+ ```
1270
+ @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1271
+ ```
1272
+
1273
+ Note that `branch` is typically one of:
1274
+ - `prod`
1275
+ - `user.bob`
1276
+ - `test.my_experiment`
1277
+ - `prod.staging`
1278
+
1279
+
1280
+ Parameters
1281
+ ----------
1282
+ flow : Union[str, Dict[str, str]], optional, default None
1283
+ Upstream flow dependency for this flow.
1284
+ flows : List[Union[str, Dict[str, str]]], default []
1285
+ Upstream flow dependencies for this flow.
1286
+ options : Dict[str, Any], default {}
1287
+ Backend-specific configuration for tuning eventing behavior.
1288
+ """
1289
+ ...
1290
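A minimal sketch of a downstream flow; the upstream flow name is illustrative:
```
from metaflow import FlowSpec, step, trigger_on_finish

# Runs whenever a run of the upstream 'TrainingFlow' in the same namespace succeeds.
@trigger_on_finish(flow='TrainingFlow')
class PublishModelFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == '__main__':
    PublishModelFlow()
```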
+
1291
+ def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1292
+ """
1293
+ The `@airflow_external_task_sensor` decorator attaches an Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
1294
+ This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as flow decorators. Adding more than one decorator will ensure that the `start` step starts only after all sensors finish.
1295
+
1296
+
1297
+ Parameters
1298
+ ----------
1299
+ timeout : int
1300
+ Time, in seconds before the task times out and fails. (Default: 3600)
1301
+ poke_interval : int
1302
+ Time in seconds that the job should wait in between each try. (Default: 60)
1303
+ mode : str
1304
+ How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
1305
+ exponential_backoff : bool
1306
+ Allow progressively longer waits between pokes by using an exponential backoff algorithm. (Default: True)
1307
+ pool : str
1308
+ the slot pool this task should run in,
1309
+ slot pools are a way to limit concurrency for certain tasks. (Default:None)
1310
+ soft_fail : bool
1311
+ Set to true to mark the task as SKIPPED on failure. (Default: False)
1312
+ name : str
1313
+ Name of the sensor on Airflow
1314
+ description : str
1315
+ Description of sensor in the Airflow UI
1316
+ external_dag_id : str
1317
+ The dag_id that contains the task you want to wait for.
1318
+ external_task_ids : List[str]
1319
+ The list of task_ids that you want to wait for.
1320
+ If None (default value) the sensor waits for the DAG. (Default: None)
1321
+ allowed_states : List[str]
1322
+ Iterable of allowed states, (Default: ['success'])
1323
+ failed_states : List[str]
1324
+ Iterable of failed or dis-allowed states. (Default: None)
1325
+ execution_delta : datetime.timedelta
1326
+ time difference with the previous execution to look at,
1327
+ the default is the same logical date as the current task or DAG. (Default: None)
1328
+ check_existence: bool
1329
+ Set to True to check if the external task exists or check if
1330
+ the DAG to wait for exists. (Default: True)
1331
+ """
1332
+ ...
1333
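A minimal sketch of the sensor attached to a flow compiled with `airflow create`; the DAG and task ids are illustrative, and omitted options are assumed to fall back to the defaults listed above:
```
from metaflow import FlowSpec, step, airflow_external_task_sensor

# `start` waits until the 'load_warehouse' task of the 'nightly_etl' DAG has succeeded.
@airflow_external_task_sensor(
    name='wait_for_nightly_etl',
    external_dag_id='nightly_etl',
    external_task_ids=['load_warehouse'],
)
class DownstreamFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == '__main__':
    DownstreamFlow()
```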
+
1334
+ def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1335
+ """
1336
+ The `@airflow_s3_key_sensor` decorator attaches an Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
1337
+ before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
1338
+ and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
1339
+ added as flow decorators. Adding more than one decorator will ensure that the `start` step
1340
+ starts only after all sensors finish.
1341
+
1342
+
1343
+ Parameters
1344
+ ----------
1345
+ timeout : int
1346
+ Time, in seconds before the task times out and fails. (Default: 3600)
1347
+ poke_interval : int
1348
+ Time in seconds that the job should wait in between each try. (Default: 60)
1349
+ mode : str
1350
+ How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
1351
+ exponential_backoff : bool
1352
+ Allow progressively longer waits between pokes by using an exponential backoff algorithm. (Default: True)
1353
+ pool : str
1354
+ the slot pool this task should run in,
1355
+ slot pools are a way to limit concurrency for certain tasks. (Default:None)
1356
+ soft_fail : bool
1357
+ Set to true to mark the task as SKIPPED on failure. (Default: False)
1358
+ name : str
1359
+ Name of the sensor on Airflow
1360
+ description : str
1361
+ Description of sensor in the Airflow UI
1362
+ bucket_key : Union[str, List[str]]
1363
+ The key(s) being waited on. Supports full s3:// style url or relative path from root level.
1364
+ When it's specified as a full s3:// url, please leave `bucket_name` as None
1365
+ bucket_name : str
1366
+ Name of the S3 bucket. Only needed when bucket_key is not provided as a full s3:// url.
1367
+ When specified, all the keys passed to bucket_key refer to this bucket. (Default: None)
1368
+ wildcard_match : bool
1369
+ whether the bucket_key should be interpreted as a Unix wildcard pattern. (Default: False)
1370
+ aws_conn_id : str
1371
+ a reference to the s3 connection on Airflow. (Default: None)
1372
+ verify : bool
1373
+ Whether or not to verify SSL certificates for S3 connection. (Default: None)
1180
1374
  """
1181
1375
  ...
1182
1376
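A minimal sketch of the S3 key sensor; the bucket and key are illustrative, and omitted options are assumed to fall back to the defaults listed above:
```
from metaflow import FlowSpec, step, airflow_s3_key_sensor

# `start` waits until the given S3 key exists when the flow runs on Airflow.
@airflow_s3_key_sensor(
    name='wait_for_input_file',
    bucket_key='s3://example-bucket/incoming/data.csv',
)
class S3GatedFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == '__main__':
    S3GatedFlow()
```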
 
@@ -1345,198 +1539,5 @@ def with_artifact_store(f: typing.Optional[typing.Type[FlowSpecDerived]] = None)
1345
1539
  """
1346
1540
  ...
1347
1541
 
1348
- @typing.overload
1349
- def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1350
- """
1351
- Specifies the PyPI packages for all steps of the flow.
1352
-
1353
- Use `@pypi_base` to set common packages required by all
1354
- steps and use `@pypi` to specify step-specific overrides.
1355
-
1356
- Parameters
1357
- ----------
1358
- packages : Dict[str, str], default: {}
1359
- Packages to use for this flow. The key is the name of the package
1360
- and the value is the version to use.
1361
- python : str, optional, default: None
1362
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1363
- that the version used will correspond to the version of the Python interpreter used to start the run.
1364
- """
1365
- ...
1366
-
1367
- @typing.overload
1368
- def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1369
- ...
1370
-
1371
- def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
1372
- """
1373
- Specifies the PyPI packages for all steps of the flow.
1374
-
1375
- Use `@pypi_base` to set common packages required by all
1376
- steps and use `@pypi` to specify step-specific overrides.
1377
-
1378
- Parameters
1379
- ----------
1380
- packages : Dict[str, str], default: {}
1381
- Packages to use for this flow. The key is the name of the package
1382
- and the value is the version to use.
1383
- python : str, optional, default: None
1384
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1385
- that the version used will correspond to the version of the Python interpreter used to start the run.
1386
- """
1387
- ...
1388
-
1389
- @typing.overload
1390
- def trigger_on_finish(*, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1391
- """
1392
- Specifies the flow(s) that this flow depends on.
1393
-
1394
- ```
1395
- @trigger_on_finish(flow='FooFlow')
1396
- ```
1397
- or
1398
- ```
1399
- @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
1400
- ```
1401
- This decorator respects the @project decorator and triggers the flow
1402
- when upstream runs within the same namespace complete successfully
1403
-
1404
- Additionally, you can specify project aware upstream flow dependencies
1405
- by specifying the fully qualified project_flow_name.
1406
- ```
1407
- @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
1408
- ```
1409
- or
1410
- ```
1411
- @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
1412
- ```
1413
-
1414
- You can also specify just the project or project branch (other values will be
1415
- inferred from the current project or project branch):
1416
- ```
1417
- @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1418
- ```
1419
-
1420
- Note that `branch` is typically one of:
1421
- - `prod`
1422
- - `user.bob`
1423
- - `test.my_experiment`
1424
- - `prod.staging`
1425
-
1426
-
1427
- Parameters
1428
- ----------
1429
- flow : Union[str, Dict[str, str]], optional, default None
1430
- Upstream flow dependency for this flow.
1431
- flows : List[Union[str, Dict[str, str]]], default []
1432
- Upstream flow dependencies for this flow.
1433
- options : Dict[str, Any], default {}
1434
- Backend-specific configuration for tuning eventing behavior.
1435
- """
1436
- ...
1437
-
1438
- @typing.overload
1439
- def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1440
- ...
1441
-
1442
- def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
1443
- """
1444
- Specifies the flow(s) that this flow depends on.
1445
-
1446
- ```
1447
- @trigger_on_finish(flow='FooFlow')
1448
- ```
1449
- or
1450
- ```
1451
- @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
1452
- ```
1453
- This decorator respects the @project decorator and triggers the flow
1454
- when upstream runs within the same namespace complete successfully
1455
-
1456
- Additionally, you can specify project aware upstream flow dependencies
1457
- by specifying the fully qualified project_flow_name.
1458
- ```
1459
- @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
1460
- ```
1461
- or
1462
- ```
1463
- @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
1464
- ```
1465
-
1466
- You can also specify just the project or project branch (other values will be
1467
- inferred from the current project or project branch):
1468
- ```
1469
- @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1470
- ```
1471
-
1472
- Note that `branch` is typically one of:
1473
- - `prod`
1474
- - `user.bob`
1475
- - `test.my_experiment`
1476
- - `prod.staging`
1477
-
1478
-
1479
- Parameters
1480
- ----------
1481
- flow : Union[str, Dict[str, str]], optional, default None
1482
- Upstream flow dependency for this flow.
1483
- flows : List[Union[str, Dict[str, str]]], default []
1484
- Upstream flow dependencies for this flow.
1485
- options : Dict[str, Any], default {}
1486
- Backend-specific configuration for tuning eventing behavior.
1487
- """
1488
- ...
1489
-
1490
- @typing.overload
1491
- def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1492
- """
1493
- Specifies the times when the flow should be run when running on a
1494
- production scheduler.
1495
-
1496
-
1497
- Parameters
1498
- ----------
1499
- hourly : bool, default False
1500
- Run the workflow hourly.
1501
- daily : bool, default True
1502
- Run the workflow daily.
1503
- weekly : bool, default False
1504
- Run the workflow weekly.
1505
- cron : str, optional, default None
1506
- Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1507
- specified by this expression.
1508
- timezone : str, optional, default None
1509
- Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1510
- which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1511
- """
1512
- ...
1513
-
1514
- @typing.overload
1515
- def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1516
- ...
1517
-
1518
- def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
1519
- """
1520
- Specifies the times when the flow should be run when running on a
1521
- production scheduler.
1522
-
1523
-
1524
- Parameters
1525
- ----------
1526
- hourly : bool, default False
1527
- Run the workflow hourly.
1528
- daily : bool, default True
1529
- Run the workflow daily.
1530
- weekly : bool, default False
1531
- Run the workflow weekly.
1532
- cron : str, optional, default None
1533
- Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1534
- specified by this expression.
1535
- timezone : str, optional, default None
1536
- Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1537
- which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1538
- """
1539
- ...
1540
-
1541
1542
  pkg_name: str
1542
1543