ob-metaflow-stubs 6.0.3.173rc0__py2.py3-none-any.whl → 6.0.3.174__py2.py3-none-any.whl

This diff shows the contents of publicly released package versions as published to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their respective public registries.
Files changed (215)
  1. metaflow-stubs/__init__.pyi +710 -711
  2. metaflow-stubs/cards.pyi +1 -1
  3. metaflow-stubs/cli.pyi +1 -1
  4. metaflow-stubs/cli_components/__init__.pyi +1 -1
  5. metaflow-stubs/cli_components/utils.pyi +1 -1
  6. metaflow-stubs/client/__init__.pyi +1 -1
  7. metaflow-stubs/client/core.pyi +3 -3
  8. metaflow-stubs/client/filecache.pyi +2 -2
  9. metaflow-stubs/events.pyi +2 -2
  10. metaflow-stubs/exception.pyi +1 -1
  11. metaflow-stubs/flowspec.pyi +4 -4
  12. metaflow-stubs/generated_for.txt +1 -1
  13. metaflow-stubs/includefile.pyi +2 -2
  14. metaflow-stubs/info_file.pyi +1 -1
  15. metaflow-stubs/metadata_provider/__init__.pyi +1 -1
  16. metaflow-stubs/metadata_provider/heartbeat.pyi +1 -1
  17. metaflow-stubs/metadata_provider/metadata.pyi +1 -1
  18. metaflow-stubs/metadata_provider/util.pyi +1 -1
  19. metaflow-stubs/metaflow_config.pyi +1 -1
  20. metaflow-stubs/metaflow_current.pyi +81 -81
  21. metaflow-stubs/metaflow_git.pyi +1 -1
  22. metaflow-stubs/mf_extensions/__init__.pyi +1 -1
  23. metaflow-stubs/mf_extensions/obcheckpoint/__init__.pyi +1 -1
  24. metaflow-stubs/mf_extensions/obcheckpoint/plugins/__init__.pyi +1 -1
  25. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/__init__.pyi +1 -1
  26. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/__init__.pyi +1 -1
  27. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/async_cards.pyi +2 -2
  28. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/deco_injection_mixin.pyi +1 -1
  29. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/extra_components.pyi +2 -2
  30. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/__init__.pyi +1 -1
  31. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/cards/__init__.pyi +1 -1
  32. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/cards/checkpoint_lister.pyi +3 -3
  33. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/cards/lineage_card.pyi +1 -1
  34. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/checkpoint_storage.pyi +3 -3
  35. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/constructors.pyi +1 -1
  36. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/core.pyi +3 -3
  37. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/decorator.pyi +2 -2
  38. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/exceptions.pyi +1 -1
  39. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/final_api.pyi +1 -1
  40. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/lineage.pyi +1 -1
  41. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/__init__.pyi +1 -1
  42. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/context.pyi +1 -1
  43. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/core.pyi +2 -2
  44. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/decorator.pyi +1 -1
  45. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/exceptions.pyi +1 -1
  46. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/task_utils.pyi +1 -1
  47. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/utils.pyi +1 -1
  48. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastructures.pyi +1 -1
  49. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/exceptions.pyi +1 -1
  50. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/hf_hub/__init__.pyi +1 -1
  51. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/hf_hub/decorator.pyi +1 -1
  52. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/__init__.pyi +1 -1
  53. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/core.pyi +2 -2
  54. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/exceptions.pyi +1 -1
  55. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/model_storage.pyi +2 -2
  56. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/__init__.pyi +1 -1
  57. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/flowspec_utils.pyi +1 -1
  58. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/general.pyi +1 -1
  59. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/identity_utils.pyi +1 -1
  60. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/serialization_handler/__init__.pyi +1 -1
  61. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/serialization_handler/base.pyi +1 -1
  62. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/serialization_handler/tar.pyi +1 -1
  63. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/tar_utils.pyi +1 -1
  64. metaflow-stubs/mf_extensions/outerbounds/__init__.pyi +1 -1
  65. metaflow-stubs/mf_extensions/outerbounds/plugins/__init__.pyi +1 -1
  66. metaflow-stubs/mf_extensions/outerbounds/plugins/card_utilities/__init__.pyi +1 -1
  67. metaflow-stubs/mf_extensions/outerbounds/plugins/card_utilities/injector.pyi +1 -1
  68. metaflow-stubs/mf_extensions/outerbounds/plugins/checkpoint_datastores/__init__.pyi +1 -1
  69. metaflow-stubs/mf_extensions/outerbounds/plugins/checkpoint_datastores/coreweave.pyi +1 -1
  70. metaflow-stubs/mf_extensions/outerbounds/plugins/checkpoint_datastores/nebius.pyi +1 -1
  71. metaflow-stubs/mf_extensions/outerbounds/plugins/{fast_bakery → kubernetes}/__init__.pyi +1 -1
  72. metaflow-stubs/mf_extensions/outerbounds/plugins/kubernetes/pod_killer.pyi +31 -0
  73. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/__init__.pyi +1 -1
  74. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/constants.pyi +1 -1
  75. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/exceptions.pyi +1 -1
  76. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/ollama.pyi +1 -1
  77. metaflow-stubs/mf_extensions/outerbounds/plugins/snowflake/__init__.pyi +1 -1
  78. metaflow-stubs/mf_extensions/outerbounds/plugins/snowflake/snowflake.pyi +1 -1
  79. metaflow-stubs/mf_extensions/outerbounds/profilers/__init__.pyi +1 -1
  80. metaflow-stubs/mf_extensions/outerbounds/profilers/gpu.pyi +1 -1
  81. metaflow-stubs/mf_extensions/outerbounds/remote_config.pyi +1 -1
  82. metaflow-stubs/mf_extensions/outerbounds/toplevel/__init__.pyi +1 -1
  83. metaflow-stubs/mf_extensions/outerbounds/toplevel/global_aliases_for_metaflow_package.pyi +1 -2
  84. metaflow-stubs/multicore_utils.pyi +1 -1
  85. metaflow-stubs/parameters.pyi +2 -2
  86. metaflow-stubs/plugins/__init__.pyi +10 -10
  87. metaflow-stubs/plugins/airflow/__init__.pyi +1 -1
  88. metaflow-stubs/plugins/airflow/airflow_utils.pyi +1 -1
  89. metaflow-stubs/plugins/airflow/exception.pyi +1 -1
  90. metaflow-stubs/plugins/airflow/sensors/__init__.pyi +1 -1
  91. metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +1 -1
  92. metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +1 -1
  93. metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +1 -1
  94. metaflow-stubs/plugins/argo/__init__.pyi +1 -1
  95. metaflow-stubs/plugins/argo/argo_client.pyi +1 -1
  96. metaflow-stubs/plugins/argo/argo_events.pyi +1 -1
  97. metaflow-stubs/plugins/argo/argo_workflows.pyi +1 -1
  98. metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +3 -3
  99. metaflow-stubs/plugins/argo/argo_workflows_deployer.pyi +2 -2
  100. metaflow-stubs/plugins/argo/argo_workflows_deployer_objects.pyi +1 -1
  101. metaflow-stubs/plugins/aws/__init__.pyi +1 -1
  102. metaflow-stubs/plugins/aws/aws_client.pyi +1 -1
  103. metaflow-stubs/plugins/aws/aws_utils.pyi +1 -1
  104. metaflow-stubs/plugins/aws/batch/__init__.pyi +1 -1
  105. metaflow-stubs/plugins/aws/batch/batch.pyi +1 -1
  106. metaflow-stubs/plugins/aws/batch/batch_client.pyi +1 -1
  107. metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +1 -1
  108. metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +1 -1
  109. metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +2 -2
  110. metaflow-stubs/plugins/aws/step_functions/__init__.pyi +1 -1
  111. metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +1 -1
  112. metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +1 -1
  113. metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +1 -1
  114. metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +1 -1
  115. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer.pyi +2 -2
  116. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer_objects.pyi +1 -1
  117. metaflow-stubs/plugins/azure/__init__.pyi +1 -1
  118. metaflow-stubs/plugins/azure/azure_credential.pyi +1 -1
  119. metaflow-stubs/plugins/azure/azure_exceptions.pyi +1 -1
  120. metaflow-stubs/plugins/azure/azure_secret_manager_secrets_provider.pyi +2 -2
  121. metaflow-stubs/plugins/azure/azure_utils.pyi +1 -1
  122. metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +1 -1
  123. metaflow-stubs/plugins/azure/includefile_support.pyi +1 -1
  124. metaflow-stubs/plugins/cards/__init__.pyi +5 -5
  125. metaflow-stubs/plugins/cards/card_client.pyi +1 -1
  126. metaflow-stubs/plugins/cards/card_creator.pyi +1 -1
  127. metaflow-stubs/plugins/cards/card_datastore.pyi +1 -1
  128. metaflow-stubs/plugins/cards/card_decorator.pyi +1 -1
  129. metaflow-stubs/plugins/cards/card_modules/__init__.pyi +1 -1
  130. metaflow-stubs/plugins/cards/card_modules/basic.pyi +2 -2
  131. metaflow-stubs/plugins/cards/card_modules/card.pyi +1 -1
  132. metaflow-stubs/plugins/cards/card_modules/components.pyi +3 -3
  133. metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +1 -1
  134. metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +1 -1
  135. metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +1 -1
  136. metaflow-stubs/plugins/cards/card_resolver.pyi +1 -1
  137. metaflow-stubs/plugins/cards/component_serializer.pyi +1 -1
  138. metaflow-stubs/plugins/cards/exception.pyi +1 -1
  139. metaflow-stubs/plugins/catch_decorator.pyi +1 -1
  140. metaflow-stubs/plugins/datatools/__init__.pyi +1 -1
  141. metaflow-stubs/plugins/datatools/local.pyi +1 -1
  142. metaflow-stubs/plugins/datatools/s3/__init__.pyi +1 -1
  143. metaflow-stubs/plugins/datatools/s3/s3.pyi +2 -2
  144. metaflow-stubs/plugins/datatools/s3/s3tail.pyi +1 -1
  145. metaflow-stubs/plugins/datatools/s3/s3util.pyi +1 -1
  146. metaflow-stubs/plugins/debug_logger.pyi +1 -1
  147. metaflow-stubs/plugins/debug_monitor.pyi +1 -1
  148. metaflow-stubs/plugins/environment_decorator.pyi +1 -1
  149. metaflow-stubs/plugins/events_decorator.pyi +1 -1
  150. metaflow-stubs/plugins/frameworks/__init__.pyi +1 -1
  151. metaflow-stubs/plugins/frameworks/pytorch.pyi +1 -1
  152. metaflow-stubs/plugins/gcp/__init__.pyi +1 -1
  153. metaflow-stubs/plugins/gcp/gcp_secret_manager_secrets_provider.pyi +2 -2
  154. metaflow-stubs/plugins/gcp/gs_exceptions.pyi +1 -1
  155. metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +1 -1
  156. metaflow-stubs/plugins/gcp/gs_utils.pyi +1 -1
  157. metaflow-stubs/plugins/gcp/includefile_support.pyi +1 -1
  158. metaflow-stubs/plugins/kubernetes/__init__.pyi +1 -1
  159. metaflow-stubs/plugins/kubernetes/kube_utils.pyi +1 -1
  160. metaflow-stubs/plugins/kubernetes/kubernetes.pyi +1 -1
  161. metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +2 -1
  162. metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +1 -1
  163. metaflow-stubs/plugins/kubernetes/kubernetes_jobsets.pyi +1 -1
  164. metaflow-stubs/plugins/kubernetes/spot_monitor_sidecar.pyi +1 -1
  165. metaflow-stubs/plugins/ollama/__init__.pyi +2 -2
  166. metaflow-stubs/plugins/parallel_decorator.pyi +1 -1
  167. metaflow-stubs/plugins/perimeters.pyi +1 -1
  168. metaflow-stubs/plugins/project_decorator.pyi +1 -1
  169. metaflow-stubs/plugins/pypi/__init__.pyi +2 -2
  170. metaflow-stubs/plugins/pypi/conda_decorator.pyi +1 -1
  171. metaflow-stubs/plugins/pypi/conda_environment.pyi +3 -3
  172. metaflow-stubs/plugins/pypi/parsers.pyi +1 -1
  173. metaflow-stubs/plugins/pypi/pypi_decorator.pyi +1 -1
  174. metaflow-stubs/plugins/pypi/pypi_environment.pyi +1 -1
  175. metaflow-stubs/plugins/pypi/utils.pyi +1 -1
  176. metaflow-stubs/plugins/resources_decorator.pyi +1 -1
  177. metaflow-stubs/plugins/retry_decorator.pyi +1 -1
  178. metaflow-stubs/plugins/secrets/__init__.pyi +1 -1
  179. metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +1 -1
  180. metaflow-stubs/plugins/secrets/secrets_decorator.pyi +1 -1
  181. metaflow-stubs/plugins/snowflake/__init__.pyi +1 -1
  182. metaflow-stubs/plugins/storage_executor.pyi +1 -1
  183. metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +1 -1
  184. metaflow-stubs/plugins/timeout_decorator.pyi +1 -1
  185. metaflow-stubs/plugins/torchtune/__init__.pyi +1 -1
  186. metaflow-stubs/plugins/uv/__init__.pyi +1 -1
  187. metaflow-stubs/plugins/uv/uv_environment.pyi +1 -1
  188. metaflow-stubs/profilers/__init__.pyi +1 -1
  189. metaflow-stubs/pylint_wrapper.pyi +1 -1
  190. metaflow-stubs/runner/__init__.pyi +1 -1
  191. metaflow-stubs/runner/deployer.pyi +27 -27
  192. metaflow-stubs/runner/deployer_impl.pyi +2 -2
  193. metaflow-stubs/runner/metaflow_runner.pyi +2 -2
  194. metaflow-stubs/runner/nbdeploy.pyi +1 -1
  195. metaflow-stubs/runner/nbrun.pyi +1 -1
  196. metaflow-stubs/runner/subprocess_manager.pyi +1 -1
  197. metaflow-stubs/runner/utils.pyi +2 -2
  198. metaflow-stubs/system/__init__.pyi +1 -1
  199. metaflow-stubs/system/system_logger.pyi +1 -1
  200. metaflow-stubs/system/system_monitor.pyi +1 -1
  201. metaflow-stubs/tagging_util.pyi +1 -1
  202. metaflow-stubs/tuple_util.pyi +1 -1
  203. metaflow-stubs/user_configs/__init__.pyi +1 -1
  204. metaflow-stubs/user_configs/config_decorators.pyi +5 -5
  205. metaflow-stubs/user_configs/config_options.pyi +2 -2
  206. metaflow-stubs/user_configs/config_parameters.pyi +3 -3
  207. {ob_metaflow_stubs-6.0.3.173rc0.dist-info → ob_metaflow_stubs-6.0.3.174.dist-info}/METADATA +1 -1
  208. ob_metaflow_stubs-6.0.3.174.dist-info/RECORD +211 -0
  209. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/baker.pyi +0 -51
  210. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/docker_environment.pyi +0 -65
  211. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/fast_bakery.pyi +0 -74
  212. metaflow-stubs/ob_internal.pyi +0 -11
  213. ob_metaflow_stubs-6.0.3.173rc0.dist-info/RECORD +0 -214
  214. {ob_metaflow_stubs-6.0.3.173rc0.dist-info → ob_metaflow_stubs-6.0.3.174.dist-info}/WHEEL +0 -0
  215. {ob_metaflow_stubs-6.0.3.173rc0.dist-info → ob_metaflow_stubs-6.0.3.174.dist-info}/top_level.txt +0 -0
@@ -1,7 +1,7 @@
  ######################################################################################################
  # Auto-generated Metaflow stub file #
  # MF version: 2.15.14.1+obcheckpoint(0.2.1);ob(v1) #
- # Generated on 2025-05-28T01:05:51.019297 #
+ # Generated on 2025-05-29T18:56:59.040642 #
  ######################################################################################################
 
  from __future__ import annotations
@@ -35,18 +35,18 @@ from .user_configs.config_parameters import ConfigValue as ConfigValue
  from .user_configs.config_parameters import config_expr as config_expr
  from .user_configs.config_decorators import CustomFlowDecorator as CustomFlowDecorator
  from .user_configs.config_decorators import CustomStepDecorator as CustomStepDecorator
- from . import metaflow_git as metaflow_git
- from . import events as events
  from . import tuple_util as tuple_util
  from . import cards as cards
+ from . import metaflow_git as metaflow_git
+ from . import events as events
  from . import runner as runner
  from . import plugins as plugins
  from .mf_extensions.outerbounds.toplevel.global_aliases_for_metaflow_package import S3 as S3
  from . import includefile as includefile
  from .includefile import IncludeFile as IncludeFile
- from .plugins.pypi.parsers import conda_environment_yml_parser as conda_environment_yml_parser
- from .plugins.pypi.parsers import pyproject_toml_parser as pyproject_toml_parser
  from .plugins.pypi.parsers import requirements_txt_parser as requirements_txt_parser
+ from .plugins.pypi.parsers import pyproject_toml_parser as pyproject_toml_parser
+ from .plugins.pypi.parsers import conda_environment_yml_parser as conda_environment_yml_parser
  from . import client as client
  from .client.core import namespace as namespace
  from .client.core import get_namespace as get_namespace
@@ -77,7 +77,6 @@ from . import system as system
  from . import pylint_wrapper as pylint_wrapper
  from . import cli as cli
  from . import profilers as profilers
- from . import ob_internal as ob_internal
 
  EXT_PKG: str
 
@@ -154,231 +153,388 @@ def step(f: typing.Union[typing.Callable[[FlowSpecDerived], None], typing.Callab
  """
  ...
 
- def nvidia(*, gpu: int, gpu_type: str, queue_timeout: int) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ @typing.overload
+ def resources(*, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies that this step should execute on DGX cloud.
+ Specifies the resources needed when executing this step.
+
+ Use `@resources` to specify the resource requirements
+ independently of the specific compute layer (`@batch`, `@kubernetes`).
+
+ You can choose the compute layer on the command line by executing e.g.
+ ```
+ python myflow.py run --with batch
+ ```
+ or
+ ```
+ python myflow.py run --with kubernetes
+ ```
+ which executes the flow on the desired system using the
+ requirements specified in `@resources`.
 
 
  Parameters
  ----------
- gpu : int
- Number of GPUs to use.
- gpu_type : str
- Type of Nvidia GPU to use.
- queue_timeout : int
- Time to keep the job in NVCF's queue.
+ cpu : int, default 1
+ Number of CPUs required for this step.
+ gpu : int, optional, default None
+ Number of GPUs required for this step.
+ disk : int, optional, default None
+ Disk size (in MB) required for this step. Only applies on Kubernetes.
+ memory : int, default 4096
+ Memory size (in MB) required for this step.
+ shared_memory : int, optional, default None
+ The value for the size (in MiB) of the /dev/shm volume for this step.
+ This parameter maps to the `--shm-size` option in Docker.
  """
  ...
 
  @typing.overload
- def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- """
- Decorator prototype for all step decorators. This function gets specialized
- and imported for all decorators types by _import_plugin_decorators().
- """
+ def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...
 
  @typing.overload
- def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...
 
- def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
+ def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
  """
- Decorator prototype for all step decorators. This function gets specialized
- and imported for all decorators types by _import_plugin_decorators().
+ Specifies the resources needed when executing this step.
+
+ Use `@resources` to specify the resource requirements
+ independently of the specific compute layer (`@batch`, `@kubernetes`).
+
+ You can choose the compute layer on the command line by executing e.g.
+ ```
+ python myflow.py run --with batch
+ ```
+ or
+ ```
+ python myflow.py run --with kubernetes
+ ```
+ which executes the flow on the desired system using the
+ requirements specified in `@resources`.
+
+
+ Parameters
+ ----------
+ cpu : int, default 1
+ Number of CPUs required for this step.
+ gpu : int, optional, default None
+ Number of GPUs required for this step.
+ disk : int, optional, default None
+ Disk size (in MB) required for this step. Only applies on Kubernetes.
+ memory : int, default 4096
+ Memory size (in MB) required for this step.
+ shared_memory : int, optional, default None
+ The value for the size (in MiB) of the /dev/shm volume for this step.
+ This parameter maps to the `--shm-size` option in Docker.
  """
  ...
 
  @typing.overload
- def card(*, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Creates a human-readable report, a Metaflow Card, after this step completes.
+ Specifies a timeout for your step.
 
- Note that you may add multiple `@card` decorators in a step with different parameters.
+ This decorator is useful if this step may hang indefinitely.
+
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.
+
+ Note that all the values specified in parameters are added together so if you specify
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
 
 
  Parameters
  ----------
- type : str, default 'default'
- Card type.
- id : str, optional, default None
- If multiple cards are present, use this id to identify this card.
- options : Dict[str, Any], default {}
- Options passed to the card. The contents depend on the card type.
- timeout : int, default 45
- Interrupt reporting if it takes more than this many seconds.
+ seconds : int, default 0
+ Number of seconds to wait prior to timing out.
+ minutes : int, default 0
+ Number of minutes to wait prior to timing out.
+ hours : int, default 0
+ Number of hours to wait prior to timing out.
  """
  ...
 
  @typing.overload
- def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...
 
  @typing.overload
- def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...
 
- def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
+ def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
  """
- Creates a human-readable report, a Metaflow Card, after this step completes.
+ Specifies a timeout for your step.
 
- Note that you may add multiple `@card` decorators in a step with different parameters.
+ This decorator is useful if this step may hang indefinitely.
+
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.
+
+ Note that all the values specified in parameters are added together so if you specify
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
 
 
  Parameters
  ----------
- type : str, default 'default'
- Card type.
- id : str, optional, default None
- If multiple cards are present, use this id to identify this card.
- options : Dict[str, Any], default {}
- Options passed to the card. The contents depend on the card type.
- timeout : int, default 45
- Interrupt reporting if it takes more than this many seconds.
+ seconds : int, default 0
+ Number of seconds to wait prior to timing out.
+ minutes : int, default 0
+ Number of minutes to wait prior to timing out.
+ hours : int, default 0
+ Number of hours to wait prior to timing out.
  """
  ...
 
- @typing.overload
- def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def ollama(*, models: list, backend: str, force_pull: bool, skip_push_check: bool, debug: bool) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies environment variables to be set prior to the execution of a step.
+ This decorator is used to run Ollama APIs as Metaflow task sidecars.
+
+ User code call
+ --------------
+ @ollama(
+ models=[...],
+ ...
+ )
+
+ Valid backend options
+ ---------------------
+ - 'local': Run as a separate process on the local task machine.
+ - (TODO) 'managed': Outerbounds hosts and selects compute provider.
+ - (TODO) 'remote': Spin up separate instance to serve Ollama models.
+
+ Valid model options
+ -------------------
+ Any model here https://ollama.com/search, e.g. 'llama3.2', 'llama3.3'
 
 
  Parameters
  ----------
- vars : Dict[str, str], default {}
- Dictionary of environment variables to set.
+ models: list[str]
+ List of Ollama containers running models in sidecars.
+ backend: str
+ Determines where and how to run the Ollama process.
+ force_pull: bool
+ Whether to run `ollama pull` no matter what, or first check the remote cache in Metaflow datastore for this model key.
+ skip_push_check: bool
+ Whether to skip the check that populates/overwrites remote cache on terminating an ollama model.
+ debug: bool
+ Whether to turn on verbose debugging logs.
  """
  ...
 
- @typing.overload
- def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
+ def huggingface_hub(*, temp_dir_root: typing.Optional[str] = None, load: typing.Union[typing.List[str], typing.List[typing.Tuple[typing.Dict, str]], typing.List[typing.Tuple[str, str]], typing.List[typing.Dict], None]) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies environment variables to be set prior to the execution of a step.
+ Decorator that helps cache, version and store models/datasets from huggingface hub.
 
 
  Parameters
  ----------
- vars : Dict[str, str], default {}
- Dictionary of environment variables to set.
+ temp_dir_root : str, optional
+ The root directory that will hold the temporary directory where objects will be downloaded.
+
+ load: Union[List[str], List[Tuple[Dict, str]], List[Tuple[str, str]], List[Dict], None]
+ The list of repos (models/datasets) to load.
+
+ Loaded repos can be accessed via `current.huggingface_hub.loaded`. If load is set, then the following happens:
+
+ - If repo (model/dataset) is not found in the datastore:
+ - Downloads the repo from Hugging Face Hub to a temporary directory (or uses specified path) for local access
+ - Stores it in Metaflow's datastore (s3/gcs/azure etc.) with a unique name based on repo_type/repo_id
+ - All HF models loaded for a `@step` will be cached separately under flow/step/namespace.
+
+ - If repo is found in the datastore:
+ - Loads it directly from datastore to local path (can be temporary directory or specified path)
  """
  ...
 
  @typing.overload
- def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies secrets to be retrieved and injected as environment variables prior to
- the execution of a step.
+ Specifies the number of times the task corresponding
+ to a step needs to be retried.
+
+ This decorator is useful for handling transient errors, such as networking issues.
+ If your task contains operations that can't be retried safely, e.g. database updates,
+ it is advisable to annotate it with `@retry(times=0)`.
+
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
+ decorator will execute a no-op task after all retries have been exhausted,
+ ensuring that the flow execution can continue.
 
 
  Parameters
  ----------
- sources : List[Union[str, Dict[str, Any]]], default: []
- List of secret specs, defining how the secrets are to be retrieved
+ times : int, default 3
+ Number of times to retry this task.
+ minutes_between_retries : int, default 2
+ Number of minutes between retries.
  """
  ...
 
  @typing.overload
- def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...
 
  @typing.overload
- def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...
 
- def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []):
+ def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
  """
- Specifies secrets to be retrieved and injected as environment variables prior to
- the execution of a step.
+ Specifies the number of times the task corresponding
+ to a step needs to be retried.
+
+ This decorator is useful for handling transient errors, such as networking issues.
+ If your task contains operations that can't be retried safely, e.g. database updates,
+ it is advisable to annotate it with `@retry(times=0)`.
+
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
+ decorator will execute a no-op task after all retries have been exhausted,
+ ensuring that the flow execution can continue.
 
 
  Parameters
  ----------
- sources : List[Union[str, Dict[str, Any]]], default: []
- List of secret specs, defining how the secrets are to be retrieved
+ times : int, default 3
+ Number of times to retry this task.
+ minutes_between_retries : int, default 2
+ Number of minutes between retries.
  """
  ...
 
  @typing.overload
- def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- """
- Internal decorator to support Fast bakery
+ def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- ...
+ Specifies environment variables to be set prior to the execution of a step.
+
+
+ Parameters
+ ----------
+ vars : Dict[str, str], default {}
+ Dictionary of environment variables to set.
+ """
+ ...
 
  @typing.overload
- def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...
 
- def fast_bakery_internal(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
+ @typing.overload
+ def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
  """
- Internal decorator to support Fast bakery
+ Specifies environment variables to be set prior to the execution of a step.
+
+
+ Parameters
+ ----------
+ vars : Dict[str, str], default {}
+ Dictionary of environment variables to set.
+ """
+ ...
+
+ def app_deploy(*, app_port: int, app_name: str) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Specifies that this step is used to deploy an instance of the app.
+ Requires that self.app_name, self.app_port, self.entrypoint and self.deployDir is set.
+
+
+ Parameters
+ ----------
+ app_port : int
+ Number of GPUs to use.
+ app_name : str
+ Name of the app to deploy.
+ """
+ ...
+
+ def nvct(*, gpu: int, gpu_type: str) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Specifies that this step should execute on DGX cloud.
+
+
+ Parameters
+ ----------
+ gpu : int
+ Number of GPUs to use.
+ gpu_type : str
+ Type of Nvidia GPU to use.
  """
  ...
 
  @typing.overload
- def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  """
- Specifies the Conda environment for the step.
+ Decorator prototype for all step decorators. This function gets specialized
+ and imported for all decorators types by _import_plugin_decorators().
+ """
+ ...
+
+ @typing.overload
+ def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
+ """
+ Decorator prototype for all step decorators. This function gets specialized
+ and imported for all decorators types by _import_plugin_decorators().
+ """
+ ...
+
+ @typing.overload
+ def card(*, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Creates a human-readable report, a Metaflow Card, after this step completes.
 
- Information in this decorator will augment any
- attributes set in the `@conda_base` flow-level decorator. Hence,
- you can use `@conda_base` to set packages required by all
- steps and use `@conda` to specify step-specific overrides.
+ Note that you may add multiple `@card` decorators in a step with different parameters.
 
 
  Parameters
  ----------
- packages : Dict[str, str], default {}
- Packages to use for this step. The key is the name of the package
- and the value is the version to use.
- libraries : Dict[str, str], default {}
- Supported for backward compatibility. When used with packages, packages will take precedence.
- python : str, optional, default None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
- disabled : bool, default False
- If set to True, disables @conda.
+ type : str, default 'default'
+ Card type.
+ id : str, optional, default None
+ If multiple cards are present, use this id to identify this card.
+ options : Dict[str, Any], default {}
+ Options passed to the card. The contents depend on the card type.
+ timeout : int, default 45
+ Interrupt reporting if it takes more than this many seconds.
  """
  ...
 
  @typing.overload
- def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...
 
  @typing.overload
- def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...
 
- def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
+ def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
  """
- Specifies the Conda environment for the step.
+ Creates a human-readable report, a Metaflow Card, after this step completes.
 
- Information in this decorator will augment any
- attributes set in the `@conda_base` flow-level decorator. Hence,
- you can use `@conda_base` to set packages required by all
- steps and use `@conda` to specify step-specific overrides.
+ Note that you may add multiple `@card` decorators in a step with different parameters.
 
 
  Parameters
  ----------
- packages : Dict[str, str], default {}
- Packages to use for this step. The key is the name of the package
- and the value is the version to use.
- libraries : Dict[str, str], default {}
- Supported for backward compatibility. When used with packages, packages will take precedence.
- python : str, optional, default None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
- disabled : bool, default False
- If set to True, disables @conda.
+ type : str, default 'default'
+ Card type.
+ id : str, optional, default None
+ If multiple cards are present, use this id to identify this card.
+ options : Dict[str, Any], default {}
+ Options passed to the card. The contents depend on the card type.
+ timeout : int, default 45
+ Interrupt reporting if it takes more than this many seconds.
  """
  ...
 
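For orientation, a minimal sketch of how the step decorators documented in the hunk above (`@resources`, `@timeout`, `@retry`) are typically stacked on a Metaflow step. This is not taken from the package; the flow name, step names, and resource values are illustrative.

```
from metaflow import FlowSpec, step, resources, retry, timeout

class ExampleFlow(FlowSpec):

    @resources(cpu=2, memory=8192)              # resource request, independent of the compute layer
    @timeout(hours=1)                           # treat runs longer than 1 hour as a failure
    @retry(times=3, minutes_between_retries=2)  # retry transient failures, e.g. networking issues
    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    ExampleFlow()
```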
@@ -439,245 +595,197 @@ def checkpoint(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None
439
595
  """
440
596
  ...
441
597
 
442
- def ollama(*, models: list, backend: str, force_pull: bool, skip_push_check: bool, debug: bool) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
443
- """
444
- This decorator is used to run Ollama APIs as Metaflow task sidecars.
445
-
446
- User code call
447
- --------------
448
- @ollama(
449
- models=[...],
450
- ...
451
- )
452
-
453
- Valid backend options
454
- ---------------------
455
- - 'local': Run as a separate process on the local task machine.
456
- - (TODO) 'managed': Outerbounds hosts and selects compute provider.
457
- - (TODO) 'remote': Spin up separate instance to serve Ollama models.
458
-
459
- Valid model options
460
- -------------------
461
- Any model here https://ollama.com/search, e.g. 'llama3.2', 'llama3.3'
462
-
463
-
464
- Parameters
465
- ----------
466
- models: list[str]
467
- List of Ollama containers running models in sidecars.
468
- backend: str
469
- Determines where and how to run the Ollama process.
470
- force_pull: bool
471
- Whether to run `ollama pull` no matter what, or first check the remote cache in Metaflow datastore for this model key.
472
- skip_push_check: bool
473
- Whether to skip the check that populates/overwrites remote cache on terminating an ollama model.
474
- debug: bool
475
- Whether to turn on verbose debugging logs.
476
- """
477
- ...
478
-
479
598
  @typing.overload
480
- def model(*, load: typing.Union[typing.List[str], str, typing.List[typing.Tuple[str, typing.Optional[str]]]] = None, temp_dir_root: str = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
599
+ def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
481
600
  """
482
- Enables loading / saving of models within a step.
483
-
601
+ Specifies secrets to be retrieved and injected as environment variables prior to
602
+ the execution of a step.
484
603
 
485
604
 
486
605
  Parameters
487
606
  ----------
488
- load : Union[List[str],str,List[Tuple[str,Union[str,None]]]], default: None
489
- Artifact name/s referencing the models/checkpoints to load. Artifact names refer to the names of the instance variables set to `self`.
490
- These artifact names give to `load` be reference objects or reference `key` string's from objects created by:
491
- - `current.checkpoint`
492
- - `current.model`
493
- - `current.huggingface_hub`
494
-
495
- If a list of tuples is provided, the first element is the artifact name and the second element is the path the artifact needs be unpacked on
496
- the local filesystem. If the second element is None, the artifact will be unpacked in the current working directory.
497
- If a string is provided, then the artifact corresponding to that name will be loaded in the current working directory.
498
-
499
- temp_dir_root : str, default: None
500
- The root directory under which `current.model.loaded` will store loaded models
607
+ sources : List[Union[str, Dict[str, Any]]], default: []
608
+ List of secret specs, defining how the secrets are to be retrieved
501
609
  """
502
610
  ...
503
611
 
504
612
  @typing.overload
505
- def model(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
613
+ def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
506
614
  ...
507
615
 
508
616
  @typing.overload
509
- def model(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
617
+ def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
510
618
  ...
511
619
 
512
- def model(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, load: typing.Union[typing.List[str], str, typing.List[typing.Tuple[str, typing.Optional[str]]]] = None, temp_dir_root: str = None):
620
+ def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []):
513
621
  """
514
- Enables loading / saving of models within a step.
515
-
622
+ Specifies secrets to be retrieved and injected as environment variables prior to
623
+ the execution of a step.
516
624
 
517
625
 
518
626
  Parameters
519
627
  ----------
520
- load : Union[List[str],str,List[Tuple[str,Union[str,None]]]], default: None
521
- Artifact name/s referencing the models/checkpoints to load. Artifact names refer to the names of the instance variables set to `self`.
522
- These artifact names give to `load` be reference objects or reference `key` string's from objects created by:
523
- - `current.checkpoint`
524
- - `current.model`
525
- - `current.huggingface_hub`
526
-
527
- If a list of tuples is provided, the first element is the artifact name and the second element is the path the artifact needs be unpacked on
528
- the local filesystem. If the second element is None, the artifact will be unpacked in the current working directory.
529
- If a string is provided, then the artifact corresponding to that name will be loaded in the current working directory.
530
-
531
- temp_dir_root : str, default: None
532
- The root directory under which `current.model.loaded` will store loaded models
628
+ sources : List[Union[str, Dict[str, Any]]], default: []
629
+ List of secret specs, defining how the secrets are to be retrieved
533
630
  """
534
631
  ...
535
632
 
536
633
  @typing.overload
537
- def resources(*, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
634
+ def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
538
635
  """
539
- Specifies the resources needed when executing this step.
540
-
541
- Use `@resources` to specify the resource requirements
542
- independently of the specific compute layer (`@batch`, `@kubernetes`).
636
+ Specifies the PyPI packages for the step.
543
637
 
544
- You can choose the compute layer on the command line by executing e.g.
545
- ```
546
- python myflow.py run --with batch
547
- ```
548
- or
549
- ```
550
- python myflow.py run --with kubernetes
551
- ```
552
- which executes the flow on the desired system using the
553
- requirements specified in `@resources`.
638
+ Information in this decorator will augment any
639
+ attributes set in the `@pyi_base` flow-level decorator. Hence,
640
+ you can use `@pypi_base` to set packages required by all
641
+ steps and use `@pypi` to specify step-specific overrides.
554
642
 
555
643
 
556
644
  Parameters
557
645
  ----------
558
- cpu : int, default 1
559
- Number of CPUs required for this step.
560
- gpu : int, optional, default None
561
- Number of GPUs required for this step.
562
- disk : int, optional, default None
563
- Disk size (in MB) required for this step. Only applies on Kubernetes.
564
- memory : int, default 4096
565
- Memory size (in MB) required for this step.
566
- shared_memory : int, optional, default None
567
- The value for the size (in MiB) of the /dev/shm volume for this step.
568
- This parameter maps to the `--shm-size` option in Docker.
646
+ packages : Dict[str, str], default: {}
647
+ Packages to use for this step. The key is the name of the package
648
+ and the value is the version to use.
649
+ python : str, optional, default: None
650
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
651
+ that the version used will correspond to the version of the Python interpreter used to start the run.
569
652
  """
570
653
  ...
571
654
 
572
655
  @typing.overload
573
- def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
656
+ def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
574
657
  ...
575
658
 
576
659
  @typing.overload
577
- def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
660
+ def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
578
661
  ...
579
662
 
580
- def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
663
+ def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
581
664
  """
582
- Specifies the resources needed when executing this step.
583
-
584
- Use `@resources` to specify the resource requirements
585
- independently of the specific compute layer (`@batch`, `@kubernetes`).
665
+ Specifies the PyPI packages for the step.
586
666
 
587
- You can choose the compute layer on the command line by executing e.g.
588
- ```
589
- python myflow.py run --with batch
590
- ```
591
- or
592
- ```
593
- python myflow.py run --with kubernetes
594
- ```
595
- which executes the flow on the desired system using the
596
- requirements specified in `@resources`.
667
+ Information in this decorator will augment any
668
+ attributes set in the `@pypi_base` flow-level decorator. Hence,
669
+ you can use `@pypi_base` to set packages required by all
670
+ steps and use `@pypi` to specify step-specific overrides.
597
671
 
598
672
 
599
673
  Parameters
600
674
  ----------
601
- cpu : int, default 1
602
- Number of CPUs required for this step.
603
- gpu : int, optional, default None
604
- Number of GPUs required for this step.
605
- disk : int, optional, default None
606
- Disk size (in MB) required for this step. Only applies on Kubernetes.
607
- memory : int, default 4096
608
- Memory size (in MB) required for this step.
609
- shared_memory : int, optional, default None
610
- The value for the size (in MiB) of the /dev/shm volume for this step.
611
- This parameter maps to the `--shm-size` option in Docker.
675
+ packages : Dict[str, str], default: {}
676
+ Packages to use for this step. The key is the name of the package
677
+ and the value is the version to use.
678
+ python : str, optional, default: None
679
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
680
+ that the version used will correspond to the version of the Python interpreter used to start the run.
612
681
  """
613
682
  ...
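A minimal usage sketch of the `@pypi`/`@pypi_base` pair described above, assuming both decorators are importable from the top-level `metaflow` package (package names and versions are illustrative only):

```python
from metaflow import FlowSpec, step, pypi, pypi_base


@pypi_base(python="3.10.9", packages={"requests": "2.31.0"})  # shared by all steps
class PyPIDemoFlow(FlowSpec):
    @pypi(packages={"pandas": "2.1.4"})  # step-specific addition/override
    @step
    def start(self):
        import pandas as pd  # resolved from the step's isolated environment

        self.n_rows = len(pd.DataFrame({"x": [1, 2, 3]}))
        self.next(self.end)

    @step
    def end(self):
        print("rows:", self.n_rows)


if __name__ == "__main__":
    PyPIDemoFlow()
```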
614
683
 
615
684
  @typing.overload
616
- def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
685
+ def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
617
686
  """
618
- Specifies the number of times the task corresponding
619
- to a step needs to be retried.
620
-
621
- This decorator is useful for handling transient errors, such as networking issues.
622
- If your task contains operations that can't be retried safely, e.g. database updates,
623
- it is advisable to annotate it with `@retry(times=0)`.
687
+ Specifies that the step will succeed under all circumstances.
624
688
 
625
- This can be used in conjunction with the `@catch` decorator. The `@catch`
626
- decorator will execute a no-op task after all retries have been exhausted,
627
- ensuring that the flow execution can continue.
689
+ The decorator will create an optional artifact, specified by `var`, which
690
+ contains the exception raised. You can use it to detect the presence
691
+ of errors, indicating that all happy-path artifacts produced by the step
692
+ are missing.
628
693
 
629
694
 
630
695
  Parameters
631
696
  ----------
632
- times : int, default 3
633
- Number of times to retry this task.
634
- minutes_between_retries : int, default 2
635
- Number of minutes between retries.
697
+ var : str, optional, default None
698
+ Name of the artifact in which to store the caught exception.
699
+ If not specified, the exception is not stored.
700
+ print_exception : bool, default True
701
+ Determines whether or not the exception is printed to
702
+ stdout when caught.
636
703
  """
637
704
  ...
638
705
 
639
706
  @typing.overload
640
- def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
707
+ def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
641
708
  ...
642
709
 
643
710
  @typing.overload
644
- def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
711
+ def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
645
712
  ...
646
713
 
647
- def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
714
+ def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
648
715
  """
649
- Specifies the number of times the task corresponding
650
- to a step needs to be retried.
716
+ Specifies that the step will succeed under all circumstances.
651
717
 
652
- This decorator is useful for handling transient errors, such as networking issues.
653
- If your task contains operations that can't be retried safely, e.g. database updates,
654
- it is advisable to annotate it with `@retry(times=0)`.
718
+ The decorator will create an optional artifact, specified by `var`, which
719
+ contains the exception raised. You can use it to detect the presence
720
+ of errors, indicating that all happy-path artifacts produced by the step
721
+ are missing.
722
+
723
+
724
+ Parameters
725
+ ----------
726
+ var : str, optional, default None
727
+ Name of the artifact in which to store the caught exception.
728
+ If not specified, the exception is not stored.
729
+ print_exception : bool, default True
730
+ Determines whether or not the exception is printed to
731
+ stdout when caught.
732
+ """
733
+ ...
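A short sketch of `@catch` paired with `@retry(times=0)`, so the failure is recorded immediately in the named artifact and a downstream step can branch on it (flow, step, and artifact names are illustrative):

```python
from metaflow import FlowSpec, step, catch, retry


class CatchDemoFlow(FlowSpec):
    @catch(var="compute_failure", print_exception=True)
    @retry(times=0)  # do not retry; let @catch record the failure right away
    @step
    def start(self):
        self.result = 1 / 0  # raises ZeroDivisionError, captured by @catch
        self.next(self.end)

    @step
    def end(self):
        # Defensive getattr in case the artifact is absent on the happy path.
        failure = getattr(self, "compute_failure", None)
        if failure:
            print("start failed:", failure)
        else:
            print("result:", self.result)


if __name__ == "__main__":
    CatchDemoFlow()
```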
734
+
735
+ @typing.overload
736
+ def model(*, load: typing.Union[typing.List[str], str, typing.List[typing.Tuple[str, typing.Optional[str]]]] = None, temp_dir_root: str = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
737
+ """
738
+ Enables loading / saving of models within a step.
655
739
 
656
- This can be used in conjunction with the `@catch` decorator. The `@catch`
657
- decorator will execute a no-op task after all retries have been exhausted,
658
- ensuring that the flow execution can continue.
659
740
 
660
741
 
661
742
  Parameters
662
743
  ----------
663
- times : int, default 3
664
- Number of times to retry this task.
665
- minutes_between_retries : int, default 2
666
- Number of minutes between retries.
744
+ load : Union[List[str],str,List[Tuple[str,Union[str,None]]]], default: None
745
+ Artifact name(s) referencing the models/checkpoints to load. Artifact names refer to the names of the instance variables set on `self`.
746
+ These artifact names given to `load` can be reference objects or reference `key` strings from objects created by:
747
+ - `current.checkpoint`
748
+ - `current.model`
749
+ - `current.huggingface_hub`
750
+
751
+ If a list of tuples is provided, the first element is the artifact name and the second element is the path the artifact needs to be unpacked on
752
+ the local filesystem. If the second element is None, the artifact will be unpacked in the current working directory.
753
+ If a string is provided, then the artifact corresponding to that name will be loaded in the current working directory.
754
+
755
+ temp_dir_root : str, default: None
756
+ The root directory under which `current.model.loaded` will store loaded models.
667
757
  """
668
758
  ...
669
759
 
670
- def nvct(*, gpu: int, gpu_type: str) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
760
+ @typing.overload
761
+ def model(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
762
+ ...
763
+
764
+ @typing.overload
765
+ def model(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
766
+ ...
767
+
768
+ def model(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, load: typing.Union[typing.List[str], str, typing.List[typing.Tuple[str, typing.Optional[str]]]] = None, temp_dir_root: str = None):
671
769
  """
672
- Specifies that this step should execute on DGX cloud.
770
+ Enables loading / saving of models within a step.
771
+
673
772
 
674
773
 
675
774
  Parameters
676
775
  ----------
677
- gpu : int
678
- Number of GPUs to use.
679
- gpu_type : str
680
- Type of Nvidia GPU to use.
776
+ load : Union[List[str],str,List[Tuple[str,Union[str,None]]]], default: None
777
+ Artifact name(s) referencing the models/checkpoints to load. Artifact names refer to the names of the instance variables set on `self`.
778
+ These artifact names given to `load` can be reference objects or reference `key` strings from objects created by:
779
+ - `current.checkpoint`
780
+ - `current.model`
781
+ - `current.huggingface_hub`
782
+
783
+ If a list of tuples is provided, the first element is the artifact name and the second element is the path the artifact needs to be unpacked on
784
+ the local filesystem. If the second element is None, the artifact will be unpacked in the current working directory.
785
+ If a string is provided, then the artifact corresponding to that name will be loaded in the current working directory.
786
+
787
+ temp_dir_root : str, default: None
788
+ The root directory under which `current.model.loaded` will store loaded models.
681
789
  """
682
790
  ...
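A hedged sketch of saving and loading with `@model`. It assumes `model` is importable from the top-level `metaflow` package in this distribution and that `current.model.save(path)` returns a reference that can be stored as a regular artifact; only `current.model.loaded` is explicitly documented above, so the `save` call and file names are assumptions:

```python
from metaflow import FlowSpec, step, model, current


class ModelDemoFlow(FlowSpec):
    @model  # enables current.model in this step
    @step
    def start(self):
        with open("weights.bin", "wb") as f:
            f.write(b"\x00" * 16)  # stand-in for real model weights
        # Assumption: current.model.save returns a reference object that can be
        # stored as an artifact and later passed to `load`.
        self.trained_model = current.model.save("weights.bin")
        self.next(self.score)

    @model(load="trained_model")  # load the artifact saved in `start`
    @step
    def score(self):
        # current.model.loaded maps loaded artifact names to local paths.
        local_path = current.model.loaded["trained_model"]
        print("model unpacked at:", local_path)
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    ModelDemoFlow()
```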
683
791
 
@@ -766,204 +874,196 @@ def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: ty
766
874
  """
767
875
  ...
768
876
 
769
- def huggingface_hub(*, temp_dir_root: typing.Optional[str] = None, load: typing.Union[typing.List[str], typing.List[typing.Tuple[typing.Dict, str]], typing.List[typing.Tuple[str, str]], typing.List[typing.Dict], None]) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
770
- """
771
- Decorator that helps cache, version and store models/datasets from huggingface hub.
772
-
773
-
774
- Parameters
775
- ----------
776
- temp_dir_root : str, optional
777
- The root directory that will hold the temporary directory where objects will be downloaded.
778
-
779
- load: Union[List[str], List[Tuple[Dict, str]], List[Tuple[str, str]], List[Dict], None]
780
- The list of repos (models/datasets) to load.
781
-
782
- Loaded repos can be accessed via `current.huggingface_hub.loaded`. If load is set, then the following happens:
783
-
784
- - If repo (model/dataset) is not found in the datastore:
785
- - Downloads the repo from Hugging Face Hub to a temporary directory (or uses specified path) for local access
786
- - Stores it in Metaflow's datastore (s3/gcs/azure etc.) with a unique name based on repo_type/repo_id
787
- - All HF models loaded for a `@step` will be cached separately under flow/step/namespace.
788
-
789
- - If repo is found in the datastore:
790
- - Loads it directly from datastore to local path (can be temporary directory or specified path)
791
- """
792
- ...
793
-
794
- def app_deploy(*, app_port: int, app_name: str) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
877
+ def nvidia(*, gpu: int, gpu_type: str, queue_timeout: int) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
795
878
  """
796
- Specifies that this step is used to deploy an instance of the app.
797
- Requires that self.app_name, self.app_port, self.entrypoint and self.deployDir is set.
879
+ Specifies that this step should execute on DGX Cloud.
798
880
 
799
881
 
800
882
  Parameters
801
883
  ----------
802
- app_port : int
884
+ gpu : int
803
885
  Number of GPUs to use.
804
- app_name : str
805
- Name of the app to deploy.
886
+ gpu_type : str
887
+ Type of Nvidia GPU to use.
888
+ queue_timeout : int
889
+ Time to keep the job in NVCF's queue.
806
890
  """
807
891
  ...
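A hedged sketch of pinning a step to DGX Cloud with `@nvidia`; the decorator is assumed to be importable from the top-level `metaflow` package in this distribution, and the GPU type and timeout values below are illustrative, not defaults:

```python
import subprocess

from metaflow import FlowSpec, step, nvidia


class GPUTrainFlow(FlowSpec):
    # Assumption: "H100" is only an example gpu_type accepted by the backend.
    @nvidia(gpu=1, gpu_type="H100", queue_timeout=3600)
    @step
    def start(self):
        # List the GPUs visible inside the NVCF-scheduled task.
        print(subprocess.run(["nvidia-smi", "-L"], capture_output=True, text=True).stdout)
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    GPUTrainFlow()
```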
808
892
 
809
893
  @typing.overload
810
- def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
894
+ def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
811
895
  """
812
- Specifies the PyPI packages for the step.
896
+ Specifies the Conda environment for the step.
813
897
 
814
898
  Information in this decorator will augment any
815
- attributes set in the `@pyi_base` flow-level decorator. Hence,
816
- you can use `@pypi_base` to set packages required by all
817
- steps and use `@pypi` to specify step-specific overrides.
899
+ attributes set in the `@conda_base` flow-level decorator. Hence,
900
+ you can use `@conda_base` to set packages required by all
901
+ steps and use `@conda` to specify step-specific overrides.
818
902
 
819
903
 
820
904
  Parameters
821
905
  ----------
822
- packages : Dict[str, str], default: {}
906
+ packages : Dict[str, str], default {}
823
907
  Packages to use for this step. The key is the name of the package
824
908
  and the value is the version to use.
825
- python : str, optional, default: None
909
+ libraries : Dict[str, str], default {}
910
+ Supported for backward compatibility. When used with packages, packages will take precedence.
911
+ python : str, optional, default None
826
912
  Version of Python to use, e.g. '3.7.4'. A default value of None implies
827
913
  that the version used will correspond to the version of the Python interpreter used to start the run.
914
+ disabled : bool, default False
915
+ If set to True, disables @conda.
828
916
  """
829
917
  ...
830
918
 
831
919
  @typing.overload
832
- def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
920
+ def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
833
921
  ...
834
922
 
835
923
  @typing.overload
836
- def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
924
+ def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
837
925
  ...
838
926
 
839
- def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
927
+ def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
840
928
  """
841
- Specifies the PyPI packages for the step.
929
+ Specifies the Conda environment for the step.
842
930
 
843
931
  Information in this decorator will augment any
844
- attributes set in the `@pyi_base` flow-level decorator. Hence,
845
- you can use `@pypi_base` to set packages required by all
846
- steps and use `@pypi` to specify step-specific overrides.
932
+ attributes set in the `@conda_base` flow-level decorator. Hence,
933
+ you can use `@conda_base` to set packages required by all
934
+ steps and use `@conda` to specify step-specific overrides.
847
935
 
848
936
 
849
937
  Parameters
850
938
  ----------
851
- packages : Dict[str, str], default: {}
939
+ packages : Dict[str, str], default {}
852
940
  Packages to use for this step. The key is the name of the package
853
941
  and the value is the version to use.
854
- python : str, optional, default: None
942
+ libraries : Dict[str, str], default {}
943
+ Supported for backward compatibility. When used with packages, packages will take precedence.
944
+ python : str, optional, default None
855
945
  Version of Python to use, e.g. '3.7.4'. A default value of None implies
856
946
  that the version used will correspond to the version of the Python interpreter used to start the run.
947
+ disabled : bool, default False
948
+ If set to True, disables @conda.
857
949
  """
858
950
  ...
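A minimal sketch combining `@conda_base` with a step-level `@conda` override, as described above; package versions are illustrative, and running with a conda-based environment is assumed to be the usual invocation:

```python
from metaflow import FlowSpec, step, conda, conda_base


@conda_base(python="3.10.12", packages={"numpy": "1.26.4"})  # flow-wide environment
class CondaDemoFlow(FlowSpec):
    @conda(packages={"scikit-learn": "1.4.2"})  # augments the base packages for this step
    @step
    def start(self):
        import numpy as np
        from sklearn.linear_model import LinearRegression

        x = np.arange(10, dtype=float).reshape(-1, 1)
        self.slope = float(LinearRegression().fit(x, 2 * x.ravel()).coef_[0])
        self.next(self.end)

    @step
    def end(self):
        print("fitted slope:", self.slope)


if __name__ == "__main__":
    CondaDemoFlow()
```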
859
951
 
860
952
  @typing.overload
861
- def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
953
+ def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
862
954
  """
863
- Specifies that the step will success under all circumstances.
864
-
865
- The decorator will create an optional artifact, specified by `var`, which
866
- contains the exception raised. You can use it to detect the presence
867
- of errors, indicating that all happy-path artifacts produced by the step
868
- are missing.
869
-
870
-
871
- Parameters
872
- ----------
873
- var : str, optional, default None
874
- Name of the artifact in which to store the caught exception.
875
- If not specified, the exception is not stored.
876
- print_exception : bool, default True
877
- Determines whether or not the exception is printed to
878
- stdout when caught.
955
+ Internal decorator to support Fast Bakery.
879
956
  """
880
957
  ...
881
958
 
882
959
  @typing.overload
883
- def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
960
+ def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
884
961
  ...
885
962
 
886
- @typing.overload
887
- def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
963
+ def fast_bakery_internal(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
964
+ """
965
+ Internal decorator to support Fast Bakery.
966
+ """
888
967
  ...
889
968
 
890
- def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
969
+ @typing.overload
970
+ def trigger_on_finish(*, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
891
971
  """
892
- Specifies that the step will success under all circumstances.
893
-
894
- The decorator will create an optional artifact, specified by `var`, which
895
- contains the exception raised. You can use it to detect the presence
896
- of errors, indicating that all happy-path artifacts produced by the step
897
- are missing.
898
-
972
+ Specifies the flow(s) that this flow depends on.
899
973
 
900
- Parameters
901
- ----------
902
- var : str, optional, default None
903
- Name of the artifact in which to store the caught exception.
904
- If not specified, the exception is not stored.
905
- print_exception : bool, default True
906
- Determines whether or not the exception is printed to
907
- stdout when caught.
908
- """
909
- ...
910
-
911
- @typing.overload
912
- def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
913
- """
914
- Specifies a timeout for your step.
974
+ ```
975
+ @trigger_on_finish(flow='FooFlow')
976
+ ```
977
+ or
978
+ ```
979
+ @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
980
+ ```
981
+ This decorator respects the @project decorator and triggers the flow
982
+ when upstream runs within the same namespace complete successfully.
915
983
 
916
- This decorator is useful if this step may hang indefinitely.
984
+ Additionally, you can specify project-aware upstream flow dependencies
985
+ by specifying the fully qualified project_flow_name.
986
+ ```
987
+ @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
988
+ ```
989
+ or
990
+ ```
991
+ @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
992
+ ```
917
993
 
918
- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
919
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
920
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
994
+ You can also specify just the project or project branch (other values will be
995
+ inferred from the current project or project branch):
996
+ ```
997
+ @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
998
+ ```
921
999
 
922
- Note that all the values specified in parameters are added together so if you specify
923
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
1000
+ Note that `branch` is typically one of:
1001
+ - `prod`
1002
+ - `user.bob`
1003
+ - `test.my_experiment`
1004
+ - `prod.staging`
924
1005
 
925
1006
 
926
1007
  Parameters
927
1008
  ----------
928
- seconds : int, default 0
929
- Number of seconds to wait prior to timing out.
930
- minutes : int, default 0
931
- Number of minutes to wait prior to timing out.
932
- hours : int, default 0
933
- Number of hours to wait prior to timing out.
1009
+ flow : Union[str, Dict[str, str]], optional, default None
1010
+ Upstream flow dependency for this flow.
1011
+ flows : List[Union[str, Dict[str, str]]], default []
1012
+ Upstream flow dependencies for this flow.
1013
+ options : Dict[str, Any], default {}
1014
+ Backend-specific configuration for tuning eventing behavior.
934
1015
  """
935
1016
  ...
936
1017
 
937
1018
  @typing.overload
938
- def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
939
- ...
940
-
941
- @typing.overload
942
- def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1019
+ def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
943
1020
  ...
944
1021
 
945
- def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
1022
+ def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
946
1023
  """
947
- Specifies a timeout for your step.
1024
+ Specifies the flow(s) that this flow depends on.
948
1025
 
949
- This decorator is useful if this step may hang indefinitely.
1026
+ ```
1027
+ @trigger_on_finish(flow='FooFlow')
1028
+ ```
1029
+ or
1030
+ ```
1031
+ @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
1032
+ ```
1033
+ This decorator respects the @project decorator and triggers the flow
1034
+ when upstream runs within the same namespace complete successfully.
950
1035
 
951
- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
952
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
953
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
1036
+ Additionally, you can specify project-aware upstream flow dependencies
1037
+ by specifying the fully qualified project_flow_name.
1038
+ ```
1039
+ @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
1040
+ ```
1041
+ or
1042
+ ```
1043
+ @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
1044
+ ```
954
1045
 
955
- Note that all the values specified in parameters are added together so if you specify
956
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
1046
+ You can also specify just the project or project branch (other values will be
1047
+ inferred from the current project or project branch):
1048
+ ```
1049
+ @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1050
+ ```
1051
+
1052
+ Note that `branch` is typically one of:
1053
+ - `prod`
1054
+ - `user.bob`
1055
+ - `test.my_experiment`
1056
+ - `prod.staging`
957
1057
 
958
1058
 
959
1059
  Parameters
960
1060
  ----------
961
- seconds : int, default 0
962
- Number of seconds to wait prior to timing out.
963
- minutes : int, default 0
964
- Number of minutes to wait prior to timing out.
965
- hours : int, default 0
966
- Number of hours to wait prior to timing out.
1061
+ flow : Union[str, Dict[str, str]], optional, default None
1062
+ Upstream flow dependency for this flow.
1063
+ flows : List[Union[str, Dict[str, str]]], default []
1064
+ Upstream flow dependencies for this flow.
1065
+ options : Dict[str, Any], default {}
1066
+ Backend-specific configuration for tuning eventing behavior.
967
1067
  """
968
1068
  ...
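A short sketch of chaining deployments with `@trigger_on_finish`; the upstream flow name is a placeholder, and inspecting the triggering run through `current.trigger` is an assumption about the runtime API rather than something stated in this stub:

```python
from metaflow import FlowSpec, step, trigger_on_finish, current


@trigger_on_finish(flow="FooFlow")  # start whenever a FooFlow run in this namespace succeeds
class DownstreamFlow(FlowSpec):
    @step
    def start(self):
        # Assumption: when deployed (e.g. on Argo Workflows), the triggering run
        # is exposed via current.trigger; it is None for plain local runs.
        if current.trigger:
            print("triggered by:", current.trigger.run.pathspec)
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    DownstreamFlow()
```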
969
1069
 
@@ -1008,57 +1108,6 @@ def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packag
1008
1108
  """
1009
1109
  ...
1010
1110
 
1011
- @typing.overload
1012
- def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1013
- """
1014
- Specifies the times when the flow should be run when running on a
1015
- production scheduler.
1016
-
1017
-
1018
- Parameters
1019
- ----------
1020
- hourly : bool, default False
1021
- Run the workflow hourly.
1022
- daily : bool, default True
1023
- Run the workflow daily.
1024
- weekly : bool, default False
1025
- Run the workflow weekly.
1026
- cron : str, optional, default None
1027
- Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1028
- specified by this expression.
1029
- timezone : str, optional, default None
1030
- Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1031
- which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1032
- """
1033
- ...
1034
-
1035
- @typing.overload
1036
- def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1037
- ...
1038
-
1039
- def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
1040
- """
1041
- Specifies the times when the flow should be run when running on a
1042
- production scheduler.
1043
-
1044
-
1045
- Parameters
1046
- ----------
1047
- hourly : bool, default False
1048
- Run the workflow hourly.
1049
- daily : bool, default True
1050
- Run the workflow daily.
1051
- weekly : bool, default False
1052
- Run the workflow weekly.
1053
- cron : str, optional, default None
1054
- Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1055
- specified by this expression.
1056
- timezone : str, optional, default None
1057
- Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1058
- which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1059
- """
1060
- ...
1061
-
1062
1111
  def project(*, name: str, branch: typing.Optional[str] = None, production: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1063
1112
  """
1064
1113
  Specifies what flows belong to the same project.
@@ -1094,195 +1143,259 @@ def project(*, name: str, branch: typing.Optional[str] = None, production: bool
1094
1143
  """
1095
1144
  ...
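A hedged sketch of the flow-level `@project` decorator whose signature is shown above; only `name` is set here, while `branch` and `production` are typically chosen per deployment:

```python
from metaflow import FlowSpec, step, project


@project(name="demo_project")  # groups deployments of related flows under one project
class ProjectScopedFlow(FlowSpec):
    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    ProjectScopedFlow()
```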
1096
1145
 
1097
- @typing.overload
1098
- def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1146
+ def with_artifact_store(f: typing.Optional[typing.Type[FlowSpecDerived]] = None):
1099
1147
  """
1100
- Specifies the event(s) that this flow depends on.
1148
+ Allows setting external datastores to save data for the
1149
+ `@checkpoint`/`@model`/`@huggingface_hub` decorators.
1101
1150
 
1102
- ```
1103
- @trigger(event='foo')
1104
- ```
1105
- or
1106
- ```
1107
- @trigger(events=['foo', 'bar'])
1108
- ```
1151
+ This decorator is useful when users wish to save data to a different datastore
1152
+ than what is configured in Metaflow. This can be for a variety of reasons:
1109
1153
 
1110
- Additionally, you can specify the parameter mappings
1111
- to map event payload to Metaflow parameters for the flow.
1112
- ```
1113
- @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
1114
- ```
1115
- or
1116
- ```
1117
- @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
1118
- {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
1119
- ```
1154
+ 1. Data security: The objects need to be stored in a bucket (object storage) that is not accessible by other flows.
1155
+ 2. Data Locality: The task is executing in a different region than the datastore.
1156
+ - Example: The Metaflow datastore lives in US East, but the task is executing in a Finland datacenter.
1157
+ 3. Data Lifecycle Policies: The objects need to be archived / managed separately from the Metaflow-managed objects.
1158
+ - Example: The flow is training very large models that need to be stored separately and will be deleted more aggressively than the Metaflow-managed objects.
1120
1159
 
1121
- 'parameters' can also be a list of strings and tuples like so:
1122
- ```
1123
- @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
1124
- ```
1125
- This is equivalent to:
1126
- ```
1127
- @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
1128
- ```
1160
+ Usage:
1161
+ ----------
1162
+
1163
+ - Using a custom IAM role to access the datastore.
1164
+
1165
+ ```python
1166
+ @with_artifact_store(
1167
+ type="s3",
1168
+ config=lambda: {
1169
+ "root": "s3://my-bucket-foo/path/to/root",
1170
+ "role_arn": ROLE,
1171
+ },
1172
+ )
1173
+ class MyFlow(FlowSpec):
1174
+
1175
+ @checkpoint
1176
+ @step
1177
+ def start(self):
1178
+ with open("my_file.txt", "w") as f:
1179
+ f.write("Hello, World!")
1180
+ self.external_bucket_checkpoint = current.checkpoint.save("my_file.txt")
1181
+ self.next(self.end)
1182
+
1183
+ ```
1184
+
1185
+ - Using credentials to access the s3-compatible datastore.
1186
+
1187
+ ```python
1188
+ @with_artifact_store(
1189
+ type="s3",
1190
+ config=lambda: {
1191
+ "root": "s3://my-bucket-foo/path/to/root",
1192
+ "client_params": {
1193
+ "aws_access_key_id": os.environ.get("MY_CUSTOM_ACCESS_KEY"),
1194
+ "aws_secret_access_key": os.environ.get("MY_CUSTOM_SECRET_KEY"),
1195
+ },
1196
+ },
1197
+ )
1198
+ class MyFlow(FlowSpec):
1199
+
1200
+ @checkpoint
1201
+ @step
1202
+ def start(self):
1203
+ with open("my_file.txt", "w") as f:
1204
+ f.write("Hello, World!")
1205
+ self.external_bucket_checkpoint = current.checkpoint.save("my_file.txt")
1206
+ self.next(self.end)
1207
+
1208
+ ```
1209
+
1210
+ - Accessing objects stored in external datastores after task execution.
1211
+
1212
+ ```python
1213
+ run = Run("CheckpointsTestsFlow/8992")
1214
+ with artifact_store_from(run=run, config={
1215
+ "client_params": {
1216
+ "aws_access_key_id": os.environ.get("MY_CUSTOM_ACCESS_KEY"),
1217
+ "aws_secret_access_key": os.environ.get("MY_CUSTOM_SECRET_KEY"),
1218
+ },
1219
+ }):
1220
+ with Checkpoint() as cp:
1221
+ latest = cp.list(
1222
+ task=run["start"].task
1223
+ )[0]
1224
+ print(latest)
1225
+ cp.load(
1226
+ latest,
1227
+ "test-checkpoints"
1228
+ )
1229
+
1230
+ task = Task("TorchTuneFlow/8484/train/53673")
1231
+ with artifact_store_from(run=run, config={
1232
+ "client_params": {
1233
+ "aws_access_key_id": os.environ.get("MY_CUSTOM_ACCESS_KEY"),
1234
+ "aws_secret_access_key": os.environ.get("MY_CUSTOM_SECRET_KEY"),
1235
+ },
1236
+ }):
1237
+ load_model(
1238
+ task.data.model_ref,
1239
+ "test-models"
1240
+ )
1241
+ ```
1242
+ Parameters
1243
+ ----------
1244
+
1245
+ type: str
1246
+ The type of the datastore. Can be one of 's3', 'gcs', 'azure', or any other supported Metaflow datastore.
1247
+
1248
+ config: dict or Callable
1249
+ Dictionary of configuration options for the datastore. The following keys are required:
1250
+ - root: The root path in the datastore where the data will be saved. (needs to be in the format expected by the datastore)
1251
+ - example: 's3://bucket-name/path/to/root'
1252
+ - example: 'gs://bucket-name/path/to/root'
1253
+ - example: 'https://myblockacc.blob.core.windows.net/metaflow/'
1254
+ - role_arn (optional): AWS IAM role to access s3 bucket (only when `type` is 's3')
1255
+ - session_vars (optional): AWS session variables to access s3 bucket (only when `type` is 's3')
1256
+ - client_params (optional): AWS client parameters to access s3 bucket (only when `type` is 's3')
1257
+ """
1258
+ ...
1259
+
1260
+ @typing.overload
1261
+ def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1262
+ """
1263
+ Specifies the Conda environment for all steps of the flow.
1264
+
1265
+ Use `@conda_base` to set common libraries required by all
1266
+ steps and use `@conda` to specify step-specific additions.
1129
1267
 
1130
1268
 
1131
1269
  Parameters
1132
1270
  ----------
1133
- event : Union[str, Dict[str, Any]], optional, default None
1134
- Event dependency for this flow.
1135
- events : List[Union[str, Dict[str, Any]]], default []
1136
- Events dependency for this flow.
1137
- options : Dict[str, Any], default {}
1138
- Backend-specific configuration for tuning eventing behavior.
1271
+ packages : Dict[str, str], default {}
1272
+ Packages to use for this flow. The key is the name of the package
1273
+ and the value is the version to use.
1274
+ libraries : Dict[str, str], default {}
1275
+ Supported for backward compatibility. When used with packages, packages will take precedence.
1276
+ python : str, optional, default None
1277
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1278
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1279
+ disabled : bool, default False
1280
+ If set to True, disables Conda.
1139
1281
  """
1140
1282
  ...
1141
1283
 
1142
1284
  @typing.overload
1143
- def trigger(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1285
+ def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1144
1286
  ...
1145
1287
 
1146
- def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}):
1288
+ def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
1147
1289
  """
1148
- Specifies the event(s) that this flow depends on.
1149
-
1150
- ```
1151
- @trigger(event='foo')
1152
- ```
1153
- or
1154
- ```
1155
- @trigger(events=['foo', 'bar'])
1156
- ```
1157
-
1158
- Additionally, you can specify the parameter mappings
1159
- to map event payload to Metaflow parameters for the flow.
1160
- ```
1161
- @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
1162
- ```
1163
- or
1164
- ```
1165
- @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
1166
- {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
1167
- ```
1290
+ Specifies the Conda environment for all steps of the flow.
1168
1291
 
1169
- 'parameters' can also be a list of strings and tuples like so:
1170
- ```
1171
- @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
1172
- ```
1173
- This is equivalent to:
1174
- ```
1175
- @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
1176
- ```
1292
+ Use `@conda_base` to set common libraries required by all
1293
+ steps and use `@conda` to specify step-specific additions.
1177
1294
 
1178
1295
 
1179
1296
  Parameters
1180
1297
  ----------
1181
- event : Union[str, Dict[str, Any]], optional, default None
1182
- Event dependency for this flow.
1183
- events : List[Union[str, Dict[str, Any]]], default []
1184
- Events dependency for this flow.
1185
- options : Dict[str, Any], default {}
1186
- Backend-specific configuration for tuning eventing behavior.
1298
+ packages : Dict[str, str], default {}
1299
+ Packages to use for this flow. The key is the name of the package
1300
+ and the value is the version to use.
1301
+ libraries : Dict[str, str], default {}
1302
+ Supported for backward compatibility. When used with packages, packages will take precedence.
1303
+ python : str, optional, default None
1304
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1305
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1306
+ disabled : bool, default False
1307
+ If set to True, disables Conda.
1187
1308
  """
1188
1309
  ...
1189
1310
 
1190
1311
  @typing.overload
1191
- def trigger_on_finish(*, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1312
+ def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1192
1313
  """
1193
- Specifies the flow(s) that this flow depends on.
1314
+ Specifies the event(s) that this flow depends on.
1194
1315
 
1195
1316
  ```
1196
- @trigger_on_finish(flow='FooFlow')
1317
+ @trigger(event='foo')
1197
1318
  ```
1198
1319
  or
1199
1320
  ```
1200
- @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
1321
+ @trigger(events=['foo', 'bar'])
1201
1322
  ```
1202
- This decorator respects the @project decorator and triggers the flow
1203
- when upstream runs within the same namespace complete successfully
1204
1323
 
1205
- Additionally, you can specify project aware upstream flow dependencies
1206
- by specifying the fully qualified project_flow_name.
1324
+ Additionally, you can specify the parameter mappings
1325
+ to map event payload to Metaflow parameters for the flow.
1207
1326
  ```
1208
- @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
1327
+ @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
1209
1328
  ```
1210
1329
  or
1211
1330
  ```
1212
- @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
1331
+ @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'}},
1332
  {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}}])
1213
1333
  ```
1214
1334
 
1215
- You can also specify just the project or project branch (other values will be
1216
- inferred from the current project or project branch):
1335
+ 'parameters' can also be a list of strings and tuples like so:
1217
1336
  ```
1218
- @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1337
+ @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
1338
+ ```
1339
+ This is equivalent to:
1340
+ ```
1341
+ @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
1219
1342
  ```
1220
-
1221
- Note that `branch` is typically one of:
1222
- - `prod`
1223
- - `user.bob`
1224
- - `test.my_experiment`
1225
- - `prod.staging`
1226
1343
 
1227
1344
 
1228
1345
  Parameters
1229
1346
  ----------
1230
- flow : Union[str, Dict[str, str]], optional, default None
1231
- Upstream flow dependency for this flow.
1232
- flows : List[Union[str, Dict[str, str]]], default []
1233
- Upstream flow dependencies for this flow.
1347
+ event : Union[str, Dict[str, Any]], optional, default None
1348
+ Event dependency for this flow.
1349
+ events : List[Union[str, Dict[str, Any]]], default []
1350
+ Events dependency for this flow.
1234
1351
  options : Dict[str, Any], default {}
1235
1352
  Backend-specific configuration for tuning eventing behavior.
1236
1353
  """
1237
1354
  ...
1238
1355
 
1239
1356
  @typing.overload
1240
- def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1357
+ def trigger(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1241
1358
  ...
1242
1359
 
1243
- def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
1360
+ def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}):
1244
1361
  """
1245
- Specifies the flow(s) that this flow depends on.
1362
+ Specifies the event(s) that this flow depends on.
1246
1363
 
1247
1364
  ```
1248
- @trigger_on_finish(flow='FooFlow')
1365
+ @trigger(event='foo')
1249
1366
  ```
1250
1367
  or
1251
1368
  ```
1252
- @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
1369
+ @trigger(events=['foo', 'bar'])
1253
1370
  ```
1254
- This decorator respects the @project decorator and triggers the flow
1255
- when upstream runs within the same namespace complete successfully
1256
1371
 
1257
- Additionally, you can specify project aware upstream flow dependencies
1258
- by specifying the fully qualified project_flow_name.
1372
+ Additionally, you can specify the parameter mappings
1373
+ to map event payload to Metaflow parameters for the flow.
1259
1374
  ```
1260
- @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
1375
+ @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
1261
1376
  ```
1262
1377
  or
1263
1378
  ```
1264
- @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
1379
+ @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'}},
1380
+ {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}}])
1265
1381
  ```
1266
1382
 
1267
- You can also specify just the project or project branch (other values will be
1268
- inferred from the current project or project branch):
1383
+ 'parameters' can also be a list of strings and tuples like so:
1269
1384
  ```
1270
- @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1385
+ @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
1386
+ ```
1387
+ This is equivalent to:
1388
+ ```
1389
+ @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
1271
1390
  ```
1272
-
1273
- Note that `branch` is typically one of:
1274
- - `prod`
1275
- - `user.bob`
1276
- - `test.my_experiment`
1277
- - `prod.staging`
1278
1391
 
1279
1392
 
1280
1393
  Parameters
1281
1394
  ----------
1282
- flow : Union[str, Dict[str, str]], optional, default None
1283
- Upstream flow dependency for this flow.
1284
- flows : List[Union[str, Dict[str, str]]], default []
1285
- Upstream flow dependencies for this flow.
1395
+ event : Union[str, Dict[str, Any]], optional, default None
1396
+ Event dependency for this flow.
1397
+ events : List[Union[str, Dict[str, Any]]], default []
1398
+ Events dependency for this flow.
1286
1399
  options : Dict[str, Any], default {}
1287
1400
  Backend-specific configuration for tuning eventing behavior.
1288
1401
  """
@@ -1375,167 +1488,53 @@ def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, expone
1375
1488
  ...
1376
1489
 
1377
1490
  @typing.overload
1378
- def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1491
+ def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1379
1492
  """
1380
- Specifies the Conda environment for all steps of the flow.
1381
-
1382
- Use `@conda_base` to set common libraries required by all
1383
- steps and use `@conda` to specify step-specific additions.
1493
+ Specifies the times when the flow should be run when running on a
1494
+ production scheduler.
1384
1495
 
1385
1496
 
1386
1497
  Parameters
1387
1498
  ----------
1388
- packages : Dict[str, str], default {}
1389
- Packages to use for this flow. The key is the name of the package
1390
- and the value is the version to use.
1391
- libraries : Dict[str, str], default {}
1392
- Supported for backward compatibility. When used with packages, packages will take precedence.
1393
- python : str, optional, default None
1394
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1395
- that the version used will correspond to the version of the Python interpreter used to start the run.
1396
- disabled : bool, default False
1397
- If set to True, disables Conda.
1499
+ hourly : bool, default False
1500
+ Run the workflow hourly.
1501
+ daily : bool, default True
1502
+ Run the workflow daily.
1503
+ weekly : bool, default False
1504
+ Run the workflow weekly.
1505
+ cron : str, optional, default None
1506
+ Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1507
+ specified by this expression.
1508
+ timezone : str, optional, default None
1509
+ Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1510
+ which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1398
1511
  """
1399
1512
  ...
1400
1513
 
1401
1514
  @typing.overload
1402
- def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1515
+ def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1403
1516
  ...
1404
1517
 
1405
- def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
1518
+ def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
1406
1519
  """
1407
- Specifies the Conda environment for all steps of the flow.
1408
-
1409
- Use `@conda_base` to set common libraries required by all
1410
- steps and use `@conda` to specify step-specific additions.
1520
+ Specifies the times when the flow should be run when running on a
1521
+ production scheduler.
1411
1522
 
1412
1523
 
1413
1524
  Parameters
1414
1525
  ----------
1415
- packages : Dict[str, str], default {}
1416
- Packages to use for this flow. The key is the name of the package
1417
- and the value is the version to use.
1418
- libraries : Dict[str, str], default {}
1419
- Supported for backward compatibility. When used with packages, packages will take precedence.
1420
- python : str, optional, default None
1421
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1422
- that the version used will correspond to the version of the Python interpreter used to start the run.
1423
- disabled : bool, default False
1424
- If set to True, disables Conda.
1425
- """
1426
- ...
1427
-
1428
- def with_artifact_store(f: typing.Optional[typing.Type[FlowSpecDerived]] = None):
1429
- """
1430
- Allows setting external datastores to save data for the
1431
- `@checkpoint`/`@model`/`@huggingface_hub` decorators.
1432
-
1433
- This decorator is useful when users wish to save data to a different datastore
1434
- than what is configured in Metaflow. This can be for variety of reasons:
1435
-
1436
- 1. Data security: The objects needs to be stored in a bucket (object storage) that is not accessible by other flows.
1437
- 2. Data Locality: The location where the task is executing is not located in the same region as the datastore.
1438
- - Example: Metaflow datastore lives in US East, but the task is executing in Finland datacenters.
1439
- 3. Data Lifecycle Policies: The objects need to be archived / managed separately from the Metaflow managed objects.
1440
- - Example: Flow is training very large models that need to be stored separately and will be deleted more aggressively than the Metaflow managed objects.
1441
-
1442
- Usage:
1443
- ----------
1444
-
1445
- - Using a custom IAM role to access the datastore.
1446
-
1447
- ```python
1448
- @with_artifact_store(
1449
- type="s3",
1450
- config=lambda: {
1451
- "root": "s3://my-bucket-foo/path/to/root",
1452
- "role_arn": ROLE,
1453
- },
1454
- )
1455
- class MyFlow(FlowSpec):
1456
-
1457
- @checkpoint
1458
- @step
1459
- def start(self):
1460
- with open("my_file.txt", "w") as f:
1461
- f.write("Hello, World!")
1462
- self.external_bucket_checkpoint = current.checkpoint.save("my_file.txt")
1463
- self.next(self.end)
1464
-
1465
- ```
1466
-
1467
- - Using credentials to access the s3-compatible datastore.
1468
-
1469
- ```python
1470
- @with_artifact_store(
1471
- type="s3",
1472
- config=lambda: {
1473
- "root": "s3://my-bucket-foo/path/to/root",
1474
- "client_params": {
1475
- "aws_access_key_id": os.environ.get("MY_CUSTOM_ACCESS_KEY"),
1476
- "aws_secret_access_key": os.environ.get("MY_CUSTOM_SECRET_KEY"),
1477
- },
1478
- },
1479
- )
1480
- class MyFlow(FlowSpec):
1481
-
1482
- @checkpoint
1483
- @step
1484
- def start(self):
1485
- with open("my_file.txt", "w") as f:
1486
- f.write("Hello, World!")
1487
- self.external_bucket_checkpoint = current.checkpoint.save("my_file.txt")
1488
- self.next(self.end)
1489
-
1490
- ```
1491
-
1492
- - Accessing objects stored in external datastores after task execution.
1493
-
1494
- ```python
1495
- run = Run("CheckpointsTestsFlow/8992")
1496
- with artifact_store_from(run=run, config={
1497
- "client_params": {
1498
- "aws_access_key_id": os.environ.get("MY_CUSTOM_ACCESS_KEY"),
1499
- "aws_secret_access_key": os.environ.get("MY_CUSTOM_SECRET_KEY"),
1500
- },
1501
- }):
1502
- with Checkpoint() as cp:
1503
- latest = cp.list(
1504
- task=run["start"].task
1505
- )[0]
1506
- print(latest)
1507
- cp.load(
1508
- latest,
1509
- "test-checkpoints"
1510
- )
1511
-
1512
- task = Task("TorchTuneFlow/8484/train/53673")
1513
- with artifact_store_from(run=run, config={
1514
- "client_params": {
1515
- "aws_access_key_id": os.environ.get("MY_CUSTOM_ACCESS_KEY"),
1516
- "aws_secret_access_key": os.environ.get("MY_CUSTOM_SECRET_KEY"),
1517
- },
1518
- }):
1519
- load_model(
1520
- task.data.model_ref,
1521
- "test-models"
1522
- )
1523
- ```
1524
- Parameters:
1525
- ----------
1526
-
1527
- type: str
1528
- The type of the datastore. Can be one of 's3', 'gcs', 'azure' or any other supported metaflow Datastore.
1529
-
1530
- config: dict or Callable
1531
- Dictionary of configuration options for the datastore. The following keys are required:
1532
- - root: The root path in the datastore where the data will be saved. (needs to be in the format expected by the datastore)
1533
- - example: 's3://bucket-name/path/to/root'
1534
- - example: 'gs://bucket-name/path/to/root'
1535
- - example: 'https://myblockacc.blob.core.windows.net/metaflow/'
1536
- - role_arn (optional): AWS IAM role to access s3 bucket (only when `type` is 's3')
1537
- - session_vars (optional): AWS session variables to access s3 bucket (only when `type` is 's3')
1538
- - client_params (optional): AWS client parameters to access s3 bucket (only when `type` is 's3')
1526
+ hourly : bool, default False
1527
+ Run the workflow hourly.
1528
+ daily : bool, default True
1529
+ Run the workflow daily.
1530
+ weekly : bool, default False
1531
+ Run the workflow weekly.
1532
+ cron : str, optional, default None
1533
+ Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1534
+ specified by this expression.
1535
+ timezone : str, optional, default None
1536
+ Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1537
+ which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1539
1538
  """
1540
1539
  ...
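Finally, a minimal sketch of `@schedule` with a cron expression and an IANA timezone; the values are illustrative, and per the docstring above the timezone is currently honored only on Argo Workflows:

```python
from metaflow import FlowSpec, step, schedule


@schedule(cron="0 6 * * *", timezone="Europe/Helsinki")  # run daily at 06:00 Helsinki time
class NightlyReportFlow(FlowSpec):
    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    NightlyReportFlow()
```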
1541
1540