ob-metaflow-stubs 6.0.3.158__py2.py3-none-any.whl → 6.0.3.160__py2.py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (203)
  1. metaflow-stubs/__init__.pyi +734 -732
  2. metaflow-stubs/cards.pyi +1 -1
  3. metaflow-stubs/cli.pyi +1 -1
  4. metaflow-stubs/cli_components/__init__.pyi +1 -1
  5. metaflow-stubs/cli_components/utils.pyi +1 -1
  6. metaflow-stubs/client/__init__.pyi +1 -1
  7. metaflow-stubs/client/core.pyi +4 -4
  8. metaflow-stubs/client/filecache.pyi +2 -2
  9. metaflow-stubs/events.pyi +2 -2
  10. metaflow-stubs/exception.pyi +1 -1
  11. metaflow-stubs/flowspec.pyi +4 -4
  12. metaflow-stubs/generated_for.txt +1 -1
  13. metaflow-stubs/includefile.pyi +2 -2
  14. metaflow-stubs/info_file.pyi +1 -1
  15. metaflow-stubs/metadata_provider/__init__.pyi +1 -1
  16. metaflow-stubs/metadata_provider/heartbeat.pyi +1 -1
  17. metaflow-stubs/metadata_provider/metadata.pyi +1 -1
  18. metaflow-stubs/metadata_provider/util.pyi +1 -1
  19. metaflow-stubs/metaflow_config.pyi +1 -1
  20. metaflow-stubs/metaflow_current.pyi +117 -117
  21. metaflow-stubs/mf_extensions/__init__.pyi +1 -1
  22. metaflow-stubs/mf_extensions/obcheckpoint/__init__.pyi +1 -1
  23. metaflow-stubs/mf_extensions/obcheckpoint/plugins/__init__.pyi +1 -1
  24. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/__init__.pyi +1 -1
  25. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/__init__.pyi +1 -1
  26. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/async_cards.pyi +1 -1
  27. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/deco_injection_mixin.pyi +1 -1
  28. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/extra_components.pyi +1 -1
  29. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/__init__.pyi +1 -1
  30. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/cards/__init__.pyi +1 -1
  31. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/cards/checkpoint_lister.pyi +3 -3
  32. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/cards/lineage_card.pyi +1 -1
  33. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/checkpoint_storage.pyi +3 -3
  34. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/constructors.pyi +1 -1
  35. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/core.pyi +2 -2
  36. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/decorator.pyi +3 -3
  37. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/exceptions.pyi +1 -1
  38. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/final_api.pyi +1 -1
  39. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/lineage.pyi +1 -1
  40. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/__init__.pyi +1 -1
  41. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/context.pyi +2 -2
  42. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/core.pyi +2 -2
  43. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/decorator.pyi +1 -1
  44. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/exceptions.pyi +1 -1
  45. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/task_utils.pyi +2 -2
  46. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/utils.pyi +1 -1
  47. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastructures.pyi +1 -1
  48. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/exceptions.pyi +1 -1
  49. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/hf_hub/__init__.pyi +1 -1
  50. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/hf_hub/decorator.pyi +2 -2
  51. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/__init__.pyi +1 -1
  52. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/core.pyi +2 -2
  53. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/exceptions.pyi +1 -1
  54. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/model_storage.pyi +3 -3
  55. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/__init__.pyi +1 -1
  56. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/flowspec_utils.pyi +1 -1
  57. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/general.pyi +1 -1
  58. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/identity_utils.pyi +2 -2
  59. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/serialization_handler/__init__.pyi +1 -1
  60. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/serialization_handler/base.pyi +1 -1
  61. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/serialization_handler/tar.pyi +1 -1
  62. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/tar_utils.pyi +1 -1
  63. metaflow-stubs/mf_extensions/outerbounds/__init__.pyi +1 -1
  64. metaflow-stubs/mf_extensions/outerbounds/plugins/__init__.pyi +1 -1
  65. metaflow-stubs/mf_extensions/outerbounds/plugins/card_utilities/__init__.pyi +1 -1
  66. metaflow-stubs/mf_extensions/outerbounds/plugins/card_utilities/injector.pyi +1 -1
  67. metaflow-stubs/mf_extensions/outerbounds/plugins/checkpoint_datastores/__init__.pyi +6 -0
  68. metaflow-stubs/mf_extensions/outerbounds/plugins/checkpoint_datastores/coreweave.pyi +58 -0
  69. metaflow-stubs/mf_extensions/outerbounds/plugins/checkpoint_datastores/nebius.pyi +64 -0
  70. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/__init__.pyi +1 -1
  71. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/ollama.pyi +1 -1
  72. metaflow-stubs/mf_extensions/outerbounds/plugins/snowflake/__init__.pyi +1 -1
  73. metaflow-stubs/mf_extensions/outerbounds/plugins/snowflake/snowflake.pyi +1 -1
  74. metaflow-stubs/mf_extensions/outerbounds/profilers/__init__.pyi +1 -1
  75. metaflow-stubs/mf_extensions/outerbounds/profilers/gpu.pyi +1 -1
  76. metaflow-stubs/mf_extensions/outerbounds/remote_config.pyi +1 -1
  77. metaflow-stubs/mf_extensions/outerbounds/toplevel/__init__.pyi +1 -1
  78. metaflow-stubs/mf_extensions/outerbounds/toplevel/global_aliases_for_metaflow_package.pyi +3 -1
  79. metaflow-stubs/multicore_utils.pyi +1 -1
  80. metaflow-stubs/parameters.pyi +2 -2
  81. metaflow-stubs/plugins/__init__.pyi +13 -13
  82. metaflow-stubs/plugins/airflow/__init__.pyi +1 -1
  83. metaflow-stubs/plugins/airflow/airflow_utils.pyi +1 -1
  84. metaflow-stubs/plugins/airflow/exception.pyi +1 -1
  85. metaflow-stubs/plugins/airflow/sensors/__init__.pyi +1 -1
  86. metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +1 -1
  87. metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +1 -1
  88. metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +1 -1
  89. metaflow-stubs/plugins/argo/__init__.pyi +1 -1
  90. metaflow-stubs/plugins/argo/argo_client.pyi +1 -1
  91. metaflow-stubs/plugins/argo/argo_events.pyi +1 -1
  92. metaflow-stubs/plugins/argo/argo_workflows.pyi +2 -2
  93. metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +2 -2
  94. metaflow-stubs/plugins/argo/argo_workflows_deployer.pyi +1 -1
  95. metaflow-stubs/plugins/argo/argo_workflows_deployer_objects.pyi +1 -1
  96. metaflow-stubs/plugins/aws/__init__.pyi +1 -1
  97. metaflow-stubs/plugins/aws/aws_client.pyi +1 -1
  98. metaflow-stubs/plugins/aws/aws_utils.pyi +1 -1
  99. metaflow-stubs/plugins/aws/batch/__init__.pyi +1 -1
  100. metaflow-stubs/plugins/aws/batch/batch.pyi +1 -1
  101. metaflow-stubs/plugins/aws/batch/batch_client.pyi +1 -1
  102. metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +1 -1
  103. metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +1 -1
  104. metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +2 -2
  105. metaflow-stubs/plugins/aws/step_functions/__init__.pyi +1 -1
  106. metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +1 -1
  107. metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +1 -1
  108. metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +1 -1
  109. metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +1 -1
  110. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer.pyi +1 -1
  111. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer_objects.pyi +1 -1
  112. metaflow-stubs/plugins/azure/__init__.pyi +1 -1
  113. metaflow-stubs/plugins/azure/azure_credential.pyi +1 -1
  114. metaflow-stubs/plugins/azure/azure_exceptions.pyi +1 -1
  115. metaflow-stubs/plugins/azure/azure_secret_manager_secrets_provider.pyi +2 -2
  116. metaflow-stubs/plugins/azure/azure_utils.pyi +1 -1
  117. metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +1 -1
  118. metaflow-stubs/plugins/azure/includefile_support.pyi +1 -1
  119. metaflow-stubs/plugins/cards/__init__.pyi +5 -5
  120. metaflow-stubs/plugins/cards/card_client.pyi +1 -1
  121. metaflow-stubs/plugins/cards/card_creator.pyi +1 -1
  122. metaflow-stubs/plugins/cards/card_datastore.pyi +1 -1
  123. metaflow-stubs/plugins/cards/card_decorator.pyi +1 -1
  124. metaflow-stubs/plugins/cards/card_modules/__init__.pyi +1 -1
  125. metaflow-stubs/plugins/cards/card_modules/basic.pyi +1 -1
  126. metaflow-stubs/plugins/cards/card_modules/card.pyi +1 -1
  127. metaflow-stubs/plugins/cards/card_modules/components.pyi +2 -2
  128. metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +1 -1
  129. metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +1 -1
  130. metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +1 -1
  131. metaflow-stubs/plugins/cards/card_resolver.pyi +1 -1
  132. metaflow-stubs/plugins/cards/component_serializer.pyi +1 -1
  133. metaflow-stubs/plugins/cards/exception.pyi +1 -1
  134. metaflow-stubs/plugins/catch_decorator.pyi +2 -2
  135. metaflow-stubs/plugins/datatools/__init__.pyi +1 -1
  136. metaflow-stubs/plugins/datatools/local.pyi +1 -1
  137. metaflow-stubs/plugins/datatools/s3/__init__.pyi +1 -1
  138. metaflow-stubs/plugins/datatools/s3/s3.pyi +2 -2
  139. metaflow-stubs/plugins/datatools/s3/s3tail.pyi +1 -1
  140. metaflow-stubs/plugins/datatools/s3/s3util.pyi +1 -1
  141. metaflow-stubs/plugins/debug_logger.pyi +1 -1
  142. metaflow-stubs/plugins/debug_monitor.pyi +1 -1
  143. metaflow-stubs/plugins/environment_decorator.pyi +1 -1
  144. metaflow-stubs/plugins/events_decorator.pyi +1 -1
  145. metaflow-stubs/plugins/frameworks/__init__.pyi +1 -1
  146. metaflow-stubs/plugins/frameworks/pytorch.pyi +1 -1
  147. metaflow-stubs/plugins/gcp/__init__.pyi +1 -1
  148. metaflow-stubs/plugins/gcp/gcp_secret_manager_secrets_provider.pyi +2 -2
  149. metaflow-stubs/plugins/gcp/gs_exceptions.pyi +1 -1
  150. metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +1 -1
  151. metaflow-stubs/plugins/gcp/gs_utils.pyi +1 -1
  152. metaflow-stubs/plugins/gcp/includefile_support.pyi +1 -1
  153. metaflow-stubs/plugins/kubernetes/__init__.pyi +1 -1
  154. metaflow-stubs/plugins/kubernetes/kube_utils.pyi +1 -1
  155. metaflow-stubs/plugins/kubernetes/kubernetes.pyi +1 -1
  156. metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +1 -1
  157. metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +1 -1
  158. metaflow-stubs/plugins/kubernetes/kubernetes_jobsets.pyi +1 -1
  159. metaflow-stubs/plugins/kubernetes/spot_monitor_sidecar.pyi +1 -1
  160. metaflow-stubs/plugins/ollama/__init__.pyi +1 -1
  161. metaflow-stubs/plugins/parallel_decorator.pyi +1 -1
  162. metaflow-stubs/plugins/perimeters.pyi +1 -1
  163. metaflow-stubs/plugins/project_decorator.pyi +1 -1
  164. metaflow-stubs/plugins/pypi/__init__.pyi +2 -2
  165. metaflow-stubs/plugins/pypi/conda_decorator.pyi +1 -1
  166. metaflow-stubs/plugins/pypi/conda_environment.pyi +4 -4
  167. metaflow-stubs/plugins/pypi/parsers.pyi +1 -1
  168. metaflow-stubs/plugins/pypi/pypi_decorator.pyi +1 -1
  169. metaflow-stubs/plugins/pypi/pypi_environment.pyi +1 -1
  170. metaflow-stubs/plugins/pypi/utils.pyi +1 -1
  171. metaflow-stubs/plugins/resources_decorator.pyi +1 -1
  172. metaflow-stubs/plugins/retry_decorator.pyi +1 -1
  173. metaflow-stubs/plugins/secrets/__init__.pyi +2 -2
  174. metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +1 -1
  175. metaflow-stubs/plugins/secrets/secrets_decorator.pyi +1 -1
  176. metaflow-stubs/plugins/snowflake/__init__.pyi +1 -1
  177. metaflow-stubs/plugins/storage_executor.pyi +1 -1
  178. metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +2 -2
  179. metaflow-stubs/plugins/timeout_decorator.pyi +2 -2
  180. metaflow-stubs/profilers/__init__.pyi +1 -1
  181. metaflow-stubs/pylint_wrapper.pyi +1 -1
  182. metaflow-stubs/runner/__init__.pyi +1 -1
  183. metaflow-stubs/runner/deployer.pyi +3 -3
  184. metaflow-stubs/runner/deployer_impl.pyi +1 -1
  185. metaflow-stubs/runner/metaflow_runner.pyi +1 -1
  186. metaflow-stubs/runner/nbdeploy.pyi +1 -1
  187. metaflow-stubs/runner/nbrun.pyi +1 -1
  188. metaflow-stubs/runner/subprocess_manager.pyi +1 -1
  189. metaflow-stubs/runner/utils.pyi +2 -2
  190. metaflow-stubs/system/__init__.pyi +1 -1
  191. metaflow-stubs/system/system_logger.pyi +1 -1
  192. metaflow-stubs/system/system_monitor.pyi +1 -1
  193. metaflow-stubs/tagging_util.pyi +1 -1
  194. metaflow-stubs/tuple_util.pyi +1 -1
  195. metaflow-stubs/user_configs/__init__.pyi +1 -1
  196. metaflow-stubs/user_configs/config_decorators.pyi +5 -5
  197. metaflow-stubs/user_configs/config_options.pyi +1 -1
  198. metaflow-stubs/user_configs/config_parameters.pyi +3 -3
  199. {ob_metaflow_stubs-6.0.3.158.dist-info → ob_metaflow_stubs-6.0.3.160.dist-info}/METADATA +1 -1
  200. ob_metaflow_stubs-6.0.3.160.dist-info/RECORD +203 -0
  201. ob_metaflow_stubs-6.0.3.158.dist-info/RECORD +0 -200
  202. {ob_metaflow_stubs-6.0.3.158.dist-info → ob_metaflow_stubs-6.0.3.160.dist-info}/WHEEL +0 -0
  203. {ob_metaflow_stubs-6.0.3.158.dist-info → ob_metaflow_stubs-6.0.3.160.dist-info}/top_level.txt +0 -0
@@ -1,15 +1,15 @@
 ######################################################################################################
 # Auto-generated Metaflow stub file #
 # MF version: 2.15.7.2+obcheckpoint(0.2.1);ob(v1) #
-# Generated on 2025-04-17T20:45:30.855915 #
+# Generated on 2025-05-01T00:24:18.378249 #
 ######################################################################################################

 from __future__ import annotations

 import typing
 if typing.TYPE_CHECKING:
-    import typing
     import datetime
+    import typing

 FlowSpecDerived = typing.TypeVar("FlowSpecDerived", bound="FlowSpec", contravariant=False, covariant=False)
 StepFlag = typing.NewType("StepFlag", bool)

@@ -35,17 +35,17 @@ from .user_configs.config_parameters import ConfigValue as ConfigValue
 from .user_configs.config_parameters import config_expr as config_expr
 from .user_configs.config_decorators import CustomFlowDecorator as CustomFlowDecorator
 from .user_configs.config_decorators import CustomStepDecorator as CustomStepDecorator
-from . import cards as cards
 from . import tuple_util as tuple_util
+from . import cards as cards
 from . import events as events
 from . import runner as runner
 from . import plugins as plugins
 from .mf_extensions.outerbounds.toplevel.global_aliases_for_metaflow_package import S3 as S3
 from . import includefile as includefile
 from .includefile import IncludeFile as IncludeFile
-from .plugins.pypi.parsers import requirements_txt_parser as requirements_txt_parser
 from .plugins.pypi.parsers import pyproject_toml_parser as pyproject_toml_parser
 from .plugins.pypi.parsers import conda_environment_yml_parser as conda_environment_yml_parser
+from .plugins.pypi.parsers import requirements_txt_parser as requirements_txt_parser
 from . import client as client
 from .client.core import namespace as namespace
 from .client.core import get_namespace as get_namespace
@@ -69,6 +69,8 @@ from .mf_extensions.obcheckpoint.plugins.machine_learning_utilities.datastructur
 from .mf_extensions.obcheckpoint.plugins.machine_learning_utilities.datastore.context import artifact_store_from as artifact_store_from
 from .mf_extensions.outerbounds.toplevel.global_aliases_for_metaflow_package import get_aws_client as get_aws_client
 from .mf_extensions.outerbounds.plugins.snowflake.snowflake import Snowflake as Snowflake
+from .mf_extensions.outerbounds.plugins.checkpoint_datastores.nebius import nebius_checkpoints as nebius_checkpoints
+from .mf_extensions.outerbounds.plugins.checkpoint_datastores.coreweave import coreweave_checkpoints as coreweave_checkpoints
 from . import cli_components as cli_components
 from . import system as system
 from . import pylint_wrapper as pylint_wrapper
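The headline change in this release is the pair of new checkpoint-datastore stubs (coreweave.pyi and nebius.pyi) whose top-level aliases are added in the hunk above. A minimal sketch of how those aliases become importable once this wheel is installed; the import path is confirmed by the diff, while anything about how the objects are wired into a flow is an assumption and not shown in the stubs:

    # Sketch only: these names are re-exported by metaflow per the diff above.
    # How they are used (e.g. as checkpoint-datastore configuration helpers)
    # is an assumption; the stubs themselves only expose the names.
    from metaflow import coreweave_checkpoints, nebius_checkpoints

    print(coreweave_checkpoints, nebius_checkpoints)  # confirms the aliases resolve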
@@ -201,6 +203,177 @@ def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
     """
     ...

+@typing.overload
+def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+    """
+    Specifies the number of times the task corresponding
+    to a step needs to be retried.
+
+    This decorator is useful for handling transient errors, such as networking issues.
+    If your task contains operations that can't be retried safely, e.g. database updates,
+    it is advisable to annotate it with `@retry(times=0)`.
+
+    This can be used in conjunction with the `@catch` decorator. The `@catch`
+    decorator will execute a no-op task after all retries have been exhausted,
+    ensuring that the flow execution can continue.
+
+
+    Parameters
+    ----------
+    times : int, default 3
+        Number of times to retry this task.
+    minutes_between_retries : int, default 2
+        Number of minutes between retries.
+    """
+    ...
+
+@typing.overload
+def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+    ...
+
+@typing.overload
+def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+    ...
+
+def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
+    """
+    Specifies the number of times the task corresponding
+    to a step needs to be retried.
+
+    This decorator is useful for handling transient errors, such as networking issues.
+    If your task contains operations that can't be retried safely, e.g. database updates,
+    it is advisable to annotate it with `@retry(times=0)`.
+
+    This can be used in conjunction with the `@catch` decorator. The `@catch`
+    decorator will execute a no-op task after all retries have been exhausted,
+    ensuring that the flow execution can continue.
+
+
+    Parameters
+    ----------
+    times : int, default 3
+        Number of times to retry this task.
+    minutes_between_retries : int, default 2
+        Number of minutes between retries.
+    """
+    ...
+
+def huggingface_hub(*, temp_dir_root: typing.Optional[str] = None, load: typing.Union[typing.List[str], typing.List[typing.Tuple[typing.Dict, str]], typing.List[typing.Tuple[str, str]], typing.List[typing.Dict], None]) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+    """
+    Decorator that helps cache, version and store models/datasets from huggingface hub.
+
+
+    Parameters
+    ----------
+    temp_dir_root : str, optional
+        The root directory that will hold the temporary directory where objects will be downloaded.
+
+    load: Union[List[str], List[Tuple[Dict, str]], List[Tuple[str, str]], List[Dict], None]
+        The list of repos (models/datasets) to load.
+
+        Loaded repos can be accessed via `current.huggingface_hub.loaded`. If load is set, then the following happens:
+
+        - If repo (model/dataset) is not found in the datastore:
+            - Downloads the repo from Hugging Face Hub to a temporary directory (or uses specified path) for local access
+            - Stores it in Metaflow's datastore (s3/gcs/azure etc.) with a unique name based on repo_type/repo_id
+            - All HF models loaded for a `@step` will be cached separately under flow/step/namespace.
+
+        - If repo is found in the datastore:
+            - Loads it directly from datastore to local path (can be temporary directory or specified path)
+    """
+    ...
+
+def app_deploy(*, app_port: int, app_name: str) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+    """
+    Specifies that this step is used to deploy an instance of the app.
+    Requires that self.app_name, self.app_port, self.entrypoint and self.deployDir is set.
+
+
+    Parameters
+    ----------
+    app_port : int
+        Number of GPUs to use.
+    app_name : str
+        Name of the app to deploy.
+    """
+    ...
+
+@typing.overload
+def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+    """
+    Specifies a timeout for your step.
+
+    This decorator is useful if this step may hang indefinitely.
+
+    This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
+    A timeout is considered to be an exception thrown by the step. It will cause the step to be
+    retried if needed and the exception will be caught by the `@catch` decorator, if present.
+
+    Note that all the values specified in parameters are added together so if you specify
+    60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
+
+
+    Parameters
+    ----------
+    seconds : int, default 0
+        Number of seconds to wait prior to timing out.
+    minutes : int, default 0
+        Number of minutes to wait prior to timing out.
+    hours : int, default 0
+        Number of hours to wait prior to timing out.
+    """
+    ...
+
+@typing.overload
+def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+    ...
+
+@typing.overload
+def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+    ...
+
+def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
+    """
+    Specifies a timeout for your step.
+
+    This decorator is useful if this step may hang indefinitely.
+
+    This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
+    A timeout is considered to be an exception thrown by the step. It will cause the step to be
+    retried if needed and the exception will be caught by the `@catch` decorator, if present.
+
+    Note that all the values specified in parameters are added together so if you specify
+    60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
+
+
+    Parameters
+    ----------
+    seconds : int, default 0
+        Number of seconds to wait prior to timing out.
+    minutes : int, default 0
+        Number of minutes to wait prior to timing out.
+    hours : int, default 0
+        Number of hours to wait prior to timing out.
+    """
+    ...
+
+@typing.overload
+def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+    """
+    Internal decorator to support Fast bakery
+    """
+    ...
+
+@typing.overload
+def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+    ...
+
+def fast_bakery_internal(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
+    """
+    Internal decorator to support Fast bakery
+    """
+    ...
+
 @typing.overload
 def card(*, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
     """
@@ -250,146 +423,180 @@ def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typ
     """
     ...

-def ollama(*, models: "list[Ollama]", backend: str) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+def nim(*, models: "list[NIM]", backend: str, queue_timeout: int) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
     """
-    This decorator is used to run Ollama APIs as Metaflow task sidecars.
+    This decorator is used to run NIM containers in Metaflow tasks as sidecars.

     User code call
     -----------
-    @ollama(
+    @nim(
         models=['meta/llama3-8b-instruct', 'meta/llama3-70b-instruct'],
-        backend='local'
+        backend='managed'
     )

     Valid backend options
     ---------------------
-    - 'local': Run as a separate process on the local task machine.
-    - (TODO) 'managed': Outerbounds hosts and selects compute provider.
-    - (TODO) 'remote': Spin up separate instance to serve Ollama models.
+    - 'managed': Outerbounds selects a compute provider based on the model.

     Valid model options
     ----------------
-    - 'llama3.2'
-    - 'llama3.3'
-    - any model here https://ollama.com/search
+    - 'meta/llama3-8b-instruct': 8B parameter model
+    - 'meta/llama3-70b-instruct': 70B parameter model
+    - any model here: https://nvcf.ngc.nvidia.com/functions?filter=nvidia-functions


     Parameters
     ----------
-    models: list[Ollama]
-        List of Ollama containers running models in sidecars.
+    models: list[NIM]
+        List of NIM containers running models in sidecars.
     backend: str
-        Determines where and how to run the Ollama process.
+        Compute provider to run the NIM container.
+    queue_timeout : int
+        Time to keep the job in NVCF's queue.
     """
     ...

-def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = 'KUBERNETES_IMAGE_PULL_POLICY', service_account: str = 'METAFLOW_KUBERNETES_SERVICE_ACCOUNT', secrets: typing.Optional[typing.List[str]] = None, node_selector: typing.Union[typing.Dict[str, str], str, None] = None, namespace: str = 'METAFLOW_KUBERNETES_NAMESPACE', gpu: typing.Optional[int] = None, gpu_vendor: str = 'KUBERNETES_GPU_VENDOR', tolerations: typing.List[str] = [], labels: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_LABELS', annotations: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_ANNOTATIONS', use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = '/metaflow_temp', persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None, hostname_resolution_timeout: int = 600, qos: str = 'Burstable') -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+@typing.overload
+def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
     """
-    Specifies that this step should execute on Kubernetes.
+    Specifies the Conda environment for the step.
+
+    Information in this decorator will augment any
+    attributes set in the `@conda_base` flow-level decorator. Hence,
+    you can use `@conda_base` to set packages required by all
+    steps and use `@conda` to specify step-specific overrides.


     Parameters
     ----------
-    cpu : int, default 1
-        Number of CPUs required for this step. If `@resources` is
-        also present, the maximum value from all decorators is used.
-    memory : int, default 4096
-        Memory size (in MB) required for this step. If
-        `@resources` is also present, the maximum value from all decorators is
-        used.
-    disk : int, default 10240
-        Disk size (in MB) required for this step. If
-        `@resources` is also present, the maximum value from all decorators is
-        used.
-    image : str, optional, default None
-        Docker image to use when launching on Kubernetes. If not specified, and
-        METAFLOW_KUBERNETES_CONTAINER_IMAGE is specified, that image is used. If
-        not, a default Docker image mapping to the current version of Python is used.
-    image_pull_policy: str, default KUBERNETES_IMAGE_PULL_POLICY
-        If given, the imagePullPolicy to be applied to the Docker image of the step.
-    service_account : str, default METAFLOW_KUBERNETES_SERVICE_ACCOUNT
-        Kubernetes service account to use when launching pod in Kubernetes.
-    secrets : List[str], optional, default None
-        Kubernetes secrets to use when launching pod in Kubernetes. These
-        secrets are in addition to the ones defined in `METAFLOW_KUBERNETES_SECRETS`
-        in Metaflow configuration.
-    node_selector: Union[Dict[str,str], str], optional, default None
-        Kubernetes node selector(s) to apply to the pod running the task.
-        Can be passed in as a comma separated string of values e.g.
-        'kubernetes.io/os=linux,kubernetes.io/arch=amd64' or as a dictionary
-        {'kubernetes.io/os': 'linux', 'kubernetes.io/arch': 'amd64'}
-    namespace : str, default METAFLOW_KUBERNETES_NAMESPACE
-        Kubernetes namespace to use when launching pod in Kubernetes.
-    gpu : int, optional, default None
-        Number of GPUs required for this step. A value of zero implies that
-        the scheduled node should not have GPUs.
-    gpu_vendor : str, default KUBERNETES_GPU_VENDOR
-        The vendor of the GPUs to be used for this step.
-    tolerations : List[str], default []
-        The default is extracted from METAFLOW_KUBERNETES_TOLERATIONS.
-        Kubernetes tolerations to use when launching pod in Kubernetes.
-    labels: Dict[str, str], default: METAFLOW_KUBERNETES_LABELS
-        Kubernetes labels to use when launching pod in Kubernetes.
-    annotations: Dict[str, str], default: METAFLOW_KUBERNETES_ANNOTATIONS
-        Kubernetes annotations to use when launching pod in Kubernetes.
-    use_tmpfs : bool, default False
-        This enables an explicit tmpfs mount for this step.
-    tmpfs_tempdir : bool, default True
-        sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
-    tmpfs_size : int, optional, default: None
-        The value for the size (in MiB) of the tmpfs mount for this step.
-        This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
-        memory allocated for this step.
-    tmpfs_path : str, optional, default /metaflow_temp
-        Path to tmpfs mount for this step.
-    persistent_volume_claims : Dict[str, str], optional, default None
-        A map (dictionary) of persistent volumes to be mounted to the pod for this step. The map is from persistent
-        volumes to the path to which the volume is to be mounted, e.g., `{'pvc-name': '/path/to/mount/on'}`.
-    shared_memory: int, optional
-        Shared memory size (in MiB) required for this step
-    port: int, optional
-        Port number to specify in the Kubernetes job object
-    compute_pool : str, optional, default None
-        Compute pool to be used for for this step.
-        If not specified, any accessible compute pool within the perimeter is used.
-    hostname_resolution_timeout: int, default 10 * 60
-        Timeout in seconds for the workers tasks in the gang scheduled cluster to resolve the hostname of control task.
-        Only applicable when @parallel is used.
-    qos: str, default: Burstable
-        Quality of Service class to assign to the pod. Supported values are: Guaranteed, Burstable, BestEffort
+    packages : Dict[str, str], default {}
+        Packages to use for this step. The key is the name of the package
+        and the value is the version to use.
+    libraries : Dict[str, str], default {}
+        Supported for backward compatibility. When used with packages, packages will take precedence.
+    python : str, optional, default None
+        Version of Python to use, e.g. '3.7.4'. A default value of None implies
+        that the version used will correspond to the version of the Python interpreter used to start the run.
+    disabled : bool, default False
+        If set to True, disables @conda.
     """
     ...

-def nim(*, models: "list[NIM]", backend: str, queue_timeout: int) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+@typing.overload
+def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+    ...
+
+@typing.overload
+def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+    ...
+
+def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
     """
-    This decorator is used to run NIM containers in Metaflow tasks as sidecars.
+    Specifies the Conda environment for the step.
+
+    Information in this decorator will augment any
+    attributes set in the `@conda_base` flow-level decorator. Hence,
+    you can use `@conda_base` to set packages required by all
+    steps and use `@conda` to specify step-specific overrides.
+
+
+    Parameters
+    ----------
+    packages : Dict[str, str], default {}
+        Packages to use for this step. The key is the name of the package
+        and the value is the version to use.
+    libraries : Dict[str, str], default {}
+        Supported for backward compatibility. When used with packages, packages will take precedence.
+    python : str, optional, default None
+        Version of Python to use, e.g. '3.7.4'. A default value of None implies
+        that the version used will correspond to the version of the Python interpreter used to start the run.
+    disabled : bool, default False
+        If set to True, disables @conda.
+    """
+    ...
+
+def ollama(*, models: "list[Ollama]", backend: str) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+    """
+    This decorator is used to run Ollama APIs as Metaflow task sidecars.

     User code call
     -----------
-    @nim(
+    @ollama(
         models=['meta/llama3-8b-instruct', 'meta/llama3-70b-instruct'],
-        backend='managed'
+        backend='local'
     )

     Valid backend options
     ---------------------
-    - 'managed': Outerbounds selects a compute provider based on the model.
+    - 'local': Run as a separate process on the local task machine.
+    - (TODO) 'managed': Outerbounds hosts and selects compute provider.
+    - (TODO) 'remote': Spin up separate instance to serve Ollama models.

     Valid model options
     ----------------
-    - 'meta/llama3-8b-instruct': 8B parameter model
-    - 'meta/llama3-70b-instruct': 70B parameter model
-    - any model here: https://nvcf.ngc.nvidia.com/functions?filter=nvidia-functions
+    - 'llama3.2'
+    - 'llama3.3'
+    - any model here https://ollama.com/search


     Parameters
     ----------
-    models: list[NIM]
-        List of NIM containers running models in sidecars.
+    models: list[Ollama]
+        List of Ollama containers running models in sidecars.
     backend: str
-        Compute provider to run the NIM container.
-    queue_timeout : int
-        Time to keep the job in NVCF's queue.
+        Determines where and how to run the Ollama process.
+    """
+    ...
+
+@typing.overload
+def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+    """
+    Decorator prototype for all step decorators. This function gets specialized
+    and imported for all decorators types by _import_plugin_decorators().
+    """
+    ...
+
+@typing.overload
+def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+    ...
+
+def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
+    """
+    Decorator prototype for all step decorators. This function gets specialized
+    and imported for all decorators types by _import_plugin_decorators().
+    """
+    ...
+
+@typing.overload
+def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+    """
+    Specifies environment variables to be set prior to the execution of a step.
+
+
+    Parameters
+    ----------
+    vars : Dict[str, str], default {}
+        Dictionary of environment variables to set.
+    """
+    ...
+
+@typing.overload
+def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+    ...
+
+@typing.overload
+def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+    ...
+
+def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
+    """
+    Specifies environment variables to be set prior to the execution of a step.
+
+
+    Parameters
+    ----------
+    vars : Dict[str, str], default {}
+        Dictionary of environment variables to set.
     """
     ...

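The `@conda` and `@environment` stubs that land in this position are the standard Metaflow step decorators; a brief usage sketch matching the signatures above (the flow name and the package pins are hypothetical):

    from metaflow import FlowSpec, step, conda, environment

    class CondaEnvDemoFlow(FlowSpec):  # hypothetical flow for illustration

        @environment(vars={"TOKENIZERS_PARALLELISM": "false"})  # plain env vars for the step
        @conda(packages={"pandas": "2.2.2"}, python="3.11.0")   # versions here are illustrative
        @step
        def start(self):
            import pandas as pd  # resolved from the step-specific Conda environment
            print(pd.__version__)
            self.next(self.end)

        @step
        def end(self):
            pass

    if __name__ == "__main__":
        CondaEnvDemoFlow()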
@@ -451,294 +658,165 @@ def checkpoint(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None
     ...

 @typing.overload
-def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
     """
-    Internal decorator to support Fast bakery
+    Specifies secrets to be retrieved and injected as environment variables prior to
+    the execution of a step.
+
+
+    Parameters
+    ----------
+    sources : List[Union[str, Dict[str, Any]]], default: []
+        List of secret specs, defining how the secrets are to be retrieved
     """
     ...

 @typing.overload
-def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
     ...

-def fast_bakery_internal(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
-    """
-    Internal decorator to support Fast bakery
-    """
+@typing.overload
+def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
     ...

-def huggingface_hub(*, temp_dir_root: typing.Optional[str] = None, load: typing.Union[typing.List[str], typing.List[typing.Tuple[typing.Dict, str]], typing.List[typing.Tuple[str, str]], typing.List[typing.Dict], None]) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []):
     """
-    Decorator that helps cache, version and store models/datasets from huggingface hub.
+    Specifies secrets to be retrieved and injected as environment variables prior to
+    the execution of a step.


     Parameters
     ----------
-    temp_dir_root : str, optional
-        The root directory that will hold the temporary directory where objects will be downloaded.
-
-    load: Union[List[str], List[Tuple[Dict, str]], List[Tuple[str, str]], List[Dict], None]
-        The list of repos (models/datasets) to load.
-
-        Loaded repos can be accessed via `current.huggingface_hub.loaded`. If load is set, then the following happens:
-
-        - If repo (model/dataset) is not found in the datastore:
-            - Downloads the repo from Hugging Face Hub to a temporary directory (or uses specified path) for local access
-            - Stores it in Metaflow's datastore (s3/gcs/azure etc.) with a unique name based on repo_type/repo_id
-            - All HF models loaded for a `@step` will be cached separately under flow/step/namespace.
-
-        - If repo is found in the datastore:
-            - Loads it directly from datastore to local path (can be temporary directory or specified path)
+    sources : List[Union[str, Dict[str, Any]]], default: []
+        List of secret specs, defining how the secrets are to be retrieved
     """
     ...

 @typing.overload
-def model(*, load: typing.Union[typing.List[str], str, typing.List[typing.Tuple[str, typing.Optional[str]]]] = None, temp_dir_root: str = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
     """
-    Enables loading / saving of models within a step.
+    Specifies the PyPI packages for the step.

+    Information in this decorator will augment any
+    attributes set in the `@pyi_base` flow-level decorator. Hence,
+    you can use `@pypi_base` to set packages required by all
+    steps and use `@pypi` to specify step-specific overrides.


     Parameters
     ----------
-    load : Union[List[str],str,List[Tuple[str,Union[str,None]]]], default: None
-        Artifact name/s referencing the models/checkpoints to load. Artifact names refer to the names of the instance variables set to `self`.
-        These artifact names give to `load` be reference objects or reference `key` string's from objects created by:
-        - `current.checkpoint`
-        - `current.model`
-        - `current.huggingface_hub`
-
-        If a list of tuples is provided, the first element is the artifact name and the second element is the path the artifact needs be unpacked on
-        the local filesystem. If the second element is None, the artifact will be unpacked in the current working directory.
-        If a string is provided, then the artifact corresponding to that name will be loaded in the current working directory.
-
-    temp_dir_root : str, default: None
-        The root directory under which `current.model.loaded` will store loaded models
+    packages : Dict[str, str], default: {}
+        Packages to use for this step. The key is the name of the package
+        and the value is the version to use.
+    python : str, optional, default: None
+        Version of Python to use, e.g. '3.7.4'. A default value of None implies
+        that the version used will correspond to the version of the Python interpreter used to start the run.
     """
     ...

 @typing.overload
-def model(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
     ...

 @typing.overload
-def model(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
     ...

-def model(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, load: typing.Union[typing.List[str], str, typing.List[typing.Tuple[str, typing.Optional[str]]]] = None, temp_dir_root: str = None):
+def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
     """
-    Enables loading / saving of models within a step.
+    Specifies the PyPI packages for the step.

+    Information in this decorator will augment any
+    attributes set in the `@pyi_base` flow-level decorator. Hence,
+    you can use `@pypi_base` to set packages required by all
+    steps and use `@pypi` to specify step-specific overrides.


     Parameters
     ----------
-    load : Union[List[str],str,List[Tuple[str,Union[str,None]]]], default: None
-        Artifact name/s referencing the models/checkpoints to load. Artifact names refer to the names of the instance variables set to `self`.
-        These artifact names give to `load` be reference objects or reference `key` string's from objects created by:
-        - `current.checkpoint`
-        - `current.model`
-        - `current.huggingface_hub`
-
-        If a list of tuples is provided, the first element is the artifact name and the second element is the path the artifact needs be unpacked on
-        the local filesystem. If the second element is None, the artifact will be unpacked in the current working directory.
-        If a string is provided, then the artifact corresponding to that name will be loaded in the current working directory.
-
-    temp_dir_root : str, default: None
-        The root directory under which `current.model.loaded` will store loaded models
+    packages : Dict[str, str], default: {}
+        Packages to use for this step. The key is the name of the package
+        and the value is the version to use.
+    python : str, optional, default: None
+        Version of Python to use, e.g. '3.7.4'. A default value of None implies
+        that the version used will correspond to the version of the Python interpreter used to start the run.
     """
     ...

-@typing.overload
-def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
-    """
-    Decorator prototype for all step decorators. This function gets specialized
-    and imported for all decorators types by _import_plugin_decorators().
+def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = 'KUBERNETES_IMAGE_PULL_POLICY', service_account: str = 'METAFLOW_KUBERNETES_SERVICE_ACCOUNT', secrets: typing.Optional[typing.List[str]] = None, node_selector: typing.Union[typing.Dict[str, str], str, None] = None, namespace: str = 'METAFLOW_KUBERNETES_NAMESPACE', gpu: typing.Optional[int] = None, gpu_vendor: str = 'KUBERNETES_GPU_VENDOR', tolerations: typing.List[str] = [], labels: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_LABELS', annotations: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_ANNOTATIONS', use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = '/metaflow_temp', persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None, hostname_resolution_timeout: int = 600, qos: str = 'Burstable') -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
     """
-    ...
-
-@typing.overload
-def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
-    ...
-
-def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
-    """
-    Decorator prototype for all step decorators. This function gets specialized
-    and imported for all decorators types by _import_plugin_decorators().
-    """
-    ...
-
-def nvidia(*, gpu: int, gpu_type: str, queue_timeout: int) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
-    """
-    Specifies that this step should execute on DGX cloud.
-
-
-    Parameters
-    ----------
-    gpu : int
-        Number of GPUs to use.
-    gpu_type : str
-        Type of Nvidia GPU to use.
-    queue_timeout : int
-        Time to keep the job in NVCF's queue.
-    """
-    ...
-
-@typing.overload
-def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
-    """
-    Specifies environment variables to be set prior to the execution of a step.
-
-
-    Parameters
-    ----------
-    vars : Dict[str, str], default {}
-        Dictionary of environment variables to set.
-    """
-    ...
-
-@typing.overload
-def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
-    ...
-
-@typing.overload
-def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
-    ...
-
-def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
-    """
-    Specifies environment variables to be set prior to the execution of a step.
-
-
-    Parameters
-    ----------
-    vars : Dict[str, str], default {}
-        Dictionary of environment variables to set.
-    """
-    ...
-
-@typing.overload
-def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
-    """
-    Specifies a timeout for your step.
-
-    This decorator is useful if this step may hang indefinitely.
-
-    This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
-    A timeout is considered to be an exception thrown by the step. It will cause the step to be
-    retried if needed and the exception will be caught by the `@catch` decorator, if present.
-
-    Note that all the values specified in parameters are added together so if you specify
-    60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
-
-
-    Parameters
-    ----------
-    seconds : int, default 0
-        Number of seconds to wait prior to timing out.
-    minutes : int, default 0
-        Number of minutes to wait prior to timing out.
-    hours : int, default 0
-        Number of hours to wait prior to timing out.
-    """
-    ...
-
-@typing.overload
-def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
-    ...
-
-@typing.overload
-def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
-    ...
-
-def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
-    """
-    Specifies a timeout for your step.
-
-    This decorator is useful if this step may hang indefinitely.
-
-    This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
-    A timeout is considered to be an exception thrown by the step. It will cause the step to be
-    retried if needed and the exception will be caught by the `@catch` decorator, if present.
-
-    Note that all the values specified in parameters are added together so if you specify
-    60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
-
-
-    Parameters
-    ----------
-    seconds : int, default 0
-        Number of seconds to wait prior to timing out.
-    minutes : int, default 0
-        Number of minutes to wait prior to timing out.
-    hours : int, default 0
-        Number of hours to wait prior to timing out.
-    """
-    ...
-
-def app_deploy(*, app_port: int, app_name: str) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
-    """
-    Specifies that this step is used to deploy an instance of the app.
-    Requires that self.app_name, self.app_port, self.entrypoint and self.deployDir is set.
-
-
-    Parameters
-    ----------
-    app_port : int
-        Number of GPUs to use.
-    app_name : str
-        Name of the app to deploy.
-    """
-    ...
-
-@typing.overload
-def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
-    """
697
- Specifies the PyPI packages for the step.
698
-
699
- Information in this decorator will augment any
700
- attributes set in the `@pyi_base` flow-level decorator. Hence,
701
- you can use `@pypi_base` to set packages required by all
702
- steps and use `@pypi` to specify step-specific overrides.
703
-
704
-
705
- Parameters
706
- ----------
707
- packages : Dict[str, str], default: {}
708
- Packages to use for this step. The key is the name of the package
709
- and the value is the version to use.
710
- python : str, optional, default: None
711
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
712
- that the version used will correspond to the version of the Python interpreter used to start the run.
713
- """
714
- ...
715
-
716
- @typing.overload
717
- def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
718
- ...
719
-
720
- @typing.overload
721
- def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
722
- ...
723
-
724
- def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
725
- """
726
- Specifies the PyPI packages for the step.
727
-
728
- Information in this decorator will augment any
729
- attributes set in the `@pyi_base` flow-level decorator. Hence,
730
- you can use `@pypi_base` to set packages required by all
731
- steps and use `@pypi` to specify step-specific overrides.
748
+ Specifies that this step should execute on Kubernetes.
732
749
 
733
750
 
734
751
  Parameters
735
752
  ----------
736
- packages : Dict[str, str], default: {}
737
- Packages to use for this step. The key is the name of the package
738
- and the value is the version to use.
739
- python : str, optional, default: None
740
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
741
- that the version used will correspond to the version of the Python interpreter used to start the run.
753
+ cpu : int, default 1
754
+ Number of CPUs required for this step. If `@resources` is
755
+ also present, the maximum value from all decorators is used.
756
+ memory : int, default 4096
757
+ Memory size (in MB) required for this step. If
758
+ `@resources` is also present, the maximum value from all decorators is
759
+ used.
760
+ disk : int, default 10240
761
+ Disk size (in MB) required for this step. If
762
+ `@resources` is also present, the maximum value from all decorators is
763
+ used.
764
+ image : str, optional, default None
765
+ Docker image to use when launching on Kubernetes. If not specified, and
766
+ METAFLOW_KUBERNETES_CONTAINER_IMAGE is specified, that image is used. If
767
+ not, a default Docker image mapping to the current version of Python is used.
768
+ image_pull_policy: str, default KUBERNETES_IMAGE_PULL_POLICY
769
+ If given, the imagePullPolicy to be applied to the Docker image of the step.
770
+ service_account : str, default METAFLOW_KUBERNETES_SERVICE_ACCOUNT
771
+ Kubernetes service account to use when launching pod in Kubernetes.
772
+ secrets : List[str], optional, default None
773
+ Kubernetes secrets to use when launching pod in Kubernetes. These
774
+ secrets are in addition to the ones defined in `METAFLOW_KUBERNETES_SECRETS`
775
+ in Metaflow configuration.
776
+ node_selector: Union[Dict[str,str], str], optional, default None
777
+ Kubernetes node selector(s) to apply to the pod running the task.
778
+ Can be passed in as a comma separated string of values e.g.
779
+ 'kubernetes.io/os=linux,kubernetes.io/arch=amd64' or as a dictionary
780
+ {'kubernetes.io/os': 'linux', 'kubernetes.io/arch': 'amd64'}
781
+ namespace : str, default METAFLOW_KUBERNETES_NAMESPACE
782
+ Kubernetes namespace to use when launching pod in Kubernetes.
783
+ gpu : int, optional, default None
784
+ Number of GPUs required for this step. A value of zero implies that
785
+ the scheduled node should not have GPUs.
786
+ gpu_vendor : str, default KUBERNETES_GPU_VENDOR
787
+ The vendor of the GPUs to be used for this step.
788
+ tolerations : List[str], default []
789
+ The default is extracted from METAFLOW_KUBERNETES_TOLERATIONS.
790
+ Kubernetes tolerations to use when launching pod in Kubernetes.
791
+ labels: Dict[str, str], default: METAFLOW_KUBERNETES_LABELS
792
+ Kubernetes labels to use when launching pod in Kubernetes.
793
+ annotations: Dict[str, str], default: METAFLOW_KUBERNETES_ANNOTATIONS
794
+ Kubernetes annotations to use when launching pod in Kubernetes.
795
+ use_tmpfs : bool, default False
796
+ This enables an explicit tmpfs mount for this step.
797
+ tmpfs_tempdir : bool, default True
798
+ Sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
799
+ tmpfs_size : int, optional, default: None
800
+ The value for the size (in MiB) of the tmpfs mount for this step.
801
+ This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
802
+ memory allocated for this step.
803
+ tmpfs_path : str, optional, default /metaflow_temp
804
+ Path to tmpfs mount for this step.
805
+ persistent_volume_claims : Dict[str, str], optional, default None
806
+ A map (dictionary) of persistent volumes to be mounted to the pod for this step. The map is from persistent
807
+ volumes to the path to which the volume is to be mounted, e.g., `{'pvc-name': '/path/to/mount/on'}`.
808
+ shared_memory: int, optional
809
+ Shared memory size (in MiB) required for this step
810
+ port: int, optional
811
+ Port number to specify in the Kubernetes job object
812
+ compute_pool : str, optional, default None
813
+ Compute pool to be used for this step.
814
+ If not specified, any accessible compute pool within the perimeter is used.
815
+ hostname_resolution_timeout: int, default 10 * 60
816
+ Timeout in seconds for the worker tasks in the gang-scheduled cluster to resolve the hostname of the control task.
817
+ Only applicable when @parallel is used.
818
+ qos: str, default: Burstable
819
+ Quality of Service class to assign to the pod. Supported values are: Guaranteed, Burstable, BestEffort
742
820
  """
743
821
  ...
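For context, a minimal sketch of how the `@kubernetes` step decorator documented above is typically applied. The flow name and resource values are illustrative, and actually running the step on a cluster assumes a Metaflow deployment with Kubernetes configured; memory and disk are in MB per the docstring.

```
from metaflow import FlowSpec, kubernetes, step


class KubeFlow(FlowSpec):

    # Illustrative resource requests; @resources, if also present, would be
    # merged by taking the maximum of each value.
    @kubernetes(cpu=2, memory=8192, disk=20480)
    @step
    def start(self):
        self.message = "ran on Kubernetes"
        self.next(self.end)

    @step
    def end(self):
        print(self.message)


if __name__ == "__main__":
    KubeFlow()
```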
744
822
 
@@ -822,293 +900,110 @@ def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None]
822
900
  ...
823
901
 
824
902
  @typing.overload
825
- def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
826
- """
827
- Specifies the Conda environment for the step.
828
-
829
- Information in this decorator will augment any
830
- attributes set in the `@conda_base` flow-level decorator. Hence,
831
- you can use `@conda_base` to set packages required by all
832
- steps and use `@conda` to specify step-specific overrides.
833
-
834
-
835
- Parameters
836
- ----------
837
- packages : Dict[str, str], default {}
838
- Packages to use for this step. The key is the name of the package
839
- and the value is the version to use.
840
- libraries : Dict[str, str], default {}
841
- Supported for backward compatibility. When used with packages, packages will take precedence.
842
- python : str, optional, default None
843
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
844
- that the version used will correspond to the version of the Python interpreter used to start the run.
845
- disabled : bool, default False
846
- If set to True, disables @conda.
847
- """
848
- ...
849
-
850
- @typing.overload
851
- def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
852
- ...
853
-
854
- @typing.overload
855
- def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
856
- ...
857
-
858
- def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
859
- """
860
- Specifies the Conda environment for the step.
861
-
862
- Information in this decorator will augment any
863
- attributes set in the `@conda_base` flow-level decorator. Hence,
864
- you can use `@conda_base` to set packages required by all
865
- steps and use `@conda` to specify step-specific overrides.
866
-
867
-
868
- Parameters
869
- ----------
870
- packages : Dict[str, str], default {}
871
- Packages to use for this step. The key is the name of the package
872
- and the value is the version to use.
873
- libraries : Dict[str, str], default {}
874
- Supported for backward compatibility. When used with packages, packages will take precedence.
875
- python : str, optional, default None
876
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
877
- that the version used will correspond to the version of the Python interpreter used to start the run.
878
- disabled : bool, default False
879
- If set to True, disables @conda.
880
- """
881
- ...
882
-
883
- @typing.overload
884
- def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
885
- """
886
- Specifies secrets to be retrieved and injected as environment variables prior to
887
- the execution of a step.
888
-
889
-
890
- Parameters
891
- ----------
892
- sources : List[Union[str, Dict[str, Any]]], default: []
893
- List of secret specs, defining how the secrets are to be retrieved
894
- """
895
- ...
896
-
897
- @typing.overload
898
- def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
899
- ...
900
-
901
- @typing.overload
902
- def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
903
- ...
904
-
905
- def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []):
906
- """
907
- Specifies secrets to be retrieved and injected as environment variables prior to
908
- the execution of a step.
909
-
910
-
911
- Parameters
912
- ----------
913
- sources : List[Union[str, Dict[str, Any]]], default: []
914
- List of secret specs, defining how the secrets are to be retrieved
915
- """
916
- ...
917
-
918
- @typing.overload
919
- def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
920
- """
921
- Specifies the number of times the task corresponding
922
- to a step needs to be retried.
923
-
924
- This decorator is useful for handling transient errors, such as networking issues.
925
- If your task contains operations that can't be retried safely, e.g. database updates,
926
- it is advisable to annotate it with `@retry(times=0)`.
927
-
928
- This can be used in conjunction with the `@catch` decorator. The `@catch`
929
- decorator will execute a no-op task after all retries have been exhausted,
930
- ensuring that the flow execution can continue.
931
-
932
-
933
- Parameters
934
- ----------
935
- times : int, default 3
936
- Number of times to retry this task.
937
- minutes_between_retries : int, default 2
938
- Number of minutes between retries.
939
- """
940
- ...
941
-
942
- @typing.overload
943
- def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
944
- ...
945
-
946
- @typing.overload
947
- def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
948
- ...
949
-
950
- def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
903
+ def model(*, load: typing.Union[typing.List[str], str, typing.List[typing.Tuple[str, typing.Optional[str]]]] = None, temp_dir_root: str = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
951
904
  """
952
- Specifies the number of times the task corresponding
953
- to a step needs to be retried.
905
+ Enables loading / saving of models within a step.
954
906
 
955
- This decorator is useful for handling transient errors, such as networking issues.
956
- If your task contains operations that can't be retried safely, e.g. database updates,
957
- it is advisable to annotate it with `@retry(times=0)`.
958
-
959
- This can be used in conjunction with the `@catch` decorator. The `@catch`
960
- decorator will execute a no-op task after all retries have been exhausted,
961
- ensuring that the flow execution can continue.
962
907
 
963
908
 
964
909
  Parameters
965
910
  ----------
966
- times : int, default 3
967
- Number of times to retry this task.
968
- minutes_between_retries : int, default 2
969
- Number of minutes between retries.
970
- """
971
- ...
972
-
973
- @typing.overload
974
- def trigger_on_finish(*, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
975
- """
976
- Specifies the flow(s) that this flow depends on.
977
-
978
- ```
979
- @trigger_on_finish(flow='FooFlow')
980
- ```
981
- or
982
- ```
983
- @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
984
- ```
985
- This decorator respects the @project decorator and triggers the flow
986
- when upstream runs within the same namespace complete successfully
987
-
988
- Additionally, you can specify project aware upstream flow dependencies
989
- by specifying the fully qualified project_flow_name.
990
- ```
991
- @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
992
- ```
993
- or
994
- ```
995
- @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
996
- ```
997
-
998
- You can also specify just the project or project branch (other values will be
999
- inferred from the current project or project branch):
1000
- ```
1001
- @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1002
- ```
1003
-
1004
- Note that `branch` is typically one of:
1005
- - `prod`
1006
- - `user.bob`
1007
- - `test.my_experiment`
1008
- - `prod.staging`
1009
-
911
+ load : Union[List[str],str,List[Tuple[str,Union[str,None]]]], default: None
912
+ Artifact name/s referencing the models/checkpoints to load. Artifact names refer to the names of the instance variables set to `self`.
913
+ These artifact names given to `load` can be reference objects or reference `key` strings from objects created by:
914
+ - `current.checkpoint`
915
+ - `current.model`
916
+ - `current.huggingface_hub`
1010
917
 
1011
- Parameters
1012
- ----------
1013
- flow : Union[str, Dict[str, str]], optional, default None
1014
- Upstream flow dependency for this flow.
1015
- flows : List[Union[str, Dict[str, str]]], default []
1016
- Upstream flow dependencies for this flow.
1017
- options : Dict[str, Any], default {}
1018
- Backend-specific configuration for tuning eventing behavior.
918
+ If a list of tuples is provided, the first element is the artifact name and the second element is the path to which the artifact is unpacked on
919
+ the local filesystem. If the second element is None, the artifact will be unpacked in the current working directory.
920
+ If a string is provided, then the artifact corresponding to that name will be loaded in the current working directory.
921
+
922
+ temp_dir_root : str, default: None
923
+ The root directory under which `current.model.loaded` will store loaded models
1019
924
  """
1020
925
  ...
1021
926
 
1022
927
  @typing.overload
1023
- def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
928
+ def model(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1024
929
  ...
1025
930
 
1026
- def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
931
+ @typing.overload
932
+ def model(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
933
+ ...
934
+
935
+ def model(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, load: typing.Union[typing.List[str], str, typing.List[typing.Tuple[str, typing.Optional[str]]]] = None, temp_dir_root: str = None):
1027
936
  """
1028
- Specifies the flow(s) that this flow depends on.
1029
-
1030
- ```
1031
- @trigger_on_finish(flow='FooFlow')
1032
- ```
1033
- or
1034
- ```
1035
- @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
1036
- ```
1037
- This decorator respects the @project decorator and triggers the flow
1038
- when upstream runs within the same namespace complete successfully
1039
-
1040
- Additionally, you can specify project aware upstream flow dependencies
1041
- by specifying the fully qualified project_flow_name.
1042
- ```
1043
- @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
1044
- ```
1045
- or
1046
- ```
1047
- @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
1048
- ```
1049
-
1050
- You can also specify just the project or project branch (other values will be
1051
- inferred from the current project or project branch):
1052
- ```
1053
- @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1054
- ```
937
+ Enables loading / saving of models within a step.
1055
938
 
1056
- Note that `branch` is typically one of:
1057
- - `prod`
1058
- - `user.bob`
1059
- - `test.my_experiment`
1060
- - `prod.staging`
1061
939
 
1062
940
 
1063
941
  Parameters
1064
942
  ----------
1065
- flow : Union[str, Dict[str, str]], optional, default None
1066
- Upstream flow dependency for this flow.
1067
- flows : List[Union[str, Dict[str, str]]], default []
1068
- Upstream flow dependencies for this flow.
1069
- options : Dict[str, Any], default {}
1070
- Backend-specific configuration for tuning eventing behavior.
943
+ load : Union[List[str],str,List[Tuple[str,Union[str,None]]]], default: None
944
+ Artifact name/s referencing the models/checkpoints to load. Artifact names refer to the names of the instance variables set to `self`.
945
+ These artifact names given to `load` can be reference objects or reference `key` strings from objects created by:
946
+ - `current.checkpoint`
947
+ - `current.model`
948
+ - `current.huggingface_hub`
949
+
950
+ If a list of tuples is provided, the first element is the artifact name and the second element is the path to which the artifact is unpacked on
951
+ the local filesystem. If the second element is None, the artifact will be unpacked in the current working directory.
952
+ If a string is provided, then the artifact corresponding to that name will be loaded in the current working directory.
953
+
954
+ temp_dir_root : str, default: None
955
+ The root directory under which `current.model.loaded` will store loaded models
1071
956
  """
1072
957
  ...
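A minimal sketch of the `@model` decorator described above. The `current.model.save(...)` call and the exact shape of `current.model.loaded` are assumptions based on the docstring's references to objects created by `current.model`; the flow name and file contents are placeholders.

```
from metaflow import FlowSpec, current, model, step


class ModelFlow(FlowSpec):

    @model
    @step
    def train(self):
        # Write a placeholder "model" file; a real flow would serialize trained weights.
        with open("model.bin", "wb") as f:
            f.write(b"weights")
        # Assumed: current.model.save returns a reference object that can be stored
        # on self and later passed to `load` by artifact name.
        self.model_ref = current.model.save("model.bin")
        self.next(self.score)

    @model(load="model_ref")
    @step
    def score(self):
        # Assumed: current.model.loaded maps the artifact name to the local path
        # where the model was unpacked.
        print("model available at", current.model.loaded["model_ref"])
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    ModelFlow()
```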
1073
958
 
1074
- @typing.overload
1075
- def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
959
+ def nvidia(*, gpu: int, gpu_type: str, queue_timeout: int) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1076
960
  """
1077
- Specifies the PyPI packages for all steps of the flow.
961
+ Specifies that this step should execute on DGX cloud.
1078
962
 
1079
- Use `@pypi_base` to set common packages required by all
1080
- steps and use `@pypi` to specify step-specific overrides.
1081
963
 
1082
964
  Parameters
1083
965
  ----------
1084
- packages : Dict[str, str], default: {}
1085
- Packages to use for this flow. The key is the name of the package
1086
- and the value is the version to use.
1087
- python : str, optional, default: None
1088
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1089
- that the version used will correspond to the version of the Python interpreter used to start the run.
966
+ gpu : int
967
+ Number of GPUs to use.
968
+ gpu_type : str
969
+ Type of Nvidia GPU to use.
970
+ queue_timeout : int
971
+ Time to keep the job in NVCF's queue.
1090
972
  """
1091
973
  ...
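A short illustrative sketch of the `@nvidia` decorator above. The GPU type and queue timeout are placeholder values and assume access to an NVCF/DGX Cloud-backed Metaflow deployment.

```
from metaflow import FlowSpec, nvidia, step


class NvidiaFlow(FlowSpec):

    # Illustrative values: one GPU of a hypothetical type, with a one-hour queue timeout.
    @nvidia(gpu=1, gpu_type="H100", queue_timeout=3600)
    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    NvidiaFlow()
```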
1092
974
 
1093
- @typing.overload
1094
- def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1095
- ...
1096
-
1097
- def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
975
+ def project(*, name: str, branch: typing.Optional[str] = None, production: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1098
976
  """
1099
- Specifies the PyPI packages for all steps of the flow.
977
+ Specifies what flows belong to the same project.
978
+
979
+ A project-specific namespace is created for all flows that
980
+ use the same `@project(name)`.
1100
981
 
1101
- Use `@pypi_base` to set common packages required by all
1102
- steps and use `@pypi` to specify step-specific overrides.
1103
982
 
1104
983
  Parameters
1105
984
  ----------
1106
- packages : Dict[str, str], default: {}
1107
- Packages to use for this flow. The key is the name of the package
1108
- and the value is the version to use.
1109
- python : str, optional, default: None
1110
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1111
- that the version used will correspond to the version of the Python interpreter used to start the run.
985
+ name : str
986
+ Project name. Make sure that the name is unique amongst all
987
+ projects that use the same production scheduler. The name may
988
+ contain only lowercase alphanumeric characters and underscores.
989
+
990
+ branch : Optional[str], default None
991
+ The branch to use. If not specified, the branch is set to
992
+ `user.<username>` unless `production` is set to `True`. This can
993
+ also be set on the command line using `--branch` as a top-level option.
994
+ It is an error to specify `branch` in the decorator and on the command line.
995
+
996
+ production : bool, default False
997
+ Whether or not the branch is the production branch. This can also be set on the
998
+ command line using `--production` as a top-level option. It is an error to specify
999
+ `production` in the decorator and on the command line.
1000
+ The project branch name will be:
1001
+ - if `branch` is specified:
1002
+ - if `production` is True: `prod.<branch>`
1003
+ - if `production` is False: `test.<branch>`
1004
+ - if `branch` is not specified:
1005
+ - if `production` is True: `prod`
1006
+ - if `production` is False: `user.<username>`
1112
1007
  """
1113
1008
  ...
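A minimal sketch of `@project` as described above; the project name is illustrative.

```
from metaflow import FlowSpec, project, step


@project(name="fraud_detection")
class TrainingFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    TrainingFlow()
```

Per the docstring's branch rules, running this flow without options places it in the `user.<username>` branch of `fraud_detection`, while passing `--production` as a top-level option moves it to the `prod` branch.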
1114
1009
 
@@ -1155,6 +1050,49 @@ def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, expone
1155
1050
  """
1156
1051
  ...
1157
1052
 
1053
+ def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1054
+ """
1055
+ The `@airflow_external_task_sensor` decorator attaches an Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
1056
+ This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as a flow decorator. Adding more than one decorator will ensure that the `start` step starts only after all sensors finish.
1057
+
1058
+
1059
+ Parameters
1060
+ ----------
1061
+ timeout : int
1062
+ Time, in seconds before the task times out and fails. (Default: 3600)
1063
+ poke_interval : int
1064
+ Time in seconds that the job should wait in between each try. (Default: 60)
1065
+ mode : str
1066
+ How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
1067
+ exponential_backoff : bool
1068
+ Allow progressively longer waits between pokes by using an exponential backoff algorithm. (Default: True)
1069
+ pool : str
1070
+ the slot pool this task should run in,
1071
+ slot pools are a way to limit concurrency for certain tasks. (Default: None)
1072
+ soft_fail : bool
1073
+ Set to true to mark the task as SKIPPED on failure. (Default: False)
1074
+ name : str
1075
+ Name of the sensor on Airflow
1076
+ description : str
1077
+ Description of sensor in the Airflow UI
1078
+ external_dag_id : str
1079
+ The dag_id that contains the task you want to wait for.
1080
+ external_task_ids : List[str]
1081
+ The list of task_ids that you want to wait for.
1082
+ If None (default value) the sensor waits for the DAG. (Default: None)
1083
+ allowed_states : List[str]
1084
+ Iterable of allowed states, (Default: ['success'])
1085
+ failed_states : List[str]
1086
+ Iterable of failed or dis-allowed states. (Default: None)
1087
+ execution_delta : datetime.timedelta
1088
+ time difference with the previous execution to look at,
1089
+ the default is the same logical date as the current task or DAG. (Default: None)
1090
+ check_existence: bool
1091
+ Set to True to check if the external task exists or check if
1092
+ the DAG to wait for exists. (Default: True)
1093
+ """
1094
+ ...
1095
+
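A hedged sketch of the `@airflow_external_task_sensor` flow decorator above. The DAG id is hypothetical, and parameters omitted here are assumed to fall back to the defaults noted in the docstring; as stated there, the sensor only takes effect when the flow is compiled with `airflow create`.

```
from metaflow import FlowSpec, airflow_external_task_sensor, step


# "upstream_etl" is a hypothetical external DAG id.
@airflow_external_task_sensor(
    name="wait_for_upstream_etl",
    description="Block start until the upstream DAG succeeds",
    external_dag_id="upstream_etl",
    external_task_ids=None,   # wait on the whole DAG
    timeout=3600,
    poke_interval=60,
    mode="poke",
)
class SensorFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    SensorFlow()
```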
1158
1096
  def with_artifact_store(f: typing.Optional[typing.Type[FlowSpecDerived]] = None):
1159
1097
  """
1160
1098
  Allows setting external datastores to save data for the
@@ -1231,170 +1169,82 @@ def with_artifact_store(f: typing.Optional[typing.Type[FlowSpecDerived]] = None)
1231
1169
  }):
1232
1170
  with Checkpoint() as cp:
1233
1171
  latest = cp.list(
1234
- task=run["start"].task
1235
- )[0]
1236
- print(latest)
1237
- cp.load(
1238
- latest,
1239
- "test-checkpoints"
1240
- )
1241
-
1242
- task = Task("TorchTuneFlow/8484/train/53673")
1243
- with artifact_store_from(run=run, config={
1244
- "client_params": {
1245
- "aws_access_key_id": os.environ.get("MY_CUSTOM_ACCESS_KEY"),
1246
- "aws_secret_access_key": os.environ.get("MY_CUSTOM_SECRET_KEY"),
1247
- },
1248
- }):
1249
- load_model(
1250
- task.data.model_ref,
1251
- "test-models"
1252
- )
1253
- ```
1254
- Parameters:
1255
- ----------
1256
-
1257
- type: str
1258
- The type of the datastore. Can be one of 's3', 'gcs', 'azure' or any other supported metaflow Datastore.
1259
-
1260
- config: dict or Callable
1261
- Dictionary of configuration options for the datastore. The following keys are required:
1262
- - root: The root path in the datastore where the data will be saved. (needs to be in the format expected by the datastore)
1263
- - example: 's3://bucket-name/path/to/root'
1264
- - example: 'gs://bucket-name/path/to/root'
1265
- - example: 'https://myblockacc.blob.core.windows.net/metaflow/'
1266
- - role_arn (optional): AWS IAM role to access s3 bucket (only when `type` is 's3')
1267
- - session_vars (optional): AWS session variables to access s3 bucket (only when `type` is 's3')
1268
- - client_params (optional): AWS client parameters to access s3 bucket (only when `type` is 's3')
1269
- """
1270
- ...
1271
-
1272
- def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1273
- """
1274
- The `@airflow_external_task_sensor` decorator attaches a Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
1275
- This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as a flow decorators. Adding more than one decorator will ensure that `start` step starts only after all sensors finish.
1276
-
1277
-
1278
- Parameters
1279
- ----------
1280
- timeout : int
1281
- Time, in seconds before the task times out and fails. (Default: 3600)
1282
- poke_interval : int
1283
- Time in seconds that the job should wait in between each try. (Default: 60)
1284
- mode : str
1285
- How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
1286
- exponential_backoff : bool
1287
- allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
1288
- pool : str
1289
- the slot pool this task should run in,
1290
- slot pools are a way to limit concurrency for certain tasks. (Default:None)
1291
- soft_fail : bool
1292
- Set to true to mark the task as SKIPPED on failure. (Default: False)
1293
- name : str
1294
- Name of the sensor on Airflow
1295
- description : str
1296
- Description of sensor in the Airflow UI
1297
- external_dag_id : str
1298
- The dag_id that contains the task you want to wait for.
1299
- external_task_ids : List[str]
1300
- The list of task_ids that you want to wait for.
1301
- If None (default value) the sensor waits for the DAG. (Default: None)
1302
- allowed_states : List[str]
1303
- Iterable of allowed states, (Default: ['success'])
1304
- failed_states : List[str]
1305
- Iterable of failed or dis-allowed states. (Default: None)
1306
- execution_delta : datetime.timedelta
1307
- time difference with the previous execution to look at,
1308
- the default is the same logical date as the current task or DAG. (Default: None)
1309
- check_existence: bool
1310
- Set to True to check if the external task exists or check if
1311
- the DAG to wait for exists. (Default: True)
1312
- """
1313
- ...
1314
-
1315
- def project(*, name: str, branch: typing.Optional[str] = None, production: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1316
- """
1317
- Specifies what flows belong to the same project.
1318
-
1319
- A project-specific namespace is created for all flows that
1320
- use the same `@project(name)`.
1321
-
1172
+ task=run["start"].task
1173
+ )[0]
1174
+ print(latest)
1175
+ cp.load(
1176
+ latest,
1177
+ "test-checkpoints"
1178
+ )
1322
1179
 
1323
- Parameters
1180
+ task = Task("TorchTuneFlow/8484/train/53673")
1181
+ with artifact_store_from(run=run, config={
1182
+ "client_params": {
1183
+ "aws_access_key_id": os.environ.get("MY_CUSTOM_ACCESS_KEY"),
1184
+ "aws_secret_access_key": os.environ.get("MY_CUSTOM_SECRET_KEY"),
1185
+ },
1186
+ }):
1187
+ load_model(
1188
+ task.data.model_ref,
1189
+ "test-models"
1190
+ )
1191
+ ```
1192
+ Parameters:
1324
1193
  ----------
1325
- name : str
1326
- Project name. Make sure that the name is unique amongst all
1327
- projects that use the same production scheduler. The name may
1328
- contain only lowercase alphanumeric characters and underscores.
1329
1194
 
1330
- branch : Optional[str], default None
1331
- The branch to use. If not specified, the branch is set to
1332
- `user.<username>` unless `production` is set to `True`. This can
1333
- also be set on the command line using `--branch` as a top-level option.
1334
- It is an error to specify `branch` in the decorator and on the command line.
1195
+ type: str
1196
+ The type of the datastore. Can be one of 's3', 'gcs', 'azure' or any other supported metaflow Datastore.
1335
1197
 
1336
- production : bool, default False
1337
- Whether or not the branch is the production branch. This can also be set on the
1338
- command line using `--production` as a top-level option. It is an error to specify
1339
- `production` in the decorator and on the command line.
1340
- The project branch name will be:
1341
- - if `branch` is specified:
1342
- - if `production` is True: `prod.<branch>`
1343
- - if `production` is False: `test.<branch>`
1344
- - if `branch` is not specified:
1345
- - if `production` is True: `prod`
1346
- - if `production` is False: `user.<username>`
1198
+ config: dict or Callable
1199
+ Dictionary of configuration options for the datastore. The following keys are required:
1200
+ - root: The root path in the datastore where the data will be saved. (needs to be in the format expected by the datastore)
1201
+ - example: 's3://bucket-name/path/to/root'
1202
+ - example: 'gs://bucket-name/path/to/root'
1203
+ - example: 'https://myblockacc.blob.core.windows.net/metaflow/'
1204
+ - role_arn (optional): AWS IAM role to access s3 bucket (only when `type` is 's3')
1205
+ - session_vars (optional): AWS session variables to access s3 bucket (only when `type` is 's3')
1206
+ - client_params (optional): AWS client parameters to access s3 bucket (only when `type` is 's3')
1347
1207
  """
1348
1208
  ...
1349
1209
 
1350
1210
  @typing.overload
1351
- def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1211
+ def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1352
1212
  """
1353
- Specifies the times when the flow should be run when running on a
1354
- production scheduler.
1213
+ Specifies the PyPI packages for all steps of the flow.
1355
1214
 
1215
+ Use `@pypi_base` to set common packages required by all
1216
+ steps and use `@pypi` to specify step-specific overrides.
1356
1217
 
1357
1218
  Parameters
1358
1219
  ----------
1359
- hourly : bool, default False
1360
- Run the workflow hourly.
1361
- daily : bool, default True
1362
- Run the workflow daily.
1363
- weekly : bool, default False
1364
- Run the workflow weekly.
1365
- cron : str, optional, default None
1366
- Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1367
- specified by this expression.
1368
- timezone : str, optional, default None
1369
- Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1370
- which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1220
+ packages : Dict[str, str], default: {}
1221
+ Packages to use for this flow. The key is the name of the package
1222
+ and the value is the version to use.
1223
+ python : str, optional, default: None
1224
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1225
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1371
1226
  """
1372
1227
  ...
1373
1228
 
1374
1229
  @typing.overload
1375
- def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1230
+ def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1376
1231
  ...
1377
1232
 
1378
- def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
1233
+ def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
1379
1234
  """
1380
- Specifies the times when the flow should be run when running on a
1381
- production scheduler.
1235
+ Specifies the PyPI packages for all steps of the flow.
1382
1236
 
1237
+ Use `@pypi_base` to set common packages required by all
1238
+ steps and use `@pypi` to specify step-specific overrides.
1383
1239
 
1384
1240
  Parameters
1385
1241
  ----------
1386
- hourly : bool, default False
1387
- Run the workflow hourly.
1388
- daily : bool, default True
1389
- Run the workflow daily.
1390
- weekly : bool, default False
1391
- Run the workflow weekly.
1392
- cron : str, optional, default None
1393
- Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1394
- specified by this expression.
1395
- timezone : str, optional, default None
1396
- Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1397
- which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1242
+ packages : Dict[str, str], default: {}
1243
+ Packages to use for this flow. The key is the name of the package
1244
+ and the value is the version to use.
1245
+ python : str, optional, default: None
1246
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1247
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1398
1248
  """
1399
1249
  ...
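A minimal sketch of `@pypi_base` as described above; the package name, versions, and flow name are illustrative.

```
from metaflow import FlowSpec, pypi_base, step


# Illustrative package pins; every step of the flow gets this environment.
@pypi_base(packages={"pandas": "2.2.2"}, python="3.11.5")
class PandasFlow(FlowSpec):

    @step
    def start(self):
        import pandas as pd
        self.n_rows = len(pd.DataFrame({"a": [1, 2, 3]}))
        self.next(self.end)

    @step
    def end(self):
        print(self.n_rows)


if __name__ == "__main__":
    PandasFlow()
```

The decorator takes effect when the flow is run with the PyPI environment enabled, e.g. `python pandas_flow.py --environment=pypi run`.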
1400
1250
 
@@ -1491,6 +1341,107 @@ def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: t
1491
1341
  """
1492
1342
  ...
1493
1343
 
1344
+ @typing.overload
1345
+ def trigger_on_finish(*, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1346
+ """
1347
+ Specifies the flow(s) that this flow depends on.
1348
+
1349
+ ```
1350
+ @trigger_on_finish(flow='FooFlow')
1351
+ ```
1352
+ or
1353
+ ```
1354
+ @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
1355
+ ```
1356
+ This decorator respects the @project decorator and triggers the flow
1357
+ when upstream runs within the same namespace complete successfully
1358
+
1359
+ Additionally, you can specify project aware upstream flow dependencies
1360
+ by specifying the fully qualified project_flow_name.
1361
+ ```
1362
+ @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
1363
+ ```
1364
+ or
1365
+ ```
1366
+ @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
1367
+ ```
1368
+
1369
+ You can also specify just the project or project branch (other values will be
1370
+ inferred from the current project or project branch):
1371
+ ```
1372
+ @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1373
+ ```
1374
+
1375
+ Note that `branch` is typically one of:
1376
+ - `prod`
1377
+ - `user.bob`
1378
+ - `test.my_experiment`
1379
+ - `prod.staging`
1380
+
1381
+
1382
+ Parameters
1383
+ ----------
1384
+ flow : Union[str, Dict[str, str]], optional, default None
1385
+ Upstream flow dependency for this flow.
1386
+ flows : List[Union[str, Dict[str, str]]], default []
1387
+ Upstream flow dependencies for this flow.
1388
+ options : Dict[str, Any], default {}
1389
+ Backend-specific configuration for tuning eventing behavior.
1390
+ """
1391
+ ...
1392
+
1393
+ @typing.overload
1394
+ def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1395
+ ...
1396
+
1397
+ def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
1398
+ """
1399
+ Specifies the flow(s) that this flow depends on.
1400
+
1401
+ ```
1402
+ @trigger_on_finish(flow='FooFlow')
1403
+ ```
1404
+ or
1405
+ ```
1406
+ @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
1407
+ ```
1408
+ This decorator respects the @project decorator and triggers the flow
1409
+ when upstream runs within the same namespace complete successfully
1410
+
1411
+ Additionally, you can specify project aware upstream flow dependencies
1412
+ by specifying the fully qualified project_flow_name.
1413
+ ```
1414
+ @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
1415
+ ```
1416
+ or
1417
+ ```
1418
+ @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
1419
+ ```
1420
+
1421
+ You can also specify just the project or project branch (other values will be
1422
+ inferred from the current project or project branch):
1423
+ ```
1424
+ @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1425
+ ```
1426
+
1427
+ Note that `branch` is typically one of:
1428
+ - `prod`
1429
+ - `user.bob`
1430
+ - `test.my_experiment`
1431
+ - `prod.staging`
1432
+
1433
+
1434
+ Parameters
1435
+ ----------
1436
+ flow : Union[str, Dict[str, str]], optional, default None
1437
+ Upstream flow dependency for this flow.
1438
+ flows : List[Union[str, Dict[str, str]]], default []
1439
+ Upstream flow dependencies for this flow.
1440
+ options : Dict[str, Any], default {}
1441
+ Backend-specific configuration for tuning eventing behavior.
1442
+ """
1443
+ ...
1444
+
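A minimal sketch of the `@trigger_on_finish` usage shown above. `FooFlow` is a hypothetical upstream flow, and the trigger only fires once this flow is deployed to a production orchestrator (for example Argo Workflows).

```
from metaflow import FlowSpec, step, trigger_on_finish


@trigger_on_finish(flow="FooFlow")
class DownstreamFlow(FlowSpec):

    @step
    def start(self):
        # Runs automatically after a successful FooFlow run in the same namespace.
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    DownstreamFlow()
```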
1494
1445
  @typing.overload
1495
1446
  def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1496
1447
  """
@@ -1542,5 +1493,56 @@ def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packa
1542
1493
  """
1543
1494
  ...
1544
1495
 
1496
+ @typing.overload
1497
+ def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1498
+ """
1499
+ Specifies the times when the flow should be run when running on a
1500
+ production scheduler.
1501
+
1502
+
1503
+ Parameters
1504
+ ----------
1505
+ hourly : bool, default False
1506
+ Run the workflow hourly.
1507
+ daily : bool, default True
1508
+ Run the workflow daily.
1509
+ weekly : bool, default False
1510
+ Run the workflow weekly.
1511
+ cron : str, optional, default None
1512
+ Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1513
+ specified by this expression.
1514
+ timezone : str, optional, default None
1515
+ Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1516
+ which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1517
+ """
1518
+ ...
1519
+
1520
+ @typing.overload
1521
+ def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1522
+ ...
1523
+
1524
+ def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
1525
+ """
1526
+ Specifies the times when the flow should be run when running on a
1527
+ production scheduler.
1528
+
1529
+
1530
+ Parameters
1531
+ ----------
1532
+ hourly : bool, default False
1533
+ Run the workflow hourly.
1534
+ daily : bool, default True
1535
+ Run the workflow daily.
1536
+ weekly : bool, default False
1537
+ Run the workflow weekly.
1538
+ cron : str, optional, default None
1539
+ Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1540
+ specified by this expression.
1541
+ timezone : str, optional, default None
1542
+ Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1543
+ which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1544
+ """
1545
+ ...
1546
+
1545
1547
  pkg_name: str
1546
1548