ob-metaflow-stubs 6.0.3.175rc1__py2.py3-none-any.whl → 6.0.3.176__py2.py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (219) hide show
  1. metaflow-stubs/__init__.pyi +724 -718
  2. metaflow-stubs/cards.pyi +1 -1
  3. metaflow-stubs/cli.pyi +1 -1
  4. metaflow-stubs/cli_components/__init__.pyi +1 -1
  5. metaflow-stubs/cli_components/utils.pyi +1 -1
  6. metaflow-stubs/client/__init__.pyi +1 -1
  7. metaflow-stubs/client/core.pyi +3 -3
  8. metaflow-stubs/client/filecache.pyi +1 -1
  9. metaflow-stubs/events.pyi +2 -2
  10. metaflow-stubs/exception.pyi +1 -1
  11. metaflow-stubs/flowspec.pyi +3 -3
  12. metaflow-stubs/generated_for.txt +1 -1
  13. metaflow-stubs/includefile.pyi +2 -2
  14. metaflow-stubs/info_file.pyi +1 -1
  15. metaflow-stubs/metadata_provider/__init__.pyi +1 -1
  16. metaflow-stubs/metadata_provider/heartbeat.pyi +1 -1
  17. metaflow-stubs/metadata_provider/metadata.pyi +1 -1
  18. metaflow-stubs/metadata_provider/util.pyi +1 -1
  19. metaflow-stubs/metaflow_config.pyi +1 -1
  20. metaflow-stubs/metaflow_current.pyi +114 -114
  21. metaflow-stubs/metaflow_git.pyi +1 -1
  22. metaflow-stubs/mf_extensions/__init__.pyi +1 -1
  23. metaflow-stubs/mf_extensions/obcheckpoint/__init__.pyi +1 -1
  24. metaflow-stubs/mf_extensions/obcheckpoint/plugins/__init__.pyi +1 -1
  25. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/__init__.pyi +1 -1
  26. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/__init__.pyi +1 -1
  27. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/async_cards.pyi +2 -2
  28. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/deco_injection_mixin.pyi +1 -1
  29. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/extra_components.pyi +1 -1
  30. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/__init__.pyi +1 -1
  31. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/cards/__init__.pyi +1 -1
  32. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/cards/checkpoint_lister.pyi +4 -4
  33. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/cards/lineage_card.pyi +1 -1
  34. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/checkpoint_storage.pyi +3 -3
  35. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/constructors.pyi +1 -1
  36. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/core.pyi +3 -3
  37. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/decorator.pyi +3 -3
  38. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/exceptions.pyi +1 -1
  39. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/final_api.pyi +2 -2
  40. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/lineage.pyi +1 -1
  41. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/__init__.pyi +1 -1
  42. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/context.pyi +2 -2
  43. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/core.pyi +1 -1
  44. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/decorator.pyi +1 -1
  45. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/exceptions.pyi +1 -1
  46. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/task_utils.pyi +2 -2
  47. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/utils.pyi +1 -1
  48. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastructures.pyi +1 -1
  49. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/exceptions.pyi +1 -1
  50. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/hf_hub/__init__.pyi +1 -1
  51. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/hf_hub/decorator.pyi +2 -2
  52. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/__init__.pyi +1 -1
  53. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/core.pyi +2 -2
  54. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/exceptions.pyi +1 -1
  55. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/model_storage.pyi +2 -2
  56. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/__init__.pyi +1 -1
  57. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/flowspec_utils.pyi +1 -1
  58. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/general.pyi +1 -1
  59. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/identity_utils.pyi +2 -2
  60. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/serialization_handler/__init__.pyi +1 -1
  61. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/serialization_handler/base.pyi +1 -1
  62. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/serialization_handler/tar.pyi +1 -1
  63. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/tar_utils.pyi +1 -1
  64. metaflow-stubs/mf_extensions/outerbounds/__init__.pyi +1 -1
  65. metaflow-stubs/mf_extensions/outerbounds/plugins/__init__.pyi +1 -1
  66. metaflow-stubs/mf_extensions/outerbounds/plugins/{fast_bakery → aws}/__init__.pyi +1 -1
  67. metaflow-stubs/mf_extensions/outerbounds/plugins/aws/assume_role_decorator.pyi +52 -0
  68. metaflow-stubs/mf_extensions/outerbounds/plugins/card_utilities/__init__.pyi +1 -1
  69. metaflow-stubs/mf_extensions/outerbounds/plugins/card_utilities/async_cards.pyi +60 -0
  70. metaflow-stubs/mf_extensions/outerbounds/plugins/card_utilities/injector.pyi +1 -1
  71. metaflow-stubs/mf_extensions/outerbounds/plugins/checkpoint_datastores/__init__.pyi +1 -1
  72. metaflow-stubs/mf_extensions/outerbounds/plugins/checkpoint_datastores/coreweave.pyi +1 -1
  73. metaflow-stubs/mf_extensions/outerbounds/plugins/checkpoint_datastores/nebius.pyi +1 -1
  74. metaflow-stubs/mf_extensions/outerbounds/plugins/kubernetes/__init__.pyi +1 -1
  75. metaflow-stubs/mf_extensions/outerbounds/plugins/kubernetes/pod_killer.pyi +1 -1
  76. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/__init__.pyi +1 -1
  77. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/constants.pyi +1 -1
  78. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/exceptions.pyi +1 -1
  79. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/ollama.pyi +60 -2
  80. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/status_card.pyi +73 -0
  81. metaflow-stubs/mf_extensions/outerbounds/plugins/snowflake/__init__.pyi +1 -1
  82. metaflow-stubs/mf_extensions/outerbounds/plugins/snowflake/snowflake.pyi +1 -1
  83. metaflow-stubs/mf_extensions/outerbounds/profilers/__init__.pyi +1 -1
  84. metaflow-stubs/mf_extensions/outerbounds/profilers/gpu.pyi +1 -1
  85. metaflow-stubs/mf_extensions/outerbounds/remote_config.pyi +1 -1
  86. metaflow-stubs/mf_extensions/outerbounds/toplevel/__init__.pyi +1 -1
  87. metaflow-stubs/mf_extensions/outerbounds/toplevel/global_aliases_for_metaflow_package.pyi +2 -2
  88. metaflow-stubs/multicore_utils.pyi +1 -1
  89. metaflow-stubs/parameters.pyi +1 -1
  90. metaflow-stubs/plugins/__init__.pyi +12 -12
  91. metaflow-stubs/plugins/airflow/__init__.pyi +1 -1
  92. metaflow-stubs/plugins/airflow/airflow_utils.pyi +1 -1
  93. metaflow-stubs/plugins/airflow/exception.pyi +1 -1
  94. metaflow-stubs/plugins/airflow/sensors/__init__.pyi +1 -1
  95. metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +1 -1
  96. metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +1 -1
  97. metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +1 -1
  98. metaflow-stubs/plugins/argo/__init__.pyi +1 -1
  99. metaflow-stubs/plugins/argo/argo_client.pyi +1 -1
  100. metaflow-stubs/plugins/argo/argo_events.pyi +1 -1
  101. metaflow-stubs/plugins/argo/argo_workflows.pyi +2 -2
  102. metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +2 -2
  103. metaflow-stubs/plugins/argo/argo_workflows_deployer.pyi +2 -2
  104. metaflow-stubs/plugins/argo/argo_workflows_deployer_objects.pyi +1 -1
  105. metaflow-stubs/plugins/aws/__init__.pyi +1 -1
  106. metaflow-stubs/plugins/aws/aws_client.pyi +1 -1
  107. metaflow-stubs/plugins/aws/aws_utils.pyi +1 -1
  108. metaflow-stubs/plugins/aws/batch/__init__.pyi +1 -1
  109. metaflow-stubs/plugins/aws/batch/batch.pyi +1 -1
  110. metaflow-stubs/plugins/aws/batch/batch_client.pyi +1 -1
  111. metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +1 -1
  112. metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +1 -1
  113. metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +2 -2
  114. metaflow-stubs/plugins/aws/step_functions/__init__.pyi +1 -1
  115. metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +1 -1
  116. metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +1 -1
  117. metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +1 -1
  118. metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +1 -1
  119. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer.pyi +2 -2
  120. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer_objects.pyi +1 -1
  121. metaflow-stubs/plugins/azure/__init__.pyi +1 -1
  122. metaflow-stubs/plugins/azure/azure_credential.pyi +1 -1
  123. metaflow-stubs/plugins/azure/azure_exceptions.pyi +1 -1
  124. metaflow-stubs/plugins/azure/azure_secret_manager_secrets_provider.pyi +2 -2
  125. metaflow-stubs/plugins/azure/azure_utils.pyi +1 -1
  126. metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +1 -1
  127. metaflow-stubs/plugins/azure/includefile_support.pyi +1 -1
  128. metaflow-stubs/plugins/cards/__init__.pyi +1 -1
  129. metaflow-stubs/plugins/cards/card_client.pyi +2 -2
  130. metaflow-stubs/plugins/cards/card_creator.pyi +1 -1
  131. metaflow-stubs/plugins/cards/card_datastore.pyi +1 -1
  132. metaflow-stubs/plugins/cards/card_decorator.pyi +1 -1
  133. metaflow-stubs/plugins/cards/card_modules/__init__.pyi +1 -1
  134. metaflow-stubs/plugins/cards/card_modules/basic.pyi +2 -2
  135. metaflow-stubs/plugins/cards/card_modules/card.pyi +1 -1
  136. metaflow-stubs/plugins/cards/card_modules/components.pyi +1 -1
  137. metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +1 -1
  138. metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +1 -1
  139. metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +1 -1
  140. metaflow-stubs/plugins/cards/card_resolver.pyi +1 -1
  141. metaflow-stubs/plugins/cards/component_serializer.pyi +1 -1
  142. metaflow-stubs/plugins/cards/exception.pyi +1 -1
  143. metaflow-stubs/plugins/catch_decorator.pyi +2 -2
  144. metaflow-stubs/plugins/datatools/__init__.pyi +1 -1
  145. metaflow-stubs/plugins/datatools/local.pyi +1 -1
  146. metaflow-stubs/plugins/datatools/s3/__init__.pyi +1 -1
  147. metaflow-stubs/plugins/datatools/s3/s3.pyi +2 -2
  148. metaflow-stubs/plugins/datatools/s3/s3tail.pyi +1 -1
  149. metaflow-stubs/plugins/datatools/s3/s3util.pyi +1 -1
  150. metaflow-stubs/plugins/debug_logger.pyi +1 -1
  151. metaflow-stubs/plugins/debug_monitor.pyi +1 -1
  152. metaflow-stubs/plugins/environment_decorator.pyi +1 -1
  153. metaflow-stubs/plugins/events_decorator.pyi +1 -1
  154. metaflow-stubs/plugins/frameworks/__init__.pyi +1 -1
  155. metaflow-stubs/plugins/frameworks/pytorch.pyi +1 -1
  156. metaflow-stubs/plugins/gcp/__init__.pyi +1 -1
  157. metaflow-stubs/plugins/gcp/gcp_secret_manager_secrets_provider.pyi +2 -2
  158. metaflow-stubs/plugins/gcp/gs_exceptions.pyi +1 -1
  159. metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +1 -1
  160. metaflow-stubs/plugins/gcp/gs_utils.pyi +1 -1
  161. metaflow-stubs/plugins/gcp/includefile_support.pyi +1 -1
  162. metaflow-stubs/plugins/kubernetes/__init__.pyi +1 -1
  163. metaflow-stubs/plugins/kubernetes/kube_utils.pyi +1 -1
  164. metaflow-stubs/plugins/kubernetes/kubernetes.pyi +1 -1
  165. metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +1 -1
  166. metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +1 -1
  167. metaflow-stubs/plugins/kubernetes/kubernetes_jobsets.pyi +1 -1
  168. metaflow-stubs/plugins/kubernetes/spot_monitor_sidecar.pyi +1 -1
  169. metaflow-stubs/plugins/ollama/__init__.pyi +13 -4
  170. metaflow-stubs/plugins/parallel_decorator.pyi +1 -1
  171. metaflow-stubs/plugins/perimeters.pyi +1 -1
  172. metaflow-stubs/plugins/project_decorator.pyi +1 -1
  173. metaflow-stubs/plugins/pypi/__init__.pyi +1 -1
  174. metaflow-stubs/plugins/pypi/conda_decorator.pyi +1 -1
  175. metaflow-stubs/plugins/pypi/conda_environment.pyi +4 -4
  176. metaflow-stubs/plugins/pypi/parsers.pyi +1 -1
  177. metaflow-stubs/plugins/pypi/pypi_decorator.pyi +1 -1
  178. metaflow-stubs/plugins/pypi/pypi_environment.pyi +1 -1
  179. metaflow-stubs/plugins/pypi/utils.pyi +1 -1
  180. metaflow-stubs/plugins/resources_decorator.pyi +1 -1
  181. metaflow-stubs/plugins/retry_decorator.pyi +1 -1
  182. metaflow-stubs/plugins/secrets/__init__.pyi +1 -1
  183. metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +1 -1
  184. metaflow-stubs/plugins/secrets/secrets_decorator.pyi +1 -1
  185. metaflow-stubs/plugins/snowflake/__init__.pyi +1 -1
  186. metaflow-stubs/plugins/storage_executor.pyi +1 -1
  187. metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +1 -1
  188. metaflow-stubs/plugins/timeout_decorator.pyi +2 -2
  189. metaflow-stubs/plugins/torchtune/__init__.pyi +1 -1
  190. metaflow-stubs/plugins/uv/__init__.pyi +1 -1
  191. metaflow-stubs/plugins/uv/uv_environment.pyi +2 -2
  192. metaflow-stubs/profilers/__init__.pyi +1 -1
  193. metaflow-stubs/pylint_wrapper.pyi +1 -1
  194. metaflow-stubs/runner/__init__.pyi +1 -1
  195. metaflow-stubs/runner/deployer.pyi +28 -28
  196. metaflow-stubs/runner/deployer_impl.pyi +1 -1
  197. metaflow-stubs/runner/metaflow_runner.pyi +1 -1
  198. metaflow-stubs/runner/nbdeploy.pyi +1 -1
  199. metaflow-stubs/runner/nbrun.pyi +1 -1
  200. metaflow-stubs/runner/subprocess_manager.pyi +1 -1
  201. metaflow-stubs/runner/utils.pyi +1 -1
  202. metaflow-stubs/system/__init__.pyi +1 -1
  203. metaflow-stubs/system/system_logger.pyi +2 -2
  204. metaflow-stubs/system/system_monitor.pyi +1 -1
  205. metaflow-stubs/tagging_util.pyi +1 -1
  206. metaflow-stubs/tuple_util.pyi +1 -1
  207. metaflow-stubs/user_configs/__init__.pyi +1 -1
  208. metaflow-stubs/user_configs/config_decorators.pyi +3 -3
  209. metaflow-stubs/user_configs/config_options.pyi +1 -1
  210. metaflow-stubs/user_configs/config_parameters.pyi +4 -4
  211. {ob_metaflow_stubs-6.0.3.175rc1.dist-info → ob_metaflow_stubs-6.0.3.176.dist-info}/METADATA +1 -1
  212. ob_metaflow_stubs-6.0.3.176.dist-info/RECORD +215 -0
  213. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/baker.pyi +0 -51
  214. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/docker_environment.pyi +0 -65
  215. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/fast_bakery.pyi +0 -74
  216. metaflow-stubs/ob_internal.pyi +0 -11
  217. ob_metaflow_stubs-6.0.3.175rc1.dist-info/RECORD +0 -216
  218. {ob_metaflow_stubs-6.0.3.175rc1.dist-info → ob_metaflow_stubs-6.0.3.176.dist-info}/WHEEL +0 -0
  219. {ob_metaflow_stubs-6.0.3.175rc1.dist-info → ob_metaflow_stubs-6.0.3.176.dist-info}/top_level.txt +0 -0
@@ -1,15 +1,15 @@
1
1
  ######################################################################################################
2
2
  # Auto-generated Metaflow stub file #
3
3
  # MF version: 2.15.14.1+obcheckpoint(0.2.1);ob(v1) #
4
- # Generated on 2025-05-31T01:09:57.775269 #
4
+ # Generated on 2025-06-09T21:12:38.857633 #
5
5
  ######################################################################################################
6
6
 
7
7
  from __future__ import annotations
8
8
 
9
9
  import typing
10
10
  if typing.TYPE_CHECKING:
11
- import datetime
12
11
  import typing
12
+ import datetime
13
13
  FlowSpecDerived = typing.TypeVar("FlowSpecDerived", bound="FlowSpec", contravariant=False, covariant=False)
14
14
  StepFlag = typing.NewType("StepFlag", bool)
15
15
 
@@ -35,18 +35,18 @@ from .user_configs.config_parameters import ConfigValue as ConfigValue
35
35
  from .user_configs.config_parameters import config_expr as config_expr
36
36
  from .user_configs.config_decorators import CustomFlowDecorator as CustomFlowDecorator
37
37
  from .user_configs.config_decorators import CustomStepDecorator as CustomStepDecorator
38
- from . import tuple_util as tuple_util
39
38
  from . import cards as cards
40
- from . import events as events
39
+ from . import tuple_util as tuple_util
41
40
  from . import metaflow_git as metaflow_git
41
+ from . import events as events
42
42
  from . import runner as runner
43
43
  from . import plugins as plugins
44
44
  from .mf_extensions.outerbounds.toplevel.global_aliases_for_metaflow_package import S3 as S3
45
45
  from . import includefile as includefile
46
46
  from .includefile import IncludeFile as IncludeFile
47
+ from .plugins.pypi.parsers import requirements_txt_parser as requirements_txt_parser
47
48
  from .plugins.pypi.parsers import conda_environment_yml_parser as conda_environment_yml_parser
48
49
  from .plugins.pypi.parsers import pyproject_toml_parser as pyproject_toml_parser
49
- from .plugins.pypi.parsers import requirements_txt_parser as requirements_txt_parser
50
50
  from . import client as client
51
51
  from .client.core import namespace as namespace
52
52
  from .client.core import get_namespace as get_namespace
@@ -72,12 +72,12 @@ from .mf_extensions.outerbounds.toplevel.global_aliases_for_metaflow_package imp
72
72
  from .mf_extensions.outerbounds.plugins.snowflake.snowflake import Snowflake as Snowflake
73
73
  from .mf_extensions.outerbounds.plugins.checkpoint_datastores.nebius import nebius_checkpoints as nebius_checkpoints
74
74
  from .mf_extensions.outerbounds.plugins.checkpoint_datastores.coreweave import coreweave_checkpoints as coreweave_checkpoints
75
+ from .mf_extensions.outerbounds.plugins.aws.assume_role_decorator import assume_role as assume_role
75
76
  from . import cli_components as cli_components
76
77
  from . import system as system
77
78
  from . import pylint_wrapper as pylint_wrapper
78
79
  from . import cli as cli
79
80
  from . import profilers as profilers
80
- from . import ob_internal as ob_internal
81
81
 
82
82
  EXT_PKG: str
83
83
 
@@ -155,193 +155,154 @@ def step(f: typing.Union[typing.Callable[[FlowSpecDerived], None], typing.Callab
155
155
  ...
156
156
 
157
157
  @typing.overload
158
- def resources(*, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
158
+ def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
159
159
  """
160
- Specifies the resources needed when executing this step.
160
+ Specifies a timeout for your step.
161
161
 
162
- Use `@resources` to specify the resource requirements
163
- independently of the specific compute layer (`@batch`, `@kubernetes`).
162
+ This decorator is useful if this step may hang indefinitely.
164
163
 
165
- You can choose the compute layer on the command line by executing e.g.
166
- ```
167
- python myflow.py run --with batch
168
- ```
169
- or
170
- ```
171
- python myflow.py run --with kubernetes
172
- ```
173
- which executes the flow on the desired system using the
174
- requirements specified in `@resources`.
164
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
165
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
166
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.
167
+
168
+ Note that all the values specified in parameters are added together so if you specify
169
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
175
170
 
176
171
 
177
172
  Parameters
178
173
  ----------
179
- cpu : int, default 1
180
- Number of CPUs required for this step.
181
- gpu : int, optional, default None
182
- Number of GPUs required for this step.
183
- disk : int, optional, default None
184
- Disk size (in MB) required for this step. Only applies on Kubernetes.
185
- memory : int, default 4096
186
- Memory size (in MB) required for this step.
187
- shared_memory : int, optional, default None
188
- The value for the size (in MiB) of the /dev/shm volume for this step.
189
- This parameter maps to the `--shm-size` option in Docker.
174
+ seconds : int, default 0
175
+ Number of seconds to wait prior to timing out.
176
+ minutes : int, default 0
177
+ Number of minutes to wait prior to timing out.
178
+ hours : int, default 0
179
+ Number of hours to wait prior to timing out.
190
180
  """
191
181
  ...
192
182
 
193
183
  @typing.overload
194
- def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
184
+ def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
195
185
  ...
196
186
 
197
187
  @typing.overload
198
- def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
188
+ def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
199
189
  ...
200
190
 
201
- def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
191
+ def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
202
192
  """
203
- Specifies the resources needed when executing this step.
193
+ Specifies a timeout for your step.
204
194
 
205
- Use `@resources` to specify the resource requirements
206
- independently of the specific compute layer (`@batch`, `@kubernetes`).
195
+ This decorator is useful if this step may hang indefinitely.
207
196
 
208
- You can choose the compute layer on the command line by executing e.g.
209
- ```
210
- python myflow.py run --with batch
211
- ```
212
- or
213
- ```
214
- python myflow.py run --with kubernetes
215
- ```
216
- which executes the flow on the desired system using the
217
- requirements specified in `@resources`.
197
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
198
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
199
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.
200
+
201
+ Note that all the values specified in parameters are added together so if you specify
202
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
218
203
 
219
204
 
220
205
  Parameters
221
206
  ----------
222
- cpu : int, default 1
223
- Number of CPUs required for this step.
224
- gpu : int, optional, default None
225
- Number of GPUs required for this step.
226
- disk : int, optional, default None
227
- Disk size (in MB) required for this step. Only applies on Kubernetes.
228
- memory : int, default 4096
229
- Memory size (in MB) required for this step.
230
- shared_memory : int, optional, default None
231
- The value for the size (in MiB) of the /dev/shm volume for this step.
232
- This parameter maps to the `--shm-size` option in Docker.
207
+ seconds : int, default 0
208
+ Number of seconds to wait prior to timing out.
209
+ minutes : int, default 0
210
+ Number of minutes to wait prior to timing out.
211
+ hours : int, default 0
212
+ Number of hours to wait prior to timing out.
233
213
  """
234
214
  ...
235
215
 
236
- @typing.overload
237
- def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
216
+ def app_deploy(*, app_port: int, app_name: str) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
238
217
  """
239
- Specifies that the step will success under all circumstances.
240
-
241
- The decorator will create an optional artifact, specified by `var`, which
242
- contains the exception raised. You can use it to detect the presence
243
- of errors, indicating that all happy-path artifacts produced by the step
244
- are missing.
218
+ Specifies that this step is used to deploy an instance of the app.
219
+ Requires that self.app_name, self.app_port, self.entrypoint and self.deployDir is set.
245
220
 
246
221
 
247
222
  Parameters
248
223
  ----------
249
- var : str, optional, default None
250
- Name of the artifact in which to store the caught exception.
251
- If not specified, the exception is not stored.
252
- print_exception : bool, default True
253
- Determines whether or not the exception is printed to
254
- stdout when caught.
224
+ app_port : int
225
+ Number of GPUs to use.
226
+ app_name : str
227
+ Name of the app to deploy.
255
228
  """
256
229
  ...
257
230
 
258
231
  @typing.overload
259
- def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
260
- ...
261
-
262
- @typing.overload
263
- def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
264
- ...
265
-
266
- def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
232
+ def checkpoint(*, load_policy: str = 'fresh', temp_dir_root: str = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
267
233
  """
268
- Specifies that the step will success under all circumstances.
234
+ Enables checkpointing for a step.
269
235
 
270
- The decorator will create an optional artifact, specified by `var`, which
271
- contains the exception raised. You can use it to detect the presence
272
- of errors, indicating that all happy-path artifacts produced by the step
273
- are missing.
274
236
 
275
237
 
276
238
  Parameters
277
239
  ----------
278
- var : str, optional, default None
279
- Name of the artifact in which to store the caught exception.
280
- If not specified, the exception is not stored.
281
- print_exception : bool, default True
282
- Determines whether or not the exception is printed to
283
- stdout when caught.
240
+ load_policy : str, default: "fresh"
241
+ The policy for loading the checkpoint. The following policies are supported:
242
+ - "eager": Loads the the latest available checkpoint within the namespace.
243
+ With this mode, the latest checkpoint written by any previous task (can be even a different run) of the step
244
+ will be loaded at the start of the task.
245
+ - "none": Do not load any checkpoint
246
+ - "fresh": Loads the lastest checkpoint created within the running Task.
247
+ This mode helps loading checkpoints across various retry attempts of the same task.
248
+ With this mode, no checkpoint will be loaded at the start of a task but any checkpoints
249
+ created within the task will be loaded when the task is retries execution on failure.
250
+
251
+ temp_dir_root : str, default: None
252
+ The root directory under which `current.checkpoint.directory` will be created.
284
253
  """
285
254
  ...
286
255
 
287
256
  @typing.overload
288
- def model(*, load: typing.Union[typing.List[str], str, typing.List[typing.Tuple[str, typing.Optional[str]]]] = None, temp_dir_root: str = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
257
+ def checkpoint(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
258
+ ...
259
+
260
+ @typing.overload
261
+ def checkpoint(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
262
+ ...
263
+
264
+ def checkpoint(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, load_policy: str = 'fresh', temp_dir_root: str = None):
289
265
  """
290
- Enables loading / saving of models within a step.
266
+ Enables checkpointing for a step.
291
267
 
292
268
 
293
269
 
294
270
  Parameters
295
271
  ----------
296
- load : Union[List[str],str,List[Tuple[str,Union[str,None]]]], default: None
297
- Artifact name/s referencing the models/checkpoints to load. Artifact names refer to the names of the instance variables set to `self`.
298
- These artifact names give to `load` be reference objects or reference `key` string's from objects created by:
299
- - `current.checkpoint`
300
- - `current.model`
301
- - `current.huggingface_hub`
302
-
303
- If a list of tuples is provided, the first element is the artifact name and the second element is the path the artifact needs be unpacked on
304
- the local filesystem. If the second element is None, the artifact will be unpacked in the current working directory.
305
- If a string is provided, then the artifact corresponding to that name will be loaded in the current working directory.
272
+ load_policy : str, default: "fresh"
273
+ The policy for loading the checkpoint. The following policies are supported:
274
+ - "eager": Loads the the latest available checkpoint within the namespace.
275
+ With this mode, the latest checkpoint written by any previous task (can be even a different run) of the step
276
+ will be loaded at the start of the task.
277
+ - "none": Do not load any checkpoint
278
+ - "fresh": Loads the lastest checkpoint created within the running Task.
279
+ This mode helps loading checkpoints across various retry attempts of the same task.
280
+ With this mode, no checkpoint will be loaded at the start of a task but any checkpoints
281
+ created within the task will be loaded when the task is retries execution on failure.
306
282
 
307
283
  temp_dir_root : str, default: None
308
- The root directory under which `current.model.loaded` will store loaded models
284
+ The root directory under which `current.checkpoint.directory` will be created.
309
285
  """
310
286
  ...
311
287
 
312
288
  @typing.overload
313
- def model(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
289
+ def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
290
+ """
291
+ Internal decorator to support Fast bakery
292
+ """
314
293
  ...
315
294
 
316
295
  @typing.overload
317
- def model(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
296
+ def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
318
297
  ...
319
298
 
320
- def model(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, load: typing.Union[typing.List[str], str, typing.List[typing.Tuple[str, typing.Optional[str]]]] = None, temp_dir_root: str = None):
299
+ def fast_bakery_internal(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
321
300
  """
322
- Enables loading / saving of models within a step.
323
-
324
-
325
-
326
- Parameters
327
- ----------
328
- load : Union[List[str],str,List[Tuple[str,Union[str,None]]]], default: None
329
- Artifact name/s referencing the models/checkpoints to load. Artifact names refer to the names of the instance variables set to `self`.
330
- These artifact names give to `load` be reference objects or reference `key` string's from objects created by:
331
- - `current.checkpoint`
332
- - `current.model`
333
- - `current.huggingface_hub`
334
-
335
- If a list of tuples is provided, the first element is the artifact name and the second element is the path the artifact needs be unpacked on
336
- the local filesystem. If the second element is None, the artifact will be unpacked in the current working directory.
337
- If a string is provided, then the artifact corresponding to that name will be loaded in the current working directory.
338
-
339
- temp_dir_root : str, default: None
340
- The root directory under which `current.model.loaded` will store loaded models
301
+ Internal decorator to support Fast bakery
341
302
  """
342
303
  ...
343
304
 
344
- def ollama(*, models: list, backend: str, force_pull: bool, skip_push_check: bool, debug: bool) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
305
+ def ollama(*, models: list, backend: str, force_pull: bool, cache_update_policy: str, force_cache_update: bool, debug: bool, circuit_breaker_config: dict, timeout_config: dict) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
345
306
  """
346
307
  This decorator is used to run Ollama APIs as Metaflow task sidecars.
347
308
 
@@ -371,599 +332,745 @@ def ollama(*, models: list, backend: str, force_pull: bool, skip_push_check: boo
371
332
  Determines where and how to run the Ollama process.
372
333
  force_pull: bool
373
334
  Whether to run `ollama pull` no matter what, or first check the remote cache in Metaflow datastore for this model key.
374
- skip_push_check: bool
375
- Whether to skip the check that populates/overwrites remote cache on terminating an ollama model.
335
+ cache_update_policy: str
336
+ Cache update policy: "auto", "force", or "never".
337
+ force_cache_update: bool
338
+ Simple override for "force" cache update policy.
376
339
  debug: bool
377
340
  Whether to turn on verbose debugging logs.
341
+ circuit_breaker_config: dict
342
+ Configuration for circuit breaker protection. Keys: failure_threshold, recovery_timeout, reset_timeout.
343
+ timeout_config: dict
344
+ Configuration for various operation timeouts. Keys: pull, stop, health_check, install, server_startup.
378
345
  """
379
346
  ...
380
347
 
381
- def app_deploy(*, app_port: int, app_name: str) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
348
+ @typing.overload
349
+ def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
382
350
  """
383
- Specifies that this step is used to deploy an instance of the app.
384
- Requires that self.app_name, self.app_port, self.entrypoint and self.deployDir is set.
351
+ Specifies secrets to be retrieved and injected as environment variables prior to
352
+ the execution of a step.
385
353
 
386
354
 
387
355
  Parameters
388
356
  ----------
389
- app_port : int
390
- Number of GPUs to use.
391
- app_name : str
392
- Name of the app to deploy.
357
+ sources : List[Union[str, Dict[str, Any]]], default: []
358
+ List of secret specs, defining how the secrets are to be retrieved
393
359
  """
394
360
  ...
395
361
 
396
362
  @typing.overload
397
- def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
398
- """
399
- Decorator prototype for all step decorators. This function gets specialized
400
- and imported for all decorators types by _import_plugin_decorators().
401
- """
363
+ def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
402
364
  ...
403
365
 
404
366
  @typing.overload
405
- def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
406
- ...
407
-
408
- def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
409
- """
410
- Decorator prototype for all step decorators. This function gets specialized
411
- and imported for all decorators types by _import_plugin_decorators().
412
- """
367
+ def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
413
368
  ...
414
369
 
415
- @typing.overload
416
- def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
370
+ def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []):
417
371
  """
418
- Internal decorator to support Fast bakery
372
+ Specifies secrets to be retrieved and injected as environment variables prior to
373
+ the execution of a step.
374
+
375
+
376
+ Parameters
377
+ ----------
378
+ sources : List[Union[str, Dict[str, Any]]], default: []
379
+ List of secret specs, defining how the secrets are to be retrieved
419
380
  """
420
381
  ...
421
382
 
422
383
  @typing.overload
423
- def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
424
- ...
425
-
426
- def fast_bakery_internal(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
427
- """
428
- Internal decorator to support Fast bakery
384
+ def card(*, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
429
385
  """
430
- ...
431
-
432
- def nvct(*, gpu: int, gpu_type: str) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
433
- """
434
- Specifies that this step should execute on DGX cloud.
386
+ Creates a human-readable report, a Metaflow Card, after this step completes.
387
+
388
+ Note that you may add multiple `@card` decorators in a step with different parameters.
435
389
 
436
390
 
437
391
  Parameters
438
392
  ----------
439
- gpu : int
440
- Number of GPUs to use.
441
- gpu_type : str
442
- Type of Nvidia GPU to use.
393
+ type : str, default 'default'
394
+ Card type.
395
+ id : str, optional, default None
396
+ If multiple cards are present, use this id to identify this card.
397
+ options : Dict[str, Any], default {}
398
+ Options passed to the card. The contents depend on the card type.
399
+ timeout : int, default 45
400
+ Interrupt reporting if it takes more than this many seconds.
443
401
  """
444
402
  ...
445
403
 
446
- def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = 'KUBERNETES_IMAGE_PULL_POLICY', service_account: str = 'METAFLOW_KUBERNETES_SERVICE_ACCOUNT', secrets: typing.Optional[typing.List[str]] = None, node_selector: typing.Union[typing.Dict[str, str], str, None] = None, namespace: str = 'METAFLOW_KUBERNETES_NAMESPACE', gpu: typing.Optional[int] = None, gpu_vendor: str = 'KUBERNETES_GPU_VENDOR', tolerations: typing.List[str] = [], labels: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_LABELS', annotations: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_ANNOTATIONS', use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = '/metaflow_temp', persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None, hostname_resolution_timeout: int = 600, qos: str = 'Burstable', security_context: typing.Optional[typing.Dict[str, typing.Any]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
447
- """
448
- Specifies that this step should execute on Kubernetes.
449
-
450
-
451
- Parameters
452
- ----------
453
- cpu : int, default 1
454
- Number of CPUs required for this step. If `@resources` is
455
- also present, the maximum value from all decorators is used.
456
- memory : int, default 4096
457
- Memory size (in MB) required for this step. If
458
- `@resources` is also present, the maximum value from all decorators is
459
- used.
460
- disk : int, default 10240
461
- Disk size (in MB) required for this step. If
462
- `@resources` is also present, the maximum value from all decorators is
463
- used.
464
- image : str, optional, default None
465
- Docker image to use when launching on Kubernetes. If not specified, and
466
- METAFLOW_KUBERNETES_CONTAINER_IMAGE is specified, that image is used. If
467
- not, a default Docker image mapping to the current version of Python is used.
468
- image_pull_policy: str, default KUBERNETES_IMAGE_PULL_POLICY
469
- If given, the imagePullPolicy to be applied to the Docker image of the step.
470
- service_account : str, default METAFLOW_KUBERNETES_SERVICE_ACCOUNT
471
- Kubernetes service account to use when launching pod in Kubernetes.
472
- secrets : List[str], optional, default None
473
- Kubernetes secrets to use when launching pod in Kubernetes. These
474
- secrets are in addition to the ones defined in `METAFLOW_KUBERNETES_SECRETS`
475
- in Metaflow configuration.
476
- node_selector: Union[Dict[str,str], str], optional, default None
477
- Kubernetes node selector(s) to apply to the pod running the task.
478
- Can be passed in as a comma separated string of values e.g.
479
- 'kubernetes.io/os=linux,kubernetes.io/arch=amd64' or as a dictionary
480
- {'kubernetes.io/os': 'linux', 'kubernetes.io/arch': 'amd64'}
481
- namespace : str, default METAFLOW_KUBERNETES_NAMESPACE
482
- Kubernetes namespace to use when launching pod in Kubernetes.
483
- gpu : int, optional, default None
484
- Number of GPUs required for this step. A value of zero implies that
485
- the scheduled node should not have GPUs.
486
- gpu_vendor : str, default KUBERNETES_GPU_VENDOR
487
- The vendor of the GPUs to be used for this step.
488
- tolerations : List[str], default []
489
- The default is extracted from METAFLOW_KUBERNETES_TOLERATIONS.
490
- Kubernetes tolerations to use when launching pod in Kubernetes.
491
- labels: Dict[str, str], default: METAFLOW_KUBERNETES_LABELS
492
- Kubernetes labels to use when launching pod in Kubernetes.
493
- annotations: Dict[str, str], default: METAFLOW_KUBERNETES_ANNOTATIONS
494
- Kubernetes annotations to use when launching pod in Kubernetes.
495
- use_tmpfs : bool, default False
496
- This enables an explicit tmpfs mount for this step.
497
- tmpfs_tempdir : bool, default True
498
- sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
499
- tmpfs_size : int, optional, default: None
500
- The value for the size (in MiB) of the tmpfs mount for this step.
501
- This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
502
- memory allocated for this step.
503
- tmpfs_path : str, optional, default /metaflow_temp
504
- Path to tmpfs mount for this step.
505
- persistent_volume_claims : Dict[str, str], optional, default None
506
- A map (dictionary) of persistent volumes to be mounted to the pod for this step. The map is from persistent
507
- volumes to the path to which the volume is to be mounted, e.g., `{'pvc-name': '/path/to/mount/on'}`.
508
- shared_memory: int, optional
509
- Shared memory size (in MiB) required for this step
510
- port: int, optional
511
- Port number to specify in the Kubernetes job object
512
- compute_pool : str, optional, default None
513
- Compute pool to be used for for this step.
514
- If not specified, any accessible compute pool within the perimeter is used.
515
- hostname_resolution_timeout: int, default 10 * 60
516
- Timeout in seconds for the workers tasks in the gang scheduled cluster to resolve the hostname of control task.
517
- Only applicable when @parallel is used.
518
- qos: str, default: Burstable
519
- Quality of Service class to assign to the pod. Supported values are: Guaranteed, Burstable, BestEffort
520
-
521
- security_context: Dict[str, Any], optional, default None
522
- Container security context. Applies to the task container. Allows the following keys:
523
- - privileged: bool, optional, default None
524
- - allow_privilege_escalation: bool, optional, default None
525
- - run_as_user: int, optional, default None
526
- - run_as_group: int, optional, default None
527
- - run_as_non_root: bool, optional, default None
528
- """
404
+ @typing.overload
405
+ def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
529
406
  ...
530
407
 
531
- def nvidia(*, gpu: int, gpu_type: str, queue_timeout: int) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
408
+ @typing.overload
409
+ def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
410
+ ...
411
+
412
+ def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
532
413
  """
533
- Specifies that this step should execute on DGX cloud.
414
+ Creates a human-readable report, a Metaflow Card, after this step completes.
415
+
416
+ Note that you may add multiple `@card` decorators in a step with different parameters.
534
417
 
535
418
 
536
419
  Parameters
537
420
  ----------
538
- gpu : int
539
- Number of GPUs to use.
540
- gpu_type : str
541
- Type of Nvidia GPU to use.
542
- queue_timeout : int
543
- Time to keep the job in NVCF's queue.
421
+ type : str, default 'default'
422
+ Card type.
423
+ id : str, optional, default None
424
+ If multiple cards are present, use this id to identify this card.
425
+ options : Dict[str, Any], default {}
426
+ Options passed to the card. The contents depend on the card type.
427
+ timeout : int, default 45
428
+ Interrupt reporting if it takes more than this many seconds.
544
429
  """
545
430
  ...
546
431
 
547
432
  @typing.overload
548
- def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
433
+ def resources(*, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
549
434
  """
550
- Specifies environment variables to be set prior to the execution of a step.
435
+ Specifies the resources needed when executing this step.
436
+
437
+ Use `@resources` to specify the resource requirements
438
+ independently of the specific compute layer (`@batch`, `@kubernetes`).
439
+
440
+ You can choose the compute layer on the command line by executing e.g.
441
+ ```
442
+ python myflow.py run --with batch
443
+ ```
444
+ or
445
+ ```
446
+ python myflow.py run --with kubernetes
447
+ ```
448
+ which executes the flow on the desired system using the
449
+ requirements specified in `@resources`.
551
450
 
552
451
 
553
452
  Parameters
554
453
  ----------
555
- vars : Dict[str, str], default {}
556
- Dictionary of environment variables to set.
454
+ cpu : int, default 1
455
+ Number of CPUs required for this step.
456
+ gpu : int, optional, default None
457
+ Number of GPUs required for this step.
458
+ disk : int, optional, default None
459
+ Disk size (in MB) required for this step. Only applies on Kubernetes.
460
+ memory : int, default 4096
461
+ Memory size (in MB) required for this step.
462
+ shared_memory : int, optional, default None
463
+ The value for the size (in MiB) of the /dev/shm volume for this step.
464
+ This parameter maps to the `--shm-size` option in Docker.
557
465
  """
558
466
  ...
559
467
 
560
468
  @typing.overload
561
- def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
469
+ def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
562
470
  ...
563
471
 
564
472
  @typing.overload
565
- def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
473
+ def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
566
474
  ...
567
475
 
568
- def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
476
+ def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
569
477
  """
570
- Specifies environment variables to be set prior to the execution of a step.
478
+ Specifies the resources needed when executing this step.
479
+
480
+ Use `@resources` to specify the resource requirements
481
+ independently of the specific compute layer (`@batch`, `@kubernetes`).
482
+
483
+ You can choose the compute layer on the command line by executing e.g.
484
+ ```
485
+ python myflow.py run --with batch
486
+ ```
487
+ or
488
+ ```
489
+ python myflow.py run --with kubernetes
490
+ ```
491
+ which executes the flow on the desired system using the
492
+ requirements specified in `@resources`.
571
493
 
572
494
 
573
495
  Parameters
574
496
  ----------
575
- vars : Dict[str, str], default {}
576
- Dictionary of environment variables to set.
497
+ cpu : int, default 1
498
+ Number of CPUs required for this step.
499
+ gpu : int, optional, default None
500
+ Number of GPUs required for this step.
501
+ disk : int, optional, default None
502
+ Disk size (in MB) required for this step. Only applies on Kubernetes.
503
+ memory : int, default 4096
504
+ Memory size (in MB) required for this step.
505
+ shared_memory : int, optional, default None
506
+ The value for the size (in MiB) of the /dev/shm volume for this step.
507
+ This parameter maps to the `--shm-size` option in Docker.
577
508
  """
578
509
  ...
579
510
 
580
- def huggingface_hub(*, temp_dir_root: typing.Optional[str] = None, load: typing.Union[typing.List[str], typing.List[typing.Tuple[typing.Dict, str]], typing.List[typing.Tuple[str, str]], typing.List[typing.Dict], None]) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
511
+ @typing.overload
512
+ def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
581
513
  """
582
- Decorator that helps cache, version and store models/datasets from huggingface hub.
583
-
584
-
585
- Parameters
586
- ----------
587
- temp_dir_root : str, optional
588
- The root directory that will hold the temporary directory where objects will be downloaded.
589
-
590
- load: Union[List[str], List[Tuple[Dict, str]], List[Tuple[str, str]], List[Dict], None]
591
- The list of repos (models/datasets) to load.
592
-
593
- Loaded repos can be accessed via `current.huggingface_hub.loaded`. If load is set, then the following happens:
594
-
595
- - If repo (model/dataset) is not found in the datastore:
596
- - Downloads the repo from Hugging Face Hub to a temporary directory (or uses specified path) for local access
597
- - Stores it in Metaflow's datastore (s3/gcs/azure etc.) with a unique name based on repo_type/repo_id
598
- - All HF models loaded for a `@step` will be cached separately under flow/step/namespace.
599
-
600
- - If repo is found in the datastore:
601
- - Loads it directly from datastore to local path (can be temporary directory or specified path)
514
+ Decorator prototype for all step decorators. This function gets specialized

515
+ and imported for all decorator types by _import_plugin_decorators().
602
516
  """
603
517
  ...
604
518
 
605
519
  @typing.overload
606
- def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
520
+ def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
521
+ ...
522
+
523
+ def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
607
524
  """
608
- Specifies the number of times the task corresponding
609
- to a step needs to be retried.
610
-
611
- This decorator is useful for handling transient errors, such as networking issues.
612
- If your task contains operations that can't be retried safely, e.g. database updates,
613
- it is advisable to annotate it with `@retry(times=0)`.
525
+ Decorator prototype for all step decorators. This function gets specialized
526
+ and imported for all decorator types by _import_plugin_decorators().
527
+ """
528
+ ...
529
+
530
+ @typing.overload
531
+ def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
532
+ """
533
+ Specifies the Conda environment for the step.
614
534
 
615
- This can be used in conjunction with the `@catch` decorator. The `@catch`
616
- decorator will execute a no-op task after all retries have been exhausted,
617
- ensuring that the flow execution can continue.
535
+ Information in this decorator will augment any
536
+ attributes set in the `@conda_base` flow-level decorator. Hence,
537
+ you can use `@conda_base` to set packages required by all
538
+ steps and use `@conda` to specify step-specific overrides.
618
539
 
619
540
 
620
541
  Parameters
621
542
  ----------
622
- times : int, default 3
623
- Number of times to retry this task.
624
- minutes_between_retries : int, default 2
625
- Number of minutes between retries.
543
+ packages : Dict[str, str], default {}
544
+ Packages to use for this step. The key is the name of the package
545
+ and the value is the version to use.
546
+ libraries : Dict[str, str], default {}
547
+ Supported for backward compatibility. When used with packages, packages will take precedence.
548
+ python : str, optional, default None
549
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
550
+ that the version used will correspond to the version of the Python interpreter used to start the run.
551
+ disabled : bool, default False
552
+ If set to True, disables @conda.
626
553
  """
627
554
  ...
628
555
 
629
556
  @typing.overload
630
- def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
557
+ def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
631
558
  ...
632
559
 
633
560
  @typing.overload
634
- def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
561
+ def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
635
562
  ...
636
563
 
637
- def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
564
+ def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
638
565
  """
639
- Specifies the number of times the task corresponding
640
- to a step needs to be retried.
641
-
642
- This decorator is useful for handling transient errors, such as networking issues.
643
- If your task contains operations that can't be retried safely, e.g. database updates,
644
- it is advisable to annotate it with `@retry(times=0)`.
566
+ Specifies the Conda environment for the step.
645
567
 
646
- This can be used in conjunction with the `@catch` decorator. The `@catch`
647
- decorator will execute a no-op task after all retries have been exhausted,
648
- ensuring that the flow execution can continue.
568
+ Information in this decorator will augment any
569
+ attributes set in the `@conda_base` flow-level decorator. Hence,
570
+ you can use `@conda_base` to set packages required by all
571
+ steps and use `@conda` to specify step-specific overrides.
649
572
 
650
573
 
651
574
  Parameters
652
575
  ----------
653
- times : int, default 3
654
- Number of times to retry this task.
655
- minutes_between_retries : int, default 2
656
- Number of minutes between retries.
576
+ packages : Dict[str, str], default {}
577
+ Packages to use for this step. The key is the name of the package
578
+ and the value is the version to use.
579
+ libraries : Dict[str, str], default {}
580
+ Supported for backward compatibility. When used with packages, packages will take precedence.
581
+ python : str, optional, default None
582
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
583
+ that the version used will correspond to the version of the Python interpreter used to start the run.
584
+ disabled : bool, default False
585
+ If set to True, disables @conda.
657
586
  """
658
587
  ...
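A short sketch of the flow-level/step-level split described above; the package names and versions are placeholders:
```
from metaflow import FlowSpec, conda, conda_base, step

@conda_base(python="3.10.12", packages={"numpy": "1.26.4"})  # shared by every step
class CondaDemoFlow(FlowSpec):

    @conda(packages={"pandas": "2.1.4"})  # step-specific addition on top of @conda_base
    @step
    def start(self):
        import pandas as pd
        self.rows = len(pd.DataFrame({"x": [1, 2, 3]}))
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    CondaDemoFlow()
```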
659
588
 
660
589
  @typing.overload
661
- def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
590
+ def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
662
591
  """
663
- Specifies a timeout for your step.
664
-
665
- This decorator is useful if this step may hang indefinitely.
666
-
667
- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
668
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
669
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
592
+ Specifies that the step will succeed under all circumstances.
670
593
 
671
- Note that all the values specified in parameters are added together so if you specify
672
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
594
+ The decorator will create an optional artifact, specified by `var`, which
595
+ contains the exception raised. You can use it to detect the presence
596
+ of errors, indicating that all happy-path artifacts produced by the step
597
+ are missing.
673
598
 
674
599
 
675
600
  Parameters
676
601
  ----------
677
- seconds : int, default 0
678
- Number of seconds to wait prior to timing out.
679
- minutes : int, default 0
680
- Number of minutes to wait prior to timing out.
681
- hours : int, default 0
682
- Number of hours to wait prior to timing out.
602
+ var : str, optional, default None
603
+ Name of the artifact in which to store the caught exception.
604
+ If not specified, the exception is not stored.
605
+ print_exception : bool, default True
606
+ Determines whether or not the exception is printed to
607
+ stdout when caught.
683
608
  """
684
609
  ...
685
610
 
686
611
  @typing.overload
687
- def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
612
+ def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
688
613
  ...
689
614
 
690
615
  @typing.overload
691
- def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
616
+ def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
692
617
  ...
693
618
 
694
- def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
619
+ def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
695
620
  """
696
- Specifies a timeout for your step.
621
+ Specifies that the step will succeed under all circumstances.
697
622
 
698
- This decorator is useful if this step may hang indefinitely.
623
+ The decorator will create an optional artifact, specified by `var`, which
624
+ contains the exception raised. You can use it to detect the presence
625
+ of errors, indicating that all happy-path artifacts produced by the step
626
+ are missing.
699
627
 
700
- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
701
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
702
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
703
628
 
704
- Note that all the values specified in parameters are added together so if you specify
705
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
629
+ Parameters
630
+ ----------
631
+ var : str, optional, default None
632
+ Name of the artifact in which to store the caught exception.
633
+ If not specified, the exception is not stored.
634
+ print_exception : bool, default True
635
+ Determines whether or not the exception is printed to
636
+ stdout when caught.
637
+ """
638
+ ...
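A sketch of the failure-handling pattern this enables, combined with `@retry` as the docstrings suggest; the artifact name `compute_failed` is illustrative:
```
from metaflow import FlowSpec, catch, retry, step

class CatchDemoFlow(FlowSpec):

    @retry(times=2)                   # retry transient failures first
    @catch(var="compute_failed")      # once retries are exhausted, record the exception
    @step
    def start(self):
        self.result = 1 / 0           # always raises, so the step "succeeds" only via @catch
        self.next(self.end)

    @step
    def end(self):
        # The happy-path artifact self.result is missing; compute_failed holds the exception.
        if getattr(self, "compute_failed", None):
            print("start failed with:", self.compute_failed)

if __name__ == "__main__":
    CatchDemoFlow()
```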
639
+
640
+ def nvct(*, gpu: int, gpu_type: str) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
641
+ """
642
+ Specifies that this step should execute on DGX cloud.
706
643
 
707
644
 
708
645
  Parameters
709
646
  ----------
710
- seconds : int, default 0
711
- Number of seconds to wait prior to timing out.
712
- minutes : int, default 0
713
- Number of minutes to wait prior to timing out.
714
- hours : int, default 0
715
- Number of hours to wait prior to timing out.
647
+ gpu : int
648
+ Number of GPUs to use.
649
+ gpu_type : str
650
+ Type of Nvidia GPU to use.
651
+ """
652
+ ...
653
+
654
+ def nvidia(*, gpu: int, gpu_type: str, queue_timeout: int) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
655
+ """
656
+ Specifies that this step should execute on DGX cloud.
657
+
658
+
659
+ Parameters
660
+ ----------
661
+ gpu : int
662
+ Number of GPUs to use.
663
+ gpu_type : str
664
+ Type of Nvidia GPU to use.
665
+ queue_timeout : int
666
+ Time to keep the job in NVCF's queue.
716
667
  """
717
668
  ...
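Going only by the signature above, attaching the decorator would look roughly as follows; the GPU type and queue timeout values are placeholders, not defaults documented here:
```
from metaflow import FlowSpec, nvidia, step

class DgxDemoFlow(FlowSpec):

    @nvidia(gpu=1, gpu_type="A100", queue_timeout=3600)  # all three arguments are keyword-only
    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    DgxDemoFlow()
```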
718
669
 
719
670
  @typing.overload
720
- def card(*, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
671
+ def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
721
672
  """
722
- Creates a human-readable report, a Metaflow Card, after this step completes.
673
+ Specifies the PyPI packages for the step.
723
674
 
724
- Note that you may add multiple `@card` decorators in a step with different parameters.
675
+ Information in this decorator will augment any
676
+ attributes set in the `@pypi_base` flow-level decorator. Hence,
677
+ you can use `@pypi_base` to set packages required by all
678
+ steps and use `@pypi` to specify step-specific overrides.
725
679
 
726
680
 
727
681
  Parameters
728
682
  ----------
729
- type : str, default 'default'
730
- Card type.
731
- id : str, optional, default None
732
- If multiple cards are present, use this id to identify this card.
733
- options : Dict[str, Any], default {}
734
- Options passed to the card. The contents depend on the card type.
735
- timeout : int, default 45
736
- Interrupt reporting if it takes more than this many seconds.
683
+ packages : Dict[str, str], default: {}
684
+ Packages to use for this step. The key is the name of the package
685
+ and the value is the version to use.
686
+ python : str, optional, default: None
687
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
688
+ that the version used will correspond to the version of the Python interpreter used to start the run.
737
689
  """
738
690
  ...
739
691
 
740
692
  @typing.overload
741
- def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
693
+ def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
742
694
  ...
743
695
 
744
696
  @typing.overload
745
- def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
697
+ def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
746
698
  ...
747
699
 
748
- def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
700
+ def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
749
701
  """
750
- Creates a human-readable report, a Metaflow Card, after this step completes.
702
+ Specifies the PyPI packages for the step.
751
703
 
752
- Note that you may add multiple `@card` decorators in a step with different parameters.
704
+ Information in this decorator will augment any
705
+ attributes set in the `@pypi_base` flow-level decorator. Hence,
706
+ you can use `@pypi_base` to set packages required by all
707
+ steps and use `@pypi` to specify step-specific overrides.
753
708
 
754
709
 
755
710
  Parameters
756
711
  ----------
757
- type : str, default 'default'
758
- Card type.
759
- id : str, optional, default None
760
- If multiple cards are present, use this id to identify this card.
761
- options : Dict[str, Any], default {}
762
- Options passed to the card. The contents depend on the card type.
763
- timeout : int, default 45
764
- Interrupt reporting if it takes more than this many seconds.
712
+ packages : Dict[str, str], default: {}
713
+ Packages to use for this step. The key is the name of the package
714
+ and the value is the version to use.
715
+ python : str, optional, default: None
716
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
717
+ that the version used will correspond to the version of the Python interpreter used to start the run.
765
718
  """
766
719
  ...
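A step-level sketch mirroring the parameters above (package versions are illustrative):
```
from metaflow import FlowSpec, pypi, step

class PypiDemoFlow(FlowSpec):

    @pypi(packages={"requests": "2.32.3"}, python="3.11.9")
    @step
    def start(self):
        import requests  # resolved inside this step's PyPI environment
        self.status = requests.get("https://httpbin.org/get").status_code
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    PypiDemoFlow()
```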
767
720
 
768
721
  @typing.overload
769
- def checkpoint(*, load_policy: str = 'fresh', temp_dir_root: str = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
722
+ def model(*, load: typing.Union[typing.List[str], str, typing.List[typing.Tuple[str, typing.Optional[str]]]] = None, temp_dir_root: str = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
770
723
  """
771
- Enables checkpointing for a step.
724
+ Enables loading / saving of models within a step.
772
725
 
773
726
 
774
727
 
775
728
  Parameters
776
729
  ----------
777
- load_policy : str, default: "fresh"
778
- The policy for loading the checkpoint. The following policies are supported:
779
- - "eager": Loads the the latest available checkpoint within the namespace.
780
- With this mode, the latest checkpoint written by any previous task (can be even a different run) of the step
781
- will be loaded at the start of the task.
782
- - "none": Do not load any checkpoint
783
- - "fresh": Loads the lastest checkpoint created within the running Task.
784
- This mode helps loading checkpoints across various retry attempts of the same task.
785
- With this mode, no checkpoint will be loaded at the start of a task but any checkpoints
786
- created within the task will be loaded when the task is retries execution on failure.
730
+ load : Union[List[str],str,List[Tuple[str,Union[str,None]]]], default: None
731
+ Artifact name/s referencing the models/checkpoints to load. Artifact names refer to the names of the instance variables set to `self`.
732
+ These artifact names given to `load` can be reference objects or reference `key` strings from objects created by:
733
+ - `current.checkpoint`
734
+ - `current.model`
735
+ - `current.huggingface_hub`
736
+
737
+ If a list of tuples is provided, the first element is the artifact name and the second element is the path the artifact needs to be unpacked on
738
+ the local filesystem. If the second element is None, the artifact will be unpacked in the current working directory.
739
+ If a string is provided, then the artifact corresponding to that name will be loaded in the current working directory.
787
740
 
788
741
  temp_dir_root : str, default: None
789
- The root directory under which `current.checkpoint.directory` will be created.
742
+ The root directory under which `current.model.loaded` will store loaded models.
790
743
  """
791
744
  ...
792
745
 
793
746
  @typing.overload
794
- def checkpoint(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
747
+ def model(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
795
748
  ...
796
749
 
797
750
  @typing.overload
798
- def checkpoint(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
751
+ def model(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
799
752
  ...
800
753
 
801
- def checkpoint(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, load_policy: str = 'fresh', temp_dir_root: str = None):
754
+ def model(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, load: typing.Union[typing.List[str], str, typing.List[typing.Tuple[str, typing.Optional[str]]]] = None, temp_dir_root: str = None):
802
755
  """
803
- Enables checkpointing for a step.
756
+ Enables loading / saving of models within a step.
804
757
 
805
758
 
806
759
 
807
760
  Parameters
808
761
  ----------
809
- load_policy : str, default: "fresh"
810
- The policy for loading the checkpoint. The following policies are supported:
811
- - "eager": Loads the the latest available checkpoint within the namespace.
812
- With this mode, the latest checkpoint written by any previous task (can be even a different run) of the step
813
- will be loaded at the start of the task.
814
- - "none": Do not load any checkpoint
815
- - "fresh": Loads the lastest checkpoint created within the running Task.
816
- This mode helps loading checkpoints across various retry attempts of the same task.
817
- With this mode, no checkpoint will be loaded at the start of a task but any checkpoints
818
- created within the task will be loaded when the task is retries execution on failure.
762
+ load : Union[List[str],str,List[Tuple[str,Union[str,None]]]], default: None
763
+ Artifact name/s referencing the models/checkpoints to load. Artifact names refer to the names of the instance variables set to `self`.
764
+ These artifact names given to `load` can be reference objects or reference `key` strings from objects created by:
765
+ - `current.checkpoint`
766
+ - `current.model`
767
+ - `current.huggingface_hub`
768
+
769
+ If a list of tuples is provided, the first element is the artifact name and the second element is the path the artifact needs to be unpacked on
770
+ the local filesystem. If the second element is None, the artifact will be unpacked in the current working directory.
771
+ If a string is provided, then the artifact corresponding to that name will be loaded in the current working directory.
819
772
 
820
773
  temp_dir_root : str, default: None
821
- The root directory under which `current.checkpoint.directory` will be created.
774
+ The root directory under which `current.model.loaded` will store loaded models.
775
+ """
776
+ ...
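A sketch of the load path described above. Only `current.model.loaded` is named in this docstring; the `current.model.save` call and the `trained` artifact name are assumptions used for illustration:
```
from metaflow import FlowSpec, current, model, step

class ModelDemoFlow(FlowSpec):

    @model
    @step
    def start(self):
        # Assumed API: persist a local directory and keep the returned reference
        # as an artifact so a later step can load it by name.
        self.trained = current.model.save("./model_dir")
        self.next(self.score)

    @model(load="trained")  # load the artifact named above
    @step
    def score(self):
        path = current.model.loaded["trained"]  # local path of the unpacked model
        print("model unpacked at", path)
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    ModelDemoFlow()
```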
777
+
778
+ def huggingface_hub(*, temp_dir_root: typing.Optional[str] = None, load: typing.Union[typing.List[str], typing.List[typing.Tuple[typing.Dict, str]], typing.List[typing.Tuple[str, str]], typing.List[typing.Dict], None]) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
779
+ """
780
+ Decorator that helps cache, version, and store models/datasets from the Hugging Face Hub.
781
+
782
+
783
+ Parameters
784
+ ----------
785
+ temp_dir_root : str, optional
786
+ The root directory that will hold the temporary directory where objects will be downloaded.
787
+
788
+ load: Union[List[str], List[Tuple[Dict, str]], List[Tuple[str, str]], List[Dict], None]
789
+ The list of repos (models/datasets) to load.
790
+
791
+ Loaded repos can be accessed via `current.huggingface_hub.loaded`. If load is set, then the following happens:
792
+
793
+ - If repo (model/dataset) is not found in the datastore:
794
+ - Downloads the repo from Hugging Face Hub to a temporary directory (or uses specified path) for local access
795
+ - Stores it in Metaflow's datastore (s3/gcs/azure etc.) with a unique name based on repo_type/repo_id
796
+ - All HF models loaded for a `@step` will be cached separately under flow/step/namespace.
797
+
798
+ - If repo is found in the datastore:
799
+ - Loads it directly from datastore to local path (can be temporary directory or specified path)
800
+ """
801
+ ...
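A sketch of the caching behavior described above; the repo id is a placeholder and the exact shape of the entries in `current.huggingface_hub.loaded` is assumed:
```
from metaflow import FlowSpec, current, huggingface_hub, step

class HFDemoFlow(FlowSpec):

    @huggingface_hub(load=["bert-base-uncased"])  # placeholder repo id
    @step
    def start(self):
        # First execution downloads from the Hub and stores the repo in the datastore;
        # subsequent executions load it straight from the datastore cache.
        local_path = current.huggingface_hub.loaded["bert-base-uncased"]
        print("repo available at", local_path)
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    HFDemoFlow()
```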
802
+
803
+ def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = 'KUBERNETES_IMAGE_PULL_POLICY', service_account: str = 'METAFLOW_KUBERNETES_SERVICE_ACCOUNT', secrets: typing.Optional[typing.List[str]] = None, node_selector: typing.Union[typing.Dict[str, str], str, None] = None, namespace: str = 'METAFLOW_KUBERNETES_NAMESPACE', gpu: typing.Optional[int] = None, gpu_vendor: str = 'KUBERNETES_GPU_VENDOR', tolerations: typing.List[str] = [], labels: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_LABELS', annotations: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_ANNOTATIONS', use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = '/metaflow_temp', persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None, hostname_resolution_timeout: int = 600, qos: str = 'Burstable', security_context: typing.Optional[typing.Dict[str, typing.Any]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
804
+ """
805
+ Specifies that this step should execute on Kubernetes.
806
+
807
+
808
+ Parameters
809
+ ----------
810
+ cpu : int, default 1
811
+ Number of CPUs required for this step. If `@resources` is
812
+ also present, the maximum value from all decorators is used.
813
+ memory : int, default 4096
814
+ Memory size (in MB) required for this step. If
815
+ `@resources` is also present, the maximum value from all decorators is
816
+ used.
817
+ disk : int, default 10240
818
+ Disk size (in MB) required for this step. If
819
+ `@resources` is also present, the maximum value from all decorators is
820
+ used.
821
+ image : str, optional, default None
822
+ Docker image to use when launching on Kubernetes. If not specified, and
823
+ METAFLOW_KUBERNETES_CONTAINER_IMAGE is specified, that image is used. If
824
+ not, a default Docker image mapping to the current version of Python is used.
825
+ image_pull_policy: str, default KUBERNETES_IMAGE_PULL_POLICY
826
+ If given, the imagePullPolicy to be applied to the Docker image of the step.
827
+ service_account : str, default METAFLOW_KUBERNETES_SERVICE_ACCOUNT
828
+ Kubernetes service account to use when launching pod in Kubernetes.
829
+ secrets : List[str], optional, default None
830
+ Kubernetes secrets to use when launching pod in Kubernetes. These
831
+ secrets are in addition to the ones defined in `METAFLOW_KUBERNETES_SECRETS`
832
+ in Metaflow configuration.
833
+ node_selector: Union[Dict[str,str], str], optional, default None
834
+ Kubernetes node selector(s) to apply to the pod running the task.
835
+ Can be passed in as a comma separated string of values e.g.
836
+ 'kubernetes.io/os=linux,kubernetes.io/arch=amd64' or as a dictionary
837
+ {'kubernetes.io/os': 'linux', 'kubernetes.io/arch': 'amd64'}
838
+ namespace : str, default METAFLOW_KUBERNETES_NAMESPACE
839
+ Kubernetes namespace to use when launching pod in Kubernetes.
840
+ gpu : int, optional, default None
841
+ Number of GPUs required for this step. A value of zero implies that
842
+ the scheduled node should not have GPUs.
843
+ gpu_vendor : str, default KUBERNETES_GPU_VENDOR
844
+ The vendor of the GPUs to be used for this step.
845
+ tolerations : List[str], default []
846
+ The default is extracted from METAFLOW_KUBERNETES_TOLERATIONS.
847
+ Kubernetes tolerations to use when launching pod in Kubernetes.
848
+ labels: Dict[str, str], default: METAFLOW_KUBERNETES_LABELS
849
+ Kubernetes labels to use when launching pod in Kubernetes.
850
+ annotations: Dict[str, str], default: METAFLOW_KUBERNETES_ANNOTATIONS
851
+ Kubernetes annotations to use when launching pod in Kubernetes.
852
+ use_tmpfs : bool, default False
853
+ This enables an explicit tmpfs mount for this step.
854
+ tmpfs_tempdir : bool, default True
855
+ Sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
856
+ tmpfs_size : int, optional, default: None
857
+ The value for the size (in MiB) of the tmpfs mount for this step.
858
+ This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
859
+ memory allocated for this step.
860
+ tmpfs_path : str, optional, default /metaflow_temp
861
+ Path to tmpfs mount for this step.
862
+ persistent_volume_claims : Dict[str, str], optional, default None
863
+ A map (dictionary) of persistent volumes to be mounted to the pod for this step. The map is from persistent
864
+ volumes to the path to which the volume is to be mounted, e.g., `{'pvc-name': '/path/to/mount/on'}`.
865
+ shared_memory: int, optional
866
+ Shared memory size (in MiB) required for this step
867
+ port: int, optional
868
+ Port number to specify in the Kubernetes job object
869
+ compute_pool : str, optional, default None
870
+ Compute pool to be used for this step.
871
+ If not specified, any accessible compute pool within the perimeter is used.
872
+ hostname_resolution_timeout: int, default 10 * 60
873
+ Timeout in seconds for the worker tasks in the gang-scheduled cluster to resolve the hostname of the control task.
874
+ Only applicable when @parallel is used.
875
+ qos: str, default: Burstable
876
+ Quality of Service class to assign to the pod. Supported values are: Guaranteed, Burstable, BestEffort
877
+
878
+ security_context: Dict[str, Any], optional, default None
879
+ Container security context. Applies to the task container. Allows the following keys:
880
+ - privileged: bool, optional, default None
881
+ - allow_privilege_escalation: bool, optional, default None
882
+ - run_as_user: int, optional, default None
883
+ - run_as_group: int, optional, default None
884
+ - run_as_non_root: bool, optional, default None
822
885
  """
823
886
  ...
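For instance, pinning a step to Kubernetes with a subset of the options above (image, node selector, and sizes are illustrative):
```
from metaflow import FlowSpec, kubernetes, step

class K8sDemoFlow(FlowSpec):

    @kubernetes(
        cpu=2,
        memory=8192,      # MB
        disk=20480,       # MB
        image="python:3.11-slim",
        node_selector={"kubernetes.io/arch": "amd64"},
        use_tmpfs=True,
    )
    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    K8sDemoFlow()
```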
824
887
 
825
888
  @typing.overload
826
- def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
889
+ def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
827
890
  """
828
- Specifies the Conda environment for the step.
891
+ Specifies the number of times the task corresponding
892
+ to a step needs to be retried.
829
893
 
830
- Information in this decorator will augment any
831
- attributes set in the `@conda_base` flow-level decorator. Hence,
832
- you can use `@conda_base` to set packages required by all
833
- steps and use `@conda` to specify step-specific overrides.
894
+ This decorator is useful for handling transient errors, such as networking issues.
895
+ If your task contains operations that can't be retried safely, e.g. database updates,
896
+ it is advisable to annotate it with `@retry(times=0)`.
897
+
898
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
899
+ decorator will execute a no-op task after all retries have been exhausted,
900
+ ensuring that the flow execution can continue.
834
901
 
835
902
 
836
903
  Parameters
837
904
  ----------
838
- packages : Dict[str, str], default {}
839
- Packages to use for this step. The key is the name of the package
840
- and the value is the version to use.
841
- libraries : Dict[str, str], default {}
842
- Supported for backward compatibility. When used with packages, packages will take precedence.
843
- python : str, optional, default None
844
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
845
- that the version used will correspond to the version of the Python interpreter used to start the run.
846
- disabled : bool, default False
847
- If set to True, disables @conda.
905
+ times : int, default 3
906
+ Number of times to retry this task.
907
+ minutes_between_retries : int, default 2
908
+ Number of minutes between retries.
848
909
  """
849
910
  ...
850
911
 
851
912
  @typing.overload
852
- def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
913
+ def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
853
914
  ...
854
915
 
855
916
  @typing.overload
856
- def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
917
+ def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
857
918
  ...
858
919
 
859
- def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
920
+ def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
860
921
  """
861
- Specifies the Conda environment for the step.
922
+ Specifies the number of times the task corresponding
923
+ to a step needs to be retried.
862
924
 
863
- Information in this decorator will augment any
864
- attributes set in the `@conda_base` flow-level decorator. Hence,
865
- you can use `@conda_base` to set packages required by all
866
- steps and use `@conda` to specify step-specific overrides.
925
+ This decorator is useful for handling transient errors, such as networking issues.
926
+ If your task contains operations that can't be retried safely, e.g. database updates,
927
+ it is advisable to annotate it with `@retry(times=0)`.
928
+
929
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
930
+ decorator will execute a no-op task after all retries have been exhausted,
931
+ ensuring that the flow execution can continue.
867
932
 
868
933
 
869
934
  Parameters
870
935
  ----------
871
- packages : Dict[str, str], default {}
872
- Packages to use for this step. The key is the name of the package
873
- and the value is the version to use.
874
- libraries : Dict[str, str], default {}
875
- Supported for backward compatibility. When used with packages, packages will take precedence.
876
- python : str, optional, default None
877
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
878
- that the version used will correspond to the version of the Python interpreter used to start the run.
879
- disabled : bool, default False
880
- If set to True, disables @conda.
936
+ times : int, default 3
937
+ Number of times to retry this task.
938
+ minutes_between_retries : int, default 2
939
+ Number of minutes between retries.
881
940
  """
882
941
  ...
883
942
 
884
943
  @typing.overload
885
- def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
944
+ def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
886
945
  """
887
- Specifies secrets to be retrieved and injected as environment variables prior to
888
- the execution of a step.
946
+ Specifies environment variables to be set prior to the execution of a step.
889
947
 
890
948
 
891
949
  Parameters
892
950
  ----------
893
- sources : List[Union[str, Dict[str, Any]]], default: []
894
- List of secret specs, defining how the secrets are to be retrieved
951
+ vars : Dict[str, str], default {}
952
+ Dictionary of environment variables to set.
895
953
  """
896
954
  ...
897
955
 
898
956
  @typing.overload
899
- def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
957
+ def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
900
958
  ...
901
959
 
902
960
  @typing.overload
903
- def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
961
+ def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
904
962
  ...
905
963
 
906
- def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []):
964
+ def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
907
965
  """
908
- Specifies secrets to be retrieved and injected as environment variables prior to
909
- the execution of a step.
966
+ Specifies environment variables to be set prior to the execution of a step.
910
967
 
911
968
 
912
969
  Parameters
913
970
  ----------
914
- sources : List[Union[str, Dict[str, Any]]], default: []
915
- List of secret specs, defining how the secrets are to be retrieved
971
+ vars : Dict[str, str], default {}
972
+ Dictionary of environment variables to set.
916
973
  """
917
974
  ...
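A small sketch of setting per-step variables (names and values are placeholders):
```
import os
from metaflow import FlowSpec, environment, step

class EnvDemoFlow(FlowSpec):

    @environment(vars={"MODEL_STAGE": "staging", "LOG_LEVEL": "debug"})
    @step
    def start(self):
        # The variables are available in this step's process environment.
        print(os.environ["MODEL_STAGE"], os.environ["LOG_LEVEL"])
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    EnvDemoFlow()
```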
918
975
 
919
976
  @typing.overload
920
- def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
977
+ def trigger_on_finish(*, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
921
978
  """
922
- Specifies the PyPI packages for the step.
979
+ Specifies the flow(s) that this flow depends on.
923
980
 
924
- Information in this decorator will augment any
925
- attributes set in the `@pyi_base` flow-level decorator. Hence,
926
- you can use `@pypi_base` to set packages required by all
927
- steps and use `@pypi` to specify step-specific overrides.
981
+ ```
982
+ @trigger_on_finish(flow='FooFlow')
983
+ ```
984
+ or
985
+ ```
986
+ @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
987
+ ```
988
+ This decorator respects the @project decorator and triggers the flow
989
+ when upstream runs within the same namespace complete successfully.
990
+
991
+ Additionally, you can specify project-aware upstream flow dependencies
992
+ by specifying the fully qualified project_flow_name.
993
+ ```
994
+ @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
995
+ ```
996
+ or
997
+ ```
998
+ @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
999
+ ```
1000
+
1001
+ You can also specify just the project or project branch (other values will be
1002
+ inferred from the current project or project branch):
1003
+ ```
1004
+ @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1005
+ ```
1006
+
1007
+ Note that `branch` is typically one of:
1008
+ - `prod`
1009
+ - `user.bob`
1010
+ - `test.my_experiment`
1011
+ - `prod.staging`
928
1012
 
929
1013
 
930
1014
  Parameters
931
1015
  ----------
932
- packages : Dict[str, str], default: {}
933
- Packages to use for this step. The key is the name of the package
934
- and the value is the version to use.
935
- python : str, optional, default: None
936
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
937
- that the version used will correspond to the version of the Python interpreter used to start the run.
1016
+ flow : Union[str, Dict[str, str]], optional, default None
1017
+ Upstream flow dependency for this flow.
1018
+ flows : List[Union[str, Dict[str, str]]], default []
1019
+ Upstream flow dependencies for this flow.
1020
+ options : Dict[str, Any], default {}
1021
+ Backend-specific configuration for tuning eventing behavior.
938
1022
  """
939
1023
  ...
940
1024
 
941
1025
  @typing.overload
942
- def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
943
- ...
944
-
945
- @typing.overload
946
- def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1026
+ def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
947
1027
  ...
948
1028
 
949
- def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
1029
+ def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
950
1030
  """
951
- Specifies the PyPI packages for the step.
1031
+ Specifies the flow(s) that this flow depends on.
952
1032
 
953
- Information in this decorator will augment any
954
- attributes set in the `@pyi_base` flow-level decorator. Hence,
955
- you can use `@pypi_base` to set packages required by all
956
- steps and use `@pypi` to specify step-specific overrides.
1033
+ ```
1034
+ @trigger_on_finish(flow='FooFlow')
1035
+ ```
1036
+ or
1037
+ ```
1038
+ @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
1039
+ ```
1040
+ This decorator respects the @project decorator and triggers the flow
1041
+ when upstream runs within the same namespace complete successfully.
1042
+
1043
+ Additionally, you can specify project-aware upstream flow dependencies
1044
+ by specifying the fully qualified project_flow_name.
1045
+ ```
1046
+ @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
1047
+ ```
1048
+ or
1049
+ ```
1050
+ @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
1051
+ ```
1052
+
1053
+ You can also specify just the project or project branch (other values will be
1054
+ inferred from the current project or project branch):
1055
+ ```
1056
+ @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1057
+ ```
1058
+
1059
+ Note that `branch` is typically one of:
1060
+ - `prod`
1061
+ - `user.bob`
1062
+ - `test.my_experiment`
1063
+ - `prod.staging`
957
1064
 
958
1065
 
959
1066
  Parameters
960
1067
  ----------
961
- packages : Dict[str, str], default: {}
962
- Packages to use for this step. The key is the name of the package
963
- and the value is the version to use.
964
- python : str, optional, default: None
965
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
966
- that the version used will correspond to the version of the Python interpreter used to start the run.
1068
+ flow : Union[str, Dict[str, str]], optional, default None
1069
+ Upstream flow dependency for this flow.
1070
+ flows : List[Union[str, Dict[str, str]]], default []
1071
+ Upstream flow dependencies for this flow.
1072
+ options : Dict[str, Any], default {}
1073
+ Backend-specific configuration for tuning eventing behavior.
967
1074
  """
968
1075
  ...
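A flow-level sketch of the simplest dependency pattern above; the flow names are placeholders, and the trigger only takes effect once the flow is deployed to a production orchestrator:
```
from metaflow import FlowSpec, step, trigger_on_finish

@trigger_on_finish(flow="TrainingFlow")  # run automatically after TrainingFlow succeeds
class ScoringFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    ScoringFlow()
```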
969
1076
 
@@ -1061,103 +1168,86 @@ def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: t
1061
1168
  ...
1062
1169
 
1063
1170
  @typing.overload
1064
- def trigger_on_finish(*, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1171
+ def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1065
1172
  """
1066
- Specifies the flow(s) that this flow depends on.
1067
-
1068
- ```
1069
- @trigger_on_finish(flow='FooFlow')
1070
- ```
1071
- or
1072
- ```
1073
- @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
1074
- ```
1075
- This decorator respects the @project decorator and triggers the flow
1076
- when upstream runs within the same namespace complete successfully
1077
-
1078
- Additionally, you can specify project aware upstream flow dependencies
1079
- by specifying the fully qualified project_flow_name.
1080
- ```
1081
- @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
1082
- ```
1083
- or
1084
- ```
1085
- @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
1086
- ```
1087
-
1088
- You can also specify just the project or project branch (other values will be
1089
- inferred from the current project or project branch):
1090
- ```
1091
- @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1092
- ```
1093
-
1094
- Note that `branch` is typically one of:
1095
- - `prod`
1096
- - `user.bob`
1097
- - `test.my_experiment`
1098
- - `prod.staging`
1173
+ Specifies the PyPI packages for all steps of the flow.
1099
1174
 
1175
+ Use `@pypi_base` to set common packages required by all
1176
+ steps and use `@pypi` to specify step-specific overrides.
1100
1177
 
1101
1178
  Parameters
1102
1179
  ----------
1103
- flow : Union[str, Dict[str, str]], optional, default None
1104
- Upstream flow dependency for this flow.
1105
- flows : List[Union[str, Dict[str, str]]], default []
1106
- Upstream flow dependencies for this flow.
1107
- options : Dict[str, Any], default {}
1108
- Backend-specific configuration for tuning eventing behavior.
1180
+ packages : Dict[str, str], default: {}
1181
+ Packages to use for this flow. The key is the name of the package
1182
+ and the value is the version to use.
1183
+ python : str, optional, default: None
1184
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1185
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1109
1186
  """
1110
1187
  ...
1111
1188
 
1112
1189
  @typing.overload
1113
- def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1190
+ def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1114
1191
  ...
1115
1192
 
1116
- def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
1193
+ def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
1117
1194
  """
1118
- Specifies the flow(s) that this flow depends on.
1119
-
1120
- ```
1121
- @trigger_on_finish(flow='FooFlow')
1122
- ```
1123
- or
1124
- ```
1125
- @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
1126
- ```
1127
- This decorator respects the @project decorator and triggers the flow
1128
- when upstream runs within the same namespace complete successfully
1129
-
1130
- Additionally, you can specify project aware upstream flow dependencies
1131
- by specifying the fully qualified project_flow_name.
1132
- ```
1133
- @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
1134
- ```
1135
- or
1136
- ```
1137
- @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
1138
- ```
1195
+ Specifies the PyPI packages for all steps of the flow.
1139
1196
 
1140
- You can also specify just the project or project branch (other values will be
1141
- inferred from the current project or project branch):
1142
- ```
1143
- @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1144
- ```
1197
+ Use `@pypi_base` to set common packages required by all
1198
+ steps and use `@pypi` to specify step-specific overrides.
1145
1199
 
1146
- Note that `branch` is typically one of:
1147
- - `prod`
1148
- - `user.bob`
1149
- - `test.my_experiment`
1150
- - `prod.staging`
1200
+ Parameters
1201
+ ----------
1202
+ packages : Dict[str, str], default: {}
1203
+ Packages to use for this flow. The key is the name of the package
1204
+ and the value is the version to use.
1205
+ python : str, optional, default: None
1206
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1207
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1208
+ """
1209
+ ...
1210
+
1211
+ def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1212
+ """
1213
+ The `@airflow_external_task_sensor` decorator attaches an Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the `start` step of the flow.
1214
+ This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as a flow decorator. Adding more than one decorator will ensure that the `start` step starts only after all sensors finish.
1151
1215
 
1152
1216
 
1153
1217
  Parameters
1154
1218
  ----------
1155
- flow : Union[str, Dict[str, str]], optional, default None
1156
- Upstream flow dependency for this flow.
1157
- flows : List[Union[str, Dict[str, str]]], default []
1158
- Upstream flow dependencies for this flow.
1159
- options : Dict[str, Any], default {}
1160
- Backend-specific configuration for tuning eventing behavior.
1219
+ timeout : int
1220
+ Time, in seconds, before the task times out and fails. (Default: 3600)
1221
+ poke_interval : int
1222
+ Time in seconds that the job should wait in between each try. (Default: 60)
1223
+ mode : str
1224
+ How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
1225
+ exponential_backoff : bool
1226
+ Allow progressively longer waits between pokes by using an exponential backoff algorithm. (Default: True)
1227
+ pool : str
1228
+ The slot pool this task should run in;
1229
+ slot pools are a way to limit concurrency for certain tasks. (Default: None)
1230
+ soft_fail : bool
1231
+ Set to True to mark the task as SKIPPED on failure. (Default: False)
1232
+ name : str
1233
+ Name of the sensor on Airflow
1234
+ description : str
1235
+ Description of sensor in the Airflow UI
1236
+ external_dag_id : str
1237
+ The dag_id that contains the task you want to wait for.
1238
+ external_task_ids : List[str]
1239
+ The list of task_ids that you want to wait for.
1240
+ If None (default value) the sensor waits for the DAG. (Default: None)
1241
+ allowed_states : List[str]
1242
+ Iterable of allowed states. (Default: ['success'])
1243
+ failed_states : List[str]
1244
+ Iterable of failed or disallowed states. (Default: None)
1245
+ execution_delta : datetime.timedelta
1246
+ Time difference with the previous execution to look at;
1247
+ the default is the same logical date as the current task or DAG. (Default: None)
1248
+ check_existence: bool
1249
+ Set to True to check if the external task exists or check if
1250
+ the DAG to wait for exists. (Default: True)
1161
1251
  """
1162
1252
  ...
1163
1253
 
@@ -1255,133 +1345,6 @@ def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, expone
1255
1345
  """
1256
1346
  ...
1257
1347
 
- @typing.overload
- def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
- """
- Specifies the times when the flow should be run when running on a
- production scheduler.
-
-
- Parameters
- ----------
- hourly : bool, default False
- Run the workflow hourly.
- daily : bool, default True
- Run the workflow daily.
- weekly : bool, default False
- Run the workflow weekly.
- cron : str, optional, default None
- Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
- specified by this expression.
- timezone : str, optional, default None
- Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
- which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
- """
- ...
-
- @typing.overload
- def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
- ...
-
- def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
- """
- Specifies the times when the flow should be run when running on a
- production scheduler.
-
-
- Parameters
- ----------
- hourly : bool, default False
- Run the workflow hourly.
- daily : bool, default True
- Run the workflow daily.
- weekly : bool, default False
- Run the workflow weekly.
- cron : str, optional, default None
- Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
- specified by this expression.
- timezone : str, optional, default None
- Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
- which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
- """
- ...
-
- def project(*, name: str, branch: typing.Optional[str] = None, production: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
- """
- Specifies what flows belong to the same project.
-
- A project-specific namespace is created for all flows that
- use the same `@project(name)`.
-
-
- Parameters
- ----------
- name : str
- Project name. Make sure that the name is unique amongst all
- projects that use the same production scheduler. The name may
- contain only lowercase alphanumeric characters and underscores.
-
- branch : Optional[str], default None
- The branch to use. If not specified, the branch is set to
- `user.<username>` unless `production` is set to `True`. This can
- also be set on the command line using `--branch` as a top-level option.
- It is an error to specify `branch` in the decorator and on the command line.
-
- production : bool, default False
- Whether or not the branch is the production branch. This can also be set on the
- command line using `--production` as a top-level option. It is an error to specify
- `production` in the decorator and on the command line.
- The project branch name will be:
- - if `branch` is specified:
- - if `production` is True: `prod.<branch>`
- - if `production` is False: `test.<branch>`
- - if `branch` is not specified:
- - if `production` is True: `prod`
- - if `production` is False: `user.<username>`
- """
- ...
-
- @typing.overload
- def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
- """
- Specifies the PyPI packages for all steps of the flow.
-
- Use `@pypi_base` to set common packages required by all
- steps and use `@pypi` to specify step-specific overrides.
-
- Parameters
- ----------
- packages : Dict[str, str], default: {}
- Packages to use for this flow. The key is the name of the package
- and the value is the version to use.
- python : str, optional, default: None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
- """
- ...
-
- @typing.overload
- def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
- ...
-
- def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
- """
- Specifies the PyPI packages for all steps of the flow.
-
- Use `@pypi_base` to set common packages required by all
- steps and use `@pypi` to specify step-specific overrides.
-
- Parameters
- ----------
- packages : Dict[str, str], default: {}
- Packages to use for this flow. The key is the name of the package
- and the value is the version to use.
- python : str, optional, default: None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
- """
- ...
-
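Since `@pypi_base` appears in this diff only as removed lines, a minimal usage sketch may help; the flow name and pinned versions below are illustrative, not taken from the package.

```python
from metaflow import FlowSpec, pypi_base, step


# Hypothetical flow: pandas is pinned for every step via @pypi_base; an
# individual step could still override or extend the set with a step-level @pypi.
@pypi_base(packages={"pandas": "2.2.2"}, python="3.11.9")
class ReportFlow(FlowSpec):
    @step
    def start(self):
        import pandas as pd  # resolved from the @pypi_base environment
        self.rows = len(pd.DataFrame({"x": [1, 2, 3]}))
        self.next(self.end)

    @step
    def end(self):
        print(f"processed {self.rows} rows")
```

Depending on how Metaflow is configured, the run may also need the PyPI environment enabled on the command line, e.g. `python report_flow.py --environment=pypi run`.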
  def with_artifact_store(f: typing.Optional[typing.Type[FlowSpecDerived]] = None):
  """
  Allows setting external datastores to save data for the
@@ -1496,46 +1459,89 @@ def with_artifact_store(f: typing.Optional[typing.Type[FlowSpecDerived]] = None)
  """
  ...
 
- def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ def project(*, name: str, branch: typing.Optional[str] = None, production: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
- The `@airflow_external_task_sensor` decorator attaches a Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
- This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as a flow decorators. Adding more than one decorator will ensure that `start` step starts only after all sensors finish.
+ Specifies what flows belong to the same project.
+
+ A project-specific namespace is created for all flows that
+ use the same `@project(name)`.


  Parameters
  ----------
- timeout : int
- Time, in seconds before the task times out and fails. (Default: 3600)
- poke_interval : int
- Time in seconds that the job should wait in between each try. (Default: 60)
- mode : str
- How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
- exponential_backoff : bool
- allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
- pool : str
- the slot pool this task should run in,
- slot pools are a way to limit concurrency for certain tasks. (Default:None)
- soft_fail : bool
- Set to true to mark the task as SKIPPED on failure. (Default: False)
  name : str
- Name of the sensor on Airflow
- description : str
- Description of sensor in the Airflow UI
- external_dag_id : str
- The dag_id that contains the task you want to wait for.
- external_task_ids : List[str]
- The list of task_ids that you want to wait for.
- If None (default value) the sensor waits for the DAG. (Default: None)
- allowed_states : List[str]
- Iterable of allowed states, (Default: ['success'])
- failed_states : List[str]
- Iterable of failed or dis-allowed states. (Default: None)
- execution_delta : datetime.timedelta
- time difference with the previous execution to look at,
- the default is the same logical date as the current task or DAG. (Default: None)
- check_existence: bool
- Set to True to check if the external task exists or check if
- the DAG to wait for exists. (Default: True)
+ Project name. Make sure that the name is unique amongst all
+ projects that use the same production scheduler. The name may
+ contain only lowercase alphanumeric characters and underscores.
+
+ branch : Optional[str], default None
+ The branch to use. If not specified, the branch is set to
+ `user.<username>` unless `production` is set to `True`. This can
+ also be set on the command line using `--branch` as a top-level option.
+ It is an error to specify `branch` in the decorator and on the command line.
+
+ production : bool, default False
+ Whether or not the branch is the production branch. This can also be set on the
+ command line using `--production` as a top-level option. It is an error to specify
+ `production` in the decorator and on the command line.
+ The project branch name will be:
+ - if `branch` is specified:
+ - if `production` is True: `prod.<branch>`
+ - if `production` is False: `test.<branch>`
+ - if `branch` is not specified:
+ - if `production` is True: `prod`
+ - if `production` is False: `user.<username>`
+ """
+ ...
+
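A minimal sketch of the `@project` decorator added above; the project and flow names are made up, and the branch names in the comment simply restate the resolution rules from the docstring.

```python
from metaflow import FlowSpec, project, step


# Hypothetical flow. Every flow decorated with @project(name="fraud_detection")
# shares one namespace: a plain run lands under `user.<username>`, a run with
# `--production` under `prod`, and `--branch demo` under `test.demo`.
@project(name="fraud_detection")
class ScoringFlow(FlowSpec):
    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass
```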
+ @typing.overload
+ def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ """
+ Specifies the times when the flow should be run when running on a
+ production scheduler.
+
+
+ Parameters
+ ----------
+ hourly : bool, default False
+ Run the workflow hourly.
+ daily : bool, default True
+ Run the workflow daily.
+ weekly : bool, default False
+ Run the workflow weekly.
+ cron : str, optional, default None
+ Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
+ specified by this expression.
+ timezone : str, optional, default None
+ Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
+ which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
+ """
+ ...
+
+ @typing.overload
+ def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ ...
+
+ def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
+ """
+ Specifies the times when the flow should be run when running on a
+ production scheduler.
+
+
+ Parameters
+ ----------
+ hourly : bool, default False
+ Run the workflow hourly.
+ daily : bool, default True
+ Run the workflow daily.
+ weekly : bool, default False
+ Run the workflow weekly.
+ cron : str, optional, default None
+ Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
+ specified by this expression.
+ timezone : str, optional, default None
+ Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
+ which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
  """
  ...
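And a minimal sketch of the re-added `@schedule` decorator; the cron expression and timezone are illustrative, and it is assumed that an explicit `cron` expression takes precedence over the hourly/daily/weekly flags.

```python
from metaflow import FlowSpec, schedule, step


# Hypothetical nightly flow: runs at 06:00 Europe/Berlin once deployed to a
# production scheduler (the timezone is documented above as Argo-only); local
# `run` invocations are unaffected by @schedule.
@schedule(cron="0 6 * * *", timezone="Europe/Berlin")
class NightlyReportFlow(FlowSpec):
    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass
```

The schedule takes effect when the flow is deployed, e.g. via `python nightly_report_flow.py argo-workflows create`.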