ob-metaflow-stubs 6.0.3.172__py2.py3-none-any.whl → 6.0.3.173__py2.py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (211) hide show
  1. metaflow-stubs/__init__.pyi +737 -737
  2. metaflow-stubs/cards.pyi +1 -1
  3. metaflow-stubs/cli.pyi +1 -1
  4. metaflow-stubs/cli_components/__init__.pyi +1 -1
  5. metaflow-stubs/cli_components/utils.pyi +1 -1
  6. metaflow-stubs/client/__init__.pyi +1 -1
  7. metaflow-stubs/client/core.pyi +3 -3
  8. metaflow-stubs/client/filecache.pyi +2 -2
  9. metaflow-stubs/events.pyi +2 -2
  10. metaflow-stubs/exception.pyi +1 -1
  11. metaflow-stubs/flowspec.pyi +3 -3
  12. metaflow-stubs/generated_for.txt +1 -1
  13. metaflow-stubs/includefile.pyi +1 -1
  14. metaflow-stubs/info_file.pyi +1 -1
  15. metaflow-stubs/metadata_provider/__init__.pyi +1 -1
  16. metaflow-stubs/metadata_provider/heartbeat.pyi +1 -1
  17. metaflow-stubs/metadata_provider/metadata.pyi +1 -1
  18. metaflow-stubs/metadata_provider/util.pyi +1 -1
  19. metaflow-stubs/metaflow_config.pyi +1 -1
  20. metaflow-stubs/metaflow_current.pyi +140 -140
  21. metaflow-stubs/metaflow_git.pyi +1 -1
  22. metaflow-stubs/mf_extensions/__init__.pyi +1 -1
  23. metaflow-stubs/mf_extensions/obcheckpoint/__init__.pyi +1 -1
  24. metaflow-stubs/mf_extensions/obcheckpoint/plugins/__init__.pyi +1 -1
  25. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/__init__.pyi +1 -1
  26. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/__init__.pyi +1 -1
  27. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/async_cards.pyi +2 -2
  28. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/deco_injection_mixin.pyi +1 -1
  29. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/extra_components.pyi +1 -1
  30. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/__init__.pyi +1 -1
  31. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/cards/__init__.pyi +1 -1
  32. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/cards/checkpoint_lister.pyi +2 -2
  33. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/cards/lineage_card.pyi +1 -1
  34. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/checkpoint_storage.pyi +3 -3
  35. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/constructors.pyi +1 -1
  36. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/core.pyi +3 -3
  37. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/decorator.pyi +3 -3
  38. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/exceptions.pyi +1 -1
  39. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/final_api.pyi +2 -2
  40. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/lineage.pyi +1 -1
  41. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/__init__.pyi +1 -1
  42. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/context.pyi +2 -2
  43. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/core.pyi +2 -2
  44. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/decorator.pyi +1 -1
  45. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/exceptions.pyi +1 -1
  46. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/task_utils.pyi +1 -1
  47. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/utils.pyi +1 -1
  48. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastructures.pyi +1 -1
  49. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/exceptions.pyi +1 -1
  50. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/hf_hub/__init__.pyi +1 -1
  51. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/hf_hub/decorator.pyi +2 -2
  52. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/__init__.pyi +1 -1
  53. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/core.pyi +2 -2
  54. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/exceptions.pyi +1 -1
  55. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/model_storage.pyi +2 -2
  56. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/__init__.pyi +1 -1
  57. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/flowspec_utils.pyi +1 -1
  58. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/general.pyi +1 -1
  59. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/identity_utils.pyi +1 -1
  60. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/serialization_handler/__init__.pyi +1 -1
  61. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/serialization_handler/base.pyi +1 -1
  62. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/serialization_handler/tar.pyi +1 -1
  63. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/tar_utils.pyi +1 -1
  64. metaflow-stubs/mf_extensions/outerbounds/__init__.pyi +1 -1
  65. metaflow-stubs/mf_extensions/outerbounds/plugins/__init__.pyi +1 -1
  66. metaflow-stubs/mf_extensions/outerbounds/plugins/card_utilities/__init__.pyi +1 -1
  67. metaflow-stubs/mf_extensions/outerbounds/plugins/card_utilities/injector.pyi +1 -1
  68. metaflow-stubs/mf_extensions/outerbounds/plugins/checkpoint_datastores/__init__.pyi +1 -1
  69. metaflow-stubs/mf_extensions/outerbounds/plugins/checkpoint_datastores/coreweave.pyi +1 -1
  70. metaflow-stubs/mf_extensions/outerbounds/plugins/checkpoint_datastores/nebius.pyi +1 -1
  71. metaflow-stubs/mf_extensions/outerbounds/plugins/kubernetes/__init__.pyi +6 -0
  72. metaflow-stubs/mf_extensions/outerbounds/plugins/kubernetes/pod_killer.pyi +31 -0
  73. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/__init__.pyi +1 -1
  74. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/constants.pyi +1 -1
  75. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/exceptions.pyi +1 -1
  76. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/ollama.pyi +1 -1
  77. metaflow-stubs/mf_extensions/outerbounds/plugins/snowflake/__init__.pyi +1 -1
  78. metaflow-stubs/mf_extensions/outerbounds/plugins/snowflake/snowflake.pyi +1 -1
  79. metaflow-stubs/mf_extensions/outerbounds/profilers/__init__.pyi +1 -1
  80. metaflow-stubs/mf_extensions/outerbounds/profilers/gpu.pyi +1 -1
  81. metaflow-stubs/mf_extensions/outerbounds/remote_config.pyi +1 -1
  82. metaflow-stubs/mf_extensions/outerbounds/toplevel/__init__.pyi +1 -1
  83. metaflow-stubs/mf_extensions/outerbounds/toplevel/global_aliases_for_metaflow_package.pyi +1 -1
  84. metaflow-stubs/multicore_utils.pyi +1 -1
  85. metaflow-stubs/parameters.pyi +2 -2
  86. metaflow-stubs/plugins/__init__.pyi +10 -10
  87. metaflow-stubs/plugins/airflow/__init__.pyi +1 -1
  88. metaflow-stubs/plugins/airflow/airflow_utils.pyi +1 -1
  89. metaflow-stubs/plugins/airflow/exception.pyi +1 -1
  90. metaflow-stubs/plugins/airflow/sensors/__init__.pyi +1 -1
  91. metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +1 -1
  92. metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +1 -1
  93. metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +1 -1
  94. metaflow-stubs/plugins/argo/__init__.pyi +1 -1
  95. metaflow-stubs/plugins/argo/argo_client.pyi +1 -1
  96. metaflow-stubs/plugins/argo/argo_events.pyi +1 -1
  97. metaflow-stubs/plugins/argo/argo_workflows.pyi +1 -1
  98. metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +3 -3
  99. metaflow-stubs/plugins/argo/argo_workflows_deployer.pyi +2 -2
  100. metaflow-stubs/plugins/argo/argo_workflows_deployer_objects.pyi +1 -1
  101. metaflow-stubs/plugins/aws/__init__.pyi +1 -1
  102. metaflow-stubs/plugins/aws/aws_client.pyi +1 -1
  103. metaflow-stubs/plugins/aws/aws_utils.pyi +1 -1
  104. metaflow-stubs/plugins/aws/batch/__init__.pyi +1 -1
  105. metaflow-stubs/plugins/aws/batch/batch.pyi +1 -1
  106. metaflow-stubs/plugins/aws/batch/batch_client.pyi +1 -1
  107. metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +1 -1
  108. metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +1 -1
  109. metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +3 -3
  110. metaflow-stubs/plugins/aws/step_functions/__init__.pyi +1 -1
  111. metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +1 -1
  112. metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +1 -1
  113. metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +1 -1
  114. metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +1 -1
  115. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer.pyi +2 -2
  116. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer_objects.pyi +2 -2
  117. metaflow-stubs/plugins/azure/__init__.pyi +1 -1
  118. metaflow-stubs/plugins/azure/azure_credential.pyi +1 -1
  119. metaflow-stubs/plugins/azure/azure_exceptions.pyi +1 -1
  120. metaflow-stubs/plugins/azure/azure_secret_manager_secrets_provider.pyi +3 -3
  121. metaflow-stubs/plugins/azure/azure_utils.pyi +1 -1
  122. metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +1 -1
  123. metaflow-stubs/plugins/azure/includefile_support.pyi +1 -1
  124. metaflow-stubs/plugins/cards/__init__.pyi +1 -1
  125. metaflow-stubs/plugins/cards/card_client.pyi +2 -2
  126. metaflow-stubs/plugins/cards/card_creator.pyi +1 -1
  127. metaflow-stubs/plugins/cards/card_datastore.pyi +1 -1
  128. metaflow-stubs/plugins/cards/card_decorator.pyi +1 -1
  129. metaflow-stubs/plugins/cards/card_modules/__init__.pyi +1 -1
  130. metaflow-stubs/plugins/cards/card_modules/basic.pyi +2 -2
  131. metaflow-stubs/plugins/cards/card_modules/card.pyi +1 -1
  132. metaflow-stubs/plugins/cards/card_modules/components.pyi +2 -2
  133. metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +1 -1
  134. metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +1 -1
  135. metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +1 -1
  136. metaflow-stubs/plugins/cards/card_resolver.pyi +1 -1
  137. metaflow-stubs/plugins/cards/component_serializer.pyi +1 -1
  138. metaflow-stubs/plugins/cards/exception.pyi +1 -1
  139. metaflow-stubs/plugins/catch_decorator.pyi +1 -1
  140. metaflow-stubs/plugins/datatools/__init__.pyi +1 -1
  141. metaflow-stubs/plugins/datatools/local.pyi +1 -1
  142. metaflow-stubs/plugins/datatools/s3/__init__.pyi +1 -1
  143. metaflow-stubs/plugins/datatools/s3/s3.pyi +2 -2
  144. metaflow-stubs/plugins/datatools/s3/s3tail.pyi +1 -1
  145. metaflow-stubs/plugins/datatools/s3/s3util.pyi +1 -1
  146. metaflow-stubs/plugins/debug_logger.pyi +1 -1
  147. metaflow-stubs/plugins/debug_monitor.pyi +1 -1
  148. metaflow-stubs/plugins/environment_decorator.pyi +1 -1
  149. metaflow-stubs/plugins/events_decorator.pyi +1 -1
  150. metaflow-stubs/plugins/frameworks/__init__.pyi +1 -1
  151. metaflow-stubs/plugins/frameworks/pytorch.pyi +1 -1
  152. metaflow-stubs/plugins/gcp/__init__.pyi +1 -1
  153. metaflow-stubs/plugins/gcp/gcp_secret_manager_secrets_provider.pyi +3 -3
  154. metaflow-stubs/plugins/gcp/gs_exceptions.pyi +1 -1
  155. metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +1 -1
  156. metaflow-stubs/plugins/gcp/gs_utils.pyi +1 -1
  157. metaflow-stubs/plugins/gcp/includefile_support.pyi +1 -1
  158. metaflow-stubs/plugins/kubernetes/__init__.pyi +1 -1
  159. metaflow-stubs/plugins/kubernetes/kube_utils.pyi +1 -1
  160. metaflow-stubs/plugins/kubernetes/kubernetes.pyi +1 -1
  161. metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +2 -1
  162. metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +1 -1
  163. metaflow-stubs/plugins/kubernetes/kubernetes_jobsets.pyi +1 -1
  164. metaflow-stubs/plugins/kubernetes/spot_monitor_sidecar.pyi +1 -1
  165. metaflow-stubs/plugins/ollama/__init__.pyi +2 -2
  166. metaflow-stubs/plugins/parallel_decorator.pyi +1 -1
  167. metaflow-stubs/plugins/perimeters.pyi +1 -1
  168. metaflow-stubs/plugins/project_decorator.pyi +1 -1
  169. metaflow-stubs/plugins/pypi/__init__.pyi +2 -2
  170. metaflow-stubs/plugins/pypi/conda_decorator.pyi +1 -1
  171. metaflow-stubs/plugins/pypi/conda_environment.pyi +5 -5
  172. metaflow-stubs/plugins/pypi/parsers.pyi +1 -1
  173. metaflow-stubs/plugins/pypi/pypi_decorator.pyi +1 -1
  174. metaflow-stubs/plugins/pypi/pypi_environment.pyi +1 -1
  175. metaflow-stubs/plugins/pypi/utils.pyi +1 -1
  176. metaflow-stubs/plugins/resources_decorator.pyi +1 -1
  177. metaflow-stubs/plugins/retry_decorator.pyi +1 -1
  178. metaflow-stubs/plugins/secrets/__init__.pyi +2 -2
  179. metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +3 -3
  180. metaflow-stubs/plugins/secrets/secrets_decorator.pyi +1 -1
  181. metaflow-stubs/plugins/snowflake/__init__.pyi +1 -1
  182. metaflow-stubs/plugins/storage_executor.pyi +1 -1
  183. metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +1 -1
  184. metaflow-stubs/plugins/timeout_decorator.pyi +1 -1
  185. metaflow-stubs/plugins/torchtune/__init__.pyi +1 -1
  186. metaflow-stubs/plugins/uv/__init__.pyi +1 -1
  187. metaflow-stubs/plugins/uv/uv_environment.pyi +1 -1
  188. metaflow-stubs/profilers/__init__.pyi +1 -1
  189. metaflow-stubs/pylint_wrapper.pyi +1 -1
  190. metaflow-stubs/runner/__init__.pyi +1 -1
  191. metaflow-stubs/runner/deployer.pyi +27 -27
  192. metaflow-stubs/runner/deployer_impl.pyi +1 -1
  193. metaflow-stubs/runner/metaflow_runner.pyi +2 -2
  194. metaflow-stubs/runner/nbdeploy.pyi +1 -1
  195. metaflow-stubs/runner/nbrun.pyi +1 -1
  196. metaflow-stubs/runner/subprocess_manager.pyi +1 -1
  197. metaflow-stubs/runner/utils.pyi +2 -2
  198. metaflow-stubs/system/__init__.pyi +1 -1
  199. metaflow-stubs/system/system_logger.pyi +2 -2
  200. metaflow-stubs/system/system_monitor.pyi +1 -1
  201. metaflow-stubs/tagging_util.pyi +1 -1
  202. metaflow-stubs/tuple_util.pyi +1 -1
  203. metaflow-stubs/user_configs/__init__.pyi +1 -1
  204. metaflow-stubs/user_configs/config_decorators.pyi +4 -4
  205. metaflow-stubs/user_configs/config_options.pyi +1 -1
  206. metaflow-stubs/user_configs/config_parameters.pyi +4 -4
  207. {ob_metaflow_stubs-6.0.3.172.dist-info → ob_metaflow_stubs-6.0.3.173.dist-info}/METADATA +1 -1
  208. ob_metaflow_stubs-6.0.3.173.dist-info/RECORD +211 -0
  209. ob_metaflow_stubs-6.0.3.172.dist-info/RECORD +0 -209
  210. {ob_metaflow_stubs-6.0.3.172.dist-info → ob_metaflow_stubs-6.0.3.173.dist-info}/WHEEL +0 -0
  211. {ob_metaflow_stubs-6.0.3.172.dist-info → ob_metaflow_stubs-6.0.3.173.dist-info}/top_level.txt +0 -0
@@ -1,15 +1,15 @@
1
1
  ######################################################################################################
2
2
  # Auto-generated Metaflow stub file #
3
3
  # MF version: 2.15.14.1+obcheckpoint(0.2.1);ob(v1) #
4
- # Generated on 2025-05-27T03:43:23.657123 #
4
+ # Generated on 2025-05-29T17:52:46.232236 #
5
5
  ######################################################################################################
6
6
 
7
7
  from __future__ import annotations
8
8
 
9
9
  import typing
10
10
  if typing.TYPE_CHECKING:
11
- import typing
12
11
  import datetime
12
+ import typing
13
13
  FlowSpecDerived = typing.TypeVar("FlowSpecDerived", bound="FlowSpec", contravariant=False, covariant=False)
14
14
  StepFlag = typing.NewType("StepFlag", bool)
15
15
 
@@ -35,17 +35,17 @@ from .user_configs.config_parameters import ConfigValue as ConfigValue
35
35
  from .user_configs.config_parameters import config_expr as config_expr
36
36
  from .user_configs.config_decorators import CustomFlowDecorator as CustomFlowDecorator
37
37
  from .user_configs.config_decorators import CustomStepDecorator as CustomStepDecorator
38
- from . import metaflow_git as metaflow_git
39
38
  from . import tuple_util as tuple_util
40
39
  from . import cards as cards
41
40
  from . import events as events
41
+ from . import metaflow_git as metaflow_git
42
42
  from . import runner as runner
43
43
  from . import plugins as plugins
44
44
  from .mf_extensions.outerbounds.toplevel.global_aliases_for_metaflow_package import S3 as S3
45
45
  from . import includefile as includefile
46
46
  from .includefile import IncludeFile as IncludeFile
47
- from .plugins.pypi.parsers import conda_environment_yml_parser as conda_environment_yml_parser
48
47
  from .plugins.pypi.parsers import pyproject_toml_parser as pyproject_toml_parser
48
+ from .plugins.pypi.parsers import conda_environment_yml_parser as conda_environment_yml_parser
49
49
  from .plugins.pypi.parsers import requirements_txt_parser as requirements_txt_parser
50
50
  from . import client as client
51
51
  from .client.core import namespace as namespace
@@ -153,256 +153,6 @@ def step(f: typing.Union[typing.Callable[[FlowSpecDerived], None], typing.Callab
153
153
  """
154
154
  ...
155
155
 
156
- @typing.overload
157
- def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
158
- """
159
- Specifies the Conda environment for the step.
160
-
161
- Information in this decorator will augment any
162
- attributes set in the `@conda_base` flow-level decorator. Hence,
163
- you can use `@conda_base` to set packages required by all
164
- steps and use `@conda` to specify step-specific overrides.
165
-
166
-
167
- Parameters
168
- ----------
169
- packages : Dict[str, str], default {}
170
- Packages to use for this step. The key is the name of the package
171
- and the value is the version to use.
172
- libraries : Dict[str, str], default {}
173
- Supported for backward compatibility. When used with packages, packages will take precedence.
174
- python : str, optional, default None
175
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
176
- that the version used will correspond to the version of the Python interpreter used to start the run.
177
- disabled : bool, default False
178
- If set to True, disables @conda.
179
- """
180
- ...
181
-
182
- @typing.overload
183
- def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
184
- ...
185
-
186
- @typing.overload
187
- def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
188
- ...
189
-
190
- def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
191
- """
192
- Specifies the Conda environment for the step.
193
-
194
- Information in this decorator will augment any
195
- attributes set in the `@conda_base` flow-level decorator. Hence,
196
- you can use `@conda_base` to set packages required by all
197
- steps and use `@conda` to specify step-specific overrides.
198
-
199
-
200
- Parameters
201
- ----------
202
- packages : Dict[str, str], default {}
203
- Packages to use for this step. The key is the name of the package
204
- and the value is the version to use.
205
- libraries : Dict[str, str], default {}
206
- Supported for backward compatibility. When used with packages, packages will take precedence.
207
- python : str, optional, default None
208
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
209
- that the version used will correspond to the version of the Python interpreter used to start the run.
210
- disabled : bool, default False
211
- If set to True, disables @conda.
212
- """
213
- ...
214
-
215
- @typing.overload
216
- def resources(*, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
217
- """
218
- Specifies the resources needed when executing this step.
219
-
220
- Use `@resources` to specify the resource requirements
221
- independently of the specific compute layer (`@batch`, `@kubernetes`).
222
-
223
- You can choose the compute layer on the command line by executing e.g.
224
- ```
225
- python myflow.py run --with batch
226
- ```
227
- or
228
- ```
229
- python myflow.py run --with kubernetes
230
- ```
231
- which executes the flow on the desired system using the
232
- requirements specified in `@resources`.
233
-
234
-
235
- Parameters
236
- ----------
237
- cpu : int, default 1
238
- Number of CPUs required for this step.
239
- gpu : int, optional, default None
240
- Number of GPUs required for this step.
241
- disk : int, optional, default None
242
- Disk size (in MB) required for this step. Only applies on Kubernetes.
243
- memory : int, default 4096
244
- Memory size (in MB) required for this step.
245
- shared_memory : int, optional, default None
246
- The value for the size (in MiB) of the /dev/shm volume for this step.
247
- This parameter maps to the `--shm-size` option in Docker.
248
- """
249
- ...
250
-
251
- @typing.overload
252
- def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
253
- ...
254
-
255
- @typing.overload
256
- def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
257
- ...
258
-
259
- def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
260
- """
261
- Specifies the resources needed when executing this step.
262
-
263
- Use `@resources` to specify the resource requirements
264
- independently of the specific compute layer (`@batch`, `@kubernetes`).
265
-
266
- You can choose the compute layer on the command line by executing e.g.
267
- ```
268
- python myflow.py run --with batch
269
- ```
270
- or
271
- ```
272
- python myflow.py run --with kubernetes
273
- ```
274
- which executes the flow on the desired system using the
275
- requirements specified in `@resources`.
276
-
277
-
278
- Parameters
279
- ----------
280
- cpu : int, default 1
281
- Number of CPUs required for this step.
282
- gpu : int, optional, default None
283
- Number of GPUs required for this step.
284
- disk : int, optional, default None
285
- Disk size (in MB) required for this step. Only applies on Kubernetes.
286
- memory : int, default 4096
287
- Memory size (in MB) required for this step.
288
- shared_memory : int, optional, default None
289
- The value for the size (in MiB) of the /dev/shm volume for this step.
290
- This parameter maps to the `--shm-size` option in Docker.
291
- """
292
- ...
293
-
294
- @typing.overload
295
- def model(*, load: typing.Union[typing.List[str], str, typing.List[typing.Tuple[str, typing.Optional[str]]]] = None, temp_dir_root: str = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
296
- """
297
- Enables loading / saving of models within a step.
298
-
299
-
300
-
301
- Parameters
302
- ----------
303
- load : Union[List[str],str,List[Tuple[str,Union[str,None]]]], default: None
304
- Artifact name/s referencing the models/checkpoints to load. Artifact names refer to the names of the instance variables set to `self`.
305
- These artifact names give to `load` be reference objects or reference `key` string's from objects created by:
306
- - `current.checkpoint`
307
- - `current.model`
308
- - `current.huggingface_hub`
309
-
310
- If a list of tuples is provided, the first element is the artifact name and the second element is the path the artifact needs be unpacked on
311
- the local filesystem. If the second element is None, the artifact will be unpacked in the current working directory.
312
- If a string is provided, then the artifact corresponding to that name will be loaded in the current working directory.
313
-
314
- temp_dir_root : str, default: None
315
- The root directory under which `current.model.loaded` will store loaded models
316
- """
317
- ...
318
-
319
- @typing.overload
320
- def model(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
321
- ...
322
-
323
- @typing.overload
324
- def model(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
325
- ...
326
-
327
- def model(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, load: typing.Union[typing.List[str], str, typing.List[typing.Tuple[str, typing.Optional[str]]]] = None, temp_dir_root: str = None):
328
- """
329
- Enables loading / saving of models within a step.
330
-
331
-
332
-
333
- Parameters
334
- ----------
335
- load : Union[List[str],str,List[Tuple[str,Union[str,None]]]], default: None
336
- Artifact name/s referencing the models/checkpoints to load. Artifact names refer to the names of the instance variables set to `self`.
337
- These artifact names give to `load` be reference objects or reference `key` string's from objects created by:
338
- - `current.checkpoint`
339
- - `current.model`
340
- - `current.huggingface_hub`
341
-
342
- If a list of tuples is provided, the first element is the artifact name and the second element is the path the artifact needs be unpacked on
343
- the local filesystem. If the second element is None, the artifact will be unpacked in the current working directory.
344
- If a string is provided, then the artifact corresponding to that name will be loaded in the current working directory.
345
-
346
- temp_dir_root : str, default: None
347
- The root directory under which `current.model.loaded` will store loaded models
348
- """
349
- ...
350
-
351
- @typing.overload
352
- def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
353
- """
354
- Specifies the number of times the task corresponding
355
- to a step needs to be retried.
356
-
357
- This decorator is useful for handling transient errors, such as networking issues.
358
- If your task contains operations that can't be retried safely, e.g. database updates,
359
- it is advisable to annotate it with `@retry(times=0)`.
360
-
361
- This can be used in conjunction with the `@catch` decorator. The `@catch`
362
- decorator will execute a no-op task after all retries have been exhausted,
363
- ensuring that the flow execution can continue.
364
-
365
-
366
- Parameters
367
- ----------
368
- times : int, default 3
369
- Number of times to retry this task.
370
- minutes_between_retries : int, default 2
371
- Number of minutes between retries.
372
- """
373
- ...
374
-
375
- @typing.overload
376
- def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
377
- ...
378
-
379
- @typing.overload
380
- def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
381
- ...
382
-
383
- def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
384
- """
385
- Specifies the number of times the task corresponding
386
- to a step needs to be retried.
387
-
388
- This decorator is useful for handling transient errors, such as networking issues.
389
- If your task contains operations that can't be retried safely, e.g. database updates,
390
- it is advisable to annotate it with `@retry(times=0)`.
391
-
392
- This can be used in conjunction with the `@catch` decorator. The `@catch`
393
- decorator will execute a no-op task after all retries have been exhausted,
394
- ensuring that the flow execution can continue.
395
-
396
-
397
- Parameters
398
- ----------
399
- times : int, default 3
400
- Number of times to retry this task.
401
- minutes_between_retries : int, default 2
402
- Number of minutes between retries.
403
- """
404
- ...
405
-
406
156
  def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = 'KUBERNETES_IMAGE_PULL_POLICY', service_account: str = 'METAFLOW_KUBERNETES_SERVICE_ACCOUNT', secrets: typing.Optional[typing.List[str]] = None, node_selector: typing.Union[typing.Dict[str, str], str, None] = None, namespace: str = 'METAFLOW_KUBERNETES_NAMESPACE', gpu: typing.Optional[int] = None, gpu_vendor: str = 'KUBERNETES_GPU_VENDOR', tolerations: typing.List[str] = [], labels: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_LABELS', annotations: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_ANNOTATIONS', use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = '/metaflow_temp', persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None, hostname_resolution_timeout: int = 600, qos: str = 'Burstable', security_context: typing.Optional[typing.Dict[str, typing.Any]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
407
157
  """
408
158
  Specifies that this step should execute on Kubernetes.
@@ -489,158 +239,38 @@ def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: ty
489
239
  ...
490
240
 
491
241
  @typing.overload
492
- def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
493
- """
494
- Specifies that the step will success under all circumstances.
495
-
496
- The decorator will create an optional artifact, specified by `var`, which
497
- contains the exception raised. You can use it to detect the presence
498
- of errors, indicating that all happy-path artifacts produced by the step
499
- are missing.
500
-
501
-
502
- Parameters
503
- ----------
504
- var : str, optional, default None
505
- Name of the artifact in which to store the caught exception.
506
- If not specified, the exception is not stored.
507
- print_exception : bool, default True
508
- Determines whether or not the exception is printed to
509
- stdout when caught.
510
- """
511
- ...
512
-
513
- @typing.overload
514
- def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
515
- ...
516
-
517
- @typing.overload
518
- def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
519
- ...
520
-
521
- def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
522
- """
523
- Specifies that the step will success under all circumstances.
524
-
525
- The decorator will create an optional artifact, specified by `var`, which
526
- contains the exception raised. You can use it to detect the presence
527
- of errors, indicating that all happy-path artifacts produced by the step
528
- are missing.
529
-
530
-
531
- Parameters
532
- ----------
533
- var : str, optional, default None
534
- Name of the artifact in which to store the caught exception.
535
- If not specified, the exception is not stored.
536
- print_exception : bool, default True
537
- Determines whether or not the exception is printed to
538
- stdout when caught.
539
- """
540
- ...
541
-
542
- @typing.overload
543
- def checkpoint(*, load_policy: str = 'fresh', temp_dir_root: str = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
242
+ def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
544
243
  """
545
- Enables checkpointing for a step.
546
-
547
-
548
-
549
- Parameters
550
- ----------
551
- load_policy : str, default: "fresh"
552
- The policy for loading the checkpoint. The following policies are supported:
553
- - "eager": Loads the the latest available checkpoint within the namespace.
554
- With this mode, the latest checkpoint written by any previous task (can be even a different run) of the step
555
- will be loaded at the start of the task.
556
- - "none": Do not load any checkpoint
557
- - "fresh": Loads the lastest checkpoint created within the running Task.
558
- This mode helps loading checkpoints across various retry attempts of the same task.
559
- With this mode, no checkpoint will be loaded at the start of a task but any checkpoints
560
- created within the task will be loaded when the task is retries execution on failure.
561
-
562
- temp_dir_root : str, default: None
563
- The root directory under which `current.checkpoint.directory` will be created.
244
+ Internal decorator to support Fast bakery
564
245
  """
565
246
  ...
566
247
 
567
248
  @typing.overload
568
- def checkpoint(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
569
- ...
570
-
571
- @typing.overload
572
- def checkpoint(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
573
- ...
574
-
575
- def checkpoint(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, load_policy: str = 'fresh', temp_dir_root: str = None):
576
- """
577
- Enables checkpointing for a step.
578
-
579
-
580
-
581
- Parameters
582
- ----------
583
- load_policy : str, default: "fresh"
584
- The policy for loading the checkpoint. The following policies are supported:
585
- - "eager": Loads the the latest available checkpoint within the namespace.
586
- With this mode, the latest checkpoint written by any previous task (can be even a different run) of the step
587
- will be loaded at the start of the task.
588
- - "none": Do not load any checkpoint
589
- - "fresh": Loads the lastest checkpoint created within the running Task.
590
- This mode helps loading checkpoints across various retry attempts of the same task.
591
- With this mode, no checkpoint will be loaded at the start of a task but any checkpoints
592
- created within the task will be loaded when the task is retries execution on failure.
593
-
594
- temp_dir_root : str, default: None
595
- The root directory under which `current.checkpoint.directory` will be created.
596
- """
249
+ def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
597
250
  ...
598
251
 
599
- def app_deploy(*, app_port: int, app_name: str) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
252
+ def fast_bakery_internal(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
600
253
  """
601
- Specifies that this step is used to deploy an instance of the app.
602
- Requires that self.app_name, self.app_port, self.entrypoint and self.deployDir is set.
603
-
604
-
605
- Parameters
606
- ----------
607
- app_port : int
608
- Number of GPUs to use.
609
- app_name : str
610
- Name of the app to deploy.
254
+ Internal decorator to support Fast bakery
611
255
  """
612
256
  ...
613
257
 
614
258
  @typing.overload
615
- def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
259
+ def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
616
260
  """
617
- Internal decorator to support Fast bakery
261
+ Decorator prototype for all step decorators. This function gets specialized
262
+ and imported for all decorators types by _import_plugin_decorators().
618
263
  """
619
264
  ...
620
265
 
621
266
  @typing.overload
622
- def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
623
- ...
624
-
625
- def fast_bakery_internal(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
626
- """
627
- Internal decorator to support Fast bakery
628
- """
267
+ def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
629
268
  ...
630
269
 
631
- def nvidia(*, gpu: int, gpu_type: str, queue_timeout: int) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
270
+ def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
632
271
  """
633
- Specifies that this step should execute on DGX cloud.
634
-
635
-
636
- Parameters
637
- ----------
638
- gpu : int
639
- Number of GPUs to use.
640
- gpu_type : str
641
- Type of Nvidia GPU to use.
642
- queue_timeout : int
643
- Time to keep the job in NVCF's queue.
272
+ Decorator prototype for all step decorators. This function gets specialized
273
+ and imported for all decorators types by _import_plugin_decorators().
644
274
  """
645
275
  ...
646
276
 
@@ -670,104 +300,50 @@ def huggingface_hub(*, temp_dir_root: typing.Optional[str] = None, load: typing.
670
300
  ...
671
301
 
672
302
  @typing.overload
673
- def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
303
+ def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
674
304
  """
675
- Specifies the PyPI packages for the step.
676
-
677
- Information in this decorator will augment any
678
- attributes set in the `@pyi_base` flow-level decorator. Hence,
679
- you can use `@pypi_base` to set packages required by all
680
- steps and use `@pypi` to specify step-specific overrides.
305
+ Specifies environment variables to be set prior to the execution of a step.
681
306
 
682
307
 
683
308
  Parameters
684
309
  ----------
685
- packages : Dict[str, str], default: {}
686
- Packages to use for this step. The key is the name of the package
687
- and the value is the version to use.
688
- python : str, optional, default: None
689
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
690
- that the version used will correspond to the version of the Python interpreter used to start the run.
310
+ vars : Dict[str, str], default {}
311
+ Dictionary of environment variables to set.
691
312
  """
692
313
  ...
693
314
 
694
315
  @typing.overload
695
- def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
316
+ def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
696
317
  ...
697
318
 
698
319
  @typing.overload
699
- def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
700
- ...
701
-
702
- def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
703
- """
704
- Specifies the PyPI packages for the step.
705
-
706
- Information in this decorator will augment any
707
- attributes set in the `@pyi_base` flow-level decorator. Hence,
708
- you can use `@pypi_base` to set packages required by all
709
- steps and use `@pypi` to specify step-specific overrides.
710
-
711
-
712
- Parameters
713
- ----------
714
- packages : Dict[str, str], default: {}
715
- Packages to use for this step. The key is the name of the package
716
- and the value is the version to use.
717
- python : str, optional, default: None
718
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
719
- that the version used will correspond to the version of the Python interpreter used to start the run.
720
- """
320
+ def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
721
321
  ...
722
322
 
723
- def ollama(*, models: list, backend: str, force_pull: bool, skip_push_check: bool, debug: bool) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
323
+ def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
724
324
  """
725
- This decorator is used to run Ollama APIs as Metaflow task sidecars.
726
-
727
- User code call
728
- --------------
729
- @ollama(
730
- models=[...],
731
- ...
732
- )
733
-
734
- Valid backend options
735
- ---------------------
736
- - 'local': Run as a separate process on the local task machine.
737
- - (TODO) 'managed': Outerbounds hosts and selects compute provider.
738
- - (TODO) 'remote': Spin up separate instance to serve Ollama models.
739
-
740
- Valid model options
741
- -------------------
742
- Any model here https://ollama.com/search, e.g. 'llama3.2', 'llama3.3'
325
+ Specifies environment variables to be set prior to the execution of a step.
743
326
 
744
327
 
745
328
  Parameters
746
329
  ----------
747
- models: list[str]
748
- List of Ollama containers running models in sidecars.
749
- backend: str
750
- Determines where and how to run the Ollama process.
751
- force_pull: bool
752
- Whether to run `ollama pull` no matter what, or first check the remote cache in Metaflow datastore for this model key.
753
- skip_push_check: bool
754
- Whether to skip the check that populates/overwrites remote cache on terminating an ollama model.
755
- debug: bool
756
- Whether to turn on verbose debugging logs.
330
+ vars : Dict[str, str], default {}
331
+ Dictionary of environment variables to set.
757
332
  """
758
333
  ...
759
334
 
760
- def nvct(*, gpu: int, gpu_type: str) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
335
+ def app_deploy(*, app_port: int, app_name: str) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
761
336
  """
762
- Specifies that this step should execute on DGX cloud.
337
+ Specifies that this step is used to deploy an instance of the app.
338
+ Requires that self.app_name, self.app_port, self.entrypoint and self.deployDir is set.
763
339
 
764
340
 
765
341
  Parameters
766
342
  ----------
767
- gpu : int
343
+ app_port : int
768
344
  Number of GPUs to use.
769
- gpu_type : str
770
- Type of Nvidia GPU to use.
345
+ app_name : str
346
+ Name of the app to deploy.
771
347
  """
772
348
  ...
773
349
 
@@ -806,22 +382,17 @@ def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None],
806
382
  """
807
383
  ...
808
384
 
809
- @typing.overload
810
- def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
811
- """
812
- Decorator prototype for all step decorators. This function gets specialized
813
- and imported for all decorators types by _import_plugin_decorators().
814
- """
815
- ...
816
-
817
- @typing.overload
818
- def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
819
- ...
820
-
821
- def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
385
+ def nvct(*, gpu: int, gpu_type: str) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
822
386
  """
823
- Decorator prototype for all step decorators. This function gets specialized
824
- and imported for all decorators types by _import_plugin_decorators().
387
+ Specifies that this step should execute on DGX cloud.
388
+
389
+
390
+ Parameters
391
+ ----------
392
+ gpu : int
393
+ Number of GPUs to use.
394
+ gpu_type : str
395
+ Type of Nvidia GPU to use.
825
396
  """
826
397
  ...
827
398
 
@@ -875,35 +446,407 @@ def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typ
875
446
  ...
876
447
 
877
448
  @typing.overload
878
- def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
449
+ def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
879
450
  """
880
- Specifies environment variables to be set prior to the execution of a step.
451
+ Specifies the number of times the task corresponding
452
+ to a step needs to be retried.
453
+
454
+ This decorator is useful for handling transient errors, such as networking issues.
455
+ If your task contains operations that can't be retried safely, e.g. database updates,
456
+ it is advisable to annotate it with `@retry(times=0)`.
457
+
458
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
459
+ decorator will execute a no-op task after all retries have been exhausted,
460
+ ensuring that the flow execution can continue.
881
461
 
882
462
 
883
463
  Parameters
884
464
  ----------
885
- vars : Dict[str, str], default {}
886
- Dictionary of environment variables to set.
465
+ times : int, default 3
466
+ Number of times to retry this task.
467
+ minutes_between_retries : int, default 2
468
+ Number of minutes between retries.
887
469
  """
888
470
  ...
889
471
 
890
472
  @typing.overload
891
- def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
473
+ def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
892
474
  ...
893
475
 
894
476
  @typing.overload
895
- def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
477
+ def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
896
478
  ...
897
479
 
898
- def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
480
+ def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
899
481
  """
900
- Specifies environment variables to be set prior to the execution of a step.
482
+ Specifies the number of times the task corresponding
483
+ to a step needs to be retried.
484
+
485
+ This decorator is useful for handling transient errors, such as networking issues.
486
+ If your task contains operations that can't be retried safely, e.g. database updates,
487
+ it is advisable to annotate it with `@retry(times=0)`.
488
+
489
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
490
+ decorator will execute a no-op task after all retries have been exhausted,
491
+ ensuring that the flow execution can continue.
901
492
 
902
493
 
903
494
  Parameters
904
495
  ----------
905
- vars : Dict[str, str], default {}
906
- Dictionary of environment variables to set.
496
+ times : int, default 3
497
+ Number of times to retry this task.
498
+ minutes_between_retries : int, default 2
499
+ Number of minutes between retries.
500
+ """
501
+ ...
502
+
503
+ @typing.overload
504
+ def resources(*, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
505
+ """
506
+ Specifies the resources needed when executing this step.
507
+
508
+ Use `@resources` to specify the resource requirements
509
+ independently of the specific compute layer (`@batch`, `@kubernetes`).
510
+
511
+ You can choose the compute layer on the command line by executing e.g.
512
+ ```
513
+ python myflow.py run --with batch
514
+ ```
515
+ or
516
+ ```
517
+ python myflow.py run --with kubernetes
518
+ ```
519
+ which executes the flow on the desired system using the
520
+ requirements specified in `@resources`.
521
+
522
+
523
+ Parameters
524
+ ----------
525
+ cpu : int, default 1
526
+ Number of CPUs required for this step.
527
+ gpu : int, optional, default None
528
+ Number of GPUs required for this step.
529
+ disk : int, optional, default None
530
+ Disk size (in MB) required for this step. Only applies on Kubernetes.
531
+ memory : int, default 4096
532
+ Memory size (in MB) required for this step.
533
+ shared_memory : int, optional, default None
534
+ The value for the size (in MiB) of the /dev/shm volume for this step.
535
+ This parameter maps to the `--shm-size` option in Docker.
536
+ """
537
+ ...
538
+
539
+ @typing.overload
540
+ def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
541
+ ...
542
+
543
+ @typing.overload
544
+ def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
545
+ ...
546
+
547
+ def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
548
+ """
549
+ Specifies the resources needed when executing this step.
550
+
551
+ Use `@resources` to specify the resource requirements
552
+ independently of the specific compute layer (`@batch`, `@kubernetes`).
553
+
554
+ You can choose the compute layer on the command line by executing e.g.
555
+ ```
556
+ python myflow.py run --with batch
557
+ ```
558
+ or
559
+ ```
560
+ python myflow.py run --with kubernetes
561
+ ```
562
+ which executes the flow on the desired system using the
563
+ requirements specified in `@resources`.
564
+
565
+
566
+ Parameters
567
+ ----------
568
+ cpu : int, default 1
569
+ Number of CPUs required for this step.
570
+ gpu : int, optional, default None
571
+ Number of GPUs required for this step.
572
+ disk : int, optional, default None
573
+ Disk size (in MB) required for this step. Only applies on Kubernetes.
574
+ memory : int, default 4096
575
+ Memory size (in MB) required for this step.
576
+ shared_memory : int, optional, default None
577
+ The value for the size (in MiB) of the /dev/shm volume for this step.
578
+ This parameter maps to the `--shm-size` option in Docker.
579
+ """
580
+ ...
581
+
582
+ @typing.overload
583
+ def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
584
+ """
585
+ Specifies the PyPI packages for the step.
586
+
587
+ Information in this decorator will augment any
588
+ attributes set in the `@pypi_base` flow-level decorator. Hence,
589
+ you can use `@pypi_base` to set packages required by all
590
+ steps and use `@pypi` to specify step-specific overrides.
591
+
592
+
593
+ Parameters
594
+ ----------
595
+ packages : Dict[str, str], default: {}
596
+ Packages to use for this step. The key is the name of the package
597
+ and the value is the version to use.
598
+ python : str, optional, default: None
599
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
600
+ that the version used will correspond to the version of the Python interpreter used to start the run.
601
+ """
602
+ ...
603
+
604
+ @typing.overload
605
+ def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
606
+ ...
607
+
608
+ @typing.overload
609
+ def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
610
+ ...
611
+
612
+ def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
613
+ """
614
+ Specifies the PyPI packages for the step.
615
+
616
+ Information in this decorator will augment any
617
+ attributes set in the `@pypi_base` flow-level decorator. Hence,
618
+ you can use `@pypi_base` to set packages required by all
619
+ steps and use `@pypi` to specify step-specific overrides.
620
+
621
+
622
+ Parameters
623
+ ----------
624
+ packages : Dict[str, str], default: {}
625
+ Packages to use for this step. The key is the name of the package
626
+ and the value is the version to use.
627
+ python : str, optional, default: None
628
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
629
+ that the version used will correspond to the version of the Python interpreter used to start the run.
630
+ """
631
+ ...
632
+
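A minimal sketch of step-level PyPI pinning based on the signature above; the package versions are placeholders and the flow must be run with `--environment=pypi` for the isolated environment to be built:

```python
from metaflow import FlowSpec, step, pypi

class PypiDemoFlow(FlowSpec):

    # Step-specific dependency, installed into an isolated environment.
    @pypi(packages={"pandas": "2.2.2"}, python="3.11")
    @step
    def start(self):
        import pandas as pd  # available only inside this step's environment
        self.rows = len(pd.DataFrame({"x": [1, 2, 3]}))
        self.next(self.end)

    @step
    def end(self):
        print("rows:", self.rows)

if __name__ == "__main__":
    PypiDemoFlow()
```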
633
+ @typing.overload
634
+ def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
635
+ """
636
+ Specifies the Conda environment for the step.
637
+
638
+ Information in this decorator will augment any
639
+ attributes set in the `@conda_base` flow-level decorator. Hence,
640
+ you can use `@conda_base` to set packages required by all
641
+ steps and use `@conda` to specify step-specific overrides.
642
+
643
+
644
+ Parameters
645
+ ----------
646
+ packages : Dict[str, str], default {}
647
+ Packages to use for this step. The key is the name of the package
648
+ and the value is the version to use.
649
+ libraries : Dict[str, str], default {}
650
+ Supported for backward compatibility. When used with packages, packages will take precedence.
651
+ python : str, optional, default None
652
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
653
+ that the version used will correspond to the version of the Python interpreter used to start the run.
654
+ disabled : bool, default False
655
+ If set to True, disables @conda.
656
+ """
657
+ ...
658
+
659
+ @typing.overload
660
+ def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
661
+ ...
662
+
663
+ @typing.overload
664
+ def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
665
+ ...
666
+
667
+ def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
668
+ """
669
+ Specifies the Conda environment for the step.
670
+
671
+ Information in this decorator will augment any
672
+ attributes set in the `@conda_base` flow-level decorator. Hence,
673
+ you can use `@conda_base` to set packages required by all
674
+ steps and use `@conda` to specify step-specific overrides.
675
+
676
+
677
+ Parameters
678
+ ----------
679
+ packages : Dict[str, str], default {}
680
+ Packages to use for this step. The key is the name of the package
681
+ and the value is the version to use.
682
+ libraries : Dict[str, str], default {}
683
+ Supported for backward compatibility. When used with packages, packages will take precedence.
684
+ python : str, optional, default None
685
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
686
+ that the version used will correspond to the version of the Python interpreter used to start the run.
687
+ disabled : bool, default False
688
+ If set to True, disables @conda.
689
+ """
690
+ ...
691
+
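Analogously for `@conda`, a minimal sketch (versions are placeholders; run with `--environment=conda`):

```python
from metaflow import FlowSpec, step, conda

class CondaDemoFlow(FlowSpec):

    # Step-specific Conda environment.
    @conda(packages={"numpy": "1.26.4"}, python="3.11")
    @step
    def start(self):
        import numpy as np  # resolved from the step's Conda environment
        self.total = float(np.arange(10).sum())
        self.next(self.end)

    @step
    def end(self):
        print("total:", self.total)

if __name__ == "__main__":
    CondaDemoFlow()
```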
692
+ @typing.overload
693
+ def checkpoint(*, load_policy: str = 'fresh', temp_dir_root: str = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
694
+ """
695
+ Enables checkpointing for a step.
696
+
697
+
698
+
699
+ Parameters
700
+ ----------
701
+ load_policy : str, default: "fresh"
702
+ The policy for loading the checkpoint. The following policies are supported:
703
+ - "eager": Loads the the latest available checkpoint within the namespace.
704
+ With this mode, the latest checkpoint written by any previous task (can be even a different run) of the step
705
+ will be loaded at the start of the task.
706
+ - "none": Do not load any checkpoint
707
+ - "fresh": Loads the lastest checkpoint created within the running Task.
708
+ This mode helps load checkpoints across retry attempts of the same task.
709
+ With this mode, no checkpoint will be loaded at the start of a task but any checkpoints
710
+ created within the task will be loaded when the task is retried after a failure.
711
+
712
+ temp_dir_root : str, default: None
713
+ The root directory under which `current.checkpoint.directory` will be created.
714
+ """
715
+ ...
716
+
717
+ @typing.overload
718
+ def checkpoint(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
719
+ ...
720
+
721
+ @typing.overload
722
+ def checkpoint(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
723
+ ...
724
+
725
+ def checkpoint(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, load_policy: str = 'fresh', temp_dir_root: str = None):
726
+ """
727
+ Enables checkpointing for a step.
728
+
729
+
730
+
731
+ Parameters
732
+ ----------
733
+ load_policy : str, default: "fresh"
734
+ The policy for loading the checkpoint. The following policies are supported:
735
+ - "eager": Loads the the latest available checkpoint within the namespace.
736
+ With this mode, the latest checkpoint written by any previous task (can be even a different run) of the step
737
+ will be loaded at the start of the task.
738
+ - "none": Do not load any checkpoint
739
+ - "fresh": Loads the lastest checkpoint created within the running Task.
740
+ This mode helps load checkpoints across retry attempts of the same task.
741
+ With this mode, no checkpoint will be loaded at the start of a task but any checkpoints
742
+ created within the task will be loaded when the task is retried after a failure.
743
+
744
+ temp_dir_root : str, default: None
745
+ The root directory under which `current.checkpoint.directory` will be created.
746
+ """
747
+ ...
748
+
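A minimal sketch of attaching `@checkpoint` to a step, assuming the decorator and `current` are importable from `metaflow` as this stub suggests; only `current.checkpoint.directory`, which the parameter description above mentions, is referenced here:

```python
from metaflow import FlowSpec, step, checkpoint, current

class CheckpointDemoFlow(FlowSpec):

    # "fresh" only reloads checkpoints written by earlier attempts of this task.
    @checkpoint(load_policy="fresh")
    @step
    def start(self):
        # Training code would periodically write its state under this directory.
        print("checkpoint directory:", current.checkpoint.directory)
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    CheckpointDemoFlow()
```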
749
+ def ollama(*, models: list, backend: str, force_pull: bool, skip_push_check: bool, debug: bool) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
750
+ """
751
+ This decorator is used to run Ollama APIs as Metaflow task sidecars.
752
+
753
+ User code call
754
+ --------------
755
+ @ollama(
756
+ models=[...],
757
+ ...
758
+ )
759
+
760
+ Valid backend options
761
+ ---------------------
762
+ - 'local': Run as a separate process on the local task machine.
763
+ - (TODO) 'managed': Outerbounds hosts and selects compute provider.
764
+ - (TODO) 'remote': Spin up separate instance to serve Ollama models.
765
+
766
+ Valid model options
767
+ -------------------
768
+ Any model here https://ollama.com/search, e.g. 'llama3.2', 'llama3.3'
769
+
770
+
771
+ Parameters
772
+ ----------
773
+ models: list[str]
774
+ List of Ollama containers running models in sidecars.
775
+ backend: str
776
+ Determines where and how to run the Ollama process.
777
+ force_pull: bool
778
+ Whether to run `ollama pull` no matter what, or first check the remote cache in Metaflow datastore for this model key.
779
+ skip_push_check: bool
780
+ Whether to skip the check that populates/overwrites remote cache on terminating an ollama model.
781
+ debug: bool
782
+ Whether to turn on verbose debugging logs.
783
+ """
784
+ ...
785
+
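A sketch of how `@ollama` might be attached to a step, using only the parameters listed above; the model name follows the docstring's example and the step assumes an Ollama installation is available on the task machine:

```python
from metaflow import FlowSpec, step, ollama

class OllamaDemoFlow(FlowSpec):

    # Run an Ollama sidecar locally that serves one small model.
    @ollama(models=["llama3.2"], backend="local",
            force_pull=False, skip_push_check=False, debug=False)
    @step
    def start(self):
        # The step body would call the local Ollama API while the sidecar is up.
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    OllamaDemoFlow()
```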
786
+ def nvidia(*, gpu: int, gpu_type: str, queue_timeout: int) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
787
+ """
788
+ Specifies that this step should execute on DGX cloud.
789
+
790
+
791
+ Parameters
792
+ ----------
793
+ gpu : int
794
+ Number of GPUs to use.
795
+ gpu_type : str
796
+ Type of Nvidia GPU to use.
797
+ queue_timeout : int
798
+ Time to keep the job in NVCF's queue.
799
+ """
800
+ ...
801
+
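A sketch of `@nvidia` usage derived from the signature above; the GPU type and queue timeout values are placeholders, not recommendations:

```python
from metaflow import FlowSpec, step, nvidia

class NvidiaDemoFlow(FlowSpec):

    # Request one GPU on DGX cloud; "H100" and 3600 are illustrative values.
    @nvidia(gpu=1, gpu_type="H100", queue_timeout=3600)
    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    NvidiaDemoFlow()
```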
802
+ @typing.overload
803
+ def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
804
+ """
805
+ Specifies that the step will succeed under all circumstances.
806
+
807
+ The decorator will create an optional artifact, specified by `var`, which
808
+ contains the exception raised. You can use it to detect the presence
809
+ of errors, indicating that all happy-path artifacts produced by the step
810
+ are missing.
811
+
812
+
813
+ Parameters
814
+ ----------
815
+ var : str, optional, default None
816
+ Name of the artifact in which to store the caught exception.
817
+ If not specified, the exception is not stored.
818
+ print_exception : bool, default True
819
+ Determines whether or not the exception is printed to
820
+ stdout when caught.
821
+ """
822
+ ...
823
+
824
+ @typing.overload
825
+ def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
826
+ ...
827
+
828
+ @typing.overload
829
+ def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
830
+ ...
831
+
832
+ def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
833
+ """
834
+ Specifies that the step will succeed under all circumstances.
835
+
836
+ The decorator will create an optional artifact, specified by `var`, which
837
+ contains the exception raised. You can use it to detect the presence
838
+ of errors, indicating that all happy-path artifacts produced by the step
839
+ are missing.
840
+
841
+
842
+ Parameters
843
+ ----------
844
+ var : str, optional, default None
845
+ Name of the artifact in which to store the caught exception.
846
+ If not specified, the exception is not stored.
847
+ print_exception : bool, default True
848
+ Determines whether or not the exception is printed to
849
+ stdout when caught.
907
850
  """
908
851
  ...
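A minimal sketch of `@catch` in action: the failing step is marked successful, and the exception is available downstream through the artifact named by `var`:

```python
from metaflow import FlowSpec, step, catch

class CatchDemoFlow(FlowSpec):

    # The ZeroDivisionError below is caught and stored in self.compute_failed.
    @catch(var="compute_failed")
    @step
    def start(self):
        self.result = 1 / 0
        self.next(self.end)

    @step
    def end(self):
        if self.compute_failed:
            print("start failed with:", self.compute_failed)

if __name__ == "__main__":
    CatchDemoFlow()
```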
909
852
 
@@ -938,82 +881,139 @@ def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Cal
938
881
  ...
939
882
 
940
883
  @typing.overload
941
- def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
884
+ def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
885
+ ...
886
+
887
+ def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
888
+ """
889
+ Specifies a timeout for your step.
890
+
891
+ This decorator is useful if this step may hang indefinitely.
892
+
893
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
894
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
895
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.
896
+
897
+ Note that all the values specified in parameters are added together so if you specify
898
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
899
+
900
+
901
+ Parameters
902
+ ----------
903
+ seconds : int, default 0
904
+ Number of seconds to wait prior to timing out.
905
+ minutes : int, default 0
906
+ Number of minutes to wait prior to timing out.
907
+ hours : int, default 0
908
+ Number of hours to wait prior to timing out.
909
+ """
910
+ ...
911
+
912
+ @typing.overload
913
+ def model(*, load: typing.Union[typing.List[str], str, typing.List[typing.Tuple[str, typing.Optional[str]]]] = None, temp_dir_root: str = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
914
+ """
915
+ Enables loading / saving of models within a step.
916
+
917
+
918
+
919
+ Parameters
920
+ ----------
921
+ load : Union[List[str],str,List[Tuple[str,Union[str,None]]]], default: None
922
+ Artifact name/s referencing the models/checkpoints to load. Artifact names refer to the names of the instance variables set to `self`.
923
+ These artifact names given to `load` can be reference objects or reference `key` strings from objects created by:
924
+ - `current.checkpoint`
925
+ - `current.model`
926
+ - `current.huggingface_hub`
927
+
928
+ If a list of tuples is provided, the first element is the artifact name and the second element is the path where the artifact will be unpacked on
929
+ the local filesystem. If the second element is None, the artifact will be unpacked in the current working directory.
930
+ If a string is provided, then the artifact corresponding to that name will be loaded in the current working directory.
931
+
932
+ temp_dir_root : str, default: None
933
+ The root directory under which `current.model.loaded` will store loaded models
934
+ """
935
+ ...
936
+
937
+ @typing.overload
938
+ def model(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
939
+ ...
940
+
941
+ @typing.overload
942
+ def model(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
942
943
  ...
943
944
 
944
- def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
945
+ def model(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, load: typing.Union[typing.List[str], str, typing.List[typing.Tuple[str, typing.Optional[str]]]] = None, temp_dir_root: str = None):
945
946
  """
946
- Specifies a timeout for your step.
947
-
948
- This decorator is useful if this step may hang indefinitely.
949
-
950
- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
951
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
952
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
947
+ Enables loading / saving of models within a step.
953
948
 
954
- Note that all the values specified in parameters are added together so if you specify
955
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
956
949
 
957
950
 
958
951
  Parameters
959
952
  ----------
960
- seconds : int, default 0
961
- Number of seconds to wait prior to timing out.
962
- minutes : int, default 0
963
- Number of minutes to wait prior to timing out.
964
- hours : int, default 0
965
- Number of hours to wait prior to timing out.
953
+ load : Union[List[str],str,List[Tuple[str,Union[str,None]]]], default: None
954
+ Artifact name(s) referencing the models/checkpoints to load. Artifact names refer to the names of the instance variables set to `self`.
955
+ These artifact names given to `load` can be reference objects or reference `key` strings from objects created by:
956
+ - `current.checkpoint`
957
+ - `current.model`
958
+ - `current.huggingface_hub`
959
+
960
+ If a list of tuples is provided, the first element is the artifact name and the second element is the path where the artifact will be unpacked on
961
+ the local filesystem. If the second element is None, the artifact will be unpacked in the current working directory.
962
+ If a string is provided, then the artifact corresponding to that name will be loaded in the current working directory.
963
+
964
+ temp_dir_root : str, default: None
965
+ The root directory under which `current.model.loaded` will store loaded models
966
966
  """
967
967
  ...
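A structural sketch of where `@model` sits in a flow, assuming it is importable from `metaflow` as this stub suggests. The `load` argument is only described in comments because a real reference (created by `current.model`, `current.checkpoint`, or `current.huggingface_hub`, per the docstring) is needed for it to do anything; the temp directory is a placeholder:

```python
from metaflow import FlowSpec, step, model

class ModelDemoFlow(FlowSpec):

    # In a real flow an earlier step would set, say, `self.trained_model` to a
    # reference produced by current.model / current.checkpoint, and this step
    # would use @model(load="trained_model") to unpack it locally under
    # current.model.loaded.
    @model(temp_dir_root="/tmp/mf_models")
    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    ModelDemoFlow()
```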
968
968
 
969
969
  @typing.overload
970
- def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
970
+ def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
971
971
  """
972
- Specifies the Conda environment for all steps of the flow.
973
-
974
- Use `@conda_base` to set common libraries required by all
975
- steps and use `@conda` to specify step-specific additions.
972
+ Specifies the times when the flow should be run when running on a
973
+ production scheduler.
976
974
 
977
975
 
978
976
  Parameters
979
977
  ----------
980
- packages : Dict[str, str], default {}
981
- Packages to use for this flow. The key is the name of the package
982
- and the value is the version to use.
983
- libraries : Dict[str, str], default {}
984
- Supported for backward compatibility. When used with packages, packages will take precedence.
985
- python : str, optional, default None
986
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
987
- that the version used will correspond to the version of the Python interpreter used to start the run.
988
- disabled : bool, default False
989
- If set to True, disables Conda.
978
+ hourly : bool, default False
979
+ Run the workflow hourly.
980
+ daily : bool, default True
981
+ Run the workflow daily.
982
+ weekly : bool, default False
983
+ Run the workflow weekly.
984
+ cron : str, optional, default None
985
+ Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
986
+ specified by this expression.
987
+ timezone : str, optional, default None
988
+ Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
989
+ which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
990
990
  """
991
991
  ...
992
992
 
993
993
  @typing.overload
994
- def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
994
+ def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
995
995
  ...
996
996
 
997
- def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
997
+ def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
998
998
  """
999
- Specifies the Conda environment for all steps of the flow.
1000
-
1001
- Use `@conda_base` to set common libraries required by all
1002
- steps and use `@conda` to specify step-specific additions.
999
+ Specifies the times when the flow should be run when running on a
1000
+ production scheduler.
1003
1001
 
1004
1002
 
1005
1003
  Parameters
1006
1004
  ----------
1007
- packages : Dict[str, str], default {}
1008
- Packages to use for this flow. The key is the name of the package
1009
- and the value is the version to use.
1010
- libraries : Dict[str, str], default {}
1011
- Supported for backward compatibility. When used with packages, packages will take precedence.
1012
- python : str, optional, default None
1013
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1014
- that the version used will correspond to the version of the Python interpreter used to start the run.
1015
- disabled : bool, default False
1016
- If set to True, disables Conda.
1005
+ hourly : bool, default False
1006
+ Run the workflow hourly.
1007
+ daily : bool, default True
1008
+ Run the workflow daily.
1009
+ weekly : bool, default False
1010
+ Run the workflow weekly.
1011
+ cron : str, optional, default None
1012
+ Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1013
+ specified by this expression.
1014
+ timezone : str, optional, default None
1015
+ Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1016
+ which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1017
1017
  """
1018
1018
  ...
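A minimal sketch of `@schedule`; the schedule only takes effect once the flow is deployed to a production scheduler, and a `cron=` expression could be used instead of the boolean flags for custom timing:

```python
from metaflow import FlowSpec, step, schedule

# Run the deployed flow once a day.
@schedule(daily=True)
class ScheduledFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    ScheduledFlow()
```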
1019
1019
 
@@ -1131,6 +1131,49 @@ def with_artifact_store(f: typing.Optional[typing.Type[FlowSpecDerived]] = None)
1131
1131
  """
1132
1132
  ...
1133
1133
 
1134
+ def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1135
+ """
1136
+ The `@airflow_s3_key_sensor` decorator attaches an Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
1137
+ before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
1138
+ and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
1139
+ added as flow decorators. Adding more than one decorator will ensure that the `start` step
1140
+ starts only after all sensors finish.
1141
+
1142
+
1143
+ Parameters
1144
+ ----------
1145
+ timeout : int
1146
+ Time, in seconds, before the task times out and fails. (Default: 3600)
1147
+ poke_interval : int
1148
+ Time in seconds that the job should wait in between each try. (Default: 60)
1149
+ mode : str
1150
+ How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
1151
+ exponential_backoff : bool
1152
+ Allow progressively longer waits between pokes by using an exponential backoff algorithm. (Default: True)
1153
+ pool : str
1154
+ The slot pool this task should run in;
1155
+ slot pools are a way to limit concurrency for certain tasks. (Default: None)
1156
+ soft_fail : bool
1157
+ Set to true to mark the task as SKIPPED on failure. (Default: False)
1158
+ name : str
1159
+ Name of the sensor on Airflow
1160
+ description : str
1161
+ Description of sensor in the Airflow UI
1162
+ bucket_key : Union[str, List[str]]
1163
+ The key(s) being waited on. Supports full s3:// style url or relative path from root level.
1164
+ When it's specified as a full s3:// url, please leave `bucket_name` as None
1165
+ bucket_name : str
1166
+ Name of the S3 bucket. Only needed when bucket_key is not provided as a full s3:// url.
1167
+ When specified, all the keys passed to bucket_key refer to this bucket. (Default: None)
1168
+ wildcard_match : bool
1169
+ Whether the bucket_key should be interpreted as a Unix wildcard pattern. (Default: False)
1170
+ aws_conn_id : str
1171
+ A reference to the S3 connection on Airflow. (Default: None)
1172
+ verify : bool
1173
+ Whether or not to verify SSL certificates for S3 connection. (Default: None)
1174
+ """
1175
+ ...
1176
+
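A sketch of the sensor attached to a flow, assuming the arguments omitted here fall back to the defaults noted in the parameter list above; the bucket key and sensor name are placeholders:

```python
from metaflow import FlowSpec, step, airflow_s3_key_sensor

# On Airflow (compiled with `airflow create`), wait for this S3 object to exist
# before the `start` step runs.
@airflow_s3_key_sensor(
    name="wait_for_input",
    bucket_key="s3://my-bucket/input/data.csv",
)
class SensorFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    SensorFlow()
```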
1134
1177
  def project(*, name: str, branch: typing.Optional[str] = None, production: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1135
1178
  """
1136
1179
  Specifies what flows belong to the same project.
@@ -1146,66 +1189,157 @@ def project(*, name: str, branch: typing.Optional[str] = None, production: bool
1146
1189
  projects that use the same production scheduler. The name may
1147
1190
  contain only lowercase alphanumeric characters and underscores.
1148
1191
 
1149
- branch : Optional[str], default None
1150
- The branch to use. If not specified, the branch is set to
1151
- `user.<username>` unless `production` is set to `True`. This can
1152
- also be set on the command line using `--branch` as a top-level option.
1153
- It is an error to specify `branch` in the decorator and on the command line.
1192
+ branch : Optional[str], default None
1193
+ The branch to use. If not specified, the branch is set to
1194
+ `user.<username>` unless `production` is set to `True`. This can
1195
+ also be set on the command line using `--branch` as a top-level option.
1196
+ It is an error to specify `branch` in the decorator and on the command line.
1197
+
1198
+ production : bool, default False
1199
+ Whether or not the branch is the production branch. This can also be set on the
1200
+ command line using `--production` as a top-level option. It is an error to specify
1201
+ `production` in the decorator and on the command line.
1202
+ The project branch name will be:
1203
+ - if `branch` is specified:
1204
+ - if `production` is True: `prod.<branch>`
1205
+ - if `production` is False: `test.<branch>`
1206
+ - if `branch` is not specified:
1207
+ - if `production` is True: `prod`
1208
+ - if `production` is False: `user.<username>`
1209
+ """
1210
+ ...
1211
+
1212
+ @typing.overload
1213
+ def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1214
+ """
1215
+ Specifies the event(s) that this flow depends on.
1216
+
1217
+ ```
1218
+ @trigger(event='foo')
1219
+ ```
1220
+ or
1221
+ ```
1222
+ @trigger(events=['foo', 'bar'])
1223
+ ```
1224
+
1225
+ Additionally, you can specify the parameter mappings
1226
+ to map event payload to Metaflow parameters for the flow.
1227
+ ```
1228
+ @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
1229
+ ```
1230
+ or
1231
+ ```
1232
+ @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
1233
+ {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
1234
+ ```
1235
+
1236
+ 'parameters' can also be a list of strings and tuples like so:
1237
+ ```
1238
+ @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
1239
+ ```
1240
+ This is equivalent to:
1241
+ ```
1242
+ @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
1243
+ ```
1244
+
1245
+
1246
+ Parameters
1247
+ ----------
1248
+ event : Union[str, Dict[str, Any]], optional, default None
1249
+ Event dependency for this flow.
1250
+ events : List[Union[str, Dict[str, Any]]], default []
1251
+ Events dependency for this flow.
1252
+ options : Dict[str, Any], default {}
1253
+ Backend-specific configuration for tuning eventing behavior.
1254
+ """
1255
+ ...
1256
+
1257
+ @typing.overload
1258
+ def trigger(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1259
+ ...
1260
+
1261
+ def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}):
1262
+ """
1263
+ Specifies the event(s) that this flow depends on.
1264
+
1265
+ ```
1266
+ @trigger(event='foo')
1267
+ ```
1268
+ or
1269
+ ```
1270
+ @trigger(events=['foo', 'bar'])
1271
+ ```
1272
+
1273
+ Additionally, you can specify the parameter mappings
1274
+ to map event payload to Metaflow parameters for the flow.
1275
+ ```
1276
+ @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
1277
+ ```
1278
+ or
1279
+ ```
1280
+ @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
1281
+ {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
1282
+ ```
1283
+
1284
+ 'parameters' can also be a list of strings and tuples like so:
1285
+ ```
1286
+ @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
1287
+ ```
1288
+ This is equivalent to:
1289
+ ```
1290
+ @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
1291
+ ```
1292
+
1293
+
1294
+ Parameters
1295
+ ----------
1296
+ event : Union[str, Dict[str, Any]], optional, default None
1297
+ Event dependency for this flow.
1298
+ events : List[Union[str, Dict[str, Any]]], default []
1299
+ Events dependency for this flow.
1300
+ options : Dict[str, Any], default {}
1301
+ Backend-specific configuration for tuning eventing behavior.
1302
+ """
1303
+ ...
1304
+
1305
+ @typing.overload
1306
+ def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1307
+ """
1308
+ Specifies the PyPI packages for all steps of the flow.
1309
+
1310
+ Use `@pypi_base` to set common packages required by all
1311
+ steps and use `@pypi` to specify step-specific overrides.
1154
1312
 
1155
- production : bool, default False
1156
- Whether or not the branch is the production branch. This can also be set on the
1157
- command line using `--production` as a top-level option. It is an error to specify
1158
- `production` in the decorator and on the command line.
1159
- The project branch name will be:
1160
- - if `branch` is specified:
1161
- - if `production` is True: `prod.<branch>`
1162
- - if `production` is False: `test.<branch>`
1163
- - if `branch` is not specified:
1164
- - if `production` is True: `prod`
1165
- - if `production` is False: `user.<username>`
1313
+ Parameters
1314
+ ----------
1315
+ packages : Dict[str, str], default: {}
1316
+ Packages to use for this flow. The key is the name of the package
1317
+ and the value is the version to use.
1318
+ python : str, optional, default: None
1319
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1320
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1166
1321
  """
1167
1322
  ...
1168
1323
 
1169
- def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1324
+ @typing.overload
1325
+ def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1326
+ ...
1327
+
1328
+ def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
1170
1329
  """
1171
- The `@airflow_s3_key_sensor` decorator attaches a Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
1172
- before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
1173
- and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
1174
- added as a flow decorators. Adding more than one decorator will ensure that `start` step
1175
- starts only after all sensors finish.
1330
+ Specifies the PyPI packages for all steps of the flow.
1176
1331
 
1332
+ Use `@pypi_base` to set common packages required by all
1333
+ steps and use `@pypi` to specify step-specific overrides.
1177
1334
 
1178
1335
  Parameters
1179
1336
  ----------
1180
- timeout : int
1181
- Time, in seconds before the task times out and fails. (Default: 3600)
1182
- poke_interval : int
1183
- Time in seconds that the job should wait in between each try. (Default: 60)
1184
- mode : str
1185
- How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
1186
- exponential_backoff : bool
1187
- allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
1188
- pool : str
1189
- the slot pool this task should run in,
1190
- slot pools are a way to limit concurrency for certain tasks. (Default:None)
1191
- soft_fail : bool
1192
- Set to true to mark the task as SKIPPED on failure. (Default: False)
1193
- name : str
1194
- Name of the sensor on Airflow
1195
- description : str
1196
- Description of sensor in the Airflow UI
1197
- bucket_key : Union[str, List[str]]
1198
- The key(s) being waited on. Supports full s3:// style url or relative path from root level.
1199
- When it's specified as a full s3:// url, please leave `bucket_name` as None
1200
- bucket_name : str
1201
- Name of the S3 bucket. Only needed when bucket_key is not provided as a full s3:// url.
1202
- When specified, all the keys passed to bucket_key refers to this bucket. (Default:None)
1203
- wildcard_match : bool
1204
- whether the bucket_key should be interpreted as a Unix wildcard pattern. (Default: False)
1205
- aws_conn_id : str
1206
- a reference to the s3 connection on Airflow. (Default: None)
1207
- verify : bool
1208
- Whether or not to verify SSL certificates for S3 connection. (Default: None)
1337
+ packages : Dict[str, str], default: {}
1338
+ Packages to use for this flow. The key is the name of the package
1339
+ and the value is the version to use.
1340
+ python : str, optional, default: None
1341
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1342
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1209
1343
  """
1210
1344
  ...
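A minimal sketch of combining `@pypi_base` with a step-level `@pypi` override, as the docstring describes; versions are placeholders and the flow is run with `--environment=pypi`:

```python
from metaflow import FlowSpec, step, pypi, pypi_base

# Packages shared by every step of the flow.
@pypi_base(packages={"requests": "2.32.3"}, python="3.11")
class PypiBaseDemoFlow(FlowSpec):

    @step
    def start(self):
        import requests  # provided by @pypi_base
        self.next(self.end)

    # Step-specific addition on top of the flow-level packages.
    @pypi(packages={"pandas": "2.2.2"})
    @step
    def end(self):
        import pandas  # available only in this step

if __name__ == "__main__":
    PypiBaseDemoFlow()
```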
1211
1345
 
@@ -1253,43 +1387,53 @@ def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str,
1253
1387
  ...
1254
1388
 
1255
1389
  @typing.overload
1256
- def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1390
+ def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1257
1391
  """
1258
- Specifies the PyPI packages for all steps of the flow.
1392
+ Specifies the Conda environment for all steps of the flow.
1393
+
1394
+ Use `@conda_base` to set common libraries required by all
1395
+ steps and use `@conda` to specify step-specific additions.
1259
1396
 
1260
- Use `@pypi_base` to set common packages required by all
1261
- steps and use `@pypi` to specify step-specific overrides.
1262
1397
 
1263
1398
  Parameters
1264
1399
  ----------
1265
- packages : Dict[str, str], default: {}
1400
+ packages : Dict[str, str], default {}
1266
1401
  Packages to use for this flow. The key is the name of the package
1267
1402
  and the value is the version to use.
1268
- python : str, optional, default: None
1403
+ libraries : Dict[str, str], default {}
1404
+ Supported for backward compatibility. When used with packages, packages will take precedence.
1405
+ python : str, optional, default None
1269
1406
  Version of Python to use, e.g. '3.7.4'. A default value of None implies
1270
1407
  that the version used will correspond to the version of the Python interpreter used to start the run.
1408
+ disabled : bool, default False
1409
+ If set to True, disables Conda.
1271
1410
  """
1272
1411
  ...
1273
1412
 
1274
1413
  @typing.overload
1275
- def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1414
+ def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1276
1415
  ...
1277
1416
 
1278
- def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
1417
+ def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
1279
1418
  """
1280
- Specifies the PyPI packages for all steps of the flow.
1419
+ Specifies the Conda environment for all steps of the flow.
1420
+
1421
+ Use `@conda_base` to set common libraries required by all
1422
+ steps and use `@conda` to specify step-specific additions.
1281
1423
 
1282
- Use `@pypi_base` to set common packages required by all
1283
- steps and use `@pypi` to specify step-specific overrides.
1284
1424
 
1285
1425
  Parameters
1286
1426
  ----------
1287
- packages : Dict[str, str], default: {}
1427
+ packages : Dict[str, str], default {}
1288
1428
  Packages to use for this flow. The key is the name of the package
1289
1429
  and the value is the version to use.
1290
- python : str, optional, default: None
1430
+ libraries : Dict[str, str], default {}
1431
+ Supported for backward compatibility. When used with packages, packages will take precedence.
1432
+ python : str, optional, default None
1291
1433
  Version of Python to use, e.g. '3.7.4'. A default value of None implies
1292
1434
  that the version used will correspond to the version of the Python interpreter used to start the run.
1435
+ disabled : bool, default False
1436
+ If set to True, disables Conda.
1293
1437
  """
1294
1438
  ...
1295
1439
 
@@ -1394,149 +1538,5 @@ def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *
1394
1538
  """
1395
1539
  ...
1396
1540
 
1397
- @typing.overload
1398
- def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1399
- """
1400
- Specifies the event(s) that this flow depends on.
1401
-
1402
- ```
1403
- @trigger(event='foo')
1404
- ```
1405
- or
1406
- ```
1407
- @trigger(events=['foo', 'bar'])
1408
- ```
1409
-
1410
- Additionally, you can specify the parameter mappings
1411
- to map event payload to Metaflow parameters for the flow.
1412
- ```
1413
- @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
1414
- ```
1415
- or
1416
- ```
1417
- @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
1418
- {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
1419
- ```
1420
-
1421
- 'parameters' can also be a list of strings and tuples like so:
1422
- ```
1423
- @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
1424
- ```
1425
- This is equivalent to:
1426
- ```
1427
- @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
1428
- ```
1429
-
1430
-
1431
- Parameters
1432
- ----------
1433
- event : Union[str, Dict[str, Any]], optional, default None
1434
- Event dependency for this flow.
1435
- events : List[Union[str, Dict[str, Any]]], default []
1436
- Events dependency for this flow.
1437
- options : Dict[str, Any], default {}
1438
- Backend-specific configuration for tuning eventing behavior.
1439
- """
1440
- ...
1441
-
1442
- @typing.overload
1443
- def trigger(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1444
- ...
1445
-
1446
- def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}):
1447
- """
1448
- Specifies the event(s) that this flow depends on.
1449
-
1450
- ```
1451
- @trigger(event='foo')
1452
- ```
1453
- or
1454
- ```
1455
- @trigger(events=['foo', 'bar'])
1456
- ```
1457
-
1458
- Additionally, you can specify the parameter mappings
1459
- to map event payload to Metaflow parameters for the flow.
1460
- ```
1461
- @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
1462
- ```
1463
- or
1464
- ```
1465
- @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
1466
- {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
1467
- ```
1468
-
1469
- 'parameters' can also be a list of strings and tuples like so:
1470
- ```
1471
- @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
1472
- ```
1473
- This is equivalent to:
1474
- ```
1475
- @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
1476
- ```
1477
-
1478
-
1479
- Parameters
1480
- ----------
1481
- event : Union[str, Dict[str, Any]], optional, default None
1482
- Event dependency for this flow.
1483
- events : List[Union[str, Dict[str, Any]]], default []
1484
- Events dependency for this flow.
1485
- options : Dict[str, Any], default {}
1486
- Backend-specific configuration for tuning eventing behavior.
1487
- """
1488
- ...
1489
-
1490
- @typing.overload
1491
- def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1492
- """
1493
- Specifies the times when the flow should be run when running on a
1494
- production scheduler.
1495
-
1496
-
1497
- Parameters
1498
- ----------
1499
- hourly : bool, default False
1500
- Run the workflow hourly.
1501
- daily : bool, default True
1502
- Run the workflow daily.
1503
- weekly : bool, default False
1504
- Run the workflow weekly.
1505
- cron : str, optional, default None
1506
- Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1507
- specified by this expression.
1508
- timezone : str, optional, default None
1509
- Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1510
- which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1511
- """
1512
- ...
1513
-
1514
- @typing.overload
1515
- def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1516
- ...
1517
-
1518
- def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
1519
- """
1520
- Specifies the times when the flow should be run when running on a
1521
- production scheduler.
1522
-
1523
-
1524
- Parameters
1525
- ----------
1526
- hourly : bool, default False
1527
- Run the workflow hourly.
1528
- daily : bool, default True
1529
- Run the workflow daily.
1530
- weekly : bool, default False
1531
- Run the workflow weekly.
1532
- cron : str, optional, default None
1533
- Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1534
- specified by this expression.
1535
- timezone : str, optional, default None
1536
- Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1537
- which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1538
- """
1539
- ...
1540
-
1541
1541
  pkg_name: str
1542
1542