metaflow-stubs 2.15.15__py2.py3-none-any.whl → 2.15.17__py2.py3-none-any.whl

This diff compares the contents of two publicly released versions of the package, as published to a supported public registry. It is provided for informational purposes only and reflects the packages exactly as they appear in that registry.
Files changed (149)
  1. metaflow-stubs/__init__.pyi +456 -452
  2. metaflow-stubs/cards.pyi +2 -2
  3. metaflow-stubs/cli.pyi +2 -2
  4. metaflow-stubs/cli_components/__init__.pyi +2 -2
  5. metaflow-stubs/cli_components/utils.pyi +2 -2
  6. metaflow-stubs/client/__init__.pyi +2 -2
  7. metaflow-stubs/client/core.pyi +26 -26
  8. metaflow-stubs/client/filecache.pyi +3 -3
  9. metaflow-stubs/events.pyi +6 -6
  10. metaflow-stubs/exception.pyi +2 -2
  11. metaflow-stubs/flowspec.pyi +5 -5
  12. metaflow-stubs/generated_for.txt +1 -1
  13. metaflow-stubs/includefile.pyi +5 -5
  14. metaflow-stubs/info_file.pyi +2 -2
  15. metaflow-stubs/metadata_provider/__init__.pyi +2 -2
  16. metaflow-stubs/metadata_provider/heartbeat.pyi +2 -2
  17. metaflow-stubs/metadata_provider/metadata.pyi +3 -3
  18. metaflow-stubs/metadata_provider/util.pyi +2 -2
  19. metaflow-stubs/metaflow_config.pyi +4 -2
  20. metaflow-stubs/metaflow_current.pyi +22 -22
  21. metaflow-stubs/metaflow_git.pyi +2 -2
  22. metaflow-stubs/multicore_utils.pyi +2 -2
  23. metaflow-stubs/parameters.pyi +4 -4
  24. metaflow-stubs/plugins/__init__.pyi +11 -11
  25. metaflow-stubs/plugins/airflow/__init__.pyi +2 -2
  26. metaflow-stubs/plugins/airflow/airflow_decorator.pyi +2 -2
  27. metaflow-stubs/plugins/airflow/airflow_utils.pyi +2 -2
  28. metaflow-stubs/plugins/airflow/exception.pyi +2 -2
  29. metaflow-stubs/plugins/airflow/sensors/__init__.pyi +2 -2
  30. metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +2 -2
  31. metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +2 -2
  32. metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +2 -2
  33. metaflow-stubs/plugins/argo/__init__.pyi +2 -2
  34. metaflow-stubs/plugins/argo/argo_client.pyi +2 -2
  35. metaflow-stubs/plugins/argo/argo_events.pyi +2 -2
  36. metaflow-stubs/plugins/argo/argo_workflows.pyi +5 -3
  37. metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +4 -4
  38. metaflow-stubs/plugins/argo/argo_workflows_deployer.pyi +4 -4
  39. metaflow-stubs/plugins/argo/argo_workflows_deployer_objects.pyi +4 -6
  40. metaflow-stubs/plugins/aws/__init__.pyi +2 -2
  41. metaflow-stubs/plugins/aws/aws_client.pyi +2 -2
  42. metaflow-stubs/plugins/aws/aws_utils.pyi +2 -2
  43. metaflow-stubs/plugins/aws/batch/__init__.pyi +2 -2
  44. metaflow-stubs/plugins/aws/batch/batch.pyi +2 -2
  45. metaflow-stubs/plugins/aws/batch/batch_client.pyi +2 -2
  46. metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +2 -2
  47. metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +2 -2
  48. metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +3 -3
  49. metaflow-stubs/plugins/aws/step_functions/__init__.pyi +2 -2
  50. metaflow-stubs/plugins/aws/step_functions/dynamo_db_client.pyi +2 -2
  51. metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +2 -2
  52. metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +2 -2
  53. metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +2 -2
  54. metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +2 -2
  55. metaflow-stubs/plugins/aws/step_functions/step_functions_decorator.pyi +2 -2
  56. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer.pyi +4 -4
  57. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer_objects.pyi +4 -4
  58. metaflow-stubs/plugins/azure/__init__.pyi +2 -2
  59. metaflow-stubs/plugins/azure/azure_credential.pyi +2 -2
  60. metaflow-stubs/plugins/azure/azure_exceptions.pyi +2 -2
  61. metaflow-stubs/plugins/azure/azure_secret_manager_secrets_provider.pyi +3 -3
  62. metaflow-stubs/plugins/azure/azure_utils.pyi +2 -2
  63. metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +2 -2
  64. metaflow-stubs/plugins/azure/includefile_support.pyi +2 -2
  65. metaflow-stubs/plugins/cards/__init__.pyi +2 -2
  66. metaflow-stubs/plugins/cards/card_client.pyi +2 -2
  67. metaflow-stubs/plugins/cards/card_creator.pyi +2 -2
  68. metaflow-stubs/plugins/cards/card_datastore.pyi +2 -2
  69. metaflow-stubs/plugins/cards/card_decorator.pyi +2 -2
  70. metaflow-stubs/plugins/cards/card_modules/__init__.pyi +2 -2
  71. metaflow-stubs/plugins/cards/card_modules/basic.pyi +3 -3
  72. metaflow-stubs/plugins/cards/card_modules/card.pyi +2 -2
  73. metaflow-stubs/plugins/cards/card_modules/components.pyi +5 -5
  74. metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +2 -2
  75. metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +2 -2
  76. metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +2 -2
  77. metaflow-stubs/plugins/cards/card_resolver.pyi +2 -2
  78. metaflow-stubs/plugins/cards/component_serializer.pyi +2 -2
  79. metaflow-stubs/plugins/cards/exception.pyi +2 -2
  80. metaflow-stubs/plugins/catch_decorator.pyi +3 -3
  81. metaflow-stubs/plugins/datatools/__init__.pyi +2 -2
  82. metaflow-stubs/plugins/datatools/local.pyi +2 -2
  83. metaflow-stubs/plugins/datatools/s3/__init__.pyi +2 -2
  84. metaflow-stubs/plugins/datatools/s3/s3.pyi +17 -17
  85. metaflow-stubs/plugins/datatools/s3/s3tail.pyi +2 -2
  86. metaflow-stubs/plugins/datatools/s3/s3util.pyi +2 -2
  87. metaflow-stubs/plugins/debug_logger.pyi +2 -2
  88. metaflow-stubs/plugins/debug_monitor.pyi +2 -2
  89. metaflow-stubs/plugins/environment_decorator.pyi +2 -2
  90. metaflow-stubs/plugins/events_decorator.pyi +2 -2
  91. metaflow-stubs/plugins/frameworks/__init__.pyi +2 -2
  92. metaflow-stubs/plugins/frameworks/pytorch.pyi +2 -2
  93. metaflow-stubs/plugins/gcp/__init__.pyi +2 -2
  94. metaflow-stubs/plugins/gcp/gcp_secret_manager_secrets_provider.pyi +3 -3
  95. metaflow-stubs/plugins/gcp/gs_exceptions.pyi +2 -2
  96. metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +2 -2
  97. metaflow-stubs/plugins/gcp/gs_utils.pyi +2 -2
  98. metaflow-stubs/plugins/gcp/includefile_support.pyi +2 -2
  99. metaflow-stubs/plugins/kubernetes/__init__.pyi +2 -2
  100. metaflow-stubs/plugins/kubernetes/kube_utils.pyi +3 -3
  101. metaflow-stubs/plugins/kubernetes/kubernetes.pyi +4 -4
  102. metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +2 -2
  103. metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +8 -2
  104. metaflow-stubs/plugins/kubernetes/kubernetes_job.pyi +2 -2
  105. metaflow-stubs/plugins/kubernetes/kubernetes_jobsets.pyi +2 -2
  106. metaflow-stubs/plugins/kubernetes/spot_monitor_sidecar.pyi +2 -2
  107. metaflow-stubs/plugins/parallel_decorator.pyi +2 -2
  108. metaflow-stubs/plugins/project_decorator.pyi +2 -2
  109. metaflow-stubs/plugins/pypi/__init__.pyi +2 -2
  110. metaflow-stubs/plugins/pypi/conda_decorator.pyi +2 -2
  111. metaflow-stubs/plugins/pypi/conda_environment.pyi +5 -5
  112. metaflow-stubs/plugins/pypi/parsers.pyi +2 -2
  113. metaflow-stubs/plugins/pypi/pypi_decorator.pyi +2 -2
  114. metaflow-stubs/plugins/pypi/pypi_environment.pyi +2 -2
  115. metaflow-stubs/plugins/pypi/utils.pyi +2 -2
  116. metaflow-stubs/plugins/resources_decorator.pyi +2 -2
  117. metaflow-stubs/plugins/retry_decorator.pyi +2 -2
  118. metaflow-stubs/plugins/secrets/__init__.pyi +2 -2
  119. metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +2 -2
  120. metaflow-stubs/plugins/secrets/secrets_decorator.pyi +2 -2
  121. metaflow-stubs/plugins/storage_executor.pyi +2 -2
  122. metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +3 -3
  123. metaflow-stubs/plugins/timeout_decorator.pyi +3 -3
  124. metaflow-stubs/plugins/uv/__init__.pyi +2 -2
  125. metaflow-stubs/plugins/uv/uv_environment.pyi +3 -3
  126. metaflow-stubs/pylint_wrapper.pyi +2 -2
  127. metaflow-stubs/runner/__init__.pyi +2 -2
  128. metaflow-stubs/runner/deployer.pyi +9 -6
  129. metaflow-stubs/runner/deployer_impl.pyi +2 -2
  130. metaflow-stubs/runner/metaflow_runner.pyi +4 -4
  131. metaflow-stubs/runner/nbdeploy.pyi +2 -2
  132. metaflow-stubs/runner/nbrun.pyi +2 -2
  133. metaflow-stubs/runner/subprocess_manager.pyi +2 -2
  134. metaflow-stubs/runner/utils.pyi +3 -3
  135. metaflow-stubs/system/__init__.pyi +2 -2
  136. metaflow-stubs/system/system_logger.pyi +3 -3
  137. metaflow-stubs/system/system_monitor.pyi +2 -2
  138. metaflow-stubs/tagging_util.pyi +2 -2
  139. metaflow-stubs/tuple_util.pyi +2 -2
  140. metaflow-stubs/user_configs/__init__.pyi +2 -2
  141. metaflow-stubs/user_configs/config_decorators.pyi +7 -7
  142. metaflow-stubs/user_configs/config_options.pyi +3 -3
  143. metaflow-stubs/user_configs/config_parameters.pyi +4 -4
  144. metaflow-stubs/version.pyi +2 -2
  145. {metaflow_stubs-2.15.15.dist-info → metaflow_stubs-2.15.17.dist-info}/METADATA +2 -2
  146. metaflow_stubs-2.15.17.dist-info/RECORD +149 -0
  147. metaflow_stubs-2.15.15.dist-info/RECORD +0 -149
  148. {metaflow_stubs-2.15.15.dist-info → metaflow_stubs-2.15.17.dist-info}/WHEEL +0 -0
  149. {metaflow_stubs-2.15.15.dist-info → metaflow_stubs-2.15.17.dist-info}/top_level.txt +0 -0
@@ -1,15 +1,15 @@
 ######################################################################################################
 # Auto-generated Metaflow stub file #
-# MF version: 2.15.15 #
-# Generated on 2025-05-30T13:33:59.796602 #
+# MF version: 2.15.17 #
+# Generated on 2025-06-13T18:00:28.046472 #
 ######################################################################################################
 
 from __future__ import annotations
 
 import typing
 if typing.TYPE_CHECKING:
-    import typing
     import datetime
+    import typing
 FlowSpecDerived = typing.TypeVar("FlowSpecDerived", bound="FlowSpec", contravariant=False, covariant=False)
 StepFlag = typing.NewType("StepFlag", bool)
 
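Context for readers of the stubs: `FlowSpecDerived` and `StepFlag` above are the typing hooks that every decorator signature in this file reuses. A minimal sketch of a flow that these stubs would type-check (the flow and step names are illustrative, not from this diff):

```python
# Illustrative only: a flow whose @step functions match the
# Callable[[FlowSpecDerived, StepFlag], None] shape used throughout the stubs.
from metaflow import FlowSpec, step

class HelloFlow(FlowSpec):
    @step
    def start(self) -> None:
        # `self` is typed as the FlowSpec subclass (FlowSpecDerived),
        # so artifact assignments are visible to the type checker.
        self.greeting = "hello"
        self.next(self.end)

    @step
    def end(self) -> None:
        print(self.greeting)

if __name__ == "__main__":
    HelloFlow()
```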
@@ -35,17 +35,17 @@ from .user_configs.config_parameters import ConfigValue as ConfigValue
 from .user_configs.config_parameters import config_expr as config_expr
 from .user_configs.config_decorators import CustomFlowDecorator as CustomFlowDecorator
 from .user_configs.config_decorators import CustomStepDecorator as CustomStepDecorator
+from . import metaflow_git as metaflow_git
 from . import events as events
 from . import tuple_util as tuple_util
-from . import metaflow_git as metaflow_git
 from . import runner as runner
 from . import plugins as plugins
 from .plugins.datatools.s3.s3 import S3 as S3
 from . import includefile as includefile
 from .includefile import IncludeFile as IncludeFile
-from .plugins.pypi.parsers import requirements_txt_parser as requirements_txt_parser
-from .plugins.pypi.parsers import conda_environment_yml_parser as conda_environment_yml_parser
 from .plugins.pypi.parsers import pyproject_toml_parser as pyproject_toml_parser
+from .plugins.pypi.parsers import conda_environment_yml_parser as conda_environment_yml_parser
+from .plugins.pypi.parsers import requirements_txt_parser as requirements_txt_parser
 from . import cards as cards
 from . import client as client
 from .client.core import namespace as namespace
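The three `plugins.pypi.parsers` re-exports above only changed order in this release. For orientation, they turn dependency files into the package mappings that `@pypi_base`/`@conda_base` expect; a hedged sketch of calling one directly (the exact return shape is an assumption, not something this diff shows):

```python
# Hedged sketch: the parser is assumed to take file *content* as a string
# and return a dict of decorator keyword arguments (e.g. a "packages"
# mapping). Verify against the metaflow version you use.
from metaflow import requirements_txt_parser

content = "requests==2.32.3\npandas==2.2.2\n"
parsed = requirements_txt_parser(content)
print(parsed)  # assumed: {"packages": {"requests": "2.32.3", "pandas": "2.2.2"}, ...}
```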
@@ -146,77 +146,7 @@ def step(f: typing.Union[typing.Callable[[FlowSpecDerived], None], typing.Callab
     """
     ...
 
-@typing.overload
-def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
-    """
-    Specifies that the step will success under all circumstances.
-
-    The decorator will create an optional artifact, specified by `var`, which
-    contains the exception raised. You can use it to detect the presence
-    of errors, indicating that all happy-path artifacts produced by the step
-    are missing.
-
-
-    Parameters
-    ----------
-    var : str, optional, default None
-        Name of the artifact in which to store the caught exception.
-        If not specified, the exception is not stored.
-    print_exception : bool, default True
-        Determines whether or not the exception is printed to
-        stdout when caught.
-    """
-    ...
-
-@typing.overload
-def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
-    ...
-
-@typing.overload
-def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
-    ...
-
-def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
-    """
-    Specifies that the step will success under all circumstances.
-
-    The decorator will create an optional artifact, specified by `var`, which
-    contains the exception raised. You can use it to detect the presence
-    of errors, indicating that all happy-path artifacts produced by the step
-    are missing.
-
-
-    Parameters
-    ----------
-    var : str, optional, default None
-        Name of the artifact in which to store the caught exception.
-        If not specified, the exception is not stored.
-    print_exception : bool, default True
-        Determines whether or not the exception is printed to
-        stdout when caught.
-    """
-    ...
-
-@typing.overload
-def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
-    """
-    Decorator prototype for all step decorators. This function gets specialized
-    and imported for all decorators types by _import_plugin_decorators().
-    """
-    ...
-
-@typing.overload
-def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
-    ...
-
-def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
-    """
-    Decorator prototype for all step decorators. This function gets specialized
-    and imported for all decorators types by _import_plugin_decorators().
-    """
-    ...
-
-def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = 'KUBERNETES_IMAGE_PULL_POLICY', service_account: str = 'METAFLOW_KUBERNETES_SERVICE_ACCOUNT', secrets: typing.Optional[typing.List[str]] = None, node_selector: typing.Union[typing.Dict[str, str], str, None] = None, namespace: str = 'METAFLOW_KUBERNETES_NAMESPACE', gpu: typing.Optional[int] = None, gpu_vendor: str = 'KUBERNETES_GPU_VENDOR', tolerations: typing.List[str] = [], labels: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_LABELS', annotations: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_ANNOTATIONS', use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = '/metaflow_temp', persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None, hostname_resolution_timeout: int = 600, qos: str = 'Burstable', security_context: typing.Optional[typing.Dict[str, typing.Any]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = 'KUBERNETES_IMAGE_PULL_POLICY', image_pull_secrets: typing.List[str] = [], service_account: str = 'METAFLOW_KUBERNETES_SERVICE_ACCOUNT', secrets: typing.Optional[typing.List[str]] = None, node_selector: typing.Union[typing.Dict[str, str], str, None] = None, namespace: str = 'METAFLOW_KUBERNETES_NAMESPACE', gpu: typing.Optional[int] = None, gpu_vendor: str = 'KUBERNETES_GPU_VENDOR', tolerations: typing.List[str] = [], labels: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_LABELS', annotations: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_ANNOTATIONS', use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = '/metaflow_temp', persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None, hostname_resolution_timeout: int = 600, qos: str = 'Burstable', security_context: typing.Optional[typing.Dict[str, typing.Any]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
     """
     Specifies that this step should execute on Kubernetes.
 
@@ -240,6 +170,10 @@ def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: ty
         not, a default Docker image mapping to the current version of Python is used.
     image_pull_policy: str, default KUBERNETES_IMAGE_PULL_POLICY
         If given, the imagePullPolicy to be applied to the Docker image of the step.
+    image_pull_secrets: List[str], default []
+        The default is extracted from METAFLOW_KUBERNETES_IMAGE_PULL_SECRETS.
+        Kubernetes image pull secrets to use when pulling container images
+        in Kubernetes.
     service_account : str, default METAFLOW_KUBERNETES_SERVICE_ACCOUNT
         Kubernetes service account to use when launching pod in Kubernetes.
     secrets : List[str], optional, default None
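The `image_pull_secrets` parameter above is the main functional addition to `@kubernetes` in 2.15.17. A minimal sketch of a step pulling its image from a private registry; the secret name and image URL are placeholders:

```python
# Sketch: the named secret must already exist in the target namespace as a
# kubernetes.io/dockerconfigjson secret; "regcred" and the image are placeholders.
from metaflow import FlowSpec, step, kubernetes

class PrivateImageFlow(FlowSpec):
    @kubernetes(
        image="registry.example.com/team/worker:1.0",
        image_pull_secrets=["regcred"],
    )
    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    PrivateImageFlow()
```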
@@ -301,6 +235,57 @@ def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: ty
     """
     ...
 
+@typing.overload
+def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+    """
+    Specifies the PyPI packages for the step.
+
+    Information in this decorator will augment any
+    attributes set in the `@pyi_base` flow-level decorator. Hence,
+    you can use `@pypi_base` to set packages required by all
+    steps and use `@pypi` to specify step-specific overrides.
+
+
+    Parameters
+    ----------
+    packages : Dict[str, str], default: {}
+        Packages to use for this step. The key is the name of the package
+        and the value is the version to use.
+    python : str, optional, default: None
+        Version of Python to use, e.g. '3.7.4'. A default value of None implies
+        that the version used will correspond to the version of the Python interpreter used to start the run.
+    """
+    ...
+
+@typing.overload
+def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+    ...
+
+@typing.overload
+def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+    ...
+
+def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
+    """
+    Specifies the PyPI packages for the step.
+
+    Information in this decorator will augment any
+    attributes set in the `@pyi_base` flow-level decorator. Hence,
+    you can use `@pypi_base` to set packages required by all
+    steps and use `@pypi` to specify step-specific overrides.
+
+
+    Parameters
+    ----------
+    packages : Dict[str, str], default: {}
+        Packages to use for this step. The key is the name of the package
+        and the value is the version to use.
+    python : str, optional, default: None
+        Version of Python to use, e.g. '3.7.4'. A default value of None implies
+        that the version used will correspond to the version of the Python interpreter used to start the run.
+    """
+    ...
+
 @typing.overload
 def card(*, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
     """
@@ -351,202 +336,57 @@ def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typ
     ...
 
 @typing.overload
-def batch(*, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = 'METAFLOW_BATCH_JOB_QUEUE', iam_role: str = 'METAFLOW_ECS_S3_ACCESS_IAM_ROLE', execution_role: str = 'METAFLOW_ECS_FARGATE_EXECUTION_ROLE', shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
     """
-    Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
+    Specifies the number of times the task corresponding
+    to a step needs to be retried.
+
+    This decorator is useful for handling transient errors, such as networking issues.
+    If your task contains operations that can't be retried safely, e.g. database updates,
+    it is advisable to annotate it with `@retry(times=0)`.
+
+    This can be used in conjunction with the `@catch` decorator. The `@catch`
+    decorator will execute a no-op task after all retries have been exhausted,
+    ensuring that the flow execution can continue.
 
 
     Parameters
     ----------
-    cpu : int, default 1
-        Number of CPUs required for this step. If `@resources` is
-        also present, the maximum value from all decorators is used.
-    gpu : int, default 0
-        Number of GPUs required for this step. If `@resources` is
-        also present, the maximum value from all decorators is used.
-    memory : int, default 4096
-        Memory size (in MB) required for this step. If
-        `@resources` is also present, the maximum value from all decorators is
-        used.
-    image : str, optional, default None
-        Docker image to use when launching on AWS Batch. If not specified, and
-        METAFLOW_BATCH_CONTAINER_IMAGE is specified, that image is used. If
-        not, a default Docker image mapping to the current version of Python is used.
-    queue : str, default METAFLOW_BATCH_JOB_QUEUE
-        AWS Batch Job Queue to submit the job to.
-    iam_role : str, default METAFLOW_ECS_S3_ACCESS_IAM_ROLE
-        AWS IAM role that AWS Batch container uses to access AWS cloud resources.
-    execution_role : str, default METAFLOW_ECS_FARGATE_EXECUTION_ROLE
-        AWS IAM role that AWS Batch can use [to trigger AWS Fargate tasks]
-        (https://docs.aws.amazon.com/batch/latest/userguide/execution-IAM-role.html).
-    shared_memory : int, optional, default None
-        The value for the size (in MiB) of the /dev/shm volume for this step.
-        This parameter maps to the `--shm-size` option in Docker.
-    max_swap : int, optional, default None
-        The total amount of swap memory (in MiB) a container can use for this
-        step. This parameter is translated to the `--memory-swap` option in
-        Docker where the value is the sum of the container memory plus the
-        `max_swap` value.
-    swappiness : int, optional, default None
-        This allows you to tune memory swappiness behavior for this step.
-        A swappiness value of 0 causes swapping not to happen unless absolutely
-        necessary. A swappiness value of 100 causes pages to be swapped very
-        aggressively. Accepted values are whole numbers between 0 and 100.
-    use_tmpfs : bool, default False
-        This enables an explicit tmpfs mount for this step. Note that tmpfs is
-        not available on Fargate compute environments
-    tmpfs_tempdir : bool, default True
-        sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
-    tmpfs_size : int, optional, default None
-        The value for the size (in MiB) of the tmpfs mount for this step.
-        This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
-        memory allocated for this step.
-    tmpfs_path : str, optional, default None
-        Path to tmpfs mount for this step. Defaults to /metaflow_temp.
-    inferentia : int, default 0
-        Number of Inferentia chips required for this step.
-    trainium : int, default None
-        Alias for inferentia. Use only one of the two.
-    efa : int, default 0
-        Number of elastic fabric adapter network devices to attach to container
-    ephemeral_storage : int, default None
-        The total amount, in GiB, of ephemeral storage to set for the task, 21-200GiB.
-        This is only relevant for Fargate compute environments
-    log_driver: str, optional, default None
-        The log driver to use for the Amazon ECS container.
-    log_options: List[str], optional, default None
-        List of strings containing options for the chosen log driver. The configurable values
-        depend on the `log driver` chosen. Validation of these options is not supported yet.
-        Example: [`awslogs-group:aws/batch/job`]
+    times : int, default 3
+        Number of times to retry this task.
+    minutes_between_retries : int, default 2
+        Number of minutes between retries.
     """
     ...
 
 @typing.overload
-def batch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
     ...
 
 @typing.overload
-def batch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
     ...
 
-def batch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = 'METAFLOW_BATCH_JOB_QUEUE', iam_role: str = 'METAFLOW_ECS_S3_ACCESS_IAM_ROLE', execution_role: str = 'METAFLOW_ECS_FARGATE_EXECUTION_ROLE', shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None):
+def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
     """
-    Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
+    Specifies the number of times the task corresponding
+    to a step needs to be retried.
+
+    This decorator is useful for handling transient errors, such as networking issues.
+    If your task contains operations that can't be retried safely, e.g. database updates,
+    it is advisable to annotate it with `@retry(times=0)`.
+
+    This can be used in conjunction with the `@catch` decorator. The `@catch`
+    decorator will execute a no-op task after all retries have been exhausted,
+    ensuring that the flow execution can continue.
 
 
     Parameters
     ----------
-    cpu : int, default 1
-        Number of CPUs required for this step. If `@resources` is
-        also present, the maximum value from all decorators is used.
-    gpu : int, default 0
-        Number of GPUs required for this step. If `@resources` is
-        also present, the maximum value from all decorators is used.
-    memory : int, default 4096
-        Memory size (in MB) required for this step. If
-        `@resources` is also present, the maximum value from all decorators is
-        used.
-    image : str, optional, default None
-        Docker image to use when launching on AWS Batch. If not specified, and
-        METAFLOW_BATCH_CONTAINER_IMAGE is specified, that image is used. If
-        not, a default Docker image mapping to the current version of Python is used.
-    queue : str, default METAFLOW_BATCH_JOB_QUEUE
-        AWS Batch Job Queue to submit the job to.
-    iam_role : str, default METAFLOW_ECS_S3_ACCESS_IAM_ROLE
-        AWS IAM role that AWS Batch container uses to access AWS cloud resources.
-    execution_role : str, default METAFLOW_ECS_FARGATE_EXECUTION_ROLE
-        AWS IAM role that AWS Batch can use [to trigger AWS Fargate tasks]
-        (https://docs.aws.amazon.com/batch/latest/userguide/execution-IAM-role.html).
-    shared_memory : int, optional, default None
-        The value for the size (in MiB) of the /dev/shm volume for this step.
-        This parameter maps to the `--shm-size` option in Docker.
-    max_swap : int, optional, default None
-        The total amount of swap memory (in MiB) a container can use for this
-        step. This parameter is translated to the `--memory-swap` option in
-        Docker where the value is the sum of the container memory plus the
-        `max_swap` value.
-    swappiness : int, optional, default None
-        This allows you to tune memory swappiness behavior for this step.
-        A swappiness value of 0 causes swapping not to happen unless absolutely
-        necessary. A swappiness value of 100 causes pages to be swapped very
-        aggressively. Accepted values are whole numbers between 0 and 100.
-    use_tmpfs : bool, default False
-        This enables an explicit tmpfs mount for this step. Note that tmpfs is
-        not available on Fargate compute environments
-    tmpfs_tempdir : bool, default True
-        sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
-    tmpfs_size : int, optional, default None
-        The value for the size (in MiB) of the tmpfs mount for this step.
-        This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
-        memory allocated for this step.
-    tmpfs_path : str, optional, default None
-        Path to tmpfs mount for this step. Defaults to /metaflow_temp.
-    inferentia : int, default 0
-        Number of Inferentia chips required for this step.
-    trainium : int, default None
-        Alias for inferentia. Use only one of the two.
-    efa : int, default 0
-        Number of elastic fabric adapter network devices to attach to container
-    ephemeral_storage : int, default None
-        The total amount, in GiB, of ephemeral storage to set for the task, 21-200GiB.
-        This is only relevant for Fargate compute environments
-    log_driver: str, optional, default None
-        The log driver to use for the Amazon ECS container.
-    log_options: List[str], optional, default None
-        List of strings containing options for the chosen log driver. The configurable values
-        depend on the `log driver` chosen. Validation of these options is not supported yet.
-        Example: [`awslogs-group:aws/batch/job`]
-    """
-    ...
-
-@typing.overload
-def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
-    """
-    Specifies the PyPI packages for the step.
-
-    Information in this decorator will augment any
-    attributes set in the `@pyi_base` flow-level decorator. Hence,
-    you can use `@pypi_base` to set packages required by all
-    steps and use `@pypi` to specify step-specific overrides.
-
-
-    Parameters
-    ----------
-    packages : Dict[str, str], default: {}
-        Packages to use for this step. The key is the name of the package
-        and the value is the version to use.
-    python : str, optional, default: None
-        Version of Python to use, e.g. '3.7.4'. A default value of None implies
-        that the version used will correspond to the version of the Python interpreter used to start the run.
-    """
-    ...
-
-@typing.overload
-def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
-    ...
-
-@typing.overload
-def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
-    ...
-
-def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
-    """
-    Specifies the PyPI packages for the step.
-
-    Information in this decorator will augment any
-    attributes set in the `@pyi_base` flow-level decorator. Hence,
-    you can use `@pypi_base` to set packages required by all
-    steps and use `@pypi` to specify step-specific overrides.
-
-
-    Parameters
-    ----------
-    packages : Dict[str, str], default: {}
-        Packages to use for this step. The key is the name of the package
-        and the value is the version to use.
-    python : str, optional, default: None
-        Version of Python to use, e.g. '3.7.4'. A default value of None implies
-        that the version used will correspond to the version of the Python interpreter used to start the run.
+    times : int, default 3
+        Number of times to retry this task.
+    minutes_between_retries : int, default 2
+        Number of minutes between retries.
     """
     ...
 
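`@retry` likewise moved rather than changed. Following its docstring, a sketch of retrying a transiently failing step while keeping a non-idempotent step at `times=0`; values are illustrative:

```python
# Sketch: @retry guards the flaky fetch; the database write opts out of
# retries, per the docstring's advice about unsafe operations.
from metaflow import FlowSpec, step, retry

class RobustFlow(FlowSpec):
    @retry(times=4, minutes_between_retries=1)
    @step
    def start(self):
        # transient failures (e.g. a network fetch) get retried
        self.next(self.write)

    @retry(times=0)
    @step
    def write(self):
        # not safely retryable (e.g. a database update), so no retries
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    RobustFlow()
```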
@@ -584,116 +424,56 @@ def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], Non
     ...
 
 @typing.overload
-def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
     """
-    Specifies the Conda environment for the step.
-
-    Information in this decorator will augment any
-    attributes set in the `@conda_base` flow-level decorator. Hence,
-    you can use `@conda_base` to set packages required by all
-    steps and use `@conda` to specify step-specific overrides.
+    Specifies secrets to be retrieved and injected as environment variables prior to
+    the execution of a step.
 
 
     Parameters
     ----------
-    packages : Dict[str, str], default {}
-        Packages to use for this step. The key is the name of the package
-        and the value is the version to use.
-    libraries : Dict[str, str], default {}
-        Supported for backward compatibility. When used with packages, packages will take precedence.
-    python : str, optional, default None
-        Version of Python to use, e.g. '3.7.4'. A default value of None implies
-        that the version used will correspond to the version of the Python interpreter used to start the run.
-    disabled : bool, default False
-        If set to True, disables @conda.
+    sources : List[Union[str, Dict[str, Any]]], default: []
+        List of secret specs, defining how the secrets are to be retrieved
     """
     ...
 
 @typing.overload
-def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
    ...
 
 @typing.overload
-def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
     ...
 
-def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
+def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []):
     """
-    Specifies the Conda environment for the step.
-
-    Information in this decorator will augment any
-    attributes set in the `@conda_base` flow-level decorator. Hence,
-    you can use `@conda_base` to set packages required by all
-    steps and use `@conda` to specify step-specific overrides.
+    Specifies secrets to be retrieved and injected as environment variables prior to
+    the execution of a step.
 
 
     Parameters
     ----------
-    packages : Dict[str, str], default {}
-        Packages to use for this step. The key is the name of the package
-        and the value is the version to use.
-    libraries : Dict[str, str], default {}
-        Supported for backward compatibility. When used with packages, packages will take precedence.
-    python : str, optional, default None
-        Version of Python to use, e.g. '3.7.4'. A default value of None implies
-        that the version used will correspond to the version of the Python interpreter used to start the run.
-    disabled : bool, default False
-        If set to True, disables @conda.
+    sources : List[Union[str, Dict[str, Any]]], default: []
+        List of secret specs, defining how the secrets are to be retrieved
     """
     ...
 
 @typing.overload
-def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
     """
-    Specifies the number of times the task corresponding
-    to a step needs to be retried.
-
-    This decorator is useful for handling transient errors, such as networking issues.
-    If your task contains operations that can't be retried safely, e.g. database updates,
-    it is advisable to annotate it with `@retry(times=0)`.
-
-    This can be used in conjunction with the `@catch` decorator. The `@catch`
-    decorator will execute a no-op task after all retries have been exhausted,
-    ensuring that the flow execution can continue.
-
-
-    Parameters
-    ----------
-    times : int, default 3
-        Number of times to retry this task.
-    minutes_between_retries : int, default 2
-        Number of minutes between retries.
+    Decorator prototype for all step decorators. This function gets specialized
+    and imported for all decorators types by _import_plugin_decorators().
     """
     ...
 
 @typing.overload
-def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
-    ...
-
-@typing.overload
-def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
     ...
 
-def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
+def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
     """
-    Specifies the number of times the task corresponding
-    to a step needs to be retried.
-
-    This decorator is useful for handling transient errors, such as networking issues.
-    If your task contains operations that can't be retried safely, e.g. database updates,
-    it is advisable to annotate it with `@retry(times=0)`.
-
-    This can be used in conjunction with the `@catch` decorator. The `@catch`
-    decorator will execute a no-op task after all retries have been exhausted,
-    ensuring that the flow execution can continue.
-
-
-    Parameters
-    ----------
-    times : int, default 3
-        Number of times to retry this task.
-    minutes_between_retries : int, default 2
-        Number of minutes between retries.
+    Decorator prototype for all step decorators. This function gets specialized
+    and imported for all decorators types by _import_plugin_decorators().
     """
     ...
 
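For the relocated `@secrets` decorator above, a sketch of the documented shape (`sources` is a list of secret specs). The source name and the environment variable it yields are placeholders; what actually appears depends on the configured secrets backend (e.g. AWS Secrets Manager):

```python
# Sketch: "my-db-credentials" and DB_PASSWORD are placeholders; which
# environment variables appear depends on the secret's contents and backend.
import os
from metaflow import FlowSpec, step, secrets

class SecretFlow(FlowSpec):
    @secrets(sources=["my-db-credentials"])
    @step
    def start(self):
        password = os.environ.get("DB_PASSWORD")
        self.have_password = password is not None
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    SecretFlow()
```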
@@ -777,37 +557,210 @@ def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None]
777
557
  ...
778
558
 
779
559
  @typing.overload
780
- def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
560
+ def batch(*, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = 'METAFLOW_BATCH_JOB_QUEUE', iam_role: str = 'METAFLOW_ECS_S3_ACCESS_IAM_ROLE', execution_role: str = 'METAFLOW_ECS_FARGATE_EXECUTION_ROLE', shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
781
561
  """
782
- Specifies secrets to be retrieved and injected as environment variables prior to
783
- the execution of a step.
562
+ Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
784
563
 
785
564
 
786
565
  Parameters
787
566
  ----------
788
- sources : List[Union[str, Dict[str, Any]]], default: []
789
- List of secret specs, defining how the secrets are to be retrieved
567
+ cpu : int, default 1
568
+ Number of CPUs required for this step. If `@resources` is
569
+ also present, the maximum value from all decorators is used.
570
+ gpu : int, default 0
571
+ Number of GPUs required for this step. If `@resources` is
572
+ also present, the maximum value from all decorators is used.
573
+ memory : int, default 4096
574
+ Memory size (in MB) required for this step. If
575
+ `@resources` is also present, the maximum value from all decorators is
576
+ used.
577
+ image : str, optional, default None
578
+ Docker image to use when launching on AWS Batch. If not specified, and
579
+ METAFLOW_BATCH_CONTAINER_IMAGE is specified, that image is used. If
580
+ not, a default Docker image mapping to the current version of Python is used.
581
+ queue : str, default METAFLOW_BATCH_JOB_QUEUE
582
+ AWS Batch Job Queue to submit the job to.
583
+ iam_role : str, default METAFLOW_ECS_S3_ACCESS_IAM_ROLE
584
+ AWS IAM role that AWS Batch container uses to access AWS cloud resources.
585
+ execution_role : str, default METAFLOW_ECS_FARGATE_EXECUTION_ROLE
586
+ AWS IAM role that AWS Batch can use [to trigger AWS Fargate tasks]
587
+ (https://docs.aws.amazon.com/batch/latest/userguide/execution-IAM-role.html).
588
+ shared_memory : int, optional, default None
589
+ The value for the size (in MiB) of the /dev/shm volume for this step.
590
+ This parameter maps to the `--shm-size` option in Docker.
591
+ max_swap : int, optional, default None
592
+ The total amount of swap memory (in MiB) a container can use for this
593
+ step. This parameter is translated to the `--memory-swap` option in
594
+ Docker where the value is the sum of the container memory plus the
595
+ `max_swap` value.
596
+ swappiness : int, optional, default None
597
+ This allows you to tune memory swappiness behavior for this step.
598
+ A swappiness value of 0 causes swapping not to happen unless absolutely
599
+ necessary. A swappiness value of 100 causes pages to be swapped very
600
+ aggressively. Accepted values are whole numbers between 0 and 100.
601
+ use_tmpfs : bool, default False
602
+ This enables an explicit tmpfs mount for this step. Note that tmpfs is
603
+ not available on Fargate compute environments
604
+ tmpfs_tempdir : bool, default True
605
+ sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
606
+ tmpfs_size : int, optional, default None
607
+ The value for the size (in MiB) of the tmpfs mount for this step.
608
+ This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
609
+ memory allocated for this step.
610
+ tmpfs_path : str, optional, default None
611
+ Path to tmpfs mount for this step. Defaults to /metaflow_temp.
612
+ inferentia : int, default 0
613
+ Number of Inferentia chips required for this step.
614
+ trainium : int, default None
615
+ Alias for inferentia. Use only one of the two.
616
+ efa : int, default 0
617
+ Number of elastic fabric adapter network devices to attach to container
618
+ ephemeral_storage : int, default None
619
+ The total amount, in GiB, of ephemeral storage to set for the task, 21-200GiB.
620
+ This is only relevant for Fargate compute environments
621
+ log_driver: str, optional, default None
622
+ The log driver to use for the Amazon ECS container.
623
+ log_options: List[str], optional, default None
624
+ List of strings containing options for the chosen log driver. The configurable values
625
+ depend on the `log driver` chosen. Validation of these options is not supported yet.
626
+ Example: [`awslogs-group:aws/batch/job`]
790
627
  """
791
628
  ...
792
629
 
793
630
  @typing.overload
794
- def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
631
+ def batch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
795
632
  ...
796
633
 
797
634
  @typing.overload
798
- def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
635
+ def batch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
799
636
  ...
800
637
 
801
- def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []):
638
+ def batch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = 'METAFLOW_BATCH_JOB_QUEUE', iam_role: str = 'METAFLOW_ECS_S3_ACCESS_IAM_ROLE', execution_role: str = 'METAFLOW_ECS_FARGATE_EXECUTION_ROLE', shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None):
802
639
  """
803
- Specifies secrets to be retrieved and injected as environment variables prior to
804
- the execution of a step.
640
+ Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
805
641
 
806
642
 
807
643
  Parameters
808
644
  ----------
809
- sources : List[Union[str, Dict[str, Any]]], default: []
810
- List of secret specs, defining how the secrets are to be retrieved
645
+ cpu : int, default 1
646
+ Number of CPUs required for this step. If `@resources` is
647
+ also present, the maximum value from all decorators is used.
648
+ gpu : int, default 0
649
+ Number of GPUs required for this step. If `@resources` is
650
+ also present, the maximum value from all decorators is used.
651
+ memory : int, default 4096
652
+ Memory size (in MB) required for this step. If
653
+ `@resources` is also present, the maximum value from all decorators is
654
+ used.
655
+ image : str, optional, default None
656
+ Docker image to use when launching on AWS Batch. If not specified, and
657
+ METAFLOW_BATCH_CONTAINER_IMAGE is specified, that image is used. If
658
+ not, a default Docker image mapping to the current version of Python is used.
659
+ queue : str, default METAFLOW_BATCH_JOB_QUEUE
660
+ AWS Batch Job Queue to submit the job to.
661
+ iam_role : str, default METAFLOW_ECS_S3_ACCESS_IAM_ROLE
662
+ AWS IAM role that AWS Batch container uses to access AWS cloud resources.
663
+ execution_role : str, default METAFLOW_ECS_FARGATE_EXECUTION_ROLE
664
+ AWS IAM role that AWS Batch can use [to trigger AWS Fargate tasks]
665
+ (https://docs.aws.amazon.com/batch/latest/userguide/execution-IAM-role.html).
666
+ shared_memory : int, optional, default None
667
+ The value for the size (in MiB) of the /dev/shm volume for this step.
668
+ This parameter maps to the `--shm-size` option in Docker.
669
+ max_swap : int, optional, default None
670
+ The total amount of swap memory (in MiB) a container can use for this
671
+ step. This parameter is translated to the `--memory-swap` option in
672
+ Docker where the value is the sum of the container memory plus the
673
+ `max_swap` value.
674
+ swappiness : int, optional, default None
675
+ This allows you to tune memory swappiness behavior for this step.
676
+ A swappiness value of 0 causes swapping not to happen unless absolutely
677
+ necessary. A swappiness value of 100 causes pages to be swapped very
678
+ aggressively. Accepted values are whole numbers between 0 and 100.
679
+ use_tmpfs : bool, default False
680
+ This enables an explicit tmpfs mount for this step. Note that tmpfs is
681
+ not available on Fargate compute environments
682
+ tmpfs_tempdir : bool, default True
683
+ sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
684
+ tmpfs_size : int, optional, default None
685
+ The value for the size (in MiB) of the tmpfs mount for this step.
686
+ This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
687
+ memory allocated for this step.
688
+ tmpfs_path : str, optional, default None
689
+ Path to tmpfs mount for this step. Defaults to /metaflow_temp.
690
+ inferentia : int, default 0
691
+ Number of Inferentia chips required for this step.
692
+ trainium : int, default None
693
+ Alias for inferentia. Use only one of the two.
694
+ efa : int, default 0
695
+ Number of elastic fabric adapter network devices to attach to container
696
+ ephemeral_storage : int, default None
697
+ The total amount, in GiB, of ephemeral storage to set for the task, 21-200GiB.
698
+ This is only relevant for Fargate compute environments
699
+ log_driver: str, optional, default None
700
+ The log driver to use for the Amazon ECS container.
701
+ log_options: List[str], optional, default None
702
+ List of strings containing options for the chosen log driver. The configurable values
703
+ depend on the `log driver` chosen. Validation of these options is not supported yet.
704
+ Example: [`awslogs-group:aws/batch/job`]
705
+ """
706
+ ...
707
+
+ @typing.overload
+ def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Specifies the Conda environment for the step.
+
+ Information in this decorator will augment any
+ attributes set in the `@conda_base` flow-level decorator. Hence,
+ you can use `@conda_base` to set packages required by all
+ steps and use `@conda` to specify step-specific overrides.
+
+
+ Parameters
+ ----------
+ packages : Dict[str, str], default {}
+ Packages to use for this step. The key is the name of the package
+ and the value is the version to use.
+ libraries : Dict[str, str], default {}
+ Supported for backward compatibility. When used with packages, packages will take precedence.
+ python : str, optional, default None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ disabled : bool, default False
+ If set to True, disables @conda.
+ """
+ ...
+
+ @typing.overload
+ def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ ...
+
+ @typing.overload
+ def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
+ """
+ Specifies the Conda environment for the step.
+
+ Information in this decorator will augment any
+ attributes set in the `@conda_base` flow-level decorator. Hence,
+ you can use `@conda_base` to set packages required by all
+ steps and use `@conda` to specify step-specific overrides.
+
+
+ Parameters
+ ----------
+ packages : Dict[str, str], default {}
+ Packages to use for this step. The key is the name of the package
+ and the value is the version to use.
+ libraries : Dict[str, str], default {}
+ Supported for backward compatibility. When used with packages, packages will take precedence.
+ python : str, optional, default None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ disabled : bool, default False
+ If set to True, disables @conda.
  """
  ...
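To make the flow-level/step-level interplay concrete, a small hedged example (hypothetical flow name and version pins): `@conda_base` pins the shared environment and `@conda` augments it for a single step.

```python
from metaflow import FlowSpec, conda, conda_base, step


# Flow-level environment shared by all steps.
@conda_base(python="3.10.4", packages={"numpy": "1.26.4"})
class CondaDemoFlow(FlowSpec):
    # Step-specific override: pandas is added on top of the flow-level packages.
    @conda(packages={"pandas": "2.2.2"})
    @step
    def start(self):
        import pandas  # resolved from this step's Conda environment

        print(pandas.__version__)
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    CondaDemoFlow()
```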
 
@@ -871,43 +824,53 @@ def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None],
  ...
 
  @typing.overload
- def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies the PyPI packages for all steps of the flow.
+ Specifies that the step will succeed under all circumstances.
+
+ The decorator will create an optional artifact, specified by `var`, which
+ contains the exception raised. You can use it to detect the presence
+ of errors, indicating that all happy-path artifacts produced by the step
+ are missing.
 
- Use `@pypi_base` to set common packages required by all
- steps and use `@pypi` to specify step-specific overrides.
 
  Parameters
  ----------
- packages : Dict[str, str], default: {}
- Packages to use for this flow. The key is the name of the package
- and the value is the version to use.
- python : str, optional, default: None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
+ var : str, optional, default None
+ Name of the artifact in which to store the caught exception.
+ If not specified, the exception is not stored.
+ print_exception : bool, default True
+ Determines whether or not the exception is printed to
+ stdout when caught.
  """
  ...
 
  @typing.overload
- def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...
 
- def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
+ @typing.overload
+ def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
  """
- Specifies the PyPI packages for all steps of the flow.
+ Specifies that the step will succeed under all circumstances.
+
+ The decorator will create an optional artifact, specified by `var`, which
+ contains the exception raised. You can use it to detect the presence
+ of errors, indicating that all happy-path artifacts produced by the step
+ are missing.
 
- Use `@pypi_base` to set common packages required by all
- steps and use `@pypi` to specify step-specific overrides.
 
  Parameters
  ----------
- packages : Dict[str, str], default: {}
- Packages to use for this flow. The key is the name of the package
- and the value is the version to use.
- python : str, optional, default: None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
+ var : str, optional, default None
+ Name of the artifact in which to store the caught exception.
+ If not specified, the exception is not stored.
+ print_exception : bool, default True
+ Determines whether or not the exception is printed to
+ stdout when caught.
  """
  ...
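A brief sketch of the `@catch` semantics documented above; the flow and artifact names are hypothetical, while `var` and `print_exception` are the documented parameters.

```python
from metaflow import FlowSpec, catch, step


class CatchDemoFlow(FlowSpec):
    @catch(var="compute_error", print_exception=True)
    @step
    def start(self):
        # This raises; @catch stores the exception in self.compute_error
        # and lets the flow continue instead of failing the step.
        self.result = 1 / 0
        self.next(self.end)

    @step
    def end(self):
        # Downstream, the artifact signals that start's happy-path
        # artifacts (here, self.result) are missing.
        if getattr(self, "compute_error", None):
            print("start failed:", self.compute_error)


if __name__ == "__main__":
    CatchDemoFlow()
```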
 
@@ -1004,6 +967,49 @@ def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: t
  """
  ...
 
+ def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ """
+ The `@airflow_s3_key_sensor` decorator attaches an Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
+ before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
+ and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
+ added as flow decorators. Adding more than one decorator will ensure that the `start` step
+ starts only after all sensors finish.
+
+
+ Parameters
+ ----------
+ timeout : int
+ Time, in seconds, before the task times out and fails. (Default: 3600)
+ poke_interval : int
+ Time in seconds that the job should wait in between each try. (Default: 60)
+ mode : str
+ How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
+ exponential_backoff : bool
+ Allow progressively longer waits between pokes by using an exponential backoff algorithm. (Default: True)
+ pool : str
+ The slot pool this task should run in;
+ slot pools are a way to limit concurrency for certain tasks. (Default: None)
+ soft_fail : bool
+ Set to True to mark the task as SKIPPED on failure. (Default: False)
+ name : str
+ Name of the sensor on Airflow.
+ description : str
+ Description of the sensor in the Airflow UI.
+ bucket_key : Union[str, List[str]]
+ The key(s) being waited on. Supports a full s3:// style URL or a relative path from the root level.
+ When it is specified as a full s3:// URL, please leave `bucket_name` as None.
+ bucket_name : str
+ Name of the S3 bucket. Only needed when bucket_key is not provided as a full s3:// URL.
+ When specified, all the keys passed to bucket_key refer to this bucket. (Default: None)
+ wildcard_match : bool
+ Whether the bucket_key should be interpreted as a Unix wildcard pattern. (Default: False)
+ aws_conn_id : str
+ A reference to the S3 connection on Airflow. (Default: None)
+ verify : bool
+ Whether or not to verify SSL certificates for the S3 connection. (Default: None)
+ """
+ ...
+
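A hedged usage sketch follows; the bucket, key, and sensor name are hypothetical, and the argument list mirrors the stub signature above, filling in the documented defaults where one is stated.

```python
from metaflow import FlowSpec, airflow_s3_key_sensor, step


@airflow_s3_key_sensor(
    timeout=3600,                 # documented default
    poke_interval=60,             # documented default
    mode="poke",
    exponential_backoff=True,
    pool=None,
    soft_fail=False,
    name="wait_for_input_key",    # hypothetical sensor name
    description="Wait for the daily input file to land in S3",
    bucket_key="s3://my-bucket/input/data.parquet",  # full s3:// URL, so bucket_name stays None
    bucket_name=None,
    wildcard_match=False,
    aws_conn_id=None,
    verify=None,
)
class S3SensorFlow(FlowSpec):
    @step
    def start(self):
        # Runs only after the sensor sees the key (when compiled via `airflow create`).
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    S3SensorFlow()
```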
  @typing.overload
  def trigger_on_finish(*, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
@@ -1140,92 +1146,6 @@ def project(*, name: str, branch: typing.Optional[str] = None, production: bool
  """
  ...
 
- def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
- """
- The `@airflow_external_task_sensor` decorator attaches an Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
- This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as flow decorators. Adding more than one decorator will ensure that the `start` step starts only after all sensors finish.
-
-
- Parameters
- ----------
- timeout : int
- Time, in seconds, before the task times out and fails. (Default: 3600)
- poke_interval : int
- Time in seconds that the job should wait in between each try. (Default: 60)
- mode : str
- How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
- exponential_backoff : bool
- Allow progressively longer waits between pokes by using an exponential backoff algorithm. (Default: True)
- pool : str
- The slot pool this task should run in;
- slot pools are a way to limit concurrency for certain tasks. (Default: None)
- soft_fail : bool
- Set to True to mark the task as SKIPPED on failure. (Default: False)
- name : str
- Name of the sensor on Airflow.
- description : str
- Description of the sensor in the Airflow UI.
- external_dag_id : str
- The dag_id that contains the task you want to wait for.
- external_task_ids : List[str]
- The list of task_ids that you want to wait for.
- If None (default value) the sensor waits for the DAG. (Default: None)
- allowed_states : List[str]
- Iterable of allowed states. (Default: ['success'])
- failed_states : List[str]
- Iterable of failed or disallowed states. (Default: None)
- execution_delta : datetime.timedelta
- Time difference with the previous execution to look at;
- the default is the same logical date as the current task or DAG. (Default: None)
- check_existence : bool
- Set to True to check if the external task exists or check if
- the DAG to wait for exists. (Default: True)
- """
- ...
-
- def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
- """
- The `@airflow_s3_key_sensor` decorator attaches an Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
- before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
- and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
- added as flow decorators. Adding more than one decorator will ensure that the `start` step
- starts only after all sensors finish.
-
-
- Parameters
- ----------
- timeout : int
- Time, in seconds, before the task times out and fails. (Default: 3600)
- poke_interval : int
- Time in seconds that the job should wait in between each try. (Default: 60)
- mode : str
- How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
- exponential_backoff : bool
- Allow progressively longer waits between pokes by using an exponential backoff algorithm. (Default: True)
- pool : str
- The slot pool this task should run in;
- slot pools are a way to limit concurrency for certain tasks. (Default: None)
- soft_fail : bool
- Set to True to mark the task as SKIPPED on failure. (Default: False)
- name : str
- Name of the sensor on Airflow.
- description : str
- Description of the sensor in the Airflow UI.
- bucket_key : Union[str, List[str]]
- The key(s) being waited on. Supports a full s3:// style URL or a relative path from the root level.
- When it is specified as a full s3:// URL, please leave `bucket_name` as None.
- bucket_name : str
- Name of the S3 bucket. Only needed when bucket_key is not provided as a full s3:// URL.
- When specified, all the keys passed to bucket_key refer to this bucket. (Default: None)
- wildcard_match : bool
- Whether the bucket_key should be interpreted as a Unix wildcard pattern. (Default: False)
- aws_conn_id : str
- A reference to the S3 connection on Airflow. (Default: None)
- verify : bool
- Whether or not to verify SSL certificates for the S3 connection. (Default: None)
- """
- ...
-
  @typing.overload
  def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
@@ -1328,3 +1248,87 @@ def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly:
  """
  ...
 
+ @typing.overload
+ def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ """
+ Specifies the PyPI packages for all steps of the flow.
+
+ Use `@pypi_base` to set common packages required by all
+ steps and use `@pypi` to specify step-specific overrides.
+
+ Parameters
+ ----------
+ packages : Dict[str, str], default: {}
+ Packages to use for this flow. The key is the name of the package
+ and the value is the version to use.
+ python : str, optional, default: None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ """
+ ...
+
+ @typing.overload
+ def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ ...
+
+ def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
+ """
+ Specifies the PyPI packages for all steps of the flow.
+
+ Use `@pypi_base` to set common packages required by all
+ steps and use `@pypi` to specify step-specific overrides.
+
+ Parameters
+ ----------
+ packages : Dict[str, str], default: {}
+ Packages to use for this flow. The key is the name of the package
+ and the value is the version to use.
+ python : str, optional, default: None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ """
+ ...
+
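As with `@conda_base` further above, a minimal hedged sketch (hypothetical flow name and version pins) of setting flow-wide PyPI packages:

```python
from metaflow import FlowSpec, pypi_base, step


# Flow-level PyPI environment shared by all steps; individual steps
# could override it with @pypi.
@pypi_base(python="3.11.5", packages={"requests": "2.32.3"})
class PypiDemoFlow(FlowSpec):
    @step
    def start(self):
        import requests  # resolved from the flow-level PyPI environment

        print(requests.__version__)
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    PypiDemoFlow()
```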
+ def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ """
+ The `@airflow_external_task_sensor` decorator attaches an Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
+ This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as flow decorators. Adding more than one decorator will ensure that the `start` step starts only after all sensors finish.
+
+
+ Parameters
+ ----------
+ timeout : int
+ Time, in seconds, before the task times out and fails. (Default: 3600)
+ poke_interval : int
+ Time in seconds that the job should wait in between each try. (Default: 60)
+ mode : str
+ How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
+ exponential_backoff : bool
+ Allow progressively longer waits between pokes by using an exponential backoff algorithm. (Default: True)
+ pool : str
+ The slot pool this task should run in;
+ slot pools are a way to limit concurrency for certain tasks. (Default: None)
+ soft_fail : bool
+ Set to True to mark the task as SKIPPED on failure. (Default: False)
+ name : str
+ Name of the sensor on Airflow.
+ description : str
+ Description of the sensor in the Airflow UI.
+ external_dag_id : str
+ The dag_id that contains the task you want to wait for.
+ external_task_ids : List[str]
+ The list of task_ids that you want to wait for.
+ If None (default value) the sensor waits for the DAG. (Default: None)
+ allowed_states : List[str]
+ Iterable of allowed states. (Default: ['success'])
+ failed_states : List[str]
+ Iterable of failed or disallowed states. (Default: None)
+ execution_delta : datetime.timedelta
+ Time difference with the previous execution to look at;
+ the default is the same logical date as the current task or DAG. (Default: None)
+ check_existence : bool
+ Set to True to check if the external task exists or check if
+ the DAG to wait for exists. (Default: True)
+ """
+ ...
+
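And a matching hedged sketch for `@airflow_external_task_sensor`; the DAG and task ids are hypothetical, and the arguments mirror the stub signature above, using the documented defaults where one is stated.

```python
import datetime

from metaflow import FlowSpec, airflow_external_task_sensor, step


@airflow_external_task_sensor(
    timeout=3600,                  # documented default
    poke_interval=60,              # documented default
    mode="reschedule",             # free the worker slot between pokes
    exponential_backoff=True,
    pool=None,
    soft_fail=False,
    name="wait_for_upstream",      # hypothetical sensor name
    description="Block until the upstream DAG's publish task succeeds",
    external_dag_id="upstream_etl",   # hypothetical DAG id
    external_task_ids=["publish"],    # hypothetical task id
    allowed_states=["success"],
    failed_states=None,
    execution_delta=datetime.timedelta(hours=1),
    check_existence=True,
)
class DownstreamFlow(FlowSpec):
    @step
    def start(self):
        # Runs only after the upstream task reaches an allowed state.
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    DownstreamFlow()
```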