metaflow-stubs 2.16.8__py2.py3-none-any.whl → 2.17.0__py2.py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as they appear in their respective public registries. It is provided for informational purposes only.


Files changed (166)
  1. metaflow-stubs/__init__.pyi +577 -577
  2. metaflow-stubs/cards.pyi +2 -2
  3. metaflow-stubs/cli.pyi +2 -2
  4. metaflow-stubs/cli_components/__init__.pyi +2 -2
  5. metaflow-stubs/cli_components/utils.pyi +2 -2
  6. metaflow-stubs/client/__init__.pyi +2 -2
  7. metaflow-stubs/client/core.pyi +6 -6
  8. metaflow-stubs/client/filecache.pyi +2 -2
  9. metaflow-stubs/events.pyi +3 -3
  10. metaflow-stubs/exception.pyi +2 -2
  11. metaflow-stubs/flowspec.pyi +6 -6
  12. metaflow-stubs/generated_for.txt +1 -1
  13. metaflow-stubs/includefile.pyi +3 -3
  14. metaflow-stubs/meta_files.pyi +2 -2
  15. metaflow-stubs/metadata_provider/__init__.pyi +2 -2
  16. metaflow-stubs/metadata_provider/heartbeat.pyi +2 -2
  17. metaflow-stubs/metadata_provider/metadata.pyi +3 -3
  18. metaflow-stubs/metadata_provider/util.pyi +2 -2
  19. metaflow-stubs/metaflow_config.pyi +2 -2
  20. metaflow-stubs/metaflow_current.pyi +24 -24
  21. metaflow-stubs/metaflow_git.pyi +2 -2
  22. metaflow-stubs/multicore_utils.pyi +2 -2
  23. metaflow-stubs/packaging_sys/__init__.pyi +6 -6
  24. metaflow-stubs/packaging_sys/backend.pyi +2 -2
  25. metaflow-stubs/packaging_sys/distribution_support.pyi +4 -4
  26. metaflow-stubs/packaging_sys/tar_backend.pyi +5 -5
  27. metaflow-stubs/packaging_sys/utils.pyi +2 -2
  28. metaflow-stubs/packaging_sys/v1.pyi +3 -3
  29. metaflow-stubs/parameters.pyi +3 -3
  30. metaflow-stubs/plugins/__init__.pyi +13 -13
  31. metaflow-stubs/plugins/airflow/__init__.pyi +2 -2
  32. metaflow-stubs/plugins/airflow/airflow_decorator.pyi +2 -2
  33. metaflow-stubs/plugins/airflow/airflow_utils.pyi +2 -2
  34. metaflow-stubs/plugins/airflow/exception.pyi +2 -2
  35. metaflow-stubs/plugins/airflow/sensors/__init__.pyi +2 -2
  36. metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +2 -2
  37. metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +2 -2
  38. metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +2 -2
  39. metaflow-stubs/plugins/argo/__init__.pyi +2 -2
  40. metaflow-stubs/plugins/argo/argo_client.pyi +2 -2
  41. metaflow-stubs/plugins/argo/argo_events.pyi +2 -2
  42. metaflow-stubs/plugins/argo/argo_workflows.pyi +3 -3
  43. metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +3 -3
  44. metaflow-stubs/plugins/argo/argo_workflows_deployer.pyi +4 -4
  45. metaflow-stubs/plugins/argo/argo_workflows_deployer_objects.pyi +4 -4
  46. metaflow-stubs/plugins/argo/exit_hooks.pyi +3 -3
  47. metaflow-stubs/plugins/aws/__init__.pyi +2 -2
  48. metaflow-stubs/plugins/aws/aws_client.pyi +2 -2
  49. metaflow-stubs/plugins/aws/aws_utils.pyi +2 -2
  50. metaflow-stubs/plugins/aws/batch/__init__.pyi +2 -2
  51. metaflow-stubs/plugins/aws/batch/batch.pyi +2 -2
  52. metaflow-stubs/plugins/aws/batch/batch_client.pyi +2 -2
  53. metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +2 -2
  54. metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +2 -2
  55. metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +3 -3
  56. metaflow-stubs/plugins/aws/step_functions/__init__.pyi +2 -2
  57. metaflow-stubs/plugins/aws/step_functions/dynamo_db_client.pyi +2 -2
  58. metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +2 -2
  59. metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +2 -2
  60. metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +2 -2
  61. metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +2 -2
  62. metaflow-stubs/plugins/aws/step_functions/step_functions_decorator.pyi +2 -2
  63. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer.pyi +5 -5
  64. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer_objects.pyi +4 -4
  65. metaflow-stubs/plugins/azure/__init__.pyi +2 -2
  66. metaflow-stubs/plugins/azure/azure_credential.pyi +2 -2
  67. metaflow-stubs/plugins/azure/azure_exceptions.pyi +2 -2
  68. metaflow-stubs/plugins/azure/azure_secret_manager_secrets_provider.pyi +3 -3
  69. metaflow-stubs/plugins/azure/azure_utils.pyi +2 -2
  70. metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +2 -2
  71. metaflow-stubs/plugins/azure/includefile_support.pyi +2 -2
  72. metaflow-stubs/plugins/cards/__init__.pyi +2 -2
  73. metaflow-stubs/plugins/cards/card_client.pyi +2 -2
  74. metaflow-stubs/plugins/cards/card_creator.pyi +2 -2
  75. metaflow-stubs/plugins/cards/card_datastore.pyi +2 -2
  76. metaflow-stubs/plugins/cards/card_decorator.pyi +3 -3
  77. metaflow-stubs/plugins/cards/card_modules/__init__.pyi +2 -2
  78. metaflow-stubs/plugins/cards/card_modules/basic.pyi +3 -3
  79. metaflow-stubs/plugins/cards/card_modules/card.pyi +2 -2
  80. metaflow-stubs/plugins/cards/card_modules/components.pyi +4 -4
  81. metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +2 -2
  82. metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +2 -2
  83. metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +2 -2
  84. metaflow-stubs/plugins/cards/card_resolver.pyi +2 -2
  85. metaflow-stubs/plugins/cards/component_serializer.pyi +2 -2
  86. metaflow-stubs/plugins/cards/exception.pyi +2 -2
  87. metaflow-stubs/plugins/catch_decorator.pyi +3 -3
  88. metaflow-stubs/plugins/datatools/__init__.pyi +2 -2
  89. metaflow-stubs/plugins/datatools/local.pyi +2 -2
  90. metaflow-stubs/plugins/datatools/s3/__init__.pyi +2 -2
  91. metaflow-stubs/plugins/datatools/s3/s3.pyi +4 -4
  92. metaflow-stubs/plugins/datatools/s3/s3tail.pyi +2 -2
  93. metaflow-stubs/plugins/datatools/s3/s3util.pyi +2 -2
  94. metaflow-stubs/plugins/debug_logger.pyi +2 -2
  95. metaflow-stubs/plugins/debug_monitor.pyi +2 -2
  96. metaflow-stubs/plugins/environment_decorator.pyi +2 -2
  97. metaflow-stubs/plugins/events_decorator.pyi +2 -2
  98. metaflow-stubs/plugins/exit_hook/__init__.pyi +2 -2
  99. metaflow-stubs/plugins/exit_hook/exit_hook_decorator.pyi +2 -2
  100. metaflow-stubs/plugins/frameworks/__init__.pyi +2 -2
  101. metaflow-stubs/plugins/frameworks/pytorch.pyi +2 -2
  102. metaflow-stubs/plugins/gcp/__init__.pyi +2 -2
  103. metaflow-stubs/plugins/gcp/gcp_secret_manager_secrets_provider.pyi +3 -3
  104. metaflow-stubs/plugins/gcp/gs_exceptions.pyi +2 -2
  105. metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +2 -2
  106. metaflow-stubs/plugins/gcp/gs_utils.pyi +2 -2
  107. metaflow-stubs/plugins/gcp/includefile_support.pyi +2 -2
  108. metaflow-stubs/plugins/kubernetes/__init__.pyi +2 -2
  109. metaflow-stubs/plugins/kubernetes/kube_utils.pyi +3 -3
  110. metaflow-stubs/plugins/kubernetes/kubernetes.pyi +2 -2
  111. metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +2 -2
  112. metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +2 -2
  113. metaflow-stubs/plugins/kubernetes/kubernetes_job.pyi +2 -2
  114. metaflow-stubs/plugins/kubernetes/kubernetes_jobsets.pyi +2 -2
  115. metaflow-stubs/plugins/kubernetes/spot_monitor_sidecar.pyi +2 -2
  116. metaflow-stubs/plugins/parallel_decorator.pyi +2 -2
  117. metaflow-stubs/plugins/project_decorator.pyi +2 -2
  118. metaflow-stubs/plugins/pypi/__init__.pyi +2 -2
  119. metaflow-stubs/plugins/pypi/conda_decorator.pyi +2 -2
  120. metaflow-stubs/plugins/pypi/conda_environment.pyi +4 -4
  121. metaflow-stubs/plugins/pypi/parsers.pyi +2 -2
  122. metaflow-stubs/plugins/pypi/pypi_decorator.pyi +2 -2
  123. metaflow-stubs/plugins/pypi/pypi_environment.pyi +2 -2
  124. metaflow-stubs/plugins/pypi/utils.pyi +2 -2
  125. metaflow-stubs/plugins/resources_decorator.pyi +2 -2
  126. metaflow-stubs/plugins/retry_decorator.pyi +2 -2
  127. metaflow-stubs/plugins/secrets/__init__.pyi +2 -2
  128. metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +3 -3
  129. metaflow-stubs/plugins/secrets/secrets_decorator.pyi +2 -2
  130. metaflow-stubs/plugins/secrets/secrets_func.pyi +2 -2
  131. metaflow-stubs/plugins/secrets/secrets_spec.pyi +2 -2
  132. metaflow-stubs/plugins/secrets/utils.pyi +2 -2
  133. metaflow-stubs/plugins/storage_executor.pyi +2 -2
  134. metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +3 -3
  135. metaflow-stubs/plugins/timeout_decorator.pyi +3 -3
  136. metaflow-stubs/plugins/uv/__init__.pyi +2 -2
  137. metaflow-stubs/plugins/uv/uv_environment.pyi +2 -2
  138. metaflow-stubs/pylint_wrapper.pyi +2 -2
  139. metaflow-stubs/runner/__init__.pyi +2 -2
  140. metaflow-stubs/runner/deployer.pyi +33 -33
  141. metaflow-stubs/runner/deployer_impl.pyi +2 -2
  142. metaflow-stubs/runner/metaflow_runner.pyi +3 -3
  143. metaflow-stubs/runner/nbdeploy.pyi +2 -2
  144. metaflow-stubs/runner/nbrun.pyi +2 -2
  145. metaflow-stubs/runner/subprocess_manager.pyi +2 -2
  146. metaflow-stubs/runner/utils.pyi +3 -3
  147. metaflow-stubs/system/__init__.pyi +2 -2
  148. metaflow-stubs/system/system_logger.pyi +2 -2
  149. metaflow-stubs/system/system_monitor.pyi +2 -2
  150. metaflow-stubs/tagging_util.pyi +2 -2
  151. metaflow-stubs/tuple_util.pyi +2 -2
  152. metaflow-stubs/user_configs/__init__.pyi +2 -2
  153. metaflow-stubs/user_configs/config_options.pyi +3 -3
  154. metaflow-stubs/user_configs/config_parameters.pyi +7 -7
  155. metaflow-stubs/user_decorators/__init__.pyi +2 -2
  156. metaflow-stubs/user_decorators/common.pyi +2 -2
  157. metaflow-stubs/user_decorators/mutable_flow.pyi +4 -4
  158. metaflow-stubs/user_decorators/mutable_step.pyi +5 -5
  159. metaflow-stubs/user_decorators/user_flow_decorator.pyi +4 -4
  160. metaflow-stubs/user_decorators/user_step_decorator.pyi +5 -5
  161. metaflow-stubs/version.pyi +2 -2
  162. {metaflow_stubs-2.16.8.dist-info → metaflow_stubs-2.17.0.dist-info}/METADATA +2 -2
  163. metaflow_stubs-2.17.0.dist-info/RECORD +166 -0
  164. metaflow_stubs-2.16.8.dist-info/RECORD +0 -166
  165. {metaflow_stubs-2.16.8.dist-info → metaflow_stubs-2.17.0.dist-info}/WHEEL +0 -0
  166. {metaflow_stubs-2.16.8.dist-info → metaflow_stubs-2.17.0.dist-info}/top_level.txt +0 -0
@@ -1,7 +1,7 @@
  ######################################################################################################
  # Auto-generated Metaflow stub file #
- # MF version: 2.16.8 #
- # Generated on 2025-07-29T19:28:34.106017 #
+ # MF version: 2.17.0 #
+ # Generated on 2025-08-06T11:05:04.016056 #
  ######################################################################################################

  from __future__ import annotations
@@ -39,17 +39,17 @@ from .user_decorators.user_step_decorator import UserStepDecorator as UserStepDe
  from .user_decorators.user_step_decorator import StepMutator as StepMutator
  from .user_decorators.user_step_decorator import user_step_decorator as user_step_decorator
  from .user_decorators.user_flow_decorator import FlowMutator as FlowMutator
+ from . import tuple_util as tuple_util
  from . import metaflow_git as metaflow_git
  from . import events as events
- from . import tuple_util as tuple_util
  from . import runner as runner
  from . import plugins as plugins
  from .plugins.datatools.s3.s3 import S3 as S3
  from . import includefile as includefile
  from .includefile import IncludeFile as IncludeFile
+ from .plugins.pypi.parsers import pyproject_toml_parser as pyproject_toml_parser
  from .plugins.pypi.parsers import requirements_txt_parser as requirements_txt_parser
  from .plugins.pypi.parsers import conda_environment_yml_parser as conda_environment_yml_parser
- from .plugins.pypi.parsers import pyproject_toml_parser as pyproject_toml_parser
  from . import cards as cards
  from . import client as client
  from .client.core import namespace as namespace
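All three `pypi.parsers` helpers remain exported at the top level after this reorder; `pyproject_toml_parser` simply moved up two lines. For orientation, a minimal sketch of how such a parser is typically wired into a flow via `Config` (the flow name, config name, and local `pyproject.toml` path are illustrative assumptions, not part of the diff):

```python
from metaflow import Config, FlowSpec, pyproject_toml_parser, step

class DepsFlow(FlowSpec):
    # Parse a local pyproject.toml (hypothetical path) into Metaflow's
    # dependency mapping when the flow is deployed or run.
    deps = Config("deps", default="pyproject.toml", parser=pyproject_toml_parser)

    @step
    def start(self):
        print(self.deps)  # the parsed dependency specification
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    DepsFlow()
```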
@@ -153,21 +153,81 @@ def step(f: typing.Union[typing.Callable[[FlowSpecDerived], None], typing.Callab
  ...

  @typing.overload
- def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def resources(*, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Decorator prototype for all step decorators. This function gets specialized
- and imported for all decorators types by _import_plugin_decorators().
+ Specifies the resources needed when executing this step.
+
+ Use `@resources` to specify the resource requirements
+ independently of the specific compute layer (`@batch`, `@kubernetes`).
+
+ You can choose the compute layer on the command line by executing e.g.
+ ```
+ python myflow.py run --with batch
+ ```
+ or
+ ```
+ python myflow.py run --with kubernetes
+ ```
+ which executes the flow on the desired system using the
+ requirements specified in `@resources`.
+
+
+ Parameters
+ ----------
+ cpu : int, default 1
+ Number of CPUs required for this step.
+ gpu : int, optional, default None
+ Number of GPUs required for this step.
+ disk : int, optional, default None
+ Disk size (in MB) required for this step. Only applies on Kubernetes.
+ memory : int, default 4096
+ Memory size (in MB) required for this step.
+ shared_memory : int, optional, default None
+ The value for the size (in MiB) of the /dev/shm volume for this step.
+ This parameter maps to the `--shm-size` option in Docker.
  """
  ...

  @typing.overload
- def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

- def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
+ @typing.overload
+ def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
  """
- Decorator prototype for all step decorators. This function gets specialized
- and imported for all decorators types by _import_plugin_decorators().
+ Specifies the resources needed when executing this step.
+
+ Use `@resources` to specify the resource requirements
+ independently of the specific compute layer (`@batch`, `@kubernetes`).
+
+ You can choose the compute layer on the command line by executing e.g.
+ ```
+ python myflow.py run --with batch
+ ```
+ or
+ ```
+ python myflow.py run --with kubernetes
+ ```
+ which executes the flow on the desired system using the
+ requirements specified in `@resources`.
+
+
+ Parameters
+ ----------
+ cpu : int, default 1
+ Number of CPUs required for this step.
+ gpu : int, optional, default None
+ Number of GPUs required for this step.
+ disk : int, optional, default None
+ Disk size (in MB) required for this step. Only applies on Kubernetes.
+ memory : int, default 4096
+ Memory size (in MB) required for this step.
+ shared_memory : int, optional, default None
+ The value for the size (in MiB) of the /dev/shm volume for this step.
+ This parameter maps to the `--shm-size` option in Docker.
  """
  ...

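The hunk above replaces the `parallel` prototype at this position with fully documented `resources` stubs. As a usage sketch under the documented parameters (the flow and step names are ours, not from the diff):

```python
from metaflow import FlowSpec, resources, step

class TrainFlow(FlowSpec):

    # Declare requirements independently of the compute layer; they take
    # effect when the flow runs e.g. `--with batch` or `--with kubernetes`.
    @resources(cpu=2, memory=8192, gpu=1)
    @step
    def start(self):
        self.status = "trained"
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    TrainFlow()
```

Per the docstring, `python trainflow.py run --with batch` then applies these requirements on AWS Batch.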
@@ -261,251 +321,146 @@ def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: ty
  ...

  @typing.overload
- def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies the Conda environment for the step.
+ Specifies a timeout for your step.

- Information in this decorator will augment any
- attributes set in the `@conda_base` flow-level decorator. Hence,
- you can use `@conda_base` to set packages required by all
- steps and use `@conda` to specify step-specific overrides.
+ This decorator is useful if this step may hang indefinitely.
+
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.
+
+ Note that all the values specified in parameters are added together so if you specify
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.


  Parameters
  ----------
- packages : Dict[str, str], default {}
- Packages to use for this step. The key is the name of the package
- and the value is the version to use.
- libraries : Dict[str, str], default {}
- Supported for backward compatibility. When used with packages, packages will take precedence.
- python : str, optional, default None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
- disabled : bool, default False
- If set to True, disables @conda.
+ seconds : int, default 0
+ Number of seconds to wait prior to timing out.
+ minutes : int, default 0
+ Number of minutes to wait prior to timing out.
+ hours : int, default 0
+ Number of hours to wait prior to timing out.
  """
  ...

  @typing.overload
- def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
+ def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
  """
- Specifies the Conda environment for the step.
+ Specifies a timeout for your step.

- Information in this decorator will augment any
- attributes set in the `@conda_base` flow-level decorator. Hence,
- you can use `@conda_base` to set packages required by all
- steps and use `@conda` to specify step-specific overrides.
+ This decorator is useful if this step may hang indefinitely.
+
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.
+
+ Note that all the values specified in parameters are added together so if you specify
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.


  Parameters
  ----------
- packages : Dict[str, str], default {}
- Packages to use for this step. The key is the name of the package
- and the value is the version to use.
- libraries : Dict[str, str], default {}
- Supported for backward compatibility. When used with packages, packages will take precedence.
- python : str, optional, default None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
- disabled : bool, default False
- If set to True, disables @conda.
+ seconds : int, default 0
+ Number of seconds to wait prior to timing out.
+ minutes : int, default 0
+ Number of minutes to wait prior to timing out.
+ hours : int, default 0
+ Number of hours to wait prior to timing out.
  """
  ...

  @typing.overload
- def resources(*, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def batch(*, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = 'METAFLOW_BATCH_JOB_QUEUE', iam_role: str = 'METAFLOW_ECS_S3_ACCESS_IAM_ROLE', execution_role: str = 'METAFLOW_ECS_FARGATE_EXECUTION_ROLE', shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies the resources needed when executing this step.
-
- Use `@resources` to specify the resource requirements
- independently of the specific compute layer (`@batch`, `@kubernetes`).
-
- You can choose the compute layer on the command line by executing e.g.
- ```
- python myflow.py run --with batch
- ```
- or
- ```
- python myflow.py run --with kubernetes
- ```
- which executes the flow on the desired system using the
- requirements specified in `@resources`.
+ Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).


  Parameters
  ----------
  cpu : int, default 1
- Number of CPUs required for this step.
- gpu : int, optional, default None
- Number of GPUs required for this step.
- disk : int, optional, default None
- Disk size (in MB) required for this step. Only applies on Kubernetes.
+ Number of CPUs required for this step. If `@resources` is
+ also present, the maximum value from all decorators is used.
+ gpu : int, default 0
+ Number of GPUs required for this step. If `@resources` is
+ also present, the maximum value from all decorators is used.
  memory : int, default 4096
- Memory size (in MB) required for this step.
+ Memory size (in MB) required for this step. If
+ `@resources` is also present, the maximum value from all decorators is
+ used.
+ image : str, optional, default None
+ Docker image to use when launching on AWS Batch. If not specified, and
+ METAFLOW_BATCH_CONTAINER_IMAGE is specified, that image is used. If
+ not, a default Docker image mapping to the current version of Python is used.
+ queue : str, default METAFLOW_BATCH_JOB_QUEUE
+ AWS Batch Job Queue to submit the job to.
+ iam_role : str, default METAFLOW_ECS_S3_ACCESS_IAM_ROLE
+ AWS IAM role that AWS Batch container uses to access AWS cloud resources.
+ execution_role : str, default METAFLOW_ECS_FARGATE_EXECUTION_ROLE
+ AWS IAM role that AWS Batch can use [to trigger AWS Fargate tasks]
+ (https://docs.aws.amazon.com/batch/latest/userguide/execution-IAM-role.html).
  shared_memory : int, optional, default None
  The value for the size (in MiB) of the /dev/shm volume for this step.
  This parameter maps to the `--shm-size` option in Docker.
+ max_swap : int, optional, default None
+ The total amount of swap memory (in MiB) a container can use for this
+ step. This parameter is translated to the `--memory-swap` option in
+ Docker where the value is the sum of the container memory plus the
+ `max_swap` value.
+ swappiness : int, optional, default None
+ This allows you to tune memory swappiness behavior for this step.
+ A swappiness value of 0 causes swapping not to happen unless absolutely
+ necessary. A swappiness value of 100 causes pages to be swapped very
+ aggressively. Accepted values are whole numbers between 0 and 100.
+ use_tmpfs : bool, default False
+ This enables an explicit tmpfs mount for this step. Note that tmpfs is
+ not available on Fargate compute environments
+ tmpfs_tempdir : bool, default True
+ sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
+ tmpfs_size : int, optional, default None
+ The value for the size (in MiB) of the tmpfs mount for this step.
+ This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
+ memory allocated for this step.
+ tmpfs_path : str, optional, default None
+ Path to tmpfs mount for this step. Defaults to /metaflow_temp.
+ inferentia : int, default 0
+ Number of Inferentia chips required for this step.
+ trainium : int, default None
+ Alias for inferentia. Use only one of the two.
+ efa : int, default 0
+ Number of elastic fabric adapter network devices to attach to container
+ ephemeral_storage : int, default None
+ The total amount, in GiB, of ephemeral storage to set for the task, 21-200GiB.
+ This is only relevant for Fargate compute environments
+ log_driver: str, optional, default None
+ The log driver to use for the Amazon ECS container.
+ log_options: List[str], optional, default None
+ List of strings containing options for the chosen log driver. The configurable values
+ depend on the `log driver` chosen. Validation of these options is not supported yet.
+ Example: [`awslogs-group:aws/batch/job`]
  """
  ...

  @typing.overload
- def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def batch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def batch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
+ def batch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = 'METAFLOW_BATCH_JOB_QUEUE', iam_role: str = 'METAFLOW_ECS_S3_ACCESS_IAM_ROLE', execution_role: str = 'METAFLOW_ECS_FARGATE_EXECUTION_ROLE', shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None):
  """
- Specifies the resources needed when executing this step.
-
- Use `@resources` to specify the resource requirements
- independently of the specific compute layer (`@batch`, `@kubernetes`).
-
- You can choose the compute layer on the command line by executing e.g.
- ```
- python myflow.py run --with batch
- ```
- or
- ```
- python myflow.py run --with kubernetes
- ```
- which executes the flow on the desired system using the
- requirements specified in `@resources`.
-
-
- Parameters
- ----------
- cpu : int, default 1
- Number of CPUs required for this step.
- gpu : int, optional, default None
- Number of GPUs required for this step.
- disk : int, optional, default None
- Disk size (in MB) required for this step. Only applies on Kubernetes.
- memory : int, default 4096
- Memory size (in MB) required for this step.
- shared_memory : int, optional, default None
- The value for the size (in MiB) of the /dev/shm volume for this step.
- This parameter maps to the `--shm-size` option in Docker.
- """
- ...
-
- @typing.overload
- def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Specifies the number of times the task corresponding
- to a step needs to be retried.
-
- This decorator is useful for handling transient errors, such as networking issues.
- If your task contains operations that can't be retried safely, e.g. database updates,
- it is advisable to annotate it with `@retry(times=0)`.
-
- This can be used in conjunction with the `@catch` decorator. The `@catch`
- decorator will execute a no-op task after all retries have been exhausted,
- ensuring that the flow execution can continue.
-
-
- Parameters
- ----------
- times : int, default 3
- Number of times to retry this task.
- minutes_between_retries : int, default 2
- Number of minutes between retries.
- """
- ...
-
- @typing.overload
- def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
- """
- Specifies the number of times the task corresponding
- to a step needs to be retried.
-
- This decorator is useful for handling transient errors, such as networking issues.
- If your task contains operations that can't be retried safely, e.g. database updates,
- it is advisable to annotate it with `@retry(times=0)`.
-
- This can be used in conjunction with the `@catch` decorator. The `@catch`
- decorator will execute a no-op task after all retries have been exhausted,
- ensuring that the flow execution can continue.
-
-
- Parameters
- ----------
- times : int, default 3
- Number of times to retry this task.
- minutes_between_retries : int, default 2
- Number of minutes between retries.
- """
- ...
-
- @typing.overload
- def card(*, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Creates a human-readable report, a Metaflow Card, after this step completes.
-
- Note that you may add multiple `@card` decorators in a step with different parameters.
-
-
- Parameters
- ----------
- type : str, default 'default'
- Card type.
- id : str, optional, default None
- If multiple cards are present, use this id to identify this card.
- options : Dict[str, Any], default {}
- Options passed to the card. The contents depend on the card type.
- timeout : int, default 45
- Interrupt reporting if it takes more than this many seconds.
- """
- ...
-
- @typing.overload
- def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
- """
- Creates a human-readable report, a Metaflow Card, after this step completes.
-
- Note that you may add multiple `@card` decorators in a step with different parameters.
-
-
- Parameters
- ----------
- type : str, default 'default'
- Card type.
- id : str, optional, default None
- If multiple cards are present, use this id to identify this card.
- options : Dict[str, Any], default {}
- Options passed to the card. The contents depend on the card type.
- timeout : int, default 45
- Interrupt reporting if it takes more than this many seconds.
- """
- ...
-
- @typing.overload
- def batch(*, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = 'METAFLOW_BATCH_JOB_QUEUE', iam_role: str = 'METAFLOW_ECS_S3_ACCESS_IAM_ROLE', execution_role: str = 'METAFLOW_ECS_FARGATE_EXECUTION_ROLE', shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
+ Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).


  Parameters
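This hunk also carries the relocated `@timeout` documentation, including the rule that `seconds`, `minutes`, and `hours` are summed. A small illustrative combination with `@retry` and `@catch`, as the docstring suggests (step contents and names are placeholders):

```python
from metaflow import FlowSpec, catch, retry, step, timeout

class RobustFlow(FlowSpec):

    @catch(var="failure")          # after retries are exhausted, store the exception and continue
    @retry(times=2)                # a timeout counts as an exception, so the step is retried
    @timeout(hours=1, seconds=60)  # values add up: effective limit is 1 hour 1 minute
    @step
    def start(self):
        self.result = sum(range(1_000_000))  # stand-in for possibly-hanging work
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    RobustFlow()
```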
@@ -574,139 +529,162 @@ def batch(*, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optio
  ...

  @typing.overload
- def batch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def card(*, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Creates a human-readable report, a Metaflow Card, after this step completes.
+
+ Note that you may add multiple `@card` decorators in a step with different parameters.
+
+
+ Parameters
+ ----------
+ type : str, default 'default'
+ Card type.
+ id : str, optional, default None
+ If multiple cards are present, use this id to identify this card.
+ options : Dict[str, Any], default {}
+ Options passed to the card. The contents depend on the card type.
+ timeout : int, default 45
+ Interrupt reporting if it takes more than this many seconds.
+ """
  ...

  @typing.overload
- def batch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

- def batch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = 'METAFLOW_BATCH_JOB_QUEUE', iam_role: str = 'METAFLOW_ECS_S3_ACCESS_IAM_ROLE', execution_role: str = 'METAFLOW_ECS_FARGATE_EXECUTION_ROLE', shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None):
+ @typing.overload
+ def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
  """
- Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
+ Creates a human-readable report, a Metaflow Card, after this step completes.
+
+ Note that you may add multiple `@card` decorators in a step with different parameters.


  Parameters
  ----------
- cpu : int, default 1
- Number of CPUs required for this step. If `@resources` is
- also present, the maximum value from all decorators is used.
- gpu : int, default 0
- Number of GPUs required for this step. If `@resources` is
- also present, the maximum value from all decorators is used.
- memory : int, default 4096
- Memory size (in MB) required for this step. If
- `@resources` is also present, the maximum value from all decorators is
- used.
- image : str, optional, default None
- Docker image to use when launching on AWS Batch. If not specified, and
- METAFLOW_BATCH_CONTAINER_IMAGE is specified, that image is used. If
- not, a default Docker image mapping to the current version of Python is used.
- queue : str, default METAFLOW_BATCH_JOB_QUEUE
- AWS Batch Job Queue to submit the job to.
- iam_role : str, default METAFLOW_ECS_S3_ACCESS_IAM_ROLE
- AWS IAM role that AWS Batch container uses to access AWS cloud resources.
- execution_role : str, default METAFLOW_ECS_FARGATE_EXECUTION_ROLE
- AWS IAM role that AWS Batch can use [to trigger AWS Fargate tasks]
- (https://docs.aws.amazon.com/batch/latest/userguide/execution-IAM-role.html).
- shared_memory : int, optional, default None
- The value for the size (in MiB) of the /dev/shm volume for this step.
- This parameter maps to the `--shm-size` option in Docker.
- max_swap : int, optional, default None
- The total amount of swap memory (in MiB) a container can use for this
- step. This parameter is translated to the `--memory-swap` option in
- Docker where the value is the sum of the container memory plus the
- `max_swap` value.
- swappiness : int, optional, default None
- This allows you to tune memory swappiness behavior for this step.
- A swappiness value of 0 causes swapping not to happen unless absolutely
- necessary. A swappiness value of 100 causes pages to be swapped very
- aggressively. Accepted values are whole numbers between 0 and 100.
- use_tmpfs : bool, default False
- This enables an explicit tmpfs mount for this step. Note that tmpfs is
- not available on Fargate compute environments
- tmpfs_tempdir : bool, default True
- sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
- tmpfs_size : int, optional, default None
- The value for the size (in MiB) of the tmpfs mount for this step.
- This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
- memory allocated for this step.
- tmpfs_path : str, optional, default None
- Path to tmpfs mount for this step. Defaults to /metaflow_temp.
- inferentia : int, default 0
- Number of Inferentia chips required for this step.
- trainium : int, default None
- Alias for inferentia. Use only one of the two.
- efa : int, default 0
- Number of elastic fabric adapter network devices to attach to container
- ephemeral_storage : int, default None
- The total amount, in GiB, of ephemeral storage to set for the task, 21-200GiB.
- This is only relevant for Fargate compute environments
- log_driver: str, optional, default None
- The log driver to use for the Amazon ECS container.
- log_options: List[str], optional, default None
- List of strings containing options for the chosen log driver. The configurable values
- depend on the `log driver` chosen. Validation of these options is not supported yet.
- Example: [`awslogs-group:aws/batch/job`]
+ type : str, default 'default'
+ Card type.
+ id : str, optional, default None
+ If multiple cards are present, use this id to identify this card.
+ options : Dict[str, Any], default {}
+ Options passed to the card. The contents depend on the card type.
+ timeout : int, default 45
+ Interrupt reporting if it takes more than this many seconds.
  """
  ...

  @typing.overload
- def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies a timeout for your step.
+ Specifies the Conda environment for the step.

- This decorator is useful if this step may hang indefinitely.
+ Information in this decorator will augment any
+ attributes set in the `@conda_base` flow-level decorator. Hence,
+ you can use `@conda_base` to set packages required by all
+ steps and use `@conda` to specify step-specific overrides.

- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
- retried if needed and the exception will be caught by the `@catch` decorator, if present.

- Note that all the values specified in parameters are added together so if you specify
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
+ Parameters
+ ----------
+ packages : Dict[str, str], default {}
+ Packages to use for this step. The key is the name of the package
+ and the value is the version to use.
+ libraries : Dict[str, str], default {}
+ Supported for backward compatibility. When used with packages, packages will take precedence.
+ python : str, optional, default None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ disabled : bool, default False
+ If set to True, disables @conda.
+ """
+ ...
+
+ @typing.overload
+ def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ ...
+
+ @typing.overload
+ def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
+ """
+ Specifies the Conda environment for the step.
+
+ Information in this decorator will augment any
+ attributes set in the `@conda_base` flow-level decorator. Hence,
+ you can use `@conda_base` to set packages required by all
+ steps and use `@conda` to specify step-specific overrides.


  Parameters
  ----------
- seconds : int, default 0
- Number of seconds to wait prior to timing out.
- minutes : int, default 0
- Number of minutes to wait prior to timing out.
- hours : int, default 0
- Number of hours to wait prior to timing out.
+ packages : Dict[str, str], default {}
+ Packages to use for this step. The key is the name of the package
+ and the value is the version to use.
+ libraries : Dict[str, str], default {}
+ Supported for backward compatibility. When used with packages, packages will take precedence.
+ python : str, optional, default None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ disabled : bool, default False
+ If set to True, disables @conda.
  """
  ...

  @typing.overload
- def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ """
+ Decorator prototype for all step decorators. This function gets specialized
+ and imported for all decorators types by _import_plugin_decorators().
+ """
  ...

  @typing.overload
- def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
+ def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
  """
- Specifies a timeout for your step.
-
- This decorator is useful if this step may hang indefinitely.
+ Decorator prototype for all step decorators. This function gets specialized
+ and imported for all decorators types by _import_plugin_decorators().
+ """
+ ...
+
+ @typing.overload
+ def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Specifies environment variables to be set prior to the execution of a step.

- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
- retried if needed and the exception will be caught by the `@catch` decorator, if present.

- Note that all the values specified in parameters are added together so if you specify
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
+ Parameters
+ ----------
+ vars : Dict[str, str], default {}
+ Dictionary of environment variables to set.
+ """
+ ...
+
+ @typing.overload
+ def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ ...
+
+ @typing.overload
+ def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
+ """
+ Specifies environment variables to be set prior to the execution of a step.


  Parameters
  ----------
- seconds : int, default 0
- Number of seconds to wait prior to timing out.
- minutes : int, default 0
- Number of minutes to wait prior to timing out.
- hours : int, default 0
- Number of hours to wait prior to timing out.
+ vars : Dict[str, str], default {}
+ Dictionary of environment variables to set.
  """
  ...

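This hunk relocates the `@card`, `@conda`, `@parallel`, and `@environment` stubs. A brief sketch of how the step-level ones compose on a single step (the package pins and environment variable are illustrative assumptions):

```python
from metaflow import FlowSpec, card, conda, environment, step

class ReportFlow(FlowSpec):

    @card(type="default")                # publish a Metaflow Card when the step completes
    @environment(vars={"MY_FLAG": "1"})  # set before the step executes
    @conda(packages={"pandas": "2.2.2"}, python="3.11.9")
    @step
    def start(self):
        import os
        import pandas as pd              # resolved from the @conda environment
        assert os.environ["MY_FLAG"] == "1"
        self.table = pd.DataFrame({"x": [1, 2, 3]})
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    ReportFlow()
```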
@@ -742,10 +720,65 @@ def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None],

  Parameters
  ----------
- sources : List[Union[str, Dict[str, Any]]], default: []
- List of secret specs, defining how the secrets are to be retrieved
- role : str, optional, default: None
- Role to use for fetching secrets
+ sources : List[Union[str, Dict[str, Any]]], default: []
+ List of secret specs, defining how the secrets are to be retrieved
+ role : str, optional, default: None
+ Role to use for fetching secrets
+ """
+ ...
+
+ @typing.overload
+ def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Specifies the number of times the task corresponding
+ to a step needs to be retried.
+
+ This decorator is useful for handling transient errors, such as networking issues.
+ If your task contains operations that can't be retried safely, e.g. database updates,
+ it is advisable to annotate it with `@retry(times=0)`.
+
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
+ decorator will execute a no-op task after all retries have been exhausted,
+ ensuring that the flow execution can continue.
+
+
+ Parameters
+ ----------
+ times : int, default 3
+ Number of times to retry this task.
+ minutes_between_retries : int, default 2
+ Number of minutes between retries.
+ """
+ ...
+
+ @typing.overload
+ def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ ...
+
+ @typing.overload
+ def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
+ """
+ Specifies the number of times the task corresponding
+ to a step needs to be retried.
+
+ This decorator is useful for handling transient errors, such as networking issues.
+ If your task contains operations that can't be retried safely, e.g. database updates,
+ it is advisable to annotate it with `@retry(times=0)`.
+
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
+ decorator will execute a no-op task after all retries have been exhausted,
+ ensuring that the flow execution can continue.
+
+
+ Parameters
+ ----------
+ times : int, default 3
+ Number of times to retry this task.
+ minutes_between_retries : int, default 2
+ Number of minutes between retries.
  """
  ...

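The relocated `@retry` docs above advise `@retry(times=0)` for steps that must not run twice. A minimal sketch of that advice (the write itself is a placeholder):

```python
from metaflow import FlowSpec, retry, step

class SafeWriteFlow(FlowSpec):

    # Per the docstring: operations that can't be retried safely,
    # e.g. database updates, should disable retries explicitly.
    @retry(times=0)
    @step
    def start(self):
        self.rows_written = 42  # stand-in for a non-idempotent write
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    SafeWriteFlow()
```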
@@ -800,39 +833,6 @@ def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typ
     """
     ...
 
-@typing.overload
-def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
-    """
-    Specifies environment variables to be set prior to the execution of a step.
-
-
-    Parameters
-    ----------
-    vars : Dict[str, str], default {}
-        Dictionary of environment variables to set.
-    """
-    ...
-
-@typing.overload
-def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
-    ...
-
-@typing.overload
-def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
-    ...
-
-def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
-    """
-    Specifies environment variables to be set prior to the execution of a step.
-
-
-    Parameters
-    ----------
-    vars : Dict[str, str], default {}
-        Dictionary of environment variables to set.
-    """
-    ...
-
 @typing.overload
 def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
     """
@@ -884,122 +884,190 @@ def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
     """
     ...
 
-def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+def project(*, name: str, branch: typing.Optional[str] = None, production: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
     """
-    The `@airflow_external_task_sensor` decorator attaches an Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
-    This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as flow decorators. Adding more than one decorator will ensure that the `start` step starts only after all sensors finish.
+    Specifies which flows belong to the same project.
+
+    A project-specific namespace is created for all flows that
+    use the same `@project(name)`.
 
 
     Parameters
     ----------
-    timeout : int
-        Time, in seconds, before the task times out and fails. (Default: 3600)
-    poke_interval : int
-        Time in seconds that the job should wait between each try. (Default: 60)
-    mode : str
-        How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
-    exponential_backoff : bool
-        Allow progressively longer waits between pokes by using an exponential backoff algorithm. (Default: True)
-    pool : str
-        The slot pool this task should run in;
-        slot pools are a way to limit concurrency for certain tasks. (Default: None)
-    soft_fail : bool
-        Set to True to mark the task as SKIPPED on failure. (Default: False)
     name : str
-        Name of the sensor on Airflow
-    description : str
-        Description of the sensor in the Airflow UI
-    external_dag_id : str
-        The dag_id that contains the task you want to wait for.
-    external_task_ids : List[str]
-        The list of task_ids that you want to wait for.
-        If None (default value) the sensor waits for the DAG. (Default: None)
-    allowed_states : List[str]
-        Iterable of allowed states. (Default: ['success'])
-    failed_states : List[str]
-        Iterable of failed or disallowed states. (Default: None)
-    execution_delta : datetime.timedelta
-        Time difference with the previous execution to look at;
-        the default is the same logical date as the current task or DAG. (Default: None)
-    check_existence : bool
-        Set to True to check if the external task exists or check if
-        the DAG to wait for exists. (Default: True)
+        Project name. Make sure that the name is unique amongst all
+        projects that use the same production scheduler. The name may
+        contain only lowercase alphanumeric characters and underscores.
+
+    branch : Optional[str], default None
+        The branch to use. If not specified, the branch is set to
+        `user.<username>` unless `production` is set to `True`. This can
+        also be set on the command line using `--branch` as a top-level option.
+        It is an error to specify `branch` in the decorator and on the command line.
+
+    production : bool, default False
+        Whether or not the branch is the production branch. This can also be set on the
+        command line using `--production` as a top-level option. It is an error to specify
+        `production` in the decorator and on the command line.
+        The project branch name will be:
+            - if `branch` is specified:
+                - if `production` is True: `prod.<branch>`
+                - if `production` is False: `test.<branch>`
+            - if `branch` is not specified:
+                - if `production` is True: `prod`
+                - if `production` is False: `user.<username>`
     """
     ...
 
 @typing.overload
-def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+def trigger_on_finish(*, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
     """
-    Specifies the PyPI packages for all steps of the flow.
+    Specifies the flow(s) that this flow depends on.
+
+    ```
+    @trigger_on_finish(flow='FooFlow')
+    ```
+    or
+    ```
+    @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
+    ```
+    This decorator respects the @project decorator and triggers the flow
+    when upstream runs within the same namespace complete successfully.
+
+    Additionally, you can specify project-aware upstream flow dependencies
+    by specifying the fully qualified project_flow_name.
+    ```
+    @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
+    ```
+    or
+    ```
+    @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
+    ```
+
+    You can also specify just the project or project branch (other values will be
+    inferred from the current project or project branch):
+    ```
+    @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
+    ```
+
+    Note that `branch` is typically one of:
+    - `prod`
+    - `user.bob`
+    - `test.my_experiment`
+    - `prod.staging`
 
-    Use `@pypi_base` to set common packages required by all
-    steps and use `@pypi` to specify step-specific overrides.
 
     Parameters
     ----------
-    packages : Dict[str, str], default: {}
-        Packages to use for this flow. The key is the name of the package
-        and the value is the version to use.
-    python : str, optional, default: None
-        Version of Python to use, e.g. '3.7.4'. A default value of None implies
-        that the version used will correspond to the version of the Python interpreter used to start the run.
+    flow : Union[str, Dict[str, str]], optional, default None
+        Upstream flow dependency for this flow.
+    flows : List[Union[str, Dict[str, str]]], default []
+        Upstream flow dependencies for this flow.
+    options : Dict[str, Any], default {}
+        Backend-specific configuration for tuning eventing behavior.
     """
     ...
 
 @typing.overload
-def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
     ...
 
-def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
+def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
     """
-    Specifies the PyPI packages for all steps of the flow.
+    Specifies the flow(s) that this flow depends on.
+
+    ```
+    @trigger_on_finish(flow='FooFlow')
+    ```
+    or
+    ```
+    @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
+    ```
+    This decorator respects the @project decorator and triggers the flow
+    when upstream runs within the same namespace complete successfully.
+
+    Additionally, you can specify project-aware upstream flow dependencies
+    by specifying the fully qualified project_flow_name.
+    ```
+    @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
+    ```
+    or
+    ```
+    @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
+    ```
+
+    You can also specify just the project or project branch (other values will be
+    inferred from the current project or project branch):
+    ```
+    @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
+    ```
+
+    Note that `branch` is typically one of:
+    - `prod`
+    - `user.bob`
+    - `test.my_experiment`
+    - `prod.staging`
 
-    Use `@pypi_base` to set common packages required by all
-    steps and use `@pypi` to specify step-specific overrides.
 
     Parameters
     ----------
-    packages : Dict[str, str], default: {}
-        Packages to use for this flow. The key is the name of the package
-        and the value is the version to use.
-    python : str, optional, default: None
-        Version of Python to use, e.g. '3.7.4'. A default value of None implies
-        that the version used will correspond to the version of the Python interpreter used to start the run.
+    flow : Union[str, Dict[str, str]], optional, default None
+        Upstream flow dependency for this flow.
+    flows : List[Union[str, Dict[str, str]]], default []
+        Upstream flow dependencies for this flow.
+    options : Dict[str, Any], default {}
+        Backend-specific configuration for tuning eventing behavior.
     """
     ...
 
-def project(*, name: str, branch: typing.Optional[str] = None, production: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+@typing.overload
+def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
     """
-    Specifies which flows belong to the same project.
-
-    A project-specific namespace is created for all flows that
-    use the same `@project(name)`.
+    Specifies the times when the flow should be run when running on a
+    production scheduler.
 
 
     Parameters
     ----------
-    name : str
-        Project name. Make sure that the name is unique amongst all
-        projects that use the same production scheduler. The name may
-        contain only lowercase alphanumeric characters and underscores.
-
-    branch : Optional[str], default None
-        The branch to use. If not specified, the branch is set to
-        `user.<username>` unless `production` is set to `True`. This can
-        also be set on the command line using `--branch` as a top-level option.
-        It is an error to specify `branch` in the decorator and on the command line.
-
-    production : bool, default False
-        Whether or not the branch is the production branch. This can also be set on the
-        command line using `--production` as a top-level option. It is an error to specify
-        `production` in the decorator and on the command line.
-        The project branch name will be:
-            - if `branch` is specified:
-                - if `production` is True: `prod.<branch>`
-                - if `production` is False: `test.<branch>`
-            - if `branch` is not specified:
-                - if `production` is True: `prod`
-                - if `production` is False: `user.<username>`
+    hourly : bool, default False
+        Run the workflow hourly.
+    daily : bool, default True
+        Run the workflow daily.
+    weekly : bool, default False
+        Run the workflow weekly.
+    cron : str, optional, default None
+        Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
+        specified by this expression.
+    timezone : str, optional, default None
+        Timezone in which the schedule runs (default: None). Currently supported only for Argo workflows,
+        which accept timezones in [IANA format](https://nodatime.org/TimeZones).
+    """
+    ...
+
+@typing.overload
+def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+    ...
+
+def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
+    """
+    Specifies the times when the flow should be run when running on a
+    production scheduler.
+
+
+    Parameters
+    ----------
+    hourly : bool, default False
+        Run the workflow hourly.
+    daily : bool, default True
+        Run the workflow daily.
+    weekly : bool, default False
+        Run the workflow weekly.
+    cron : str, optional, default None
+        Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
+        specified by this expression.
+    timezone : str, optional, default None
+        Timezone in which the schedule runs (default: None). Currently supported only for Argo workflows,
+        which accept timezones in [IANA format](https://nodatime.org/TimeZones).
     """
     ...
 
@@ -1097,6 +1165,90 @@ def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, expone
     """
     ...
 
+@typing.overload
+def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+    """
+    Specifies the PyPI packages for all steps of the flow.
+
+    Use `@pypi_base` to set common packages required by all
+    steps and use `@pypi` to specify step-specific overrides.
+
+    Parameters
+    ----------
+    packages : Dict[str, str], default: {}
+        Packages to use for this flow. The key is the name of the package
+        and the value is the version to use.
+    python : str, optional, default: None
+        Version of Python to use, e.g. '3.7.4'. A default value of None implies
+        that the version used will correspond to the version of the Python interpreter used to start the run.
+    """
+    ...
+
+@typing.overload
+def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+    ...
+
+def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
+    """
+    Specifies the PyPI packages for all steps of the flow.
+
+    Use `@pypi_base` to set common packages required by all
+    steps and use `@pypi` to specify step-specific overrides.
+
+    Parameters
+    ----------
+    packages : Dict[str, str], default: {}
+        Packages to use for this flow. The key is the name of the package
+        and the value is the version to use.
+    python : str, optional, default: None
+        Version of Python to use, e.g. '3.7.4'. A default value of None implies
+        that the version used will correspond to the version of the Python interpreter used to start the run.
+    """
+    ...
+
+def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+    """
+    The `@airflow_external_task_sensor` decorator attaches an Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
+    This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as flow decorators. Adding more than one decorator will ensure that the `start` step starts only after all sensors finish.
+
+
+    Parameters
+    ----------
+    timeout : int
+        Time, in seconds, before the task times out and fails. (Default: 3600)
+    poke_interval : int
+        Time in seconds that the job should wait between each try. (Default: 60)
+    mode : str
+        How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
+    exponential_backoff : bool
+        Allow progressively longer waits between pokes by using an exponential backoff algorithm. (Default: True)
+    pool : str
+        The slot pool this task should run in;
+        slot pools are a way to limit concurrency for certain tasks. (Default: None)
+    soft_fail : bool
+        Set to True to mark the task as SKIPPED on failure. (Default: False)
+    name : str
+        Name of the sensor on Airflow
+    description : str
+        Description of the sensor in the Airflow UI
+    external_dag_id : str
+        The dag_id that contains the task you want to wait for.
+    external_task_ids : List[str]
+        The list of task_ids that you want to wait for.
+        If None (default value) the sensor waits for the DAG. (Default: None)
+    allowed_states : List[str]
+        Iterable of allowed states. (Default: ['success'])
+    failed_states : List[str]
+        Iterable of failed or disallowed states. (Default: None)
+    execution_delta : datetime.timedelta
+        Time difference with the previous execution to look at;
+        the default is the same logical date as the current task or DAG. (Default: None)
+    check_existence : bool
+        Set to True to check if the external task exists or check if
+        the DAG to wait for exists. (Default: True)
+    """
+    ...
+
 @typing.overload
 def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
     """
@@ -1190,155 +1342,3 @@ def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: t
     """
     ...
 
-@typing.overload
-def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
-    """
-    Specifies the times when the flow should be run when running on a
-    production scheduler.
-
-
-    Parameters
-    ----------
-    hourly : bool, default False
-        Run the workflow hourly.
-    daily : bool, default True
-        Run the workflow daily.
-    weekly : bool, default False
-        Run the workflow weekly.
-    cron : str, optional, default None
-        Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
-        specified by this expression.
-    timezone : str, optional, default None
-        Timezone in which the schedule runs (default: None). Currently supported only for Argo workflows,
-        which accept timezones in [IANA format](https://nodatime.org/TimeZones).
-    """
-    ...
-
-@typing.overload
-def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
-    ...
-
-def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
-    """
-    Specifies the times when the flow should be run when running on a
-    production scheduler.
-
-
-    Parameters
-    ----------
-    hourly : bool, default False
-        Run the workflow hourly.
-    daily : bool, default True
-        Run the workflow daily.
-    weekly : bool, default False
-        Run the workflow weekly.
-    cron : str, optional, default None
-        Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
-        specified by this expression.
-    timezone : str, optional, default None
-        Timezone in which the schedule runs (default: None). Currently supported only for Argo workflows,
-        which accept timezones in [IANA format](https://nodatime.org/TimeZones).
-    """
-    ...
-
-@typing.overload
-def trigger_on_finish(*, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
-    """
-    Specifies the flow(s) that this flow depends on.
-
-    ```
-    @trigger_on_finish(flow='FooFlow')
-    ```
-    or
-    ```
-    @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
-    ```
-    This decorator respects the @project decorator and triggers the flow
-    when upstream runs within the same namespace complete successfully.
-
-    Additionally, you can specify project-aware upstream flow dependencies
-    by specifying the fully qualified project_flow_name.
-    ```
-    @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
-    ```
-    or
-    ```
-    @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
-    ```
-
-    You can also specify just the project or project branch (other values will be
-    inferred from the current project or project branch):
-    ```
-    @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
-    ```
-
-    Note that `branch` is typically one of:
-    - `prod`
-    - `user.bob`
-    - `test.my_experiment`
-    - `prod.staging`
-
-
-    Parameters
-    ----------
-    flow : Union[str, Dict[str, str]], optional, default None
-        Upstream flow dependency for this flow.
-    flows : List[Union[str, Dict[str, str]]], default []
-        Upstream flow dependencies for this flow.
-    options : Dict[str, Any], default {}
-        Backend-specific configuration for tuning eventing behavior.
-    """
-    ...
-
-@typing.overload
-def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
-    ...
-
-def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
-    """
-    Specifies the flow(s) that this flow depends on.
-
-    ```
-    @trigger_on_finish(flow='FooFlow')
-    ```
-    or
-    ```
-    @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
-    ```
-    This decorator respects the @project decorator and triggers the flow
-    when upstream runs within the same namespace complete successfully.
-
-    Additionally, you can specify project-aware upstream flow dependencies
-    by specifying the fully qualified project_flow_name.
-    ```
-    @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
-    ```
-    or
-    ```
-    @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
-    ```
-
-    You can also specify just the project or project branch (other values will be
-    inferred from the current project or project branch):
-    ```
-    @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
-    ```
-
-    Note that `branch` is typically one of:
-    - `prod`
-    - `user.bob`
-    - `test.my_experiment`
-    - `prod.staging`
-
-
-    Parameters
-    ----------
-    flow : Union[str, Dict[str, str]], optional, default None
-        Upstream flow dependency for this flow.
-    flows : List[Union[str, Dict[str, str]]], default []
-        Upstream flow dependencies for this flow.
-    options : Dict[str, Any], default {}
-        Backend-specific configuration for tuning eventing behavior.
-    """
-    ...
-
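These final removals mirror the additions earlier in the hunk stream: `@schedule` and `@trigger_on_finish` moved within the regenerated stub with their definitions intact. To close, a sketch of the project-aware trigger form from the docstring (project, branch, and flow names are all illustrative):

```python
from metaflow import FlowSpec, project, step, trigger_on_finish


@project(name="my_project")
@trigger_on_finish(flow={"name": "FooFlow",
                         "project": "my_project",
                         "project_branch": "prod"})  # fire when the prod FooFlow finishes
class BarFlow(FlowSpec):
    # Hypothetical downstream flow for illustration only.

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    BarFlow()
```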