metaflow-stubs 2.18.1__py2.py3-none-any.whl → 2.18.3__py2.py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of metaflow-stubs might be problematic.

Files changed (166)
  1. metaflow-stubs/__init__.pyi +462 -462
  2. metaflow-stubs/cards.pyi +2 -2
  3. metaflow-stubs/cli.pyi +2 -2
  4. metaflow-stubs/cli_components/__init__.pyi +2 -2
  5. metaflow-stubs/cli_components/utils.pyi +2 -2
  6. metaflow-stubs/client/__init__.pyi +2 -2
  7. metaflow-stubs/client/core.pyi +5 -5
  8. metaflow-stubs/client/filecache.pyi +3 -3
  9. metaflow-stubs/events.pyi +3 -3
  10. metaflow-stubs/exception.pyi +2 -2
  11. metaflow-stubs/flowspec.pyi +5 -5
  12. metaflow-stubs/generated_for.txt +1 -1
  13. metaflow-stubs/includefile.pyi +4 -4
  14. metaflow-stubs/meta_files.pyi +2 -2
  15. metaflow-stubs/metadata_provider/__init__.pyi +2 -2
  16. metaflow-stubs/metadata_provider/heartbeat.pyi +2 -2
  17. metaflow-stubs/metadata_provider/metadata.pyi +3 -3
  18. metaflow-stubs/metadata_provider/util.pyi +2 -2
  19. metaflow-stubs/metaflow_config.pyi +6 -2
  20. metaflow-stubs/metaflow_current.pyi +18 -18
  21. metaflow-stubs/metaflow_git.pyi +2 -2
  22. metaflow-stubs/multicore_utils.pyi +2 -2
  23. metaflow-stubs/packaging_sys/__init__.pyi +5 -5
  24. metaflow-stubs/packaging_sys/backend.pyi +3 -3
  25. metaflow-stubs/packaging_sys/distribution_support.pyi +4 -4
  26. metaflow-stubs/packaging_sys/tar_backend.pyi +6 -6
  27. metaflow-stubs/packaging_sys/utils.pyi +2 -2
  28. metaflow-stubs/packaging_sys/v1.pyi +4 -4
  29. metaflow-stubs/parameters.pyi +4 -4
  30. metaflow-stubs/plugins/__init__.pyi +14 -14
  31. metaflow-stubs/plugins/airflow/__init__.pyi +2 -2
  32. metaflow-stubs/plugins/airflow/airflow_decorator.pyi +2 -2
  33. metaflow-stubs/plugins/airflow/airflow_utils.pyi +2 -2
  34. metaflow-stubs/plugins/airflow/exception.pyi +2 -2
  35. metaflow-stubs/plugins/airflow/sensors/__init__.pyi +2 -2
  36. metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +2 -2
  37. metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +2 -2
  38. metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +2 -2
  39. metaflow-stubs/plugins/argo/__init__.pyi +2 -2
  40. metaflow-stubs/plugins/argo/argo_client.pyi +6 -4
  41. metaflow-stubs/plugins/argo/argo_events.pyi +2 -2
  42. metaflow-stubs/plugins/argo/argo_workflows.pyi +10 -3
  43. metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +3 -3
  44. metaflow-stubs/plugins/argo/argo_workflows_deployer.pyi +5 -5
  45. metaflow-stubs/plugins/argo/argo_workflows_deployer_objects.pyi +3 -3
  46. metaflow-stubs/plugins/argo/exit_hooks.pyi +3 -3
  47. metaflow-stubs/plugins/aws/__init__.pyi +2 -2
  48. metaflow-stubs/plugins/aws/aws_client.pyi +2 -2
  49. metaflow-stubs/plugins/aws/aws_utils.pyi +2 -2
  50. metaflow-stubs/plugins/aws/batch/__init__.pyi +2 -2
  51. metaflow-stubs/plugins/aws/batch/batch.pyi +4 -4
  52. metaflow-stubs/plugins/aws/batch/batch_client.pyi +2 -2
  53. metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +2 -2
  54. metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +2 -2
  55. metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +5 -5
  56. metaflow-stubs/plugins/aws/step_functions/__init__.pyi +2 -2
  57. metaflow-stubs/plugins/aws/step_functions/dynamo_db_client.pyi +2 -2
  58. metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +2 -2
  59. metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +2 -2
  60. metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +3 -3
  61. metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +2 -2
  62. metaflow-stubs/plugins/aws/step_functions/step_functions_decorator.pyi +2 -2
  63. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer.pyi +8 -5
  64. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer_objects.pyi +3 -3
  65. metaflow-stubs/plugins/azure/__init__.pyi +2 -2
  66. metaflow-stubs/plugins/azure/azure_credential.pyi +2 -2
  67. metaflow-stubs/plugins/azure/azure_exceptions.pyi +2 -2
  68. metaflow-stubs/plugins/azure/azure_secret_manager_secrets_provider.pyi +5 -5
  69. metaflow-stubs/plugins/azure/azure_utils.pyi +2 -2
  70. metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +2 -2
  71. metaflow-stubs/plugins/azure/includefile_support.pyi +2 -2
  72. metaflow-stubs/plugins/cards/__init__.pyi +2 -2
  73. metaflow-stubs/plugins/cards/card_client.pyi +3 -3
  74. metaflow-stubs/plugins/cards/card_creator.pyi +2 -2
  75. metaflow-stubs/plugins/cards/card_datastore.pyi +2 -2
  76. metaflow-stubs/plugins/cards/card_decorator.pyi +3 -3
  77. metaflow-stubs/plugins/cards/card_modules/__init__.pyi +2 -2
  78. metaflow-stubs/plugins/cards/card_modules/basic.pyi +3 -3
  79. metaflow-stubs/plugins/cards/card_modules/card.pyi +2 -2
  80. metaflow-stubs/plugins/cards/card_modules/components.pyi +5 -5
  81. metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +2 -2
  82. metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +2 -2
  83. metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +2 -2
  84. metaflow-stubs/plugins/cards/card_resolver.pyi +2 -2
  85. metaflow-stubs/plugins/cards/component_serializer.pyi +2 -2
  86. metaflow-stubs/plugins/cards/exception.pyi +2 -2
  87. metaflow-stubs/plugins/catch_decorator.pyi +3 -3
  88. metaflow-stubs/plugins/datatools/__init__.pyi +2 -2
  89. metaflow-stubs/plugins/datatools/local.pyi +2 -2
  90. metaflow-stubs/plugins/datatools/s3/__init__.pyi +2 -2
  91. metaflow-stubs/plugins/datatools/s3/s3.pyi +5 -5
  92. metaflow-stubs/plugins/datatools/s3/s3tail.pyi +2 -2
  93. metaflow-stubs/plugins/datatools/s3/s3util.pyi +2 -2
  94. metaflow-stubs/plugins/debug_logger.pyi +2 -2
  95. metaflow-stubs/plugins/debug_monitor.pyi +2 -2
  96. metaflow-stubs/plugins/environment_decorator.pyi +2 -2
  97. metaflow-stubs/plugins/events_decorator.pyi +2 -2
  98. metaflow-stubs/plugins/exit_hook/__init__.pyi +2 -2
  99. metaflow-stubs/plugins/exit_hook/exit_hook_decorator.pyi +2 -2
  100. metaflow-stubs/plugins/frameworks/__init__.pyi +2 -2
  101. metaflow-stubs/plugins/frameworks/pytorch.pyi +2 -2
  102. metaflow-stubs/plugins/gcp/__init__.pyi +2 -2
  103. metaflow-stubs/plugins/gcp/gcp_secret_manager_secrets_provider.pyi +5 -5
  104. metaflow-stubs/plugins/gcp/gs_exceptions.pyi +2 -2
  105. metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +2 -2
  106. metaflow-stubs/plugins/gcp/gs_utils.pyi +2 -2
  107. metaflow-stubs/plugins/gcp/includefile_support.pyi +2 -2
  108. metaflow-stubs/plugins/kubernetes/__init__.pyi +2 -2
  109. metaflow-stubs/plugins/kubernetes/kube_utils.pyi +3 -3
  110. metaflow-stubs/plugins/kubernetes/kubernetes.pyi +2 -2
  111. metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +2 -2
  112. metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +2 -2
  113. metaflow-stubs/plugins/kubernetes/kubernetes_job.pyi +2 -2
  114. metaflow-stubs/plugins/kubernetes/kubernetes_jobsets.pyi +2 -2
  115. metaflow-stubs/plugins/kubernetes/spot_monitor_sidecar.pyi +2 -2
  116. metaflow-stubs/plugins/parallel_decorator.pyi +2 -2
  117. metaflow-stubs/plugins/project_decorator.pyi +2 -2
  118. metaflow-stubs/plugins/pypi/__init__.pyi +3 -3
  119. metaflow-stubs/plugins/pypi/conda_decorator.pyi +2 -2
  120. metaflow-stubs/plugins/pypi/conda_environment.pyi +7 -7
  121. metaflow-stubs/plugins/pypi/parsers.pyi +2 -2
  122. metaflow-stubs/plugins/pypi/pypi_decorator.pyi +2 -2
  123. metaflow-stubs/plugins/pypi/pypi_environment.pyi +2 -2
  124. metaflow-stubs/plugins/pypi/utils.pyi +2 -2
  125. metaflow-stubs/plugins/resources_decorator.pyi +2 -2
  126. metaflow-stubs/plugins/retry_decorator.pyi +2 -2
  127. metaflow-stubs/plugins/secrets/__init__.pyi +2 -2
  128. metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +4 -4
  129. metaflow-stubs/plugins/secrets/secrets_decorator.pyi +2 -2
  130. metaflow-stubs/plugins/secrets/secrets_func.pyi +2 -2
  131. metaflow-stubs/plugins/secrets/secrets_spec.pyi +2 -2
  132. metaflow-stubs/plugins/secrets/utils.pyi +2 -2
  133. metaflow-stubs/plugins/storage_executor.pyi +2 -2
  134. metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +2 -2
  135. metaflow-stubs/plugins/timeout_decorator.pyi +3 -3
  136. metaflow-stubs/plugins/uv/__init__.pyi +2 -2
  137. metaflow-stubs/plugins/uv/uv_environment.pyi +3 -3
  138. metaflow-stubs/pylint_wrapper.pyi +2 -2
  139. metaflow-stubs/runner/__init__.pyi +2 -2
  140. metaflow-stubs/runner/deployer.pyi +33 -33
  141. metaflow-stubs/runner/deployer_impl.pyi +3 -3
  142. metaflow-stubs/runner/metaflow_runner.pyi +4 -4
  143. metaflow-stubs/runner/nbdeploy.pyi +2 -2
  144. metaflow-stubs/runner/nbrun.pyi +2 -2
  145. metaflow-stubs/runner/subprocess_manager.pyi +2 -2
  146. metaflow-stubs/runner/utils.pyi +4 -4
  147. metaflow-stubs/system/__init__.pyi +2 -2
  148. metaflow-stubs/system/system_logger.pyi +3 -3
  149. metaflow-stubs/system/system_monitor.pyi +2 -2
  150. metaflow-stubs/tagging_util.pyi +2 -2
  151. metaflow-stubs/tuple_util.pyi +2 -2
  152. metaflow-stubs/user_configs/__init__.pyi +2 -2
  153. metaflow-stubs/user_configs/config_options.pyi +4 -4
  154. metaflow-stubs/user_configs/config_parameters.pyi +6 -6
  155. metaflow-stubs/user_decorators/__init__.pyi +2 -2
  156. metaflow-stubs/user_decorators/common.pyi +2 -2
  157. metaflow-stubs/user_decorators/mutable_flow.pyi +4 -4
  158. metaflow-stubs/user_decorators/mutable_step.pyi +4 -4
  159. metaflow-stubs/user_decorators/user_flow_decorator.pyi +5 -5
  160. metaflow-stubs/user_decorators/user_step_decorator.pyi +6 -6
  161. metaflow-stubs/version.pyi +2 -2
  162. {metaflow_stubs-2.18.1.dist-info → metaflow_stubs-2.18.3.dist-info}/METADATA +2 -2
  163. metaflow_stubs-2.18.3.dist-info/RECORD +166 -0
  164. metaflow_stubs-2.18.1.dist-info/RECORD +0 -166
  165. {metaflow_stubs-2.18.1.dist-info → metaflow_stubs-2.18.3.dist-info}/WHEEL +0 -0
  166. {metaflow_stubs-2.18.1.dist-info → metaflow_stubs-2.18.3.dist-info}/top_level.txt +0 -0
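The diff of metaflow-stubs/__init__.pyi follows. As a minimal, illustrative sketch only (not part of the diff), one way to confirm which stub version is installed after upgrading, using the standard-library importlib.metadata; the expected output is an assumption, not captured from a real environment:

    from importlib.metadata import PackageNotFoundError, version

    try:
        # Expected to report 2.18.3 once the new wheel is installed
        print(version("metaflow-stubs"))
    except PackageNotFoundError:
        print("metaflow-stubs is not installed")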
@@ -1,15 +1,15 @@
  ######################################################################################################
  # Auto-generated Metaflow stub file #
- # MF version: 2.18.1 #
- # Generated on 2025-08-29T13:35:56.240161 #
+ # MF version: 2.18.3 #
+ # Generated on 2025-09-08T23:52:16.137378 #
  ######################################################################################################

  from __future__ import annotations

  import typing
  if typing.TYPE_CHECKING:
-     import typing
      import datetime
+     import typing
  FlowSpecDerived = typing.TypeVar("FlowSpecDerived", bound="FlowSpec", contravariant=False, covariant=False)
  StepFlag = typing.NewType("StepFlag", bool)

@@ -39,9 +39,9 @@ from .user_decorators.user_step_decorator import UserStepDecorator as UserStepDe
  from .user_decorators.user_step_decorator import StepMutator as StepMutator
  from .user_decorators.user_step_decorator import user_step_decorator as user_step_decorator
  from .user_decorators.user_flow_decorator import FlowMutator as FlowMutator
+ from . import metaflow_git as metaflow_git
  from . import events as events
  from . import tuple_util as tuple_util
- from . import metaflow_git as metaflow_git
  from . import runner as runner
  from . import plugins as plugins
  from .plugins.datatools.s3.s3 import S3 as S3
@@ -152,161 +152,6 @@ def step(f: typing.Union[typing.Callable[[FlowSpecDerived], None], typing.Callab
  """
  ...

- @typing.overload
- def batch(*, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = 'METAFLOW_BATCH_JOB_QUEUE', iam_role: str = 'METAFLOW_ECS_S3_ACCESS_IAM_ROLE', execution_role: str = 'METAFLOW_ECS_FARGATE_EXECUTION_ROLE', shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, aws_batch_tags: typing.Optional[typing.Dict[str, str]] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
-
-
- Parameters
- ----------
- cpu : int, default 1
- Number of CPUs required for this step. If `@resources` is
- also present, the maximum value from all decorators is used.
- gpu : int, default 0
- Number of GPUs required for this step. If `@resources` is
- also present, the maximum value from all decorators is used.
- memory : int, default 4096
- Memory size (in MB) required for this step. If
- `@resources` is also present, the maximum value from all decorators is
- used.
- image : str, optional, default None
- Docker image to use when launching on AWS Batch. If not specified, and
- METAFLOW_BATCH_CONTAINER_IMAGE is specified, that image is used. If
- not, a default Docker image mapping to the current version of Python is used.
- queue : str, default METAFLOW_BATCH_JOB_QUEUE
- AWS Batch Job Queue to submit the job to.
- iam_role : str, default METAFLOW_ECS_S3_ACCESS_IAM_ROLE
- AWS IAM role that AWS Batch container uses to access AWS cloud resources.
- execution_role : str, default METAFLOW_ECS_FARGATE_EXECUTION_ROLE
- AWS IAM role that AWS Batch can use [to trigger AWS Fargate tasks]
- (https://docs.aws.amazon.com/batch/latest/userguide/execution-IAM-role.html).
- shared_memory : int, optional, default None
- The value for the size (in MiB) of the /dev/shm volume for this step.
- This parameter maps to the `--shm-size` option in Docker.
- max_swap : int, optional, default None
- The total amount of swap memory (in MiB) a container can use for this
- step. This parameter is translated to the `--memory-swap` option in
- Docker where the value is the sum of the container memory plus the
- `max_swap` value.
- swappiness : int, optional, default None
- This allows you to tune memory swappiness behavior for this step.
- A swappiness value of 0 causes swapping not to happen unless absolutely
- necessary. A swappiness value of 100 causes pages to be swapped very
- aggressively. Accepted values are whole numbers between 0 and 100.
- aws_batch_tags: Dict[str, str], optional, default None
- Sets arbitrary AWS tags on the AWS Batch compute environment.
- Set as string key-value pairs.
- use_tmpfs : bool, default False
- This enables an explicit tmpfs mount for this step. Note that tmpfs is
- not available on Fargate compute environments
- tmpfs_tempdir : bool, default True
- sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
- tmpfs_size : int, optional, default None
- The value for the size (in MiB) of the tmpfs mount for this step.
- This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
- memory allocated for this step.
- tmpfs_path : str, optional, default None
- Path to tmpfs mount for this step. Defaults to /metaflow_temp.
- inferentia : int, default 0
- Number of Inferentia chips required for this step.
- trainium : int, default None
- Alias for inferentia. Use only one of the two.
- efa : int, default 0
- Number of elastic fabric adapter network devices to attach to container
- ephemeral_storage : int, default None
- The total amount, in GiB, of ephemeral storage to set for the task, 21-200GiB.
- This is only relevant for Fargate compute environments
- log_driver: str, optional, default None
- The log driver to use for the Amazon ECS container.
- log_options: List[str], optional, default None
- List of strings containing options for the chosen log driver. The configurable values
- depend on the `log driver` chosen. Validation of these options is not supported yet.
- Example: [`awslogs-group:aws/batch/job`]
- """
- ...
-
- @typing.overload
- def batch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def batch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def batch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = 'METAFLOW_BATCH_JOB_QUEUE', iam_role: str = 'METAFLOW_ECS_S3_ACCESS_IAM_ROLE', execution_role: str = 'METAFLOW_ECS_FARGATE_EXECUTION_ROLE', shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, aws_batch_tags: typing.Optional[typing.Dict[str, str]] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None):
- """
- Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
-
-
- Parameters
- ----------
- cpu : int, default 1
- Number of CPUs required for this step. If `@resources` is
- also present, the maximum value from all decorators is used.
- gpu : int, default 0
- Number of GPUs required for this step. If `@resources` is
- also present, the maximum value from all decorators is used.
- memory : int, default 4096
- Memory size (in MB) required for this step. If
- `@resources` is also present, the maximum value from all decorators is
- used.
- image : str, optional, default None
- Docker image to use when launching on AWS Batch. If not specified, and
- METAFLOW_BATCH_CONTAINER_IMAGE is specified, that image is used. If
- not, a default Docker image mapping to the current version of Python is used.
- queue : str, default METAFLOW_BATCH_JOB_QUEUE
- AWS Batch Job Queue to submit the job to.
- iam_role : str, default METAFLOW_ECS_S3_ACCESS_IAM_ROLE
- AWS IAM role that AWS Batch container uses to access AWS cloud resources.
- execution_role : str, default METAFLOW_ECS_FARGATE_EXECUTION_ROLE
- AWS IAM role that AWS Batch can use [to trigger AWS Fargate tasks]
- (https://docs.aws.amazon.com/batch/latest/userguide/execution-IAM-role.html).
- shared_memory : int, optional, default None
- The value for the size (in MiB) of the /dev/shm volume for this step.
- This parameter maps to the `--shm-size` option in Docker.
- max_swap : int, optional, default None
- The total amount of swap memory (in MiB) a container can use for this
- step. This parameter is translated to the `--memory-swap` option in
- Docker where the value is the sum of the container memory plus the
- `max_swap` value.
- swappiness : int, optional, default None
- This allows you to tune memory swappiness behavior for this step.
- A swappiness value of 0 causes swapping not to happen unless absolutely
- necessary. A swappiness value of 100 causes pages to be swapped very
- aggressively. Accepted values are whole numbers between 0 and 100.
- aws_batch_tags: Dict[str, str], optional, default None
- Sets arbitrary AWS tags on the AWS Batch compute environment.
- Set as string key-value pairs.
- use_tmpfs : bool, default False
- This enables an explicit tmpfs mount for this step. Note that tmpfs is
- not available on Fargate compute environments
- tmpfs_tempdir : bool, default True
- sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
- tmpfs_size : int, optional, default None
- The value for the size (in MiB) of the tmpfs mount for this step.
- This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
- memory allocated for this step.
- tmpfs_path : str, optional, default None
- Path to tmpfs mount for this step. Defaults to /metaflow_temp.
- inferentia : int, default 0
- Number of Inferentia chips required for this step.
- trainium : int, default None
- Alias for inferentia. Use only one of the two.
- efa : int, default 0
- Number of elastic fabric adapter network devices to attach to container
- ephemeral_storage : int, default None
- The total amount, in GiB, of ephemeral storage to set for the task, 21-200GiB.
- This is only relevant for Fargate compute environments
- log_driver: str, optional, default None
- The log driver to use for the Amazon ECS container.
- log_options: List[str], optional, default None
- List of strings containing options for the chosen log driver. The configurable values
- depend on the `log driver` chosen. Validation of these options is not supported yet.
- Example: [`awslogs-group:aws/batch/job`]
- """
- ...
-
  @typing.overload
  def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
@@ -378,204 +223,241 @@ def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None],
  ...

  @typing.overload
- def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def card(*, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies a timeout for your step.
-
- This decorator is useful if this step may hang indefinitely.
-
- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
+ Creates a human-readable report, a Metaflow Card, after this step completes.

- Note that all the values specified in parameters are added together so if you specify
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
+ Note that you may add multiple `@card` decorators in a step with different parameters.


  Parameters
  ----------
- seconds : int, default 0
- Number of seconds to wait prior to timing out.
- minutes : int, default 0
- Number of minutes to wait prior to timing out.
- hours : int, default 0
- Number of hours to wait prior to timing out.
+ type : str, default 'default'
+ Card type.
+ id : str, optional, default None
+ If multiple cards are present, use this id to identify this card.
+ options : Dict[str, Any], default {}
+ Options passed to the card. The contents depend on the card type.
+ timeout : int, default 45
+ Interrupt reporting if it takes more than this many seconds.
  """
  ...

  @typing.overload
- def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
+ def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
  """
- Specifies a timeout for your step.
-
- This decorator is useful if this step may hang indefinitely.
-
- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
+ Creates a human-readable report, a Metaflow Card, after this step completes.

- Note that all the values specified in parameters are added together so if you specify
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
+ Note that you may add multiple `@card` decorators in a step with different parameters.


  Parameters
  ----------
- seconds : int, default 0
- Number of seconds to wait prior to timing out.
- minutes : int, default 0
- Number of minutes to wait prior to timing out.
- hours : int, default 0
- Number of hours to wait prior to timing out.
+ type : str, default 'default'
+ Card type.
+ id : str, optional, default None
+ If multiple cards are present, use this id to identify this card.
+ options : Dict[str, Any], default {}
+ Options passed to the card. The contents depend on the card type.
+ timeout : int, default 45
+ Interrupt reporting if it takes more than this many seconds.
  """
  ...

  @typing.overload
- def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies the number of times the task corresponding
- to a step needs to be retried.
-
- This decorator is useful for handling transient errors, such as networking issues.
- If your task contains operations that can't be retried safely, e.g. database updates,
- it is advisable to annotate it with `@retry(times=0)`.
-
- This can be used in conjunction with the `@catch` decorator. The `@catch`
- decorator will execute a no-op task after all retries have been exhausted,
- ensuring that the flow execution can continue.
+ Specifies secrets to be retrieved and injected as environment variables prior to
+ the execution of a step.


  Parameters
  ----------
- times : int, default 3
- Number of times to retry this task.
- minutes_between_retries : int, default 2
- Number of minutes between retries.
+ sources : List[Union[str, Dict[str, Any]]], default: []
+ List of secret specs, defining how the secrets are to be retrieved
+ role : str, optional, default: None
+ Role to use for fetching secrets
  """
  ...

  @typing.overload
- def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
+ def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None):
  """
- Specifies the number of times the task corresponding
- to a step needs to be retried.
-
- This decorator is useful for handling transient errors, such as networking issues.
- If your task contains operations that can't be retried safely, e.g. database updates,
- it is advisable to annotate it with `@retry(times=0)`.
+ Specifies secrets to be retrieved and injected as environment variables prior to
+ the execution of a step.

- This can be used in conjunction with the `@catch` decorator. The `@catch`
- decorator will execute a no-op task after all retries have been exhausted,
- ensuring that the flow execution can continue.
+
+ Parameters
+ ----------
+ sources : List[Union[str, Dict[str, Any]]], default: []
+ List of secret specs, defining how the secrets are to be retrieved
+ role : str, optional, default: None
+ Role to use for fetching secrets
+ """
+ ...
+
+ @typing.overload
+ def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Specifies environment variables to be set prior to the execution of a step.


  Parameters
  ----------
- times : int, default 3
- Number of times to retry this task.
- minutes_between_retries : int, default 2
- Number of minutes between retries.
+ vars : Dict[str, str], default {}
+ Dictionary of environment variables to set.
  """
  ...

  @typing.overload
- def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ ...
+
+ @typing.overload
+ def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
  """
- Specifies secrets to be retrieved and injected as environment variables prior to
- the execution of a step.
+ Specifies environment variables to be set prior to the execution of a step.


  Parameters
  ----------
- sources : List[Union[str, Dict[str, Any]]], default: []
- List of secret specs, defining how the secrets are to be retrieved
- role : str, optional, default: None
- Role to use for fetching secrets
+ vars : Dict[str, str], default {}
+ Dictionary of environment variables to set.
  """
  ...

  @typing.overload
- def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Specifies a timeout for your step.
+
+ This decorator is useful if this step may hang indefinitely.
+
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.
+
+ Note that all the values specified in parameters are added together so if you specify
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
+
+
+ Parameters
+ ----------
+ seconds : int, default 0
+ Number of seconds to wait prior to timing out.
+ minutes : int, default 0
+ Number of minutes to wait prior to timing out.
+ hours : int, default 0
+ Number of hours to wait prior to timing out.
+ """
  ...

  @typing.overload
- def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

- def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None):
+ @typing.overload
+ def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
  """
- Specifies secrets to be retrieved and injected as environment variables prior to
- the execution of a step.
+ Specifies a timeout for your step.
+
+ This decorator is useful if this step may hang indefinitely.
+
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.
+
+ Note that all the values specified in parameters are added together so if you specify
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.


  Parameters
  ----------
- sources : List[Union[str, Dict[str, Any]]], default: []
- List of secret specs, defining how the secrets are to be retrieved
- role : str, optional, default: None
- Role to use for fetching secrets
+ seconds : int, default 0
+ Number of seconds to wait prior to timing out.
+ minutes : int, default 0
+ Number of minutes to wait prior to timing out.
+ hours : int, default 0
+ Number of hours to wait prior to timing out.
  """
  ...

  @typing.overload
- def card(*, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Creates a human-readable report, a Metaflow Card, after this step completes.
+ Specifies the Conda environment for the step.

- Note that you may add multiple `@card` decorators in a step with different parameters.
+ Information in this decorator will augment any
+ attributes set in the `@conda_base` flow-level decorator. Hence,
+ you can use `@conda_base` to set packages required by all
+ steps and use `@conda` to specify step-specific overrides.


  Parameters
  ----------
- type : str, default 'default'
- Card type.
- id : str, optional, default None
- If multiple cards are present, use this id to identify this card.
- options : Dict[str, Any], default {}
- Options passed to the card. The contents depend on the card type.
- timeout : int, default 45
- Interrupt reporting if it takes more than this many seconds.
+ packages : Dict[str, str], default {}
+ Packages to use for this step. The key is the name of the package
+ and the value is the version to use.
+ libraries : Dict[str, str], default {}
+ Supported for backward compatibility. When used with packages, packages will take precedence.
+ python : str, optional, default None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ disabled : bool, default False
+ If set to True, disables @conda.
  """
  ...

  @typing.overload
- def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
+ def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
  """
- Creates a human-readable report, a Metaflow Card, after this step completes.
+ Specifies the Conda environment for the step.

- Note that you may add multiple `@card` decorators in a step with different parameters.
+ Information in this decorator will augment any
+ attributes set in the `@conda_base` flow-level decorator. Hence,
+ you can use `@conda_base` to set packages required by all
+ steps and use `@conda` to specify step-specific overrides.


  Parameters
  ----------
- type : str, default 'default'
- Card type.
- id : str, optional, default None
- If multiple cards are present, use this id to identify this card.
- options : Dict[str, Any], default {}
- Options passed to the card. The contents depend on the card type.
- timeout : int, default 45
- Interrupt reporting if it takes more than this many seconds.
+ packages : Dict[str, str], default {}
+ Packages to use for this step. The key is the name of the package
+ and the value is the version to use.
+ libraries : Dict[str, str], default {}
+ Supported for backward compatibility. When used with packages, packages will take precedence.
+ python : str, optional, default None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ disabled : bool, default False
+ If set to True, disables @conda.
  """
  ...

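The hunk above appears to only reorder the @card, @secrets, @environment, @timeout and @conda stubs inside __init__.pyi; their signatures and docstrings are unchanged. For orientation only, a minimal sketch of how these step-level decorators are combined in user code (the flow, step names and values below are hypothetical placeholders, not taken from this package):

    from metaflow import FlowSpec, card, environment, retry, step, timeout

    class DemoFlow(FlowSpec):  # hypothetical flow, for illustration only

        @card(type='default', timeout=45)
        @environment(vars={"EXAMPLE_FLAG": "1"})
        @retry(times=3, minutes_between_retries=2)
        @timeout(minutes=10)
        @step
        def start(self):
            self.value = 42  # artifact shown by the default card
            self.next(self.end)

        @step
        def end(self):
            print(self.value)

    if __name__ == "__main__":
        DemoFlow()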
@@ -658,6 +540,161 @@ def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None]
  """
  ...

+ @typing.overload
+ def batch(*, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = 'METAFLOW_BATCH_JOB_QUEUE', iam_role: str = 'METAFLOW_ECS_S3_ACCESS_IAM_ROLE', execution_role: str = 'METAFLOW_ECS_FARGATE_EXECUTION_ROLE', shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, aws_batch_tags: typing.Optional[typing.Dict[str, str]] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
+
+
+ Parameters
+ ----------
+ cpu : int, default 1
+ Number of CPUs required for this step. If `@resources` is
+ also present, the maximum value from all decorators is used.
+ gpu : int, default 0
+ Number of GPUs required for this step. If `@resources` is
+ also present, the maximum value from all decorators is used.
+ memory : int, default 4096
+ Memory size (in MB) required for this step. If
+ `@resources` is also present, the maximum value from all decorators is
+ used.
+ image : str, optional, default None
+ Docker image to use when launching on AWS Batch. If not specified, and
+ METAFLOW_BATCH_CONTAINER_IMAGE is specified, that image is used. If
+ not, a default Docker image mapping to the current version of Python is used.
+ queue : str, default METAFLOW_BATCH_JOB_QUEUE
+ AWS Batch Job Queue to submit the job to.
+ iam_role : str, default METAFLOW_ECS_S3_ACCESS_IAM_ROLE
+ AWS IAM role that AWS Batch container uses to access AWS cloud resources.
+ execution_role : str, default METAFLOW_ECS_FARGATE_EXECUTION_ROLE
+ AWS IAM role that AWS Batch can use [to trigger AWS Fargate tasks]
+ (https://docs.aws.amazon.com/batch/latest/userguide/execution-IAM-role.html).
+ shared_memory : int, optional, default None
+ The value for the size (in MiB) of the /dev/shm volume for this step.
+ This parameter maps to the `--shm-size` option in Docker.
+ max_swap : int, optional, default None
+ The total amount of swap memory (in MiB) a container can use for this
+ step. This parameter is translated to the `--memory-swap` option in
+ Docker where the value is the sum of the container memory plus the
+ `max_swap` value.
+ swappiness : int, optional, default None
+ This allows you to tune memory swappiness behavior for this step.
+ A swappiness value of 0 causes swapping not to happen unless absolutely
+ necessary. A swappiness value of 100 causes pages to be swapped very
+ aggressively. Accepted values are whole numbers between 0 and 100.
+ aws_batch_tags: Dict[str, str], optional, default None
+ Sets arbitrary AWS tags on the AWS Batch compute environment.
+ Set as string key-value pairs.
+ use_tmpfs : bool, default False
+ This enables an explicit tmpfs mount for this step. Note that tmpfs is
+ not available on Fargate compute environments
+ tmpfs_tempdir : bool, default True
+ sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
+ tmpfs_size : int, optional, default None
+ The value for the size (in MiB) of the tmpfs mount for this step.
+ This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
+ memory allocated for this step.
+ tmpfs_path : str, optional, default None
+ Path to tmpfs mount for this step. Defaults to /metaflow_temp.
+ inferentia : int, default 0
+ Number of Inferentia chips required for this step.
+ trainium : int, default None
+ Alias for inferentia. Use only one of the two.
+ efa : int, default 0
+ Number of elastic fabric adapter network devices to attach to container
+ ephemeral_storage : int, default None
+ The total amount, in GiB, of ephemeral storage to set for the task, 21-200GiB.
+ This is only relevant for Fargate compute environments
+ log_driver: str, optional, default None
+ The log driver to use for the Amazon ECS container.
+ log_options: List[str], optional, default None
+ List of strings containing options for the chosen log driver. The configurable values
+ depend on the `log driver` chosen. Validation of these options is not supported yet.
+ Example: [`awslogs-group:aws/batch/job`]
+ """
+ ...
+
+ @typing.overload
+ def batch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ ...
+
+ @typing.overload
+ def batch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def batch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = 'METAFLOW_BATCH_JOB_QUEUE', iam_role: str = 'METAFLOW_ECS_S3_ACCESS_IAM_ROLE', execution_role: str = 'METAFLOW_ECS_FARGATE_EXECUTION_ROLE', shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, aws_batch_tags: typing.Optional[typing.Dict[str, str]] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None):
+ """
+ Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
+
+
+ Parameters
+ ----------
+ cpu : int, default 1
+ Number of CPUs required for this step. If `@resources` is
+ also present, the maximum value from all decorators is used.
+ gpu : int, default 0
+ Number of GPUs required for this step. If `@resources` is
+ also present, the maximum value from all decorators is used.
+ memory : int, default 4096
+ Memory size (in MB) required for this step. If
+ `@resources` is also present, the maximum value from all decorators is
+ used.
+ image : str, optional, default None
+ Docker image to use when launching on AWS Batch. If not specified, and
+ METAFLOW_BATCH_CONTAINER_IMAGE is specified, that image is used. If
+ not, a default Docker image mapping to the current version of Python is used.
+ queue : str, default METAFLOW_BATCH_JOB_QUEUE
+ AWS Batch Job Queue to submit the job to.
+ iam_role : str, default METAFLOW_ECS_S3_ACCESS_IAM_ROLE
+ AWS IAM role that AWS Batch container uses to access AWS cloud resources.
+ execution_role : str, default METAFLOW_ECS_FARGATE_EXECUTION_ROLE
+ AWS IAM role that AWS Batch can use [to trigger AWS Fargate tasks]
+ (https://docs.aws.amazon.com/batch/latest/userguide/execution-IAM-role.html).
+ shared_memory : int, optional, default None
+ The value for the size (in MiB) of the /dev/shm volume for this step.
+ This parameter maps to the `--shm-size` option in Docker.
+ max_swap : int, optional, default None
+ The total amount of swap memory (in MiB) a container can use for this
+ step. This parameter is translated to the `--memory-swap` option in
+ Docker where the value is the sum of the container memory plus the
+ `max_swap` value.
+ swappiness : int, optional, default None
+ This allows you to tune memory swappiness behavior for this step.
+ A swappiness value of 0 causes swapping not to happen unless absolutely
+ necessary. A swappiness value of 100 causes pages to be swapped very
+ aggressively. Accepted values are whole numbers between 0 and 100.
+ aws_batch_tags: Dict[str, str], optional, default None
+ Sets arbitrary AWS tags on the AWS Batch compute environment.
+ Set as string key-value pairs.
+ use_tmpfs : bool, default False
+ This enables an explicit tmpfs mount for this step. Note that tmpfs is
+ not available on Fargate compute environments
+ tmpfs_tempdir : bool, default True
+ sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
+ tmpfs_size : int, optional, default None
+ The value for the size (in MiB) of the tmpfs mount for this step.
+ This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
+ memory allocated for this step.
+ tmpfs_path : str, optional, default None
+ Path to tmpfs mount for this step. Defaults to /metaflow_temp.
+ inferentia : int, default 0
+ Number of Inferentia chips required for this step.
+ trainium : int, default None
+ Alias for inferentia. Use only one of the two.
+ efa : int, default 0
+ Number of elastic fabric adapter network devices to attach to container
+ ephemeral_storage : int, default None
+ The total amount, in GiB, of ephemeral storage to set for the task, 21-200GiB.
+ This is only relevant for Fargate compute environments
+ log_driver: str, optional, default None
+ The log driver to use for the Amazon ECS container.
+ log_options: List[str], optional, default None
+ List of strings containing options for the chosen log driver. The configurable values
+ depend on the `log driver` chosen. Validation of these options is not supported yet.
+ Example: [`awslogs-group:aws/batch/job`]
+ """
+ ...
+
  def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = 'KUBERNETES_IMAGE_PULL_POLICY', image_pull_secrets: typing.List[str] = [], service_account: str = 'METAFLOW_KUBERNETES_SERVICE_ACCOUNT', secrets: typing.Optional[typing.List[str]] = None, node_selector: typing.Union[typing.Dict[str, str], str, None] = None, namespace: str = 'METAFLOW_KUBERNETES_NAMESPACE', gpu: typing.Optional[int] = None, gpu_vendor: str = 'KUBERNETES_GPU_VENDOR', tolerations: typing.List[typing.Dict[str, str]] = [], labels: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_LABELS', annotations: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_ANNOTATIONS', use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = '/metaflow_temp', persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None, hostname_resolution_timeout: int = 600, qos: str = 'Burstable', security_context: typing.Optional[typing.Dict[str, typing.Any]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
  Specifies that this step should execute on Kubernetes.
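The @batch stub removed earlier in the file is re-added here verbatim, so the net change is a move within __init__.pyi. As a usage sketch only (the flow name, tag values and resource figures below are placeholders; running it requires an AWS Batch backend configured via the queue and IAM roles documented above):

    from metaflow import FlowSpec, batch, step

    class BatchDemoFlow(FlowSpec):  # hypothetical flow name

        @batch(cpu=2, memory=8192, aws_batch_tags={"team": "ml", "env": "dev"})
        @step
        def start(self):
            self.next(self.end)

        @step
        def end(self):
            pass

    if __name__ == "__main__":
        BatchDemoFlow()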
@@ -799,137 +836,92 @@ def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typ
  ...

  @typing.overload
- def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies the Conda environment for the step.
+ Specifies the number of times the task corresponding
+ to a step needs to be retried.

- Information in this decorator will augment any
- attributes set in the `@conda_base` flow-level decorator. Hence,
- you can use `@conda_base` to set packages required by all
- steps and use `@conda` to specify step-specific overrides.
+ This decorator is useful for handling transient errors, such as networking issues.
+ If your task contains operations that can't be retried safely, e.g. database updates,
+ it is advisable to annotate it with `@retry(times=0)`.
+
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
+ decorator will execute a no-op task after all retries have been exhausted,
+ ensuring that the flow execution can continue.


  Parameters
  ----------
- packages : Dict[str, str], default {}
- Packages to use for this step. The key is the name of the package
- and the value is the version to use.
- libraries : Dict[str, str], default {}
- Supported for backward compatibility. When used with packages, packages will take precedence.
- python : str, optional, default None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
- disabled : bool, default False
- If set to True, disables @conda.
+ times : int, default 3
+ Number of times to retry this task.
+ minutes_between_retries : int, default 2
+ Number of minutes between retries.
  """
  ...

  @typing.overload
- def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
+ def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
  """
- Specifies the Conda environment for the step.
-
- Information in this decorator will augment any
- attributes set in the `@conda_base` flow-level decorator. Hence,
- you can use `@conda_base` to set packages required by all
- steps and use `@conda` to specify step-specific overrides.
+ Specifies the number of times the task corresponding
+ to a step needs to be retried.

+ This decorator is useful for handling transient errors, such as networking issues.
+ If your task contains operations that can't be retried safely, e.g. database updates,
+ it is advisable to annotate it with `@retry(times=0)`.

- Parameters
- ----------
- packages : Dict[str, str], default {}
- Packages to use for this step. The key is the name of the package
- and the value is the version to use.
- libraries : Dict[str, str], default {}
- Supported for backward compatibility. When used with packages, packages will take precedence.
- python : str, optional, default None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
- disabled : bool, default False
- If set to True, disables @conda.
- """
- ...
-
- @typing.overload
- def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Specifies environment variables to be set prior to the execution of a step.
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
+ decorator will execute a no-op task after all retries have been exhausted,
+ ensuring that the flow execution can continue.


  Parameters
  ----------
- vars : Dict[str, str], default {}
- Dictionary of environment variables to set.
+ times : int, default 3
+ Number of times to retry this task.
+ minutes_between_retries : int, default 2
+ Number of minutes between retries.
  """
  ...
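As a usage illustration of the `@retry` step decorator whose stub is added above (flow and step names are invented; this example is not part of the package diff):

# Illustrative only; mirrors the signature documented above.
from metaflow import FlowSpec, retry, step

class RetryExampleFlow(FlowSpec):

    @retry(times=2, minutes_between_retries=1)  # absorb transient failures, e.g. flaky network calls
    @step
    def start(self):
        self.next(self.end)

    @retry(times=0)  # non-idempotent work (e.g. database updates): never retry automatically
    @step
    def end(self):
        pass

if __name__ == "__main__":
    RetryExampleFlow()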

- @typing.overload
- def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
+ def project(*, name: str, branch: typing.Optional[str] = None, production: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
- Specifies environment variables to be set prior to the execution of a step.
+ Specifies what flows belong to the same project.
+
+ A project-specific namespace is created for all flows that
+ use the same `@project(name)`.


  Parameters
  ----------
- vars : Dict[str, str], default {}
- Dictionary of environment variables to set.
- """
- ...
-
- def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
- """
- The `@airflow_external_task_sensor` decorator attaches a Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
- This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as a flow decorators. Adding more than one decorator will ensure that `start` step starts only after all sensors finish.
+ name : str
+ Project name. Make sure that the name is unique amongst all
+ projects that use the same production scheduler. The name may
+ contain only lowercase alphanumeric characters and underscores.

+ branch : Optional[str], default None
+ The branch to use. If not specified, the branch is set to
+ `user.<username>` unless `production` is set to `True`. This can
+ also be set on the command line using `--branch` as a top-level option.
+ It is an error to specify `branch` in the decorator and on the command line.

- Parameters
- ----------
- timeout : int
- Time, in seconds before the task times out and fails. (Default: 3600)
- poke_interval : int
- Time in seconds that the job should wait in between each try. (Default: 60)
- mode : str
- How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
- exponential_backoff : bool
- allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
- pool : str
- the slot pool this task should run in,
- slot pools are a way to limit concurrency for certain tasks. (Default:None)
- soft_fail : bool
- Set to true to mark the task as SKIPPED on failure. (Default: False)
- name : str
- Name of the sensor on Airflow
- description : str
- Description of sensor in the Airflow UI
- external_dag_id : str
- The dag_id that contains the task you want to wait for.
- external_task_ids : List[str]
- The list of task_ids that you want to wait for.
- If None (default value) the sensor waits for the DAG. (Default: None)
- allowed_states : List[str]
- Iterable of allowed states, (Default: ['success'])
- failed_states : List[str]
- Iterable of failed or dis-allowed states. (Default: None)
- execution_delta : datetime.timedelta
- time difference with the previous execution to look at,
- the default is the same logical date as the current task or DAG. (Default: None)
- check_existence: bool
- Set to True to check if the external task exists or check if
- the DAG to wait for exists. (Default: True)
+ production : bool, default False
+ Whether or not the branch is the production branch. This can also be set on the
+ command line using `--production` as a top-level option. It is an error to specify
+ `production` in the decorator and on the command line.
+ The project branch name will be:
+ - if `branch` is specified:
+ - if `production` is True: `prod.<branch>`
+ - if `production` is False: `test.<branch>`
+ - if `branch` is not specified:
+ - if `production` is True: `prod`
+ - if `production` is False: `user.<username>`
  """
  ...
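As a usage illustration of the flow-level `@project` decorator documented in the added lines above (the project name is invented; this example is not part of the package diff):

# Illustrative only. Per the branch rules above, deploying with `--branch staging`
# would publish under `test.staging`, and `--production` (no branch) under `prod`.
from metaflow import FlowSpec, project, step

@project(name="demo_recommender")   # flows sharing this name share one namespace
class TrainingFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    TrainingFlow()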
 
@@ -1026,6 +1018,90 @@ def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: t
  """
  ...

+ def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ """
+ The `@airflow_external_task_sensor` decorator attaches a Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
+ This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as a flow decorators. Adding more than one decorator will ensure that `start` step starts only after all sensors finish.
+
+
+ Parameters
+ ----------
+ timeout : int
+ Time, in seconds before the task times out and fails. (Default: 3600)
+ poke_interval : int
+ Time in seconds that the job should wait in between each try. (Default: 60)
+ mode : str
+ How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
+ exponential_backoff : bool
+ allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
+ pool : str
+ the slot pool this task should run in,
+ slot pools are a way to limit concurrency for certain tasks. (Default:None)
+ soft_fail : bool
+ Set to true to mark the task as SKIPPED on failure. (Default: False)
+ name : str
+ Name of the sensor on Airflow
+ description : str
+ Description of sensor in the Airflow UI
+ external_dag_id : str
+ The dag_id that contains the task you want to wait for.
+ external_task_ids : List[str]
+ The list of task_ids that you want to wait for.
+ If None (default value) the sensor waits for the DAG. (Default: None)
+ allowed_states : List[str]
+ Iterable of allowed states, (Default: ['success'])
+ failed_states : List[str]
+ Iterable of failed or dis-allowed states. (Default: None)
+ execution_delta : datetime.timedelta
+ time difference with the previous execution to look at,
+ the default is the same logical date as the current task or DAG. (Default: None)
+ check_existence: bool
+ Set to True to check if the external task exists or check if
+ the DAG to wait for exists. (Default: True)
+ """
+ ...
+
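As a hedged usage illustration of the flow-level `@airflow_external_task_sensor` decorator added above (the DAG id and flow name are invented, and only a subset of parameters is shown on the assumption that the rest fall back to the defaults quoted in the docstring; the stub signature itself lists them all as keyword arguments):

# Hedged sketch, not part of the stub diff. Only meaningful when the flow is
# compiled for Airflow with `airflow create`; omitted parameters are assumed to
# take the defaults quoted in the docstring above.
from metaflow import FlowSpec, airflow_external_task_sensor, step

@airflow_external_task_sensor(
    name="wait_for_ingest",
    description="Block the start step until the nightly ingest DAG succeeds",
    external_dag_id="nightly_ingest",   # invented upstream DAG id
    timeout=3600,
    poke_interval=60,
)
class DownstreamFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    DownstreamFlow()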
+ @typing.overload
+ def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ """
+ Specifies the PyPI packages for all steps of the flow.
+
+ Use `@pypi_base` to set common packages required by all
+ steps and use `@pypi` to specify step-specific overrides.
+
+ Parameters
+ ----------
+ packages : Dict[str, str], default: {}
+ Packages to use for this flow. The key is the name of the package
+ and the value is the version to use.
+ python : str, optional, default: None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ """
+ ...
+
+ @typing.overload
+ def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ ...
+
+ def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
+ """
+ Specifies the PyPI packages for all steps of the flow.
+
+ Use `@pypi_base` to set common packages required by all
+ steps and use `@pypi` to specify step-specific overrides.
+
+ Parameters
+ ----------
+ packages : Dict[str, str], default: {}
+ Packages to use for this flow. The key is the name of the package
+ and the value is the version to use.
+ python : str, optional, default: None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ """
+ ...
+
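As a usage illustration of the flow-level `@pypi_base` decorator added above (the package pins are invented; this example is not part of the package diff):

# Illustrative only. Typically run with the PyPI environment enabled, e.g.
#   python pypi_base_flow.py --environment=pypi run
from metaflow import FlowSpec, pypi_base, step

@pypi_base(packages={"pandas": "2.2.2", "requests": "2.32.3"}, python="3.11.9")
class PypiBaseFlow(FlowSpec):

    @step
    def start(self):
        import pandas as pd  # resolved from the flow-level PyPI environment
        print(pd.__version__)
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    PypiBaseFlow()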
  @typing.overload
  def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
@@ -1077,57 +1153,6 @@ def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly:
  """
  ...

- @typing.overload
- def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
- """
- Specifies the Conda environment for all steps of the flow.
-
- Use `@conda_base` to set common libraries required by all
- steps and use `@conda` to specify step-specific additions.
-
-
- Parameters
- ----------
- packages : Dict[str, str], default {}
- Packages to use for this flow. The key is the name of the package
- and the value is the version to use.
- libraries : Dict[str, str], default {}
- Supported for backward compatibility. When used with packages, packages will take precedence.
- python : str, optional, default None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
- disabled : bool, default False
- If set to True, disables Conda.
- """
- ...
-
- @typing.overload
- def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
- ...
-
- def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
- """
- Specifies the Conda environment for all steps of the flow.
-
- Use `@conda_base` to set common libraries required by all
- steps and use `@conda` to specify step-specific additions.
-
-
- Parameters
- ----------
- packages : Dict[str, str], default {}
- Packages to use for this flow. The key is the name of the package
- and the value is the version to use.
- libraries : Dict[str, str], default {}
- Supported for backward compatibility. When used with packages, packages will take precedence.
- python : str, optional, default None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
- disabled : bool, default False
- If set to True, disables Conda.
- """
- ...
-
  @typing.overload
  def trigger_on_finish(*, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
@@ -1229,79 +1254,54 @@ def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *
  """
  ...

- def project(*, name: str, branch: typing.Optional[str] = None, production: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
- """
- Specifies what flows belong to the same project.
-
- A project-specific namespace is created for all flows that
- use the same `@project(name)`.
-
-
- Parameters
- ----------
- name : str
- Project name. Make sure that the name is unique amongst all
- projects that use the same production scheduler. The name may
- contain only lowercase alphanumeric characters and underscores.
-
- branch : Optional[str], default None
- The branch to use. If not specified, the branch is set to
- `user.<username>` unless `production` is set to `True`. This can
- also be set on the command line using `--branch` as a top-level option.
- It is an error to specify `branch` in the decorator and on the command line.
-
- production : bool, default False
- Whether or not the branch is the production branch. This can also be set on the
- command line using `--production` as a top-level option. It is an error to specify
- `production` in the decorator and on the command line.
- The project branch name will be:
- - if `branch` is specified:
- - if `production` is True: `prod.<branch>`
- - if `production` is False: `test.<branch>`
- - if `branch` is not specified:
- - if `production` is True: `prod`
- - if `production` is False: `user.<username>`
- """
- ...
-
  @typing.overload
- def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
- Specifies the PyPI packages for all steps of the flow.
+ Specifies the Conda environment for all steps of the flow.
+
+ Use `@conda_base` to set common libraries required by all
+ steps and use `@conda` to specify step-specific additions.

- Use `@pypi_base` to set common packages required by all
- steps and use `@pypi` to specify step-specific overrides.

  Parameters
  ----------
- packages : Dict[str, str], default: {}
+ packages : Dict[str, str], default {}
  Packages to use for this flow. The key is the name of the package
  and the value is the version to use.
- python : str, optional, default: None
+ libraries : Dict[str, str], default {}
+ Supported for backward compatibility. When used with packages, packages will take precedence.
+ python : str, optional, default None
  Version of Python to use, e.g. '3.7.4'. A default value of None implies
  that the version used will correspond to the version of the Python interpreter used to start the run.
+ disabled : bool, default False
+ If set to True, disables Conda.
  """
  ...

  @typing.overload
- def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
  ...

- def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
+ def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
  """
- Specifies the PyPI packages for all steps of the flow.
+ Specifies the Conda environment for all steps of the flow.
+
+ Use `@conda_base` to set common libraries required by all
+ steps and use `@conda` to specify step-specific additions.

- Use `@pypi_base` to set common packages required by all
- steps and use `@pypi` to specify step-specific overrides.

  Parameters
  ----------
- packages : Dict[str, str], default: {}
+ packages : Dict[str, str], default {}
  Packages to use for this flow. The key is the name of the package
  and the value is the version to use.
- python : str, optional, default: None
+ libraries : Dict[str, str], default {}
+ Supported for backward compatibility. When used with packages, packages will take precedence.
+ python : str, optional, default None
  Version of Python to use, e.g. '3.7.4'. A default value of None implies
  that the version used will correspond to the version of the Python interpreter used to start the run.
+ disabled : bool, default False
+ If set to True, disables Conda.
  """
  ...
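As a usage illustration of the flow-level `@conda_base` decorator whose stub moves here (the package pins are invented; this example is not part of the package diff):

# Illustrative only. Typically run with the Conda environment enabled, e.g.
#   python conda_base_flow.py --environment=conda run
from metaflow import FlowSpec, conda_base, step

@conda_base(packages={"numpy": "1.26.4", "scikit-learn": "1.4.2"}, python="3.10.14")
class CondaBaseFlow(FlowSpec):

    @step
    def start(self):
        import numpy as np  # resolved from the flow-level Conda environment
        print(np.__version__)
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    CondaBaseFlow()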