metaflow-stubs 2.17.4__py2.py3-none-any.whl → 2.17.5__py2.py3-none-any.whl

This diff compares the contents of two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.

This release of metaflow-stubs has been flagged as potentially problematic; see the registry's advisory for details.

Files changed (166)
  1. metaflow-stubs/__init__.pyi +504 -504
  2. metaflow-stubs/cards.pyi +2 -2
  3. metaflow-stubs/cli.pyi +2 -2
  4. metaflow-stubs/cli_components/__init__.pyi +2 -2
  5. metaflow-stubs/cli_components/utils.pyi +2 -2
  6. metaflow-stubs/client/__init__.pyi +2 -2
  7. metaflow-stubs/client/core.pyi +6 -6
  8. metaflow-stubs/client/filecache.pyi +3 -3
  9. metaflow-stubs/events.pyi +3 -3
  10. metaflow-stubs/exception.pyi +2 -2
  11. metaflow-stubs/flowspec.pyi +5 -5
  12. metaflow-stubs/generated_for.txt +1 -1
  13. metaflow-stubs/includefile.pyi +4 -4
  14. metaflow-stubs/meta_files.pyi +2 -2
  15. metaflow-stubs/metadata_provider/__init__.pyi +2 -2
  16. metaflow-stubs/metadata_provider/heartbeat.pyi +2 -2
  17. metaflow-stubs/metadata_provider/metadata.pyi +3 -3
  18. metaflow-stubs/metadata_provider/util.pyi +2 -2
  19. metaflow-stubs/metaflow_config.pyi +2 -2
  20. metaflow-stubs/metaflow_current.pyi +4 -4
  21. metaflow-stubs/metaflow_git.pyi +2 -2
  22. metaflow-stubs/multicore_utils.pyi +2 -2
  23. metaflow-stubs/packaging_sys/__init__.pyi +5 -5
  24. metaflow-stubs/packaging_sys/backend.pyi +2 -2
  25. metaflow-stubs/packaging_sys/distribution_support.pyi +5 -5
  26. metaflow-stubs/packaging_sys/tar_backend.pyi +4 -4
  27. metaflow-stubs/packaging_sys/utils.pyi +2 -2
  28. metaflow-stubs/packaging_sys/v1.pyi +4 -4
  29. metaflow-stubs/parameters.pyi +4 -4
  30. metaflow-stubs/plugins/__init__.pyi +13 -13
  31. metaflow-stubs/plugins/airflow/__init__.pyi +2 -2
  32. metaflow-stubs/plugins/airflow/airflow_decorator.pyi +2 -2
  33. metaflow-stubs/plugins/airflow/airflow_utils.pyi +2 -2
  34. metaflow-stubs/plugins/airflow/exception.pyi +2 -2
  35. metaflow-stubs/plugins/airflow/sensors/__init__.pyi +2 -2
  36. metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +2 -2
  37. metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +2 -2
  38. metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +2 -2
  39. metaflow-stubs/plugins/argo/__init__.pyi +2 -2
  40. metaflow-stubs/plugins/argo/argo_client.pyi +2 -2
  41. metaflow-stubs/plugins/argo/argo_events.pyi +2 -2
  42. metaflow-stubs/plugins/argo/argo_workflows.pyi +5 -3
  43. metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +3 -3
  44. metaflow-stubs/plugins/argo/argo_workflows_deployer.pyi +5 -5
  45. metaflow-stubs/plugins/argo/argo_workflows_deployer_objects.pyi +4 -4
  46. metaflow-stubs/plugins/argo/exit_hooks.pyi +3 -3
  47. metaflow-stubs/plugins/aws/__init__.pyi +3 -3
  48. metaflow-stubs/plugins/aws/aws_client.pyi +2 -2
  49. metaflow-stubs/plugins/aws/aws_utils.pyi +2 -2
  50. metaflow-stubs/plugins/aws/batch/__init__.pyi +2 -2
  51. metaflow-stubs/plugins/aws/batch/batch.pyi +2 -2
  52. metaflow-stubs/plugins/aws/batch/batch_client.pyi +2 -2
  53. metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +2 -2
  54. metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +2 -2
  55. metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +5 -5
  56. metaflow-stubs/plugins/aws/step_functions/__init__.pyi +2 -2
  57. metaflow-stubs/plugins/aws/step_functions/dynamo_db_client.pyi +2 -2
  58. metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +2 -2
  59. metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +2 -2
  60. metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +2 -2
  61. metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +2 -2
  62. metaflow-stubs/plugins/aws/step_functions/step_functions_decorator.pyi +2 -2
  63. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer.pyi +4 -4
  64. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer_objects.pyi +3 -3
  65. metaflow-stubs/plugins/azure/__init__.pyi +2 -2
  66. metaflow-stubs/plugins/azure/azure_credential.pyi +2 -2
  67. metaflow-stubs/plugins/azure/azure_exceptions.pyi +2 -2
  68. metaflow-stubs/plugins/azure/azure_secret_manager_secrets_provider.pyi +5 -5
  69. metaflow-stubs/plugins/azure/azure_utils.pyi +2 -2
  70. metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +2 -2
  71. metaflow-stubs/plugins/azure/includefile_support.pyi +2 -2
  72. metaflow-stubs/plugins/cards/__init__.pyi +2 -2
  73. metaflow-stubs/plugins/cards/card_client.pyi +3 -3
  74. metaflow-stubs/plugins/cards/card_creator.pyi +2 -2
  75. metaflow-stubs/plugins/cards/card_datastore.pyi +2 -2
  76. metaflow-stubs/plugins/cards/card_decorator.pyi +3 -3
  77. metaflow-stubs/plugins/cards/card_modules/__init__.pyi +2 -2
  78. metaflow-stubs/plugins/cards/card_modules/basic.pyi +2 -2
  79. metaflow-stubs/plugins/cards/card_modules/card.pyi +2 -2
  80. metaflow-stubs/plugins/cards/card_modules/components.pyi +4 -4
  81. metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +2 -2
  82. metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +2 -2
  83. metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +2 -2
  84. metaflow-stubs/plugins/cards/card_resolver.pyi +2 -2
  85. metaflow-stubs/plugins/cards/component_serializer.pyi +2 -2
  86. metaflow-stubs/plugins/cards/exception.pyi +2 -2
  87. metaflow-stubs/plugins/catch_decorator.pyi +3 -3
  88. metaflow-stubs/plugins/datatools/__init__.pyi +2 -2
  89. metaflow-stubs/plugins/datatools/local.pyi +2 -2
  90. metaflow-stubs/plugins/datatools/s3/__init__.pyi +2 -2
  91. metaflow-stubs/plugins/datatools/s3/s3.pyi +4 -4
  92. metaflow-stubs/plugins/datatools/s3/s3tail.pyi +2 -2
  93. metaflow-stubs/plugins/datatools/s3/s3util.pyi +2 -2
  94. metaflow-stubs/plugins/debug_logger.pyi +2 -2
  95. metaflow-stubs/plugins/debug_monitor.pyi +2 -2
  96. metaflow-stubs/plugins/environment_decorator.pyi +2 -2
  97. metaflow-stubs/plugins/events_decorator.pyi +2 -2
  98. metaflow-stubs/plugins/exit_hook/__init__.pyi +2 -2
  99. metaflow-stubs/plugins/exit_hook/exit_hook_decorator.pyi +2 -2
  100. metaflow-stubs/plugins/frameworks/__init__.pyi +2 -2
  101. metaflow-stubs/plugins/frameworks/pytorch.pyi +2 -2
  102. metaflow-stubs/plugins/gcp/__init__.pyi +2 -2
  103. metaflow-stubs/plugins/gcp/gcp_secret_manager_secrets_provider.pyi +5 -5
  104. metaflow-stubs/plugins/gcp/gs_exceptions.pyi +2 -2
  105. metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +2 -2
  106. metaflow-stubs/plugins/gcp/gs_utils.pyi +2 -2
  107. metaflow-stubs/plugins/gcp/includefile_support.pyi +2 -2
  108. metaflow-stubs/plugins/kubernetes/__init__.pyi +2 -2
  109. metaflow-stubs/plugins/kubernetes/kube_utils.pyi +3 -3
  110. metaflow-stubs/plugins/kubernetes/kubernetes.pyi +2 -2
  111. metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +2 -2
  112. metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +2 -2
  113. metaflow-stubs/plugins/kubernetes/kubernetes_job.pyi +2 -2
  114. metaflow-stubs/plugins/kubernetes/kubernetes_jobsets.pyi +2 -2
  115. metaflow-stubs/plugins/kubernetes/spot_monitor_sidecar.pyi +2 -2
  116. metaflow-stubs/plugins/parallel_decorator.pyi +2 -2
  117. metaflow-stubs/plugins/project_decorator.pyi +2 -2
  118. metaflow-stubs/plugins/pypi/__init__.pyi +2 -2
  119. metaflow-stubs/plugins/pypi/conda_decorator.pyi +2 -2
  120. metaflow-stubs/plugins/pypi/conda_environment.pyi +5 -5
  121. metaflow-stubs/plugins/pypi/parsers.pyi +2 -2
  122. metaflow-stubs/plugins/pypi/pypi_decorator.pyi +2 -2
  123. metaflow-stubs/plugins/pypi/pypi_environment.pyi +2 -2
  124. metaflow-stubs/plugins/pypi/utils.pyi +2 -2
  125. metaflow-stubs/plugins/resources_decorator.pyi +2 -2
  126. metaflow-stubs/plugins/retry_decorator.pyi +2 -2
  127. metaflow-stubs/plugins/secrets/__init__.pyi +2 -2
  128. metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +4 -4
  129. metaflow-stubs/plugins/secrets/secrets_decorator.pyi +2 -2
  130. metaflow-stubs/plugins/secrets/secrets_func.pyi +2 -2
  131. metaflow-stubs/plugins/secrets/secrets_spec.pyi +2 -2
  132. metaflow-stubs/plugins/secrets/utils.pyi +2 -2
  133. metaflow-stubs/plugins/storage_executor.pyi +2 -2
  134. metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +3 -3
  135. metaflow-stubs/plugins/timeout_decorator.pyi +3 -3
  136. metaflow-stubs/plugins/uv/__init__.pyi +2 -2
  137. metaflow-stubs/plugins/uv/uv_environment.pyi +3 -3
  138. metaflow-stubs/pylint_wrapper.pyi +2 -2
  139. metaflow-stubs/runner/__init__.pyi +2 -2
  140. metaflow-stubs/runner/deployer.pyi +34 -34
  141. metaflow-stubs/runner/deployer_impl.pyi +3 -3
  142. metaflow-stubs/runner/metaflow_runner.pyi +4 -4
  143. metaflow-stubs/runner/nbdeploy.pyi +2 -2
  144. metaflow-stubs/runner/nbrun.pyi +2 -2
  145. metaflow-stubs/runner/subprocess_manager.pyi +2 -2
  146. metaflow-stubs/runner/utils.pyi +5 -5
  147. metaflow-stubs/system/__init__.pyi +2 -2
  148. metaflow-stubs/system/system_logger.pyi +3 -3
  149. metaflow-stubs/system/system_monitor.pyi +2 -2
  150. metaflow-stubs/tagging_util.pyi +2 -2
  151. metaflow-stubs/tuple_util.pyi +2 -2
  152. metaflow-stubs/user_configs/__init__.pyi +2 -2
  153. metaflow-stubs/user_configs/config_options.pyi +3 -3
  154. metaflow-stubs/user_configs/config_parameters.pyi +7 -7
  155. metaflow-stubs/user_decorators/__init__.pyi +2 -2
  156. metaflow-stubs/user_decorators/common.pyi +2 -2
  157. metaflow-stubs/user_decorators/mutable_flow.pyi +5 -5
  158. metaflow-stubs/user_decorators/mutable_step.pyi +6 -6
  159. metaflow-stubs/user_decorators/user_flow_decorator.pyi +5 -5
  160. metaflow-stubs/user_decorators/user_step_decorator.pyi +7 -7
  161. metaflow-stubs/version.pyi +2 -2
  162. {metaflow_stubs-2.17.4.dist-info → metaflow_stubs-2.17.5.dist-info}/METADATA +2 -2
  163. metaflow_stubs-2.17.5.dist-info/RECORD +166 -0
  164. metaflow_stubs-2.17.4.dist-info/RECORD +0 -166
  165. {metaflow_stubs-2.17.4.dist-info → metaflow_stubs-2.17.5.dist-info}/WHEEL +0 -0
  166. {metaflow_stubs-2.17.4.dist-info → metaflow_stubs-2.17.5.dist-info}/top_level.txt +0 -0
@@ -1,7 +1,7 @@
  ######################################################################################################
  # Auto-generated Metaflow stub file #
- # MF version: 2.17.4 #
- # Generated on 2025-08-25T17:38:02.332471 #
+ # MF version: 2.17.5 #
+ # Generated on 2025-08-25T21:01:36.499633 #
  ######################################################################################################

  from __future__ import annotations
@@ -39,16 +39,16 @@ from .user_decorators.user_step_decorator import UserStepDecorator as UserStepDe
  from .user_decorators.user_step_decorator import StepMutator as StepMutator
  from .user_decorators.user_step_decorator import user_step_decorator as user_step_decorator
  from .user_decorators.user_flow_decorator import FlowMutator as FlowMutator
- from . import metaflow_git as metaflow_git
  from . import tuple_util as tuple_util
+ from . import metaflow_git as metaflow_git
  from . import events as events
  from . import runner as runner
  from . import plugins as plugins
  from .plugins.datatools.s3.s3 import S3 as S3
  from . import includefile as includefile
  from .includefile import IncludeFile as IncludeFile
- from .plugins.pypi.parsers import conda_environment_yml_parser as conda_environment_yml_parser
  from .plugins.pypi.parsers import pyproject_toml_parser as pyproject_toml_parser
+ from .plugins.pypi.parsers import conda_environment_yml_parser as conda_environment_yml_parser
  from .plugins.pypi.parsers import requirements_txt_parser as requirements_txt_parser
  from . import cards as cards
  from . import client as client
@@ -153,143 +153,170 @@ def step(f: typing.Union[typing.Callable[[FlowSpecDerived], None], typing.Callab
  ...

  @typing.overload
- def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Specifies that the step will success under all circumstances.
-
- The decorator will create an optional artifact, specified by `var`, which
- contains the exception raised. You can use it to detect the presence
- of errors, indicating that all happy-path artifacts produced by the step
- are missing.
-
-
- Parameters
- ----------
- var : str, optional, default None
- Name of the artifact in which to store the caught exception.
- If not specified, the exception is not stored.
- print_exception : bool, default True
- Determines whether or not the exception is printed to
- stdout when caught.
- """
- ...
-
- @typing.overload
- def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
- """
- Specifies that the step will success under all circumstances.
-
- The decorator will create an optional artifact, specified by `var`, which
- contains the exception raised. You can use it to detect the presence
- of errors, indicating that all happy-path artifacts produced by the step
- are missing.
-
-
- Parameters
- ----------
- var : str, optional, default None
- Name of the artifact in which to store the caught exception.
- If not specified, the exception is not stored.
- print_exception : bool, default True
- Determines whether or not the exception is printed to
- stdout when caught.
- """
- ...
-
- @typing.overload
- def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def batch(*, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = 'METAFLOW_BATCH_JOB_QUEUE', iam_role: str = 'METAFLOW_ECS_S3_ACCESS_IAM_ROLE', execution_role: str = 'METAFLOW_ECS_FARGATE_EXECUTION_ROLE', shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies secrets to be retrieved and injected as environment variables prior to
- the execution of a step.
+ Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).


  Parameters
  ----------
- sources : List[Union[str, Dict[str, Any]]], default: []
- List of secret specs, defining how the secrets are to be retrieved
- role : str, optional, default: None
- Role to use for fetching secrets
+ cpu : int, default 1
+ Number of CPUs required for this step. If `@resources` is
+ also present, the maximum value from all decorators is used.
+ gpu : int, default 0
+ Number of GPUs required for this step. If `@resources` is
+ also present, the maximum value from all decorators is used.
+ memory : int, default 4096
+ Memory size (in MB) required for this step. If
+ `@resources` is also present, the maximum value from all decorators is
+ used.
+ image : str, optional, default None
+ Docker image to use when launching on AWS Batch. If not specified, and
+ METAFLOW_BATCH_CONTAINER_IMAGE is specified, that image is used. If
+ not, a default Docker image mapping to the current version of Python is used.
+ queue : str, default METAFLOW_BATCH_JOB_QUEUE
+ AWS Batch Job Queue to submit the job to.
+ iam_role : str, default METAFLOW_ECS_S3_ACCESS_IAM_ROLE
+ AWS IAM role that AWS Batch container uses to access AWS cloud resources.
+ execution_role : str, default METAFLOW_ECS_FARGATE_EXECUTION_ROLE
+ AWS IAM role that AWS Batch can use [to trigger AWS Fargate tasks]
+ (https://docs.aws.amazon.com/batch/latest/userguide/execution-IAM-role.html).
+ shared_memory : int, optional, default None
+ The value for the size (in MiB) of the /dev/shm volume for this step.
+ This parameter maps to the `--shm-size` option in Docker.
+ max_swap : int, optional, default None
+ The total amount of swap memory (in MiB) a container can use for this
+ step. This parameter is translated to the `--memory-swap` option in
+ Docker where the value is the sum of the container memory plus the
+ `max_swap` value.
+ swappiness : int, optional, default None
+ This allows you to tune memory swappiness behavior for this step.
+ A swappiness value of 0 causes swapping not to happen unless absolutely
+ necessary. A swappiness value of 100 causes pages to be swapped very
+ aggressively. Accepted values are whole numbers between 0 and 100.
+ use_tmpfs : bool, default False
+ This enables an explicit tmpfs mount for this step. Note that tmpfs is
+ not available on Fargate compute environments
+ tmpfs_tempdir : bool, default True
+ sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
+ tmpfs_size : int, optional, default None
+ The value for the size (in MiB) of the tmpfs mount for this step.
+ This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
+ memory allocated for this step.
+ tmpfs_path : str, optional, default None
+ Path to tmpfs mount for this step. Defaults to /metaflow_temp.
+ inferentia : int, default 0
+ Number of Inferentia chips required for this step.
+ trainium : int, default None
+ Alias for inferentia. Use only one of the two.
+ efa : int, default 0
+ Number of elastic fabric adapter network devices to attach to container
+ ephemeral_storage : int, default None
+ The total amount, in GiB, of ephemeral storage to set for the task, 21-200GiB.
+ This is only relevant for Fargate compute environments
+ log_driver: str, optional, default None
+ The log driver to use for the Amazon ECS container.
+ log_options: List[str], optional, default None
+ List of strings containing options for the chosen log driver. The configurable values
+ depend on the `log driver` chosen. Validation of these options is not supported yet.
+ Example: [`awslogs-group:aws/batch/job`]
  """
  ...

  @typing.overload
- def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def batch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def batch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None):
+ def batch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = 'METAFLOW_BATCH_JOB_QUEUE', iam_role: str = 'METAFLOW_ECS_S3_ACCESS_IAM_ROLE', execution_role: str = 'METAFLOW_ECS_FARGATE_EXECUTION_ROLE', shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None):
  """
- Specifies secrets to be retrieved and injected as environment variables prior to
- the execution of a step.
+ Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).


  Parameters
  ----------
- sources : List[Union[str, Dict[str, Any]]], default: []
- List of secret specs, defining how the secrets are to be retrieved
- role : str, optional, default: None
- Role to use for fetching secrets
+ cpu : int, default 1
+ Number of CPUs required for this step. If `@resources` is
+ also present, the maximum value from all decorators is used.
+ gpu : int, default 0
+ Number of GPUs required for this step. If `@resources` is
+ also present, the maximum value from all decorators is used.
+ memory : int, default 4096
+ Memory size (in MB) required for this step. If
+ `@resources` is also present, the maximum value from all decorators is
+ used.
+ image : str, optional, default None
+ Docker image to use when launching on AWS Batch. If not specified, and
+ METAFLOW_BATCH_CONTAINER_IMAGE is specified, that image is used. If
+ not, a default Docker image mapping to the current version of Python is used.
+ queue : str, default METAFLOW_BATCH_JOB_QUEUE
+ AWS Batch Job Queue to submit the job to.
+ iam_role : str, default METAFLOW_ECS_S3_ACCESS_IAM_ROLE
+ AWS IAM role that AWS Batch container uses to access AWS cloud resources.
+ execution_role : str, default METAFLOW_ECS_FARGATE_EXECUTION_ROLE
+ AWS IAM role that AWS Batch can use [to trigger AWS Fargate tasks]
+ (https://docs.aws.amazon.com/batch/latest/userguide/execution-IAM-role.html).
+ shared_memory : int, optional, default None
+ The value for the size (in MiB) of the /dev/shm volume for this step.
+ This parameter maps to the `--shm-size` option in Docker.
+ max_swap : int, optional, default None
+ The total amount of swap memory (in MiB) a container can use for this
+ step. This parameter is translated to the `--memory-swap` option in
+ Docker where the value is the sum of the container memory plus the
+ `max_swap` value.
+ swappiness : int, optional, default None
+ This allows you to tune memory swappiness behavior for this step.
+ A swappiness value of 0 causes swapping not to happen unless absolutely
+ necessary. A swappiness value of 100 causes pages to be swapped very
+ aggressively. Accepted values are whole numbers between 0 and 100.
+ use_tmpfs : bool, default False
+ This enables an explicit tmpfs mount for this step. Note that tmpfs is
+ not available on Fargate compute environments
+ tmpfs_tempdir : bool, default True
+ sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
+ tmpfs_size : int, optional, default None
+ The value for the size (in MiB) of the tmpfs mount for this step.
+ This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
+ memory allocated for this step.
+ tmpfs_path : str, optional, default None
+ Path to tmpfs mount for this step. Defaults to /metaflow_temp.
+ inferentia : int, default 0
+ Number of Inferentia chips required for this step.
+ trainium : int, default None
+ Alias for inferentia. Use only one of the two.
+ efa : int, default 0
+ Number of elastic fabric adapter network devices to attach to container
+ ephemeral_storage : int, default None
+ The total amount, in GiB, of ephemeral storage to set for the task, 21-200GiB.
+ This is only relevant for Fargate compute environments
+ log_driver: str, optional, default None
+ The log driver to use for the Amazon ECS container.
+ log_options: List[str], optional, default None
+ List of strings containing options for the chosen log driver. The configurable values
+ depend on the `log driver` chosen. Validation of these options is not supported yet.
+ Example: [`awslogs-group:aws/batch/job`]
  """
  ...

  @typing.overload
- def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  """
- Specifies the PyPI packages for the step.
-
- Information in this decorator will augment any
- attributes set in the `@pyi_base` flow-level decorator. Hence,
- you can use `@pypi_base` to set packages required by all
- steps and use `@pypi` to specify step-specific overrides.
-
-
- Parameters
- ----------
- packages : Dict[str, str], default: {}
- Packages to use for this step. The key is the name of the package
- and the value is the version to use.
- python : str, optional, default: None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
+ Decorator prototype for all step decorators. This function gets specialized
+ and imported for all decorators types by _import_plugin_decorators().
  """
  ...

  @typing.overload
- def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
+ def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
  """
- Specifies the PyPI packages for the step.
-
- Information in this decorator will augment any
- attributes set in the `@pyi_base` flow-level decorator. Hence,
- you can use `@pypi_base` to set packages required by all
- steps and use `@pypi` to specify step-specific overrides.
-
-
- Parameters
- ----------
- packages : Dict[str, str], default: {}
- Packages to use for this step. The key is the name of the package
- and the value is the version to use.
- python : str, optional, default: None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
+ Decorator prototype for all step decorators. This function gets specialized
+ and imported for all decorators types by _import_plugin_decorators().
  """
  ...

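For context (not part of the diff): the `@batch` stub documented in the hunk above mirrors the runtime decorator, which is applied directly above `@step`. A minimal, illustrative sketch; the queue name and resource values are hypothetical, and the remaining options fall back to Metaflow configuration as described in the docstring.

from metaflow import FlowSpec, step, batch

class BatchDemoFlow(FlowSpec):

    # Illustrative values; omit queue/iam_role to use the configured defaults.
    @batch(cpu=2, memory=8192, queue="my-batch-queue")
    @step
    def start(self):
        self.message = "ran on AWS Batch"
        self.next(self.end)

    @step
    def end(self):
        print(self.message)

if __name__ == "__main__":
    BatchDemoFlow()
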
@@ -373,18 +400,67 @@ def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None]
  ...

  @typing.overload
- def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def card(*, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies a timeout for your step.
-
- This decorator is useful if this step may hang indefinitely.
-
- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
+ Creates a human-readable report, a Metaflow Card, after this step completes.

- Note that all the values specified in parameters are added together so if you specify
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
+ Note that you may add multiple `@card` decorators in a step with different parameters.
+
+
+ Parameters
+ ----------
+ type : str, default 'default'
+ Card type.
+ id : str, optional, default None
+ If multiple cards are present, use this id to identify this card.
+ options : Dict[str, Any], default {}
+ Options passed to the card. The contents depend on the card type.
+ timeout : int, default 45
+ Interrupt reporting if it takes more than this many seconds.
+ """
+ ...
+
+ @typing.overload
+ def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ ...
+
+ @typing.overload
+ def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
+ """
+ Creates a human-readable report, a Metaflow Card, after this step completes.
+
+ Note that you may add multiple `@card` decorators in a step with different parameters.
+
+
+ Parameters
+ ----------
+ type : str, default 'default'
+ Card type.
+ id : str, optional, default None
+ If multiple cards are present, use this id to identify this card.
+ options : Dict[str, Any], default {}
+ Options passed to the card. The contents depend on the card type.
+ timeout : int, default 45
+ Interrupt reporting if it takes more than this many seconds.
+ """
+ ...
+
+ @typing.overload
+ def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Specifies a timeout for your step.
+
+ This decorator is useful if this step may hang indefinitely.
+
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.
+
+ Note that all the values specified in parameters are added together so if you specify
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.


  Parameters
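For context (not part of the diff): the `@card` and `@timeout` stubs in the hunk above correspond to the runtime decorators. A minimal sketch, assuming the default card type and the documented `current.card` API; timeout values are illustrative.

from metaflow import FlowSpec, step, card, timeout, current
from metaflow.cards import Markdown

class CardDemoFlow(FlowSpec):

    # One card per @card decorator; reporting is cut off after `timeout` seconds.
    @card(type="default", timeout=60)
    @timeout(minutes=30)
    @step
    def start(self):
        # Append a component to the card attached to this step.
        current.card.append(Markdown("# Step summary"))
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    CardDemoFlow()
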
@@ -432,54 +508,104 @@ def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None],
  ...

  @typing.overload
- def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Decorator prototype for all step decorators. This function gets specialized
- and imported for all decorators types by _import_plugin_decorators().
+ Specifies that the step will success under all circumstances.
+
+ The decorator will create an optional artifact, specified by `var`, which
+ contains the exception raised. You can use it to detect the presence
+ of errors, indicating that all happy-path artifacts produced by the step
+ are missing.
+
+
+ Parameters
+ ----------
+ var : str, optional, default None
+ Name of the artifact in which to store the caught exception.
+ If not specified, the exception is not stored.
+ print_exception : bool, default True
+ Determines whether or not the exception is printed to
+ stdout when caught.
  """
  ...

  @typing.overload
- def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

- def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
+ @typing.overload
+ def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
  """
- Decorator prototype for all step decorators. This function gets specialized
- and imported for all decorators types by _import_plugin_decorators().
+ Specifies that the step will success under all circumstances.
+
+ The decorator will create an optional artifact, specified by `var`, which
+ contains the exception raised. You can use it to detect the presence
+ of errors, indicating that all happy-path artifacts produced by the step
+ are missing.
+
+
+ Parameters
+ ----------
+ var : str, optional, default None
+ Name of the artifact in which to store the caught exception.
+ If not specified, the exception is not stored.
+ print_exception : bool, default True
+ Determines whether or not the exception is printed to
+ stdout when caught.
  """
  ...

  @typing.overload
- def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies environment variables to be set prior to the execution of a step.
+ Specifies the PyPI packages for the step.
+
+ Information in this decorator will augment any
+ attributes set in the `@pyi_base` flow-level decorator. Hence,
+ you can use `@pypi_base` to set packages required by all
+ steps and use `@pypi` to specify step-specific overrides.


  Parameters
  ----------
- vars : Dict[str, str], default {}
- Dictionary of environment variables to set.
+ packages : Dict[str, str], default: {}
+ Packages to use for this step. The key is the name of the package
+ and the value is the version to use.
+ python : str, optional, default: None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
  """
  ...

  @typing.overload
- def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
+ def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
  """
- Specifies environment variables to be set prior to the execution of a step.
+ Specifies the PyPI packages for the step.
+
+ Information in this decorator will augment any
+ attributes set in the `@pyi_base` flow-level decorator. Hence,
+ you can use `@pypi_base` to set packages required by all
+ steps and use `@pypi` to specify step-specific overrides.


  Parameters
  ----------
- vars : Dict[str, str], default {}
- Dictionary of environment variables to set.
+ packages : Dict[str, str], default: {}
+ Packages to use for this step. The key is the name of the package
+ and the value is the version to use.
+ python : str, optional, default: None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
  """
  ...

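For context (not part of the diff): the `@catch` and `@pypi` stubs in the hunk above describe decorators that are commonly stacked with `@retry`. A minimal sketch; package pins, the Python version, and the artifact name are illustrative.

from metaflow import FlowSpec, step, catch, retry, pypi

class RobustFlow(FlowSpec):

    # The exception, if any, is stored in the 'train_error' artifact.
    @catch(var="train_error", print_exception=False)
    @retry(times=2)
    @pypi(packages={"scikit-learn": "1.4.2"}, python="3.11")
    @step
    def start(self):
        import sklearn  # resolved from the isolated @pypi environment
        self.next(self.end)

    @step
    def end(self):
        # The artifact is only meaningful if the step actually failed.
        if getattr(self, "train_error", None) is not None:
            print("start failed:", self.train_error)

if __name__ == "__main__":
    RobustFlow()

Running a flow that uses @pypi typically requires enabling the pypi environment, e.g. `python flow.py --environment=pypi run`.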
@@ -539,200 +665,61 @@ def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
  ...

  @typing.overload
- def batch(*, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = 'METAFLOW_BATCH_JOB_QUEUE', iam_role: str = 'METAFLOW_ECS_S3_ACCESS_IAM_ROLE', execution_role: str = 'METAFLOW_ECS_FARGATE_EXECUTION_ROLE', shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
-
-
- Parameters
- ----------
- cpu : int, default 1
- Number of CPUs required for this step. If `@resources` is
- also present, the maximum value from all decorators is used.
- gpu : int, default 0
- Number of GPUs required for this step. If `@resources` is
- also present, the maximum value from all decorators is used.
- memory : int, default 4096
- Memory size (in MB) required for this step. If
- `@resources` is also present, the maximum value from all decorators is
- used.
- image : str, optional, default None
- Docker image to use when launching on AWS Batch. If not specified, and
- METAFLOW_BATCH_CONTAINER_IMAGE is specified, that image is used. If
- not, a default Docker image mapping to the current version of Python is used.
- queue : str, default METAFLOW_BATCH_JOB_QUEUE
- AWS Batch Job Queue to submit the job to.
- iam_role : str, default METAFLOW_ECS_S3_ACCESS_IAM_ROLE
- AWS IAM role that AWS Batch container uses to access AWS cloud resources.
- execution_role : str, default METAFLOW_ECS_FARGATE_EXECUTION_ROLE
- AWS IAM role that AWS Batch can use [to trigger AWS Fargate tasks]
- (https://docs.aws.amazon.com/batch/latest/userguide/execution-IAM-role.html).
- shared_memory : int, optional, default None
- The value for the size (in MiB) of the /dev/shm volume for this step.
- This parameter maps to the `--shm-size` option in Docker.
- max_swap : int, optional, default None
- The total amount of swap memory (in MiB) a container can use for this
- step. This parameter is translated to the `--memory-swap` option in
- Docker where the value is the sum of the container memory plus the
- `max_swap` value.
- swappiness : int, optional, default None
- This allows you to tune memory swappiness behavior for this step.
- A swappiness value of 0 causes swapping not to happen unless absolutely
- necessary. A swappiness value of 100 causes pages to be swapped very
- aggressively. Accepted values are whole numbers between 0 and 100.
- use_tmpfs : bool, default False
- This enables an explicit tmpfs mount for this step. Note that tmpfs is
- not available on Fargate compute environments
- tmpfs_tempdir : bool, default True
- sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
- tmpfs_size : int, optional, default None
- The value for the size (in MiB) of the tmpfs mount for this step.
- This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
- memory allocated for this step.
- tmpfs_path : str, optional, default None
- Path to tmpfs mount for this step. Defaults to /metaflow_temp.
- inferentia : int, default 0
- Number of Inferentia chips required for this step.
- trainium : int, default None
- Alias for inferentia. Use only one of the two.
- efa : int, default 0
- Number of elastic fabric adapter network devices to attach to container
- ephemeral_storage : int, default None
- The total amount, in GiB, of ephemeral storage to set for the task, 21-200GiB.
- This is only relevant for Fargate compute environments
- log_driver: str, optional, default None
- The log driver to use for the Amazon ECS container.
- log_options: List[str], optional, default None
- List of strings containing options for the chosen log driver. The configurable values
- depend on the `log driver` chosen. Validation of these options is not supported yet.
- Example: [`awslogs-group:aws/batch/job`]
- """
- ...
-
- @typing.overload
- def batch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def batch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def batch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = 'METAFLOW_BATCH_JOB_QUEUE', iam_role: str = 'METAFLOW_ECS_S3_ACCESS_IAM_ROLE', execution_role: str = 'METAFLOW_ECS_FARGATE_EXECUTION_ROLE', shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None):
- """
- Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
-
-
- Parameters
- ----------
- cpu : int, default 1
- Number of CPUs required for this step. If `@resources` is
- also present, the maximum value from all decorators is used.
- gpu : int, default 0
- Number of GPUs required for this step. If `@resources` is
- also present, the maximum value from all decorators is used.
- memory : int, default 4096
- Memory size (in MB) required for this step. If
- `@resources` is also present, the maximum value from all decorators is
- used.
- image : str, optional, default None
- Docker image to use when launching on AWS Batch. If not specified, and
- METAFLOW_BATCH_CONTAINER_IMAGE is specified, that image is used. If
- not, a default Docker image mapping to the current version of Python is used.
- queue : str, default METAFLOW_BATCH_JOB_QUEUE
- AWS Batch Job Queue to submit the job to.
- iam_role : str, default METAFLOW_ECS_S3_ACCESS_IAM_ROLE
- AWS IAM role that AWS Batch container uses to access AWS cloud resources.
- execution_role : str, default METAFLOW_ECS_FARGATE_EXECUTION_ROLE
- AWS IAM role that AWS Batch can use [to trigger AWS Fargate tasks]
- (https://docs.aws.amazon.com/batch/latest/userguide/execution-IAM-role.html).
- shared_memory : int, optional, default None
- The value for the size (in MiB) of the /dev/shm volume for this step.
- This parameter maps to the `--shm-size` option in Docker.
- max_swap : int, optional, default None
- The total amount of swap memory (in MiB) a container can use for this
- step. This parameter is translated to the `--memory-swap` option in
- Docker where the value is the sum of the container memory plus the
- `max_swap` value.
- swappiness : int, optional, default None
- This allows you to tune memory swappiness behavior for this step.
- A swappiness value of 0 causes swapping not to happen unless absolutely
- necessary. A swappiness value of 100 causes pages to be swapped very
- aggressively. Accepted values are whole numbers between 0 and 100.
- use_tmpfs : bool, default False
- This enables an explicit tmpfs mount for this step. Note that tmpfs is
- not available on Fargate compute environments
- tmpfs_tempdir : bool, default True
- sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
- tmpfs_size : int, optional, default None
- The value for the size (in MiB) of the tmpfs mount for this step.
- This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
- memory allocated for this step.
- tmpfs_path : str, optional, default None
- Path to tmpfs mount for this step. Defaults to /metaflow_temp.
- inferentia : int, default 0
- Number of Inferentia chips required for this step.
- trainium : int, default None
- Alias for inferentia. Use only one of the two.
- efa : int, default 0
- Number of elastic fabric adapter network devices to attach to container
- ephemeral_storage : int, default None
- The total amount, in GiB, of ephemeral storage to set for the task, 21-200GiB.
- This is only relevant for Fargate compute environments
- log_driver: str, optional, default None
- The log driver to use for the Amazon ECS container.
- log_options: List[str], optional, default None
- List of strings containing options for the chosen log driver. The configurable values
- depend on the `log driver` chosen. Validation of these options is not supported yet.
- Example: [`awslogs-group:aws/batch/job`]
- """
- ...
-
- @typing.overload
- def card(*, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Creates a human-readable report, a Metaflow Card, after this step completes.
+ Specifies the Conda environment for the step.

- Note that you may add multiple `@card` decorators in a step with different parameters.
+ Information in this decorator will augment any
+ attributes set in the `@conda_base` flow-level decorator. Hence,
+ you can use `@conda_base` to set packages required by all
+ steps and use `@conda` to specify step-specific overrides.


  Parameters
  ----------
- type : str, default 'default'
- Card type.
- id : str, optional, default None
- If multiple cards are present, use this id to identify this card.
- options : Dict[str, Any], default {}
- Options passed to the card. The contents depend on the card type.
- timeout : int, default 45
- Interrupt reporting if it takes more than this many seconds.
+ packages : Dict[str, str], default {}
+ Packages to use for this step. The key is the name of the package
+ and the value is the version to use.
+ libraries : Dict[str, str], default {}
+ Supported for backward compatibility. When used with packages, packages will take precedence.
+ python : str, optional, default None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ disabled : bool, default False
+ If set to True, disables @conda.
  """
  ...

  @typing.overload
- def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
+ def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
  """
- Creates a human-readable report, a Metaflow Card, after this step completes.
+ Specifies the Conda environment for the step.

- Note that you may add multiple `@card` decorators in a step with different parameters.
+ Information in this decorator will augment any
+ attributes set in the `@conda_base` flow-level decorator. Hence,
+ you can use `@conda_base` to set packages required by all
+ steps and use `@conda` to specify step-specific overrides.


  Parameters
  ----------
- type : str, default 'default'
- Card type.
- id : str, optional, default None
- If multiple cards are present, use this id to identify this card.
- options : Dict[str, Any], default {}
- Options passed to the card. The contents depend on the card type.
- timeout : int, default 45
- Interrupt reporting if it takes more than this many seconds.
+ packages : Dict[str, str], default {}
+ Packages to use for this step. The key is the name of the package
+ and the value is the version to use.
+ libraries : Dict[str, str], default {}
+ Supported for backward compatibility. When used with packages, packages will take precedence.
+ python : str, optional, default None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ disabled : bool, default False
+ If set to True, disables @conda.
  """
  ...

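For context (not part of the diff): the `@conda` stub in the hunk above augments the flow-level `@conda_base` decorator, as its docstring describes. A minimal sketch; package and Python versions are illustrative.

from metaflow import FlowSpec, step, conda, conda_base

# Flow-wide interpreter pin plus a step-specific package override.
@conda_base(python="3.11.9")
class CondaDemoFlow(FlowSpec):

    @conda(packages={"pandas": "2.2.2"})
    @step
    def start(self):
        import pandas as pd  # resolved from the step's Conda environment
        self.rows = len(pd.DataFrame({"x": [1, 2, 3]}))
        self.next(self.end)

    @step
    def end(self):
        print(self.rows)

if __name__ == "__main__":
    CondaDemoFlow()

Such a flow is typically launched with the Conda environment enabled, e.g. `python flow.py --environment=conda run`.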
@@ -826,145 +813,125 @@ def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: ty
  ...

  @typing.overload
- def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies the Conda environment for the step.
-
- Information in this decorator will augment any
- attributes set in the `@conda_base` flow-level decorator. Hence,
- you can use `@conda_base` to set packages required by all
- steps and use `@conda` to specify step-specific overrides.
+ Specifies environment variables to be set prior to the execution of a step.


  Parameters
  ----------
- packages : Dict[str, str], default {}
- Packages to use for this step. The key is the name of the package
- and the value is the version to use.
- libraries : Dict[str, str], default {}
- Supported for backward compatibility. When used with packages, packages will take precedence.
- python : str, optional, default None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
- disabled : bool, default False
- If set to True, disables @conda.
+ vars : Dict[str, str], default {}
+ Dictionary of environment variables to set.
  """
  ...

  @typing.overload
- def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
+ def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
  """
- Specifies the Conda environment for the step.
+ Specifies environment variables to be set prior to the execution of a step.

- Information in this decorator will augment any
- attributes set in the `@conda_base` flow-level decorator. Hence,
- you can use `@conda_base` to set packages required by all
- steps and use `@conda` to specify step-specific overrides.
+
+ Parameters
+ ----------
+ vars : Dict[str, str], default {}
+ Dictionary of environment variables to set.
+ """
+ ...
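A small usage sketch for the `@environment` step decorator introduced above (the variable name and value are placeholders):

    import os
    from metaflow import FlowSpec, environment, step

    class EnvVarFlow(FlowSpec):

        @environment(vars={"MY_SETTING": "debug"})  # set before the step body executes
        @step
        def start(self):
            print(os.environ["MY_SETTING"])
            self.next(self.end)

        @step
        def end(self):
            pass

    if __name__ == "__main__":
        EnvVarFlow()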
+
+ @typing.overload
+ def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Specifies secrets to be retrieved and injected as environment variables prior to
+ the execution of a step.


  Parameters
  ----------
- packages : Dict[str, str], default {}
- Packages to use for this step. The key is the name of the package
- and the value is the version to use.
- libraries : Dict[str, str], default {}
- Supported for backward compatibility. When used with packages, packages will take precedence.
- python : str, optional, default None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
- disabled : bool, default False
- If set to True, disables @conda.
+ sources : List[Union[str, Dict[str, Any]]], default: []
+ List of secret specs, defining how the secrets are to be retrieved
+ role : str, optional, default: None
+ Role to use for fetching secrets
  """
  ...

- def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ @typing.overload
+ def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ ...
+
+ @typing.overload
+ def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None):
  """
- The `@airflow_external_task_sensor` decorator attaches a Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
- This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as a flow decorators. Adding more than one decorator will ensure that `start` step starts only after all sensors finish.
+ Specifies secrets to be retrieved and injected as environment variables prior to
+ the execution of a step.


  Parameters
  ----------
- timeout : int
- Time, in seconds before the task times out and fails. (Default: 3600)
- poke_interval : int
- Time in seconds that the job should wait in between each try. (Default: 60)
- mode : str
- How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
- exponential_backoff : bool
- allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
- pool : str
- the slot pool this task should run in,
- slot pools are a way to limit concurrency for certain tasks. (Default:None)
- soft_fail : bool
- Set to true to mark the task as SKIPPED on failure. (Default: False)
- name : str
- Name of the sensor on Airflow
- description : str
- Description of sensor in the Airflow UI
- external_dag_id : str
- The dag_id that contains the task you want to wait for.
- external_task_ids : List[str]
- The list of task_ids that you want to wait for.
- If None (default value) the sensor waits for the DAG. (Default: None)
- allowed_states : List[str]
- Iterable of allowed states, (Default: ['success'])
- failed_states : List[str]
- Iterable of failed or dis-allowed states. (Default: None)
- execution_delta : datetime.timedelta
- time difference with the previous execution to look at,
- the default is the same logical date as the current task or DAG. (Default: None)
- check_existence: bool
- Set to True to check if the external task exists or check if
- the DAG to wait for exists. (Default: True)
+ sources : List[Union[str, Dict[str, Any]]], default: []
+ List of secret specs, defining how the secrets are to be retrieved
+ role : str, optional, default: None
+ Role to use for fetching secrets
  """
  ...
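A hedged sketch of the `@secrets` step decorator added here; the secret source name, the backend, and the resulting environment variable are deployment-specific assumptions:

    import os
    from metaflow import FlowSpec, secrets, step

    class SecretsFlow(FlowSpec):

        @secrets(sources=["my-credentials"])  # spec format depends on the configured secrets backend
        @step
        def start(self):
            # keys stored in the resolved secret are injected as environment variables
            token = os.environ.get("API_TOKEN")
            self.next(self.end)

        @step
        def end(self):
            pass

    if __name__ == "__main__":
        SecretsFlow()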

  @typing.overload
- def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
- Specifies the PyPI packages for all steps of the flow.
+ Specifies the times when the flow should be run when running on a
+ production scheduler.

- Use `@pypi_base` to set common packages required by all
- steps and use `@pypi` to specify step-specific overrides.

  Parameters
  ----------
- packages : Dict[str, str], default: {}
- Packages to use for this flow. The key is the name of the package
- and the value is the version to use.
- python : str, optional, default: None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
+ hourly : bool, default False
+ Run the workflow hourly.
+ daily : bool, default True
+ Run the workflow daily.
+ weekly : bool, default False
+ Run the workflow weekly.
+ cron : str, optional, default None
+ Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
+ specified by this expression.
+ timezone : str, optional, default None
+ Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
+ which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
  """
  ...

  @typing.overload
- def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
  ...

- def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
+ def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
  """
- Specifies the PyPI packages for all steps of the flow.
-
- Use `@pypi_base` to set common packages required by all
- steps and use `@pypi` to specify step-specific overrides.
+ Specifies the times when the flow should be run when running on a
+ production scheduler.
+

  Parameters
  ----------
- packages : Dict[str, str], default: {}
- Packages to use for this flow. The key is the name of the package
- and the value is the version to use.
- python : str, optional, default: None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
+ hourly : bool, default False
+ Run the workflow hourly.
+ daily : bool, default True
+ Run the workflow daily.
+ weekly : bool, default False
+ Run the workflow weekly.
+ cron : str, optional, default None
+ Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
+ specified by this expression.
+ timezone : str, optional, default None
+ Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
+ which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
  """
  ...

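For the flow-level `@schedule` decorator shown in the hunk above, a minimal sketch (the cron expression and timezone are illustrative, and the exact cron syntax accepted depends on the target scheduler):

    from metaflow import FlowSpec, schedule, step

    @schedule(cron="0 6 * * *", timezone="Europe/London")
    class NightlyFlow(FlowSpec):

        @step
        def start(self):
            self.next(self.end)

        @step
        def end(self):
            pass

    if __name__ == "__main__":
        NightlyFlow()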
@@ -1069,108 +1036,6 @@ def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *
  """
  ...

- @typing.overload
- def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
- """
- Specifies the Conda environment for all steps of the flow.
-
- Use `@conda_base` to set common libraries required by all
- steps and use `@conda` to specify step-specific additions.
-
-
- Parameters
- ----------
- packages : Dict[str, str], default {}
- Packages to use for this flow. The key is the name of the package
- and the value is the version to use.
- libraries : Dict[str, str], default {}
- Supported for backward compatibility. When used with packages, packages will take precedence.
- python : str, optional, default None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
- disabled : bool, default False
- If set to True, disables Conda.
- """
- ...
-
- @typing.overload
- def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
- ...
-
- def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
- """
- Specifies the Conda environment for all steps of the flow.
-
- Use `@conda_base` to set common libraries required by all
- steps and use `@conda` to specify step-specific additions.
-
-
- Parameters
- ----------
- packages : Dict[str, str], default {}
- Packages to use for this flow. The key is the name of the package
- and the value is the version to use.
- libraries : Dict[str, str], default {}
- Supported for backward compatibility. When used with packages, packages will take precedence.
- python : str, optional, default None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
- disabled : bool, default False
- If set to True, disables Conda.
- """
- ...
-
- @typing.overload
- def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
- """
- Specifies the times when the flow should be run when running on a
- production scheduler.
-
-
- Parameters
- ----------
- hourly : bool, default False
- Run the workflow hourly.
- daily : bool, default True
- Run the workflow daily.
- weekly : bool, default False
- Run the workflow weekly.
- cron : str, optional, default None
- Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
- specified by this expression.
- timezone : str, optional, default None
- Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
- which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
- """
- ...
-
- @typing.overload
- def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
- ...
-
- def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
- """
- Specifies the times when the flow should be run when running on a
- production scheduler.
-
-
- Parameters
- ----------
- hourly : bool, default False
- Run the workflow hourly.
- daily : bool, default True
- Run the workflow daily.
- weekly : bool, default False
- Run the workflow weekly.
- cron : str, optional, default None
- Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
- specified by this expression.
- timezone : str, optional, default None
- Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
- which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
- """
- ...
-
  def project(*, name: str, branch: typing.Optional[str] = None, production: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
  Specifies what flows belong to the same project.
@@ -1206,6 +1071,47 @@ def project(*, name: str, branch: typing.Optional[str] = None, production: bool
  """
  ...

+ @typing.overload
+ def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ """
+ Specifies the PyPI packages for all steps of the flow.
+
+ Use `@pypi_base` to set common packages required by all
+ steps and use `@pypi` to specify step-specific overrides.
+
+ Parameters
+ ----------
+ packages : Dict[str, str], default: {}
+ Packages to use for this flow. The key is the name of the package
+ and the value is the version to use.
+ python : str, optional, default: None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ """
+ ...
+
+ @typing.overload
+ def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ ...
+
+ def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
+ """
+ Specifies the PyPI packages for all steps of the flow.
+
+ Use `@pypi_base` to set common packages required by all
+ steps and use `@pypi` to specify step-specific overrides.
+
+ Parameters
+ ----------
+ packages : Dict[str, str], default: {}
+ Packages to use for this flow. The key is the name of the package
+ and the value is the version to use.
+ python : str, optional, default: None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ """
+ ...
+
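A minimal sketch of the flow-level `@pypi_base` decorator added above (the package pin and Python version are assumptions):

    from metaflow import FlowSpec, pypi_base, step

    @pypi_base(packages={"requests": "2.32.3"}, python="3.11.0")
    class PypiBaseFlow(FlowSpec):

        @step
        def start(self):
            import requests  # resolved from the flow-level PyPI environment
            self.next(self.end)

        @step
        def end(self):
            pass

    if __name__ == "__main__":
        PypiBaseFlow()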
  def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
  The `@airflow_s3_key_sensor` decorator attaches a Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
@@ -1342,3 +1248,97 @@ def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: t
  """
  ...

+ @typing.overload
+ def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ """
+ Specifies the Conda environment for all steps of the flow.
+
+ Use `@conda_base` to set common libraries required by all
+ steps and use `@conda` to specify step-specific additions.
+
+
+ Parameters
+ ----------
+ packages : Dict[str, str], default {}
+ Packages to use for this flow. The key is the name of the package
+ and the value is the version to use.
+ libraries : Dict[str, str], default {}
+ Supported for backward compatibility. When used with packages, packages will take precedence.
+ python : str, optional, default None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ disabled : bool, default False
+ If set to True, disables Conda.
+ """
+ ...
+
+ @typing.overload
+ def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ ...
+
+ def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
+ """
+ Specifies the Conda environment for all steps of the flow.
+
+ Use `@conda_base` to set common libraries required by all
+ steps and use `@conda` to specify step-specific additions.
+
+
+ Parameters
+ ----------
+ packages : Dict[str, str], default {}
+ Packages to use for this flow. The key is the name of the package
+ and the value is the version to use.
+ libraries : Dict[str, str], default {}
+ Supported for backward compatibility. When used with packages, packages will take precedence.
+ python : str, optional, default None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ disabled : bool, default False
+ If set to True, disables Conda.
+ """
+ ...
+
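A sketch combining the flow-level `@conda_base` decorator above with a step-level `@conda` override, as the docstrings suggest (all pins and names are illustrative):

    from metaflow import FlowSpec, conda, conda_base, step

    @conda_base(packages={"numpy": "1.26.4"}, python="3.10.13")
    class CondaBaseFlow(FlowSpec):

        @step
        def start(self):
            import numpy as np  # available via the flow-level environment
            self.next(self.train)

        @conda(packages={"scikit-learn": "1.4.2"})  # step-specific addition on top of @conda_base
        @step
        def train(self):
            import sklearn
            self.next(self.end)

        @step
        def end(self):
            pass

    if __name__ == "__main__":
        CondaBaseFlow()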
+ def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ """
+ The `@airflow_external_task_sensor` decorator attaches a Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
+ This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as a flow decorators. Adding more than one decorator will ensure that `start` step starts only after all sensors finish.
+
+
+ Parameters
+ ----------
+ timeout : int
+ Time, in seconds before the task times out and fails. (Default: 3600)
+ poke_interval : int
+ Time in seconds that the job should wait in between each try. (Default: 60)
+ mode : str
+ How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
+ exponential_backoff : bool
+ allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
+ pool : str
+ the slot pool this task should run in,
+ slot pools are a way to limit concurrency for certain tasks. (Default:None)
+ soft_fail : bool
+ Set to true to mark the task as SKIPPED on failure. (Default: False)
+ name : str
+ Name of the sensor on Airflow
+ description : str
+ Description of sensor in the Airflow UI
+ external_dag_id : str
+ The dag_id that contains the task you want to wait for.
+ external_task_ids : List[str]
+ The list of task_ids that you want to wait for.
+ If None (default value) the sensor waits for the DAG. (Default: None)
+ allowed_states : List[str]
+ Iterable of allowed states, (Default: ['success'])
+ failed_states : List[str]
+ Iterable of failed or dis-allowed states. (Default: None)
+ execution_delta : datetime.timedelta
+ time difference with the previous execution to look at,
+ the default is the same logical date as the current task or DAG. (Default: None)
+ check_existence: bool
+ Set to True to check if the external task exists or check if
+ the DAG to wait for exists. (Default: True)
+ """
+ ...
+
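Finally, a hedged sketch of the `@airflow_external_task_sensor` flow decorator added at the end of this hunk; the DAG and sensor names are placeholders, unspecified parameters fall back to the defaults noted in the docstring, and the sensor only takes effect when the flow is compiled with `airflow create`:

    from metaflow import FlowSpec, airflow_external_task_sensor, step

    @airflow_external_task_sensor(external_dag_id="upstream_etl", name="wait_for_upstream_etl")
    class DownstreamFlow(FlowSpec):

        @step
        def start(self):
            self.next(self.end)

        @step
        def end(self):
            pass

    if __name__ == "__main__":
        DownstreamFlow()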