metaflow-stubs 2.15.5__py2.py3-none-any.whl → 2.15.6__py2.py3-none-any.whl

This diff shows the changes between publicly available package versions as they appear in their respective public registries. It is provided for informational purposes only.
Files changed (146)
  1. metaflow-stubs/__init__.pyi +726 -726
  2. metaflow-stubs/cards.pyi +2 -2
  3. metaflow-stubs/cli.pyi +2 -2
  4. metaflow-stubs/cli_components/__init__.pyi +2 -2
  5. metaflow-stubs/cli_components/utils.pyi +2 -2
  6. metaflow-stubs/client/__init__.pyi +2 -2
  7. metaflow-stubs/client/core.pyi +6 -6
  8. metaflow-stubs/client/filecache.pyi +3 -3
  9. metaflow-stubs/events.pyi +2 -2
  10. metaflow-stubs/exception.pyi +2 -2
  11. metaflow-stubs/flowspec.pyi +3 -3
  12. metaflow-stubs/generated_for.txt +1 -1
  13. metaflow-stubs/includefile.pyi +5 -5
  14. metaflow-stubs/info_file.pyi +2 -2
  15. metaflow-stubs/metadata_provider/__init__.pyi +2 -2
  16. metaflow-stubs/metadata_provider/heartbeat.pyi +2 -2
  17. metaflow-stubs/metadata_provider/metadata.pyi +2 -2
  18. metaflow-stubs/metadata_provider/util.pyi +2 -2
  19. metaflow-stubs/metaflow_config.pyi +2 -2
  20. metaflow-stubs/metaflow_current.pyi +27 -27
  21. metaflow-stubs/multicore_utils.pyi +2 -2
  22. metaflow-stubs/parameters.pyi +5 -5
  23. metaflow-stubs/plugins/__init__.pyi +14 -14
  24. metaflow-stubs/plugins/airflow/__init__.pyi +2 -2
  25. metaflow-stubs/plugins/airflow/airflow_decorator.pyi +2 -2
  26. metaflow-stubs/plugins/airflow/airflow_utils.pyi +2 -2
  27. metaflow-stubs/plugins/airflow/exception.pyi +2 -2
  28. metaflow-stubs/plugins/airflow/sensors/__init__.pyi +2 -2
  29. metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +2 -2
  30. metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +2 -2
  31. metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +2 -2
  32. metaflow-stubs/plugins/argo/__init__.pyi +2 -2
  33. metaflow-stubs/plugins/argo/argo_client.pyi +2 -2
  34. metaflow-stubs/plugins/argo/argo_events.pyi +2 -2
  35. metaflow-stubs/plugins/argo/argo_workflows.pyi +2 -2
  36. metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +3 -3
  37. metaflow-stubs/plugins/argo/argo_workflows_deployer.pyi +3 -3
  38. metaflow-stubs/plugins/argo/argo_workflows_deployer_objects.pyi +3 -3
  39. metaflow-stubs/plugins/aws/__init__.pyi +3 -3
  40. metaflow-stubs/plugins/aws/aws_client.pyi +2 -2
  41. metaflow-stubs/plugins/aws/aws_utils.pyi +2 -2
  42. metaflow-stubs/plugins/aws/batch/__init__.pyi +2 -2
  43. metaflow-stubs/plugins/aws/batch/batch.pyi +2 -2
  44. metaflow-stubs/plugins/aws/batch/batch_client.pyi +2 -2
  45. metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +2 -2
  46. metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +2 -2
  47. metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +4 -4
  48. metaflow-stubs/plugins/aws/step_functions/__init__.pyi +2 -2
  49. metaflow-stubs/plugins/aws/step_functions/dynamo_db_client.pyi +2 -2
  50. metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +2 -2
  51. metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +2 -2
  52. metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +2 -2
  53. metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +2 -2
  54. metaflow-stubs/plugins/aws/step_functions/step_functions_decorator.pyi +2 -2
  55. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer.pyi +3 -3
  56. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer_objects.pyi +2 -2
  57. metaflow-stubs/plugins/azure/__init__.pyi +2 -2
  58. metaflow-stubs/plugins/azure/azure_credential.pyi +2 -2
  59. metaflow-stubs/plugins/azure/azure_exceptions.pyi +2 -2
  60. metaflow-stubs/plugins/azure/azure_secret_manager_secrets_provider.pyi +4 -4
  61. metaflow-stubs/plugins/azure/azure_utils.pyi +2 -2
  62. metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +2 -2
  63. metaflow-stubs/plugins/azure/includefile_support.pyi +2 -2
  64. metaflow-stubs/plugins/cards/__init__.pyi +2 -2
  65. metaflow-stubs/plugins/cards/card_client.pyi +2 -2
  66. metaflow-stubs/plugins/cards/card_creator.pyi +2 -2
  67. metaflow-stubs/plugins/cards/card_datastore.pyi +2 -2
  68. metaflow-stubs/plugins/cards/card_decorator.pyi +2 -2
  69. metaflow-stubs/plugins/cards/card_modules/__init__.pyi +2 -2
  70. metaflow-stubs/plugins/cards/card_modules/basic.pyi +3 -3
  71. metaflow-stubs/plugins/cards/card_modules/card.pyi +2 -2
  72. metaflow-stubs/plugins/cards/card_modules/components.pyi +4 -4
  73. metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +2 -2
  74. metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +2 -2
  75. metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +2 -2
  76. metaflow-stubs/plugins/cards/card_resolver.pyi +2 -2
  77. metaflow-stubs/plugins/cards/component_serializer.pyi +2 -2
  78. metaflow-stubs/plugins/cards/exception.pyi +2 -2
  79. metaflow-stubs/plugins/catch_decorator.pyi +3 -3
  80. metaflow-stubs/plugins/datatools/__init__.pyi +2 -2
  81. metaflow-stubs/plugins/datatools/local.pyi +2 -2
  82. metaflow-stubs/plugins/datatools/s3/__init__.pyi +2 -2
  83. metaflow-stubs/plugins/datatools/s3/s3.pyi +4 -4
  84. metaflow-stubs/plugins/datatools/s3/s3tail.pyi +2 -2
  85. metaflow-stubs/plugins/datatools/s3/s3util.pyi +2 -2
  86. metaflow-stubs/plugins/debug_logger.pyi +2 -2
  87. metaflow-stubs/plugins/debug_monitor.pyi +2 -2
  88. metaflow-stubs/plugins/environment_decorator.pyi +2 -2
  89. metaflow-stubs/plugins/events_decorator.pyi +2 -2
  90. metaflow-stubs/plugins/frameworks/__init__.pyi +2 -2
  91. metaflow-stubs/plugins/frameworks/pytorch.pyi +2 -2
  92. metaflow-stubs/plugins/gcp/__init__.pyi +2 -2
  93. metaflow-stubs/plugins/gcp/gcp_secret_manager_secrets_provider.pyi +4 -4
  94. metaflow-stubs/plugins/gcp/gs_exceptions.pyi +2 -2
  95. metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +2 -2
  96. metaflow-stubs/plugins/gcp/gs_utils.pyi +2 -2
  97. metaflow-stubs/plugins/gcp/includefile_support.pyi +2 -2
  98. metaflow-stubs/plugins/kubernetes/__init__.pyi +2 -2
  99. metaflow-stubs/plugins/kubernetes/kube_utils.pyi +2 -2
  100. metaflow-stubs/plugins/kubernetes/kubernetes.pyi +2 -2
  101. metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +2 -2
  102. metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +2 -2
  103. metaflow-stubs/plugins/kubernetes/kubernetes_job.pyi +2 -2
  104. metaflow-stubs/plugins/kubernetes/kubernetes_jobsets.pyi +2 -2
  105. metaflow-stubs/plugins/kubernetes/spot_monitor_sidecar.pyi +2 -2
  106. metaflow-stubs/plugins/parallel_decorator.pyi +2 -2
  107. metaflow-stubs/plugins/project_decorator.pyi +2 -2
  108. metaflow-stubs/plugins/pypi/__init__.pyi +3 -3
  109. metaflow-stubs/plugins/pypi/conda_decorator.pyi +2 -2
  110. metaflow-stubs/plugins/pypi/conda_environment.pyi +4 -4
  111. metaflow-stubs/plugins/pypi/parsers.pyi +2 -2
  112. metaflow-stubs/plugins/pypi/pypi_decorator.pyi +2 -2
  113. metaflow-stubs/plugins/pypi/pypi_environment.pyi +2 -2
  114. metaflow-stubs/plugins/pypi/utils.pyi +2 -2
  115. metaflow-stubs/plugins/resources_decorator.pyi +2 -2
  116. metaflow-stubs/plugins/retry_decorator.pyi +2 -2
  117. metaflow-stubs/plugins/secrets/__init__.pyi +2 -2
  118. metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +2 -2
  119. metaflow-stubs/plugins/secrets/secrets_decorator.pyi +2 -2
  120. metaflow-stubs/plugins/storage_executor.pyi +2 -2
  121. metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +3 -3
  122. metaflow-stubs/plugins/timeout_decorator.pyi +3 -3
  123. metaflow-stubs/pylint_wrapper.pyi +2 -2
  124. metaflow-stubs/runner/__init__.pyi +2 -2
  125. metaflow-stubs/runner/deployer.pyi +4 -4
  126. metaflow-stubs/runner/deployer_impl.pyi +3 -3
  127. metaflow-stubs/runner/metaflow_runner.pyi +3 -3
  128. metaflow-stubs/runner/nbdeploy.pyi +2 -2
  129. metaflow-stubs/runner/nbrun.pyi +2 -2
  130. metaflow-stubs/runner/subprocess_manager.pyi +2 -2
  131. metaflow-stubs/runner/utils.pyi +4 -4
  132. metaflow-stubs/system/__init__.pyi +2 -2
  133. metaflow-stubs/system/system_logger.pyi +3 -3
  134. metaflow-stubs/system/system_monitor.pyi +2 -2
  135. metaflow-stubs/tagging_util.pyi +2 -2
  136. metaflow-stubs/tuple_util.pyi +2 -2
  137. metaflow-stubs/user_configs/__init__.pyi +2 -2
  138. metaflow-stubs/user_configs/config_decorators.pyi +5 -5
  139. metaflow-stubs/user_configs/config_options.pyi +3 -3
  140. metaflow-stubs/user_configs/config_parameters.pyi +4 -4
  141. metaflow-stubs/version.pyi +2 -2
  142. {metaflow_stubs-2.15.5.dist-info → metaflow_stubs-2.15.6.dist-info}/METADATA +2 -2
  143. metaflow_stubs-2.15.6.dist-info/RECORD +146 -0
  144. metaflow_stubs-2.15.5.dist-info/RECORD +0 -146
  145. {metaflow_stubs-2.15.5.dist-info → metaflow_stubs-2.15.6.dist-info}/WHEEL +0 -0
  146. {metaflow_stubs-2.15.5.dist-info → metaflow_stubs-2.15.6.dist-info}/top_level.txt +0 -0
@@ -1,15 +1,15 @@
  ######################################################################################################
  # Auto-generated Metaflow stub file #
- # MF version: 2.15.5 #
- # Generated on 2025-03-13T17:07:28.496640 #
+ # MF version: 2.15.6 #
+ # Generated on 2025-03-17T21:08:12.538673 #
  ######################################################################################################

  from __future__ import annotations

  import typing
  if typing.TYPE_CHECKING:
- import datetime
  import typing
+ import datetime
  FlowSpecDerived = typing.TypeVar("FlowSpecDerived", bound="FlowSpec", contravariant=False, covariant=False)
  StepFlag = typing.NewType("StepFlag", bool)

@@ -146,67 +146,87 @@ def step(f: typing.Union[typing.Callable[[FlowSpecDerived], None], typing.Callab
  ...

  @typing.overload
- def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def batch(*, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = 'METAFLOW_BATCH_JOB_QUEUE', iam_role: str = 'METAFLOW_ECS_S3_ACCESS_IAM_ROLE', execution_role: str = 'METAFLOW_ECS_FARGATE_EXECUTION_ROLE', shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies the Conda environment for the step.
-
- Information in this decorator will augment any
- attributes set in the `@conda_base` flow-level decorator. Hence,
- you can use `@conda_base` to set packages required by all
- steps and use `@conda` to specify step-specific overrides.
+ Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).


  Parameters
  ----------
- packages : Dict[str, str], default {}
- Packages to use for this step. The key is the name of the package
- and the value is the version to use.
- libraries : Dict[str, str], default {}
- Supported for backward compatibility. When used with packages, packages will take precedence.
- python : str, optional, default None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
- disabled : bool, default False
- If set to True, disables @conda.
+ cpu : int, default 1
+ Number of CPUs required for this step. If `@resources` is
+ also present, the maximum value from all decorators is used.
+ gpu : int, default 0
+ Number of GPUs required for this step. If `@resources` is
+ also present, the maximum value from all decorators is used.
+ memory : int, default 4096
+ Memory size (in MB) required for this step. If
+ `@resources` is also present, the maximum value from all decorators is
+ used.
+ image : str, optional, default None
+ Docker image to use when launching on AWS Batch. If not specified, and
+ METAFLOW_BATCH_CONTAINER_IMAGE is specified, that image is used. If
+ not, a default Docker image mapping to the current version of Python is used.
+ queue : str, default METAFLOW_BATCH_JOB_QUEUE
+ AWS Batch Job Queue to submit the job to.
+ iam_role : str, default METAFLOW_ECS_S3_ACCESS_IAM_ROLE
+ AWS IAM role that AWS Batch container uses to access AWS cloud resources.
+ execution_role : str, default METAFLOW_ECS_FARGATE_EXECUTION_ROLE
+ AWS IAM role that AWS Batch can use [to trigger AWS Fargate tasks]
+ (https://docs.aws.amazon.com/batch/latest/userguide/execution-IAM-role.html).
+ shared_memory : int, optional, default None
+ The value for the size (in MiB) of the /dev/shm volume for this step.
+ This parameter maps to the `--shm-size` option in Docker.
+ max_swap : int, optional, default None
+ The total amount of swap memory (in MiB) a container can use for this
+ step. This parameter is translated to the `--memory-swap` option in
+ Docker where the value is the sum of the container memory plus the
+ `max_swap` value.
+ swappiness : int, optional, default None
+ This allows you to tune memory swappiness behavior for this step.
+ A swappiness value of 0 causes swapping not to happen unless absolutely
+ necessary. A swappiness value of 100 causes pages to be swapped very
+ aggressively. Accepted values are whole numbers between 0 and 100.
+ use_tmpfs : bool, default False
+ This enables an explicit tmpfs mount for this step. Note that tmpfs is
+ not available on Fargate compute environments
+ tmpfs_tempdir : bool, default True
+ sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
+ tmpfs_size : int, optional, default None
+ The value for the size (in MiB) of the tmpfs mount for this step.
+ This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
+ memory allocated for this step.
+ tmpfs_path : str, optional, default None
+ Path to tmpfs mount for this step. Defaults to /metaflow_temp.
+ inferentia : int, default 0
+ Number of Inferentia chips required for this step.
+ trainium : int, default None
+ Alias for inferentia. Use only one of the two.
+ efa : int, default 0
+ Number of elastic fabric adapter network devices to attach to container
+ ephemeral_storage : int, default None
+ The total amount, in GiB, of ephemeral storage to set for the task, 21-200GiB.
+ This is only relevant for Fargate compute environments
+ log_driver: str, optional, default None
+ The log driver to use for the Amazon ECS container.
+ log_options: List[str], optional, default None
+ List of strings containing options for the chosen log driver. The configurable values
+ depend on the `log driver` chosen. Validation of these options is not supported yet.
+ Example: [`awslogs-group:aws/batch/job`]
  """
  ...

  @typing.overload
- def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def batch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
- """
- Specifies the Conda environment for the step.
-
- Information in this decorator will augment any
- attributes set in the `@conda_base` flow-level decorator. Hence,
- you can use `@conda_base` to set packages required by all
- steps and use `@conda` to specify step-specific overrides.
-
-
- Parameters
- ----------
- packages : Dict[str, str], default {}
- Packages to use for this step. The key is the name of the package
- and the value is the version to use.
- libraries : Dict[str, str], default {}
- Supported for backward compatibility. When used with packages, packages will take precedence.
- python : str, optional, default None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
- disabled : bool, default False
- If set to True, disables @conda.
- """
+ def batch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = 'KUBERNETES_IMAGE_PULL_POLICY', service_account: str = 'METAFLOW_KUBERNETES_SERVICE_ACCOUNT', secrets: typing.Optional[typing.List[str]] = None, node_selector: typing.Union[typing.Dict[str, str], str, None] = None, namespace: str = 'METAFLOW_KUBERNETES_NAMESPACE', gpu: typing.Optional[int] = None, gpu_vendor: str = 'KUBERNETES_GPU_VENDOR', tolerations: typing.List[str] = [], labels: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_LABELS', annotations: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_ANNOTATIONS', use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = '/metaflow_temp', persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None, hostname_resolution_timeout: int = 600, qos: str = 'Burstable') -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def batch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = 'METAFLOW_BATCH_JOB_QUEUE', iam_role: str = 'METAFLOW_ECS_S3_ACCESS_IAM_ROLE', execution_role: str = 'METAFLOW_ECS_FARGATE_EXECUTION_ROLE', shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None):
  """
- Specifies that this step should execute on Kubernetes.
+ Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).


  Parameters
@@ -214,119 +234,315 @@ def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: ty
  cpu : int, default 1
  Number of CPUs required for this step. If `@resources` is
  also present, the maximum value from all decorators is used.
+ gpu : int, default 0
+ Number of GPUs required for this step. If `@resources` is
+ also present, the maximum value from all decorators is used.
  memory : int, default 4096
  Memory size (in MB) required for this step. If
  `@resources` is also present, the maximum value from all decorators is
  used.
- disk : int, default 10240
- Disk size (in MB) required for this step. If
- `@resources` is also present, the maximum value from all decorators is
- used.
  image : str, optional, default None
- Docker image to use when launching on Kubernetes. If not specified, and
- METAFLOW_KUBERNETES_CONTAINER_IMAGE is specified, that image is used. If
+ Docker image to use when launching on AWS Batch. If not specified, and
+ METAFLOW_BATCH_CONTAINER_IMAGE is specified, that image is used. If
  not, a default Docker image mapping to the current version of Python is used.
- image_pull_policy: str, default KUBERNETES_IMAGE_PULL_POLICY
- If given, the imagePullPolicy to be applied to the Docker image of the step.
- service_account : str, default METAFLOW_KUBERNETES_SERVICE_ACCOUNT
- Kubernetes service account to use when launching pod in Kubernetes.
- secrets : List[str], optional, default None
- Kubernetes secrets to use when launching pod in Kubernetes. These
- secrets are in addition to the ones defined in `METAFLOW_KUBERNETES_SECRETS`
- in Metaflow configuration.
- node_selector: Union[Dict[str,str], str], optional, default None
- Kubernetes node selector(s) to apply to the pod running the task.
- Can be passed in as a comma separated string of values e.g.
- 'kubernetes.io/os=linux,kubernetes.io/arch=amd64' or as a dictionary
- {'kubernetes.io/os': 'linux', 'kubernetes.io/arch': 'amd64'}
- namespace : str, default METAFLOW_KUBERNETES_NAMESPACE
- Kubernetes namespace to use when launching pod in Kubernetes.
- gpu : int, optional, default None
- Number of GPUs required for this step. A value of zero implies that
- the scheduled node should not have GPUs.
- gpu_vendor : str, default KUBERNETES_GPU_VENDOR
- The vendor of the GPUs to be used for this step.
- tolerations : List[str], default []
- The default is extracted from METAFLOW_KUBERNETES_TOLERATIONS.
- Kubernetes tolerations to use when launching pod in Kubernetes.
- labels: Dict[str, str], default: METAFLOW_KUBERNETES_LABELS
- Kubernetes labels to use when launching pod in Kubernetes.
- annotations: Dict[str, str], default: METAFLOW_KUBERNETES_ANNOTATIONS
- Kubernetes annotations to use when launching pod in Kubernetes.
+ queue : str, default METAFLOW_BATCH_JOB_QUEUE
+ AWS Batch Job Queue to submit the job to.
+ iam_role : str, default METAFLOW_ECS_S3_ACCESS_IAM_ROLE
+ AWS IAM role that AWS Batch container uses to access AWS cloud resources.
+ execution_role : str, default METAFLOW_ECS_FARGATE_EXECUTION_ROLE
+ AWS IAM role that AWS Batch can use [to trigger AWS Fargate tasks]
+ (https://docs.aws.amazon.com/batch/latest/userguide/execution-IAM-role.html).
+ shared_memory : int, optional, default None
+ The value for the size (in MiB) of the /dev/shm volume for this step.
+ This parameter maps to the `--shm-size` option in Docker.
+ max_swap : int, optional, default None
+ The total amount of swap memory (in MiB) a container can use for this
+ step. This parameter is translated to the `--memory-swap` option in
+ Docker where the value is the sum of the container memory plus the
+ `max_swap` value.
+ swappiness : int, optional, default None
+ This allows you to tune memory swappiness behavior for this step.
+ A swappiness value of 0 causes swapping not to happen unless absolutely
+ necessary. A swappiness value of 100 causes pages to be swapped very
+ aggressively. Accepted values are whole numbers between 0 and 100.
  use_tmpfs : bool, default False
- This enables an explicit tmpfs mount for this step.
+ This enables an explicit tmpfs mount for this step. Note that tmpfs is
+ not available on Fargate compute environments
  tmpfs_tempdir : bool, default True
  sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
- tmpfs_size : int, optional, default: None
+ tmpfs_size : int, optional, default None
  The value for the size (in MiB) of the tmpfs mount for this step.
  This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
  memory allocated for this step.
- tmpfs_path : str, optional, default /metaflow_temp
- Path to tmpfs mount for this step.
- persistent_volume_claims : Dict[str, str], optional, default None
- A map (dictionary) of persistent volumes to be mounted to the pod for this step. The map is from persistent
- volumes to the path to which the volume is to be mounted, e.g., `{'pvc-name': '/path/to/mount/on'}`.
- shared_memory: int, optional
- Shared memory size (in MiB) required for this step
- port: int, optional
- Port number to specify in the Kubernetes job object
- compute_pool : str, optional, default None
- Compute pool to be used for for this step.
- If not specified, any accessible compute pool within the perimeter is used.
- hostname_resolution_timeout: int, default 10 * 60
- Timeout in seconds for the workers tasks in the gang scheduled cluster to resolve the hostname of control task.
- Only applicable when @parallel is used.
- qos: str, default: Burstable
- Quality of Service class to assign to the pod. Supported values are: Guaranteed, Burstable, BestEffort
+ tmpfs_path : str, optional, default None
+ Path to tmpfs mount for this step. Defaults to /metaflow_temp.
+ inferentia : int, default 0
+ Number of Inferentia chips required for this step.
+ trainium : int, default None
+ Alias for inferentia. Use only one of the two.
+ efa : int, default 0
+ Number of elastic fabric adapter network devices to attach to container
+ ephemeral_storage : int, default None
+ The total amount, in GiB, of ephemeral storage to set for the task, 21-200GiB.
+ This is only relevant for Fargate compute environments
+ log_driver: str, optional, default None
+ The log driver to use for the Amazon ECS container.
+ log_options: List[str], optional, default None
+ List of strings containing options for the chosen log driver. The configurable values
+ depend on the `log driver` chosen. Validation of these options is not supported yet.
+ Example: [`awslogs-group:aws/batch/job`]
  """
  ...

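For orientation, here is a minimal usage sketch of the `@batch` step decorator whose stub is introduced in this hunk. It assumes the real `metaflow` package (which these stubs type) is installed and that an AWS Batch job queue is configured for the deployment; the flow, step, and attribute names are illustrative.

```python
from metaflow import FlowSpec, step, batch


class BatchDemoFlow(FlowSpec):
    # Request 2 CPUs and 8192 MB of memory on AWS Batch for this step only.
    @batch(cpu=2, memory=8192)
    @step
    def start(self):
        self.total = sum(range(10))
        self.next(self.end)

    @step
    def end(self):
        print("total =", self.total)


if __name__ == "__main__":
    BatchDemoFlow()
```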
  @typing.overload
- def card(*, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Creates a human-readable report, a Metaflow Card, after this step completes.
+ Specifies the number of times the task corresponding
+ to a step needs to be retried.

- Note that you may add multiple `@card` decorators in a step with different parameters.
+ This decorator is useful for handling transient errors, such as networking issues.
+ If your task contains operations that can't be retried safely, e.g. database updates,
+ it is advisable to annotate it with `@retry(times=0)`.
+
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
+ decorator will execute a no-op task after all retries have been exhausted,
+ ensuring that the flow execution can continue.


  Parameters
  ----------
- type : str, default 'default'
- Card type.
- id : str, optional, default None
- If multiple cards are present, use this id to identify this card.
- options : Dict[str, Any], default {}
- Options passed to the card. The contents depend on the card type.
- timeout : int, default 45
- Interrupt reporting if it takes more than this many seconds.
+ times : int, default 3
+ Number of times to retry this task.
+ minutes_between_retries : int, default 2
+ Number of minutes between retries.
  """
  ...

  @typing.overload
- def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
+ def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
  """
- Creates a human-readable report, a Metaflow Card, after this step completes.
+ Specifies the number of times the task corresponding
+ to a step needs to be retried.

- Note that you may add multiple `@card` decorators in a step with different parameters.
+ This decorator is useful for handling transient errors, such as networking issues.
+ If your task contains operations that can't be retried safely, e.g. database updates,
+ it is advisable to annotate it with `@retry(times=0)`.
+
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
+ decorator will execute a no-op task after all retries have been exhausted,
+ ensuring that the flow execution can continue.


  Parameters
  ----------
- type : str, default 'default'
- Card type.
- id : str, optional, default None
- If multiple cards are present, use this id to identify this card.
- options : Dict[str, Any], default {}
- Options passed to the card. The contents depend on the card type.
- timeout : int, default 45
- Interrupt reporting if it takes more than this many seconds.
+ times : int, default 3
+ Number of times to retry this task.
+ minutes_between_retries : int, default 2
+ Number of minutes between retries.
+ """
+ ...
+
+ @typing.overload
+ def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Specifies a timeout for your step.
+
+ This decorator is useful if this step may hang indefinitely.
+
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.
+
+ Note that all the values specified in parameters are added together so if you specify
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
+
+
+ Parameters
+ ----------
+ seconds : int, default 0
+ Number of seconds to wait prior to timing out.
+ minutes : int, default 0
+ Number of minutes to wait prior to timing out.
+ hours : int, default 0
+ Number of hours to wait prior to timing out.
+ """
+ ...
+
+ @typing.overload
+ def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ ...
+
+ @typing.overload
+ def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
+ """
+ Specifies a timeout for your step.
+
+ This decorator is useful if this step may hang indefinitely.
+
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.
+
+ Note that all the values specified in parameters are added together so if you specify
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
+
+
+ Parameters
+ ----------
+ seconds : int, default 0
+ Number of seconds to wait prior to timing out.
+ minutes : int, default 0
+ Number of minutes to wait prior to timing out.
+ hours : int, default 0
+ Number of hours to wait prior to timing out.
+ """
+ ...
+
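The `@retry` and `@timeout` decorators stubbed above are commonly combined. A hedged sketch, assuming the real `metaflow` package is installed; the flow name, limits, and workload are illustrative:

```python
from metaflow import FlowSpec, step, retry, timeout


class RobustFlow(FlowSpec):
    # Retry transient failures up to 4 times, waiting 1 minute between attempts,
    # and fail any single attempt that runs longer than 5 minutes.
    @retry(times=4, minutes_between_retries=1)
    @timeout(minutes=5)
    @step
    def start(self):
        self.result = sum(range(1_000_000))  # stand-in for a flaky remote call
        self.next(self.end)

    @step
    def end(self):
        print(self.result)


if __name__ == "__main__":
    RobustFlow()
```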
+ @typing.overload
+ def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Specifies the PyPI packages for the step.
+
+ Information in this decorator will augment any
+ attributes set in the `@pyi_base` flow-level decorator. Hence,
+ you can use `@pypi_base` to set packages required by all
+ steps and use `@pypi` to specify step-specific overrides.
+
+
+ Parameters
+ ----------
+ packages : Dict[str, str], default: {}
+ Packages to use for this step. The key is the name of the package
+ and the value is the version to use.
+ python : str, optional, default: None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ """
+ ...
+
+ @typing.overload
+ def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ ...
+
+ @typing.overload
+ def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
+ """
+ Specifies the PyPI packages for the step.
+
+ Information in this decorator will augment any
+ attributes set in the `@pyi_base` flow-level decorator. Hence,
+ you can use `@pypi_base` to set packages required by all
+ steps and use `@pypi` to specify step-specific overrides.
+
+
+ Parameters
+ ----------
+ packages : Dict[str, str], default: {}
+ Packages to use for this step. The key is the name of the package
+ and the value is the version to use.
+ python : str, optional, default: None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ """
+ ...
+
+ @typing.overload
+ def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Specifies secrets to be retrieved and injected as environment variables prior to
+ the execution of a step.
+
+
+ Parameters
+ ----------
+ sources : List[Union[str, Dict[str, Any]]], default: []
+ List of secret specs, defining how the secrets are to be retrieved
+ """
+ ...
+
+ @typing.overload
+ def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ ...
+
+ @typing.overload
+ def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []):
+ """
+ Specifies secrets to be retrieved and injected as environment variables prior to
+ the execution of a step.
+
+
+ Parameters
+ ----------
+ sources : List[Union[str, Dict[str, Any]]], default: []
+ List of secret specs, defining how the secrets are to be retrieved
+ """
+ ...
+
+ @typing.overload
+ def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Specifies environment variables to be set prior to the execution of a step.
+
+
+ Parameters
+ ----------
+ vars : Dict[str, str], default {}
+ Dictionary of environment variables to set.
+ """
+ ...
+
+ @typing.overload
+ def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ ...
+
+ @typing.overload
+ def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
+ """
+ Specifies environment variables to be set prior to the execution of a step.
+
+
+ Parameters
+ ----------
+ vars : Dict[str, str], default {}
+ Dictionary of environment variables to set.
+ """
+ ...
+
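A combined sketch of the `@pypi`, `@secrets`, and `@environment` decorators stubbed above. The package pin, secret source name, and environment variable are illustrative values, and a secrets provider is assumed to be configured for the deployment:

```python
from metaflow import FlowSpec, step, pypi, secrets, environment


class ConfiguredFlow(FlowSpec):
    @pypi(python="3.11.0", packages={"requests": "2.31.0"})  # step-specific PyPI environment
    @secrets(sources=["demo-credentials"])                   # resolved by the configured secrets provider
    @environment(vars={"APP_MODE": "debug"})
    @step
    def start(self):
        import os
        import requests  # available because of @pypi above

        print(os.environ["APP_MODE"], requests.__version__)
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    ConfiguredFlow()
```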
+ @typing.overload
+ def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ """
+ Decorator prototype for all step decorators. This function gets specialized
+ and imported for all decorators types by _import_plugin_decorators().
+ """
+ ...
+
+ @typing.overload
+ def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
+ """
+ Decorator prototype for all step decorators. This function gets specialized
+ and imported for all decorators types by _import_plugin_decorators().
  """
  ...

@@ -410,210 +626,110 @@ def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None]
  ...

  @typing.overload
- def batch(*, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = 'METAFLOW_BATCH_JOB_QUEUE', iam_role: str = 'METAFLOW_ECS_S3_ACCESS_IAM_ROLE', execution_role: str = 'METAFLOW_ECS_FARGATE_EXECUTION_ROLE', shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def card(*, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
+ Creates a human-readable report, a Metaflow Card, after this step completes.
+
+ Note that you may add multiple `@card` decorators in a step with different parameters.


  Parameters
  ----------
- cpu : int, default 1
- Number of CPUs required for this step. If `@resources` is
- also present, the maximum value from all decorators is used.
- gpu : int, default 0
- Number of GPUs required for this step. If `@resources` is
- also present, the maximum value from all decorators is used.
- memory : int, default 4096
- Memory size (in MB) required for this step. If
- `@resources` is also present, the maximum value from all decorators is
- used.
- image : str, optional, default None
- Docker image to use when launching on AWS Batch. If not specified, and
- METAFLOW_BATCH_CONTAINER_IMAGE is specified, that image is used. If
- not, a default Docker image mapping to the current version of Python is used.
- queue : str, default METAFLOW_BATCH_JOB_QUEUE
- AWS Batch Job Queue to submit the job to.
- iam_role : str, default METAFLOW_ECS_S3_ACCESS_IAM_ROLE
- AWS IAM role that AWS Batch container uses to access AWS cloud resources.
- execution_role : str, default METAFLOW_ECS_FARGATE_EXECUTION_ROLE
- AWS IAM role that AWS Batch can use [to trigger AWS Fargate tasks]
- (https://docs.aws.amazon.com/batch/latest/userguide/execution-IAM-role.html).
- shared_memory : int, optional, default None
- The value for the size (in MiB) of the /dev/shm volume for this step.
- This parameter maps to the `--shm-size` option in Docker.
- max_swap : int, optional, default None
- The total amount of swap memory (in MiB) a container can use for this
- step. This parameter is translated to the `--memory-swap` option in
- Docker where the value is the sum of the container memory plus the
- `max_swap` value.
- swappiness : int, optional, default None
- This allows you to tune memory swappiness behavior for this step.
- A swappiness value of 0 causes swapping not to happen unless absolutely
- necessary. A swappiness value of 100 causes pages to be swapped very
- aggressively. Accepted values are whole numbers between 0 and 100.
- use_tmpfs : bool, default False
- This enables an explicit tmpfs mount for this step. Note that tmpfs is
- not available on Fargate compute environments
- tmpfs_tempdir : bool, default True
- sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
- tmpfs_size : int, optional, default None
- The value for the size (in MiB) of the tmpfs mount for this step.
- This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
- memory allocated for this step.
- tmpfs_path : str, optional, default None
- Path to tmpfs mount for this step. Defaults to /metaflow_temp.
- inferentia : int, default 0
- Number of Inferentia chips required for this step.
- trainium : int, default None
- Alias for inferentia. Use only one of the two.
- efa : int, default 0
- Number of elastic fabric adapter network devices to attach to container
- ephemeral_storage : int, default None
- The total amount, in GiB, of ephemeral storage to set for the task, 21-200GiB.
- This is only relevant for Fargate compute environments
- log_driver: str, optional, default None
- The log driver to use for the Amazon ECS container.
- log_options: List[str], optional, default None
- List of strings containing options for the chosen log driver. The configurable values
- depend on the `log driver` chosen. Validation of these options is not supported yet.
- Example: [`awslogs-group:aws/batch/job`]
+ type : str, default 'default'
+ Card type.
+ id : str, optional, default None
+ If multiple cards are present, use this id to identify this card.
+ options : Dict[str, Any], default {}
+ Options passed to the card. The contents depend on the card type.
+ timeout : int, default 45
+ Interrupt reporting if it takes more than this many seconds.
  """
  ...

  @typing.overload
- def batch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def batch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def batch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = 'METAFLOW_BATCH_JOB_QUEUE', iam_role: str = 'METAFLOW_ECS_S3_ACCESS_IAM_ROLE', execution_role: str = 'METAFLOW_ECS_FARGATE_EXECUTION_ROLE', shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None):
+ def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
  """
- Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
+ Creates a human-readable report, a Metaflow Card, after this step completes.
+
+ Note that you may add multiple `@card` decorators in a step with different parameters.


  Parameters
  ----------
- cpu : int, default 1
- Number of CPUs required for this step. If `@resources` is
- also present, the maximum value from all decorators is used.
- gpu : int, default 0
- Number of GPUs required for this step. If `@resources` is
- also present, the maximum value from all decorators is used.
- memory : int, default 4096
- Memory size (in MB) required for this step. If
- `@resources` is also present, the maximum value from all decorators is
- used.
- image : str, optional, default None
- Docker image to use when launching on AWS Batch. If not specified, and
- METAFLOW_BATCH_CONTAINER_IMAGE is specified, that image is used. If
- not, a default Docker image mapping to the current version of Python is used.
- queue : str, default METAFLOW_BATCH_JOB_QUEUE
- AWS Batch Job Queue to submit the job to.
- iam_role : str, default METAFLOW_ECS_S3_ACCESS_IAM_ROLE
- AWS IAM role that AWS Batch container uses to access AWS cloud resources.
- execution_role : str, default METAFLOW_ECS_FARGATE_EXECUTION_ROLE
- AWS IAM role that AWS Batch can use [to trigger AWS Fargate tasks]
- (https://docs.aws.amazon.com/batch/latest/userguide/execution-IAM-role.html).
- shared_memory : int, optional, default None
- The value for the size (in MiB) of the /dev/shm volume for this step.
- This parameter maps to the `--shm-size` option in Docker.
- max_swap : int, optional, default None
- The total amount of swap memory (in MiB) a container can use for this
- step. This parameter is translated to the `--memory-swap` option in
- Docker where the value is the sum of the container memory plus the
- `max_swap` value.
- swappiness : int, optional, default None
- This allows you to tune memory swappiness behavior for this step.
- A swappiness value of 0 causes swapping not to happen unless absolutely
- necessary. A swappiness value of 100 causes pages to be swapped very
- aggressively. Accepted values are whole numbers between 0 and 100.
- use_tmpfs : bool, default False
- This enables an explicit tmpfs mount for this step. Note that tmpfs is
- not available on Fargate compute environments
- tmpfs_tempdir : bool, default True
- sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
- tmpfs_size : int, optional, default None
- The value for the size (in MiB) of the tmpfs mount for this step.
- This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
- memory allocated for this step.
- tmpfs_path : str, optional, default None
- Path to tmpfs mount for this step. Defaults to /metaflow_temp.
- inferentia : int, default 0
- Number of Inferentia chips required for this step.
- trainium : int, default None
- Alias for inferentia. Use only one of the two.
- efa : int, default 0
- Number of elastic fabric adapter network devices to attach to container
- ephemeral_storage : int, default None
- The total amount, in GiB, of ephemeral storage to set for the task, 21-200GiB.
- This is only relevant for Fargate compute environments
- log_driver: str, optional, default None
- The log driver to use for the Amazon ECS container.
- log_options: List[str], optional, default None
- List of strings containing options for the chosen log driver. The configurable values
- depend on the `log driver` chosen. Validation of these options is not supported yet.
- Example: [`awslogs-group:aws/batch/job`]
+ type : str, default 'default'
+ Card type.
+ id : str, optional, default None
+ If multiple cards are present, use this id to identify this card.
+ options : Dict[str, Any], default {}
+ Options passed to the card. The contents depend on the card type.
+ timeout : int, default 45
+ Interrupt reporting if it takes more than this many seconds.
  """
  ...

  @typing.overload
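For the `@card` decorator shown above, a minimal sketch that attaches a default card and appends a Markdown component from inside the step; it assumes the real `metaflow` package with card support is installed, and the flow name is illustrative:

```python
from metaflow import FlowSpec, step, card, current
from metaflow.cards import Markdown


class CardFlow(FlowSpec):
    # Produce a human-readable card for this step; allow rendering up to 60 seconds.
    @card(type="default", timeout=60)
    @step
    def start(self):
        current.card.append(Markdown("# Report\nGenerated by the start step."))
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    CardFlow()
```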
- def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
678
+ def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
563
679
  """
564
- Specifies a timeout for your step.
565
-
566
- This decorator is useful if this step may hang indefinitely.
567
-
568
- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
569
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
570
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
680
+ Specifies the Conda environment for the step.
571
681
 
572
- Note that all the values specified in parameters are added together so if you specify
573
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
682
+ Information in this decorator will augment any
683
+ attributes set in the `@conda_base` flow-level decorator. Hence,
684
+ you can use `@conda_base` to set packages required by all
685
+ steps and use `@conda` to specify step-specific overrides.
574
686
 
575
687
 
576
688
  Parameters
577
689
  ----------
578
- seconds : int, default 0
579
- Number of seconds to wait prior to timing out.
580
- minutes : int, default 0
581
- Number of minutes to wait prior to timing out.
582
- hours : int, default 0
583
- Number of hours to wait prior to timing out.
690
+ packages : Dict[str, str], default {}
691
+ Packages to use for this step. The key is the name of the package
692
+ and the value is the version to use.
693
+ libraries : Dict[str, str], default {}
694
+ Supported for backward compatibility. When used with packages, packages will take precedence.
695
+ python : str, optional, default None
696
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
697
+ that the version used will correspond to the version of the Python interpreter used to start the run.
698
+ disabled : bool, default False
699
+ If set to True, disables @conda.
584
700
  """
585
701
  ...
586
702
 
587
703
  @typing.overload
588
- def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
704
+ def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
589
705
  ...
590
706
 
591
707
  @typing.overload
592
- def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
708
+ def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
593
709
  ...
594
710
 
595
- def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
711
+ def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
596
712
  """
597
- Specifies a timeout for your step.
598
-
599
- This decorator is useful if this step may hang indefinitely.
600
-
601
- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
602
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
603
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
713
+ Specifies the Conda environment for the step.
604
714
 
605
- Note that all the values specified in parameters are added together so if you specify
606
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
715
+ Information in this decorator will augment any
716
+ attributes set in the `@conda_base` flow-level decorator. Hence,
717
+ you can use `@conda_base` to set packages required by all
718
+ steps and use `@conda` to specify step-specific overrides.
607
719
 
608
720
 
609
721
  Parameters
610
722
  ----------
611
- seconds : int, default 0
612
- Number of seconds to wait prior to timing out.
613
- minutes : int, default 0
614
- Number of minutes to wait prior to timing out.
615
- hours : int, default 0
616
- Number of hours to wait prior to timing out.
723
+ packages : Dict[str, str], default {}
724
+ Packages to use for this step. The key is the name of the package
725
+ and the value is the version to use.
726
+ libraries : Dict[str, str], default {}
727
+ Supported for backward compatibility. When used with packages, packages will take precedence.
728
+ python : str, optional, default None
729
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
730
+ that the version used will correspond to the version of the Python interpreter used to start the run.
731
+ disabled : bool, default False
732
+ If set to True, disables @conda.
617
733
  """
618
734
  ...
619
735
 
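For reference, a minimal usage sketch of the `@conda` step decorator documented above. The flow name, the pinned `numpy` version, and the Python version passed to `@conda_base` are illustrative assumptions, not part of the stub.

```
from metaflow import FlowSpec, conda, conda_base, step

@conda_base(python='3.10.4')                 # flow-wide interpreter version (illustrative)
class HypotheticalCondaFlow(FlowSpec):

    @conda(packages={'numpy': '1.26.4'})     # step-specific override on top of @conda_base
    @step
    def start(self):
        import numpy as np                   # resolved inside the step's Conda environment
        self.total = int(np.arange(10).sum())
        self.next(self.end)

    @step
    def end(self):
        print('total =', self.total)

if __name__ == '__main__':
    HypotheticalCondaFlow()
```

Such a flow is typically run with `python flow.py --environment=conda run` so that the declared environments are actually resolved.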
@@ -668,340 +784,80 @@ def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
668
784
  """
669
785
  ...
670
786
 
671
- @typing.overload
672
- def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
787
+ def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = 'KUBERNETES_IMAGE_PULL_POLICY', service_account: str = 'METAFLOW_KUBERNETES_SERVICE_ACCOUNT', secrets: typing.Optional[typing.List[str]] = None, node_selector: typing.Union[typing.Dict[str, str], str, None] = None, namespace: str = 'METAFLOW_KUBERNETES_NAMESPACE', gpu: typing.Optional[int] = None, gpu_vendor: str = 'KUBERNETES_GPU_VENDOR', tolerations: typing.List[str] = [], labels: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_LABELS', annotations: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_ANNOTATIONS', use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = '/metaflow_temp', persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None, hostname_resolution_timeout: int = 600, qos: str = 'Burstable') -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
673
788
  """
674
- Specifies the number of times the task corresponding
675
- to a step needs to be retried.
676
-
677
- This decorator is useful for handling transient errors, such as networking issues.
678
- If your task contains operations that can't be retried safely, e.g. database updates,
679
- it is advisable to annotate it with `@retry(times=0)`.
680
-
681
- This can be used in conjunction with the `@catch` decorator. The `@catch`
682
- decorator will execute a no-op task after all retries have been exhausted,
683
- ensuring that the flow execution can continue.
789
+ Specifies that this step should execute on Kubernetes.
684
790
 
685
791
 
686
792
  Parameters
687
793
  ----------
688
- times : int, default 3
689
- Number of times to retry this task.
690
- minutes_between_retries : int, default 2
691
- Number of minutes between retries.
692
- """
693
- ...
694
-
695
- @typing.overload
696
- def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
697
- ...
698
-
699
- @typing.overload
700
- def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
701
- ...
702
-
703
- def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
704
- """
705
- Specifies the number of times the task corresponding
706
- to a step needs to be retried.
707
-
708
- This decorator is useful for handling transient errors, such as networking issues.
709
- If your task contains operations that can't be retried safely, e.g. database updates,
710
- it is advisable to annotate it with `@retry(times=0)`.
711
-
712
- This can be used in conjunction with the `@catch` decorator. The `@catch`
713
- decorator will execute a no-op task after all retries have been exhausted,
714
- ensuring that the flow execution can continue.
715
-
716
-
717
- Parameters
718
- ----------
719
- times : int, default 3
720
- Number of times to retry this task.
721
- minutes_between_retries : int, default 2
722
- Number of minutes between retries.
723
- """
724
- ...
725
-
726
- @typing.overload
727
- def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
728
- """
729
- Specifies environment variables to be set prior to the execution of a step.
730
-
731
-
732
- Parameters
733
- ----------
734
- vars : Dict[str, str], default {}
735
- Dictionary of environment variables to set.
736
- """
737
- ...
738
-
739
- @typing.overload
740
- def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
741
- ...
742
-
743
- @typing.overload
744
- def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
745
- ...
746
-
747
- def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
748
- """
749
- Specifies environment variables to be set prior to the execution of a step.
750
-
751
-
752
- Parameters
753
- ----------
754
- vars : Dict[str, str], default {}
755
- Dictionary of environment variables to set.
756
- """
757
- ...
758
-
759
- @typing.overload
760
- def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
761
- """
762
- Specifies the PyPI packages for the step.
763
-
764
- Information in this decorator will augment any
765
- attributes set in the `@pypi_base` flow-level decorator. Hence,
766
- you can use `@pypi_base` to set packages required by all
767
- steps and use `@pypi` to specify step-specific overrides.
768
-
769
-
770
- Parameters
771
- ----------
772
- packages : Dict[str, str], default: {}
773
- Packages to use for this step. The key is the name of the package
774
- and the value is the version to use.
775
- python : str, optional, default: None
776
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
777
- that the version used will correspond to the version of the Python interpreter used to start the run.
778
- """
779
- ...
780
-
781
- @typing.overload
782
- def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
783
- ...
784
-
785
- @typing.overload
786
- def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
787
- ...
788
-
789
- def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
790
- """
791
- Specifies the PyPI packages for the step.
792
-
793
- Information in this decorator will augment any
794
- attributes set in the `@pypi_base` flow-level decorator. Hence,
795
- you can use `@pypi_base` to set packages required by all
796
- steps and use `@pypi` to specify step-specific overrides.
797
-
798
-
799
- Parameters
800
- ----------
801
- packages : Dict[str, str], default: {}
802
- Packages to use for this step. The key is the name of the package
803
- and the value is the version to use.
804
- python : str, optional, default: None
805
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
806
- that the version used will correspond to the version of the Python interpreter used to start the run.
807
- """
808
- ...
809
-
810
- @typing.overload
811
- def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
812
- """
813
- Specifies secrets to be retrieved and injected as environment variables prior to
814
- the execution of a step.
815
-
816
-
817
- Parameters
818
- ----------
819
- sources : List[Union[str, Dict[str, Any]]], default: []
820
- List of secret specs, defining how the secrets are to be retrieved
821
- """
822
- ...
823
-
824
- @typing.overload
825
- def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
826
- ...
827
-
828
- @typing.overload
829
- def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
830
- ...
831
-
832
- def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []):
833
- """
834
- Specifies secrets to be retrieved and injected as environment variables prior to
835
- the execution of a step.
836
-
837
-
838
- Parameters
839
- ----------
840
- sources : List[Union[str, Dict[str, Any]]], default: []
841
- List of secret specs, defining how the secrets are to be retrieved
842
- """
843
- ...
844
-
845
- @typing.overload
846
- def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
847
- """
848
- Decorator prototype for all step decorators. This function gets specialized
849
- and imported for all decorators types by _import_plugin_decorators().
850
- """
851
- ...
852
-
853
- @typing.overload
854
- def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
855
- ...
856
-
857
- def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
858
- """
859
- Decorator prototype for all step decorators. This function gets specialized
860
- and imported for all decorators types by _import_plugin_decorators().
861
- """
862
- ...
863
-
864
- @typing.overload
865
- def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
866
- """
867
- Specifies the event(s) that this flow depends on.
868
-
869
- ```
870
- @trigger(event='foo')
871
- ```
872
- or
873
- ```
874
- @trigger(events=['foo', 'bar'])
875
- ```
876
-
877
- Additionally, you can specify the parameter mappings
878
- to map event payload to Metaflow parameters for the flow.
879
- ```
880
- @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
881
- ```
882
- or
883
- ```
884
- @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'}},
885
- {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}}])
886
- ```
887
-
888
- 'parameters' can also be a list of strings and tuples like so:
889
- ```
890
- @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
891
- ```
892
- This is equivalent to:
893
- ```
894
- @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
895
- ```
896
-
897
-
898
- Parameters
899
- ----------
900
- event : Union[str, Dict[str, Any]], optional, default None
901
- Event dependency for this flow.
902
- events : List[Union[str, Dict[str, Any]]], default []
903
- Events dependency for this flow.
904
- options : Dict[str, Any], default {}
905
- Backend-specific configuration for tuning eventing behavior.
906
- """
907
- ...
908
-
909
- @typing.overload
910
- def trigger(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
911
- ...
912
-
913
- def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}):
914
- """
915
- Specifies the event(s) that this flow depends on.
916
-
917
- ```
918
- @trigger(event='foo')
919
- ```
920
- or
921
- ```
922
- @trigger(events=['foo', 'bar'])
923
- ```
924
-
925
- Additionally, you can specify the parameter mappings
926
- to map event payload to Metaflow parameters for the flow.
927
- ```
928
- @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
929
- ```
930
- or
931
- ```
932
- @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'}},
933
- {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}}])
934
- ```
935
-
936
- 'parameters' can also be a list of strings and tuples like so:
937
- ```
938
- @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
939
- ```
940
- This is equivalent to:
941
- ```
942
- @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
943
- ```
944
-
945
-
946
- Parameters
947
- ----------
948
- event : Union[str, Dict[str, Any]], optional, default None
949
- Event dependency for this flow.
950
- events : List[Union[str, Dict[str, Any]]], default []
951
- Events dependency for this flow.
952
- options : Dict[str, Any], default {}
953
- Backend-specific configuration for tuning eventing behavior.
954
- """
955
- ...
956
-
957
- @typing.overload
958
- def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
959
- """
960
- Specifies the times when the flow should be run when running on a
961
- production scheduler.
962
-
963
-
964
- Parameters
965
- ----------
966
- hourly : bool, default False
967
- Run the workflow hourly.
968
- daily : bool, default True
969
- Run the workflow daily.
970
- weekly : bool, default False
971
- Run the workflow weekly.
972
- cron : str, optional, default None
973
- Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
974
- specified by this expression.
975
- timezone : str, optional, default None
976
- Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
977
- which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
978
- """
979
- ...
980
-
981
- @typing.overload
982
- def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
983
- ...
984
-
985
- def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
986
- """
987
- Specifies the times when the flow should be run when running on a
988
- production scheduler.
989
-
990
-
991
- Parameters
992
- ----------
993
- hourly : bool, default False
994
- Run the workflow hourly.
995
- daily : bool, default True
996
- Run the workflow daily.
997
- weekly : bool, default False
998
- Run the workflow weekly.
999
- cron : str, optional, default None
1000
- Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1001
- specified by this expression.
1002
- timezone : str, optional, default None
1003
- Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1004
- which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
794
+ cpu : int, default 1
795
+ Number of CPUs required for this step. If `@resources` is
796
+ also present, the maximum value from all decorators is used.
797
+ memory : int, default 4096
798
+ Memory size (in MB) required for this step. If
799
+ `@resources` is also present, the maximum value from all decorators is
800
+ used.
801
+ disk : int, default 10240
802
+ Disk size (in MB) required for this step. If
803
+ `@resources` is also present, the maximum value from all decorators is
804
+ used.
805
+ image : str, optional, default None
806
+ Docker image to use when launching on Kubernetes. If not specified, and
807
+ METAFLOW_KUBERNETES_CONTAINER_IMAGE is specified, that image is used. If
808
+ not, a default Docker image mapping to the current version of Python is used.
809
+ image_pull_policy: str, default KUBERNETES_IMAGE_PULL_POLICY
810
+ If given, the imagePullPolicy to be applied to the Docker image of the step.
811
+ service_account : str, default METAFLOW_KUBERNETES_SERVICE_ACCOUNT
812
+ Kubernetes service account to use when launching a pod in Kubernetes.
813
+ secrets : List[str], optional, default None
814
+ Kubernetes secrets to use when launching a pod in Kubernetes. These
815
+ secrets are in addition to the ones defined in `METAFLOW_KUBERNETES_SECRETS`
816
+ in Metaflow configuration.
817
+ node_selector: Union[Dict[str,str], str], optional, default None
818
+ Kubernetes node selector(s) to apply to the pod running the task.
819
+ Can be passed in as a comma-separated string of values, e.g.
820
+ 'kubernetes.io/os=linux,kubernetes.io/arch=amd64' or as a dictionary
821
+ {'kubernetes.io/os': 'linux', 'kubernetes.io/arch': 'amd64'}
822
+ namespace : str, default METAFLOW_KUBERNETES_NAMESPACE
823
+ Kubernetes namespace to use when launching a pod in Kubernetes.
824
+ gpu : int, optional, default None
825
+ Number of GPUs required for this step. A value of zero implies that
826
+ the scheduled node should not have GPUs.
827
+ gpu_vendor : str, default KUBERNETES_GPU_VENDOR
828
+ The vendor of the GPUs to be used for this step.
829
+ tolerations : List[str], default []
830
+ The default is extracted from METAFLOW_KUBERNETES_TOLERATIONS.
831
+ Kubernetes tolerations to use when launching a pod in Kubernetes.
832
+ labels: Dict[str, str], default: METAFLOW_KUBERNETES_LABELS
833
+ Kubernetes labels to use when launching a pod in Kubernetes.
834
+ annotations: Dict[str, str], default: METAFLOW_KUBERNETES_ANNOTATIONS
835
+ Kubernetes annotations to use when launching a pod in Kubernetes.
836
+ use_tmpfs : bool, default False
837
+ This enables an explicit tmpfs mount for this step.
838
+ tmpfs_tempdir : bool, default True
839
+ Sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
840
+ tmpfs_size : int, optional, default: None
841
+ The value for the size (in MiB) of the tmpfs mount for this step.
842
+ This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
843
+ memory allocated for this step.
844
+ tmpfs_path : str, optional, default /metaflow_temp
845
+ Path to tmpfs mount for this step.
846
+ persistent_volume_claims : Dict[str, str], optional, default None
847
+ A map (dictionary) of persistent volumes to be mounted to the pod for this step. The map is from persistent
848
+ volumes to the path to which the volume is to be mounted, e.g., `{'pvc-name': '/path/to/mount/on'}`.
849
+ shared_memory: int, optional
850
+ Shared memory size (in MiB) required for this step.
851
+ port: int, optional
852
+ Port number to specify in the Kubernetes job object.
853
+ compute_pool : str, optional, default None
854
+ Compute pool to be used for this step.
855
+ If not specified, any accessible compute pool within the perimeter is used.
856
+ hostname_resolution_timeout: int, default 10 * 60
857
+ Timeout in seconds for the worker tasks in the gang-scheduled cluster to resolve the hostname of the control task.
858
+ Only applicable when @parallel is used.
859
+ qos: str, default: Burstable
860
+ Quality of Service class to assign to the pod. Supported values are: Guaranteed, Burstable, BestEffort
1005
861
  """
1006
862
  ...
1007
863
 
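For reference, a minimal sketch of the `@kubernetes` step decorator described above. The resource values and flow name are illustrative assumptions; every omitted parameter falls back to the defaults listed in the docstring.

```
from metaflow import FlowSpec, kubernetes, step

class HypotheticalK8sFlow(FlowSpec):

    @kubernetes(cpu=2, memory=8192, disk=20480)   # illustrative resource requests
    @step
    def start(self):
        self.message = 'hello from a Kubernetes pod'
        self.next(self.end)

    @step
    def end(self):
        print(self.message)

if __name__ == '__main__':
    HypotheticalK8sFlow()
```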
@@ -1034,118 +890,17 @@ def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str,
1034
890
  The dag_id that contains the task you want to wait for.
1035
891
  external_task_ids : List[str]
1036
892
  The list of task_ids that you want to wait for.
1037
- If None (default value) the sensor waits for the DAG. (Default: None)
1038
- allowed_states : List[str]
1039
- Iterable of allowed states, (Default: ['success'])
1040
- failed_states : List[str]
1041
- Iterable of failed or dis-allowed states. (Default: None)
1042
- execution_delta : datetime.timedelta
1043
- time difference with the previous execution to look at,
1044
- the default is the same logical date as the current task or DAG. (Default: None)
1045
- check_existence: bool
1046
- Set to True to check if the external task exists or check if
1047
- the DAG to wait for exists. (Default: True)
1048
- """
1049
- ...
1050
-
1051
- @typing.overload
1052
- def trigger_on_finish(*, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1053
- """
1054
- Specifies the flow(s) that this flow depends on.
1055
-
1056
- ```
1057
- @trigger_on_finish(flow='FooFlow')
1058
- ```
1059
- or
1060
- ```
1061
- @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
1062
- ```
1063
- This decorator respects the @project decorator and triggers the flow
1064
- when upstream runs within the same namespace complete successfully
1065
-
1066
- Additionally, you can specify project aware upstream flow dependencies
1067
- by specifying the fully qualified project_flow_name.
1068
- ```
1069
- @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
1070
- ```
1071
- or
1072
- ```
1073
- @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
1074
- ```
1075
-
1076
- You can also specify just the project or project branch (other values will be
1077
- inferred from the current project or project branch):
1078
- ```
1079
- @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1080
- ```
1081
-
1082
- Note that `branch` is typically one of:
1083
- - `prod`
1084
- - `user.bob`
1085
- - `test.my_experiment`
1086
- - `prod.staging`
1087
-
1088
-
1089
- Parameters
1090
- ----------
1091
- flow : Union[str, Dict[str, str]], optional, default None
1092
- Upstream flow dependency for this flow.
1093
- flows : List[Union[str, Dict[str, str]]], default []
1094
- Upstream flow dependencies for this flow.
1095
- options : Dict[str, Any], default {}
1096
- Backend-specific configuration for tuning eventing behavior.
1097
- """
1098
- ...
1099
-
1100
- @typing.overload
1101
- def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1102
- ...
1103
-
1104
- def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
1105
- """
1106
- Specifies the flow(s) that this flow depends on.
1107
-
1108
- ```
1109
- @trigger_on_finish(flow='FooFlow')
1110
- ```
1111
- or
1112
- ```
1113
- @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
1114
- ```
1115
- This decorator respects the @project decorator and triggers the flow
1116
- when upstream runs within the same namespace complete successfully
1117
-
1118
- Additionally, you can specify project aware upstream flow dependencies
1119
- by specifying the fully qualified project_flow_name.
1120
- ```
1121
- @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
1122
- ```
1123
- or
1124
- ```
1125
- @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
1126
- ```
1127
-
1128
- You can also specify just the project or project branch (other values will be
1129
- inferred from the current project or project branch):
1130
- ```
1131
- @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1132
- ```
1133
-
1134
- Note that `branch` is typically one of:
1135
- - `prod`
1136
- - `user.bob`
1137
- - `test.my_experiment`
1138
- - `prod.staging`
1139
-
1140
-
1141
- Parameters
1142
- ----------
1143
- flow : Union[str, Dict[str, str]], optional, default None
1144
- Upstream flow dependency for this flow.
1145
- flows : List[Union[str, Dict[str, str]]], default []
1146
- Upstream flow dependencies for this flow.
1147
- options : Dict[str, Any], default {}
1148
- Backend-specific configuration for tuning eventing behavior.
893
+ If None (default value) the sensor waits for the DAG. (Default: None)
894
+ allowed_states : List[str]
895
+ Iterable of allowed states. (Default: ['success'])
896
+ failed_states : List[str]
897
+ Iterable of failed or disallowed states. (Default: None)
898
+ execution_delta : datetime.timedelta
899
+ time difference with the previous execution to look at,
900
+ the default is the same logical date as the current task or DAG. (Default: None)
901
+ check_existence: bool
902
+ Set to True to check if the external task exists or check if
903
+ the DAG to wait for exists. (Default: True)
1149
904
  """
1150
905
  ...
1151
906
 
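For reference, a hedged sketch of how the flow-level `@airflow_external_task_sensor` decorator above might be applied. The DAG and task names are hypothetical, the numeric settings are illustrative, the `external_dag_id` keyword is inferred from the docstring, and only parameters visible in this stub are passed.

```
from metaflow import FlowSpec, airflow_external_task_sensor, step

@airflow_external_task_sensor(
    timeout=3600,                                      # illustrative sensor settings
    poke_interval=60,
    mode='reschedule',
    external_dag_id='hypothetical_upstream_dag',       # assumed upstream DAG name
    external_task_ids=['hypothetical_upstream_task'],  # assumed upstream task id
    allowed_states=['success'],
    check_existence=True,
)
class HypotheticalSensorFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == '__main__':
    HypotheticalSensorFlow()
```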
@@ -1276,6 +1031,99 @@ def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packa
1276
1031
  """
1277
1032
  ...
1278
1033
 
1034
+ @typing.overload
1035
+ def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1036
+ """
1037
+ Specifies the event(s) that this flow depends on.
1038
+
1039
+ ```
1040
+ @trigger(event='foo')
1041
+ ```
1042
+ or
1043
+ ```
1044
+ @trigger(events=['foo', 'bar'])
1045
+ ```
1046
+
1047
+ Additionally, you can specify the parameter mappings
1048
+ to map event payload to Metaflow parameters for the flow.
1049
+ ```
1050
+ @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
1051
+ ```
1052
+ or
1053
+ ```
1054
+ @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'}},
1055
+ {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}}])
1056
+ ```
1057
+
1058
+ 'parameters' can also be a list of strings and tuples like so:
1059
+ ```
1060
+ @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
1061
+ ```
1062
+ This is equivalent to:
1063
+ ```
1064
+ @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
1065
+ ```
1066
+
1067
+
1068
+ Parameters
1069
+ ----------
1070
+ event : Union[str, Dict[str, Any]], optional, default None
1071
+ Event dependency for this flow.
1072
+ events : List[Union[str, Dict[str, Any]]], default []
1073
+ Events dependency for this flow.
1074
+ options : Dict[str, Any], default {}
1075
+ Backend-specific configuration for tuning eventing behavior.
1076
+ """
1077
+ ...
1078
+
1079
+ @typing.overload
1080
+ def trigger(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1081
+ ...
1082
+
1083
+ def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}):
1084
+ """
1085
+ Specifies the event(s) that this flow depends on.
1086
+
1087
+ ```
1088
+ @trigger(event='foo')
1089
+ ```
1090
+ or
1091
+ ```
1092
+ @trigger(events=['foo', 'bar'])
1093
+ ```
1094
+
1095
+ Additionally, you can specify the parameter mappings
1096
+ to map event payload to Metaflow parameters for the flow.
1097
+ ```
1098
+ @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
1099
+ ```
1100
+ or
1101
+ ```
1102
+ @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'}},
1103
+ {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}}])
1104
+ ```
1105
+
1106
+ 'parameters' can also be a list of strings and tuples like so:
1107
+ ```
1108
+ @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
1109
+ ```
1110
+ This is equivalent to:
1111
+ ```
1112
+ @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
1113
+ ```
1114
+
1115
+
1116
+ Parameters
1117
+ ----------
1118
+ event : Union[str, Dict[str, Any]], optional, default None
1119
+ Event dependency for this flow.
1120
+ events : List[Union[str, Dict[str, Any]]], default []
1121
+ Events dependency for this flow.
1122
+ options : Dict[str, Any], default {}
1123
+ Backend-specific configuration for tuning eventing behavior.
1124
+ """
1125
+ ...
1126
+
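For reference, a minimal sketch of the `@trigger` flow-level decorator documented above, wiring an event field to a flow parameter. The event name, payload field, and parameter name are illustrative assumptions.

```
from metaflow import FlowSpec, Parameter, step, trigger

@trigger(event={'name': 'hypothetical_event',                  # assumed event name
                'parameters': {'window': 'payload_window'}})   # event field -> flow parameter
class HypotheticalTriggeredFlow(FlowSpec):

    window = Parameter('window', default='1d')

    @step
    def start(self):
        print('triggered with window =', self.window)
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == '__main__':
    HypotheticalTriggeredFlow()
```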
1279
1127
  def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1280
1128
  """
1281
1129
  The `@airflow_s3_key_sensor` decorator attaches an Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
@@ -1319,3 +1167,155 @@ def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, expone
1319
1167
  """
1320
1168
  ...
1321
1169
 
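For reference, a hedged sketch of the `@airflow_s3_key_sensor` decorator whose signature appears above. The bucket, key, connection id, and timing values are illustrative assumptions.

```
from metaflow import FlowSpec, airflow_s3_key_sensor, step

@airflow_s3_key_sensor(
    timeout=3600,                        # illustrative sensor settings
    poke_interval=60,
    mode='poke',
    exponential_backoff=True,
    pool='default_pool',
    soft_fail=False,
    name='wait_for_daily_file',
    description='Wait for the daily input object to land in S3',
    bucket_key='input/daily/data.csv',   # assumed object key
    bucket_name='hypothetical-bucket',   # assumed bucket name
    wildcard_match=False,
    aws_conn_id='aws_default',
    verify=True,
)
class HypotheticalS3SensorFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == '__main__':
    HypotheticalS3SensorFlow()
```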
1170
+ @typing.overload
1171
+ def trigger_on_finish(*, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1172
+ """
1173
+ Specifies the flow(s) that this flow depends on.
1174
+
1175
+ ```
1176
+ @trigger_on_finish(flow='FooFlow')
1177
+ ```
1178
+ or
1179
+ ```
1180
+ @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
1181
+ ```
1182
+ This decorator respects the @project decorator and triggers the flow
1183
+ when upstream runs within the same namespace complete successfully
1184
+
1185
+ Additionally, you can specify project aware upstream flow dependencies
1186
+ by specifying the fully qualified project_flow_name.
1187
+ ```
1188
+ @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
1189
+ ```
1190
+ or
1191
+ ```
1192
+ @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
1193
+ ```
1194
+
1195
+ You can also specify just the project or project branch (other values will be
1196
+ inferred from the current project or project branch):
1197
+ ```
1198
+ @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1199
+ ```
1200
+
1201
+ Note that `branch` is typically one of:
1202
+ - `prod`
1203
+ - `user.bob`
1204
+ - `test.my_experiment`
1205
+ - `prod.staging`
1206
+
1207
+
1208
+ Parameters
1209
+ ----------
1210
+ flow : Union[str, Dict[str, str]], optional, default None
1211
+ Upstream flow dependency for this flow.
1212
+ flows : List[Union[str, Dict[str, str]]], default []
1213
+ Upstream flow dependencies for this flow.
1214
+ options : Dict[str, Any], default {}
1215
+ Backend-specific configuration for tuning eventing behavior.
1216
+ """
1217
+ ...
1218
+
1219
+ @typing.overload
1220
+ def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1221
+ ...
1222
+
1223
+ def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
1224
+ """
1225
+ Specifies the flow(s) that this flow depends on.
1226
+
1227
+ ```
1228
+ @trigger_on_finish(flow='FooFlow')
1229
+ ```
1230
+ or
1231
+ ```
1232
+ @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
1233
+ ```
1234
+ This decorator respects the @project decorator and triggers the flow
1235
+ when upstream runs within the same namespace complete successfully
1236
+
1237
+ Additionally, you can specify project aware upstream flow dependencies
1238
+ by specifying the fully qualified project_flow_name.
1239
+ ```
1240
+ @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
1241
+ ```
1242
+ or
1243
+ ```
1244
+ @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
1245
+ ```
1246
+
1247
+ You can also specify just the project or project branch (other values will be
1248
+ inferred from the current project or project branch):
1249
+ ```
1250
+ @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1251
+ ```
1252
+
1253
+ Note that `branch` is typically one of:
1254
+ - `prod`
1255
+ - `user.bob`
1256
+ - `test.my_experiment`
1257
+ - `prod.staging`
1258
+
1259
+
1260
+ Parameters
1261
+ ----------
1262
+ flow : Union[str, Dict[str, str]], optional, default None
1263
+ Upstream flow dependency for this flow.
1264
+ flows : List[Union[str, Dict[str, str]]], default []
1265
+ Upstream flow dependencies for this flow.
1266
+ options : Dict[str, Any], default {}
1267
+ Backend-specific configuration for tuning eventing behavior.
1268
+ """
1269
+ ...
1270
+
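For reference, a minimal sketch of the `@trigger_on_finish` decorator documented above, reusing the `FooFlow` upstream name from the docstring; the downstream flow name is an illustrative assumption.

```
from metaflow import FlowSpec, step, trigger_on_finish

@trigger_on_finish(flow='FooFlow')    # runs after the upstream FooFlow completes successfully
class HypotheticalDownstreamFlow(FlowSpec):

    @step
    def start(self):
        print('upstream FooFlow finished, starting downstream work')
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == '__main__':
    HypotheticalDownstreamFlow()
```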
1271
+ @typing.overload
1272
+ def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1273
+ """
1274
+ Specifies the times when the flow should be run when running on a
1275
+ production scheduler.
1276
+
1277
+
1278
+ Parameters
1279
+ ----------
1280
+ hourly : bool, default False
1281
+ Run the workflow hourly.
1282
+ daily : bool, default True
1283
+ Run the workflow daily.
1284
+ weekly : bool, default False
1285
+ Run the workflow weekly.
1286
+ cron : str, optional, default None
1287
+ Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1288
+ specified by this expression.
1289
+ timezone : str, optional, default None
1290
+ Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1291
+ which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1292
+ """
1293
+ ...
1294
+
1295
+ @typing.overload
1296
+ def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1297
+ ...
1298
+
1299
+ def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
1300
+ """
1301
+ Specifies the times when the flow should be run when running on a
1302
+ production scheduler.
1303
+
1304
+
1305
+ Parameters
1306
+ ----------
1307
+ hourly : bool, default False
1308
+ Run the workflow hourly.
1309
+ daily : bool, default True
1310
+ Run the workflow daily.
1311
+ weekly : bool, default False
1312
+ Run the workflow weekly.
1313
+ cron : str, optional, default None
1314
+ Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1315
+ specified by this expression.
1316
+ timezone : str, optional, default None
1317
+ Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1318
+ which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1319
+ """
1320
+ ...
1321
+
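For reference, a minimal sketch of the `@schedule` decorator documented above. The cron expression and IANA timezone are illustrative assumptions, and the exact cron syntax accepted depends on the production scheduler in use.

```
from metaflow import FlowSpec, schedule, step

@schedule(cron='0 6 * * *', timezone='America/Los_Angeles')   # illustrative daily 06:00 schedule
class HypotheticalScheduledFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == '__main__':
    HypotheticalScheduledFlow()
```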