metaflow-stubs 2.14.3__py2.py3-none-any.whl → 2.15.1__py2.py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (146)
  1. metaflow-stubs/__init__.pyi +644 -641
  2. metaflow-stubs/cards.pyi +2 -2
  3. metaflow-stubs/cli.pyi +2 -2
  4. metaflow-stubs/cli_components/__init__.pyi +2 -2
  5. metaflow-stubs/cli_components/utils.pyi +2 -2
  6. metaflow-stubs/client/__init__.pyi +2 -2
  7. metaflow-stubs/client/core.pyi +4 -4
  8. metaflow-stubs/client/filecache.pyi +3 -3
  9. metaflow-stubs/events.pyi +3 -3
  10. metaflow-stubs/exception.pyi +2 -2
  11. metaflow-stubs/flowspec.pyi +4 -4
  12. metaflow-stubs/generated_for.txt +1 -1
  13. metaflow-stubs/includefile.pyi +2 -2
  14. metaflow-stubs/info_file.pyi +2 -2
  15. metaflow-stubs/metadata_provider/__init__.pyi +2 -2
  16. metaflow-stubs/metadata_provider/heartbeat.pyi +2 -2
  17. metaflow-stubs/metadata_provider/metadata.pyi +2 -2
  18. metaflow-stubs/metadata_provider/util.pyi +2 -2
  19. metaflow-stubs/metaflow_config.pyi +4 -2
  20. metaflow-stubs/metaflow_current.pyi +17 -17
  21. metaflow-stubs/multicore_utils.pyi +2 -2
  22. metaflow-stubs/parameters.pyi +2 -2
  23. metaflow-stubs/plugins/__init__.pyi +13 -13
  24. metaflow-stubs/plugins/airflow/__init__.pyi +2 -2
  25. metaflow-stubs/plugins/airflow/airflow_decorator.pyi +2 -2
  26. metaflow-stubs/plugins/airflow/airflow_utils.pyi +2 -2
  27. metaflow-stubs/plugins/airflow/exception.pyi +2 -2
  28. metaflow-stubs/plugins/airflow/sensors/__init__.pyi +2 -2
  29. metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +2 -2
  30. metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +2 -2
  31. metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +2 -2
  32. metaflow-stubs/plugins/argo/__init__.pyi +2 -2
  33. metaflow-stubs/plugins/argo/argo_client.pyi +2 -2
  34. metaflow-stubs/plugins/argo/argo_events.pyi +2 -2
  35. metaflow-stubs/plugins/argo/argo_workflows.pyi +3 -3
  36. metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +3 -3
  37. metaflow-stubs/plugins/argo/argo_workflows_deployer.pyi +3 -3
  38. metaflow-stubs/plugins/argo/argo_workflows_deployer_objects.pyi +3 -3
  39. metaflow-stubs/plugins/aws/__init__.pyi +3 -3
  40. metaflow-stubs/plugins/aws/aws_client.pyi +2 -2
  41. metaflow-stubs/plugins/aws/aws_utils.pyi +2 -2
  42. metaflow-stubs/plugins/aws/batch/__init__.pyi +2 -2
  43. metaflow-stubs/plugins/aws/batch/batch.pyi +2 -2
  44. metaflow-stubs/plugins/aws/batch/batch_client.pyi +2 -2
  45. metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +2 -2
  46. metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +2 -2
  47. metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +4 -4
  48. metaflow-stubs/plugins/aws/step_functions/__init__.pyi +2 -2
  49. metaflow-stubs/plugins/aws/step_functions/dynamo_db_client.pyi +2 -2
  50. metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +2 -2
  51. metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +2 -2
  52. metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +2 -2
  53. metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +2 -2
  54. metaflow-stubs/plugins/aws/step_functions/step_functions_decorator.pyi +2 -2
  55. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer.pyi +2 -2
  56. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer_objects.pyi +2 -2
  57. metaflow-stubs/plugins/azure/__init__.pyi +2 -2
  58. metaflow-stubs/plugins/azure/azure_credential.pyi +2 -2
  59. metaflow-stubs/plugins/azure/azure_exceptions.pyi +2 -2
  60. metaflow-stubs/plugins/azure/azure_secret_manager_secrets_provider.pyi +4 -4
  61. metaflow-stubs/plugins/azure/azure_utils.pyi +2 -2
  62. metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +2 -2
  63. metaflow-stubs/plugins/azure/includefile_support.pyi +2 -2
  64. metaflow-stubs/plugins/cards/__init__.pyi +2 -2
  65. metaflow-stubs/plugins/cards/card_client.pyi +2 -2
  66. metaflow-stubs/plugins/cards/card_creator.pyi +2 -2
  67. metaflow-stubs/plugins/cards/card_datastore.pyi +2 -2
  68. metaflow-stubs/plugins/cards/card_decorator.pyi +2 -2
  69. metaflow-stubs/plugins/cards/card_modules/__init__.pyi +2 -2
  70. metaflow-stubs/plugins/cards/card_modules/basic.pyi +3 -3
  71. metaflow-stubs/plugins/cards/card_modules/card.pyi +2 -2
  72. metaflow-stubs/plugins/cards/card_modules/components.pyi +4 -4
  73. metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +2 -2
  74. metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +2 -2
  75. metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +2 -2
  76. metaflow-stubs/plugins/cards/card_resolver.pyi +2 -2
  77. metaflow-stubs/plugins/cards/component_serializer.pyi +2 -2
  78. metaflow-stubs/plugins/cards/exception.pyi +2 -2
  79. metaflow-stubs/plugins/catch_decorator.pyi +2 -2
  80. metaflow-stubs/plugins/datatools/__init__.pyi +2 -2
  81. metaflow-stubs/plugins/datatools/local.pyi +2 -2
  82. metaflow-stubs/plugins/datatools/s3/__init__.pyi +2 -2
  83. metaflow-stubs/plugins/datatools/s3/s3.pyi +2 -2
  84. metaflow-stubs/plugins/datatools/s3/s3tail.pyi +2 -2
  85. metaflow-stubs/plugins/datatools/s3/s3util.pyi +2 -2
  86. metaflow-stubs/plugins/debug_logger.pyi +2 -2
  87. metaflow-stubs/plugins/debug_monitor.pyi +2 -2
  88. metaflow-stubs/plugins/environment_decorator.pyi +2 -2
  89. metaflow-stubs/plugins/events_decorator.pyi +2 -2
  90. metaflow-stubs/plugins/frameworks/__init__.pyi +2 -2
  91. metaflow-stubs/plugins/frameworks/pytorch.pyi +2 -2
  92. metaflow-stubs/plugins/gcp/__init__.pyi +2 -2
  93. metaflow-stubs/plugins/gcp/gcp_secret_manager_secrets_provider.pyi +4 -4
  94. metaflow-stubs/plugins/gcp/gs_exceptions.pyi +2 -2
  95. metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +2 -2
  96. metaflow-stubs/plugins/gcp/gs_utils.pyi +2 -2
  97. metaflow-stubs/plugins/gcp/includefile_support.pyi +2 -2
  98. metaflow-stubs/plugins/kubernetes/__init__.pyi +2 -2
  99. metaflow-stubs/plugins/kubernetes/kube_utils.pyi +2 -2
  100. metaflow-stubs/plugins/kubernetes/kubernetes.pyi +2 -2
  101. metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +2 -2
  102. metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +4 -2
  103. metaflow-stubs/plugins/kubernetes/kubernetes_job.pyi +2 -2
  104. metaflow-stubs/plugins/kubernetes/kubernetes_jobsets.pyi +2 -2
  105. metaflow-stubs/plugins/kubernetes/spot_monitor_sidecar.pyi +2 -2
  106. metaflow-stubs/plugins/parallel_decorator.pyi +2 -2
  107. metaflow-stubs/plugins/project_decorator.pyi +2 -2
  108. metaflow-stubs/plugins/pypi/__init__.pyi +3 -2
  109. metaflow-stubs/plugins/pypi/conda_decorator.pyi +2 -2
  110. metaflow-stubs/plugins/pypi/conda_environment.pyi +4 -4
  111. metaflow-stubs/plugins/pypi/parsers.pyi +113 -0
  112. metaflow-stubs/plugins/pypi/pypi_decorator.pyi +2 -2
  113. metaflow-stubs/plugins/pypi/pypi_environment.pyi +2 -2
  114. metaflow-stubs/plugins/pypi/utils.pyi +2 -2
  115. metaflow-stubs/plugins/resources_decorator.pyi +2 -2
  116. metaflow-stubs/plugins/retry_decorator.pyi +2 -2
  117. metaflow-stubs/plugins/secrets/__init__.pyi +2 -2
  118. metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +4 -4
  119. metaflow-stubs/plugins/secrets/secrets_decorator.pyi +2 -2
  120. metaflow-stubs/plugins/storage_executor.pyi +2 -2
  121. metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +2 -2
  122. metaflow-stubs/plugins/timeout_decorator.pyi +2 -2
  123. metaflow-stubs/pylint_wrapper.pyi +2 -2
  124. metaflow-stubs/runner/__init__.pyi +2 -2
  125. metaflow-stubs/runner/deployer.pyi +30 -30
  126. metaflow-stubs/runner/deployer_impl.pyi +2 -2
  127. metaflow-stubs/runner/metaflow_runner.pyi +3 -3
  128. metaflow-stubs/runner/nbdeploy.pyi +2 -2
  129. metaflow-stubs/runner/nbrun.pyi +2 -2
  130. metaflow-stubs/runner/subprocess_manager.pyi +2 -2
  131. metaflow-stubs/runner/utils.pyi +4 -4
  132. metaflow-stubs/system/__init__.pyi +2 -2
  133. metaflow-stubs/system/system_logger.pyi +2 -2
  134. metaflow-stubs/system/system_monitor.pyi +2 -2
  135. metaflow-stubs/tagging_util.pyi +2 -2
  136. metaflow-stubs/tuple_util.pyi +2 -2
  137. metaflow-stubs/user_configs/__init__.pyi +2 -2
  138. metaflow-stubs/user_configs/config_decorators.pyi +7 -7
  139. metaflow-stubs/user_configs/config_options.pyi +2 -2
  140. metaflow-stubs/user_configs/config_parameters.pyi +6 -6
  141. metaflow-stubs/version.pyi +2 -2
  142. {metaflow_stubs-2.14.3.dist-info → metaflow_stubs-2.15.1.dist-info}/METADATA +2 -2
  143. metaflow_stubs-2.15.1.dist-info/RECORD +146 -0
  144. {metaflow_stubs-2.14.3.dist-info → metaflow_stubs-2.15.1.dist-info}/WHEEL +1 -1
  145. metaflow_stubs-2.14.3.dist-info/RECORD +0 -145
  146. {metaflow_stubs-2.14.3.dist-info → metaflow_stubs-2.15.1.dist-info}/top_level.txt +0 -0
metaflow-stubs/__init__.pyi
@@ -1,7 +1,7 @@
  ######################################################################################################
  # Auto-generated Metaflow stub file #
- # MF version: 2.14.3 #
- # Generated on 2025-02-22T04:36:00.916575 #
+ # MF version: 2.15.1 #
+ # Generated on 2025-02-28T22:52:51.105998 #
  ######################################################################################################

  from __future__ import annotations
@@ -42,6 +42,9 @@ from . import plugins as plugins
  from .plugins.datatools.s3.s3 import S3 as S3
  from . import includefile as includefile
  from .includefile import IncludeFile as IncludeFile
+ from .plugins.pypi.parsers import requirements_txt_parser as requirements_txt_parser
+ from .plugins.pypi.parsers import pyproject_toml_parser as pyproject_toml_parser
+ from .plugins.pypi.parsers import conda_environment_yml_parser as conda_environment_yml_parser
  from . import cards as cards
  from . import client as client
  from .client.core import namespace as namespace
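The three `parsers` re-exports above are the headline API addition in this release, backed by the new `metaflow-stubs/plugins/pypi/parsers.pyi` (+113 lines). Below is a minimal, hedged sketch of calling one of them directly: the import path is confirmed by the re-export above, but the call signature and return shape are assumptions to be checked against the new stub file.

```python
# Hedged sketch: the parser's exact signature is not shown in this diff.
# Assumption: it accepts the raw text of a requirements.txt and returns a
# dict of @pypi/@pypi_base-style attributes (e.g. {"packages": {...}}).
from metaflow import requirements_txt_parser

with open("requirements.txt") as f:  # illustrative local file
    parsed = requirements_txt_parser(f.read())

print(parsed)
```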
@@ -143,53 +146,241 @@ def step(f: typing.Union[typing.Callable[[FlowSpecDerived], None], typing.Callab
  ...

  @typing.overload
- def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies the PyPI packages for the step.
+ Specifies secrets to be retrieved and injected as environment variables prior to
+ the execution of a step.

- Information in this decorator will augment any
- attributes set in the `@pyi_base` flow-level decorator. Hence,
- you can use `@pypi_base` to set packages required by all
- steps and use `@pypi` to specify step-specific overrides.
+
+ Parameters
+ ----------
+ sources : List[Union[str, Dict[str, Any]]], default: []
+ List of secret specs, defining how the secrets are to be retrieved
+ """
+ ...
+
+ @typing.overload
+ def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ ...
+
+ @typing.overload
+ def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []):
+ """
+ Specifies secrets to be retrieved and injected as environment variables prior to
+ the execution of a step.


  Parameters
  ----------
- packages : Dict[str, str], default: {}
- Packages to use for this step. The key is the name of the package
- and the value is the version to use.
- python : str, optional, default: None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
+ sources : List[Union[str, Dict[str, Any]]], default: []
+ List of secret specs, defining how the secrets are to be retrieved
  """
  ...
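For context, a minimal usage sketch of the `@secrets` stub documented above; the secret source name and the environment variable it yields are placeholders that depend on the configured secrets provider.

```python
import os

from metaflow import FlowSpec, secrets, step


class SecretsFlow(FlowSpec):
    # Placeholder secret spec; real names depend on your secrets backend.
    @secrets(sources=["example-secret"])
    @step
    def start(self):
        # Each key of the retrieved secret is injected as an environment variable.
        print("injected?", "EXAMPLE_KEY" in os.environ)  # placeholder key name
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    SecretsFlow()
```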

  @typing.overload
- def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Specifies the number of times the task corresponding
+ to a step needs to be retried.
+
+ This decorator is useful for handling transient errors, such as networking issues.
+ If your task contains operations that can't be retried safely, e.g. database updates,
+ it is advisable to annotate it with `@retry(times=0)`.
+
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
+ decorator will execute a no-op task after all retries have been exhausted,
+ ensuring that the flow execution can continue.
+
+
+ Parameters
+ ----------
+ times : int, default 3
+ Number of times to retry this task.
+ minutes_between_retries : int, default 2
+ Number of minutes between retries.
+ """
  ...

  @typing.overload
- def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

- def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
+ @typing.overload
+ def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
  """
- Specifies the PyPI packages for the step.
+ Specifies the number of times the task corresponding
+ to a step needs to be retried.

- Information in this decorator will augment any
- attributes set in the `@pyi_base` flow-level decorator. Hence,
- you can use `@pypi_base` to set packages required by all
- steps and use `@pypi` to specify step-specific overrides.
+ This decorator is useful for handling transient errors, such as networking issues.
+ If your task contains operations that can't be retried safely, e.g. database updates,
+ it is advisable to annotate it with `@retry(times=0)`.
+
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
+ decorator will execute a no-op task after all retries have been exhausted,
+ ensuring that the flow execution can continue.


  Parameters
  ----------
- packages : Dict[str, str], default: {}
- Packages to use for this step. The key is the name of the package
- and the value is the version to use.
- python : str, optional, default: None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
+ times : int, default 3
+ Number of times to retry this task.
+ minutes_between_retries : int, default 2
+ Number of minutes between retries.
+ """
+ ...
+
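A minimal sketch of the `@retry` stub above, using the defaults its docstring documents; the step body is only illustrative.

```python
from metaflow import FlowSpec, retry, step


class RetryFlow(FlowSpec):
    # Transient failures in this step are retried up to 3 times,
    # with 2 minutes between attempts (the stub's documented defaults).
    @retry(times=3, minutes_between_retries=2)
    @step
    def start(self):
        self.attempted = True
        self.next(self.end)

    @step
    def end(self):
        print("done, attempted =", self.attempted)


if __name__ == "__main__":
    RetryFlow()
```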
+ @typing.overload
+ def batch(*, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = 'METAFLOW_BATCH_JOB_QUEUE', iam_role: str = 'METAFLOW_ECS_S3_ACCESS_IAM_ROLE', execution_role: str = 'METAFLOW_ECS_FARGATE_EXECUTION_ROLE', shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
+
+
+ Parameters
+ ----------
+ cpu : int, default 1
+ Number of CPUs required for this step. If `@resources` is
+ also present, the maximum value from all decorators is used.
+ gpu : int, default 0
+ Number of GPUs required for this step. If `@resources` is
+ also present, the maximum value from all decorators is used.
+ memory : int, default 4096
+ Memory size (in MB) required for this step. If
+ `@resources` is also present, the maximum value from all decorators is
+ used.
+ image : str, optional, default None
+ Docker image to use when launching on AWS Batch. If not specified, and
+ METAFLOW_BATCH_CONTAINER_IMAGE is specified, that image is used. If
+ not, a default Docker image mapping to the current version of Python is used.
+ queue : str, default METAFLOW_BATCH_JOB_QUEUE
+ AWS Batch Job Queue to submit the job to.
+ iam_role : str, default METAFLOW_ECS_S3_ACCESS_IAM_ROLE
+ AWS IAM role that AWS Batch container uses to access AWS cloud resources.
+ execution_role : str, default METAFLOW_ECS_FARGATE_EXECUTION_ROLE
+ AWS IAM role that AWS Batch can use [to trigger AWS Fargate tasks]
+ (https://docs.aws.amazon.com/batch/latest/userguide/execution-IAM-role.html).
+ shared_memory : int, optional, default None
+ The value for the size (in MiB) of the /dev/shm volume for this step.
+ This parameter maps to the `--shm-size` option in Docker.
+ max_swap : int, optional, default None
+ The total amount of swap memory (in MiB) a container can use for this
+ step. This parameter is translated to the `--memory-swap` option in
+ Docker where the value is the sum of the container memory plus the
+ `max_swap` value.
+ swappiness : int, optional, default None
+ This allows you to tune memory swappiness behavior for this step.
+ A swappiness value of 0 causes swapping not to happen unless absolutely
+ necessary. A swappiness value of 100 causes pages to be swapped very
+ aggressively. Accepted values are whole numbers between 0 and 100.
+ use_tmpfs : bool, default False
+ This enables an explicit tmpfs mount for this step. Note that tmpfs is
+ not available on Fargate compute environments
+ tmpfs_tempdir : bool, default True
+ sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
+ tmpfs_size : int, optional, default None
+ The value for the size (in MiB) of the tmpfs mount for this step.
+ This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
+ memory allocated for this step.
+ tmpfs_path : str, optional, default None
+ Path to tmpfs mount for this step. Defaults to /metaflow_temp.
+ inferentia : int, default 0
+ Number of Inferentia chips required for this step.
+ trainium : int, default None
+ Alias for inferentia. Use only one of the two.
+ efa : int, default 0
+ Number of elastic fabric adapter network devices to attach to container
+ ephemeral_storage : int, default None
+ The total amount, in GiB, of ephemeral storage to set for the task, 21-200GiB.
+ This is only relevant for Fargate compute environments
+ log_driver: str, optional, default None
+ The log driver to use for the Amazon ECS container.
+ log_options: List[str], optional, default None
+ List of strings containing options for the chosen log driver. The configurable values
+ depend on the `log driver` chosen. Validation of these options is not supported yet.
+ Example: [`awslogs-group:aws/batch/job`]
+ """
+ ...
+
+ @typing.overload
+ def batch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ ...
+
+ @typing.overload
+ def batch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def batch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = 'METAFLOW_BATCH_JOB_QUEUE', iam_role: str = 'METAFLOW_ECS_S3_ACCESS_IAM_ROLE', execution_role: str = 'METAFLOW_ECS_FARGATE_EXECUTION_ROLE', shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None):
+ """
+ Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
+
+
+ Parameters
+ ----------
+ cpu : int, default 1
+ Number of CPUs required for this step. If `@resources` is
+ also present, the maximum value from all decorators is used.
+ gpu : int, default 0
+ Number of GPUs required for this step. If `@resources` is
+ also present, the maximum value from all decorators is used.
+ memory : int, default 4096
+ Memory size (in MB) required for this step. If
+ `@resources` is also present, the maximum value from all decorators is
+ used.
+ image : str, optional, default None
+ Docker image to use when launching on AWS Batch. If not specified, and
+ METAFLOW_BATCH_CONTAINER_IMAGE is specified, that image is used. If
+ not, a default Docker image mapping to the current version of Python is used.
+ queue : str, default METAFLOW_BATCH_JOB_QUEUE
+ AWS Batch Job Queue to submit the job to.
+ iam_role : str, default METAFLOW_ECS_S3_ACCESS_IAM_ROLE
+ AWS IAM role that AWS Batch container uses to access AWS cloud resources.
+ execution_role : str, default METAFLOW_ECS_FARGATE_EXECUTION_ROLE
+ AWS IAM role that AWS Batch can use [to trigger AWS Fargate tasks]
+ (https://docs.aws.amazon.com/batch/latest/userguide/execution-IAM-role.html).
+ shared_memory : int, optional, default None
+ The value for the size (in MiB) of the /dev/shm volume for this step.
+ This parameter maps to the `--shm-size` option in Docker.
+ max_swap : int, optional, default None
+ The total amount of swap memory (in MiB) a container can use for this
+ step. This parameter is translated to the `--memory-swap` option in
+ Docker where the value is the sum of the container memory plus the
+ `max_swap` value.
+ swappiness : int, optional, default None
+ This allows you to tune memory swappiness behavior for this step.
+ A swappiness value of 0 causes swapping not to happen unless absolutely
+ necessary. A swappiness value of 100 causes pages to be swapped very
+ aggressively. Accepted values are whole numbers between 0 and 100.
+ use_tmpfs : bool, default False
+ This enables an explicit tmpfs mount for this step. Note that tmpfs is
+ not available on Fargate compute environments
+ tmpfs_tempdir : bool, default True
+ sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
+ tmpfs_size : int, optional, default None
+ The value for the size (in MiB) of the tmpfs mount for this step.
+ This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
+ memory allocated for this step.
+ tmpfs_path : str, optional, default None
+ Path to tmpfs mount for this step. Defaults to /metaflow_temp.
+ inferentia : int, default 0
+ Number of Inferentia chips required for this step.
+ trainium : int, default None
+ Alias for inferentia. Use only one of the two.
+ efa : int, default 0
+ Number of elastic fabric adapter network devices to attach to container
+ ephemeral_storage : int, default None
+ The total amount, in GiB, of ephemeral storage to set for the task, 21-200GiB.
+ This is only relevant for Fargate compute environments
+ log_driver: str, optional, default None
+ The log driver to use for the Amazon ECS container.
+ log_options: List[str], optional, default None
+ List of strings containing options for the chosen log driver. The configurable values
+ depend on the `log driver` chosen. Validation of these options is not supported yet.
+ Example: [`awslogs-group:aws/batch/job`]
  """
  ...
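A minimal sketch of the `@batch` stub above; the resource sizes are placeholders, and omitting `queue` falls back to the METAFLOW_BATCH_JOB_QUEUE configuration as the docstring notes.

```python
from metaflow import FlowSpec, batch, step


class BatchFlow(FlowSpec):
    # Placeholder resource sizes; other parameters use their documented defaults.
    @batch(cpu=2, memory=8192)
    @step
    def start(self):
        self.total = sum(range(1000))
        self.next(self.end)

    @step
    def end(self):
        print(self.total)


if __name__ == "__main__":
    BatchFlow()
```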

@@ -255,8 +446,8 @@ def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None],
  @typing.overload
  def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  """
- Parameters
- ----------
+ Decorator prototype for all step decorators. This function gets specialized
+ and imported for all decorators types by _import_plugin_decorators().
  """
  ...

@@ -266,118 +457,18 @@ def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None])

  def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
  """
- Parameters
- ----------
+ Decorator prototype for all step decorators. This function gets specialized
+ and imported for all decorators types by _import_plugin_decorators().
  """
  ...

  @typing.overload
- def card(*, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def resources(*, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Creates a human-readable report, a Metaflow Card, after this step completes.
-
- Note that you may add multiple `@card` decorators in a step with different parameters.
+ Specifies the resources needed when executing this step.

-
- Parameters
- ----------
- type : str, default 'default'
- Card type.
- id : str, optional, default None
- If multiple cards are present, use this id to identify this card.
- options : Dict[str, Any], default {}
- Options passed to the card. The contents depend on the card type.
- timeout : int, default 45
- Interrupt reporting if it takes more than this many seconds.
- """
- ...
-
- @typing.overload
- def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
- """
- Creates a human-readable report, a Metaflow Card, after this step completes.
-
- Note that you may add multiple `@card` decorators in a step with different parameters.
-
-
- Parameters
- ----------
- type : str, default 'default'
- Card type.
- id : str, optional, default None
- If multiple cards are present, use this id to identify this card.
- options : Dict[str, Any], default {}
- Options passed to the card. The contents depend on the card type.
- timeout : int, default 45
- Interrupt reporting if it takes more than this many seconds.
- """
- ...
-
- @typing.overload
- def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Specifies that the step will success under all circumstances.
-
- The decorator will create an optional artifact, specified by `var`, which
- contains the exception raised. You can use it to detect the presence
- of errors, indicating that all happy-path artifacts produced by the step
- are missing.
-
-
- Parameters
- ----------
- var : str, optional, default None
- Name of the artifact in which to store the caught exception.
- If not specified, the exception is not stored.
- print_exception : bool, default True
- Determines whether or not the exception is printed to
- stdout when caught.
- """
- ...
-
- @typing.overload
- def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
- """
- Specifies that the step will success under all circumstances.
-
- The decorator will create an optional artifact, specified by `var`, which
- contains the exception raised. You can use it to detect the presence
- of errors, indicating that all happy-path artifacts produced by the step
- are missing.
-
-
- Parameters
- ----------
- var : str, optional, default None
- Name of the artifact in which to store the caught exception.
- If not specified, the exception is not stored.
- print_exception : bool, default True
- Determines whether or not the exception is printed to
- stdout when caught.
- """
- ...
-
- @typing.overload
- def resources(*, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Specifies the resources needed when executing this step.
-
- Use `@resources` to specify the resource requirements
- independently of the specific compute layer (`@batch`, `@kubernetes`).
+ Use `@resources` to specify the resource requirements
+ independently of the specific compute layer (`@batch`, `@kubernetes`).

  You can choose the compute layer on the command line by executing e.g.
  ```
@@ -450,6 +541,108 @@ def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None]
  """
  ...
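A minimal sketch of the `@resources` stub closed out above; as its docstring notes, the declared requirements take effect when a compute layer is selected on the command line (for example `run --with batch`). Sizes below are illustrative.

```python
from metaflow import FlowSpec, resources, step


class ResourcesFlow(FlowSpec):
    # Declares requirements independently of @batch/@kubernetes.
    @resources(cpu=4, memory=16000)
    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    ResourcesFlow()
```

Running `python resources_flow.py run --with batch` (filename is illustrative) would then apply these requirements on the chosen compute layer.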

+ @typing.overload
+ def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Specifies the PyPI packages for the step.
+
+ Information in this decorator will augment any
+ attributes set in the `@pyi_base` flow-level decorator. Hence,
+ you can use `@pypi_base` to set packages required by all
+ steps and use `@pypi` to specify step-specific overrides.
+
+
+ Parameters
+ ----------
+ packages : Dict[str, str], default: {}
+ Packages to use for this step. The key is the name of the package
+ and the value is the version to use.
+ python : str, optional, default: None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ """
+ ...
+
+ @typing.overload
+ def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ ...
+
+ @typing.overload
+ def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
+ """
+ Specifies the PyPI packages for the step.
+
+ Information in this decorator will augment any
+ attributes set in the `@pyi_base` flow-level decorator. Hence,
+ you can use `@pypi_base` to set packages required by all
+ steps and use `@pypi` to specify step-specific overrides.
+
+
+ Parameters
+ ----------
+ packages : Dict[str, str], default: {}
+ Packages to use for this step. The key is the name of the package
+ and the value is the version to use.
+ python : str, optional, default: None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ """
+ ...
+
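A minimal sketch combining the step-level `@pypi` stub above with the flow-level `@pypi_base` it refers to; package names and version pins are illustrative only.

```python
from metaflow import FlowSpec, pypi, pypi_base, step


@pypi_base(python="3.11.5", packages={"requests": "2.31.0"})  # illustrative pins
class PyPIFlow(FlowSpec):
    # Step-level attributes augment the @pypi_base set, per the docstring above.
    @pypi(packages={"pandas": "2.2.0"})
    @step
    def start(self):
        import pandas  # resolved inside the step's isolated environment
        self.rows = len(pandas.DataFrame({"a": [1, 2, 3]}))
        self.next(self.end)

    @step
    def end(self):
        print(self.rows)


if __name__ == "__main__":
    PyPIFlow()
```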
+ @typing.overload
+ def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Specifies that the step will success under all circumstances.
+
+ The decorator will create an optional artifact, specified by `var`, which
+ contains the exception raised. You can use it to detect the presence
+ of errors, indicating that all happy-path artifacts produced by the step
+ are missing.
+
+
+ Parameters
+ ----------
+ var : str, optional, default None
+ Name of the artifact in which to store the caught exception.
+ If not specified, the exception is not stored.
+ print_exception : bool, default True
+ Determines whether or not the exception is printed to
+ stdout when caught.
+ """
+ ...
+
+ @typing.overload
+ def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ ...
+
+ @typing.overload
+ def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
+ """
+ Specifies that the step will success under all circumstances.
+
+ The decorator will create an optional artifact, specified by `var`, which
+ contains the exception raised. You can use it to detect the presence
+ of errors, indicating that all happy-path artifacts produced by the step
+ are missing.
+
+
+ Parameters
+ ----------
+ var : str, optional, default None
+ Name of the artifact in which to store the caught exception.
+ If not specified, the exception is not stored.
+ print_exception : bool, default True
+ Determines whether or not the exception is printed to
+ stdout when caught.
+ """
+ ...
+
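A minimal sketch of the `@catch` stub above; the artifact name `compute_failed` is just an example of the `var` parameter, and the failing statement is deliberate.

```python
from metaflow import FlowSpec, catch, step


class CatchFlow(FlowSpec):
    # The exception below is stored in self.compute_failed instead of
    # failing the run, per the stub's description of `var`.
    @catch(var="compute_failed")
    @step
    def start(self):
        self.x = 1 / 0  # deliberately raises ZeroDivisionError
        self.next(self.end)

    @step
    def end(self):
        if self.compute_failed:
            print("start failed with:", self.compute_failed)


if __name__ == "__main__":
    CatchFlow()
```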
  @typing.overload
  def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
@@ -510,151 +703,35 @@ def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
  ...

  @typing.overload
- def batch(*, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = 'METAFLOW_BATCH_JOB_QUEUE', iam_role: str = 'METAFLOW_ECS_S3_ACCESS_IAM_ROLE', execution_role: str = 'METAFLOW_ECS_FARGATE_EXECUTION_ROLE', shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
+ Specifies environment variables to be set prior to the execution of a step.


  Parameters
  ----------
- cpu : int, default 1
- Number of CPUs required for this step. If `@resources` is
- also present, the maximum value from all decorators is used.
- gpu : int, default 0
- Number of GPUs required for this step. If `@resources` is
- also present, the maximum value from all decorators is used.
- memory : int, default 4096
- Memory size (in MB) required for this step. If
- `@resources` is also present, the maximum value from all decorators is
- used.
- image : str, optional, default None
- Docker image to use when launching on AWS Batch. If not specified, and
- METAFLOW_BATCH_CONTAINER_IMAGE is specified, that image is used. If
- not, a default Docker image mapping to the current version of Python is used.
- queue : str, default METAFLOW_BATCH_JOB_QUEUE
- AWS Batch Job Queue to submit the job to.
- iam_role : str, default METAFLOW_ECS_S3_ACCESS_IAM_ROLE
- AWS IAM role that AWS Batch container uses to access AWS cloud resources.
- execution_role : str, default METAFLOW_ECS_FARGATE_EXECUTION_ROLE
- AWS IAM role that AWS Batch can use [to trigger AWS Fargate tasks]
- (https://docs.aws.amazon.com/batch/latest/userguide/execution-IAM-role.html).
- shared_memory : int, optional, default None
- The value for the size (in MiB) of the /dev/shm volume for this step.
- This parameter maps to the `--shm-size` option in Docker.
- max_swap : int, optional, default None
- The total amount of swap memory (in MiB) a container can use for this
- step. This parameter is translated to the `--memory-swap` option in
- Docker where the value is the sum of the container memory plus the
- `max_swap` value.
- swappiness : int, optional, default None
- This allows you to tune memory swappiness behavior for this step.
- A swappiness value of 0 causes swapping not to happen unless absolutely
- necessary. A swappiness value of 100 causes pages to be swapped very
- aggressively. Accepted values are whole numbers between 0 and 100.
- use_tmpfs : bool, default False
- This enables an explicit tmpfs mount for this step. Note that tmpfs is
- not available on Fargate compute environments
- tmpfs_tempdir : bool, default True
- sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
- tmpfs_size : int, optional, default None
- The value for the size (in MiB) of the tmpfs mount for this step.
- This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
- memory allocated for this step.
- tmpfs_path : str, optional, default None
- Path to tmpfs mount for this step. Defaults to /metaflow_temp.
- inferentia : int, default 0
- Number of Inferentia chips required for this step.
- trainium : int, default None
- Alias for inferentia. Use only one of the two.
- efa : int, default 0
- Number of elastic fabric adapter network devices to attach to container
- ephemeral_storage : int, default None
- The total amount, in GiB, of ephemeral storage to set for the task, 21-200GiB.
- This is only relevant for Fargate compute environments
- log_driver: str, optional, default None
- The log driver to use for the Amazon ECS container.
- log_options: List[str], optional, default None
- List of strings containing options for the chosen log driver. The configurable values
- depend on the `log driver` chosen. Validation of these options is not supported yet.
- Example: [`awslogs-group:aws/batch/job`]
+ vars : Dict[str, str], default {}
+ Dictionary of environment variables to set.
  """
  ...

  @typing.overload
- def batch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def batch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def batch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = 'METAFLOW_BATCH_JOB_QUEUE', iam_role: str = 'METAFLOW_ECS_S3_ACCESS_IAM_ROLE', execution_role: str = 'METAFLOW_ECS_FARGATE_EXECUTION_ROLE', shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None):
+ def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
  """
- Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
+ Specifies environment variables to be set prior to the execution of a step.


  Parameters
  ----------
- cpu : int, default 1
- Number of CPUs required for this step. If `@resources` is
- also present, the maximum value from all decorators is used.
- gpu : int, default 0
- Number of GPUs required for this step. If `@resources` is
- also present, the maximum value from all decorators is used.
- memory : int, default 4096
- Memory size (in MB) required for this step. If
- `@resources` is also present, the maximum value from all decorators is
- used.
- image : str, optional, default None
- Docker image to use when launching on AWS Batch. If not specified, and
- METAFLOW_BATCH_CONTAINER_IMAGE is specified, that image is used. If
- not, a default Docker image mapping to the current version of Python is used.
- queue : str, default METAFLOW_BATCH_JOB_QUEUE
- AWS Batch Job Queue to submit the job to.
- iam_role : str, default METAFLOW_ECS_S3_ACCESS_IAM_ROLE
- AWS IAM role that AWS Batch container uses to access AWS cloud resources.
- execution_role : str, default METAFLOW_ECS_FARGATE_EXECUTION_ROLE
- AWS IAM role that AWS Batch can use [to trigger AWS Fargate tasks]
- (https://docs.aws.amazon.com/batch/latest/userguide/execution-IAM-role.html).
- shared_memory : int, optional, default None
- The value for the size (in MiB) of the /dev/shm volume for this step.
- This parameter maps to the `--shm-size` option in Docker.
- max_swap : int, optional, default None
- The total amount of swap memory (in MiB) a container can use for this
- step. This parameter is translated to the `--memory-swap` option in
- Docker where the value is the sum of the container memory plus the
- `max_swap` value.
- swappiness : int, optional, default None
- This allows you to tune memory swappiness behavior for this step.
- A swappiness value of 0 causes swapping not to happen unless absolutely
- necessary. A swappiness value of 100 causes pages to be swapped very
- aggressively. Accepted values are whole numbers between 0 and 100.
- use_tmpfs : bool, default False
- This enables an explicit tmpfs mount for this step. Note that tmpfs is
- not available on Fargate compute environments
- tmpfs_tempdir : bool, default True
- sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
- tmpfs_size : int, optional, default None
- The value for the size (in MiB) of the tmpfs mount for this step.
- This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
- memory allocated for this step.
- tmpfs_path : str, optional, default None
- Path to tmpfs mount for this step. Defaults to /metaflow_temp.
- inferentia : int, default 0
- Number of Inferentia chips required for this step.
- trainium : int, default None
- Alias for inferentia. Use only one of the two.
- efa : int, default 0
- Number of elastic fabric adapter network devices to attach to container
- ephemeral_storage : int, default None
- The total amount, in GiB, of ephemeral storage to set for the task, 21-200GiB.
- This is only relevant for Fargate compute environments
- log_driver: str, optional, default None
- The log driver to use for the Amazon ECS container.
- log_options: List[str], optional, default None
- List of strings containing options for the chosen log driver. The configurable values
- depend on the `log driver` chosen. Validation of these options is not supported yet.
- Example: [`awslogs-group:aws/batch/job`]
+ vars : Dict[str, str], default {}
+ Dictionary of environment variables to set.
  """
  ...
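A minimal sketch of the `@environment` stub above; the variable name and value are placeholders.

```python
import os

from metaflow import FlowSpec, environment, step


class EnvFlow(FlowSpec):
    @environment(vars={"GREETING": "hello"})  # placeholder variable
    @step
    def start(self):
        # The variable is set before the step body executes.
        print(os.environ.get("GREETING"))
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    EnvFlow()
```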
660
737
 
@@ -716,246 +793,71 @@ def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: ty
716
793
  This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
717
794
  memory allocated for this step.
718
795
  tmpfs_path : str, optional, default /metaflow_temp
719
- Path to tmpfs mount for this step.
720
- persistent_volume_claims : Dict[str, str], optional, default None
721
- A map (dictionary) of persistent volumes to be mounted to the pod for this step. The map is from persistent
722
- volumes to the path to which the volume is to be mounted, e.g., `{'pvc-name': '/path/to/mount/on'}`.
723
- shared_memory: int, optional
724
- Shared memory size (in MiB) required for this step
725
- port: int, optional
726
- Port number to specify in the Kubernetes job object
727
- compute_pool : str, optional, default None
728
- Compute pool to be used for for this step.
729
- If not specified, any accessible compute pool within the perimeter is used.
730
- hostname_resolution_timeout: int, default 10 * 60
731
- Timeout in seconds for the workers tasks in the gang scheduled cluster to resolve the hostname of control task.
732
- Only applicable when @parallel is used.
733
- qos: str, default: Burstable
734
- Quality of Service class to assign to the pod. Supported values are: Guaranteed, Burstable, BestEffort
735
- """
736
- ...
737
-
738
- @typing.overload
739
- def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
740
- """
741
- Specifies secrets to be retrieved and injected as environment variables prior to
742
- the execution of a step.
743
-
744
-
745
- Parameters
746
- ----------
747
- sources : List[Union[str, Dict[str, Any]]], default: []
748
- List of secret specs, defining how the secrets are to be retrieved
749
- """
750
- ...
751
-
752
- @typing.overload
753
- def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
754
- ...
755
-
756
- @typing.overload
757
- def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
758
- ...
759
-
760
- def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []):
761
- """
762
- Specifies secrets to be retrieved and injected as environment variables prior to
763
- the execution of a step.
764
-
765
-
766
- Parameters
767
- ----------
768
- sources : List[Union[str, Dict[str, Any]]], default: []
769
- List of secret specs, defining how the secrets are to be retrieved
770
- """
771
- ...
772
-
773
- @typing.overload
774
- def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
775
- """
776
- Specifies the number of times the task corresponding
777
- to a step needs to be retried.
778
-
779
- This decorator is useful for handling transient errors, such as networking issues.
780
- If your task contains operations that can't be retried safely, e.g. database updates,
781
- it is advisable to annotate it with `@retry(times=0)`.
782
-
783
- This can be used in conjunction with the `@catch` decorator. The `@catch`
784
- decorator will execute a no-op task after all retries have been exhausted,
785
- ensuring that the flow execution can continue.
786
-
787
-
788
- Parameters
789
- ----------
790
- times : int, default 3
791
- Number of times to retry this task.
792
- minutes_between_retries : int, default 2
793
- Number of minutes between retries.
794
- """
795
- ...
796
-
797
- @typing.overload
798
- def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
799
- ...
800
-
801
- @typing.overload
802
- def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
803
- ...
804
-
805
- def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
806
- """
807
- Specifies the number of times the task corresponding
808
- to a step needs to be retried.
809
-
810
- This decorator is useful for handling transient errors, such as networking issues.
811
- If your task contains operations that can't be retried safely, e.g. database updates,
812
- it is advisable to annotate it with `@retry(times=0)`.
813
-
814
- This can be used in conjunction with the `@catch` decorator. The `@catch`
815
- decorator will execute a no-op task after all retries have been exhausted,
816
- ensuring that the flow execution can continue.
817
-
818
-
819
- Parameters
820
- ----------
821
- times : int, default 3
822
- Number of times to retry this task.
823
- minutes_between_retries : int, default 2
824
- Number of minutes between retries.
825
- """
826
- ...
827
-
828
- @typing.overload
- def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Specifies environment variables to be set prior to the execution of a step.
-
-
- Parameters
- ----------
- vars : Dict[str, str], default {}
- Dictionary of environment variables to set.
- """
- ...
-
- @typing.overload
- def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
- """
- Specifies environment variables to be set prior to the execution of a step.
-
-
- Parameters
- ----------
- vars : Dict[str, str], default {}
- Dictionary of environment variables to set.
- """
- ...
-
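A short sketch of the `@environment` decorator above; the variable names are illustrative:

```
from metaflow import FlowSpec, step, environment
import os

class EnvExampleFlow(FlowSpec):

    # Any Dict[str, str] works; these variables are set before the step runs.
    @environment(vars={"TOKENIZERS_PARALLELISM": "false", "MY_FLAG": "1"})
    @step
    def start(self):
        print(os.environ["MY_FLAG"])
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    EnvExampleFlow()
```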
- @typing.overload
- def trigger_on_finish(*, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
- """
- Specifies the flow(s) that this flow depends on.
-
- ```
- @trigger_on_finish(flow='FooFlow')
- ```
- or
- ```
- @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
- ```
- This decorator respects the @project decorator and triggers the flow
- when upstream runs within the same namespace complete successfully
-
- Additionally, you can specify project aware upstream flow dependencies
- by specifying the fully qualified project_flow_name.
- ```
- @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
- ```
- or
- ```
- @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
- ```
-
- You can also specify just the project or project branch (other values will be
- inferred from the current project or project branch):
- ```
- @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
- ```
+ Path to tmpfs mount for this step.
+ persistent_volume_claims : Dict[str, str], optional, default None
+ A map (dictionary) of persistent volumes to be mounted to the pod for this step. The map is from persistent
+ volumes to the path to which the volume is to be mounted, e.g., `{'pvc-name': '/path/to/mount/on'}`.
+ shared_memory: int, optional
+ Shared memory size (in MiB) required for this step
+ port: int, optional
+ Port number to specify in the Kubernetes job object
+ compute_pool : str, optional, default None
+ Compute pool to be used for this step.
+ If not specified, any accessible compute pool within the perimeter is used.
+ hostname_resolution_timeout: int, default 10 * 60
+ Timeout in seconds for the workers tasks in the gang scheduled cluster to resolve the hostname of control task.
+ Only applicable when @parallel is used.
+ qos: str, default: Burstable
+ Quality of Service class to assign to the pod. Supported values are: Guaranteed, Burstable, BestEffort
+ """
+ ...
+
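The added parameters above (`persistent_volume_claims`, `shared_memory`, `qos`, ...) belong to the `@kubernetes` step decorator, whose signature sits earlier in this stub and is not shown in this hunk. A hedged sketch using only options visible here; the PVC name and mount path are hypothetical:

```
from metaflow import FlowSpec, step, kubernetes

class K8sExampleFlow(FlowSpec):

    # 'train-cache' and '/cache' are placeholder values; qos accepts
    # Guaranteed, Burstable or BestEffort per the docstring above.
    @kubernetes(
        persistent_volume_claims={"train-cache": "/cache"},
        shared_memory=512,  # MiB
        qos="Burstable",
    )
    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    K8sExampleFlow()
```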
+ @typing.overload
+ def card(*, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Creates a human-readable report, a Metaflow Card, after this step completes.

- Note that `branch` is typically one of:
- - `prod`
- - `user.bob`
- - `test.my_experiment`
- - `prod.staging`
+ Note that you may add multiple `@card` decorators in a step with different parameters.


  Parameters
  ----------
- flow : Union[str, Dict[str, str]], optional, default None
- Upstream flow dependency for this flow.
- flows : List[Union[str, Dict[str, str]]], default []
- Upstream flow dependencies for this flow.
+ type : str, default 'default'
+ Card type.
+ id : str, optional, default None
+ If multiple cards are present, use this id to identify this card.
  options : Dict[str, Any], default {}
- Backend-specific configuration for tuning eventing behavior.
+ Options passed to the card. The contents depend on the card type.
+ timeout : int, default 45
+ Interrupt reporting if it takes more than this many seconds.
  """
  ...

  @typing.overload
- def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

- def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
+ @typing.overload
+ def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
  """
- Specifies the flow(s) that this flow depends on.
-
- ```
- @trigger_on_finish(flow='FooFlow')
- ```
- or
- ```
- @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
- ```
- This decorator respects the @project decorator and triggers the flow
- when upstream runs within the same namespace complete successfully
-
- Additionally, you can specify project aware upstream flow dependencies
- by specifying the fully qualified project_flow_name.
- ```
- @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
- ```
- or
- ```
- @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
- ```
-
- You can also specify just the project or project branch (other values will be
- inferred from the current project or project branch):
- ```
- @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
- ```
+ Creates a human-readable report, a Metaflow Card, after this step completes.

- Note that `branch` is typically one of:
- - `prod`
- - `user.bob`
- - `test.my_experiment`
- - `prod.staging`
+ Note that you may add multiple `@card` decorators in a step with different parameters.


  Parameters
  ----------
- flow : Union[str, Dict[str, str]], optional, default None
- Upstream flow dependency for this flow.
- flows : List[Union[str, Dict[str, str]]], default []
- Upstream flow dependencies for this flow.
+ type : str, default 'default'
+ Card type.
+ id : str, optional, default None
+ If multiple cards are present, use this id to identify this card.
  options : Dict[str, Any], default {}
- Backend-specific configuration for tuning eventing behavior.
+ Options passed to the card. The contents depend on the card type.
+ timeout : int, default 45
+ Interrupt reporting if it takes more than this many seconds.
  """
  ...
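A hedged sketch of the `@card` decorator added above, using two cards on one step and the `id` parameter to tell them apart (flow name and card id are illustrative; the `Markdown` component is assumed to come from `metaflow.cards`):

```
from metaflow import FlowSpec, step, card, current
from metaflow.cards import Markdown

class CardExampleFlow(FlowSpec):

    # Two cards on the same step, distinguished by id as the docstring notes.
    @card(type="default", timeout=45)
    @card(type="blank", id="notes")
    @step
    def start(self):
        # Components can be appended to the 'notes' card at run time.
        current.card["notes"].append(Markdown("# Run notes"))
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    CardExampleFlow()
```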
 
@@ -1002,6 +904,47 @@ def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str,
  """
  ...

+ @typing.overload
+ def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ """
+ Specifies the PyPI packages for all steps of the flow.
+
+ Use `@pypi_base` to set common packages required by all
+ steps and use `@pypi` to specify step-specific overrides.
+
+ Parameters
+ ----------
+ packages : Dict[str, str], default: {}
+ Packages to use for this flow. The key is the name of the package
+ and the value is the version to use.
+ python : str, optional, default: None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ """
+ ...
+
+ @typing.overload
+ def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ ...
+
+ def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
+ """
+ Specifies the PyPI packages for all steps of the flow.
+
+ Use `@pypi_base` to set common packages required by all
+ steps and use `@pypi` to specify step-specific overrides.
+
+ Parameters
+ ----------
+ packages : Dict[str, str], default: {}
+ Packages to use for this flow. The key is the name of the package
+ and the value is the version to use.
+ python : str, optional, default: None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ """
+ ...
+
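A minimal sketch of the flow-level `@pypi_base` decorator added above; the package pins and Python version are illustrative, and such flows are typically run with the `--environment=pypi` top-level option:

```
from metaflow import FlowSpec, step, pypi_base

# Illustrative pins only; keys are package names, values are versions.
@pypi_base(packages={"pandas": "2.2.2", "requests": "2.32.3"}, python="3.11.5")
class PypiExampleFlow(FlowSpec):

    @step
    def start(self):
        import pandas as pd  # resolved from the @pypi_base environment
        print(pd.__version__)
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    PypiExampleFlow()
```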
  def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
  The `@airflow_s3_key_sensor` decorator attaches an Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
@@ -1046,43 +989,240 @@ def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, expone
  ...

  @typing.overload
- def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ """
+ Specifies the times when the flow should be run when running on a
+ production scheduler.
+
+
+ Parameters
+ ----------
+ hourly : bool, default False
+ Run the workflow hourly.
+ daily : bool, default True
+ Run the workflow daily.
+ weekly : bool, default False
+ Run the workflow weekly.
+ cron : str, optional, default None
+ Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
+ specified by this expression.
+ timezone : str, optional, default None
+ Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
+ which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
+ """
+ ...
+
+ @typing.overload
+ def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ ...
+
+ def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
+ """
+ Specifies the times when the flow should be run when running on a
+ production scheduler.
+
+
+ Parameters
+ ----------
+ hourly : bool, default False
+ Run the workflow hourly.
+ daily : bool, default True
+ Run the workflow daily.
+ weekly : bool, default False
+ Run the workflow weekly.
+ cron : str, optional, default None
+ Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
+ specified by this expression.
+ timezone : str, optional, default None
+ Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
+ which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
+ """
+ ...
+
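A hedged sketch of the `@schedule` decorator added above; the schedule only takes effect once the flow is deployed to a production scheduler, and the cron expression and timezone shown are illustrative:

```
from metaflow import FlowSpec, step, schedule

# Runs daily by default; a cron/timezone variant is shown commented out.
@schedule(daily=True)
# @schedule(cron="0 6 * * *", timezone="Europe/London")  # alternative form
class ScheduledFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    ScheduledFlow()
```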
+ @typing.overload
+ def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ """
+ Specifies the Conda environment for all steps of the flow.
+
+ Use `@conda_base` to set common libraries required by all
+ steps and use `@conda` to specify step-specific additions.
+
+
+ Parameters
+ ----------
+ packages : Dict[str, str], default {}
+ Packages to use for this flow. The key is the name of the package
+ and the value is the version to use.
+ libraries : Dict[str, str], default {}
+ Supported for backward compatibility. When used with packages, packages will take precedence.
+ python : str, optional, default None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ disabled : bool, default False
+ If set to True, disables Conda.
+ """
+ ...
+
+ @typing.overload
+ def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ ...
+
+ def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
+ """
+ Specifies the Conda environment for all steps of the flow.
+
+ Use `@conda_base` to set common libraries required by all
+ steps and use `@conda` to specify step-specific additions.
+
+
+ Parameters
+ ----------
+ packages : Dict[str, str], default {}
+ Packages to use for this flow. The key is the name of the package
+ and the value is the version to use.
+ libraries : Dict[str, str], default {}
+ Supported for backward compatibility. When used with packages, packages will take precedence.
+ python : str, optional, default None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ disabled : bool, default False
+ If set to True, disables Conda.
+ """
+ ...
+
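A minimal sketch of the flow-level `@conda_base` decorator added above; the package pin and Python version are illustrative, and such flows are typically run with the `--environment=conda` top-level option:

```
from metaflow import FlowSpec, step, conda_base

# Illustrative versions only; values follow the docstring above.
@conda_base(packages={"numpy": "1.26.4"}, python="3.10.13")
class CondaExampleFlow(FlowSpec):

    @step
    def start(self):
        import numpy as np  # provided by the conda environment above
        print(np.__version__)
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    CondaExampleFlow()
```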
+ def project(*, name: str, branch: typing.Optional[str] = None, production: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ """
+ Specifies what flows belong to the same project.
+
+ A project-specific namespace is created for all flows that
+ use the same `@project(name)`.
+
+
+ Parameters
+ ----------
+ name : str
+ Project name. Make sure that the name is unique amongst all
+ projects that use the same production scheduler. The name may
+ contain only lowercase alphanumeric characters and underscores.
+
+ branch : Optional[str], default None
+ The branch to use. If not specified, the branch is set to
+ `user.<username>` unless `production` is set to `True`. This can
+ also be set on the command line using `--branch` as a top-level option.
+ It is an error to specify `branch` in the decorator and on the command line.
+
+ production : bool, default False
+ Whether or not the branch is the production branch. This can also be set on the
+ command line using `--production` as a top-level option. It is an error to specify
+ `production` in the decorator and on the command line.
+ The project branch name will be:
+ - if `branch` is specified:
+ - if `production` is True: `prod.<branch>`
+ - if `production` is False: `test.<branch>`
+ - if `branch` is not specified:
+ - if `production` is True: `prod`
+ - if `production` is False: `user.<username>`
+ """
+ ...
+
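A short sketch of the `@project` decorator added above; the project name is hypothetical, and branch/production are usually left to the `--branch`/`--production` command-line options as the docstring describes:

```
from metaflow import FlowSpec, step, project

# All flows sharing the project name 'fraud_model' get a common namespace.
@project(name="fraud_model")
class ProjectExampleFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    ProjectExampleFlow()
```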
+ @typing.overload
+ def trigger_on_finish(*, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
- Specifies the PyPI packages for all steps of the flow.
+ Specifies the flow(s) that this flow depends on.
+
+ ```
+ @trigger_on_finish(flow='FooFlow')
+ ```
+ or
+ ```
+ @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
+ ```
+ This decorator respects the @project decorator and triggers the flow
+ when upstream runs within the same namespace complete successfully
+
+ Additionally, you can specify project aware upstream flow dependencies
+ by specifying the fully qualified project_flow_name.
+ ```
+ @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
+ ```
+ or
+ ```
+ @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
+ ```
+
+ You can also specify just the project or project branch (other values will be
+ inferred from the current project or project branch):
+ ```
+ @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
+ ```
+
+ Note that `branch` is typically one of:
+ - `prod`
+ - `user.bob`
+ - `test.my_experiment`
+ - `prod.staging`

- Use `@pypi_base` to set common packages required by all
- steps and use `@pypi` to specify step-specific overrides.

  Parameters
  ----------
- packages : Dict[str, str], default: {}
- Packages to use for this flow. The key is the name of the package
- and the value is the version to use.
- python : str, optional, default: None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
+ flow : Union[str, Dict[str, str]], optional, default None
+ Upstream flow dependency for this flow.
+ flows : List[Union[str, Dict[str, str]]], default []
+ Upstream flow dependencies for this flow.
+ options : Dict[str, Any], default {}
+ Backend-specific configuration for tuning eventing behavior.
  """
  ...

  @typing.overload
- def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
  ...

- def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
+ def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
  """
- Specifies the PyPI packages for all steps of the flow.
+ Specifies the flow(s) that this flow depends on.
+
+ ```
+ @trigger_on_finish(flow='FooFlow')
+ ```
+ or
+ ```
+ @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
+ ```
+ This decorator respects the @project decorator and triggers the flow
+ when upstream runs within the same namespace complete successfully
+
+ Additionally, you can specify project aware upstream flow dependencies
+ by specifying the fully qualified project_flow_name.
+ ```
+ @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
+ ```
+ or
+ ```
+ @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
+ ```
+
+ You can also specify just the project or project branch (other values will be
+ inferred from the current project or project branch):
+ ```
+ @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
+ ```
+
+ Note that `branch` is typically one of:
+ - `prod`
+ - `user.bob`
+ - `test.my_experiment`
+ - `prod.staging`

- Use `@pypi_base` to set common packages required by all
- steps and use `@pypi` to specify step-specific overrides.

  Parameters
  ----------
- packages : Dict[str, str], default: {}
- Packages to use for this flow. The key is the name of the package
- and the value is the version to use.
- python : str, optional, default: None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
+ flow : Union[str, Dict[str, str]], optional, default None
+ Upstream flow dependency for this flow.
+ flows : List[Union[str, Dict[str, str]]], default []
+ Upstream flow dependencies for this flow.
+ options : Dict[str, Any], default {}
+ Backend-specific configuration for tuning eventing behavior.
  """
  ...
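A minimal sketch of the `@trigger_on_finish` decorator added above, following the docstring's own `flow='FooFlow'` example; the upstream flow name is hypothetical, and the trigger only fires once the flow is deployed to a production orchestrator:

```
from metaflow import FlowSpec, step, trigger_on_finish

# Runs when the (hypothetical) upstream FooFlow finishes successfully
# within the same project/branch namespace.
@trigger_on_finish(flow="FooFlow")
class DownstreamFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    DownstreamFlow()
```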
 
@@ -1179,140 +1319,3 @@ def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: t
  """
  ...

- @typing.overload
- def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
- """
- Specifies the times when the flow should be run when running on a
- production scheduler.
-
-
- Parameters
- ----------
- hourly : bool, default False
- Run the workflow hourly.
- daily : bool, default True
- Run the workflow daily.
- weekly : bool, default False
- Run the workflow weekly.
- cron : str, optional, default None
- Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
- specified by this expression.
- timezone : str, optional, default None
- Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
- which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
- """
- ...
-
- @typing.overload
- def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
- ...
-
- def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
- """
- Specifies the times when the flow should be run when running on a
- production scheduler.
-
-
- Parameters
- ----------
- hourly : bool, default False
- Run the workflow hourly.
- daily : bool, default True
- Run the workflow daily.
- weekly : bool, default False
- Run the workflow weekly.
- cron : str, optional, default None
- Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
- specified by this expression.
- timezone : str, optional, default None
- Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
- which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
- """
- ...
-
- def project(*, name: str, branch: typing.Optional[str] = None, production: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
- """
- Specifies what flows belong to the same project.
-
- A project-specific namespace is created for all flows that
- use the same `@project(name)`.
-
-
- Parameters
- ----------
- name : str
- Project name. Make sure that the name is unique amongst all
- projects that use the same production scheduler. The name may
- contain only lowercase alphanumeric characters and underscores.
-
- branch : Optional[str], default None
- The branch to use. If not specified, the branch is set to
- `user.<username>` unless `production` is set to `True`. This can
- also be set on the command line using `--branch` as a top-level option.
- It is an error to specify `branch` in the decorator and on the command line.
-
- production : bool, default False
- Whether or not the branch is the production branch. This can also be set on the
- command line using `--production` as a top-level option. It is an error to specify
- `production` in the decorator and on the command line.
- The project branch name will be:
- - if `branch` is specified:
- - if `production` is True: `prod.<branch>`
- - if `production` is False: `test.<branch>`
- - if `branch` is not specified:
- - if `production` is True: `prod`
- - if `production` is False: `user.<username>`
- """
- ...
-
- @typing.overload
- def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
- """
- Specifies the Conda environment for all steps of the flow.
-
- Use `@conda_base` to set common libraries required by all
- steps and use `@conda` to specify step-specific additions.
-
-
- Parameters
- ----------
- packages : Dict[str, str], default {}
- Packages to use for this flow. The key is the name of the package
- and the value is the version to use.
- libraries : Dict[str, str], default {}
- Supported for backward compatibility. When used with packages, packages will take precedence.
- python : str, optional, default None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
- disabled : bool, default False
- If set to True, disables Conda.
- """
- ...
-
- @typing.overload
- def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
- ...
-
- def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
- """
- Specifies the Conda environment for all steps of the flow.
-
- Use `@conda_base` to set common libraries required by all
- steps and use `@conda` to specify step-specific additions.
-
-
- Parameters
- ----------
- packages : Dict[str, str], default {}
- Packages to use for this flow. The key is the name of the package
- and the value is the version to use.
- libraries : Dict[str, str], default {}
- Supported for backward compatibility. When used with packages, packages will take precedence.
- python : str, optional, default None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
- disabled : bool, default False
- If set to True, disables Conda.
- """
- ...
-