metaflow-stubs 2.13.2__py2.py3-none-any.whl → 2.13.3__py2.py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as they appear in their respective public registries. The information is provided for informational purposes only.
Files changed (144)
  1. metaflow-stubs/__init__.pyi +245 -245
  2. metaflow-stubs/cards.pyi +3 -2
  3. metaflow-stubs/cli.pyi +2 -2
  4. metaflow-stubs/cli_components/__init__.pyi +2 -2
  5. metaflow-stubs/cli_components/utils.pyi +2 -2
  6. metaflow-stubs/client/__init__.pyi +2 -2
  7. metaflow-stubs/client/core.pyi +5 -5
  8. metaflow-stubs/client/filecache.pyi +3 -3
  9. metaflow-stubs/events.pyi +2 -2
  10. metaflow-stubs/exception.pyi +2 -2
  11. metaflow-stubs/flowspec.pyi +6 -6
  12. metaflow-stubs/generated_for.txt +1 -1
  13. metaflow-stubs/includefile.pyi +5 -5
  14. metaflow-stubs/info_file.pyi +2 -2
  15. metaflow-stubs/metadata_provider/__init__.pyi +2 -2
  16. metaflow-stubs/metadata_provider/heartbeat.pyi +2 -2
  17. metaflow-stubs/metadata_provider/metadata.pyi +2 -2
  18. metaflow-stubs/metadata_provider/util.pyi +2 -2
  19. metaflow-stubs/metaflow_config.pyi +2 -2
  20. metaflow-stubs/metaflow_current.pyi +35 -35
  21. metaflow-stubs/multicore_utils.pyi +2 -2
  22. metaflow-stubs/parameters.pyi +5 -5
  23. metaflow-stubs/plugins/__init__.pyi +12 -12
  24. metaflow-stubs/plugins/airflow/__init__.pyi +2 -2
  25. metaflow-stubs/plugins/airflow/airflow_decorator.pyi +2 -2
  26. metaflow-stubs/plugins/airflow/airflow_utils.pyi +2 -2
  27. metaflow-stubs/plugins/airflow/exception.pyi +2 -2
  28. metaflow-stubs/plugins/airflow/sensors/__init__.pyi +2 -2
  29. metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +2 -2
  30. metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +2 -2
  31. metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +2 -2
  32. metaflow-stubs/plugins/argo/__init__.pyi +2 -2
  33. metaflow-stubs/plugins/argo/argo_client.pyi +2 -2
  34. metaflow-stubs/plugins/argo/argo_events.pyi +2 -2
  35. metaflow-stubs/plugins/argo/argo_workflows.pyi +3 -3
  36. metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +3 -3
  37. metaflow-stubs/plugins/argo/argo_workflows_deployer.pyi +4 -4
  38. metaflow-stubs/plugins/argo/argo_workflows_deployer_objects.pyi +3 -3
  39. metaflow-stubs/plugins/aws/__init__.pyi +3 -3
  40. metaflow-stubs/plugins/aws/aws_client.pyi +2 -2
  41. metaflow-stubs/plugins/aws/aws_utils.pyi +2 -2
  42. metaflow-stubs/plugins/aws/batch/__init__.pyi +2 -2
  43. metaflow-stubs/plugins/aws/batch/batch.pyi +2 -2
  44. metaflow-stubs/plugins/aws/batch/batch_client.pyi +2 -2
  45. metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +2 -2
  46. metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +2 -2
  47. metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +3 -3
  48. metaflow-stubs/plugins/aws/step_functions/__init__.pyi +2 -2
  49. metaflow-stubs/plugins/aws/step_functions/dynamo_db_client.pyi +2 -2
  50. metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +2 -2
  51. metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +2 -2
  52. metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +2 -2
  53. metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +2 -2
  54. metaflow-stubs/plugins/aws/step_functions/step_functions_decorator.pyi +2 -2
  55. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer.pyi +4 -4
  56. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer_objects.pyi +4 -4
  57. metaflow-stubs/plugins/azure/__init__.pyi +2 -2
  58. metaflow-stubs/plugins/azure/azure_credential.pyi +2 -2
  59. metaflow-stubs/plugins/azure/azure_exceptions.pyi +2 -2
  60. metaflow-stubs/plugins/azure/azure_secret_manager_secrets_provider.pyi +3 -3
  61. metaflow-stubs/plugins/azure/azure_utils.pyi +2 -2
  62. metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +2 -2
  63. metaflow-stubs/plugins/azure/includefile_support.pyi +2 -2
  64. metaflow-stubs/plugins/cards/__init__.pyi +2 -2
  65. metaflow-stubs/plugins/cards/card_client.pyi +3 -3
  66. metaflow-stubs/plugins/cards/card_creator.pyi +2 -2
  67. metaflow-stubs/plugins/cards/card_datastore.pyi +2 -2
  68. metaflow-stubs/plugins/cards/card_decorator.pyi +2 -2
  69. metaflow-stubs/plugins/cards/card_modules/__init__.pyi +2 -2
  70. metaflow-stubs/plugins/cards/card_modules/basic.pyi +15 -7
  71. metaflow-stubs/plugins/cards/card_modules/card.pyi +9 -3
  72. metaflow-stubs/plugins/cards/card_modules/components.pyi +48 -10
  73. metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +2 -2
  74. metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +2 -2
  75. metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +6 -6
  76. metaflow-stubs/plugins/cards/card_resolver.pyi +2 -2
  77. metaflow-stubs/plugins/cards/component_serializer.pyi +3 -3
  78. metaflow-stubs/plugins/cards/exception.pyi +2 -2
  79. metaflow-stubs/plugins/catch_decorator.pyi +2 -2
  80. metaflow-stubs/plugins/datatools/__init__.pyi +2 -2
  81. metaflow-stubs/plugins/datatools/local.pyi +2 -2
  82. metaflow-stubs/plugins/datatools/s3/__init__.pyi +2 -2
  83. metaflow-stubs/plugins/datatools/s3/s3.pyi +3 -3
  84. metaflow-stubs/plugins/datatools/s3/s3tail.pyi +2 -2
  85. metaflow-stubs/plugins/datatools/s3/s3util.pyi +2 -2
  86. metaflow-stubs/plugins/debug_logger.pyi +2 -2
  87. metaflow-stubs/plugins/debug_monitor.pyi +2 -2
  88. metaflow-stubs/plugins/environment_decorator.pyi +2 -2
  89. metaflow-stubs/plugins/events_decorator.pyi +2 -2
  90. metaflow-stubs/plugins/frameworks/__init__.pyi +2 -2
  91. metaflow-stubs/plugins/frameworks/pytorch.pyi +2 -2
  92. metaflow-stubs/plugins/gcp/__init__.pyi +2 -2
  93. metaflow-stubs/plugins/gcp/gcp_secret_manager_secrets_provider.pyi +3 -3
  94. metaflow-stubs/plugins/gcp/gs_exceptions.pyi +2 -2
  95. metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +2 -2
  96. metaflow-stubs/plugins/gcp/gs_utils.pyi +2 -2
  97. metaflow-stubs/plugins/gcp/includefile_support.pyi +2 -2
  98. metaflow-stubs/plugins/kubernetes/__init__.pyi +2 -2
  99. metaflow-stubs/plugins/kubernetes/kube_utils.pyi +3 -3
  100. metaflow-stubs/plugins/kubernetes/kubernetes.pyi +2 -2
  101. metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +2 -2
  102. metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +2 -2
  103. metaflow-stubs/plugins/kubernetes/kubernetes_job.pyi +2 -2
  104. metaflow-stubs/plugins/kubernetes/kubernetes_jobsets.pyi +2 -2
  105. metaflow-stubs/plugins/parallel_decorator.pyi +2 -2
  106. metaflow-stubs/plugins/project_decorator.pyi +2 -2
  107. metaflow-stubs/plugins/pypi/__init__.pyi +3 -3
  108. metaflow-stubs/plugins/pypi/conda_decorator.pyi +2 -2
  109. metaflow-stubs/plugins/pypi/conda_environment.pyi +3 -3
  110. metaflow-stubs/plugins/pypi/pypi_decorator.pyi +2 -2
  111. metaflow-stubs/plugins/pypi/pypi_environment.pyi +2 -2
  112. metaflow-stubs/plugins/pypi/utils.pyi +2 -2
  113. metaflow-stubs/plugins/resources_decorator.pyi +2 -2
  114. metaflow-stubs/plugins/retry_decorator.pyi +2 -2
  115. metaflow-stubs/plugins/secrets/__init__.pyi +3 -3
  116. metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +2 -2
  117. metaflow-stubs/plugins/secrets/secrets_decorator.pyi +2 -2
  118. metaflow-stubs/plugins/storage_executor.pyi +2 -2
  119. metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +2 -2
  120. metaflow-stubs/plugins/timeout_decorator.pyi +2 -2
  121. metaflow-stubs/pylint_wrapper.pyi +2 -2
  122. metaflow-stubs/runner/__init__.pyi +2 -2
  123. metaflow-stubs/runner/deployer.pyi +31 -31
  124. metaflow-stubs/runner/deployer_impl.pyi +3 -3
  125. metaflow-stubs/runner/metaflow_runner.pyi +4 -4
  126. metaflow-stubs/runner/nbdeploy.pyi +2 -2
  127. metaflow-stubs/runner/nbrun.pyi +2 -2
  128. metaflow-stubs/runner/subprocess_manager.pyi +2 -2
  129. metaflow-stubs/runner/utils.pyi +3 -3
  130. metaflow-stubs/system/__init__.pyi +2 -2
  131. metaflow-stubs/system/system_logger.pyi +3 -3
  132. metaflow-stubs/system/system_monitor.pyi +2 -2
  133. metaflow-stubs/tagging_util.pyi +2 -2
  134. metaflow-stubs/tuple_util.pyi +2 -2
  135. metaflow-stubs/user_configs/__init__.pyi +2 -2
  136. metaflow-stubs/user_configs/config_decorators.pyi +6 -6
  137. metaflow-stubs/user_configs/config_options.pyi +4 -4
  138. metaflow-stubs/user_configs/config_parameters.pyi +6 -6
  139. metaflow-stubs/version.pyi +2 -2
  140. {metaflow_stubs-2.13.2.dist-info → metaflow_stubs-2.13.3.dist-info}/METADATA +13 -3
  141. metaflow_stubs-2.13.3.dist-info/RECORD +144 -0
  142. {metaflow_stubs-2.13.2.dist-info → metaflow_stubs-2.13.3.dist-info}/WHEEL +1 -1
  143. metaflow_stubs-2.13.2.dist-info/RECORD +0 -144
  144. {metaflow_stubs-2.13.2.dist-info → metaflow_stubs-2.13.3.dist-info}/top_level.txt +0 -0
@@ -1,15 +1,15 @@
  ######################################################################################################
  # Auto-generated Metaflow stub file #
- # MF version: 2.13.2 #
- # Generated on 2025-01-08T14:49:03.615477 #
+ # MF version: 2.13.3 #
+ # Generated on 2025-01-10T15:23:16.057741 #
  ######################################################################################################

  from __future__ import annotations

  import typing
  if typing.TYPE_CHECKING:
- import datetime
  import typing
+ import datetime

  FlowSpecDerived = typing.TypeVar("FlowSpecDerived", bound="FlowSpec", contravariant=False, covariant=False)
  StepFlag = typing.NewType("StepFlag", bool)

@@ -35,8 +35,8 @@ from .user_configs.config_parameters import ConfigValue as ConfigValue
  from .user_configs.config_parameters import config_expr as config_expr
  from .user_configs.config_decorators import CustomFlowDecorator as CustomFlowDecorator
  from .user_configs.config_decorators import CustomStepDecorator as CustomStepDecorator
- from . import events as events
  from . import tuple_util as tuple_util
+ from . import events as events
  from . import runner as runner
  from . import plugins as plugins
  from .plugins.datatools.s3.s3 import S3 as S3
@@ -142,31 +142,6 @@ def step(f: typing.Union[typing.Callable[[FlowSpecDerived], None], typing.Callab
  """
  ...

- @typing.overload
- def card(*, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Creates a human-readable report, a Metaflow Card, after this step completes.
-
- Note that you may add multiple `@card` decorators in a step with different parameters.
- """
- ...
-
- @typing.overload
- def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
- """
- Creates a human-readable report, a Metaflow Card, after this step completes.
-
- Note that you may add multiple `@card` decorators in a step with different parameters.
- """
- ...
-
  @typing.overload
  def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
@@ -190,43 +165,6 @@ def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None],
  """
  ...

- @typing.overload
- def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Specifies that the step will success under all circumstances.
-
- The decorator will create an optional artifact, specified by `var`, which
- contains the exception raised. You can use it to detect the presence
- of errors, indicating that all happy-path artifacts produced by the step
- are missing.
- """
- ...
-
- @typing.overload
- def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
- """
- Specifies that the step will success under all circumstances.
-
- The decorator will create an optional artifact, specified by `var`, which
- contains the exception raised. You can use it to detect the presence
- of errors, indicating that all happy-path artifacts produced by the step
- are missing.
- """
- ...
-
- def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = 'KUBERNETES_IMAGE_PULL_POLICY', service_account: str = 'METAFLOW_KUBERNETES_SERVICE_ACCOUNT', secrets: typing.Optional[typing.List[str]] = None, node_selector: typing.Union[typing.Dict[str, str], str, None] = None, namespace: str = 'METAFLOW_KUBERNETES_NAMESPACE', gpu: typing.Optional[int] = None, gpu_vendor: str = 'KUBERNETES_GPU_VENDOR', tolerations: typing.List[str] = [], labels: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_LABELS', annotations: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_ANNOTATIONS', use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = '/metaflow_temp', persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None, hostname_resolution_timeout: int = 600, qos: str = 'Burstable') -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Specifies that this step should execute on Kubernetes.
- """
- ...
-
  @typing.overload
  def resources(*, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
@@ -277,23 +215,54 @@ def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None]
  ...

  @typing.overload
- def batch(*, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = 'METAFLOW_BATCH_JOB_QUEUE', iam_role: str = 'METAFLOW_ECS_S3_ACCESS_IAM_ROLE', execution_role: str = 'METAFLOW_ECS_FARGATE_EXECUTION_ROLE', shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
+ Specifies the PyPI packages for the step.
+
+ Information in this decorator will augment any
+ attributes set in the `@pyi_base` flow-level decorator. Hence,
+ you can use `@pypi_base` to set packages required by all
+ steps and use `@pypi` to specify step-specific overrides.
  """
  ...

  @typing.overload
- def batch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def batch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def batch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = 'METAFLOW_BATCH_JOB_QUEUE', iam_role: str = 'METAFLOW_ECS_S3_ACCESS_IAM_ROLE', execution_role: str = 'METAFLOW_ECS_FARGATE_EXECUTION_ROLE', shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None):
+ def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
  """
- Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
+ Specifies the PyPI packages for the step.
+
+ Information in this decorator will augment any
+ attributes set in the `@pyi_base` flow-level decorator. Hence,
+ you can use `@pypi_base` to set packages required by all
+ steps and use `@pypi` to specify step-specific overrides.
+ """
+ ...
+
+ @typing.overload
+ def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Specifies environment variables to be set prior to the execution of a step.
+ """
+ ...
+
+ @typing.overload
+ def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ ...
+
+ @typing.overload
+ def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
+ """
+ Specifies environment variables to be set prior to the execution of a step.
  """
  ...

@@ -317,285 +286,217 @@ def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None],
  ...

  @typing.overload
- def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def card(*, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies the number of times the task corresponding
- to a step needs to be retried.
-
- This decorator is useful for handling transient errors, such as networking issues.
- If your task contains operations that can't be retried safely, e.g. database updates,
- it is advisable to annotate it with `@retry(times=0)`.
+ Creates a human-readable report, a Metaflow Card, after this step completes.

- This can be used in conjunction with the `@catch` decorator. The `@catch`
- decorator will execute a no-op task after all retries have been exhausted,
- ensuring that the flow execution can continue.
+ Note that you may add multiple `@card` decorators in a step with different parameters.
  """
  ...

  @typing.overload
- def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
+ def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
  """
- Specifies the number of times the task corresponding
- to a step needs to be retried.
-
- This decorator is useful for handling transient errors, such as networking issues.
- If your task contains operations that can't be retried safely, e.g. database updates,
- it is advisable to annotate it with `@retry(times=0)`.
+ Creates a human-readable report, a Metaflow Card, after this step completes.

- This can be used in conjunction with the `@catch` decorator. The `@catch`
- decorator will execute a no-op task after all retries have been exhausted,
- ensuring that the flow execution can continue.
+ Note that you may add multiple `@card` decorators in a step with different parameters.
  """
  ...

  @typing.overload
- def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies the Conda environment for the step.
+ Specifies a timeout for your step.

- Information in this decorator will augment any
- attributes set in the `@conda_base` flow-level decorator. Hence,
- you can use `@conda_base` to set packages required by all
- steps and use `@conda` to specify step-specific overrides.
+ This decorator is useful if this step may hang indefinitely.
+
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.
+
+ Note that all the values specified in parameters are added together so if you specify
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
  """
  ...

  @typing.overload
- def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
+ def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
  """
- Specifies the Conda environment for the step.
+ Specifies a timeout for your step.

- Information in this decorator will augment any
- attributes set in the `@conda_base` flow-level decorator. Hence,
- you can use `@conda_base` to set packages required by all
- steps and use `@conda` to specify step-specific overrides.
+ This decorator is useful if this step may hang indefinitely.
+
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.
+
+ Note that all the values specified in parameters are added together so if you specify
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
  """
  ...

  @typing.overload
- def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies the PyPI packages for the step.
+ Specifies the Conda environment for the step.

  Information in this decorator will augment any
- attributes set in the `@pyi_base` flow-level decorator. Hence,
- you can use `@pypi_base` to set packages required by all
- steps and use `@pypi` to specify step-specific overrides.
+ attributes set in the `@conda_base` flow-level decorator. Hence,
+ you can use `@conda_base` to set packages required by all
+ steps and use `@conda` to specify step-specific overrides.
  """
  ...

  @typing.overload
- def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
+ def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
  """
- Specifies the PyPI packages for the step.
+ Specifies the Conda environment for the step.

  Information in this decorator will augment any
- attributes set in the `@pyi_base` flow-level decorator. Hence,
- you can use `@pypi_base` to set packages required by all
- steps and use `@pypi` to specify step-specific overrides.
+ attributes set in the `@conda_base` flow-level decorator. Hence,
+ you can use `@conda_base` to set packages required by all
+ steps and use `@conda` to specify step-specific overrides.
  """
  ...

  @typing.overload
- def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies environment variables to be set prior to the execution of a step.
+ Specifies that the step will success under all circumstances.
+
+ The decorator will create an optional artifact, specified by `var`, which
+ contains the exception raised. You can use it to detect the presence
+ of errors, indicating that all happy-path artifacts produced by the step
+ are missing.
  """
  ...

  @typing.overload
- def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
+ def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
  """
- Specifies environment variables to be set prior to the execution of a step.
+ Specifies that the step will success under all circumstances.
+
+ The decorator will create an optional artifact, specified by `var`, which
+ contains the exception raised. You can use it to detect the presence
+ of errors, indicating that all happy-path artifacts produced by the step
+ are missing.
  """
  ...

  @typing.overload
- def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies a timeout for your step.
-
- This decorator is useful if this step may hang indefinitely.
+ Specifies the number of times the task corresponding
+ to a step needs to be retried.

- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
+ This decorator is useful for handling transient errors, such as networking issues.
+ If your task contains operations that can't be retried safely, e.g. database updates,
+ it is advisable to annotate it with `@retry(times=0)`.

- Note that all the values specified in parameters are added together so if you specify
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
+ decorator will execute a no-op task after all retries have been exhausted,
+ ensuring that the flow execution can continue.
  """
  ...

  @typing.overload
- def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
+ def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
  """
- Specifies a timeout for your step.
-
- This decorator is useful if this step may hang indefinitely.
+ Specifies the number of times the task corresponding
+ to a step needs to be retried.

- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
+ This decorator is useful for handling transient errors, such as networking issues.
+ If your task contains operations that can't be retried safely, e.g. database updates,
+ it is advisable to annotate it with `@retry(times=0)`.

- Note that all the values specified in parameters are added together so if you specify
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
- """
- ...
-
- def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
- """
- The `@airflow_s3_key_sensor` decorator attaches a Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
- before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
- and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
- added as a flow decorators. Adding more than one decorator will ensure that `start` step
- starts only after all sensors finish.
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
+ decorator will execute a no-op task after all retries have been exhausted,
+ ensuring that the flow execution can continue.
  """
  ...

- def project(*, name: str) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = 'KUBERNETES_IMAGE_PULL_POLICY', service_account: str = 'METAFLOW_KUBERNETES_SERVICE_ACCOUNT', secrets: typing.Optional[typing.List[str]] = None, node_selector: typing.Union[typing.Dict[str, str], str, None] = None, namespace: str = 'METAFLOW_KUBERNETES_NAMESPACE', gpu: typing.Optional[int] = None, gpu_vendor: str = 'KUBERNETES_GPU_VENDOR', tolerations: typing.List[str] = [], labels: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_LABELS', annotations: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_ANNOTATIONS', use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = '/metaflow_temp', persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None, hostname_resolution_timeout: int = 600, qos: str = 'Burstable') -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies what flows belong to the same project.
-
- A project-specific namespace is created for all flows that
- use the same `@project(name)`.
+ Specifies that this step should execute on Kubernetes.
  """
  ...

  @typing.overload
- def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ def batch(*, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = 'METAFLOW_BATCH_JOB_QUEUE', iam_role: str = 'METAFLOW_ECS_S3_ACCESS_IAM_ROLE', execution_role: str = 'METAFLOW_ECS_FARGATE_EXECUTION_ROLE', shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies the PyPI packages for all steps of the flow.
-
- Use `@pypi_base` to set common packages required by all
- steps and use `@pypi` to specify step-specific overrides.
+ Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
  """
  ...

  @typing.overload
- def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ def batch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

- def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
- """
- Specifies the PyPI packages for all steps of the flow.
-
- Use `@pypi_base` to set common packages required by all
- steps and use `@pypi` to specify step-specific overrides.
- """
+ @typing.overload
+ def batch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ def batch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = 'METAFLOW_BATCH_JOB_QUEUE', iam_role: str = 'METAFLOW_ECS_S3_ACCESS_IAM_ROLE', execution_role: str = 'METAFLOW_ECS_FARGATE_EXECUTION_ROLE', shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None):
  """
- The `@airflow_external_task_sensor` decorator attaches a Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
- This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as a flow decorators. Adding more than one decorator will ensure that `start` step starts only after all sensors finish.
+ Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
  """
  ...

  @typing.overload
- def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
- Specifies the event(s) that this flow depends on.
-
- ```
- @trigger(event='foo')
- ```
- or
- ```
- @trigger(events=['foo', 'bar'])
- ```
-
- Additionally, you can specify the parameter mappings
- to map event payload to Metaflow parameters for the flow.
- ```
- @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
- ```
- or
- ```
- @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
- {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
- ```
+ Specifies the Conda environment for all steps of the flow.

- 'parameters' can also be a list of strings and tuples like so:
- ```
- @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
- ```
- This is equivalent to:
- ```
- @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
- ```
+ Use `@conda_base` to set common libraries required by all
+ steps and use `@conda` to specify step-specific additions.
  """
  ...

  @typing.overload
- def trigger(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
  ...

- def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}):
+ def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
  """
- Specifies the event(s) that this flow depends on.
-
- ```
- @trigger(event='foo')
- ```
- or
- ```
- @trigger(events=['foo', 'bar'])
- ```
-
- Additionally, you can specify the parameter mappings
- to map event payload to Metaflow parameters for the flow.
- ```
- @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
- ```
- or
- ```
- @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
- {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
- ```
+ Specifies the Conda environment for all steps of the flow.

- 'parameters' can also be a list of strings and tuples like so:
- ```
- @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
- ```
- This is equivalent to:
- ```
- @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
- ```
+ Use `@conda_base` to set common libraries required by all
+ steps and use `@conda` to specify step-specific additions.
  """
  ...

@@ -618,6 +519,13 @@ def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly:
  """
  ...

+ def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ """
+ The `@airflow_external_task_sensor` decorator attaches a Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
+ This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as a flow decorators. Adding more than one decorator will ensure that `start` step starts only after all sensors finish.
+ """
+ ...
+
  @typing.overload
  def trigger_on_finish(*, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
@@ -700,25 +608,117 @@ def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *
  ...

  @typing.overload
- def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
- Specifies the Conda environment for all steps of the flow.
+ Specifies the PyPI packages for all steps of the flow.

- Use `@conda_base` to set common libraries required by all
- steps and use `@conda` to specify step-specific additions.
+ Use `@pypi_base` to set common packages required by all
+ steps and use `@pypi` to specify step-specific overrides.
  """
  ...

  @typing.overload
- def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
  ...

- def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
+ def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
  """
- Specifies the Conda environment for all steps of the flow.
+ Specifies the PyPI packages for all steps of the flow.

- Use `@conda_base` to set common libraries required by all
- steps and use `@conda` to specify step-specific additions.
+ Use `@pypi_base` to set common packages required by all
+ steps and use `@pypi` to specify step-specific overrides.
+ """
+ ...
+
+ def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ """
+ The `@airflow_s3_key_sensor` decorator attaches a Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
+ before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
+ and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
+ added as a flow decorators. Adding more than one decorator will ensure that `start` step
+ starts only after all sensors finish.
+ """
+ ...
+
+ def project(*, name: str) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ """
+ Specifies what flows belong to the same project.
+
+ A project-specific namespace is created for all flows that
+ use the same `@project(name)`.
+ """
+ ...
+
+ @typing.overload
+ def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ """
+ Specifies the event(s) that this flow depends on.
+
+ ```
+ @trigger(event='foo')
+ ```
+ or
+ ```
+ @trigger(events=['foo', 'bar'])
+ ```
+
+ Additionally, you can specify the parameter mappings
+ to map event payload to Metaflow parameters for the flow.
+ ```
+ @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
+ ```
+ or
+ ```
+ @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
+ {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
+ ```
+
+ 'parameters' can also be a list of strings and tuples like so:
+ ```
+ @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
+ ```
+ This is equivalent to:
+ ```
+ @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
+ ```
+ """
+ ...
+
+ @typing.overload
+ def trigger(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ ...
+
+ def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}):
+ """
+ Specifies the event(s) that this flow depends on.
+
+ ```
+ @trigger(event='foo')
+ ```
+ or
+ ```
+ @trigger(events=['foo', 'bar'])
+ ```
+
+ Additionally, you can specify the parameter mappings
+ to map event payload to Metaflow parameters for the flow.
+ ```
+ @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
+ ```
+ or
+ ```
+ @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
+ {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
+ ```
+
+ 'parameters' can also be a list of strings and tuples like so:
+ ```
+ @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
+ ```
+ This is equivalent to:
+ ```
+ @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
+ ```
  """
  ...
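For orientation only (not part of the diff above): the stub changes in `metaflow-stubs/__init__.pyi` are reorderings and signature refreshes of the step- and flow-level decorators that metaflow itself exports. A minimal sketch of how a few of those decorators are typically combined in a flow is shown below; it assumes a local install of metaflow ≥ 2.13.3, and the pinned `requests` version is an arbitrary placeholder.

```python
# Illustrative sketch only: exercises decorators whose stubs changed in this release
# (@card, @catch, @retry, @timeout, @pypi), not an example shipped with the package.
from metaflow import FlowSpec, card, catch, pypi, retry, step, timeout


class StubSmokeFlow(FlowSpec):

    @card(type='default')                       # produce a Metaflow Card for this step
    @catch(var='failure')                       # store any raised exception in self.failure
    @retry(times=3, minutes_between_retries=2)  # retry transient errors before @catch kicks in
    @timeout(minutes=5)                         # a timeout counts as a retriable exception
    @pypi(packages={'requests': '2.32.3'})      # step-specific PyPI dependency (version is arbitrary)
    @step
    def start(self):
        self.value = 1
        self.next(self.end)

    @step
    def end(self):
        # self.failure only exists if @catch actually intercepted an exception
        print('failure artifact:', getattr(self, 'failure', None))


if __name__ == '__main__':
    StubSmokeFlow()
```

With the stubs package installed alongside metaflow, type checkers such as mypy resolve these decorators against the `.pyi` signatures shown in the diff rather than against the runtime implementation.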