metaflow-stubs 2.13.7__py2.py3-none-any.whl → 2.13.9__py2.py3-none-any.whl

This diff shows the changes between publicly available package versions as published to a supported registry. It is provided for informational purposes only.
Files changed (145)
  1. metaflow-stubs/__init__.pyi +255 -255
  2. metaflow-stubs/cards.pyi +2 -2
  3. metaflow-stubs/cli.pyi +2 -2
  4. metaflow-stubs/cli_components/__init__.pyi +2 -2
  5. metaflow-stubs/cli_components/utils.pyi +2 -2
  6. metaflow-stubs/client/__init__.pyi +2 -2
  7. metaflow-stubs/client/core.pyi +5 -5
  8. metaflow-stubs/client/filecache.pyi +2 -2
  9. metaflow-stubs/events.pyi +2 -2
  10. metaflow-stubs/exception.pyi +2 -2
  11. metaflow-stubs/flowspec.pyi +4 -4
  12. metaflow-stubs/generated_for.txt +1 -1
  13. metaflow-stubs/includefile.pyi +3 -3
  14. metaflow-stubs/info_file.pyi +2 -2
  15. metaflow-stubs/metadata_provider/__init__.pyi +2 -2
  16. metaflow-stubs/metadata_provider/heartbeat.pyi +2 -2
  17. metaflow-stubs/metadata_provider/metadata.pyi +2 -2
  18. metaflow-stubs/metadata_provider/util.pyi +2 -2
  19. metaflow-stubs/metaflow_config.pyi +4 -2
  20. metaflow-stubs/metaflow_current.pyi +6 -6
  21. metaflow-stubs/multicore_utils.pyi +2 -2
  22. metaflow-stubs/parameters.pyi +6 -3
  23. metaflow-stubs/plugins/__init__.pyi +13 -13
  24. metaflow-stubs/plugins/airflow/__init__.pyi +2 -2
  25. metaflow-stubs/plugins/airflow/airflow_decorator.pyi +2 -2
  26. metaflow-stubs/plugins/airflow/airflow_utils.pyi +2 -2
  27. metaflow-stubs/plugins/airflow/exception.pyi +2 -2
  28. metaflow-stubs/plugins/airflow/sensors/__init__.pyi +2 -2
  29. metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +2 -2
  30. metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +2 -2
  31. metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +2 -2
  32. metaflow-stubs/plugins/argo/__init__.pyi +2 -2
  33. metaflow-stubs/plugins/argo/argo_client.pyi +2 -2
  34. metaflow-stubs/plugins/argo/argo_events.pyi +2 -2
  35. metaflow-stubs/plugins/argo/argo_workflows.pyi +2 -2
  36. metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +3 -3
  37. metaflow-stubs/plugins/argo/argo_workflows_deployer.pyi +3 -3
  38. metaflow-stubs/plugins/argo/argo_workflows_deployer_objects.pyi +2 -2
  39. metaflow-stubs/plugins/aws/__init__.pyi +3 -3
  40. metaflow-stubs/plugins/aws/aws_client.pyi +2 -2
  41. metaflow-stubs/plugins/aws/aws_utils.pyi +2 -2
  42. metaflow-stubs/plugins/aws/batch/__init__.pyi +2 -2
  43. metaflow-stubs/plugins/aws/batch/batch.pyi +2 -2
  44. metaflow-stubs/plugins/aws/batch/batch_client.pyi +2 -2
  45. metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +2 -2
  46. metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +2 -2
  47. metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +2 -2
  48. metaflow-stubs/plugins/aws/step_functions/__init__.pyi +2 -2
  49. metaflow-stubs/plugins/aws/step_functions/dynamo_db_client.pyi +2 -2
  50. metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +2 -2
  51. metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +2 -2
  52. metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +2 -2
  53. metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +2 -2
  54. metaflow-stubs/plugins/aws/step_functions/step_functions_decorator.pyi +2 -2
  55. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer.pyi +3 -3
  56. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer_objects.pyi +3 -3
  57. metaflow-stubs/plugins/azure/__init__.pyi +2 -2
  58. metaflow-stubs/plugins/azure/azure_credential.pyi +2 -2
  59. metaflow-stubs/plugins/azure/azure_exceptions.pyi +2 -2
  60. metaflow-stubs/plugins/azure/azure_secret_manager_secrets_provider.pyi +2 -2
  61. metaflow-stubs/plugins/azure/azure_utils.pyi +2 -2
  62. metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +2 -2
  63. metaflow-stubs/plugins/azure/includefile_support.pyi +2 -2
  64. metaflow-stubs/plugins/cards/__init__.pyi +2 -2
  65. metaflow-stubs/plugins/cards/card_client.pyi +3 -3
  66. metaflow-stubs/plugins/cards/card_creator.pyi +2 -2
  67. metaflow-stubs/plugins/cards/card_datastore.pyi +2 -2
  68. metaflow-stubs/plugins/cards/card_decorator.pyi +2 -2
  69. metaflow-stubs/plugins/cards/card_modules/__init__.pyi +2 -2
  70. metaflow-stubs/plugins/cards/card_modules/basic.pyi +3 -3
  71. metaflow-stubs/plugins/cards/card_modules/card.pyi +2 -2
  72. metaflow-stubs/plugins/cards/card_modules/components.pyi +3 -3
  73. metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +2 -2
  74. metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +2 -2
  75. metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +2 -2
  76. metaflow-stubs/plugins/cards/card_resolver.pyi +2 -2
  77. metaflow-stubs/plugins/cards/component_serializer.pyi +2 -2
  78. metaflow-stubs/plugins/cards/exception.pyi +2 -2
  79. metaflow-stubs/plugins/catch_decorator.pyi +3 -3
  80. metaflow-stubs/plugins/datatools/__init__.pyi +2 -2
  81. metaflow-stubs/plugins/datatools/local.pyi +2 -2
  82. metaflow-stubs/plugins/datatools/s3/__init__.pyi +2 -2
  83. metaflow-stubs/plugins/datatools/s3/s3.pyi +12 -3
  84. metaflow-stubs/plugins/datatools/s3/s3tail.pyi +2 -2
  85. metaflow-stubs/plugins/datatools/s3/s3util.pyi +2 -2
  86. metaflow-stubs/plugins/debug_logger.pyi +2 -2
  87. metaflow-stubs/plugins/debug_monitor.pyi +2 -2
  88. metaflow-stubs/plugins/environment_decorator.pyi +2 -2
  89. metaflow-stubs/plugins/events_decorator.pyi +2 -2
  90. metaflow-stubs/plugins/frameworks/__init__.pyi +2 -2
  91. metaflow-stubs/plugins/frameworks/pytorch.pyi +2 -2
  92. metaflow-stubs/plugins/gcp/__init__.pyi +2 -2
  93. metaflow-stubs/plugins/gcp/gcp_secret_manager_secrets_provider.pyi +2 -2
  94. metaflow-stubs/plugins/gcp/gs_exceptions.pyi +2 -2
  95. metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +2 -2
  96. metaflow-stubs/plugins/gcp/gs_utils.pyi +2 -2
  97. metaflow-stubs/plugins/gcp/includefile_support.pyi +2 -2
  98. metaflow-stubs/plugins/kubernetes/__init__.pyi +2 -2
  99. metaflow-stubs/plugins/kubernetes/kube_utils.pyi +2 -2
  100. metaflow-stubs/plugins/kubernetes/kubernetes.pyi +2 -2
  101. metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +2 -2
  102. metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +2 -2
  103. metaflow-stubs/plugins/kubernetes/kubernetes_job.pyi +2 -2
  104. metaflow-stubs/plugins/kubernetes/kubernetes_jobsets.pyi +2 -2
  105. metaflow-stubs/plugins/kubernetes/spot_monitor_sidecar.pyi +2 -2
  106. metaflow-stubs/plugins/parallel_decorator.pyi +2 -2
  107. metaflow-stubs/plugins/project_decorator.pyi +2 -2
  108. metaflow-stubs/plugins/pypi/__init__.pyi +3 -3
  109. metaflow-stubs/plugins/pypi/conda_decorator.pyi +2 -2
  110. metaflow-stubs/plugins/pypi/conda_environment.pyi +4 -4
  111. metaflow-stubs/plugins/pypi/pypi_decorator.pyi +2 -2
  112. metaflow-stubs/plugins/pypi/pypi_environment.pyi +2 -2
  113. metaflow-stubs/plugins/pypi/utils.pyi +2 -2
  114. metaflow-stubs/plugins/resources_decorator.pyi +2 -2
  115. metaflow-stubs/plugins/retry_decorator.pyi +2 -2
  116. metaflow-stubs/plugins/secrets/__init__.pyi +2 -2
  117. metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +2 -2
  118. metaflow-stubs/plugins/secrets/secrets_decorator.pyi +2 -2
  119. metaflow-stubs/plugins/storage_executor.pyi +2 -2
  120. metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +2 -2
  121. metaflow-stubs/plugins/timeout_decorator.pyi +3 -3
  122. metaflow-stubs/pylint_wrapper.pyi +2 -2
  123. metaflow-stubs/runner/__init__.pyi +2 -2
  124. metaflow-stubs/runner/deployer.pyi +5 -5
  125. metaflow-stubs/runner/deployer_impl.pyi +2 -2
  126. metaflow-stubs/runner/metaflow_runner.pyi +3 -3
  127. metaflow-stubs/runner/nbdeploy.pyi +2 -2
  128. metaflow-stubs/runner/nbrun.pyi +2 -2
  129. metaflow-stubs/runner/subprocess_manager.pyi +2 -2
  130. metaflow-stubs/runner/utils.pyi +5 -4
  131. metaflow-stubs/system/__init__.pyi +2 -2
  132. metaflow-stubs/system/system_logger.pyi +3 -3
  133. metaflow-stubs/system/system_monitor.pyi +2 -2
  134. metaflow-stubs/tagging_util.pyi +2 -2
  135. metaflow-stubs/tuple_util.pyi +2 -2
  136. metaflow-stubs/user_configs/__init__.pyi +2 -2
  137. metaflow-stubs/user_configs/config_decorators.pyi +7 -7
  138. metaflow-stubs/user_configs/config_options.pyi +2 -2
  139. metaflow-stubs/user_configs/config_parameters.pyi +7 -7
  140. metaflow-stubs/version.pyi +2 -2
  141. {metaflow_stubs-2.13.7.dist-info → metaflow_stubs-2.13.9.dist-info}/METADATA +2 -2
  142. metaflow_stubs-2.13.9.dist-info/RECORD +145 -0
  143. metaflow_stubs-2.13.7.dist-info/RECORD +0 -145
  144. {metaflow_stubs-2.13.7.dist-info → metaflow_stubs-2.13.9.dist-info}/WHEEL +0 -0
  145. {metaflow_stubs-2.13.7.dist-info → metaflow_stubs-2.13.9.dist-info}/top_level.txt +0 -0
@@ -1,15 +1,15 @@
  ######################################################################################################
  # Auto-generated Metaflow stub file #
- # MF version: 2.13.7 #
- # Generated on 2025-01-23T20:47:22.326170 #
+ # MF version: 2.13.9 #
+ # Generated on 2025-01-31T17:23:41.630174 #
  ######################################################################################################

  from __future__ import annotations

  import typing
  if typing.TYPE_CHECKING:
- import typing
  import datetime
+ import typing

  FlowSpecDerived = typing.TypeVar("FlowSpecDerived", bound="FlowSpec", contravariant=False, covariant=False)
  StepFlag = typing.NewType("StepFlag", bool)

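Aside from the version header, this hunk only reorders the `TYPE_CHECKING` imports. The `FlowSpecDerived` TypeVar and `StepFlag` NewType shown above are the vocabulary all the decorator signatures below are written in. For reference, a minimal flow that these stub signatures describe (standard Metaflow usage, not part of this diff; the flow name is illustrative):

```python
from metaflow import FlowSpec, step

class HelloFlow(FlowSpec):
    # `@step` is typed in the stub as taking a method on a FlowSpec
    # subclass (FlowSpecDerived) and returning it unchanged.
    @step
    def start(self):
        self.message = "hello"
        self.next(self.end)

    @step
    def end(self):
        print(self.message)

if __name__ == "__main__":
    HelloFlow()
```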
@@ -142,45 +142,6 @@ def step(f: typing.Union[typing.Callable[[FlowSpecDerived], None], typing.Callab
  """
  ...

- @typing.overload
- def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Specifies the number of times the task corresponding
- to a step needs to be retried.
-
- This decorator is useful for handling transient errors, such as networking issues.
- If your task contains operations that can't be retried safely, e.g. database updates,
- it is advisable to annotate it with `@retry(times=0)`.
-
- This can be used in conjunction with the `@catch` decorator. The `@catch`
- decorator will execute a no-op task after all retries have been exhausted,
- ensuring that the flow execution can continue.
- """
- ...
-
- @typing.overload
- def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
- """
- Specifies the number of times the task corresponding
- to a step needs to be retried.
-
- This decorator is useful for handling transient errors, such as networking issues.
- If your task contains operations that can't be retried safely, e.g. database updates,
- it is advisable to annotate it with `@retry(times=0)`.
-
- This can be used in conjunction with the `@catch` decorator. The `@catch`
- decorator will execute a no-op task after all retries have been exhausted,
- ensuring that the flow execution can continue.
- """
- ...
-
  @typing.overload
  def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
@@ -220,107 +181,172 @@ def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None],
  """
  ...

+ def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = 'KUBERNETES_IMAGE_PULL_POLICY', service_account: str = 'METAFLOW_KUBERNETES_SERVICE_ACCOUNT', secrets: typing.Optional[typing.List[str]] = None, node_selector: typing.Union[typing.Dict[str, str], str, None] = None, namespace: str = 'METAFLOW_KUBERNETES_NAMESPACE', gpu: typing.Optional[int] = None, gpu_vendor: str = 'KUBERNETES_GPU_VENDOR', tolerations: typing.List[str] = [], labels: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_LABELS', annotations: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_ANNOTATIONS', use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = '/metaflow_temp', persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None, hostname_resolution_timeout: int = 600, qos: str = 'Burstable') -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Specifies that this step should execute on Kubernetes.
+ """
+ ...
+
  @typing.overload
- def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def batch(*, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = 'METAFLOW_BATCH_JOB_QUEUE', iam_role: str = 'METAFLOW_ECS_S3_ACCESS_IAM_ROLE', execution_role: str = 'METAFLOW_ECS_FARGATE_EXECUTION_ROLE', shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies environment variables to be set prior to the execution of a step.
+ Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
  """
  ...

  @typing.overload
- def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def batch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def batch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
+ def batch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = 'METAFLOW_BATCH_JOB_QUEUE', iam_role: str = 'METAFLOW_ECS_S3_ACCESS_IAM_ROLE', execution_role: str = 'METAFLOW_ECS_FARGATE_EXECUTION_ROLE', shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None):
  """
- Specifies environment variables to be set prior to the execution of a step.
+ Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
  """
  ...

  @typing.overload
- def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies the Conda environment for the step.
+ Specifies the PyPI packages for the step.

  Information in this decorator will augment any
- attributes set in the `@conda_base` flow-level decorator. Hence,
- you can use `@conda_base` to set packages required by all
- steps and use `@conda` to specify step-specific overrides.
+ attributes set in the `@pyi_base` flow-level decorator. Hence,
+ you can use `@pypi_base` to set packages required by all
+ steps and use `@pypi` to specify step-specific overrides.
  """
  ...

  @typing.overload
- def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
+ def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
  """
- Specifies the Conda environment for the step.
+ Specifies the PyPI packages for the step.

  Information in this decorator will augment any
- attributes set in the `@conda_base` flow-level decorator. Hence,
- you can use `@conda_base` to set packages required by all
- steps and use `@conda` to specify step-specific overrides.
+ attributes set in the `@pyi_base` flow-level decorator. Hence,
+ you can use `@pypi_base` to set packages required by all
+ steps and use `@pypi` to specify step-specific overrides.
  """
  ...

  @typing.overload
- def batch(*, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = 'METAFLOW_BATCH_JOB_QUEUE', iam_role: str = 'METAFLOW_ECS_S3_ACCESS_IAM_ROLE', execution_role: str = 'METAFLOW_ECS_FARGATE_EXECUTION_ROLE', shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def resources(*, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
+ Specifies the resources needed when executing this step.
+
+ Use `@resources` to specify the resource requirements
+ independently of the specific compute layer (`@batch`, `@kubernetes`).
+
+ You can choose the compute layer on the command line by executing e.g.
+ ```
+ python myflow.py run --with batch
+ ```
+ or
+ ```
+ python myflow.py run --with kubernetes
+ ```
+ which executes the flow on the desired system using the
+ requirements specified in `@resources`.
  """
  ...

  @typing.overload
- def batch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def batch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def batch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = 'METAFLOW_BATCH_JOB_QUEUE', iam_role: str = 'METAFLOW_ECS_S3_ACCESS_IAM_ROLE', execution_role: str = 'METAFLOW_ECS_FARGATE_EXECUTION_ROLE', shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None):
+ def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
  """
- Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
+ Specifies the resources needed when executing this step.
+
+ Use `@resources` to specify the resource requirements
+ independently of the specific compute layer (`@batch`, `@kubernetes`).
+
+ You can choose the compute layer on the command line by executing e.g.
+ ```
+ python myflow.py run --with batch
+ ```
+ or
+ ```
+ python myflow.py run --with kubernetes
+ ```
+ which executes the flow on the desired system using the
+ requirements specified in `@resources`.
  """
  ...

  @typing.overload
- def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies the PyPI packages for the step.
+ Specifies secrets to be retrieved and injected as environment variables prior to
+ the execution of a step.
+ """
+ ...
+
+ @typing.overload
+ def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ ...
+
+ @typing.overload
+ def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []):
+ """
+ Specifies secrets to be retrieved and injected as environment variables prior to
+ the execution of a step.
+ """
+ ...
+
+ @typing.overload
+ def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Specifies the number of times the task corresponding
+ to a step needs to be retried.

- Information in this decorator will augment any
- attributes set in the `@pyi_base` flow-level decorator. Hence,
- you can use `@pypi_base` to set packages required by all
- steps and use `@pypi` to specify step-specific overrides.
+ This decorator is useful for handling transient errors, such as networking issues.
+ If your task contains operations that can't be retried safely, e.g. database updates,
+ it is advisable to annotate it with `@retry(times=0)`.
+
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
+ decorator will execute a no-op task after all retries have been exhausted,
+ ensuring that the flow execution can continue.
  """
  ...

  @typing.overload
- def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
+ def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
  """
- Specifies the PyPI packages for the step.
+ Specifies the number of times the task corresponding
+ to a step needs to be retried.

- Information in this decorator will augment any
- attributes set in the `@pyi_base` flow-level decorator. Hence,
- you can use `@pypi_base` to set packages required by all
- steps and use `@pypi` to specify step-specific overrides.
+ This decorator is useful for handling transient errors, such as networking issues.
+ If your task contains operations that can't be retried safely, e.g. database updates,
+ it is advisable to annotate it with `@retry(times=0)`.
+
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
+ decorator will execute a no-op task after all retries have been exhausted,
+ ensuring that the flow execution can continue.
  """
  ...

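As the `@resources` docstring in this hunk describes, the decorator carries only requirements; the compute layer is chosen at run time. A minimal sketch grounded in that docstring (flow name and numbers are illustrative):

```python
from metaflow import FlowSpec, step, resources

class ResourceDemoFlow(FlowSpec):

    @resources(cpu=2, memory=8192)  # requirements only; no compute layer implied
    @step
    def start(self):
        self.total = sum(range(1_000_000))
        self.next(self.end)

    @step
    def end(self):
        print("total =", self.total)

if __name__ == "__main__":
    ResourceDemoFlow()
```

Running `python resource_demo.py run --with batch` (or `--with kubernetes`) then applies these requirements on the chosen layer, exactly as the docstring above states.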
@@ -350,99 +376,73 @@ def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typ
  ...

  @typing.overload
- def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Decorator prototype for all step decorators. This function gets specialized
- and imported for all decorators types by _import_plugin_decorators().
+ Specifies the Conda environment for the step.
+
+ Information in this decorator will augment any
+ attributes set in the `@conda_base` flow-level decorator. Hence,
+ you can use `@conda_base` to set packages required by all
+ steps and use `@conda` to specify step-specific overrides.
  """
  ...

  @typing.overload
- def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

- def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
+ @typing.overload
+ def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
  """
- Decorator prototype for all step decorators. This function gets specialized
- and imported for all decorators types by _import_plugin_decorators().
+ Specifies the Conda environment for the step.
+
+ Information in this decorator will augment any
+ attributes set in the `@conda_base` flow-level decorator. Hence,
+ you can use `@conda_base` to set packages required by all
+ steps and use `@conda` to specify step-specific overrides.
  """
  ...

  @typing.overload
- def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies secrets to be retrieved and injected as environment variables prior to
- the execution of a step.
+ Specifies environment variables to be set prior to the execution of a step.
  """
  ...

  @typing.overload
- def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []):
+ def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
  """
- Specifies secrets to be retrieved and injected as environment variables prior to
- the execution of a step.
+ Specifies environment variables to be set prior to the execution of a step.
  """
  ...

  @typing.overload
- def resources(*, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  """
- Specifies the resources needed when executing this step.
-
- Use `@resources` to specify the resource requirements
- independently of the specific compute layer (`@batch`, `@kubernetes`).
-
- You can choose the compute layer on the command line by executing e.g.
- ```
- python myflow.py run --with batch
- ```
- or
- ```
- python myflow.py run --with kubernetes
- ```
- which executes the flow on the desired system using the
- requirements specified in `@resources`.
+ Decorator prototype for all step decorators. This function gets specialized
+ and imported for all decorators types by _import_plugin_decorators().
  """
  ...

  @typing.overload
- def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
- """
- Specifies the resources needed when executing this step.
-
- Use `@resources` to specify the resource requirements
- independently of the specific compute layer (`@batch`, `@kubernetes`).
-
- You can choose the compute layer on the command line by executing e.g.
- ```
- python myflow.py run --with batch
- ```
- or
- ```
- python myflow.py run --with kubernetes
- ```
- which executes the flow on the desired system using the
- requirements specified in `@resources`.
- """
+ def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = 'KUBERNETES_IMAGE_PULL_POLICY', service_account: str = 'METAFLOW_KUBERNETES_SERVICE_ACCOUNT', secrets: typing.Optional[typing.List[str]] = None, node_selector: typing.Union[typing.Dict[str, str], str, None] = None, namespace: str = 'METAFLOW_KUBERNETES_NAMESPACE', gpu: typing.Optional[int] = None, gpu_vendor: str = 'KUBERNETES_GPU_VENDOR', tolerations: typing.List[str] = [], labels: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_LABELS', annotations: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_ANNOTATIONS', use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = '/metaflow_temp', persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None, hostname_resolution_timeout: int = 600, qos: str = 'Burstable') -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
  """
- Specifies that this step should execute on Kubernetes.
+ Decorator prototype for all step decorators. This function gets specialized
+ and imported for all decorators types by _import_plugin_decorators().
  """
  ...

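This hunk reorders `@conda`, `@environment`, and `@parallel` within the stub without changing their signatures. Grounded in the `@conda` docstring above (flow-level defaults via `@conda_base`, step-level overrides via `@conda`), a short sketch; the package pins and the environment variable are assumed values:

```python
import os

from metaflow import FlowSpec, step, conda, conda_base, environment

@conda_base(python="3.11", packages={"pandas": "2.2.0"})  # assumed pins shared by all steps
class CondaDemoFlow(FlowSpec):

    @environment(vars={"DEMO_MODE": "1"})   # set prior to the step's execution
    @conda(packages={"pyarrow": "15.0.0"})  # augments @conda_base for this step only
    @step
    def start(self):
        import pandas as pd
        self.rows = len(pd.DataFrame({"x": [1, 2, 3]}))
        self.demo_mode = os.environ["DEMO_MODE"]
        self.next(self.end)

    @step
    def end(self):
        print(self.rows, self.demo_mode)

if __name__ == "__main__":
    CondaDemoFlow()
```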
@@ -477,69 +477,130 @@ def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
  """
  ...

- @typing.overload
- def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
- Specifies the times when the flow should be run when running on a
- production scheduler.
+ The `@airflow_s3_key_sensor` decorator attaches a Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
+ before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
+ and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
+ added as a flow decorators. Adding more than one decorator will ensure that `start` step
+ starts only after all sensors finish.
  """
  ...

  @typing.overload
- def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
- ...
-
- def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
+ def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
- Specifies the times when the flow should be run when running on a
- production scheduler.
+ Specifies the event(s) that this flow depends on.
+
+ ```
+ @trigger(event='foo')
+ ```
+ or
+ ```
+ @trigger(events=['foo', 'bar'])
+ ```
+
+ Additionally, you can specify the parameter mappings
+ to map event payload to Metaflow parameters for the flow.
+ ```
+ @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
+ ```
+ or
+ ```
+ @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
+ {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
+ ```
+
+ 'parameters' can also be a list of strings and tuples like so:
+ ```
+ @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
+ ```
+ This is equivalent to:
+ ```
+ @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
+ ```
  """
  ...

  @typing.overload
- def trigger_on_finish(*, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ def trigger(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ ...
+
+ def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}):
  """
- Specifies the flow(s) that this flow depends on.
+ Specifies the event(s) that this flow depends on.

  ```
- @trigger_on_finish(flow='FooFlow')
+ @trigger(event='foo')
  ```
  or
  ```
- @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
+ @trigger(events=['foo', 'bar'])
  ```
- This decorator respects the @project decorator and triggers the flow
- when upstream runs within the same namespace complete successfully

- Additionally, you can specify project aware upstream flow dependencies
- by specifying the fully qualified project_flow_name.
+ Additionally, you can specify the parameter mappings
+ to map event payload to Metaflow parameters for the flow.
  ```
- @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
+ @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
  ```
  or
  ```
- @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
+ @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
+ {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
  ```

- You can also specify just the project or project branch (other values will be
- inferred from the current project or project branch):
+ 'parameters' can also be a list of strings and tuples like so:
  ```
- @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
+ @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
+ ```
+ This is equivalent to:
+ ```
+ @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
  ```
+ """
+ ...
+
+ @typing.overload
+ def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ """
+ Specifies the PyPI packages for all steps of the flow.

- Note that `branch` is typically one of:
- - `prod`
- - `user.bob`
- - `test.my_experiment`
- - `prod.staging`
+ Use `@pypi_base` to set common packages required by all
+ steps and use `@pypi` to specify step-specific overrides.
  """
  ...

  @typing.overload
- def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
  ...

- def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
+ def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
+ """
+ Specifies the PyPI packages for all steps of the flow.
+
+ Use `@pypi_base` to set common packages required by all
+ steps and use `@pypi` to specify step-specific overrides.
+ """
+ ...
+
+ def project(*, name: str) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ """
+ Specifies what flows belong to the same project.
+
+ A project-specific namespace is created for all flows that
+ use the same `@project(name)`.
+ """
+ ...
+
+ def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ """
+ The `@airflow_external_task_sensor` decorator attaches a Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
+ This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as a flow decorators. Adding more than one decorator will ensure that `start` step starts only after all sensors finish.
+ """
+ ...
+
+ @typing.overload
+ def trigger_on_finish(*, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
  Specifies the flow(s) that this flow depends on.

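The `@trigger` docstring added in this hunk shows both plain event names and parameter mappings. A sketch of the mapped form, assuming nothing beyond that docstring (the event name, payload field, and parameter name are made up):

```python
from metaflow import FlowSpec, Parameter, step, trigger

# Map the event payload field 'payload_path' onto the flow parameter 'input_path'.
@trigger(event={"name": "data_ready", "parameters": {"input_path": "payload_path"}})
class TriggeredFlow(FlowSpec):

    input_path = Parameter("input_path", default="s3://example-bucket/raw")

    @step
    def start(self):
        print("triggered with input_path =", self.input_path)
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    TriggeredFlow()
```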
@@ -578,107 +639,63 @@ def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *
  ...

  @typing.overload
- def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
- """
- Specifies the event(s) that this flow depends on.
-
- ```
- @trigger(event='foo')
- ```
- or
- ```
- @trigger(events=['foo', 'bar'])
- ```
-
- Additionally, you can specify the parameter mappings
- to map event payload to Metaflow parameters for the flow.
- ```
- @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
- ```
- or
- ```
- @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
- {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
- ```
-
- 'parameters' can also be a list of strings and tuples like so:
- ```
- @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
- ```
- This is equivalent to:
- ```
- @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
- ```
- """
- ...
-
- @typing.overload
- def trigger(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
  ...

- def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}):
+ def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
  """
- Specifies the event(s) that this flow depends on.
+ Specifies the flow(s) that this flow depends on.

  ```
- @trigger(event='foo')
+ @trigger_on_finish(flow='FooFlow')
  ```
  or
  ```
- @trigger(events=['foo', 'bar'])
+ @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
  ```
+ This decorator respects the @project decorator and triggers the flow
+ when upstream runs within the same namespace complete successfully

- Additionally, you can specify the parameter mappings
- to map event payload to Metaflow parameters for the flow.
+ Additionally, you can specify project aware upstream flow dependencies
+ by specifying the fully qualified project_flow_name.
  ```
- @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
+ @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
  ```
  or
  ```
- @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
- {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
+ @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
  ```

- 'parameters' can also be a list of strings and tuples like so:
- ```
- @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
- ```
- This is equivalent to:
+ You can also specify just the project or project branch (other values will be
+ inferred from the current project or project branch):
  ```
- @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
+ @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
  ```
- """
- ...
-
- def project(*, name: str) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
- """
- Specifies what flows belong to the same project.

- A project-specific namespace is created for all flows that
- use the same `@project(name)`.
+ Note that `branch` is typically one of:
+ - `prod`
+ - `user.bob`
+ - `test.my_experiment`
+ - `prod.staging`
  """
  ...

  @typing.overload
- def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
- Specifies the PyPI packages for all steps of the flow.
-
- Use `@pypi_base` to set common packages required by all
- steps and use `@pypi` to specify step-specific overrides.
+ Specifies the times when the flow should be run when running on a
+ production scheduler.
  """
  ...

  @typing.overload
- def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
  ...

- def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
+ def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
  """
- Specifies the PyPI packages for all steps of the flow.
-
- Use `@pypi_base` to set common packages required by all
- steps and use `@pypi` to specify step-specific overrides.
+ Specifies the times when the flow should be run when running on a
+ production scheduler.
  """
  ...

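Per the `@trigger_on_finish` docstring above, upstream flows resolve within the `@project` namespace unless fully qualified; `@schedule` from the same hunk is the time-based alternative. A sketch of the namespaced form (the project and upstream flow names are illustrative):

```python
from metaflow import FlowSpec, step, project, trigger_on_finish

@project(name="demo_project")            # namespace shared with the upstream flow
@trigger_on_finish(flow="UpstreamFlow")  # fires when UpstreamFlow succeeds in this namespace
class DownstreamFlow(FlowSpec):

    @step
    def start(self):
        print("UpstreamFlow finished; starting downstream work")
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    DownstreamFlow()
```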
@@ -705,20 +722,3 @@ def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packa
  """
  ...

- def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
- """
- The `@airflow_s3_key_sensor` decorator attaches a Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
- before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
- and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
- added as a flow decorators. Adding more than one decorator will ensure that `start` step
- starts only after all sensors finish.
- """
- ...
-
- def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
- """
- The `@airflow_external_task_sensor` decorator attaches a Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
- This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as a flow decorators. Adding more than one decorator will ensure that `start` step starts only after all sensors finish.
- """
- ...
-
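The sensor stubs removed here also reappear earlier in the file (see the `@@ -477,69 +477,130 @@` hunk), so this final hunk is likewise a reordering. Based on the keyword-only signature shown, a sketch of gating a flow on an S3 key when deploying to Airflow; every argument value below is illustrative, since the stub defines no defaults:

```python
from metaflow import FlowSpec, step, airflow_s3_key_sensor

@airflow_s3_key_sensor(
    timeout=3600,
    poke_interval=60,
    mode="reschedule",
    exponential_backoff=False,
    pool="default_pool",
    soft_fail=False,
    name="wait_for_input",
    description="Block start until the raw file lands",
    bucket_key="raw/input.csv",
    bucket_name="example-bucket",
    wildcard_match=False,
    aws_conn_id="aws_default",
    verify=True,
)
class SensorGatedFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    SensorGatedFlow()
```

As the docstring notes, the sensor only takes effect when the flow is compiled with `airflow create`.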