metaflow-stubs 2.13.4__py2.py3-none-any.whl → 2.13.6__py2.py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as they appear in their public registry. It is provided for informational purposes only.
Files changed (145)
  1. metaflow-stubs/__init__.pyi +208 -208
  2. metaflow-stubs/cards.pyi +2 -2
  3. metaflow-stubs/cli.pyi +2 -2
  4. metaflow-stubs/cli_components/__init__.pyi +2 -2
  5. metaflow-stubs/cli_components/utils.pyi +2 -2
  6. metaflow-stubs/client/__init__.pyi +2 -2
  7. metaflow-stubs/client/core.pyi +6 -6
  8. metaflow-stubs/client/filecache.pyi +3 -3
  9. metaflow-stubs/events.pyi +3 -3
  10. metaflow-stubs/exception.pyi +2 -2
  11. metaflow-stubs/flowspec.pyi +5 -5
  12. metaflow-stubs/generated_for.txt +1 -1
  13. metaflow-stubs/includefile.pyi +4 -4
  14. metaflow-stubs/info_file.pyi +2 -2
  15. metaflow-stubs/metadata_provider/__init__.pyi +2 -2
  16. metaflow-stubs/metadata_provider/heartbeat.pyi +2 -2
  17. metaflow-stubs/metadata_provider/metadata.pyi +2 -2
  18. metaflow-stubs/metadata_provider/util.pyi +2 -2
  19. metaflow-stubs/metaflow_config.pyi +2 -2
  20. metaflow-stubs/metaflow_current.pyi +23 -23
  21. metaflow-stubs/multicore_utils.pyi +2 -2
  22. metaflow-stubs/parameters.pyi +4 -4
  23. metaflow-stubs/plugins/__init__.pyi +12 -12
  24. metaflow-stubs/plugins/airflow/__init__.pyi +2 -2
  25. metaflow-stubs/plugins/airflow/airflow_decorator.pyi +2 -2
  26. metaflow-stubs/plugins/airflow/airflow_utils.pyi +2 -2
  27. metaflow-stubs/plugins/airflow/exception.pyi +2 -2
  28. metaflow-stubs/plugins/airflow/sensors/__init__.pyi +2 -2
  29. metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +2 -2
  30. metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +2 -2
  31. metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +2 -2
  32. metaflow-stubs/plugins/argo/__init__.pyi +2 -2
  33. metaflow-stubs/plugins/argo/argo_client.pyi +2 -2
  34. metaflow-stubs/plugins/argo/argo_events.pyi +2 -2
  35. metaflow-stubs/plugins/argo/argo_workflows.pyi +2 -2
  36. metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +3 -3
  37. metaflow-stubs/plugins/argo/argo_workflows_deployer.pyi +3 -3
  38. metaflow-stubs/plugins/argo/argo_workflows_deployer_objects.pyi +4 -4
  39. metaflow-stubs/plugins/aws/__init__.pyi +4 -4
  40. metaflow-stubs/plugins/aws/aws_client.pyi +2 -2
  41. metaflow-stubs/plugins/aws/aws_utils.pyi +2 -2
  42. metaflow-stubs/plugins/aws/batch/__init__.pyi +2 -2
  43. metaflow-stubs/plugins/aws/batch/batch.pyi +2 -2
  44. metaflow-stubs/plugins/aws/batch/batch_client.pyi +2 -2
  45. metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +2 -2
  46. metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +2 -2
  47. metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +2 -2
  48. metaflow-stubs/plugins/aws/step_functions/__init__.pyi +2 -2
  49. metaflow-stubs/plugins/aws/step_functions/dynamo_db_client.pyi +2 -2
  50. metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +2 -2
  51. metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +2 -2
  52. metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +2 -2
  53. metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +2 -2
  54. metaflow-stubs/plugins/aws/step_functions/step_functions_decorator.pyi +2 -2
  55. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer.pyi +4 -4
  56. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer_objects.pyi +3 -3
  57. metaflow-stubs/plugins/azure/__init__.pyi +2 -2
  58. metaflow-stubs/plugins/azure/azure_credential.pyi +2 -2
  59. metaflow-stubs/plugins/azure/azure_exceptions.pyi +2 -2
  60. metaflow-stubs/plugins/azure/azure_secret_manager_secrets_provider.pyi +2 -2
  61. metaflow-stubs/plugins/azure/azure_utils.pyi +2 -2
  62. metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +2 -2
  63. metaflow-stubs/plugins/azure/includefile_support.pyi +2 -2
  64. metaflow-stubs/plugins/cards/__init__.pyi +2 -2
  65. metaflow-stubs/plugins/cards/card_client.pyi +3 -3
  66. metaflow-stubs/plugins/cards/card_creator.pyi +2 -2
  67. metaflow-stubs/plugins/cards/card_datastore.pyi +2 -2
  68. metaflow-stubs/plugins/cards/card_decorator.pyi +2 -2
  69. metaflow-stubs/plugins/cards/card_modules/__init__.pyi +2 -2
  70. metaflow-stubs/plugins/cards/card_modules/basic.pyi +2 -2
  71. metaflow-stubs/plugins/cards/card_modules/card.pyi +2 -2
  72. metaflow-stubs/plugins/cards/card_modules/components.pyi +4 -4
  73. metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +2 -2
  74. metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +2 -2
  75. metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +2 -2
  76. metaflow-stubs/plugins/cards/card_resolver.pyi +2 -2
  77. metaflow-stubs/plugins/cards/component_serializer.pyi +2 -2
  78. metaflow-stubs/plugins/cards/exception.pyi +2 -2
  79. metaflow-stubs/plugins/catch_decorator.pyi +3 -3
  80. metaflow-stubs/plugins/datatools/__init__.pyi +2 -2
  81. metaflow-stubs/plugins/datatools/local.pyi +2 -2
  82. metaflow-stubs/plugins/datatools/s3/__init__.pyi +2 -2
  83. metaflow-stubs/plugins/datatools/s3/s3.pyi +4 -4
  84. metaflow-stubs/plugins/datatools/s3/s3tail.pyi +2 -2
  85. metaflow-stubs/plugins/datatools/s3/s3util.pyi +2 -2
  86. metaflow-stubs/plugins/debug_logger.pyi +2 -2
  87. metaflow-stubs/plugins/debug_monitor.pyi +2 -2
  88. metaflow-stubs/plugins/environment_decorator.pyi +2 -2
  89. metaflow-stubs/plugins/events_decorator.pyi +2 -2
  90. metaflow-stubs/plugins/frameworks/__init__.pyi +2 -2
  91. metaflow-stubs/plugins/frameworks/pytorch.pyi +2 -2
  92. metaflow-stubs/plugins/gcp/__init__.pyi +2 -2
  93. metaflow-stubs/plugins/gcp/gcp_secret_manager_secrets_provider.pyi +2 -2
  94. metaflow-stubs/plugins/gcp/gs_exceptions.pyi +2 -2
  95. metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +2 -2
  96. metaflow-stubs/plugins/gcp/gs_utils.pyi +2 -2
  97. metaflow-stubs/plugins/gcp/includefile_support.pyi +2 -2
  98. metaflow-stubs/plugins/kubernetes/__init__.pyi +3 -2
  99. metaflow-stubs/plugins/kubernetes/kube_utils.pyi +2 -2
  100. metaflow-stubs/plugins/kubernetes/kubernetes.pyi +2 -2
  101. metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +2 -2
  102. metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +2 -2
  103. metaflow-stubs/plugins/kubernetes/kubernetes_job.pyi +2 -2
  104. metaflow-stubs/plugins/kubernetes/kubernetes_jobsets.pyi +2 -2
  105. metaflow-stubs/plugins/kubernetes/spot_monitor_sidecar.pyi +20 -0
  106. metaflow-stubs/plugins/parallel_decorator.pyi +2 -2
  107. metaflow-stubs/plugins/project_decorator.pyi +2 -2
  108. metaflow-stubs/plugins/pypi/__init__.pyi +3 -3
  109. metaflow-stubs/plugins/pypi/conda_decorator.pyi +2 -2
  110. metaflow-stubs/plugins/pypi/conda_environment.pyi +5 -5
  111. metaflow-stubs/plugins/pypi/pypi_decorator.pyi +2 -2
  112. metaflow-stubs/plugins/pypi/pypi_environment.pyi +2 -2
  113. metaflow-stubs/plugins/pypi/utils.pyi +2 -2
  114. metaflow-stubs/plugins/resources_decorator.pyi +2 -2
  115. metaflow-stubs/plugins/retry_decorator.pyi +2 -2
  116. metaflow-stubs/plugins/secrets/__init__.pyi +3 -3
  117. metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +3 -3
  118. metaflow-stubs/plugins/secrets/secrets_decorator.pyi +2 -2
  119. metaflow-stubs/plugins/storage_executor.pyi +2 -2
  120. metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +2 -2
  121. metaflow-stubs/plugins/timeout_decorator.pyi +3 -3
  122. metaflow-stubs/pylint_wrapper.pyi +2 -2
  123. metaflow-stubs/runner/__init__.pyi +2 -2
  124. metaflow-stubs/runner/deployer.pyi +29 -29
  125. metaflow-stubs/runner/deployer_impl.pyi +3 -3
  126. metaflow-stubs/runner/metaflow_runner.pyi +4 -4
  127. metaflow-stubs/runner/nbdeploy.pyi +2 -2
  128. metaflow-stubs/runner/nbrun.pyi +2 -2
  129. metaflow-stubs/runner/subprocess_manager.pyi +2 -2
  130. metaflow-stubs/runner/utils.pyi +3 -3
  131. metaflow-stubs/system/__init__.pyi +2 -2
  132. metaflow-stubs/system/system_logger.pyi +3 -3
  133. metaflow-stubs/system/system_monitor.pyi +2 -2
  134. metaflow-stubs/tagging_util.pyi +2 -2
  135. metaflow-stubs/tuple_util.pyi +2 -2
  136. metaflow-stubs/user_configs/__init__.pyi +2 -2
  137. metaflow-stubs/user_configs/config_decorators.pyi +5 -5
  138. metaflow-stubs/user_configs/config_options.pyi +3 -3
  139. metaflow-stubs/user_configs/config_parameters.pyi +6 -6
  140. metaflow-stubs/version.pyi +2 -2
  141. {metaflow_stubs-2.13.4.dist-info → metaflow_stubs-2.13.6.dist-info}/METADATA +2 -2
  142. metaflow_stubs-2.13.6.dist-info/RECORD +145 -0
  143. metaflow_stubs-2.13.4.dist-info/RECORD +0 -144
  144. {metaflow_stubs-2.13.4.dist-info → metaflow_stubs-2.13.6.dist-info}/WHEEL +0 -0
  145. {metaflow_stubs-2.13.4.dist-info → metaflow_stubs-2.13.6.dist-info}/top_level.txt +0 -0
metaflow-stubs/__init__.pyi CHANGED
@@ -1,15 +1,15 @@
  ######################################################################################################
  # Auto-generated Metaflow stub file #
- # MF version: 2.13.4 #
- # Generated on 2025-01-15T17:53:58.657195 #
+ # MF version: 2.13.6 #
+ # Generated on 2025-01-23T12:09:53.031777 #
  ######################################################################################################

  from __future__ import annotations

  import typing
  if typing.TYPE_CHECKING:
- import datetime
  import typing
+ import datetime
  FlowSpecDerived = typing.TypeVar("FlowSpecDerived", bound="FlowSpec", contravariant=False, covariant=False)
  StepFlag = typing.NewType("StepFlag", bool)

@@ -35,8 +35,8 @@ from .user_configs.config_parameters import ConfigValue as ConfigValue
  from .user_configs.config_parameters import config_expr as config_expr
  from .user_configs.config_decorators import CustomFlowDecorator as CustomFlowDecorator
  from .user_configs.config_decorators import CustomStepDecorator as CustomStepDecorator
- from . import events as events
  from . import tuple_util as tuple_util
+ from . import events as events
  from . import runner as runner
  from . import plugins as plugins
  from .plugins.datatools.s3.s3 import S3 as S3
@@ -143,69 +143,62 @@ def step(f: typing.Union[typing.Callable[[FlowSpecDerived], None], typing.Callab
  ...

  @typing.overload
- def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- """
- Decorator prototype for all step decorators. This function gets specialized
- and imported for all decorators types by _import_plugin_decorators().
- """
- ...
-
- @typing.overload
- def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
- """
- Decorator prototype for all step decorators. This function gets specialized
- and imported for all decorators types by _import_plugin_decorators().
- """
- ...
-
- @typing.overload
- def card(*, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Creates a human-readable report, a Metaflow Card, after this step completes.
+ Specifies a timeout for your step.

- Note that you may add multiple `@card` decorators in a step with different parameters.
+ This decorator is useful if this step may hang indefinitely.
+
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.
+
+ Note that all the values specified in parameters are added together so if you specify
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
  """
  ...

  @typing.overload
- def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
+ def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
  """
- Creates a human-readable report, a Metaflow Card, after this step completes.
+ Specifies a timeout for your step.

- Note that you may add multiple `@card` decorators in a step with different parameters.
+ This decorator is useful if this step may hang indefinitely.
+
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.
+
+ Note that all the values specified in parameters are added together so if you specify
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
  """
  ...

  @typing.overload
- def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def batch(*, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = 'METAFLOW_BATCH_JOB_QUEUE', iam_role: str = 'METAFLOW_ECS_S3_ACCESS_IAM_ROLE', execution_role: str = 'METAFLOW_ECS_FARGATE_EXECUTION_ROLE', shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies secrets to be retrieved and injected as environment variables prior to
- the execution of a step.
+ Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
  """
  ...

  @typing.overload
- def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def batch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def batch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []):
+ def batch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = 'METAFLOW_BATCH_JOB_QUEUE', iam_role: str = 'METAFLOW_ECS_S3_ACCESS_IAM_ROLE', execution_role: str = 'METAFLOW_ECS_FARGATE_EXECUTION_ROLE', shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None):
  """
- Specifies secrets to be retrieved and injected as environment variables prior to
- the execution of a step.
+ Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
  """
  ...
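The `@timeout` docstring above notes that the time parameters are added together. A minimal usage sketch, not part of the diff (the flow and step names are illustrative):

```
from metaflow import FlowSpec, step, timeout

class TimeoutExampleFlow(FlowSpec):

    # seconds and hours are summed: the effective timeout is 1 hour 1 minute
    @timeout(hours=1, seconds=60)
    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    TimeoutExampleFlow()
```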

@@ -248,100 +241,88 @@ def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
  """
  ...

- def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = 'KUBERNETES_IMAGE_PULL_POLICY', service_account: str = 'METAFLOW_KUBERNETES_SERVICE_ACCOUNT', secrets: typing.Optional[typing.List[str]] = None, node_selector: typing.Union[typing.Dict[str, str], str, None] = None, namespace: str = 'METAFLOW_KUBERNETES_NAMESPACE', gpu: typing.Optional[int] = None, gpu_vendor: str = 'KUBERNETES_GPU_VENDOR', tolerations: typing.List[str] = [], labels: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_LABELS', annotations: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_ANNOTATIONS', use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = '/metaflow_temp', persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None, hostname_resolution_timeout: int = 600, qos: str = 'Burstable') -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Specifies that this step should execute on Kubernetes.
- """
- ...
-
  @typing.overload
- def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies a timeout for your step.
-
- This decorator is useful if this step may hang indefinitely.
-
- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
+ Specifies the Conda environment for the step.

- Note that all the values specified in parameters are added together so if you specify
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
+ Information in this decorator will augment any
+ attributes set in the `@conda_base` flow-level decorator. Hence,
+ you can use `@conda_base` to set packages required by all
+ steps and use `@conda` to specify step-specific overrides.
  """
  ...

  @typing.overload
- def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
+ def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
  """
- Specifies a timeout for your step.
-
- This decorator is useful if this step may hang indefinitely.
-
- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
+ Specifies the Conda environment for the step.

- Note that all the values specified in parameters are added together so if you specify
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
+ Information in this decorator will augment any
+ attributes set in the `@conda_base` flow-level decorator. Hence,
+ you can use `@conda_base` to set packages required by all
+ steps and use `@conda` to specify step-specific overrides.
+ """
+ ...
+
+ def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = 'KUBERNETES_IMAGE_PULL_POLICY', service_account: str = 'METAFLOW_KUBERNETES_SERVICE_ACCOUNT', secrets: typing.Optional[typing.List[str]] = None, node_selector: typing.Union[typing.Dict[str, str], str, None] = None, namespace: str = 'METAFLOW_KUBERNETES_NAMESPACE', gpu: typing.Optional[int] = None, gpu_vendor: str = 'KUBERNETES_GPU_VENDOR', tolerations: typing.List[str] = [], labels: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_LABELS', annotations: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_ANNOTATIONS', use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = '/metaflow_temp', persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None, hostname_resolution_timeout: int = 600, qos: str = 'Burstable') -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Specifies that this step should execute on Kubernetes.
  """
  ...

  @typing.overload
- def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies that the step will success under all circumstances.
-
- The decorator will create an optional artifact, specified by `var`, which
- contains the exception raised. You can use it to detect the presence
- of errors, indicating that all happy-path artifacts produced by the step
- are missing.
+ Specifies secrets to be retrieved and injected as environment variables prior to
+ the execution of a step.
  """
  ...

  @typing.overload
- def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
+ def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []):
  """
- Specifies that the step will success under all circumstances.
-
- The decorator will create an optional artifact, specified by `var`, which
- contains the exception raised. You can use it to detect the presence
- of errors, indicating that all happy-path artifacts produced by the step
- are missing.
+ Specifies secrets to be retrieved and injected as environment variables prior to
+ the execution of a step.
  """
  ...

  @typing.overload
- def batch(*, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = 'METAFLOW_BATCH_JOB_QUEUE', iam_role: str = 'METAFLOW_ECS_S3_ACCESS_IAM_ROLE', execution_role: str = 'METAFLOW_ECS_FARGATE_EXECUTION_ROLE', shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def card(*, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
+ Creates a human-readable report, a Metaflow Card, after this step completes.
+
+ Note that you may add multiple `@card` decorators in a step with different parameters.
  """
  ...

  @typing.overload
- def batch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def batch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def batch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = 'METAFLOW_BATCH_JOB_QUEUE', iam_role: str = 'METAFLOW_ECS_S3_ACCESS_IAM_ROLE', execution_role: str = 'METAFLOW_ECS_FARGATE_EXECUTION_ROLE', shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None):
+ def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
  """
- Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
+ Creates a human-readable report, a Metaflow Card, after this step completes.
+
+ Note that you may add multiple `@card` decorators in a step with different parameters.
  """
  ...
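The `@conda` docstring above says that step-level settings augment a flow-level `@conda_base`. A minimal sketch of that pattern, not part of the diff (the package pins are illustrative):

```
from metaflow import FlowSpec, conda, conda_base, step

# Flow-level environment shared by every step.
@conda_base(python="3.10.13", packages={"pandas": "2.1.4"})
class CondaExampleFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.train)

    # Step-specific addition layered on top of @conda_base.
    @conda(packages={"scikit-learn": "1.4.0"})
    @step
    def train(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    CondaExampleFlow()
```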

@@ -376,6 +357,37 @@ def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typ
  """
  ...

+ @typing.overload
+ def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Specifies that the step will success under all circumstances.
+
+ The decorator will create an optional artifact, specified by `var`, which
+ contains the exception raised. You can use it to detect the presence
+ of errors, indicating that all happy-path artifacts produced by the step
+ are missing.
+ """
+ ...
+
+ @typing.overload
+ def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ ...
+
+ @typing.overload
+ def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
+ """
+ Specifies that the step will success under all circumstances.
+
+ The decorator will create an optional artifact, specified by `var`, which
+ contains the exception raised. You can use it to detect the presence
+ of errors, indicating that all happy-path artifacts produced by the step
+ are missing.
+ """
+ ...
+
  @typing.overload
  def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
@@ -447,33 +459,102 @@ def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None]
  ...

  @typing.overload
- def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  """
- Specifies the Conda environment for the step.
-
- Information in this decorator will augment any
- attributes set in the `@conda_base` flow-level decorator. Hence,
- you can use `@conda_base` to set packages required by all
- steps and use `@conda` to specify step-specific overrides.
+ Decorator prototype for all step decorators. This function gets specialized
+ and imported for all decorators types by _import_plugin_decorators().
  """
  ...

  @typing.overload
- def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
+ """
+ Decorator prototype for all step decorators. This function gets specialized
+ and imported for all decorators types by _import_plugin_decorators().
+ """
  ...

  @typing.overload
- def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def trigger_on_finish(*, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ """
+ Specifies the flow(s) that this flow depends on.
+
+ ```
+ @trigger_on_finish(flow='FooFlow')
+ ```
+ or
+ ```
+ @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
+ ```
+ This decorator respects the @project decorator and triggers the flow
+ when upstream runs within the same namespace complete successfully
+
+ Additionally, you can specify project aware upstream flow dependencies
+ by specifying the fully qualified project_flow_name.
+ ```
+ @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
+ ```
+ or
+ ```
+ @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
+ ```
+
+ You can also specify just the project or project branch (other values will be
+ inferred from the current project or project branch):
+ ```
+ @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
+ ```
+
+ Note that `branch` is typically one of:
+ - `prod`
+ - `user.bob`
+ - `test.my_experiment`
+ - `prod.staging`
+ """
  ...

- def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
+ @typing.overload
+ def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ ...
+
+ def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
  """
- Specifies the Conda environment for the step.
+ Specifies the flow(s) that this flow depends on.

- Information in this decorator will augment any
- attributes set in the `@conda_base` flow-level decorator. Hence,
- you can use `@conda_base` to set packages required by all
- steps and use `@conda` to specify step-specific overrides.
+ ```
+ @trigger_on_finish(flow='FooFlow')
+ ```
+ or
+ ```
+ @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
+ ```
+ This decorator respects the @project decorator and triggers the flow
+ when upstream runs within the same namespace complete successfully
+
+ Additionally, you can specify project aware upstream flow dependencies
+ by specifying the fully qualified project_flow_name.
+ ```
+ @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
+ ```
+ or
+ ```
+ @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
+ ```
+
+ You can also specify just the project or project branch (other values will be
+ inferred from the current project or project branch):
+ ```
+ @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
+ ```
+
+ Note that `branch` is typically one of:
+ - `prod`
+ - `user.bob`
+ - `test.my_experiment`
+ - `prod.staging`
  """
  ...
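The `@trigger_on_finish` docstring above shows the decorator call forms; applied to a flow it looks like the following sketch, which is not part of the diff (`FooFlow` is the hypothetical upstream flow from the docstring):

```
from metaflow import FlowSpec, step, trigger_on_finish

# When deployed to a production orchestrator, this flow is triggered
# automatically after FooFlow (in the same namespace) finishes successfully.
@trigger_on_finish(flow="FooFlow")
class DownstreamFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    DownstreamFlow()
```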

@@ -550,110 +631,22 @@ def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: t
  """
  ...

- def project(*, name: str) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
- """
- Specifies what flows belong to the same project.
-
- A project-specific namespace is created for all flows that
- use the same `@project(name)`.
- """
- ...
-
- def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
- """
- The `@airflow_external_task_sensor` decorator attaches a Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
- This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as a flow decorators. Adding more than one decorator will ensure that `start` step starts only after all sensors finish.
- """
- ...
-
- def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
- """
- The `@airflow_s3_key_sensor` decorator attaches a Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
- before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
- and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
- added as a flow decorators. Adding more than one decorator will ensure that `start` step
- starts only after all sensors finish.
- """
- ...
-
  @typing.overload
- def trigger_on_finish(*, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
- Specifies the flow(s) that this flow depends on.
-
- ```
- @trigger_on_finish(flow='FooFlow')
- ```
- or
- ```
- @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
- ```
- This decorator respects the @project decorator and triggers the flow
- when upstream runs within the same namespace complete successfully
-
- Additionally, you can specify project aware upstream flow dependencies
- by specifying the fully qualified project_flow_name.
- ```
- @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
- ```
- or
- ```
- @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
- ```
-
- You can also specify just the project or project branch (other values will be
- inferred from the current project or project branch):
- ```
- @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
- ```
-
- Note that `branch` is typically one of:
- - `prod`
- - `user.bob`
- - `test.my_experiment`
- - `prod.staging`
+ Specifies the times when the flow should be run when running on a
+ production scheduler.
  """
  ...

  @typing.overload
- def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
  ...

- def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
+ def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
  """
- Specifies the flow(s) that this flow depends on.
-
- ```
- @trigger_on_finish(flow='FooFlow')
- ```
- or
- ```
- @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
- ```
- This decorator respects the @project decorator and triggers the flow
- when upstream runs within the same namespace complete successfully
-
- Additionally, you can specify project aware upstream flow dependencies
- by specifying the fully qualified project_flow_name.
- ```
- @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
- ```
- or
- ```
- @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
- ```
-
- You can also specify just the project or project branch (other values will be
- inferred from the current project or project branch):
- ```
- @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
- ```
-
- Note that `branch` is typically one of:
- - `prod`
- - `user.bob`
- - `test.my_experiment`
- - `prod.staging`
+ Specifies the times when the flow should be run when running on a
+ production scheduler.
  """
  ...
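The `@schedule` overloads moved above are flow-level. A minimal sketch, not part of the diff (the flow name is illustrative):

```
from metaflow import FlowSpec, schedule, step

# Runs once a day when deployed to a production scheduler;
# local `run` invocations are unaffected.
@schedule(daily=True)
class NightlyFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    NightlyFlow()
```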

@@ -703,22 +696,29 @@ def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packa
  """
  ...

- @typing.overload
- def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
- Specifies the times when the flow should be run when running on a
- production scheduler.
+ The `@airflow_external_task_sensor` decorator attaches a Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
+ This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as a flow decorators. Adding more than one decorator will ensure that `start` step starts only after all sensors finish.
  """
  ...

- @typing.overload
- def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ def project(*, name: str) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ """
+ Specifies what flows belong to the same project.
+
+ A project-specific namespace is created for all flows that
+ use the same `@project(name)`.
+ """
  ...

- def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
+ def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
- Specifies the times when the flow should be run when running on a
- production scheduler.
+ The `@airflow_s3_key_sensor` decorator attaches a Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
+ before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
+ and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
+ added as a flow decorators. Adding more than one decorator will ensure that `start` step
+ starts only after all sensors finish.
  """
  ...
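The `@project` stub moved above creates a shared namespace for all flows that use the same project name. A minimal sketch, not part of the diff (the project and flow names are illustrative):

```
from metaflow import FlowSpec, project, step

# All flows decorated with @project(name="demo_project") share one
# project-specific namespace, branched per user/deployment.
@project(name="demo_project")
class ProjectExampleFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    ProjectExampleFlow()
```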
 
metaflow-stubs/cards.pyi CHANGED
@@ -1,7 +1,7 @@
  ######################################################################################################
  # Auto-generated Metaflow stub file #
- # MF version: 2.13.4 #
- # Generated on 2025-01-15T17:53:58.577858 #
+ # MF version: 2.13.6 #
+ # Generated on 2025-01-23T12:09:52.952052 #
  ######################################################################################################

  from __future__ import annotations
metaflow-stubs/cli.pyi CHANGED
@@ -1,7 +1,7 @@
  ######################################################################################################
  # Auto-generated Metaflow stub file #
- # MF version: 2.13.4 #
- # Generated on 2025-01-15T17:53:58.586744 #
+ # MF version: 2.13.6 #
+ # Generated on 2025-01-23T12:09:52.961230 #
  ######################################################################################################

  from __future__ import annotations
@@ -1,7 +1,7 @@
  ######################################################################################################
  # Auto-generated Metaflow stub file #
- # MF version: 2.13.4 #
- # Generated on 2025-01-15T17:53:58.585465 #
+ # MF version: 2.13.6 #
+ # Generated on 2025-01-23T12:09:52.959924 #
  ######################################################################################################

  from __future__ import annotations