metaflow-stubs 2.15.13__py2.py3-none-any.whl → 2.15.14__py2.py3-none-any.whl

This diff shows the changes between two publicly released versions of this package, as they appear in their respective public registries. It is provided for informational purposes only.
Files changed (149)
  1. metaflow-stubs/__init__.pyi +472 -472
  2. metaflow-stubs/cards.pyi +2 -2
  3. metaflow-stubs/cli.pyi +2 -2
  4. metaflow-stubs/cli_components/__init__.pyi +2 -2
  5. metaflow-stubs/cli_components/utils.pyi +2 -2
  6. metaflow-stubs/client/__init__.pyi +2 -2
  7. metaflow-stubs/client/core.pyi +7 -7
  8. metaflow-stubs/client/filecache.pyi +3 -3
  9. metaflow-stubs/events.pyi +3 -3
  10. metaflow-stubs/exception.pyi +2 -2
  11. metaflow-stubs/flowspec.pyi +5 -5
  12. metaflow-stubs/generated_for.txt +1 -1
  13. metaflow-stubs/includefile.pyi +4 -4
  14. metaflow-stubs/info_file.pyi +2 -2
  15. metaflow-stubs/metadata_provider/__init__.pyi +2 -2
  16. metaflow-stubs/metadata_provider/heartbeat.pyi +2 -2
  17. metaflow-stubs/metadata_provider/metadata.pyi +3 -3
  18. metaflow-stubs/metadata_provider/util.pyi +2 -2
  19. metaflow-stubs/metaflow_config.pyi +4 -2
  20. metaflow-stubs/metaflow_current.pyi +6 -6
  21. metaflow-stubs/metaflow_git.pyi +2 -2
  22. metaflow-stubs/multicore_utils.pyi +2 -2
  23. metaflow-stubs/parameters.pyi +4 -4
  24. metaflow-stubs/plugins/__init__.pyi +11 -11
  25. metaflow-stubs/plugins/airflow/__init__.pyi +2 -2
  26. metaflow-stubs/plugins/airflow/airflow_decorator.pyi +2 -2
  27. metaflow-stubs/plugins/airflow/airflow_utils.pyi +2 -2
  28. metaflow-stubs/plugins/airflow/exception.pyi +2 -2
  29. metaflow-stubs/plugins/airflow/sensors/__init__.pyi +2 -2
  30. metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +2 -2
  31. metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +2 -2
  32. metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +2 -2
  33. metaflow-stubs/plugins/argo/__init__.pyi +2 -2
  34. metaflow-stubs/plugins/argo/argo_client.pyi +2 -2
  35. metaflow-stubs/plugins/argo/argo_events.pyi +2 -2
  36. metaflow-stubs/plugins/argo/argo_workflows.pyi +4 -4
  37. metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +3 -3
  38. metaflow-stubs/plugins/argo/argo_workflows_deployer.pyi +4 -4
  39. metaflow-stubs/plugins/argo/argo_workflows_deployer_objects.pyi +4 -4
  40. metaflow-stubs/plugins/aws/__init__.pyi +3 -3
  41. metaflow-stubs/plugins/aws/aws_client.pyi +2 -2
  42. metaflow-stubs/plugins/aws/aws_utils.pyi +2 -2
  43. metaflow-stubs/plugins/aws/batch/__init__.pyi +2 -2
  44. metaflow-stubs/plugins/aws/batch/batch.pyi +2 -2
  45. metaflow-stubs/plugins/aws/batch/batch_client.pyi +2 -2
  46. metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +2 -2
  47. metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +2 -2
  48. metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +4 -4
  49. metaflow-stubs/plugins/aws/step_functions/__init__.pyi +2 -2
  50. metaflow-stubs/plugins/aws/step_functions/dynamo_db_client.pyi +2 -2
  51. metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +2 -2
  52. metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +2 -2
  53. metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +2 -2
  54. metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +2 -2
  55. metaflow-stubs/plugins/aws/step_functions/step_functions_decorator.pyi +2 -2
  56. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer.pyi +5 -5
  57. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer_objects.pyi +4 -4
  58. metaflow-stubs/plugins/azure/__init__.pyi +2 -2
  59. metaflow-stubs/plugins/azure/azure_credential.pyi +2 -2
  60. metaflow-stubs/plugins/azure/azure_exceptions.pyi +2 -2
  61. metaflow-stubs/plugins/azure/azure_secret_manager_secrets_provider.pyi +4 -4
  62. metaflow-stubs/plugins/azure/azure_utils.pyi +2 -2
  63. metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +2 -2
  64. metaflow-stubs/plugins/azure/includefile_support.pyi +2 -2
  65. metaflow-stubs/plugins/cards/__init__.pyi +2 -2
  66. metaflow-stubs/plugins/cards/card_client.pyi +3 -3
  67. metaflow-stubs/plugins/cards/card_creator.pyi +2 -2
  68. metaflow-stubs/plugins/cards/card_datastore.pyi +2 -2
  69. metaflow-stubs/plugins/cards/card_decorator.pyi +2 -2
  70. metaflow-stubs/plugins/cards/card_modules/__init__.pyi +2 -2
  71. metaflow-stubs/plugins/cards/card_modules/basic.pyi +2 -2
  72. metaflow-stubs/plugins/cards/card_modules/card.pyi +2 -2
  73. metaflow-stubs/plugins/cards/card_modules/components.pyi +5 -5
  74. metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +2 -2
  75. metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +2 -2
  76. metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +2 -2
  77. metaflow-stubs/plugins/cards/card_resolver.pyi +2 -2
  78. metaflow-stubs/plugins/cards/component_serializer.pyi +2 -2
  79. metaflow-stubs/plugins/cards/exception.pyi +2 -2
  80. metaflow-stubs/plugins/catch_decorator.pyi +2 -2
  81. metaflow-stubs/plugins/datatools/__init__.pyi +2 -2
  82. metaflow-stubs/plugins/datatools/local.pyi +2 -2
  83. metaflow-stubs/plugins/datatools/s3/__init__.pyi +2 -2
  84. metaflow-stubs/plugins/datatools/s3/s3.pyi +4 -4
  85. metaflow-stubs/plugins/datatools/s3/s3tail.pyi +2 -2
  86. metaflow-stubs/plugins/datatools/s3/s3util.pyi +2 -2
  87. metaflow-stubs/plugins/debug_logger.pyi +2 -2
  88. metaflow-stubs/plugins/debug_monitor.pyi +2 -2
  89. metaflow-stubs/plugins/environment_decorator.pyi +2 -2
  90. metaflow-stubs/plugins/events_decorator.pyi +2 -2
  91. metaflow-stubs/plugins/frameworks/__init__.pyi +2 -2
  92. metaflow-stubs/plugins/frameworks/pytorch.pyi +2 -2
  93. metaflow-stubs/plugins/gcp/__init__.pyi +2 -2
  94. metaflow-stubs/plugins/gcp/gcp_secret_manager_secrets_provider.pyi +4 -4
  95. metaflow-stubs/plugins/gcp/gs_exceptions.pyi +2 -2
  96. metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +2 -2
  97. metaflow-stubs/plugins/gcp/gs_utils.pyi +2 -2
  98. metaflow-stubs/plugins/gcp/includefile_support.pyi +2 -2
  99. metaflow-stubs/plugins/kubernetes/__init__.pyi +2 -2
  100. metaflow-stubs/plugins/kubernetes/kube_utils.pyi +3 -3
  101. metaflow-stubs/plugins/kubernetes/kubernetes.pyi +2 -2
  102. metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +2 -2
  103. metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +2 -2
  104. metaflow-stubs/plugins/kubernetes/kubernetes_job.pyi +2 -2
  105. metaflow-stubs/plugins/kubernetes/kubernetes_jobsets.pyi +2 -2
  106. metaflow-stubs/plugins/kubernetes/spot_monitor_sidecar.pyi +2 -2
  107. metaflow-stubs/plugins/parallel_decorator.pyi +2 -2
  108. metaflow-stubs/plugins/project_decorator.pyi +2 -2
  109. metaflow-stubs/plugins/pypi/__init__.pyi +2 -2
  110. metaflow-stubs/plugins/pypi/conda_decorator.pyi +2 -2
  111. metaflow-stubs/plugins/pypi/conda_environment.pyi +5 -5
  112. metaflow-stubs/plugins/pypi/parsers.pyi +2 -2
  113. metaflow-stubs/plugins/pypi/pypi_decorator.pyi +2 -2
  114. metaflow-stubs/plugins/pypi/pypi_environment.pyi +2 -2
  115. metaflow-stubs/plugins/pypi/utils.pyi +2 -2
  116. metaflow-stubs/plugins/resources_decorator.pyi +2 -2
  117. metaflow-stubs/plugins/retry_decorator.pyi +2 -2
  118. metaflow-stubs/plugins/secrets/__init__.pyi +3 -3
  119. metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +3 -3
  120. metaflow-stubs/plugins/secrets/secrets_decorator.pyi +2 -2
  121. metaflow-stubs/plugins/storage_executor.pyi +2 -2
  122. metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +2 -2
  123. metaflow-stubs/plugins/timeout_decorator.pyi +2 -2
  124. metaflow-stubs/plugins/uv/__init__.pyi +2 -2
  125. metaflow-stubs/plugins/uv/uv_environment.pyi +2 -2
  126. metaflow-stubs/pylint_wrapper.pyi +2 -2
  127. metaflow-stubs/runner/__init__.pyi +2 -2
  128. metaflow-stubs/runner/deployer.pyi +5 -5
  129. metaflow-stubs/runner/deployer_impl.pyi +2 -2
  130. metaflow-stubs/runner/metaflow_runner.pyi +5 -5
  131. metaflow-stubs/runner/nbdeploy.pyi +2 -2
  132. metaflow-stubs/runner/nbrun.pyi +2 -2
  133. metaflow-stubs/runner/subprocess_manager.pyi +2 -2
  134. metaflow-stubs/runner/utils.pyi +4 -4
  135. metaflow-stubs/system/__init__.pyi +2 -2
  136. metaflow-stubs/system/system_logger.pyi +2 -2
  137. metaflow-stubs/system/system_monitor.pyi +2 -2
  138. metaflow-stubs/tagging_util.pyi +2 -2
  139. metaflow-stubs/tuple_util.pyi +2 -2
  140. metaflow-stubs/user_configs/__init__.pyi +2 -2
  141. metaflow-stubs/user_configs/config_decorators.pyi +7 -7
  142. metaflow-stubs/user_configs/config_options.pyi +4 -4
  143. metaflow-stubs/user_configs/config_parameters.pyi +5 -5
  144. metaflow-stubs/version.pyi +2 -2
  145. {metaflow_stubs-2.15.13.dist-info → metaflow_stubs-2.15.14.dist-info}/METADATA +2 -2
  146. metaflow_stubs-2.15.14.dist-info/RECORD +149 -0
  147. metaflow_stubs-2.15.13.dist-info/RECORD +0 -149
  148. {metaflow_stubs-2.15.13.dist-info → metaflow_stubs-2.15.14.dist-info}/WHEEL +0 -0
  149. {metaflow_stubs-2.15.13.dist-info → metaflow_stubs-2.15.14.dist-info}/top_level.txt +0 -0
@@ -1,15 +1,15 @@
  ######################################################################################################
  # Auto-generated Metaflow stub file #
- # MF version: 2.15.13 #
- # Generated on 2025-05-20T18:34:43.119385 #
+ # MF version: 2.15.14 #
+ # Generated on 2025-05-21T14:01:03.779738 #
  ######################################################################################################

  from __future__ import annotations

  import typing
  if typing.TYPE_CHECKING:
- import datetime
  import typing
+ import datetime
  FlowSpecDerived = typing.TypeVar("FlowSpecDerived", bound="FlowSpec", contravariant=False, covariant=False)
  StepFlag = typing.NewType("StepFlag", bool)

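Aside from the version stamp, this hunk only swaps the order of the two imports under the `TYPE_CHECKING` guard. For readers unfamiliar with why `datetime` lives there at all, here is a minimal sketch of the pattern these stubs rely on; the helper function name is hypothetical and not part of the stubs:

```python
# Annotation-only imports: `datetime` is visible to type checkers but is never
# imported at runtime, because `from __future__ import annotations` makes all
# annotations lazily evaluated strings.
from __future__ import annotations

import typing

if typing.TYPE_CHECKING:
    import datetime  # resolved by mypy/pyright only


def generated_at() -> datetime.datetime:  # hypothetical helper, for illustration
    import datetime as dt  # the real import is deferred to call time

    return dt.datetime.now()
```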
@@ -36,16 +36,16 @@ from .user_configs.config_parameters import config_expr as config_expr
  from .user_configs.config_decorators import CustomFlowDecorator as CustomFlowDecorator
  from .user_configs.config_decorators import CustomStepDecorator as CustomStepDecorator
  from . import tuple_util as tuple_util
- from . import metaflow_git as metaflow_git
  from . import events as events
+ from . import metaflow_git as metaflow_git
  from . import runner as runner
  from . import plugins as plugins
  from .plugins.datatools.s3.s3 import S3 as S3
  from . import includefile as includefile
  from .includefile import IncludeFile as IncludeFile
- from .plugins.pypi.parsers import conda_environment_yml_parser as conda_environment_yml_parser
  from .plugins.pypi.parsers import requirements_txt_parser as requirements_txt_parser
  from .plugins.pypi.parsers import pyproject_toml_parser as pyproject_toml_parser
+ from .plugins.pypi.parsers import conda_environment_yml_parser as conda_environment_yml_parser
  from . import cards as cards
  from . import client as client
  from .client.core import namespace as namespace
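This hunk only reorders the top-level re-exports; `conda_environment_yml_parser` now follows the other two dependency-file parsers. As a reminder of what these parsers are for, a hedged sketch follows: they take dependency-file text and return a decorator-attribute dict, but the exact return shape shown in the comment is an assumption, not quoted from this diff.

```python
# Feed requirements.txt *content* (not a path) to the re-exported parser.
from metaflow import requirements_txt_parser

requirements = """\
requests==2.32.3
boto3==1.34.100
"""

# Expected to return a dict of decorator attributes, e.g. {'packages': {...}};
# treat this shape as illustrative and check the Metaflow docs.
print(requirements_txt_parser(requirements))
```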
@@ -147,37 +147,142 @@ def step(f: typing.Union[typing.Callable[[FlowSpecDerived], None], typing.Callab
  ...

  @typing.overload
- def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies secrets to be retrieved and injected as environment variables prior to
- the execution of a step.
+ Specifies the number of times the task corresponding
+ to a step needs to be retried.
+
+ This decorator is useful for handling transient errors, such as networking issues.
+ If your task contains operations that can't be retried safely, e.g. database updates,
+ it is advisable to annotate it with `@retry(times=0)`.
+
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
+ decorator will execute a no-op task after all retries have been exhausted,
+ ensuring that the flow execution can continue.


  Parameters
  ----------
- sources : List[Union[str, Dict[str, Any]]], default: []
- List of secret specs, defining how the secrets are to be retrieved
+ times : int, default 3
+ Number of times to retry this task.
+ minutes_between_retries : int, default 2
+ Number of minutes between retries.
  """
  ...

  @typing.overload
- def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []):
+ def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
  """
- Specifies secrets to be retrieved and injected as environment variables prior to
- the execution of a step.
+ Specifies the number of times the task corresponding
+ to a step needs to be retried.
+
+ This decorator is useful for handling transient errors, such as networking issues.
+ If your task contains operations that can't be retried safely, e.g. database updates,
+ it is advisable to annotate it with `@retry(times=0)`.
+
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
+ decorator will execute a no-op task after all retries have been exhausted,
+ ensuring that the flow execution can continue.


  Parameters
  ----------
- sources : List[Union[str, Dict[str, Any]]], default: []
- List of secret specs, defining how the secrets are to be retrieved
+ times : int, default 3
+ Number of times to retry this task.
+ minutes_between_retries : int, default 2
+ Number of minutes between retries.
+ """
+ ...
+
+ def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = 'KUBERNETES_IMAGE_PULL_POLICY', service_account: str = 'METAFLOW_KUBERNETES_SERVICE_ACCOUNT', secrets: typing.Optional[typing.List[str]] = None, node_selector: typing.Union[typing.Dict[str, str], str, None] = None, namespace: str = 'METAFLOW_KUBERNETES_NAMESPACE', gpu: typing.Optional[int] = None, gpu_vendor: str = 'KUBERNETES_GPU_VENDOR', tolerations: typing.List[str] = [], labels: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_LABELS', annotations: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_ANNOTATIONS', use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = '/metaflow_temp', persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None, hostname_resolution_timeout: int = 600, qos: str = 'Burstable', security_context: typing.Optional[typing.Dict[str, typing.Any]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Specifies that this step should execute on Kubernetes.
+
+
+ Parameters
+ ----------
+ cpu : int, default 1
+ Number of CPUs required for this step. If `@resources` is
+ also present, the maximum value from all decorators is used.
+ memory : int, default 4096
+ Memory size (in MB) required for this step. If
+ `@resources` is also present, the maximum value from all decorators is
+ used.
+ disk : int, default 10240
+ Disk size (in MB) required for this step. If
+ `@resources` is also present, the maximum value from all decorators is
+ used.
+ image : str, optional, default None
+ Docker image to use when launching on Kubernetes. If not specified, and
+ METAFLOW_KUBERNETES_CONTAINER_IMAGE is specified, that image is used. If
+ not, a default Docker image mapping to the current version of Python is used.
+ image_pull_policy: str, default KUBERNETES_IMAGE_PULL_POLICY
+ If given, the imagePullPolicy to be applied to the Docker image of the step.
+ service_account : str, default METAFLOW_KUBERNETES_SERVICE_ACCOUNT
+ Kubernetes service account to use when launching pod in Kubernetes.
+ secrets : List[str], optional, default None
+ Kubernetes secrets to use when launching pod in Kubernetes. These
+ secrets are in addition to the ones defined in `METAFLOW_KUBERNETES_SECRETS`
+ in Metaflow configuration.
+ node_selector: Union[Dict[str,str], str], optional, default None
+ Kubernetes node selector(s) to apply to the pod running the task.
+ Can be passed in as a comma separated string of values e.g.
+ 'kubernetes.io/os=linux,kubernetes.io/arch=amd64' or as a dictionary
+ {'kubernetes.io/os': 'linux', 'kubernetes.io/arch': 'amd64'}
+ namespace : str, default METAFLOW_KUBERNETES_NAMESPACE
+ Kubernetes namespace to use when launching pod in Kubernetes.
+ gpu : int, optional, default None
+ Number of GPUs required for this step. A value of zero implies that
+ the scheduled node should not have GPUs.
+ gpu_vendor : str, default KUBERNETES_GPU_VENDOR
+ The vendor of the GPUs to be used for this step.
+ tolerations : List[str], default []
+ The default is extracted from METAFLOW_KUBERNETES_TOLERATIONS.
+ Kubernetes tolerations to use when launching pod in Kubernetes.
+ labels: Dict[str, str], default: METAFLOW_KUBERNETES_LABELS
+ Kubernetes labels to use when launching pod in Kubernetes.
+ annotations: Dict[str, str], default: METAFLOW_KUBERNETES_ANNOTATIONS
+ Kubernetes annotations to use when launching pod in Kubernetes.
+ use_tmpfs : bool, default False
+ This enables an explicit tmpfs mount for this step.
+ tmpfs_tempdir : bool, default True
+ sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
+ tmpfs_size : int, optional, default: None
+ The value for the size (in MiB) of the tmpfs mount for this step.
+ This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
+ memory allocated for this step.
+ tmpfs_path : str, optional, default /metaflow_temp
+ Path to tmpfs mount for this step.
+ persistent_volume_claims : Dict[str, str], optional, default None
+ A map (dictionary) of persistent volumes to be mounted to the pod for this step. The map is from persistent
+ volumes to the path to which the volume is to be mounted, e.g., `{'pvc-name': '/path/to/mount/on'}`.
+ shared_memory: int, optional
+ Shared memory size (in MiB) required for this step
+ port: int, optional
+ Port number to specify in the Kubernetes job object
+ compute_pool : str, optional, default None
+ Compute pool to be used for for this step.
+ If not specified, any accessible compute pool within the perimeter is used.
+ hostname_resolution_timeout: int, default 10 * 60
+ Timeout in seconds for the workers tasks in the gang scheduled cluster to resolve the hostname of control task.
+ Only applicable when @parallel is used.
+ qos: str, default: Burstable
+ Quality of Service class to assign to the pod. Supported values are: Guaranteed, Burstable, BestEffort
+
+ security_context: Dict[str, Any], optional, default None
+ Container security context. Applies to the task container. Allows the following keys:
+ - privileged: bool, optional, default None
+ - allow_privilege_escalation: bool, optional, default None
+ - run_as_user: int, optional, default None
+ - run_as_group: int, optional, default None
+ - run_as_non_root: bool, optional, default None
  """
  ...

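The `@retry` and `@kubernetes` docstrings in the hunk above describe the stub signatures as moved and added in 2.15.14. For readers skimming the diff, a hedged usage sketch; the flow name, step body, and resource numbers are illustrative and not taken from this diff:

```python
from metaflow import FlowSpec, kubernetes, retry, step


class ExampleFlow(FlowSpec):  # hypothetical flow, for illustration only

    @retry(times=3, minutes_between_retries=2)  # re-run on transient failures
    @kubernetes(cpu=2, memory=8192, disk=10240)  # pod resources; if @resources
    @step                                        # is also present, maxima win
    def start(self):
        self.result = "ok"  # work that may hit transient errors
        self.next(self.end)

    @step
    def end(self):
        print(self.result)


if __name__ == "__main__":
    ExampleFlow()
```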
@@ -389,57 +494,6 @@ def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
  """
  ...

- @typing.overload
- def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Specifies that the step will success under all circumstances.
-
- The decorator will create an optional artifact, specified by `var`, which
- contains the exception raised. You can use it to detect the presence
- of errors, indicating that all happy-path artifacts produced by the step
- are missing.
-
-
- Parameters
- ----------
- var : str, optional, default None
- Name of the artifact in which to store the caught exception.
- If not specified, the exception is not stored.
- print_exception : bool, default True
- Determines whether or not the exception is printed to
- stdout when caught.
- """
- ...
-
- @typing.overload
- def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
- """
- Specifies that the step will success under all circumstances.
-
- The decorator will create an optional artifact, specified by `var`, which
- contains the exception raised. You can use it to detect the presence
- of errors, indicating that all happy-path artifacts produced by the step
- are missing.
-
-
- Parameters
- ----------
- var : str, optional, default None
- Name of the artifact in which to store the caught exception.
- If not specified, the exception is not stored.
- print_exception : bool, default True
- Determines whether or not the exception is printed to
- stdout when caught.
- """
- ...
-
  @typing.overload
  def resources(*, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
@@ -520,253 +574,199 @@ def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None]
  ...

  @typing.overload
- def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  """
- Specifies a timeout for your step.
-
- This decorator is useful if this step may hang indefinitely.
-
- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
-
- Note that all the values specified in parameters are added together so if you specify
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
-
-
- Parameters
- ----------
- seconds : int, default 0
- Number of seconds to wait prior to timing out.
- minutes : int, default 0
- Number of minutes to wait prior to timing out.
- hours : int, default 0
- Number of hours to wait prior to timing out.
+ Decorator prototype for all step decorators. This function gets specialized
+ and imported for all decorators types by _import_plugin_decorators().
  """
  ...

  @typing.overload
- def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- @typing.overload
- def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
+ """
+ Decorator prototype for all step decorators. This function gets specialized
+ and imported for all decorators types by _import_plugin_decorators().
+ """
  ...

- def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
+ @typing.overload
+ def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies a timeout for your step.
-
- This decorator is useful if this step may hang indefinitely.
-
- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
-
- Note that all the values specified in parameters are added together so if you specify
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
+ Specifies environment variables to be set prior to the execution of a step.


  Parameters
  ----------
- seconds : int, default 0
- Number of seconds to wait prior to timing out.
- minutes : int, default 0
- Number of minutes to wait prior to timing out.
- hours : int, default 0
- Number of hours to wait prior to timing out.
+ vars : Dict[str, str], default {}
+ Dictionary of environment variables to set.
  """
  ...

  @typing.overload
- def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- """
- Decorator prototype for all step decorators. This function gets specialized
- and imported for all decorators types by _import_plugin_decorators().
- """
+ def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
- """
- Decorator prototype for all step decorators. This function gets specialized
- and imported for all decorators types by _import_plugin_decorators().
+ def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
  """
- ...
-
- def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = 'KUBERNETES_IMAGE_PULL_POLICY', service_account: str = 'METAFLOW_KUBERNETES_SERVICE_ACCOUNT', secrets: typing.Optional[typing.List[str]] = None, node_selector: typing.Union[typing.Dict[str, str], str, None] = None, namespace: str = 'METAFLOW_KUBERNETES_NAMESPACE', gpu: typing.Optional[int] = None, gpu_vendor: str = 'KUBERNETES_GPU_VENDOR', tolerations: typing.List[str] = [], labels: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_LABELS', annotations: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_ANNOTATIONS', use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = '/metaflow_temp', persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None, hostname_resolution_timeout: int = 600, qos: str = 'Burstable', security_context: typing.Optional[typing.Dict[str, typing.Any]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ Specifies environment variables to be set prior to the execution of a step.
+
+
+ Parameters
+ ----------
+ vars : Dict[str, str], default {}
+ Dictionary of environment variables to set.
  """
- Specifies that this step should execute on Kubernetes.
+ ...
+
+ @typing.overload
+ def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Specifies the PyPI packages for the step.
+
+ Information in this decorator will augment any
+ attributes set in the `@pyi_base` flow-level decorator. Hence,
+ you can use `@pypi_base` to set packages required by all
+ steps and use `@pypi` to specify step-specific overrides.


  Parameters
  ----------
- cpu : int, default 1
- Number of CPUs required for this step. If `@resources` is
- also present, the maximum value from all decorators is used.
- memory : int, default 4096
- Memory size (in MB) required for this step. If
- `@resources` is also present, the maximum value from all decorators is
- used.
- disk : int, default 10240
- Disk size (in MB) required for this step. If
- `@resources` is also present, the maximum value from all decorators is
- used.
- image : str, optional, default None
- Docker image to use when launching on Kubernetes. If not specified, and
- METAFLOW_KUBERNETES_CONTAINER_IMAGE is specified, that image is used. If
- not, a default Docker image mapping to the current version of Python is used.
- image_pull_policy: str, default KUBERNETES_IMAGE_PULL_POLICY
- If given, the imagePullPolicy to be applied to the Docker image of the step.
- service_account : str, default METAFLOW_KUBERNETES_SERVICE_ACCOUNT
- Kubernetes service account to use when launching pod in Kubernetes.
- secrets : List[str], optional, default None
- Kubernetes secrets to use when launching pod in Kubernetes. These
- secrets are in addition to the ones defined in `METAFLOW_KUBERNETES_SECRETS`
- in Metaflow configuration.
- node_selector: Union[Dict[str,str], str], optional, default None
- Kubernetes node selector(s) to apply to the pod running the task.
- Can be passed in as a comma separated string of values e.g.
- 'kubernetes.io/os=linux,kubernetes.io/arch=amd64' or as a dictionary
- {'kubernetes.io/os': 'linux', 'kubernetes.io/arch': 'amd64'}
- namespace : str, default METAFLOW_KUBERNETES_NAMESPACE
- Kubernetes namespace to use when launching pod in Kubernetes.
- gpu : int, optional, default None
- Number of GPUs required for this step. A value of zero implies that
- the scheduled node should not have GPUs.
- gpu_vendor : str, default KUBERNETES_GPU_VENDOR
- The vendor of the GPUs to be used for this step.
- tolerations : List[str], default []
- The default is extracted from METAFLOW_KUBERNETES_TOLERATIONS.
- Kubernetes tolerations to use when launching pod in Kubernetes.
- labels: Dict[str, str], default: METAFLOW_KUBERNETES_LABELS
- Kubernetes labels to use when launching pod in Kubernetes.
- annotations: Dict[str, str], default: METAFLOW_KUBERNETES_ANNOTATIONS
- Kubernetes annotations to use when launching pod in Kubernetes.
- use_tmpfs : bool, default False
- This enables an explicit tmpfs mount for this step.
- tmpfs_tempdir : bool, default True
- sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
- tmpfs_size : int, optional, default: None
- The value for the size (in MiB) of the tmpfs mount for this step.
- This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
- memory allocated for this step.
- tmpfs_path : str, optional, default /metaflow_temp
- Path to tmpfs mount for this step.
- persistent_volume_claims : Dict[str, str], optional, default None
- A map (dictionary) of persistent volumes to be mounted to the pod for this step. The map is from persistent
- volumes to the path to which the volume is to be mounted, e.g., `{'pvc-name': '/path/to/mount/on'}`.
- shared_memory: int, optional
- Shared memory size (in MiB) required for this step
- port: int, optional
- Port number to specify in the Kubernetes job object
- compute_pool : str, optional, default None
- Compute pool to be used for for this step.
- If not specified, any accessible compute pool within the perimeter is used.
- hostname_resolution_timeout: int, default 10 * 60
- Timeout in seconds for the workers tasks in the gang scheduled cluster to resolve the hostname of control task.
- Only applicable when @parallel is used.
- qos: str, default: Burstable
- Quality of Service class to assign to the pod. Supported values are: Guaranteed, Burstable, BestEffort
+ packages : Dict[str, str], default: {}
+ Packages to use for this step. The key is the name of the package
+ and the value is the version to use.
+ python : str, optional, default: None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ """
+ ...
+
+ @typing.overload
+ def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ ...
+
+ @typing.overload
+ def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
+ """
+ Specifies the PyPI packages for the step.

- security_context: Dict[str, Any], optional, default None
- Container security context. Applies to the task container. Allows the following keys:
- - privileged: bool, optional, default None
- - allow_privilege_escalation: bool, optional, default None
- - run_as_user: int, optional, default None
- - run_as_group: int, optional, default None
- - run_as_non_root: bool, optional, default None
+ Information in this decorator will augment any
+ attributes set in the `@pyi_base` flow-level decorator. Hence,
+ you can use `@pypi_base` to set packages required by all
+ steps and use `@pypi` to specify step-specific overrides.
+
+
+ Parameters
+ ----------
+ packages : Dict[str, str], default: {}
+ Packages to use for this step. The key is the name of the package
+ and the value is the version to use.
+ python : str, optional, default: None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
  """
  ...

  @typing.overload
- def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies environment variables to be set prior to the execution of a step.
+ Specifies secrets to be retrieved and injected as environment variables prior to
+ the execution of a step.


  Parameters
  ----------
- vars : Dict[str, str], default {}
- Dictionary of environment variables to set.
+ sources : List[Union[str, Dict[str, Any]]], default: []
+ List of secret specs, defining how the secrets are to be retrieved
  """
  ...

  @typing.overload
- def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
+ def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []):
  """
- Specifies environment variables to be set prior to the execution of a step.
+ Specifies secrets to be retrieved and injected as environment variables prior to
+ the execution of a step.


  Parameters
  ----------
- vars : Dict[str, str], default {}
- Dictionary of environment variables to set.
+ sources : List[Union[str, Dict[str, Any]]], default: []
+ List of secret specs, defining how the secrets are to be retrieved
  """
  ...

  @typing.overload
- def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies the number of times the task corresponding
- to a step needs to be retried.
+ Specifies a timeout for your step.

- This decorator is useful for handling transient errors, such as networking issues.
- If your task contains operations that can't be retried safely, e.g. database updates,
- it is advisable to annotate it with `@retry(times=0)`.
+ This decorator is useful if this step may hang indefinitely.

- This can be used in conjunction with the `@catch` decorator. The `@catch`
- decorator will execute a no-op task after all retries have been exhausted,
- ensuring that the flow execution can continue.
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.
+
+ Note that all the values specified in parameters are added together so if you specify
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.


  Parameters
  ----------
- times : int, default 3
- Number of times to retry this task.
- minutes_between_retries : int, default 2
- Number of minutes between retries.
+ seconds : int, default 0
+ Number of seconds to wait prior to timing out.
+ minutes : int, default 0
+ Number of minutes to wait prior to timing out.
+ hours : int, default 0
+ Number of hours to wait prior to timing out.
  """
  ...

  @typing.overload
- def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
+ def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
  """
- Specifies the number of times the task corresponding
- to a step needs to be retried.
+ Specifies a timeout for your step.

- This decorator is useful for handling transient errors, such as networking issues.
- If your task contains operations that can't be retried safely, e.g. database updates,
- it is advisable to annotate it with `@retry(times=0)`.
+ This decorator is useful if this step may hang indefinitely.

- This can be used in conjunction with the `@catch` decorator. The `@catch`
- decorator will execute a no-op task after all retries have been exhausted,
- ensuring that the flow execution can continue.
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.
+
+ Note that all the values specified in parameters are added together so if you specify
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.


  Parameters
  ----------
- times : int, default 3
- Number of times to retry this task.
- minutes_between_retries : int, default 2
- Number of minutes between retries.
+ seconds : int, default 0
+ Number of seconds to wait prior to timing out.
+ minutes : int, default 0
+ Number of minutes to wait prior to timing out.
+ hours : int, default 0
+ Number of hours to wait prior to timing out.
  """
  ...

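The hunk above reshuffles the `@parallel`, `@environment`, `@pypi`, `@secrets`, and `@timeout` stubs without changing their signatures. A hedged sketch of the documented decorators used together; the secret source name and package pin are placeholders, not values from this diff:

```python
from metaflow import FlowSpec, environment, pypi, secrets, step, timeout


class EtlFlow(FlowSpec):  # hypothetical flow, for illustration only

    @secrets(sources=["db-credentials"])    # placeholder secret spec
    @environment(vars={"STAGE": "prod"})    # plain environment variables
    @pypi(packages={"requests": "2.32.3"})  # step-local dependency
    @timeout(hours=1, minutes=30)           # values are added together
    @step
    def start(self):
        import os
        assert os.environ["STAGE"] == "prod"  # set by @environment
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    EtlFlow()
```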
@@ -820,149 +820,200 @@ def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typ
820
820
  ...
821
821
 
822
822
  @typing.overload
823
- def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
823
+ def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
824
824
  """
825
- Specifies the PyPI packages for the step.
825
+ Specifies that the step will success under all circumstances.
826
826
 
827
- Information in this decorator will augment any
828
- attributes set in the `@pyi_base` flow-level decorator. Hence,
829
- you can use `@pypi_base` to set packages required by all
830
- steps and use `@pypi` to specify step-specific overrides.
827
+ The decorator will create an optional artifact, specified by `var`, which
828
+ contains the exception raised. You can use it to detect the presence
829
+ of errors, indicating that all happy-path artifacts produced by the step
830
+ are missing.
831
831
 
832
832
 
833
833
  Parameters
834
834
  ----------
835
- packages : Dict[str, str], default: {}
836
- Packages to use for this step. The key is the name of the package
837
- and the value is the version to use.
838
- python : str, optional, default: None
839
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
840
- that the version used will correspond to the version of the Python interpreter used to start the run.
835
+ var : str, optional, default None
836
+ Name of the artifact in which to store the caught exception.
837
+ If not specified, the exception is not stored.
838
+ print_exception : bool, default True
839
+ Determines whether or not the exception is printed to
840
+ stdout when caught.
841
841
  """
842
842
  ...
843
843
 
844
844
  @typing.overload
845
- def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
845
+ def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
846
846
  ...
847
847
 
848
848
  @typing.overload
849
- def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
849
+ def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
850
850
  ...
851
851
 
852
- def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
852
+ def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
853
853
  """
854
- Specifies the PyPI packages for the step.
854
+ Specifies that the step will success under all circumstances.
855
855
 
856
- Information in this decorator will augment any
857
- attributes set in the `@pyi_base` flow-level decorator. Hence,
858
- you can use `@pypi_base` to set packages required by all
859
- steps and use `@pypi` to specify step-specific overrides.
856
+ The decorator will create an optional artifact, specified by `var`, which
857
+ contains the exception raised. You can use it to detect the presence
858
+ of errors, indicating that all happy-path artifacts produced by the step
859
+ are missing.
860
860
 
861
861
 
862
862
  Parameters
863
863
  ----------
864
- packages : Dict[str, str], default: {}
865
- Packages to use for this step. The key is the name of the package
866
- and the value is the version to use.
867
- python : str, optional, default: None
868
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
869
- that the version used will correspond to the version of the Python interpreter used to start the run.
864
+ var : str, optional, default None
865
+ Name of the artifact in which to store the caught exception.
866
+ If not specified, the exception is not stored.
867
+ print_exception : bool, default True
868
+ Determines whether or not the exception is printed to
869
+ stdout when caught.
870
870
  """
871
871
  ...
872
872
 
873
873
  @typing.overload
874
- def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
874
+ def trigger_on_finish(*, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
875
875
  """
876
- Specifies the event(s) that this flow depends on.
876
+ Specifies the flow(s) that this flow depends on.
877
877
 
878
878
  ```
879
- @trigger(event='foo')
879
+ @trigger_on_finish(flow='FooFlow')
880
880
  ```
881
881
  or
882
882
  ```
883
- @trigger(events=['foo', 'bar'])
883
+ @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
884
884
  ```
885
+ This decorator respects the @project decorator and triggers the flow
886
+ when upstream runs within the same namespace complete successfully
885
887
 
886
- Additionally, you can specify the parameter mappings
887
- to map event payload to Metaflow parameters for the flow.
888
+ Additionally, you can specify project aware upstream flow dependencies
889
+ by specifying the fully qualified project_flow_name.
888
890
  ```
889
- @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
891
+ @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
890
892
  ```
891
893
  or
892
894
  ```
893
- @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
894
- {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
895
+ @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
895
896
  ```
896
897
 
897
- 'parameters' can also be a list of strings and tuples like so:
898
- ```
899
- @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
900
- ```
901
- This is equivalent to:
898
+ You can also specify just the project or project branch (other values will be
899
+ inferred from the current project or project branch):
902
900
  ```
903
- @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
901
+ @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
904
902
  ```
905
903
 
904
+ Note that `branch` is typically one of:
905
+ - `prod`
906
+ - `user.bob`
907
+ - `test.my_experiment`
908
+ - `prod.staging`
909
+
906
910
 
907
911
  Parameters
908
912
  ----------
909
- event : Union[str, Dict[str, Any]], optional, default None
910
- Event dependency for this flow.
911
- events : List[Union[str, Dict[str, Any]]], default []
912
- Events dependency for this flow.
913
+ flow : Union[str, Dict[str, str]], optional, default None
914
+ Upstream flow dependency for this flow.
915
+ flows : List[Union[str, Dict[str, str]]], default []
916
+ Upstream flow dependencies for this flow.
913
917
  options : Dict[str, Any], default {}
914
918
  Backend-specific configuration for tuning eventing behavior.
915
919
  """
916
920
  ...

  @typing.overload
- def trigger(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
  ...

- def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}):
+ def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
  """
- Specifies the event(s) that this flow depends on.
+ Specifies the flow(s) that this flow depends on.

  ```
- @trigger(event='foo')
+ @trigger_on_finish(flow='FooFlow')
  ```
  or
  ```
- @trigger(events=['foo', 'bar'])
+ @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
  ```
+ This decorator respects the @project decorator and triggers the flow
+ when upstream runs within the same namespace complete successfully.

- Additionally, you can specify the parameter mappings
- to map event payload to Metaflow parameters for the flow.
+ Additionally, you can specify project-aware upstream flow dependencies
+ by specifying the fully qualified project_flow_name.
  ```
- @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
+ @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
  ```
  or
  ```
- @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'}},
- {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}}])
+ @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
  ```

- 'parameters' can also be a list of strings and tuples like so:
- ```
- @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
- ```
- This is equivalent to:
+ You can also specify just the project or project branch (other values will be
+ inferred from the current project or project branch):
  ```
- @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
+ @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
  ```

+ Note that `branch` is typically one of:
+ - `prod`
+ - `user.bob`
+ - `test.my_experiment`
+ - `prod.staging`
+

  Parameters
  ----------
- event : Union[str, Dict[str, Any]], optional, default None
- Event dependency for this flow.
- events : List[Union[str, Dict[str, Any]]], default []
- Event dependencies for this flow.
+ flow : Union[str, Dict[str, str]], optional, default None
+ Upstream flow dependency for this flow.
+ flows : List[Union[str, Dict[str, str]]], default []
+ Upstream flow dependencies for this flow.
  options : Dict[str, Any], default {}
  Backend-specific configuration for tuning eventing behavior.
  """
  ...
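
For orientation, a minimal sketch of the decorator documented above being applied; the flow names `FirstFlow` and `SecondFlow` are illustrative, not part of the stub:

```
from metaflow import FlowSpec, step, trigger_on_finish

# Run SecondFlow whenever FirstFlow (an illustrative upstream flow)
# completes successfully within the same namespace.
@trigger_on_finish(flow='FirstFlow')
class SecondFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == '__main__':
    SecondFlow()
```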

+ def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ """
+ The `@airflow_external_task_sensor` decorator attaches an Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
+ This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as flow decorators. Adding more than one decorator will ensure that the `start` step starts only after all sensors finish.
+
+
+ Parameters
+ ----------
+ timeout : int
+ Time, in seconds, before the task times out and fails. (Default: 3600)
+ poke_interval : int
+ Time in seconds that the job should wait in between each try. (Default: 60)
+ mode : str
+ How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
+ exponential_backoff : bool
+ Allow progressively longer waits between pokes by using an exponential backoff algorithm. (Default: True)
+ pool : str
+ The slot pool this task should run in;
+ slot pools are a way to limit concurrency for certain tasks. (Default: None)
+ soft_fail : bool
+ Set to True to mark the task as SKIPPED on failure. (Default: False)
+ name : str
+ Name of the sensor on Airflow
+ description : str
+ Description of the sensor in the Airflow UI
+ external_dag_id : str
+ The dag_id that contains the task you want to wait for.
+ external_task_ids : List[str]
+ The list of task_ids that you want to wait for.
+ If None (default value) the sensor waits for the DAG. (Default: None)
+ allowed_states : List[str]
+ Iterable of allowed states. (Default: ['success'])
+ failed_states : List[str]
+ Iterable of failed or disallowed states. (Default: None)
+ execution_delta : datetime.timedelta
+ Time difference with the previous execution to look at;
+ the default is the same logical date as the current task or DAG. (Default: None)
+ check_existence : bool
+ Set to True to check if the external task exists or check if
+ the DAG to wait for exists. (Default: True)
+ """
+ ...
+
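
A hedged usage sketch for the sensor decorator added above; the identifiers `upstream_dag` and `make_dataset` are placeholders, and the remaining arguments are assumed to fall back to the defaults listed in the docstring:

```
from metaflow import FlowSpec, step, airflow_external_task_sensor

# Hold the `start` step until a task in another Airflow DAG succeeds.
# 'upstream_dag' and 'make_dataset' are hypothetical identifiers.
@airflow_external_task_sensor(
    name='wait_for_upstream',
    external_dag_id='upstream_dag',
    external_task_ids=['make_dataset'],
    timeout=3600,
    poke_interval=60,
)
class DownstreamFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == '__main__':
    DownstreamFlow()
```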
  @typing.overload
  def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
@@ -1004,10 +1055,13 @@ def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packag
  """
  ...
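
Most of the `pypi_base` docstring falls outside this hunk, but the signature above is enough for a small sketch; the package pin and Python version are illustrative:

```
from metaflow import FlowSpec, pypi_base, step

# Pin flow-wide PyPI dependencies and the Python version.
@pypi_base(packages={'pandas': '2.2.2'}, python='3.11')
class PandasFlow(FlowSpec):

    @step
    def start(self):
        import pandas as pd
        print(pd.__version__)
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == '__main__':
    PandasFlow()
```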

- def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
- The `@airflow_external_task_sensor` decorator attaches an Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
- This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as flow decorators. Adding more than one decorator will ensure that the `start` step starts only after all sensors finish.
+ The `@airflow_s3_key_sensor` decorator attaches an Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
+ before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
+ and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
+ added as flow decorators. Adding more than one decorator will ensure that the `start` step
+ starts only after all sensors finish.


  Parameters
@@ -1016,138 +1070,178 @@ def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str,
  Time, in seconds, before the task times out and fails. (Default: 3600)
  poke_interval : int
  Time in seconds that the job should wait in between each try. (Default: 60)
- mode : str
- How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
- exponential_backoff : bool
- Allow progressively longer waits between pokes by using an exponential backoff algorithm. (Default: True)
- pool : str
- The slot pool this task should run in;
- slot pools are a way to limit concurrency for certain tasks. (Default: None)
- soft_fail : bool
- Set to True to mark the task as SKIPPED on failure. (Default: False)
- name : str
- Name of the sensor on Airflow
- description : str
- Description of the sensor in the Airflow UI
- external_dag_id : str
- The dag_id that contains the task you want to wait for.
- external_task_ids : List[str]
- The list of task_ids that you want to wait for.
- If None (default value) the sensor waits for the DAG. (Default: None)
- allowed_states : List[str]
- Iterable of allowed states. (Default: ['success'])
- failed_states : List[str]
- Iterable of failed or disallowed states. (Default: None)
- execution_delta : datetime.timedelta
- Time difference with the previous execution to look at;
- the default is the same logical date as the current task or DAG. (Default: None)
- check_existence : bool
- Set to True to check if the external task exists or check if
- the DAG to wait for exists. (Default: True)
+ mode : str
+ How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
+ exponential_backoff : bool
+ Allow progressively longer waits between pokes by using an exponential backoff algorithm. (Default: True)
+ pool : str
+ The slot pool this task should run in;
+ slot pools are a way to limit concurrency for certain tasks. (Default: None)
+ soft_fail : bool
+ Set to True to mark the task as SKIPPED on failure. (Default: False)
+ name : str
+ Name of the sensor on Airflow
+ description : str
+ Description of the sensor in the Airflow UI
+ bucket_key : Union[str, List[str]]
+ The key(s) being waited on. Supports a full s3:// style URL or a relative path from root level.
+ When it is specified as a full s3:// URL, please leave `bucket_name` as None.
+ bucket_name : str
+ Name of the S3 bucket. Only needed when bucket_key is not provided as a full s3:// URL.
+ When specified, all the keys passed to bucket_key refer to this bucket. (Default: None)
+ wildcard_match : bool
+ Whether the bucket_key should be interpreted as a Unix wildcard pattern. (Default: False)
+ aws_conn_id : str
+ A reference to the S3 connection on Airflow. (Default: None)
+ verify : bool
+ Whether or not to verify SSL certificates for the S3 connection. (Default: None)
  """
  ...
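
A sketch of the S3 sensor in use, assuming the full-URL form of `bucket_key` so `bucket_name` can be omitted; the bucket and key below are placeholders:

```
from metaflow import FlowSpec, airflow_s3_key_sensor, step

# Block the `start` step until the marker object exists in S3.
# 's3://my-bucket/data/ready.marker' is a hypothetical key.
@airflow_s3_key_sensor(bucket_key='s3://my-bucket/data/ready.marker')
class S3GatedFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == '__main__':
    S3GatedFlow()
```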

  @typing.overload
- def trigger_on_finish(*, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
- Specifies the flow(s) that this flow depends on.
+ Specifies the event(s) that this flow depends on.

  ```
- @trigger_on_finish(flow='FooFlow')
+ @trigger(event='foo')
  ```
  or
  ```
- @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
+ @trigger(events=['foo', 'bar'])
  ```
- This decorator respects the @project decorator and triggers the flow
- when upstream runs within the same namespace complete successfully.

- Additionally, you can specify project-aware upstream flow dependencies
- by specifying the fully qualified project_flow_name.
+ Additionally, you can specify the parameter mappings
+ to map event payload to Metaflow parameters for the flow.
  ```
- @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
+ @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
  ```
  or
  ```
- @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
+ @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'}},
+ {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}}])
  ```

- You can also specify just the project or project branch (other values will be
- inferred from the current project or project branch):
+ 'parameters' can also be a list of strings and tuples like so:
  ```
- @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
+ @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
+ ```
+ This is equivalent to:
+ ```
+ @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
  ```
-
- Note that `branch` is typically one of:
- - `prod`
- - `user.bob`
- - `test.my_experiment`
- - `prod.staging`


  Parameters
  ----------
- flow : Union[str, Dict[str, str]], optional, default None
- Upstream flow dependency for this flow.
- flows : List[Union[str, Dict[str, str]]], default []
- Upstream flow dependencies for this flow.
+ event : Union[str, Dict[str, Any]], optional, default None
+ Event dependency for this flow.
+ events : List[Union[str, Dict[str, Any]]], default []
+ Event dependencies for this flow.
  options : Dict[str, Any], default {}
  Backend-specific configuration for tuning eventing behavior.
  """
  ...

  @typing.overload
- def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ def trigger(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
  ...

- def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
+ def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}):
  """
- Specifies the flow(s) that this flow depends on.
+ Specifies the event(s) that this flow depends on.

  ```
- @trigger_on_finish(flow='FooFlow')
+ @trigger(event='foo')
  ```
  or
  ```
- @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
+ @trigger(events=['foo', 'bar'])
  ```
- This decorator respects the @project decorator and triggers the flow
- when upstream runs within the same namespace complete successfully.

- Additionally, you can specify project-aware upstream flow dependencies
- by specifying the fully qualified project_flow_name.
+ Additionally, you can specify the parameter mappings
+ to map event payload to Metaflow parameters for the flow.
  ```
- @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
+ @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
  ```
  or
  ```
- @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
+ @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'}},
+ {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}}])
  ```

- You can also specify just the project or project branch (other values will be
- inferred from the current project or project branch):
+ 'parameters' can also be a list of strings and tuples like so:
  ```
- @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
+ @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
+ ```
+ This is equivalent to:
+ ```
+ @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
  ```
-
- Note that `branch` is typically one of:
- - `prod`
- - `user.bob`
- - `test.my_experiment`
- - `prod.staging`


  Parameters
  ----------
- flow : Union[str, Dict[str, str]], optional, default None
- Upstream flow dependency for this flow.
- flows : List[Union[str, Dict[str, str]]], default []
- Upstream flow dependencies for this flow.
+ event : Union[str, Dict[str, Any]], optional, default None
+ Event dependency for this flow.
+ events : List[Union[str, Dict[str, Any]]], default []
+ Event dependencies for this flow.
  options : Dict[str, Any], default {}
  Backend-specific configuration for tuning eventing behavior.
  """
  ...
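
To make the parameter-mapping form above concrete, a minimal sketch; the event name `payment_event` and payload field `amount_usd` are illustrative:

```
from metaflow import FlowSpec, Parameter, step, trigger

# Map the 'amount_usd' field of the event payload onto the flow's
# 'amount' parameter; both names here are hypothetical.
@trigger(event={'name': 'payment_event',
                'parameters': {'amount': 'amount_usd'}})
class PaymentFlow(FlowSpec):
    amount = Parameter('amount', default=0.0)

    @step
    def start(self):
        print('amount received:', self.amount)
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == '__main__':
    PaymentFlow()
```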

+ @typing.overload
+ def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ """
+ Specifies the times when the flow should be run when running on a
+ production scheduler.
+
+
+ Parameters
+ ----------
+ hourly : bool, default False
+ Run the workflow hourly.
+ daily : bool, default True
+ Run the workflow daily.
+ weekly : bool, default False
+ Run the workflow weekly.
+ cron : str, optional, default None
+ Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
+ specified by this expression.
+ timezone : str, optional, default None
+ Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
+ which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
+ """
+ ...
+
+ @typing.overload
+ def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ ...
+
+ def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
+ """
+ Specifies the times when the flow should be run when running on a
+ production scheduler.
+
+
+ Parameters
+ ----------
+ hourly : bool, default False
+ Run the workflow hourly.
+ daily : bool, default True
+ Run the workflow daily.
+ weekly : bool, default False
+ Run the workflow weekly.
+ cron : str, optional, default None
+ Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
+ specified by this expression.
+ timezone : str, optional, default None
+ Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
+ which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
+ """
+ ...
+
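
A sketch of the scheduling decorator added above. Note that cron syntax is backend-specific (the linked EventBridge format uses six fields, while Argo Workflows uses the standard five-field form), so the expression below is illustrative:

```
from metaflow import FlowSpec, schedule, step

# Run nightly at 02:30 in the given IANA timezone (timezone support
# applies to Argo Workflows deployments, per the docstring above).
@schedule(cron='30 2 * * *', timezone='America/Los_Angeles')
class NightlyFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == '__main__':
    NightlyFlow()
```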
  def project(*, name: str, branch: typing.Optional[str] = None, production: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
  Specifies what flows belong to the same project.
@@ -1234,97 +1328,3 @@ def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packa
  """
  ...
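
Based on the `project` signature visible above (the rest of its docstring falls outside this hunk), a hedged sketch; the project name is illustrative:

```
from metaflow import FlowSpec, project, step

# Group this flow with others under one project namespace so that
# branches (e.g. user.bob, prod) are isolated per deployment.
@project(name='fraud_detection')
class TrainingFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == '__main__':
    TrainingFlow()
```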

- @typing.overload
- def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
- """
- Specifies the times when the flow should be run when running on a
- production scheduler.
-
-
- Parameters
- ----------
- hourly : bool, default False
- Run the workflow hourly.
- daily : bool, default True
- Run the workflow daily.
- weekly : bool, default False
- Run the workflow weekly.
- cron : str, optional, default None
- Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
- specified by this expression.
- timezone : str, optional, default None
- Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
- which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
- """
- ...
-
- @typing.overload
- def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
- ...
-
- def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
- """
- Specifies the times when the flow should be run when running on a
- production scheduler.
-
-
- Parameters
- ----------
- hourly : bool, default False
- Run the workflow hourly.
- daily : bool, default True
- Run the workflow daily.
- weekly : bool, default False
- Run the workflow weekly.
- cron : str, optional, default None
- Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
- specified by this expression.
- timezone : str, optional, default None
- Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
- which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
- """
- ...
-
- def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
- """
- The `@airflow_s3_key_sensor` decorator attaches an Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
- before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
- and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
- added as flow decorators. Adding more than one decorator will ensure that the `start` step
- starts only after all sensors finish.
-
-
- Parameters
- ----------
- timeout : int
- Time, in seconds, before the task times out and fails. (Default: 3600)
- poke_interval : int
- Time in seconds that the job should wait in between each try. (Default: 60)
- mode : str
- How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
- exponential_backoff : bool
- Allow progressively longer waits between pokes by using an exponential backoff algorithm. (Default: True)
- pool : str
- The slot pool this task should run in;
- slot pools are a way to limit concurrency for certain tasks. (Default: None)
- soft_fail : bool
- Set to True to mark the task as SKIPPED on failure. (Default: False)
- name : str
- Name of the sensor on Airflow
- description : str
- Description of the sensor in the Airflow UI
- bucket_key : Union[str, List[str]]
- The key(s) being waited on. Supports a full s3:// style URL or a relative path from root level.
- When it is specified as a full s3:// URL, please leave `bucket_name` as None.
- bucket_name : str
- Name of the S3 bucket. Only needed when bucket_key is not provided as a full s3:// URL.
- When specified, all the keys passed to bucket_key refer to this bucket. (Default: None)
- wildcard_match : bool
- Whether the bucket_key should be interpreted as a Unix wildcard pattern. (Default: False)
- aws_conn_id : str
- A reference to the S3 connection on Airflow. (Default: None)
- verify : bool
- Whether or not to verify SSL certificates for the S3 connection. (Default: None)
- """
- ...
-