metaflow-stubs 2.14.3__py2.py3-none-any.whl → 2.15.0__py2.py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (145)
  1. metaflow-stubs/__init__.pyi +573 -573
  2. metaflow-stubs/cards.pyi +2 -2
  3. metaflow-stubs/cli.pyi +2 -2
  4. metaflow-stubs/cli_components/__init__.pyi +2 -2
  5. metaflow-stubs/cli_components/utils.pyi +2 -2
  6. metaflow-stubs/client/__init__.pyi +2 -2
  7. metaflow-stubs/client/core.pyi +6 -6
  8. metaflow-stubs/client/filecache.pyi +2 -2
  9. metaflow-stubs/events.pyi +3 -3
  10. metaflow-stubs/exception.pyi +2 -2
  11. metaflow-stubs/flowspec.pyi +3 -3
  12. metaflow-stubs/generated_for.txt +1 -1
  13. metaflow-stubs/includefile.pyi +3 -3
  14. metaflow-stubs/info_file.pyi +2 -2
  15. metaflow-stubs/metadata_provider/__init__.pyi +2 -2
  16. metaflow-stubs/metadata_provider/heartbeat.pyi +2 -2
  17. metaflow-stubs/metadata_provider/metadata.pyi +2 -2
  18. metaflow-stubs/metadata_provider/util.pyi +2 -2
  19. metaflow-stubs/metaflow_config.pyi +2 -2
  20. metaflow-stubs/metaflow_current.pyi +5 -5
  21. metaflow-stubs/multicore_utils.pyi +2 -2
  22. metaflow-stubs/parameters.pyi +3 -3
  23. metaflow-stubs/plugins/__init__.pyi +12 -12
  24. metaflow-stubs/plugins/airflow/__init__.pyi +2 -2
  25. metaflow-stubs/plugins/airflow/airflow_decorator.pyi +2 -2
  26. metaflow-stubs/plugins/airflow/airflow_utils.pyi +2 -2
  27. metaflow-stubs/plugins/airflow/exception.pyi +2 -2
  28. metaflow-stubs/plugins/airflow/sensors/__init__.pyi +2 -2
  29. metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +2 -2
  30. metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +2 -2
  31. metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +2 -2
  32. metaflow-stubs/plugins/argo/__init__.pyi +2 -2
  33. metaflow-stubs/plugins/argo/argo_client.pyi +2 -2
  34. metaflow-stubs/plugins/argo/argo_events.pyi +2 -2
  35. metaflow-stubs/plugins/argo/argo_workflows.pyi +2 -2
  36. metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +3 -3
  37. metaflow-stubs/plugins/argo/argo_workflows_deployer.pyi +3 -3
  38. metaflow-stubs/plugins/argo/argo_workflows_deployer_objects.pyi +2 -2
  39. metaflow-stubs/plugins/aws/__init__.pyi +3 -3
  40. metaflow-stubs/plugins/aws/aws_client.pyi +2 -2
  41. metaflow-stubs/plugins/aws/aws_utils.pyi +2 -2
  42. metaflow-stubs/plugins/aws/batch/__init__.pyi +2 -2
  43. metaflow-stubs/plugins/aws/batch/batch.pyi +2 -2
  44. metaflow-stubs/plugins/aws/batch/batch_client.pyi +2 -2
  45. metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +2 -2
  46. metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +2 -2
  47. metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +4 -4
  48. metaflow-stubs/plugins/aws/step_functions/__init__.pyi +2 -2
  49. metaflow-stubs/plugins/aws/step_functions/dynamo_db_client.pyi +2 -2
  50. metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +2 -2
  51. metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +2 -2
  52. metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +2 -2
  53. metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +2 -2
  54. metaflow-stubs/plugins/aws/step_functions/step_functions_decorator.pyi +2 -2
  55. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer.pyi +3 -3
  56. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer_objects.pyi +2 -2
  57. metaflow-stubs/plugins/azure/__init__.pyi +2 -2
  58. metaflow-stubs/plugins/azure/azure_credential.pyi +2 -2
  59. metaflow-stubs/plugins/azure/azure_exceptions.pyi +2 -2
  60. metaflow-stubs/plugins/azure/azure_secret_manager_secrets_provider.pyi +4 -4
  61. metaflow-stubs/plugins/azure/azure_utils.pyi +2 -2
  62. metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +2 -2
  63. metaflow-stubs/plugins/azure/includefile_support.pyi +2 -2
  64. metaflow-stubs/plugins/cards/__init__.pyi +2 -2
  65. metaflow-stubs/plugins/cards/card_client.pyi +2 -2
  66. metaflow-stubs/plugins/cards/card_creator.pyi +2 -2
  67. metaflow-stubs/plugins/cards/card_datastore.pyi +2 -2
  68. metaflow-stubs/plugins/cards/card_decorator.pyi +2 -2
  69. metaflow-stubs/plugins/cards/card_modules/__init__.pyi +2 -2
  70. metaflow-stubs/plugins/cards/card_modules/basic.pyi +3 -3
  71. metaflow-stubs/plugins/cards/card_modules/card.pyi +2 -2
  72. metaflow-stubs/plugins/cards/card_modules/components.pyi +2 -2
  73. metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +2 -2
  74. metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +2 -2
  75. metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +2 -2
  76. metaflow-stubs/plugins/cards/card_resolver.pyi +2 -2
  77. metaflow-stubs/plugins/cards/component_serializer.pyi +2 -2
  78. metaflow-stubs/plugins/cards/exception.pyi +2 -2
  79. metaflow-stubs/plugins/catch_decorator.pyi +2 -2
  80. metaflow-stubs/plugins/datatools/__init__.pyi +2 -2
  81. metaflow-stubs/plugins/datatools/local.pyi +2 -2
  82. metaflow-stubs/plugins/datatools/s3/__init__.pyi +2 -2
  83. metaflow-stubs/plugins/datatools/s3/s3.pyi +2 -2
  84. metaflow-stubs/plugins/datatools/s3/s3tail.pyi +2 -2
  85. metaflow-stubs/plugins/datatools/s3/s3util.pyi +2 -2
  86. metaflow-stubs/plugins/debug_logger.pyi +2 -2
  87. metaflow-stubs/plugins/debug_monitor.pyi +2 -2
  88. metaflow-stubs/plugins/environment_decorator.pyi +2 -2
  89. metaflow-stubs/plugins/events_decorator.pyi +2 -2
  90. metaflow-stubs/plugins/frameworks/__init__.pyi +2 -2
  91. metaflow-stubs/plugins/frameworks/pytorch.pyi +2 -2
  92. metaflow-stubs/plugins/gcp/__init__.pyi +2 -2
  93. metaflow-stubs/plugins/gcp/gcp_secret_manager_secrets_provider.pyi +4 -4
  94. metaflow-stubs/plugins/gcp/gs_exceptions.pyi +2 -2
  95. metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +2 -2
  96. metaflow-stubs/plugins/gcp/gs_utils.pyi +2 -2
  97. metaflow-stubs/plugins/gcp/includefile_support.pyi +2 -2
  98. metaflow-stubs/plugins/kubernetes/__init__.pyi +2 -2
  99. metaflow-stubs/plugins/kubernetes/kube_utils.pyi +2 -2
  100. metaflow-stubs/plugins/kubernetes/kubernetes.pyi +2 -2
  101. metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +2 -2
  102. metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +2 -2
  103. metaflow-stubs/plugins/kubernetes/kubernetes_job.pyi +2 -2
  104. metaflow-stubs/plugins/kubernetes/kubernetes_jobsets.pyi +2 -2
  105. metaflow-stubs/plugins/kubernetes/spot_monitor_sidecar.pyi +2 -2
  106. metaflow-stubs/plugins/parallel_decorator.pyi +2 -2
  107. metaflow-stubs/plugins/project_decorator.pyi +2 -2
  108. metaflow-stubs/plugins/pypi/__init__.pyi +3 -3
  109. metaflow-stubs/plugins/pypi/conda_decorator.pyi +2 -2
  110. metaflow-stubs/plugins/pypi/conda_environment.pyi +6 -6
  111. metaflow-stubs/plugins/pypi/pypi_decorator.pyi +2 -2
  112. metaflow-stubs/plugins/pypi/pypi_environment.pyi +2 -2
  113. metaflow-stubs/plugins/pypi/utils.pyi +2 -2
  114. metaflow-stubs/plugins/resources_decorator.pyi +2 -2
  115. metaflow-stubs/plugins/retry_decorator.pyi +2 -2
  116. metaflow-stubs/plugins/secrets/__init__.pyi +3 -3
  117. metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +4 -4
  118. metaflow-stubs/plugins/secrets/secrets_decorator.pyi +2 -2
  119. metaflow-stubs/plugins/storage_executor.pyi +2 -2
  120. metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +2 -2
  121. metaflow-stubs/plugins/timeout_decorator.pyi +2 -2
  122. metaflow-stubs/pylint_wrapper.pyi +2 -2
  123. metaflow-stubs/runner/__init__.pyi +2 -2
  124. metaflow-stubs/runner/deployer.pyi +4 -4
  125. metaflow-stubs/runner/deployer_impl.pyi +2 -2
  126. metaflow-stubs/runner/metaflow_runner.pyi +4 -4
  127. metaflow-stubs/runner/nbdeploy.pyi +2 -2
  128. metaflow-stubs/runner/nbrun.pyi +2 -2
  129. metaflow-stubs/runner/subprocess_manager.pyi +2 -2
  130. metaflow-stubs/runner/utils.pyi +4 -4
  131. metaflow-stubs/system/__init__.pyi +2 -2
  132. metaflow-stubs/system/system_logger.pyi +2 -2
  133. metaflow-stubs/system/system_monitor.pyi +2 -2
  134. metaflow-stubs/tagging_util.pyi +2 -2
  135. metaflow-stubs/tuple_util.pyi +2 -2
  136. metaflow-stubs/user_configs/__init__.pyi +2 -2
  137. metaflow-stubs/user_configs/config_decorators.pyi +5 -5
  138. metaflow-stubs/user_configs/config_options.pyi +2 -2
  139. metaflow-stubs/user_configs/config_parameters.pyi +5 -5
  140. metaflow-stubs/version.pyi +2 -2
  141. {metaflow_stubs-2.14.3.dist-info → metaflow_stubs-2.15.0.dist-info}/METADATA +2 -2
  142. metaflow_stubs-2.15.0.dist-info/RECORD +145 -0
  143. {metaflow_stubs-2.14.3.dist-info → metaflow_stubs-2.15.0.dist-info}/WHEEL +1 -1
  144. metaflow_stubs-2.14.3.dist-info/RECORD +0 -145
  145. {metaflow_stubs-2.14.3.dist-info → metaflow_stubs-2.15.0.dist-info}/top_level.txt +0 -0
@@ -1,7 +1,7 @@
  ######################################################################################################
  # Auto-generated Metaflow stub file #
- # MF version: 2.14.3 #
- # Generated on 2025-02-22T04:36:00.916575 #
+ # MF version: 2.15.0 #
+ # Generated on 2025-02-25T21:21:51.409752 #
  ######################################################################################################
 
  from __future__ import annotations
@@ -35,8 +35,8 @@ from .user_configs.config_parameters import ConfigValue as ConfigValue
  from .user_configs.config_parameters import config_expr as config_expr
  from .user_configs.config_decorators import CustomFlowDecorator as CustomFlowDecorator
  from .user_configs.config_decorators import CustomStepDecorator as CustomStepDecorator
- from . import tuple_util as tuple_util
  from . import events as events
+ from . import tuple_util as tuple_util
  from . import runner as runner
  from . import plugins as plugins
  from .plugins.datatools.s3.s3 import S3 as S3
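The re-exports above are what a type checker resolves when user code imports from `metaflow`; a stub-only distribution named `metaflow-stubs` is discovered automatically under PEP 561. A minimal sketch of a flow that type-checks against these names (the flow itself is hypothetical):

```python
# hypothetical flow; mypy/pyright validate it against metaflow-stubs
from metaflow import FlowSpec, step

class HelloFlow(FlowSpec):
    @step
    def start(self):
        self.message = "hello"  # artifact attribute, typed via the stubs
        self.next(self.end)

    @step
    def end(self):
        print(self.message)

if __name__ == "__main__":
    HelloFlow()
```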
@@ -143,112 +143,140 @@ def step(f: typing.Union[typing.Callable[[FlowSpecDerived], None], typing.Callab
  ...
 
  @typing.overload
- def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies the PyPI packages for the step.
+ Specifies the Conda environment for the step.
 
  Information in this decorator will augment any
- attributes set in the `@pyi_base` flow-level decorator. Hence,
- you can use `@pypi_base` to set packages required by all
- steps and use `@pypi` to specify step-specific overrides.
+ attributes set in the `@conda_base` flow-level decorator. Hence,
+ you can use `@conda_base` to set packages required by all
+ steps and use `@conda` to specify step-specific overrides.
 
 
  Parameters
  ----------
- packages : Dict[str, str], default: {}
+ packages : Dict[str, str], default {}
  Packages to use for this step. The key is the name of the package
  and the value is the version to use.
- python : str, optional, default: None
+ libraries : Dict[str, str], default {}
+ Supported for backward compatibility. When used with packages, packages will take precedence.
+ python : str, optional, default None
  Version of Python to use, e.g. '3.7.4'. A default value of None implies
  that the version used will correspond to the version of the Python interpreter used to start the run.
+ disabled : bool, default False
+ If set to True, disables @conda.
  """
  ...
 
  @typing.overload
- def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...
 
  @typing.overload
- def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...
 
- def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
+ def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
  """
- Specifies the PyPI packages for the step.
+ Specifies the Conda environment for the step.
 
  Information in this decorator will augment any
- attributes set in the `@pyi_base` flow-level decorator. Hence,
- you can use `@pypi_base` to set packages required by all
- steps and use `@pypi` to specify step-specific overrides.
+ attributes set in the `@conda_base` flow-level decorator. Hence,
+ you can use `@conda_base` to set packages required by all
+ steps and use `@conda` to specify step-specific overrides.
 
 
  Parameters
  ----------
- packages : Dict[str, str], default: {}
+ packages : Dict[str, str], default {}
  Packages to use for this step. The key is the name of the package
  and the value is the version to use.
- python : str, optional, default: None
+ libraries : Dict[str, str], default {}
+ Supported for backward compatibility. When used with packages, packages will take precedence.
+ python : str, optional, default None
  Version of Python to use, e.g. '3.7.4'. A default value of None implies
  that the version used will correspond to the version of the Python interpreter used to start the run.
+ disabled : bool, default False
+ If set to True, disables @conda.
  """
  ...
 
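A minimal sketch of the `@conda_base`/`@conda` split the docstring describes; package and Python versions are illustrative:

```python
from metaflow import FlowSpec, conda, conda_base, step

@conda_base(python="3.11.0")  # environment required by all steps
class CondaFlow(FlowSpec):
    @conda(packages={"pandas": "2.2.0"})  # step-specific override
    @step
    def start(self):
        import pandas as pd  # resolved inside this step's Conda environment
        self.rows = len(pd.DataFrame({"a": [1, 2, 3]}))
        self.next(self.end)

    @step
    def end(self):
        print(self.rows)

if __name__ == "__main__":
    CondaFlow()
```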
  @typing.overload
- def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def resources(*, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies a timeout for your step.
-
- This decorator is useful if this step may hang indefinitely.
+ Specifies the resources needed when executing this step.
 
- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
+ Use `@resources` to specify the resource requirements
+ independently of the specific compute layer (`@batch`, `@kubernetes`).
 
- Note that all the values specified in parameters are added together so if you specify
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
+ You can choose the compute layer on the command line by executing e.g.
+ ```
+ python myflow.py run --with batch
+ ```
+ or
+ ```
+ python myflow.py run --with kubernetes
+ ```
+ which executes the flow on the desired system using the
+ requirements specified in `@resources`.
 
 
  Parameters
  ----------
- seconds : int, default 0
- Number of seconds to wait prior to timing out.
- minutes : int, default 0
- Number of minutes to wait prior to timing out.
- hours : int, default 0
- Number of hours to wait prior to timing out.
+ cpu : int, default 1
+ Number of CPUs required for this step.
+ gpu : int, optional, default None
+ Number of GPUs required for this step.
+ disk : int, optional, default None
+ Disk size (in MB) required for this step. Only applies on Kubernetes.
+ memory : int, default 4096
+ Memory size (in MB) required for this step.
+ shared_memory : int, optional, default None
+ The value for the size (in MiB) of the /dev/shm volume for this step.
+ This parameter maps to the `--shm-size` option in Docker.
  """
  ...
 
  @typing.overload
- def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...
 
  @typing.overload
- def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...
 
- def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
+ def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
  """
- Specifies a timeout for your step.
-
- This decorator is useful if this step may hang indefinitely.
+ Specifies the resources needed when executing this step.
 
- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
+ Use `@resources` to specify the resource requirements
+ independently of the specific compute layer (`@batch`, `@kubernetes`).
 
- Note that all the values specified in parameters are added together so if you specify
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
+ You can choose the compute layer on the command line by executing e.g.
+ ```
+ python myflow.py run --with batch
+ ```
+ or
+ ```
+ python myflow.py run --with kubernetes
+ ```
+ which executes the flow on the desired system using the
+ requirements specified in `@resources`.
 
 
  Parameters
  ----------
- seconds : int, default 0
- Number of seconds to wait prior to timing out.
- minutes : int, default 0
- Number of minutes to wait prior to timing out.
- hours : int, default 0
- Number of hours to wait prior to timing out.
+ cpu : int, default 1
+ Number of CPUs required for this step.
+ gpu : int, optional, default None
+ Number of GPUs required for this step.
+ disk : int, optional, default None
+ Disk size (in MB) required for this step. Only applies on Kubernetes.
+ memory : int, default 4096
+ Memory size (in MB) required for this step.
+ shared_memory : int, optional, default None
+ The value for the size (in MiB) of the /dev/shm volume for this step.
+ This parameter maps to the `--shm-size` option in Docker.
  """
  ...
 
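A minimal sketch of declaring requirements once and picking the compute layer at run time, as the docstring suggests (`python trainflow.py run --with batch` or `--with kubernetes`); the numbers are illustrative:

```python
from metaflow import FlowSpec, resources, step

class TrainFlow(FlowSpec):
    # declarative requirements; `run --with batch` or `run --with kubernetes`
    # chooses the compute layer that satisfies them
    @resources(cpu=4, memory=16384, gpu=1)
    @step
    def start(self):
        self.result = "trained"
        self.next(self.end)

    @step
    def end(self):
        print(self.result)

if __name__ == "__main__":
    TrainFlow()
```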
@@ -271,6 +299,41 @@ def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None],
  """
  ...
 
+ @typing.overload
+ def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Specifies secrets to be retrieved and injected as environment variables prior to
+ the execution of a step.
+
+
+ Parameters
+ ----------
+ sources : List[Union[str, Dict[str, Any]]], default: []
+ List of secret specs, defining how the secrets are to be retrieved
+ """
+ ...
+
+ @typing.overload
+ def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ ...
+
+ @typing.overload
+ def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []):
+ """
+ Specifies secrets to be retrieved and injected as environment variables prior to
+ the execution of a step.
+
+
+ Parameters
+ ----------
+ sources : List[Union[str, Dict[str, Any]]], default: []
+ List of secret specs, defining how the secrets are to be retrieved
+ """
+ ...
+
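A minimal sketch of the `@secrets` stub in use; the secret name and the environment variable it resolves to are hypothetical:

```python
import os

from metaflow import FlowSpec, secrets, step

class DbFlow(FlowSpec):
    # each entry in `sources` is a secret spec; resolved key/value pairs
    # are injected as environment variables before the step body runs
    @secrets(sources=["my-db-credentials"])  # hypothetical secret name
    @step
    def start(self):
        self.user = os.environ["DB_USER"]  # hypothetical key from the secret
        self.next(self.end)

    @step
    def end(self):
        print("connected as", self.user)

if __name__ == "__main__":
    DbFlow()
```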
  @typing.overload
  def card(*, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
@@ -321,198 +384,87 @@ def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typ
  ...
 
  @typing.overload
- def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def batch(*, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = 'METAFLOW_BATCH_JOB_QUEUE', iam_role: str = 'METAFLOW_ECS_S3_ACCESS_IAM_ROLE', execution_role: str = 'METAFLOW_ECS_FARGATE_EXECUTION_ROLE', shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies that the step will success under all circumstances.
-
- The decorator will create an optional artifact, specified by `var`, which
- contains the exception raised. You can use it to detect the presence
- of errors, indicating that all happy-path artifacts produced by the step
- are missing.
+ Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
 
 
  Parameters
  ----------
- var : str, optional, default None
- Name of the artifact in which to store the caught exception.
- If not specified, the exception is not stored.
- print_exception : bool, default True
- Determines whether or not the exception is printed to
- stdout when caught.
+ cpu : int, default 1
+ Number of CPUs required for this step. If `@resources` is
+ also present, the maximum value from all decorators is used.
+ gpu : int, default 0
+ Number of GPUs required for this step. If `@resources` is
+ also present, the maximum value from all decorators is used.
+ memory : int, default 4096
+ Memory size (in MB) required for this step. If
+ `@resources` is also present, the maximum value from all decorators is
+ used.
+ image : str, optional, default None
+ Docker image to use when launching on AWS Batch. If not specified, and
+ METAFLOW_BATCH_CONTAINER_IMAGE is specified, that image is used. If
+ not, a default Docker image mapping to the current version of Python is used.
+ queue : str, default METAFLOW_BATCH_JOB_QUEUE
+ AWS Batch Job Queue to submit the job to.
+ iam_role : str, default METAFLOW_ECS_S3_ACCESS_IAM_ROLE
+ AWS IAM role that AWS Batch container uses to access AWS cloud resources.
+ execution_role : str, default METAFLOW_ECS_FARGATE_EXECUTION_ROLE
+ AWS IAM role that AWS Batch can use [to trigger AWS Fargate tasks]
+ (https://docs.aws.amazon.com/batch/latest/userguide/execution-IAM-role.html).
+ shared_memory : int, optional, default None
+ The value for the size (in MiB) of the /dev/shm volume for this step.
+ This parameter maps to the `--shm-size` option in Docker.
+ max_swap : int, optional, default None
+ The total amount of swap memory (in MiB) a container can use for this
+ step. This parameter is translated to the `--memory-swap` option in
+ Docker where the value is the sum of the container memory plus the
+ `max_swap` value.
+ swappiness : int, optional, default None
+ This allows you to tune memory swappiness behavior for this step.
+ A swappiness value of 0 causes swapping not to happen unless absolutely
+ necessary. A swappiness value of 100 causes pages to be swapped very
+ aggressively. Accepted values are whole numbers between 0 and 100.
+ use_tmpfs : bool, default False
+ This enables an explicit tmpfs mount for this step. Note that tmpfs is
+ not available on Fargate compute environments
+ tmpfs_tempdir : bool, default True
+ sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
+ tmpfs_size : int, optional, default None
+ The value for the size (in MiB) of the tmpfs mount for this step.
+ This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
+ memory allocated for this step.
+ tmpfs_path : str, optional, default None
+ Path to tmpfs mount for this step. Defaults to /metaflow_temp.
+ inferentia : int, default 0
+ Number of Inferentia chips required for this step.
+ trainium : int, default None
+ Alias for inferentia. Use only one of the two.
+ efa : int, default 0
+ Number of elastic fabric adapter network devices to attach to container
+ ephemeral_storage : int, default None
+ The total amount, in GiB, of ephemeral storage to set for the task, 21-200GiB.
+ This is only relevant for Fargate compute environments
+ log_driver: str, optional, default None
+ The log driver to use for the Amazon ECS container.
+ log_options: List[str], optional, default None
+ List of strings containing options for the chosen log driver. The configurable values
+ depend on the `log driver` chosen. Validation of these options is not supported yet.
+ Example: [`awslogs-group:aws/batch/job`]
  """
  ...
 
  @typing.overload
- def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def batch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...
 
  @typing.overload
- def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def batch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...
 
- def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
+ def batch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = 'METAFLOW_BATCH_JOB_QUEUE', iam_role: str = 'METAFLOW_ECS_S3_ACCESS_IAM_ROLE', execution_role: str = 'METAFLOW_ECS_FARGATE_EXECUTION_ROLE', shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None):
  """
- Specifies that the step will success under all circumstances.
-
- The decorator will create an optional artifact, specified by `var`, which
- contains the exception raised. You can use it to detect the presence
- of errors, indicating that all happy-path artifacts produced by the step
- are missing.
-
-
- Parameters
- ----------
- var : str, optional, default None
- Name of the artifact in which to store the caught exception.
- If not specified, the exception is not stored.
- print_exception : bool, default True
- Determines whether or not the exception is printed to
- stdout when caught.
- """
- ...
-
- @typing.overload
- def resources(*, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Specifies the resources needed when executing this step.
-
- Use `@resources` to specify the resource requirements
- independently of the specific compute layer (`@batch`, `@kubernetes`).
-
- You can choose the compute layer on the command line by executing e.g.
- ```
- python myflow.py run --with batch
- ```
- or
- ```
- python myflow.py run --with kubernetes
- ```
- which executes the flow on the desired system using the
- requirements specified in `@resources`.
-
-
- Parameters
- ----------
- cpu : int, default 1
- Number of CPUs required for this step.
- gpu : int, optional, default None
- Number of GPUs required for this step.
- disk : int, optional, default None
- Disk size (in MB) required for this step. Only applies on Kubernetes.
- memory : int, default 4096
- Memory size (in MB) required for this step.
- shared_memory : int, optional, default None
- The value for the size (in MiB) of the /dev/shm volume for this step.
- This parameter maps to the `--shm-size` option in Docker.
- """
- ...
-
- @typing.overload
- def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
- """
- Specifies the resources needed when executing this step.
-
- Use `@resources` to specify the resource requirements
- independently of the specific compute layer (`@batch`, `@kubernetes`).
-
- You can choose the compute layer on the command line by executing e.g.
- ```
- python myflow.py run --with batch
- ```
- or
- ```
- python myflow.py run --with kubernetes
- ```
- which executes the flow on the desired system using the
- requirements specified in `@resources`.
-
-
- Parameters
- ----------
- cpu : int, default 1
- Number of CPUs required for this step.
- gpu : int, optional, default None
- Number of GPUs required for this step.
- disk : int, optional, default None
- Disk size (in MB) required for this step. Only applies on Kubernetes.
- memory : int, default 4096
- Memory size (in MB) required for this step.
- shared_memory : int, optional, default None
- The value for the size (in MiB) of the /dev/shm volume for this step.
- This parameter maps to the `--shm-size` option in Docker.
- """
- ...
-
- @typing.overload
- def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Specifies the Conda environment for the step.
-
- Information in this decorator will augment any
- attributes set in the `@conda_base` flow-level decorator. Hence,
- you can use `@conda_base` to set packages required by all
- steps and use `@conda` to specify step-specific overrides.
-
-
- Parameters
- ----------
- packages : Dict[str, str], default {}
- Packages to use for this step. The key is the name of the package
- and the value is the version to use.
- libraries : Dict[str, str], default {}
- Supported for backward compatibility. When used with packages, packages will take precedence.
- python : str, optional, default None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
- disabled : bool, default False
- If set to True, disables @conda.
- """
- ...
-
- @typing.overload
- def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
- """
- Specifies the Conda environment for the step.
-
- Information in this decorator will augment any
- attributes set in the `@conda_base` flow-level decorator. Hence,
- you can use `@conda_base` to set packages required by all
- steps and use `@conda` to specify step-specific overrides.
-
-
- Parameters
- ----------
- packages : Dict[str, str], default {}
- Packages to use for this step. The key is the name of the package
- and the value is the version to use.
- libraries : Dict[str, str], default {}
- Supported for backward compatibility. When used with packages, packages will take precedence.
- python : str, optional, default None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
- disabled : bool, default False
- If set to True, disables @conda.
- """
- ...
-
- @typing.overload
- def batch(*, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = 'METAFLOW_BATCH_JOB_QUEUE', iam_role: str = 'METAFLOW_ECS_S3_ACCESS_IAM_ROLE', execution_role: str = 'METAFLOW_ECS_FARGATE_EXECUTION_ROLE', shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
+ Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
 
 
  Parameters
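A minimal sketch of sending one step to AWS Batch while composing with `@resources`; per the docstring, the maximum of the two decorators' cpu/gpu/memory values is what the Batch job requests. The queue name is hypothetical:

```python
from metaflow import FlowSpec, batch, resources, step

class BatchFlow(FlowSpec):
    @resources(memory=8192)              # 8 GB wins over @batch's default
    @batch(cpu=2, queue="my-job-queue")  # hypothetical AWS Batch job queue
    @step
    def start(self):
        self.done = True
        self.next(self.end)

    @step
    def end(self):
        print("done:", self.done)

if __name__ == "__main__":
    BatchFlow()
```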
@@ -581,80 +533,159 @@ def batch(*, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optio
  ...
 
  @typing.overload
- def batch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Specifies the PyPI packages for the step.
+
+ Information in this decorator will augment any
+ attributes set in the `@pyi_base` flow-level decorator. Hence,
+ you can use `@pypi_base` to set packages required by all
+ steps and use `@pypi` to specify step-specific overrides.
+
+
+ Parameters
+ ----------
+ packages : Dict[str, str], default: {}
+ Packages to use for this step. The key is the name of the package
+ and the value is the version to use.
+ python : str, optional, default: None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ """
  ...
 
  @typing.overload
- def batch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...
 
- def batch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = 'METAFLOW_BATCH_JOB_QUEUE', iam_role: str = 'METAFLOW_ECS_S3_ACCESS_IAM_ROLE', execution_role: str = 'METAFLOW_ECS_FARGATE_EXECUTION_ROLE', shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None):
+ @typing.overload
+ def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
  """
- Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
+ Specifies the PyPI packages for the step.
+
+ Information in this decorator will augment any
+ attributes set in the `@pyi_base` flow-level decorator. Hence,
+ you can use `@pypi_base` to set packages required by all
+ steps and use `@pypi` to specify step-specific overrides.
 
 
  Parameters
  ----------
- cpu : int, default 1
- Number of CPUs required for this step. If `@resources` is
- also present, the maximum value from all decorators is used.
- gpu : int, default 0
- Number of GPUs required for this step. If `@resources` is
- also present, the maximum value from all decorators is used.
- memory : int, default 4096
- Memory size (in MB) required for this step. If
- `@resources` is also present, the maximum value from all decorators is
- used.
- image : str, optional, default None
- Docker image to use when launching on AWS Batch. If not specified, and
- METAFLOW_BATCH_CONTAINER_IMAGE is specified, that image is used. If
- not, a default Docker image mapping to the current version of Python is used.
- queue : str, default METAFLOW_BATCH_JOB_QUEUE
- AWS Batch Job Queue to submit the job to.
- iam_role : str, default METAFLOW_ECS_S3_ACCESS_IAM_ROLE
- AWS IAM role that AWS Batch container uses to access AWS cloud resources.
- execution_role : str, default METAFLOW_ECS_FARGATE_EXECUTION_ROLE
- AWS IAM role that AWS Batch can use [to trigger AWS Fargate tasks]
- (https://docs.aws.amazon.com/batch/latest/userguide/execution-IAM-role.html).
- shared_memory : int, optional, default None
- The value for the size (in MiB) of the /dev/shm volume for this step.
- This parameter maps to the `--shm-size` option in Docker.
- max_swap : int, optional, default None
- The total amount of swap memory (in MiB) a container can use for this
- step. This parameter is translated to the `--memory-swap` option in
- Docker where the value is the sum of the container memory plus the
- `max_swap` value.
- swappiness : int, optional, default None
- This allows you to tune memory swappiness behavior for this step.
- A swappiness value of 0 causes swapping not to happen unless absolutely
- necessary. A swappiness value of 100 causes pages to be swapped very
- aggressively. Accepted values are whole numbers between 0 and 100.
- use_tmpfs : bool, default False
- This enables an explicit tmpfs mount for this step. Note that tmpfs is
- not available on Fargate compute environments
- tmpfs_tempdir : bool, default True
- sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
- tmpfs_size : int, optional, default None
- The value for the size (in MiB) of the tmpfs mount for this step.
- This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
- memory allocated for this step.
- tmpfs_path : str, optional, default None
- Path to tmpfs mount for this step. Defaults to /metaflow_temp.
- inferentia : int, default 0
- Number of Inferentia chips required for this step.
- trainium : int, default None
- Alias for inferentia. Use only one of the two.
- efa : int, default 0
- Number of elastic fabric adapter network devices to attach to container
- ephemeral_storage : int, default None
- The total amount, in GiB, of ephemeral storage to set for the task, 21-200GiB.
- This is only relevant for Fargate compute environments
- log_driver: str, optional, default None
- The log driver to use for the Amazon ECS container.
- log_options: List[str], optional, default None
- List of strings containing options for the chosen log driver. The configurable values
- depend on the `log driver` chosen. Validation of these options is not supported yet.
- Example: [`awslogs-group:aws/batch/job`]
+ packages : Dict[str, str], default: {}
+ Packages to use for this step. The key is the name of the package
+ and the value is the version to use.
+ python : str, optional, default: None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ """
+ ...
+
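A minimal sketch of the `@pypi_base`/`@pypi` split (the upstream docstring's `@pyi_base` is evidently a typo for `@pypi_base`, left verbatim above); versions are illustrative:

```python
from metaflow import FlowSpec, pypi, pypi_base, step

@pypi_base(python="3.11.0", packages={"requests": "2.32.3"})
class PypiFlow(FlowSpec):
    @pypi(packages={"pandas": "2.2.0"})  # augments the flow-level base
    @step
    def start(self):
        import pandas as pd  # installed only for this step
        self.mean = float(pd.Series([1, 2, 3]).mean())
        self.next(self.end)

    @step
    def end(self):
        print(self.mean)

if __name__ == "__main__":
    PypiFlow()
```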
+ @typing.overload
+ def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Specifies that the step will success under all circumstances.
+
+ The decorator will create an optional artifact, specified by `var`, which
+ contains the exception raised. You can use it to detect the presence
+ of errors, indicating that all happy-path artifacts produced by the step
+ are missing.
+
+
+ Parameters
+ ----------
+ var : str, optional, default None
+ Name of the artifact in which to store the caught exception.
+ If not specified, the exception is not stored.
+ print_exception : bool, default True
+ Determines whether or not the exception is printed to
+ stdout when caught.
+ """
+ ...
+
+ @typing.overload
+ def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ ...
+
+ @typing.overload
+ def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
+ """
+ Specifies that the step will success under all circumstances.
+
+ The decorator will create an optional artifact, specified by `var`, which
+ contains the exception raised. You can use it to detect the presence
+ of errors, indicating that all happy-path artifacts produced by the step
+ are missing.
+
+
+ Parameters
+ ----------
+ var : str, optional, default None
+ Name of the artifact in which to store the caught exception.
+ If not specified, the exception is not stored.
+ print_exception : bool, default True
+ Determines whether or not the exception is printed to
+ stdout when caught.
+ """
+ ...
+
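A minimal sketch of `@catch` storing a failure into an artifact so the run continues ("will success" in the docstring above is an upstream typo for "will succeed", kept verbatim):

```python
from metaflow import FlowSpec, catch, step

class CatchFlow(FlowSpec):
    # a raised exception is captured into the artifact `err`
    # instead of failing the run
    @catch(var="err", print_exception=False)
    @step
    def start(self):
        self.value = 1 / 0  # deliberate failure
        self.next(self.end)

    @step
    def end(self):
        # err holds the exception on failure; happy-path artifacts
        # (here, `value`) are then missing, as the docstring notes
        print("caught:", self.err)

if __name__ == "__main__":
    CatchFlow()
```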
+ @typing.overload
+ def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Specifies the number of times the task corresponding
+ to a step needs to be retried.
+
+ This decorator is useful for handling transient errors, such as networking issues.
+ If your task contains operations that can't be retried safely, e.g. database updates,
+ it is advisable to annotate it with `@retry(times=0)`.
+
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
+ decorator will execute a no-op task after all retries have been exhausted,
+ ensuring that the flow execution can continue.
+
+
+ Parameters
+ ----------
+ times : int, default 3
+ Number of times to retry this task.
+ minutes_between_retries : int, default 2
+ Number of minutes between retries.
+ """
+ ...
+
+ @typing.overload
+ def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ ...
+
+ @typing.overload
+ def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
+ """
+ Specifies the number of times the task corresponding
+ to a step needs to be retried.
+
+ This decorator is useful for handling transient errors, such as networking issues.
+ If your task contains operations that can't be retried safely, e.g. database updates,
+ it is advisable to annotate it with `@retry(times=0)`.
+
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
+ decorator will execute a no-op task after all retries have been exhausted,
+ ensuring that the flow execution can continue.
+
+
+ Parameters
+ ----------
+ times : int, default 3
+ Number of times to retry this task.
+ minutes_between_retries : int, default 2
+ Number of minutes between retries.
  """
  ...
 
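A minimal sketch composing `@retry` with `@catch` in the way the docstring describes; `fetch_remote` is a hypothetical flaky operation:

```python
from metaflow import FlowSpec, catch, retry, step

def fetch_remote():
    return "payload"  # placeholder for a transiently failing call

class RetryFlow(FlowSpec):
    # retried up to 5 times; once retries are exhausted, @catch records
    # the exception as `fetch_error` and the flow continues
    @catch(var="fetch_error")
    @retry(times=5, minutes_between_retries=1)
    @step
    def start(self):
        self.data = fetch_remote()
        self.next(self.end)

    @step
    def end(self):
        print(self.fetch_error or self.data)

if __name__ == "__main__":
    RetryFlow()
```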
@@ -727,101 +758,70 @@ def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: ty
  compute_pool : str, optional, default None
  Compute pool to be used for for this step.
  If not specified, any accessible compute pool within the perimeter is used.
- hostname_resolution_timeout: int, default 10 * 60
- Timeout in seconds for the workers tasks in the gang scheduled cluster to resolve the hostname of control task.
- Only applicable when @parallel is used.
- qos: str, default: Burstable
- Quality of Service class to assign to the pod. Supported values are: Guaranteed, Burstable, BestEffort
- """
- ...
-
- @typing.overload
- def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Specifies secrets to be retrieved and injected as environment variables prior to
- the execution of a step.
-
-
- Parameters
- ----------
- sources : List[Union[str, Dict[str, Any]]], default: []
- List of secret specs, defining how the secrets are to be retrieved
+ hostname_resolution_timeout: int, default 10 * 60
+ Timeout in seconds for the workers tasks in the gang scheduled cluster to resolve the hostname of control task.
+ Only applicable when @parallel is used.
+ qos: str, default: Burstable
+ Quality of Service class to assign to the pod. Supported values are: Guaranteed, Burstable, BestEffort
  """
  ...
 
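A minimal sketch of pinning a step to Kubernetes with the `qos` class documented above (cluster access and namespace come from the Metaflow deployment's configuration; the numbers are illustrative):

```python
from metaflow import FlowSpec, kubernetes, step

class K8sFlow(FlowSpec):
    # runs as a pod requesting 2 CPUs, 8 GB memory, 20 GB disk,
    # scheduled with the Burstable QoS class (the documented default)
    @kubernetes(cpu=2, memory=8192, disk=20480, qos="Burstable")
    @step
    def start(self):
        self.ok = True
        self.next(self.end)

    @step
    def end(self):
        print(self.ok)

if __name__ == "__main__":
    K8sFlow()
```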
752
769
  @typing.overload
753
- def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
754
- ...
755
-
756
- @typing.overload
757
- def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
758
- ...
759
-
760
- def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []):
770
+ def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
761
771
  """
762
- Specifies secrets to be retrieved and injected as environment variables prior to
763
- the execution of a step.
764
-
772
+ Specifies a timeout for your step.
765
773
 
766
- Parameters
767
- ----------
768
- sources : List[Union[str, Dict[str, Any]]], default: []
769
- List of secret specs, defining how the secrets are to be retrieved
770
- """
771
- ...
772
-
773
- @typing.overload
774
- def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
775
- """
776
- Specifies the number of times the task corresponding
777
- to a step needs to be retried.
774
+ This decorator is useful if this step may hang indefinitely.
778
775
 
779
- This decorator is useful for handling transient errors, such as networking issues.
780
- If your task contains operations that can't be retried safely, e.g. database updates,
781
- it is advisable to annotate it with `@retry(times=0)`.
776
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
777
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
778
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.
782
779
 
783
- This can be used in conjunction with the `@catch` decorator. The `@catch`
784
- decorator will execute a no-op task after all retries have been exhausted,
785
- ensuring that the flow execution can continue.
780
+ Note that all the values specified in parameters are added together so if you specify
781
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
786
782
 
787
783
 
788
784
  Parameters
789
785
  ----------
790
- times : int, default 3
791
- Number of times to retry this task.
792
- minutes_between_retries : int, default 2
793
- Number of minutes between retries.
786
+ seconds : int, default 0
787
+ Number of seconds to wait prior to timing out.
788
+ minutes : int, default 0
789
+ Number of minutes to wait prior to timing out.
790
+ hours : int, default 0
791
+ Number of hours to wait prior to timing out.
794
792
  """
795
793
  ...

  @typing.overload
- def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
+ def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
  """
- Specifies the number of times the task corresponding
- to a step needs to be retried.
+ Specifies a timeout for your step.

- This decorator is useful for handling transient errors, such as networking issues.
- If your task contains operations that can't be retried safely, e.g. database updates,
- it is advisable to annotate it with `@retry(times=0)`.
+ This decorator is useful if this step may hang indefinitely.

- This can be used in conjunction with the `@catch` decorator. The `@catch`
- decorator will execute a no-op task after all retries have been exhausted,
- ensuring that the flow execution can continue.
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.
+
+ Note that all the values specified in parameters are added together so if you specify
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.


  Parameters
  ----------
- times : int, default 3
- Number of times to retry this task.
- minutes_between_retries : int, default 2
- Number of minutes between retries.
+ seconds : int, default 0
+ Number of seconds to wait prior to timing out.
+ minutes : int, default 0
+ Number of minutes to wait prior to timing out.
+ hours : int, default 0
+ Number of hours to wait prior to timing out.
  """
  ...
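
To make the additive-duration rule and the `@retry`/`@catch` interplay concrete, here is a small sketch; the flow name, the artifact name `timeout_error`, and the sleep stand-in are illustrative:

```
import time

from metaflow import FlowSpec, catch, retry, step, timeout

class TimeoutFlow(FlowSpec):  # illustrative flow name

    # Durations add up: 1 hour + 60 seconds = an effective timeout of 1h 1m.
    # A timeout raises inside the step, @retry re-runs it up to two more
    # times, and @catch finally records the exception so the flow continues.
    @catch(var="timeout_error")
    @retry(times=2)
    @timeout(hours=1, seconds=60)
    @step
    def start(self):
        time.sleep(5)  # stand-in for work that might hang past the limit
        self.next(self.end)

    @step
    def end(self):
        print("caught:", getattr(self, "timeout_error", None))

if __name__ == "__main__":
    TimeoutFlow()
```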
 
@@ -859,321 +859,245 @@ def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], Non
  ...

  @typing.overload
- def trigger_on_finish(*, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
- Specifies the flow(s) that this flow depends on.
+ Specifies the event(s) that this flow depends on.

  ```
- @trigger_on_finish(flow='FooFlow')
+ @trigger(event='foo')
  ```
  or
  ```
- @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
+ @trigger(events=['foo', 'bar'])
  ```
- This decorator respects the @project decorator and triggers the flow
- when upstream runs within the same namespace complete successfully

- Additionally, you can specify project aware upstream flow dependencies
- by specifying the fully qualified project_flow_name.
+ Additionally, you can specify the parameter mappings
+ to map event payload to Metaflow parameters for the flow.
  ```
- @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
+ @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
  ```
  or
  ```
- @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
+ @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'}},
+ {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}}])
  ```

- You can also specify just the project or project branch (other values will be
- inferred from the current project or project branch):
+ 'parameters' can also be a list of strings and tuples like so:
  ```
- @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
+ @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
+ ```
+ This is equivalent to:
+ ```
+ @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
  ```
-
- Note that `branch` is typically one of:
- - `prod`
- - `user.bob`
- - `test.my_experiment`
- - `prod.staging`


  Parameters
  ----------
- flow : Union[str, Dict[str, str]], optional, default None
- Upstream flow dependency for this flow.
- flows : List[Union[str, Dict[str, str]]], default []
- Upstream flow dependencies for this flow.
+ event : Union[str, Dict[str, Any]], optional, default None
+ Event dependency for this flow.
+ events : List[Union[str, Dict[str, Any]]], default []
+ Events dependency for this flow.
  options : Dict[str, Any], default {}
  Backend-specific configuration for tuning eventing behavior.
  """
  ...

  @typing.overload
- def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ def trigger(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
  ...

- def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
+ def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}):
  """
- Specifies the flow(s) that this flow depends on.
+ Specifies the event(s) that this flow depends on.

  ```
- @trigger_on_finish(flow='FooFlow')
+ @trigger(event='foo')
  ```
  or
  ```
- @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
+ @trigger(events=['foo', 'bar'])
  ```
- This decorator respects the @project decorator and triggers the flow
- when upstream runs within the same namespace complete successfully

- Additionally, you can specify project aware upstream flow dependencies
- by specifying the fully qualified project_flow_name.
+ Additionally, you can specify the parameter mappings
+ to map event payload to Metaflow parameters for the flow.
  ```
- @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
+ @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
  ```
  or
  ```
- @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
+ @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'}},
+ {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}}])
  ```

- You can also specify just the project or project branch (other values will be
- inferred from the current project or project branch):
+ 'parameters' can also be a list of strings and tuples like so:
  ```
- @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
+ @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
  ```
-
- Note that `branch` is typically one of:
- - `prod`
- - `user.bob`
- - `test.my_experiment`
- - `prod.staging`
-
-
- Parameters
- ----------
- flow : Union[str, Dict[str, str]], optional, default None
- Upstream flow dependency for this flow.
- flows : List[Union[str, Dict[str, str]]], default []
- Upstream flow dependencies for this flow.
- options : Dict[str, Any], default {}
- Backend-specific configuration for tuning eventing behavior.
- """
- ...
-
- def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
- """
- The `@airflow_external_task_sensor` decorator attaches a Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
- This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as a flow decorators. Adding more than one decorator will ensure that `start` step starts only after all sensors finish.
-
-
- Parameters
- ----------
- timeout : int
- Time, in seconds before the task times out and fails. (Default: 3600)
- poke_interval : int
- Time in seconds that the job should wait in between each try. (Default: 60)
- mode : str
- How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
- exponential_backoff : bool
- allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
- pool : str
- the slot pool this task should run in,
- slot pools are a way to limit concurrency for certain tasks. (Default:None)
- soft_fail : bool
- Set to true to mark the task as SKIPPED on failure. (Default: False)
- name : str
- Name of the sensor on Airflow
- description : str
- Description of sensor in the Airflow UI
- external_dag_id : str
- The dag_id that contains the task you want to wait for.
- external_task_ids : List[str]
- The list of task_ids that you want to wait for.
- If None (default value) the sensor waits for the DAG. (Default: None)
- allowed_states : List[str]
- Iterable of allowed states, (Default: ['success'])
- failed_states : List[str]
- Iterable of failed or dis-allowed states. (Default: None)
- execution_delta : datetime.timedelta
- time difference with the previous execution to look at,
- the default is the same logical date as the current task or DAG. (Default: None)
- check_existence: bool
- Set to True to check if the external task exists or check if
- the DAG to wait for exists. (Default: True)
- """
- ...
-
- def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
- """
- The `@airflow_s3_key_sensor` decorator attaches a Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
- before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
- and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
- added as a flow decorators. Adding more than one decorator will ensure that `start` step
- starts only after all sensors finish.
-
-
- Parameters
- ----------
- timeout : int
- Time, in seconds before the task times out and fails. (Default: 3600)
- poke_interval : int
- Time in seconds that the job should wait in between each try. (Default: 60)
- mode : str
- How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
- exponential_backoff : bool
- allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
- pool : str
- the slot pool this task should run in,
- slot pools are a way to limit concurrency for certain tasks. (Default:None)
- soft_fail : bool
- Set to true to mark the task as SKIPPED on failure. (Default: False)
- name : str
- Name of the sensor on Airflow
- description : str
- Description of sensor in the Airflow UI
- bucket_key : Union[str, List[str]]
- The key(s) being waited on. Supports full s3:// style url or relative path from root level.
- When it's specified as a full s3:// url, please leave `bucket_name` as None
- bucket_name : str
- Name of the S3 bucket. Only needed when bucket_key is not provided as a full s3:// url.
- When specified, all the keys passed to bucket_key refers to this bucket. (Default:None)
- wildcard_match : bool
- whether the bucket_key should be interpreted as a Unix wildcard pattern. (Default: False)
- aws_conn_id : str
- a reference to the s3 connection on Airflow. (Default: None)
- verify : bool
- Whether or not to verify SSL certificates for S3 connection. (Default: None)
+ This is equivalent to:
+ ```
+ @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
+ ```
+
+
+ Parameters
+ ----------
+ event : Union[str, Dict[str, Any]], optional, default None
+ Event dependency for this flow.
+ events : List[Union[str, Dict[str, Any]]], default []
+ Events dependency for this flow.
+ options : Dict[str, Any], default {}
+ Backend-specific configuration for tuning eventing behavior.
  """
  ...
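
As a usage sketch of the added `@trigger` form, the snippet below maps an event payload field onto a flow `Parameter`; the event name, field, and parameter are the docstring's illustrative values, and the wiring only takes effect once the flow is deployed to an event-capable orchestrator:

```
from metaflow import FlowSpec, Parameter, step, trigger

# When event 'foo' fires, its 'event_field' value is mapped onto the
# flow parameter 'flow_param' (deploy-time wiring, per the docstring).
@trigger(event={'name': 'foo', 'parameters': {'flow_param': 'event_field'}})
class EventDrivenFlow(FlowSpec):  # illustrative flow name
    flow_param = Parameter('flow_param', default='not-set')

    @step
    def start(self):
        print('triggered with:', self.flow_param)
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    EventDrivenFlow()
```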

  @typing.overload
- def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
- Specifies the PyPI packages for all steps of the flow.
+ Specifies the Conda environment for all steps of the flow.
+
+ Use `@conda_base` to set common libraries required by all
+ steps and use `@conda` to specify step-specific additions.

- Use `@pypi_base` to set common packages required by all
- steps and use `@pypi` to specify step-specific overrides.

  Parameters
  ----------
- packages : Dict[str, str], default: {}
+ packages : Dict[str, str], default {}
  Packages to use for this flow. The key is the name of the package
  and the value is the version to use.
- python : str, optional, default: None
+ libraries : Dict[str, str], default {}
+ Supported for backward compatibility. When used with packages, packages will take precedence.
+ python : str, optional, default None
  Version of Python to use, e.g. '3.7.4'. A default value of None implies
  that the version used will correspond to the version of the Python interpreter used to start the run.
+ disabled : bool, default False
+ If set to True, disables Conda.
  """
  ...

  @typing.overload
- def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
  ...

- def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
+ def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
  """
- Specifies the PyPI packages for all steps of the flow.
+ Specifies the Conda environment for all steps of the flow.
+
+ Use `@conda_base` to set common libraries required by all
+ steps and use `@conda` to specify step-specific additions.

- Use `@pypi_base` to set common packages required by all
- steps and use `@pypi` to specify step-specific overrides.

  Parameters
  ----------
- packages : Dict[str, str], default: {}
+ packages : Dict[str, str], default {}
  Packages to use for this flow. The key is the name of the package
  and the value is the version to use.
- python : str, optional, default: None
+ libraries : Dict[str, str], default {}
+ Supported for backward compatibility. When used with packages, packages will take precedence.
+ python : str, optional, default None
  Version of Python to use, e.g. '3.7.4'. A default value of None implies
  that the version used will correspond to the version of the Python interpreter used to start the run.
+ disabled : bool, default False
+ If set to True, disables Conda.
  """
  ...
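
A short sketch of the flow-level/step-level split the `@conda_base` docstring describes; the Python version and package pins are illustrative:

```
from metaflow import FlowSpec, conda, conda_base, step

# Base Conda environment shared by every step.
@conda_base(python='3.11.0', packages={'pandas': '2.2.2'})
class CondaFlow(FlowSpec):  # illustrative flow name

    @step
    def start(self):
        import pandas as pd  # resolved from the flow-level environment
        self.rows = len(pd.DataFrame({'x': [1, 2, 3]}))
        self.next(self.end)

    # Step-specific addition on top of the base environment.
    @conda(packages={'scikit-learn': '1.5.0'})
    @step
    def end(self):
        import sklearn  # available only in this step
        print(self.rows, sklearn.__version__)

if __name__ == "__main__":
    CondaFlow()
```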

  @typing.overload
- def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ def trigger_on_finish(*, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
- Specifies the event(s) that this flow depends on.
+ Specifies the flow(s) that this flow depends on.

  ```
- @trigger(event='foo')
+ @trigger_on_finish(flow='FooFlow')
  ```
  or
  ```
- @trigger(events=['foo', 'bar'])
+ @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
  ```
+ This decorator respects the @project decorator and triggers the flow
+ when upstream runs within the same namespace complete successfully

- Additionally, you can specify the parameter mappings
- to map event payload to Metaflow parameters for the flow.
+ Additionally, you can specify project aware upstream flow dependencies
+ by specifying the fully qualified project_flow_name.
  ```
- @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
+ @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
  ```
  or
  ```
- @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
- {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
+ @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
  ```

- 'parameters' can also be a list of strings and tuples like so:
- ```
- @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
- ```
- This is equivalent to:
+ You can also specify just the project or project branch (other values will be
+ inferred from the current project or project branch):
  ```
- @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
+ @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
  ```

+ Note that `branch` is typically one of:
+ - `prod`
+ - `user.bob`
+ - `test.my_experiment`
+ - `prod.staging`
+

  Parameters
  ----------
- event : Union[str, Dict[str, Any]], optional, default None
- Event dependency for this flow.
- events : List[Union[str, Dict[str, Any]]], default []
- Events dependency for this flow.
+ flow : Union[str, Dict[str, str]], optional, default None
+ Upstream flow dependency for this flow.
+ flows : List[Union[str, Dict[str, str]]], default []
+ Upstream flow dependencies for this flow.
  options : Dict[str, Any], default {}
  Backend-specific configuration for tuning eventing behavior.
  """
  ...
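
Before the remaining overloads, a usage sketch of the `@trigger_on_finish` form documented above; `FooFlow` is the docstring's example upstream, and, like `@trigger`, the dependency only takes effect once the flow is deployed:

```
from metaflow import FlowSpec, step, trigger_on_finish

# Runs whenever FooFlow completes successfully in the same
# (project-aware) namespace, per the docstring above.
@trigger_on_finish(flow='FooFlow')
class DownstreamFlow(FlowSpec):  # illustrative flow name

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    DownstreamFlow()
```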

  @typing.overload
- def trigger(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
  ...

- def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}):
+ def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
  """
- Specifies the event(s) that this flow depends on.
+ Specifies the flow(s) that this flow depends on.

  ```
- @trigger(event='foo')
+ @trigger_on_finish(flow='FooFlow')
  ```
  or
  ```
- @trigger(events=['foo', 'bar'])
+ @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
  ```
+ This decorator respects the @project decorator and triggers the flow
+ when upstream runs within the same namespace complete successfully

- Additionally, you can specify the parameter mappings
- to map event payload to Metaflow parameters for the flow.
+ Additionally, you can specify project aware upstream flow dependencies
+ by specifying the fully qualified project_flow_name.
  ```
- @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
+ @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
  ```
  or
  ```
- @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
- {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
+ @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
  ```

- 'parameters' can also be a list of strings and tuples like so:
- ```
- @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
- ```
- This is equivalent to:
+ You can also specify just the project or project branch (other values will be
+ inferred from the current project or project branch):
  ```
- @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
+ @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
  ```

+ Note that `branch` is typically one of:
+ - `prod`
+ - `user.bob`
+ - `test.my_experiment`
+ - `prod.staging`
+

  Parameters
  ----------
- event : Union[str, Dict[str, Any]], optional, default None
- Event dependency for this flow.
- events : List[Union[str, Dict[str, Any]]], default []
- Events dependency for this flow.
+ flow : Union[str, Dict[str, str]], optional, default None
+ Upstream flow dependency for this flow.
+ flows : List[Union[str, Dict[str, str]]], default []
+ Upstream flow dependencies for this flow.
  options : Dict[str, Any], default {}
  Backend-specific configuration for tuning eventing behavior.
  """
@@ -1230,6 +1154,49 @@ def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly:
  """
  ...

+ def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ """
+ The `@airflow_s3_key_sensor` decorator attaches an Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
+ before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
+ and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
+ added as a flow decorator. Adding more than one decorator will ensure that the `start` step
+ starts only after all sensors finish.
+
+
+ Parameters
+ ----------
+ timeout : int
+ Time, in seconds, before the task times out and fails. (Default: 3600)
+ poke_interval : int
+ Time in seconds that the job should wait in between each try. (Default: 60)
+ mode : str
+ How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
+ exponential_backoff : bool
+ Allow progressively longer waits between pokes using an exponential backoff algorithm. (Default: True)
+ pool : str
+ The slot pool this task should run in;
+ slot pools are a way to limit concurrency for certain tasks. (Default: None)
+ soft_fail : bool
+ Set to True to mark the task as SKIPPED on failure. (Default: False)
+ name : str
+ Name of the sensor on Airflow.
+ description : str
+ Description of the sensor in the Airflow UI.
+ bucket_key : Union[str, List[str]]
+ The key(s) being waited on. Supports a full s3:// style URL or a relative path from the root level.
+ When it is specified as a full s3:// URL, please leave `bucket_name` as None.
+ bucket_name : str
+ Name of the S3 bucket. Only needed when bucket_key is not provided as a full s3:// URL.
+ When specified, all the keys passed to bucket_key refer to this bucket. (Default: None)
+ wildcard_match : bool
+ Whether the bucket_key should be interpreted as a Unix wildcard pattern. (Default: False)
+ aws_conn_id : str
+ A reference to the S3 connection on Airflow. (Default: None)
+ verify : bool
+ Whether or not to verify SSL certificates for the S3 connection. (Default: None)
+ """
+ ...
+
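
A sketch of gating a flow's `start` step on an S3 key; the stub signature lists every parameter, but the docstring documents defaults, so passing only a subset as below is an assumption, and the bucket, key, and sensor name are illustrative:

```
from metaflow import FlowSpec, airflow_s3_key_sensor, step

# Only meaningful when the flow is compiled with `airflow create`.
@airflow_s3_key_sensor(
    name='wait_for_daily_dump',
    description='Gate start on the daily S3 drop',
    bucket_key='s3://my-bucket/daily/dump.csv',  # illustrative bucket/key
    timeout=3600,
    poke_interval=60,
)
class S3GatedFlow(FlowSpec):  # illustrative flow name

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    S3GatedFlow()
```
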
  def project(*, name: str, branch: typing.Optional[str] = None, production: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
  Specifies what flows belong to the same project.
@@ -1265,54 +1232,87 @@ def project(*, name: str, branch: typing.Optional[str] = None, production: bool
  """
  ...
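
A minimal sketch of `@project`; the project name is illustrative:

```
from metaflow import FlowSpec, project, step

# Flows sharing this name get isolated, branch-aware deployments
# (branches such as 'prod' or 'user.bob', per the docstrings above).
@project(name='my_project')
class ProjectedFlow(FlowSpec):  # illustrative flow name

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    ProjectedFlow()
```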

- @typing.overload
- def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
- Specifies the Conda environment for all steps of the flow.
+ The `@airflow_external_task_sensor` decorator attaches an Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
+ This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as a flow decorator. Adding more than one decorator will ensure that the `start` step starts only after all sensors finish.

- Use `@conda_base` to set common libraries required by all
- steps and use `@conda` to specify step-specific additions.

+ Parameters
+ ----------
+ timeout : int
+ Time, in seconds, before the task times out and fails. (Default: 3600)
+ poke_interval : int
+ Time in seconds that the job should wait in between each try. (Default: 60)
+ mode : str
+ How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
+ exponential_backoff : bool
+ Allow progressively longer waits between pokes using an exponential backoff algorithm. (Default: True)
+ pool : str
+ The slot pool this task should run in;
+ slot pools are a way to limit concurrency for certain tasks. (Default: None)
+ soft_fail : bool
+ Set to True to mark the task as SKIPPED on failure. (Default: False)
+ name : str
+ Name of the sensor on Airflow.
+ description : str
+ Description of the sensor in the Airflow UI.
+ external_dag_id : str
+ The dag_id that contains the task you want to wait for.
+ external_task_ids : List[str]
+ The list of task_ids that you want to wait for.
+ If None (default value) the sensor waits for the DAG. (Default: None)
+ allowed_states : List[str]
+ Iterable of allowed states. (Default: ['success'])
+ failed_states : List[str]
+ Iterable of failed or disallowed states. (Default: None)
+ execution_delta : datetime.timedelta
+ Time difference with the previous execution to look at;
+ the default is the same logical date as the current task or DAG. (Default: None)
+ check_existence : bool
+ Set to True to check if the external task exists or check if
+ the DAG to wait for exists. (Default: True)
+ """
+ ...
+
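
A sketch of blocking `start` on a task in another Airflow DAG; as with the S3 sensor, passing only a subset of the keyword arguments relies on the documented defaults, and the DAG and task ids are illustrative:

```
from metaflow import FlowSpec, airflow_external_task_sensor, step

# Only meaningful when the flow is compiled with `airflow create`.
@airflow_external_task_sensor(
    name='wait_for_upstream_etl',
    external_dag_id='upstream_etl',   # illustrative DAG id
    external_task_ids=['publish'],    # illustrative task id
    timeout=3600,
)
class GatedFlow(FlowSpec):  # illustrative flow name

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    GatedFlow()
```
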
+ @typing.overload
+ def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ """
+ Specifies the PyPI packages for all steps of the flow.
+
+ Use `@pypi_base` to set common packages required by all
+ steps and use `@pypi` to specify step-specific overrides.

  Parameters
  ----------
- packages : Dict[str, str], default {}
+ packages : Dict[str, str], default: {}
  Packages to use for this flow. The key is the name of the package
  and the value is the version to use.
- libraries : Dict[str, str], default {}
- Supported for backward compatibility. When used with packages, packages will take precedence.
- python : str, optional, default None
+ python : str, optional, default: None
  Version of Python to use, e.g. '3.7.4'. A default value of None implies
  that the version used will correspond to the version of the Python interpreter used to start the run.
- disabled : bool, default False
- If set to True, disables Conda.
  """
  ...

  @typing.overload
- def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
  ...

- def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
+ def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
  """
- Specifies the Conda environment for all steps of the flow.
-
- Use `@conda_base` to set common libraries required by all
- steps and use `@conda` to specify step-specific additions.
+ Specifies the PyPI packages for all steps of the flow.

+ Use `@pypi_base` to set common packages required by all
+ steps and use `@pypi` to specify step-specific overrides.

  Parameters
  ----------
- packages : Dict[str, str], default {}
+ packages : Dict[str, str], default: {}
  Packages to use for this flow. The key is the name of the package
  and the value is the version to use.
- libraries : Dict[str, str], default {}
- Supported for backward compatibility. When used with packages, packages will take precedence.
- python : str, optional, default None
+ python : str, optional, default: None
  Version of Python to use, e.g. '3.7.4'. A default value of None implies
  that the version used will correspond to the version of the Python interpreter used to start the run.
- disabled : bool, default False
- If set to True, disables Conda.
  """
  ...
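
Finally, a sketch of the PyPI counterpart to `@conda_base`; the Python version and package pins are illustrative:

```
from metaflow import FlowSpec, pypi, pypi_base, step

# Base PyPI packages shared by every step.
@pypi_base(python='3.11.0', packages={'requests': '2.32.3'})
class PypiFlow(FlowSpec):  # illustrative flow name

    @step
    def start(self):
        import requests  # resolved from the flow-level environment
        self.ok = requests.codes.ok
        self.next(self.end)

    # Step-specific override, per the docstring above.
    @pypi(packages={'rich': '13.7.1'})
    @step
    def end(self):
        print(self.ok)

if __name__ == "__main__":
    PypiFlow()
```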