ob-metaflow-stubs 2.11.4.1__py2.py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (131)
  1. metaflow-stubs/__init__.pyi +2753 -0
  2. metaflow-stubs/cards.pyi +266 -0
  3. metaflow-stubs/cli.pyi +137 -0
  4. metaflow-stubs/client/__init__.pyi +993 -0
  5. metaflow-stubs/client/core.pyi +1425 -0
  6. metaflow-stubs/client/filecache.pyi +87 -0
  7. metaflow-stubs/events.pyi +107 -0
  8. metaflow-stubs/exception.pyi +98 -0
  9. metaflow-stubs/flowspec.pyi +297 -0
  10. metaflow-stubs/generated_for.txt +1 -0
  11. metaflow-stubs/includefile.pyi +524 -0
  12. metaflow-stubs/metadata/metadata.pyi +377 -0
  13. metaflow-stubs/metadata/util.pyi +18 -0
  14. metaflow-stubs/metaflow_config.pyi +263 -0
  15. metaflow-stubs/metaflow_current.pyi +327 -0
  16. metaflow-stubs/mflog/mflog.pyi +22 -0
  17. metaflow-stubs/multicore_utils.pyi +62 -0
  18. metaflow-stubs/parameters.pyi +114 -0
  19. metaflow-stubs/plugins/__init__.pyi +209 -0
  20. metaflow-stubs/plugins/airflow/__init__.pyi +9 -0
  21. metaflow-stubs/plugins/airflow/airflow.pyi +179 -0
  22. metaflow-stubs/plugins/airflow/airflow_cli.pyi +90 -0
  23. metaflow-stubs/plugins/airflow/airflow_decorator.pyi +50 -0
  24. metaflow-stubs/plugins/airflow/airflow_utils.pyi +137 -0
  25. metaflow-stubs/plugins/airflow/exception.pyi +27 -0
  26. metaflow-stubs/plugins/airflow/sensors/__init__.pyi +26 -0
  27. metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +60 -0
  28. metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +54 -0
  29. metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +50 -0
  30. metaflow-stubs/plugins/argo/__init__.pyi +9 -0
  31. metaflow-stubs/plugins/argo/argo_client.pyi +77 -0
  32. metaflow-stubs/plugins/argo/argo_events.pyi +79 -0
  33. metaflow-stubs/plugins/argo/argo_workflows.pyi +604 -0
  34. metaflow-stubs/plugins/argo/argo_workflows_cli.pyi +180 -0
  35. metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +169 -0
  36. metaflow-stubs/plugins/aws/__init__.pyi +9 -0
  37. metaflow-stubs/plugins/aws/aws_client.pyi +22 -0
  38. metaflow-stubs/plugins/aws/aws_utils.pyi +93 -0
  39. metaflow-stubs/plugins/aws/batch/__init__.pyi +9 -0
  40. metaflow-stubs/plugins/aws/batch/batch.pyi +120 -0
  41. metaflow-stubs/plugins/aws/batch/batch_cli.pyi +42 -0
  42. metaflow-stubs/plugins/aws/batch/batch_client.pyi +159 -0
  43. metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +145 -0
  44. metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +9 -0
  45. metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +73 -0
  46. metaflow-stubs/plugins/aws/step_functions/__init__.pyi +9 -0
  47. metaflow-stubs/plugins/aws/step_functions/dynamo_db_client.pyi +22 -0
  48. metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +27 -0
  49. metaflow-stubs/plugins/aws/step_functions/production_token.pyi +18 -0
  50. metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +17 -0
  51. metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +220 -0
  52. metaflow-stubs/plugins/aws/step_functions/step_functions_cli.pyi +139 -0
  53. metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +36 -0
  54. metaflow-stubs/plugins/aws/step_functions/step_functions_decorator.pyi +49 -0
  55. metaflow-stubs/plugins/azure/__init__.pyi +9 -0
  56. metaflow-stubs/plugins/azure/azure_credential.pyi +28 -0
  57. metaflow-stubs/plugins/azure/azure_exceptions.pyi +28 -0
  58. metaflow-stubs/plugins/azure/azure_utils.pyi +76 -0
  59. metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +52 -0
  60. metaflow-stubs/plugins/azure/includefile_support.pyi +63 -0
  61. metaflow-stubs/plugins/cards/__init__.pyi +9 -0
  62. metaflow-stubs/plugins/cards/card_cli.pyi +557 -0
  63. metaflow-stubs/plugins/cards/card_client.pyi +178 -0
  64. metaflow-stubs/plugins/cards/card_creator.pyi +26 -0
  65. metaflow-stubs/plugins/cards/card_datastore.pyi +111 -0
  66. metaflow-stubs/plugins/cards/card_decorator.pyi +133 -0
  67. metaflow-stubs/plugins/cards/card_modules/__init__.pyi +67 -0
  68. metaflow-stubs/plugins/cards/card_modules/basic.pyi +263 -0
  69. metaflow-stubs/plugins/cards/card_modules/card.pyi +62 -0
  70. metaflow-stubs/plugins/cards/card_modules/chevron/__init__.pyi +78 -0
  71. metaflow-stubs/plugins/cards/card_modules/chevron/main.pyi +77 -0
  72. metaflow-stubs/plugins/cards/card_modules/chevron/metadata.pyi +11 -0
  73. metaflow-stubs/plugins/cards/card_modules/chevron/renderer.pyi +114 -0
  74. metaflow-stubs/plugins/cards/card_modules/chevron/tokenizer.pyi +75 -0
  75. metaflow-stubs/plugins/cards/card_modules/components.pyi +251 -0
  76. metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +45 -0
  77. metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +24 -0
  78. metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +155 -0
  79. metaflow-stubs/plugins/cards/card_resolver.pyi +60 -0
  80. metaflow-stubs/plugins/cards/component_serializer.pyi +227 -0
  81. metaflow-stubs/plugins/cards/exception.pyi +71 -0
  82. metaflow-stubs/plugins/catch_decorator.pyi +58 -0
  83. metaflow-stubs/plugins/datatools/__init__.pyi +339 -0
  84. metaflow-stubs/plugins/datatools/local.pyi +82 -0
  85. metaflow-stubs/plugins/datatools/s3/__init__.pyi +589 -0
  86. metaflow-stubs/plugins/datatools/s3/s3.pyi +875 -0
  87. metaflow-stubs/plugins/datatools/s3/s3tail.pyi +34 -0
  88. metaflow-stubs/plugins/datatools/s3/s3util.pyi +45 -0
  89. metaflow-stubs/plugins/debug_logger.pyi +25 -0
  90. metaflow-stubs/plugins/debug_monitor.pyi +25 -0
  91. metaflow-stubs/plugins/environment_decorator.pyi +17 -0
  92. metaflow-stubs/plugins/events_decorator.pyi +34 -0
  93. metaflow-stubs/plugins/frameworks/__init__.pyi +9 -0
  94. metaflow-stubs/plugins/frameworks/pytorch.pyi +42 -0
  95. metaflow-stubs/plugins/gcp/__init__.pyi +9 -0
  96. metaflow-stubs/plugins/gcp/gs_exceptions.pyi +22 -0
  97. metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +26 -0
  98. metaflow-stubs/plugins/gcp/gs_utils.pyi +38 -0
  99. metaflow-stubs/plugins/gcp/includefile_support.pyi +63 -0
  100. metaflow-stubs/plugins/kubernetes/__init__.pyi +9 -0
  101. metaflow-stubs/plugins/kubernetes/kubernetes.pyi +127 -0
  102. metaflow-stubs/plugins/kubernetes/kubernetes_cli.pyi +73 -0
  103. metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +62 -0
  104. metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +165 -0
  105. metaflow-stubs/plugins/kubernetes/kubernetes_job.pyi +100 -0
  106. metaflow-stubs/plugins/package_cli.pyi +9 -0
  107. metaflow-stubs/plugins/parallel_decorator.pyi +34 -0
  108. metaflow-stubs/plugins/project_decorator.pyi +36 -0
  109. metaflow-stubs/plugins/pypi/__init__.pyi +18 -0
  110. metaflow-stubs/plugins/pypi/conda_decorator.pyi +59 -0
  111. metaflow-stubs/plugins/pypi/conda_environment.pyi +86 -0
  112. metaflow-stubs/plugins/pypi/pypi_decorator.pyi +22 -0
  113. metaflow-stubs/plugins/pypi/pypi_environment.pyi +50 -0
  114. metaflow-stubs/plugins/pypi/utils.pyi +28 -0
  115. metaflow-stubs/plugins/resources_decorator.pyi +15 -0
  116. metaflow-stubs/plugins/retry_decorator.pyi +28 -0
  117. metaflow-stubs/plugins/secrets/__init__.pyi +21 -0
  118. metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +30 -0
  119. metaflow-stubs/plugins/secrets/secrets_decorator.pyi +75 -0
  120. metaflow-stubs/plugins/storage_executor.pyi +33 -0
  121. metaflow-stubs/plugins/tag_cli.pyi +370 -0
  122. metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +54 -0
  123. metaflow-stubs/plugins/timeout_decorator.pyi +39 -0
  124. metaflow-stubs/procpoll.pyi +51 -0
  125. metaflow-stubs/py.typed +0 -0
  126. metaflow-stubs/pylint_wrapper.pyi +31 -0
  127. metaflow-stubs/tagging_util.pyi +52 -0
  128. ob_metaflow_stubs-2.11.4.1.dist-info/METADATA +22 -0
  129. ob_metaflow_stubs-2.11.4.1.dist-info/RECORD +131 -0
  130. ob_metaflow_stubs-2.11.4.1.dist-info/WHEEL +6 -0
  131. ob_metaflow_stubs-2.11.4.1.dist-info/top_level.txt +1 -0
@@ -0,0 +1,2753 @@
+ ##################################################################################
+ # Auto-generated Metaflow stub file                                              #
+ # MF version: 2.11.4.1                                                           #
+ # Generated on 2024-02-28T05:30:43.887672                                        #
+ ##################################################################################
+
+ from __future__ import annotations
+
+ import typing
+ if typing.TYPE_CHECKING:
+     import metaflow.plugins.datatools.s3.s3
+     import metaflow.metaflow_current
+     import io
+     import metaflow.client.core
+     import metaflow.parameters
+     import metaflow.datastore.inputs
+     import metaflow._vendor.click.types
+     import datetime
+     import metaflow.events
+     import typing
+ FlowSpecDerived = typing.TypeVar("FlowSpecDerived", bound="FlowSpec", contravariant=False, covariant=False)
+ StepFlag = typing.NewType("StepFlag", bool)
+
+ CURRENT_DIRECTORY: str
+
+ INFO_FILE: str
+
+ EXT_PKG: str
+
+ def parallel_imap_unordered(func: typing.Callable[[typing.Any], typing.Any], iterable: typing.Iterable[typing.Any], max_parallel: typing.Optional[int] = None, dir: typing.Optional[str] = None) -> typing.Iterator[typing.Any]:
+     """
+     Parallelizes execution of a function using multiprocessing. The result
+     order is not guaranteed.
+
+     Parameters
+     ----------
+     func : Callable[[Any], Any]
+         Function taking a single argument and returning a result
+     iterable : Iterable[Any]
+         Iterable over arguments to pass to func
+     max_parallel : int, optional, default None
+         Maximum parallelism. If not specified, uses the number of CPUs
+     dir : str, optional, default None
+         If specified, directory where temporary files are created
+
+     Yields
+     ------
+     Any
+         One result from calling func on one argument
+     """
+     ...
+
+ def parallel_map(func: typing.Callable[[typing.Any], typing.Any], iterable: typing.Iterable[typing.Any], max_parallel: typing.Optional[int] = None, dir: typing.Optional[str] = None) -> typing.List[typing.Any]:
+     """
+     Parallelizes execution of a function using multiprocessing. The result
+     order is that of the arguments in `iterable`.
+
+     Parameters
+     ----------
+     func : Callable[[Any], Any]
+         Function taking a single argument and returning a result
+     iterable : Iterable[Any]
+         Iterable over arguments to pass to func
+     max_parallel : int, optional, default None
+         Maximum parallelism. If not specified, uses the number of CPUs
+     dir : str, optional, default None
+         If specified, directory where temporary files are created
+
+     Returns
+     -------
+     List[Any]
+         Results. The items in the list are in the same order as the items
+         in `iterable`.
+     """
+     ...
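As a quick illustration of the two helpers above, a minimal sketch using the top-level re-export declared in this stub (the worker function and inputs are invented for the example):

```
from metaflow import parallel_map

def square(x):
    return x * x

# Runs in up to 4 worker processes; results keep the input order.
results = parallel_map(square, range(10), max_parallel=4)
assert results == [x * x for x in range(10)]
```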
+
+ current: metaflow.metaflow_current.Current
+
+ def metadata(ms: str) -> str:
+     """
+     Switch Metadata provider.
+
+     This call has a global effect. Selecting the local metadata will,
+     for example, not allow access to information stored in remote
+     metadata providers.
+
+     Note that you don't typically have to call this function directly. Usually
+     the metadata provider is set through the Metaflow configuration file. If you
+     need to switch between multiple providers, you can use the `METAFLOW_PROFILE`
+     environment variable to switch between configurations.
+
+     Parameters
+     ----------
+     ms : str
+         Can be a path (selects local metadata), a URL starting with http (selects
+         the service metadata) or an explicit specification <metadata_type>@<info>; as an
+         example, you can specify local@<path> or service@<url>.
+
+     Returns
+     -------
+     str
+         The description of the metadata selected (equivalent to the result of
+         get_metadata()).
+     """
+     ...
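For instance, a sketch of switching providers from client code using the `<metadata_type>@<info>` forms described above (the path and URL are placeholders):

```
from metaflow import metadata, get_metadata

metadata("local@/tmp/metaflow-metadata")    # local provider rooted at a path
metadata("service@https://md.example.com")  # remote metadata service
print(get_metadata())                       # description of the active provider
```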
+
+ class FlowSpec(object, metaclass=type):
+     def __init__(self, use_cli = True):
+         """
+         Construct a FlowSpec
+
+         Parameters
+         ----------
+         use_cli : bool, default True
+             Set to True if the flow is invoked from __main__ or the command line
+         """
+         ...
+     @property
+     def script_name(self) -> str:
+         """
+         [Legacy function - do not use. Use `current` instead]
+
+         Returns the name of the script containing the flow
+
+         Returns
+         -------
+         str
+             A string containing the name of the script
+         """
+         ...
+     def __iter__(self):
+         """
+         [Legacy function - do not use]
+
+         Iterate over all steps in the Flow
+
+         Returns
+         -------
+         Iterator[graph.DAGNode]
+             Iterator over the steps in the flow
+         """
+         ...
+     def __getattr__(self, name: str):
+         ...
+     def cmd(self, cmdline, input = {}, output = []):
+         """
+         [Legacy function - do not use]
+         """
+         ...
+     @property
+     def index(self) -> typing.Optional[int]:
+         """
+         The index of this foreach branch.
+
+         In a foreach step, multiple instances of this step (tasks) will be executed,
+         one for each element in the foreach. This property returns the zero-based index
+         of the current task. If this is not a foreach step, this returns None.
+
+         If you need to know the indices of the parent tasks in a nested foreach, use
+         `FlowSpec.foreach_stack`.
+
+         Returns
+         -------
+         int, optional
+             Index of the task in a foreach step.
+         """
+         ...
+     @property
+     def input(self) -> typing.Optional[typing.Any]:
+         """
+         The value of the foreach artifact in this foreach branch.
+
+         In a foreach step, multiple instances of this step (tasks) will be executed,
+         one for each element in the foreach. This property returns the element passed
+         to the current task. If this is not a foreach step, this returns None.
+
+         If you need to know the values of the parent tasks in a nested foreach, use
+         `FlowSpec.foreach_stack`.
+
+         Returns
+         -------
+         object, optional
+             Input passed to the foreach task.
+         """
+         ...
+     def foreach_stack(self) -> typing.Optional[typing.List[typing.Tuple[int, int, typing.Any]]]:
+         """
+         Returns the current stack of foreach indexes and values for the current step.
+
+         Use this information to understand what data is being processed in the current
+         foreach branch. For example, considering the following code:
+         ```
+         @step
+         def root(self):
+             self.split_1 = ['a', 'b', 'c']
+             self.next(self.nest_1, foreach='split_1')
+
+         @step
+         def nest_1(self):
+             self.split_2 = ['d', 'e', 'f', 'g']
+             self.next(self.nest_2, foreach='split_2')
+
+         @step
+         def nest_2(self):
+             foo = self.foreach_stack()
+         ```
+
+         `foo` will take the following values in the various tasks for nest_2:
+         ```
+         [(0, 3, 'a'), (0, 4, 'd')]
+         [(0, 3, 'a'), (1, 4, 'e')]
+         ...
+         [(0, 3, 'a'), (3, 4, 'g')]
+         [(1, 3, 'b'), (0, 4, 'd')]
+         ...
+         ```
+         where each tuple corresponds to:
+
+         - The index of the task for that level of the loop.
+         - The number of splits for that level of the loop.
+         - The value for that level of the loop.
+
+         Note that the last tuple returned in a task corresponds to:
+
+         - 1st element: value returned by `self.index`.
+         - 3rd element: value returned by `self.input`.
+
+         Returns
+         -------
+         List[Tuple[int, int, Any]]
+             An array describing the current stack of foreach steps.
+         """
+         ...
+     def merge_artifacts(self, inputs: metaflow.datastore.inputs.Inputs, exclude: typing.Optional[typing.List[str]] = None, include: typing.Optional[typing.List[str]] = None):
+         """
+         Helper function for merging artifacts in a join step.
+
+         This function takes all the artifacts coming from the branches of a
+         join point and assigns them to self in the calling step. Only artifacts
+         not set in the current step are considered. If, for a given artifact, different
+         values are present on the incoming edges, an error will be thrown and the artifacts
+         that conflict will be reported.
+
+         As a few examples, in the simple graph: A splitting into B and C and joining in D:
+         ```
+         A:
+             self.x = 5
+             self.y = 6
+         B:
+             self.b_var = 1
+             self.x = from_b
+         C:
+             self.x = from_c
+
+         D:
+             merge_artifacts(inputs)
+         ```
+         In D, the following artifacts are set:
+         - `y` (value: 6), `b_var` (value: 1)
+         - if `from_b` and `from_c` are the same, `x` will be accessible and have value `from_b`
+         - if `from_b` and `from_c` are different, an error will be thrown. To prevent this error,
+           you need to manually set `self.x` in D to a merged value (for example the max) prior to
+           calling `merge_artifacts`.
+
+         Parameters
+         ----------
+         inputs : Inputs
+             Incoming steps to the join point.
+         exclude : List[str], optional, default None
+             If specified, do not consider merging artifacts with a name in `exclude`.
+             Cannot specify if `include` is also specified.
+         include : List[str], optional, default None
+             If specified, only merge artifacts specified. Cannot specify if `exclude` is
+             also specified.
+
+         Raises
+         ------
+         MetaflowException
+             This exception is thrown if this is not called in a join step.
+         UnhandledInMergeArtifactsException
+             This exception is thrown in case of unresolved conflicts.
+         MissingInMergeArtifactsException
+             This exception is thrown in case an artifact specified in `include` cannot
+             be found.
+         """
+         ...
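A join-step fragment along these lines, assuming it sits inside a FlowSpec subclass (step and artifact names are invented):

```
@step
def join(self, inputs):
    # `x` conflicts between branches, so resolve it by hand first;
    # artifacts already set in this step are skipped by merge_artifacts.
    self.x = max(inp.x for inp in inputs)
    self.merge_artifacts(inputs)
    self.next(self.end)
```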
+     def next(self, *dsts: typing.Callable[..., None], **kwargs):
+         """
+         Indicates the next step to execute after this step has completed.
+
+         This statement should appear as the last statement of each step, except
+         the end step.
+
+         There are several valid formats to specify the next step:
+
+         - Straight-line connection: `self.next(self.next_step)` where `next_step` is a method in
+           the current class decorated with the `@step` decorator.
+
+         - Static fan-out connection: `self.next(self.step1, self.step2, ...)` where `stepX` are
+           methods in the current class decorated with the `@step` decorator.
+
+         - Foreach branch:
+           ```
+           self.next(self.foreach_step, foreach='foreach_iterator')
+           ```
+           In this situation, `foreach_step` is a method in the current class decorated with the
+           `@step` decorator and `foreach_iterator` is a variable name in the current class that
+           evaluates to an iterator. A task will be launched for each value in the iterator and
+           each task will execute the code specified by the step `foreach_step`.
+
+         Parameters
+         ----------
+         dsts : Callable[..., None]
+             One or more methods annotated with `@step`.
+
+         Raises
+         ------
+         InvalidNextException
+             Raised if the format of the arguments does not match one of the ones given above.
+         """
+         ...
+     def __str__(self):
+         ...
+     def __getstate__(self):
+         ...
+     ...
+
+ class Parameter(object, metaclass=type):
+     def __init__(self, name: str, default: typing.Union[str, float, int, bool, typing.Dict[str, typing.Any], typing.Callable[[], typing.Union[str, float, int, bool, typing.Dict[str, typing.Any]]], None] = None, type: typing.Union[typing.Type[str], typing.Type[float], typing.Type[int], typing.Type[bool], metaflow.parameters.JSONTypeClass, None] = None, help: typing.Optional[str] = None, required: bool = False, show_default: bool = True, **kwargs: typing.Dict[str, typing.Any]):
+         ...
+     def __repr__(self):
+         ...
+     def __str__(self):
+         ...
+     def option_kwargs(self, deploy_mode):
+         ...
+     def load_parameter(self, v):
+         ...
+     @property
+     def is_string_type(self):
+         ...
+     def __getitem__(self, x):
+         ...
+     ...
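As a usage sketch (flow and parameter names are invented):

```
from metaflow import FlowSpec, Parameter, step

class ParamFlow(FlowSpec):
    alpha = Parameter('alpha', help='Learning rate', default=0.01)

    @step
    def start(self):
        # Parameters are read as instance attributes inside steps.
        print('alpha is', self.alpha)
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == '__main__':
    ParamFlow()
```

The value can then be overridden on the command line, e.g. `python param_flow.py run --alpha 0.1`.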
+
+ class JSONTypeClass(metaflow._vendor.click.types.ParamType, metaclass=type):
+     def convert(self, value, param, ctx):
+         ...
+     def __str__(self):
+         ...
+     def __repr__(self):
+         ...
+     ...
+
+ JSONType: metaflow.parameters.JSONTypeClass
+
+ class S3(object, metaclass=type):
+     @classmethod
+     def get_root_from_config(cls, echo, create_on_absent = True):
+         ...
+     def __enter__(self) -> metaflow.plugins.datatools.s3.s3.S3:
+         ...
+     def __exit__(self, *args):
+         ...
+     def close(self):
+         """
+         Delete all temporary files downloaded in this context.
+         """
+         ...
+     def list_paths(self, keys: typing.Optional[typing.Iterable[str]] = None) -> typing.List[metaflow.plugins.datatools.s3.s3.S3Object]:
+         """
+         List the next level of paths in S3.
+
+         If multiple keys are specified, listings are done in parallel. The returned
+         S3Objects have `.exists == False` if the path refers to a prefix, not an
+         existing S3 object.
+
+         For instance, if the directory hierarchy is
+         ```
+         a/0.txt
+         a/b/1.txt
+         a/c/2.txt
+         a/d/e/3.txt
+         f/4.txt
+         ```
+         The `list_paths(['a', 'f'])` call returns
+         ```
+         a/0.txt (exists == True)
+         a/b/ (exists == False)
+         a/c/ (exists == False)
+         a/d/ (exists == False)
+         f/4.txt (exists == True)
+         ```
+
+         Parameters
+         ----------
+         keys : Iterable[str], optional, default None
+             List of paths.
+
+         Returns
+         -------
+         List[S3Object]
+             S3Objects under the given paths, including prefixes (directories) that
+             do not correspond to leaf objects.
+         """
+         ...
+     def list_recursive(self, keys: typing.Optional[typing.Iterable[str]] = None) -> typing.List[metaflow.plugins.datatools.s3.s3.S3Object]:
+         """
+         List all objects recursively under the given prefixes.
+
+         If multiple keys are specified, listings are done in parallel. All objects
+         returned have `.exists == True` as this call always returns leaf objects.
+
+         For instance, if the directory hierarchy is
+         ```
+         a/0.txt
+         a/b/1.txt
+         a/c/2.txt
+         a/d/e/3.txt
+         f/4.txt
+         ```
+         The `list_recursive(['a', 'f'])` call returns
+         ```
+         a/0.txt (exists == True)
+         a/b/1.txt (exists == True)
+         a/c/2.txt (exists == True)
+         a/d/e/3.txt (exists == True)
+         f/4.txt (exists == True)
+         ```
+
+         Parameters
+         ----------
+         keys : Iterable[str], optional, default None
+             List of paths.
+
+         Returns
+         -------
+         List[S3Object]
+             S3Objects under the given paths.
+         """
+         ...
+     def info(self, key: typing.Optional[str] = None, return_missing: bool = False) -> metaflow.plugins.datatools.s3.s3.S3Object:
+         """
+         Get metadata about a single object in S3.
+
+         This call makes a single `HEAD` request to S3 which can be
+         much faster than downloading all data with `get`.
+
+         Parameters
+         ----------
+         key : str, optional, default None
+             Object to query. It can be an S3 url or a path suffix.
+         return_missing : bool, default False
+             If set to True, do not raise an exception for a missing key but
+             return it as an `S3Object` with `.exists == False`.
+
+         Returns
+         -------
+         S3Object
+             An S3Object corresponding to the object requested. The object
+             will have `.downloaded == False`.
+         """
+         ...
+     def info_many(self, keys: typing.Iterable[str], return_missing: bool = False) -> typing.List[metaflow.plugins.datatools.s3.s3.S3Object]:
+         """
+         Get metadata about many objects in S3 in parallel.
+
+         This call makes a single `HEAD` request per object, which can be
+         much faster than downloading all data with `get_many`.
+
+         Parameters
+         ----------
+         keys : Iterable[str]
+             Objects to query. Each key can be an S3 url or a path suffix.
+         return_missing : bool, default False
+             If set to True, do not raise an exception for a missing key but
+             return it as an `S3Object` with `.exists == False`.
+
+         Returns
+         -------
+         List[S3Object]
+             A list of S3Objects corresponding to the paths requested. The
+             objects will have `.downloaded == False`.
+         """
+         ...
+     def get(self, key: typing.Union[str, metaflow.plugins.datatools.s3.s3.S3GetObject, None] = None, return_missing: bool = False, return_info: bool = True) -> metaflow.plugins.datatools.s3.s3.S3Object:
+         """
+         Get a single object from S3.
+
+         Parameters
+         ----------
+         key : Union[str, S3GetObject], optional, default None
+             Object to download. It can be an S3 url, a path suffix, or
+             an S3GetObject that defines a range of data to download. If None, or
+             not provided, gets the S3 root.
+         return_missing : bool, default False
+             If set to True, do not raise an exception for a missing key but
+             return it as an `S3Object` with `.exists == False`.
+         return_info : bool, default True
+             If set to True, fetch the content-type and user metadata associated
+             with the object at no extra cost, included for symmetry with `get_many`.
+
+         Returns
+         -------
+         S3Object
+             An S3Object corresponding to the object requested.
+         """
+         ...
+     def get_many(self, keys: typing.Iterable[typing.Union[str, metaflow.plugins.datatools.s3.s3.S3GetObject]], return_missing: bool = False, return_info: bool = True) -> typing.List[metaflow.plugins.datatools.s3.s3.S3Object]:
+         """
+         Get many objects from S3 in parallel.
+
+         Parameters
+         ----------
+         keys : Iterable[Union[str, S3GetObject]]
+             Objects to download. Each object can be an S3 url, a path suffix, or
+             an S3GetObject that defines a range of data to download.
+         return_missing : bool, default False
+             If set to True, do not raise an exception for a missing key but
+             return it as an `S3Object` with `.exists == False`.
+         return_info : bool, default True
+             If set to True, fetch the content-type and user metadata associated
+             with the object at no extra cost, included for symmetry with `get`.
+
+         Returns
+         -------
+         List[S3Object]
+             S3Objects corresponding to the objects requested.
+         """
+         ...
+     def get_recursive(self, keys: typing.Iterable[str], return_info: bool = False) -> typing.List[metaflow.plugins.datatools.s3.s3.S3Object]:
+         """
+         Get many objects from S3 recursively in parallel.
+
+         Parameters
+         ----------
+         keys : Iterable[str]
+             Prefixes to download recursively. Each prefix can be an S3 url or a path suffix
+             which defines the root prefix under which all objects are downloaded.
+         return_info : bool, default False
+             If set to True, fetch the content-type and user metadata associated
+             with the object.
+
+         Returns
+         -------
+         List[S3Object]
+             S3Objects stored under the given prefixes.
+         """
+         ...
+     def get_all(self, return_info: bool = False) -> typing.List[metaflow.plugins.datatools.s3.s3.S3Object]:
+         """
+         Get all objects under the prefix set in the `S3` constructor.
+
+         This method requires that the `S3` object is initialized either with `run` or
+         `s3root`.
+
+         Parameters
+         ----------
+         return_info : bool, default False
+             If set to True, fetch the content-type and user metadata associated
+             with the object.
+
+         Returns
+         -------
+         List[S3Object]
+             S3Objects stored under the main prefix.
+         """
+         ...
+     def put(self, key: typing.Union[str, metaflow.plugins.datatools.s3.s3.S3PutObject], obj: typing.Union[io.RawIOBase, io.BufferedIOBase, str, bytes], overwrite: bool = True, content_type: typing.Optional[str] = None, metadata: typing.Optional[typing.Dict[str, str]] = None) -> str:
+         """
+         Upload a single object to S3.
+
+         Parameters
+         ----------
+         key : Union[str, S3PutObject]
+             Object path. It can be an S3 url or a path suffix.
+         obj : PutValue
+             An object to store in S3. Strings are converted to UTF-8 encoding.
+         overwrite : bool, default True
+             Overwrite the object if it exists. If set to False, the operation
+             succeeds without uploading anything if the key already exists.
+         content_type : str, optional, default None
+             Optional MIME type for the object.
+         metadata : Dict[str, str], optional, default None
+             A JSON-encodable dictionary of additional headers to be stored
+             as metadata with the object.
+
+         Returns
+         -------
+         str
+             URL of the object stored.
+         """
+         ...
+     def put_many(self, key_objs: typing.List[typing.Union[typing.Tuple[str, typing.Union[io.RawIOBase, io.BufferedIOBase, str, bytes]], metaflow.plugins.datatools.s3.s3.S3PutObject]], overwrite: bool = True) -> typing.List[typing.Tuple[str, str]]:
+         """
+         Upload many objects to S3.
+
+         Each object to be uploaded can be specified in two ways:
+
+         1. As a `(key, obj)` tuple where `key` is a string specifying
+            the path and `obj` is a string or a bytes object.
+
+         2. As a `S3PutObject` which contains additional metadata to be
+            stored with the object.
+
+         Parameters
+         ----------
+         key_objs : List[Union[Tuple[str, PutValue], S3PutObject]]
+             List of key-object pairs to upload.
+         overwrite : bool, default True
+             Overwrite the object if it exists. If set to False, the operation
+             succeeds without uploading anything if the key already exists.
+
+         Returns
+         -------
+         List[Tuple[str, str]]
+             List of `(key, url)` pairs corresponding to the objects uploaded.
+         """
+         ...
+     def put_files(self, key_paths: typing.List[typing.Union[typing.Tuple[str, typing.Union[io.RawIOBase, io.BufferedIOBase, str, bytes]], metaflow.plugins.datatools.s3.s3.S3PutObject]], overwrite: bool = True) -> typing.List[typing.Tuple[str, str]]:
+         """
+         Upload many local files to S3.
+
+         Each file to be uploaded can be specified in two ways:
+
+         1. As a `(key, path)` tuple where `key` is a string specifying
+            the S3 path and `path` is the path to a local file.
+
+         2. As a `S3PutObject` which contains additional metadata to be
+            stored with the file.
+
+         Parameters
+         ----------
+         key_paths : List[Union[Tuple[str, PutValue], S3PutObject]]
+             List of files to upload.
+         overwrite : bool, default True
+             Overwrite the object if it exists. If set to False, the operation
+             succeeds without uploading anything if the key already exists.
+
+         Returns
+         -------
+         List[Tuple[str, str]]
+             List of `(key, url)` pairs corresponding to the files uploaded.
+         """
+         ...
+     ...
+
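Tying the class together, a sketch of round-tripping data under an explicit root (the bucket URL is a placeholder):

```
from metaflow import S3

with S3(s3root='s3://my-bucket/demo/') as s3:
    url = s3.put('greeting', 'hello world')  # returns the object URL
    obj = s3.get('greeting')                 # downloads to a temporary file
    print(obj.text)                          # -> 'hello world'
# temporary files are deleted when the context exits (see close())
```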
+ class IncludeFile(metaflow.parameters.Parameter, metaclass=type):
+     def __init__(self, name: str, required: bool = False, is_text: bool = True, encoding: str = "utf-8", help: typing.Optional[str] = None, **kwargs: typing.Dict[str, str]):
+         ...
+     def load_parameter(self, v):
+         ...
+     ...
+
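A usage sketch (flow, parameter, and file names are invented):

```
from metaflow import FlowSpec, IncludeFile, step

class CsvFlow(FlowSpec):
    # File contents are snapshotted as an artifact at run time.
    data = IncludeFile('data', is_text=True, help='CSV file to include')

    @step
    def start(self):
        header = self.data.splitlines()[0]
        print('columns:', header)
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == '__main__':
    CsvFlow()
```

Invoked e.g. as `python csv_flow.py run --data ./input.csv`.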
+ @typing.overload
+ def step(f: typing.Callable[[FlowSpecDerived], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+     """
+     Marks a method in a FlowSpec as a Metaflow Step. Note that this
+     decorator needs to be placed as close to the method as possible (i.e.
+     before other decorators).
+
+     In other words, this is valid:
+     ```
+     @batch
+     @step
+     def foo(self):
+         pass
+     ```
+
+     whereas this is not:
+     ```
+     @step
+     @batch
+     def foo(self):
+         pass
+     ```
+
+     Parameters
+     ----------
+     f : Union[Callable[[FlowSpecDerived], None], Callable[[FlowSpecDerived, Any], None]]
+         Function to make into a Metaflow Step
+
+     Returns
+     -------
+     Union[Callable[[FlowSpecDerived, StepFlag], None], Callable[[FlowSpecDerived, Any, StepFlag], None]]
+         Function that is a Metaflow Step
+     """
+     ...
+
+ @typing.overload
+ def step(f: typing.Callable[[FlowSpecDerived, typing.Any], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+     ...
+
+ def step(f: typing.Union[typing.Callable[[FlowSpecDerived], None], typing.Callable[[FlowSpecDerived, typing.Any], None]]):
+     """
+     Marks a method in a FlowSpec as a Metaflow Step. Note that this
+     decorator needs to be placed as close to the method as possible (i.e.
+     before other decorators).
+
+     In other words, this is valid:
+     ```
+     @batch
+     @step
+     def foo(self):
+         pass
+     ```
+
+     whereas this is not:
+     ```
+     @step
+     @batch
+     def foo(self):
+         pass
+     ```
+
+     Parameters
+     ----------
+     f : Union[Callable[[FlowSpecDerived], None], Callable[[FlowSpecDerived, Any], None]]
+         Function to make into a Metaflow Step
+
+     Returns
+     -------
+     Union[Callable[[FlowSpecDerived, StepFlag], None], Callable[[FlowSpecDerived, Any, StepFlag], None]]
+         Function that is a Metaflow Step
+     """
+     ...
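Putting `@step` and `FlowSpec.next` together, a sketch of a foreach fan-out with a join (all names invented):

```
from metaflow import FlowSpec, step

class FanOutFlow(FlowSpec):
    @step
    def start(self):
        self.items = ['a', 'b', 'c']
        self.next(self.process, foreach='items')

    @step
    def process(self):
        self.result = self.input.upper()  # one task per foreach element
        self.next(self.join)

    @step
    def join(self, inputs):
        self.results = [inp.result for inp in inputs]
        self.next(self.end)

    @step
    def end(self):
        print(self.results)

if __name__ == '__main__':
    FanOutFlow()
```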
+
+ @typing.overload
+ def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+     """
+     Specifies environment variables to be set prior to the execution of a step.
+
+     Parameters
+     ----------
+     vars : Dict[str, str], default {}
+         Dictionary of environment variables to set.
+     """
+     ...
+
+ @typing.overload
+ def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+     ...
+
+ @typing.overload
+ def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+     ...
+
+ def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
+     """
+     Specifies environment variables to be set prior to the execution of a step.
+
+     Parameters
+     ----------
+     vars : Dict[str, str], default {}
+         Dictionary of environment variables to set.
+     """
+     ...
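For example, a sketch (flow and variable names invented):

```
import os
from metaflow import FlowSpec, environment, step

class EnvFlow(FlowSpec):
    @environment(vars={'APP_MODE': 'debug'})
    @step
    def start(self):
        # The variable is injected before the step body executes.
        print(os.environ['APP_MODE'])
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == '__main__':
    EnvFlow()
```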
+
+ def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = "KUBERNETES_IMAGE_PULL_POLICY", service_account: str = "METAFLOW_KUBERNETES_SERVICE_ACCOUNT", secrets: typing.Optional[typing.List[str]] = None, namespace: str = "METAFLOW_KUBERNETES_NAMESPACE", gpu: typing.Optional[int] = None, gpu_vendor: str = "KUBERNETES_GPU_VENDOR", tolerations: typing.List[str] = [], use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = "/metaflow_temp", persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, port: typing.Optional[int] = None, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+     """
+     Specifies that this step should execute on Kubernetes.
+
+     Parameters
+     ----------
+     cpu : int, default 1
+         Number of CPUs required for this step. If `@resources` is
+         also present, the maximum value from all decorators is used.
+     memory : int, default 4096
+         Memory size (in MB) required for this step. If
+         `@resources` is also present, the maximum value from all decorators is
+         used.
+     disk : int, default 10240
+         Disk size (in MB) required for this step. If
+         `@resources` is also present, the maximum value from all decorators is
+         used.
+     image : str, optional, default None
+         Docker image to use when launching on Kubernetes. If not specified, and
+         METAFLOW_KUBERNETES_CONTAINER_IMAGE is specified, that image is used. If
+         not, a default Docker image mapping to the current version of Python is used.
+     image_pull_policy : str, default KUBERNETES_IMAGE_PULL_POLICY
+         If given, the imagePullPolicy to be applied to the Docker image of the step.
+     service_account : str, default METAFLOW_KUBERNETES_SERVICE_ACCOUNT
+         Kubernetes service account to use when launching pod in Kubernetes.
+     secrets : List[str], optional, default None
+         Kubernetes secrets to use when launching pod in Kubernetes. These
+         secrets are in addition to the ones defined in `METAFLOW_KUBERNETES_SECRETS`
+         in Metaflow configuration.
+     namespace : str, default METAFLOW_KUBERNETES_NAMESPACE
+         Kubernetes namespace to use when launching pod in Kubernetes.
+     gpu : int, optional, default None
+         Number of GPUs required for this step. A value of zero implies that
+         the scheduled node should not have GPUs.
+     gpu_vendor : str, default KUBERNETES_GPU_VENDOR
+         The vendor of the GPUs to be used for this step.
+     tolerations : List[str], default []
+         The default is extracted from METAFLOW_KUBERNETES_TOLERATIONS.
+         Kubernetes tolerations to use when launching pod in Kubernetes.
+     use_tmpfs : bool, default False
+         This enables an explicit tmpfs mount for this step.
+     tmpfs_tempdir : bool, default True
+         Sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
+     tmpfs_size : int, optional, default None
+         The value for the size (in MiB) of the tmpfs mount for this step.
+         This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
+         memory allocated for this step.
+     tmpfs_path : str, optional, default /metaflow_temp
+         Path to tmpfs mount for this step.
+     persistent_volume_claims : Dict[str, str], optional, default None
+         A map (dictionary) of persistent volumes to be mounted to the pod for this step. The map is from persistent
+         volumes to the path to which the volume is to be mounted, e.g., `{'pvc-name': '/path/to/mount/on'}`.
+     port : int, optional, default None
+         Number of the port to specify in the Kubernetes job object.
+     shared_memory : int, optional, default None
+         Shared memory size (in MiB) required for this step.
+     """
+     ...
+
+ @typing.overload
+ def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+     """
+     Specifies the number of times the task corresponding
+     to a step needs to be retried.
+
+     This decorator is useful for handling transient errors, such as networking issues.
+     If your task contains operations that can't be retried safely, e.g. database updates,
+     it is advisable to annotate it with `@retry(times=0)`.
+
+     This can be used in conjunction with the `@catch` decorator. The `@catch`
+     decorator will execute a no-op task after all retries have been exhausted,
+     ensuring that the flow execution can continue.
+
+     Parameters
+     ----------
+     times : int, default 3
+         Number of times to retry this task.
+     minutes_between_retries : int, default 2
+         Number of minutes between retries.
+     """
+     ...
+
+ @typing.overload
+ def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+     ...
+
+ @typing.overload
+ def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+     ...
+
+ def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
+     """
+     Specifies the number of times the task corresponding
+     to a step needs to be retried.
+
+     This decorator is useful for handling transient errors, such as networking issues.
+     If your task contains operations that can't be retried safely, e.g. database updates,
+     it is advisable to annotate it with `@retry(times=0)`.
+
+     This can be used in conjunction with the `@catch` decorator. The `@catch`
+     decorator will execute a no-op task after all retries have been exhausted,
+     ensuring that the flow execution can continue.
+
+     Parameters
+     ----------
+     times : int, default 3
+         Number of times to retry this task.
+     minutes_between_retries : int, default 2
+         Number of minutes between retries.
+     """
+     ...
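A self-contained sketch (the simulated failure stands in for a real transient error):

```
import random
from metaflow import FlowSpec, retry, step

class RetryFlow(FlowSpec):
    @retry(times=4, minutes_between_retries=1)
    @step
    def start(self):
        # Simulate a transient failure; the task is re-run up to 4 times.
        if random.random() < 0.5:
            raise RuntimeError('transient error')
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == '__main__':
    RetryFlow()
```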
+
+ @typing.overload
+ def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+     """
+     Specifies the PyPI packages for the step.
+
+     Information in this decorator will augment any
+     attributes set in the `@pypi_base` flow-level decorator. Hence,
+     you can use `@pypi_base` to set packages required by all
+     steps and use `@pypi` to specify step-specific overrides.
+
+     Parameters
+     ----------
+     packages : Dict[str, str], default {}
+         Packages to use for this step. The key is the name of the package
+         and the value is the version to use.
+     python : str, optional, default None
+         Version of Python to use, e.g. '3.7.4'. A default value of None implies
+         that the version used will correspond to the version of the Python interpreter used to start the run.
+     """
+     ...
+
+ @typing.overload
+ def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+     ...
+
+ @typing.overload
+ def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+     ...
+
+ def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
+     """
+     Specifies the PyPI packages for the step.
+
+     Information in this decorator will augment any
+     attributes set in the `@pypi_base` flow-level decorator. Hence,
+     you can use `@pypi_base` to set packages required by all
+     steps and use `@pypi` to specify step-specific overrides.
+
+     Parameters
+     ----------
+     packages : Dict[str, str], default {}
+         Packages to use for this step. The key is the name of the package
+         and the value is the version to use.
+     python : str, optional, default None
+         Version of Python to use, e.g. '3.7.4'. A default value of None implies
+         that the version used will correspond to the version of the Python interpreter used to start the run.
+     """
+     ...
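A sketch of a step-level override (package versions are placeholders):

```
from metaflow import FlowSpec, pypi, step

class PypiFlow(FlowSpec):
    @pypi(python='3.10.11', packages={'requests': '2.31.0'})
    @step
    def start(self):
        import requests  # resolved inside the isolated step environment
        print(requests.__version__)
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == '__main__':
    PypiFlow()
```

Typically run with the PyPI-backed environment enabled, e.g. `python pypi_flow.py --environment=pypi run`.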
+
+ @typing.overload
+ def card(*, type: str = "default", id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+     """
+     Creates a human-readable report, a Metaflow Card, after this step completes.
+
+     Note that you may add multiple `@card` decorators in a step with different parameters.
+
+     Parameters
+     ----------
+     type : str, default 'default'
+         Card type.
+     id : str, optional, default None
+         If multiple cards are present, use this id to identify this card.
+     options : Dict[str, Any], default {}
+         Options passed to the card. The contents depend on the card type.
+     timeout : int, default 45
+         Interrupt reporting if it takes more than this many seconds.
+     """
+     ...
+
+ @typing.overload
+ def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+     ...
+
+ @typing.overload
+ def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+     ...
+
+ def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = "default", id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
+     """
+     Creates a human-readable report, a Metaflow Card, after this step completes.
+
+     Note that you may add multiple `@card` decorators in a step with different parameters.
+
+     Parameters
+     ----------
+     type : str, default 'default'
+         Card type.
+     id : str, optional, default None
+         If multiple cards are present, use this id to identify this card.
+     options : Dict[str, Any], default {}
+         Options passed to the card. The contents depend on the card type.
+     timeout : int, default 45
+         Interrupt reporting if it takes more than this many seconds.
+     """
+     ...
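A sketch pairing `@card` with the card components API from `metaflow.cards` (flow and id names invented):

```
from metaflow import FlowSpec, card, current, step
from metaflow.cards import Markdown

class CardFlow(FlowSpec):
    @card(type='default', id='summary')
    @step
    def start(self):
        # Components appended here are rendered into the card after the step.
        current.card['summary'].append(Markdown('# Hello from a card'))
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == '__main__':
    CardFlow()
```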
+
+ @typing.overload
+ def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+     """
+     Specifies that the step will succeed under all circumstances.
+
+     The decorator will create an optional artifact, specified by `var`, which
+     contains the exception raised. You can use it to detect the presence
+     of errors, indicating that all happy-path artifacts produced by the step
+     are missing.
+
+     Parameters
+     ----------
+     var : str, optional, default None
+         Name of the artifact in which to store the caught exception.
+         If not specified, the exception is not stored.
+     print_exception : bool, default True
+         Determines whether or not the exception is printed to
+         stdout when caught.
+     """
+     ...
+
+ @typing.overload
+ def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+     ...
+
+ @typing.overload
+ def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+     ...
+
+ def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
+     """
+     Specifies that the step will succeed under all circumstances.
+
+     The decorator will create an optional artifact, specified by `var`, which
+     contains the exception raised. You can use it to detect the presence
+     of errors, indicating that all happy-path artifacts produced by the step
+     are missing.
+
+     Parameters
+     ----------
+     var : str, optional, default None
+         Name of the artifact in which to store the caught exception.
+         If not specified, the exception is not stored.
+     print_exception : bool, default True
+         Determines whether or not the exception is printed to
+         stdout when caught.
+     """
+     ...
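A sketch of inspecting the caught exception downstream (names invented):

```
from metaflow import FlowSpec, catch, step

class CatchFlow(FlowSpec):
    @catch(var='failure')
    @step
    def start(self):
        self.result = 1 // 0  # raises; @catch records it in self.failure
        self.next(self.end)

    @step
    def end(self):
        if self.failure:
            print('start failed:', self.failure)

if __name__ == '__main__':
    CatchFlow()
```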
+
+ @typing.overload
+ def resources(*, cpu: int = 1, gpu: int = 0, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+     """
+     Specifies the resources needed when executing this step.
+
+     Use `@resources` to specify the resource requirements
+     independently of the specific compute layer (`@batch`, `@kubernetes`).
+
+     You can choose the compute layer on the command line by executing e.g.
+     ```
+     python myflow.py run --with batch
+     ```
+     or
+     ```
+     python myflow.py run --with kubernetes
+     ```
+     which executes the flow on the desired system using the
+     requirements specified in `@resources`.
+
+     Parameters
+     ----------
+     cpu : int, default 1
+         Number of CPUs required for this step.
+     gpu : int, default 0
+         Number of GPUs required for this step.
+     disk : int, optional, default None
+         Disk size (in MB) required for this step. Only applies on Kubernetes.
+     memory : int, default 4096
+         Memory size (in MB) required for this step.
+     shared_memory : int, optional, default None
+         The value for the size (in MiB) of the /dev/shm volume for this step.
+         This parameter maps to the `--shm-size` option in Docker.
+     """
+     ...
+
+ @typing.overload
+ def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+     ...
+
+ @typing.overload
+ def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+     ...
+
+ def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: int = 0, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
+     """
+     Specifies the resources needed when executing this step.
+
+     Use `@resources` to specify the resource requirements
+     independently of the specific compute layer (`@batch`, `@kubernetes`).
+
+     You can choose the compute layer on the command line by executing e.g.
+     ```
+     python myflow.py run --with batch
+     ```
+     or
+     ```
+     python myflow.py run --with kubernetes
+     ```
+     which executes the flow on the desired system using the
+     requirements specified in `@resources`.
+
+     Parameters
+     ----------
+     cpu : int, default 1
+         Number of CPUs required for this step.
+     gpu : int, default 0
+         Number of GPUs required for this step.
+     disk : int, optional, default None
+         Disk size (in MB) required for this step. Only applies on Kubernetes.
+     memory : int, default 4096
+         Memory size (in MB) required for this step.
+     shared_memory : int, optional, default None
+         The value for the size (in MiB) of the /dev/shm volume for this step.
+         This parameter maps to the `--shm-size` option in Docker.
+     """
+     ...
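A compute-layer-agnostic sketch (flow name and sizes are invented):

```
from metaflow import FlowSpec, resources, step

class BigStepFlow(FlowSpec):
    @resources(cpu=8, memory=16000)
    @step
    def start(self):
        # Requirements are honored when run with e.g. `--with batch`
        # or `--with kubernetes`; a plain local run ignores them.
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == '__main__':
    BigStepFlow()
```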
+
+ @typing.overload
+ def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+     """
+     Specifies a timeout for your step.
+
+     This decorator is useful if this step may hang indefinitely.
+
+     This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
+     A timeout is considered to be an exception thrown by the step. It will cause the step to be
+     retried if needed and the exception will be caught by the `@catch` decorator, if present.
+
+     Note that all the values specified in parameters are added together so if you specify
+     60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
+
+     Parameters
+     ----------
+     seconds : int, default 0
+         Number of seconds to wait prior to timing out.
+     minutes : int, default 0
+         Number of minutes to wait prior to timing out.
+     hours : int, default 0
+         Number of hours to wait prior to timing out.
+     """
+     ...
+
+ @typing.overload
+ def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+     ...
+
+ @typing.overload
+ def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+     ...
+
+ def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
+     """
+     Specifies a timeout for your step.
+
+     This decorator is useful if this step may hang indefinitely.
+
+     This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
+     A timeout is considered to be an exception thrown by the step. It will cause the step to be
+     retried if needed and the exception will be caught by the `@catch` decorator, if present.
+
+     Note that all the values specified in parameters are added together so if you specify
+     60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
+
+     Parameters
+     ----------
+     seconds : int, default 0
+         Number of seconds to wait prior to timing out.
+     minutes : int, default 0
+         Number of minutes to wait prior to timing out.
+     hours : int, default 0
+         Number of hours to wait prior to timing out.
+     """
+     ...
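A sketch combining `@timeout` with `@retry` as described above (names and durations invented):

```
import time
from metaflow import FlowSpec, retry, step, timeout

class TimeoutFlow(FlowSpec):
    @retry(times=2)
    @timeout(minutes=5)
    @step
    def start(self):
        time.sleep(1)  # anything beyond 5 minutes raises and triggers a retry
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == '__main__':
    TimeoutFlow()
```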
+
+ @typing.overload
+ def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+     """
+     Specifies the Conda environment for the step.
+
+     Information in this decorator will augment any
+     attributes set in the `@conda_base` flow-level decorator. Hence,
+     you can use `@conda_base` to set packages required by all
+     steps and use `@conda` to specify step-specific overrides.
+
+     Parameters
+     ----------
+     packages : Dict[str, str], default {}
+         Packages to use for this step. The key is the name of the package
+         and the value is the version to use.
+     libraries : Dict[str, str], default {}
+         Supported for backward compatibility. When used with packages, packages will take precedence.
+     python : str, optional, default None
+         Version of Python to use, e.g. '3.7.4'. A default value of None implies
+         that the version used will correspond to the version of the Python interpreter used to start the run.
+     disabled : bool, default False
+         If set to True, disables @conda.
+     """
+     ...
+
+ @typing.overload
+ def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+     ...
+
+ @typing.overload
+ def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+     ...
+
+ def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
+     """
+     Specifies the Conda environment for the step.
+
+     Information in this decorator will augment any
+     attributes set in the `@conda_base` flow-level decorator. Hence,
+     you can use `@conda_base` to set packages required by all
+     steps and use `@conda` to specify step-specific overrides.
+
+     Parameters
+     ----------
+     packages : Dict[str, str], default {}
+         Packages to use for this step. The key is the name of the package
+         and the value is the version to use.
+     libraries : Dict[str, str], default {}
+         Supported for backward compatibility. When used with packages, packages will take precedence.
+     python : str, optional, default None
+         Version of Python to use, e.g. '3.7.4'. A default value of None implies
+         that the version used will correspond to the version of the Python interpreter used to start the run.
+     disabled : bool, default False
+         If set to True, disables @conda.
+     """
+     ...
1210
+
1211
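A sketch of how `@conda` augments a flow-level `@conda_base` (package names and versions here are illustrative): `@conda_base` pins the environment shared by all steps, and each `@conda` adds step-specific overrides:
```
from metaflow import FlowSpec, step, conda, conda_base

@conda_base(python="3.10.4", packages={"pandas": "1.5.3"})  # shared by all steps
class CondaFlow(FlowSpec):

    @conda(packages={"scikit-learn": "1.2.2"})  # step-specific addition
    @step
    def start(self):
        import sklearn  # resolved from this step's Conda environment
        self.next(self.end)

    @step
    def end(self):
        pass
```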
+ @typing.overload
1212
+ def batch(*, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = "METAFLOW_BATCH_JOB_QUEUE", iam_role: str = "METAFLOW_ECS_S3_ACCESS_IAM_ROLE", execution_role: str = "METAFLOW_ECS_FARGATE_EXECUTION_ROLE", shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, efa: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1213
+ """
1214
+ Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
1215
+
1216
+ Parameters
1217
+ ----------
1218
+ cpu : int, default 1
1219
+ Number of CPUs required for this step. If `@resources` is
1220
+ also present, the maximum value from all decorators is used.
1221
+ gpu : int, default 0
1222
+ Number of GPUs required for this step. If `@resources` is
1223
+ also present, the maximum value from all decorators is used.
1224
+ memory : int, default 4096
1225
+ Memory size (in MB) required for this step. If
1226
+ `@resources` is also present, the maximum value from all decorators is
1227
+ used.
1228
+ image : str, optional, default None
1229
+ Docker image to use when launching on AWS Batch. If not specified, and
1230
+ METAFLOW_BATCH_CONTAINER_IMAGE is specified, that image is used. If
1231
+ not, a default Docker image mapping to the current version of Python is used.
1232
+ queue : str, default METAFLOW_BATCH_JOB_QUEUE
1233
+ AWS Batch Job Queue to submit the job to.
1234
+ iam_role : str, default METAFLOW_ECS_S3_ACCESS_IAM_ROLE
1235
+ AWS IAM role that AWS Batch container uses to access AWS cloud resources.
1236
+ execution_role : str, default METAFLOW_ECS_FARGATE_EXECUTION_ROLE
1237
+ AWS IAM role that AWS Batch can use [to trigger AWS Fargate tasks]
1238
+ (https://docs.aws.amazon.com/batch/latest/userguide/execution-IAM-role.html).
1239
+ shared_memory : int, optional, default None
1240
+ The value for the size (in MiB) of the /dev/shm volume for this step.
1241
+ This parameter maps to the `--shm-size` option in Docker.
1242
+ max_swap : int, optional, default None
1243
+ The total amount of swap memory (in MiB) a container can use for this
1244
+ step. This parameter is translated to the `--memory-swap` option in
1245
+ Docker where the value is the sum of the container memory plus the
1246
+ `max_swap` value.
1247
+ swappiness : int, optional, default None
1248
+ This allows you to tune memory swappiness behavior for this step.
1249
+ A swappiness value of 0 causes swapping not to happen unless absolutely
1250
+ necessary. A swappiness value of 100 causes pages to be swapped very
1251
+ aggressively. Accepted values are whole numbers between 0 and 100.
1252
+ use_tmpfs : bool, default False
1253
+ This enables an explicit tmpfs mount for this step.
1254
+ tmpfs_tempdir : bool, default True
1255
+ Sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
1256
+ tmpfs_size : int, optional, default None
1257
+ The value for the size (in MiB) of the tmpfs mount for this step.
1258
+ This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
1259
+ memory allocated for this step.
1260
+ tmpfs_path : str, optional, default None
1261
+ Path to tmpfs mount for this step. Defaults to /metaflow_temp.
1262
+ inferentia : int, default 0
1263
+ Number of Inferentia chips required for this step.
1264
+ efa : int, default 0
1265
+ Number of elastic fabric adapter network devices to attach to the container.
1266
+ """
1267
+ ...
1268
+
1269
+ @typing.overload
1270
+ def batch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1271
+ ...
1272
+
1273
+ @typing.overload
1274
+ def batch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1275
+ ...
1276
+
1277
+ def batch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = "METAFLOW_BATCH_JOB_QUEUE", iam_role: str = "METAFLOW_ECS_S3_ACCESS_IAM_ROLE", execution_role: str = "METAFLOW_ECS_FARGATE_EXECUTION_ROLE", shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, efa: int = 0):
1278
+ """
1279
+ Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
1280
+
1281
+ Parameters
1282
+ ----------
1283
+ cpu : int, default 1
1284
+ Number of CPUs required for this step. If `@resources` is
1285
+ also present, the maximum value from all decorators is used.
1286
+ gpu : int, default 0
1287
+ Number of GPUs required for this step. If `@resources` is
1288
+ also present, the maximum value from all decorators is used.
1289
+ memory : int, default 4096
1290
+ Memory size (in MB) required for this step. If
1291
+ `@resources` is also present, the maximum value from all decorators is
1292
+ used.
1293
+ image : str, optional, default None
1294
+ Docker image to use when launching on AWS Batch. If not specified, and
1295
+ METAFLOW_BATCH_CONTAINER_IMAGE is specified, that image is used. If
1296
+ not, a default Docker image mapping to the current version of Python is used.
1297
+ queue : str, default METAFLOW_BATCH_JOB_QUEUE
1298
+ AWS Batch Job Queue to submit the job to.
1299
+ iam_role : str, default METAFLOW_ECS_S3_ACCESS_IAM_ROLE
1300
+ AWS IAM role that AWS Batch container uses to access AWS cloud resources.
1301
+ execution_role : str, default METAFLOW_ECS_FARGATE_EXECUTION_ROLE
1302
+ AWS IAM role that AWS Batch can use [to trigger AWS Fargate tasks]
1303
+ (https://docs.aws.amazon.com/batch/latest/userguide/execution-IAM-role.html).
1304
+ shared_memory : int, optional, default None
1305
+ The value for the size (in MiB) of the /dev/shm volume for this step.
1306
+ This parameter maps to the `--shm-size` option in Docker.
1307
+ max_swap : int, optional, default None
1308
+ The total amount of swap memory (in MiB) a container can use for this
1309
+ step. This parameter is translated to the `--memory-swap` option in
1310
+ Docker where the value is the sum of the container memory plus the
1311
+ `max_swap` value.
1312
+ swappiness : int, optional, default None
1313
+ This allows you to tune memory swappiness behavior for this step.
1314
+ A swappiness value of 0 causes swapping not to happen unless absolutely
1315
+ necessary. A swappiness value of 100 causes pages to be swapped very
1316
+ aggressively. Accepted values are whole numbers between 0 and 100.
1317
+ use_tmpfs : bool, default False
1318
+ This enables an explicit tmpfs mount for this step.
1319
+ tmpfs_tempdir : bool, default True
1320
+ Sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
1321
+ tmpfs_size : int, optional, default None
1322
+ The value for the size (in MiB) of the tmpfs mount for this step.
1323
+ This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
1324
+ memory allocated for this step.
1325
+ tmpfs_path : str, optional, default None
1326
+ Path to tmpfs mount for this step. Defaults to /metaflow_temp.
1327
+ inferentia : int, default 0
1328
+ Number of Inferentia chips required for this step.
1329
+ efa : int, default 0
1330
+ Number of elastic fabric adapter network devices to attach to the container.
1331
+ """
1332
+ ...
1333
+
1334
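A sketch of a step dispatched to AWS Batch (the queue name is illustrative; in practice `queue` and the IAM roles usually come from the Metaflow configuration rather than being spelled out in code):
```
from metaflow import FlowSpec, step, batch

class BatchFlow(FlowSpec):

    @batch(cpu=2, memory=8192, queue="my-batch-job-queue")  # queue name is illustrative
    @step
    def start(self):
        # runs inside an AWS Batch container with at least 2 vCPUs and 8 GB of memory
        self.next(self.end)

    @step
    def end(self):
        pass
```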
+ @typing.overload
1335
+ def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1336
+ """
1337
+ Specifies secrets to be retrieved and injected as environment variables prior to
1338
+ the execution of a step.
1339
+
1340
+ Parameters
1341
+ ----------
1342
+ sources : List[Union[str, Dict[str, Any]]], default: []
1343
+ List of secret specs, defining how the secrets are to be retrieved.
1344
+ """
1345
+ ...
1346
+
1347
+ @typing.overload
1348
+ def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1349
+ ...
1350
+
1351
+ @typing.overload
1352
+ def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1353
+ ...
1354
+
1355
+ def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []):
1356
+ """
1357
+ Specifies secrets to be retrieved and injected as environment variables prior to
1358
+ the execution of a step.
1359
+
1360
+ Parameters
1361
+ ----------
1362
+ sources : List[Union[str, Dict[str, Any]]], default: []
1363
+ List of secret specs, defining how the secrets are to be retrieved.
1364
+ """
1365
+ ...
1366
+
1367
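A sketch of `@secrets` (the secret id and environment variable name are illustrative and depend on what the configured secrets backend stores):
```
import os
from metaflow import FlowSpec, step, secrets

class SecretsFlow(FlowSpec):

    @secrets(sources=["db-credentials"])  # illustrative secret spec
    @step
    def start(self):
        # the secret's keys are injected as environment variables before the step body runs
        password = os.environ["DB_PASSWORD"]
        self.next(self.end)

    @step
    def end(self):
        pass
```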
+ @typing.overload
1368
+ def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1369
+ """
1370
+ Specifies the event(s) that this flow depends on.
1371
+
1372
+ ```
1373
+ @trigger(event='foo')
1374
+ ```
1375
+ or
1376
+ ```
1377
+ @trigger(events=['foo', 'bar'])
1378
+ ```
1379
+
1380
+ Additionally, you can specify the parameter mappings
1381
+ to map event payload to Metaflow parameters for the flow.
1382
+ ```
1383
+ @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
1384
+ ```
1385
+ or
1386
+ ```
1387
+ @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'}},
1388
+ {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}}])
1389
+ ```
1390
+
1391
+ 'parameters' can also be a list of strings and tuples like so:
1392
+ ```
1393
+ @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
1394
+ ```
1395
+ This is equivalent to:
1396
+ ```
1397
+ @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
1398
+ ```
1399
+
1400
+ Parameters
1401
+ ----------
1402
+ event : Union[str, Dict[str, Any]], optional, default None
1403
+ Event dependency for this flow.
1404
+ events : List[Union[str, Dict[str, Any]]], default []
1405
+ Events dependency for this flow.
1406
+ options : Dict[str, Any], default {}
1407
+ Backend-specific configuration for tuning eventing behavior.
1408
+
1409
+
1410
+ """
1411
+ ...
1412
+
1413
+ @typing.overload
1414
+ def trigger(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1415
+ ...
1416
+
1417
+ def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}):
1418
+ """
1419
+ Specifies the event(s) that this flow depends on.
1420
+
1421
+ ```
1422
+ @trigger(event='foo')
1423
+ ```
1424
+ or
1425
+ ```
1426
+ @trigger(events=['foo', 'bar'])
1427
+ ```
1428
+
1429
+ Additionally, you can specify the parameter mappings
1430
+ to map event payload to Metaflow parameters for the flow.
1431
+ ```
1432
+ @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
1433
+ ```
1434
+ or
1435
+ ```
1436
+ @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'}},
1437
+ {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}}])
1438
+ ```
1439
+
1440
+ 'parameters' can also be a list of strings and tuples like so:
1441
+ ```
1442
+ @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
1443
+ ```
1444
+ This is equivalent to:
1445
+ ```
1446
+ @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
1447
+ ```
1448
+
1449
+ Parameters
1450
+ ----------
1451
+ event : Union[str, Dict[str, Any]], optional, default None
1452
+ Event dependency for this flow.
1453
+ events : List[Union[str, Dict[str, Any]]], default []
1454
+ Events dependency for this flow.
1455
+ options : Dict[str, Any], default {}
1456
+ Backend-specific configuration for tuning eventing behavior.
1457
+
1458
+
1459
+ """
1460
+ ...
1461
+
1462
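A sketch of an event-triggered flow with a parameter mapping (event and field names are illustrative); the mapping routes a field of the event payload into a flow `Parameter`:
```
from metaflow import FlowSpec, Parameter, step, trigger

@trigger(event={"name": "data_updated",                  # illustrative event name
                "parameters": {"table": "table_name"}})  # payload field -> flow parameter
class TriggeredFlow(FlowSpec):
    table = Parameter("table", default="raw")

    @step
    def start(self):
        print("triggered for table", self.table)
        self.next(self.end)

    @step
    def end(self):
        pass
```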
+ def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1463
+ """
1464
+ The `@airflow_external_task_sensor` decorator attaches an Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
1465
+ This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as a flow decorator. Adding more than one decorator will ensure that the `start` step starts only after all sensors finish.
1466
+
1467
+ Parameters
1468
+ ----------
1469
+ timeout : int
1470
+ Time, in seconds, before the task times out and fails. (Default: 3600)
1471
+ poke_interval : int
1472
+ Time in seconds that the job should wait in between each try. (Default: 60)
1473
+ mode : str
1474
+ How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
1475
+ exponential_backoff : bool
1476
+ Allow progressively longer waits between pokes by using an exponential backoff algorithm. (Default: True)
1477
+ pool : str
1478
+ The slot pool this task should run in;
1479
+ slot pools are a way to limit concurrency for certain tasks. (Default: None)
1480
+ soft_fail : bool
1481
+ Set to True to mark the task as SKIPPED on failure. (Default: False)
1482
+ name : str
1483
+ Name of the sensor on Airflow
1484
+ description : str
1485
+ Description of sensor in the Airflow UI
1486
+ external_dag_id : str
1487
+ The dag_id that contains the task you want to wait for.
1488
+ external_task_ids : List[str]
1489
+ The list of task_ids that you want to wait for.
1490
+ If None (default value) the sensor waits for the DAG. (Default: None)
1491
+ allowed_states : List[str]
1492
+ Iterable of allowed states. (Default: ['success'])
1493
+ failed_states : List[str]
1494
+ Iterable of failed or disallowed states. (Default: None)
1495
+ execution_delta : datetime.timedelta
1496
+ time difference with the previous execution to look at,
1497
+ the default is the same logical date as the current task or DAG. (Default: None)
1498
+ check_existence: bool
1499
+ Set to True to check if the external task exists or check if
1500
+ the DAG to wait for exists. (Default: True)
1501
+ """
1502
+ ...
1503
+
1504
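A sketch of gating a flow on an upstream Airflow DAG (the DAG id and sensor name are illustrative; the remaining arguments are documented above with their defaults). The flow is then compiled with `airflow create`:
```
from metaflow import FlowSpec, step, airflow_external_task_sensor

@airflow_external_task_sensor(name="wait_for_etl",        # sensor task name on Airflow
                              external_dag_id="etl_dag")  # illustrative upstream DAG id
class DownstreamFlow(FlowSpec):

    @step
    def start(self):
        # reached only after the sensor sees 'etl_dag' succeed
        self.next(self.end)

    @step
    def end(self):
        pass
```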
+ @typing.overload
1505
+ def trigger_on_finish(*, flow: typing.Union[str, typing.Dict[str, str], None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1506
+ """
1507
+ Specifies the flow(s) that this flow depends on.
1508
+
1509
+ ```
1510
+ @trigger_on_finish(flow='FooFlow')
1511
+ ```
1512
+ or
1513
+ ```
1514
+ @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
1515
+ ```
1516
+ This decorator respects the @project decorator and triggers the flow
1517
+ when upstream runs within the same namespace complete successfully.
1518
+
1519
+ Additionally, you can specify project-aware upstream flow dependencies
1520
+ by specifying the fully qualified project_flow_name.
1521
+ ```
1522
+ @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
1523
+ ```
1524
+ or
1525
+ ```
1526
+ @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
1527
+ ```
1528
+
1529
+ You can also specify just the project or project branch (other values will be
1530
+ inferred from the current project or project branch):
1531
+ ```
1532
+ @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1533
+ ```
1534
+
1535
+ Note that `branch` is typically one of:
1536
+ - `prod`
1537
+ - `user.bob`
1538
+ - `test.my_experiment`
1539
+ - `prod.staging`
1540
+
1541
+ Parameters
1542
+ ----------
1543
+ flow : Union[str, Dict[str, str]], optional, default None
1544
+ Upstream flow dependency for this flow.
1545
+ flows : List[Union[str, Dict[str, str]]], default []
1546
+ Upstream flow dependencies for this flow.
1547
+ options : Dict[str, Any], default {}
1548
+ Backend-specific configuration for tuning eventing behavior.
1549
+
1550
+
1551
+ """
1552
+ ...
1553
+
1554
+ @typing.overload
1555
+ def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1556
+ ...
1557
+
1558
+ def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[str, typing.Dict[str, str], None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
1559
+ """
1560
+ Specifies the flow(s) that this flow depends on.
1561
+
1562
+ ```
1563
+ @trigger_on_finish(flow='FooFlow')
1564
+ ```
1565
+ or
1566
+ ```
1567
+ @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
1568
+ ```
1569
+ This decorator respects the @project decorator and triggers the flow
1570
+ when upstream runs within the same namespace complete successfully.
1571
+
1572
+ Additionally, you can specify project-aware upstream flow dependencies
1573
+ by specifying the fully qualified project_flow_name.
1574
+ ```
1575
+ @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
1576
+ ```
1577
+ or
1578
+ ```
1579
+ @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
1580
+ ```
1581
+
1582
+ You can also specify just the project or project branch (other values will be
1583
+ inferred from the current project or project branch):
1584
+ ```
1585
+ @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1586
+ ```
1587
+
1588
+ Note that `branch` is typically one of:
1589
+ - `prod`
1590
+ - `user.bob`
1591
+ - `test.my_experiment`
1592
+ - `prod.staging`
1593
+
1594
+ Parameters
1595
+ ----------
1596
+ flow : Union[str, Dict[str, str]], optional, default None
1597
+ Upstream flow dependency for this flow.
1598
+ flows : List[Union[str, Dict[str, str]]], default []
1599
+ Upstream flow dependencies for this flow.
1600
+ options : Dict[str, Any], default {}
1601
+ Backend-specific configuration for tuning eventing behavior.
1602
+
1603
+
1604
+ """
1605
+ ...
1606
+
1607
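A sketch of chaining flows (flow names are illustrative): `BarFlow` is deployed once and then starts automatically whenever `FooFlow` completes successfully within the same namespace:
```
from metaflow import FlowSpec, step, trigger_on_finish

@trigger_on_finish(flow="FooFlow")  # illustrative upstream flow name
class BarFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass
```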
+ def project(*, name: str) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1608
+ """
1609
+ Specifies what flows belong to the same project.
1610
+
1611
+ A project-specific namespace is created for all flows that
1612
+ use the same `@project(name)`.
1613
+
1614
+ Parameters
1615
+ ----------
1616
+ name : str
1617
+ Project name. Make sure that the name is unique amongst all
1618
+ projects that use the same production scheduler. The name may
1619
+ contain only lowercase alphanumeric characters and underscores.
1620
+
1621
+
1622
+ """
1623
+ ...
1624
+
1625
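A sketch of `@project` (the project name is illustrative; it may contain only lowercase alphanumeric characters and underscores, and must be unique per production scheduler):
```
from metaflow import FlowSpec, step, project

@project(name="demo_project")  # illustrative project name
class ProjectFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass
```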
+ def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1626
+ """
1627
+ The `@airflow_s3_key_sensor` decorator attaches an Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
1628
+ before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
1629
+ and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
1630
+ added as a flow decorator. Adding more than one decorator will ensure that the `start` step
1631
+ starts only after all sensors finish.
1632
+
1633
+ Parameters
1634
+ ----------
1635
+ timeout : int
1636
+ Time, in seconds, before the task times out and fails. (Default: 3600)
1637
+ poke_interval : int
1638
+ Time in seconds that the job should wait in between each try. (Default: 60)
1639
+ mode : str
1640
+ How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
1641
+ exponential_backoff : bool
1642
+ Allow progressively longer waits between pokes by using an exponential backoff algorithm. (Default: True)
1643
+ pool : str
1644
+ The slot pool this task should run in;
1645
+ slot pools are a way to limit concurrency for certain tasks. (Default: None)
1646
+ soft_fail : bool
1647
+ Set to True to mark the task as SKIPPED on failure. (Default: False)
1648
+ name : str
1649
+ Name of the sensor on Airflow
1650
+ description : str
1651
+ Description of sensor in the Airflow UI
1652
+ bucket_key : Union[str, List[str]]
1653
+ The key(s) being waited on. Supports a full s3:// style URL or a relative path from the root level.
1654
+ When it's specified as a full s3:// URL, please leave `bucket_name` as None.
1655
+ bucket_name : str
1656
+ Name of the S3 bucket. Only needed when bucket_key is not provided as a full s3:// URL.
1657
+ When specified, all the keys passed to bucket_key refer to this bucket. (Default: None)
1658
+ wildcard_match : bool
1659
+ Whether the bucket_key should be interpreted as a Unix wildcard pattern. (Default: False)
1660
+ aws_conn_id : str
1661
+ A reference to the S3 connection on Airflow. (Default: None)
1662
+ verify : bool
1663
+ Whether or not to verify SSL certificates for the S3 connection. (Default: None)
1664
+ """
1665
+ ...
1666
+
1667
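A sketch of gating a flow on an S3 key (bucket and key are illustrative; since `bucket_key` is a full s3:// URL, `bucket_name` is left unset as documented above):
```
from metaflow import FlowSpec, step, airflow_s3_key_sensor

@airflow_s3_key_sensor(name="wait_for_input",                        # sensor name on Airflow
                       bucket_key="s3://my-bucket/daily/input.csv")  # illustrative key
class S3GatedFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass
```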
+ @typing.overload
1668
+ def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1669
+ """
1670
+ Specifies the Conda environment for all steps of the flow.
1671
+
1672
+ Use `@conda_base` to set common libraries required by all
1673
+ steps and use `@conda` to specify step-specific additions.
1674
+
1675
+ Parameters
1676
+ ----------
1677
+ packages : Dict[str, str], default {}
1678
+ Packages to use for this flow. The key is the name of the package
1679
+ and the value is the version to use.
1680
+ libraries : Dict[str, str], default {}
1681
+ Supported for backward compatibility. When used with packages, packages will take precedence.
1682
+ python : str, optional, default None
1683
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1684
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1685
+ disabled : bool, default False
1686
+ If set to True, disables Conda.
1687
+ """
1688
+ ...
1689
+
1690
+ @typing.overload
1691
+ def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1692
+ ...
1693
+
1694
+ def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
1695
+ """
1696
+ Specifies the Conda environment for all steps of the flow.
1697
+
1698
+ Use `@conda_base` to set common libraries required by all
1699
+ steps and use `@conda` to specify step-specific additions.
1700
+
1701
+ Parameters
1702
+ ----------
1703
+ packages : Dict[str, str], default {}
1704
+ Packages to use for this flow. The key is the name of the package
1705
+ and the value is the version to use.
1706
+ libraries : Dict[str, str], default {}
1707
+ Supported for backward compatibility. When used with packages, packages will take precedence.
1708
+ python : str, optional, default None
1709
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1710
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1711
+ disabled : bool, default False
1712
+ If set to True, disables Conda.
1713
+ """
1714
+ ...
1715
+
1716
+ @typing.overload
1717
+ def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1718
+ """
1719
+ Specifies the times when the flow should be run when running on a
1720
+ production scheduler.
1721
+
1722
+ Parameters
1723
+ ----------
1724
+ hourly : bool, default False
1725
+ Run the workflow hourly.
1726
+ daily : bool, default True
1727
+ Run the workflow daily.
1728
+ weekly : bool, default False
1729
+ Run the workflow weekly.
1730
+ cron : str, optional, default None
1731
+ Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1732
+ specified by this expression.
1733
+ timezone : str, optional, default None
1734
+ Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1735
+ which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1736
+ """
1737
+ ...
1738
+
1739
+ @typing.overload
1740
+ def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1741
+ ...
1742
+
1743
+ def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
1744
+ """
1745
+ Specifies the times when the flow should be run when running on a
1746
+ production scheduler.
1747
+
1748
+ Parameters
1749
+ ----------
1750
+ hourly : bool, default False
1751
+ Run the workflow hourly.
1752
+ daily : bool, default True
1753
+ Run the workflow daily.
1754
+ weekly : bool, default False
1755
+ Run the workflow weekly.
1756
+ cron : str, optional, default None
1757
+ Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1758
+ specified by this expression.
1759
+ timezone : str, optional, default None
1760
+ Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1761
+ which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1762
+ """
1763
+ ...
1764
+
1765
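A sketch of a cron schedule (the expression and timezone are illustrative; per the docstring above, `timezone` currently only takes effect on Argo Workflows):
```
from metaflow import FlowSpec, step, schedule

@schedule(cron="0 6 * * *", timezone="America/Los_Angeles")  # illustrative: daily at 6 AM
class NightlyFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass
```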
+ @typing.overload
1766
+ def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1767
+ """
1768
+ Specifies the PyPI packages for all steps of the flow.
1769
+
1770
+ Use `@pypi_base` to set common packages required by all
1771
+ steps and use `@pypi` to specify step-specific overrides.
1772
+
+ Parameters
1773
+ ----------
1774
+ packages : Dict[str, str], default: {}
1775
+ Packages to use for this flow. The key is the name of the package
1776
+ and the value is the version to use.
1777
+ python : str, optional, default: None
1778
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1779
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1780
+ """
1781
+ ...
1782
+
1783
+ @typing.overload
1784
+ def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1785
+ ...
1786
+
1787
+ def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
1788
+ """
1789
+ Specifies the PyPI packages for all steps of the flow.
1790
+
1791
+ Use `@pypi_base` to set common packages required by all
1792
+ steps and use `@pypi` to specify step-specific overrides.
1793
+
+ Parameters
1794
+ ----------
1795
+ packages : Dict[str, str], default: {}
1796
+ Packages to use for this flow. The key is the name of the package
1797
+ and the value is the version to use.
1798
+ python : str, optional, default: None
1799
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1800
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1801
+ """
1802
+ ...
1803
+
1804
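A sketch of `@pypi_base` (package versions are illustrative), pinning a flow-wide PyPI environment that `@pypi` could then override per step:
```
from metaflow import FlowSpec, step, pypi_base

@pypi_base(python="3.10.4", packages={"requests": "2.31.0"})  # versions illustrative
class PypiFlow(FlowSpec):

    @step
    def start(self):
        import requests  # resolved from the flow-level PyPI environment
        self.next(self.end)

    @step
    def end(self):
        pass
```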
+ def namespace(ns: typing.Optional[str]) -> typing.Optional[str]:
1805
+ """
1806
+ Switch namespace to the one provided.
1807
+
1808
+ This call has a global effect. No objects outside this namespace
1809
+ will be accessible. To access all objects regardless of namespaces,
1810
+ pass None to this call.
1811
+
1812
+ Parameters
1813
+ ----------
1814
+ ns : str, optional
1815
+ Namespace to switch to or None to ignore namespaces.
1816
+
1817
+ Returns
1818
+ -------
1819
+ str, optional
1820
+ Namespace set (result of get_namespace()).
1821
+ """
1822
+ ...
1823
+
1824
+ def get_namespace() -> typing.Optional[str]:
1825
+ """
1826
+ Returns the namespace that is currently being used to filter objects.
1827
+
1828
+ The namespace is a tag associated with all objects in Metaflow.
1829
+
1830
+ Returns
1831
+ -------
1832
+ str, optional
1833
+ The current namespace used to filter objects.
1834
+ """
1835
+ ...
1836
+
1837
+ def default_namespace() -> str:
1838
+ """
1839
+ Resets the namespace used to filter objects to the default one, i.e. the one that was
1840
+ used prior to any `namespace` calls.
1841
+
1842
+ Returns
1843
+ -------
1844
+ str
1845
+ The result of get_namespace() after the namespace has been reset.
1846
+ """
1847
+ ...
1848
+
1849
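A sketch of switching namespaces from client code (the namespace tag and flow name are illustrative):
```
from metaflow import Flow, namespace, get_namespace, default_namespace

namespace("user:alice")          # illustrative namespace tag; the effect is global
print(get_namespace())           # -> 'user:alice'
run = Flow("MyFlow").latest_run  # only runs visible in this namespace

namespace(None)                  # disable namespace filtering entirely
default_namespace()              # restore the default namespace
```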
+ def get_metadata() -> str:
1850
+ """
1851
+ Returns the current Metadata provider.
1852
+
1853
+ If this is not set explicitly using `metadata`, the default value is
1854
+ determined through the Metaflow configuration. You can use this call to
1855
+ check that your configuration is set up properly.
1856
+
1857
+ If multiple configuration profiles are present, this call returns the one
1858
+ selected through the `METAFLOW_PROFILE` environment variable.
1859
+
1860
+ Returns
1861
+ -------
1862
+ str
1863
+ Information about the Metadata provider currently selected. This typically
1864
+ includes provider-specific information (like the URL for remote providers or local paths
1865
+ for local providers).
1866
+ """
1867
+ ...
1868
+
1869
+ def default_metadata() -> str:
1870
+ """
1871
+ Resets the Metadata provider to the default value, that is, to the value
1872
+ that was used prior to any `metadata` calls.
1873
+
1874
+ Returns
1875
+ -------
1876
+ str
1877
+ The result of get_metadata() after resetting the provider.
1878
+ """
1879
+ ...
1880
+
1881
+ class Metaflow(object, metaclass=type):
1882
+ def __init__(self):
1883
+ ...
1884
+ @property
1885
+ def flows(self) -> typing.List[metaflow.client.core.Flow]:
1886
+ """
1887
+ Returns a list of all the flows present.
1888
+
1889
+ Only flows present in the set namespace are returned. A flow is present in a namespace if
1890
+ it has at least one run that is in the namespace.
1891
+
1892
+ Returns
1893
+ -------
1894
+ List[Flow]
1895
+ List of all flows present.
1896
+ """
1897
+ ...
1898
+ def __iter__(self) -> typing.Iterator[metaflow.client.core.Flow]:
1899
+ """
1900
+ Iterator over all flows present.
1901
+
1902
+ Only flows present in the set namespace are returned. A flow is present in a
1903
+ namespace if it has at least one run that is in the namespace.
1904
+
1905
+ Yields
1906
+ -------
1907
+ Flow
1908
+ A Flow present in the Metaflow universe.
1909
+ """
1910
+ ...
1911
+ def __str__(self) -> str:
1912
+ ...
1913
+ def __getitem__(self, name: str) -> metaflow.client.core.Flow:
1914
+ """
1915
+ Returns a specific flow by name.
1916
+
1917
+ The flow will only be returned if it is present in the current namespace.
1918
+
1919
+ Parameters
1920
+ ----------
1921
+ name : str
1922
+ Name of the Flow
1923
+
1924
+ Returns
1925
+ -------
1926
+ Flow
1927
+ Flow with the given name.
1928
+ """
1929
+ ...
1930
+ ...
1931
+
1932
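A sketch of top-level navigation with the `Metaflow` object (the flow name is illustrative); only flows with at least one run in the current namespace are visible:
```
from metaflow import Metaflow

mf = Metaflow()
for flow in mf:          # same contents as mf.flows
    print(flow)
my_flow = mf["MyFlow"]   # illustrative name; the flow must exist in the namespace
```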
+ class Flow(metaflow.client.core.MetaflowObject, metaclass=type):
1933
+ def __init__(self, *args, **kwargs):
1934
+ ...
1935
+ @property
1936
+ def latest_run(self) -> typing.Optional[metaflow.client.core.Run]:
1937
+ """
1938
+ Returns the latest run (either in progress or completed) of this flow.
1939
+
1940
+ Note that an in-progress run may be returned by this call. Use latest_successful_run
1941
+ to get an object representing a completed successful run.
1942
+
1943
+ Returns
1944
+ -------
1945
+ Run, optional
1946
+ Latest run of this flow
1947
+ """
1948
+ ...
1949
+ @property
1950
+ def latest_successful_run(self) -> typing.Optional[metaflow.client.core.Run]:
1951
+ """
1952
+ Returns the latest successful run of this flow.
1953
+
1954
+ Returns
1955
+ -------
1956
+ Run, optional
1957
+ Latest successful run of this flow
1958
+ """
1959
+ ...
1960
+ def runs(self, *tags: str) -> typing.Iterator[metaflow.client.core.Run]:
1961
+ """
1962
+ Returns an iterator over all `Run`s of this flow.
1963
+
1964
+ An optional filter is available that allows you to filter on tags.
1965
+ If multiple tags are specified, only runs that have all the
1966
+ specified tags are returned.
1967
+
1968
+ Parameters
1969
+ ----------
1970
+ tags : str
1971
+ Tags to match.
1972
+
1973
+ Yields
1974
+ ------
1975
+ Run
1976
+ `Run` objects in this flow.
1977
+ """
1978
+ ...
1979
+ def __iter__(self) -> typing.Iterator[metaflow.client.core.Run]:
1980
+ """
1981
+ Iterate over all children Run of this Flow.
1982
+
1983
+ Note that only runs in the current namespace are returned unless
1984
+ _namespace_check is False.
1985
+
1986
+ Yields
1987
+ ------
1988
+ Run
1989
+ A Run in this Flow
1990
+ """
1991
+ ...
1992
+ def __getitem__(self, run_id: str) -> metaflow.client.core.Run:
1993
+ """
1994
+ Returns the Run object with the run ID 'run_id'
1995
+
1996
+ Parameters
1997
+ ----------
1998
+ run_id : str
1999
+ Run ID
2000
+
2001
+ Returns
2002
+ -------
2003
+ Run
2004
+ Run for this run ID in this Flow
2005
+
2006
+ Raises
2007
+ ------
2008
+ KeyError
2009
+ If the run_id does not identify a valid Run object
2010
+ """
2011
+ ...
2012
+ def __getstate__(self):
2013
+ ...
2014
+ def __setstate__(self, state):
2015
+ ...
2016
+ ...
2017
+
2018
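A sketch of the `Flow`-level accessors (the flow name and tag are illustrative):
```
from metaflow import Flow

flow = Flow("MyFlow")              # illustrative flow name
run = flow.latest_successful_run   # None until some run has succeeded
for r in flow.runs("candidate"):   # only runs carrying the 'candidate' tag
    print(r, r.finished)
```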
+ class Run(metaflow.client.core.MetaflowObject, metaclass=type):
2019
+ def steps(self, *tags: str) -> typing.Iterator[metaflow.client.core.Step]:
2020
+ """
2021
+ [Legacy function - do not use]
2022
+
2023
+ Returns an iterator over all `Step` objects in the run. This is an alias
2024
+ to iterating the object itself, i.e.
2025
+ ```
2026
+ list(Run(...)) == list(Run(...).steps())
2027
+ ```
2028
+
2029
+ Parameters
2030
+ ----------
2031
+ tags : str
2032
+ No op (legacy functionality)
2033
+
2034
+ Yields
2035
+ ------
2036
+ Step
2037
+ `Step` objects in this run.
2038
+ """
2039
+ ...
2040
+ @property
2041
+ def code(self) -> typing.Optional[metaflow.client.core.MetaflowCode]:
2042
+ """
2043
+ Returns the MetaflowCode object for this run, if present.
2044
+ Code is packed if at least one `Step` runs remotely, else None is returned.
2045
+
2046
+ Returns
2047
+ -------
2048
+ MetaflowCode, optional
2049
+ Code package for this run
2050
+ """
2051
+ ...
2052
+ @property
2053
+ def data(self) -> typing.Optional[metaflow.client.core.MetaflowData]:
2054
+ """
2055
+ Returns a container of data artifacts produced by this run.
2056
+
2057
+ You can access data produced by this run as follows:
2058
+ ```
2059
+ print(run.data.my_var)
2060
+ ```
2061
+ This is a shorthand for `run['end'].task.data`. If the 'end' step has not yet
2062
+ executed, returns None.
2063
+
2064
+ Returns
2065
+ -------
2066
+ MetaflowData, optional
2067
+ Container of all artifacts produced by this task
2068
+ """
2069
+ ...
2070
+ @property
2071
+ def successful(self) -> bool:
2072
+ """
2073
+ Indicates whether or not the run completed successfully.
2074
+
2075
+ A run is successful if its 'end' step is successful.
2076
+
2077
+ Returns
2078
+ -------
2079
+ bool
2080
+ True if the run completed successfully and False otherwise
2081
+ """
2082
+ ...
2083
+ @property
2084
+ def finished(self) -> bool:
2085
+ """
2086
+ Indicates whether or not the run completed.
2087
+
2088
+ A run completed if its 'end' step completed.
2089
+
2090
+ Returns
2091
+ -------
2092
+ bool
2093
+ True if the run completed and False otherwise
2094
+ """
2095
+ ...
2096
+ @property
2097
+ def finished_at(self) -> typing.Optional[datetime.datetime]:
2098
+ """
2099
+ Returns the datetime object of when the run finished (successfully or not).
2100
+
2101
+ The completion time of a run is the same as the completion time of its 'end' step.
2102
+ If the 'end' step has not completed, returns None.
2103
+
2104
+ Returns
2105
+ -------
2106
+ datetime, optional
2107
+ Datetime of when the run finished
2108
+ """
2109
+ ...
2110
+ @property
2111
+ def end_task(self) -> typing.Optional[metaflow.client.core.Task]:
2112
+ """
2113
+ Returns the Task corresponding to the 'end' step.
2114
+
2115
+ This returns None if the end step does not yet exist.
2116
+
2117
+ Returns
2118
+ -------
2119
+ Task, optional
2120
+ The 'end' task
2121
+ """
2122
+ ...
2123
+ def add_tag(self, tag: str):
2124
+ """
2125
+ Add a tag to this `Run`.
2126
+
2127
+ Note that if the tag is already a system tag, it is not added as a user tag,
2128
+ and no error is thrown.
2129
+
2130
+ Parameters
2131
+ ----------
2132
+ tag : str
2133
+ Tag to add.
2134
+ """
2135
+ ...
2136
+ def add_tags(self, tags: typing.Iterable[str]):
2137
+ """
2138
+ Add one or more tags to this `Run`.
2139
+
2140
+ Note that if any tag is already a system tag, it is not added as a user tag
2141
+ and no error is thrown.
2142
+
2143
+ Parameters
2144
+ ----------
2145
+ tags : Iterable[str]
2146
+ Tags to add.
2147
+ """
2148
+ ...
2149
+ def remove_tag(self, tag: str):
2150
+ """
2151
+ Remove one tag from this `Run`.
2152
+
2153
+ Removing a system tag is an error. Removing a non-existent
2154
+ user tag is a no-op.
2155
+
2156
+ Parameters
2157
+ ----------
2158
+ tag : str
2159
+ Tag to remove.
2160
+ """
2161
+ ...
2162
+ def remove_tags(self, tags: typing.Iterable[str]):
2163
+ """
2164
+ Remove one or more tags from this `Run`.
2165
+
2166
+ Removing a system tag will result in an error. Removing a non-existent
2167
+ user tag is a no-op.
2168
+
2169
+ Parameters
2170
+ ----------
2171
+ tags : Iterable[str]
2172
+ Tags to remove.
2173
+ """
2174
+ ...
2175
+ def replace_tag(self, tag_to_remove: str, tag_to_add: str):
2176
+ """
2177
+ Remove a tag and add a tag atomically. Removal is done first.
2178
+ The rules for `Run.add_tag` and `Run.remove_tag` also apply here.
2179
+
2180
+ Parameters
2181
+ ----------
2182
+ tag_to_remove : str
2183
+ Tag to remove.
2184
+ tag_to_add : str
2185
+ Tag to add.
2186
+ """
2187
+ ...
2188
+ def replace_tags(self, tags_to_remove: typing.Iterable[str], tags_to_add: typing.Iterable[str]):
2189
+ """
2190
+ Remove and add tags atomically; the removal is done first.
2191
+ The rules for `Run.add_tag` and `Run.remove_tag` also apply here.
2192
+
2193
+ Parameters
2194
+ ----------
2195
+ tags_to_remove : Iterable[str]
2196
+ Tags to remove.
2197
+ tags_to_add : Iterable[str]
2198
+ Tags to add.
2199
+ """
2200
+ ...
2201
+ def __iter__(self) -> typing.Iterator[metaflow.client.core.Step]:
2202
+ """
2203
+ Iterate over all children Step of this Run
2204
+
2205
+ Yields
2206
+ ------
2207
+ Step
2208
+ A Step in this Run
2209
+ """
2210
+ ...
2211
+ def __getitem__(self, name: str) -> metaflow.client.core.Step:
2212
+ """
2213
+ Returns the Step object with the step name 'name'
2214
+
2215
+ Parameters
2216
+ ----------
2217
+ name : str
2218
+ Step name
2219
+
2220
+ Returns
2221
+ -------
2222
+ Step
2223
+ Step for this step name in this Run
2224
+
2225
+ Raises
2226
+ ------
2227
+ KeyError
2228
+ If the name does not identify a valid Step object
2229
+ """
2230
+ ...
2231
+ def __getstate__(self):
2232
+ ...
2233
+ def __setstate__(self, state):
2234
+ ...
2235
+ @property
2236
+ def trigger(self) -> typing.Optional[metaflow.events.Trigger]:
2237
+ """
2238
+ Returns a container of events that triggered this run.
2239
+
2240
+ This returns None if the run was not triggered by any events.
2241
+
2242
+ Returns
2243
+ -------
2244
+ Trigger, optional
2245
+ Container of triggering events
2246
+ """
2247
+ ...
2248
+ ...
2249
+
2250
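A sketch combining the `Run` accessors and tag mutators above (the artifact and tag names are illustrative):
```
from metaflow import Flow

run = Flow("MyFlow").latest_run
if run is not None and run.successful:
    print(run.data.my_var)                    # 'my_var' is an illustrative artifact
    run.replace_tag("candidate", "deployed")  # atomic: removal happens first
```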
+ class Step(metaflow.client.core.MetaflowObject, metaclass=type):
2251
+ @property
2252
+ def task(self) -> typing.Optional[metaflow.client.core.Task]:
2253
+ """
2254
+ Returns a Task object belonging to this step.
2255
+
2256
+ This is useful when the step only contains one task (a linear step for example).
2257
+
2258
+ Returns
2259
+ -------
2260
+ Task
2261
+ A task in the step
2262
+ """
2263
+ ...
2264
+ def tasks(self, *tags: str) -> typing.Iterable[metaflow.client.core.Task]:
2265
+ """
2266
+ [Legacy function - do not use]
2267
+
2268
+ Returns an iterator over all `Task` objects in the step. This is an alias
2269
+ to iterating the object itself, i.e.
2270
+ ```
2271
+ list(Step(...)) == list(Step(...).tasks())
2272
+ ```
2273
+
2274
+ Parameters
2275
+ ----------
2276
+ tags : str
2277
+ No op (legacy functionality)
2278
+
2279
+ Yields
2280
+ ------
2281
+ Task
2282
+ `Task` objects in this step.
2283
+ """
2284
+ ...
2285
+ @property
2286
+ def control_task(self) -> typing.Optional[metaflow.client.core.Task]:
2287
+ """
2288
+ [Unpublished API - use with caution!]
2289
+
2290
+ Returns a Control Task object belonging to this step.
2291
+ This is useful when the step only contains one control task.
2292
+
2293
+ Returns
2294
+ -------
2295
+ Task
2296
+ A control task in the step
2297
+ """
2298
+ ...
2299
+ def control_tasks(self, *tags: str) -> typing.Iterator[metaflow.client.core.Task]:
2300
+ """
2301
+ [Unpublished API - use with caution!]
2302
+
2303
+ Returns an iterator over all the control tasks in the step.
2304
+ An optional filter is available that allows you to filter on tags. If
2305
+ the filter is specified, the control tasks returned will contain all the
2306
+ specified tags.
2307
+
+ Parameters
2308
+ ----------
2309
+ tags : str
2310
+ Tags to match
2311
+
2312
+ Yields
2313
+ ------
2314
+ Task
2315
+ Control Task objects for this step
2316
+ """
2317
+ ...
2318
+ def __iter__(self) -> typing.Iterator[metaflow.client.core.Task]:
2319
+ """
2320
+ Iterate over all children Task of this Step
2321
+
2322
+ Yields
2323
+ ------
2324
+ Task
2325
+ A Task in this Step
2326
+ """
2327
+ ...
2328
+ def __getitem__(self, task_id: str) -> metaflow.client.core.Task:
2329
+ """
2330
+ Returns the Task object with the task ID 'task_id'
2331
+
2332
+ Parameters
2333
+ ----------
2334
+ task_id : str
2335
+ Task ID
2336
+
2337
+ Returns
2338
+ -------
2339
+ Task
2340
+ Task for this task ID in this Step
2341
+
2342
+ Raises
2343
+ ------
2344
+ KeyError
2345
+ If the task_id does not identify a valid Task object
2346
+ """
2347
+ ...
2348
+ def __getstate__(self):
2349
+ ...
2350
+ def __setstate__(self, state):
2351
+ ...
2352
+ @property
2353
+ def finished_at(self) -> typing.Optional[datetime.datetime]:
2354
+ """
2355
+ Returns the datetime object of when the step finished (successfully or not).
2356
+
2357
+ A step is considered finished when all the tasks that belong to it have
2358
+ finished. This call will return None if the step has not finished.
2359
+
2360
+ Returns
2361
+ -------
2362
+ datetime
2363
+ Datetime of when the step finished
2364
+ """
2365
+ ...
2366
+ @property
2367
+ def environment_info(self) -> typing.Optional[typing.Dict[str, typing.Any]]:
2368
+ """
2369
+ Returns information about the environment that was used to execute this step. As an
2370
+ example, if the Conda environment is selected, this will return information about the
2371
+ dependencies that were used in the environment.
2372
+
2373
+ This environment information is only available for steps that have tasks
2374
+ for which the code package has been saved.
2375
+
2376
+ Returns
2377
+ -------
2378
+ Dict[str, Any], optional
2379
+ Dictionary describing the environment
2380
+ """
2381
+ ...
2382
+ ...
2383
+
2384
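A sketch of `Step`-level navigation (the pathspec is illustrative); a foreach step contains one `Task` per split, distinguished by `Task.index`:
```
from metaflow import Step

train = Step("MyFlow/1234/train")  # illustrative pathspec: flow/run_id/step
print(train.finished_at)
for task in train:                 # one Task per foreach split, if any
    print(task.index, task.successful)
```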
+ class Task(metaflow.client.core.MetaflowObject, metaclass=type):
2385
+ def __init__(self, *args, **kwargs):
2386
+ ...
2387
+ @property
2388
+ def metadata(self) -> typing.List[metaflow.client.core.Metadata]:
2389
+ """
2390
+ Metadata events produced by this task across all attempts of the task
2391
+ *except* if you selected a specific task attempt.
2392
+
2393
+ Note that Metadata is different from tags.
2394
+
2395
+ Returns
2396
+ -------
2397
+ List[Metadata]
2398
+ Metadata produced by this task
2399
+ """
2400
+ ...
2401
+ @property
2402
+ def metadata_dict(self) -> typing.Dict[str, str]:
2403
+ """
2404
+ Dictionary mapping metadata names (keys) and their associated values.
2405
+
2406
+ Note that unlike the metadata() method, this call will only return the latest
2407
+ metadata for a given name. For example, if a task executes multiple times (retries),
2408
+ the same metadata name will be generated multiple times (one for each execution of the
2409
+ task). The metadata() method returns all those metadata elements whereas this call will
2410
+ return the metadata associated with the latest execution of the task.
2411
+
2412
+ Returns
2413
+ -------
2414
+ Dict[str, str]
2415
+ Dictionary mapping metadata name with value
2416
+ """
2417
+ ...
2418
+ @property
2419
+ def index(self) -> typing.Optional[int]:
2420
+ """
2421
+ Returns the index of the innermost foreach loop if this task is run inside at least
2422
+ one foreach.
2423
+
2424
+ The index is what distinguishes the various tasks inside a given step.
2425
+ This call returns None if this task was not run in a foreach loop.
2426
+
2427
+ Returns
2428
+ -------
2429
+ int, optional
2430
+ Index in the innermost loop for this task
2431
+ """
2432
+ ...
2433
+ @property
2434
+ def data(self) -> metaflow.client.core.MetaflowData:
2435
+ """
2436
+ Returns a container of data artifacts produced by this task.
2437
+
2438
+ You can access data produced by this task as follows:
2439
+ ```
2440
+ print(task.data.my_var)
2441
+ ```
2442
+
2443
+ Returns
2444
+ -------
2445
+ MetaflowData
2446
+ Container of all artifacts produced by this task
2447
+ """
2448
+ ...
2449
+ @property
2450
+ def artifacts(self) -> typing.NamedTuple:
2451
+ """
2452
+ Returns a container of DataArtifacts produced by this task.
2453
+
2454
+ You can access each DataArtifact by name like so:
2455
+ ```
2456
+ print(task.artifacts.my_var)
2457
+ ```
2458
+ This method differs from data() because it returns DataArtifact objects
2459
+ (which contain additional metadata) as opposed to just the data.
2460
+
2461
+ Returns
2462
+ -------
2463
+ MetaflowArtifacts
2464
+ Container of all DataArtifacts produced by this task
2465
+ """
2466
+ ...
2467
+ @property
2468
+ def successful(self) -> bool:
2469
+ """
2470
+ Indicates whether or not the task completed successfully.
2471
+
2472
+ This information is always about the latest task to have completed (in case
2473
+ of retries).
2474
+
2475
+ Returns
2476
+ -------
2477
+ bool
2478
+ True if the task completed successfully and False otherwise
2479
+ """
2480
+ ...
2481
+ @property
2482
+ def finished(self) -> bool:
2483
+ """
2484
+ Indicates whether or not the task completed.
2485
+
2486
+ This information is always about the latest task to have completed (in case
2487
+ of retries).
2488
+
2489
+ Returns
2490
+ -------
2491
+ bool
2492
+ True if the task completed and False otherwise
2493
+ """
2494
+ ...
2495
+ @property
2496
+ def exception(self) -> typing.Optional[typing.Any]:
2497
+ """
2498
+ Returns the exception that caused the task to fail, if any.
2499
+
2500
+ This information is always about the latest task to have completed (in case
2501
+ of retries). If successful() returns False and finished() returns True,
2502
+ this method can help determine what went wrong.
2503
+
2504
+ Returns
2505
+ -------
2506
+ object
2507
+ Exception raised by the task or None if not applicable
2508
+ """
2509
+ ...
2510
+ @property
2511
+ def finished_at(self) -> typing.Optional[datetime.datetime]:
2512
+ """
2513
+ Returns the datetime object of when the task finished (successfully or not).
2514
+
2515
+ This information is always about the latest task to have completed (in case
2516
+ of retries). This call will return None if the task is not finished.
2517
+
2518
+ Returns
2519
+ -------
2520
+ datetime
2521
+ Datetime of when the task finished
2522
+ """
2523
+ ...
2524
+ @property
2525
+ def runtime_name(self) -> typing.Optional[str]:
2526
+ """
2527
+ Returns the name of the runtime this task executed on.
2528
+
2529
+ Returns
2531
+ -------
2532
+ str
2533
+ Name of the runtime this task executed on
2534
+ """
2535
+ ...
2536
+ @property
2537
+ def stdout(self) -> str:
2538
+ """
2539
+ Returns the full standard out of this task.
2540
+
2541
+ If you specify a specific attempt for this task, it will return the
2542
+ standard out for that attempt. If you do not specify an attempt,
2543
+ this will return the current standard out for the latest *started*
2544
+ attempt of the task. In both cases, multiple calls to this
2545
+ method will return the most up-to-date log (so if an attempt is not
2546
+ done, each call will fetch the latest log).
2547
+
2548
+ Returns
2549
+ -------
2550
+ str
2551
+ Standard output of this task
2552
+ """
2553
+ ...
2554
+ @property
2555
+ def stdout_size(self) -> int:
2556
+ """
2557
+ Returns the size of the stdout log of this task.
2558
+
2559
+ Similar to `stdout`, the size returned is the latest size of the log
2560
+ (so for a running attempt, this value will increase as the task produces
2561
+ more output).
2562
+
2563
+ Returns
2564
+ -------
2565
+ int
2566
+ Size of the stdout log content (in bytes)
2567
+ """
2568
+ ...
2569
+ @property
2570
+ def stderr(self) -> str:
2571
+ """
2572
+ Returns the full standard error of this task.
2573
+
2574
+ If you specify a specific attempt for this task, it will return the
2575
+ standard error for that attempt. If you do not specify an attempt,
2576
+ this will return the current standard error for the latest *started*
2577
+ attempt. In both cases, multiple calls to this
2578
+ method will return the most up-to-date log (so if an attempt is not
2579
+ done, each call will fetch the latest log).
2580
+
2581
+ Returns
2582
+ -------
2583
+ str
2584
+ Standard error of this task
2585
+ """
2586
+ ...
2587
+ @property
2588
+ def stderr_size(self) -> int:
2589
+ """
2590
+ Returns the size of the stderr log of this task.
2591
+
2592
+ Similar to `stderr`, the size returned is the latest size of the log
2593
+ (so for a running attempt, this value will increase as the task produces
2594
+ more output).
2595
+
2596
+ Returns
2597
+ -------
2598
+ int
2599
+ Size of the stderr log content (in bytes)
2600
+ """
2601
+ ...
2602
+ @property
2603
+ def current_attempt(self) -> int:
2604
+ """
2605
+ Get the relevant attempt for this Task.
2606
+
2607
+ Returns the specific attempt used when
2608
+ initializing the instance, or the latest *started* attempt for the Task.
2609
+
2610
+ Returns
2611
+ -------
2612
+ int
2613
+ attempt id for this task object
2614
+ """
2615
+ ...
2616
+ @property
2617
+ def code(self) -> typing.Optional[metaflow.client.core.MetaflowCode]:
2618
+ """
2619
+ Returns the MetaflowCode object for this task, if present.
2620
+
2621
+ Not all tasks save their code so this call may return None in those cases.
2622
+
2623
+ Returns
2624
+ -------
2625
+ MetaflowCode
2626
+ Code package for this task
2627
+ """
2628
+ ...
2629
+ @property
2630
+ def environment_info(self) -> typing.Dict[str, typing.Any]:
2631
+ """
2632
+ Returns information about the environment that was used to execute this task. As an
2633
+ example, if the Conda environment is selected, this will return information about the
2634
+ dependencies that were used in the environment.
2635
+
2636
+ This environment information is only available for tasks that have a code package.
2637
+
2638
+ Returns
2639
+ -------
2640
+ Dict
2641
+ Dictionary describing the environment
2642
+ """
2643
+ ...
2644
+ def loglines(self, stream: str, as_unicode: bool = True, meta_dict: typing.Optional[typing.Dict[str, typing.Any]] = None) -> typing.Iterator[typing.Tuple[datetime.datetime, str]]:
2645
+ """
2646
+ Return an iterator over (utc_timestamp, logline) tuples.
2647
+
2648
+ Parameters
2649
+ ----------
2650
+ stream : str
2651
+ Either 'stdout' or 'stderr'.
2652
+ as_unicode : bool, default: True
2653
+ If as_unicode=False, each logline is returned as a byte object. Otherwise,
2654
+ it is returned as a (unicode) string.
2655
+
2656
+ Yields
2657
+ ------
2658
+ Tuple[datetime, str]
2659
+ Tuple of timestamp, logline pairs.
2660
+ """
2661
+ ...
2662
+ def __iter__(self) -> typing.Iterator[metaflow.client.core.DataArtifact]:
2663
+ """
2664
+ Iterate over all children DataArtifact of this Task
2665
+
2666
+ Yields
2667
+ ------
2668
+ DataArtifact
2669
+ A DataArtifact in this Task
2670
+ """
2671
+ ...
2672
+ def __getitem__(self, name: str) -> metaflow.client.core.DataArtifact:
2673
+ """
2674
+ Returns the DataArtifact object with the artifact name 'name'
2675
+
2676
+ Parameters
2677
+ ----------
2678
+ name : str
2679
+ Data artifact name
2680
+
2681
+ Returns
2682
+ -------
2683
+ DataArtifact
2684
+ DataArtifact for this artifact name in this task
2685
+
2686
+ Raises
2687
+ ------
2688
+ KeyError
2689
+ If the name does not identify a valid DataArtifact object
2690
+ """
2691
+ ...
2692
+ def __getstate__(self):
2693
+ ...
2694
+ def __setstate__(self, state):
2695
+ ...
2696
+ ...
2697
+
2698
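A sketch of inspecting a finished `Task` (the pathspec and artifact name are illustrative):
```
from metaflow import Task

task = Task("MyFlow/1234/train/5678")      # illustrative pathspec: flow/run/step/task
print(task.stdout_size, task.stderr_size)  # sizes in bytes of the latest attempt's logs
for ts, line in task.loglines("stdout"):   # (utc_timestamp, logline) tuples
    print(ts.isoformat(), line)
print(task["model_score"].data)            # unpickled value of one DataArtifact
```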
+ class DataArtifact(metaflow.client.core.MetaflowObject, metaclass=type):
2699
+ @property
2700
+ def data(self) -> typing.Any:
2701
+ """
2702
+ Unpickled representation of the data contained in this artifact.
2703
+
2704
+ Returns
2705
+ -------
2706
+ object
2707
+ Object contained in this artifact
2708
+ """
2709
+ ...
2710
+ @property
2711
+ def size(self) -> int:
2712
+ """
2713
+ Returns the size (in bytes) of the pickled object representing this
2714
+ DataArtifact.
2715
+
2716
+ Returns
2717
+ -------
2718
+ int
2719
+ size of the pickled representation of data artifact (in bytes)
2720
+ """
2721
+ ...
2722
+ @property
2723
+ def sha(self) -> str:
2724
+ """
2725
+ Unique identifier for this artifact.
2726
+
2727
+ This is a unique hash of the artifact (historically a SHA1 hash).
2728
+
2729
+ Returns
2730
+ -------
2731
+ str
2732
+ Hash of this artifact
2733
+ """
2734
+ ...
2735
+ @property
2736
+ def finished_at(self) -> datetime.datetime:
2737
+ """
2738
+ Creation time for this artifact.
2739
+
2740
+ Alias for created_at.
2741
+
2742
+ Returns
2743
+ -------
2744
+ datetime
2745
+ Creation time
2746
+ """
2747
+ ...
2748
+ def __getstate__(self):
2749
+ ...
2750
+ def __setstate__(self, state):
2751
+ ...
2752
+ ...
2753
+