ob-metaflow 2.12.27.1__py2.py3-none-any.whl → 2.12.30.2__py2.py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of ob-metaflow might be problematic.
- metaflow/__init__.py +2 -3
- metaflow/cli.py +27 -0
- metaflow/client/core.py +2 -2
- metaflow/clone_util.py +1 -1
- metaflow/cmd/develop/stub_generator.py +623 -233
- metaflow/datastore/task_datastore.py +1 -1
- metaflow/extension_support/plugins.py +1 -0
- metaflow/flowspec.py +2 -2
- metaflow/includefile.py +8 -14
- metaflow/metaflow_config.py +4 -7
- metaflow/metaflow_current.py +1 -1
- metaflow/parameters.py +3 -0
- metaflow/plugins/__init__.py +12 -8
- metaflow/plugins/airflow/airflow_cli.py +5 -0
- metaflow/plugins/airflow/airflow_decorator.py +1 -1
- metaflow/plugins/argo/argo_workflows_decorator.py +1 -1
- metaflow/plugins/argo/argo_workflows_deployer.py +77 -363
- metaflow/plugins/argo/argo_workflows_deployer_objects.py +381 -0
- metaflow/plugins/aws/batch/batch_cli.py +1 -1
- metaflow/plugins/aws/batch/batch_decorator.py +2 -2
- metaflow/plugins/aws/step_functions/step_functions_cli.py +7 -0
- metaflow/plugins/aws/step_functions/step_functions_decorator.py +1 -1
- metaflow/plugins/aws/step_functions/step_functions_deployer.py +65 -224
- metaflow/plugins/aws/step_functions/step_functions_deployer_objects.py +236 -0
- metaflow/plugins/azure/includefile_support.py +2 -0
- metaflow/plugins/cards/card_cli.py +3 -2
- metaflow/plugins/cards/card_modules/components.py +9 -9
- metaflow/plugins/cards/card_server.py +39 -14
- metaflow/plugins/datatools/local.py +2 -0
- metaflow/plugins/datatools/s3/s3.py +2 -0
- metaflow/plugins/env_escape/__init__.py +3 -3
- metaflow/plugins/gcp/includefile_support.py +3 -0
- metaflow/plugins/kubernetes/kubernetes_cli.py +1 -1
- metaflow/plugins/kubernetes/kubernetes_decorator.py +5 -4
- metaflow/plugins/kubernetes/kubernetes_jobsets.py +43 -28
- metaflow/plugins/{metadata → metadata_providers}/local.py +2 -2
- metaflow/plugins/{metadata → metadata_providers}/service.py +2 -2
- metaflow/plugins/parallel_decorator.py +1 -1
- metaflow/plugins/pypi/conda_decorator.py +1 -1
- metaflow/plugins/test_unbounded_foreach_decorator.py +1 -1
- metaflow/runner/click_api.py +4 -0
- metaflow/runner/deployer.py +134 -303
- metaflow/runner/deployer_impl.py +167 -0
- metaflow/runner/metaflow_runner.py +10 -9
- metaflow/runner/nbdeploy.py +12 -13
- metaflow/runner/nbrun.py +3 -3
- metaflow/runner/utils.py +55 -8
- metaflow/runtime.py +1 -1
- metaflow/system/system_logger.py +1 -19
- metaflow/system/system_monitor.py +0 -24
- metaflow/task.py +5 -8
- metaflow/version.py +1 -1
- {ob_metaflow-2.12.27.1.dist-info → ob_metaflow-2.12.30.2.dist-info}/METADATA +2 -2
- {ob_metaflow-2.12.27.1.dist-info → ob_metaflow-2.12.30.2.dist-info}/RECORD +63 -60
- {ob_metaflow-2.12.27.1.dist-info → ob_metaflow-2.12.30.2.dist-info}/WHEEL +1 -1
- /metaflow/{metadata → metadata_provider}/__init__.py +0 -0
- /metaflow/{metadata → metadata_provider}/heartbeat.py +0 -0
- /metaflow/{metadata → metadata_provider}/metadata.py +0 -0
- /metaflow/{metadata → metadata_provider}/util.py +0 -0
- /metaflow/plugins/{metadata → metadata_providers}/__init__.py +0 -0
- {ob_metaflow-2.12.27.1.dist-info → ob_metaflow-2.12.30.2.dist-info}/LICENSE +0 -0
- {ob_metaflow-2.12.27.1.dist-info → ob_metaflow-2.12.30.2.dist-info}/entry_points.txt +0 -0
- {ob_metaflow-2.12.27.1.dist-info → ob_metaflow-2.12.30.2.dist-info}/top_level.txt +0 -0
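Note the package renames in this list: metaflow/metadata becomes metaflow/metadata_provider, and metaflow/plugins/metadata becomes metaflow/plugins/metadata_providers, with the module files themselves moved unchanged (+0 -0). Any downstream code that imported these modules directly needs updated import paths. A minimal compatibility sketch, assuming the MetadataProvider base class stays in the moved metadata.py module (class name per upstream Metaflow; verify against the installed version):

# Compatibility shim for the metadata -> metadata_provider package rename.
# Assumes MetadataProvider still lives in the moved metadata.py module.
try:
    # New layout (ob-metaflow 2.12.30.x)
    from metaflow.metadata_provider.metadata import MetadataProvider
except ImportError:
    # Old layout (ob-metaflow 2.12.27.x and earlier)
    from metaflow.metadata.metadata import MetadataProvider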
--- a/metaflow/plugins/argo/argo_workflows_deployer.py
+++ b/metaflow/plugins/argo/argo_workflows_deployer.py
@@ -1,392 +1,106 @@
-import sys
-import json
-import tempfile
-from typing import Optional, ClassVar
+from typing import Any, ClassVar, Dict, Optional, TYPE_CHECKING, Type

-from metaflow.
-from metaflow.exception import MetaflowException
-from metaflow.plugins.argo.argo_client import ArgoClient
-from metaflow.metaflow_config import KUBERNETES_NAMESPACE
-from metaflow.plugins.argo.argo_workflows import ArgoWorkflows
-from metaflow.runner.deployer import (
-    Deployer,
-    DeployerImpl,
-    DeployedFlow,
-    TriggeredRun,
-    get_lower_level_group,
-    handle_timeout,
-)
+from metaflow.runner.deployer_impl import DeployerImpl

-
-def generate_fake_flow_file_contents(
-    flow_name: str, param_info: dict, project_name: Optional[str] = None
-):
-    params_code = ""
-    for _, param_details in param_info.items():
-        param_name = param_details["name"]
-        param_type = param_details["type"]
-        param_help = param_details["description"]
-        param_required = param_details["is_required"]
-
-        if param_type == "JSON":
-            params_code += f"    {param_name} = Parameter('{param_name}', type=JSONType, help='{param_help}', required={param_required})\n"
-        elif param_type == "FilePath":
-            is_text = param_details.get("is_text", True)
-            encoding = param_details.get("encoding", "utf-8")
-            params_code += f"    {param_name} = IncludeFile('{param_name}', is_text={is_text}, encoding='{encoding}', help='{param_help}', required={param_required})\n"
-        else:
-            params_code += f"    {param_name} = Parameter('{param_name}', type={param_type}, help='{param_help}', required={param_required})\n"
-
-    project_decorator = f"@project(name='{project_name}')\n" if project_name else ""
-
-    contents = f"""\
-from metaflow import FlowSpec, Parameter, IncludeFile, JSONType, step, project
-{project_decorator}class {flow_name}(FlowSpec):
-{params_code}
-    @step
-    def start(self):
-        self.next(self.end)
-    @step
-    def end(self):
-        pass
-if __name__ == '__main__':
-    {flow_name}()
-"""
-    return contents
-
-
-def from_deployment(identifier: str, metadata: Optional[str] = None):
-    client = ArgoClient(namespace=KUBERNETES_NAMESPACE)
-    workflow_template = client.get_workflow_template(identifier)
-
-    if workflow_template is None:
-        raise MetaflowException("No deployed flow found for: %s" % identifier)
-
-    metadata_annotations = workflow_template.get("metadata", {}).get("annotations", {})
-
-    flow_name = metadata_annotations.get("metaflow/flow_name", "")
-    username = metadata_annotations.get("metaflow/owner", "")
-    parameters = json.loads(metadata_annotations.get("metaflow/parameters", {}))
-
-    # these two only exist if @project decorator is used..
-    branch_name = metadata_annotations.get("metaflow/branch_name", None)
-    project_name = metadata_annotations.get("metaflow/project_name", None)
-
-    project_kwargs = {}
-    if branch_name is not None:
-        if branch_name.startswith("prod."):
-            project_kwargs["production"] = True
-            project_kwargs["branch"] = branch_name[len("prod.") :]
-        elif branch_name.startswith("test."):
-            project_kwargs["branch"] = branch_name[len("test.") :]
-        elif branch_name == "prod":
-            project_kwargs["production"] = True
-
-    fake_flow_file_contents = generate_fake_flow_file_contents(
-        flow_name=flow_name, param_info=parameters, project_name=project_name
-    )
-
-    with tempfile.NamedTemporaryFile(suffix=".py", delete=False) as fake_flow_file:
-        with open(fake_flow_file.name, "w") as fp:
-            fp.write(fake_flow_file_contents)
-
-        if branch_name is not None:
-            d = Deployer(
-                fake_flow_file.name, env={"METAFLOW_USER": username}, **project_kwargs
-            ).argo_workflows()
-        else:
-            d = Deployer(
-                fake_flow_file.name, env={"METAFLOW_USER": username}
-            ).argo_workflows(name=identifier)
-
-        d.name = identifier
-        d.flow_name = flow_name
-        if metadata is None:
-            d.metadata = get_metadata()
-        else:
-            d.metadata = metadata
-
-    df = DeployedFlow(deployer=d)
-    d._enrich_deployed_flow(df)
-
-    return df
-
-
-def suspend(instance: TriggeredRun, **kwargs):
-    """
-    Suspend the running workflow.
-
-    Parameters
-    ----------
-    **kwargs : Any
-        Additional arguments to pass to the suspend command.
-
-    Returns
-    -------
-    bool
-        True if the command was successful, False otherwise.
-    """
-    _, run_id = instance.pathspec.split("/")
-
-    # every subclass needs to have `self.deployer_kwargs`
-    command = get_lower_level_group(
-        instance.deployer.api,
-        instance.deployer.top_level_kwargs,
-        instance.deployer.TYPE,
-        instance.deployer.deployer_kwargs,
-    ).suspend(run_id=run_id, **kwargs)
-
-    pid = instance.deployer.spm.run_command(
-        [sys.executable, *command],
-        env=instance.deployer.env_vars,
-        cwd=instance.deployer.cwd,
-        show_output=instance.deployer.show_output,
-    )
-
-    command_obj = instance.deployer.spm.get(pid)
-    return command_obj.process.returncode == 0
-
-
-def unsuspend(instance: TriggeredRun, **kwargs):
-    """
-    Unsuspend the suspended workflow.
-
-    Parameters
-    ----------
-    **kwargs : Any
-        Additional arguments to pass to the unsuspend command.
-
-    Returns
-    -------
-    bool
-        True if the command was successful, False otherwise.
-    """
-    _, run_id = instance.pathspec.split("/")
-
-    # every subclass needs to have `self.deployer_kwargs`
-    command = get_lower_level_group(
-        instance.deployer.api,
-        instance.deployer.top_level_kwargs,
-        instance.deployer.TYPE,
-        instance.deployer.deployer_kwargs,
-    ).unsuspend(run_id=run_id, **kwargs)
-
-    pid = instance.deployer.spm.run_command(
-        [sys.executable, *command],
-        env=instance.deployer.env_vars,
-        cwd=instance.deployer.cwd,
-        show_output=instance.deployer.show_output,
-    )
-
-    command_obj = instance.deployer.spm.get(pid)
-    return command_obj.process.returncode == 0
-
-
-def terminate(instance: TriggeredRun, **kwargs):
-    """
-    Terminate the running workflow.
-
-    Parameters
-    ----------
-    **kwargs : Any
-        Additional arguments to pass to the terminate command.
-
-    Returns
-    -------
-    bool
-        True if the command was successful, False otherwise.
-    """
-    _, run_id = instance.pathspec.split("/")
-
-    # every subclass needs to have `self.deployer_kwargs`
-    command = get_lower_level_group(
-        instance.deployer.api,
-        instance.deployer.top_level_kwargs,
-        instance.deployer.TYPE,
-        instance.deployer.deployer_kwargs,
-    ).terminate(run_id=run_id, **kwargs)
-
-    pid = instance.deployer.spm.run_command(
-        [sys.executable, *command],
-        env=instance.deployer.env_vars,
-        cwd=instance.deployer.cwd,
-        show_output=instance.deployer.show_output,
-    )
-
-    command_obj = instance.deployer.spm.get(pid)
-    return command_obj.process.returncode == 0
-
-
-def status(instance: TriggeredRun):
-    """
-    Get the status of the triggered run.
-
-    Returns
-    -------
-    str, optional
-        The status of the workflow considering the run object, or None if the status could not be retrieved.
-    """
-    from metaflow.plugins.argo.argo_workflows_cli import (
-        get_status_considering_run_object,
-    )
-
-    flow_name, run_id = instance.pathspec.split("/")
-    name = run_id[5:]
-    status = ArgoWorkflows.get_workflow_status(flow_name, name)
-    if status is not None:
-        return get_status_considering_run_object(status, instance.run)
-    return None
-
-
-def production_token(instance: DeployedFlow):
-    """
-    Get the production token for the deployed flow.
-
-    Returns
-    -------
-    str, optional
-        The production token, None if it cannot be retrieved.
-    """
-    try:
-        _, production_token = ArgoWorkflows.get_existing_deployment(
-            instance.deployer.name
-        )
-        return production_token
-    except TypeError:
-        return None
-
-
-def delete(instance: DeployedFlow, **kwargs):
-    """
-    Delete the deployed flow.
-
-    Parameters
-    ----------
-    **kwargs : Any
-        Additional arguments to pass to the delete command.
-
-    Returns
-    -------
-    bool
-        True if the command was successful, False otherwise.
-    """
-    command = get_lower_level_group(
-        instance.deployer.api,
-        instance.deployer.top_level_kwargs,
-        instance.deployer.TYPE,
-        instance.deployer.deployer_kwargs,
-    ).delete(**kwargs)
-
-    pid = instance.deployer.spm.run_command(
-        [sys.executable, *command],
-        env=instance.deployer.env_vars,
-        cwd=instance.deployer.cwd,
-        show_output=instance.deployer.show_output,
-    )
-
-    command_obj = instance.deployer.spm.get(pid)
-    return command_obj.process.returncode == 0
-
-
-def trigger(instance: DeployedFlow, **kwargs):
-    """
-    Trigger a new run for the deployed flow.
-
-    Parameters
-    ----------
-    **kwargs : Any
-        Additional arguments to pass to the trigger command, `Parameters` in particular
-
-    Returns
-    -------
-    ArgoWorkflowsTriggeredRun
-        The triggered run instance.
-
-    Raises
-    ------
-    Exception
-        If there is an error during the trigger process.
-    """
-    with tempfile.TemporaryDirectory() as temp_dir:
-        tfp_runner_attribute = tempfile.NamedTemporaryFile(dir=temp_dir, delete=False)
-
-        # every subclass needs to have `self.deployer_kwargs`
-        command = get_lower_level_group(
-            instance.deployer.api,
-            instance.deployer.top_level_kwargs,
-            instance.deployer.TYPE,
-            instance.deployer.deployer_kwargs,
-        ).trigger(deployer_attribute_file=tfp_runner_attribute.name, **kwargs)
-
-        pid = instance.deployer.spm.run_command(
-            [sys.executable, *command],
-            env=instance.deployer.env_vars,
-            cwd=instance.deployer.cwd,
-            show_output=instance.deployer.show_output,
-        )
-
-        command_obj = instance.deployer.spm.get(pid)
-        content = handle_timeout(
-            tfp_runner_attribute, command_obj, instance.deployer.file_read_timeout
-        )
-
-        if command_obj.process.returncode == 0:
-            triggered_run = TriggeredRun(deployer=instance.deployer, content=content)
-            triggered_run._enrich_object(
-                {
-                    "status": property(status),
-                    "terminate": terminate,
-                    "suspend": suspend,
-                    "unsuspend": unsuspend,
-                }
-            )
-            return triggered_run
-
-    raise Exception(
-        "Error triggering %s on %s for %s"
-        % (instance.deployer.name, instance.deployer.TYPE, instance.deployer.flow_file)
-    )
+if TYPE_CHECKING:
+    import metaflow.plugins.argo.argo_workflows_deployer_objects


 class ArgoWorkflowsDeployer(DeployerImpl):
     """
     Deployer implementation for Argo Workflows.

-    Attributes
+    Parameters
     ----------
-    name : str
-        The
+    name : str, optional, default None
+        Argo workflow name. The flow name is used instead if this option is not specified.
     """

     TYPE: ClassVar[Optional[str]] = "argo-workflows"

-    def __init__(self, deployer_kwargs, **kwargs):
+    def __init__(self, deployer_kwargs: Dict[str, str], **kwargs):
         """
         Initialize the ArgoWorkflowsDeployer.

         Parameters
         ----------
-        deployer_kwargs :
+        deployer_kwargs : Dict[str, str]
             The deployer-specific keyword arguments.
         **kwargs : Any
             Additional arguments to pass to the superclass constructor.
         """
-        self.deployer_kwargs = deployer_kwargs
+        self._deployer_kwargs = deployer_kwargs
         super().__init__(**kwargs)

-
+    @property
+    def deployer_kwargs(self) -> Dict[str, Any]:
+        return self._deployer_kwargs
+
+    @staticmethod
+    def deployed_flow_type() -> (
+        Type[
+            "metaflow.plugins.argo.argo_workflows_deployer_objects.ArgoWorkflowsDeployedFlow"
+        ]
+    ):
+        from .argo_workflows_deployer_objects import ArgoWorkflowsDeployedFlow
+
+        return ArgoWorkflowsDeployedFlow
+
+    def create(
+        self, **kwargs
+    ) -> "metaflow.plugins.argo.argo_workflows_deployer_objects.ArgoWorkflowsDeployedFlow":
         """
-
+        Create a new ArgoWorkflow deployment.

         Parameters
         ----------
-
-
+        authorize : str, optional, default None
+            Authorize using this production token. Required when re-deploying an existing flow
+            for the first time. The token is cached in METAFLOW_HOME.
+        generate_new_token : bool, optional, default False
+            Generate a new production token for this flow. Moves the production flow to a new namespace.
+        given_token : str, optional, default None
+            Use the given production token for this flow. Moves the production flow to the given namespace.
+        tags : List[str], optional, default None
+            Annotate all objects produced by Argo Workflows runs with these tags.
+        user_namespace : str, optional, default None
+            Change the namespace from the default (production token) to the given tag.
+        only_json : bool, optional, default False
+            Only print out JSON sent to Argo Workflows without deploying anything.
+        max_workers : int, optional, default 100
+            Maximum number of parallel processes.
+        workflow_timeout : int, optional, default None
+            Workflow timeout in seconds.
+        workflow_priority : int, optional, default None
+            Workflow priority as an integer. Higher priority workflows are processed first
+            if Argo Workflows controller is configured to process limited parallel workflows.
+        auto_emit_argo_events : bool, optional, default True
+            Auto emits Argo Events when the run completes successfully.
+        notify_on_error : bool, optional, default False
+            Notify if the workflow fails.
+        notify_on_success : bool, optional, default False
+            Notify if the workflow succeeds.
+        notify_slack_webhook_url : str, optional, default ''
+            Slack incoming webhook url for workflow success/failure notifications.
+        notify_pager_duty_integration_key : str, optional, default ''
+            PagerDuty Events API V2 Integration key for workflow success/failure notifications.
+        enable_heartbeat_daemon : bool, optional, default False
+            Use a daemon container to broadcast heartbeats.
+        deployer_attribute_file : str, optional, default None
+            Write the workflow name to the specified file. Used internally for Metaflow's Deployer API.
+        enable_error_msg_capture : bool, optional, default True
+            Capture stack trace of first failed task in exit hook.
+
+        Returns
+        -------
+        ArgoWorkflowsDeployedFlow
+            The Flow deployed to Argo Workflows.
         """
-
-
-
-
-
-
-
+
+        # Prevent circular import
+        from .argo_workflows_deployer_objects import ArgoWorkflowsDeployedFlow
+
+        return self._create(ArgoWorkflowsDeployedFlow, **kwargs)
+
+
+_addl_stubgen_modules = ["metaflow.plugins.argo.argo_workflows_deployer_objects"]
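Taken together, the changes to argo_workflows_deployer.py, the new argo_workflows_deployer_objects.py, and the new metaflow/runner/deployer_impl.py replace the old pattern of enriching generic DeployedFlow/TriggeredRun objects at runtime (via _enrich_object() and module-level suspend/terminate/trigger functions) with concrete typed classes. A usage sketch of the refactored API, assuming Deployer remains importable from the top-level metaflow package as in upstream Metaflow; the flow file name and workflow template name are illustrative:

from metaflow import Deployer

# create() now returns a concrete ArgoWorkflowsDeployedFlow rather than a
# generic DeployedFlow patched with extra attributes after construction.
deployed_flow = Deployer("hello_flow.py").argo_workflows().create()

# The triggered-run object carries the methods that were previously attached
# dynamically: status, suspend, unsuspend, terminate.
triggered_run = deployed_flow.trigger()
print(triggered_run.status)
triggered_run.terminate()

# from_deployment() also moved out of this module; in the new layout it is a
# classmethod on ArgoWorkflowsDeployedFlow ("helloflow" is a hypothetical
# Argo workflow template name).
from metaflow.plugins.argo.argo_workflows_deployer_objects import (
    ArgoWorkflowsDeployedFlow,
)
same_flow = ArgoWorkflowsDeployedFlow.from_deployment("helloflow")

The deployed_flow_type() static method and the TYPE_CHECKING import keep type checkers and the stub generator (stub_generator.py, also heavily changed in this release, and the new _addl_stubgen_modules hook) aware of the concrete return types without importing the objects module at runtime.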