metaflow 2.12.38__py2.py3-none-any.whl → 2.13__py2.py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- metaflow/__init__.py +1 -1
- metaflow/cli.py +111 -36
- metaflow/cli_args.py +2 -2
- metaflow/cli_components/run_cmds.py +3 -1
- metaflow/datastore/flow_datastore.py +2 -2
- metaflow/exception.py +8 -2
- metaflow/flowspec.py +48 -36
- metaflow/graph.py +28 -27
- metaflow/includefile.py +2 -2
- metaflow/lint.py +35 -20
- metaflow/metaflow_config.py +5 -0
- metaflow/parameters.py +11 -4
- metaflow/plugins/argo/argo_workflows_deployer_objects.py +47 -1
- metaflow/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.py +13 -10
- metaflow/plugins/aws/step_functions/step_functions_deployer_objects.py +3 -0
- metaflow/plugins/cards/card_creator.py +1 -0
- metaflow/plugins/cards/card_decorator.py +46 -8
- metaflow/plugins/pypi/bootstrap.py +196 -61
- metaflow/plugins/pypi/conda_decorator.py +14 -26
- metaflow/plugins/pypi/conda_environment.py +76 -21
- metaflow/plugins/pypi/micromamba.py +42 -15
- metaflow/plugins/pypi/pip.py +8 -3
- metaflow/plugins/pypi/pypi_decorator.py +10 -9
- metaflow/runner/click_api.py +175 -39
- metaflow/runner/deployer.py +1 -1
- metaflow/runner/deployer_impl.py +8 -3
- metaflow/runner/metaflow_runner.py +10 -2
- metaflow/runner/nbdeploy.py +2 -0
- metaflow/runner/nbrun.py +1 -1
- metaflow/runner/subprocess_manager.py +3 -1
- metaflow/runner/utils.py +41 -19
- metaflow/user_configs/config_options.py +87 -34
- metaflow/user_configs/config_parameters.py +44 -25
- metaflow/util.py +2 -2
- metaflow/version.py +1 -1
- {metaflow-2.12.38.dist-info → metaflow-2.13.dist-info}/METADATA +2 -2
- {metaflow-2.12.38.dist-info → metaflow-2.13.dist-info}/RECORD +41 -41
- {metaflow-2.12.38.dist-info → metaflow-2.13.dist-info}/LICENSE +0 -0
- {metaflow-2.12.38.dist-info → metaflow-2.13.dist-info}/WHEEL +0 -0
- {metaflow-2.12.38.dist-info → metaflow-2.13.dist-info}/entry_points.txt +0 -0
- {metaflow-2.12.38.dist-info → metaflow-2.13.dist-info}/top_level.txt +0 -0
metaflow/lint.py
CHANGED
```diff
@@ -52,7 +52,7 @@ def check_reserved_words(graph):
     msg = "Step name *%s* is a reserved word. Choose another name for the " "step."
     for node in graph:
         if node.name in RESERVED:
-            raise LintWarn(msg % node.name)
+            raise LintWarn(msg % node.name, node.func_lineno, node.source_file)
 
 
 @linter.ensure_fundamentals
@@ -76,9 +76,9 @@ def check_that_end_is_end(graph):
     node = graph["end"]
 
     if node.has_tail_next or node.invalid_tail_next:
-        raise LintWarn(msg0, node.tail_next_lineno)
+        raise LintWarn(msg0, node.tail_next_lineno, node.source_file)
     if node.num_args > 1:
-        raise LintWarn(msg1, node.tail_next_lineno)
+        raise LintWarn(msg1, node.tail_next_lineno, node.source_file)
 
 
 @linter.ensure_fundamentals
@@ -90,7 +90,7 @@ def check_step_names(graph):
     )
     for node in graph:
         if re.search("[^a-z0-9_]", node.name) or node.name[0] == "_":
-            raise LintWarn(msg.format(node), node.func_lineno)
+            raise LintWarn(msg.format(node), node.func_lineno, node.source_file)
 
 
 @linter.ensure_fundamentals
@@ -108,11 +108,11 @@ def check_num_args(graph):
     msg2 = "Step *{0.name}* is missing the 'self' argument."
     for node in graph:
         if node.num_args > 2:
-            raise LintWarn(msg0.format(node), node.func_lineno)
+            raise LintWarn(msg0.format(node), node.func_lineno, node.source_file)
         elif node.num_args == 2 and node.type != "join":
-            raise LintWarn(msg1.format(node), node.func_lineno)
+            raise LintWarn(msg1.format(node), node.func_lineno, node.source_file)
         elif node.num_args == 0:
-            raise LintWarn(msg2.format(node), node.func_lineno)
+            raise LintWarn(msg2.format(node), node.func_lineno, node.source_file)
 
 
 @linter.ensure_static_graph
@@ -125,7 +125,7 @@ def check_static_transitions(graph):
     )
     for node in graph:
         if node.type != "end" and not node.has_tail_next:
-            raise LintWarn(msg.format(node), node.func_lineno)
+            raise LintWarn(msg.format(node), node.func_lineno, node.source_file)
 
 
 @linter.ensure_static_graph
@@ -138,7 +138,7 @@ def check_valid_transitions(graph):
     )
     for node in graph:
         if node.type != "end" and node.has_tail_next and node.invalid_tail_next:
-            raise LintWarn(msg.format(node), node.tail_next_lineno)
+            raise LintWarn(msg.format(node), node.tail_next_lineno, node.source_file)
 
 
 @linter.ensure_static_graph
@@ -151,7 +151,11 @@ def check_unknown_transitions(graph):
     for node in graph:
         unknown = [n for n in node.out_funcs if n not in graph]
         if unknown:
-            raise LintWarn(msg.format(node, step=unknown[0]), node.tail_next_lineno)
+            raise LintWarn(
+                msg.format(node, step=unknown[0]),
+                node.tail_next_lineno,
+                node.source_file,
+            )
 
 
 @linter.ensure_acyclicity
@@ -167,7 +171,9 @@ def check_for_acyclicity(graph):
         for n in node.out_funcs:
             if n in seen:
                 path = "->".join(seen + [n])
-                raise LintWarn(msg.format(path), node.tail_next_lineno)
+                raise LintWarn(
+                    msg.format(path), node.tail_next_lineno, node.source_file
+                )
             else:
                 check_path(graph[n], seen + [n])
 
@@ -195,7 +201,7 @@ def check_for_orphans(graph):
     orphans = nodeset - seen
     if orphans:
         orphan = graph[list(orphans)[0]]
-        raise LintWarn(msg.format(orphan), orphan.func_lineno)
+        raise LintWarn(msg.format(orphan), orphan.func_lineno, orphan.source_file)
 
 
 @linter.ensure_static_graph
@@ -230,7 +236,9 @@ def check_split_join_balance(graph):
             if split_stack:
                 _, split_roots = split_stack.pop()
                 roots = ", ".join(split_roots)
-                raise LintWarn(msg0.format(roots=roots), node.func_lineno)
+                raise LintWarn(
+                    msg0.format(roots=roots), node.func_lineno, node.source_file
+                )
         elif node.type == "join":
             if split_stack:
                 _, split_roots = split_stack[-1]
@@ -243,9 +251,10 @@ def check_split_join_balance(graph):
                         node, paths=paths, num_roots=len(split_roots), roots=roots
                     ),
                     node.func_lineno,
+                    node.source_file,
                 )
             else:
-                raise LintWarn(msg2.format(node), node.func_lineno)
+                raise LintWarn(msg2.format(node), node.func_lineno, node.source_file)
 
         # check that incoming steps come from the same lineage
         # (no cross joins)
@@ -256,7 +265,7 @@ def check_split_join_balance(graph):
             return tuple(graph[n].split_parents)
 
         if not all_equal(map(parents, node.in_funcs)):
-            raise LintWarn(msg3.format(node), node.func_lineno)
+            raise LintWarn(msg3.format(node), node.func_lineno, node.source_file)
 
         for n in node.out_funcs:
             traverse(graph[n], new_stack)
@@ -276,7 +285,9 @@ def check_empty_foreaches(graph):
         if node.type == "foreach":
            joins = [n for n in node.out_funcs if graph[n].type == "join"]
            if joins:
-                raise LintWarn(msg.format(node, join=joins[0]), node.func_lineno)
+                raise LintWarn(
+                    msg.format(node, join=joins[0]), node.func_lineno, node.source_file
+                )
 
 
 @linter.ensure_static_graph
@@ -290,7 +301,7 @@ def check_parallel_step_after_next(graph):
         if node.parallel_foreach and not all(
             graph[out_node].parallel_step for out_node in node.out_funcs
         ):
-            raise LintWarn(msg.format(node))
+            raise LintWarn(msg.format(node), node.func_lineno, node.source_file)
 
 
 @linter.ensure_static_graph
@@ -303,7 +314,9 @@ def check_join_followed_by_parallel_step(graph):
     )
     for node in graph:
         if node.parallel_step and not graph[node.out_funcs[0]].type == "join":
-            raise LintWarn(msg.format(node.out_funcs[0]))
+            raise LintWarn(
+                msg.format(node.out_funcs[0]), node.func_lineno, node.source_file
+            )
 
 
 @linter.ensure_static_graph
@@ -318,7 +331,9 @@ def check_parallel_foreach_calls_parallel_step(graph):
         for node2 in graph:
             if node2.out_funcs and node.name in node2.out_funcs:
                 if not node2.parallel_foreach:
-                    raise LintWarn(msg.format(node, node2))
+                    raise LintWarn(
+                        msg.format(node, node2), node.func_lineno, node.source_file
+                    )
 
 
 @linter.ensure_non_nested_foreach
@@ -331,4 +346,4 @@ def check_nested_foreach(graph):
     for node in graph:
         if node.type == "foreach":
            if any(graph[p].type == "foreach" for p in node.split_parents):
-                raise LintWarn(msg.format(node))
+                raise LintWarn(msg.format(node), node.func_lineno, node.source_file)
```
metaflow/metaflow_config.py
CHANGED
```diff
@@ -508,6 +508,11 @@ DISABLE_TRACING = bool(os.environ.get("DISABLE_TRACING", False))
 # lexicographic ordering of attempts. This won't work if MAX_ATTEMPTS > 99.
 MAX_ATTEMPTS = 6
 
+# Feature flag (experimental features that are *explicitly* unsupported)
+
+# Process configs even when using the click_api for Runner/Deployer
+CLICK_API_PROCESS_CONFIG = from_conf("CLICK_API_PROCESS_CONFIG", False)
+
 
 # PINNED_CONDA_LIBS are the libraries that metaflow depends on for execution
 # and are needed within a conda environment
```
metaflow/parameters.py
CHANGED
```diff
@@ -359,7 +359,7 @@ class Parameter(object):
             "show_default": show_default,
         }
 
-    def init(self):
+    def init(self, ignore_errors=False):
         # Prevent circular import
         from .user_configs.config_parameters import (
             resolve_delayed_evaluator,
@@ -367,14 +367,21 @@ class Parameter(object):
         )
 
         # Resolve any value from configurations
-        self.kwargs = unpack_delayed_evaluator(self.kwargs)
-        self.kwargs = resolve_delayed_evaluator(self.kwargs)
+        self.kwargs = unpack_delayed_evaluator(self.kwargs, ignore_errors=ignore_errors)
+        # Do it one item at a time so errors are ignored at that level (as opposed to
+        # at the entire kwargs level)
+        self.kwargs = {
+            k: resolve_delayed_evaluator(v, ignore_errors=ignore_errors)
+            for k, v in self.kwargs.items()
+        }
 
         # This was the behavior before configs: values specified in args would override
         # stuff in kwargs which is what we implement here as well
         for key, value in self._override_kwargs.items():
             if value is not None:
-                self.kwargs[key] = resolve_delayed_evaluator(value)
+                self.kwargs[key] = resolve_delayed_evaluator(
+                    value, ignore_errors=ignore_errors
+                )
         # Set two default values if no-one specified them
         self.kwargs.setdefault("required", False)
         self.kwargs.setdefault("show_default", True)
```
|
@@ -1,5 +1,6 @@
|
|
1
1
|
import sys
|
2
2
|
import json
|
3
|
+
import time
|
3
4
|
import tempfile
|
4
5
|
from typing import ClassVar, Optional
|
5
6
|
|
@@ -97,6 +98,7 @@ class ArgoWorkflowsTriggeredRun(TriggeredRun):
|
|
97
98
|
)
|
98
99
|
|
99
100
|
command_obj = self.deployer.spm.get(pid)
|
101
|
+
command_obj.sync_wait()
|
100
102
|
return command_obj.process.returncode == 0
|
101
103
|
|
102
104
|
def unsuspend(self, **kwargs) -> bool:
|
@@ -131,6 +133,7 @@ class ArgoWorkflowsTriggeredRun(TriggeredRun):
|
|
131
133
|
)
|
132
134
|
|
133
135
|
command_obj = self.deployer.spm.get(pid)
|
136
|
+
command_obj.sync_wait()
|
134
137
|
return command_obj.process.returncode == 0
|
135
138
|
|
136
139
|
def terminate(self, **kwargs) -> bool:
|
@@ -165,8 +168,50 @@ class ArgoWorkflowsTriggeredRun(TriggeredRun):
|
|
165
168
|
)
|
166
169
|
|
167
170
|
command_obj = self.deployer.spm.get(pid)
|
171
|
+
command_obj.sync_wait()
|
168
172
|
return command_obj.process.returncode == 0
|
169
173
|
|
174
|
+
def wait_for_completion(self, timeout: Optional[int] = None):
|
175
|
+
"""
|
176
|
+
Wait for the workflow to complete or timeout.
|
177
|
+
|
178
|
+
Parameters
|
179
|
+
----------
|
180
|
+
timeout : int, optional, default None
|
181
|
+
Maximum time in seconds to wait for workflow completion.
|
182
|
+
If None, waits indefinitely.
|
183
|
+
|
184
|
+
Raises
|
185
|
+
------
|
186
|
+
TimeoutError
|
187
|
+
If the workflow does not complete within the specified timeout period.
|
188
|
+
"""
|
189
|
+
start_time = time.time()
|
190
|
+
check_interval = 5
|
191
|
+
while self.is_running:
|
192
|
+
if timeout is not None and (time.time() - start_time) > timeout:
|
193
|
+
raise TimeoutError(
|
194
|
+
"Workflow did not complete within specified timeout."
|
195
|
+
)
|
196
|
+
time.sleep(check_interval)
|
197
|
+
|
198
|
+
@property
|
199
|
+
def is_running(self):
|
200
|
+
"""
|
201
|
+
Check if the workflow is currently running.
|
202
|
+
|
203
|
+
Returns
|
204
|
+
-------
|
205
|
+
bool
|
206
|
+
True if the workflow status is either 'Pending' or 'Running',
|
207
|
+
False otherwise.
|
208
|
+
"""
|
209
|
+
workflow_status = self.status
|
210
|
+
# full list of all states present here:
|
211
|
+
# https://github.com/argoproj/argo-workflows/blob/main/pkg/apis/workflow/v1alpha1/workflow_types.go#L54
|
212
|
+
# we only consider non-terminal states to determine if the workflow has not finished
|
213
|
+
return workflow_status is not None and workflow_status in ["Pending", "Running"]
|
214
|
+
|
170
215
|
@property
|
171
216
|
def status(self) -> Optional[str]:
|
172
217
|
"""
|
@@ -319,6 +364,7 @@ class ArgoWorkflowsDeployedFlow(DeployedFlow):
|
|
319
364
|
)
|
320
365
|
|
321
366
|
command_obj = self.deployer.spm.get(pid)
|
367
|
+
command_obj.sync_wait()
|
322
368
|
return command_obj.process.returncode == 0
|
323
369
|
|
324
370
|
def trigger(self, **kwargs) -> ArgoWorkflowsTriggeredRun:
|
@@ -361,7 +407,7 @@ class ArgoWorkflowsDeployedFlow(DeployedFlow):
|
|
361
407
|
content = handle_timeout(
|
362
408
|
attribute_file_fd, command_obj, self.deployer.file_read_timeout
|
363
409
|
)
|
364
|
-
|
410
|
+
command_obj.sync_wait()
|
365
411
|
if command_obj.process.returncode == 0:
|
366
412
|
return ArgoWorkflowsTriggeredRun(
|
367
413
|
deployer=self.deployer, content=content
|
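Putting the new pieces together, a hedged usage sketch of `wait_for_completion()` and `is_running` with Metaflow's Deployer API (the flow file name and timeout are hypothetical):

```python
# Hedged usage sketch of the additions above. "hello_flow.py" is hypothetical.
from metaflow import Deployer

deployed = Deployer("hello_flow.py").argo_workflows().create()
run = deployed.trigger()
try:
    # Polls run.status every 5 seconds; raises TimeoutError after 10 minutes.
    run.wait_for_completion(timeout=600)
except TimeoutError:
    run.terminate()
print(run.status)
```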
metaflow/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.py
CHANGED
```diff
@@ -50,24 +50,27 @@ class AwsSecretsManagerSecretsProvider(SecretsProvider):
         The secret payload from AWS is EITHER a string OR a binary blob.
 
         If the secret contains a string payload ("SecretString"):
-        - if the `
+        - if the `json` option is True (default):
           {SecretString} will be parsed as a JSON. If successfully parsed, AND the JSON contains a
           top-level object, each entry K/V in the object will also be converted to an entry in the result. V will
           always be casted to a string (if not already a string).
-        - If `
-          {SecretString} will be returned as a single entry in the result,
+        - If `json` option is False:
+          {SecretString} will be returned as a single entry in the result, where the key is either:
+          - the `secret_id`, OR
+          - the value set by `options={"env_var_name": custom_env_var_name}`.
 
-        Otherwise, the secret contains a binary blob payload ("SecretBinary")
-        - The result
+        Otherwise, if the secret contains a binary blob payload ("SecretBinary"):
+        - The result dict contains '{SecretName}': '{SecretBinary}', where {SecretBinary} is a base64-encoded string.
 
-        All keys in the result are sanitized to be more valid environment variable names. This is done on a best
+        All keys in the result are sanitized to be more valid environment variable names. This is done on a best-effort
         basis. Further validation is expected to be done by the invoking @secrets decorator itself.
 
-        :param secret_id: ARN or friendly name of the secret
-        :param options:
-        :param role: AWS IAM Role ARN to assume before reading the secret
-        :return:
+        :param secret_id: ARN or friendly name of the secret.
+        :param options: Dictionary of additional options. E.g., `options={"env_var_name": custom_env_var_name}`.
+        :param role: AWS IAM Role ARN to assume before reading the secret.
+        :return: Dictionary of environment variables. All keys and values are strings.
         """
+
         import botocore
         from metaflow.plugins.aws.aws_client import get_aws_client
 
```
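For reference, a hedged example of the non-JSON path described in the clarified docstring, assuming the dict-style `sources` spec accepted by `@secrets`; the secret id is hypothetical:

```python
import os

from metaflow import FlowSpec, secrets, step


class TokenFlow(FlowSpec):
    # With json=False the whole SecretString becomes one entry, exposed under
    # the env_var_name override instead of the secret_id. "my-api-token" is a
    # hypothetical secret name.
    @secrets(
        sources=[
            {
                "type": "aws-secrets-manager",
                "id": "my-api-token",
                "options": {"json": False, "env_var_name": "API_TOKEN"},
            }
        ]
    )
    @step
    def start(self):
        print("token length:", len(os.environ["API_TOKEN"]))
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    TokenFlow()
```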
metaflow/plugins/aws/step_functions/step_functions_deployer_objects.py
CHANGED
```diff
@@ -46,6 +46,7 @@ class StepFunctionsTriggeredRun(TriggeredRun):
         )
 
         command_obj = self.deployer.spm.get(pid)
+        command_obj.sync_wait()
         return command_obj.process.returncode == 0
 
 
@@ -174,6 +175,7 @@ class StepFunctionsDeployedFlow(DeployedFlow):
         )
 
         command_obj = self.deployer.spm.get(pid)
+        command_obj.sync_wait()
         return command_obj.process.returncode == 0
 
     def trigger(self, **kwargs) -> StepFunctionsTriggeredRun:
@@ -217,6 +219,7 @@ class StepFunctionsDeployedFlow(DeployedFlow):
             attribute_file_fd, command_obj, self.deployer.file_read_timeout
         )
 
+        command_obj.sync_wait()
         if command_obj.process.returncode == 0:
             return StepFunctionsTriggeredRun(
                 deployer=self.deployer, content=content
```
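The repeated `command_obj.sync_wait()` insertions here, and in the Argo objects above, guard against reading `process.returncode` before the subprocess has actually exited: with plain `subprocess`, the attribute stays `None` until the process is waited on. A minimal illustration of that failure mode:

```python
# Why waiting matters: Popen.returncode is None until the child is reaped via
# wait() or poll(); sync_wait() plays the wait() role for the deployer's
# subprocess-manager-owned processes.
import subprocess
import sys

proc = subprocess.Popen([sys.executable, "-c", "pass"])
print(proc.returncode)  # usually None: the child has not been waited on yet
proc.wait()
print(proc.returncode)  # 0
```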
metaflow/plugins/cards/card_decorator.py
CHANGED
```diff
@@ -1,13 +1,16 @@
+import json
+import os
+import re
+import tempfile
+
 from metaflow.decorators import StepDecorator
 from metaflow.metaflow_current import current
+from metaflow.user_configs.config_options import ConfigInput
+from metaflow.user_configs.config_parameters import dump_config_values
 from metaflow.util import to_unicode
+
 from .component_serializer import CardComponentCollector, get_card_class
 from .card_creator import CardCreator
-
-
-# from metaflow import get_metadata
-import re
-
 from .exception import CARD_ID_PATTERN, TYPE_CHECK_REGEX
 
 ASYNC_TIMEOUT = 30
@@ -111,6 +114,14 @@ class CardDecorator(StepDecorator):
         self._logger = logger
         self.card_options = None
 
+        # We check for configuration options. We do this here before they are
+        # converted to properties.
+        self._config_values = [
+            (config.name, ConfigInput.make_key_name(config.name))
+            for _, config in flow._get_parameters()
+            if config.IS_CONFIG_PARAMETER
+        ]
+
         self.card_options = self.attributes["options"]
 
         evt_name = "step-init"
@@ -146,6 +157,18 @@ class CardDecorator(StepDecorator):
         self._task_datastore = task_datastore
         self._metadata = metadata
 
+        # If we have configs, we need to dump them to a file so we can re-use them
+        # when calling the card creation subprocess.
+        if self._config_values:
+            with tempfile.NamedTemporaryFile(
+                mode="w", encoding="utf-8", delete=False
+            ) as config_file:
+                config_value = dump_config_values(flow)
+                json.dump(config_value, config_file)
+                self._config_file_name = config_file.name
+        else:
+            self._config_file_name = None
+
         card_type = self.attributes["type"]
         card_class = get_card_class(card_type)
 
@@ -179,7 +202,7 @@ class CardDecorator(StepDecorator):
         # we need to ensure that `current.card` has `CardComponentCollector` instantiated only once.
         if not self._is_event_registered("pre-step"):
             self._register_event("pre-step")
-            self._set_card_creator(CardCreator(self._create_top_level_args()))
+            self._set_card_creator(CardCreator(self._create_top_level_args(flow)))
 
         current._update_env(
             {"card": CardComponentCollector(self._logger, self.card_creator)}
@@ -223,6 +246,13 @@ class CardDecorator(StepDecorator):
         self.card_creator.create(mode="render", final=True, **create_options)
         self.card_creator.create(mode="refresh", final=True, **create_options)
 
+        # Unlink the config file if it exists
+        if self._config_file_name:
+            try:
+                os.unlink(self._config_file_name)
+            except Exception as e:
+                pass
+
     @staticmethod
     def _options(mapping):
         for k, v in mapping.items():
@@ -232,9 +262,13 @@ class CardDecorator(StepDecorator):
                 for value in v:
                     yield "--%s" % k
                     if not isinstance(value, bool):
-                        yield to_unicode(value)
+                        if isinstance(value, tuple):
+                            for val in value:
+                                yield to_unicode(val)
+                        else:
+                            yield to_unicode(value)
 
-    def _create_top_level_args(self):
+    def _create_top_level_args(self, flow):
         top_level_options = {
             "quiet": True,
             "metadata": self._metadata.TYPE,
@@ -247,4 +281,8 @@ class CardDecorator(StepDecorator):
             # We don't provide --with as all execution is taking place in
             # the context of the main process
         }
+        if self._config_values:
+            top_level_options["config-value"] = self._config_values
+            top_level_options["local-config-file"] = self._config_file_name
+
         return list(self._options(top_level_options))
```
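The tuple branch added to `_options` exists so each `(config_name, key)` pair in `self._config_values` expands into `--config-value <name> <key>` on the card subprocess command line. An illustrative re-implementation of just that flattening, not the decorator's full `_options` (whose surrounding lines are elided in the diff):

```python
# Illustrative flattening: list entries repeat the flag, tuple entries expand
# into multiple tokens after it, and booleans emit the bare flag only.
def options(mapping):
    for key, val in mapping.items():
        if not val:
            continue
        values = val if isinstance(val, list) else [val]
        for value in values:
            yield "--%s" % key
            if isinstance(value, bool):
                continue
            if isinstance(value, tuple):
                for item in value:
                    yield str(item)
            else:
                yield str(value)


print(list(options({"config-value": [("cfg", "kv:cfg")], "quiet": True})))
# ['--config-value', 'cfg', 'kv:cfg', '--quiet']
```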