opentf-toolkit-nightly 0.62.0.dev1291__py3-none-any.whl → 0.62.0.dev1298__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- opentf/commons/__init__.py +26 -9
- opentf/toolkit/__init__.py +6 -1
- opentf/toolkit/channels.py +157 -99
- {opentf_toolkit_nightly-0.62.0.dev1291.dist-info → opentf_toolkit_nightly-0.62.0.dev1298.dist-info}/METADATA +1 -1
- {opentf_toolkit_nightly-0.62.0.dev1291.dist-info → opentf_toolkit_nightly-0.62.0.dev1298.dist-info}/RECORD +8 -8
- {opentf_toolkit_nightly-0.62.0.dev1291.dist-info → opentf_toolkit_nightly-0.62.0.dev1298.dist-info}/LICENSE +0 -0
- {opentf_toolkit_nightly-0.62.0.dev1291.dist-info → opentf_toolkit_nightly-0.62.0.dev1298.dist-info}/WHEEL +0 -0
- {opentf_toolkit_nightly-0.62.0.dev1291.dist-info → opentf_toolkit_nightly-0.62.0.dev1298.dist-info}/top_level.txt +0 -0
opentf/commons/__init__.py
CHANGED
@@ -181,7 +181,10 @@ def _get_contextparameter_spec(app: Flask, name: str) -> Optional[Dict[str, Any]
     Initialize cache if needed, ignoring context parameters specs from
     other services.

-    Adds the
+    Adds the following specs if not already present:
+
+    - `watchdog_polling_delay_seconds`
+    - `availability_check_delay_seconds`
     """
     if PARAMETERS_KEY not in app.config:
         app.config[PARAMETERS_KEY] = []
@@ -191,14 +194,28 @@ def _get_contextparameter_spec(app: Flask, name: str) -> Optional[Dict[str, Any]
     app.config[PARAMETERS_KEY] += manifest.get('spec', {}).get(
         'contextParameters', []
     )
-    app.config[PARAMETERS_KEY]
-
-
-
-
-
-
-
+    known = {spec['name'] for spec in app.config[PARAMETERS_KEY]}
+    if 'watchdog_polling_delay_seconds' not in known:
+        app.config[PARAMETERS_KEY].append(
+            {
+                'name': 'watchdog_polling_delay_seconds',
+                'descriptiveName': 'files watchdog polling delay in seconds',
+                'default': 30,
+                'type': 'int',
+            }
+        )
+    if 'availability_check_delay_seconds' not in known:
+        app.config[PARAMETERS_KEY].append(
+            {
+                'name': 'availability_check_delay_seconds',
+                'deprecatedNames': ['availability_check_delay'],
+                'descriptiveName': 'availability check frequency in seconds',
+                'type': 'int',
+                'default': 10,
+                'minValue': 10,
+            }
+        )
+
     app.logger.info('Configuration:')
     parameters = app.config[PARAMETERS_KEY]
     try:
opentf/toolkit/__init__.py
CHANGED
@@ -58,6 +58,7 @@ DISPATCHQUEUE_KEY = '__dispatch queue__'

 WATCHDOG_POLLING_DELAY_SECONDS = 30
 WATCHDOG_POLLING_DELAY_KEY = 'watchdog_polling_delay_seconds'
+AVAILABILITY_CHECK_DELAY_SECONDS = 'availability_check_delay_seconds'

 Handler = Callable[[Dict[str, Any]], Any]

@@ -398,6 +399,10 @@ def watch_file(plugin: Flask, path: str, handler, *args, **kwargs) -> None:
     a file path (a string). It may take additional parameters. It will
     be called whenever the file changes.

+    The watchdog polls every 30 seconds by default. This can be
+    adjusted by setting the `watchdog_polling_delay_seconds` context
+    parameter (but it cannot be more frequent).
+
     # Required parameters

     - plugin: a Flask application
@@ -460,7 +465,7 @@ def watch_and_notify(
     - items: an iterable
     - notify: a function of no arguments
     """
-    polling_delay = get_context_parameter(plugin,
+    polling_delay = get_context_parameter(plugin, AVAILABILITY_CHECK_DELAY_SECONDS)

     plugin.logger.debug('Starting watch notifier thread.')
     threading.Thread(
opentf/toolkit/channels.py
CHANGED
@@ -1,4 +1,4 @@
-# Copyright (c) 2021
+# Copyright (c) 2021 Henix, Henix.fr
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -43,7 +43,7 @@ WARNING_COMMAND = re.compile(r'^::warning(\s+(.*)+)?::(.*)$')
 ERROR_COMMAND = re.compile(r'^::error(\s+(.*)+)?::(.*)$')
 STOPCOMMANDS_COMMAND = re.compile(r'^::stop-commands::(\w+)$')
 ADDMASK_COMMAND = re.compile(r'^::add-mask::(.*)$')
-PUT_FILE_COMMAND = re.compile(r'^::put
+PUT_FILE_COMMAND = re.compile(r'^::put\s+file=(.*?)\s*::(.*?)\s*$')


 ## step sequence IDs
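
A quick check of what the corrected `PUT_FILE_COMMAND` pattern captures (the command line itself is illustrative):

```python
import re

PUT_FILE_COMMAND = re.compile(r'^::put\s+file=(.*?)\s*::(.*?)\s*$')

match = PUT_FILE_COMMAND.match('::put file=report.html::data/report.html')
assert match is not None
assert match.group(1) == 'report.html'       # the file= parameter
assert match.group(2) == 'data/report.html'  # the value after ::
```
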
@@ -136,7 +136,8 @@ OPENTF_VARIABLES_TYPE = 'application/vnd.opentestfactory.opentf-variables'
 OPENTF_VARIABLES_REGEX = re.compile(r'^(export|set)\s(\"?.+)$')
 OPENTF_VARIABLES_NAME_REGEX = re.compile(r'^[a-zA-Z0-9_]+$')

-
+
+## Variables helpers


 def make_variable_linux(name: str, variable: Union[str, Dict[str, Any]]) -> str:
@@ -187,27 +188,92 @@ def _add_default_variables(
     script.append(VARIABLE_MAKER[runner_os]('CI', 'true'))


+def _get_opentf_variables_path(metadata: Dict[str, Any]) -> str:
+    return VARIABLES_TEMPLATE[metadata['channel_os']].format(
+        job_id=metadata['job_id'], root=metadata['channel_temp']
+    )
+
+
+def _get_opentf_variables(path: str) -> Dict[str, str]:
+    variables = {}
+    with open(path, 'r') as f:
+        for line in f.readlines():
+            if '=' not in line:
+                continue
+            line = line.strip()
+            if set_export := OPENTF_VARIABLES_REGEX.match(line):
+                line = set_export.group(2)
+            if line.startswith('"'):
+                line = line[1:-1]
+            key, _, value = line.partition('=')
+            if OPENTF_VARIABLES_NAME_REGEX.match(key):
+                variables[key] = value
+    try:
+        os.remove(path)
+    except FileNotFoundError:
+        pass
+    return variables
+
+
+def process_opentf_variables(result: Dict[str, Any]) -> None:
+    """Process OPENTF_VARIABLES attachment if available.
+
+    If OPENTF_VARIABLES is in the attachments, move its content to
+    the variables field of the result.
+    """
+    if attachments := result['metadata'].get('attachments'):
+        variables = {}
+        for path, data in attachments.copy().items():
+            if data.get('type') == OPENTF_VARIABLES_TYPE:
+                variables = _get_opentf_variables(path)
+                del result['metadata']['attachments'][path]
+                result['attachments'].remove(path)
+        if not result['metadata']['attachments']:
+            del result['metadata']['attachments']
+        if not result['attachments']:
+            del result['attachments']
+        result['variables'] = variables
+
+
+# Workflow commands helpers
+
+
 def _make_attachment_url(
     metadata: Dict[str, Any],
     remote: str,
     separator: str,
-
+    is_artifact: bool = False,
     name: Optional[str] = None,
 ) -> Tuple[str, str]:
+    """Prepare attachment URL.
+
+    Attachment URLs are of the form:
+
+    ```
+    /tmp/{workflow_id}-{uuid}_WR_{name} # artifacts
+    /tmp/{job_id}-{uuid}_{step_sequence_id}_{name} # attachments
+    ```
+    """
     uuid = make_uuid()
-    prefix = metadata['workflow_id'] if
-    suffix = 'WR' if
+    prefix = metadata['workflow_id'] if is_artifact else metadata['job_id']
+    suffix = 'WR' if is_artifact else metadata['step_sequence_id']
     url = f'/tmp/{prefix}-{uuid}_{suffix}_{name or remote.split(separator)[-1]}'
     return url, uuid


-
+class CommandException(Exception):
+    pass
+
+
+def _get_cmd_params(args: str) -> Dict[str, str]:
+    """Extract workflow command parameters."""
     details = {}
     if args:
         for parameter in args.strip().split(','):
             if '=' not in parameter:
-
-
+                raise CommandException(
+                    f'Invalid workflow command parameter: {parameter}.'
+                )
             key, _, value = parameter.strip().partition('=')
             details[key] = value
     return details
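
An illustrative run of the parsing logic in `_get_opentf_variables`, inlined on a string instead of a file (the variable lines are made up): `export`/`set` prefixes are stripped, surrounding quotes are removed, and keys failing `OPENTF_VARIABLES_NAME_REGEX` are dropped.

```python
import re

OPENTF_VARIABLES_REGEX = re.compile(r'^(export|set)\s(\"?.+)$')
OPENTF_VARIABLES_NAME_REGEX = re.compile(r'^[a-zA-Z0-9_]+$')

content = 'export "FOO=bar"\nset BAZ=42\nno assignment here\nexport BAD-NAME=x\n'

variables = {}
for line in content.splitlines():
    if '=' not in line:
        continue
    line = line.strip()
    if set_export := OPENTF_VARIABLES_REGEX.match(line):
        line = set_export.group(2)
    if line.startswith('"'):
        line = line[1:-1]
    key, _, value = line.partition('=')
    if OPENTF_VARIABLES_NAME_REGEX.match(key):
        variables[key] = value

# BAD-NAME is rejected by the name regex; the quoted FOO is unquoted.
assert variables == {'FOO': 'bar', 'BAZ': '42'}
```
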
@@ -239,55 +305,22 @@ def _as_log(line: str, jobstate: JobState):
     return mask(line, jobstate).rstrip()


-def
-
-    with open(path, 'r') as f:
-        for line in f.readlines():
-            if '=' not in line:
-                continue
-            line = line.strip()
-            if set_export := OPENTF_VARIABLES_REGEX.match(line):
-                line = set_export.group(2)
-            if line.startswith('"'):
-                line = line[1:-1]
-            key, _, value = line.partition('=')
-            if OPENTF_VARIABLES_NAME_REGEX.match(key):
-                variables[key] = value
-    try:
-        os.remove(path)
-    except FileNotFoundError:
-        pass
-    return variables
-
+def process_upload(result: Dict[str, Any]) -> Dict[str, Any]:
+    """Process ExecutionResult event containing .metadata.upload flag.

-
-
+    Remove the artifact-related items from `result` (URLs starting with
+    `/tmp/{workflow_id}_WR_`).

-
-    the variables field of the result.
-    """
-    if attachments := result['metadata'].get('attachments'):
-        variables = {}
-        for path, data in attachments.copy().items():
-            if data.get('type') == OPENTF_VARIABLES_TYPE:
-                variables = _get_opentf_variables(path)
-                del result['metadata']['attachments'][path]
-                result['attachments'].remove(path)
-        if not result['metadata']['attachments']:
-            del result['metadata']['attachments']
-        if not result['attachments']:
-            del result['attachments']
-        result['variables'] = variables
+    # Required parameters

+    - result: a dictionary (an ExecutionResult command)

-
-    """Process ExecutionResult event containing .metadata.upload flag.
+    # Returned value

-
-    publish WorkflowResult.
+    A WorkflowResult event.
     """

-    def _filter_items(key: str):
+    def _filter_items(key: str) -> Tuple[List[str], Dict[str, Any]]:
         metadata = {}
         items = [item for item in result['attachments'] if item[pos:].startswith(key)]
         if items:
@@ -318,6 +351,53 @@ def process_upload(result: Dict[str, Any]):
     return make_event(WORKFLOWRESULT, metadata=metadata, attachments=upload)


+def _download_artifacts(
+    artifacts: List[str],
+    targeted_remote_path: str,
+    remote_path: str,
+    params: Dict[str, str],
+    _put: Callable[[str, str], None],
+    is_windows: bool,
+) -> bool:
+    """Download artifacts matching a pattern.
+
+    # Required parameters
+
+    - artifacts: a list of artifact names
+    - targeted_remote_path: a string
+    - remote_path: a string
+    - params: a dictionary
+    - _put: a function copying a local file to a remote environment
+    - is_windows: a boolean
+
+    # Returned value
+
+    A boolean indicating if artifacts were uploaded.
+    """
+    filename, pattern = params.get('file'), params.get('pattern')
+    if filename and pattern:
+        raise CommandException('Cannot specify both "file" and "pattern".')
+    if not filename and not pattern:
+        pattern = '*'
+    found = False
+    for artifact in artifacts:
+        artifact_name = artifact.split('_')[-1]
+        if filename and filename == artifact_name:
+            found = True
+            if not remote_path:
+                targeted_remote_path = core.join_path(
+                    targeted_remote_path, artifact_name, is_windows
+                )
+            _put(targeted_remote_path, artifact)
+        elif pattern and fnmatch.fnmatch(artifact_name, pattern):
+            found = True
+            pattern_path = core.join_path(
+                targeted_remote_path, artifact_name, is_windows
+            )
+            _put(pattern_path, artifact)
+    return found
+
+
 def process_output(
     event: Dict[str, Any],
     resp: int,
@@ -366,95 +446,73 @@ def process_output(
     May raise exceptions.
     """

-    def _get_targeted_path(remote_path: str):
+    def _get_targeted_path(remote_path: str) -> str:
         working_directory = core.join_path(
             metadata['job_id'], event.get('working-directory'), is_windows
         )
         return core.join_path(working_directory, remote_path, is_windows)

-    def _attach(remote: str, args: str,
+    def _attach(remote: str, args: str, is_artifact: bool = False) -> int:
         if is_windows:
             remote = ntpath.normpath(remote)
         try:
-
-                return 2
+            params = _get_cmd_params(args)
             attachment_url, uuid = _make_attachment_url(
-                metadata, remote, separator,
+                metadata, remote, separator, is_artifact, name=params.get('name')
             )
             params['uuid'] = uuid
             _get(remote, attachment_url)
             attachments_metadata[attachment_url] = params
             attachments.append(attachment_url)
             return resp
+        except CommandException as err:
+            logs.append(f'ERROR,{err.args[0]}')
         except Exception as err:
             logs.append(f'ERROR,Could not read {remote}: {err}.')
-
+        return 2

-    def _putfile(remote_path: str, data: str):
+    def _putfile(remote_path: str, data: str) -> int:
         targeted_remote_path = _get_targeted_path(remote_path)
         try:
             file_ = f'/tmp/in_{metadata["workflow_id"]}_{data}'
             if not os.path.exists(file_):
-                logs.append(f'ERROR,Invalid resources.files reference {data}.')
+                logs.append(f'ERROR,Invalid "resources.files" reference "{data}".')
                 return 2
             _put(targeted_remote_path, file_)
             return resp
         except Exception as err:
             logs.append(
-                f'ERROR,Could not send file {data} to remote path {remote_path}: {err}.'
+                f'ERROR,Could not send file "{data}" to remote path "{remote_path}": {err}.'
             )
             return 2

-    def _download(remote_path: str, args: str):
-        if not (artifacts := event['metadata'].get('artifacts')):
-            logs.append('ERROR,No artifacts available at workflow level.')
-            return 2
+    def _download(remote_path: str, args: str) -> int:
         try:
+            artifacts = metadata.get('artifacts', [])
             targeted_remote_path = _get_targeted_path(remote_path)
-
-
-
-
-                pattern = '*'
-            found = False
-            for artifact in artifacts:
-                artifact_name = artifact.split('_')[-1]
-                if filename and filename == artifact_name:
-                    found = True
-                    if not remote_path:
-                        targeted_remote_path = core.join_path(
-                            targeted_remote_path, artifact_name, is_windows
-                        )
-                    _put(targeted_remote_path, artifact)
-                    continue
-                if pattern and fnmatch.fnmatch(artifact_name, pattern):
-                    found = True
-                    pattern_path = core.join_path(
-                        targeted_remote_path, artifact_name, is_windows
-                    )
-                    _put(pattern_path, artifact)
-            if found:
+            params = _get_cmd_params(args)
+            if _download_artifacts(
+                artifacts, targeted_remote_path, remote_path, params, _put, is_windows
+            ):
                 return resp
             logs.append('ERROR,No artifact matching requested name/pattern found.')
-
+        except CommandException as err:
+            logs.append(f'ERROR,{err.args[0]}')
         except Exception as err:
             logs.append(
-                f'ERROR,Could not send artifacts to remote path {remote_path}: {err}.'
+                f'ERROR,Could not send artifacts to remote path "{remote_path}": {err}.'
            )
-
+            return 2

     metadata: Dict[str, Any] = event['metadata']
     is_windows: bool = metadata['channel_os'] == 'windows'
-    opentfvariables = VARIABLES_TEMPLATE[metadata['channel_os']].format(
-        job_id=metadata['job_id'], root=metadata['channel_temp']
-    )
     separator = '\\' if is_windows else '/'
     outputs = {}
     logs: List[str] = []
     attachments: List[str] = []
     attachments_metadata = {}

-
+    has_artifacts = False
     for line in stdout:
         # Parsing stdout for workflow commands
         if jobstate.stop_command:
@@ -465,8 +523,8 @@ def process_output(
         if wcmd := ATTACH_COMMAND.match(line):
             resp = _attach(wcmd.group(2), wcmd.group(1))
         elif wcmd := UPLOAD_COMMAND.match(line):
-
-            resp = _attach(wcmd.group(2), wcmd.group(1),
+            has_artifacts = True
+            resp = _attach(wcmd.group(2), wcmd.group(1), is_artifact=True)
         elif wcmd := DOWNLOAD_COMMAND.match(line):
             resp = _download(wcmd.group(2), wcmd.group(1))
         elif wcmd := PUT_FILE_COMMAND.match(line):
@@ -487,10 +545,9 @@ def process_output(
         del metadata['artifacts']

     if metadata['step_sequence_id'] != CHANNEL_RELEASE:
-        _attach(
+        _attach(_get_opentf_variables_path(metadata), f'type={OPENTF_VARIABLES_TYPE}')

     result = make_event(EXECUTIONRESULT, metadata=metadata, status=resp)
-
     if outputs:
         result['outputs'] = outputs
     if logs:
@@ -498,8 +555,9 @@
     if attachments:
         result['attachments'] = attachments
         result['metadata']['attachments'] = attachments_metadata
-    if
+    if has_artifacts:
         result['metadata']['upload'] = resp
+
     return result

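
For orientation, a sketch of step stdout lines this parsing loop dispatches. Only the `::put` spelling is confirmed by this diff; `::attach`, `::upload`, and `::download` are assumptions inferred from the regex names and the `::name params::value` shape of the other commands in this module, and the file names are made up:

```python
# Hypothetical step stdout; each line would be matched in turn against
# the *_COMMAND regexes in process_output.
stdout = [
    '::attach::/home/runner/report.html',       # _attach: step attachment
    '::upload::/home/runner/results.xml',       # _attach(..., is_artifact=True); sets has_artifacts
    '::download file=results.xml::target/dir',  # _download, via _download_artifacts
    '::put file=data.csv::data.csv',            # _putfile
]
```
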
{opentf_toolkit_nightly-0.62.0.dev1291.dist-info → opentf_toolkit_nightly-0.62.0.dev1298.dist-info}/METADATA
CHANGED

@@ -1,6 +1,6 @@
 Metadata-Version: 2.2
 Name: opentf-toolkit-nightly
-Version: 0.62.0.dev1291
+Version: 0.62.0.dev1298
 Summary: OpenTestFactory Orchestrator Toolkit
 Home-page: https://gitlab.com/henixdevelopment/open-source/opentestfactory/python-toolkit
 Author: Martin Lafaix
{opentf_toolkit_nightly-0.62.0.dev1291.dist-info → opentf_toolkit_nightly-0.62.0.dev1298.dist-info}/RECORD
CHANGED

@@ -1,4 +1,4 @@
-opentf/commons/__init__.py,sha256=
+opentf/commons/__init__.py,sha256=2hd1g14g9pnZl1uip6Sh90YSmLDWzkH1SGeZweRJ4z8,24180
 opentf/commons/auth.py,sha256=gXRp_0Tf3bfd65F4QiQmh6C6vR9y3ugag_0DSvozJFk,15898
 opentf/commons/config.py,sha256=RVSSdQhMle4oCo_z_AR2EQ4U6sUjSxw-qVBtjKuJVfo,10219
 opentf/commons/exceptions.py,sha256=7dhUXO8iyAbqVwlUKxZhgRzGqVcb7LkG39hFlm-VxIA,2407
@@ -55,11 +55,11 @@ opentf/schemas/opentestfactory.org/v1beta1/Workflow.json,sha256=QZ8mM9PhzsI9gTmw
 opentf/schemas/opentestfactory.org/v1beta2/ServiceConfig.json,sha256=rEvK2YWL5lG94_qYgR_GnLWNsaQhaQ-2kuZdWJr5NnY,3517
 opentf/scripts/launch_java_service.sh,sha256=S0jAaCuv2sZy0Gf2NGBuPX-eD531rcM-b0fNyhmzSjw,2423
 opentf/scripts/startup.py,sha256=vOGxl7xBcp1-_LsAKiOmeOqFl2iT81A2XRrXBLUrNi4,22785
-opentf/toolkit/__init__.py,sha256=
-opentf/toolkit/channels.py,sha256=
+opentf/toolkit/__init__.py,sha256=ohrde5mcMY26p64E0Z2XunZAWYOiEkXKTg5E1J4TGGc,23571
+opentf/toolkit/channels.py,sha256=6qKSsAgq_oJpuDRiKqVUz-EAjdfikcCG3SFAGmKZdhQ,25551
 opentf/toolkit/core.py,sha256=fqnGgaYnuVcd4fyeNIwpc0QtyUo7jsKeVgdkBfY3iqo,9443
-opentf_toolkit_nightly-0.62.0.
-opentf_toolkit_nightly-0.62.0.
-opentf_toolkit_nightly-0.62.0.
-opentf_toolkit_nightly-0.62.0.
-opentf_toolkit_nightly-0.62.0.
+opentf_toolkit_nightly-0.62.0.dev1298.dist-info/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
+opentf_toolkit_nightly-0.62.0.dev1298.dist-info/METADATA,sha256=zPOTouTQoLWUCjDEAvzyd6zBD72RgfGlTL9UGDTITuw,2192
+opentf_toolkit_nightly-0.62.0.dev1298.dist-info/WHEEL,sha256=jB7zZ3N9hIM9adW7qlTAyycLYW9npaWKLRzaoVcLKcM,91
+opentf_toolkit_nightly-0.62.0.dev1298.dist-info/top_level.txt,sha256=_gPuE6GTT6UNXy1DjtmQSfCcZb_qYA2vWmjg7a30AGk,7
+opentf_toolkit_nightly-0.62.0.dev1298.dist-info/RECORD,,
{opentf_toolkit_nightly-0.62.0.dev1291.dist-info → opentf_toolkit_nightly-0.62.0.dev1298.dist-info}/LICENSE
File without changes

{opentf_toolkit_nightly-0.62.0.dev1291.dist-info → opentf_toolkit_nightly-0.62.0.dev1298.dist-info}/WHEEL
File without changes

{opentf_toolkit_nightly-0.62.0.dev1291.dist-info → opentf_toolkit_nightly-0.62.0.dev1298.dist-info}/top_level.txt
File without changes