opentf-toolkit-nightly 0.62.0.dev1295__py3-none-any.whl → 0.62.0.dev1302__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- opentf/scripts/startup.py +23 -13
- opentf/toolkit/channels.py +157 -99
- {opentf_toolkit_nightly-0.62.0.dev1295.dist-info → opentf_toolkit_nightly-0.62.0.dev1302.dist-info}/METADATA +1 -1
- {opentf_toolkit_nightly-0.62.0.dev1295.dist-info → opentf_toolkit_nightly-0.62.0.dev1302.dist-info}/RECORD +7 -7
- {opentf_toolkit_nightly-0.62.0.dev1295.dist-info → opentf_toolkit_nightly-0.62.0.dev1302.dist-info}/WHEEL +1 -1
- {opentf_toolkit_nightly-0.62.0.dev1295.dist-info → opentf_toolkit_nightly-0.62.0.dev1302.dist-info}/LICENSE +0 -0
- {opentf_toolkit_nightly-0.62.0.dev1295.dist-info → opentf_toolkit_nightly-0.62.0.dev1302.dist-info}/top_level.txt +0 -0
opentf/scripts/startup.py
CHANGED
@@ -54,6 +54,7 @@ ENVIRONMENT_VARIABLES = {
     'DEBUG_LEVEL': 'INFO',
     'HTTP_PROXY': None,
     'HTTPS_PROXY': None,
+    'JAVA_TRUSTSTORE': None,
     'KEY_SIZE': 4096,
     'NO_PROXY': None,
     'OPENTF_TELEMETRY': None,
@@ -101,7 +102,7 @@ def _get_env_int(var: str) -> int:
     except ValueError:
         val = ENVIRONMENT_VARIABLES[var]
         logging.warning(
-            …
+            'Environment variable "%s" not an integer, defaulting to %d.',
             var,
             val,
         )
@@ -228,16 +229,16 @@ def parse_and_start(
         return result
     for path in paths:
         for entry in os.walk(path):
-            logging.debug(…
+            logging.debug('Reading path "%s".', entry[0])
             if item not in entry[2]:
                 logging.debug('(No manifest found in path.)')
                 continue
-            logging.debug(…
+            logging.debug('(Found a "%s" manifest, parsing.)', item)
             with open(os.path.join(entry[0], item), 'r', encoding='utf-8') as manifests:
                 for manifest in yaml.safe_load_all(manifests):
                     if disabled and manifest['metadata']['name'].lower() in disabled:
                         logging.debug(
-                            …
+                            '(Plugin "%s" explicitly disabled, ignoring.)',
                             manifest['metadata']['name'],
                         )
                         continue
@@ -308,7 +309,7 @@ def maybe_start_otelcol():
             logging.error('Failed to start OpenTelemetry Collector: %s.', str(err))
     elif telemetry:
         logging.warning(
-            'Unexpected OPENTF_TELEMETRY environment variable value %s (accepted values are %s).',
+            'Unexpected OPENTF_TELEMETRY environment variable value "%s" (accepted values are %s).',
             telemetry,
             ', '.join(expected),
         )
@@ -412,7 +413,7 @@ def maybe_generate_token() -> None:
     if public_key := os.environ.get('PUBLIC_KEY'):
         if len(public_key.split()) < 2:
             logging.error(
-                …
+                'PUBLIC_KEY must be of the form: "type-name base64-encoded-ssh-public-key [optional comment]", got: %s.',
                 public_key,
             )
             sys.exit(1)
@@ -436,7 +437,7 @@ def maybe_populate_keystore() -> None:
     if (ca_bundle := os.environ.get('CURL_CA_BUNDLE')) is None:
         return
     if not os.path.isfile(ca_bundle):
-        logging.error(…
+        logging.error('CURL_CA_BUNDLE "%s" does not exist, aborting.', ca_bundle)
         sys.exit(1)

     with open(ca_bundle, 'r', encoding='utf-8') as bundle_file:
@@ -444,11 +445,20 @@ def maybe_populate_keystore() -> None:
     if not ca_list[-1].rstrip():
         ca_list.pop()

+    if truststore := os.environ.get('JAVA_TRUSTSTORE'):
+        logging.debug('Using truststore "%s".', truststore)
+        keystore = ('-keystore', truststore)
+    else:
+        logging.debug('Using default truststore.')
+        keystore = ('-cacerts',)
+
     for ca_counter, ca in enumerate(ca_list):
-        add_keystore_certificate(ca_counter, f'{ca}{CA_END}')
+        add_keystore_certificate(ca_counter, f'{ca}{CA_END}', keystore)


-def add_keystore_certificate(ca_counter: int, ca: str) -> None:
+def add_keystore_certificate(
+    ca_counter: int, ca: str, keystore: Tuple[str, ...]
+) -> None:
     """Add certificate to keystore.

     !!! warning
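Note: the new JAVA_TRUSTSTORE environment variable drives the branch above. A minimal usage sketch, with a hypothetical truststore path:

import os

# Hypothetical path: point certificate imports at a custom truststore.
os.environ['JAVA_TRUSTSTORE'] = '/opt/java/security/custom.jks'
# maybe_populate_keystore() then passes ('-keystore', <path>) to keytool
# instead of the default ('-cacerts',).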
@@ -469,7 +479,7 @@ def add_keystore_certificate(ca_counter: int, ca: str) -> None:
     ca_alias = f'opentf:{ca_counter}_{os.path.basename(ca_path)}'
     try:
         ca_file.write(ca)
-        logging.debug(…
+        logging.debug('File "%s" written.', ca_path)
     except IOError as err:
         logging.error('An error occurred while writing the file: %s.', err)
         sys.exit(1)
@@ -483,10 +493,10 @@ def add_keystore_certificate(ca_counter: int, ca: str) -> None:
             ca_alias,
             '-file',
             ca_path,
-            '-cacerts',
             '-storepass',
             'changeit',
             '-noprompt',
+            *keystore,
         ],
         stdout=subprocess.PIPE,
         stderr=subprocess.STDOUT,
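Note: with the change above, the keystore tuple ends the keytool argument list, so one call site covers both the default and a custom store. A sketch of the assembled command; 'keytool', '-importcert' and '-alias' are assumptions not shown in this hunk:

cmd = [
    'keytool',              # assumed binary
    '-importcert',          # assumed subcommand
    '-alias', ca_alias,     # assumed flag pairing; ca_alias is shown above
    '-file', ca_path,
    '-storepass', 'changeit',
    '-noprompt',
    *keystore,              # ('-cacerts',) or ('-keystore', truststore)
]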
@@ -514,14 +524,14 @@ def _ensure_abac_if_defined(name, value):
     if not OPENTF_AUTHORIZATION_MODE:
         logging.error(
             '{0} is defined but OPENTF_AUTHORIZATION_MODE is undefined.'
-            …
+            ' OPENTF_AUTHORIZATION_MODE must include "ABAC" to use {0}.'.format(
                 name
             )
         )
         sys.exit(1)
     if 'ABAC' not in OPENTF_AUTHORIZATION_MODE.split(','):
         logging.error(
-            …
+            'OPENTF_AUTHORIZATION_MODE must include "ABAC" to use %s.', name
         )
         sys.exit(1)
     if not os.path.isfile(value):
opentf/toolkit/channels.py
CHANGED
@@ -1,4 +1,4 @@
-# Copyright (c) 2021…
+# Copyright (c) 2021 Henix, Henix.fr
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -43,7 +43,7 @@ WARNING_COMMAND = re.compile(r'^::warning(\s+(.*)+)?::(.*)$')
 ERROR_COMMAND = re.compile(r'^::error(\s+(.*)+)?::(.*)$')
 STOPCOMMANDS_COMMAND = re.compile(r'^::stop-commands::(\w+)$')
 ADDMASK_COMMAND = re.compile(r'^::add-mask::(.*)$')
-PUT_FILE_COMMAND = re.compile(r'^::put…
+PUT_FILE_COMMAND = re.compile(r'^::put\s+file=(.*?)\s*::(.*?)\s*$')


 ## step sequence IDs
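Note: the completed PUT_FILE_COMMAND regex captures two groups. A quick check with hypothetical values (per the `_putfile` error messages later in this diff, group 1 is the `resources.files` reference and group 2 the remote path):

import re

PUT_FILE_COMMAND = re.compile(r'^::put\s+file=(.*?)\s*::(.*?)\s*$')

m = PUT_FILE_COMMAND.match('::put file=settings.xml::conf/settings.xml')
assert m is not None
assert m.group(1) == 'settings.xml'       # resources.files reference
assert m.group(2) == 'conf/settings.xml'  # remote path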
@@ -136,7 +136,8 @@ OPENTF_VARIABLES_TYPE = 'application/vnd.opentestfactory.opentf-variables'
 OPENTF_VARIABLES_REGEX = re.compile(r'^(export|set)\s(\"?.+)$')
 OPENTF_VARIABLES_NAME_REGEX = re.compile(r'^[a-zA-Z0-9_]+$')

-…
+
+## Variables helpers


 def make_variable_linux(name: str, variable: Union[str, Dict[str, Any]]) -> str:
@@ -187,27 +188,92 @@ def _add_default_variables(
     script.append(VARIABLE_MAKER[runner_os]('CI', 'true'))


+def _get_opentf_variables_path(metadata: Dict[str, Any]) -> str:
+    return VARIABLES_TEMPLATE[metadata['channel_os']].format(
+        job_id=metadata['job_id'], root=metadata['channel_temp']
+    )
+
+
+def _get_opentf_variables(path: str) -> Dict[str, str]:
+    variables = {}
+    with open(path, 'r') as f:
+        for line in f.readlines():
+            if '=' not in line:
+                continue
+            line = line.strip()
+            if set_export := OPENTF_VARIABLES_REGEX.match(line):
+                line = set_export.group(2)
+            if line.startswith('"'):
+                line = line[1:-1]
+            key, _, value = line.partition('=')
+            if OPENTF_VARIABLES_NAME_REGEX.match(key):
+                variables[key] = value
+    try:
+        os.remove(path)
+    except FileNotFoundError:
+        pass
+    return variables
+
+
+def process_opentf_variables(result: Dict[str, Any]) -> None:
+    """Process OPENTF_VARIABLES attachment if available.
+
+    If OPENTF_VARIABLES is in the attachments, move its content to
+    the variables field of the result.
+    """
+    if attachments := result['metadata'].get('attachments'):
+        variables = {}
+        for path, data in attachments.copy().items():
+            if data.get('type') == OPENTF_VARIABLES_TYPE:
+                variables = _get_opentf_variables(path)
+                del result['metadata']['attachments'][path]
+                result['attachments'].remove(path)
+        if not result['metadata']['attachments']:
+            del result['metadata']['attachments']
+        if not result['attachments']:
+            del result['attachments']
+        result['variables'] = variables
+
+
+# Workflow commands helpers
+
+
 def _make_attachment_url(
     metadata: Dict[str, Any],
     remote: str,
     separator: str,
-    …
+    is_artifact: bool = False,
     name: Optional[str] = None,
 ) -> Tuple[str, str]:
+    """Prepare attachment URL.
+
+    Attachment URLs are of the form:
+
+    ```
+    /tmp/{workflow_id}-{uuid}_WR_{name}             # artifacts
+    /tmp/{job_id}-{uuid}_{step_sequence_id}_{name}  # attachments
+    ```
+    """
     uuid = make_uuid()
-    prefix = metadata['workflow_id'] if…
-    suffix = 'WR' if…
+    prefix = metadata['workflow_id'] if is_artifact else metadata['job_id']
+    suffix = 'WR' if is_artifact else metadata['step_sequence_id']
     url = f'/tmp/{prefix}-{uuid}_{suffix}_{name or remote.split(separator)[-1]}'
     return url, uuid


-…
+class CommandException(Exception):
+    pass
+
+
+def _get_cmd_params(args: str) -> Dict[str, str]:
+    """Extract workflow command parameters."""
     details = {}
     if args:
         for parameter in args.strip().split(','):
             if '=' not in parameter:
-                …
-                …
+                raise CommandException(
+                    f'Invalid workflow command parameter: {parameter}.'
+                )
             key, _, value = parameter.strip().partition('=')
             details[key] = value
     return details
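Note: to illustrate `_get_opentf_variables` as added above, a sketch with a hypothetical variables file. The helper strips the export/set prefix, unquotes the remainder, keeps only names matching OPENTF_VARIABLES_NAME_REGEX, then deletes the file:

import os
import tempfile

# Hypothetical file content, as a step might have produced it.
with tempfile.NamedTemporaryFile('w', delete=False) as f:
    f.write('export FOO=bar\nset "BAZ=qux quux"\nsome unrelated output\n')

variables = _get_opentf_variables(f.name)
assert variables == {'FOO': 'bar', 'BAZ': 'qux quux'}
assert not os.path.exists(f.name)  # the file is removed once parsed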
@@ -239,55 +305,22 @@ def _as_log(line: str, jobstate: JobState):
     return mask(line, jobstate).rstrip()


-def …
-    …
-    with open(path, 'r') as f:
-        for line in f.readlines():
-            if '=' not in line:
-                continue
-            line = line.strip()
-            if set_export := OPENTF_VARIABLES_REGEX.match(line):
-                line = set_export.group(2)
-            if line.startswith('"'):
-                line = line[1:-1]
-            key, _, value = line.partition('=')
-            if OPENTF_VARIABLES_NAME_REGEX.match(key):
-                variables[key] = value
-    try:
-        os.remove(path)
-    except FileNotFoundError:
-        pass
-    return variables
-
+def process_upload(result: Dict[str, Any]) -> Dict[str, Any]:
+    """Process ExecutionResult event containing .metadata.upload flag.

-    …
-    …
+    Remove the artifact-related items from `result` (URLs starting with
+    `/tmp/{workflow_id}_WR_`).

-    …
-    the variables field of the result.
-    """
-    if attachments := result['metadata'].get('attachments'):
-        variables = {}
-        for path, data in attachments.copy().items():
-            if data.get('type') == OPENTF_VARIABLES_TYPE:
-                variables = _get_opentf_variables(path)
-                del result['metadata']['attachments'][path]
-                result['attachments'].remove(path)
-        if not result['metadata']['attachments']:
-            del result['metadata']['attachments']
-        if not result['attachments']:
-            del result['attachments']
-        result['variables'] = variables
+    # Required parameters

+    - result: a dictionary (an ExecutionResult command)

-    …
-    """Process ExecutionResult event containing .metadata.upload flag.
+    # Returned value

-    …
-    publish WorkflowResult.
+    A WorkflowResult event.
     """

-    def _filter_items(key: str):
+    def _filter_items(key: str) -> Tuple[List[str], Dict[str, Any]]:
         metadata = {}
         items = [item for item in result['attachments'] if item[pos:].startswith(key)]
         if items:
@@ -318,6 +351,53 @@ def process_upload(result: Dict[str, Any]):
     return make_event(WORKFLOWRESULT, metadata=metadata, attachments=upload)


+def _download_artifacts(
+    artifacts: List[str],
+    targeted_remote_path: str,
+    remote_path: str,
+    params: Dict[str, str],
+    _put: Callable[[str, str], None],
+    is_windows: bool,
+) -> bool:
+    """Download artifacts matching a pattern.
+
+    # Required parameters
+
+    - artifacts: a list of artifact names
+    - targeted_remote_path: a string
+    - remote_path: a string
+    - params: a dictionary
+    - _put: a function copying a local file to a remote environment
+    - is_windows: a boolean
+
+    # Returned value
+
+    A boolean indicating if artifacts were uploaded.
+    """
+    filename, pattern = params.get('file'), params.get('pattern')
+    if filename and pattern:
+        raise CommandException('Cannot specify both "file" and "pattern".')
+    if not filename and not pattern:
+        pattern = '*'
+    found = False
+    for artifact in artifacts:
+        artifact_name = artifact.split('_')[-1]
+        if filename and filename == artifact_name:
+            found = True
+            if not remote_path:
+                targeted_remote_path = core.join_path(
+                    targeted_remote_path, artifact_name, is_windows
+                )
+            _put(targeted_remote_path, artifact)
+        elif pattern and fnmatch.fnmatch(artifact_name, pattern):
+            found = True
+            pattern_path = core.join_path(
+                targeted_remote_path, artifact_name, is_windows
+            )
+            _put(pattern_path, artifact)
+    return found
+
+
 def process_output(
     event: Dict[str, Any],
     resp: int,
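Note: `_download_artifacts` compares only the trailing segment of each artifact URL, `artifact.split('_')[-1]`. An illustration with hypothetical artifact URLs (of the `/tmp/{workflow_id}-{uuid}_WR_{name}` form documented above):

artifacts = [
    '/tmp/wf-42_WR_report.xml',
    '/tmp/wf-43_WR_build.log',
]
# params == {'file': 'report.xml'}  -> uploads report.xml only
# params == {'pattern': '*.log'}    -> uploads build.log only
# params == {}                      -> pattern defaults to '*': uploads both
# 'file' and 'pattern' together     -> raises CommandException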
@@ -366,95 +446,73 @@ def process_output(
     May raise exceptions.
     """

-    def _get_targeted_path(remote_path: str):
+    def _get_targeted_path(remote_path: str) -> str:
         working_directory = core.join_path(
             metadata['job_id'], event.get('working-directory'), is_windows
         )
         return core.join_path(working_directory, remote_path, is_windows)

-    def _attach(remote: str, args: str,…
+    def _attach(remote: str, args: str, is_artifact: bool = False) -> int:
         if is_windows:
             remote = ntpath.normpath(remote)
         try:
-            …
-                return 2
+            params = _get_cmd_params(args)
             attachment_url, uuid = _make_attachment_url(
-                metadata, remote, separator,…
+                metadata, remote, separator, is_artifact, name=params.get('name')
             )
             params['uuid'] = uuid
             _get(remote, attachment_url)
             attachments_metadata[attachment_url] = params
             attachments.append(attachment_url)
             return resp
+        except CommandException as err:
+            logs.append(f'ERROR,{err.args[0]}')
         except Exception as err:
             logs.append(f'ERROR,Could not read {remote}: {err}.')
-            …
+        return 2

-    def _putfile(remote_path: str, data: str):
+    def _putfile(remote_path: str, data: str) -> int:
         targeted_remote_path = _get_targeted_path(remote_path)
         try:
             file_ = f'/tmp/in_{metadata["workflow_id"]}_{data}'
             if not os.path.exists(file_):
-                logs.append(f'ERROR,Invalid resources.files reference {data}.')
+                logs.append(f'ERROR,Invalid "resources.files" reference "{data}".')
                 return 2
             _put(targeted_remote_path, file_)
             return resp
         except Exception as err:
             logs.append(
-                f'ERROR,Could not send file {data} to remote path {remote_path}: {err}.'
+                f'ERROR,Could not send file "{data}" to remote path "{remote_path}": {err}.'
             )
             return 2

-    def _download(remote_path: str, args: str):
-        if not (artifacts := event['metadata'].get('artifacts')):
-            logs.append('ERROR,No artifacts available at workflow level.')
-            return 2
+    def _download(remote_path: str, args: str) -> int:
         try:
+            artifacts = metadata.get('artifacts', [])
             targeted_remote_path = _get_targeted_path(remote_path)
-            …
-            …
-            …
-            …
-            pattern = '*'
-            found = False
-            for artifact in artifacts:
-                artifact_name = artifact.split('_')[-1]
-                if filename and filename == artifact_name:
-                    found = True
-                    if not remote_path:
-                        targeted_remote_path = core.join_path(
-                            targeted_remote_path, artifact_name, is_windows
-                        )
-                    _put(targeted_remote_path, artifact)
-                    continue
-                if pattern and fnmatch.fnmatch(artifact_name, pattern):
-                    found = True
-                    pattern_path = core.join_path(
-                        targeted_remote_path, artifact_name, is_windows
-                    )
-                    _put(pattern_path, artifact)
-            if found:
+            params = _get_cmd_params(args)
+            if _download_artifacts(
+                artifacts, targeted_remote_path, remote_path, params, _put, is_windows
+            ):
                 return resp
             logs.append('ERROR,No artifact matching requested name/pattern found.')
-            …
+        except CommandException as err:
+            logs.append(f'ERROR,{err.args[0]}')
         except Exception as err:
             logs.append(
-                f'ERROR,Could not send artifacts to remote path {remote_path}: {err}.'
+                f'ERROR,Could not send artifacts to remote path "{remote_path}": {err}.'
             )
-            …
+        return 2

     metadata: Dict[str, Any] = event['metadata']
     is_windows: bool = metadata['channel_os'] == 'windows'
-    opentfvariables = VARIABLES_TEMPLATE[metadata['channel_os']].format(
-        job_id=metadata['job_id'], root=metadata['channel_temp']
-    )
     separator = '\\' if is_windows else '/'
     outputs = {}
     logs: List[str] = []
     attachments: List[str] = []
     attachments_metadata = {}

-    …
+    has_artifacts = False
     for line in stdout:
         # Parsing stdout for workflow commands
         if jobstate.stop_command:
@@ -465,8 +523,8 @@ def process_output(
         if wcmd := ATTACH_COMMAND.match(line):
             resp = _attach(wcmd.group(2), wcmd.group(1))
         elif wcmd := UPLOAD_COMMAND.match(line):
-            …
-            resp = _attach(wcmd.group(2), wcmd.group(1),…
+            has_artifacts = True
+            resp = _attach(wcmd.group(2), wcmd.group(1), is_artifact=True)
         elif wcmd := DOWNLOAD_COMMAND.match(line):
             resp = _download(wcmd.group(2), wcmd.group(1))
         elif wcmd := PUT_FILE_COMMAND.match(line):
@@ -487,10 +545,9 @@ def process_output(
             del metadata['artifacts']

     if metadata['step_sequence_id'] != CHANNEL_RELEASE:
-        _attach(…
+        _attach(_get_opentf_variables_path(metadata), f'type={OPENTF_VARIABLES_TYPE}')

     result = make_event(EXECUTIONRESULT, metadata=metadata, status=resp)
-
     if outputs:
         result['outputs'] = outputs
     if logs:
@@ -498,8 +555,9 @@ def process_output(
     if attachments:
         result['attachments'] = attachments
         result['metadata']['attachments'] = attachments_metadata
-    if …
+    if has_artifacts:
         result['metadata']['upload'] = resp
+
     return result

{opentf_toolkit_nightly-0.62.0.dev1295.dist-info → opentf_toolkit_nightly-0.62.0.dev1302.dist-info}/METADATA
CHANGED

@@ -1,6 +1,6 @@
 Metadata-Version: 2.2
 Name: opentf-toolkit-nightly
-Version: 0.62.0.dev1295
+Version: 0.62.0.dev1302
 Summary: OpenTestFactory Orchestrator Toolkit
 Home-page: https://gitlab.com/henixdevelopment/open-source/opentestfactory/python-toolkit
 Author: Martin Lafaix
{opentf_toolkit_nightly-0.62.0.dev1295.dist-info → opentf_toolkit_nightly-0.62.0.dev1302.dist-info}/RECORD
CHANGED

@@ -54,12 +54,12 @@ opentf/schemas/opentestfactory.org/v1beta1/ServiceConfig.json,sha256=m5ZgWAKbutu
 opentf/schemas/opentestfactory.org/v1beta1/Workflow.json,sha256=QZ8mM9PhzsI9gTmwmKTWYNoRn--rtcM3L0PzgnPBfMU,15424
 opentf/schemas/opentestfactory.org/v1beta2/ServiceConfig.json,sha256=rEvK2YWL5lG94_qYgR_GnLWNsaQhaQ-2kuZdWJr5NnY,3517
 opentf/scripts/launch_java_service.sh,sha256=S0jAaCuv2sZy0Gf2NGBuPX-eD531rcM-b0fNyhmzSjw,2423
-opentf/scripts/startup.py,sha256=…
+opentf/scripts/startup.py,sha256=AcVXU2auPvqMb_6OpGzkVqrpgYV6vz7x_Rnv8YbAEkk,23114
 opentf/toolkit/__init__.py,sha256=ohrde5mcMY26p64E0Z2XunZAWYOiEkXKTg5E1J4TGGc,23571
-opentf/toolkit/channels.py,sha256=…
+opentf/toolkit/channels.py,sha256=6qKSsAgq_oJpuDRiKqVUz-EAjdfikcCG3SFAGmKZdhQ,25551
 opentf/toolkit/core.py,sha256=fqnGgaYnuVcd4fyeNIwpc0QtyUo7jsKeVgdkBfY3iqo,9443
-opentf_toolkit_nightly-0.62.0.dev1295.dist-info/…
-opentf_toolkit_nightly-0.62.0.dev1295.dist-info/…
-opentf_toolkit_nightly-0.62.0.dev1295.dist-info/…
-opentf_toolkit_nightly-0.62.0.dev1295.dist-info/…
-opentf_toolkit_nightly-0.62.0.dev1295.dist-info/…
+opentf_toolkit_nightly-0.62.0.dev1302.dist-info/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
+opentf_toolkit_nightly-0.62.0.dev1302.dist-info/METADATA,sha256=lva7suqvsswuh12Bjo-ZGIcaCepyWpzylYpxRlVUqQo,2192
+opentf_toolkit_nightly-0.62.0.dev1302.dist-info/WHEEL,sha256=52BFRY2Up02UkjOa29eZOS2VxUrpPORXg1pkohGGUS8,91
+opentf_toolkit_nightly-0.62.0.dev1302.dist-info/top_level.txt,sha256=_gPuE6GTT6UNXy1DjtmQSfCcZb_qYA2vWmjg7a30AGk,7
+opentf_toolkit_nightly-0.62.0.dev1302.dist-info/RECORD,,
{opentf_toolkit_nightly-0.62.0.dev1295.dist-info → opentf_toolkit_nightly-0.62.0.dev1302.dist-info}/LICENSE
File without changes

{opentf_toolkit_nightly-0.62.0.dev1295.dist-info → opentf_toolkit_nightly-0.62.0.dev1302.dist-info}/top_level.txt
File without changes