@ibm-cloud/cd-tools 1.15.8 → 1.15.10
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/cmd/copy-toolchain.js +1 -1
- package/cmd/utils/import-terraform.js +12 -12
- package/cmd/utils/logger.js +6 -1
- package/cmd/utils/terraform.js +44 -40
- package/cmd/utils/utils.js +3 -3
- package/cmd/utils/validate.js +2 -2
- package/package.json +1 -1
- package/test/utils/testUtils.js +4 -4
package/cmd/copy-toolchain.js
CHANGED
|
@@ -368,7 +368,7 @@ async function main(options) {
|
|
|
368
368
|
|
|
369
369
|
if (verbosity >= 1) logger.print(''); // newline for spacing
|
|
370
370
|
logger.info(`Toolchain "${sourceToolchainData['name']}" from ${sourceRegion} was cloned to "${targetToolchainName ?? sourceToolchainData['name']}" in ${targetRegion} ${applyErrors ? 'with some errors' : 'successfully'}, with ${numResourcesCreated} / ${numResourcesPlanned} resources created!`, LOG_STAGES.info, true);
|
|
371
|
-
if (hasS2SFailures) logger.warn(`One or more service-to-service auth policies could not be created, see ${outputDir}/.s2s-script-failures-${TIME_SUFFIX} for more details.`,
|
|
371
|
+
if (hasS2SFailures) logger.warn(`Warning! One or more service-to-service auth policies could not be created, see ${outputDir}/.s2s-script-failures-${TIME_SUFFIX} for more details.`, LOG_STAGES.info, true);
|
|
372
372
|
if (newTcId) logger.info(`Cloned toolchain: https://${CLOUD_PLATFORM}/devops/toolchains/${newTcId}?env_id=ibm:yp:${targetRegion}`, LOG_STAGES.info, true);
|
|
373
373
|
} else {
|
|
374
374
|
logger.info(`DRY_RUN: ${dryRun}, skipping terraform apply...`, LOG_STAGES.tf);
|
|
@@ -167,7 +167,7 @@ export async function importTerraform(token, apiKey, region, toolchainId, toolch
|
|
|
167
167
|
draftErrors = err;
|
|
168
168
|
});
|
|
169
169
|
// above is a temp fix for errors before post-processing
|
|
170
|
-
//
|
|
170
|
+
// "Insufficient initialization blocks" error is expected
|
|
171
171
|
|
|
172
172
|
let generatedFile = '';
|
|
173
173
|
try {
|
|
@@ -186,7 +186,7 @@ export async function importTerraform(token, apiKey, region, toolchainId, toolch
|
|
|
186
186
|
for (const [k, v] of Object.entries(value)) {
|
|
187
187
|
newTfFileObj['resource'][key] = { ...(newTfFileObj['resource'][key] ?? []), [k]: v[0] };
|
|
188
188
|
|
|
189
|
-
// remove empty tool, which breaks jsonToTf
|
|
189
|
+
// remove empty tool (if it exists), which breaks jsonToTf
|
|
190
190
|
try {
|
|
191
191
|
if (Object.keys(newTfFileObj['resource'][key][k]['source'][0]['properties'][0]['tool'][0]).length < 1) {
|
|
192
192
|
delete newTfFileObj['resource'][key][k]['source'][0]['properties'][0]['tool'];
|
|
@@ -195,7 +195,7 @@ export async function importTerraform(token, apiKey, region, toolchainId, toolch
|
|
|
195
195
|
// do nothing
|
|
196
196
|
}
|
|
197
197
|
|
|
198
|
-
// handle missing worker, which breaks terraform
|
|
198
|
+
// handle missing worker (if it exists), which breaks terraform
|
|
199
199
|
try {
|
|
200
200
|
if (newTfFileObj['resource'][key][k]['worker'][0]['id'] === null) {
|
|
201
201
|
delete newTfFileObj['resource'][key][k]['worker'];
|
|
@@ -209,7 +209,7 @@ export async function importTerraform(token, apiKey, region, toolchainId, toolch
|
|
|
209
209
|
if (v2 === null) delete newTfFileObj['resource'][key][k][k2];
|
|
210
210
|
}
|
|
211
211
|
|
|
212
|
-
// ignore null values in parameters
|
|
212
|
+
// ignore null values (if they exist) in parameters
|
|
213
213
|
try {
|
|
214
214
|
if (Object.keys(v[0]['parameters'][0]).length > 0) {
|
|
215
215
|
for (const [k2, v2] of Object.entries(v[0]['parameters'][0])) {
|
|
@@ -220,7 +220,7 @@ export async function importTerraform(token, apiKey, region, toolchainId, toolch
|
|
|
220
220
|
// do nothing
|
|
221
221
|
}
|
|
222
222
|
|
|
223
|
-
// ignore null values in source properties
|
|
223
|
+
// ignore null values (if they exist) in source properties
|
|
224
224
|
try {
|
|
225
225
|
if (Object.keys(v[0]['source'][0]['properties'][0]).length > 0) {
|
|
226
226
|
for (const [k2, v2] of Object.entries(v[0]['source'][0]['properties'][0])) {
|
|
@@ -262,10 +262,7 @@ export async function importTerraform(token, apiKey, region, toolchainId, toolch
|
|
|
262
262
|
if (propValue.startsWith(START_INDICATOR) && propValue.endsWith(END_INDICATOR)) {
|
|
263
263
|
// skip substitution for jsonencode case, don't want to mangle it
|
|
264
264
|
} else {
|
|
265
|
-
|
|
266
|
-
|
|
267
|
-
// TODO: remove extra backslash in newline replacement once provider is updated
|
|
268
|
-
newValue = newValue.replace(/\\/g, '\\\\').replace(/\n/g, '\\\\n').replace(/\r/g, '\\\\r').replace(/"/g, '\\"');
|
|
265
|
+
newValue = newValue.replace(/\\/g, '\\\\').replace(/\n/g, '\\n').replace(/\r/g, '\\r').replace(/"/g, '\\"');
|
|
269
266
|
}
|
|
270
267
|
newTfFileObj['resource'][key][k]['value'] = newValue;
|
|
271
268
|
}
|
|
@@ -314,16 +311,19 @@ export async function importTerraform(token, apiKey, region, toolchainId, toolch
|
|
|
314
311
|
|
|
315
312
|
// add repo url depends_on on second pass
|
|
316
313
|
for (const [key, value] of Object.entries(generatedFileJson['resource'])) {
|
|
317
|
-
for (const [k,
|
|
314
|
+
for (const [k, v] of Object.entries(value)) {
|
|
318
315
|
if (key === 'ibm_cd_tekton_pipeline_definition' || key === 'ibm_cd_tekton_pipeline_trigger') {
|
|
316
|
+
if (key === 'ibm_cd_tekton_pipeline_trigger' && !v['source']) continue; // skip triggers without source, which aren't tied to a repo
|
|
319
317
|
try {
|
|
320
318
|
const thisUrl = newTfFileObj['resource'][key][k]['source'][0]['properties'][0]['url'];
|
|
321
319
|
|
|
322
320
|
if (thisUrl in repoUrlMap) {
|
|
323
321
|
newTfFileObj['resource'][key][k]['depends_on'] = [`\${${repoUrlMap[thisUrl].type}.${repoUrlMap[thisUrl].name}}`];
|
|
322
|
+
} else {
|
|
323
|
+
newTfFileObj['resource'][key][k]['depends_on'] = []; // we will look for and remove these in terraform.js
|
|
324
324
|
}
|
|
325
|
-
} catch {
|
|
326
|
-
|
|
325
|
+
} catch (err) {
|
|
326
|
+
logger.dump(`\n[Warning] Could not add repo URL depends_on for resource "${k}": ${err.message}`);
|
|
327
327
|
}
|
|
328
328
|
}
|
|
329
329
|
}
|
package/cmd/utils/logger.js
CHANGED
|
@@ -62,7 +62,12 @@ class Logger {
|
|
|
62
62
|
const level = LEVELS[type] || LEVELS.log;
|
|
63
63
|
const formatted = (prefix ? this.#getFullPrefix(prefix) + ' ' : '') + `${level.color}${msg}${COLORS.reset}`;
|
|
64
64
|
console[level.method](formatted);
|
|
65
|
-
|
|
65
|
+
if (type === 'info') {
|
|
66
|
+
// leave out [INFO] type prefix in log file
|
|
67
|
+
this.logStream?.write(stripAnsi((prefix ? this.#getFullPrefix(prefix) + ' ' : '') + msg) + '\n');
|
|
68
|
+
} else {
|
|
69
|
+
this.logStream?.write(stripAnsi((prefix ? this.#getFullPrefix(prefix) + ' ' : '') + `[${type.toUpperCase()}] ` + msg) + '\n');
|
|
70
|
+
}
|
|
66
71
|
}
|
|
67
72
|
|
|
68
73
|
info(msg, prefix = '', force = false) { if (this.verbosity >= 1 || force) this.#baseLog('info', msg, prefix); }
|
package/cmd/utils/terraform.js
CHANGED
|
@@ -179,8 +179,8 @@ async function setupTerraformFiles(config) {
|
|
|
179
179
|
try {
|
|
180
180
|
newTfFileObj['resource']['ibm_cd_toolchain_tool_hostedgit'][k]['parameters'][0]['auth_type'] = 'oauth';
|
|
181
181
|
delete newTfFileObj['resource']['ibm_cd_toolchain_tool_hostedgit'][k]['parameters'][0]['api_token'];
|
|
182
|
-
} catch {
|
|
183
|
-
|
|
182
|
+
} catch (err) {
|
|
183
|
+
logger.dump(`\n[Warning] Could not convert auth_type for hostedgit tool "${k}": ${err.message}`);
|
|
184
184
|
}
|
|
185
185
|
|
|
186
186
|
try {
|
|
@@ -263,20 +263,24 @@ async function setupTerraformFiles(config) {
|
|
|
263
263
|
}
|
|
264
264
|
}
|
|
265
265
|
|
|
266
|
-
// set depends_on for references
|
|
267
|
-
if (
|
|
266
|
+
// set depends_on for references
|
|
267
|
+
if (newTfFileObj['resource']['ibm_cd_tekton_pipeline_trigger']) {
|
|
268
268
|
for (const [k, v] of Object.entries(newTfFileObj['resource']['ibm_cd_tekton_pipeline_trigger'])) {
|
|
269
|
+
if (!v['source']) continue; // skip triggers without source, which aren't tied to a repo
|
|
269
270
|
try {
|
|
270
271
|
const thisUrl = v['source'][0]['properties'][0]['url'];
|
|
271
|
-
if (
|
|
272
|
-
|
|
273
|
-
|
|
274
|
-
|
|
275
|
-
|
|
272
|
+
if (v['depends_on']?.length === 0) {
|
|
273
|
+
if (thisUrl && repoToTfName[thisUrl]) {
|
|
274
|
+
newTfFileObj['resource']['ibm_cd_tekton_pipeline_trigger'][k]['depends_on'] = [`ibm_cd_toolchain_tool_githubconsolidated.${repoToTfName[thisUrl]}`]
|
|
275
|
+
} else if (thisUrl && !repoToTfName[thisUrl]) {
|
|
276
|
+
// warn the user if the URL is not in the repoToTfName map
|
|
277
|
+
logger.warn(`Warning! Could not find a matching tool integration for ${thisUrl}`, LOG_STAGES.tf, true);
|
|
278
|
+
logger.warn(` - for definition found in ${v['pipeline_id']?.slice(2, -9)}`, LOG_STAGES.tf, true); // slice out the terraform address
|
|
279
|
+
delete newTfFileObj['resource']['ibm_cd_tekton_pipeline_trigger'][k]['depends_on'];
|
|
280
|
+
}
|
|
276
281
|
}
|
|
277
|
-
}
|
|
278
|
-
|
|
279
|
-
// do nothing
|
|
282
|
+
} catch (err) {
|
|
283
|
+
logger.dump(`\n[Warning] Could not set depends_on for tekton pipeline trigger "${k}": ${err.message}`);
|
|
280
284
|
}
|
|
281
285
|
}
|
|
282
286
|
}
|
|
@@ -284,6 +288,7 @@ async function setupTerraformFiles(config) {
|
|
|
284
288
|
// update GRIT urls
|
|
285
289
|
if (newTfFileObj['resource']['ibm_cd_tekton_pipeline_trigger']) {
|
|
286
290
|
for (const [k, v] of Object.entries(newTfFileObj['resource']['ibm_cd_tekton_pipeline_trigger'])) {
|
|
291
|
+
if (!v['source']) continue; // skip triggers without source, which aren't tied to a repo
|
|
287
292
|
try {
|
|
288
293
|
const thisUrl = v['source'][0]['properties'][0]['url'];
|
|
289
294
|
const newUrl = gritMapping[thisUrl];
|
|
@@ -291,29 +296,31 @@ async function setupTerraformFiles(config) {
|
|
|
291
296
|
if (newUrl) {
|
|
292
297
|
newTfFileObj['resource']['ibm_cd_tekton_pipeline_trigger'][k]['source'][0]['properties'][0]['url'] = newUrl;
|
|
293
298
|
}
|
|
294
|
-
}
|
|
295
|
-
|
|
296
|
-
// do nothing
|
|
299
|
+
} catch (err) {
|
|
300
|
+
logger.dump(`\n[Warning] Could not update GRIT URL for tekton pipeline trigger "${k}": ${err.message}`);
|
|
297
301
|
}
|
|
298
302
|
}
|
|
299
303
|
}
|
|
300
304
|
}
|
|
301
305
|
|
|
302
306
|
if (isCompact || resourceName === 'ibm_cd_tekton_pipeline_definition') {
|
|
303
|
-
// set depends_on for references
|
|
304
|
-
if (
|
|
307
|
+
// set depends_on for references
|
|
308
|
+
if (newTfFileObj['resource']['ibm_cd_tekton_pipeline_definition']) {
|
|
305
309
|
for (const [k, v] of Object.entries(newTfFileObj['resource']['ibm_cd_tekton_pipeline_definition'])) {
|
|
306
310
|
try {
|
|
307
311
|
const thisUrl = v['source'][0]['properties'][0]['url'];
|
|
308
|
-
if (
|
|
309
|
-
|
|
310
|
-
|
|
311
|
-
|
|
312
|
-
|
|
312
|
+
if (v['depends_on']?.length === 0) {
|
|
313
|
+
if (thisUrl && repoToTfName[thisUrl]) {
|
|
314
|
+
newTfFileObj['resource']['ibm_cd_tekton_pipeline_definition'][k]['depends_on'] = [`ibm_cd_toolchain_tool_githubconsolidated.${repoToTfName[thisUrl]}`]
|
|
315
|
+
} else if (thisUrl && !repoToTfName[thisUrl]) {
|
|
316
|
+
// warn the user if the URL is not in the repoToTfName map
|
|
317
|
+
logger.warn(`Warning! Could not find a matching tool integration for ${thisUrl}`, LOG_STAGES.tf, true);
|
|
318
|
+
logger.warn(` - for definition found in ${v['pipeline_id']?.slice(2, -9)}`, LOG_STAGES.tf, true); // slice out the terraform address
|
|
319
|
+
delete newTfFileObj['resource']['ibm_cd_tekton_pipeline_definition'][k]['depends_on'];
|
|
320
|
+
}
|
|
313
321
|
}
|
|
314
|
-
}
|
|
315
|
-
|
|
316
|
-
// do nothing
|
|
322
|
+
} catch (err) {
|
|
323
|
+
logger.dump(`\n[Warning] Could not set depends_on for tekton pipeline definition "${k}": ${err.message}`);
|
|
317
324
|
}
|
|
318
325
|
}
|
|
319
326
|
}
|
|
@@ -328,9 +335,8 @@ async function setupTerraformFiles(config) {
|
|
|
328
335
|
if (newUrl) {
|
|
329
336
|
newTfFileObj['resource']['ibm_cd_tekton_pipeline_definition'][k]['source'][0]['properties'][0]['url'] = newUrl;
|
|
330
337
|
}
|
|
331
|
-
}
|
|
332
|
-
|
|
333
|
-
// do nothing
|
|
338
|
+
} catch (err) {
|
|
339
|
+
logger.dump(`\n[Warning] Could not update GRIT URL for tekton pipeline definition "${k}": ${err.message}`);
|
|
334
340
|
}
|
|
335
341
|
}
|
|
336
342
|
}
|
|
@@ -354,13 +360,11 @@ async function setupTerraformFiles(config) {
|
|
|
354
360
|
if (thisValue.startsWith(START_INDICATOR) && thisValue.endsWith(END_INDICATOR)) {
|
|
355
361
|
// skip newline substitution for jsonencode case, don't want to mangle it
|
|
356
362
|
} else {
|
|
357
|
-
|
|
358
|
-
newTfFileObj['resource']['ibm_cd_tekton_pipeline_property'][k]['value'] = thisValue.replace(/\\/g, '\\\\').replace(/\n/g, '\\\\n').replace(/\r/g, '\\\\r').replace(/"/g, '\\"');
|
|
363
|
+
newTfFileObj['resource']['ibm_cd_tekton_pipeline_property'][k]['value'] = thisValue.replace(/\\/g, '\\\\').replace(/\n/g, '\\n').replace(/\r/g, '\\r').replace(/"/g, '\\"');
|
|
359
364
|
}
|
|
360
365
|
}
|
|
361
|
-
}
|
|
362
|
-
|
|
363
|
-
// do nothing
|
|
366
|
+
} catch (err) {
|
|
367
|
+
logger.dump(`\n[Warning] Could not process tekton pipeline property "${k}": ${err.message}`);
|
|
364
368
|
}
|
|
365
369
|
|
|
366
370
|
}
|
|
@@ -384,13 +388,11 @@ async function setupTerraformFiles(config) {
|
|
|
384
388
|
if (thisValue.startsWith(START_INDICATOR) && thisValue.endsWith(END_INDICATOR)) {
|
|
385
389
|
// skip newline substitution for jsonencode case, don't want to mangle it
|
|
386
390
|
} else {
|
|
387
|
-
|
|
388
|
-
newTfFileObj['resource']['ibm_cd_tekton_pipeline_trigger_property'][k]['value'] = thisValue.replace(/\\/g, '\\\\').replace(/\n/g, '\\\\n').replace(/\r/g, '\\\\r').replace(/"/g, '\\"');
|
|
391
|
+
newTfFileObj['resource']['ibm_cd_tekton_pipeline_trigger_property'][k]['value'] = thisValue.replace(/\\/g, '\\\\').replace(/\n/g, '\\n').replace(/\r/g, '\\r').replace(/"/g, '\\"');
|
|
389
392
|
}
|
|
390
393
|
}
|
|
391
|
-
}
|
|
392
|
-
|
|
393
|
-
// do nothing
|
|
394
|
+
} catch (err) {
|
|
395
|
+
logger.dump(`\n[Warning] Could not process tekton pipeline trigger property "${k}": ${err.message}`);
|
|
394
396
|
}
|
|
395
397
|
|
|
396
398
|
}
|
|
@@ -532,7 +534,8 @@ function replaceDependsOn(str) {
|
|
|
532
534
|
// get rid of the quotes
|
|
533
535
|
return str.replaceAll(pattern, (match, s) => ` depends_on = \[\n ${s.slice(1, s.length - 1)}\n ]`);
|
|
534
536
|
}
|
|
535
|
-
} catch {
|
|
537
|
+
} catch (err) {
|
|
538
|
+
logger.dump(`\n[Warning] Could not replace depends_on in terraform string: ${err.message}`);
|
|
536
539
|
return str;
|
|
537
540
|
}
|
|
538
541
|
}
|
|
@@ -555,7 +558,8 @@ function addS2sScriptToToolchainTf(str, timeSuffix) {
|
|
|
555
558
|
// get rid of the quotes
|
|
556
559
|
return str.replace(pattern, (match, s1, s2) => `resource "ibm_cd_toolchain" "${s1}" {\n${s2}${provisionerStr(s1)}\n}`);
|
|
557
560
|
}
|
|
558
|
-
} catch {
|
|
561
|
+
} catch (err) {
|
|
562
|
+
logger.dump(`\n[Warning] Could not add S2S script to toolchain terraform: ${err.message}`);
|
|
559
563
|
return str;
|
|
560
564
|
}
|
|
561
565
|
}
|
package/cmd/utils/utils.js
CHANGED
|
@@ -31,7 +31,7 @@ export async function promptUserYesNo(question) {
|
|
|
31
31
|
|
|
32
32
|
const answer = await rl.question(fullPrompt);
|
|
33
33
|
|
|
34
|
-
logger.dump(fullPrompt +
|
|
34
|
+
logger.dump(fullPrompt + answer + '\n\n');
|
|
35
35
|
rl.close();
|
|
36
36
|
|
|
37
37
|
const normalized = answer.toLowerCase().trim();
|
|
@@ -56,7 +56,7 @@ export async function promptUserConfirmation(question, expectedAns, exitMsg) {
|
|
|
56
56
|
const fullPrompt = question + `\n\nOnly '${expectedAns}' will be accepted to proceed. (Ctrl-C to abort)\n\nEnter a value: `;
|
|
57
57
|
const answer = await rl.question(fullPrompt);
|
|
58
58
|
|
|
59
|
-
logger.dump(fullPrompt +
|
|
59
|
+
logger.dump(fullPrompt + answer + '\n\n');
|
|
60
60
|
|
|
61
61
|
if (answer.toLowerCase().trim() !== expectedAns) {
|
|
62
62
|
if (exitMsg) logger.print('\n' + exitMsg);
|
|
@@ -129,7 +129,7 @@ export async function promptUserSelection(question, choices) {
|
|
|
129
129
|
|
|
130
130
|
while (true) {
|
|
131
131
|
const answer = await rl.question(promptText);
|
|
132
|
-
logger.dump(promptText +
|
|
132
|
+
logger.dump(promptText + answer + '\n\n');
|
|
133
133
|
|
|
134
134
|
index = parseInt(answer.trim(), 10) - 1;
|
|
135
135
|
|
package/cmd/utils/validate.js
CHANGED
|
@@ -104,7 +104,7 @@ async function warnDuplicateName(token, accountId, tcName, srcRegion, targetRegi
|
|
|
104
104
|
|
|
105
105
|
if (hasBoth) {
|
|
106
106
|
// warning! prompt user to cancel, rename (e.g. add a suffix) or continue
|
|
107
|
-
logger.warn(
|
|
107
|
+
logger.warn(`Warning! A toolchain named "${tcName}" already exists in:\n - Region: ${targetRegion}\n - Resource Group: ${targetResourceGroupName} (${targetResourceGroupId})`, LOG_STAGES.setup, true);
|
|
108
108
|
logger.print(''); // newline for spacing
|
|
109
109
|
|
|
110
110
|
if (!skipPrompt) {
|
|
@@ -113,7 +113,7 @@ async function warnDuplicateName(token, accountId, tcName, srcRegion, targetRegi
|
|
|
113
113
|
} else {
|
|
114
114
|
if (hasSameRegion) {
|
|
115
115
|
// soft warning of confusion
|
|
116
|
-
logger.warn(
|
|
116
|
+
logger.warn(`Warning! A toolchain named "${tcName}" already exists in:\n - Region: ${targetRegion}`, LOG_STAGES.setup, true);
|
|
117
117
|
}
|
|
118
118
|
}
|
|
119
119
|
|
package/package.json
CHANGED
package/test/utils/testUtils.js
CHANGED
|
@@ -160,10 +160,10 @@ export async function deleteCreatedToolchains(toolchainsToDelete) {
|
|
|
160
160
|
export async function assertExecError(fullCommand, expectedMessage, options, assertionFn) {
|
|
161
161
|
try {
|
|
162
162
|
const output = await execCommand(fullCommand, options);
|
|
163
|
-
logger.dump(output);
|
|
163
|
+
logger.dump(output + '\n');
|
|
164
164
|
throw new Error('Expected command to fail but it succeeded');
|
|
165
165
|
} catch (e) {
|
|
166
|
-
logger.dump(e.message);
|
|
166
|
+
logger.dump(e.message + '\n');
|
|
167
167
|
if (assertionFn) {
|
|
168
168
|
const res = assertionFn(e.message);
|
|
169
169
|
if (res instanceof Promise) await res;
|
|
@@ -178,7 +178,7 @@ export async function assertExecError(fullCommand, expectedMessage, options, ass
|
|
|
178
178
|
export async function assertPtyOutput(fullCommand, expectedMessage, options, assertionFn) {
|
|
179
179
|
try {
|
|
180
180
|
const output = await runPtyProcess(fullCommand, options);
|
|
181
|
-
logger.dump(output);
|
|
181
|
+
logger.dump(output + '\n');
|
|
182
182
|
if (assertionFn) {
|
|
183
183
|
const res = assertionFn(output);
|
|
184
184
|
if (res instanceof Promise) await res;
|
|
@@ -189,7 +189,7 @@ export async function assertPtyOutput(fullCommand, expectedMessage, options, ass
|
|
|
189
189
|
}
|
|
190
190
|
return parseTcIdAndRegion(output);
|
|
191
191
|
} catch (e) {
|
|
192
|
-
logger.dump(e.message);
|
|
192
|
+
logger.dump(e.message + '\n');
|
|
193
193
|
throw (e);
|
|
194
194
|
}
|
|
195
195
|
}
|