@backstage/plugin-scaffolder-backend 0.15.13 → 0.15.17
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +46 -0
- package/assets/nunjucks.js.txt +10385 -0
- package/config.d.ts +12 -12
- package/dist/index.cjs.js +394 -194
- package/dist/index.cjs.js.map +1 -1
- package/dist/index.d.ts +17 -4
- package/package.json +19 -15
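
The headline change in this version range is a security hardening of template rendering: rather than configuring nunjucks in-process, the package now bundles the nunjucks source as assets/nunjucks.js.txt (the +10385-line file above) and evaluates it inside a vm2 sandbox via a new internal SecureTemplater class, so user-supplied template strings never execute in the backend process. Also visible below: output paths are resolved with resolveSafeChildPath, a publish:gitlab:merge-request action is added, and the cookiecutter action is only registered when a containerRunner is passed to createBuiltinActions. A minimal sketch of the new rendering path, using names as they appear in the diff (SecureTemplater is bundle-internal rather than a documented public API, and the parseRepoUrl stub here is hypothetical):

// loadRenderer() reads assets/nunjucks.js.txt, runs it inside a vm2 VM, and
// returns a synchronous render function; values cross the sandbox boundary as JSON.
const renderTemplate = await SecureTemplater.loadRenderer({
  cookiecutterCompat: false,
  // hypothetical stub; the real callers resolve repo URLs against the configured integrations
  parseRepoUrl: (url) => ({ host: 'gitlab.com', owner: 'group', repo: 'project' }),
});
renderTemplate('${{ values.name }}', { values: { name: 'demo' } }); // => 'demo'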
package/dist/index.cjs.js
CHANGED
@@ -5,17 +5,17 @@ Object.defineProperty(exports, '__esModule', { value: true });
 var errors = require('@backstage/errors');
 var catalogModel = require('@backstage/catalog-model');
 var fs = require('fs-extra');
-var path = require('path');
 var yaml = require('yaml');
 var backendCommon = require('@backstage/backend-common');
+var path = require('path');
 var globby = require('globby');
-var nunjucks = require('nunjucks');
 var isbinaryfile = require('isbinaryfile');
+var vm2 = require('vm2');
 var pluginScaffolderBackendModuleCookiecutter = require('@backstage/plugin-scaffolder-backend-module-cookiecutter');
 var child_process = require('child_process');
 var stream = require('stream');
 var azureDevopsNodeApi = require('azure-devops-node-api');
-var fetch = require('
+var fetch = require('node-fetch');
 var integration = require('@backstage/integration');
 var rest = require('@octokit/rest');
 var lodash = require('lodash');
@@ -27,6 +27,7 @@ var luxon = require('luxon');
 var Handlebars = require('handlebars');
 var winston = require('winston');
 var jsonschema = require('jsonschema');
+var nunjucks = require('nunjucks');
 var express = require('express');
 var Router = require('express-promise-router');
 var os = require('os');
@@ -44,26 +45,23 @@ function _interopNamespace(e) {
 var d = Object.getOwnPropertyDescriptor(e, k);
 Object.defineProperty(n, k, d.get ? d : {
 enumerable: true,
-get: function () {
-return e[k];
-}
+get: function () { return e[k]; }
 });
 }
 });
 }
-n[
+n["default"] = e;
 return Object.freeze(n);
 }
 
 var fs__default = /*#__PURE__*/_interopDefaultLegacy(fs);
-var path__namespace = /*#__PURE__*/_interopNamespace(path);
-var path__default = /*#__PURE__*/_interopDefaultLegacy(path);
 var yaml__namespace = /*#__PURE__*/_interopNamespace(yaml);
+var path__default = /*#__PURE__*/_interopDefaultLegacy(path);
 var globby__default = /*#__PURE__*/_interopDefaultLegacy(globby);
-var nunjucks__default = /*#__PURE__*/_interopDefaultLegacy(nunjucks);
 var fetch__default = /*#__PURE__*/_interopDefaultLegacy(fetch);
 var Handlebars__namespace = /*#__PURE__*/_interopNamespace(Handlebars);
 var winston__namespace = /*#__PURE__*/_interopNamespace(winston);
+var nunjucks__default = /*#__PURE__*/_interopDefaultLegacy(nunjucks);
 var express__default = /*#__PURE__*/_interopDefaultLegacy(express);
 var Router__default = /*#__PURE__*/_interopDefaultLegacy(Router);
 var os__default = /*#__PURE__*/_interopDefaultLegacy(os);
@@ -73,7 +71,7 @@ const createTemplateAction = (templateAction) => {
 };
 
 function createCatalogRegisterAction(options) {
-const {catalogClient, integrations} = options;
+const { catalogClient, integrations } = options;
 return createTemplateAction({
 id: "catalog:register",
 description: "Registers entities from a catalog descriptor file in the workspace into the software catalog.",
@@ -122,12 +120,12 @@ function createCatalogRegisterAction(options) {
 },
 async handler(ctx) {
 var _a;
-const {input} = ctx;
+const { input } = ctx;
 let catalogInfoUrl;
 if ("catalogInfoUrl" in input) {
 catalogInfoUrl = input.catalogInfoUrl;
 } else {
-const {repoContentsUrl, catalogInfoPath = "/catalog-info.yaml"} = input;
+const { repoContentsUrl, catalogInfoPath = "/catalog-info.yaml" } = input;
 const integration = integrations.byUrl(repoContentsUrl);
 if (!integration) {
 throw new errors.InputError(`No integration found for host ${repoContentsUrl}`);
@@ -141,15 +139,15 @@ function createCatalogRegisterAction(options) {
 await catalogClient.addLocation({
 type: "url",
 target: catalogInfoUrl
-}, ctx.token ? {token: ctx.token} : {});
+}, ctx.token ? { token: ctx.token } : {});
 try {
 const result = await catalogClient.addLocation({
 dryRun: true,
 type: "url",
 target: catalogInfoUrl
-}, ctx.token ? {token: ctx.token} : {});
+}, ctx.token ? { token: ctx.token } : {});
 if (result.entities.length > 0) {
-const {entities} = result;
+const { entities } = result;
 const entity = (_a = entities.find((e) => !e.metadata.name.startsWith("generated-"))) != null ? _a : entities[0];
 ctx.output("entityRef", catalogModel.stringifyEntityRef(entity));
 }
@@ -181,8 +179,8 @@ function createCatalogWriteAction() {
 },
 async handler(ctx) {
 ctx.logStream.write(`Writing catalog-info.yaml`);
-const {entity} = ctx.input;
-await fs__default[
+const { entity } = ctx.input;
+await fs__default["default"].writeFile(backendCommon.resolveSafeChildPath(ctx.workspacePath, "catalog-info.yaml"), yaml__namespace.stringify(entity));
 }
 });
 }
@@ -226,7 +224,7 @@ ${files.map((f) => ` - ${path.relative(ctx.workspacePath, f)}`).join("\n")}`);
 async function recursiveReadDir(dir) {
 const subdirs = await fs.readdir(dir);
 const files = await Promise.all(subdirs.map(async (subdir) => {
-const res = path.
+const res = path.join(dir, subdir);
 return (await fs.stat(res)).isDirectory() ? recursiveReadDir(res) : [res];
 }));
 return files.reduce((a, f) => a.concat(f), []);
@@ -250,8 +248,8 @@ async function fetchContents({
 }
 if (!fetchUrlIsAbsolute && (baseUrl == null ? void 0 : baseUrl.startsWith("file://"))) {
 const basePath = baseUrl.slice("file://".length);
-const srcDir = backendCommon.resolveSafeChildPath(
-await fs__default[
+const srcDir = backendCommon.resolveSafeChildPath(path__default["default"].dirname(basePath), fetchUrl);
+await fs__default["default"].copy(srcDir, outputPath);
 } else {
 let readUrl;
 if (fetchUrlIsAbsolute) {
@@ -269,13 +267,13 @@ async function fetchContents({
 throw new errors.InputError(`Failed to fetch, template location could not be determined and the fetch URL is relative, ${fetchUrl}`);
 }
 const res = await reader.readTree(readUrl);
-await fs__default[
-await res.dir({targetDir: outputPath});
+await fs__default["default"].ensureDir(outputPath);
+await res.dir({ targetDir: outputPath });
 }
 }
 
 function createFetchPlainAction(options) {
-const {reader, integrations} = options;
+const { reader, integrations } = options;
 return createTemplateAction({
 id: "fetch:plain",
 description: "Downloads content and places it in the workspace, or optionally in a subdirectory specified by the 'targetPath' input option.",
@@ -313,9 +311,102 @@ function createFetchPlainAction(options) {
 });
 }
 
-
+const mkScript = (nunjucksSource) => `
+const { render, renderCompat } = (() => {
+const module = {};
+const process = { env: {} };
+const require = (pkg) => { if (pkg === 'events') { return function (){}; }};
+
+${nunjucksSource}
+
+const env = module.exports.configure({
+autoescape: false,
+tags: {
+variableStart: '\${{',
+variableEnd: '}}',
+},
+});
+
+const compatEnv = module.exports.configure({
+autoescape: false,
+tags: {
+variableStart: '{{',
+variableEnd: '}}',
+},
+});
+compatEnv.addFilter('jsonify', compatEnv.getFilter('dump'));
+
+if (typeof parseRepoUrl !== 'undefined') {
+const safeHelperRef = parseRepoUrl;
+
+env.addFilter('parseRepoUrl', repoUrl => {
+return JSON.parse(safeHelperRef(repoUrl))
+});
+env.addFilter('projectSlug', repoUrl => {
+const { owner, repo } = JSON.parse(safeHelperRef(repoUrl));
+return owner + '/' + repo;
+});
+}
+
+let uninstallCompat = undefined;
+
+function render(str, values) {
+try {
+if (uninstallCompat) {
+uninstallCompat();
+uninstallCompat = undefined;
+}
+return env.renderString(str, JSON.parse(values));
+} catch (error) {
+// Make sure errors don't leak anything
+throw new Error(String(error.message));
+}
+}
+
+function renderCompat(str, values) {
+try {
+if (!uninstallCompat) {
+uninstallCompat = module.exports.installJinjaCompat();
+}
+return compatEnv.renderString(str, JSON.parse(values));
+} catch (error) {
+// Make sure errors don't leak anything
+throw new Error(String(error.message));
+}
+}
+
+return { render, renderCompat };
+})();
+`;
+class SecureTemplater {
+static async loadRenderer(options = {}) {
+const { parseRepoUrl, cookiecutterCompat } = options;
+let sandbox = void 0;
+if (parseRepoUrl) {
+sandbox = {
+parseRepoUrl: (url) => JSON.stringify(parseRepoUrl(url))
+};
+}
+const vm = new vm2.VM({ sandbox });
+const nunjucksSource = await fs__default["default"].readFile(backendCommon.resolvePackagePath("@backstage/plugin-scaffolder-backend", "assets/nunjucks.js.txt"), "utf-8");
+vm.run(mkScript(nunjucksSource));
+const render = (template, values) => {
+if (!vm) {
+throw new Error("SecureTemplater has not been initialized");
+}
+vm.setGlobal("templateStr", template);
+vm.setGlobal("templateValues", JSON.stringify(values));
+if (cookiecutterCompat) {
+return vm.run(`renderCompat(templateStr, templateValues)`);
+}
+return vm.run(`render(templateStr, templateValues)`);
+};
+return render;
+}
+}
+
 function createFetchTemplateAction(options) {
-const {reader, integrations} = options;
+const { reader, integrations } = options;
 return createTemplateAction({
 id: "fetch:template",
 description: "Downloads a skeleton, templates variables into file and directory names and content, and places the result in the workspace, or optionally in a subdirectory specified by the 'targetPath' input option.",
@@ -364,7 +455,7 @@ function createFetchTemplateAction(options) {
 var _a;
 ctx.logger.info("Fetching template content from remote URL");
 const workDir = await ctx.createTemporaryDirectory();
-const templateDir =
+const templateDir = backendCommon.resolveSafeChildPath(workDir, "template");
 const targetPath = (_a = ctx.input.targetPath) != null ? _a : "./";
 const outputDir = backendCommon.resolveSafeChildPath(ctx.workspacePath, targetPath);
 if (ctx.input.copyWithoutRender && !Array.isArray(ctx.input.copyWithoutRender)) {
@@ -388,35 +479,26 @@ function createFetchTemplateAction(options) {
 outputPath: templateDir
 });
 ctx.logger.info("Listing files and directories in template");
-const allEntriesInTemplate = await globby__default[
+const allEntriesInTemplate = await globby__default["default"](`**/*`, {
 cwd: templateDir,
 dot: true,
 onlyFiles: false,
 markDirectories: true
 });
-const nonTemplatedEntries = new Set((await Promise.all((ctx.input.copyWithoutRender || []).map((pattern) => globby__default[
+const nonTemplatedEntries = new Set((await Promise.all((ctx.input.copyWithoutRender || []).map((pattern) => globby__default["default"](pattern, {
 cwd: templateDir,
 dot: true,
 onlyFiles: false,
 markDirectories: true
 })))).flat());
-const
-...ctx.input.cookiecutterCompat ? {} : {
-tags: {
-variableStart: "${{",
-variableEnd: "}}"
-}
-},
-autoescape: false
-});
-if (ctx.input.cookiecutterCompat) {
-templater.addFilter("jsonify", templater.getFilter("dump"));
-}
-const {cookiecutterCompat, values} = ctx.input;
+const { cookiecutterCompat, values } = ctx.input;
 const context = {
 [cookiecutterCompat ? "cookiecutter" : "values"]: values
 };
 ctx.logger.info(`Processing ${allEntriesInTemplate.length} template files/directories with input values`, ctx.input.values);
+const renderTemplate = await SecureTemplater.loadRenderer({
+cookiecutterCompat: ctx.input.cookiecutterCompat
+});
 for (const location of allEntriesInTemplate) {
 let renderFilename;
 let renderContents;
@@ -431,9 +513,9 @@ function createFetchTemplateAction(options) {
 renderFilename = renderContents = !nonTemplatedEntries.has(location);
 }
 if (renderFilename) {
-localOutputPath =
+localOutputPath = renderTemplate(localOutputPath, context);
 }
-const outputPath =
+const outputPath = backendCommon.resolveSafeChildPath(outputDir, localOutputPath);
 if (outputDir === outputPath) {
 continue;
 }
@@ -442,17 +524,17 @@ function createFetchTemplateAction(options) {
 }
 if (location.endsWith("/")) {
 ctx.logger.info(`Writing directory ${location} to template output path.`);
-await fs__default[
+await fs__default["default"].ensureDir(outputPath);
 } else {
-const inputFilePath =
+const inputFilePath = backendCommon.resolveSafeChildPath(templateDir, location);
 if (await isbinaryfile.isBinaryFile(inputFilePath)) {
 ctx.logger.info(`Copying binary file ${location} to template output path.`);
-await fs__default[
+await fs__default["default"].copy(inputFilePath, outputPath);
 } else {
-const statsObj = await fs__default[
+const statsObj = await fs__default["default"].stat(inputFilePath);
 ctx.logger.info(`Writing file ${location} to template output path with mode ${statsObj.mode}.`);
-const inputFileContents = await fs__default[
-await fs__default[
+const inputFileContents = await fs__default["default"].readFile(inputFilePath, "utf-8");
+await fs__default["default"].outputFile(outputPath, renderContents ? renderTemplate(inputFileContents, context) : inputFileContents, { mode: statsObj.mode });
 }
 }
 }
@@ -489,7 +571,7 @@ const createFilesystemDeleteAction = () => {
 for (const file of ctx.input.files) {
 const filepath = backendCommon.resolveSafeChildPath(ctx.workspacePath, file);
 try {
-await fs__default[
+await fs__default["default"].remove(filepath);
 ctx.logger.info(`File ${filepath} deleted successfully`);
 } catch (err) {
 ctx.logger.error(`Failed to delete file ${filepath}:`, err);
@@ -547,7 +629,7 @@ const createFilesystemRenameAction = () => {
 const sourceFilepath = backendCommon.resolveSafeChildPath(ctx.workspacePath, file.from);
 const destFilepath = backendCommon.resolveSafeChildPath(ctx.workspacePath, file.to);
 try {
-await fs__default[
+await fs__default["default"].move(sourceFilepath, destFilepath, {
 overwrite: (_b = file.overwrite) != null ? _b : false
 });
 ctx.logger.info(`File ${sourceFilepath} renamed to ${destFilepath} successfully`);
@@ -563,10 +645,11 @@ const createFilesystemRenameAction = () => {
 const runCommand = async ({
 command,
 args,
-logStream = new stream.PassThrough()
+logStream = new stream.PassThrough(),
+options
 }) => {
 await new Promise((resolve, reject) => {
-const process = child_process.spawn(command, args);
+const process = child_process.spawn(command, args, options);
 process.stdout.on("data", (stream) => {
 logStream.write(stream);
 });
@@ -603,7 +686,7 @@ async function initRepoAndPush({
 dir,
 defaultBranch
 });
-await git.add({dir, filepath: "."});
+await git.add({ dir, filepath: "." });
 const authorInfo = {
 name: (_a = gitAuthorInfo == null ? void 0 : gitAuthorInfo.name) != null ? _a : "Scaffolder",
 email: (_b = gitAuthorInfo == null ? void 0 : gitAuthorInfo.email) != null ? _b : "scaffolder@backstage.io"
@@ -641,7 +724,7 @@ const enableBranchProtectionOnDefaultRepoBranch = async ({
 owner,
 repo: repoName,
 branch: defaultBranch,
-required_status_checks: {strict: true, contexts: []},
+required_status_checks: { strict: true, contexts: [] },
 restrictions: null,
 enforce_admins: true,
 required_pull_request_reviews: {
@@ -715,7 +798,7 @@ const parseRepoUrl = (repoUrl, integrations) => {
 if (!repo) {
 throw new errors.InputError(`Invalid repo URL passed to publisher: ${repoUrl}, missing repo`);
 }
-return {host, owner, repo, organization, workspace, project};
+return { host, owner, repo, organization, workspace, project };
 };
 const isExecutable = (fileMode) => {
 const executeBitMask = 73;
@@ -724,7 +807,7 @@ const isExecutable = (fileMode) => {
 };
 
 function createPublishAzureAction(options) {
-const {integrations, config} = options;
+const { integrations, config } = options;
 return createTemplateAction({
 id: "publish:azure",
 description: "Initializes a git repository of the content in the workspace, and publishes it to Azure.",
@@ -767,8 +850,8 @@ function createPublishAzureAction(options) {
 }
 },
 async handler(ctx) {
-const {repoUrl, defaultBranch = "master"} = ctx.input;
-const {owner, repo, host, organization} = parseRepoUrl(repoUrl, integrations);
+const { repoUrl, defaultBranch = "master" } = ctx.input;
+const { owner, repo, host, organization } = parseRepoUrl(repoUrl, integrations);
 if (!organization) {
 throw new errors.InputError(`Invalid URL provider was included in the repo URL to create ${ctx.input.repoUrl}, missing organization`);
 }
@@ -782,7 +865,7 @@ function createPublishAzureAction(options) {
 const authHandler = azureDevopsNodeApi.getPersonalAccessTokenHandler(integrationConfig.config.token);
 const webApi = new azureDevopsNodeApi.WebApi(`https://${host}/${organization}`, authHandler);
 const client = await webApi.getGitApi();
-const createOptions = {name: repo};
+const createOptions = { name: repo };
 const returnedRepo = await client.createRepository(createOptions, owner);
 if (!returnedRepo) {
 throw new errors.InputError(`Unable to create the repository with Organization ${organization}, Project ${owner} and Repo ${repo}.
@@ -830,7 +913,7 @@ const createBitbucketCloudRepository = async (opts) => {
 scm: "git",
 description,
 is_private: repoVisibility === "private",
-project: {key: project}
+project: { key: project }
 }),
 headers: {
 Authorization: authorization,
@@ -839,7 +922,7 @@ const createBitbucketCloudRepository = async (opts) => {
 };
 let response;
 try {
-response = await fetch__default[
+response = await fetch__default["default"](`https://api.bitbucket.org/2.0/repositories/${workspace}/${repo}`, options);
 } catch (e) {
 throw new Error(`Unable to create repository, ${e}`);
 }
@@ -854,7 +937,7 @@ const createBitbucketCloudRepository = async (opts) => {
 }
 }
 const repoContentsUrl = `${r.links.html.href}/src/master`;
-return {remoteUrl, repoContentsUrl};
+return { remoteUrl, repoContentsUrl };
 };
 const createBitbucketServerRepository = async (opts) => {
 const {
@@ -881,7 +964,7 @@ const createBitbucketServerRepository = async (opts) => {
 };
 try {
 const baseUrl = apiBaseUrl ? apiBaseUrl : `https://${host}/rest/api/1.0`;
-response = await fetch__default[
+response = await fetch__default["default"](`${baseUrl}/projects/${project}/repos`, options);
 } catch (e) {
 throw new Error(`Unable to create repository, ${e}`);
 }
@@ -896,7 +979,7 @@ const createBitbucketServerRepository = async (opts) => {
 }
 }
 const repoContentsUrl = `${r.links.self[0].href}`;
-return {remoteUrl, repoContentsUrl};
+return { remoteUrl, repoContentsUrl };
 };
 const getAuthorizationHeader = (config) => {
 if (config.username && config.appPassword) {
@@ -909,19 +992,19 @@ const getAuthorizationHeader = (config) => {
 throw new Error(`Authorization has not been provided for Bitbucket. Please add either username + appPassword or token to the Integrations config`);
 };
 const performEnableLFS = async (opts) => {
-const {authorization, host, project, repo} = opts;
+const { authorization, host, project, repo } = opts;
 const options = {
 method: "PUT",
 headers: {
 Authorization: authorization
 }
 };
-const {ok, status, statusText} = await fetch__default[
+const { ok, status, statusText } = await fetch__default["default"](`https://${host}/rest/git-lfs/admin/projects/${project}/repos/${repo}/enabled`, options);
 if (!ok)
 throw new Error(`Failed to enable LFS in the repository, ${status}: ${statusText}`);
 };
 function createPublishBitbucketAction(options) {
-const {integrations, config} = options;
+const { integrations, config } = options;
 return createTemplateAction({
 id: "publish:bitbucket",
 description: "Initializes a git repository of the content in the workspace, and publishes it to Bitbucket.",
@@ -981,7 +1064,7 @@ function createPublishBitbucketAction(options) {
 repoVisibility = "private",
 enableLFS = false
 } = ctx.input;
-const {workspace, project, repo, host} = parseRepoUrl(repoUrl, integrations);
+const { workspace, project, repo, host } = parseRepoUrl(repoUrl, integrations);
 if (host === "bitbucket.org") {
 if (!workspace) {
 throw new errors.InputError(`Invalid URL provider was included in the repo URL to create ${ctx.input.repoUrl}, missing workspace`);
@@ -997,7 +1080,7 @@ function createPublishBitbucketAction(options) {
 const authorization = getAuthorizationHeader(integrationConfig.config);
 const apiBaseUrl = integrationConfig.config.apiBaseUrl;
 const createMethod = host === "bitbucket.org" ? createBitbucketCloudRepository : createBitbucketServerRepository;
-const {remoteUrl, repoContentsUrl} = await createMethod({
+const { remoteUrl, repoContentsUrl } = await createMethod({
 authorization,
 host,
 workspace: workspace || "",
@@ -1024,7 +1107,7 @@ function createPublishBitbucketAction(options) {
 gitAuthorInfo
 });
 if (enableLFS && host !== "bitbucket.org") {
-await performEnableLFS({authorization, host, project, repo});
+await performEnableLFS({ authorization, host, project, repo });
 }
 ctx.output("remoteUrl", remoteUrl);
 ctx.output("repoContentsUrl", repoContentsUrl);
@@ -1049,13 +1132,13 @@ function createPublishFileAction() {
 }
 },
 async handler(ctx) {
-const {path: path$1} = ctx.input;
-const exists = await fs__default[
+const { path: path$1 } = ctx.input;
+const exists = await fs__default["default"].pathExists(path$1);
 if (exists) {
 throw new errors.InputError("Output path already exists");
 }
-await fs__default[
-await fs__default[
+await fs__default["default"].ensureDir(path.dirname(path$1));
+await fs__default["default"].copy(ctx.workspacePath, path$1);
 }
 });
 }
@@ -1070,7 +1153,7 @@ class OctokitProvider {
 }
 async getOctokit(repoUrl) {
 var _a;
-const {owner, repo, host} = parseRepoUrl(repoUrl, this.integrations);
+const { owner, repo, host } = parseRepoUrl(repoUrl, this.integrations);
 if (!owner) {
 throw new errors.InputError(`No owner provided for repo ${repoUrl}`);
 }
@@ -1082,7 +1165,7 @@ class OctokitProvider {
 if (!credentialsProvider) {
 throw new errors.InputError(`No matching credentials for host ${host}, please check your integrations config`);
 }
-const {token} = await credentialsProvider.getCredentials({
+const { token } = await credentialsProvider.getCredentials({
 url: `https://${host}/${encodeURIComponent(owner)}/${encodeURIComponent(repo)}`
 });
 if (!token) {
@@ -1093,12 +1176,12 @@ class OctokitProvider {
 baseUrl: integrationConfig.apiBaseUrl,
 previews: ["nebula-preview"]
 });
-return {client, token, owner, repo};
+return { client, token, owner, repo };
 }
 }
 
 function createPublishGithubAction(options) {
-const {integrations, config} = options;
+const { integrations, config } = options;
 const octokitProvider = new OctokitProvider(integrations);
 return createTemplateAction({
 id: "publish:github",
@@ -1194,7 +1277,7 @@ function createPublishGithubAction(options) {
 collaborators,
 topics
 } = ctx.input;
-const {client, token, owner, repo} = await octokitProvider.getOctokit(repoUrl);
+const { client, token, owner, repo } = await octokitProvider.getOctokit(repoUrl);
 const user = await client.users.getByUsername({
 username: owner
 });
@@ -1209,7 +1292,7 @@ function createPublishGithubAction(options) {
 private: repoVisibility === "private",
 description
 });
-const {data: newRepo} = await repoCreationPromise;
+const { data: newRepo } = await repoCreationPromise;
 if (access == null ? void 0 : access.startsWith(`${owner}/`)) {
 const [, team] = access.split("/");
 await client.teams.addOrUpdateRepoPermissionsInOrg({
@@ -1312,7 +1395,7 @@ const defaultClientFactory = async ({
 if (!credentialsProvider) {
 throw new errors.InputError(`No matching credentials for host ${host}, please check your integrations config`);
 }
-const {token} = await credentialsProvider.getCredentials({
+const { token } = await credentialsProvider.getCredentials({
 url: `https://${host}/${encodeURIComponent(owner)}/${encodeURIComponent(repo)}`
 });
 if (!token) {
@@ -1388,21 +1471,21 @@ const createPublishGithubPullRequestAction = ({
 targetPath,
 sourcePath
 } = ctx.input;
-const {owner, repo, host} = parseRepoUrl(repoUrl, integrations);
+const { owner, repo, host } = parseRepoUrl(repoUrl, integrations);
 if (!owner) {
 throw new errors.InputError(`No owner provided for host: ${host}, and repo ${repo}`);
 }
-const client = await clientFactory({integrations, host, owner, repo});
+const client = await clientFactory({ integrations, host, owner, repo });
 const fileRoot = sourcePath ? backendCommon.resolveSafeChildPath(ctx.workspacePath, sourcePath) : ctx.workspacePath;
-const localFilePaths = await globby__default[
+const localFilePaths = await globby__default["default"](["./**", "./**/.*", "!.git"], {
 cwd: fileRoot,
 gitignore: true,
 dot: true
 });
 const fileContents = await Promise.all(localFilePaths.map((filePath) => {
-const absPath =
-const base64EncodedContent = fs__default[
-const fileStat = fs__default[
+const absPath = backendCommon.resolveSafeChildPath(fileRoot, filePath);
+const base64EncodedContent = fs__default["default"].readFileSync(absPath).toString("base64");
+const fileStat = fs__default["default"].statSync(absPath);
 const githubTreeItemMode = isExecutable(fileStat.mode) ? "100755" : "100644";
 const encoding = "base64";
 return {
@@ -1441,7 +1524,7 @@ const createPublishGithubPullRequestAction = ({
 };
 
 function createPublishGitlabAction(options) {
-const {integrations, config} = options;
+const { integrations, config } = options;
 return createTemplateAction({
 id: "publish:gitlab",
 description: "Initializes a git repository of the content in the workspace, and publishes it to GitLab.",
@@ -1490,7 +1573,7 @@ function createPublishGitlabAction(options) {
 repoVisibility = "private",
 defaultBranch = "master"
 } = ctx.input;
-const {owner, repo, host} = parseRepoUrl(repoUrl, integrations);
+const { owner, repo, host } = parseRepoUrl(repoUrl, integrations);
 if (!owner) {
 throw new errors.InputError(`No owner provided for host: ${host}, and repo ${repo}`);
 }
@@ -1505,12 +1588,12 @@ function createPublishGitlabAction(options) {
 host: integrationConfig.config.baseUrl,
 token: integrationConfig.config.token
 });
-let {id: targetNamespace} = await client.Namespaces.show(owner);
+let { id: targetNamespace } = await client.Namespaces.show(owner);
 if (!targetNamespace) {
-const {id} = await client.Users.current();
+const { id } = await client.Users.current();
 targetNamespace = id;
 }
-const {http_url_to_repo} = await client.Projects.create({
+const { http_url_to_repo } = await client.Projects.create({
 namespace_id: targetNamespace,
 name: repo,
 visibility: repoVisibility
@@ -1539,8 +1622,122 @@ function createPublishGitlabAction(options) {
 });
 }
 
+const createPublishGitlabMergeRequestAction = (options) => {
+const { integrations } = options;
+return createTemplateAction({
+id: "publish:gitlab:merge-request",
+schema: {
+input: {
+required: ["projectid", "repoUrl", "targetPath", "branchName"],
+type: "object",
+properties: {
+repoUrl: {
+type: "string",
+title: "Repository Location",
+description: `Accepts the format 'gitlab.com/group_name/project_name' where 'project_name' is the repository name and 'group_name' is a group or username`
+},
+projectid: {
+type: "string",
+title: "projectid",
+description: "Project ID/Name(slug) of the Gitlab Project"
+},
+title: {
+type: "string",
+title: "Merge Request Name",
+description: "The name for the merge request"
+},
+description: {
+type: "string",
+title: "Merge Request Description",
+description: "The description of the merge request"
+},
+branchName: {
+type: "string",
+title: "Destination Branch name",
+description: "The description of the merge request"
+},
+targetPath: {
+type: "string",
+title: "Repository Subdirectory",
+description: "Subdirectory of repository to apply changes to"
+}
+}
+},
+output: {
+type: "object",
+properties: {
+projectid: {
+title: "Gitlab Project id/Name(slug)",
+type: "string"
+},
+mergeRequestURL: {
+title: "MergeRequest(MR) URL",
+type: "string",
+description: "Link to the merge request in GitLab"
+}
+}
+}
+},
+async handler(ctx) {
+const repoUrl = ctx.input.repoUrl;
+const { host } = parseRepoUrl(repoUrl, integrations);
+const integrationConfig = integrations.gitlab.byHost(host);
+const actions = [];
+const destinationBranch = ctx.input.branchName;
+if (!integrationConfig) {
+throw new errors.InputError(`No matching integration configuration for host ${host}, please check your integrations config`);
+}
+if (!integrationConfig.config.token) {
+throw new errors.InputError(`No token available for host ${host}`);
+}
+const api = new node.Gitlab({
+host: integrationConfig.config.baseUrl,
+token: integrationConfig.config.token
+});
+const fileRoot = ctx.workspacePath;
+const localFilePaths = await globby__default["default"]([`${ctx.input.targetPath}/**`], {
+cwd: fileRoot,
+gitignore: true,
+dot: true
+});
+const fileContents = await Promise.all(localFilePaths.map((p) => fs.readFile(backendCommon.resolveSafeChildPath(fileRoot, p))));
+const repoFilePaths = localFilePaths.map((repoFilePath) => {
+return repoFilePath;
+});
+for (let i = 0; i < repoFilePaths.length; i++) {
+actions.push({
+action: "create",
+filePath: repoFilePaths[i],
+content: fileContents[i].toString()
+});
+}
+const projects = await api.Projects.show(ctx.input.projectid);
+const { default_branch: defaultBranch } = projects;
+try {
+await api.Branches.create(ctx.input.projectid, destinationBranch, String(defaultBranch));
+} catch (e) {
+throw new errors.InputError(`The branch creation failed ${e}`);
+}
+try {
+await api.Commits.create(ctx.input.projectid, destinationBranch, ctx.input.title, actions);
+} catch (e) {
+throw new errors.InputError(`Committing the changes to ${destinationBranch} failed ${e}`);
+}
+try {
+const mergeRequestUrl = await api.MergeRequests.create(ctx.input.projectid, destinationBranch, String(defaultBranch), ctx.input.title, { description: ctx.input.description }).then((mergeRequest) => {
+return mergeRequest.web_url;
+});
+ctx.output("projectid", ctx.input.projectid);
+ctx.output("mergeRequestUrl", mergeRequestUrl);
+} catch (e) {
+throw new errors.InputError(`Merge request creation failed${e}`);
+}
+}
+});
+};
+
 function createGithubActionsDispatchAction(options) {
-const {integrations} = options;
+const { integrations } = options;
 const octokitProvider = new OctokitProvider(integrations);
 return createTemplateAction({
 id: "github:actions:dispatch",
@@ -1569,9 +1766,9 @@ function createGithubActionsDispatchAction(options) {
 }
 },
 async handler(ctx) {
-const {repoUrl, workflowId, branchOrTagName} = ctx.input;
+const { repoUrl, workflowId, branchOrTagName } = ctx.input;
 ctx.logger.info(`Dispatching workflow ${workflowId} for repo ${repoUrl} on ${branchOrTagName}`);
-const {client, owner, repo} = await octokitProvider.getOctokit(repoUrl);
+const { client, owner, repo } = await octokitProvider.getOctokit(repoUrl);
 await client.rest.actions.createWorkflowDispatch({
 owner,
 repo,
@@ -1584,7 +1781,7 @@ function createGithubActionsDispatchAction(options) {
 }
 
 function createGithubWebhookAction(options) {
-const {integrations, defaultWebhookSecret} = options;
+const { integrations, defaultWebhookSecret } = options;
 const octokitProvider = new OctokitProvider(integrations);
 const eventNames = webhooks.emitterEventNames.filter((event) => !event.includes("."));
 return createTemplateAction({
@@ -1659,7 +1856,7 @@ function createGithubWebhookAction(options) {
 insecureSsl = false
 } = ctx.input;
 ctx.logger.info(`Creating webhook ${webhookUrl} for repo ${repoUrl}`);
-const {client, owner, repo} = await octokitProvider.getOctokit(repoUrl);
+const { client, owner, repo } = await octokitProvider.getOctokit(repoUrl);
 try {
 const insecure_ssl = insecureSsl ? "1" : "0";
 await client.repos.createWebhook({
@@ -1684,17 +1881,12 @@ function createGithubWebhookAction(options) {
 }
 
 const createBuiltinActions = (options) => {
-const {reader, integrations, containerRunner, catalogClient, config} = options;
-
+const { reader, integrations, containerRunner, catalogClient, config } = options;
+const actions = [
 createFetchPlainAction({
 reader,
 integrations
 }),
-pluginScaffolderBackendModuleCookiecutter.createFetchCookiecutterAction({
-reader,
-integrations,
-containerRunner
-}),
 createFetchTemplateAction({
 integrations,
 reader
@@ -1710,6 +1902,9 @@ const createBuiltinActions = (options) => {
 integrations,
 config
 }),
+createPublishGitlabMergeRequestAction({
+integrations
+}),
 createPublishBitbucketAction({
 integrations,
 config
@@ -1719,7 +1914,7 @@ const createBuiltinActions = (options) => {
 config
 }),
 createDebugLogAction(),
-createCatalogRegisterAction({catalogClient, integrations}),
+createCatalogRegisterAction({ catalogClient, integrations }),
 createCatalogWriteAction(),
 createFilesystemDeleteAction(),
 createFilesystemRenameAction(),
@@ -1730,11 +1925,19 @@ const createBuiltinActions = (options) => {
 integrations
 })
 ];
+if (containerRunner) {
+actions.push(pluginScaffolderBackendModuleCookiecutter.createFetchCookiecutterAction({
+reader,
+integrations,
+containerRunner
+}));
+}
+return actions;
 };
 
 class TemplateActionRegistry {
 constructor() {
-this.actions = new Map();
+this.actions = /* @__PURE__ */ new Map();
 }
 register(action) {
 if (this.actions.has(action.id)) {
@@ -1766,7 +1969,7 @@ class DatabaseTaskStore {
 this.db = options.database;
 }
 async getTask(taskId) {
-const [result] = await this.db("tasks").where({id: taskId}).select();
+const [result] = await this.db("tasks").where({ id: taskId }).select();
 if (!result) {
 throw new errors.NotFoundError(`No task with id '${taskId}' found`);
 }
@@ -1793,7 +1996,7 @@ class DatabaseTaskStore {
 secrets: secrets ? JSON.stringify(secrets) : void 0,
 status: "open"
 });
-return {taskId};
+return { taskId };
 }
 async claimTask() {
 return this.db.transaction(async (tx) => {
@@ -1803,7 +2006,7 @@ class DatabaseTaskStore {
 if (!task) {
 return void 0;
 }
-const updateCount = await tx("tasks").where({id: task.id, status: "open"}).update({
+const updateCount = await tx("tasks").where({ id: task.id, status: "open" }).update({
 status: "processing",
 last_heartbeat_at: this.db.fn.now()
 });
@@ -1827,14 +2030,14 @@ class DatabaseTaskStore {
 });
 }
 async heartbeatTask(taskId) {
-const updateCount = await this.db("tasks").where({id: taskId, status: "processing"}).update({
+const updateCount = await this.db("tasks").where({ id: taskId, status: "processing" }).update({
 last_heartbeat_at: this.db.fn.now()
 });
 if (updateCount === 0) {
 throw new errors.ConflictError(`No running task with taskId ${taskId} found`);
 }
 }
-async listStaleTasks({timeoutS}) {
+async listStaleTasks({ timeoutS }) {
 const rawRows = await this.db("tasks").where("status", "processing").andWhere("last_heartbeat_at", "<=", this.db.client.config.client === "sqlite3" ? this.db.raw(`datetime('now', ?)`, [`-${timeoutS} seconds`]) : this.db.raw(`dateadd('second', ?, ?)`, [
 `-${timeoutS}`,
 this.db.fn.now()
@@ -1842,7 +2045,7 @@ class DatabaseTaskStore {
 const tasks = rawRows.map((row) => ({
 taskId: row.id
 }));
-return {tasks};
+return { tasks };
 }
 async completeTask({
 taskId,
@@ -1882,7 +2085,7 @@ class DatabaseTaskStore {
 });
 });
 }
-async emitLogEvent({taskId, body}) {
+async emitLogEvent({ taskId, body }) {
 const serliazedBody = JSON.stringify(body);
 await this.db("task_events").insert({
 task_id: taskId,
@@ -1909,13 +2112,13 @@ class DatabaseTaskStore {
 taskId,
 body,
 type: event.event_type,
-createdAt: typeof event.created_at === "string" ? luxon.DateTime.fromSQL(event.created_at, {zone: "UTC"}).toISO() : event.created_at
+createdAt: typeof event.created_at === "string" ? luxon.DateTime.fromSQL(event.created_at, { zone: "UTC" }).toISO() : event.created_at
 };
 } catch (error) {
 throw new Error(`Failed to parse event body from event taskId=${taskId} id=${event.id}, ${error}`);
 }
 });
-return {events};
+return { events };
 }
 }
 
@@ -1946,7 +2149,7 @@ class TaskManager {
 async emitLog(message, metadata) {
 await this.storage.emitLogEvent({
 taskId: this.state.taskId,
-body: {message, ...metadata}
+body: { message, ...metadata }
 });
 }
 async complete(result, metadata) {
@@ -1981,7 +2184,7 @@ function defer() {
 const promise = new Promise((_resolve) => {
 resolve = _resolve;
 });
-return {promise, resolve};
+return { promise, resolve };
 }
 class StorageTaskBroker {
 constructor(storage, logger) {
@@ -2013,7 +2216,7 @@ class StorageTaskBroker {
 return this.storage.getTask(taskId);
 }
 observe(options, callback) {
-const {taskId} = options;
+const { taskId } = options;
 let cancelled = false;
 const unsubscribe = () => {
 cancelled = true;
@@ -2021,24 +2224,24 @@ class StorageTaskBroker {
 (async () => {
 let after = options.after;
 while (!cancelled) {
-const result = await this.storage.listEvents({taskId, after});
-const {events} = result;
+const result = await this.storage.listEvents({ taskId, after });
+const { events } = result;
 if (events.length) {
 after = events[events.length - 1].id;
 try {
 callback(void 0, result);
 } catch (error) {
 errors.assertError(error);
-callback(error, {events: []});
+callback(error, { events: [] });
 }
 }
 await new Promise((resolve) => setTimeout(resolve, 1e3));
 }
 })();
-return {unsubscribe};
+return { unsubscribe };
 }
 async vacuumTasks(timeoutS) {
-const {tasks} = await this.storage.listStaleTasks(timeoutS);
+const { tasks } = await this.storage.listStaleTasks(timeoutS);
 await Promise.all(tasks.map(async (task) => {
 try {
 await this.storage.completeTask({
@@ -2075,7 +2278,7 @@ class HandlebarsWorkflowRunner {
 return JSON.stringify(parseRepoUrl(repoUrl, this.options.integrations));
 });
 this.handlebars.registerHelper("projectSlug", (repoUrl) => {
-const {owner, repo} = parseRepoUrl(repoUrl, this.options.integrations);
+const { owner, repo } = parseRepoUrl(repoUrl, this.options.integrations);
 return `${owner}/${repo}`;
 });
 this.handlebars.registerHelper("json", (obj) => JSON.stringify(obj));
@@ -2087,14 +2290,14 @@ class HandlebarsWorkflowRunner {
 if (!isValidTaskSpec$1(task.spec)) {
 throw new errors.InputError(`Task spec is not a valid v1beta2 task spec`);
 }
-const {actionRegistry} = this.options;
-const workspacePath = path__default[
+const { actionRegistry } = this.options;
+const workspacePath = path__default["default"].join(this.options.workingDirectory, await task.getWorkspaceName());
 try {
-await fs__default[
+await fs__default["default"].ensureDir(workspacePath);
 await task.emitLog(`Starting up task with ${task.spec.steps.length} steps`);
-const templateCtx = {parameters: task.spec.values, steps: {}};
+const templateCtx = { parameters: task.spec.values, steps: {} };
 for (const step of task.spec.steps) {
-const metadata = {stepId: step.id};
+const metadata = { stepId: step.id };
 try {
 const taskLogger = winston__namespace.createLogger({
 level: process.env.LOG_LEVEL || "info",
@@ -2108,7 +2311,7 @@ class HandlebarsWorkflowRunner {
 await task.emitLog(message, metadata);
 }
 });
-taskLogger.add(new winston__namespace.transports.Stream({stream: stream$1}));
+taskLogger.add(new winston__namespace.transports.Stream({ stream: stream$1 }));
 if (step.if !== void 0) {
 let skip = !step.if;
 if (typeof step.if === "string") {
@@ -2189,7 +2392,7 @@ class HandlebarsWorkflowRunner {
 token: (_b = task.secrets) == null ? void 0 : _b.token,
 workspacePath,
 async createTemporaryDirectory() {
-const tmpDir = await fs__default[
+const tmpDir = await fs__default["default"].mkdtemp(`${workspacePath}_step-${step.id}-`);
 tmpDirs.push(tmpDir);
 return tmpDir;
 },
@@ -2199,9 +2402,9 @@ class HandlebarsWorkflowRunner {
 metadata: task.spec.metadata
 });
 for (const tmpDir of tmpDirs) {
-await fs__default[
+await fs__default["default"].remove(tmpDir);
 }
-templateCtx.steps[step.id] = {output: stepOutputs};
+templateCtx.steps[step.id] = { output: stepOutputs };
 await task.emitLog(`Finished step ${step.name}`, {
 ...metadata,
 status: "completed"
@@ -2235,10 +2438,10 @@ class HandlebarsWorkflowRunner {
 }
 return value;
 });
-return {output};
+return { output };
 } finally {
 if (workspacePath) {
-await fs__default[
+await fs__default["default"].remove(workspacePath);
 }
 }
 }
@@ -2251,7 +2454,7 @@ const createStepLogger = ({
 task,
 step
 }) => {
-const metadata = {stepId: step.id};
+const metadata = { stepId: step.id };
 const taskLogger = winston__namespace.createLogger({
 level: process.env.LOG_LEVEL || "info",
 format: winston__namespace.format.combine(winston__namespace.format.colorize(), winston__namespace.format.timestamp(), winston__namespace.format.simple()),
@@ -2264,41 +2467,33 @@ const createStepLogger = ({
 await task.emitLog(message, metadata);
 }
 });
-taskLogger.add(new winston__namespace.transports.Stream({stream: streamLogger}));
-return {taskLogger, streamLogger};
+taskLogger.add(new winston__namespace.transports.Stream({ stream: streamLogger }));
+return { taskLogger, streamLogger };
 };
 class NunjucksWorkflowRunner {
 constructor(options) {
 this.options = options;
-
+}
+isSingleTemplateString(input) {
+var _a, _b;
+const { parser, nodes } = nunjucks__default["default"];
+const parsed = parser.parse(input, {}, {
 autoescape: false,
 tags: {
 variableStart: "${{",
 variableEnd: "}}"
 }
-};
-this.nunjucks = nunjucks__default['default'].configure(this.nunjucksOptions);
-this.nunjucks.addFilter("parseRepoUrl", (repoUrl) => {
-return parseRepoUrl(repoUrl, this.options.integrations);
-});
-this.nunjucks.addFilter("projectSlug", (repoUrl) => {
-const {owner, repo} = parseRepoUrl(repoUrl, this.options.integrations);
-return `${owner}/${repo}`;
 });
+return parsed.children.length === 1 && !(((_b = (_a = parsed.children[0]) == null ? void 0 : _a.children) == null ? void 0 : _b[0]) instanceof nodes.TemplateData);
 }
-
-const {parser, nodes} = require("nunjucks");
-const parsed = parser.parse(input, {}, this.nunjucksOptions);
-return parsed.children.length === 1 && !(parsed.children[0] instanceof nodes.TemplateData);
-}
-render(input, context) {
+render(input, context, renderTemplate) {
 return JSON.parse(JSON.stringify(input), (_key, value) => {
 try {
 if (typeof value === "string") {
 try {
 if (this.isSingleTemplateString(value)) {
 const wrappedDumped = value.replace(/\${{(.+)}}/g, "${{ ( $1 ) | dump }}");
-const templated2 =
+const templated2 = renderTemplate(wrappedDumped, context);
 if (templated2 === "") {
 return void 0;
 }
@@ -2307,7 +2502,7 @@ class NunjucksWorkflowRunner {
          } catch (ex) {
            this.options.logger.error(`Failed to parse template string: ${value} with error ${ex.message}`);
          }
-          const templated =
+          const templated = renderTemplate(value, context);
          if (templated === "") {
            return void 0;
          }
@@ -2324,9 +2519,15 @@ class NunjucksWorkflowRunner {
    if (!isValidTaskSpec(task.spec)) {
      throw new errors.InputError("Wrong template version executed with the workflow engine");
    }
-    const workspacePath = path__default[
+    const workspacePath = path__default["default"].join(this.options.workingDirectory, await task.getWorkspaceName());
+    const { integrations } = this.options;
+    const renderTemplate = await SecureTemplater.loadRenderer({
+      parseRepoUrl(url) {
+        return parseRepoUrl(url, integrations);
+      }
+    });
    try {
-      await fs__default[
+      await fs__default["default"].ensureDir(workspacePath);
      await task.emitLog(`Starting up task with ${task.spec.steps.length} steps`);
      const context = {
        parameters: task.spec.parameters,
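
In the hunk above, the per-task `renderTemplate` function now comes from `SecureTemplater.loadRenderer`, which receives host callbacks such as `parseRepoUrl` instead of the constructor wiring filters onto a shared nunjucks environment. Combined with the new top-level `vm2` require replacing the `nunjucks` one and the `assets/nunjucks.js.txt` file this release ships, that suggests templates are evaluated inside a sandbox. A hedged sketch of that pattern only; all names besides the vm2, fs, and nunjucks APIs are illustrative, and this is not the package's actual implementation:

```js
// Illustrative sandboxed renderer in the spirit of SecureTemplater,
// assuming vm2 plus the nunjucks build shipped as assets/nunjucks.js.txt.
const { VM } = require('vm2');
const fs = require('fs');
const path = require('path');

function loadRenderer({ parseRepoUrl }) {
  const vm = new VM({ timeout: 1000, sandbox: {} });
  // The browser build of nunjucks expects a `window` global.
  vm.run('globalThis.window = globalThis;');
  // Evaluate nunjucks inside the sandbox so template evaluation never
  // touches the host's require() or process globals.
  const source = fs.readFileSync(
    path.resolve(__dirname, 'assets/nunjucks.js.txt'),
    'utf8',
  );
  vm.run(source);
  vm.run(
    "globalThis.env = nunjucks.configure({ autoescape: false, tags: { variableStart: '${{', variableEnd: '}}' } });",
  );
  // Host capabilities are bridged in as frozen callbacks instead of
  // exposing host objects to the sandbox.
  vm.freeze(parseRepoUrl, 'parseRepoUrl');
  vm.run("env.addFilter('parseRepoUrl', (url) => parseRepoUrl(url));");
  return (template, context) =>
    vm.run(
      'env.renderString(' +
        JSON.stringify(template) +
        ', ' +
        JSON.stringify(context) +
        ')',
    );
}

module.exports = { loadRenderer };
```
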
@@ -2335,9 +2536,9 @@ class NunjucksWorkflowRunner {
      for (const step of task.spec.steps) {
        try {
          if (step.if) {
-            const ifResult = await this.render(step.if, context);
+            const ifResult = await this.render(step.if, context, renderTemplate);
            if (!isTruthy(ifResult)) {
-              await task.emitLog(`Skipping step ${step.id} because it's if condition was false`, {stepId: step.id, status: "skipped"});
+              await task.emitLog(`Skipping step ${step.id} because it's if condition was false`, { stepId: step.id, status: "skipped" });
              continue;
            }
          }
@@ -2346,8 +2547,8 @@ class NunjucksWorkflowRunner {
            status: "processing"
          });
          const action = this.options.actionRegistry.get(step.action);
-          const {taskLogger, streamLogger} = createStepLogger({task, step});
-          const input = (_a = step.input && this.render(step.input, context)) != null ? _a : {};
+          const { taskLogger, streamLogger } = createStepLogger({ task, step });
+          const input = (_a = step.input && this.render(step.input, context, renderTemplate)) != null ? _a : {};
          if ((_b = action.schema) == null ? void 0 : _b.input) {
            const validateResult = jsonschema.validate(input, action.schema.input);
            if (!validateResult.valid) {
@@ -2367,7 +2568,7 @@ class NunjucksWorkflowRunner {
            logStream: streamLogger,
            workspacePath,
            createTemporaryDirectory: async () => {
-              const tmpDir = await fs__default[
+              const tmpDir = await fs__default["default"].mkdtemp(`${workspacePath}_step-${step.id}-`);
              tmpDirs.push(tmpDir);
              return tmpDir;
            },
@@ -2377,9 +2578,9 @@ class NunjucksWorkflowRunner {
            metadata: task.spec.metadata
          });
          for (const tmpDir of tmpDirs) {
-            await fs__default[
+            await fs__default["default"].remove(tmpDir);
          }
-          context.steps[step.id] = {output: stepOutput};
+          context.steps[step.id] = { output: stepOutput };
          await task.emitLog(`Finished step ${step.name}`, {
            stepId: step.id,
            status: "completed"
@@ -2392,11 +2593,11 @@ class NunjucksWorkflowRunner {
          throw err;
        }
      }
-      const output = this.render(task.spec.output, context);
-      return {output};
+      const output = this.render(task.spec.output, context, renderTemplate);
+      return { output };
    } finally {
      if (workspacePath) {
-        await fs__default[
+        await fs__default["default"].remove(workspacePath);
      }
    }
  }
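
For orientation, the loop above threads one context object through every step: `parameters` comes from `task.spec.parameters`, and each completed step contributes `steps.<id>.output`, which later `input`, `if`, and `task.spec.output` expressions can reference. A hand-written illustration with invented values:

```js
// Illustrative shape of the templating context threaded through
// this.render() above; the literal values are invented for the example.
const context = {
  parameters: { name: 'my-service', owner: 'team-a' }, // from task.spec.parameters
  steps: {},
};

// After the step with id "fetch" completes, the runner stores its output:
context.steps['fetch'] = { output: { targetPath: './skeleton' } };

// A later step input of
//   { path: '${{ steps.fetch.output.targetPath }}' }
// therefore renders to { path: './skeleton' }, and a step guard such as
//   if: '${{ parameters.owner == "team-a" }}'
// is rendered first and then evaluated with isTruthy() before the step runs.
console.log(context.steps.fetch.output.targetPath); // ./skeleton
```
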
@@ -2428,7 +2629,7 @@ class TaskWorker {
    });
    return new TaskWorker({
      taskBroker,
-      runners: {legacyWorkflowRunner, workflowRunner}
+      runners: { legacyWorkflowRunner, workflowRunner }
    });
  }
  start() {
@@ -2441,12 +2642,12 @@ class TaskWorker {
  }
  async runOneTask(task) {
    try {
-      const {output} = task.spec.apiVersion === "scaffolder.backstage.io/v1beta3" ? await this.options.runners.workflowRunner.execute(task) : await this.options.runners.legacyWorkflowRunner.execute(task);
-      await task.complete("completed", {output});
+      const { output } = task.spec.apiVersion === "scaffolder.backstage.io/v1beta3" ? await this.options.runners.workflowRunner.execute(task) : await this.options.runners.legacyWorkflowRunner.execute(task);
+      await task.complete("completed", { output });
    } catch (error) {
      errors.assertError(error);
      await task.complete("failed", {
-        error: {name: error.name, message: error.message}
+        error: { name: error.name, message: error.message }
      });
    }
  }
@@ -2457,7 +2658,7 @@ class CatalogEntityClient {
    this.catalogClient = catalogClient;
  }
  async findTemplate(templateName, options) {
-    const {items: templates} = await this.catalogClient.getEntities({
+    const { items: templates } = await this.catalogClient.getEntities({
      filter: {
        kind: "template",
        "metadata.name": templateName
@@ -2476,11 +2677,11 @@ class CatalogEntityClient {

async function getWorkingDirectory(config, logger) {
  if (!config.has("backend.workingDirectory")) {
-    return os__default[
+    return os__default["default"].tmpdir();
  }
  const workingDirectory = config.getString("backend.workingDirectory");
  try {
-    await fs__default[
+    await fs__default["default"].access(workingDirectory, fs__default["default"].constants.F_OK | fs__default["default"].constants.W_OK);
    logger.info(`using working directory: ${workingDirectory}`);
  } catch (err) {
    errors.assertError(err);
@@ -2498,7 +2699,7 @@ function getEntityBaseUrl(entity) {
  if (!location) {
    return void 0;
  }
-  const {type, target} = catalogModel.parseLocationReference(location);
+  const { type, target } = catalogModel.parseLocationReference(location);
  if (type === "url") {
    return target;
  } else if (type === "file") {
@@ -2511,8 +2712,8 @@ function isSupportedTemplate(entity) {
  return entity.apiVersion === "backstage.io/v1beta2" || entity.apiVersion === "scaffolder.backstage.io/v1beta3";
}
async function createRouter(options) {
-  const router = Router__default[
-  router.use(express__default[
+  const router = Router__default["default"]();
+  router.use(express__default["default"].json());
  const {
    logger: parentLogger,
    config,
@@ -2523,7 +2724,7 @@ async function createRouter(options) {
    containerRunner,
    taskWorkers
  } = options;
-  const logger = parentLogger.child({plugin: "scaffolder"});
+  const logger = parentLogger.child({ plugin: "scaffolder" });
  const workingDirectory = await getWorkingDirectory(config, logger);
  const entityClient = new CatalogEntityClient(catalogClient);
  const integrations = integration.ScmIntegrations.fromConfig(config);
@@ -2559,7 +2760,7 @@ async function createRouter(options) {
  workers.forEach((worker) => worker.start());
  router.get("/v2/templates/:namespace/:kind/:name/parameter-schema", async (req, res) => {
    var _a, _b;
-    const {namespace, kind, name} = req.params;
+    const { namespace, kind, name } = req.params;
    if (namespace !== "default") {
      throw new errors.InputError(`Invalid namespace, only 'default' namespace is supported`);
    }
@@ -2606,7 +2807,7 @@ async function createRouter(options) {
    for (const parameters of [(_a = template.spec.parameters) != null ? _a : []].flat()) {
      const result2 = jsonschema.validate(values, parameters);
      if (!result2.valid) {
-        res.status(400).json({errors: result2.errors});
+        res.status(400).json({ errors: result2.errors });
        return;
      }
    }
@@ -2624,7 +2825,7 @@ async function createRouter(options) {
        };
      }),
      output: (_b = template.spec.output) != null ? _b : {},
-      metadata: {name: (_c = template.metadata) == null ? void 0 : _c.name}
+      metadata: { name: (_c = template.metadata) == null ? void 0 : _c.name }
    } : {
      apiVersion: template.apiVersion,
      baseUrl,
@@ -2638,7 +2839,7 @@ async function createRouter(options) {
        };
      }),
      output: (_d = template.spec.output) != null ? _d : {},
-      metadata: {name: (_e = template.metadata) == null ? void 0 : _e.name}
+      metadata: { name: (_e = template.metadata) == null ? void 0 : _e.name }
    };
  } else {
    throw new errors.InputError(`Unsupported apiVersion field in schema entity, ${template.apiVersion}`);
@@ -2646,9 +2847,9 @@ async function createRouter(options) {
    const result = await taskBroker.dispatch(taskSpec, {
      token
    });
-    res.status(201).json({id: result.taskId});
+    res.status(201).json({ id: result.taskId });
  }).get("/v2/tasks/:taskId", async (req, res) => {
-    const {taskId} = req.params;
+    const { taskId } = req.params;
    const task = await taskBroker.get(taskId);
    if (!task) {
      throw new errors.NotFoundError(`Task with id ${taskId} does not exist`);
@@ -2656,7 +2857,7 @@ async function createRouter(options) {
    delete task.secrets;
    res.status(200).json(task);
  }).get("/v2/tasks/:taskId/eventstream", async (req, res) => {
-    const {taskId} = req.params;
+    const { taskId } = req.params;
    const after = req.query.after !== void 0 ? Number(req.query.after) : void 0;
    logger.debug(`Event stream observing taskId '${taskId}' opened`);
    res.writeHead(200, {
@@ -2664,7 +2865,7 @@ async function createRouter(options) {
      "Cache-Control": "no-cache",
      "Content-Type": "text/event-stream"
    });
-    const {unsubscribe} = taskBroker.observe({taskId, after}, (error, {events}) => {
+    const { unsubscribe } = taskBroker.observe({ taskId, after }, (error, { events }) => {
      var _a;
      if (error) {
        logger.error(`Received error from event stream when observing taskId '${taskId}', ${error}`);
@@ -2688,7 +2889,7 @@ data: ${JSON.stringify(event)}
      logger.debug(`Event stream observing taskId '${taskId}' closed`);
    });
  }).get("/v2/tasks/:taskId/events", async (req, res) => {
-    const {taskId} = req.params;
+    const { taskId } = req.params;
    const after = Number(req.query.after) || void 0;
    let unsubscribe = () => {
    };
@@ -2696,7 +2897,7 @@ data: ${JSON.stringify(event)}
      unsubscribe();
      res.json([]);
    }, 3e4);
-    ({unsubscribe} = taskBroker.observe({taskId, after}, (error, {events}) => {
+    ({ unsubscribe } = taskBroker.observe({ taskId, after }, (error, { events }) => {
      clearTimeout(timeout);
      unsubscribe();
      if (error) {
@@ -2709,7 +2910,7 @@ data: ${JSON.stringify(event)}
      clearTimeout(timeout);
    });
  });
-  const app = express__default[
+  const app = express__default["default"]();
  app.set("logger", logger);
  app.use("/", router);
  return app;
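
The router exposes task progress two ways: `/v2/tasks/:taskId/eventstream` streams `text/event-stream` messages, while `/v2/tasks/:taskId/events` long-polls, replying with `[]` after the 30-second timeout (`3e4` ms) if nothing new arrived; both accept an `after` cursor. A hedged client sketch using the long-poll variant; the base URL, port, and event field names (`id`, `type`, `body`) are assumptions about a typical Backstage deployment, not guarantees of this diff:

```js
// Hedged consumer for the /v2/tasks/:taskId/events long-poll endpoint.
// BASE (including the default backend port 7007) and the event fields
// `id`, `type`, and `body` are assumptions, not shown in this diff.
const fetch = require('node-fetch');

const BASE = 'http://localhost:7007/api/scaffolder';

async function pollEvents(taskId, after) {
  // Returns new events, or [] once the server-side 30s timeout fires.
  const res = await fetch(`${BASE}/v2/tasks/${taskId}/events?after=${after}`);
  return res.json();
}

async function followTask(taskId) {
  let after = 0;
  for (;;) {
    const events = await pollEvents(taskId, after);
    for (const event of events) {
      console.log(event.type, JSON.stringify(event.body));
      after = event.id; // advance the cursor past what we've seen
    }
    if (events.some((e) => e.type === 'completion')) {
      return; // task finished (or failed); stop polling
    }
  }
}

followTask(process.argv[2]).catch((err) => console.error(err));
```
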
@@ -2769,9 +2970,7 @@ class ScaffolderEntitiesProcessor {

Object.defineProperty(exports, 'createFetchCookiecutterAction', {
  enumerable: true,
-  get: function () {
-    return pluginScaffolderBackendModuleCookiecutter.createFetchCookiecutterAction;
-  }
+  get: function () { return pluginScaffolderBackendModuleCookiecutter.createFetchCookiecutterAction; }
});
exports.CatalogEntityClient = CatalogEntityClient;
exports.DatabaseTaskStore = DatabaseTaskStore;
@@ -2796,6 +2995,7 @@ exports.createPublishFileAction = createPublishFileAction;
exports.createPublishGithubAction = createPublishGithubAction;
exports.createPublishGithubPullRequestAction = createPublishGithubPullRequestAction;
exports.createPublishGitlabAction = createPublishGitlabAction;
+exports.createPublishGitlabMergeRequestAction = createPublishGitlabMergeRequestAction;
exports.createRouter = createRouter;
exports.createTemplateAction = createTemplateAction;
exports.fetchContents = fetchContents;
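
Beyond the formatting churn, the one substantive change in this export block is the new `createPublishGitlabMergeRequestAction`. A hedged registration sketch follows; the `{ integrations }` option is assumed by analogy with the sibling `createPublishGitlabAction` and should be verified against the 0.15.17 release docs:

```js
// Hedged sketch: wiring the newly exported GitLab merge request action
// into a custom action list. The { integrations } option is an
// assumption mirroring createPublishGitlabAction; verify before use.
const {
  createPublishGitlabMergeRequestAction,
} = require('@backstage/plugin-scaffolder-backend');
const { ScmIntegrations } = require('@backstage/integration');

function buildGitlabActions(config) {
  const integrations = ScmIntegrations.fromConfig(config);
  return [createPublishGitlabMergeRequestAction({ integrations })];
}

module.exports = { buildGitlabActions };
```
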