@backstage/plugin-scaffolder-backend 0.15.12 → 0.15.16
This diff shows the changes between publicly released versions of the package as they appear in their respective public registries, and is provided for informational purposes only.
- package/CHANGELOG.md +47 -0
- package/assets/nunjucks.js.txt +10385 -0
- package/config.d.ts +12 -12
- package/dist/index.cjs.js +283 -191
- package/dist/index.cjs.js.map +1 -1
- package/dist/index.d.ts +67 -47
- package/package.json +20 -16
package/dist/index.cjs.js
CHANGED
@@ -5,17 +5,17 @@ Object.defineProperty(exports, '__esModule', { value: true });
 var errors = require('@backstage/errors');
 var catalogModel = require('@backstage/catalog-model');
 var fs = require('fs-extra');
-var path = require('path');
 var yaml = require('yaml');
 var backendCommon = require('@backstage/backend-common');
+var path = require('path');
 var globby = require('globby');
-var nunjucks = require('nunjucks');
 var isbinaryfile = require('isbinaryfile');
+var vm2 = require('vm2');
 var pluginScaffolderBackendModuleCookiecutter = require('@backstage/plugin-scaffolder-backend-module-cookiecutter');
 var child_process = require('child_process');
 var stream = require('stream');
 var azureDevopsNodeApi = require('azure-devops-node-api');
-var fetch = require('
+var fetch = require('node-fetch');
 var integration = require('@backstage/integration');
 var rest = require('@octokit/rest');
 var lodash = require('lodash');
@@ -27,6 +27,7 @@ var luxon = require('luxon');
 var Handlebars = require('handlebars');
 var winston = require('winston');
 var jsonschema = require('jsonschema');
+var nunjucks = require('nunjucks');
 var express = require('express');
 var Router = require('express-promise-router');
 var os = require('os');
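Note: the interesting move in the import block is that nunjucks disappears from the template-evaluation path (it is re-required further down only for its parser) and vm2 arrives in its place, so user-supplied templates can run in an isolated sandbox. A minimal sketch of that isolation pattern, using vm2's VM API as it is used later in this file (the shout helper is made up for illustration):

const { VM } = require('vm2');

// Only values explicitly placed in the sandbox are reachable from evaluated
// code; there is no require(), fs, or process inside the VM.
const vm = new VM({
  sandbox: {
    shout: (s) => s.toUpperCase(), // hypothetical whitelisted helper
  },
});

vm.setGlobal('input', 'hello');
console.log(vm.run('shout(input)')); // => "HELLO"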
@@ -44,26 +45,23 @@ function _interopNamespace(e) {
 var d = Object.getOwnPropertyDescriptor(e, k);
 Object.defineProperty(n, k, d.get ? d : {
 enumerable: true,
-get: function () {
-return e[k];
-}
+get: function () { return e[k]; }
 });
 }
 });
 }
-n[
+n["default"] = e;
 return Object.freeze(n);
 }
 
 var fs__default = /*#__PURE__*/_interopDefaultLegacy(fs);
-var path__namespace = /*#__PURE__*/_interopNamespace(path);
-var path__default = /*#__PURE__*/_interopDefaultLegacy(path);
 var yaml__namespace = /*#__PURE__*/_interopNamespace(yaml);
+var path__default = /*#__PURE__*/_interopDefaultLegacy(path);
 var globby__default = /*#__PURE__*/_interopDefaultLegacy(globby);
-var nunjucks__default = /*#__PURE__*/_interopDefaultLegacy(nunjucks);
 var fetch__default = /*#__PURE__*/_interopDefaultLegacy(fetch);
 var Handlebars__namespace = /*#__PURE__*/_interopNamespace(Handlebars);
 var winston__namespace = /*#__PURE__*/_interopNamespace(winston);
+var nunjucks__default = /*#__PURE__*/_interopDefaultLegacy(nunjucks);
 var express__default = /*#__PURE__*/_interopDefaultLegacy(express);
 var Router__default = /*#__PURE__*/_interopDefaultLegacy(Router);
 var os__default = /*#__PURE__*/_interopDefaultLegacy(os);
@@ -73,7 +71,7 @@ const createTemplateAction = (templateAction) => {
 };
 
 function createCatalogRegisterAction(options) {
-const {catalogClient, integrations} = options;
+const { catalogClient, integrations } = options;
 return createTemplateAction({
 id: "catalog:register",
 description: "Registers entities from a catalog descriptor file in the workspace into the software catalog.",
@@ -122,12 +120,12 @@ function createCatalogRegisterAction(options) {
 },
 async handler(ctx) {
 var _a;
-const {input} = ctx;
+const { input } = ctx;
 let catalogInfoUrl;
 if ("catalogInfoUrl" in input) {
 catalogInfoUrl = input.catalogInfoUrl;
 } else {
-const {repoContentsUrl, catalogInfoPath = "/catalog-info.yaml"} = input;
+const { repoContentsUrl, catalogInfoPath = "/catalog-info.yaml" } = input;
 const integration = integrations.byUrl(repoContentsUrl);
 if (!integration) {
 throw new errors.InputError(`No integration found for host ${repoContentsUrl}`);
@@ -141,15 +139,15 @@ function createCatalogRegisterAction(options) {
 await catalogClient.addLocation({
 type: "url",
 target: catalogInfoUrl
-}, ctx.token ? {token: ctx.token} : {});
+}, ctx.token ? { token: ctx.token } : {});
 try {
 const result = await catalogClient.addLocation({
 dryRun: true,
 type: "url",
 target: catalogInfoUrl
-}, ctx.token ? {token: ctx.token} : {});
+}, ctx.token ? { token: ctx.token } : {});
 if (result.entities.length > 0) {
-const {entities} = result;
+const { entities } = result;
 const entity = (_a = entities.find((e) => !e.metadata.name.startsWith("generated-"))) != null ? _a : entities[0];
 ctx.output("entityRef", catalogModel.stringifyEntityRef(entity));
 }
@@ -181,8 +179,8 @@ function createCatalogWriteAction() {
 },
 async handler(ctx) {
 ctx.logStream.write(`Writing catalog-info.yaml`);
-const {entity} = ctx.input;
-await fs__default[
+const { entity } = ctx.input;
+await fs__default["default"].writeFile(backendCommon.resolveSafeChildPath(ctx.workspacePath, "catalog-info.yaml"), yaml__namespace.stringify(entity));
 }
 });
 }
@@ -226,7 +224,7 @@ ${files.map((f) => ` - ${path.relative(ctx.workspacePath, f)}`).join("\n")}`);
 async function recursiveReadDir(dir) {
 const subdirs = await fs.readdir(dir);
 const files = await Promise.all(subdirs.map(async (subdir) => {
-const res = path.
+const res = path.join(dir, subdir);
 return (await fs.stat(res)).isDirectory() ? recursiveReadDir(res) : [res];
 }));
 return files.reduce((a, f) => a.concat(f), []);
@@ -250,8 +248,8 @@ async function fetchContents({
 }
 if (!fetchUrlIsAbsolute && (baseUrl == null ? void 0 : baseUrl.startsWith("file://"))) {
 const basePath = baseUrl.slice("file://".length);
-const srcDir = backendCommon.resolveSafeChildPath(
-await fs__default[
+const srcDir = backendCommon.resolveSafeChildPath(path__default["default"].dirname(basePath), fetchUrl);
+await fs__default["default"].copy(srcDir, outputPath);
 } else {
 let readUrl;
 if (fetchUrlIsAbsolute) {
@@ -269,13 +267,13 @@ async function fetchContents({
 throw new errors.InputError(`Failed to fetch, template location could not be determined and the fetch URL is relative, ${fetchUrl}`);
 }
 const res = await reader.readTree(readUrl);
-await fs__default[
-await res.dir({targetDir: outputPath});
+await fs__default["default"].ensureDir(outputPath);
+await res.dir({ targetDir: outputPath });
 }
 }
 
 function createFetchPlainAction(options) {
-const {reader, integrations} = options;
+const { reader, integrations } = options;
 return createTemplateAction({
 id: "fetch:plain",
 description: "Downloads content and places it in the workspace, or optionally in a subdirectory specified by the 'targetPath' input option.",
@@ -313,9 +311,102 @@ function createFetchPlainAction(options) {
 });
 }
 
-
+const mkScript = (nunjucksSource) => `
+const { render, renderCompat } = (() => {
+const module = {};
+const process = { env: {} };
+const require = (pkg) => { if (pkg === 'events') { return function (){}; }};
+
+${nunjucksSource}
+
+const env = module.exports.configure({
+autoescape: false,
+tags: {
+variableStart: '\${{',
+variableEnd: '}}',
+},
+});
+
+const compatEnv = module.exports.configure({
+autoescape: false,
+tags: {
+variableStart: '{{',
+variableEnd: '}}',
+},
+});
+compatEnv.addFilter('jsonify', compatEnv.getFilter('dump'));
+
+if (typeof parseRepoUrl !== 'undefined') {
+const safeHelperRef = parseRepoUrl;
+
+env.addFilter('parseRepoUrl', repoUrl => {
+return JSON.parse(safeHelperRef(repoUrl))
+});
+env.addFilter('projectSlug', repoUrl => {
+const { owner, repo } = JSON.parse(safeHelperRef(repoUrl));
+return owner + '/' + repo;
+});
+}
+
+let uninstallCompat = undefined;
+
+function render(str, values) {
+try {
+if (uninstallCompat) {
+uninstallCompat();
+uninstallCompat = undefined;
+}
+return env.renderString(str, JSON.parse(values));
+} catch (error) {
+// Make sure errors don't leak anything
+throw new Error(String(error.message));
+}
+}
+
+function renderCompat(str, values) {
+try {
+if (!uninstallCompat) {
+uninstallCompat = module.exports.installJinjaCompat();
+}
+return compatEnv.renderString(str, JSON.parse(values));
+} catch (error) {
+// Make sure errors don't leak anything
+throw new Error(String(error.message));
+}
+}
+
+return { render, renderCompat };
+})();
+`;
+class SecureTemplater {
+static async loadRenderer(options = {}) {
+const { parseRepoUrl, cookiecutterCompat } = options;
+let sandbox = void 0;
+if (parseRepoUrl) {
+sandbox = {
+parseRepoUrl: (url) => JSON.stringify(parseRepoUrl(url))
+};
+}
+const vm = new vm2.VM({ sandbox });
+const nunjucksSource = await fs__default["default"].readFile(backendCommon.resolvePackagePath("@backstage/plugin-scaffolder-backend", "assets/nunjucks.js.txt"), "utf-8");
+vm.run(mkScript(nunjucksSource));
+const render = (template, values) => {
+if (!vm) {
+throw new Error("SecureTemplater has not been initialized");
+}
+vm.setGlobal("templateStr", template);
+vm.setGlobal("templateValues", JSON.stringify(values));
+if (cookiecutterCompat) {
+return vm.run(`renderCompat(templateStr, templateValues)`);
+}
+return vm.run(`render(templateStr, templateValues)`);
+};
+return render;
+}
+}
+
 function createFetchTemplateAction(options) {
-const {reader, integrations} = options;
+const { reader, integrations } = options;
 return createTemplateAction({
 id: "fetch:template",
 description: "Downloads a skeleton, templates variables into file and directory names and content, and places the result in the workspace, or optionally in a subdirectory specified by the 'targetPath' input option.",
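Note: this hunk is the heart of the release. Template rendering moves out of the host process: nunjucks is loaded from source (the new assets/nunjucks.js.txt) inside a vm2 VM, host helpers cross the boundary only as JSON strings, and errors are re-thrown with their message alone so nothing from the sandbox leaks. A hypothetical call, assuming the shapes shown above (the parseRepoUrl return value is illustrative):

const render = await SecureTemplater.loadRenderer({
  cookiecutterCompat: false,
  // host-side helper; the sandbox only ever sees its JSON.stringify'd output
  parseRepoUrl: (url) => ({ host: 'github.com', owner: 'acme', repo: 'widget' }),
});

render('${{ values.name | upper }}', { values: { name: 'my-service' } }); // => "MY-SERVICE"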
@@ -364,7 +455,7 @@ function createFetchTemplateAction(options) {
 var _a;
 ctx.logger.info("Fetching template content from remote URL");
 const workDir = await ctx.createTemporaryDirectory();
-const templateDir =
+const templateDir = backendCommon.resolveSafeChildPath(workDir, "template");
 const targetPath = (_a = ctx.input.targetPath) != null ? _a : "./";
 const outputDir = backendCommon.resolveSafeChildPath(ctx.workspacePath, targetPath);
 if (ctx.input.copyWithoutRender && !Array.isArray(ctx.input.copyWithoutRender)) {
@@ -388,35 +479,26 @@ function createFetchTemplateAction(options) {
 outputPath: templateDir
 });
 ctx.logger.info("Listing files and directories in template");
-const allEntriesInTemplate = await globby__default[
+const allEntriesInTemplate = await globby__default["default"](`**/*`, {
 cwd: templateDir,
 dot: true,
 onlyFiles: false,
 markDirectories: true
 });
-const nonTemplatedEntries = new Set((await Promise.all((ctx.input.copyWithoutRender || []).map((pattern) => globby__default[
+const nonTemplatedEntries = new Set((await Promise.all((ctx.input.copyWithoutRender || []).map((pattern) => globby__default["default"](pattern, {
 cwd: templateDir,
 dot: true,
 onlyFiles: false,
 markDirectories: true
 })))).flat());
-const
-...ctx.input.cookiecutterCompat ? {} : {
-tags: {
-variableStart: "${{",
-variableEnd: "}}"
-}
-},
-autoescape: false
-});
-if (ctx.input.cookiecutterCompat) {
-templater.addFilter("jsonify", templater.getFilter("dump"));
-}
-const {cookiecutterCompat, values} = ctx.input;
+const { cookiecutterCompat, values } = ctx.input;
 const context = {
 [cookiecutterCompat ? "cookiecutter" : "values"]: values
 };
 ctx.logger.info(`Processing ${allEntriesInTemplate.length} template files/directories with input values`, ctx.input.values);
+const renderTemplate = await SecureTemplater.loadRenderer({
+cookiecutterCompat: ctx.input.cookiecutterCompat
+});
 for (const location of allEntriesInTemplate) {
 let renderFilename;
 let renderContents;
@@ -431,25 +513,28 @@ function createFetchTemplateAction(options) {
 renderFilename = renderContents = !nonTemplatedEntries.has(location);
 }
 if (renderFilename) {
-localOutputPath =
+localOutputPath = renderTemplate(localOutputPath, context);
+}
+const outputPath = backendCommon.resolveSafeChildPath(outputDir, localOutputPath);
+if (outputDir === outputPath) {
+continue;
 }
-const outputPath = path.resolve(outputDir, localOutputPath);
 if (!renderContents && !extension) {
 ctx.logger.info(`Copying file/directory ${location} without processing.`);
 }
 if (location.endsWith("/")) {
 ctx.logger.info(`Writing directory ${location} to template output path.`);
-await fs__default[
+await fs__default["default"].ensureDir(outputPath);
 } else {
-const inputFilePath =
+const inputFilePath = backendCommon.resolveSafeChildPath(templateDir, location);
 if (await isbinaryfile.isBinaryFile(inputFilePath)) {
 ctx.logger.info(`Copying binary file ${location} to template output path.`);
-await fs__default[
+await fs__default["default"].copy(inputFilePath, outputPath);
 } else {
-const statsObj = await fs__default[
+const statsObj = await fs__default["default"].stat(inputFilePath);
 ctx.logger.info(`Writing file ${location} to template output path with mode ${statsObj.mode}.`);
-const inputFileContents = await fs__default[
-await fs__default[
+const inputFileContents = await fs__default["default"].readFile(inputFilePath, "utf-8");
+await fs__default["default"].outputFile(outputPath, renderContents ? renderTemplate(inputFileContents, context) : inputFileContents, { mode: statsObj.mode });
 }
 }
 }
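Note: besides switching to the sandboxed renderTemplate, this hunk hardens path handling: rendered file names now go through backendCommon.resolveSafeChildPath instead of path.resolve, and an entry that renders to the output root itself is skipped. A simplified sketch of the traversal guard, for illustration only (not @backstage/backend-common's actual implementation):

const path = require('path');

function resolveSafeChildPathSketch(base, child) {
  const resolved = path.resolve(base, child);
  // reject anything that escapes the base directory
  if (resolved !== base && !resolved.startsWith(base + path.sep)) {
    throw new Error('Relative path is not allowed to refer to a directory outside its parent');
  }
  return resolved;
}

resolveSafeChildPathSketch('/work/out', 'src/index.ts'); // => '/work/out/src/index.ts'
resolveSafeChildPathSketch('/work/out', '../../etc/passwd'); // throws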
@@ -486,7 +571,7 @@ const createFilesystemDeleteAction = () => {
 for (const file of ctx.input.files) {
 const filepath = backendCommon.resolveSafeChildPath(ctx.workspacePath, file);
 try {
-await fs__default[
+await fs__default["default"].remove(filepath);
 ctx.logger.info(`File ${filepath} deleted successfully`);
 } catch (err) {
 ctx.logger.error(`Failed to delete file ${filepath}:`, err);
@@ -544,7 +629,7 @@ const createFilesystemRenameAction = () => {
 const sourceFilepath = backendCommon.resolveSafeChildPath(ctx.workspacePath, file.from);
 const destFilepath = backendCommon.resolveSafeChildPath(ctx.workspacePath, file.to);
 try {
-await fs__default[
+await fs__default["default"].move(sourceFilepath, destFilepath, {
 overwrite: (_b = file.overwrite) != null ? _b : false
 });
 ctx.logger.info(`File ${sourceFilepath} renamed to ${destFilepath} successfully`);
@@ -560,10 +645,11 @@ const createFilesystemRenameAction = () => {
 const runCommand = async ({
 command,
 args,
-logStream = new stream.PassThrough()
+logStream = new stream.PassThrough(),
+options
 }) => {
 await new Promise((resolve, reject) => {
-const process = child_process.spawn(command, args);
+const process = child_process.spawn(command, args, options);
 process.stdout.on("data", (stream) => {
 logStream.write(stream);
 });
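Note: runCommand gains an options argument that is forwarded verbatim to child_process.spawn. A hypothetical call (the paths and env values are made up):

await runCommand({
  command: 'git',
  args: ['status'],
  logStream: process.stdout,
  // passed straight through to spawn(), e.g. to pin the working directory
  options: { cwd: '/tmp/scaffolder-workspace', env: { ...process.env, GIT_TERMINAL_PROMPT: '0' } },
});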
@@ -600,7 +686,7 @@ async function initRepoAndPush({
 dir,
 defaultBranch
 });
-await git.add({dir, filepath: "."});
+await git.add({ dir, filepath: "." });
 const authorInfo = {
 name: (_a = gitAuthorInfo == null ? void 0 : gitAuthorInfo.name) != null ? _a : "Scaffolder",
 email: (_b = gitAuthorInfo == null ? void 0 : gitAuthorInfo.email) != null ? _b : "scaffolder@backstage.io"
@@ -638,7 +724,7 @@ const enableBranchProtectionOnDefaultRepoBranch = async ({
 owner,
 repo: repoName,
 branch: defaultBranch,
-required_status_checks: {strict: true, contexts: []},
+required_status_checks: { strict: true, contexts: [] },
 restrictions: null,
 enforce_admins: true,
 required_pull_request_reviews: {
@@ -712,7 +798,7 @@ const parseRepoUrl = (repoUrl, integrations) => {
 if (!repo) {
 throw new errors.InputError(`Invalid repo URL passed to publisher: ${repoUrl}, missing repo`);
 }
-return {host, owner, repo, organization, workspace, project};
+return { host, owner, repo, organization, workspace, project };
 };
 const isExecutable = (fileMode) => {
 const executeBitMask = 73;
@@ -721,7 +807,7 @@ const isExecutable = (fileMode) => {
 };
 
 function createPublishAzureAction(options) {
-const {integrations, config} = options;
+const { integrations, config } = options;
 return createTemplateAction({
 id: "publish:azure",
 description: "Initializes a git repository of the content in the workspace, and publishes it to Azure.",
@@ -764,8 +850,8 @@ function createPublishAzureAction(options) {
 }
 },
 async handler(ctx) {
-const {repoUrl, defaultBranch = "master"} = ctx.input;
-const {owner, repo, host, organization} = parseRepoUrl(repoUrl, integrations);
+const { repoUrl, defaultBranch = "master" } = ctx.input;
+const { owner, repo, host, organization } = parseRepoUrl(repoUrl, integrations);
 if (!organization) {
 throw new errors.InputError(`Invalid URL provider was included in the repo URL to create ${ctx.input.repoUrl}, missing organization`);
 }
@@ -779,7 +865,7 @@ function createPublishAzureAction(options) {
 const authHandler = azureDevopsNodeApi.getPersonalAccessTokenHandler(integrationConfig.config.token);
 const webApi = new azureDevopsNodeApi.WebApi(`https://${host}/${organization}`, authHandler);
 const client = await webApi.getGitApi();
-const createOptions = {name: repo};
+const createOptions = { name: repo };
 const returnedRepo = await client.createRepository(createOptions, owner);
 if (!returnedRepo) {
 throw new errors.InputError(`Unable to create the repository with Organization ${organization}, Project ${owner} and Repo ${repo}.
@@ -827,7 +913,7 @@ const createBitbucketCloudRepository = async (opts) => {
 scm: "git",
 description,
 is_private: repoVisibility === "private",
-project: {key: project}
+project: { key: project }
 }),
 headers: {
 Authorization: authorization,
@@ -836,7 +922,7 @@ const createBitbucketCloudRepository = async (opts) => {
 };
 let response;
 try {
-response = await fetch__default[
+response = await fetch__default["default"](`https://api.bitbucket.org/2.0/repositories/${workspace}/${repo}`, options);
 } catch (e) {
 throw new Error(`Unable to create repository, ${e}`);
 }
@@ -851,7 +937,7 @@ const createBitbucketCloudRepository = async (opts) => {
 }
 }
 const repoContentsUrl = `${r.links.html.href}/src/master`;
-return {remoteUrl, repoContentsUrl};
+return { remoteUrl, repoContentsUrl };
 };
 const createBitbucketServerRepository = async (opts) => {
 const {
@@ -878,7 +964,7 @@ const createBitbucketServerRepository = async (opts) => {
 };
 try {
 const baseUrl = apiBaseUrl ? apiBaseUrl : `https://${host}/rest/api/1.0`;
-response = await fetch__default[
+response = await fetch__default["default"](`${baseUrl}/projects/${project}/repos`, options);
 } catch (e) {
 throw new Error(`Unable to create repository, ${e}`);
 }
@@ -893,7 +979,7 @@ const createBitbucketServerRepository = async (opts) => {
 }
 }
 const repoContentsUrl = `${r.links.self[0].href}`;
-return {remoteUrl, repoContentsUrl};
+return { remoteUrl, repoContentsUrl };
 };
 const getAuthorizationHeader = (config) => {
 if (config.username && config.appPassword) {
@@ -906,19 +992,19 @@ const getAuthorizationHeader = (config) => {
 throw new Error(`Authorization has not been provided for Bitbucket. Please add either username + appPassword or token to the Integrations config`);
 };
 const performEnableLFS = async (opts) => {
-const {authorization, host, project, repo} = opts;
+const { authorization, host, project, repo } = opts;
 const options = {
 method: "PUT",
 headers: {
 Authorization: authorization
 }
 };
-const {ok, status, statusText} = await fetch__default[
+const { ok, status, statusText } = await fetch__default["default"](`https://${host}/rest/git-lfs/admin/projects/${project}/repos/${repo}/enabled`, options);
 if (!ok)
 throw new Error(`Failed to enable LFS in the repository, ${status}: ${statusText}`);
 };
 function createPublishBitbucketAction(options) {
-const {integrations, config} = options;
+const { integrations, config } = options;
 return createTemplateAction({
 id: "publish:bitbucket",
 description: "Initializes a git repository of the content in the workspace, and publishes it to Bitbucket.",
@@ -978,7 +1064,7 @@ function createPublishBitbucketAction(options) {
 repoVisibility = "private",
 enableLFS = false
 } = ctx.input;
-const {workspace, project, repo, host} = parseRepoUrl(repoUrl, integrations);
+const { workspace, project, repo, host } = parseRepoUrl(repoUrl, integrations);
 if (host === "bitbucket.org") {
 if (!workspace) {
 throw new errors.InputError(`Invalid URL provider was included in the repo URL to create ${ctx.input.repoUrl}, missing workspace`);
@@ -994,7 +1080,7 @@ function createPublishBitbucketAction(options) {
 const authorization = getAuthorizationHeader(integrationConfig.config);
 const apiBaseUrl = integrationConfig.config.apiBaseUrl;
 const createMethod = host === "bitbucket.org" ? createBitbucketCloudRepository : createBitbucketServerRepository;
-const {remoteUrl, repoContentsUrl} = await createMethod({
+const { remoteUrl, repoContentsUrl } = await createMethod({
 authorization,
 host,
 workspace: workspace || "",
@@ -1021,7 +1107,7 @@ function createPublishBitbucketAction(options) {
 gitAuthorInfo
 });
 if (enableLFS && host !== "bitbucket.org") {
-await performEnableLFS({authorization, host, project, repo});
+await performEnableLFS({ authorization, host, project, repo });
 }
 ctx.output("remoteUrl", remoteUrl);
 ctx.output("repoContentsUrl", repoContentsUrl);
@@ -1046,13 +1132,13 @@ function createPublishFileAction() {
 }
 },
 async handler(ctx) {
-const {path: path$1} = ctx.input;
-const exists = await fs__default[
+const { path: path$1 } = ctx.input;
+const exists = await fs__default["default"].pathExists(path$1);
 if (exists) {
 throw new errors.InputError("Output path already exists");
 }
-await fs__default[
-await fs__default[
+await fs__default["default"].ensureDir(path.dirname(path$1));
+await fs__default["default"].copy(ctx.workspacePath, path$1);
 }
 });
 }
@@ -1067,7 +1153,7 @@ class OctokitProvider {
 }
 async getOctokit(repoUrl) {
 var _a;
-const {owner, repo, host} = parseRepoUrl(repoUrl, this.integrations);
+const { owner, repo, host } = parseRepoUrl(repoUrl, this.integrations);
 if (!owner) {
 throw new errors.InputError(`No owner provided for repo ${repoUrl}`);
 }
@@ -1079,7 +1165,7 @@ class OctokitProvider {
 if (!credentialsProvider) {
 throw new errors.InputError(`No matching credentials for host ${host}, please check your integrations config`);
 }
-const {token} = await credentialsProvider.getCredentials({
+const { token } = await credentialsProvider.getCredentials({
 url: `https://${host}/${encodeURIComponent(owner)}/${encodeURIComponent(repo)}`
 });
 if (!token) {
@@ -1090,12 +1176,12 @@ class OctokitProvider {
 baseUrl: integrationConfig.apiBaseUrl,
 previews: ["nebula-preview"]
 });
-return {client, token, owner, repo};
+return { client, token, owner, repo };
 }
 }
 
 function createPublishGithubAction(options) {
-const {integrations, config} = options;
+const { integrations, config } = options;
 const octokitProvider = new OctokitProvider(integrations);
 return createTemplateAction({
 id: "publish:github",
@@ -1191,7 +1277,7 @@ function createPublishGithubAction(options) {
 collaborators,
 topics
 } = ctx.input;
-const {client, token, owner, repo} = await octokitProvider.getOctokit(repoUrl);
+const { client, token, owner, repo } = await octokitProvider.getOctokit(repoUrl);
 const user = await client.users.getByUsername({
 username: owner
 });
@@ -1206,7 +1292,7 @@ function createPublishGithubAction(options) {
 private: repoVisibility === "private",
 description
 });
-const {data: newRepo} = await repoCreationPromise;
+const { data: newRepo } = await repoCreationPromise;
 if (access == null ? void 0 : access.startsWith(`${owner}/`)) {
 const [, team] = access.split("/");
 await client.teams.addOrUpdateRepoPermissionsInOrg({
@@ -1309,7 +1395,7 @@ const defaultClientFactory = async ({
 if (!credentialsProvider) {
 throw new errors.InputError(`No matching credentials for host ${host}, please check your integrations config`);
 }
-const {token} = await credentialsProvider.getCredentials({
+const { token } = await credentialsProvider.getCredentials({
 url: `https://${host}/${encodeURIComponent(owner)}/${encodeURIComponent(repo)}`
 });
 if (!token) {
@@ -1385,21 +1471,21 @@ const createPublishGithubPullRequestAction = ({
 targetPath,
 sourcePath
 } = ctx.input;
-const {owner, repo, host} = parseRepoUrl(repoUrl, integrations);
+const { owner, repo, host } = parseRepoUrl(repoUrl, integrations);
 if (!owner) {
 throw new errors.InputError(`No owner provided for host: ${host}, and repo ${repo}`);
 }
-const client = await clientFactory({integrations, host, owner, repo});
+const client = await clientFactory({ integrations, host, owner, repo });
 const fileRoot = sourcePath ? backendCommon.resolveSafeChildPath(ctx.workspacePath, sourcePath) : ctx.workspacePath;
-const localFilePaths = await globby__default[
+const localFilePaths = await globby__default["default"](["./**", "./**/.*", "!.git"], {
 cwd: fileRoot,
 gitignore: true,
 dot: true
 });
 const fileContents = await Promise.all(localFilePaths.map((filePath) => {
-const absPath =
-const base64EncodedContent = fs__default[
-const fileStat = fs__default[
+const absPath = backendCommon.resolveSafeChildPath(fileRoot, filePath);
+const base64EncodedContent = fs__default["default"].readFileSync(absPath).toString("base64");
+const fileStat = fs__default["default"].statSync(absPath);
 const githubTreeItemMode = isExecutable(fileStat.mode) ? "100755" : "100644";
 const encoding = "base64";
 return {
@@ -1438,7 +1524,7 @@ const createPublishGithubPullRequestAction = ({
 };
 
 function createPublishGitlabAction(options) {
-const {integrations, config} = options;
+const { integrations, config } = options;
 return createTemplateAction({
 id: "publish:gitlab",
 description: "Initializes a git repository of the content in the workspace, and publishes it to GitLab.",
@@ -1487,7 +1573,7 @@ function createPublishGitlabAction(options) {
 repoVisibility = "private",
 defaultBranch = "master"
 } = ctx.input;
-const {owner, repo, host} = parseRepoUrl(repoUrl, integrations);
+const { owner, repo, host } = parseRepoUrl(repoUrl, integrations);
 if (!owner) {
 throw new errors.InputError(`No owner provided for host: ${host}, and repo ${repo}`);
 }
@@ -1502,12 +1588,12 @@ function createPublishGitlabAction(options) {
 host: integrationConfig.config.baseUrl,
 token: integrationConfig.config.token
 });
-let {id: targetNamespace} = await client.Namespaces.show(owner);
+let { id: targetNamespace } = await client.Namespaces.show(owner);
 if (!targetNamespace) {
-const {id} = await client.Users.current();
+const { id } = await client.Users.current();
 targetNamespace = id;
 }
-const {http_url_to_repo} = await client.Projects.create({
+const { http_url_to_repo } = await client.Projects.create({
 namespace_id: targetNamespace,
 name: repo,
 visibility: repoVisibility
@@ -1537,7 +1623,7 @@ function createPublishGitlabAction(options) {
 }
 
 function createGithubActionsDispatchAction(options) {
-const {integrations} = options;
+const { integrations } = options;
 const octokitProvider = new OctokitProvider(integrations);
 return createTemplateAction({
 id: "github:actions:dispatch",
@@ -1566,9 +1652,9 @@ function createGithubActionsDispatchAction(options) {
 }
 },
 async handler(ctx) {
-const {repoUrl, workflowId, branchOrTagName} = ctx.input;
+const { repoUrl, workflowId, branchOrTagName } = ctx.input;
 ctx.logger.info(`Dispatching workflow ${workflowId} for repo ${repoUrl} on ${branchOrTagName}`);
-const {client, owner, repo} = await octokitProvider.getOctokit(repoUrl);
+const { client, owner, repo } = await octokitProvider.getOctokit(repoUrl);
 await client.rest.actions.createWorkflowDispatch({
 owner,
 repo,
@@ -1581,7 +1667,7 @@ function createGithubActionsDispatchAction(options) {
 }
 
 function createGithubWebhookAction(options) {
-const {integrations, defaultWebhookSecret} = options;
+const { integrations, defaultWebhookSecret } = options;
 const octokitProvider = new OctokitProvider(integrations);
 const eventNames = webhooks.emitterEventNames.filter((event) => !event.includes("."));
 return createTemplateAction({
@@ -1656,7 +1742,7 @@ function createGithubWebhookAction(options) {
 insecureSsl = false
 } = ctx.input;
 ctx.logger.info(`Creating webhook ${webhookUrl} for repo ${repoUrl}`);
-const {client, owner, repo} = await octokitProvider.getOctokit(repoUrl);
+const { client, owner, repo } = await octokitProvider.getOctokit(repoUrl);
 try {
 const insecure_ssl = insecureSsl ? "1" : "0";
 await client.repos.createWebhook({
@@ -1681,7 +1767,7 @@ function createGithubWebhookAction(options) {
 }
 
 const createBuiltinActions = (options) => {
-const {reader, integrations, containerRunner, catalogClient, config} = options;
+const { reader, integrations, containerRunner, catalogClient, config } = options;
 return [
 createFetchPlainAction({
 reader,
@@ -1716,7 +1802,7 @@ const createBuiltinActions = (options) => {
 config
 }),
 createDebugLogAction(),
-createCatalogRegisterAction({catalogClient, integrations}),
+createCatalogRegisterAction({ catalogClient, integrations }),
 createCatalogWriteAction(),
 createFilesystemDeleteAction(),
 createFilesystemRenameAction(),
@@ -1731,7 +1817,7 @@ const createBuiltinActions = (options) => {
 
 class TemplateActionRegistry {
 constructor() {
-this.actions = new Map();
+this.actions = /* @__PURE__ */ new Map();
 }
 register(action) {
 if (this.actions.has(action.id)) {
@@ -1763,7 +1849,7 @@ class DatabaseTaskStore {
 this.db = options.database;
 }
 async getTask(taskId) {
-const [result] = await this.db("tasks").where({id: taskId}).select();
+const [result] = await this.db("tasks").where({ id: taskId }).select();
 if (!result) {
 throw new errors.NotFoundError(`No task with id '${taskId}' found`);
 }
@@ -1790,7 +1876,7 @@ class DatabaseTaskStore {
 secrets: secrets ? JSON.stringify(secrets) : void 0,
 status: "open"
 });
-return {taskId};
+return { taskId };
 }
 async claimTask() {
 return this.db.transaction(async (tx) => {
@@ -1800,7 +1886,7 @@ class DatabaseTaskStore {
 if (!task) {
 return void 0;
 }
-const updateCount = await tx("tasks").where({id: task.id, status: "open"}).update({
+const updateCount = await tx("tasks").where({ id: task.id, status: "open" }).update({
 status: "processing",
 last_heartbeat_at: this.db.fn.now()
 });
@@ -1824,14 +1910,14 @@ class DatabaseTaskStore {
 });
 }
 async heartbeatTask(taskId) {
-const updateCount = await this.db("tasks").where({id: taskId, status: "processing"}).update({
+const updateCount = await this.db("tasks").where({ id: taskId, status: "processing" }).update({
 last_heartbeat_at: this.db.fn.now()
 });
 if (updateCount === 0) {
 throw new errors.ConflictError(`No running task with taskId ${taskId} found`);
 }
 }
-async listStaleTasks({timeoutS}) {
+async listStaleTasks({ timeoutS }) {
 const rawRows = await this.db("tasks").where("status", "processing").andWhere("last_heartbeat_at", "<=", this.db.client.config.client === "sqlite3" ? this.db.raw(`datetime('now', ?)`, [`-${timeoutS} seconds`]) : this.db.raw(`dateadd('second', ?, ?)`, [
 `-${timeoutS}`,
 this.db.fn.now()
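Note: listStaleTasks has to phrase "heartbeat older than N seconds" differently per SQL dialect. The same split in isolation, assuming a knex instance named db (the timeout value is illustrative):

const timeoutS = 600;
const cutoff = db.client.config.client === 'sqlite3'
  ? db.raw(`datetime('now', ?)`, [`-${timeoutS} seconds`]) // sqlite3: shift "now" with datetime()
  : db.raw(`dateadd('second', ?, ?)`, [`-${timeoutS}`, db.fn.now()]); // others: dateadd()
const staleRows = await db('tasks')
  .where('status', 'processing')
  .andWhere('last_heartbeat_at', '<=', cutoff);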
@@ -1839,7 +1925,7 @@ class DatabaseTaskStore {
 const tasks = rawRows.map((row) => ({
 taskId: row.id
 }));
-return {tasks};
+return { tasks };
 }
 async completeTask({
 taskId,
@@ -1879,7 +1965,7 @@ class DatabaseTaskStore {
 });
 });
 }
-async emitLogEvent({taskId, body}) {
+async emitLogEvent({ taskId, body }) {
 const serliazedBody = JSON.stringify(body);
 await this.db("task_events").insert({
 task_id: taskId,
@@ -1906,13 +1992,13 @@ class DatabaseTaskStore {
 taskId,
 body,
 type: event.event_type,
-createdAt: typeof event.created_at === "string" ? luxon.DateTime.fromSQL(event.created_at, {zone: "UTC"}).toISO() : event.created_at
+createdAt: typeof event.created_at === "string" ? luxon.DateTime.fromSQL(event.created_at, { zone: "UTC" }).toISO() : event.created_at
 };
 } catch (error) {
 throw new Error(`Failed to parse event body from event taskId=${taskId} id=${event.id}, ${error}`);
 }
 });
-return {events};
+return { events };
 }
 }
 
@@ -1943,7 +2029,7 @@ class TaskManager {
 async emitLog(message, metadata) {
 await this.storage.emitLogEvent({
 taskId: this.state.taskId,
-body: {message, ...metadata}
+body: { message, ...metadata }
 });
 }
 async complete(result, metadata) {
@@ -1978,7 +2064,7 @@ function defer() {
 const promise = new Promise((_resolve) => {
 resolve = _resolve;
 });
-return {promise, resolve};
+return { promise, resolve };
 }
 class StorageTaskBroker {
 constructor(storage, logger) {
@@ -2010,7 +2096,7 @@ class StorageTaskBroker {
 return this.storage.getTask(taskId);
 }
 observe(options, callback) {
-const {taskId} = options;
+const { taskId } = options;
 let cancelled = false;
 const unsubscribe = () => {
 cancelled = true;
@@ -2018,24 +2104,24 @@ class StorageTaskBroker {
 (async () => {
 let after = options.after;
 while (!cancelled) {
-const result = await this.storage.listEvents({taskId, after});
-const {events} = result;
+const result = await this.storage.listEvents({ taskId, after });
+const { events } = result;
 if (events.length) {
 after = events[events.length - 1].id;
 try {
 callback(void 0, result);
 } catch (error) {
 errors.assertError(error);
-callback(error, {events: []});
+callback(error, { events: [] });
 }
 }
 await new Promise((resolve) => setTimeout(resolve, 1e3));
 }
 })();
-return {unsubscribe};
+return { unsubscribe };
 }
 async vacuumTasks(timeoutS) {
-const {tasks} = await this.storage.listStaleTasks(timeoutS);
+const { tasks } = await this.storage.listStaleTasks(timeoutS);
 await Promise.all(tasks.map(async (task) => {
 try {
 await this.storage.completeTask({
@@ -2072,7 +2158,7 @@ class HandlebarsWorkflowRunner {
 return JSON.stringify(parseRepoUrl(repoUrl, this.options.integrations));
 });
 this.handlebars.registerHelper("projectSlug", (repoUrl) => {
-const {owner, repo} = parseRepoUrl(repoUrl, this.options.integrations);
+const { owner, repo } = parseRepoUrl(repoUrl, this.options.integrations);
 return `${owner}/${repo}`;
 });
 this.handlebars.registerHelper("json", (obj) => JSON.stringify(obj));
@@ -2084,14 +2170,14 @@ class HandlebarsWorkflowRunner {
 if (!isValidTaskSpec$1(task.spec)) {
 throw new errors.InputError(`Task spec is not a valid v1beta2 task spec`);
 }
-const {actionRegistry} = this.options;
-const workspacePath = path__default[
+const { actionRegistry } = this.options;
+const workspacePath = path__default["default"].join(this.options.workingDirectory, await task.getWorkspaceName());
 try {
-await fs__default[
+await fs__default["default"].ensureDir(workspacePath);
 await task.emitLog(`Starting up task with ${task.spec.steps.length} steps`);
-const templateCtx = {parameters: task.spec.values, steps: {}};
+const templateCtx = { parameters: task.spec.values, steps: {} };
 for (const step of task.spec.steps) {
-const metadata = {stepId: step.id};
+const metadata = { stepId: step.id };
 try {
 const taskLogger = winston__namespace.createLogger({
 level: process.env.LOG_LEVEL || "info",
@@ -2105,7 +2191,7 @@ class HandlebarsWorkflowRunner {
 await task.emitLog(message, metadata);
 }
 });
-taskLogger.add(new winston__namespace.transports.Stream({stream: stream$1}));
+taskLogger.add(new winston__namespace.transports.Stream({ stream: stream$1 }));
 if (step.if !== void 0) {
 let skip = !step.if;
 if (typeof step.if === "string") {
@@ -2175,6 +2261,9 @@ class HandlebarsWorkflowRunner {
 this.options.logger.debug(`Running ${action.id} with input`, {
 input: JSON.stringify(input, null, 2)
 });
+if (!task.spec.metadata) {
+console.warn("DEPRECATION NOTICE: metadata is undefined. metadata will be required in the future.");
+}
 await action.handler({
 baseUrl: task.spec.baseUrl,
 logger: taskLogger,
@@ -2183,18 +2272,19 @@ class HandlebarsWorkflowRunner {
 token: (_b = task.secrets) == null ? void 0 : _b.token,
 workspacePath,
 async createTemporaryDirectory() {
-const tmpDir = await fs__default[
+const tmpDir = await fs__default["default"].mkdtemp(`${workspacePath}_step-${step.id}-`);
 tmpDirs.push(tmpDir);
 return tmpDir;
 },
 output(name, value) {
 stepOutputs[name] = value;
-}
+},
+metadata: task.spec.metadata
 });
 for (const tmpDir of tmpDirs) {
-await fs__default[
+await fs__default["default"].remove(tmpDir);
 }
-templateCtx.steps[step.id] = {output: stepOutputs};
+templateCtx.steps[step.id] = { output: stepOutputs };
 await task.emitLog(`Finished step ${step.name}`, {
 ...metadata,
 status: "completed"
@@ -2228,10 +2318,10 @@ class HandlebarsWorkflowRunner {
 }
 return value;
 });
-return {output};
+return { output };
 } finally {
 if (workspacePath) {
-await fs__default[
+await fs__default["default"].remove(workspacePath);
 }
 }
 }
@@ -2244,7 +2334,7 @@ const createStepLogger = ({
 task,
 step
 }) => {
-const metadata = {stepId: step.id};
+const metadata = { stepId: step.id };
 const taskLogger = winston__namespace.createLogger({
 level: process.env.LOG_LEVEL || "info",
 format: winston__namespace.format.combine(winston__namespace.format.colorize(), winston__namespace.format.timestamp(), winston__namespace.format.simple()),
@@ -2257,41 +2347,33 @@ const createStepLogger = ({
 await task.emitLog(message, metadata);
 }
 });
-taskLogger.add(new winston__namespace.transports.Stream({stream: streamLogger}));
-return {taskLogger, streamLogger};
+taskLogger.add(new winston__namespace.transports.Stream({ stream: streamLogger }));
+return { taskLogger, streamLogger };
 };
 class NunjucksWorkflowRunner {
 constructor(options) {
 this.options = options;
-
+}
+isSingleTemplateString(input) {
+var _a, _b;
+const { parser, nodes } = nunjucks__default["default"];
+const parsed = parser.parse(input, {}, {
 autoescape: false,
 tags: {
 variableStart: "${{",
 variableEnd: "}}"
 }
-};
-this.nunjucks = nunjucks__default['default'].configure(this.nunjucksOptions);
-this.nunjucks.addFilter("parseRepoUrl", (repoUrl) => {
-return parseRepoUrl(repoUrl, this.options.integrations);
-});
-this.nunjucks.addFilter("projectSlug", (repoUrl) => {
-const {owner, repo} = parseRepoUrl(repoUrl, this.options.integrations);
-return `${owner}/${repo}`;
 });
+return parsed.children.length === 1 && !(((_b = (_a = parsed.children[0]) == null ? void 0 : _a.children) == null ? void 0 : _b[0]) instanceof nodes.TemplateData);
 }
-
-const {parser, nodes} = require("nunjucks");
-const parsed = parser.parse(input, {}, this.nunjucksOptions);
-return parsed.children.length === 1 && !(parsed.children[0] instanceof nodes.TemplateData);
-}
-render(input, context) {
+render(input, context, renderTemplate) {
 return JSON.parse(JSON.stringify(input), (_key, value) => {
 try {
 if (typeof value === "string") {
 try {
 if (this.isSingleTemplateString(value)) {
 const wrappedDumped = value.replace(/\${{(.+)}}/g, "${{ ( $1 ) | dump }}");
-const templated2 =
+const templated2 = renderTemplate(wrappedDumped, context);
 if (templated2 === "") {
 return void 0;
 }
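Note: isSingleTemplateString now reads parser and nodes off the bundled nunjucks default export instead of a fresh require(). What the check distinguishes, sketched with nunjucks's exported parser (behaviour as relied on above):

const { parser, nodes } = require('nunjucks');
const opts = { autoescape: false, tags: { variableStart: '${{', variableEnd: '}}' } };

// A lone expression: one root child whose first grandchild is not literal
// text, so the rendered value may keep its original type.
const single = parser.parse('${{ values.count }}', {}, opts);
console.log(single.children.length === 1); // true

// Mixed text and expression: the first grandchild is a TemplateData literal,
// so the result must stay a plain string.
const mixed = parser.parse('count: ${{ values.count }}', {}, opts);
console.log(mixed.children[0].children[0] instanceof nodes.TemplateData); // true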
@@ -2300,7 +2382,7 @@ class NunjucksWorkflowRunner {
 } catch (ex) {
 this.options.logger.error(`Failed to parse template string: ${value} with error ${ex.message}`);
 }
-const templated =
+const templated = renderTemplate(value, context);
 if (templated === "") {
 return void 0;
 }
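Note: the dump wrapping a few lines up is what lets a lone expression keep its type through templating; the rendered JSON is parsed back by the reviver (the JSON.parse step itself falls outside the hunks shown). A sketch:

const value = '${{ parameters.dryRun }}';
const wrappedDumped = value.replace(/\${{(.+)}}/g, '${{ ( $1 ) | dump }}');
// => '${{ ( parameters.dryRun ) | dump }}'
// Rendering with { parameters: { dryRun: true } } yields the string "true",
// which parses back to the boolean true instead of flattening to text.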
@@ -2317,9 +2399,15 @@ class NunjucksWorkflowRunner {
 if (!isValidTaskSpec(task.spec)) {
 throw new errors.InputError("Wrong template version executed with the workflow engine");
 }
-const workspacePath = path__default[
+const workspacePath = path__default["default"].join(this.options.workingDirectory, await task.getWorkspaceName());
+const { integrations } = this.options;
+const renderTemplate = await SecureTemplater.loadRenderer({
+parseRepoUrl(url) {
+return parseRepoUrl(url, integrations);
+}
+});
 try {
-await fs__default[
+await fs__default["default"].ensureDir(workspacePath);
 await task.emitLog(`Starting up task with ${task.spec.steps.length} steps`);
 const context = {
 parameters: task.spec.parameters,
@@ -2328,9 +2416,9 @@ class NunjucksWorkflowRunner {
 for (const step of task.spec.steps) {
 try {
 if (step.if) {
-const ifResult = await this.render(step.if, context);
+const ifResult = await this.render(step.if, context, renderTemplate);
 if (!isTruthy(ifResult)) {
-await task.emitLog(`Skipping step ${step.id} because it's if condition was false`, {stepId: step.id, status: "skipped"});
+await task.emitLog(`Skipping step ${step.id} because it's if condition was false`, { stepId: step.id, status: "skipped" });
 continue;
 }
 }
@@ -2339,8 +2427,8 @@ class NunjucksWorkflowRunner {
 status: "processing"
 });
 const action = this.options.actionRegistry.get(step.action);
-const {taskLogger, streamLogger} = createStepLogger({task, step});
-const input = (_a = step.input && this.render(step.input, context)) != null ? _a : {};
+const { taskLogger, streamLogger } = createStepLogger({ task, step });
+const input = (_a = step.input && this.render(step.input, context, renderTemplate)) != null ? _a : {};
 if ((_b = action.schema) == null ? void 0 : _b.input) {
 const validateResult = jsonschema.validate(input, action.schema.input);
 if (!validateResult.valid) {
@@ -2350,6 +2438,9 @@ class NunjucksWorkflowRunner {
 }
 const tmpDirs = new Array();
 const stepOutput = {};
+if (!task.spec.metadata) {
+console.warn("DEPRECATION NOTICE: metadata is undefined. metadata will be required in the future.");
+}
 await action.handler({
 baseUrl: task.spec.baseUrl,
 input,
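Note: both workflow runners now forward task.spec.metadata into every action handler (warning when it is absent), so custom actions can read it from their context. A hypothetical action (the log line is illustrative):

const action = createTemplateAction({
  id: 'acme:example',
  async handler(ctx) {
    // ctx.metadata is task.spec.metadata, forwarded by the runner above
    ctx.logger.info(`Templating with metadata ${JSON.stringify(ctx.metadata ?? {})}`);
  },
});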
@@ -2357,18 +2448,19 @@ class NunjucksWorkflowRunner {
|
|
|
2357
2448
|
logStream: streamLogger,
|
|
2358
2449
|
workspacePath,
|
|
2359
2450
|
createTemporaryDirectory: async () => {
|
|
2360
|
-
const tmpDir = await fs__default[
|
|
2451
|
+
const tmpDir = await fs__default["default"].mkdtemp(`${workspacePath}_step-${step.id}-`);
|
|
2361
2452
|
tmpDirs.push(tmpDir);
|
|
2362
2453
|
return tmpDir;
|
|
2363
2454
|
},
|
|
2364
2455
|
output(name, value) {
|
|
2365
2456
|
stepOutput[name] = value;
|
|
2366
|
-
}
|
|
2457
|
+
},
|
|
2458
|
+
metadata: task.spec.metadata
|
|
2367
2459
|
});
|
|
2368
2460
|
for (const tmpDir of tmpDirs) {
|
|
2369
|
-
await fs__default[
|
|
2461
|
+
await fs__default["default"].remove(tmpDir);
|
|
2370
2462
|
}
|
|
2371
|
-
context.steps[step.id] = {output: stepOutput};
|
|
2463
|
+
context.steps[step.id] = { output: stepOutput };
|
|
2372
2464
|
await task.emitLog(`Finished step ${step.name}`, {
|
|
2373
2465
|
stepId: step.id,
|
|
2374
2466
|
status: "completed"
|
|
@@ -2381,11 +2473,11 @@ class NunjucksWorkflowRunner {
|
|
|
2381
2473
|
throw err;
|
|
2382
2474
|
}
|
|
2383
2475
|
}
|
|
2384
|
-
const output = this.render(task.spec.output, context);
|
|
2385
|
-
return {output};
|
|
2476
|
+
const output = this.render(task.spec.output, context, renderTemplate);
|
|
2477
|
+
return { output };
|
|
2386
2478
|
} finally {
|
|
2387
2479
|
if (workspacePath) {
|
|
2388
|
-
await fs__default[
|
|
2480
|
+
await fs__default["default"].remove(workspacePath);
|
|
2389
2481
|
}
|
|
2390
2482
|
}
|
|
2391
2483
|
}
|
|
@@ -2417,7 +2509,7 @@ class TaskWorker {
|
|
|
2417
2509
|
});
|
|
2418
2510
|
return new TaskWorker({
|
|
2419
2511
|
taskBroker,
|
|
2420
|
-
runners: {legacyWorkflowRunner, workflowRunner}
|
|
2512
|
+
runners: { legacyWorkflowRunner, workflowRunner }
|
|
2421
2513
|
});
|
|
2422
2514
|
}
|
|
2423
2515
|
start() {
|
|
@@ -2430,12 +2522,12 @@ class TaskWorker {
   }
   async runOneTask(task) {
     try {
-      const {output} = task.spec.apiVersion === "scaffolder.backstage.io/v1beta3" ? await this.options.runners.workflowRunner.execute(task) : await this.options.runners.legacyWorkflowRunner.execute(task);
-      await task.complete("completed", {output});
+      const { output } = task.spec.apiVersion === "scaffolder.backstage.io/v1beta3" ? await this.options.runners.workflowRunner.execute(task) : await this.options.runners.legacyWorkflowRunner.execute(task);
+      await task.complete("completed", { output });
     } catch (error) {
       errors.assertError(error);
       await task.complete("failed", {
-        error: {name: error.name, message: error.message}
+        error: { name: error.name, message: error.message }
       });
     }
   }
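
The dispatch rule in `runOneTask`, made explicit as a sketch: v1beta3 tasks go to the new workflow runner, everything older to the legacy runner.

function pickRunner(task, runners) {
  return task.spec.apiVersion === 'scaffolder.backstage.io/v1beta3'
    ? runners.workflowRunner
    : runners.legacyWorkflowRunner;
}
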
@@ -2446,7 +2538,7 @@ class CatalogEntityClient {
     this.catalogClient = catalogClient;
   }
   async findTemplate(templateName, options) {
-    const {items: templates} = await this.catalogClient.getEntities({
+    const { items: templates } = await this.catalogClient.getEntities({
       filter: {
         kind: "template",
         "metadata.name": templateName
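
Usage sketch for `CatalogEntityClient.findTemplate` (the options shape and the exactly-one-match behavior are assumptions drawn from the filter visible above):

async function loadTemplate(catalogClient, token) {
  const client = new CatalogEntityClient(catalogClient);
  // Expected to resolve a single template entity by name via the catalog.
  return client.findTemplate('react-ssr-template', { token });
}
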
@@ -2465,11 +2557,11 @@ class CatalogEntityClient {
 
 async function getWorkingDirectory(config, logger) {
   if (!config.has("backend.workingDirectory")) {
-    return os__default[
+    return os__default["default"].tmpdir();
   }
   const workingDirectory = config.getString("backend.workingDirectory");
   try {
-    await fs__default[
+    await fs__default["default"].access(workingDirectory, fs__default["default"].constants.F_OK | fs__default["default"].constants.W_OK);
     logger.info(`using working directory: ${workingDirectory}`);
   } catch (err) {
     errors.assertError(err);
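
The working-directory check above combines an existence test and a writability test in one `access` call. A standalone sketch of the same pattern with fs-extra:

const fs = require('fs-extra');

async function assertWritable(dir) {
  // F_OK (path exists) and W_OK (process may write) OR'ed together.
  await fs.access(dir, fs.constants.F_OK | fs.constants.W_OK);
}

assertWritable('/tmp').catch((err) =>
  console.error(`working directory unusable: ${err.message}`),
);
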
@@ -2487,7 +2579,7 @@ function getEntityBaseUrl(entity) {
   if (!location) {
     return void 0;
   }
-  const {type, target} = catalogModel.parseLocationReference(location);
+  const { type, target } = catalogModel.parseLocationReference(location);
   if (type === "url") {
     return target;
   } else if (type === "file") {
@@ -2500,8 +2592,8 @@ function isSupportedTemplate(entity) {
   return entity.apiVersion === "backstage.io/v1beta2" || entity.apiVersion === "scaffolder.backstage.io/v1beta3";
 }
 async function createRouter(options) {
-  const router = Router__default[
-  router.use(express__default[
+  const router = Router__default["default"]();
+  router.use(express__default["default"].json());
   const {
     logger: parentLogger,
     config,
@@ -2512,7 +2604,7 @@ async function createRouter(options) {
     containerRunner,
     taskWorkers
   } = options;
-  const logger = parentLogger.child({plugin: "scaffolder"});
+  const logger = parentLogger.child({ plugin: "scaffolder" });
   const workingDirectory = await getWorkingDirectory(config, logger);
   const entityClient = new CatalogEntityClient(catalogClient);
   const integrations = integration.ScmIntegrations.fromConfig(config);
@@ -2548,7 +2640,7 @@ async function createRouter(options) {
   workers.forEach((worker) => worker.start());
   router.get("/v2/templates/:namespace/:kind/:name/parameter-schema", async (req, res) => {
     var _a, _b;
-    const {namespace, kind, name} = req.params;
+    const { namespace, kind, name } = req.params;
     if (namespace !== "default") {
       throw new errors.InputError(`Invalid namespace, only 'default' namespace is supported`);
     }
@@ -2583,7 +2675,7 @@ async function createRouter(options) {
     });
     res.json(actionsList);
   }).post("/v2/tasks", async (req, res) => {
-    var _a, _b, _c;
+    var _a, _b, _c, _d, _e;
     const templateName = req.body.templateName;
     const values = req.body.values;
     const token = getBearerToken(req.headers.authorization);
@@ -2595,7 +2687,7 @@ async function createRouter(options) {
     for (const parameters of [(_a = template.spec.parameters) != null ? _a : []].flat()) {
       const result2 = jsonschema.validate(values, parameters);
       if (!result2.valid) {
-        res.status(400).json({errors: result2.errors});
+        res.status(400).json({ errors: result2.errors });
         return;
       }
     }
@@ -2612,7 +2704,8 @@ async function createRouter(options) {
           name: (_b2 = step.name) != null ? _b2 : step.action
         };
       }),
-      output: (_b = template.spec.output) != null ? _b : {}
+      output: (_b = template.spec.output) != null ? _b : {},
+      metadata: { name: (_c = template.metadata) == null ? void 0 : _c.name }
     } : {
       apiVersion: template.apiVersion,
       baseUrl,
@@ -2625,7 +2718,8 @@ async function createRouter(options) {
           name: (_b2 = step.name) != null ? _b2 : step.action
         };
       }),
-      output: (
+      output: (_d = template.spec.output) != null ? _d : {},
+      metadata: { name: (_e = template.metadata) == null ? void 0 : _e.name }
     };
     } else {
       throw new errors.InputError(`Unsupported apiVersion field in schema entity, ${template.apiVersion}`);
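
After this change, both branches of the dispatched task spec carry the template name under `metadata`, which the runner forwards to action handlers. An illustrative shape (field values invented):

const taskSpec = {
  apiVersion: 'scaffolder.backstage.io/v1beta3',
  steps: [{ id: 'publish', action: 'publish:github', name: 'publish' }],
  output: { remoteUrl: '${{ steps.publish.output.remoteUrl }}' },
  metadata: { name: 'react-ssr-template' },
};
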
@@ -2633,9 +2727,9 @@ async function createRouter(options) {
     const result = await taskBroker.dispatch(taskSpec, {
       token
     });
-    res.status(201).json({id: result.taskId});
+    res.status(201).json({ id: result.taskId });
   }).get("/v2/tasks/:taskId", async (req, res) => {
-    const {taskId} = req.params;
+    const { taskId } = req.params;
     const task = await taskBroker.get(taskId);
     if (!task) {
       throw new errors.NotFoundError(`Task with id ${taskId} does not exist`);
@@ -2643,7 +2737,7 @@ async function createRouter(options) {
     delete task.secrets;
     res.status(200).json(task);
   }).get("/v2/tasks/:taskId/eventstream", async (req, res) => {
-    const {taskId} = req.params;
+    const { taskId } = req.params;
     const after = req.query.after !== void 0 ? Number(req.query.after) : void 0;
     logger.debug(`Event stream observing taskId '${taskId}' opened`);
     res.writeHead(200, {
@@ -2651,7 +2745,7 @@ async function createRouter(options) {
       "Cache-Control": "no-cache",
       "Content-Type": "text/event-stream"
     });
-    const {unsubscribe} = taskBroker.observe({taskId, after}, (error, {events}) => {
+    const { unsubscribe } = taskBroker.observe({ taskId, after }, (error, { events }) => {
      var _a;
       if (error) {
         logger.error(`Received error from event stream when observing taskId '${taskId}', ${error}`);
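
A browser-side sketch for consuming this server-sent-events endpoint (the mount path assumes the plugin is served under /api/scaffolder, and the event names assume the scaffolder's task event types, 'log' and 'completion'):

const taskId = 'some-task-id'; // placeholder
const source = new EventSource(`/api/scaffolder/v2/tasks/${taskId}/eventstream`);
source.addEventListener('log', (e) => console.log(JSON.parse(e.data)));
source.addEventListener('completion', (e) => {
  console.log('task finished:', JSON.parse(e.data));
  source.close();
});
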
@@ -2675,7 +2769,7 @@ data: ${JSON.stringify(event)}
       logger.debug(`Event stream observing taskId '${taskId}' closed`);
     });
   }).get("/v2/tasks/:taskId/events", async (req, res) => {
-    const {taskId} = req.params;
+    const { taskId } = req.params;
     const after = Number(req.query.after) || void 0;
     let unsubscribe = () => {
     };
@@ -2683,7 +2777,7 @@ data: ${JSON.stringify(event)}
       unsubscribe();
       res.json([]);
     }, 3e4);
-    ({unsubscribe} = taskBroker.observe({taskId, after}, (error, {events}) => {
+    ({ unsubscribe } = taskBroker.observe({ taskId, after }, (error, { events }) => {
       clearTimeout(timeout);
       unsubscribe();
       if (error) {
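
The /events endpoint long-polls: it answers as soon as new events arrive, or with an empty array after the 3e4 ms (30 s) timeout above. A client sketch, assuming a global fetch (Node 18+ or node-fetch):

async function pollEvents(baseUrl, taskId, after) {
  const query = after !== undefined ? `?after=${after}` : '';
  const res = await fetch(`${baseUrl}/v2/tasks/${taskId}/events${query}`);
  return res.json(); // array of task events; exact shape assumed
}
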
@@ -2696,7 +2790,7 @@ data: ${JSON.stringify(event)}
       clearTimeout(timeout);
     });
   });
-  const app = express__default[
+  const app = express__default["default"]();
   app.set("logger", logger);
   app.use("/", router);
   return app;
@@ -2756,9 +2850,7 @@ class ScaffolderEntitiesProcessor {
 
 Object.defineProperty(exports, 'createFetchCookiecutterAction', {
   enumerable: true,
-  get: function () {
-    return pluginScaffolderBackendModuleCookiecutter.createFetchCookiecutterAction;
-  }
+  get: function () { return pluginScaffolderBackendModuleCookiecutter.createFetchCookiecutterAction; }
 });
 exports.CatalogEntityClient = CatalogEntityClient;
 exports.DatabaseTaskStore = DatabaseTaskStore;