@backstage/plugin-scaffolder-backend 0.15.10 → 0.15.14
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +92 -0
- package/assets/nunjucks.js.txt +10385 -0
- package/dist/index.cjs.js +333 -174
- package/dist/index.cjs.js.map +1 -1
- package/dist/index.d.ts +309 -4
- package/package.json +24 -19
package/dist/index.cjs.js
CHANGED
@@ -5,12 +5,12 @@ Object.defineProperty(exports, '__esModule', { value: true });
 var errors = require('@backstage/errors');
 var catalogModel = require('@backstage/catalog-model');
 var fs = require('fs-extra');
-var path = require('path');
 var yaml = require('yaml');
 var backendCommon = require('@backstage/backend-common');
+var path = require('path');
 var globby = require('globby');
-var nunjucks = require('nunjucks');
 var isbinaryfile = require('isbinaryfile');
+var vm2 = require('vm2');
 var pluginScaffolderBackendModuleCookiecutter = require('@backstage/plugin-scaffolder-backend-module-cookiecutter');
 var child_process = require('child_process');
 var stream = require('stream');
@@ -22,14 +22,15 @@ var lodash = require('lodash');
 var octokitPluginCreatePullRequest = require('octokit-plugin-create-pull-request');
 var node = require('@gitbeaker/node');
 var webhooks = require('@octokit/webhooks');
-var express = require('express');
-var Router = require('express-promise-router');
-var jsonschema = require('jsonschema');
 var uuid = require('uuid');
 var luxon = require('luxon');
-var os = require('os');
 var Handlebars = require('handlebars');
 var winston = require('winston');
+var jsonschema = require('jsonschema');
+var nunjucks = require('nunjucks');
+var express = require('express');
+var Router = require('express-promise-router');
+var os = require('os');
 var pluginCatalogBackend = require('@backstage/plugin-catalog-backend');
 var pluginScaffolderCommon = require('@backstage/plugin-scaffolder-common');
 
@@ -56,17 +57,16 @@ function _interopNamespace(e) {
 }
 
 var fs__default = /*#__PURE__*/_interopDefaultLegacy(fs);
-var path__namespace = /*#__PURE__*/_interopNamespace(path);
-var path__default = /*#__PURE__*/_interopDefaultLegacy(path);
 var yaml__namespace = /*#__PURE__*/_interopNamespace(yaml);
+var path__default = /*#__PURE__*/_interopDefaultLegacy(path);
 var globby__default = /*#__PURE__*/_interopDefaultLegacy(globby);
-var nunjucks__default = /*#__PURE__*/_interopDefaultLegacy(nunjucks);
 var fetch__default = /*#__PURE__*/_interopDefaultLegacy(fetch);
+var Handlebars__namespace = /*#__PURE__*/_interopNamespace(Handlebars);
+var winston__namespace = /*#__PURE__*/_interopNamespace(winston);
+var nunjucks__default = /*#__PURE__*/_interopDefaultLegacy(nunjucks);
 var express__default = /*#__PURE__*/_interopDefaultLegacy(express);
 var Router__default = /*#__PURE__*/_interopDefaultLegacy(Router);
 var os__default = /*#__PURE__*/_interopDefaultLegacy(os);
-var Handlebars__namespace = /*#__PURE__*/_interopNamespace(Handlebars);
-var winston__namespace = /*#__PURE__*/_interopNamespace(winston);
 
 const createTemplateAction = (templateAction) => {
   return templateAction;
@@ -182,7 +182,7 @@ function createCatalogWriteAction() {
     async handler(ctx) {
       ctx.logStream.write(`Writing catalog-info.yaml`);
      const {entity} = ctx.input;
-      await fs__default['default'].writeFile(
+      await fs__default['default'].writeFile(backendCommon.resolveSafeChildPath(ctx.workspacePath, "catalog-info.yaml"), yaml__namespace.stringify(entity));
     }
   });
 }
@@ -226,7 +226,7 @@ ${files.map((f) => ` - ${path.relative(ctx.workspacePath, f)}`).join("\n")}`);
 async function recursiveReadDir(dir) {
   const subdirs = await fs.readdir(dir);
   const files = await Promise.all(subdirs.map(async (subdir) => {
-    const res = path.
+    const res = path.join(dir, subdir);
     return (await fs.stat(res)).isDirectory() ? recursiveReadDir(res) : [res];
   }));
   return files.reduce((a, f) => a.concat(f), []);
@@ -250,7 +250,7 @@ async function fetchContents({
   }
   if (!fetchUrlIsAbsolute && (baseUrl == null ? void 0 : baseUrl.startsWith("file://"))) {
     const basePath = baseUrl.slice("file://".length);
-    const srcDir = backendCommon.resolveSafeChildPath(
+    const srcDir = backendCommon.resolveSafeChildPath(path__default['default'].dirname(basePath), fetchUrl);
     await fs__default['default'].copy(srcDir, outputPath);
   } else {
     let readUrl;
@@ -313,7 +313,100 @@ function createFetchPlainAction(options) {
   });
 }
 
-
+const mkScript = (nunjucksSource) => `
+const { render, renderCompat } = (() => {
+  const module = {};
+  const process = { env: {} };
+  const require = (pkg) => { if (pkg === 'events') { return function (){}; }};
+
+  ${nunjucksSource}
+
+  const env = module.exports.configure({
+    autoescape: false,
+    tags: {
+      variableStart: '\${{',
+      variableEnd: '}}',
+    },
+  });
+
+  const compatEnv = module.exports.configure({
+    autoescape: false,
+    tags: {
+      variableStart: '{{',
+      variableEnd: '}}',
+    },
+  });
+  compatEnv.addFilter('jsonify', compatEnv.getFilter('dump'));
+
+  if (typeof parseRepoUrl !== 'undefined') {
+    const safeHelperRef = parseRepoUrl;
+
+    env.addFilter('parseRepoUrl', repoUrl => {
+      return JSON.parse(safeHelperRef(repoUrl))
+    });
+    env.addFilter('projectSlug', repoUrl => {
+      const { owner, repo } = JSON.parse(safeHelperRef(repoUrl));
+      return owner + '/' + repo;
+    });
+  }
+
+  let uninstallCompat = undefined;
+
+  function render(str, values) {
+    try {
+      if (uninstallCompat) {
+        uninstallCompat();
+        uninstallCompat = undefined;
+      }
+      return env.renderString(str, JSON.parse(values));
+    } catch (error) {
+      // Make sure errors don't leak anything
+      throw new Error(String(error.message));
+    }
+  }
+
+  function renderCompat(str, values) {
+    try {
+      if (!uninstallCompat) {
+        uninstallCompat = module.exports.installJinjaCompat();
+      }
+      return compatEnv.renderString(str, JSON.parse(values));
+    } catch (error) {
+      // Make sure errors don't leak anything
+      throw new Error(String(error.message));
+    }
+  }
+
+  return { render, renderCompat };
+})();
+`;
+class SecureTemplater {
+  static async loadRenderer(options = {}) {
+    const {parseRepoUrl, cookiecutterCompat} = options;
+    let sandbox = void 0;
+    if (parseRepoUrl) {
+      sandbox = {
+        parseRepoUrl: (url) => JSON.stringify(parseRepoUrl(url))
+      };
+    }
+    const vm = new vm2.VM({sandbox});
+    const nunjucksSource = await fs__default['default'].readFile(backendCommon.resolvePackagePath("@backstage/plugin-scaffolder-backend", "assets/nunjucks.js.txt"), "utf-8");
+    vm.run(mkScript(nunjucksSource));
+    const render = (template, values) => {
+      if (!vm) {
+        throw new Error("SecureTemplater has not been initialized");
+      }
+      vm.setGlobal("templateStr", template);
+      vm.setGlobal("templateValues", JSON.stringify(values));
+      if (cookiecutterCompat) {
+        return vm.run(`renderCompat(templateStr, templateValues)`);
+      }
+      return vm.run(`render(templateStr, templateValues)`);
+    };
+    return render;
+  }
+}
+
 function createFetchTemplateAction(options) {
   const {reader, integrations} = options;
   return createTemplateAction({
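The main addition in this hunk is `SecureTemplater`: the nunjucks source bundled as `assets/nunjucks.js.txt` is evaluated inside a `vm2` sandbox, and callers only ever receive a plain `render(template, values)` function, so user-supplied templates no longer execute against the real module system or process. A minimal usage sketch (to be run inside an async function; the template string and values are illustrative):

```js
// Load a renderer backed by the vm2 sandbox. Passing cookiecutterCompat: true
// would switch to the {{ ... }} tags and register the jsonify filter instead.
const renderTemplate = await SecureTemplater.loadRenderer({
  cookiecutterCompat: false,
});

// Values cross the sandbox boundary as JSON, so only plain data is visible
// to the template.
const rendered = renderTemplate('${{ values.name }}-service', {
  values: { name: 'backstage' },
});
// rendered === 'backstage-service'
```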
@@ -364,7 +457,7 @@ function createFetchTemplateAction(options) {
       var _a;
       ctx.logger.info("Fetching template content from remote URL");
       const workDir = await ctx.createTemporaryDirectory();
-      const templateDir =
+      const templateDir = backendCommon.resolveSafeChildPath(workDir, "template");
       const targetPath = (_a = ctx.input.targetPath) != null ? _a : "./";
       const outputDir = backendCommon.resolveSafeChildPath(ctx.workspacePath, targetPath);
       if (ctx.input.copyWithoutRender && !Array.isArray(ctx.input.copyWithoutRender)) {
@@ -400,23 +493,14 @@ function createFetchTemplateAction(options) {
         onlyFiles: false,
         markDirectories: true
       })))).flat());
-      const templater = nunjucks__default['default'].configure({
-        ...ctx.input.cookiecutterCompat ? {} : {
-          tags: {
-            variableStart: "${{",
-            variableEnd: "}}"
-          }
-        },
-        autoescape: false
-      });
-      if (ctx.input.cookiecutterCompat) {
-        templater.addFilter("jsonify", templater.getFilter("dump"));
-      }
       const {cookiecutterCompat, values} = ctx.input;
       const context = {
         [cookiecutterCompat ? "cookiecutter" : "values"]: values
       };
       ctx.logger.info(`Processing ${allEntriesInTemplate.length} template files/directories with input values`, ctx.input.values);
+      const renderTemplate = await SecureTemplater.loadRenderer({
+        cookiecutterCompat: ctx.input.cookiecutterCompat
+      });
       for (const location of allEntriesInTemplate) {
         let renderFilename;
         let renderContents;
@@ -431,9 +515,12 @@ function createFetchTemplateAction(options) {
           renderFilename = renderContents = !nonTemplatedEntries.has(location);
         }
         if (renderFilename) {
-          localOutputPath =
+          localOutputPath = renderTemplate(localOutputPath, context);
+        }
+        const outputPath = backendCommon.resolveSafeChildPath(outputDir, localOutputPath);
+        if (outputDir === outputPath) {
+          continue;
         }
-        const outputPath = path.resolve(outputDir, localOutputPath);
         if (!renderContents && !extension) {
           ctx.logger.info(`Copying file/directory ${location} without processing.`);
         }
@@ -441,7 +528,7 @@ function createFetchTemplateAction(options) {
           ctx.logger.info(`Writing directory ${location} to template output path.`);
           await fs__default['default'].ensureDir(outputPath);
         } else {
-          const inputFilePath =
+          const inputFilePath = backendCommon.resolveSafeChildPath(templateDir, location);
           if (await isbinaryfile.isBinaryFile(inputFilePath)) {
             ctx.logger.info(`Copying binary file ${location} to template output path.`);
             await fs__default['default'].copy(inputFilePath, outputPath);
@@ -449,7 +536,7 @@ function createFetchTemplateAction(options) {
             const statsObj = await fs__default['default'].stat(inputFilePath);
             ctx.logger.info(`Writing file ${location} to template output path with mode ${statsObj.mode}.`);
             const inputFileContents = await fs__default['default'].readFile(inputFilePath, "utf-8");
-            await fs__default['default'].outputFile(outputPath, renderContents ?
+            await fs__default['default'].outputFile(outputPath, renderContents ? renderTemplate(inputFileContents, context) : inputFileContents, {mode: statsObj.mode});
           }
         }
       }
@@ -714,6 +801,11 @@ const parseRepoUrl = (repoUrl, integrations) => {
   }
   return {host, owner, repo, organization, workspace, project};
 };
+const isExecutable = (fileMode) => {
+  const executeBitMask = 73;
+  const res = fileMode & executeBitMask;
+  return res > 0;
+};
 
 function createPublishAzureAction(options) {
   const {integrations, config} = options;
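`isExecutable` masks the file mode with 73, which is `0o111`, i.e. the execute bit for owner, group and other; any set execute bit counts. It is used in the next hunk to decide whether a file is published to GitHub as tree mode `100755` or `100644`. For illustration:

```js
// 73 === 0o111 (----x--x--x): keep only the three execute bits of the mode.
const isExecutable = (fileMode) => (fileMode & 0o111) > 0;

isExecutable(0o755); // true  -> GitHub tree mode "100755"
isExecutable(0o644); // false -> GitHub tree mode "100644"
```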
@@ -1391,13 +1483,24 @@ const createPublishGithubPullRequestAction = ({
        gitignore: true,
        dot: true
      });
-      const fileContents = await Promise.all(localFilePaths.map((
+      const fileContents = await Promise.all(localFilePaths.map((filePath) => {
+        const absPath = backendCommon.resolveSafeChildPath(fileRoot, filePath);
+        const base64EncodedContent = fs__default['default'].readFileSync(absPath).toString("base64");
+        const fileStat = fs__default['default'].statSync(absPath);
+        const githubTreeItemMode = isExecutable(fileStat.mode) ? "100755" : "100644";
+        const encoding = "base64";
+        return {
+          encoding,
+          content: base64EncodedContent,
+          mode: githubTreeItemMode
+        };
+      }));
       const repoFilePaths = localFilePaths.map((repoFilePath) => {
         return targetPath ? `${targetPath}/${repoFilePath}` : repoFilePath;
       });
       const changes = [
         {
-          files: lodash.zipObject(repoFilePaths, fileContents
+          files: lodash.zipObject(repoFilePaths, fileContents),
           commit: title
         }
       ];
@@ -1735,38 +1838,16 @@ class TemplateActionRegistry {
   }
 }
 
-class CatalogEntityClient {
-  constructor(catalogClient) {
-    this.catalogClient = catalogClient;
-  }
-  async findTemplate(templateName, options) {
-    const {items: templates} = await this.catalogClient.getEntities({
-      filter: {
-        kind: "template",
-        "metadata.name": templateName
-      }
-    }, options);
-    if (templates.length !== 1) {
-      if (templates.length > 1) {
-        throw new errors.ConflictError("Templates lookup resulted in multiple matches");
-      } else {
-        throw new errors.NotFoundError("Template not found");
-      }
-    }
-    return templates[0];
-  }
-}
-
 const migrationsDir = backendCommon.resolvePackagePath("@backstage/plugin-scaffolder-backend", "migrations");
 class DatabaseTaskStore {
-
-
-  }
-  static async create(knex) {
-    await knex.migrate.latest({
+  static async create(options) {
+    await options.database.migrate.latest({
       directory: migrationsDir
     });
-    return new DatabaseTaskStore(
+    return new DatabaseTaskStore(options);
+  }
+  constructor(options) {
+    this.db = options.database;
   }
   async getTask(taskId) {
     const [result] = await this.db("tasks").where({id: taskId}).select();
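`DatabaseTaskStore.create` now takes an options object carrying the knex client under `database` (with the constructor reshaped to match) instead of receiving the client directly. A sketch of the new call shape, mirroring the `createRouter` change further down:

```js
// Assumes `database` is the plugin's database manager from the backend environment.
const databaseTaskStore = await DatabaseTaskStore.create({
  database: await database.getClient(),
});
const taskBroker = new StorageTaskBroker(databaseTaskStore, logger);
```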
@@ -1922,7 +2003,7 @@ class DatabaseTaskStore {
   }
 }
 
-class TaskAgent {
+class TaskManager {
   constructor(state, storage, logger) {
     this.state = state;
     this.storage = storage;
@@ -1930,7 +2011,7 @@ class TaskAgent {
     this.isDone = false;
   }
   static create(state, storage, logger) {
-    const agent = new TaskAgent(state, storage, logger);
+    const agent = new TaskManager(state, storage, logger);
     agent.startTimeout();
     return agent;
   }
@@ -1996,7 +2077,7 @@ class StorageTaskBroker {
     for (; ; ) {
       const pendingTask = await this.storage.claimTask();
       if (pendingTask) {
-        return TaskAgent.create({
+        return TaskManager.create({
          taskId: pendingTask.id,
          spec: pendingTask.spec,
          secrets: pendingTask.secrets
@@ -2038,7 +2119,7 @@ class StorageTaskBroker {
      await new Promise((resolve) => setTimeout(resolve, 1e3));
    }
    })();
-    return unsubscribe;
+    return {unsubscribe};
   }
   async vacuumTasks(timeoutS) {
     const {tasks} = await this.storage.listStaleTasks(timeoutS);
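`StorageTaskBroker.observe` now returns a `{ unsubscribe }` handle instead of a bare function, which is why the event-stream and events endpoints further down destructure it. A sketch of the new subscription shape (`taskId` and `after` come from the request):

```js
const { unsubscribe } = taskBroker.observe({ taskId, after }, (error, { events }) => {
  // forward events to the client...
});
// later, when the client disconnects or the stream completes:
unsubscribe();
```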
@@ -2065,70 +2146,12 @@ class StorageTaskBroker {
   }
 }
 
-class TaskWorker {
-  constructor(options) {
-    this.options = options;
-  }
-  start() {
-    (async () => {
-      for (; ; ) {
-        const task = await this.options.taskBroker.claim();
-        await this.runOneTask(task);
-      }
-    })();
-  }
-  async runOneTask(task) {
-    try {
-      const {output} = task.spec.apiVersion === "scaffolder.backstage.io/v1beta3" ? await this.options.runners.workflowRunner.execute(task) : await this.options.runners.legacyWorkflowRunner.execute(task);
-      await task.complete("completed", {output});
-    } catch (error) {
-      errors.assertError(error);
-      await task.complete("failed", {
-        error: {name: error.name, message: error.message}
-      });
-    }
-  }
-}
-
-async function getWorkingDirectory(config, logger) {
-  if (!config.has("backend.workingDirectory")) {
-    return os__default['default'].tmpdir();
-  }
-  const workingDirectory = config.getString("backend.workingDirectory");
-  try {
-    await fs__default['default'].access(workingDirectory, fs__default['default'].constants.F_OK | fs__default['default'].constants.W_OK);
-    logger.info(`using working directory: ${workingDirectory}`);
-  } catch (err) {
-    errors.assertError(err);
-    logger.error(`working directory ${workingDirectory} ${err.code === "ENOENT" ? "does not exist" : "is not writable"}`);
-    throw err;
-  }
-  return workingDirectory;
-}
-function getEntityBaseUrl(entity) {
-  var _a, _b;
-  let location = (_a = entity.metadata.annotations) == null ? void 0 : _a[catalogModel.SOURCE_LOCATION_ANNOTATION];
-  if (!location) {
-    location = (_b = entity.metadata.annotations) == null ? void 0 : _b[catalogModel.LOCATION_ANNOTATION];
-  }
-  if (!location) {
-    return void 0;
-  }
-  const {type, target} = catalogModel.parseLocationReference(location);
-  if (type === "url") {
-    return target;
-  } else if (type === "file") {
-    return `file://${target}`;
-  }
-  return void 0;
-}
-
 function isTruthy(value) {
   return lodash.isArray(value) ? value.length > 0 : !!value;
 }
 
 const isValidTaskSpec$1 = (taskSpec) => taskSpec.apiVersion === "backstage.io/v1beta2";
-class LegacyWorkflowRunner {
+class HandlebarsWorkflowRunner {
   constructor(options) {
     this.options = options;
     this.handlebars = Handlebars__namespace.create();
@@ -2239,6 +2262,9 @@ class LegacyWorkflowRunner {
           this.options.logger.debug(`Running ${action.id} with input`, {
             input: JSON.stringify(input, null, 2)
           });
+          if (!task.spec.metadata) {
+            console.warn("DEPRECATION NOTICE: metadata is undefined. metadata will be required in the future.");
+          }
           await action.handler({
             baseUrl: task.spec.baseUrl,
             logger: taskLogger,
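Both workflow runners now warn when `task.spec.metadata` is missing and forward it to action handlers (see the `metadata: task.spec.metadata` additions below), so actions can learn e.g. the originating template name. A hypothetical action reading it (the action id and log message are made up for illustration):

```js
const action = createTemplateAction({
  id: 'example:log-template-name',
  async handler(ctx) {
    // ctx.metadata is { name: <template name> } for tasks created via the router.
    ctx.logger.info(`Scaffolding from template ${ctx.metadata?.name}`);
  },
});
```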
@@ -2253,7 +2279,8 @@ class LegacyWorkflowRunner {
             },
             output(name, value) {
               stepOutputs[name] = value;
-            }
+            },
+            metadata: task.spec.metadata
           });
           for (const tmpDir of tmpDirs) {
             await fs__default['default'].remove(tmpDir);
@@ -2304,7 +2331,10 @@ class LegacyWorkflowRunner {
 const isValidTaskSpec = (taskSpec) => {
   return taskSpec.apiVersion === "scaffolder.backstage.io/v1beta3";
 };
-const createStepLogger = ({task, step}) => {
+const createStepLogger = ({
+  task,
+  step
+}) => {
   const metadata = {stepId: step.id};
   const taskLogger = winston__namespace.createLogger({
     level: process.env.LOG_LEVEL || "info",
@@ -2321,38 +2351,30 @@ const createStepLogger = ({task, step}) => {
   taskLogger.add(new winston__namespace.transports.Stream({stream: streamLogger}));
   return {taskLogger, streamLogger};
 };
-class DefaultWorkflowRunner {
+class NunjucksWorkflowRunner {
   constructor(options) {
     this.options = options;
-
+  }
+  isSingleTemplateString(input) {
+    var _a, _b;
+    const {parser, nodes} = nunjucks__default['default'];
+    const parsed = parser.parse(input, {}, {
       autoescape: false,
       tags: {
         variableStart: "${{",
         variableEnd: "}}"
       }
-    };
-    this.nunjucks = nunjucks__default['default'].configure(this.nunjucksOptions);
-    this.nunjucks.addFilter("parseRepoUrl", (repoUrl) => {
-      return parseRepoUrl(repoUrl, this.options.integrations);
-    });
-    this.nunjucks.addFilter("projectSlug", (repoUrl) => {
-      const {owner, repo} = parseRepoUrl(repoUrl, this.options.integrations);
-      return `${owner}/${repo}`;
     });
+    return parsed.children.length === 1 && !(((_b = (_a = parsed.children[0]) == null ? void 0 : _a.children) == null ? void 0 : _b[0]) instanceof nodes.TemplateData);
   }
-
-    const {parser, nodes} = require("nunjucks");
-    const parsed = parser.parse(input, {}, this.nunjucksOptions);
-    return parsed.children.length === 1 && !(parsed.children[0] instanceof nodes.TemplateData);
-  }
-  render(input, context) {
+  render(input, context, renderTemplate) {
     return JSON.parse(JSON.stringify(input), (_key, value) => {
       try {
         if (typeof value === "string") {
           try {
             if (this.isSingleTemplateString(value)) {
               const wrappedDumped = value.replace(/\${{(.+)}}/g, "${{ ( $1 ) | dump }}");
-              const templated2 =
+              const templated2 = renderTemplate(wrappedDumped, context);
               if (templated2 === "") {
                 return void 0;
               }
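`isSingleTemplateString` parses the input with the scaffolder's `${{ }}` tags and reports whether the whole string is a single expression. `render` uses that to wrap such strings with the `dump` filter so non-string values (arrays, objects, booleans) survive templating, while mixed strings are rendered as plain text. Roughly (assuming `runner` is an instance of the class; the parameter names are examples):

```js
runner.isSingleTemplateString('${{ parameters.targets }}');      // true  -> rendered via dump, keeps its type
runner.isSingleTemplateString('service-${{ parameters.name }}'); // false -> rendered as a plain string
runner.isSingleTemplateString('just text');                      // false
```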
@@ -2361,7 +2383,7 @@ class DefaultWorkflowRunner {
           } catch (ex) {
             this.options.logger.error(`Failed to parse template string: ${value} with error ${ex.message}`);
           }
-          const templated =
+          const templated = renderTemplate(value, context);
           if (templated === "") {
             return void 0;
           }
@@ -2379,6 +2401,12 @@ class DefaultWorkflowRunner {
      throw new errors.InputError("Wrong template version executed with the workflow engine");
    }
    const workspacePath = path__default['default'].join(this.options.workingDirectory, await task.getWorkspaceName());
+    const {integrations} = this.options;
+    const renderTemplate = await SecureTemplater.loadRenderer({
+      parseRepoUrl(url) {
+        return parseRepoUrl(url, integrations);
+      }
+    });
    try {
      await fs__default['default'].ensureDir(workspacePath);
      await task.emitLog(`Starting up task with ${task.spec.steps.length} steps`);
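The v1beta3 runner builds its renderer with a `parseRepoUrl` callback, which backs the sandboxed `parseRepoUrl` and `projectSlug` filters defined in `mkScript` (results cross the vm2 boundary as JSON). An illustrative render call against such a renderer, assuming a `github.com` integration is configured and using an example `repoUrl` value:

```js
const slug = renderTemplate('${{ parameters.repoUrl | projectSlug }}', {
  parameters: { repoUrl: 'github.com?owner=backstage&repo=backstage' },
});
// slug === 'backstage/backstage'
```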
@@ -2389,7 +2417,7 @@ class DefaultWorkflowRunner {
       for (const step of task.spec.steps) {
         try {
           if (step.if) {
-            const ifResult = await this.render(step.if, context);
+            const ifResult = await this.render(step.if, context, renderTemplate);
             if (!isTruthy(ifResult)) {
               await task.emitLog(`Skipping step ${step.id} because it's if condition was false`, {stepId: step.id, status: "skipped"});
               continue;
@@ -2401,7 +2429,7 @@ class DefaultWorkflowRunner {
           });
           const action = this.options.actionRegistry.get(step.action);
           const {taskLogger, streamLogger} = createStepLogger({task, step});
-          const input = (_a = step.input && this.render(step.input, context)) != null ? _a : {};
+          const input = (_a = step.input && this.render(step.input, context, renderTemplate)) != null ? _a : {};
           if ((_b = action.schema) == null ? void 0 : _b.input) {
             const validateResult = jsonschema.validate(input, action.schema.input);
             if (!validateResult.valid) {
@@ -2411,6 +2439,9 @@ class DefaultWorkflowRunner {
           }
           const tmpDirs = new Array();
           const stepOutput = {};
+          if (!task.spec.metadata) {
+            console.warn("DEPRECATION NOTICE: metadata is undefined. metadata will be required in the future.");
+          }
           await action.handler({
             baseUrl: task.spec.baseUrl,
             input,
@@ -2424,7 +2455,8 @@ class DefaultWorkflowRunner {
             },
             output(name, value) {
               stepOutput[name] = value;
-            }
+            },
+            metadata: task.spec.metadata
           });
           for (const tmpDir of tmpDirs) {
             await fs__default['default'].remove(tmpDir);
@@ -2442,7 +2474,7 @@ class DefaultWorkflowRunner {
           throw err;
         }
       }
-      const output = this.render(task.spec.output, context);
+      const output = this.render(task.spec.output, context, renderTemplate);
       return {output};
     } finally {
       if (workspacePath) {
@@ -2452,6 +2484,111 @@ class DefaultWorkflowRunner {
   }
 }
 
+class TaskWorker {
+  constructor(options) {
+    this.options = options;
+  }
+  static async create(options) {
+    const {
+      taskBroker,
+      logger,
+      actionRegistry,
+      integrations,
+      workingDirectory
+    } = options;
+    const legacyWorkflowRunner = new HandlebarsWorkflowRunner({
+      logger,
+      actionRegistry,
+      integrations,
+      workingDirectory
+    });
+    const workflowRunner = new NunjucksWorkflowRunner({
+      actionRegistry,
+      integrations,
+      logger,
+      workingDirectory
+    });
+    return new TaskWorker({
+      taskBroker,
+      runners: {legacyWorkflowRunner, workflowRunner}
+    });
+  }
+  start() {
+    (async () => {
+      for (; ; ) {
+        const task = await this.options.taskBroker.claim();
+        await this.runOneTask(task);
+      }
+    })();
+  }
+  async runOneTask(task) {
+    try {
+      const {output} = task.spec.apiVersion === "scaffolder.backstage.io/v1beta3" ? await this.options.runners.workflowRunner.execute(task) : await this.options.runners.legacyWorkflowRunner.execute(task);
+      await task.complete("completed", {output});
+    } catch (error) {
+      errors.assertError(error);
+      await task.complete("failed", {
+        error: {name: error.name, message: error.message}
+      });
+    }
+  }
+}
+
+class CatalogEntityClient {
+  constructor(catalogClient) {
+    this.catalogClient = catalogClient;
+  }
+  async findTemplate(templateName, options) {
+    const {items: templates} = await this.catalogClient.getEntities({
+      filter: {
+        kind: "template",
+        "metadata.name": templateName
+      }
+    }, options);
+    if (templates.length !== 1) {
+      if (templates.length > 1) {
+        throw new errors.ConflictError("Templates lookup resulted in multiple matches");
+      } else {
+        throw new errors.NotFoundError("Template not found");
+      }
+    }
+    return templates[0];
+  }
+}
+
+async function getWorkingDirectory(config, logger) {
+  if (!config.has("backend.workingDirectory")) {
+    return os__default['default'].tmpdir();
+  }
+  const workingDirectory = config.getString("backend.workingDirectory");
+  try {
+    await fs__default['default'].access(workingDirectory, fs__default['default'].constants.F_OK | fs__default['default'].constants.W_OK);
+    logger.info(`using working directory: ${workingDirectory}`);
+  } catch (err) {
+    errors.assertError(err);
+    logger.error(`working directory ${workingDirectory} ${err.code === "ENOENT" ? "does not exist" : "is not writable"}`);
+    throw err;
+  }
+  return workingDirectory;
+}
+function getEntityBaseUrl(entity) {
+  var _a, _b;
+  let location = (_a = entity.metadata.annotations) == null ? void 0 : _a[catalogModel.SOURCE_LOCATION_ANNOTATION];
+  if (!location) {
+    location = (_b = entity.metadata.annotations) == null ? void 0 : _b[catalogModel.LOCATION_ANNOTATION];
+  }
+  if (!location) {
+    return void 0;
+  }
+  const {type, target} = catalogModel.parseLocationReference(location);
+  if (type === "url") {
+    return target;
+  } else if (type === "file") {
+    return `file://${target}`;
+  }
+  return void 0;
+}
+
 function isSupportedTemplate(entity) {
   return entity.apiVersion === "backstage.io/v1beta2" || entity.apiVersion === "scaffolder.backstage.io/v1beta3";
 }
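`TaskWorker` moved behind an async `create` factory that wires up both the Handlebars (legacy) and Nunjucks runners, so callers no longer construct the runners themselves. A sketch matching the `createRouter` change below:

```js
const worker = await TaskWorker.create({
  taskBroker,
  actionRegistry,
  integrations,
  logger,
  workingDirectory,
});
worker.start(); // claim and run tasks in a loop
```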
@@ -2472,29 +2609,24 @@ async function createRouter(options) {
   const workingDirectory = await getWorkingDirectory(config, logger);
   const entityClient = new CatalogEntityClient(catalogClient);
   const integrations = integration.ScmIntegrations.fromConfig(config);
-
-
+  let taskBroker;
+  if (!options.taskBroker) {
+    const databaseTaskStore = await DatabaseTaskStore.create({
+      database: await database.getClient()
+    });
+    taskBroker = new StorageTaskBroker(databaseTaskStore, logger);
+  } else {
+    taskBroker = options.taskBroker;
+  }
   const actionRegistry = new TemplateActionRegistry();
-  const legacyWorkflowRunner = new LegacyWorkflowRunner({
-    logger,
-    actionRegistry,
-    integrations,
-    workingDirectory
-  });
-  const workflowRunner = new DefaultWorkflowRunner({
-    actionRegistry,
-    integrations,
-    logger,
-    workingDirectory
-  });
   const workers = [];
   for (let i = 0; i < (taskWorkers || 1); i++) {
-    const worker =
+    const worker = await TaskWorker.create({
       taskBroker,
-
-
-
-
+      actionRegistry,
+      integrations,
+      logger,
+      workingDirectory
     });
     workers.push(worker);
   }
@@ -2544,7 +2676,7 @@ async function createRouter(options) {
    });
    res.json(actionsList);
  }).post("/v2/tasks", async (req, res) => {
-    var _a, _b, _c;
+    var _a, _b, _c, _d, _e;
    const templateName = req.body.templateName;
    const values = req.body.values;
    const token = getBearerToken(req.headers.authorization);
@@ -2573,7 +2705,8 @@ async function createRouter(options) {
          name: (_b2 = step.name) != null ? _b2 : step.action
        };
      }),
-      output: (_b = template.spec.output) != null ? _b : {}
+      output: (_b = template.spec.output) != null ? _b : {},
+      metadata: {name: (_c = template.metadata) == null ? void 0 : _c.name}
    } : {
      apiVersion: template.apiVersion,
      baseUrl,
@@ -2586,7 +2719,8 @@ async function createRouter(options) {
          name: (_b2 = step.name) != null ? _b2 : step.action
        };
      }),
-      output: (
+      output: (_d = template.spec.output) != null ? _d : {},
+      metadata: {name: (_e = template.metadata) == null ? void 0 : _e.name}
    };
   } else {
     throw new errors.InputError(`Unsupported apiVersion field in schema entity, ${template.apiVersion}`);
@@ -2605,14 +2739,15 @@ async function createRouter(options) {
    res.status(200).json(task);
  }).get("/v2/tasks/:taskId/eventstream", async (req, res) => {
    const {taskId} = req.params;
-    const after = Number(req.query.after)
+    const after = req.query.after !== void 0 ? Number(req.query.after) : void 0;
    logger.debug(`Event stream observing taskId '${taskId}' opened`);
    res.writeHead(200, {
      Connection: "keep-alive",
      "Cache-Control": "no-cache",
      "Content-Type": "text/event-stream"
    });
-    const unsubscribe = taskBroker.observe({taskId, after}, (error, {events}) => {
+    const {unsubscribe} = taskBroker.observe({taskId, after}, (error, {events}) => {
+      var _a;
      if (error) {
        logger.error(`Received error from event stream when observing taskId '${taskId}', ${error}`);
      }
@@ -2626,7 +2761,7 @@ data: ${JSON.stringify(event)}
          shouldUnsubscribe = true;
        }
      }
-      res.flush();
+      (_a = res.flush) == null ? void 0 : _a.call(res);
      if (shouldUnsubscribe)
        unsubscribe();
    });
@@ -2634,6 +2769,27 @@ data: ${JSON.stringify(event)}
      unsubscribe();
      logger.debug(`Event stream observing taskId '${taskId}' closed`);
    });
+  }).get("/v2/tasks/:taskId/events", async (req, res) => {
+    const {taskId} = req.params;
+    const after = Number(req.query.after) || void 0;
+    let unsubscribe = () => {
+    };
+    const timeout = setTimeout(() => {
+      unsubscribe();
+      res.json([]);
+    }, 3e4);
+    ({unsubscribe} = taskBroker.observe({taskId, after}, (error, {events}) => {
+      clearTimeout(timeout);
+      unsubscribe();
+      if (error) {
+        logger.error(`Received error from log when observing taskId '${taskId}', ${error}`);
+      }
+      res.json(events);
+    }));
+    req.on("close", () => {
+      unsubscribe();
+      clearTimeout(timeout);
+    });
  });
  const app = express__default['default']();
  app.set("logger", logger);
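Besides hardening the SSE endpoint (`res.flush` is now only called when present, and `after` is only parsed when supplied), this hunk adds a long-polling `GET /v2/tasks/:taskId/events` route that answers with the buffered events as JSON, or an empty array after roughly 30 seconds of inactivity. A client-side polling sketch; the base URL, task id, and event field names are assumptions, not part of this diff:

```js
async function pollTaskEvents(scaffolderBaseUrl, taskId) {
  let after;
  for (;;) {
    const res = await fetch(
      `${scaffolderBaseUrl}/v2/tasks/${taskId}/events${after ? `?after=${after}` : ''}`,
    );
    const events = await res.json(); // [] when the long poll times out
    for (const event of events) {
      console.log(event.type, event.body); // assumed event shape
      after = event.id;
    }
  }
}
```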
@@ -2700,8 +2856,11 @@ Object.defineProperty(exports, 'createFetchCookiecutterAction', {
   }
 });
 exports.CatalogEntityClient = CatalogEntityClient;
+exports.DatabaseTaskStore = DatabaseTaskStore;
 exports.OctokitProvider = OctokitProvider;
 exports.ScaffolderEntitiesProcessor = ScaffolderEntitiesProcessor;
+exports.TaskManager = TaskManager;
+exports.TaskWorker = TaskWorker;
 exports.TemplateActionRegistry = TemplateActionRegistry;
 exports.createBuiltinActions = createBuiltinActions;
 exports.createCatalogRegisterAction = createCatalogRegisterAction;