@backstage/plugin-scaffolder-backend 0.15.9 → 0.15.13
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +91 -0
- package/dist/index.cjs.js +236 -135
- package/dist/index.cjs.js.map +1 -1
- package/dist/index.d.ts +309 -4
- package/package.json +16 -15
package/dist/index.cjs.js
CHANGED
@@ -22,14 +22,14 @@ var lodash = require('lodash');
 var octokitPluginCreatePullRequest = require('octokit-plugin-create-pull-request');
 var node = require('@gitbeaker/node');
 var webhooks = require('@octokit/webhooks');
-var express = require('express');
-var Router = require('express-promise-router');
-var jsonschema = require('jsonschema');
 var uuid = require('uuid');
 var luxon = require('luxon');
-var os = require('os');
 var Handlebars = require('handlebars');
 var winston = require('winston');
+var jsonschema = require('jsonschema');
+var express = require('express');
+var Router = require('express-promise-router');
+var os = require('os');
 var pluginCatalogBackend = require('@backstage/plugin-catalog-backend');
 var pluginScaffolderCommon = require('@backstage/plugin-scaffolder-common');

@@ -62,11 +62,11 @@ var yaml__namespace = /*#__PURE__*/_interopNamespace(yaml);
 var globby__default = /*#__PURE__*/_interopDefaultLegacy(globby);
 var nunjucks__default = /*#__PURE__*/_interopDefaultLegacy(nunjucks);
 var fetch__default = /*#__PURE__*/_interopDefaultLegacy(fetch);
+var Handlebars__namespace = /*#__PURE__*/_interopNamespace(Handlebars);
+var winston__namespace = /*#__PURE__*/_interopNamespace(winston);
 var express__default = /*#__PURE__*/_interopDefaultLegacy(express);
 var Router__default = /*#__PURE__*/_interopDefaultLegacy(Router);
 var os__default = /*#__PURE__*/_interopDefaultLegacy(os);
-var Handlebars__namespace = /*#__PURE__*/_interopNamespace(Handlebars);
-var winston__namespace = /*#__PURE__*/_interopNamespace(winston);

 const createTemplateAction = (templateAction) => {
   return templateAction;
@@ -88,6 +88,11 @@ function createCatalogRegisterAction(options) {
             title: "Catalog Info URL",
             description: "An absolute URL pointing to the catalog info file location",
             type: "string"
+          },
+          optional: {
+            title: "Optional",
+            description: "Permit the registered location to optionally exist. Default: false",
+            type: "boolean"
           }
         }
       },
@@ -104,6 +109,11 @@ function createCatalogRegisterAction(options) {
             title: "Fetch URL",
             description: "A relative path from the repo root pointing to the catalog info file, defaults to /catalog-info.yaml",
             type: "string"
+          },
+          optional: {
+            title: "Optional",
+            description: "Permit the registered location to optionally exist. Default: false",
+            type: "boolean"
           }
         }
       }
@@ -132,15 +142,21 @@ function createCatalogRegisterAction(options) {
         type: "url",
         target: catalogInfoUrl
       }, ctx.token ? {token: ctx.token} : {});
-
-
-
-
-
-
-
-
-
+      try {
+        const result = await catalogClient.addLocation({
+          dryRun: true,
+          type: "url",
+          target: catalogInfoUrl
+        }, ctx.token ? {token: ctx.token} : {});
+        if (result.entities.length > 0) {
+          const {entities} = result;
+          const entity = (_a = entities.find((e) => !e.metadata.name.startsWith("generated-"))) != null ? _a : entities[0];
+          ctx.output("entityRef", catalogModel.stringifyEntityRef(entity));
+        }
+      } catch (e) {
+        if (!input.optional) {
+          throw e;
+        }
      }
      ctx.output("catalogInfoUrl", catalogInfoUrl);
    }
@@ -418,6 +434,9 @@ function createFetchTemplateAction(options) {
         localOutputPath = templater.renderString(localOutputPath, context);
       }
       const outputPath = path.resolve(outputDir, localOutputPath);
+      if (outputDir === outputPath) {
+        continue;
+      }
       if (!renderContents && !extension) {
         ctx.logger.info(`Copying file/directory ${location} without processing.`);
       }
@@ -631,6 +650,7 @@ const enableBranchProtectionOnDefaultRepoBranch = async ({
       }
     });
   } catch (e) {
+    errors.assertError(e);
     if (e.message.includes("Upgrade to GitHub Pro or make this repository public to enable this feature")) {
       logger.warn("Branch protection was not enabled as it requires GitHub Pro for private repositories");
     } else {
@@ -697,6 +717,11 @@ const parseRepoUrl = (repoUrl, integrations) => {
   }
   return {host, owner, repo, organization, workspace, project};
 };
+const isExecutable = (fileMode) => {
+  const executeBitMask = 73;
+  const res = fileMode & executeBitMask;
+  return res > 0;
+};

 function createPublishAzureAction(options) {
   const {integrations, config} = options;
@@ -1216,6 +1241,7 @@ function createPublishGithubAction(options) {
           permission
         });
       } catch (e) {
+        errors.assertError(e);
         ctx.logger.warn(`Skipping ${permission} access for ${team_slug}, ${e.message}`);
       }
     }
@@ -1228,6 +1254,7 @@ function createPublishGithubAction(options) {
           names: topics.map((t) => t.toLowerCase())
         });
       } catch (e) {
+        errors.assertError(e);
         ctx.logger.warn(`Skipping topics ${topics.join(" ")}, ${e.message}`);
       }
     }
@@ -1259,6 +1286,7 @@ function createPublishGithubAction(options) {
         requireCodeOwnerReviews
       });
     } catch (e) {
+      errors.assertError(e);
       ctx.logger.warn(`Skipping: default branch protection on '${newRepo.name}', ${e.message}`);
     }
     ctx.output("remoteUrl", remoteUrl);
@@ -1371,13 +1399,24 @@ const createPublishGithubPullRequestAction = ({
       gitignore: true,
       dot: true
     });
-    const fileContents = await Promise.all(localFilePaths.map((
+    const fileContents = await Promise.all(localFilePaths.map((filePath) => {
+      const absPath = path__default['default'].resolve(fileRoot, filePath);
+      const base64EncodedContent = fs__default['default'].readFileSync(absPath).toString("base64");
+      const fileStat = fs__default['default'].statSync(absPath);
+      const githubTreeItemMode = isExecutable(fileStat.mode) ? "100755" : "100644";
+      const encoding = "base64";
+      return {
+        encoding,
+        content: base64EncodedContent,
+        mode: githubTreeItemMode
+      };
+    }));
     const repoFilePaths = localFilePaths.map((repoFilePath) => {
       return targetPath ? `${targetPath}/${repoFilePath}` : repoFilePath;
     });
     const changes = [
       {
-        files: lodash.zipObject(repoFilePaths, fileContents
+        files: lodash.zipObject(repoFilePaths, fileContents),
         commit: title
       }
     ];
@@ -1637,6 +1676,7 @@ function createGithubWebhookAction(options) {
       });
       ctx.logger.info(`Webhook '${webhookUrl}' created successfully`);
     } catch (e) {
+      errors.assertError(e);
      ctx.logger.warn(`Failed: create webhook '${webhookUrl}' on repo: '${repo}', ${e.message}`);
    }
  }
@@ -1714,38 +1754,16 @@ class TemplateActionRegistry {
   }
 }

-class CatalogEntityClient {
-  constructor(catalogClient) {
-    this.catalogClient = catalogClient;
-  }
-  async findTemplate(templateName, options) {
-    const {items: templates} = await this.catalogClient.getEntities({
-      filter: {
-        kind: "template",
-        "metadata.name": templateName
-      }
-    }, options);
-    if (templates.length !== 1) {
-      if (templates.length > 1) {
-        throw new errors.ConflictError("Templates lookup resulted in multiple matches");
-      } else {
-        throw new errors.NotFoundError("Template not found");
-      }
-    }
-    return templates[0];
-  }
-}
-
 const migrationsDir = backendCommon.resolvePackagePath("@backstage/plugin-scaffolder-backend", "migrations");
 class DatabaseTaskStore {
-
-
-  }
-  static async create(knex) {
-    await knex.migrate.latest({
+  static async create(options) {
+    await options.database.migrate.latest({
       directory: migrationsDir
     });
-    return new DatabaseTaskStore(
+    return new DatabaseTaskStore(options);
+  }
+  constructor(options) {
+    this.db = options.database;
   }
   async getTask(taskId) {
     const [result] = await this.db("tasks").where({id: taskId}).select();
@@ -1901,7 +1919,7 @@ class DatabaseTaskStore {
   }
 }

-class
+class TaskManager {
   constructor(state, storage, logger) {
     this.state = state;
     this.storage = storage;
@@ -1909,7 +1927,7 @@ class TaskAgent {
     this.isDone = false;
   }
   static create(state, storage, logger) {
-    const agent = new
+    const agent = new TaskManager(state, storage, logger);
     agent.startTimeout();
     return agent;
   }
@@ -1975,7 +1993,7 @@ class StorageTaskBroker {
     for (; ; ) {
       const pendingTask = await this.storage.claimTask();
       if (pendingTask) {
-        return
+        return TaskManager.create({
          taskId: pendingTask.id,
          spec: pendingTask.spec,
          secrets: pendingTask.secrets
@@ -2010,13 +2028,14 @@ class StorageTaskBroker {
          try {
            callback(void 0, result);
          } catch (error) {
+            errors.assertError(error);
            callback(error, {events: []});
          }
        }
        await new Promise((resolve) => setTimeout(resolve, 1e3));
      }
    })();
-    return unsubscribe;
+    return {unsubscribe};
   }
   async vacuumTasks(timeoutS) {
     const {tasks} = await this.storage.listStaleTasks(timeoutS);
@@ -2043,68 +2062,12 @@ class StorageTaskBroker {
   }
 }

-class TaskWorker {
-  constructor(options) {
-    this.options = options;
-  }
-  start() {
-    (async () => {
-      for (; ; ) {
-        const task = await this.options.taskBroker.claim();
-        await this.runOneTask(task);
-      }
-    })();
-  }
-  async runOneTask(task) {
-    try {
-      const {output} = task.spec.apiVersion === "scaffolder.backstage.io/v1beta3" ? await this.options.runners.workflowRunner.execute(task) : await this.options.runners.legacyWorkflowRunner.execute(task);
-      await task.complete("completed", {output});
-    } catch (error) {
-      await task.complete("failed", {
-        error: {name: error.name, message: error.message}
-      });
-    }
-  }
-}
-
-async function getWorkingDirectory(config, logger) {
-  if (!config.has("backend.workingDirectory")) {
-    return os__default['default'].tmpdir();
-  }
-  const workingDirectory = config.getString("backend.workingDirectory");
-  try {
-    await fs__default['default'].access(workingDirectory, fs__default['default'].constants.F_OK | fs__default['default'].constants.W_OK);
-    logger.info(`using working directory: ${workingDirectory}`);
-  } catch (err) {
-    logger.error(`working directory ${workingDirectory} ${err.code === "ENOENT" ? "does not exist" : "is not writable"}`);
-    throw err;
-  }
-  return workingDirectory;
-}
-function getEntityBaseUrl(entity) {
-  var _a, _b;
-  let location = (_a = entity.metadata.annotations) == null ? void 0 : _a[catalogModel.SOURCE_LOCATION_ANNOTATION];
-  if (!location) {
-    location = (_b = entity.metadata.annotations) == null ? void 0 : _b[catalogModel.LOCATION_ANNOTATION];
-  }
-  if (!location) {
-    return void 0;
-  }
-  const {type, target} = catalogModel.parseLocationReference(location);
-  if (type === "url") {
-    return target;
-  } else if (type === "file") {
-    return `file://${target}`;
-  }
-  return void 0;
-}
-
 function isTruthy(value) {
   return lodash.isArray(value) ? value.length > 0 : !!value;
 }

 const isValidTaskSpec$1 = (taskSpec) => taskSpec.apiVersion === "backstage.io/v1beta2";
-class
+class HandlebarsWorkflowRunner {
   constructor(options) {
     this.options = options;
     this.handlebars = Handlebars__namespace.create();
@@ -2215,6 +2178,9 @@ class LegacyWorkflowRunner {
       this.options.logger.debug(`Running ${action.id} with input`, {
         input: JSON.stringify(input, null, 2)
       });
+      if (!task.spec.metadata) {
+        console.warn("DEPRECATION NOTICE: metadata is undefined. metadata will be required in the future.");
+      }
       await action.handler({
         baseUrl: task.spec.baseUrl,
         logger: taskLogger,
@@ -2229,7 +2195,8 @@ class LegacyWorkflowRunner {
         },
         output(name, value) {
           stepOutputs[name] = value;
-        }
+        },
+        metadata: task.spec.metadata
       });
       for (const tmpDir of tmpDirs) {
         await fs__default['default'].remove(tmpDir);
@@ -2280,7 +2247,10 @@ class LegacyWorkflowRunner {
 const isValidTaskSpec = (taskSpec) => {
   return taskSpec.apiVersion === "scaffolder.backstage.io/v1beta3";
 };
-const createStepLogger = ({
+const createStepLogger = ({
+  task,
+  step
+}) => {
   const metadata = {stepId: step.id};
   const taskLogger = winston__namespace.createLogger({
     level: process.env.LOG_LEVEL || "info",
@@ -2297,7 +2267,7 @@ const createStepLogger = ({task, step}) => {
   taskLogger.add(new winston__namespace.transports.Stream({stream: streamLogger}));
   return {taskLogger, streamLogger};
 };
-class
+class NunjucksWorkflowRunner {
   constructor(options) {
     this.options = options;
     this.nunjucksOptions = {
@@ -2387,6 +2357,9 @@ class DefaultWorkflowRunner {
       }
       const tmpDirs = new Array();
       const stepOutput = {};
+      if (!task.spec.metadata) {
+        console.warn("DEPRECATION NOTICE: metadata is undefined. metadata will be required in the future.");
+      }
       await action.handler({
         baseUrl: task.spec.baseUrl,
         input,
@@ -2400,7 +2373,8 @@ class DefaultWorkflowRunner {
         },
         output(name, value) {
           stepOutput[name] = value;
-        }
+        },
+        metadata: task.spec.metadata
       });
       for (const tmpDir of tmpDirs) {
         await fs__default['default'].remove(tmpDir);
@@ -2428,6 +2402,111 @@ class DefaultWorkflowRunner {
   }
 }

+class TaskWorker {
+  constructor(options) {
+    this.options = options;
+  }
+  static async create(options) {
+    const {
+      taskBroker,
+      logger,
+      actionRegistry,
+      integrations,
+      workingDirectory
+    } = options;
+    const legacyWorkflowRunner = new HandlebarsWorkflowRunner({
+      logger,
+      actionRegistry,
+      integrations,
+      workingDirectory
+    });
+    const workflowRunner = new NunjucksWorkflowRunner({
+      actionRegistry,
+      integrations,
+      logger,
+      workingDirectory
+    });
+    return new TaskWorker({
+      taskBroker,
+      runners: {legacyWorkflowRunner, workflowRunner}
+    });
+  }
+  start() {
+    (async () => {
+      for (; ; ) {
+        const task = await this.options.taskBroker.claim();
+        await this.runOneTask(task);
+      }
+    })();
+  }
+  async runOneTask(task) {
+    try {
+      const {output} = task.spec.apiVersion === "scaffolder.backstage.io/v1beta3" ? await this.options.runners.workflowRunner.execute(task) : await this.options.runners.legacyWorkflowRunner.execute(task);
+      await task.complete("completed", {output});
+    } catch (error) {
+      errors.assertError(error);
+      await task.complete("failed", {
+        error: {name: error.name, message: error.message}
+      });
+    }
+  }
+}
+
+class CatalogEntityClient {
+  constructor(catalogClient) {
+    this.catalogClient = catalogClient;
+  }
+  async findTemplate(templateName, options) {
+    const {items: templates} = await this.catalogClient.getEntities({
+      filter: {
+        kind: "template",
+        "metadata.name": templateName
+      }
+    }, options);
+    if (templates.length !== 1) {
+      if (templates.length > 1) {
+        throw new errors.ConflictError("Templates lookup resulted in multiple matches");
+      } else {
+        throw new errors.NotFoundError("Template not found");
+      }
+    }
+    return templates[0];
+  }
+}
+
+async function getWorkingDirectory(config, logger) {
+  if (!config.has("backend.workingDirectory")) {
+    return os__default['default'].tmpdir();
+  }
+  const workingDirectory = config.getString("backend.workingDirectory");
+  try {
+    await fs__default['default'].access(workingDirectory, fs__default['default'].constants.F_OK | fs__default['default'].constants.W_OK);
+    logger.info(`using working directory: ${workingDirectory}`);
+  } catch (err) {
+    errors.assertError(err);
+    logger.error(`working directory ${workingDirectory} ${err.code === "ENOENT" ? "does not exist" : "is not writable"}`);
+    throw err;
+  }
+  return workingDirectory;
+}
+function getEntityBaseUrl(entity) {
+  var _a, _b;
+  let location = (_a = entity.metadata.annotations) == null ? void 0 : _a[catalogModel.SOURCE_LOCATION_ANNOTATION];
+  if (!location) {
+    location = (_b = entity.metadata.annotations) == null ? void 0 : _b[catalogModel.LOCATION_ANNOTATION];
+  }
+  if (!location) {
+    return void 0;
+  }
+  const {type, target} = catalogModel.parseLocationReference(location);
+  if (type === "url") {
+    return target;
+  } else if (type === "file") {
+    return `file://${target}`;
+  }
+  return void 0;
+}
+
 function isSupportedTemplate(entity) {
   return entity.apiVersion === "backstage.io/v1beta2" || entity.apiVersion === "scaffolder.backstage.io/v1beta3";
 }
@@ -2448,29 +2527,24 @@ async function createRouter(options) {
   const workingDirectory = await getWorkingDirectory(config, logger);
   const entityClient = new CatalogEntityClient(catalogClient);
   const integrations = integration.ScmIntegrations.fromConfig(config);
-
-
+  let taskBroker;
+  if (!options.taskBroker) {
+    const databaseTaskStore = await DatabaseTaskStore.create({
+      database: await database.getClient()
+    });
+    taskBroker = new StorageTaskBroker(databaseTaskStore, logger);
+  } else {
+    taskBroker = options.taskBroker;
+  }
   const actionRegistry = new TemplateActionRegistry();
-  const legacyWorkflowRunner = new LegacyWorkflowRunner({
-    logger,
-    actionRegistry,
-    integrations,
-    workingDirectory
-  });
-  const workflowRunner = new DefaultWorkflowRunner({
-    actionRegistry,
-    integrations,
-    logger,
-    workingDirectory
-  });
   const workers = [];
   for (let i = 0; i < (taskWorkers || 1); i++) {
-    const worker =
+    const worker = await TaskWorker.create({
       taskBroker,
-
-
-
-
+      actionRegistry,
+      integrations,
+      logger,
+      workingDirectory
     });
     workers.push(worker);
   }
@@ -2520,7 +2594,7 @@ async function createRouter(options) {
     });
     res.json(actionsList);
   }).post("/v2/tasks", async (req, res) => {
-    var _a, _b, _c;
+    var _a, _b, _c, _d, _e;
    const templateName = req.body.templateName;
    const values = req.body.values;
    const token = getBearerToken(req.headers.authorization);
@@ -2549,7 +2623,8 @@ async function createRouter(options) {
            name: (_b2 = step.name) != null ? _b2 : step.action
          };
        }),
-        output: (_b = template.spec.output) != null ? _b : {}
+        output: (_b = template.spec.output) != null ? _b : {},
+        metadata: {name: (_c = template.metadata) == null ? void 0 : _c.name}
      } : {
        apiVersion: template.apiVersion,
        baseUrl,
@@ -2562,7 +2637,8 @@ async function createRouter(options) {
            name: (_b2 = step.name) != null ? _b2 : step.action
          };
        }),
-        output: (
+        output: (_d = template.spec.output) != null ? _d : {},
+        metadata: {name: (_e = template.metadata) == null ? void 0 : _e.name}
      };
    } else {
      throw new errors.InputError(`Unsupported apiVersion field in schema entity, ${template.apiVersion}`);
@@ -2581,14 +2657,15 @@ async function createRouter(options) {
    res.status(200).json(task);
  }).get("/v2/tasks/:taskId/eventstream", async (req, res) => {
    const {taskId} = req.params;
-    const after = Number(req.query.after)
+    const after = req.query.after !== void 0 ? Number(req.query.after) : void 0;
    logger.debug(`Event stream observing taskId '${taskId}' opened`);
    res.writeHead(200, {
      Connection: "keep-alive",
      "Cache-Control": "no-cache",
      "Content-Type": "text/event-stream"
    });
-    const unsubscribe = taskBroker.observe({taskId, after}, (error, {events}) => {
+    const {unsubscribe} = taskBroker.observe({taskId, after}, (error, {events}) => {
+      var _a;
      if (error) {
        logger.error(`Received error from event stream when observing taskId '${taskId}', ${error}`);
      }
@@ -2602,7 +2679,7 @@ data: ${JSON.stringify(event)}
          shouldUnsubscribe = true;
        }
      }
-      res.flush();
+      (_a = res.flush) == null ? void 0 : _a.call(res);
      if (shouldUnsubscribe)
        unsubscribe();
    });
@@ -2610,6 +2687,27 @@ data: ${JSON.stringify(event)}
      unsubscribe();
      logger.debug(`Event stream observing taskId '${taskId}' closed`);
    });
+  }).get("/v2/tasks/:taskId/events", async (req, res) => {
+    const {taskId} = req.params;
+    const after = Number(req.query.after) || void 0;
+    let unsubscribe = () => {
+    };
+    const timeout = setTimeout(() => {
+      unsubscribe();
+      res.json([]);
+    }, 3e4);
+    ({unsubscribe} = taskBroker.observe({taskId, after}, (error, {events}) => {
+      clearTimeout(timeout);
+      unsubscribe();
+      if (error) {
+        logger.error(`Received error from log when observing taskId '${taskId}', ${error}`);
+      }
+      res.json(events);
+    }));
+    req.on("close", () => {
+      unsubscribe();
+      clearTimeout(timeout);
+    });
  });
  const app = express__default['default']();
  app.set("logger", logger);
@@ -2676,8 +2774,11 @@ Object.defineProperty(exports, 'createFetchCookiecutterAction', {
  }
 });
 exports.CatalogEntityClient = CatalogEntityClient;
+exports.DatabaseTaskStore = DatabaseTaskStore;
 exports.OctokitProvider = OctokitProvider;
 exports.ScaffolderEntitiesProcessor = ScaffolderEntitiesProcessor;
+exports.TaskManager = TaskManager;
+exports.TaskWorker = TaskWorker;
 exports.TemplateActionRegistry = TemplateActionRegistry;
 exports.createBuiltinActions = createBuiltinActions;
 exports.createCatalogRegisterAction = createCatalogRegisterAction;