@backstage/plugin-scaffolder-backend 0.16.1 → 0.17.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +67 -0
- package/dist/index.cjs.js +236 -204
- package/dist/index.cjs.js.map +1 -1
- package/dist/index.d.ts +131 -44
- package/package.json +14 -12
package/dist/index.cjs.js
CHANGED
@@ -24,6 +24,7 @@ var node = require('@gitbeaker/node');
 var webhooks = require('@octokit/webhooks');
 var uuid = require('uuid');
 var luxon = require('luxon');
+var ObservableImpl = require('zen-observable');
 var Handlebars = require('handlebars');
 var winston = require('winston');
 var jsonschema = require('jsonschema');
@@ -59,6 +60,7 @@ var yaml__namespace = /*#__PURE__*/_interopNamespace(yaml);
 var path__default = /*#__PURE__*/_interopDefaultLegacy(path);
 var globby__default = /*#__PURE__*/_interopDefaultLegacy(globby);
 var fetch__default = /*#__PURE__*/_interopDefaultLegacy(fetch);
+var ObservableImpl__default = /*#__PURE__*/_interopDefaultLegacy(ObservableImpl);
 var Handlebars__namespace = /*#__PURE__*/_interopNamespace(Handlebars);
 var winston__namespace = /*#__PURE__*/_interopNamespace(winston);
 var nunjucks__default = /*#__PURE__*/_interopDefaultLegacy(nunjucks);
@@ -250,9 +252,6 @@ async function fetchContents({
   fetchUrl = ".",
   outputPath
 }) {
-  if (typeof fetchUrl !== "string") {
-    throw new errors.InputError(`Invalid url parameter, expected string, got ${typeof fetchUrl}`);
-  }
   let fetchUrlIsAbsolute = false;
   try {
     new URL(fetchUrl);
@@ -309,14 +308,14 @@ function createFetchPlainAction(options) {
       }
     },
     async handler(ctx) {
-      var _a;
+      var _a, _b;
       ctx.logger.info("Fetching plain content from remote URL");
       const targetPath = (_a = ctx.input.targetPath) != null ? _a : "./";
       const outputPath = backendCommon.resolveSafeChildPath(ctx.workspacePath, targetPath);
       await fetchContents({
         reader,
         integrations,
-        baseUrl: ctx.baseUrl,
+        baseUrl: (_b = ctx.templateInfo) == null ? void 0 : _b.baseUrl,
         fetchUrl: ctx.input.url,
         outputPath
       });
@@ -475,7 +474,7 @@ function createFetchTemplateAction(options) {
       }
     },
     async handler(ctx) {
-      var _a;
+      var _a, _b;
       ctx.logger.info("Fetching template content from remote URL");
       const workDir = await ctx.createTemporaryDirectory();
       const templateDir = backendCommon.resolveSafeChildPath(workDir, "template");
@@ -497,7 +496,7 @@ function createFetchTemplateAction(options) {
       await fetchContents({
         reader,
         integrations,
-        baseUrl: ctx.baseUrl,
+        baseUrl: (_b = ctx.templateInfo) == null ? void 0 : _b.baseUrl,
         fetchUrl: ctx.input.url,
         outputPath: templateDir
       });
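
Both fetch handlers now resolve relative fetch URLs against ctx.templateInfo?.baseUrl rather than the old ctx.baseUrl. A minimal sketch of a custom action reading the new context field (the action id and log message are illustrative, not part of the package):

const { createTemplateAction } = require('@backstage/plugin-scaffolder-backend');

// Hypothetical custom action showing the new context shape: templateInfo
// (when present) carries the baseUrl that fetch:plain / fetch:template now use.
const createDebugBaseUrlAction = () =>
  createTemplateAction({
    id: 'debug:template-base-url', // illustrative id
    async handler(ctx) {
      const baseUrl = ctx.templateInfo ? ctx.templateInfo.baseUrl : undefined;
      ctx.logger.info(`Resolving relative content against: ${baseUrl}`);
    },
  });
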
@@ -666,14 +665,15 @@ const createFilesystemRenameAction = () => {
   });
 };
 
-const
-
-
-
-
-
+const executeShellCommand = async (options) => {
+  const {
+    command,
+    args,
+    options: spawnOptions,
+    logStream = new stream.PassThrough()
+  } = options;
   await new Promise((resolve, reject) => {
-    const process = child_process.spawn(command, args,
+    const process = child_process.spawn(command, args, spawnOptions);
     process.stdout.on("data", (stream) => {
       logStream.write(stream);
     });
@@ -691,6 +691,7 @@ const runCommand = async ({
     });
   });
 };
+const runCommand = executeShellCommand;
 async function initRepoAndPush({
   dir,
   remoteUrl,
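
The command runner now takes a single options object and forwards options to child_process.spawn; runCommand is kept as an alias, and both names appear in the exports hunk at the end of this file. A short usage sketch, with a placeholder command and working directory:

const { executeShellCommand } = require('@backstage/plugin-scaffolder-backend');
const { PassThrough } = require('stream');

async function runGitVersion(logger) {
  // logStream receives the child process output; pipe it wherever you log.
  const logStream = new PassThrough();
  logStream.on('data', chunk => logger.info(chunk.toString()));
  await executeShellCommand({
    command: 'git',
    args: ['--version'],
    options: { cwd: '/tmp' }, // forwarded to child_process.spawn as its options argument
    logStream,
  });
}
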
@@ -1191,47 +1192,40 @@ function createPublishFileAction() {
   });
 }
 
-
-
-
-
+async function getOctokitOptions(options) {
+  var _a;
+  const { integrations, credentialsProvider, repoUrl, token } = options;
+  const { owner, repo, host } = parseRepoUrl(repoUrl, integrations);
+  if (!owner) {
+    throw new errors.InputError(`No owner provided for repo ${repoUrl}`);
   }
-
-
-
-
-
-
-    const integrationConfig = (_a = this.integrations.github.byHost(host)) == null ? void 0 : _a.config;
-    if (!integrationConfig) {
-      throw new errors.InputError(`No integration for host ${host}`);
-    }
-    if (options == null ? void 0 : options.token) {
-      const client2 = new octokit.Octokit({
-        auth: options.token,
-        baseUrl: integrationConfig.apiBaseUrl,
-        previews: ["nebula-preview"]
-      });
-      return { client: client2, token: options.token, owner, repo };
-    }
-    const { token } = await this.githubCredentialsProvider.getCredentials({
-      url: `https://${host}/${encodeURIComponent(owner)}/${encodeURIComponent(repo)}`
-    });
-    if (!token) {
-      throw new errors.InputError(`No token available for host: ${host}, with owner ${owner}, and repo ${repo}`);
-    }
-    const client = new octokit.Octokit({
+  const integrationConfig = (_a = integrations.github.byHost(host)) == null ? void 0 : _a.config;
+  if (!integrationConfig) {
+    throw new errors.InputError(`No integration for host ${host}`);
+  }
+  if (token) {
+    return {
       auth: token,
       baseUrl: integrationConfig.apiBaseUrl,
       previews: ["nebula-preview"]
-    }
-    return { client, token, owner, repo };
+    };
   }
+  const githubCredentialsProvider = credentialsProvider != null ? credentialsProvider : integration.DefaultGithubCredentialsProvider.fromIntegrations(integrations);
+  const { token: credentialProviderToken } = await githubCredentialsProvider.getCredentials({
+    url: `https://${host}/${encodeURIComponent(owner)}/${encodeURIComponent(repo)}`
+  });
+  if (!credentialProviderToken) {
+    throw new errors.InputError(`No token available for host: ${host}, with owner ${owner}, and repo ${repo}`);
+  }
+  return {
+    auth: credentialProviderToken,
+    baseUrl: integrationConfig.apiBaseUrl,
+    previews: ["nebula-preview"]
+  };
 }
 
 function createPublishGithubAction(options) {
   const { integrations, config, githubCredentialsProvider } = options;
-  const octokitProvider = new OctokitProvider(integrations, githubCredentialsProvider || integration.DefaultGithubCredentialsProvider.fromIntegrations(integrations));
   return createTemplateAction({
     id: "publish:github",
     description: "Initializes a git repository of contents in workspace and publishes it to GitHub.",
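
The OctokitProvider wrapper used by the GitHub actions is replaced with a standalone getOctokitOptions helper that returns plain Octokit constructor options (auth, baseUrl, previews) instead of a pre-built client. A sketch of the call pattern the handlers below use, written against the module-local octokit and getOctokitOptions bindings (whether the helper is also re-exported is not visible in this hunk):

// Module-internal sketch: build a GitHub client the way the handlers below do.
async function makeGithubClient({ integrations, githubCredentialsProvider, repoUrl, token }) {
  const octokitOptions = await getOctokitOptions({
    integrations,
    credentialsProvider: githubCredentialsProvider,
    repoUrl, // e.g. 'github.com?owner=my-org&repo=my-repo' (placeholder values)
    token,   // optional caller-supplied token; otherwise the credentials provider is consulted
  });
  return new octokit.Octokit(octokitOptions);
}
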
@@ -1334,7 +1328,17 @@ function createPublishGithubAction(options) {
         topics,
         token: providedToken
       } = ctx.input;
-      const
+      const { owner, repo } = parseRepoUrl(repoUrl, integrations);
+      if (!owner) {
+        throw new errors.InputError("Invalid repository owner provided in repoUrl");
+      }
+      const octokitOptions = await getOctokitOptions({
+        integrations,
+        credentialsProvider: githubCredentialsProvider,
+        token: providedToken,
+        repoUrl
+      });
+      const client = new octokit.Octokit(octokitOptions);
       const user = await client.rest.users.getByUsername({
         username: owner
       });
@@ -1410,7 +1414,7 @@ function createPublishGithubAction(options) {
         defaultBranch,
         auth: {
           username: "x-access-token",
-          password:
+          password: octokitOptions.auth
         },
         logger: ctx.logger,
         commitMessage: config.getOptionalString("scaffolder.defaultCommitMessage"),
@@ -1445,29 +1449,15 @@ const defaultClientFactory = async ({
   host = "github.com",
   token: providedToken
 }) => {
-
-  const
-
-
-
-
-  if (providedToken) {
-    return new OctokitPR({
-      auth: providedToken,
-      baseUrl: integrationConfig.apiBaseUrl
-    });
-  }
-  const credentialsProvider = githubCredentialsProvider || integration.SingleInstanceGithubCredentialsProvider.create(integrationConfig);
-  const { token } = await credentialsProvider.getCredentials({
-    url: `https://${host}/${encodeURIComponent(owner)}/${encodeURIComponent(repo)}`
-  });
-  if (!token) {
-    throw new errors.InputError(`No token available for host: ${host}, with owner ${owner}, and repo ${repo}`);
-  }
-  return new OctokitPR({
-    auth: token,
-    baseUrl: integrationConfig.apiBaseUrl
+  const [encodedHost, encodedOwner, encodedRepo] = [host, owner, repo].map(encodeURIComponent);
+  const octokitOptions = await getOctokitOptions({
+    integrations,
+    credentialsProvider: githubCredentialsProvider,
+    repoUrl: `${encodedHost}?owner=${encodedOwner}&repo=${encodedRepo}`,
+    token: providedToken
   });
+  const OctokitPR = octokit.Octokit.plugin(octokitPluginCreatePullRequest.createPullRequest);
+  return new OctokitPR(octokitOptions);
 };
 const createPublishGithubPullRequestAction = ({
   integrations,
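
defaultClientFactory now funnels through the same helper and synthesizes the repoUrl string that parseRepoUrl expects. A small illustration of that format (values are placeholders):

// The host?owner=...&repo=... shape synthesized above for getOctokitOptions.
const host = 'github.com';
const owner = 'my-org';
const repo = 'my-repo';
const [encodedHost, encodedOwner, encodedRepo] = [host, owner, repo].map(encodeURIComponent);
const repoUrl = `${encodedHost}?owner=${encodedOwner}&repo=${encodedRepo}`;
// => 'github.com?owner=my-org&repo=my-repo'
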
@@ -1830,7 +1820,6 @@ const createPublishGitlabMergeRequestAction = (options) => {
 
 function createGithubActionsDispatchAction(options) {
   const { integrations, githubCredentialsProvider } = options;
-  const octokitProvider = new OctokitProvider(integrations, githubCredentialsProvider || integration.DefaultGithubCredentialsProvider.fromIntegrations(integrations));
   return createTemplateAction({
     id: "github:actions:dispatch",
     description: "Dispatches a GitHub Action workflow for a given branch or tag",
@@ -1876,7 +1865,16 @@ function createGithubActionsDispatchAction(options) {
         token: providedToken
       } = ctx.input;
       ctx.logger.info(`Dispatching workflow ${workflowId} for repo ${repoUrl} on ${branchOrTagName}`);
-      const
+      const { owner, repo } = parseRepoUrl(repoUrl, integrations);
+      if (!owner) {
+        throw new errors.InputError("Invalid repository owner provided in repoUrl");
+      }
+      const client = new octokit.Octokit(await getOctokitOptions({
+        integrations,
+        repoUrl,
+        credentialsProvider: githubCredentialsProvider,
+        token: providedToken
+      }));
       await client.rest.actions.createWorkflowDispatch({
         owner,
         repo,
@@ -1891,7 +1889,6 @@ function createGithubActionsDispatchAction(options) {
 
 function createGithubWebhookAction(options) {
   const { integrations, defaultWebhookSecret, githubCredentialsProvider } = options;
-  const octokitProvider = new OctokitProvider(integrations, githubCredentialsProvider != null ? githubCredentialsProvider : integration.DefaultGithubCredentialsProvider.fromIntegrations(integrations));
   const eventNames = webhooks.emitterEventNames.filter((event) => !event.includes("."));
   return createTemplateAction({
     id: "github:webhook",
@@ -1971,7 +1968,16 @@ function createGithubWebhookAction(options) {
         token: providedToken
       } = ctx.input;
       ctx.logger.info(`Creating webhook ${webhookUrl} for repo ${repoUrl}`);
-      const
+      const { owner, repo } = parseRepoUrl(repoUrl, integrations);
+      if (!owner) {
+        throw new errors.InputError("Invalid repository owner provided in repoUrl");
+      }
+      const client = new octokit.Octokit(await getOctokitOptions({
+        integrations,
+        credentialsProvider: githubCredentialsProvider,
+        repoUrl,
+        token: providedToken
+      }));
       try {
         const insecure_ssl = insecureSsl ? "1" : "0";
         await client.rest.repos.createWebhook({
@@ -1995,6 +2001,44 @@ function createGithubWebhookAction(options) {
   });
 }
 
+class OctokitProvider {
+  constructor(integrations, githubCredentialsProvider) {
+    this.integrations = integrations;
+    this.githubCredentialsProvider = githubCredentialsProvider || integration.DefaultGithubCredentialsProvider.fromIntegrations(this.integrations);
+  }
+  async getOctokit(repoUrl, options) {
+    var _a;
+    const { owner, repo, host } = parseRepoUrl(repoUrl, this.integrations);
+    if (!owner) {
+      throw new errors.InputError(`No owner provided for repo ${repoUrl}`);
+    }
+    const integrationConfig = (_a = this.integrations.github.byHost(host)) == null ? void 0 : _a.config;
+    if (!integrationConfig) {
+      throw new errors.InputError(`No integration for host ${host}`);
+    }
+    if (options == null ? void 0 : options.token) {
+      const client2 = new octokit.Octokit({
+        auth: options.token,
+        baseUrl: integrationConfig.apiBaseUrl,
+        previews: ["nebula-preview"]
+      });
+      return { client: client2, token: options.token, owner, repo };
+    }
+    const { token } = await this.githubCredentialsProvider.getCredentials({
+      url: `https://${host}/${encodeURIComponent(owner)}/${encodeURIComponent(repo)}`
+    });
+    if (!token) {
+      throw new errors.InputError(`No token available for host: ${host}, with owner ${owner}, and repo ${repo}`);
+    }
+    const client = new octokit.Octokit({
+      auth: token,
+      baseUrl: integrationConfig.apiBaseUrl,
+      previews: ["nebula-preview"]
+    });
+    return { client, token, owner, repo };
+  }
+}
+
 const createBuiltinActions = (options) => {
   const {
     reader,
@@ -2054,6 +2098,9 @@ const createBuiltinActions = (options) => {
     })
   ];
   if (containerRunner) {
+    backendCommon.getRootLogger().warn(`[DEPRECATED] The fetch:cookiecutter action will be removed part of the default scaffolder actions in later versions.
+You can install the package seperately and remove the containerRunner from the createBuiltInActions to remove this warning,
+or you can migrate to using fetch:template https://backstage.io/docs/features/software-templates/builtin-actions#migrating-from-fetchcookiecutter-to-fetchtemplate`);
     actions.push(pluginScaffolderBackendModuleCookiecutter.createFetchCookiecutterAction({
       reader,
       integrations,
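
createBuiltinActions now logs a deprecation warning whenever a containerRunner is supplied, since the built-in fetch:cookiecutter registration is being phased out. A hedged sketch of one way to silence it: stop passing containerRunner to createBuiltinActions and register the cookiecutter action yourself from its own module (option names other than reader, integrations, and containerRunner are elided because they are not visible here):

const { createBuiltinActions } = require('@backstage/plugin-scaffolder-backend');
const { createFetchCookiecutterAction } = require('@backstage/plugin-scaffolder-backend-module-cookiecutter');

function buildActions({ reader, integrations, containerRunner /* , ...other required options */ }) {
  // containerRunner deliberately NOT forwarded to createBuiltinActions,
  // so the deprecation warning above is not triggered.
  const actions = createBuiltinActions({ reader, integrations /* , ...other required options */ });
  return [
    ...actions,
    // Explicit registration of fetch:cookiecutter from the separate module.
    createFetchCookiecutterAction({ reader, integrations, containerRunner }),
  ];
}
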
@@ -2116,12 +2163,12 @@ class DatabaseTaskStore {
       throw new Error(`Failed to parse spec of task '${taskId}', ${error}`);
     }
   }
-  async createTask(
+  async createTask(options) {
     const taskId = uuid.v4();
     await this.db("tasks").insert({
       id: taskId,
-      spec: JSON.stringify(spec),
-      secrets: secrets ? JSON.stringify(secrets) : void 0,
+      spec: JSON.stringify(options.spec),
+      secrets: options.secrets ? JSON.stringify(options.secrets) : void 0,
       status: "open"
     });
     return { taskId };
@@ -2213,12 +2260,13 @@ class DatabaseTaskStore {
       });
     });
   }
-  async emitLogEvent(
-    const
+  async emitLogEvent(options) {
+    const { taskId, body } = options;
+    const serializedBody = JSON.stringify(body);
     await this.db("task_events").insert({
       task_id: taskId,
       event_type: "log",
-      body:
+      body: serializedBody
     });
   }
   async listEvents({
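
DatabaseTaskStore.createTask and emitLogEvent now take a single options object instead of positional arguments. The new call shapes, with placeholder values:

// Placeholder spec and secrets; shapes follow the destructuring above.
async function storeExamples(taskStore, taskSpec) {
  const { taskId } = await taskStore.createTask({
    spec: taskSpec,
    secrets: { backstageToken: 'placeholder-token' },
  });
  await taskStore.emitLogEvent({
    taskId,
    body: { message: 'step started' },
  });
  return taskId;
}
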
@@ -2251,38 +2299,38 @@ class DatabaseTaskStore {
 }
 
 class TaskManager {
-  constructor(
-    this.
+  constructor(task, storage, logger) {
+    this.task = task;
     this.storage = storage;
     this.logger = logger;
     this.isDone = false;
   }
-  static create(
-    const agent = new TaskManager(
+  static create(task, storage, logger) {
+    const agent = new TaskManager(task, storage, logger);
     agent.startTimeout();
     return agent;
   }
   get spec() {
-    return this.
+    return this.task.spec;
   }
   get secrets() {
-    return this.
+    return this.task.secrets;
   }
   async getWorkspaceName() {
-    return this.
+    return this.task.taskId;
   }
   get done() {
     return this.isDone;
   }
-  async emitLog(message,
+  async emitLog(message, logMetadata) {
     await this.storage.emitLogEvent({
-      taskId: this.
-      body: { message, ...
+      taskId: this.task.taskId,
+      body: { message, ...logMetadata }
     });
   }
   async complete(result, metadata) {
     await this.storage.completeTask({
-      taskId: this.
+      taskId: this.task.taskId,
       status: result === "failed" ? "failed" : "completed",
       eventBody: {
         message: `Run completed with status: ${result}`,
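
TaskManager likewise switches to a single task object: the constructor and create now take { taskId, spec, secrets } followed by the storage and logger, and the accessors read from this.task. A small sketch with placeholder values:

// Placeholder task row; the fields shown are the ones TaskManager reads.
function createAgent(taskStore, taskSpec, logger) {
  return TaskManager.create(
    { taskId: 'task-123', spec: taskSpec, secrets: { backstageToken: 'placeholder-token' } },
    taskStore,
    logger,
  );
}
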
@@ -2297,11 +2345,11 @@ class TaskManager {
   startTimeout() {
     this.heartbeatTimeoutId = setTimeout(async () => {
       try {
-        await this.storage.heartbeatTask(this.
+        await this.storage.heartbeatTask(this.task.taskId);
         this.startTimeout();
       } catch (error) {
         this.isDone = true;
-        this.logger.error(`Heartbeat for task ${this.
+        this.logger.error(`Heartbeat for task ${this.task.taskId} failed`, error);
       }
     }, 1e3);
   }
@@ -2333,8 +2381,8 @@ class StorageTaskBroker {
       await this.waitForDispatch();
     }
   }
-  async dispatch(
-    const taskRow = await this.storage.createTask(
+  async dispatch(options) {
+    const taskRow = await this.storage.createTask(options);
     this.signalDispatch();
     return {
       taskId: taskRow.taskId
@@ -2343,33 +2391,29 @@ class StorageTaskBroker {
   async get(taskId) {
     return this.storage.getTask(taskId);
   }
-
-
-
-    const unsubscribe = () => {
-      cancelled = true;
-    };
-    (async () => {
+  event$(options) {
+    return new ObservableImpl__default["default"]((observer) => {
+      const { taskId } = options;
       let after = options.after;
-
-
-
-
-
-
-
-
-        errors.assertError(error);
-        callback(error, { events: [] });
+      let cancelled = false;
+      (async () => {
+        while (!cancelled) {
+          const result = await this.storage.listEvents({ taskId, after });
+          const { events } = result;
+          if (events.length) {
+            after = events[events.length - 1].id;
+            observer.next(result);
           }
+          await new Promise((resolve) => setTimeout(resolve, 1e3));
         }
-
-
-
-
+      })();
+      return () => {
+        cancelled = true;
+      };
+    });
   }
-  async vacuumTasks(
-    const { tasks } = await this.storage.listStaleTasks(
+  async vacuumTasks(options) {
+    const { tasks } = await this.storage.listStaleTasks(options);
     await Promise.all(tasks.map(async (task) => {
       try {
         await this.storage.completeTask({
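
StorageTaskBroker.event$ now returns a zen-observable Observable instead of the old callback-based subscription, so consumers subscribe and later call unsubscribe on the returned subscription, exactly as the router does further down. A minimal consumer sketch:

// Poll a task's events via the new Observable API and return a stop function.
function watchTask(taskBroker, taskId, logger) {
  const subscription = taskBroker.event$({ taskId, after: undefined }).subscribe({
    next: ({ events }) => {
      for (const event of events) {
        logger.info(`task ${taskId} event '${event.type}'`);
      }
    },
    error: error => logger.error(`event stream for ${taskId} failed: ${error}`),
  });
  return () => subscription.unsubscribe(); // stops the underlying polling loop
}
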
@@ -2509,9 +2553,6 @@ class HandlebarsWorkflowRunner {
       this.options.logger.debug(`Running ${action.id} with input`, {
         input: JSON.stringify(input, null, 2)
       });
-      if (!task.spec.metadata) {
-        console.warn("DEPRECATION NOTICE: metadata is undefined. metadata will be required in the future.");
-      }
       await action.handler({
         baseUrl: task.spec.baseUrl,
         logger: taskLogger,
@@ -2527,7 +2568,8 @@ class HandlebarsWorkflowRunner {
         output(name, value) {
           stepOutputs[name] = value;
         },
-        metadata: task.spec.metadata
+        metadata: task.spec.metadata,
+        templateInfo: task.spec.templateInfo
       });
       for (const tmpDir of tmpDirs) {
         await fs__default["default"].remove(tmpDir);
@@ -2702,7 +2744,8 @@ class NunjucksWorkflowRunner {
         output(name, value) {
           stepOutput[name] = value;
         },
-        metadata: task.spec.metadata
+        metadata: task.spec.metadata,
+        templateInfo: task.spec.templateInfo
       });
       for (const tmpDir of tmpDirs) {
         await fs__default["default"].remove(tmpDir);
@@ -2814,33 +2857,19 @@ function getEntityBaseUrl(entity) {
   }
   return void 0;
 }
-async function findTemplate({
-  entityRef,
-
-
-}
-
-
-
-  });
-
-
-      kind: "template",
-      "metadata.name": parsedEntityRef.name,
-      "metadata.namespace": parsedEntityRef.namespace
-    }
-  }, {
-    token
-  });
-  const templates = items.filter((entity) => entity.kind === "Template");
-  if (templates.length !== 1) {
-    if (templates.length > 1) {
-      throw new errors.ConflictError("Templates lookup resulted in multiple matches");
-    } else {
-      throw new errors.NotFoundError("Template not found");
-    }
+async function findTemplate(options) {
+  const { entityRef, token, catalogApi } = options;
+  if (entityRef.namespace.toLocaleLowerCase("en-US") !== catalogModel.DEFAULT_NAMESPACE) {
+    throw new errors.InputError(`Invalid namespace, only '${catalogModel.DEFAULT_NAMESPACE}' namespace is supported`);
+  }
+  if (entityRef.kind.toLocaleLowerCase("en-US") !== "template") {
+    throw new errors.InputError(`Invalid kind, only 'Template' kind is supported`);
+  }
+  const template = await catalogApi.getEntityByName(entityRef, { token });
+  if (!template) {
+    throw new errors.NotFoundError(`Template ${entityRef} not found`);
   }
-  return
+  return template;
 }
 
 function isSupportedTemplate(entity) {
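
findTemplate now receives the full entity ref plus the catalog client, enforces the default namespace and Template kind itself, and loads the entity with catalogApi.getEntityByName. A sketch of the new call (the template name is a placeholder):

// The ref must be in the default namespace and of kind Template (case-insensitive).
async function loadTemplate(catalogApi, token) {
  return findTemplate({
    catalogApi,
    entityRef: { kind: 'template', namespace: 'default', name: 'my-template' },
    token,
  });
}
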
@@ -2898,12 +2927,6 @@ async function createRouter(options) {
   router.get("/v2/templates/:namespace/:kind/:name/parameter-schema", async (req, res) => {
     var _a, _b;
     const { namespace, kind, name } = req.params;
-    if (namespace !== "default") {
-      throw new errors.InputError(`Invalid namespace, only 'default' namespace is supported`);
-    }
-    if (kind.toLowerCase() !== "template") {
-      throw new errors.InputError(`Invalid kind, only 'Template' kind is supported`);
-    }
     const template = await findTemplate({
       catalogApi: catalogClient,
       entityRef: { kind, namespace, name },
@@ -2934,15 +2957,16 @@ async function createRouter(options) {
     });
     res.json(actionsList);
   }).post("/v2/tasks", async (req, res) => {
-    var _a, _b, _c, _d
-    const
+    var _a, _b, _c, _d;
+    const templateRef = req.body.templateRef;
+    const { kind, namespace, name } = catalogModel.parseEntityRef(templateRef, {
+      defaultKind: "template"
+    });
     const values = req.body.values;
     const token = getBearerToken(req.headers.authorization);
     const template = await findTemplate({
       catalogApi: catalogClient,
-      entityRef: {
-        name: templateName
-      },
+      entityRef: { kind, namespace, name },
       token: getBearerToken(req.headers.authorization)
     });
     let taskSpec;
@@ -2958,10 +2982,8 @@ async function createRouter(options) {
       }
     }
     const baseUrl = getEntityBaseUrl(template);
-
-      apiVersion: template.apiVersion,
+    const baseTaskSpec = {
       baseUrl,
-      values,
       steps: template.spec.steps.map((step, index) => {
         var _a2, _b2;
         return {
@@ -2971,29 +2993,34 @@ async function createRouter(options) {
         };
       }),
       output: (_b = template.spec.output) != null ? _b : {},
-      metadata: { name: (_c = template.metadata) == null ? void 0 : _c.name }
+      metadata: { name: (_c = template.metadata) == null ? void 0 : _c.name },
+      templateInfo: {
+        entityRef: catalogModel.stringifyEntityRef({
+          kind,
+          namespace,
+          name: (_d = template.metadata) == null ? void 0 : _d.name
+        }),
+        baseUrl
+      }
+    };
+    taskSpec = template.apiVersion === "backstage.io/v1beta2" ? {
+      ...baseTaskSpec,
+      apiVersion: template.apiVersion,
+      values
     } : {
+      ...baseTaskSpec,
       apiVersion: template.apiVersion,
-
-      parameters: values,
-      steps: template.spec.steps.map((step, index) => {
-        var _a2, _b2;
-        return {
-          ...step,
-          id: (_a2 = step.id) != null ? _a2 : `step-${index + 1}`,
-          name: (_b2 = step.name) != null ? _b2 : step.action
-        };
-      }),
-      output: (_d = template.spec.output) != null ? _d : {},
-      metadata: { name: (_e = template.metadata) == null ? void 0 : _e.name }
+      parameters: values
     };
     } else {
       throw new errors.InputError(`Unsupported apiVersion field in schema entity, ${template.apiVersion}`);
     }
-    const result = await taskBroker.dispatch(
-
-
-
+    const result = await taskBroker.dispatch({
+      spec: taskSpec,
+      secrets: {
+        ...req.body.secrets,
+        backstageToken: token
+      }
     });
     res.status(201).json({ id: result.taskId });
   }).get("/v2/tasks/:taskId", async (req, res) => {
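
POST /v2/tasks now expects a templateRef in the request body, parsed with parseEntityRef and a default kind of template, instead of the old bare templateName, and it merges req.body.secrets with the caller's token into the task secrets. A hedged client-side sketch (the URL prefix, template name, and values are placeholders; node-fetch is used purely for illustration):

const fetch = require('node-fetch');

async function createScaffolderTask(scaffolderBaseUrl, token) {
  const response = await fetch(`${scaffolderBaseUrl}/v2/tasks`, {
    method: 'POST',
    headers: {
      'Content-Type': 'application/json',
      Authorization: `Bearer ${token}`, // becomes secrets.backstageToken server-side
    },
    body: JSON.stringify({
      templateRef: 'template:default/my-template', // parsed with defaultKind 'template'
      values: { name: 'my-new-component' },
      secrets: {}, // optional, merged into the task secrets
    }),
  });
  const { id } = await response.json(); // 201 Created with the new task id
  return id;
}
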
@@ -3013,48 +3040,49 @@ async function createRouter(options) {
       "Cache-Control": "no-cache",
       "Content-Type": "text/event-stream"
     });
-    const
-
-      if (error) {
+    const subscription = taskBroker.event$({ taskId, after }).subscribe({
+      error: (error) => {
         logger.error(`Received error from event stream when observing taskId '${taskId}', ${error}`);
-      }
-
-
-
+      },
+      next: ({ events }) => {
+        var _a;
+        let shouldUnsubscribe = false;
+        for (const event of events) {
+          res.write(`event: ${event.type}
 data: ${JSON.stringify(event)}
 
 `);
-
-
+          if (event.type === "completion") {
+            shouldUnsubscribe = true;
+          }
         }
+        (_a = res.flush) == null ? void 0 : _a.call(res);
+        if (shouldUnsubscribe)
+          subscription.unsubscribe();
       }
-      (_a = res.flush) == null ? void 0 : _a.call(res);
-      if (shouldUnsubscribe)
-        unsubscribe();
     });
     req.on("close", () => {
-      unsubscribe();
+      subscription.unsubscribe();
       logger.debug(`Event stream observing taskId '${taskId}' closed`);
     });
   }).get("/v2/tasks/:taskId/events", async (req, res) => {
     const { taskId } = req.params;
     const after = Number(req.query.after) || void 0;
-    let unsubscribe = () => {
-    };
     const timeout = setTimeout(() => {
-      unsubscribe();
       res.json([]);
     }, 3e4);
-
-
-
-
-
+    const subscription = taskBroker.event$({ taskId, after }).subscribe({
+      error: (error) => {
+        logger.error(`Received error from event stream when observing taskId '${taskId}', ${error}`);
+      },
+      next: ({ events }) => {
+        clearTimeout(timeout);
+        subscription.unsubscribe();
+        res.json(events);
       }
-
-    }));
+    });
     req.on("close", () => {
-      unsubscribe();
+      subscription.unsubscribe();
       clearTimeout(timeout);
     });
   });
@@ -3072,6 +3100,9 @@ class ScaffolderEntitiesProcessor {
   constructor() {
     this.validators = [pluginScaffolderCommon.templateEntityV1beta3Validator];
   }
+  getProcessorName() {
+    return "ScaffolderEntitiesProcessor";
+  }
   async validateEntityKind(entity) {
     for (const validator of this.validators) {
       if (await validator.check(entity)) {
@@ -3143,6 +3174,7 @@ exports.createPublishGitlabAction = createPublishGitlabAction;
 exports.createPublishGitlabMergeRequestAction = createPublishGitlabMergeRequestAction;
 exports.createRouter = createRouter;
 exports.createTemplateAction = createTemplateAction;
+exports.executeShellCommand = executeShellCommand;
 exports.fetchContents = fetchContents;
 exports.runCommand = runCommand;
 //# sourceMappingURL=index.cjs.js.map