@backstage/plugin-scaffolder-backend 0.15.24 → 0.17.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +106 -0
- package/config.d.ts +0 -25
- package/dist/index.cjs.js +257 -219
- package/dist/index.cjs.js.map +1 -1
- package/dist/index.d.ts +375 -219
- package/package.json +28 -24
package/dist/index.cjs.js
CHANGED
@@ -24,6 +24,7 @@ var node = require('@gitbeaker/node');
 var webhooks = require('@octokit/webhooks');
 var uuid = require('uuid');
 var luxon = require('luxon');
+var ObservableImpl = require('zen-observable');
 var Handlebars = require('handlebars');
 var winston = require('winston');
 var jsonschema = require('jsonschema');
@@ -59,6 +60,7 @@ var yaml__namespace = /*#__PURE__*/_interopNamespace(yaml);
 var path__default = /*#__PURE__*/_interopDefaultLegacy(path);
 var globby__default = /*#__PURE__*/_interopDefaultLegacy(globby);
 var fetch__default = /*#__PURE__*/_interopDefaultLegacy(fetch);
+var ObservableImpl__default = /*#__PURE__*/_interopDefaultLegacy(ObservableImpl);
 var Handlebars__namespace = /*#__PURE__*/_interopNamespace(Handlebars);
 var winston__namespace = /*#__PURE__*/_interopNamespace(winston);
 var nunjucks__default = /*#__PURE__*/_interopDefaultLegacy(nunjucks);
@@ -250,9 +252,6 @@ async function fetchContents({
   fetchUrl = ".",
   outputPath
 }) {
-  if (typeof fetchUrl !== "string") {
-    throw new errors.InputError(`Invalid url parameter, expected string, got ${typeof fetchUrl}`);
-  }
   let fetchUrlIsAbsolute = false;
   try {
     new URL(fetchUrl);
@@ -309,14 +308,14 @@ function createFetchPlainAction(options) {
       }
     },
     async handler(ctx) {
-      var _a;
+      var _a, _b;
       ctx.logger.info("Fetching plain content from remote URL");
       const targetPath = (_a = ctx.input.targetPath) != null ? _a : "./";
       const outputPath = backendCommon.resolveSafeChildPath(ctx.workspacePath, targetPath);
       await fetchContents({
         reader,
         integrations,
-        baseUrl: ctx.baseUrl,
+        baseUrl: (_b = ctx.templateInfo) == null ? void 0 : _b.baseUrl,
         fetchUrl: ctx.input.url,
         outputPath
       });
@@ -475,7 +474,7 @@ function createFetchTemplateAction(options) {
       }
     },
     async handler(ctx) {
-      var _a;
+      var _a, _b;
       ctx.logger.info("Fetching template content from remote URL");
       const workDir = await ctx.createTemporaryDirectory();
       const templateDir = backendCommon.resolveSafeChildPath(workDir, "template");
@@ -497,7 +496,7 @@ function createFetchTemplateAction(options) {
       await fetchContents({
         reader,
         integrations,
-        baseUrl: ctx.baseUrl,
+        baseUrl: (_b = ctx.templateInfo) == null ? void 0 : _b.baseUrl,
         fetchUrl: ctx.input.url,
         outputPath: templateDir
       });
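Note on the two fetch hunks above: the compiled expression `(_b = ctx.templateInfo) == null ? void 0 : _b.baseUrl` is the down-leveled form of optional chaining, meaning the actions now read the base URL from the new `ctx.templateInfo` context instead of the deprecated `ctx.baseUrl`. A rough source-level sketch, assuming the TypeScript source uses optional chaining here:

    // Sketch only: readable equivalent of the compiled output in fetch:plain above.
    await fetchContents({
      reader,
      integrations,
      baseUrl: ctx.templateInfo?.baseUrl, // previously ctx.baseUrl
      fetchUrl: ctx.input.url,
      outputPath,
    });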
@@ -666,14 +665,15 @@ const createFilesystemRenameAction = () => {
   });
 };
 
-const
-
-
-
-
-
+const executeShellCommand = async (options) => {
+  const {
+    command,
+    args,
+    options: spawnOptions,
+    logStream = new stream.PassThrough()
+  } = options;
   await new Promise((resolve, reject) => {
-    const process = child_process.spawn(command, args,
+    const process = child_process.spawn(command, args, spawnOptions);
     process.stdout.on("data", (stream) => {
       logStream.write(stream);
     });
@@ -685,12 +685,13 @@ const runCommand = async ({
     });
     process.on("close", (code) => {
       if (code !== 0) {
-        return reject(`Command ${command} failed, exit code: ${code}`);
+        return reject(new Error(`Command ${command} failed, exit code: ${code}`));
       }
       return resolve();
     });
   });
 };
+const runCommand = executeShellCommand;
 async function initRepoAndPush({
   dir,
   remoteUrl,
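The two hunks above rename the previously internal `runCommand` helper to `executeShellCommand` (with `runCommand` kept as an alias), forward a spawn options object, and wrap failures in an `Error`; the new export is added at the bottom of this file. A minimal usage sketch, assuming it is called from inside a custom scaffolder action; the command, arguments, and paths are placeholders:

    const { executeShellCommand } = require('@backstage/plugin-scaffolder-backend');

    // Runs a command in the action workspace and pipes its output to the
    // action's log stream; `options` is forwarded to child_process.spawn.
    await executeShellCommand({
      command: 'git',
      args: ['status'],
      options: { cwd: ctx.workspacePath },
      logStream: ctx.logStream,
    });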
@@ -1191,47 +1192,40 @@ function createPublishFileAction() {
   });
 }
 
-
-
-
-
+async function getOctokitOptions(options) {
+  var _a;
+  const { integrations, credentialsProvider, repoUrl, token } = options;
+  const { owner, repo, host } = parseRepoUrl(repoUrl, integrations);
+  if (!owner) {
+    throw new errors.InputError(`No owner provided for repo ${repoUrl}`);
   }
-
-
-
-
-
-
-    const integrationConfig = (_a = this.integrations.github.byHost(host)) == null ? void 0 : _a.config;
-    if (!integrationConfig) {
-      throw new errors.InputError(`No integration for host ${host}`);
-    }
-    if (options == null ? void 0 : options.token) {
-      const client2 = new octokit.Octokit({
-        auth: options.token,
-        baseUrl: integrationConfig.apiBaseUrl,
-        previews: ["nebula-preview"]
-      });
-      return { client: client2, token: options.token, owner, repo };
-    }
-    const { token } = await this.githubCredentialsProvider.getCredentials({
-      url: `https://${host}/${encodeURIComponent(owner)}/${encodeURIComponent(repo)}`
-    });
-    if (!token) {
-      throw new errors.InputError(`No token available for host: ${host}, with owner ${owner}, and repo ${repo}`);
-    }
-    const client = new octokit.Octokit({
+  const integrationConfig = (_a = integrations.github.byHost(host)) == null ? void 0 : _a.config;
+  if (!integrationConfig) {
+    throw new errors.InputError(`No integration for host ${host}`);
+  }
+  if (token) {
+    return {
       auth: token,
       baseUrl: integrationConfig.apiBaseUrl,
       previews: ["nebula-preview"]
-    }
-    return { client, token, owner, repo };
+    };
   }
+  const githubCredentialsProvider = credentialsProvider != null ? credentialsProvider : integration.DefaultGithubCredentialsProvider.fromIntegrations(integrations);
+  const { token: credentialProviderToken } = await githubCredentialsProvider.getCredentials({
+    url: `https://${host}/${encodeURIComponent(owner)}/${encodeURIComponent(repo)}`
+  });
+  if (!credentialProviderToken) {
+    throw new errors.InputError(`No token available for host: ${host}, with owner ${owner}, and repo ${repo}`);
+  }
+  return {
+    auth: credentialProviderToken,
+    baseUrl: integrationConfig.apiBaseUrl,
+    previews: ["nebula-preview"]
+  };
 }
 
 function createPublishGithubAction(options) {
   const { integrations, config, githubCredentialsProvider } = options;
-  const octokitProvider = new OctokitProvider(integrations, githubCredentialsProvider || integration.DefaultGithubCredentialsProvider.fromIntegrations(integrations));
   return createTemplateAction({
     id: "publish:github",
     description: "Initializes a git repository of contents in workspace and publishes it to GitHub.",
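The hunk above replaces the per-action `OctokitProvider` usage with the standalone `getOctokitOptions` helper, which resolves Octokit constructor options either from an explicitly provided token or from a (possibly defaulted) GitHub credentials provider. A sketch of the call shape the GitHub actions below use; the owner/repo values are placeholders and the `host?owner=…&repo=…` form is the scaffolder's usual repoUrl format:

    const octokitOptions = await getOctokitOptions({
      integrations,
      credentialsProvider: githubCredentialsProvider, // optional
      token: providedToken,                           // optional, short-circuits the provider
      repoUrl: 'github.com?owner=my-org&repo=my-repo',
    });
    const client = new octokit.Octokit(octokitOptions);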
@@ -1334,7 +1328,17 @@ function createPublishGithubAction(options) {
         topics,
         token: providedToken
       } = ctx.input;
-      const {
+      const { owner, repo } = parseRepoUrl(repoUrl, integrations);
+      if (!owner) {
+        throw new errors.InputError("Invalid repository owner provided in repoUrl");
+      }
+      const octokitOptions = await getOctokitOptions({
+        integrations,
+        credentialsProvider: githubCredentialsProvider,
+        token: providedToken,
+        repoUrl
+      });
+      const client = new octokit.Octokit(octokitOptions);
       const user = await client.rest.users.getByUsername({
         username: owner
       });
@@ -1410,7 +1414,7 @@ function createPublishGithubAction(options) {
         defaultBranch,
         auth: {
           username: "x-access-token",
-          password:
+          password: octokitOptions.auth
         },
         logger: ctx.logger,
         commitMessage: config.getOptionalString("scaffolder.defaultCommitMessage"),
@@ -1445,29 +1449,15 @@ const defaultClientFactory = async ({
   host = "github.com",
   token: providedToken
 }) => {
-
-  const
-
-
-
-
-  if (providedToken) {
-    return new OctokitPR({
-      auth: providedToken,
-      baseUrl: integrationConfig.apiBaseUrl
-    });
-  }
-  const credentialsProvider = githubCredentialsProvider || integration.SingleInstanceGithubCredentialsProvider.create(integrationConfig);
-  const { token } = await credentialsProvider.getCredentials({
-    url: `https://${host}/${encodeURIComponent(owner)}/${encodeURIComponent(repo)}`
-  });
-  if (!token) {
-    throw new errors.InputError(`No token available for host: ${host}, with owner ${owner}, and repo ${repo}`);
-  }
-  return new OctokitPR({
-    auth: token,
-    baseUrl: integrationConfig.apiBaseUrl
+  const [encodedHost, encodedOwner, encodedRepo] = [host, owner, repo].map(encodeURIComponent);
+  const octokitOptions = await getOctokitOptions({
+    integrations,
+    credentialsProvider: githubCredentialsProvider,
+    repoUrl: `https://${encodedHost}/${encodedOwner}/${encodedRepo}`,
+    token: providedToken
   });
+  const OctokitPR = octokit.Octokit.plugin(octokitPluginCreatePullRequest.createPullRequest);
+  return new OctokitPR(octokitOptions);
 };
 const createPublishGithubPullRequestAction = ({
   integrations,
@@ -1830,7 +1820,6 @@ const createPublishGitlabMergeRequestAction = (options) => {
 
 function createGithubActionsDispatchAction(options) {
   const { integrations, githubCredentialsProvider } = options;
-  const octokitProvider = new OctokitProvider(integrations, githubCredentialsProvider || integration.DefaultGithubCredentialsProvider.fromIntegrations(integrations));
   return createTemplateAction({
     id: "github:actions:dispatch",
     description: "Dispatches a GitHub Action workflow for a given branch or tag",
@@ -1876,7 +1865,16 @@ function createGithubActionsDispatchAction(options) {
       token: providedToken
     } = ctx.input;
     ctx.logger.info(`Dispatching workflow ${workflowId} for repo ${repoUrl} on ${branchOrTagName}`);
-    const {
+    const { owner, repo } = parseRepoUrl(repoUrl, integrations);
+    if (!owner) {
+      throw new errors.InputError("Invalid repository owner provided in repoUrl");
+    }
+    const client = new octokit.Octokit(await getOctokitOptions({
+      integrations,
+      repoUrl,
+      credentialsProvider: githubCredentialsProvider,
+      token: providedToken
+    }));
     await client.rest.actions.createWorkflowDispatch({
       owner,
       repo,
@@ -1891,7 +1889,6 @@ function createGithubActionsDispatchAction(options) {
 
 function createGithubWebhookAction(options) {
   const { integrations, defaultWebhookSecret, githubCredentialsProvider } = options;
-  const octokitProvider = new OctokitProvider(integrations, githubCredentialsProvider != null ? githubCredentialsProvider : integration.DefaultGithubCredentialsProvider.fromIntegrations(integrations));
   const eventNames = webhooks.emitterEventNames.filter((event) => !event.includes("."));
   return createTemplateAction({
     id: "github:webhook",
@@ -1971,7 +1968,16 @@ function createGithubWebhookAction(options) {
       token: providedToken
     } = ctx.input;
    ctx.logger.info(`Creating webhook ${webhookUrl} for repo ${repoUrl}`);
-    const {
+    const { owner, repo } = parseRepoUrl(repoUrl, integrations);
+    if (!owner) {
+      throw new errors.InputError("Invalid repository owner provided in repoUrl");
+    }
+    const client = new octokit.Octokit(await getOctokitOptions({
+      integrations,
+      credentialsProvider: githubCredentialsProvider,
+      repoUrl,
+      token: providedToken
+    }));
     try {
       const insecure_ssl = insecureSsl ? "1" : "0";
       await client.rest.repos.createWebhook({
@@ -1995,6 +2001,44 @@ function createGithubWebhookAction(options) {
     });
   }
 
+class OctokitProvider {
+  constructor(integrations, githubCredentialsProvider) {
+    this.integrations = integrations;
+    this.githubCredentialsProvider = githubCredentialsProvider || integration.DefaultGithubCredentialsProvider.fromIntegrations(this.integrations);
+  }
+  async getOctokit(repoUrl, options) {
+    var _a;
+    const { owner, repo, host } = parseRepoUrl(repoUrl, this.integrations);
+    if (!owner) {
+      throw new errors.InputError(`No owner provided for repo ${repoUrl}`);
+    }
+    const integrationConfig = (_a = this.integrations.github.byHost(host)) == null ? void 0 : _a.config;
+    if (!integrationConfig) {
+      throw new errors.InputError(`No integration for host ${host}`);
+    }
+    if (options == null ? void 0 : options.token) {
+      const client2 = new octokit.Octokit({
+        auth: options.token,
+        baseUrl: integrationConfig.apiBaseUrl,
+        previews: ["nebula-preview"]
+      });
+      return { client: client2, token: options.token, owner, repo };
+    }
+    const { token } = await this.githubCredentialsProvider.getCredentials({
+      url: `https://${host}/${encodeURIComponent(owner)}/${encodeURIComponent(repo)}`
+    });
+    if (!token) {
+      throw new errors.InputError(`No token available for host: ${host}, with owner ${owner}, and repo ${repo}`);
+    }
+    const client = new octokit.Octokit({
+      auth: token,
+      baseUrl: integrationConfig.apiBaseUrl,
+      previews: ["nebula-preview"]
+    });
+    return { client, token, owner, repo };
+  }
+}
+
 const createBuiltinActions = (options) => {
   const {
     reader,
@@ -2054,6 +2098,9 @@ const createBuiltinActions = (options) => {
     })
   ];
   if (containerRunner) {
+    backendCommon.getRootLogger().warn(`[DEPRECATED] The fetch:cookiecutter action will be removed part of the default scaffolder actions in later versions.
+You can install the package seperately and remove the containerRunner from the createBuiltInActions to remove this warning,
+or you can migrate to using fetch:template https://backstage.io/docs/features/software-templates/builtin-actions#migrating-from-fetchcookiecutter-to-fetchtemplate`);
     actions.push(pluginScaffolderBackendModuleCookiecutter.createFetchCookiecutterAction({
       reader,
       integrations,
@@ -2116,12 +2163,12 @@ class DatabaseTaskStore {
       throw new Error(`Failed to parse spec of task '${taskId}', ${error}`);
     }
   }
-  async createTask(
+  async createTask(options) {
     const taskId = uuid.v4();
     await this.db("tasks").insert({
       id: taskId,
-      spec: JSON.stringify(spec),
-      secrets: secrets ? JSON.stringify(secrets) : void 0,
+      spec: JSON.stringify(options.spec),
+      secrets: options.secrets ? JSON.stringify(options.secrets) : void 0,
       status: "open"
     });
     return { taskId };
@@ -2213,12 +2260,13 @@ class DatabaseTaskStore {
       });
     });
   }
-  async emitLogEvent(
-    const
+  async emitLogEvent(options) {
+    const { taskId, body } = options;
+    const serializedBody = JSON.stringify(body);
     await this.db("task_events").insert({
       task_id: taskId,
       event_type: "log",
-      body:
+      body: serializedBody
     });
   }
   async listEvents({
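Both `DatabaseTaskStore.createTask` and `DatabaseTaskStore.emitLogEvent` now take a single options object instead of positional arguments. A sketch of the new call shapes; the store instance and the concrete values are illustrative:

    // createTask: spec and optional secrets travel in one object; returns { taskId }.
    const { taskId } = await taskStore.createTask({
      spec: taskSpec,
      secrets: { backstageToken: token },
    });

    // emitLogEvent: taskId and the log body travel in one object.
    await taskStore.emitLogEvent({ taskId, body: { message: 'Beginning step Fetch' } });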
@@ -2251,38 +2299,38 @@ class DatabaseTaskStore {
 }
 
 class TaskManager {
-  constructor(
-    this.
+  constructor(task, storage, logger) {
+    this.task = task;
     this.storage = storage;
     this.logger = logger;
     this.isDone = false;
   }
-  static create(
-    const agent = new TaskManager(
+  static create(task, storage, logger) {
+    const agent = new TaskManager(task, storage, logger);
     agent.startTimeout();
     return agent;
   }
   get spec() {
-    return this.
+    return this.task.spec;
   }
   get secrets() {
-    return this.
+    return this.task.secrets;
   }
   async getWorkspaceName() {
-    return this.
+    return this.task.taskId;
  }
   get done() {
     return this.isDone;
   }
-  async emitLog(message,
+  async emitLog(message, logMetadata) {
     await this.storage.emitLogEvent({
-      taskId: this.
-      body: { message, ...
+      taskId: this.task.taskId,
+      body: { message, ...logMetadata }
     });
   }
   async complete(result, metadata) {
     await this.storage.completeTask({
-      taskId: this.
+      taskId: this.task.taskId,
       status: result === "failed" ? "failed" : "completed",
       eventBody: {
         message: `Run completed with status: ${result}`,
@@ -2297,11 +2345,11 @@ class TaskManager {
   startTimeout() {
     this.heartbeatTimeoutId = setTimeout(async () => {
       try {
-        await this.storage.heartbeatTask(this.
+        await this.storage.heartbeatTask(this.task.taskId);
         this.startTimeout();
       } catch (error) {
         this.isDone = true;
-        this.logger.error(`Heartbeat for task ${this.
+        this.logger.error(`Heartbeat for task ${this.task.taskId} failed`, error);
       }
     }, 1e3);
   }
@@ -2333,8 +2381,8 @@ class StorageTaskBroker {
       await this.waitForDispatch();
     }
   }
-  async dispatch(
-    const taskRow = await this.storage.createTask(
+  async dispatch(options) {
+    const taskRow = await this.storage.createTask(options);
     this.signalDispatch();
     return {
       taskId: taskRow.taskId
@@ -2343,33 +2391,29 @@ class StorageTaskBroker {
   async get(taskId) {
     return this.storage.getTask(taskId);
   }
-
-
-
-    const unsubscribe = () => {
-      cancelled = true;
-    };
-    (async () => {
+  event$(options) {
+    return new ObservableImpl__default["default"]((observer) => {
+      const { taskId } = options;
       let after = options.after;
-
-
-
-
-
-
-
-
-          errors.assertError(error);
-          callback(error, { events: [] });
+      let cancelled = false;
+      (async () => {
+        while (!cancelled) {
+          const result = await this.storage.listEvents({ taskId, after });
+          const { events } = result;
+          if (events.length) {
+            after = events[events.length - 1].id;
+            observer.next(result);
           }
+          await new Promise((resolve) => setTimeout(resolve, 1e3));
         }
-
-
-
-
+      })();
+      return () => {
+        cancelled = true;
+      };
+    });
   }
-  async vacuumTasks(
-    const { tasks } = await this.storage.listStaleTasks(
+  async vacuumTasks(options) {
+    const { tasks } = await this.storage.listStaleTasks(options);
     await Promise.all(tasks.map(async (task) => {
       try {
         await this.storage.completeTask({
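`StorageTaskBroker` now publishes task events through `event$`, which returns a zen-observable `Observable` polling `listEvents` once per second, replacing the earlier callback-based subscription. A sketch of how a consumer subscribes, mirroring the router usage later in this diff; the taskId value is a placeholder:

    const subscription = taskBroker.event$({ taskId: 'my-task-id', after: undefined }).subscribe({
      next: ({ events }) => {
        for (const event of events) {
          console.log(event.type, JSON.stringify(event));
        }
      },
      error: (error) => console.error(error),
    });
    // Unsubscribing stops the underlying polling loop.
    subscription.unsubscribe();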
@@ -2414,7 +2458,7 @@ class HandlebarsWorkflowRunner {
     this.handlebars.registerHelper("eq", (a, b) => a === b);
   }
   async execute(task) {
-    var _a, _b
+    var _a, _b;
     if (!isValidTaskSpec$1(task.spec)) {
       throw new errors.InputError(`Task spec is not a valid v1beta2 task spec`);
     }
@@ -2509,16 +2553,12 @@ class HandlebarsWorkflowRunner {
       this.options.logger.debug(`Running ${action.id} with input`, {
         input: JSON.stringify(input, null, 2)
       });
-      if (!task.spec.metadata) {
-        console.warn("DEPRECATION NOTICE: metadata is undefined. metadata will be required in the future.");
-      }
       await action.handler({
         baseUrl: task.spec.baseUrl,
         logger: taskLogger,
         logStream: stream$1,
         input,
-
-        secrets: (_c = task.secrets) != null ? _c : {},
+        secrets: (_b = task.secrets) != null ? _b : {},
         workspacePath,
         async createTemporaryDirectory() {
           const tmpDir = await fs__default["default"].mkdtemp(`${workspacePath}_step-${step.id}-`);
@@ -2528,7 +2568,8 @@ class HandlebarsWorkflowRunner {
         output(name, value) {
           stepOutputs[name] = value;
         },
-        metadata: task.spec.metadata
+        metadata: task.spec.metadata,
+        templateInfo: task.spec.templateInfo
       });
       for (const tmpDir of tmpDirs) {
         await fs__default["default"].remove(tmpDir);
@@ -2644,7 +2685,7 @@ class NunjucksWorkflowRunner {
     });
   }
   async execute(task) {
-    var _a, _b, _c, _d
+    var _a, _b, _c, _d;
     if (!isValidTaskSpec(task.spec)) {
       throw new errors.InputError("Wrong template version executed with the workflow engine");
     }
@@ -2688,14 +2729,10 @@ class NunjucksWorkflowRunner {
       }
       const tmpDirs = new Array();
       const stepOutput = {};
-      if (!task.spec.metadata) {
-        console.warn("DEPRECATION NOTICE: metadata is undefined. metadata will be required in the future.");
-      }
       await action.handler({
         baseUrl: task.spec.baseUrl,
         input,
-
-        secrets: (_e = task.secrets) != null ? _e : {},
+        secrets: (_d = task.secrets) != null ? _d : {},
         logger: taskLogger,
         logStream: streamLogger,
         workspacePath,
@@ -2707,7 +2744,8 @@ class NunjucksWorkflowRunner {
         output(name, value) {
           stepOutput[name] = value;
         },
-        metadata: task.spec.metadata
+        metadata: task.spec.metadata,
+        templateInfo: task.spec.templateInfo
       });
       for (const tmpDir of tmpDirs) {
         await fs__default["default"].remove(tmpDir);
@@ -2787,28 +2825,6 @@ class TaskWorker {
     }
   }
 
-class CatalogEntityClient {
-  constructor(catalogClient) {
-    this.catalogClient = catalogClient;
-  }
-  async findTemplate(templateName, options) {
-    const { items: templates } = await this.catalogClient.getEntities({
-      filter: {
-        kind: "template",
-        "metadata.name": templateName
-      }
-    }, options);
-    if (templates.length !== 1) {
-      if (templates.length > 1) {
-        throw new errors.ConflictError("Templates lookup resulted in multiple matches");
-      } else {
-        throw new errors.NotFoundError("Template not found");
-      }
-    }
-    return templates[0];
-  }
-}
-
 async function getWorkingDirectory(config, logger) {
   if (!config.has("backend.workingDirectory")) {
     return os__default["default"].tmpdir();
@@ -2826,14 +2842,14 @@ async function getWorkingDirectory(config, logger) {
 }
 function getEntityBaseUrl(entity) {
   var _a, _b;
-  let location = (_a = entity.metadata.annotations) == null ? void 0 : _a[catalogModel.
+  let location = (_a = entity.metadata.annotations) == null ? void 0 : _a[catalogModel.ANNOTATION_SOURCE_LOCATION];
   if (!location) {
-    location = (_b = entity.metadata.annotations) == null ? void 0 : _b[catalogModel.
+    location = (_b = entity.metadata.annotations) == null ? void 0 : _b[catalogModel.ANNOTATION_LOCATION];
   }
   if (!location) {
     return void 0;
   }
-  const { type, target } = catalogModel.
+  const { type, target } = catalogModel.parseLocationRef(location);
   if (type === "url") {
     return target;
   } else if (type === "file") {
@@ -2841,6 +2857,20 @@ function getEntityBaseUrl(entity) {
   }
   return void 0;
 }
+async function findTemplate(options) {
+  const { entityRef, token, catalogApi } = options;
+  if (entityRef.namespace.toLocaleLowerCase("en-US") !== catalogModel.DEFAULT_NAMESPACE) {
+    throw new errors.InputError(`Invalid namespace, only '${catalogModel.DEFAULT_NAMESPACE}' namespace is supported`);
+  }
+  if (entityRef.kind.toLocaleLowerCase("en-US") !== "template") {
+    throw new errors.InputError(`Invalid kind, only 'Template' kind is supported`);
+  }
+  const template = await catalogApi.getEntityByName(entityRef, { token });
+  if (!template) {
+    throw new errors.NotFoundError(`Template ${entityRef} not found`);
+  }
+  return template;
+}
 
 function isSupportedTemplate(entity) {
   return entity.apiVersion === "backstage.io/v1beta2" || entity.apiVersion === "scaffolder.backstage.io/v1beta3";
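The new module-level `findTemplate` helper replaces the removed `CatalogEntityClient`: it looks a template up by a full entity ref and accepts only the default namespace and the Template kind. A sketch of the call shape used by the router below; the template name is a placeholder:

    const template = await findTemplate({
      catalogApi: catalogClient,
      entityRef: { kind: 'template', namespace: 'default', name: 'example-template' },
      token: getBearerToken(req.headers.authorization),
    });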
@@ -2861,7 +2891,6 @@ async function createRouter(options) {
   } = options;
   const logger = parentLogger.child({ plugin: "scaffolder" });
   const workingDirectory = await getWorkingDirectory(config, logger);
-  const entityClient = new CatalogEntityClient(catalogClient);
   const integrations = integration.ScmIntegrations.fromConfig(config);
   let taskBroker;
   if (!options.taskBroker) {
@@ -2898,13 +2927,9 @@ async function createRouter(options) {
   router.get("/v2/templates/:namespace/:kind/:name/parameter-schema", async (req, res) => {
     var _a, _b;
     const { namespace, kind, name } = req.params;
-
-
-
-    if (kind.toLowerCase() !== "template") {
-      throw new errors.InputError(`Invalid kind, only 'Template' kind is supported`);
-    }
-    const template = await entityClient.findTemplate(name, {
+    const template = await findTemplate({
+      catalogApi: catalogClient,
+      entityRef: { kind, namespace, name },
       token: getBearerToken(req.headers.authorization)
     });
     if (isSupportedTemplate(template)) {
@@ -2932,15 +2957,23 @@ async function createRouter(options) {
     });
     res.json(actionsList);
   }).post("/v2/tasks", async (req, res) => {
-    var _a, _b, _c, _d
-    const
+    var _a, _b, _c, _d;
+    const templateRef = req.body.templateRef;
+    const { kind, namespace, name } = catalogModel.parseEntityRef(templateRef, {
+      defaultKind: "template"
+    });
     const values = req.body.values;
     const token = getBearerToken(req.headers.authorization);
-    const template = await
-
+    const template = await findTemplate({
+      catalogApi: catalogClient,
+      entityRef: { kind, namespace, name },
+      token: getBearerToken(req.headers.authorization)
     });
     let taskSpec;
     if (isSupportedTemplate(template)) {
+      if (template.apiVersion === "backstage.io/v1beta2") {
+        logger.warn(`Scaffolding ${catalogModel.stringifyEntityRef(template)} with deprecated apiVersion ${template.apiVersion}. Please migrate the template to backstage.io/v1beta3. https://backstage.io/docs/features/software-templates/migrating-from-v1beta2-to-v1beta3`);
+      }
       for (const parameters of [(_a = template.spec.parameters) != null ? _a : []].flat()) {
         const result2 = jsonschema.validate(values, parameters);
         if (!result2.valid) {
@@ -2949,10 +2982,8 @@ async function createRouter(options) {
         }
       }
       const baseUrl = getEntityBaseUrl(template);
-
-        apiVersion: template.apiVersion,
+      const baseTaskSpec = {
         baseUrl,
-        values,
         steps: template.spec.steps.map((step, index) => {
           var _a2, _b2;
           return {
@@ -2962,29 +2993,34 @@ async function createRouter(options) {
           };
         }),
         output: (_b = template.spec.output) != null ? _b : {},
-        metadata: { name: (_c = template.metadata) == null ? void 0 : _c.name }
+        metadata: { name: (_c = template.metadata) == null ? void 0 : _c.name },
+        templateInfo: {
+          entityRef: catalogModel.stringifyEntityRef({
+            kind,
+            namespace,
+            name: (_d = template.metadata) == null ? void 0 : _d.name
+          }),
+          baseUrl
+        }
+      };
+      taskSpec = template.apiVersion === "backstage.io/v1beta2" ? {
+        ...baseTaskSpec,
+        apiVersion: template.apiVersion,
+        values
       } : {
+        ...baseTaskSpec,
         apiVersion: template.apiVersion,
-
-        parameters: values,
-        steps: template.spec.steps.map((step, index) => {
-          var _a2, _b2;
-          return {
-            ...step,
-            id: (_a2 = step.id) != null ? _a2 : `step-${index + 1}`,
-            name: (_b2 = step.name) != null ? _b2 : step.action
-          };
-        }),
-        output: (_d = template.spec.output) != null ? _d : {},
-        metadata: { name: (_e = template.metadata) == null ? void 0 : _e.name }
+        parameters: values
       };
     } else {
       throw new errors.InputError(`Unsupported apiVersion field in schema entity, ${template.apiVersion}`);
     }
-    const result = await taskBroker.dispatch(
-
-
-
+    const result = await taskBroker.dispatch({
+      spec: taskSpec,
+      secrets: {
+        ...req.body.secrets,
+        backstageToken: token
+      }
     });
     res.status(201).json({ id: result.taskId });
   }).get("/v2/tasks/:taskId", async (req, res) => {
@@ -3004,48 +3040,49 @@ async function createRouter(options) {
       "Cache-Control": "no-cache",
       "Content-Type": "text/event-stream"
     });
-    const
-
-      if (error) {
+    const subscription = taskBroker.event$({ taskId, after }).subscribe({
+      error: (error) => {
         logger.error(`Received error from event stream when observing taskId '${taskId}', ${error}`);
-      }
-
-
-
+      },
+      next: ({ events }) => {
+        var _a;
+        let shouldUnsubscribe = false;
+        for (const event of events) {
+          res.write(`event: ${event.type}
 data: ${JSON.stringify(event)}
 
 `);
-
-
+          if (event.type === "completion") {
+            shouldUnsubscribe = true;
+          }
         }
+        (_a = res.flush) == null ? void 0 : _a.call(res);
+        if (shouldUnsubscribe)
+          subscription.unsubscribe();
       }
-      (_a = res.flush) == null ? void 0 : _a.call(res);
-      if (shouldUnsubscribe)
-        unsubscribe();
     });
     req.on("close", () => {
-      unsubscribe();
+      subscription.unsubscribe();
       logger.debug(`Event stream observing taskId '${taskId}' closed`);
     });
   }).get("/v2/tasks/:taskId/events", async (req, res) => {
     const { taskId } = req.params;
     const after = Number(req.query.after) || void 0;
-    let unsubscribe = () => {
-    };
     const timeout = setTimeout(() => {
-      unsubscribe();
       res.json([]);
     }, 3e4);
-
-
-
-
-
+    const subscription = taskBroker.event$({ taskId, after }).subscribe({
+      error: (error) => {
+        logger.error(`Received error from event stream when observing taskId '${taskId}', ${error}`);
+      },
+      next: ({ events }) => {
+        clearTimeout(timeout);
+        subscription.unsubscribe();
+        res.json(events);
       }
-
-    }));
+    });
     req.on("close", () => {
-      unsubscribe();
+      subscription.unsubscribe();
       clearTimeout(timeout);
     });
   });
@@ -3061,13 +3098,14 @@ function getBearerToken(header) {
 
 class ScaffolderEntitiesProcessor {
   constructor() {
-    this.validators = [
-
-
+    this.validators = [pluginScaffolderCommon.templateEntityV1beta3Validator];
+  }
+  getProcessorName() {
+    return "ScaffolderEntitiesProcessor";
   }
   async validateEntityKind(entity) {
     for (const validator of this.validators) {
-      if (validator(entity)) {
+      if (await validator.check(entity)) {
         return true;
       }
     }
@@ -3111,7 +3149,6 @@ Object.defineProperty(exports, 'createFetchCookiecutterAction', {
   enumerable: true,
   get: function () { return pluginScaffolderBackendModuleCookiecutter.createFetchCookiecutterAction; }
 });
-exports.CatalogEntityClient = CatalogEntityClient;
 exports.DatabaseTaskStore = DatabaseTaskStore;
 exports.OctokitProvider = OctokitProvider;
 exports.ScaffolderEntitiesProcessor = ScaffolderEntitiesProcessor;
@@ -3137,6 +3174,7 @@ exports.createPublishGitlabAction = createPublishGitlabAction;
 exports.createPublishGitlabMergeRequestAction = createPublishGitlabMergeRequestAction;
 exports.createRouter = createRouter;
 exports.createTemplateAction = createTemplateAction;
+exports.executeShellCommand = executeShellCommand;
 exports.fetchContents = fetchContents;
 exports.runCommand = runCommand;
 //# sourceMappingURL=index.cjs.js.map