@backstage/plugin-scaffolder-backend 0.15.14 → 0.15.18
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +43 -0
- package/config.d.ts +12 -12
- package/dist/index.cjs.js +273 -155
- package/dist/index.cjs.js.map +1 -1
- package/dist/index.d.ts +17 -4
- package/package.json +12 -12
package/dist/index.cjs.js
CHANGED
```diff
@@ -15,7 +15,7 @@ var pluginScaffolderBackendModuleCookiecutter = require('@backstage/plugin-scaff
 var child_process = require('child_process');
 var stream = require('stream');
 var azureDevopsNodeApi = require('azure-devops-node-api');
-var fetch = require('
+var fetch = require('node-fetch');
 var integration = require('@backstage/integration');
 var rest = require('@octokit/rest');
 var lodash = require('lodash');
@@ -45,14 +45,12 @@ function _interopNamespace(e) {
 var d = Object.getOwnPropertyDescriptor(e, k);
 Object.defineProperty(n, k, d.get ? d : {
 enumerable: true,
-get: function () {
-return e[k];
-}
+get: function () { return e[k]; }
 });
 }
 });
 }
-n[
+n["default"] = e;
 return Object.freeze(n);
 }
 
@@ -73,7 +71,7 @@ const createTemplateAction = (templateAction) => {
 };
 
 function createCatalogRegisterAction(options) {
-const {catalogClient, integrations} = options;
+const { catalogClient, integrations } = options;
 return createTemplateAction({
 id: "catalog:register",
 description: "Registers entities from a catalog descriptor file in the workspace into the software catalog.",
@@ -122,12 +120,12 @@ function createCatalogRegisterAction(options) {
 },
 async handler(ctx) {
 var _a;
-const {input} = ctx;
+const { input } = ctx;
 let catalogInfoUrl;
 if ("catalogInfoUrl" in input) {
 catalogInfoUrl = input.catalogInfoUrl;
 } else {
-const {repoContentsUrl, catalogInfoPath = "/catalog-info.yaml"} = input;
+const { repoContentsUrl, catalogInfoPath = "/catalog-info.yaml" } = input;
 const integration = integrations.byUrl(repoContentsUrl);
 if (!integration) {
 throw new errors.InputError(`No integration found for host ${repoContentsUrl}`);
@@ -141,15 +139,15 @@ function createCatalogRegisterAction(options) {
 await catalogClient.addLocation({
 type: "url",
 target: catalogInfoUrl
-}, ctx.token ? {token: ctx.token} : {});
+}, ctx.token ? { token: ctx.token } : {});
 try {
 const result = await catalogClient.addLocation({
 dryRun: true,
 type: "url",
 target: catalogInfoUrl
-}, ctx.token ? {token: ctx.token} : {});
+}, ctx.token ? { token: ctx.token } : {});
 if (result.entities.length > 0) {
-const {entities} = result;
+const { entities } = result;
 const entity = (_a = entities.find((e) => !e.metadata.name.startsWith("generated-"))) != null ? _a : entities[0];
 ctx.output("entityRef", catalogModel.stringifyEntityRef(entity));
 }
@@ -181,8 +179,8 @@ function createCatalogWriteAction() {
 },
 async handler(ctx) {
 ctx.logStream.write(`Writing catalog-info.yaml`);
-const {entity} = ctx.input;
-await fs__default[
+const { entity } = ctx.input;
+await fs__default["default"].writeFile(backendCommon.resolveSafeChildPath(ctx.workspacePath, "catalog-info.yaml"), yaml__namespace.stringify(entity));
 }
 });
 }
@@ -250,8 +248,8 @@ async function fetchContents({
 }
 if (!fetchUrlIsAbsolute && (baseUrl == null ? void 0 : baseUrl.startsWith("file://"))) {
 const basePath = baseUrl.slice("file://".length);
-const srcDir = backendCommon.resolveSafeChildPath(path__default[
-await fs__default[
+const srcDir = backendCommon.resolveSafeChildPath(path__default["default"].dirname(basePath), fetchUrl);
+await fs__default["default"].copy(srcDir, outputPath);
 } else {
 let readUrl;
 if (fetchUrlIsAbsolute) {
@@ -269,13 +267,13 @@ async function fetchContents({
 throw new errors.InputError(`Failed to fetch, template location could not be determined and the fetch URL is relative, ${fetchUrl}`);
 }
 const res = await reader.readTree(readUrl);
-await fs__default[
-await res.dir({targetDir: outputPath});
+await fs__default["default"].ensureDir(outputPath);
+await res.dir({ targetDir: outputPath });
 }
 }
 
 function createFetchPlainAction(options) {
-const {reader, integrations} = options;
+const { reader, integrations } = options;
 return createTemplateAction({
 id: "fetch:plain",
 description: "Downloads content and places it in the workspace, or optionally in a subdirectory specified by the 'targetPath' input option.",
@@ -382,15 +380,15 @@ const { render, renderCompat } = (() => {
 `;
 class SecureTemplater {
 static async loadRenderer(options = {}) {
-const {parseRepoUrl, cookiecutterCompat} = options;
+const { parseRepoUrl, cookiecutterCompat } = options;
 let sandbox = void 0;
 if (parseRepoUrl) {
 sandbox = {
 parseRepoUrl: (url) => JSON.stringify(parseRepoUrl(url))
 };
 }
-const vm = new vm2.VM({sandbox});
-const nunjucksSource = await fs__default[
+const vm = new vm2.VM({ sandbox });
+const nunjucksSource = await fs__default["default"].readFile(backendCommon.resolvePackagePath("@backstage/plugin-scaffolder-backend", "assets/nunjucks.js.txt"), "utf-8");
 vm.run(mkScript(nunjucksSource));
 const render = (template, values) => {
 if (!vm) {
@@ -408,7 +406,7 @@ class SecureTemplater {
 }
 
 function createFetchTemplateAction(options) {
-const {reader, integrations} = options;
+const { reader, integrations } = options;
 return createTemplateAction({
 id: "fetch:template",
 description: "Downloads a skeleton, templates variables into file and directory names and content, and places the result in the workspace, or optionally in a subdirectory specified by the 'targetPath' input option.",
@@ -481,19 +479,19 @@ function createFetchTemplateAction(options) {
 outputPath: templateDir
 });
 ctx.logger.info("Listing files and directories in template");
-const allEntriesInTemplate = await globby__default[
+const allEntriesInTemplate = await globby__default["default"](`**/*`, {
 cwd: templateDir,
 dot: true,
 onlyFiles: false,
 markDirectories: true
 });
-const nonTemplatedEntries = new Set((await Promise.all((ctx.input.copyWithoutRender || []).map((pattern) => globby__default[
+const nonTemplatedEntries = new Set((await Promise.all((ctx.input.copyWithoutRender || []).map((pattern) => globby__default["default"](pattern, {
 cwd: templateDir,
 dot: true,
 onlyFiles: false,
 markDirectories: true
 })))).flat());
-const {cookiecutterCompat, values} = ctx.input;
+const { cookiecutterCompat, values } = ctx.input;
 const context = {
 [cookiecutterCompat ? "cookiecutter" : "values"]: values
 };
@@ -526,17 +524,17 @@ function createFetchTemplateAction(options) {
 }
 if (location.endsWith("/")) {
 ctx.logger.info(`Writing directory ${location} to template output path.`);
-await fs__default[
+await fs__default["default"].ensureDir(outputPath);
 } else {
 const inputFilePath = backendCommon.resolveSafeChildPath(templateDir, location);
 if (await isbinaryfile.isBinaryFile(inputFilePath)) {
 ctx.logger.info(`Copying binary file ${location} to template output path.`);
-await fs__default[
+await fs__default["default"].copy(inputFilePath, outputPath);
 } else {
-const statsObj = await fs__default[
+const statsObj = await fs__default["default"].stat(inputFilePath);
 ctx.logger.info(`Writing file ${location} to template output path with mode ${statsObj.mode}.`);
-const inputFileContents = await fs__default[
-await fs__default[
+const inputFileContents = await fs__default["default"].readFile(inputFilePath, "utf-8");
+await fs__default["default"].outputFile(outputPath, renderContents ? renderTemplate(inputFileContents, context) : inputFileContents, { mode: statsObj.mode });
 }
 }
 }
@@ -573,7 +571,7 @@ const createFilesystemDeleteAction = () => {
 for (const file of ctx.input.files) {
 const filepath = backendCommon.resolveSafeChildPath(ctx.workspacePath, file);
 try {
-await fs__default[
+await fs__default["default"].remove(filepath);
 ctx.logger.info(`File ${filepath} deleted successfully`);
 } catch (err) {
 ctx.logger.error(`Failed to delete file ${filepath}:`, err);
@@ -631,7 +629,7 @@ const createFilesystemRenameAction = () => {
 const sourceFilepath = backendCommon.resolveSafeChildPath(ctx.workspacePath, file.from);
 const destFilepath = backendCommon.resolveSafeChildPath(ctx.workspacePath, file.to);
 try {
-await fs__default[
+await fs__default["default"].move(sourceFilepath, destFilepath, {
 overwrite: (_b = file.overwrite) != null ? _b : false
 });
 ctx.logger.info(`File ${sourceFilepath} renamed to ${destFilepath} successfully`);
@@ -647,10 +645,11 @@ const createFilesystemRenameAction = () => {
 const runCommand = async ({
 command,
 args,
-logStream = new stream.PassThrough()
+logStream = new stream.PassThrough(),
+options
 }) => {
 await new Promise((resolve, reject) => {
-const process = child_process.spawn(command, args);
+const process = child_process.spawn(command, args, options);
 process.stdout.on("data", (stream) => {
 logStream.write(stream);
 });
```
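The `runCommand` hunk above is one of the few behavioral changes in this file: the helper now takes an extra `options` field and forwards it as the third argument to `child_process.spawn`, so callers can set spawn options such as `cwd` or `env`. A minimal sketch of the resulting call shape (the command, arguments, and workspace path are placeholder values, not taken from this package):

```ts
import { spawn } from 'child_process';
import { PassThrough } from 'stream';

// Collects child output the way runCommand's logStream does.
const logStream = new PassThrough();
logStream.pipe(process.stdout);

// The new pass-through means options like cwd reach spawn unchanged.
const child = spawn('git', ['status'], { cwd: '/tmp/scaffolder-workspace' });
child.stdout.on('data', (chunk) => logStream.write(chunk));
child.stderr.on('data', (chunk) => logStream.write(chunk));
child.on('close', (code) => logStream.end(`child exited with code ${code}\n`));
```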
```diff
@@ -687,7 +686,7 @@ async function initRepoAndPush({
 dir,
 defaultBranch
 });
-await git.add({dir, filepath: "."});
+await git.add({ dir, filepath: "." });
 const authorInfo = {
 name: (_a = gitAuthorInfo == null ? void 0 : gitAuthorInfo.name) != null ? _a : "Scaffolder",
 email: (_b = gitAuthorInfo == null ? void 0 : gitAuthorInfo.email) != null ? _b : "scaffolder@backstage.io"
@@ -725,7 +724,7 @@ const enableBranchProtectionOnDefaultRepoBranch = async ({
 owner,
 repo: repoName,
 branch: defaultBranch,
-required_status_checks: {strict: true, contexts: []},
+required_status_checks: { strict: true, contexts: [] },
 restrictions: null,
 enforce_admins: true,
 required_pull_request_reviews: {
@@ -799,7 +798,7 @@ const parseRepoUrl = (repoUrl, integrations) => {
 if (!repo) {
 throw new errors.InputError(`Invalid repo URL passed to publisher: ${repoUrl}, missing repo`);
 }
-return {host, owner, repo, organization, workspace, project};
+return { host, owner, repo, organization, workspace, project };
 };
 const isExecutable = (fileMode) => {
 const executeBitMask = 73;
@@ -808,7 +807,7 @@ const isExecutable = (fileMode) => {
 };
 
 function createPublishAzureAction(options) {
-const {integrations, config} = options;
+const { integrations, config } = options;
 return createTemplateAction({
 id: "publish:azure",
 description: "Initializes a git repository of the content in the workspace, and publishes it to Azure.",
@@ -851,8 +850,8 @@ function createPublishAzureAction(options) {
 }
 },
 async handler(ctx) {
-const {repoUrl, defaultBranch = "master"} = ctx.input;
-const {owner, repo, host, organization} = parseRepoUrl(repoUrl, integrations);
+const { repoUrl, defaultBranch = "master" } = ctx.input;
+const { owner, repo, host, organization } = parseRepoUrl(repoUrl, integrations);
 if (!organization) {
 throw new errors.InputError(`Invalid URL provider was included in the repo URL to create ${ctx.input.repoUrl}, missing organization`);
 }
@@ -866,7 +865,7 @@ function createPublishAzureAction(options) {
 const authHandler = azureDevopsNodeApi.getPersonalAccessTokenHandler(integrationConfig.config.token);
 const webApi = new azureDevopsNodeApi.WebApi(`https://${host}/${organization}`, authHandler);
 const client = await webApi.getGitApi();
-const createOptions = {name: repo};
+const createOptions = { name: repo };
 const returnedRepo = await client.createRepository(createOptions, owner);
 if (!returnedRepo) {
 throw new errors.InputError(`Unable to create the repository with Organization ${organization}, Project ${owner} and Repo ${repo}.
@@ -914,7 +913,7 @@ const createBitbucketCloudRepository = async (opts) => {
 scm: "git",
 description,
 is_private: repoVisibility === "private",
-project: {key: project}
+project: { key: project }
 }),
 headers: {
 Authorization: authorization,
@@ -923,7 +922,7 @@ const createBitbucketCloudRepository = async (opts) => {
 };
 let response;
 try {
-response = await fetch__default[
+response = await fetch__default["default"](`https://api.bitbucket.org/2.0/repositories/${workspace}/${repo}`, options);
 } catch (e) {
 throw new Error(`Unable to create repository, ${e}`);
 }
@@ -938,7 +937,7 @@ const createBitbucketCloudRepository = async (opts) => {
 }
 }
 const repoContentsUrl = `${r.links.html.href}/src/master`;
-return {remoteUrl, repoContentsUrl};
+return { remoteUrl, repoContentsUrl };
 };
 const createBitbucketServerRepository = async (opts) => {
 const {
@@ -965,7 +964,7 @@ const createBitbucketServerRepository = async (opts) => {
 };
 try {
 const baseUrl = apiBaseUrl ? apiBaseUrl : `https://${host}/rest/api/1.0`;
-response = await fetch__default[
+response = await fetch__default["default"](`${baseUrl}/projects/${project}/repos`, options);
 } catch (e) {
 throw new Error(`Unable to create repository, ${e}`);
 }
@@ -980,7 +979,7 @@ const createBitbucketServerRepository = async (opts) => {
 }
 }
 const repoContentsUrl = `${r.links.self[0].href}`;
-return {remoteUrl, repoContentsUrl};
+return { remoteUrl, repoContentsUrl };
 };
 const getAuthorizationHeader = (config) => {
 if (config.username && config.appPassword) {
@@ -993,19 +992,19 @@ const getAuthorizationHeader = (config) => {
 throw new Error(`Authorization has not been provided for Bitbucket. Please add either username + appPassword or token to the Integrations config`);
 };
 const performEnableLFS = async (opts) => {
-const {authorization, host, project, repo} = opts;
+const { authorization, host, project, repo } = opts;
 const options = {
 method: "PUT",
 headers: {
 Authorization: authorization
 }
 };
-const {ok, status, statusText} = await fetch__default[
+const { ok, status, statusText } = await fetch__default["default"](`https://${host}/rest/git-lfs/admin/projects/${project}/repos/${repo}/enabled`, options);
 if (!ok)
 throw new Error(`Failed to enable LFS in the repository, ${status}: ${statusText}`);
 };
 function createPublishBitbucketAction(options) {
-const {integrations, config} = options;
+const { integrations, config } = options;
 return createTemplateAction({
 id: "publish:bitbucket",
 description: "Initializes a git repository of the content in the workspace, and publishes it to Bitbucket.",
@@ -1065,7 +1064,7 @@ function createPublishBitbucketAction(options) {
 repoVisibility = "private",
 enableLFS = false
 } = ctx.input;
-const {workspace, project, repo, host} = parseRepoUrl(repoUrl, integrations);
+const { workspace, project, repo, host } = parseRepoUrl(repoUrl, integrations);
 if (host === "bitbucket.org") {
 if (!workspace) {
 throw new errors.InputError(`Invalid URL provider was included in the repo URL to create ${ctx.input.repoUrl}, missing workspace`);
@@ -1081,7 +1080,7 @@ function createPublishBitbucketAction(options) {
 const authorization = getAuthorizationHeader(integrationConfig.config);
 const apiBaseUrl = integrationConfig.config.apiBaseUrl;
 const createMethod = host === "bitbucket.org" ? createBitbucketCloudRepository : createBitbucketServerRepository;
-const {remoteUrl, repoContentsUrl} = await createMethod({
+const { remoteUrl, repoContentsUrl } = await createMethod({
 authorization,
 host,
 workspace: workspace || "",
@@ -1108,7 +1107,7 @@ function createPublishBitbucketAction(options) {
 gitAuthorInfo
 });
 if (enableLFS && host !== "bitbucket.org") {
-await performEnableLFS({authorization, host, project, repo});
+await performEnableLFS({ authorization, host, project, repo });
 }
 ctx.output("remoteUrl", remoteUrl);
 ctx.output("repoContentsUrl", repoContentsUrl);
@@ -1133,13 +1132,13 @@ function createPublishFileAction() {
 }
 },
 async handler(ctx) {
-const {path: path$1} = ctx.input;
-const exists = await fs__default[
+const { path: path$1 } = ctx.input;
+const exists = await fs__default["default"].pathExists(path$1);
 if (exists) {
 throw new errors.InputError("Output path already exists");
 }
-await fs__default[
-await fs__default[
+await fs__default["default"].ensureDir(path.dirname(path$1));
+await fs__default["default"].copy(ctx.workspacePath, path$1);
 }
 });
 }
@@ -1154,7 +1153,7 @@ class OctokitProvider {
 }
 async getOctokit(repoUrl) {
 var _a;
-const {owner, repo, host} = parseRepoUrl(repoUrl, this.integrations);
+const { owner, repo, host } = parseRepoUrl(repoUrl, this.integrations);
 if (!owner) {
 throw new errors.InputError(`No owner provided for repo ${repoUrl}`);
 }
@@ -1166,7 +1165,7 @@ class OctokitProvider {
 if (!credentialsProvider) {
 throw new errors.InputError(`No matching credentials for host ${host}, please check your integrations config`);
 }
-const {token} = await credentialsProvider.getCredentials({
+const { token } = await credentialsProvider.getCredentials({
 url: `https://${host}/${encodeURIComponent(owner)}/${encodeURIComponent(repo)}`
 });
 if (!token) {
@@ -1177,12 +1176,12 @@ class OctokitProvider {
 baseUrl: integrationConfig.apiBaseUrl,
 previews: ["nebula-preview"]
 });
-return {client, token, owner, repo};
+return { client, token, owner, repo };
 }
 }
 
 function createPublishGithubAction(options) {
-const {integrations, config} = options;
+const { integrations, config } = options;
 const octokitProvider = new OctokitProvider(integrations);
 return createTemplateAction({
 id: "publish:github",
@@ -1278,7 +1277,7 @@ function createPublishGithubAction(options) {
 collaborators,
 topics
 } = ctx.input;
-const {client, token, owner, repo} = await octokitProvider.getOctokit(repoUrl);
+const { client, token, owner, repo } = await octokitProvider.getOctokit(repoUrl);
 const user = await client.users.getByUsername({
 username: owner
 });
@@ -1293,7 +1292,7 @@ function createPublishGithubAction(options) {
 private: repoVisibility === "private",
 description
 });
-const {data: newRepo} = await repoCreationPromise;
+const { data: newRepo } = await repoCreationPromise;
 if (access == null ? void 0 : access.startsWith(`${owner}/`)) {
 const [, team] = access.split("/");
 await client.teams.addOrUpdateRepoPermissionsInOrg({
@@ -1396,7 +1395,7 @@ const defaultClientFactory = async ({
 if (!credentialsProvider) {
 throw new errors.InputError(`No matching credentials for host ${host}, please check your integrations config`);
 }
-const {token} = await credentialsProvider.getCredentials({
+const { token } = await credentialsProvider.getCredentials({
 url: `https://${host}/${encodeURIComponent(owner)}/${encodeURIComponent(repo)}`
 });
 if (!token) {
@@ -1472,21 +1471,21 @@ const createPublishGithubPullRequestAction = ({
 targetPath,
 sourcePath
 } = ctx.input;
-const {owner, repo, host} = parseRepoUrl(repoUrl, integrations);
+const { owner, repo, host } = parseRepoUrl(repoUrl, integrations);
 if (!owner) {
 throw new errors.InputError(`No owner provided for host: ${host}, and repo ${repo}`);
 }
-const client = await clientFactory({integrations, host, owner, repo});
+const client = await clientFactory({ integrations, host, owner, repo });
 const fileRoot = sourcePath ? backendCommon.resolveSafeChildPath(ctx.workspacePath, sourcePath) : ctx.workspacePath;
-const localFilePaths = await globby__default[
+const localFilePaths = await globby__default["default"](["./**", "./**/.*", "!.git"], {
 cwd: fileRoot,
 gitignore: true,
 dot: true
 });
 const fileContents = await Promise.all(localFilePaths.map((filePath) => {
 const absPath = backendCommon.resolveSafeChildPath(fileRoot, filePath);
-const base64EncodedContent = fs__default[
-const fileStat = fs__default[
+const base64EncodedContent = fs__default["default"].readFileSync(absPath).toString("base64");
+const fileStat = fs__default["default"].statSync(absPath);
 const githubTreeItemMode = isExecutable(fileStat.mode) ? "100755" : "100644";
 const encoding = "base64";
 return {
@@ -1525,7 +1524,7 @@ const createPublishGithubPullRequestAction = ({
 };
 
 function createPublishGitlabAction(options) {
-const {integrations, config} = options;
+const { integrations, config } = options;
 return createTemplateAction({
 id: "publish:gitlab",
 description: "Initializes a git repository of the content in the workspace, and publishes it to GitLab.",
@@ -1574,7 +1573,7 @@ function createPublishGitlabAction(options) {
 repoVisibility = "private",
 defaultBranch = "master"
 } = ctx.input;
-const {owner, repo, host} = parseRepoUrl(repoUrl, integrations);
+const { owner, repo, host } = parseRepoUrl(repoUrl, integrations);
 if (!owner) {
 throw new errors.InputError(`No owner provided for host: ${host}, and repo ${repo}`);
 }
@@ -1589,12 +1588,12 @@ function createPublishGitlabAction(options) {
 host: integrationConfig.config.baseUrl,
 token: integrationConfig.config.token
 });
-let {id: targetNamespace} = await client.Namespaces.show(owner);
+let { id: targetNamespace } = await client.Namespaces.show(owner);
 if (!targetNamespace) {
-const {id} = await client.Users.current();
+const { id } = await client.Users.current();
 targetNamespace = id;
 }
-const {http_url_to_repo} = await client.Projects.create({
+const { http_url_to_repo } = await client.Projects.create({
 namespace_id: targetNamespace,
 name: repo,
 visibility: repoVisibility
@@ -1623,8 +1622,122 @@ function createPublishGitlabAction(options) {
 });
 }
 
+const createPublishGitlabMergeRequestAction = (options) => {
+const { integrations } = options;
+return createTemplateAction({
+id: "publish:gitlab:merge-request",
+schema: {
+input: {
+required: ["projectid", "repoUrl", "targetPath", "branchName"],
+type: "object",
+properties: {
+repoUrl: {
+type: "string",
+title: "Repository Location",
+description: `Accepts the format 'gitlab.com/group_name/project_name' where 'project_name' is the repository name and 'group_name' is a group or username`
+},
+projectid: {
+type: "string",
+title: "projectid",
+description: "Project ID/Name(slug) of the Gitlab Project"
+},
+title: {
+type: "string",
+title: "Merge Request Name",
+description: "The name for the merge request"
+},
+description: {
+type: "string",
+title: "Merge Request Description",
+description: "The description of the merge request"
+},
+branchName: {
+type: "string",
+title: "Destination Branch name",
+description: "The description of the merge request"
+},
+targetPath: {
+type: "string",
+title: "Repository Subdirectory",
+description: "Subdirectory of repository to apply changes to"
+}
+}
+},
+output: {
+type: "object",
+properties: {
+projectid: {
+title: "Gitlab Project id/Name(slug)",
+type: "string"
+},
+mergeRequestURL: {
+title: "MergeRequest(MR) URL",
+type: "string",
+description: "Link to the merge request in GitLab"
+}
+}
+}
+},
+async handler(ctx) {
+const repoUrl = ctx.input.repoUrl;
+const { host } = parseRepoUrl(repoUrl, integrations);
+const integrationConfig = integrations.gitlab.byHost(host);
+const actions = [];
+const destinationBranch = ctx.input.branchName;
+if (!integrationConfig) {
+throw new errors.InputError(`No matching integration configuration for host ${host}, please check your integrations config`);
+}
+if (!integrationConfig.config.token) {
+throw new errors.InputError(`No token available for host ${host}`);
+}
+const api = new node.Gitlab({
+host: integrationConfig.config.baseUrl,
+token: integrationConfig.config.token
+});
+const fileRoot = ctx.workspacePath;
+const localFilePaths = await globby__default["default"]([`${ctx.input.targetPath}/**`], {
+cwd: fileRoot,
+gitignore: true,
+dot: true
+});
+const fileContents = await Promise.all(localFilePaths.map((p) => fs.readFile(backendCommon.resolveSafeChildPath(fileRoot, p))));
+const repoFilePaths = localFilePaths.map((repoFilePath) => {
+return repoFilePath;
+});
+for (let i = 0; i < repoFilePaths.length; i++) {
+actions.push({
+action: "create",
+filePath: repoFilePaths[i],
+content: fileContents[i].toString()
+});
+}
+const projects = await api.Projects.show(ctx.input.projectid);
+const { default_branch: defaultBranch } = projects;
+try {
+await api.Branches.create(ctx.input.projectid, destinationBranch, String(defaultBranch));
+} catch (e) {
+throw new errors.InputError(`The branch creation failed ${e}`);
+}
+try {
+await api.Commits.create(ctx.input.projectid, destinationBranch, ctx.input.title, actions);
+} catch (e) {
+throw new errors.InputError(`Committing the changes to ${destinationBranch} failed ${e}`);
+}
+try {
+const mergeRequestUrl = await api.MergeRequests.create(ctx.input.projectid, destinationBranch, String(defaultBranch), ctx.input.title, { description: ctx.input.description }).then((mergeRequest) => {
+return mergeRequest.web_url;
+});
+ctx.output("projectid", ctx.input.projectid);
+ctx.output("mergeRequestUrl", mergeRequestUrl);
+} catch (e) {
+throw new errors.InputError(`Merge request creation failed${e}`);
+}
+}
+});
+};
+
 function createGithubActionsDispatchAction(options) {
-const {integrations} = options;
+const { integrations } = options;
 const octokitProvider = new OctokitProvider(integrations);
 return createTemplateAction({
 id: "github:actions:dispatch",
```
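The long insertion above is the headline change in this range: a new `publish:gitlab:merge-request` action that globs the files under `targetPath` in the workspace, creates a branch off the project's default branch, commits those files, and opens a merge request, emitting `projectid` and `mergeRequestUrl` as outputs. A sketch of constructing the action on its own, assuming a GitLab integration entry in config (the token is a placeholder); in practice `createBuiltinActions` registers it automatically:

```ts
import { ConfigReader } from '@backstage/config';
import { ScmIntegrations } from '@backstage/integration';
import { createPublishGitlabMergeRequestAction } from '@backstage/plugin-scaffolder-backend';

// Placeholder integration config; a real backend reads this from app-config.yaml.
const config = new ConfigReader({
  integrations: {
    gitlab: [{ host: 'gitlab.com', token: '<gitlab-token>' }],
  },
});
const integrations = ScmIntegrations.fromConfig(config);

// action.id === 'publish:gitlab:merge-request'; required inputs per the schema
// in the diff: projectid, repoUrl, targetPath, branchName.
const action = createPublishGitlabMergeRequestAction({ integrations });
```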
```diff
@@ -1653,9 +1766,9 @@ function createGithubActionsDispatchAction(options) {
 }
 },
 async handler(ctx) {
-const {repoUrl, workflowId, branchOrTagName} = ctx.input;
+const { repoUrl, workflowId, branchOrTagName } = ctx.input;
 ctx.logger.info(`Dispatching workflow ${workflowId} for repo ${repoUrl} on ${branchOrTagName}`);
-const {client, owner, repo} = await octokitProvider.getOctokit(repoUrl);
+const { client, owner, repo } = await octokitProvider.getOctokit(repoUrl);
 await client.rest.actions.createWorkflowDispatch({
 owner,
 repo,
@@ -1668,7 +1781,7 @@ function createGithubActionsDispatchAction(options) {
 }
 
 function createGithubWebhookAction(options) {
-const {integrations, defaultWebhookSecret} = options;
+const { integrations, defaultWebhookSecret } = options;
 const octokitProvider = new OctokitProvider(integrations);
 const eventNames = webhooks.emitterEventNames.filter((event) => !event.includes("."));
 return createTemplateAction({
@@ -1743,7 +1856,7 @@ function createGithubWebhookAction(options) {
 insecureSsl = false
 } = ctx.input;
 ctx.logger.info(`Creating webhook ${webhookUrl} for repo ${repoUrl}`);
-const {client, owner, repo} = await octokitProvider.getOctokit(repoUrl);
+const { client, owner, repo } = await octokitProvider.getOctokit(repoUrl);
 try {
 const insecure_ssl = insecureSsl ? "1" : "0";
 await client.repos.createWebhook({
@@ -1768,17 +1881,12 @@ function createGithubWebhookAction(options) {
 }
 
 const createBuiltinActions = (options) => {
-const {reader, integrations, containerRunner, catalogClient, config} = options;
-return [
+const { reader, integrations, containerRunner, catalogClient, config } = options;
+const actions = [
 createFetchPlainAction({
 reader,
 integrations
 }),
-pluginScaffolderBackendModuleCookiecutter.createFetchCookiecutterAction({
-reader,
-integrations,
-containerRunner
-}),
 createFetchTemplateAction({
 integrations,
 reader
@@ -1794,6 +1902,9 @@ const createBuiltinActions = (options) => {
 integrations,
 config
 }),
+createPublishGitlabMergeRequestAction({
+integrations
+}),
 createPublishBitbucketAction({
 integrations,
 config
@@ -1803,7 +1914,7 @@ const createBuiltinActions = (options) => {
 config
 }),
 createDebugLogAction(),
-createCatalogRegisterAction({catalogClient, integrations}),
+createCatalogRegisterAction({ catalogClient, integrations }),
 createCatalogWriteAction(),
 createFilesystemDeleteAction(),
 createFilesystemRenameAction(),
@@ -1814,11 +1925,19 @@ const createBuiltinActions = (options) => {
 integrations
 })
 ];
+if (containerRunner) {
+actions.push(pluginScaffolderBackendModuleCookiecutter.createFetchCookiecutterAction({
+reader,
+integrations,
+containerRunner
+}));
+}
+return actions;
 };
 
 class TemplateActionRegistry {
 constructor() {
-this.actions = new Map();
+this.actions = /* @__PURE__ */ new Map();
 }
 register(action) {
 if (this.actions.has(action.id)) {
```
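The `createBuiltinActions` hunks above also change how the cookiecutter action is wired up: instead of always registering `fetch:cookiecutter`, the list is built first and the cookiecutter action is pushed only when a `containerRunner` is supplied, which makes that dependency optional for installations that never run cookiecutter templates. A sketch of the two call shapes, assuming the updated signature marks `containerRunner` optional (the `deps` object stands in for the real reader, integrations, catalog client, config, and container runner):

```ts
import { createBuiltinActions } from '@backstage/plugin-scaffolder-backend';
import type { ContainerRunner, UrlReader } from '@backstage/backend-common';
import type { CatalogApi } from '@backstage/catalog-client';
import type { Config } from '@backstage/config';
import type { ScmIntegrations } from '@backstage/integration';

declare const deps: {
  reader: UrlReader;
  integrations: ScmIntegrations;
  catalogClient: CatalogApi;
  config: Config;
  containerRunner: ContainerRunner;
};

// Without a containerRunner the returned list simply omits fetch:cookiecutter.
const actions = createBuiltinActions({
  reader: deps.reader,
  integrations: deps.integrations,
  catalogClient: deps.catalogClient,
  config: deps.config,
});

// With one, fetch:cookiecutter is appended exactly as before.
const allActions = createBuiltinActions(deps);
```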
```diff
@@ -1850,7 +1969,7 @@ class DatabaseTaskStore {
 this.db = options.database;
 }
 async getTask(taskId) {
-const [result] = await this.db("tasks").where({id: taskId}).select();
+const [result] = await this.db("tasks").where({ id: taskId }).select();
 if (!result) {
 throw new errors.NotFoundError(`No task with id '${taskId}' found`);
 }
@@ -1877,7 +1996,7 @@ class DatabaseTaskStore {
 secrets: secrets ? JSON.stringify(secrets) : void 0,
 status: "open"
 });
-return {taskId};
+return { taskId };
 }
 async claimTask() {
 return this.db.transaction(async (tx) => {
@@ -1887,7 +2006,7 @@ class DatabaseTaskStore {
 if (!task) {
 return void 0;
 }
-const updateCount = await tx("tasks").where({id: task.id, status: "open"}).update({
+const updateCount = await tx("tasks").where({ id: task.id, status: "open" }).update({
 status: "processing",
 last_heartbeat_at: this.db.fn.now()
 });
@@ -1911,14 +2030,14 @@ class DatabaseTaskStore {
 });
 }
 async heartbeatTask(taskId) {
-const updateCount = await this.db("tasks").where({id: taskId, status: "processing"}).update({
+const updateCount = await this.db("tasks").where({ id: taskId, status: "processing" }).update({
 last_heartbeat_at: this.db.fn.now()
 });
 if (updateCount === 0) {
 throw new errors.ConflictError(`No running task with taskId ${taskId} found`);
 }
 }
-async listStaleTasks({timeoutS}) {
+async listStaleTasks({ timeoutS }) {
 const rawRows = await this.db("tasks").where("status", "processing").andWhere("last_heartbeat_at", "<=", this.db.client.config.client === "sqlite3" ? this.db.raw(`datetime('now', ?)`, [`-${timeoutS} seconds`]) : this.db.raw(`dateadd('second', ?, ?)`, [
 `-${timeoutS}`,
 this.db.fn.now()
@@ -1926,7 +2045,7 @@ class DatabaseTaskStore {
 const tasks = rawRows.map((row) => ({
 taskId: row.id
 }));
-return {tasks};
+return { tasks };
 }
 async completeTask({
 taskId,
@@ -1966,7 +2085,7 @@ class DatabaseTaskStore {
 });
 });
 }
-async emitLogEvent({taskId, body}) {
+async emitLogEvent({ taskId, body }) {
 const serliazedBody = JSON.stringify(body);
 await this.db("task_events").insert({
 task_id: taskId,
@@ -1993,13 +2112,13 @@ class DatabaseTaskStore {
 taskId,
 body,
 type: event.event_type,
-createdAt: typeof event.created_at === "string" ? luxon.DateTime.fromSQL(event.created_at, {zone: "UTC"}).toISO() : event.created_at
+createdAt: typeof event.created_at === "string" ? luxon.DateTime.fromSQL(event.created_at, { zone: "UTC" }).toISO() : event.created_at
 };
 } catch (error) {
 throw new Error(`Failed to parse event body from event taskId=${taskId} id=${event.id}, ${error}`);
 }
 });
-return {events};
+return { events };
 }
 }
 
@@ -2030,7 +2149,7 @@ class TaskManager {
 async emitLog(message, metadata) {
 await this.storage.emitLogEvent({
 taskId: this.state.taskId,
-body: {message, ...metadata}
+body: { message, ...metadata }
 });
 }
 async complete(result, metadata) {
@@ -2065,7 +2184,7 @@ function defer() {
 const promise = new Promise((_resolve) => {
 resolve = _resolve;
 });
-return {promise, resolve};
+return { promise, resolve };
 }
 class StorageTaskBroker {
 constructor(storage, logger) {
@@ -2097,7 +2216,7 @@ class StorageTaskBroker {
 return this.storage.getTask(taskId);
 }
 observe(options, callback) {
-const {taskId} = options;
+const { taskId } = options;
 let cancelled = false;
 const unsubscribe = () => {
 cancelled = true;
@@ -2105,24 +2224,24 @@ class StorageTaskBroker {
 (async () => {
 let after = options.after;
 while (!cancelled) {
-const result = await this.storage.listEvents({taskId, after});
-const {events} = result;
+const result = await this.storage.listEvents({ taskId, after });
+const { events } = result;
 if (events.length) {
 after = events[events.length - 1].id;
 try {
 callback(void 0, result);
 } catch (error) {
 errors.assertError(error);
-callback(error, {events: []});
+callback(error, { events: [] });
 }
 }
 await new Promise((resolve) => setTimeout(resolve, 1e3));
 }
 })();
-return {unsubscribe};
+return { unsubscribe };
 }
 async vacuumTasks(timeoutS) {
-const {tasks} = await this.storage.listStaleTasks(timeoutS);
+const { tasks } = await this.storage.listStaleTasks(timeoutS);
 await Promise.all(tasks.map(async (task) => {
 try {
 await this.storage.completeTask({
@@ -2159,7 +2278,7 @@ class HandlebarsWorkflowRunner {
 return JSON.stringify(parseRepoUrl(repoUrl, this.options.integrations));
 });
 this.handlebars.registerHelper("projectSlug", (repoUrl) => {
-const {owner, repo} = parseRepoUrl(repoUrl, this.options.integrations);
+const { owner, repo } = parseRepoUrl(repoUrl, this.options.integrations);
 return `${owner}/${repo}`;
 });
 this.handlebars.registerHelper("json", (obj) => JSON.stringify(obj));
@@ -2171,14 +2290,14 @@ class HandlebarsWorkflowRunner {
 if (!isValidTaskSpec$1(task.spec)) {
 throw new errors.InputError(`Task spec is not a valid v1beta2 task spec`);
 }
-const {actionRegistry} = this.options;
-const workspacePath = path__default[
+const { actionRegistry } = this.options;
+const workspacePath = path__default["default"].join(this.options.workingDirectory, await task.getWorkspaceName());
 try {
-await fs__default[
+await fs__default["default"].ensureDir(workspacePath);
 await task.emitLog(`Starting up task with ${task.spec.steps.length} steps`);
-const templateCtx = {parameters: task.spec.values, steps: {}};
+const templateCtx = { parameters: task.spec.values, steps: {} };
 for (const step of task.spec.steps) {
-const metadata = {stepId: step.id};
+const metadata = { stepId: step.id };
 try {
 const taskLogger = winston__namespace.createLogger({
 level: process.env.LOG_LEVEL || "info",
@@ -2192,7 +2311,7 @@ class HandlebarsWorkflowRunner {
 await task.emitLog(message, metadata);
 }
 });
-taskLogger.add(new winston__namespace.transports.Stream({stream: stream$1}));
+taskLogger.add(new winston__namespace.transports.Stream({ stream: stream$1 }));
 if (step.if !== void 0) {
 let skip = !step.if;
 if (typeof step.if === "string") {
@@ -2273,7 +2392,7 @@ class HandlebarsWorkflowRunner {
 token: (_b = task.secrets) == null ? void 0 : _b.token,
 workspacePath,
 async createTemporaryDirectory() {
-const tmpDir = await fs__default[
+const tmpDir = await fs__default["default"].mkdtemp(`${workspacePath}_step-${step.id}-`);
 tmpDirs.push(tmpDir);
 return tmpDir;
 },
@@ -2283,9 +2402,9 @@ class HandlebarsWorkflowRunner {
 metadata: task.spec.metadata
 });
 for (const tmpDir of tmpDirs) {
-await fs__default[
+await fs__default["default"].remove(tmpDir);
 }
-templateCtx.steps[step.id] = {output: stepOutputs};
+templateCtx.steps[step.id] = { output: stepOutputs };
 await task.emitLog(`Finished step ${step.name}`, {
 ...metadata,
 status: "completed"
@@ -2319,10 +2438,10 @@ class HandlebarsWorkflowRunner {
 }
 return value;
 });
-return {output};
+return { output };
 } finally {
 if (workspacePath) {
-await fs__default[
+await fs__default["default"].remove(workspacePath);
 }
 }
 }
@@ -2335,7 +2454,7 @@ const createStepLogger = ({
 task,
 step
 }) => {
-const metadata = {stepId: step.id};
+const metadata = { stepId: step.id };
 const taskLogger = winston__namespace.createLogger({
 level: process.env.LOG_LEVEL || "info",
 format: winston__namespace.format.combine(winston__namespace.format.colorize(), winston__namespace.format.timestamp(), winston__namespace.format.simple()),
@@ -2348,8 +2467,8 @@ const createStepLogger = ({
 await task.emitLog(message, metadata);
 }
 });
-taskLogger.add(new winston__namespace.transports.Stream({stream: streamLogger}));
-return {taskLogger, streamLogger};
+taskLogger.add(new winston__namespace.transports.Stream({ stream: streamLogger }));
+return { taskLogger, streamLogger };
 };
 class NunjucksWorkflowRunner {
 constructor(options) {
@@ -2357,7 +2476,7 @@ class NunjucksWorkflowRunner {
 }
 isSingleTemplateString(input) {
 var _a, _b;
-const {parser, nodes} = nunjucks__default[
+const { parser, nodes } = nunjucks__default["default"];
 const parsed = parser.parse(input, {}, {
 autoescape: false,
 tags: {
@@ -2400,15 +2519,15 @@ class NunjucksWorkflowRunner {
 if (!isValidTaskSpec(task.spec)) {
 throw new errors.InputError("Wrong template version executed with the workflow engine");
 }
-const workspacePath = path__default[
-const {integrations} = this.options;
+const workspacePath = path__default["default"].join(this.options.workingDirectory, await task.getWorkspaceName());
+const { integrations } = this.options;
 const renderTemplate = await SecureTemplater.loadRenderer({
 parseRepoUrl(url) {
 return parseRepoUrl(url, integrations);
 }
 });
 try {
-await fs__default[
+await fs__default["default"].ensureDir(workspacePath);
 await task.emitLog(`Starting up task with ${task.spec.steps.length} steps`);
 const context = {
 parameters: task.spec.parameters,
@@ -2419,7 +2538,7 @@ class NunjucksWorkflowRunner {
 if (step.if) {
 const ifResult = await this.render(step.if, context, renderTemplate);
 if (!isTruthy(ifResult)) {
-await task.emitLog(`Skipping step ${step.id} because it's if condition was false`, {stepId: step.id, status: "skipped"});
+await task.emitLog(`Skipping step ${step.id} because it's if condition was false`, { stepId: step.id, status: "skipped" });
 continue;
 }
 }
@@ -2428,7 +2547,7 @@ class NunjucksWorkflowRunner {
 status: "processing"
 });
 const action = this.options.actionRegistry.get(step.action);
-const {taskLogger, streamLogger} = createStepLogger({task, step});
+const { taskLogger, streamLogger } = createStepLogger({ task, step });
 const input = (_a = step.input && this.render(step.input, context, renderTemplate)) != null ? _a : {};
 if ((_b = action.schema) == null ? void 0 : _b.input) {
 const validateResult = jsonschema.validate(input, action.schema.input);
@@ -2449,7 +2568,7 @@ class NunjucksWorkflowRunner {
 logStream: streamLogger,
 workspacePath,
 createTemporaryDirectory: async () => {
-const tmpDir = await fs__default[
+const tmpDir = await fs__default["default"].mkdtemp(`${workspacePath}_step-${step.id}-`);
 tmpDirs.push(tmpDir);
 return tmpDir;
 },
@@ -2459,9 +2578,9 @@ class NunjucksWorkflowRunner {
 metadata: task.spec.metadata
 });
 for (const tmpDir of tmpDirs) {
-await fs__default[
+await fs__default["default"].remove(tmpDir);
 }
-context.steps[step.id] = {output: stepOutput};
+context.steps[step.id] = { output: stepOutput };
 await task.emitLog(`Finished step ${step.name}`, {
 stepId: step.id,
 status: "completed"
@@ -2475,10 +2594,10 @@ class NunjucksWorkflowRunner {
 }
 }
 const output = this.render(task.spec.output, context, renderTemplate);
-return {output};
+return { output };
 } finally {
 if (workspacePath) {
-await fs__default[
+await fs__default["default"].remove(workspacePath);
 }
 }
 }
@@ -2510,7 +2629,7 @@ class TaskWorker {
 });
 return new TaskWorker({
 taskBroker,
-runners: {legacyWorkflowRunner, workflowRunner}
+runners: { legacyWorkflowRunner, workflowRunner }
 });
 }
 start() {
@@ -2523,12 +2642,12 @@ class TaskWorker {
 }
 async runOneTask(task) {
 try {
-const {output} = task.spec.apiVersion === "scaffolder.backstage.io/v1beta3" ? await this.options.runners.workflowRunner.execute(task) : await this.options.runners.legacyWorkflowRunner.execute(task);
-await task.complete("completed", {output});
+const { output } = task.spec.apiVersion === "scaffolder.backstage.io/v1beta3" ? await this.options.runners.workflowRunner.execute(task) : await this.options.runners.legacyWorkflowRunner.execute(task);
+await task.complete("completed", { output });
 } catch (error) {
 errors.assertError(error);
 await task.complete("failed", {
-error: {name: error.name, message: error.message}
+error: { name: error.name, message: error.message }
 });
 }
 }
@@ -2539,7 +2658,7 @@ class CatalogEntityClient {
 this.catalogClient = catalogClient;
 }
 async findTemplate(templateName, options) {
-const {items: templates} = await this.catalogClient.getEntities({
+const { items: templates } = await this.catalogClient.getEntities({
 filter: {
 kind: "template",
 "metadata.name": templateName
@@ -2558,11 +2677,11 @@ class CatalogEntityClient {
 
 async function getWorkingDirectory(config, logger) {
 if (!config.has("backend.workingDirectory")) {
-return os__default[
+return os__default["default"].tmpdir();
 }
 const workingDirectory = config.getString("backend.workingDirectory");
 try {
-await fs__default[
+await fs__default["default"].access(workingDirectory, fs__default["default"].constants.F_OK | fs__default["default"].constants.W_OK);
 logger.info(`using working directory: ${workingDirectory}`);
 } catch (err) {
 errors.assertError(err);
@@ -2580,7 +2699,7 @@ function getEntityBaseUrl(entity) {
 if (!location) {
 return void 0;
 }
-const {type, target} = catalogModel.parseLocationReference(location);
+const { type, target } = catalogModel.parseLocationReference(location);
 if (type === "url") {
 return target;
 } else if (type === "file") {
@@ -2593,8 +2712,8 @@ function isSupportedTemplate(entity) {
 return entity.apiVersion === "backstage.io/v1beta2" || entity.apiVersion === "scaffolder.backstage.io/v1beta3";
 }
 async function createRouter(options) {
-const router = Router__default[
-router.use(express__default[
+const router = Router__default["default"]();
+router.use(express__default["default"].json());
 const {
 logger: parentLogger,
 config,
@@ -2605,7 +2724,7 @@ async function createRouter(options) {
 containerRunner,
 taskWorkers
 } = options;
-const logger = parentLogger.child({plugin: "scaffolder"});
+const logger = parentLogger.child({ plugin: "scaffolder" });
 const workingDirectory = await getWorkingDirectory(config, logger);
 const entityClient = new CatalogEntityClient(catalogClient);
 const integrations = integration.ScmIntegrations.fromConfig(config);
@@ -2641,7 +2760,7 @@ async function createRouter(options) {
 workers.forEach((worker) => worker.start());
 router.get("/v2/templates/:namespace/:kind/:name/parameter-schema", async (req, res) => {
 var _a, _b;
-const {namespace, kind, name} = req.params;
+const { namespace, kind, name } = req.params;
 if (namespace !== "default") {
 throw new errors.InputError(`Invalid namespace, only 'default' namespace is supported`);
 }
@@ -2688,7 +2807,7 @@ async function createRouter(options) {
 for (const parameters of [(_a = template.spec.parameters) != null ? _a : []].flat()) {
 const result2 = jsonschema.validate(values, parameters);
 if (!result2.valid) {
-res.status(400).json({errors: result2.errors});
+res.status(400).json({ errors: result2.errors });
 return;
 }
 }
@@ -2706,7 +2825,7 @@ async function createRouter(options) {
 };
 }),
 output: (_b = template.spec.output) != null ? _b : {},
-metadata: {name: (_c = template.metadata) == null ? void 0 : _c.name}
+metadata: { name: (_c = template.metadata) == null ? void 0 : _c.name }
 } : {
 apiVersion: template.apiVersion,
 baseUrl,
@@ -2720,7 +2839,7 @@ async function createRouter(options) {
 };
 }),
 output: (_d = template.spec.output) != null ? _d : {},
-metadata: {name: (_e = template.metadata) == null ? void 0 : _e.name}
+metadata: { name: (_e = template.metadata) == null ? void 0 : _e.name }
 };
 } else {
 throw new errors.InputError(`Unsupported apiVersion field in schema entity, ${template.apiVersion}`);
@@ -2728,9 +2847,9 @@ async function createRouter(options) {
 const result = await taskBroker.dispatch(taskSpec, {
 token
 });
-res.status(201).json({id: result.taskId});
+res.status(201).json({ id: result.taskId });
 }).get("/v2/tasks/:taskId", async (req, res) => {
-const {taskId} = req.params;
+const { taskId } = req.params;
 const task = await taskBroker.get(taskId);
 if (!task) {
 throw new errors.NotFoundError(`Task with id ${taskId} does not exist`);
@@ -2738,7 +2857,7 @@ async function createRouter(options) {
 delete task.secrets;
 res.status(200).json(task);
 }).get("/v2/tasks/:taskId/eventstream", async (req, res) => {
-const {taskId} = req.params;
+const { taskId } = req.params;
 const after = req.query.after !== void 0 ? Number(req.query.after) : void 0;
 logger.debug(`Event stream observing taskId '${taskId}' opened`);
 res.writeHead(200, {
@@ -2746,7 +2865,7 @@ async function createRouter(options) {
 "Cache-Control": "no-cache",
 "Content-Type": "text/event-stream"
 });
-const {unsubscribe} = taskBroker.observe({taskId, after}, (error, {events}) => {
+const { unsubscribe } = taskBroker.observe({ taskId, after }, (error, { events }) => {
 var _a;
 if (error) {
 logger.error(`Received error from event stream when observing taskId '${taskId}', ${error}`);
@@ -2770,7 +2889,7 @@ data: ${JSON.stringify(event)}
 logger.debug(`Event stream observing taskId '${taskId}' closed`);
 });
 }).get("/v2/tasks/:taskId/events", async (req, res) => {
-const {taskId} = req.params;
+const { taskId } = req.params;
 const after = Number(req.query.after) || void 0;
 let unsubscribe = () => {
 };
@@ -2778,7 +2897,7 @@ data: ${JSON.stringify(event)}
 unsubscribe();
 res.json([]);
 }, 3e4);
-({unsubscribe} = taskBroker.observe({taskId, after}, (error, {events}) => {
+({ unsubscribe } = taskBroker.observe({ taskId, after }, (error, { events }) => {
 clearTimeout(timeout);
 unsubscribe();
 if (error) {
@@ -2791,7 +2910,7 @@ data: ${JSON.stringify(event)}
 clearTimeout(timeout);
 });
 });
-const app = express__default[
+const app = express__default["default"]();
 app.set("logger", logger);
 app.use("/", router);
 return app;
@@ -2851,9 +2970,7 @@ class ScaffolderEntitiesProcessor {
 
 Object.defineProperty(exports, 'createFetchCookiecutterAction', {
 enumerable: true,
-get: function () {
-return pluginScaffolderBackendModuleCookiecutter.createFetchCookiecutterAction;
-}
+get: function () { return pluginScaffolderBackendModuleCookiecutter.createFetchCookiecutterAction; }
 });
 exports.CatalogEntityClient = CatalogEntityClient;
 exports.DatabaseTaskStore = DatabaseTaskStore;
@@ -2878,6 +2995,7 @@ exports.createPublishFileAction = createPublishFileAction;
 exports.createPublishGithubAction = createPublishGithubAction;
 exports.createPublishGithubPullRequestAction = createPublishGithubPullRequestAction;
 exports.createPublishGitlabAction = createPublishGitlabAction;
+exports.createPublishGitlabMergeRequestAction = createPublishGitlabMergeRequestAction;
 exports.createRouter = createRouter;
 exports.createTemplateAction = createTemplateAction;
 exports.fetchContents = fetchContents;
```