@backstage/plugin-scaffolder-backend 0.15.15 → 0.15.19
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +44 -0
- package/dist/index.cjs.js +277 -154
- package/dist/index.cjs.js.map +1 -1
- package/dist/index.d.ts +9 -60
- package/package.json +12 -12
package/dist/index.cjs.js
CHANGED
@@ -45,14 +45,12 @@ function _interopNamespace(e) {
         var d = Object.getOwnPropertyDescriptor(e, k);
         Object.defineProperty(n, k, d.get ? d : {
           enumerable: true,
-          get: function () {
-            return e[k];
-          }
+          get: function () { return e[k]; }
         });
       }
     });
   }
-  n[
+  n["default"] = e;
   return Object.freeze(n);
 }
 
@@ -73,7 +71,7 @@ const createTemplateAction = (templateAction) => {
 };
 
 function createCatalogRegisterAction(options) {
-  const {catalogClient, integrations} = options;
+  const { catalogClient, integrations } = options;
   return createTemplateAction({
     id: "catalog:register",
     description: "Registers entities from a catalog descriptor file in the workspace into the software catalog.",
@@ -122,12 +120,12 @@ function createCatalogRegisterAction(options) {
     },
     async handler(ctx) {
       var _a;
-      const {input} = ctx;
+      const { input } = ctx;
       let catalogInfoUrl;
       if ("catalogInfoUrl" in input) {
         catalogInfoUrl = input.catalogInfoUrl;
       } else {
-        const {repoContentsUrl, catalogInfoPath = "/catalog-info.yaml"} = input;
+        const { repoContentsUrl, catalogInfoPath = "/catalog-info.yaml" } = input;
         const integration = integrations.byUrl(repoContentsUrl);
         if (!integration) {
           throw new errors.InputError(`No integration found for host ${repoContentsUrl}`);
@@ -141,15 +139,15 @@ function createCatalogRegisterAction(options) {
       await catalogClient.addLocation({
         type: "url",
         target: catalogInfoUrl
-      }, ctx.token ? {token: ctx.token} : {});
+      }, ctx.token ? { token: ctx.token } : {});
       try {
         const result = await catalogClient.addLocation({
           dryRun: true,
           type: "url",
           target: catalogInfoUrl
-        }, ctx.token ? {token: ctx.token} : {});
+        }, ctx.token ? { token: ctx.token } : {});
         if (result.entities.length > 0) {
-          const {entities} = result;
+          const { entities } = result;
           const entity = (_a = entities.find((e) => !e.metadata.name.startsWith("generated-"))) != null ? _a : entities[0];
           ctx.output("entityRef", catalogModel.stringifyEntityRef(entity));
         }
@@ -171,6 +169,11 @@ function createCatalogWriteAction() {
       input: {
         type: "object",
         properties: {
+          filePath: {
+            title: "Catalog file path",
+            description: "Defaults to catalog-info.yaml",
+            type: "string"
+          },
           entity: {
             title: "Entity info to write catalog-info.yaml",
             description: "You can provide the same values used in the Entity schema.",
@@ -181,8 +184,9 @@ function createCatalogWriteAction() {
     },
     async handler(ctx) {
       ctx.logStream.write(`Writing catalog-info.yaml`);
-      const {entity} = ctx.input;
-
+      const { filePath, entity } = ctx.input;
+      const path = filePath != null ? filePath : "catalog-info.yaml";
+      await fs__default["default"].writeFile(backendCommon.resolveSafeChildPath(ctx.workspacePath, path), yaml__namespace.stringify(entity));
     }
   });
 }
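The hunk above is the substantive change to the `catalog:write` action in this range: a new optional `filePath` input that overrides the previously hard-coded `catalog-info.yaml` target. A minimal sketch of exercising it, assuming the action factory is exported as in earlier releases and that the returned action object exposes its `handler` directly; the mock context carries only the fields the compiled handler reads (`input`, `workspacePath`, `logStream`):

// Sketch only: paths and entity fields are illustrative.
const os = require('os');
const fs = require('fs-extra');
const { createCatalogWriteAction } = require('@backstage/plugin-scaffolder-backend');

async function main() {
  const action = createCatalogWriteAction();
  // The scaffolder normally provisions the workspace; a temp dir stands in here.
  const workspacePath = await fs.mkdtemp(`${os.tmpdir()}/scaffolder-demo-`);
  await action.handler({
    workspacePath,
    logStream: process.stdout,
    input: {
      filePath: 'component-info.yaml', // new in this release; omit to keep the old default
      entity: {
        apiVersion: 'backstage.io/v1alpha1',
        kind: 'Component',
        metadata: { name: 'demo-service' },
        spec: { type: 'service', owner: 'team-a', lifecycle: 'experimental' },
      },
    },
  });
}

main().catch((err) => {
  console.error(err);
  process.exit(1);
});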
@@ -250,8 +254,8 @@ async function fetchContents({
   }
   if (!fetchUrlIsAbsolute && (baseUrl == null ? void 0 : baseUrl.startsWith("file://"))) {
     const basePath = baseUrl.slice("file://".length);
-    const srcDir = backendCommon.resolveSafeChildPath(path__default[
-    await fs__default[
+    const srcDir = backendCommon.resolveSafeChildPath(path__default["default"].dirname(basePath), fetchUrl);
+    await fs__default["default"].copy(srcDir, outputPath);
   } else {
     let readUrl;
     if (fetchUrlIsAbsolute) {
@@ -269,13 +273,13 @@ async function fetchContents({
       throw new errors.InputError(`Failed to fetch, template location could not be determined and the fetch URL is relative, ${fetchUrl}`);
     }
     const res = await reader.readTree(readUrl);
-    await fs__default[
-    await res.dir({targetDir: outputPath});
+    await fs__default["default"].ensureDir(outputPath);
+    await res.dir({ targetDir: outputPath });
   }
 }
 
 function createFetchPlainAction(options) {
-  const {reader, integrations} = options;
+  const { reader, integrations } = options;
   return createTemplateAction({
     id: "fetch:plain",
     description: "Downloads content and places it in the workspace, or optionally in a subdirectory specified by the 'targetPath' input option.",
@@ -382,15 +386,15 @@ const { render, renderCompat } = (() => {
 `;
 class SecureTemplater {
   static async loadRenderer(options = {}) {
-    const {parseRepoUrl, cookiecutterCompat} = options;
+    const { parseRepoUrl, cookiecutterCompat } = options;
     let sandbox = void 0;
     if (parseRepoUrl) {
       sandbox = {
         parseRepoUrl: (url) => JSON.stringify(parseRepoUrl(url))
       };
     }
-    const vm = new vm2.VM({sandbox});
-    const nunjucksSource = await fs__default[
+    const vm = new vm2.VM({ sandbox });
+    const nunjucksSource = await fs__default["default"].readFile(backendCommon.resolvePackagePath("@backstage/plugin-scaffolder-backend", "assets/nunjucks.js.txt"), "utf-8");
     vm.run(mkScript(nunjucksSource));
     const render = (template, values) => {
       if (!vm) {
@@ -408,7 +412,7 @@ class SecureTemplater {
 }
 
 function createFetchTemplateAction(options) {
-  const {reader, integrations} = options;
+  const { reader, integrations } = options;
   return createTemplateAction({
     id: "fetch:template",
     description: "Downloads a skeleton, templates variables into file and directory names and content, and places the result in the workspace, or optionally in a subdirectory specified by the 'targetPath' input option.",
@@ -481,19 +485,19 @@ function createFetchTemplateAction(options) {
         outputPath: templateDir
       });
       ctx.logger.info("Listing files and directories in template");
-      const allEntriesInTemplate = await globby__default[
+      const allEntriesInTemplate = await globby__default["default"](`**/*`, {
         cwd: templateDir,
         dot: true,
         onlyFiles: false,
         markDirectories: true
       });
-      const nonTemplatedEntries = new Set((await Promise.all((ctx.input.copyWithoutRender || []).map((pattern) => globby__default[
+      const nonTemplatedEntries = new Set((await Promise.all((ctx.input.copyWithoutRender || []).map((pattern) => globby__default["default"](pattern, {
         cwd: templateDir,
         dot: true,
         onlyFiles: false,
         markDirectories: true
       })))).flat());
-      const {cookiecutterCompat, values} = ctx.input;
+      const { cookiecutterCompat, values } = ctx.input;
       const context = {
         [cookiecutterCompat ? "cookiecutter" : "values"]: values
       };
@@ -526,17 +530,17 @@ function createFetchTemplateAction(options) {
       }
       if (location.endsWith("/")) {
         ctx.logger.info(`Writing directory ${location} to template output path.`);
-        await fs__default[
+        await fs__default["default"].ensureDir(outputPath);
       } else {
         const inputFilePath = backendCommon.resolveSafeChildPath(templateDir, location);
         if (await isbinaryfile.isBinaryFile(inputFilePath)) {
           ctx.logger.info(`Copying binary file ${location} to template output path.`);
-          await fs__default[
+          await fs__default["default"].copy(inputFilePath, outputPath);
         } else {
-          const statsObj = await fs__default[
+          const statsObj = await fs__default["default"].stat(inputFilePath);
           ctx.logger.info(`Writing file ${location} to template output path with mode ${statsObj.mode}.`);
-          const inputFileContents = await fs__default[
-          await fs__default[
+          const inputFileContents = await fs__default["default"].readFile(inputFilePath, "utf-8");
+          await fs__default["default"].outputFile(outputPath, renderContents ? renderTemplate(inputFileContents, context) : inputFileContents, { mode: statsObj.mode });
         }
       }
     }
@@ -573,7 +577,7 @@ const createFilesystemDeleteAction = () => {
       for (const file of ctx.input.files) {
         const filepath = backendCommon.resolveSafeChildPath(ctx.workspacePath, file);
         try {
-          await fs__default[
+          await fs__default["default"].remove(filepath);
           ctx.logger.info(`File ${filepath} deleted successfully`);
         } catch (err) {
           ctx.logger.error(`Failed to delete file ${filepath}:`, err);
@@ -631,7 +635,7 @@ const createFilesystemRenameAction = () => {
         const sourceFilepath = backendCommon.resolveSafeChildPath(ctx.workspacePath, file.from);
         const destFilepath = backendCommon.resolveSafeChildPath(ctx.workspacePath, file.to);
         try {
-          await fs__default[
+          await fs__default["default"].move(sourceFilepath, destFilepath, {
             overwrite: (_b = file.overwrite) != null ? _b : false
           });
           ctx.logger.info(`File ${sourceFilepath} renamed to ${destFilepath} successfully`);
@@ -688,7 +692,7 @@ async function initRepoAndPush({
     dir,
     defaultBranch
   });
-  await git.add({dir, filepath: "."});
+  await git.add({ dir, filepath: "." });
   const authorInfo = {
     name: (_a = gitAuthorInfo == null ? void 0 : gitAuthorInfo.name) != null ? _a : "Scaffolder",
     email: (_b = gitAuthorInfo == null ? void 0 : gitAuthorInfo.email) != null ? _b : "scaffolder@backstage.io"
@@ -726,7 +730,7 @@ const enableBranchProtectionOnDefaultRepoBranch = async ({
     owner,
     repo: repoName,
     branch: defaultBranch,
-    required_status_checks: {strict: true, contexts: []},
+    required_status_checks: { strict: true, contexts: [] },
     restrictions: null,
     enforce_admins: true,
     required_pull_request_reviews: {
@@ -800,7 +804,7 @@ const parseRepoUrl = (repoUrl, integrations) => {
   if (!repo) {
     throw new errors.InputError(`Invalid repo URL passed to publisher: ${repoUrl}, missing repo`);
   }
-  return {host, owner, repo, organization, workspace, project};
+  return { host, owner, repo, organization, workspace, project };
 };
 const isExecutable = (fileMode) => {
   const executeBitMask = 73;
@@ -809,7 +813,7 @@ const isExecutable = (fileMode) => {
 };
 
 function createPublishAzureAction(options) {
-  const {integrations, config} = options;
+  const { integrations, config } = options;
   return createTemplateAction({
     id: "publish:azure",
     description: "Initializes a git repository of the content in the workspace, and publishes it to Azure.",
@@ -852,8 +856,8 @@ function createPublishAzureAction(options) {
       }
     },
     async handler(ctx) {
-      const {repoUrl, defaultBranch = "master"} = ctx.input;
-      const {owner, repo, host, organization} = parseRepoUrl(repoUrl, integrations);
+      const { repoUrl, defaultBranch = "master" } = ctx.input;
+      const { owner, repo, host, organization } = parseRepoUrl(repoUrl, integrations);
       if (!organization) {
        throw new errors.InputError(`Invalid URL provider was included in the repo URL to create ${ctx.input.repoUrl}, missing organization`);
      }
@@ -867,7 +871,7 @@ function createPublishAzureAction(options) {
       const authHandler = azureDevopsNodeApi.getPersonalAccessTokenHandler(integrationConfig.config.token);
       const webApi = new azureDevopsNodeApi.WebApi(`https://${host}/${organization}`, authHandler);
       const client = await webApi.getGitApi();
-      const createOptions = {name: repo};
+      const createOptions = { name: repo };
       const returnedRepo = await client.createRepository(createOptions, owner);
       if (!returnedRepo) {
         throw new errors.InputError(`Unable to create the repository with Organization ${organization}, Project ${owner} and Repo ${repo}.
@@ -915,7 +919,7 @@ const createBitbucketCloudRepository = async (opts) => {
       scm: "git",
       description,
       is_private: repoVisibility === "private",
-      project: {key: project}
+      project: { key: project }
     }),
     headers: {
       Authorization: authorization,
@@ -924,7 +928,7 @@ const createBitbucketCloudRepository = async (opts) => {
   };
   let response;
   try {
-    response = await fetch__default[
+    response = await fetch__default["default"](`https://api.bitbucket.org/2.0/repositories/${workspace}/${repo}`, options);
   } catch (e) {
     throw new Error(`Unable to create repository, ${e}`);
   }
@@ -939,7 +943,7 @@ const createBitbucketCloudRepository = async (opts) => {
     }
   }
   const repoContentsUrl = `${r.links.html.href}/src/master`;
-  return {remoteUrl, repoContentsUrl};
+  return { remoteUrl, repoContentsUrl };
 };
 const createBitbucketServerRepository = async (opts) => {
   const {
@@ -966,7 +970,7 @@ const createBitbucketServerRepository = async (opts) => {
   };
   try {
     const baseUrl = apiBaseUrl ? apiBaseUrl : `https://${host}/rest/api/1.0`;
-    response = await fetch__default[
+    response = await fetch__default["default"](`${baseUrl}/projects/${project}/repos`, options);
   } catch (e) {
     throw new Error(`Unable to create repository, ${e}`);
   }
@@ -981,7 +985,7 @@ const createBitbucketServerRepository = async (opts) => {
     }
   }
   const repoContentsUrl = `${r.links.self[0].href}`;
-  return {remoteUrl, repoContentsUrl};
+  return { remoteUrl, repoContentsUrl };
 };
 const getAuthorizationHeader = (config) => {
   if (config.username && config.appPassword) {
@@ -994,19 +998,19 @@ const getAuthorizationHeader = (config) => {
   throw new Error(`Authorization has not been provided for Bitbucket. Please add either username + appPassword or token to the Integrations config`);
 };
 const performEnableLFS = async (opts) => {
-  const {authorization, host, project, repo} = opts;
+  const { authorization, host, project, repo } = opts;
   const options = {
     method: "PUT",
     headers: {
       Authorization: authorization
     }
   };
-  const {ok, status, statusText} = await fetch__default[
+  const { ok, status, statusText } = await fetch__default["default"](`https://${host}/rest/git-lfs/admin/projects/${project}/repos/${repo}/enabled`, options);
   if (!ok)
     throw new Error(`Failed to enable LFS in the repository, ${status}: ${statusText}`);
 };
 function createPublishBitbucketAction(options) {
-  const {integrations, config} = options;
+  const { integrations, config } = options;
   return createTemplateAction({
     id: "publish:bitbucket",
     description: "Initializes a git repository of the content in the workspace, and publishes it to Bitbucket.",
@@ -1066,7 +1070,7 @@ function createPublishBitbucketAction(options) {
         repoVisibility = "private",
         enableLFS = false
       } = ctx.input;
-      const {workspace, project, repo, host} = parseRepoUrl(repoUrl, integrations);
+      const { workspace, project, repo, host } = parseRepoUrl(repoUrl, integrations);
       if (host === "bitbucket.org") {
         if (!workspace) {
           throw new errors.InputError(`Invalid URL provider was included in the repo URL to create ${ctx.input.repoUrl}, missing workspace`);
@@ -1082,7 +1086,7 @@ function createPublishBitbucketAction(options) {
       const authorization = getAuthorizationHeader(integrationConfig.config);
       const apiBaseUrl = integrationConfig.config.apiBaseUrl;
       const createMethod = host === "bitbucket.org" ? createBitbucketCloudRepository : createBitbucketServerRepository;
-      const {remoteUrl, repoContentsUrl} = await createMethod({
+      const { remoteUrl, repoContentsUrl } = await createMethod({
        authorization,
        host,
        workspace: workspace || "",
@@ -1109,7 +1113,7 @@ function createPublishBitbucketAction(options) {
         gitAuthorInfo
       });
       if (enableLFS && host !== "bitbucket.org") {
-        await performEnableLFS({authorization, host, project, repo});
+        await performEnableLFS({ authorization, host, project, repo });
       }
       ctx.output("remoteUrl", remoteUrl);
       ctx.output("repoContentsUrl", repoContentsUrl);
@@ -1134,13 +1138,13 @@ function createPublishFileAction() {
       }
     },
     async handler(ctx) {
-      const {path: path$1} = ctx.input;
-      const exists = await fs__default[
+      const { path: path$1 } = ctx.input;
+      const exists = await fs__default["default"].pathExists(path$1);
       if (exists) {
         throw new errors.InputError("Output path already exists");
       }
-      await fs__default[
-      await fs__default[
+      await fs__default["default"].ensureDir(path.dirname(path$1));
+      await fs__default["default"].copy(ctx.workspacePath, path$1);
     }
   });
 }
@@ -1149,13 +1153,13 @@ class OctokitProvider {
   constructor(integrations) {
     this.integrations = integrations;
     this.credentialsProviders = new Map(integrations.github.list().map((integration$1) => {
-      const provider = integration.
+      const provider = integration.SingleInstanceGithubCredentialsProvider.create(integration$1.config);
       return [integration$1.config.host, provider];
     }));
   }
   async getOctokit(repoUrl) {
     var _a;
-    const {owner, repo, host} = parseRepoUrl(repoUrl, this.integrations);
+    const { owner, repo, host } = parseRepoUrl(repoUrl, this.integrations);
     if (!owner) {
       throw new errors.InputError(`No owner provided for repo ${repoUrl}`);
     }
@@ -1167,7 +1171,7 @@ class OctokitProvider {
     if (!credentialsProvider) {
       throw new errors.InputError(`No matching credentials for host ${host}, please check your integrations config`);
     }
-    const {token} = await credentialsProvider.getCredentials({
+    const { token } = await credentialsProvider.getCredentials({
       url: `https://${host}/${encodeURIComponent(owner)}/${encodeURIComponent(repo)}`
     });
     if (!token) {
@@ -1178,12 +1182,12 @@ class OctokitProvider {
       baseUrl: integrationConfig.apiBaseUrl,
       previews: ["nebula-preview"]
     });
-    return {client, token, owner, repo};
+    return { client, token, owner, repo };
   }
 }
 
 function createPublishGithubAction(options) {
-  const {integrations, config} = options;
+  const { integrations, config } = options;
   const octokitProvider = new OctokitProvider(integrations);
   return createTemplateAction({
     id: "publish:github",
@@ -1279,7 +1283,7 @@ function createPublishGithubAction(options) {
         collaborators,
         topics
       } = ctx.input;
-      const {client, token, owner, repo} = await octokitProvider.getOctokit(repoUrl);
+      const { client, token, owner, repo } = await octokitProvider.getOctokit(repoUrl);
       const user = await client.users.getByUsername({
         username: owner
       });
@@ -1294,7 +1298,7 @@ function createPublishGithubAction(options) {
         private: repoVisibility === "private",
         description
       });
-      const {data: newRepo} = await repoCreationPromise;
+      const { data: newRepo } = await repoCreationPromise;
       if (access == null ? void 0 : access.startsWith(`${owner}/`)) {
         const [, team] = access.split("/");
         await client.teams.addOrUpdateRepoPermissionsInOrg({
@@ -1393,11 +1397,11 @@ const defaultClientFactory = async ({
   if (!integrationConfig) {
     throw new errors.InputError(`No integration for host ${host}`);
   }
-  const credentialsProvider = integration.
+  const credentialsProvider = integration.SingleInstanceGithubCredentialsProvider.create(integrationConfig);
   if (!credentialsProvider) {
     throw new errors.InputError(`No matching credentials for host ${host}, please check your integrations config`);
   }
-  const {token} = await credentialsProvider.getCredentials({
+  const { token } = await credentialsProvider.getCredentials({
     url: `https://${host}/${encodeURIComponent(owner)}/${encodeURIComponent(repo)}`
   });
   if (!token) {
@@ -1473,21 +1477,21 @@ const createPublishGithubPullRequestAction = ({
         targetPath,
         sourcePath
       } = ctx.input;
-      const {owner, repo, host} = parseRepoUrl(repoUrl, integrations);
+      const { owner, repo, host } = parseRepoUrl(repoUrl, integrations);
       if (!owner) {
         throw new errors.InputError(`No owner provided for host: ${host}, and repo ${repo}`);
       }
-      const client = await clientFactory({integrations, host, owner, repo});
+      const client = await clientFactory({ integrations, host, owner, repo });
       const fileRoot = sourcePath ? backendCommon.resolveSafeChildPath(ctx.workspacePath, sourcePath) : ctx.workspacePath;
-      const localFilePaths = await globby__default[
+      const localFilePaths = await globby__default["default"](["./**", "./**/.*", "!.git"], {
         cwd: fileRoot,
         gitignore: true,
         dot: true
       });
       const fileContents = await Promise.all(localFilePaths.map((filePath) => {
         const absPath = backendCommon.resolveSafeChildPath(fileRoot, filePath);
-        const base64EncodedContent = fs__default[
-        const fileStat = fs__default[
+        const base64EncodedContent = fs__default["default"].readFileSync(absPath).toString("base64");
+        const fileStat = fs__default["default"].statSync(absPath);
         const githubTreeItemMode = isExecutable(fileStat.mode) ? "100755" : "100644";
         const encoding = "base64";
         return {
@@ -1526,7 +1530,7 @@ const createPublishGithubPullRequestAction = ({
 };
 
 function createPublishGitlabAction(options) {
-  const {integrations, config} = options;
+  const { integrations, config } = options;
   return createTemplateAction({
     id: "publish:gitlab",
     description: "Initializes a git repository of the content in the workspace, and publishes it to GitLab.",
@@ -1575,7 +1579,7 @@ function createPublishGitlabAction(options) {
         repoVisibility = "private",
         defaultBranch = "master"
       } = ctx.input;
-      const {owner, repo, host} = parseRepoUrl(repoUrl, integrations);
+      const { owner, repo, host } = parseRepoUrl(repoUrl, integrations);
       if (!owner) {
         throw new errors.InputError(`No owner provided for host: ${host}, and repo ${repo}`);
       }
@@ -1590,12 +1594,12 @@ function createPublishGitlabAction(options) {
         host: integrationConfig.config.baseUrl,
         token: integrationConfig.config.token
       });
-      let {id: targetNamespace} = await client.Namespaces.show(owner);
+      let { id: targetNamespace } = await client.Namespaces.show(owner);
       if (!targetNamespace) {
-        const {id} = await client.Users.current();
+        const { id } = await client.Users.current();
         targetNamespace = id;
       }
-      const {http_url_to_repo} = await client.Projects.create({
+      const { http_url_to_repo } = await client.Projects.create({
        namespace_id: targetNamespace,
        name: repo,
        visibility: repoVisibility
@@ -1624,8 +1628,122 @@ function createPublishGitlabAction(options) {
   });
 }
 
+const createPublishGitlabMergeRequestAction = (options) => {
+  const { integrations } = options;
+  return createTemplateAction({
+    id: "publish:gitlab:merge-request",
+    schema: {
+      input: {
+        required: ["projectid", "repoUrl", "targetPath", "branchName"],
+        type: "object",
+        properties: {
+          repoUrl: {
+            type: "string",
+            title: "Repository Location",
+            description: `Accepts the format 'gitlab.com/group_name/project_name' where 'project_name' is the repository name and 'group_name' is a group or username`
+          },
+          projectid: {
+            type: "string",
+            title: "projectid",
+            description: "Project ID/Name(slug) of the Gitlab Project"
+          },
+          title: {
+            type: "string",
+            title: "Merge Request Name",
+            description: "The name for the merge request"
+          },
+          description: {
+            type: "string",
+            title: "Merge Request Description",
+            description: "The description of the merge request"
+          },
+          branchName: {
+            type: "string",
+            title: "Destination Branch name",
+            description: "The description of the merge request"
+          },
+          targetPath: {
+            type: "string",
+            title: "Repository Subdirectory",
+            description: "Subdirectory of repository to apply changes to"
+          }
+        }
+      },
+      output: {
+        type: "object",
+        properties: {
+          projectid: {
+            title: "Gitlab Project id/Name(slug)",
+            type: "string"
+          },
+          mergeRequestURL: {
+            title: "MergeRequest(MR) URL",
+            type: "string",
+            description: "Link to the merge request in GitLab"
+          }
+        }
+      }
+    },
+    async handler(ctx) {
+      const repoUrl = ctx.input.repoUrl;
+      const { host } = parseRepoUrl(repoUrl, integrations);
+      const integrationConfig = integrations.gitlab.byHost(host);
+      const actions = [];
+      const destinationBranch = ctx.input.branchName;
+      if (!integrationConfig) {
+        throw new errors.InputError(`No matching integration configuration for host ${host}, please check your integrations config`);
+      }
+      if (!integrationConfig.config.token) {
+        throw new errors.InputError(`No token available for host ${host}`);
+      }
+      const api = new node.Gitlab({
+        host: integrationConfig.config.baseUrl,
+        token: integrationConfig.config.token
+      });
+      const fileRoot = ctx.workspacePath;
+      const localFilePaths = await globby__default["default"]([`${ctx.input.targetPath}/**`], {
+        cwd: fileRoot,
+        gitignore: true,
+        dot: true
+      });
+      const fileContents = await Promise.all(localFilePaths.map((p) => fs.readFile(backendCommon.resolveSafeChildPath(fileRoot, p))));
+      const repoFilePaths = localFilePaths.map((repoFilePath) => {
+        return repoFilePath;
+      });
+      for (let i = 0; i < repoFilePaths.length; i++) {
+        actions.push({
+          action: "create",
+          filePath: repoFilePaths[i],
+          content: fileContents[i].toString()
+        });
+      }
+      const projects = await api.Projects.show(ctx.input.projectid);
+      const { default_branch: defaultBranch } = projects;
+      try {
+        await api.Branches.create(ctx.input.projectid, destinationBranch, String(defaultBranch));
+      } catch (e) {
+        throw new errors.InputError(`The branch creation failed ${e}`);
+      }
+      try {
+        await api.Commits.create(ctx.input.projectid, destinationBranch, ctx.input.title, actions);
+      } catch (e) {
+        throw new errors.InputError(`Committing the changes to ${destinationBranch} failed ${e}`);
+      }
+      try {
+        const mergeRequestUrl = await api.MergeRequests.create(ctx.input.projectid, destinationBranch, String(defaultBranch), ctx.input.title, { description: ctx.input.description }).then((mergeRequest) => {
+          return mergeRequest.web_url;
+        });
+        ctx.output("projectid", ctx.input.projectid);
+        ctx.output("mergeRequestUrl", mergeRequestUrl);
+      } catch (e) {
+        throw new errors.InputError(`Merge request creation failed${e}`);
+      }
+    }
+  });
+};
+
 function createGithubActionsDispatchAction(options) {
-  const {integrations} = options;
+  const { integrations } = options;
   const octokitProvider = new OctokitProvider(integrations);
   return createTemplateAction({
     id: "github:actions:dispatch",
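The block added in the hunk above is the headline feature of this diff: a new `publish:gitlab:merge-request` action that snapshots the files under `targetPath` in the workspace, creates a branch off the project's default branch, commits the files, and opens a merge request, emitting `projectid` and `mergeRequestUrl` as outputs. It is exported and registered in `createBuiltinActions` as well (see the later hunks). A minimal sketch of constructing it by hand, assuming `config` is the usual backend root config:

// Sketch only: `config` is assumed to come from the backend plugin environment.
const { ScmIntegrations } = require('@backstage/integration');
const {
  createPublishGitlabMergeRequestAction,
} = require('@backstage/plugin-scaffolder-backend');

const integrations = ScmIntegrations.fromConfig(config);
// Only the integrations are needed up front; the GitLab host, token, and
// project are resolved from the repoUrl/projectid inputs at execution time.
const gitlabMergeRequestAction = createPublishGitlabMergeRequestAction({ integrations });

Note that the handler reads its token from the matching `integrations.gitlab` entry and throws if none is configured, so a `token` must be present in the GitLab integration config.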
@@ -1654,9 +1772,9 @@ function createGithubActionsDispatchAction(options) {
       }
     },
     async handler(ctx) {
-      const {repoUrl, workflowId, branchOrTagName} = ctx.input;
+      const { repoUrl, workflowId, branchOrTagName } = ctx.input;
       ctx.logger.info(`Dispatching workflow ${workflowId} for repo ${repoUrl} on ${branchOrTagName}`);
-      const {client, owner, repo} = await octokitProvider.getOctokit(repoUrl);
+      const { client, owner, repo } = await octokitProvider.getOctokit(repoUrl);
       await client.rest.actions.createWorkflowDispatch({
         owner,
         repo,
@@ -1669,7 +1787,7 @@ function createGithubActionsDispatchAction(options) {
 }
 
 function createGithubWebhookAction(options) {
-  const {integrations, defaultWebhookSecret} = options;
+  const { integrations, defaultWebhookSecret } = options;
   const octokitProvider = new OctokitProvider(integrations);
   const eventNames = webhooks.emitterEventNames.filter((event) => !event.includes("."));
   return createTemplateAction({
@@ -1744,7 +1862,7 @@ function createGithubWebhookAction(options) {
         insecureSsl = false
       } = ctx.input;
       ctx.logger.info(`Creating webhook ${webhookUrl} for repo ${repoUrl}`);
-      const {client, owner, repo} = await octokitProvider.getOctokit(repoUrl);
+      const { client, owner, repo } = await octokitProvider.getOctokit(repoUrl);
       try {
         const insecure_ssl = insecureSsl ? "1" : "0";
         await client.repos.createWebhook({
@@ -1769,17 +1887,12 @@ function createGithubWebhookAction(options) {
 }
 
 const createBuiltinActions = (options) => {
-  const {reader, integrations, containerRunner, catalogClient, config} = options;
-  return [
+  const { reader, integrations, containerRunner, catalogClient, config } = options;
+  const actions = [
     createFetchPlainAction({
       reader,
       integrations
     }),
-    pluginScaffolderBackendModuleCookiecutter.createFetchCookiecutterAction({
-      reader,
-      integrations,
-      containerRunner
-    }),
     createFetchTemplateAction({
       integrations,
       reader
@@ -1795,6 +1908,9 @@ const createBuiltinActions = (options) => {
       integrations,
       config
     }),
+    createPublishGitlabMergeRequestAction({
+      integrations
+    }),
     createPublishBitbucketAction({
       integrations,
       config
@@ -1804,7 +1920,7 @@ const createBuiltinActions = (options) => {
       config
     }),
     createDebugLogAction(),
-    createCatalogRegisterAction({catalogClient, integrations}),
+    createCatalogRegisterAction({ catalogClient, integrations }),
     createCatalogWriteAction(),
     createFilesystemDeleteAction(),
     createFilesystemRenameAction(),
@@ -1815,11 +1931,19 @@ const createBuiltinActions = (options) => {
       integrations
     })
   ];
+  if (containerRunner) {
+    actions.push(pluginScaffolderBackendModuleCookiecutter.createFetchCookiecutterAction({
+      reader,
+      integrations,
+      containerRunner
+    }));
+  }
+  return actions;
 };
 
 class TemplateActionRegistry {
   constructor() {
-    this.actions = new Map();
+    this.actions = /* @__PURE__ */ new Map();
   }
   register(action) {
     if (this.actions.has(action.id)) {
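With the hunks above, `createBuiltinActions` now builds its list imperatively and only appends the cookiecutter action when a `containerRunner` is supplied, where it previously registered it unconditionally. A sketch of the resulting call shape, assuming the other dependencies come from the backend environment as usual:

// Sketch only: reader (UrlReader), integrations (ScmIntegrations),
// catalogClient (CatalogClient) and config are assumed to be provided
// by the surrounding backend.
const actions = createBuiltinActions({
  reader,
  integrations,
  catalogClient,
  config,
  // containerRunner omitted: per the hunk above, fetch:cookiecutter is
  // simply not registered in that case.
});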
@@ -1851,7 +1975,7 @@ class DatabaseTaskStore {
     this.db = options.database;
   }
   async getTask(taskId) {
-    const [result] = await this.db("tasks").where({id: taskId}).select();
+    const [result] = await this.db("tasks").where({ id: taskId }).select();
     if (!result) {
       throw new errors.NotFoundError(`No task with id '${taskId}' found`);
     }
@@ -1878,7 +2002,7 @@ class DatabaseTaskStore {
       secrets: secrets ? JSON.stringify(secrets) : void 0,
       status: "open"
     });
-    return {taskId};
+    return { taskId };
   }
   async claimTask() {
     return this.db.transaction(async (tx) => {
@@ -1888,7 +2012,7 @@ class DatabaseTaskStore {
       if (!task) {
         return void 0;
       }
-      const updateCount = await tx("tasks").where({id: task.id, status: "open"}).update({
+      const updateCount = await tx("tasks").where({ id: task.id, status: "open" }).update({
         status: "processing",
         last_heartbeat_at: this.db.fn.now()
       });
@@ -1912,14 +2036,14 @@ class DatabaseTaskStore {
     });
   }
   async heartbeatTask(taskId) {
-    const updateCount = await this.db("tasks").where({id: taskId, status: "processing"}).update({
+    const updateCount = await this.db("tasks").where({ id: taskId, status: "processing" }).update({
       last_heartbeat_at: this.db.fn.now()
     });
     if (updateCount === 0) {
       throw new errors.ConflictError(`No running task with taskId ${taskId} found`);
     }
   }
-  async listStaleTasks({timeoutS}) {
+  async listStaleTasks({ timeoutS }) {
     const rawRows = await this.db("tasks").where("status", "processing").andWhere("last_heartbeat_at", "<=", this.db.client.config.client === "sqlite3" ? this.db.raw(`datetime('now', ?)`, [`-${timeoutS} seconds`]) : this.db.raw(`dateadd('second', ?, ?)`, [
       `-${timeoutS}`,
       this.db.fn.now()
@@ -1927,7 +2051,7 @@ class DatabaseTaskStore {
     const tasks = rawRows.map((row) => ({
       taskId: row.id
     }));
-    return {tasks};
+    return { tasks };
   }
   async completeTask({
     taskId,
@@ -1967,7 +2091,7 @@ class DatabaseTaskStore {
       });
     });
   }
-  async emitLogEvent({taskId, body}) {
+  async emitLogEvent({ taskId, body }) {
     const serliazedBody = JSON.stringify(body);
     await this.db("task_events").insert({
       task_id: taskId,
@@ -1994,13 +2118,13 @@ class DatabaseTaskStore {
           taskId,
           body,
           type: event.event_type,
-          createdAt: typeof event.created_at === "string" ? luxon.DateTime.fromSQL(event.created_at, {zone: "UTC"}).toISO() : event.created_at
+          createdAt: typeof event.created_at === "string" ? luxon.DateTime.fromSQL(event.created_at, { zone: "UTC" }).toISO() : event.created_at
         };
       } catch (error) {
         throw new Error(`Failed to parse event body from event taskId=${taskId} id=${event.id}, ${error}`);
       }
     });
-    return {events};
+    return { events };
   }
 }
 
@@ -2031,7 +2155,7 @@ class TaskManager {
   async emitLog(message, metadata) {
     await this.storage.emitLogEvent({
       taskId: this.state.taskId,
-      body: {message, ...metadata}
+      body: { message, ...metadata }
     });
   }
   async complete(result, metadata) {
@@ -2066,7 +2190,7 @@ function defer() {
   const promise = new Promise((_resolve) => {
     resolve = _resolve;
   });
-  return {promise, resolve};
+  return { promise, resolve };
 }
 class StorageTaskBroker {
   constructor(storage, logger) {
@@ -2098,7 +2222,7 @@ class StorageTaskBroker {
     return this.storage.getTask(taskId);
   }
   observe(options, callback) {
-    const {taskId} = options;
+    const { taskId } = options;
     let cancelled = false;
     const unsubscribe = () => {
       cancelled = true;
@@ -2106,24 +2230,24 @@ class StorageTaskBroker {
     (async () => {
       let after = options.after;
       while (!cancelled) {
-        const result = await this.storage.listEvents({taskId, after});
-        const {events} = result;
+        const result = await this.storage.listEvents({ taskId, after });
+        const { events } = result;
         if (events.length) {
           after = events[events.length - 1].id;
           try {
             callback(void 0, result);
           } catch (error) {
             errors.assertError(error);
-            callback(error, {events: []});
+            callback(error, { events: [] });
           }
         }
         await new Promise((resolve) => setTimeout(resolve, 1e3));
       }
     })();
-    return {unsubscribe};
+    return { unsubscribe };
   }
   async vacuumTasks(timeoutS) {
-    const {tasks} = await this.storage.listStaleTasks(timeoutS);
+    const { tasks } = await this.storage.listStaleTasks(timeoutS);
     await Promise.all(tasks.map(async (task) => {
       try {
         await this.storage.completeTask({
@@ -2160,7 +2284,7 @@ class HandlebarsWorkflowRunner {
       return JSON.stringify(parseRepoUrl(repoUrl, this.options.integrations));
     });
     this.handlebars.registerHelper("projectSlug", (repoUrl) => {
-      const {owner, repo} = parseRepoUrl(repoUrl, this.options.integrations);
+      const { owner, repo } = parseRepoUrl(repoUrl, this.options.integrations);
       return `${owner}/${repo}`;
     });
     this.handlebars.registerHelper("json", (obj) => JSON.stringify(obj));
@@ -2172,14 +2296,14 @@ class HandlebarsWorkflowRunner {
     if (!isValidTaskSpec$1(task.spec)) {
       throw new errors.InputError(`Task spec is not a valid v1beta2 task spec`);
     }
-    const {actionRegistry} = this.options;
-    const workspacePath = path__default[
+    const { actionRegistry } = this.options;
+    const workspacePath = path__default["default"].join(this.options.workingDirectory, await task.getWorkspaceName());
     try {
-      await fs__default[
+      await fs__default["default"].ensureDir(workspacePath);
       await task.emitLog(`Starting up task with ${task.spec.steps.length} steps`);
-      const templateCtx = {parameters: task.spec.values, steps: {}};
+      const templateCtx = { parameters: task.spec.values, steps: {} };
       for (const step of task.spec.steps) {
-        const metadata = {stepId: step.id};
+        const metadata = { stepId: step.id };
         try {
           const taskLogger = winston__namespace.createLogger({
             level: process.env.LOG_LEVEL || "info",
@@ -2193,7 +2317,7 @@ class HandlebarsWorkflowRunner {
              await task.emitLog(message, metadata);
            }
          });
-          taskLogger.add(new winston__namespace.transports.Stream({stream: stream$1}));
+          taskLogger.add(new winston__namespace.transports.Stream({ stream: stream$1 }));
           if (step.if !== void 0) {
             let skip = !step.if;
             if (typeof step.if === "string") {
@@ -2274,7 +2398,7 @@ class HandlebarsWorkflowRunner {
             token: (_b = task.secrets) == null ? void 0 : _b.token,
             workspacePath,
             async createTemporaryDirectory() {
-              const tmpDir = await fs__default[
+              const tmpDir = await fs__default["default"].mkdtemp(`${workspacePath}_step-${step.id}-`);
               tmpDirs.push(tmpDir);
               return tmpDir;
             },
@@ -2284,9 +2408,9 @@ class HandlebarsWorkflowRunner {
             metadata: task.spec.metadata
           });
           for (const tmpDir of tmpDirs) {
-            await fs__default[
+            await fs__default["default"].remove(tmpDir);
           }
-          templateCtx.steps[step.id] = {output: stepOutputs};
+          templateCtx.steps[step.id] = { output: stepOutputs };
           await task.emitLog(`Finished step ${step.name}`, {
             ...metadata,
             status: "completed"
@@ -2320,10 +2444,10 @@ class HandlebarsWorkflowRunner {
         }
         return value;
       });
-      return {output};
+      return { output };
     } finally {
       if (workspacePath) {
-        await fs__default[
+        await fs__default["default"].remove(workspacePath);
       }
     }
   }
@@ -2336,7 +2460,7 @@ const createStepLogger = ({
   task,
   step
 }) => {
-  const metadata = {stepId: step.id};
+  const metadata = { stepId: step.id };
   const taskLogger = winston__namespace.createLogger({
     level: process.env.LOG_LEVEL || "info",
     format: winston__namespace.format.combine(winston__namespace.format.colorize(), winston__namespace.format.timestamp(), winston__namespace.format.simple()),
@@ -2349,8 +2473,8 @@ const createStepLogger = ({
       await task.emitLog(message, metadata);
     }
   });
-  taskLogger.add(new winston__namespace.transports.Stream({stream: streamLogger}));
-  return {taskLogger, streamLogger};
+  taskLogger.add(new winston__namespace.transports.Stream({ stream: streamLogger }));
+  return { taskLogger, streamLogger };
 };
 class NunjucksWorkflowRunner {
   constructor(options) {
@@ -2358,7 +2482,7 @@ class NunjucksWorkflowRunner {
   }
   isSingleTemplateString(input) {
     var _a, _b;
-    const {parser, nodes} = nunjucks__default[
+    const { parser, nodes } = nunjucks__default["default"];
     const parsed = parser.parse(input, {}, {
       autoescape: false,
       tags: {
@@ -2401,15 +2525,15 @@ class NunjucksWorkflowRunner {
     if (!isValidTaskSpec(task.spec)) {
       throw new errors.InputError("Wrong template version executed with the workflow engine");
     }
-    const workspacePath = path__default[
-    const {integrations} = this.options;
+    const workspacePath = path__default["default"].join(this.options.workingDirectory, await task.getWorkspaceName());
+    const { integrations } = this.options;
     const renderTemplate = await SecureTemplater.loadRenderer({
       parseRepoUrl(url) {
         return parseRepoUrl(url, integrations);
       }
     });
     try {
-      await fs__default[
+      await fs__default["default"].ensureDir(workspacePath);
       await task.emitLog(`Starting up task with ${task.spec.steps.length} steps`);
       const context = {
         parameters: task.spec.parameters,
@@ -2420,7 +2544,7 @@ class NunjucksWorkflowRunner {
         if (step.if) {
           const ifResult = await this.render(step.if, context, renderTemplate);
           if (!isTruthy(ifResult)) {
-            await task.emitLog(`Skipping step ${step.id} because it's if condition was false`, {stepId: step.id, status: "skipped"});
+            await task.emitLog(`Skipping step ${step.id} because it's if condition was false`, { stepId: step.id, status: "skipped" });
             continue;
           }
         }
@@ -2429,7 +2553,7 @@ class NunjucksWorkflowRunner {
           status: "processing"
         });
         const action = this.options.actionRegistry.get(step.action);
-        const {taskLogger, streamLogger} = createStepLogger({task, step});
+        const { taskLogger, streamLogger } = createStepLogger({ task, step });
         const input = (_a = step.input && this.render(step.input, context, renderTemplate)) != null ? _a : {};
         if ((_b = action.schema) == null ? void 0 : _b.input) {
           const validateResult = jsonschema.validate(input, action.schema.input);
@@ -2450,7 +2574,7 @@ class NunjucksWorkflowRunner {
           logStream: streamLogger,
           workspacePath,
           createTemporaryDirectory: async () => {
-            const tmpDir = await fs__default[
+            const tmpDir = await fs__default["default"].mkdtemp(`${workspacePath}_step-${step.id}-`);
             tmpDirs.push(tmpDir);
             return tmpDir;
           },
@@ -2460,9 +2584,9 @@ class NunjucksWorkflowRunner {
           metadata: task.spec.metadata
         });
         for (const tmpDir of tmpDirs) {
-          await fs__default[
+          await fs__default["default"].remove(tmpDir);
        }
-        context.steps[step.id] = {output: stepOutput};
+        context.steps[step.id] = { output: stepOutput };
         await task.emitLog(`Finished step ${step.name}`, {
           stepId: step.id,
           status: "completed"
@@ -2476,10 +2600,10 @@ class NunjucksWorkflowRunner {
         }
       }
       const output = this.render(task.spec.output, context, renderTemplate);
-      return {output};
+      return { output };
     } finally {
       if (workspacePath) {
-        await fs__default[
+        await fs__default["default"].remove(workspacePath);
      }
    }
  }
@@ -2511,7 +2635,7 @@ class TaskWorker {
     });
     return new TaskWorker({
       taskBroker,
-      runners: {legacyWorkflowRunner, workflowRunner}
+      runners: { legacyWorkflowRunner, workflowRunner }
     });
   }
   start() {
@@ -2524,12 +2648,12 @@ class TaskWorker {
   }
   async runOneTask(task) {
     try {
-      const {output} = task.spec.apiVersion === "scaffolder.backstage.io/v1beta3" ? await this.options.runners.workflowRunner.execute(task) : await this.options.runners.legacyWorkflowRunner.execute(task);
-      await task.complete("completed", {output});
+      const { output } = task.spec.apiVersion === "scaffolder.backstage.io/v1beta3" ? await this.options.runners.workflowRunner.execute(task) : await this.options.runners.legacyWorkflowRunner.execute(task);
+      await task.complete("completed", { output });
     } catch (error) {
       errors.assertError(error);
       await task.complete("failed", {
-        error: {name: error.name, message: error.message}
+        error: { name: error.name, message: error.message }
       });
     }
   }
@@ -2540,7 +2664,7 @@ class CatalogEntityClient {
     this.catalogClient = catalogClient;
   }
   async findTemplate(templateName, options) {
-    const {items: templates} = await this.catalogClient.getEntities({
+    const { items: templates } = await this.catalogClient.getEntities({
      filter: {
        kind: "template",
        "metadata.name": templateName
@@ -2559,11 +2683,11 @@ class CatalogEntityClient {
 
 async function getWorkingDirectory(config, logger) {
   if (!config.has("backend.workingDirectory")) {
-    return os__default[
+    return os__default["default"].tmpdir();
   }
   const workingDirectory = config.getString("backend.workingDirectory");
   try {
-    await fs__default[
+    await fs__default["default"].access(workingDirectory, fs__default["default"].constants.F_OK | fs__default["default"].constants.W_OK);
     logger.info(`using working directory: ${workingDirectory}`);
   } catch (err) {
     errors.assertError(err);
@@ -2581,7 +2705,7 @@ function getEntityBaseUrl(entity) {
   if (!location) {
     return void 0;
   }
-  const {type, target} = catalogModel.parseLocationReference(location);
+  const { type, target } = catalogModel.parseLocationReference(location);
   if (type === "url") {
     return target;
   } else if (type === "file") {
@@ -2594,8 +2718,8 @@ function isSupportedTemplate(entity) {
   return entity.apiVersion === "backstage.io/v1beta2" || entity.apiVersion === "scaffolder.backstage.io/v1beta3";
 }
 async function createRouter(options) {
-  const router = Router__default[
-  router.use(express__default[
+  const router = Router__default["default"]();
+  router.use(express__default["default"].json());
   const {
     logger: parentLogger,
     config,
@@ -2606,7 +2730,7 @@ async function createRouter(options) {
     containerRunner,
     taskWorkers
   } = options;
-  const logger = parentLogger.child({plugin: "scaffolder"});
+  const logger = parentLogger.child({ plugin: "scaffolder" });
   const workingDirectory = await getWorkingDirectory(config, logger);
   const entityClient = new CatalogEntityClient(catalogClient);
   const integrations = integration.ScmIntegrations.fromConfig(config);
@@ -2642,7 +2766,7 @@ async function createRouter(options) {
   workers.forEach((worker) => worker.start());
   router.get("/v2/templates/:namespace/:kind/:name/parameter-schema", async (req, res) => {
     var _a, _b;
-    const {namespace, kind, name} = req.params;
+    const { namespace, kind, name } = req.params;
     if (namespace !== "default") {
       throw new errors.InputError(`Invalid namespace, only 'default' namespace is supported`);
     }
@@ -2689,7 +2813,7 @@ async function createRouter(options) {
     for (const parameters of [(_a = template.spec.parameters) != null ? _a : []].flat()) {
       const result2 = jsonschema.validate(values, parameters);
       if (!result2.valid) {
-        res.status(400).json({errors: result2.errors});
+        res.status(400).json({ errors: result2.errors });
         return;
       }
     }
@@ -2707,7 +2831,7 @@ async function createRouter(options) {
         };
       }),
       output: (_b = template.spec.output) != null ? _b : {},
-      metadata: {name: (_c = template.metadata) == null ? void 0 : _c.name}
+      metadata: { name: (_c = template.metadata) == null ? void 0 : _c.name }
     } : {
       apiVersion: template.apiVersion,
       baseUrl,
@@ -2721,7 +2845,7 @@ async function createRouter(options) {
       };
     }),
     output: (_d = template.spec.output) != null ? _d : {},
-    metadata: {name: (_e = template.metadata) == null ? void 0 : _e.name}
+    metadata: { name: (_e = template.metadata) == null ? void 0 : _e.name }
   };
 } else {
   throw new errors.InputError(`Unsupported apiVersion field in schema entity, ${template.apiVersion}`);
@@ -2729,9 +2853,9 @@ async function createRouter(options) {
   const result = await taskBroker.dispatch(taskSpec, {
     token
   });
-  res.status(201).json({id: result.taskId});
+  res.status(201).json({ id: result.taskId });
 }).get("/v2/tasks/:taskId", async (req, res) => {
-  const {taskId} = req.params;
+  const { taskId } = req.params;
   const task = await taskBroker.get(taskId);
   if (!task) {
     throw new errors.NotFoundError(`Task with id ${taskId} does not exist`);
@@ -2739,7 +2863,7 @@ async function createRouter(options) {
   delete task.secrets;
   res.status(200).json(task);
 }).get("/v2/tasks/:taskId/eventstream", async (req, res) => {
-  const {taskId} = req.params;
+  const { taskId } = req.params;
   const after = req.query.after !== void 0 ? Number(req.query.after) : void 0;
   logger.debug(`Event stream observing taskId '${taskId}' opened`);
   res.writeHead(200, {
@@ -2747,7 +2871,7 @@ async function createRouter(options) {
     "Cache-Control": "no-cache",
     "Content-Type": "text/event-stream"
   });
-  const {unsubscribe} = taskBroker.observe({taskId, after}, (error, {events}) => {
+  const { unsubscribe } = taskBroker.observe({ taskId, after }, (error, { events }) => {
    var _a;
    if (error) {
      logger.error(`Received error from event stream when observing taskId '${taskId}', ${error}`);
@@ -2771,7 +2895,7 @@ data: ${JSON.stringify(event)}
     logger.debug(`Event stream observing taskId '${taskId}' closed`);
   });
 }).get("/v2/tasks/:taskId/events", async (req, res) => {
-  const {taskId} = req.params;
+  const { taskId } = req.params;
   const after = Number(req.query.after) || void 0;
   let unsubscribe = () => {
   };
@@ -2779,7 +2903,7 @@ data: ${JSON.stringify(event)}
     unsubscribe();
     res.json([]);
   }, 3e4);
-  ({unsubscribe} = taskBroker.observe({taskId, after}, (error, {events}) => {
+  ({ unsubscribe } = taskBroker.observe({ taskId, after }, (error, { events }) => {
     clearTimeout(timeout);
     unsubscribe();
     if (error) {
@@ -2792,7 +2916,7 @@ data: ${JSON.stringify(event)}
     clearTimeout(timeout);
   });
 });
-  const app = express__default[
+  const app = express__default["default"]();
   app.set("logger", logger);
   app.use("/", router);
   return app;
@@ -2852,9 +2976,7 @@ class ScaffolderEntitiesProcessor {
 
 Object.defineProperty(exports, 'createFetchCookiecutterAction', {
   enumerable: true,
-  get: function () {
-    return pluginScaffolderBackendModuleCookiecutter.createFetchCookiecutterAction;
-  }
+  get: function () { return pluginScaffolderBackendModuleCookiecutter.createFetchCookiecutterAction; }
 });
 exports.CatalogEntityClient = CatalogEntityClient;
 exports.DatabaseTaskStore = DatabaseTaskStore;
@@ -2879,6 +3001,7 @@ exports.createPublishFileAction = createPublishFileAction;
 exports.createPublishGithubAction = createPublishGithubAction;
 exports.createPublishGithubPullRequestAction = createPublishGithubPullRequestAction;
 exports.createPublishGitlabAction = createPublishGitlabAction;
+exports.createPublishGitlabMergeRequestAction = createPublishGitlabMergeRequestAction;
 exports.createRouter = createRouter;
 exports.createTemplateAction = createTemplateAction;
 exports.fetchContents = fetchContents;