@backstage/plugin-scaffolder-backend 0.0.0-nightly-2021103021730 → 0.0.0-nightly-202111422550
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +16 -5
- package/config.d.ts +12 -12
- package/dist/index.cjs.js +145 -148
- package/dist/index.cjs.js.map +1 -1
- package/dist/index.d.ts +10 -1
- package/package.json +7 -7
package/dist/index.cjs.js
CHANGED
@@ -45,14 +45,12 @@ function _interopNamespace(e) {
         var d = Object.getOwnPropertyDescriptor(e, k);
         Object.defineProperty(n, k, d.get ? d : {
           enumerable: true,
-          get: function () {
-            return e[k];
-          }
+          get: function () { return e[k]; }
         });
       }
     });
   }
-  n[
+  n["default"] = e;
   return Object.freeze(n);
 }
 
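Nearly every hunk below is formatting-only churn from an updated build toolchain (spaces inside object braces, getters collapsed to one line); the helper above is typical. `_interopNamespace` wraps a CommonJS module as a frozen ES-module namespace, and its default-interop sibling is why the rest of the bundle reads `fs__default["default"]` and `yaml__namespace.stringify(...)`. A hedged sketch of that consumption pattern, assuming the standard Rollup-style companion helper `_interopDefaultLegacy` (the require target is illustrative, not part of this package):

// Standard Rollup-output shape for default-import interop (assumed, hedged):
function _interopDefaultLegacy(e) {
  return e && typeof e === "object" && "default" in e ? e : { "default": e };
}
const fs = require("fs-extra");
const fs__default = _interopDefaultLegacy(fs); // yields { "default": fs }
// This is why calls throughout the bundle read fs__default["default"].<method>:
fs__default["default"].ensureDirSync("/tmp/example-workspace");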
@@ -73,7 +71,7 @@ const createTemplateAction = (templateAction) => {
 };
 
 function createCatalogRegisterAction(options) {
-  const {catalogClient, integrations} = options;
+  const { catalogClient, integrations } = options;
   return createTemplateAction({
     id: "catalog:register",
     description: "Registers entities from a catalog descriptor file in the workspace into the software catalog.",
@@ -122,12 +120,12 @@ function createCatalogRegisterAction(options) {
     },
     async handler(ctx) {
       var _a;
-      const {input} = ctx;
+      const { input } = ctx;
       let catalogInfoUrl;
       if ("catalogInfoUrl" in input) {
         catalogInfoUrl = input.catalogInfoUrl;
       } else {
-        const {repoContentsUrl, catalogInfoPath = "/catalog-info.yaml"} = input;
+        const { repoContentsUrl, catalogInfoPath = "/catalog-info.yaml" } = input;
         const integration = integrations.byUrl(repoContentsUrl);
         if (!integration) {
           throw new errors.InputError(`No integration found for host ${repoContentsUrl}`);
@@ -141,15 +139,15 @@ function createCatalogRegisterAction(options) {
       await catalogClient.addLocation({
         type: "url",
         target: catalogInfoUrl
-      }, ctx.token ? {token: ctx.token} : {});
+      }, ctx.token ? { token: ctx.token } : {});
       try {
         const result = await catalogClient.addLocation({
           dryRun: true,
           type: "url",
           target: catalogInfoUrl
-        }, ctx.token ? {token: ctx.token} : {});
+        }, ctx.token ? { token: ctx.token } : {});
         if (result.entities.length > 0) {
-          const {entities} = result;
+          const { entities } = result;
           const entity = (_a = entities.find((e) => !e.metadata.name.startsWith("generated-"))) != null ? _a : entities[0];
           ctx.output("entityRef", catalogModel.stringifyEntityRef(entity));
         }
@@ -181,8 +179,8 @@ function createCatalogWriteAction() {
     },
     async handler(ctx) {
       ctx.logStream.write(`Writing catalog-info.yaml`);
-      const {entity} = ctx.input;
-      await fs__default[
+      const { entity } = ctx.input;
+      await fs__default["default"].writeFile(backendCommon.resolveSafeChildPath(ctx.workspacePath, "catalog-info.yaml"), yaml__namespace.stringify(entity));
     }
   });
 }
@@ -250,8 +248,8 @@ async function fetchContents({
   }
   if (!fetchUrlIsAbsolute && (baseUrl == null ? void 0 : baseUrl.startsWith("file://"))) {
     const basePath = baseUrl.slice("file://".length);
-    const srcDir = backendCommon.resolveSafeChildPath(path__default[
-    await fs__default[
+    const srcDir = backendCommon.resolveSafeChildPath(path__default["default"].dirname(basePath), fetchUrl);
+    await fs__default["default"].copy(srcDir, outputPath);
   } else {
     let readUrl;
     if (fetchUrlIsAbsolute) {
@@ -269,13 +267,13 @@ async function fetchContents({
       throw new errors.InputError(`Failed to fetch, template location could not be determined and the fetch URL is relative, ${fetchUrl}`);
     }
     const res = await reader.readTree(readUrl);
-    await fs__default[
-    await res.dir({targetDir: outputPath});
+    await fs__default["default"].ensureDir(outputPath);
+    await res.dir({ targetDir: outputPath });
   }
 }
 
 function createFetchPlainAction(options) {
-  const {reader, integrations} = options;
+  const { reader, integrations } = options;
   return createTemplateAction({
     id: "fetch:plain",
     description: "Downloads content and places it in the workspace, or optionally in a subdirectory specified by the 'targetPath' input option.",
@@ -382,15 +380,15 @@ const { render, renderCompat } = (() => {
 `;
 class SecureTemplater {
   static async loadRenderer(options = {}) {
-    const {parseRepoUrl, cookiecutterCompat} = options;
+    const { parseRepoUrl, cookiecutterCompat } = options;
     let sandbox = void 0;
     if (parseRepoUrl) {
       sandbox = {
         parseRepoUrl: (url) => JSON.stringify(parseRepoUrl(url))
       };
     }
-    const vm = new vm2.VM({sandbox});
-    const nunjucksSource = await fs__default[
+    const vm = new vm2.VM({ sandbox });
+    const nunjucksSource = await fs__default["default"].readFile(backendCommon.resolvePackagePath("@backstage/plugin-scaffolder-backend", "assets/nunjucks.js.txt"), "utf-8");
     vm.run(mkScript(nunjucksSource));
     const render = (template, values) => {
       if (!vm) {
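The `SecureTemplater` hunk above shows the sandboxing approach: template rendering runs inside a vm2 VM, and only the helpers explicitly placed on `sandbox` are reachable from template code. A minimal sketch of the same pattern using vm2's documented API (the sandboxed function body is a stand-in, not the package's real `parseRepoUrl`):

const { VM } = require("vm2");

// Only whitelisted values exist inside the VM; require(), process, and the
// host scope are not reachable from the evaluated code.
const vm = new VM({
  sandbox: {
    parseRepoUrl: (url) => JSON.stringify({ repo: url }), // stand-in helper
  },
});

console.log(vm.run(`parseRepoUrl("my-repo")`)); // -> {"repo":"my-repo"}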
@@ -408,7 +406,7 @@ class SecureTemplater {
 }
 
 function createFetchTemplateAction(options) {
-  const {reader, integrations} = options;
+  const { reader, integrations } = options;
   return createTemplateAction({
     id: "fetch:template",
     description: "Downloads a skeleton, templates variables into file and directory names and content, and places the result in the workspace, or optionally in a subdirectory specified by the 'targetPath' input option.",
@@ -481,19 +479,19 @@ function createFetchTemplateAction(options) {
         outputPath: templateDir
       });
       ctx.logger.info("Listing files and directories in template");
-      const allEntriesInTemplate = await globby__default[
+      const allEntriesInTemplate = await globby__default["default"](`**/*`, {
         cwd: templateDir,
         dot: true,
         onlyFiles: false,
         markDirectories: true
       });
-      const nonTemplatedEntries = new Set((await Promise.all((ctx.input.copyWithoutRender || []).map((pattern) => globby__default[
+      const nonTemplatedEntries = new Set((await Promise.all((ctx.input.copyWithoutRender || []).map((pattern) => globby__default["default"](pattern, {
         cwd: templateDir,
         dot: true,
         onlyFiles: false,
         markDirectories: true
       })))).flat());
-      const {cookiecutterCompat, values} = ctx.input;
+      const { cookiecutterCompat, values } = ctx.input;
       const context = {
         [cookiecutterCompat ? "cookiecutter" : "values"]: values
       };
@@ -526,17 +524,17 @@ function createFetchTemplateAction(options) {
         }
         if (location.endsWith("/")) {
          ctx.logger.info(`Writing directory ${location} to template output path.`);
-          await fs__default[
+          await fs__default["default"].ensureDir(outputPath);
         } else {
           const inputFilePath = backendCommon.resolveSafeChildPath(templateDir, location);
           if (await isbinaryfile.isBinaryFile(inputFilePath)) {
             ctx.logger.info(`Copying binary file ${location} to template output path.`);
-            await fs__default[
+            await fs__default["default"].copy(inputFilePath, outputPath);
           } else {
-            const statsObj = await fs__default[
+            const statsObj = await fs__default["default"].stat(inputFilePath);
             ctx.logger.info(`Writing file ${location} to template output path with mode ${statsObj.mode}.`);
-            const inputFileContents = await fs__default[
-            await fs__default[
+            const inputFileContents = await fs__default["default"].readFile(inputFilePath, "utf-8");
+            await fs__default["default"].outputFile(outputPath, renderContents ? renderTemplate(inputFileContents, context) : inputFileContents, { mode: statsObj.mode });
           }
         }
       }
@@ -573,7 +571,7 @@ const createFilesystemDeleteAction = () => {
       for (const file of ctx.input.files) {
         const filepath = backendCommon.resolveSafeChildPath(ctx.workspacePath, file);
         try {
-          await fs__default[
+          await fs__default["default"].remove(filepath);
           ctx.logger.info(`File ${filepath} deleted successfully`);
         } catch (err) {
           ctx.logger.error(`Failed to delete file ${filepath}:`, err);
@@ -631,7 +629,7 @@ const createFilesystemRenameAction = () => {
         const sourceFilepath = backendCommon.resolveSafeChildPath(ctx.workspacePath, file.from);
         const destFilepath = backendCommon.resolveSafeChildPath(ctx.workspacePath, file.to);
         try {
-          await fs__default[
+          await fs__default["default"].move(sourceFilepath, destFilepath, {
            overwrite: (_b = file.overwrite) != null ? _b : false
          });
          ctx.logger.info(`File ${sourceFilepath} renamed to ${destFilepath} successfully`);
@@ -647,10 +645,11 @@ const createFilesystemRenameAction = () => {
 const runCommand = async ({
   command,
   args,
-  logStream = new stream.PassThrough()
+  logStream = new stream.PassThrough(),
+  options
 }) => {
   await new Promise((resolve, reject) => {
-    const process = child_process.spawn(command, args);
+    const process = child_process.spawn(command, args, options);
     process.stdout.on("data", (stream) => {
       logStream.write(stream);
     });
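The hunk above is one of the few behavioral changes in this diff: `runCommand` gains an `options` field that is forwarded verbatim as the third argument of `child_process.spawn`, so callers can now set spawn options such as `cwd` or `env`. A hedged usage sketch (`runCommand` is an internal helper of this bundle, and the argument values are illustrative):

const { PassThrough } = require("stream");

// Inside an async function:
await runCommand({
  command: "git",
  args: ["status"],
  logStream: new PassThrough(),
  // New in this version: spawn options pass straight through, e.g. to run
  // the command from the task workspace directory.
  options: { cwd: "/tmp/scaffolder-workspace" },
});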
@@ -687,7 +686,7 @@ async function initRepoAndPush({
     dir,
     defaultBranch
   });
-  await git.add({dir, filepath: "."});
+  await git.add({ dir, filepath: "." });
   const authorInfo = {
     name: (_a = gitAuthorInfo == null ? void 0 : gitAuthorInfo.name) != null ? _a : "Scaffolder",
     email: (_b = gitAuthorInfo == null ? void 0 : gitAuthorInfo.email) != null ? _b : "scaffolder@backstage.io"
@@ -725,7 +724,7 @@ const enableBranchProtectionOnDefaultRepoBranch = async ({
     owner,
     repo: repoName,
     branch: defaultBranch,
-    required_status_checks: {strict: true, contexts: []},
+    required_status_checks: { strict: true, contexts: [] },
     restrictions: null,
     enforce_admins: true,
     required_pull_request_reviews: {
@@ -799,7 +798,7 @@ const parseRepoUrl = (repoUrl, integrations) => {
   if (!repo) {
     throw new errors.InputError(`Invalid repo URL passed to publisher: ${repoUrl}, missing repo`);
   }
-  return {host, owner, repo, organization, workspace, project};
+  return { host, owner, repo, organization, workspace, project };
 };
 const isExecutable = (fileMode) => {
   const executeBitMask = 73;
@@ -808,7 +807,7 @@ const isExecutable = (fileMode) => {
 };
 
 function createPublishAzureAction(options) {
-  const {integrations, config} = options;
+  const { integrations, config } = options;
   return createTemplateAction({
     id: "publish:azure",
     description: "Initializes a git repository of the content in the workspace, and publishes it to Azure.",
@@ -851,8 +850,8 @@ function createPublishAzureAction(options) {
       }
     },
     async handler(ctx) {
-      const {repoUrl, defaultBranch = "master"} = ctx.input;
-      const {owner, repo, host, organization} = parseRepoUrl(repoUrl, integrations);
+      const { repoUrl, defaultBranch = "master" } = ctx.input;
+      const { owner, repo, host, organization } = parseRepoUrl(repoUrl, integrations);
       if (!organization) {
         throw new errors.InputError(`Invalid URL provider was included in the repo URL to create ${ctx.input.repoUrl}, missing organization`);
       }
@@ -866,7 +865,7 @@ function createPublishAzureAction(options) {
       const authHandler = azureDevopsNodeApi.getPersonalAccessTokenHandler(integrationConfig.config.token);
       const webApi = new azureDevopsNodeApi.WebApi(`https://${host}/${organization}`, authHandler);
       const client = await webApi.getGitApi();
-      const createOptions = {name: repo};
+      const createOptions = { name: repo };
       const returnedRepo = await client.createRepository(createOptions, owner);
       if (!returnedRepo) {
         throw new errors.InputError(`Unable to create the repository with Organization ${organization}, Project ${owner} and Repo ${repo}.
@@ -914,7 +913,7 @@ const createBitbucketCloudRepository = async (opts) => {
       scm: "git",
       description,
       is_private: repoVisibility === "private",
-      project: {key: project}
+      project: { key: project }
     }),
     headers: {
       Authorization: authorization,
@@ -923,7 +922,7 @@ const createBitbucketCloudRepository = async (opts) => {
   };
   let response;
   try {
-    response = await fetch__default[
+    response = await fetch__default["default"](`https://api.bitbucket.org/2.0/repositories/${workspace}/${repo}`, options);
   } catch (e) {
     throw new Error(`Unable to create repository, ${e}`);
   }
@@ -938,7 +937,7 @@ const createBitbucketCloudRepository = async (opts) => {
     }
   }
   const repoContentsUrl = `${r.links.html.href}/src/master`;
-  return {remoteUrl, repoContentsUrl};
+  return { remoteUrl, repoContentsUrl };
 };
 const createBitbucketServerRepository = async (opts) => {
   const {
@@ -965,7 +964,7 @@ const createBitbucketServerRepository = async (opts) => {
   };
   try {
     const baseUrl = apiBaseUrl ? apiBaseUrl : `https://${host}/rest/api/1.0`;
-    response = await fetch__default[
+    response = await fetch__default["default"](`${baseUrl}/projects/${project}/repos`, options);
   } catch (e) {
     throw new Error(`Unable to create repository, ${e}`);
   }
@@ -980,7 +979,7 @@ const createBitbucketServerRepository = async (opts) => {
     }
   }
   const repoContentsUrl = `${r.links.self[0].href}`;
-  return {remoteUrl, repoContentsUrl};
+  return { remoteUrl, repoContentsUrl };
 };
 const getAuthorizationHeader = (config) => {
   if (config.username && config.appPassword) {
@@ -993,19 +992,19 @@ const getAuthorizationHeader = (config) => {
   throw new Error(`Authorization has not been provided for Bitbucket. Please add either username + appPassword or token to the Integrations config`);
 };
 const performEnableLFS = async (opts) => {
-  const {authorization, host, project, repo} = opts;
+  const { authorization, host, project, repo } = opts;
   const options = {
     method: "PUT",
     headers: {
       Authorization: authorization
     }
   };
-  const {ok, status, statusText} = await fetch__default[
+  const { ok, status, statusText } = await fetch__default["default"](`https://${host}/rest/git-lfs/admin/projects/${project}/repos/${repo}/enabled`, options);
   if (!ok)
     throw new Error(`Failed to enable LFS in the repository, ${status}: ${statusText}`);
 };
 function createPublishBitbucketAction(options) {
-  const {integrations, config} = options;
+  const { integrations, config } = options;
   return createTemplateAction({
     id: "publish:bitbucket",
     description: "Initializes a git repository of the content in the workspace, and publishes it to Bitbucket.",
@@ -1065,7 +1064,7 @@ function createPublishBitbucketAction(options) {
         repoVisibility = "private",
         enableLFS = false
       } = ctx.input;
-      const {workspace, project, repo, host} = parseRepoUrl(repoUrl, integrations);
+      const { workspace, project, repo, host } = parseRepoUrl(repoUrl, integrations);
       if (host === "bitbucket.org") {
         if (!workspace) {
           throw new errors.InputError(`Invalid URL provider was included in the repo URL to create ${ctx.input.repoUrl}, missing workspace`);
@@ -1081,7 +1080,7 @@ function createPublishBitbucketAction(options) {
       const authorization = getAuthorizationHeader(integrationConfig.config);
       const apiBaseUrl = integrationConfig.config.apiBaseUrl;
       const createMethod = host === "bitbucket.org" ? createBitbucketCloudRepository : createBitbucketServerRepository;
-      const {remoteUrl, repoContentsUrl} = await createMethod({
+      const { remoteUrl, repoContentsUrl } = await createMethod({
        authorization,
        host,
        workspace: workspace || "",
@@ -1108,7 +1107,7 @@ function createPublishBitbucketAction(options) {
         gitAuthorInfo
       });
       if (enableLFS && host !== "bitbucket.org") {
-        await performEnableLFS({authorization, host, project, repo});
+        await performEnableLFS({ authorization, host, project, repo });
       }
       ctx.output("remoteUrl", remoteUrl);
       ctx.output("repoContentsUrl", repoContentsUrl);
@@ -1133,13 +1132,13 @@ function createPublishFileAction() {
       }
     },
     async handler(ctx) {
-      const {path: path$1} = ctx.input;
-      const exists = await fs__default[
+      const { path: path$1 } = ctx.input;
+      const exists = await fs__default["default"].pathExists(path$1);
       if (exists) {
         throw new errors.InputError("Output path already exists");
       }
-      await fs__default[
-      await fs__default[
+      await fs__default["default"].ensureDir(path.dirname(path$1));
+      await fs__default["default"].copy(ctx.workspacePath, path$1);
     }
   });
 }
@@ -1154,7 +1153,7 @@ class OctokitProvider {
   }
   async getOctokit(repoUrl) {
     var _a;
-    const {owner, repo, host} = parseRepoUrl(repoUrl, this.integrations);
+    const { owner, repo, host } = parseRepoUrl(repoUrl, this.integrations);
     if (!owner) {
       throw new errors.InputError(`No owner provided for repo ${repoUrl}`);
     }
@@ -1166,7 +1165,7 @@ class OctokitProvider {
     if (!credentialsProvider) {
       throw new errors.InputError(`No matching credentials for host ${host}, please check your integrations config`);
     }
-    const {token} = await credentialsProvider.getCredentials({
+    const { token } = await credentialsProvider.getCredentials({
       url: `https://${host}/${encodeURIComponent(owner)}/${encodeURIComponent(repo)}`
     });
     if (!token) {
@@ -1177,12 +1176,12 @@ class OctokitProvider {
       baseUrl: integrationConfig.apiBaseUrl,
       previews: ["nebula-preview"]
     });
-    return {client, token, owner, repo};
+    return { client, token, owner, repo };
   }
 }
 
 function createPublishGithubAction(options) {
-  const {integrations, config} = options;
+  const { integrations, config } = options;
   const octokitProvider = new OctokitProvider(integrations);
   return createTemplateAction({
     id: "publish:github",
@@ -1278,7 +1277,7 @@ function createPublishGithubAction(options) {
         collaborators,
         topics
       } = ctx.input;
-      const {client, token, owner, repo} = await octokitProvider.getOctokit(repoUrl);
+      const { client, token, owner, repo } = await octokitProvider.getOctokit(repoUrl);
       const user = await client.users.getByUsername({
         username: owner
       });
@@ -1293,7 +1292,7 @@ function createPublishGithubAction(options) {
         private: repoVisibility === "private",
         description
       });
-      const {data: newRepo} = await repoCreationPromise;
+      const { data: newRepo } = await repoCreationPromise;
       if (access == null ? void 0 : access.startsWith(`${owner}/`)) {
         const [, team] = access.split("/");
         await client.teams.addOrUpdateRepoPermissionsInOrg({
@@ -1396,7 +1395,7 @@ const defaultClientFactory = async ({
   if (!credentialsProvider) {
     throw new errors.InputError(`No matching credentials for host ${host}, please check your integrations config`);
   }
-  const {token} = await credentialsProvider.getCredentials({
+  const { token } = await credentialsProvider.getCredentials({
     url: `https://${host}/${encodeURIComponent(owner)}/${encodeURIComponent(repo)}`
   });
   if (!token) {
@@ -1472,21 +1471,21 @@ const createPublishGithubPullRequestAction = ({
       targetPath,
       sourcePath
     } = ctx.input;
-    const {owner, repo, host} = parseRepoUrl(repoUrl, integrations);
+    const { owner, repo, host } = parseRepoUrl(repoUrl, integrations);
     if (!owner) {
       throw new errors.InputError(`No owner provided for host: ${host}, and repo ${repo}`);
     }
-    const client = await clientFactory({integrations, host, owner, repo});
+    const client = await clientFactory({ integrations, host, owner, repo });
     const fileRoot = sourcePath ? backendCommon.resolveSafeChildPath(ctx.workspacePath, sourcePath) : ctx.workspacePath;
-    const localFilePaths = await globby__default[
+    const localFilePaths = await globby__default["default"](["./**", "./**/.*", "!.git"], {
      cwd: fileRoot,
      gitignore: true,
      dot: true
    });
    const fileContents = await Promise.all(localFilePaths.map((filePath) => {
      const absPath = backendCommon.resolveSafeChildPath(fileRoot, filePath);
-      const base64EncodedContent = fs__default[
-      const fileStat = fs__default[
+      const base64EncodedContent = fs__default["default"].readFileSync(absPath).toString("base64");
+      const fileStat = fs__default["default"].statSync(absPath);
      const githubTreeItemMode = isExecutable(fileStat.mode) ? "100755" : "100644";
      const encoding = "base64";
      return {
@@ -1525,7 +1524,7 @@ const createPublishGithubPullRequestAction = ({
 };
 
 function createPublishGitlabAction(options) {
-  const {integrations, config} = options;
+  const { integrations, config } = options;
   return createTemplateAction({
     id: "publish:gitlab",
     description: "Initializes a git repository of the content in the workspace, and publishes it to GitLab.",
@@ -1574,7 +1573,7 @@ function createPublishGitlabAction(options) {
         repoVisibility = "private",
         defaultBranch = "master"
       } = ctx.input;
-      const {owner, repo, host} = parseRepoUrl(repoUrl, integrations);
+      const { owner, repo, host } = parseRepoUrl(repoUrl, integrations);
       if (!owner) {
         throw new errors.InputError(`No owner provided for host: ${host}, and repo ${repo}`);
       }
@@ -1589,12 +1588,12 @@ function createPublishGitlabAction(options) {
         host: integrationConfig.config.baseUrl,
         token: integrationConfig.config.token
       });
-      let {id: targetNamespace} = await client.Namespaces.show(owner);
+      let { id: targetNamespace } = await client.Namespaces.show(owner);
       if (!targetNamespace) {
-        const {id} = await client.Users.current();
+        const { id } = await client.Users.current();
         targetNamespace = id;
       }
-      const {http_url_to_repo} = await client.Projects.create({
+      const { http_url_to_repo } = await client.Projects.create({
        namespace_id: targetNamespace,
        name: repo,
        visibility: repoVisibility
@@ -1624,7 +1623,7 @@ function createPublishGitlabAction(options) {
 }
 
 function createGithubActionsDispatchAction(options) {
-  const {integrations} = options;
+  const { integrations } = options;
   const octokitProvider = new OctokitProvider(integrations);
   return createTemplateAction({
     id: "github:actions:dispatch",
@@ -1653,9 +1652,9 @@ function createGithubActionsDispatchAction(options) {
       }
     },
     async handler(ctx) {
-      const {repoUrl, workflowId, branchOrTagName} = ctx.input;
+      const { repoUrl, workflowId, branchOrTagName } = ctx.input;
       ctx.logger.info(`Dispatching workflow ${workflowId} for repo ${repoUrl} on ${branchOrTagName}`);
-      const {client, owner, repo} = await octokitProvider.getOctokit(repoUrl);
+      const { client, owner, repo } = await octokitProvider.getOctokit(repoUrl);
       await client.rest.actions.createWorkflowDispatch({
         owner,
         repo,
@@ -1668,7 +1667,7 @@ function createGithubActionsDispatchAction(options) {
 }
 
 function createGithubWebhookAction(options) {
-  const {integrations, defaultWebhookSecret} = options;
+  const { integrations, defaultWebhookSecret } = options;
   const octokitProvider = new OctokitProvider(integrations);
   const eventNames = webhooks.emitterEventNames.filter((event) => !event.includes("."));
   return createTemplateAction({
@@ -1743,7 +1742,7 @@ function createGithubWebhookAction(options) {
         insecureSsl = false
       } = ctx.input;
       ctx.logger.info(`Creating webhook ${webhookUrl} for repo ${repoUrl}`);
-      const {client, owner, repo} = await octokitProvider.getOctokit(repoUrl);
+      const { client, owner, repo } = await octokitProvider.getOctokit(repoUrl);
       try {
         const insecure_ssl = insecureSsl ? "1" : "0";
         await client.repos.createWebhook({
@@ -1768,7 +1767,7 @@ function createGithubWebhookAction(options) {
 }
 
 const createBuiltinActions = (options) => {
-  const {reader, integrations, containerRunner, catalogClient, config} = options;
+  const { reader, integrations, containerRunner, catalogClient, config } = options;
   return [
     createFetchPlainAction({
       reader,
@@ -1803,7 +1802,7 @@ const createBuiltinActions = (options) => {
       config
     }),
     createDebugLogAction(),
-    createCatalogRegisterAction({catalogClient, integrations}),
+    createCatalogRegisterAction({ catalogClient, integrations }),
     createCatalogWriteAction(),
     createFilesystemDeleteAction(),
     createFilesystemRenameAction(),
@@ -1818,7 +1817,7 @@ const createBuiltinActions = (options) => {
 
 class TemplateActionRegistry {
   constructor() {
-    this.actions = new Map();
+    this.actions = /* @__PURE__ */ new Map();
   }
   register(action) {
     if (this.actions.has(action.id)) {
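Besides spacing, the only change in the `TemplateActionRegistry` hunk is the `/* @__PURE__ */` annotation in front of `new Map()`, which newer build tooling emits automatically. The annotation tells tree-shaking minifiers that the annotated call or constructor expression has no side effects and may be deleted if its result is never used. A small illustration (variable names are made up):

const used = /* @__PURE__ */ new Map();   // kept: referenced below
const unused = /* @__PURE__ */ new Map(); // a minifier may drop this whole line
console.log(used.size);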
@@ -1850,7 +1849,7 @@ class DatabaseTaskStore {
     this.db = options.database;
   }
   async getTask(taskId) {
-    const [result] = await this.db("tasks").where({id: taskId}).select();
+    const [result] = await this.db("tasks").where({ id: taskId }).select();
     if (!result) {
       throw new errors.NotFoundError(`No task with id '${taskId}' found`);
     }
@@ -1877,7 +1876,7 @@ class DatabaseTaskStore {
       secrets: secrets ? JSON.stringify(secrets) : void 0,
       status: "open"
     });
-    return {taskId};
+    return { taskId };
   }
   async claimTask() {
     return this.db.transaction(async (tx) => {
@@ -1887,7 +1886,7 @@ class DatabaseTaskStore {
       if (!task) {
         return void 0;
       }
-      const updateCount = await tx("tasks").where({id: task.id, status: "open"}).update({
+      const updateCount = await tx("tasks").where({ id: task.id, status: "open" }).update({
        status: "processing",
        last_heartbeat_at: this.db.fn.now()
      });
@@ -1911,14 +1910,14 @@ class DatabaseTaskStore {
     });
   }
   async heartbeatTask(taskId) {
-    const updateCount = await this.db("tasks").where({id: taskId, status: "processing"}).update({
+    const updateCount = await this.db("tasks").where({ id: taskId, status: "processing" }).update({
      last_heartbeat_at: this.db.fn.now()
    });
    if (updateCount === 0) {
      throw new errors.ConflictError(`No running task with taskId ${taskId} found`);
    }
  }
-  async listStaleTasks({timeoutS}) {
+  async listStaleTasks({ timeoutS }) {
    const rawRows = await this.db("tasks").where("status", "processing").andWhere("last_heartbeat_at", "<=", this.db.client.config.client === "sqlite3" ? this.db.raw(`datetime('now', ?)`, [`-${timeoutS} seconds`]) : this.db.raw(`dateadd('second', ?, ?)`, [
      `-${timeoutS}`,
      this.db.fn.now()
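The `DatabaseTaskStore` hunks above touch the worker-coordination queries: a task is claimed with a conditional update (only rows still in status "open" flip to "processing"), and `heartbeatTask` plus `listStaleTasks` use `last_heartbeat_at` to detect workers that died mid-task. The claim step in isolation, using the same Knex calls and table shape shown in the diff (a sketch, not package code):

// Returns the number of rows updated; 0 means another worker already
// claimed this task, so the caller should move on.
const updateCount = await db("tasks")
  .where({ id: task.id, status: "open" })
  .update({ status: "processing", last_heartbeat_at: db.fn.now() });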
@@ -1926,7 +1925,7 @@ class DatabaseTaskStore {
     const tasks = rawRows.map((row) => ({
       taskId: row.id
     }));
-    return {tasks};
+    return { tasks };
   }
   async completeTask({
     taskId,
@@ -1966,7 +1965,7 @@ class DatabaseTaskStore {
       });
     });
   }
-  async emitLogEvent({taskId, body}) {
+  async emitLogEvent({ taskId, body }) {
    const serliazedBody = JSON.stringify(body);
    await this.db("task_events").insert({
      task_id: taskId,
@@ -1993,13 +1992,13 @@ class DatabaseTaskStore {
           taskId,
           body,
           type: event.event_type,
-          createdAt: typeof event.created_at === "string" ? luxon.DateTime.fromSQL(event.created_at, {zone: "UTC"}).toISO() : event.created_at
+          createdAt: typeof event.created_at === "string" ? luxon.DateTime.fromSQL(event.created_at, { zone: "UTC" }).toISO() : event.created_at
         };
       } catch (error) {
         throw new Error(`Failed to parse event body from event taskId=${taskId} id=${event.id}, ${error}`);
       }
     });
-    return {events};
+    return { events };
   }
 }
 
@@ -2030,7 +2029,7 @@ class TaskManager {
   async emitLog(message, metadata) {
     await this.storage.emitLogEvent({
       taskId: this.state.taskId,
-      body: {message, ...metadata}
+      body: { message, ...metadata }
     });
   }
   async complete(result, metadata) {
@@ -2065,7 +2064,7 @@ function defer() {
   const promise = new Promise((_resolve) => {
     resolve = _resolve;
   });
-  return {promise, resolve};
+  return { promise, resolve };
 }
 class StorageTaskBroker {
   constructor(storage, logger) {
@@ -2097,7 +2096,7 @@ class StorageTaskBroker {
     return this.storage.getTask(taskId);
   }
   observe(options, callback) {
-    const {taskId} = options;
+    const { taskId } = options;
     let cancelled = false;
     const unsubscribe = () => {
       cancelled = true;
@@ -2105,24 +2104,24 @@ class StorageTaskBroker {
     (async () => {
       let after = options.after;
       while (!cancelled) {
-        const result = await this.storage.listEvents({taskId, after});
-        const {events} = result;
+        const result = await this.storage.listEvents({ taskId, after });
+        const { events } = result;
         if (events.length) {
           after = events[events.length - 1].id;
           try {
             callback(void 0, result);
           } catch (error) {
             errors.assertError(error);
-            callback(error, {events: []});
+            callback(error, { events: [] });
           }
         }
         await new Promise((resolve) => setTimeout(resolve, 1e3));
       }
     })();
-    return {unsubscribe};
+    return { unsubscribe };
   }
   async vacuumTasks(timeoutS) {
-    const {tasks} = await this.storage.listStaleTasks(timeoutS);
+    const { tasks } = await this.storage.listStaleTasks(timeoutS);
    await Promise.all(tasks.map(async (task) => {
      try {
        await this.storage.completeTask({
@@ -2159,7 +2158,7 @@ class HandlebarsWorkflowRunner {
       return JSON.stringify(parseRepoUrl(repoUrl, this.options.integrations));
     });
     this.handlebars.registerHelper("projectSlug", (repoUrl) => {
-      const {owner, repo} = parseRepoUrl(repoUrl, this.options.integrations);
+      const { owner, repo } = parseRepoUrl(repoUrl, this.options.integrations);
      return `${owner}/${repo}`;
    });
    this.handlebars.registerHelper("json", (obj) => JSON.stringify(obj));
@@ -2171,14 +2170,14 @@ class HandlebarsWorkflowRunner {
     if (!isValidTaskSpec$1(task.spec)) {
       throw new errors.InputError(`Task spec is not a valid v1beta2 task spec`);
     }
-    const {actionRegistry} = this.options;
-    const workspacePath = path__default[
+    const { actionRegistry } = this.options;
+    const workspacePath = path__default["default"].join(this.options.workingDirectory, await task.getWorkspaceName());
     try {
-      await fs__default[
+      await fs__default["default"].ensureDir(workspacePath);
       await task.emitLog(`Starting up task with ${task.spec.steps.length} steps`);
-      const templateCtx = {parameters: task.spec.values, steps: {}};
+      const templateCtx = { parameters: task.spec.values, steps: {} };
       for (const step of task.spec.steps) {
-        const metadata = {stepId: step.id};
+        const metadata = { stepId: step.id };
        try {
          const taskLogger = winston__namespace.createLogger({
            level: process.env.LOG_LEVEL || "info",
@@ -2192,7 +2191,7 @@ class HandlebarsWorkflowRunner {
             await task.emitLog(message, metadata);
           }
         });
-        taskLogger.add(new winston__namespace.transports.Stream({stream: stream$1}));
+        taskLogger.add(new winston__namespace.transports.Stream({ stream: stream$1 }));
        if (step.if !== void 0) {
          let skip = !step.if;
          if (typeof step.if === "string") {
@@ -2273,7 +2272,7 @@ class HandlebarsWorkflowRunner {
             token: (_b = task.secrets) == null ? void 0 : _b.token,
             workspacePath,
             async createTemporaryDirectory() {
-              const tmpDir = await fs__default[
+              const tmpDir = await fs__default["default"].mkdtemp(`${workspacePath}_step-${step.id}-`);
              tmpDirs.push(tmpDir);
              return tmpDir;
            },
@@ -2283,9 +2282,9 @@ class HandlebarsWorkflowRunner {
             metadata: task.spec.metadata
           });
           for (const tmpDir of tmpDirs) {
-            await fs__default[
+            await fs__default["default"].remove(tmpDir);
           }
-          templateCtx.steps[step.id] = {output: stepOutputs};
+          templateCtx.steps[step.id] = { output: stepOutputs };
           await task.emitLog(`Finished step ${step.name}`, {
             ...metadata,
             status: "completed"
@@ -2319,10 +2318,10 @@ class HandlebarsWorkflowRunner {
         }
         return value;
       });
-      return {output};
+      return { output };
     } finally {
       if (workspacePath) {
-        await fs__default[
+        await fs__default["default"].remove(workspacePath);
       }
     }
   }
@@ -2335,7 +2334,7 @@ const createStepLogger = ({
   task,
   step
 }) => {
-  const metadata = {stepId: step.id};
+  const metadata = { stepId: step.id };
   const taskLogger = winston__namespace.createLogger({
     level: process.env.LOG_LEVEL || "info",
     format: winston__namespace.format.combine(winston__namespace.format.colorize(), winston__namespace.format.timestamp(), winston__namespace.format.simple()),
@@ -2348,8 +2347,8 @@ const createStepLogger = ({
       await task.emitLog(message, metadata);
     }
   });
-  taskLogger.add(new winston__namespace.transports.Stream({stream: streamLogger}));
-  return {taskLogger, streamLogger};
+  taskLogger.add(new winston__namespace.transports.Stream({ stream: streamLogger }));
+  return { taskLogger, streamLogger };
 };
 class NunjucksWorkflowRunner {
   constructor(options) {
@@ -2357,7 +2356,7 @@ class NunjucksWorkflowRunner {
   }
   isSingleTemplateString(input) {
     var _a, _b;
-    const {parser, nodes} = nunjucks__default[
+    const { parser, nodes } = nunjucks__default["default"];
     const parsed = parser.parse(input, {}, {
       autoescape: false,
       tags: {
@@ -2400,15 +2399,15 @@ class NunjucksWorkflowRunner {
     if (!isValidTaskSpec(task.spec)) {
       throw new errors.InputError("Wrong template version executed with the workflow engine");
     }
-    const workspacePath = path__default[
-    const {integrations} = this.options;
+    const workspacePath = path__default["default"].join(this.options.workingDirectory, await task.getWorkspaceName());
+    const { integrations } = this.options;
     const renderTemplate = await SecureTemplater.loadRenderer({
       parseRepoUrl(url) {
         return parseRepoUrl(url, integrations);
       }
     });
     try {
-      await fs__default[
+      await fs__default["default"].ensureDir(workspacePath);
      await task.emitLog(`Starting up task with ${task.spec.steps.length} steps`);
      const context = {
        parameters: task.spec.parameters,
@@ -2419,7 +2418,7 @@ class NunjucksWorkflowRunner {
         if (step.if) {
           const ifResult = await this.render(step.if, context, renderTemplate);
           if (!isTruthy(ifResult)) {
-            await task.emitLog(`Skipping step ${step.id} because it's if condition was false`, {stepId: step.id, status: "skipped"});
+            await task.emitLog(`Skipping step ${step.id} because it's if condition was false`, { stepId: step.id, status: "skipped" });
            continue;
          }
        }
@@ -2428,7 +2427,7 @@ class NunjucksWorkflowRunner {
           status: "processing"
         });
         const action = this.options.actionRegistry.get(step.action);
-        const {taskLogger, streamLogger} = createStepLogger({task, step});
+        const { taskLogger, streamLogger } = createStepLogger({ task, step });
        const input = (_a = step.input && this.render(step.input, context, renderTemplate)) != null ? _a : {};
        if ((_b = action.schema) == null ? void 0 : _b.input) {
          const validateResult = jsonschema.validate(input, action.schema.input);
@@ -2449,7 +2448,7 @@ class NunjucksWorkflowRunner {
           logStream: streamLogger,
           workspacePath,
           createTemporaryDirectory: async () => {
-            const tmpDir = await fs__default[
+            const tmpDir = await fs__default["default"].mkdtemp(`${workspacePath}_step-${step.id}-`);
            tmpDirs.push(tmpDir);
            return tmpDir;
          },
@@ -2459,9 +2458,9 @@ class NunjucksWorkflowRunner {
           metadata: task.spec.metadata
         });
         for (const tmpDir of tmpDirs) {
-          await fs__default[
+          await fs__default["default"].remove(tmpDir);
         }
-        context.steps[step.id] = {output: stepOutput};
+        context.steps[step.id] = { output: stepOutput };
         await task.emitLog(`Finished step ${step.name}`, {
           stepId: step.id,
           status: "completed"
@@ -2475,10 +2474,10 @@ class NunjucksWorkflowRunner {
         }
       }
       const output = this.render(task.spec.output, context, renderTemplate);
-      return {output};
+      return { output };
     } finally {
       if (workspacePath) {
-        await fs__default[
+        await fs__default["default"].remove(workspacePath);
       }
     }
   }
@@ -2510,7 +2509,7 @@ class TaskWorker {
     });
     return new TaskWorker({
       taskBroker,
-      runners: {legacyWorkflowRunner, workflowRunner}
+      runners: { legacyWorkflowRunner, workflowRunner }
     });
   }
   start() {
@@ -2523,12 +2522,12 @@ class TaskWorker {
   }
   async runOneTask(task) {
     try {
-      const {output} = task.spec.apiVersion === "scaffolder.backstage.io/v1beta3" ? await this.options.runners.workflowRunner.execute(task) : await this.options.runners.legacyWorkflowRunner.execute(task);
-      await task.complete("completed", {output});
+      const { output } = task.spec.apiVersion === "scaffolder.backstage.io/v1beta3" ? await this.options.runners.workflowRunner.execute(task) : await this.options.runners.legacyWorkflowRunner.execute(task);
+      await task.complete("completed", { output });
     } catch (error) {
       errors.assertError(error);
       await task.complete("failed", {
-        error: {name: error.name, message: error.message}
+        error: { name: error.name, message: error.message }
       });
     }
   }
@@ -2539,7 +2538,7 @@ class CatalogEntityClient {
     this.catalogClient = catalogClient;
   }
   async findTemplate(templateName, options) {
-    const {items: templates} = await this.catalogClient.getEntities({
+    const { items: templates } = await this.catalogClient.getEntities({
      filter: {
        kind: "template",
        "metadata.name": templateName
@@ -2558,11 +2557,11 @@ class CatalogEntityClient {
 
 async function getWorkingDirectory(config, logger) {
   if (!config.has("backend.workingDirectory")) {
-    return os__default[
+    return os__default["default"].tmpdir();
   }
   const workingDirectory = config.getString("backend.workingDirectory");
   try {
-    await fs__default[
+    await fs__default["default"].access(workingDirectory, fs__default["default"].constants.F_OK | fs__default["default"].constants.W_OK);
    logger.info(`using working directory: ${workingDirectory}`);
  } catch (err) {
    errors.assertError(err);
@@ -2580,7 +2579,7 @@ function getEntityBaseUrl(entity) {
   if (!location) {
     return void 0;
   }
-  const {type, target} = catalogModel.parseLocationReference(location);
+  const { type, target } = catalogModel.parseLocationReference(location);
   if (type === "url") {
     return target;
   } else if (type === "file") {
@@ -2593,8 +2592,8 @@ function isSupportedTemplate(entity) {
   return entity.apiVersion === "backstage.io/v1beta2" || entity.apiVersion === "scaffolder.backstage.io/v1beta3";
 }
 async function createRouter(options) {
-  const router = Router__default[
-  router.use(express__default[
+  const router = Router__default["default"]();
+  router.use(express__default["default"].json());
   const {
     logger: parentLogger,
     config,
@@ -2605,7 +2604,7 @@ async function createRouter(options) {
     containerRunner,
     taskWorkers
   } = options;
-  const logger = parentLogger.child({plugin: "scaffolder"});
+  const logger = parentLogger.child({ plugin: "scaffolder" });
   const workingDirectory = await getWorkingDirectory(config, logger);
   const entityClient = new CatalogEntityClient(catalogClient);
   const integrations = integration.ScmIntegrations.fromConfig(config);
@@ -2641,7 +2640,7 @@ async function createRouter(options) {
   workers.forEach((worker) => worker.start());
   router.get("/v2/templates/:namespace/:kind/:name/parameter-schema", async (req, res) => {
     var _a, _b;
-    const {namespace, kind, name} = req.params;
+    const { namespace, kind, name } = req.params;
     if (namespace !== "default") {
       throw new errors.InputError(`Invalid namespace, only 'default' namespace is supported`);
     }
@@ -2688,7 +2687,7 @@ async function createRouter(options) {
     for (const parameters of [(_a = template.spec.parameters) != null ? _a : []].flat()) {
       const result2 = jsonschema.validate(values, parameters);
       if (!result2.valid) {
-        res.status(400).json({errors: result2.errors});
+        res.status(400).json({ errors: result2.errors });
         return;
       }
     }
@@ -2706,7 +2705,7 @@ async function createRouter(options) {
         };
       }),
       output: (_b = template.spec.output) != null ? _b : {},
-      metadata: {name: (_c = template.metadata) == null ? void 0 : _c.name}
+      metadata: { name: (_c = template.metadata) == null ? void 0 : _c.name }
     } : {
       apiVersion: template.apiVersion,
       baseUrl,
@@ -2720,7 +2719,7 @@ async function createRouter(options) {
         };
       }),
       output: (_d = template.spec.output) != null ? _d : {},
-      metadata: {name: (_e = template.metadata) == null ? void 0 : _e.name}
+      metadata: { name: (_e = template.metadata) == null ? void 0 : _e.name }
     };
   } else {
     throw new errors.InputError(`Unsupported apiVersion field in schema entity, ${template.apiVersion}`);
@@ -2728,9 +2727,9 @@ async function createRouter(options) {
     const result = await taskBroker.dispatch(taskSpec, {
       token
     });
-    res.status(201).json({id: result.taskId});
+    res.status(201).json({ id: result.taskId });
   }).get("/v2/tasks/:taskId", async (req, res) => {
-    const {taskId} = req.params;
+    const { taskId } = req.params;
     const task = await taskBroker.get(taskId);
     if (!task) {
       throw new errors.NotFoundError(`Task with id ${taskId} does not exist`);
@@ -2738,7 +2737,7 @@ async function createRouter(options) {
     delete task.secrets;
     res.status(200).json(task);
   }).get("/v2/tasks/:taskId/eventstream", async (req, res) => {
-    const {taskId} = req.params;
+    const { taskId } = req.params;
     const after = req.query.after !== void 0 ? Number(req.query.after) : void 0;
     logger.debug(`Event stream observing taskId '${taskId}' opened`);
     res.writeHead(200, {
@@ -2746,7 +2745,7 @@ async function createRouter(options) {
       "Cache-Control": "no-cache",
       "Content-Type": "text/event-stream"
     });
-    const {unsubscribe} = taskBroker.observe({taskId, after}, (error, {events}) => {
+    const { unsubscribe } = taskBroker.observe({ taskId, after }, (error, { events }) => {
      var _a;
      if (error) {
        logger.error(`Received error from event stream when observing taskId '${taskId}', ${error}`);
@@ -2770,7 +2769,7 @@ data: ${JSON.stringify(event)}
       logger.debug(`Event stream observing taskId '${taskId}' closed`);
     });
   }).get("/v2/tasks/:taskId/events", async (req, res) => {
-    const {taskId} = req.params;
+    const { taskId } = req.params;
     const after = Number(req.query.after) || void 0;
     let unsubscribe = () => {
     };
@@ -2778,7 +2777,7 @@ data: ${JSON.stringify(event)}
       unsubscribe();
       res.json([]);
     }, 3e4);
-    ({unsubscribe} = taskBroker.observe({taskId, after}, (error, {events}) => {
+    ({ unsubscribe } = taskBroker.observe({ taskId, after }, (error, { events }) => {
      clearTimeout(timeout);
      unsubscribe();
      if (error) {
@@ -2791,7 +2790,7 @@ data: ${JSON.stringify(event)}
       clearTimeout(timeout);
     });
   });
-  const app = express__default[
+  const app = express__default["default"]();
   app.set("logger", logger);
   app.use("/", router);
   return app;
@@ -2851,9 +2850,7 @@ class ScaffolderEntitiesProcessor {
 
 Object.defineProperty(exports, 'createFetchCookiecutterAction', {
   enumerable: true,
-  get: function () {
-    return pluginScaffolderBackendModuleCookiecutter.createFetchCookiecutterAction;
-  }
+  get: function () { return pluginScaffolderBackendModuleCookiecutter.createFetchCookiecutterAction; }
 });
 exports.CatalogEntityClient = CatalogEntityClient;
 exports.DatabaseTaskStore = DatabaseTaskStore;