@backstage/plugin-scaffolder-backend 0.0.0-nightly-20220724025214 → 0.0.0-nightly-20220727025454
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +30 -5
- package/alpha/package.json +1 -1
- package/dist/index.cjs.js +716 -290
- package/dist/index.cjs.js.map +1 -1
- package/package.json +8 -8
package/dist/index.cjs.js
CHANGED
@@ -147,7 +147,9 @@ function createCatalogRegisterAction(options) {
         const { repoContentsUrl, catalogInfoPath = "/catalog-info.yaml" } = input;
         const integration = integrations.byUrl(repoContentsUrl);
         if (!integration) {
-          throw new errors.InputError(`No integration found for host ${repoContentsUrl}`);
+          throw new errors.InputError(
+            `No integration found for host ${repoContentsUrl}`
+          );
         }
         catalogInfoUrl = integration.resolveUrl({
           base: repoContentsUrl,
@@ -155,22 +157,32 @@
         });
       }
       ctx.logger.info(`Registering ${catalogInfoUrl} in the catalog`);
-      await catalogClient.addLocation({
-        type: "url",
-        target: catalogInfoUrl
-      }, ((_a = ctx.secrets) == null ? void 0 : _a.backstageToken) ? { token: ctx.secrets.backstageToken } : {});
-      try {
-        const result = await catalogClient.addLocation({
-          dryRun: true,
+      await catalogClient.addLocation(
+        {
          type: "url",
          target: catalogInfoUrl
-        }, ((_a = ctx.secrets) == null ? void 0 : _a.backstageToken) ? { token: ctx.secrets.backstageToken } : {});
+        },
+        ((_a = ctx.secrets) == null ? void 0 : _a.backstageToken) ? { token: ctx.secrets.backstageToken } : {}
+      );
+      try {
+        const result = await catalogClient.addLocation(
+          {
+            dryRun: true,
+            type: "url",
+            target: catalogInfoUrl
+          },
+          ((_b = ctx.secrets) == null ? void 0 : _b.backstageToken) ? { token: ctx.secrets.backstageToken } : {}
+        );
       if (result.entities.length > 0) {
        const { entities } = result;
        let entity;
-        entity = entities.find((e) => !e.metadata.name.startsWith("generated-") && e.kind === "Component");
+        entity = entities.find(
+          (e) => !e.metadata.name.startsWith("generated-") && e.kind === "Component"
+        );
        if (!entity) {
-          entity = entities.find((e) => !e.metadata.name.startsWith("generated-"));
+          entity = entities.find(
+            (e) => !e.metadata.name.startsWith("generated-")
+          );
        }
        if (!entity) {
          entity = entities[0];
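Note: in both versions the register action forwards the caller's Backstage token to catalogClient.addLocation when it is present in the task secrets; the new bundle mainly splits the calls across lines and uses a second helper variable (_b) for the dry-run call. A minimal sketch of the call shape, using only names visible in the hunk above:

  // Sketch (assumed context): register a catalog location, forwarding the
  // user's token when the scaffolder task carries one in its secrets.
  const token = ctx.secrets && ctx.secrets.backstageToken;
  await catalogClient.addLocation(
    { type: "url", target: catalogInfoUrl },
    token ? { token } : {}
  );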
@@ -213,7 +225,10 @@ function createCatalogWriteAction() {
      ctx.logStream.write(`Writing catalog-info.yaml`);
      const { filePath, entity } = ctx.input;
      const path = filePath != null ? filePath : "catalog-info.yaml";
-      await fs__default["default"].writeFile(backendCommon.resolveSafeChildPath(ctx.workspacePath, path), yaml__namespace.stringify(entity));
+      await fs__default["default"].writeFile(
+        backendCommon.resolveSafeChildPath(ctx.workspacePath, path),
+        yaml__namespace.stringify(entity)
+      );
    }
  });
}
@@ -249,18 +264,22 @@ function createDebugLogAction() {
      }
      if ((_b = ctx.input) == null ? void 0 : _b.listWorkspace) {
        const files = await recursiveReadDir(ctx.workspacePath);
-        ctx.logStream.write(`Workspace:
-${files.map((f) => ` - ${path.relative(ctx.workspacePath, f)}`).join("\n")}`);
+        ctx.logStream.write(
+          `Workspace:
+${files.map((f) => ` - ${path.relative(ctx.workspacePath, f)}`).join("\n")}`
+        );
      }
    }
  });
}
async function recursiveReadDir(dir) {
  const subdirs = await fs.readdir(dir);
-  const files = await Promise.all(subdirs.map(async (subdir) => {
-    const res = path.join(dir, subdir);
-    return (await fs.stat(res)).isDirectory() ? recursiveReadDir(res) : [res];
-  }));
+  const files = await Promise.all(
+    subdirs.map(async (subdir) => {
+      const res = path.join(dir, subdir);
+      return (await fs.stat(res)).isDirectory() ? recursiveReadDir(res) : [res];
+    })
+  );
  return files.reduce((a, f) => a.concat(f), []);
}

@@ -295,7 +314,9 @@ async function fetchContents({
      base: baseUrl
    });
  } else {
-    throw new errors.InputError(`Failed to fetch, template location could not be determined and the fetch URL is relative, ${fetchUrl}`);
+    throw new errors.InputError(
+      `Failed to fetch, template location could not be determined and the fetch URL is relative, ${fetchUrl}`
+    );
  }
  const res = await reader.readTree(readUrl);
  await fs__default["default"].ensureDir(outputPath);
@@ -424,13 +445,21 @@ class SecureTemplater {
      sandbox.parseRepoUrl = (url) => JSON.stringify(parseRepoUrl(url));
    }
    if (additionalTemplateFilters) {
-      sandbox.additionalTemplateFilters = Object.fromEntries(Object.entries(additionalTemplateFilters).filter(([_, filterFunction]) => !!filterFunction).map(([filterName, filterFunction]) => [
-        filterName,
-        (...args) => JSON.stringify(filterFunction(...args))
-      ]));
+      sandbox.additionalTemplateFilters = Object.fromEntries(
+        Object.entries(additionalTemplateFilters).filter(([_, filterFunction]) => !!filterFunction).map(([filterName, filterFunction]) => [
+          filterName,
+          (...args) => JSON.stringify(filterFunction(...args))
+        ])
+      );
    }
    const vm = new vm2.VM({ sandbox });
-    const nunjucksSource = await fs__default["default"].readFile(backendCommon.resolvePackagePath("@backstage/plugin-scaffolder-backend", "assets/nunjucks.js.txt"), "utf-8");
+    const nunjucksSource = await fs__default["default"].readFile(
+      backendCommon.resolvePackagePath(
+        "@backstage/plugin-scaffolder-backend",
+        "assets/nunjucks.js.txt"
+      ),
+      "utf-8"
+    );
    vm.run(mkScript(nunjucksSource));
    const render = (template, values) => {
      if (!vm) {
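Note: the template filters run inside a vm2 sandbox, so each filter's return value is JSON-stringified before it crosses the sandbox boundary, and undefined filters are dropped first. A hedged sketch of the same wrapping, where the `upper` filter is a hypothetical example:

  // Hypothetical example filter an integrator might pass in.
  const additionalTemplateFilters = {
    upper: (s) => String(s).toUpperCase()
  };
  // Same shape as the bundled code above: keep defined filters, wrap the rest
  // so only JSON strings leave the sandbox.
  const sandboxFilters = Object.fromEntries(
    Object.entries(additionalTemplateFilters)
      .filter(([, fn]) => !!fn)
      .map(([name, fn]) => [name, (...args) => JSON.stringify(fn(...args))])
  );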
@@ -510,12 +539,16 @@ function createFetchTemplateAction(options) {
      const targetPath = (_a = ctx.input.targetPath) != null ? _a : "./";
      const outputDir = backendCommon.resolveSafeChildPath(ctx.workspacePath, targetPath);
      if (ctx.input.copyWithoutRender && ctx.input.copyWithoutTemplating) {
-        throw new errors.InputError("Fetch action input copyWithoutRender and copyWithoutTemplating can not be used at the same time");
+        throw new errors.InputError(
+          "Fetch action input copyWithoutRender and copyWithoutTemplating can not be used at the same time"
+        );
      }
      let copyOnlyPatterns;
      let renderFilename;
      if (ctx.input.copyWithoutRender) {
-        ctx.logger.warn("[Deprecated] Please use copyWithoutTemplating instead.");
+        ctx.logger.warn(
+          "[Deprecated] Please use copyWithoutTemplating instead."
+        );
        copyOnlyPatterns = ctx.input.copyWithoutRender;
        renderFilename = false;
      } else {
@@ -523,10 +556,14 @@ function createFetchTemplateAction(options) {
        renderFilename = true;
      }
      if (copyOnlyPatterns && !Array.isArray(copyOnlyPatterns)) {
-        throw new errors.InputError("Fetch action input copyWithoutRender/copyWithoutTemplating must be an Array");
+        throw new errors.InputError(
+          "Fetch action input copyWithoutRender/copyWithoutTemplating must be an Array"
+        );
      }
      if (ctx.input.templateFileExtension && (copyOnlyPatterns || ctx.input.cookiecutterCompat)) {
-        throw new errors.InputError("Fetch action input extension incompatible with copyWithoutRender/copyWithoutTemplating and cookiecutterCompat");
+        throw new errors.InputError(
+          "Fetch action input extension incompatible with copyWithoutRender/copyWithoutTemplating and cookiecutterCompat"
+        );
      }
      let extension = false;
      if (ctx.input.templateFileExtension) {
@@ -550,18 +587,27 @@ function createFetchTemplateAction(options) {
        markDirectories: true,
        followSymbolicLinks: false
      });
-      const nonTemplatedEntries = new Set((await Promise.all((copyOnlyPatterns || []).map((pattern) => globby__default["default"](pattern, {
-        cwd: templateDir,
-        dot: true,
-        onlyFiles: false,
-        markDirectories: true,
-        followSymbolicLinks: false
-      })))).flat());
+      const nonTemplatedEntries = new Set(
+        (await Promise.all(
+          (copyOnlyPatterns || []).map(
+            (pattern) => globby__default["default"](pattern, {
+              cwd: templateDir,
+              dot: true,
+              onlyFiles: false,
+              markDirectories: true,
+              followSymbolicLinks: false
+            })
+          )
+        )).flat()
+      );
      const { cookiecutterCompat, values } = ctx.input;
      const context = {
        [cookiecutterCompat ? "cookiecutter" : "values"]: values
      };
-      ctx.logger.info(`Processing ${allEntriesInTemplate.length} template files/directories with input values`, ctx.input.values);
+      ctx.logger.info(
+        `Processing ${allEntriesInTemplate.length} template files/directories with input values`,
+        ctx.input.values
+      );
      const renderTemplate = await SecureTemplater.loadRenderer({
        cookiecutterCompat: ctx.input.cookiecutterCompat,
        additionalTemplateFilters
@@ -591,22 +637,34 @@ function createFetchTemplateAction(options) {
          continue;
        }
        if (!renderContents && !extension) {
-          ctx.logger.info(`Copying file/directory ${location} without processing.`);
+          ctx.logger.info(
+            `Copying file/directory ${location} without processing.`
+          );
        }
        if (location.endsWith("/")) {
-          ctx.logger.info(`Writing directory ${location} to template output path.`);
+          ctx.logger.info(
+            `Writing directory ${location} to template output path.`
+          );
          await fs__default["default"].ensureDir(outputPath);
        } else {
          const inputFilePath = backendCommon.resolveSafeChildPath(templateDir, location);
          const stats = await fs__default["default"].promises.lstat(inputFilePath);
          if (stats.isSymbolicLink() || await isbinaryfile.isBinaryFile(inputFilePath)) {
-            ctx.logger.info(`Copying file binary or symbolic link at ${location}, to template output path.`);
+            ctx.logger.info(
+              `Copying file binary or symbolic link at ${location}, to template output path.`
+            );
            await fs__default["default"].copy(inputFilePath, outputPath);
          } else {
            const statsObj = await fs__default["default"].stat(inputFilePath);
-            ctx.logger.info(`Writing file ${location} to template output path with mode ${statsObj.mode}.`);
+            ctx.logger.info(
+              `Writing file ${location} to template output path with mode ${statsObj.mode}.`
+            );
            const inputFileContents = await fs__default["default"].readFile(inputFilePath, "utf-8");
-            await fs__default["default"].outputFile(outputPath, renderContents ? renderTemplate(inputFileContents, context) : inputFileContents, { mode: statsObj.mode });
+            await fs__default["default"].outputFile(
+              outputPath,
+              renderContents ? renderTemplate(inputFileContents, context) : inputFileContents,
+              { mode: statsObj.mode }
+            );
          }
        }
      }
@@ -703,15 +761,23 @@ const createFilesystemRenameAction = () => {
        if (!file.from || !file.to) {
          throw new errors.InputError("each file must have a from and to property");
        }
-        const sourceFilepath = backendCommon.resolveSafeChildPath(ctx.workspacePath, file.from);
+        const sourceFilepath = backendCommon.resolveSafeChildPath(
+          ctx.workspacePath,
+          file.from
+        );
        const destFilepath = backendCommon.resolveSafeChildPath(ctx.workspacePath, file.to);
        try {
          await fs__default["default"].move(sourceFilepath, destFilepath, {
            overwrite: (_b = file.overwrite) != null ? _b : false
          });
-          ctx.logger.info(`File ${sourceFilepath} renamed to ${destFilepath} successfully`);
+          ctx.logger.info(
+            `File ${sourceFilepath} renamed to ${destFilepath} successfully`
+          );
        } catch (err) {
-          ctx.logger.error(`Failed to rename file ${sourceFilepath} to ${destFilepath}:`, err);
+          ctx.logger.error(
+            `Failed to rename file ${sourceFilepath} to ${destFilepath}:`,
+            err
+          );
          throw err;
        }
      }
@@ -721,7 +787,10 @@ const createFilesystemRenameAction = () => {

const getRepoSourceDirectory = (workspacePath, sourcePath) => {
  if (sourcePath) {
-    const safeSuffix = path.normalize(sourcePath).replace(/^(\.\.(\/|\\|$))+/, "");
+    const safeSuffix = path.normalize(sourcePath).replace(
+      /^(\.\.(\/|\\|$))+/,
+      ""
+    );
    const path$1 = path.join(workspacePath, safeSuffix);
    if (!backendCommon.isChildPath(workspacePath, path$1)) {
      throw new Error("Invalid source path");
@@ -736,7 +805,9 @@ const parseRepoUrl = (repoUrl, integrations) => {
  try {
    parsed = new URL(`https://${repoUrl}`);
  } catch (error) {
-    throw new errors.InputError(`Invalid repo URL passed to publisher, got ${repoUrl}, ${error}`);
+    throw new errors.InputError(
+      `Invalid repo URL passed to publisher, got ${repoUrl}, ${error}`
+    );
  }
  const host = parsed.host;
  const owner = (_a = parsed.searchParams.get("owner")) != null ? _a : void 0;
@@ -745,25 +816,35 @@
  const project = (_d = parsed.searchParams.get("project")) != null ? _d : void 0;
  const type = (_e = integrations.byHost(host)) == null ? void 0 : _e.type;
  if (!type) {
-    throw new errors.InputError(`No matching integration configuration for host ${host}, please check your integrations config`);
+    throw new errors.InputError(
+      `No matching integration configuration for host ${host}, please check your integrations config`
+    );
  }
  if (type === "bitbucket") {
    if (host === "bitbucket.org") {
      if (!workspace) {
-        throw new errors.InputError(`Invalid repo URL passed to publisher: ${repoUrl}, missing workspace`);
+        throw new errors.InputError(
+          `Invalid repo URL passed to publisher: ${repoUrl}, missing workspace`
+        );
      }
    }
    if (!project) {
-      throw new errors.InputError(`Invalid repo URL passed to publisher: ${repoUrl}, missing project`);
+      throw new errors.InputError(
+        `Invalid repo URL passed to publisher: ${repoUrl}, missing project`
+      );
    }
  } else {
    if (!owner && type !== "gerrit") {
-      throw new errors.InputError(`Invalid repo URL passed to publisher: ${repoUrl}, missing owner`);
+      throw new errors.InputError(
+        `Invalid repo URL passed to publisher: ${repoUrl}, missing owner`
+      );
    }
  }
  const repo = parsed.searchParams.get("repo");
  if (!repo) {
-    throw new errors.InputError(`Invalid repo URL passed to publisher: ${repoUrl}, missing repo`);
+    throw new errors.InputError(
+      `Invalid repo URL passed to publisher: ${repoUrl}, missing repo`
+    );
  }
  return { host, owner, repo, organization, workspace, project };
};
@@ -788,7 +869,9 @@ const executeShellCommand = async (options) => {
    });
    process.on("close", (code) => {
      if (code !== 0) {
-        return reject(new Error(`Command ${command} failed, exit code: ${code}`));
+        return reject(
+          new Error(`Command ${command} failed, exit code: ${code}`)
+        );
      }
      return resolve();
    });
@@ -805,8 +888,7 @@ async function initRepoAndPush({
}) {
  var _a, _b;
  const git = backendCommon.Git.fromAuth({
-    username: auth.username,
-    password: auth.password,
+    ...auth,
    logger
  });
  await git.init({
@@ -845,8 +927,7 @@ async function commitAndPushRepo({
}) {
  var _a, _b;
  const git = backendCommon.Git.fromAuth({
-    username: auth.username,
-    password: auth.password,
+    ...auth,
    logger
  });
  await git.fetch({ dir });
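Note: the only behavioral change in the two hunks above is that initRepoAndPush and commitAndPushRepo now spread the whole auth object into Git.fromAuth instead of copying just username and password. The visible effect is that any auth shape accepted by Git.fromAuth is forwarded as-is; a hedged sketch with assumed example values:

  // Before this change only { username, password } survived the call.
  // After it, either shape below reaches Git.fromAuth unchanged.
  const basicAuth = { username: "user", password: "secret" }; // assumed example
  const tokenAuth = { token: "personal-access-token" };       // assumed example
  const git = backendCommon.Git.fromAuth({ ...tokenAuth, logger });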
@@ -900,8 +981,12 @@ const enableBranchProtectionOnDefaultRepoBranch = async ({
    });
  } catch (e) {
    errors.assertError(e);
-    if (e.message.includes("Upgrade to GitHub Pro or make this repository public to enable this feature")) {
-      logger.warn("Branch protection was not enabled as it requires GitHub Pro for private repositories");
+    if (e.message.includes(
+      "Upgrade to GitHub Pro or make this repository public to enable this feature"
+    )) {
+      logger.warn(
+        "Branch protection was not enabled as it requires GitHub Pro for private repositories"
+      );
    } else {
      throw e;
    }
@@ -943,10 +1028,14 @@ async function getOctokitOptions(options) {
  }
  const githubCredentialsProvider = credentialsProvider != null ? credentialsProvider : integration.DefaultGithubCredentialsProvider.fromIntegrations(integrations);
  const { token: credentialProviderToken } = await githubCredentialsProvider.getCredentials({
-    url: `https://${host}/${encodeURIComponent(owner)}/${encodeURIComponent(repo)}`
+    url: `https://${host}/${encodeURIComponent(owner)}/${encodeURIComponent(
+      repo
+    )}`
  });
  if (!credentialProviderToken) {
-    throw new errors.InputError(`No token available for host: ${host}, with owner ${owner}, and repo ${repo}`);
+    throw new errors.InputError(
+      `No token available for host: ${host}, with owner ${owner}, and repo ${repo}`
+    );
  }
  return {
    auth: credentialProviderToken,
@@ -983,9 +1072,13 @@ async function createGithubRepoWithCollaboratorsAndTopics(client, repo, owner, r
  } catch (e) {
    errors.assertError(e);
    if (e.message === "Resource not accessible by integration") {
-      logger.warn(`The GitHub app or token provided may not have the required permissions to create the ${user.data.type} repository ${owner}/${repo}.`);
+      logger.warn(
+        `The GitHub app or token provided may not have the required permissions to create the ${user.data.type} repository ${owner}/${repo}.`
+      );
    }
-    throw new Error(`Failed to create the ${user.data.type} repository ${owner}/${repo}, ${e.message}`);
+    throw new Error(
+      `Failed to create the ${user.data.type} repository ${owner}/${repo}, ${e.message}`
+    );
  }
  if (access == null ? void 0 : access.startsWith(`${owner}/`)) {
    const [, team] = access.split("/");
@@ -1026,7 +1119,9 @@ async function createGithubRepoWithCollaboratorsAndTopics(client, repo, owner, r
    } catch (e) {
      errors.assertError(e);
      const name = extractCollaboratorName(collaborator);
-      logger.warn(`Skipping ${collaborator.access} access for ${name}, ${e.message}`);
+      logger.warn(
+        `Skipping ${collaborator.access} access for ${name}, ${e.message}`
+      );
    }
  }
}
@@ -1076,7 +1171,9 @@ async function initRepoPushAndProtect(remoteUrl, password, workspacePath, source
      });
    } catch (e) {
      errors.assertError(e);
-      logger.warn(`Skipping: default branch protection on '${repo}', ${e.message}`);
+      logger.warn(
+        `Skipping: default branch protection on '${repo}', ${e.message}`
+      );
    }
  }
}
@@ -1134,17 +1231,21 @@ function createGithubActionsDispatchAction(options) {
        workflowInputs,
        token: providedToken
      } = ctx.input;
-      ctx.logger.info(`Dispatching workflow ${workflowId} for repo ${repoUrl} on ${branchOrTagName}`);
+      ctx.logger.info(
+        `Dispatching workflow ${workflowId} for repo ${repoUrl} on ${branchOrTagName}`
+      );
      const { owner, repo } = parseRepoUrl(repoUrl, integrations);
      if (!owner) {
        throw new errors.InputError("Invalid repository owner provided in repoUrl");
      }
-      const client = new octokit.Octokit(await getOctokitOptions({
-        integrations,
-        repoUrl,
-        credentialsProvider: githubCredentialsProvider,
-        token: providedToken
-      }));
+      const client = new octokit.Octokit(
+        await getOctokitOptions({
+          integrations,
+          repoUrl,
+          credentialsProvider: githubCredentialsProvider,
+          token: providedToken
+        })
+      );
      await client.rest.actions.createWorkflowDispatch({
        owner,
        repo,
@@ -1200,12 +1301,14 @@ function createGithubIssuesLabelAction(options) {
      if (!owner) {
        throw new errors.InputError("Invalid repository owner provided in repoUrl");
      }
-      const client = new octokit.Octokit(await getOctokitOptions({
-        integrations,
-        credentialsProvider: githubCredentialsProvider,
-        repoUrl,
-        token: providedToken
-      }));
+      const client = new octokit.Octokit(
+        await getOctokitOptions({
+          integrations,
+          credentialsProvider: githubCredentialsProvider,
+          repoUrl,
+          token: providedToken
+        })
+      );
      try {
        await client.rest.issues.addLabels({
          owner,
@@ -1215,7 +1318,9 @@ function createGithubIssuesLabelAction(options) {
        });
      } catch (e) {
        errors.assertError(e);
-        ctx.logger.warn(`Failed: adding labels to issue: '${number}' on repo: '${repo}', ${e.message}`);
+        ctx.logger.warn(
+          `Failed: adding labels to issue: '${number}' on repo: '${repo}', ${e.message}`
+        );
      }
    }
  });
@@ -1414,7 +1519,21 @@ function createGithubRepoCreateAction(options) {
      if (!owner) {
        throw new errors.InputError("Invalid repository owner provided in repoUrl");
      }
-      const newRepo = await createGithubRepoWithCollaboratorsAndTopics(client, repo, owner, repoVisibility, description, deleteBranchOnMerge, allowMergeCommit, allowSquashMerge, allowRebaseMerge, access, collaborators, topics, ctx.logger);
+      const newRepo = await createGithubRepoWithCollaboratorsAndTopics(
+        client,
+        repo,
+        owner,
+        repoVisibility,
+        description,
+        deleteBranchOnMerge,
+        allowMergeCommit,
+        allowSquashMerge,
+        allowRebaseMerge,
+        access,
+        collaborators,
+        topics,
+        ctx.logger
+      );
      ctx.output("remoteUrl", newRepo.clone_url);
    }
  });
@@ -1478,7 +1597,25 @@ function createGithubRepoPushAction(options) {
      const targetRepo = await client.rest.repos.get({ owner, repo });
      const remoteUrl = targetRepo.data.clone_url;
      const repoContentsUrl = `${targetRepo.data.html_url}/blob/${defaultBranch}`;
-      await initRepoPushAndProtect(remoteUrl, octokitOptions.auth, ctx.workspacePath, ctx.input.sourcePath, defaultBranch, protectDefaultBranch, protectEnforceAdmins, owner, client, repo, requireCodeOwnerReviews, requiredStatusCheckContexts, config, ctx.logger, gitCommitMessage, gitAuthorName, gitAuthorEmail);
+      await initRepoPushAndProtect(
+        remoteUrl,
+        octokitOptions.auth,
+        ctx.workspacePath,
+        ctx.input.sourcePath,
+        defaultBranch,
+        protectDefaultBranch,
+        protectEnforceAdmins,
+        owner,
+        client,
+        repo,
+        requireCodeOwnerReviews,
+        requiredStatusCheckContexts,
+        config,
+        ctx.logger,
+        gitCommitMessage,
+        gitAuthorName,
+        gitAuthorEmail
+      );
      ctx.output("remoteUrl", remoteUrl);
      ctx.output("repoContentsUrl", repoContentsUrl);
    }
@@ -1570,12 +1707,14 @@ function createGithubWebhookAction(options) {
      if (!owner) {
        throw new errors.InputError("Invalid repository owner provided in repoUrl");
      }
-      const client = new octokit.Octokit(await getOctokitOptions({
-        integrations,
-        credentialsProvider: githubCredentialsProvider,
-        repoUrl,
-        token: providedToken
-      }));
+      const client = new octokit.Octokit(
+        await getOctokitOptions({
+          integrations,
+          credentialsProvider: githubCredentialsProvider,
+          repoUrl,
+          token: providedToken
+        })
+      );
      try {
        const insecure_ssl = insecureSsl ? "1" : "0";
        await client.rest.repos.createWebhook({
@@ -1593,7 +1732,9 @@ function createGithubWebhookAction(options) {
        ctx.logger.info(`Webhook '${webhookUrl}' created successfully`);
      } catch (e) {
        errors.assertError(e);
-        ctx.logger.warn(`Failed: create webhook '${webhookUrl}' on repo: '${repo}', ${e.message}`);
+        ctx.logger.warn(
+          `Failed: create webhook '${webhookUrl}' on repo: '${repo}', ${e.message}`
+        );
      }
    }
  });
@@ -1672,13 +1813,20 @@ function createPublishAzureAction(options) {
        gitAuthorName,
        gitAuthorEmail
      } = ctx.input;
-      const { owner, repo, host, organization } = parseRepoUrl(repoUrl, integrations);
+      const { owner, repo, host, organization } = parseRepoUrl(
+        repoUrl,
+        integrations
+      );
      if (!organization) {
-        throw new errors.InputError(`Invalid URL provider was included in the repo URL to create ${ctx.input.repoUrl}, missing organization`);
+        throw new errors.InputError(
+          `Invalid URL provider was included in the repo URL to create ${ctx.input.repoUrl}, missing organization`
+        );
      }
      const integrationConfig = integrations.azure.byHost(host);
      if (!integrationConfig) {
-        throw new errors.InputError(`No matching integration configuration for host ${host}, please check your integrations config`);
+        throw new errors.InputError(
+          `No matching integration configuration for host ${host}, please check your integrations config`
+        );
      }
      if (!integrationConfig.config.token && !ctx.input.token) {
        throw new errors.InputError(`No token provided for Azure Integration ${host}`);
@@ -1690,12 +1838,16 @@
      const createOptions = { name: repo };
      const returnedRepo = await client.createRepository(createOptions, owner);
      if (!returnedRepo) {
-        throw new errors.InputError(`Unable to create the repository with Organization ${organization}, Project ${owner} and Repo ${repo}.
-Please make sure that both the Org and Project are typed corrected and exist.`);
+        throw new errors.InputError(
+          `Unable to create the repository with Organization ${organization}, Project ${owner} and Repo ${repo}.
+Please make sure that both the Org and Project are typed corrected and exist.`
+        );
      }
      const remoteUrl = returnedRepo.remoteUrl;
      if (!remoteUrl) {
-        throw new errors.InputError("No remote URL returned from create repository for Azure");
+        throw new errors.InputError(
+          "No remote URL returned from create repository for Azure"
+        );
      }
      const repoContentsUrl = remoteUrl;
      const gitAuthorInfo = {
@@ -1746,12 +1898,17 @@ const createBitbucketCloudRepository = async (opts) => {
  };
  let response;
  try {
-    response = await fetch__default["default"](`${apiBaseUrl}/repositories/${workspace}/${repo}`, options);
+    response = await fetch__default["default"](
+      `${apiBaseUrl}/repositories/${workspace}/${repo}`,
+      options
+    );
  } catch (e) {
    throw new Error(`Unable to create repository, ${e}`);
  }
  if (response.status !== 200) {
-    throw new Error(`Unable to create repository, ${response.status} ${response.statusText}, ${await response.text()}`);
+    throw new Error(
+      `Unable to create repository, ${response.status} ${response.statusText}, ${await response.text()}`
+    );
  }
  const r = await response.json();
  let remoteUrl = "";
@@ -1791,7 +1948,9 @@ const createBitbucketServerRepository = async (opts) => {
    throw new Error(`Unable to create repository, ${e}`);
  }
  if (response.status !== 201) {
-    throw new Error(`Unable to create repository, ${response.status} ${response.statusText}, ${await response.text()}`);
+    throw new Error(
+      `Unable to create repository, ${response.status} ${response.statusText}, ${await response.text()}`
+    );
  }
  const r = await response.json();
  let remoteUrl = "";
@@ -1803,15 +1962,20 @@
  const repoContentsUrl = `${r.links.self[0].href}`;
  return { remoteUrl, repoContentsUrl };
};
-const getAuthorizationHeader$2 = (config) => {
+const getAuthorizationHeader$1 = (config) => {
  if (config.username && config.appPassword) {
-    const buffer = Buffer.from(`${config.username}:${config.appPassword}`, "utf8");
+    const buffer = Buffer.from(
+      `${config.username}:${config.appPassword}`,
+      "utf8"
+    );
    return `Basic ${buffer.toString("base64")}`;
  }
  if (config.token) {
    return `Bearer ${config.token}`;
  }
-  throw new Error(`Authorization has not been provided for Bitbucket. Please add either username + appPassword or token to the Integrations config`);
+  throw new Error(
+    `Authorization has not been provided for Bitbucket. Please add either username + appPassword or token to the Integrations config`
+  );
};
const performEnableLFS$1 = async (opts) => {
  const { authorization, host, project, repo } = opts;
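Note: getAuthorizationHeader$1 prefers HTTP Basic auth when username and appPassword are configured and falls back to a Bearer token. The Basic value is the base64 encoding of "username:appPassword"; a standalone sketch with assumed example values:

  // Sketch of the same header construction outside the bundle.
  const username = "alice";        // assumed example
  const appPassword = "app-pass";  // assumed example
  const basic = `Basic ${Buffer.from(`${username}:${appPassword}`, "utf8").toString("base64")}`;
  const bearer = "Bearer some-token"; // fallback shape when only a token is configured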
@@ -1821,9 +1985,14 @@ const performEnableLFS$1 = async (opts) => {
      Authorization: authorization
    }
  };
-  const { ok, status, statusText } = await fetch__default["default"](`https://${host}/rest/git-lfs/admin/projects/${project}/repos/${repo}/enabled`, options);
+  const { ok, status, statusText } = await fetch__default["default"](
+    `https://${host}/rest/git-lfs/admin/projects/${project}/repos/${repo}/enabled`,
+    options
+  );
  if (!ok)
-    throw new Error(`Failed to enable LFS in the repository, ${status}: ${statusText}`);
+    throw new Error(
+      `Failed to enable LFS in the repository, ${status}: ${statusText}`
+    );
};
function createPublishBitbucketAction(options) {
  const { integrations, config } = options;
@@ -1901,7 +2070,9 @@ function createPublishBitbucketAction(options) {
    },
    async handler(ctx) {
      var _a;
-      ctx.logger.warn(`[Deprecated] Please migrate the use of action "publish:bitbucket" to "publish:bitbucketCloud" or "publish:bitbucketServer".`);
+      ctx.logger.warn(
+        `[Deprecated] Please migrate the use of action "publish:bitbucket" to "publish:bitbucketCloud" or "publish:bitbucketServer".`
+      );
      const {
        repoUrl,
        description,
@@ -1912,24 +2083,35 @@
        gitAuthorName,
        gitAuthorEmail
      } = ctx.input;
-      const { workspace, project, repo, host } = parseRepoUrl(repoUrl, integrations);
+      const { workspace, project, repo, host } = parseRepoUrl(
+        repoUrl,
+        integrations
+      );
      if (host === "bitbucket.org") {
        if (!workspace) {
-          throw new errors.InputError(`Invalid URL provider was included in the repo URL to create ${ctx.input.repoUrl}, missing workspace`);
+          throw new errors.InputError(
+            `Invalid URL provider was included in the repo URL to create ${ctx.input.repoUrl}, missing workspace`
+          );
        }
      }
      if (!project) {
-        throw new errors.InputError(`Invalid URL provider was included in the repo URL to create ${ctx.input.repoUrl}, missing project`);
+        throw new errors.InputError(
+          `Invalid URL provider was included in the repo URL to create ${ctx.input.repoUrl}, missing project`
+        );
      }
      const integrationConfig = integrations.bitbucket.byHost(host);
      if (!integrationConfig) {
-        throw new errors.InputError(`No matching integration configuration for host ${host}, please check your integrations config`);
+        throw new errors.InputError(
+          `No matching integration configuration for host ${host}, please check your integrations config`
+        );
      }
-      const authorization = getAuthorizationHeader$2(ctx.input.token ? {
-        host: integrationConfig.config.host,
-        apiBaseUrl: integrationConfig.config.apiBaseUrl,
-        token: ctx.input.token
-      } : integrationConfig.config);
+      const authorization = getAuthorizationHeader$1(
+        ctx.input.token ? {
+          host: integrationConfig.config.host,
+          apiBaseUrl: integrationConfig.config.apiBaseUrl,
+          token: ctx.input.token
+        } : integrationConfig.config
+      );
      const apiBaseUrl = integrationConfig.config.apiBaseUrl;
      const createMethod = host === "bitbucket.org" ? createBitbucketCloudRepository : createBitbucketServerRepository;
      const { remoteUrl, repoContentsUrl } = await createMethod({
@@ -2002,12 +2184,17 @@ const createRepository$1 = async (opts) => {
  };
  let response;
  try {
-    response = await fetch__default["default"](`${apiBaseUrl}/repositories/${workspace}/${repo}`, options);
+    response = await fetch__default["default"](
+      `${apiBaseUrl}/repositories/${workspace}/${repo}`,
+      options
+    );
  } catch (e) {
    throw new Error(`Unable to create repository, ${e}`);
  }
  if (response.status !== 200) {
-    throw new Error(`Unable to create repository, ${response.status} ${response.statusText}, ${await response.text()}`);
+    throw new Error(
+      `Unable to create repository, ${response.status} ${response.statusText}, ${await response.text()}`
+    );
  }
  const r = await response.json();
  let remoteUrl = "";
@@ -2019,15 +2206,20 @@
  const repoContentsUrl = `${r.links.html.href}/src/${mainBranch}`;
  return { remoteUrl, repoContentsUrl };
};
-const getAuthorizationHeader$1 = (config) => {
+const getAuthorizationHeader = (config) => {
  if (config.username && config.appPassword) {
-    const buffer = Buffer.from(`${config.username}:${config.appPassword}`, "utf8");
+    const buffer = Buffer.from(
+      `${config.username}:${config.appPassword}`,
+      "utf8"
+    );
    return `Basic ${buffer.toString("base64")}`;
  }
  if (config.token) {
    return `Bearer ${config.token}`;
  }
-  throw new Error(`Authorization has not been provided for Bitbucket Cloud. Please add either username + appPassword to the Integrations config or a user login auth token`);
+  throw new Error(
+    `Authorization has not been provided for Bitbucket Cloud. Please add either username + appPassword to the Integrations config or a user login auth token`
+  );
};
function createPublishBitbucketCloudAction(options) {
  const { integrations, config } = options;
@@ -2090,18 +2282,29 @@ function createPublishBitbucketCloudAction(options) {
        defaultBranch = "master",
        repoVisibility = "private"
      } = ctx.input;
-      const { workspace, project, repo, host } = parseRepoUrl(repoUrl, integrations);
+      const { workspace, project, repo, host } = parseRepoUrl(
+        repoUrl,
+        integrations
+      );
      if (!workspace) {
-        throw new errors.InputError(`Invalid URL provider was included in the repo URL to create ${ctx.input.repoUrl}, missing workspace`);
+        throw new errors.InputError(
+          `Invalid URL provider was included in the repo URL to create ${ctx.input.repoUrl}, missing workspace`
+        );
      }
      if (!project) {
-        throw new errors.InputError(`Invalid URL provider was included in the repo URL to create ${ctx.input.repoUrl}, missing project`);
+        throw new errors.InputError(
+          `Invalid URL provider was included in the repo URL to create ${ctx.input.repoUrl}, missing project`
+        );
      }
      const integrationConfig = integrations.bitbucketCloud.byHost(host);
      if (!integrationConfig) {
-        throw new errors.InputError(`No matching integration configuration for host ${host}, please check your integrations config`);
+        throw new errors.InputError(
+          `No matching integration configuration for host ${host}, please check your integrations config`
+        );
      }
-      const authorization = getAuthorizationHeader$1(ctx.input.token ? { token: ctx.input.token } : integrationConfig.config);
+      const authorization = getAuthorizationHeader(
+        ctx.input.token ? { token: ctx.input.token } : integrationConfig.config
+      );
      const apiBaseUrl = integrationConfig.config.apiBaseUrl;
      const { remoteUrl, repoContentsUrl } = await createRepository$1({
        authorization,
@@ -2125,7 +2328,9 @@
      };
    } else {
      if (!integrationConfig.config.username || !integrationConfig.config.appPassword) {
-        throw new Error("Credentials for Bitbucket Cloud integration required for this action.");
+        throw new Error(
+          "Credentials for Bitbucket Cloud integration required for this action."
+        );
      }
      auth = {
        username: integrationConfig.config.username,
@@ -2138,7 +2343,9 @@ function createPublishBitbucketCloudAction(options) {
        auth,
        defaultBranch,
        logger: ctx.logger,
-        commitMessage: config.getOptionalString("scaffolder.defaultCommitMessage"),
+        commitMessage: config.getOptionalString(
+          "scaffolder.defaultCommitMessage"
+        ),
        gitAuthorInfo
      });
      ctx.output("remoteUrl", remoteUrl);
@@ -2175,7 +2382,9 @@ const createRepository = async (opts) => {
    throw new Error(`Unable to create repository, ${e}`);
  }
  if (response.status !== 201) {
-    throw new Error(`Unable to create repository, ${response.status} ${response.statusText}, ${await response.text()}`);
+    throw new Error(
+      `Unable to create repository, ${response.status} ${response.statusText}, ${await response.text()}`
+    );
  }
  const r = await response.json();
  let remoteUrl = "";
@@ -2187,9 +2396,6 @@
  const repoContentsUrl = `${r.links.self[0].href}`;
  return { remoteUrl, repoContentsUrl };
};
-const getAuthorizationHeader = (config) => {
-  return `Bearer ${config.token}`;
-};
const performEnableLFS = async (opts) => {
  const { authorization, host, project, repo } = opts;
  const options = {
@@ -2198,9 +2404,14 @@ const performEnableLFS = async (opts) => {
      Authorization: authorization
    }
  };
-  const { ok, status, statusText } = await fetch__default["default"](`https://${host}/rest/git-lfs/admin/projects/${project}/repos/${repo}/enabled`, options);
+  const { ok, status, statusText } = await fetch__default["default"](
+    `https://${host}/rest/git-lfs/admin/projects/${project}/repos/${repo}/enabled`,
+    options
+  );
  if (!ok)
-    throw new Error(`Failed to enable LFS in the repository, ${status}: ${statusText}`);
+    throw new Error(
+      `Failed to enable LFS in the repository, ${status}: ${statusText}`
+    );
};
function createPublishBitbucketServerAction(options) {
  const { integrations, config } = options;
@@ -2272,17 +2483,28 @@ function createPublishBitbucketServerAction(options) {
      } = ctx.input;
      const { project, repo, host } = parseRepoUrl(repoUrl, integrations);
      if (!project) {
-        throw new errors.InputError(`Invalid URL provider was included in the repo URL to create ${ctx.input.repoUrl}, missing project`);
+        throw new errors.InputError(
+          `Invalid URL provider was included in the repo URL to create ${ctx.input.repoUrl}, missing project`
+        );
      }
      const integrationConfig = integrations.bitbucketServer.byHost(host);
      if (!integrationConfig) {
-        throw new errors.InputError(`No matching integration configuration for host ${host}, please check your integrations config`);
+        throw new errors.InputError(
+          `No matching integration configuration for host ${host}, please check your integrations config`
+        );
      }
      const token = (_a = ctx.input.token) != null ? _a : integrationConfig.config.token;
-
-
+      const authConfig = {
+        ...integrationConfig.config,
+        ...{ token }
+      };
+      const reqOpts = integration.getBitbucketServerRequestOptions(authConfig);
+      const authorization = reqOpts.headers.Authorization;
+      if (!authorization) {
+        throw new Error(
+          `Authorization has not been provided for ${integrationConfig.config.host}. Please add either (a) a user login auth token, or (b) a token or (c) username + password to the integration config.`
+        );
      }
-      const authorization = getAuthorizationHeader({ token });
      const apiBaseUrl = integrationConfig.config.apiBaseUrl;
      const { remoteUrl, repoContentsUrl } = await createRepository({
        authorization,
@@ -2296,9 +2518,11 @@
        name: config.getOptionalString("scaffolder.defaultAuthor.name"),
        email: config.getOptionalString("scaffolder.defaultAuthor.email")
      };
-      const auth = {
-
-
+      const auth = authConfig.token ? {
+        token
+      } : {
+        username: authConfig.username,
+        password: authConfig.password
      };
      await initRepoAndPush({
        dir: getRepoSourceDirectory(ctx.workspacePath, ctx.input.sourcePath),
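Note: the Bitbucket Server action no longer builds its own Bearer header. It merges any action-level token into the integration config, asks getBitbucketServerRequestOptions from @backstage/integration for the request headers, and fails when no Authorization header comes back; the git auth then mirrors that choice (token, or username + password). A hedged sketch of the selection logic, reusing names from the hunks above:

  // Sketch (assumed context): derive REST and git credentials from one config.
  const authConfig = { ...integrationConfig.config, token };
  const { headers } = integration.getBitbucketServerRequestOptions(authConfig);
  const authorization = headers.Authorization; // absent when nothing is configured
  const auth = authConfig.token
    ? { token }
    : { username: authConfig.username, password: authConfig.password };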
@@ -2306,7 +2530,9 @@ function createPublishBitbucketServerAction(options) {
        auth,
        defaultBranch,
        logger: ctx.logger,
-        commitMessage: config.getOptionalString("scaffolder.defaultCommitMessage"),
+        commitMessage: config.getOptionalString(
+          "scaffolder.defaultCommitMessage"
+        ),
        gitAuthorInfo
      });
      if (enableLFS) {
@@ -2361,9 +2587,14 @@ const createGerritProject = async (config, options) => {
      "Content-Type": "application/json"
    }
  };
-  const response = await fetch__default["default"](`${config.baseUrl}/a/projects/${encodeURIComponent(projectName)}`, fetchOptions);
+  const response = await fetch__default["default"](
+    `${config.baseUrl}/a/projects/${encodeURIComponent(projectName)}`,
+    fetchOptions
+  );
  if (response.status !== 201) {
-    throw new Error(`Unable to create repository, ${response.status} ${response.statusText}, ${await response.text()}`);
+    throw new Error(
+      `Unable to create repository, ${response.status} ${response.statusText}, ${await response.text()}`
+    );
  }
};
const generateCommitMessage = (config, commitSubject) => {
@@ -2442,13 +2673,20 @@ function createPublishGerritAction(options) {
        gitCommitMessage = "initial commit",
        sourcePath
      } = ctx.input;
-      const { repo, host, owner, workspace } = parseRepoUrl(repoUrl, integrations);
+      const { repo, host, owner, workspace } = parseRepoUrl(
+        repoUrl,
+        integrations
+      );
      const integrationConfig = integrations.gerrit.byHost(host);
      if (!integrationConfig) {
-        throw new errors.InputError(`No matching integration configuration for host ${host}, please check your integrations config`);
+        throw new errors.InputError(
+          `No matching integration configuration for host ${host}, please check your integrations config`
+        );
      }
      if (!workspace) {
-        throw new errors.InputError(`Invalid URL provider was included in the repo URL to create ${ctx.input.repoUrl}, missing workspace`);
+        throw new errors.InputError(
+          `Invalid URL provider was included in the repo URL to create ${ctx.input.repoUrl}, missing workspace`
+        );
      }
      await createGerritProject(integrationConfig.config, {
        description,
@@ -2556,7 +2794,9 @@ function createPublishGerritReviewAction(options) {
      }
      const integrationConfig = integrations.gerrit.byHost(host);
      if (!integrationConfig) {
-        throw new errors.InputError(`No matching integration configuration for host ${host}, please check your integrations config`);
+        throw new errors.InputError(
+          `No matching integration configuration for host ${host}, please check your integrations config`
+        );
      }
      const auth = {
        username: integrationConfig.config.username,
@@ -2661,10 +2901,42 @@ function createPublishGithubAction(options) {
      if (!owner) {
        throw new errors.InputError("Invalid repository owner provided in repoUrl");
      }
-      const newRepo = await createGithubRepoWithCollaboratorsAndTopics(client, repo, owner, repoVisibility, description, deleteBranchOnMerge, allowMergeCommit, allowSquashMerge, allowRebaseMerge, access, collaborators, topics, ctx.logger);
+      const newRepo = await createGithubRepoWithCollaboratorsAndTopics(
+        client,
+        repo,
+        owner,
+        repoVisibility,
+        description,
+        deleteBranchOnMerge,
+        allowMergeCommit,
+        allowSquashMerge,
+        allowRebaseMerge,
+        access,
+        collaborators,
+        topics,
+        ctx.logger
+      );
      const remoteUrl = newRepo.clone_url;
      const repoContentsUrl = `${newRepo.html_url}/blob/${defaultBranch}`;
-      await initRepoPushAndProtect(remoteUrl, octokitOptions.auth, ctx.workspacePath, ctx.input.sourcePath, defaultBranch, protectDefaultBranch, protectEnforceAdmins, owner, client, repo, requireCodeOwnerReviews, requiredStatusCheckContexts, config, ctx.logger, gitCommitMessage, gitAuthorName, gitAuthorEmail);
+      await initRepoPushAndProtect(
+        remoteUrl,
+        octokitOptions.auth,
+        ctx.workspacePath,
+        ctx.input.sourcePath,
+        defaultBranch,
+        protectDefaultBranch,
+        protectEnforceAdmins,
+        owner,
+        client,
+        repo,
+        requireCodeOwnerReviews,
+        requiredStatusCheckContexts,
+        config,
+        ctx.logger,
+        gitCommitMessage,
+        gitAuthorName,
+        gitAuthorEmail
+      );
      ctx.output("remoteUrl", remoteUrl);
      ctx.output("repoContentsUrl", repoContentsUrl);
    }
@@ -2691,11 +2963,15 @@ async function serializeDirectoryContents(sourcePath, options) {
    stats: true
  });
  const limiter = limiterFactory__default["default"](10);
-  return Promise.all(paths.map(async ({ path: path$1, stats }) => ({
-    path: path$1,
-    content: await limiter(async () => fs__default["default"].readFile(path.join(sourcePath, path$1))),
-    executable: isExecutable(stats == null ? void 0 : stats.mode)
-  })));
+  return Promise.all(
+    paths.map(async ({ path: path$1, stats }) => ({
+      path: path$1,
+      content: await limiter(
+        async () => fs__default["default"].readFile(path.join(sourcePath, path$1))
+      ),
+      executable: isExecutable(stats == null ? void 0 : stats.mode)
+    }))
+  );
}

async function deserializeDirectoryContents(targetPath, files) {
@@ -2716,7 +2992,9 @@ const defaultClientFactory = async ({
  host = "github.com",
  token: providedToken
}) => {
-  const [encodedHost, encodedOwner, encodedRepo] = [host, owner, repo].map(encodeURIComponent);
+  const [encodedHost, encodedOwner, encodedRepo] = [host, owner, repo].map(
+    encodeURIComponent
+  );
  const octokitOptions = await getOctokitOptions({
    integrations,
    credentialsProvider: githubCredentialsProvider,
@@ -2810,7 +3088,9 @@ const createPublishGithubPullRequestAction = ({
    } = ctx.input;
    const { owner, repo, host } = parseRepoUrl(repoUrl, integrations);
    if (!owner) {
-      throw new errors.InputError(`No owner provided for host: ${host}, and repo ${repo}`);
+      throw new errors.InputError(
+        `No owner provided for host: ${host}, and repo ${repo}`
+      );
    }
    const client = await clientFactory({
      integrations,
@@ -2824,14 +3104,16 @@
    const directoryContents = await serializeDirectoryContents(fileRoot, {
      gitignore: true
    });
-    const files = Object.fromEntries(directoryContents.map((file) => [
-      targetPath ? path__default["default"].posix.join(targetPath, file.path) : file.path,
-      {
-        mode: file.executable ? "100755" : "100644",
-        encoding: "base64",
-        content: file.content.toString("base64")
-      }
-    ]));
+    const files = Object.fromEntries(
+      directoryContents.map((file) => [
+        targetPath ? path__default["default"].posix.join(targetPath, file.path) : file.path,
+        {
+          mode: file.executable ? "100755" : "100644",
+          encoding: "base64",
+          content: file.content.toString("base64")
+        }
+      ])
+    );
    try {
      const response = await client.createPullRequest({
        owner,
@@ -2941,11 +3223,15 @@ function createPublishGitlabAction(options) {
      } = ctx.input;
      const { owner, repo, host } = parseRepoUrl(repoUrl, integrations);
      if (!owner) {
-        throw new errors.InputError(`No owner provided for host: ${host}, and repo ${repo}`);
+        throw new errors.InputError(
+          `No owner provided for host: ${host}, and repo ${repo}`
+        );
      }
      const integrationConfig = integrations.gitlab.byHost(host);
      if (!integrationConfig) {
-        throw new errors.InputError(`No matching integration configuration for host ${host}, please check your integrations config`);
+        throw new errors.InputError(
+          `No matching integration configuration for host ${host}, please check your integrations config`
+        );
      }
      if (!integrationConfig.config.token && !ctx.input.token) {
        throw new errors.InputError(`No token available for host ${host}`);
@@ -3091,7 +3377,9 @@ const createPublishGitlabMergeRequestAction = (options) => {
    const integrationConfig = integrations.gitlab.byHost(host);
    const destinationBranch = ctx.input.branchName;
    if (!integrationConfig) {
-      throw new errors.InputError(`No matching integration configuration for host ${host}, please check your integrations config`);
+      throw new errors.InputError(
+        `No matching integration configuration for host ${host}, please check your integrations config`
+      );
    }
    if (!integrationConfig.config.token && !ctx.input.token) {
      throw new errors.InputError(`No token available for host ${host}`);
@@ -3109,10 +3397,15 @@
        const assigneeUser = await api.Users.username(assignee);
        assigneeId = assigneeUser[0].id;
      } catch (e) {
-        ctx.logger.warn(`Failed to find gitlab user id for ${assignee}: ${e}. Proceeding with MR creation without an assignee.`);
+        ctx.logger.warn(
+          `Failed to find gitlab user id for ${assignee}: ${e}. Proceeding with MR creation without an assignee.`
+        );
      }
    }
-    const targetPath = backendCommon.resolveSafeChildPath(ctx.workspacePath, ctx.input.targetPath);
+    const targetPath = backendCommon.resolveSafeChildPath(
+      ctx.workspacePath,
+      ctx.input.targetPath
+    );
    const fileContents = await serializeDirectoryContents(targetPath, {
      gitignore: true
    });
@@ -3129,21 +3422,38 @@
    const projects = await api.Projects.show(projectPath);
    const { default_branch: defaultBranch } = projects;
    try {
-      await api.Branches.create(projectPath, destinationBranch, String(defaultBranch));
+      await api.Branches.create(
+        projectPath,
+        destinationBranch,
+        String(defaultBranch)
+      );
    } catch (e) {
      throw new errors.InputError(`The branch creation failed ${e}`);
    }
    try {
-      await api.Commits.create(projectPath, destinationBranch, ctx.input.title, actions);
+      await api.Commits.create(
+        projectPath,
+        destinationBranch,
+        ctx.input.title,
+        actions
+      );
    } catch (e) {
-      throw new errors.InputError(`Committing the changes to ${destinationBranch} failed ${e}`);
+      throw new errors.InputError(
+        `Committing the changes to ${destinationBranch} failed ${e}`
+      );
    }
    try {
-      const mergeRequestUrl = await api.MergeRequests.create(projectPath, destinationBranch, String(defaultBranch), ctx.input.title, {
-        description: ctx.input.description,
-        removeSourceBranch: ctx.input.removeSourceBranch ? ctx.input.removeSourceBranch : false,
-        assigneeId
-      }).then((mergeRequest) => {
+      const mergeRequestUrl = await api.MergeRequests.create(
+        projectPath,
+        destinationBranch,
+        String(defaultBranch),
+        ctx.input.title,
+        {
+          description: ctx.input.description,
+          removeSourceBranch: ctx.input.removeSourceBranch ? ctx.input.removeSourceBranch : false,
+          assigneeId
+        }
+      ).then((mergeRequest) => {
        return mergeRequest.web_url;
      });
      ctx.output("projectid", projectPath);
@@ -3251,14 +3561,18 @@ class TemplateActionRegistry {
  }
  register(action) {
    if (this.actions.has(action.id)) {
-      throw new errors.ConflictError(`Template action with ID '${action.id}' has already been registered`);
+      throw new errors.ConflictError(
+        `Template action with ID '${action.id}' has already been registered`
+      );
    }
    this.actions.set(action.id, action);
  }
  get(actionId) {
    const action = this.actions.get(actionId);
    if (!action) {
-      throw new errors.NotFoundError(`Template action with ID '${actionId}' is not registered.`);
+      throw new errors.NotFoundError(
+        `Template action with ID '${actionId}' is not registered.`
+      );
    }
    return action;
  }
@@ -3267,7 +3581,10 @@ class TemplateActionRegistry {
  }
}

-const migrationsDir = backendCommon.resolvePackagePath("@backstage/plugin-scaffolder-backend", "migrations");
+const migrationsDir = backendCommon.resolvePackagePath(
+  "@backstage/plugin-scaffolder-backend",
+  "migrations"
+);
const parseSqlDateToIsoString = (input) => {
  if (typeof input === "string") {
    return luxon.DateTime.fromSQL(input, { zone: "UTC" }).toISO();
@@ -3382,10 +3699,14 @@ class DatabaseTaskStore {
    }
  }
  async listStaleTasks({ timeoutS }) {
-    const rawRows = await this.db("tasks").where("status", "processing").andWhere("last_heartbeat_at", "<=", this.db.client.config.client.includes("sqlite3") ? this.db.raw(`datetime('now', ?)`, [`-${timeoutS} seconds`]) : this.db.raw(`dateadd('second', ?, ?)`, [
-      `-${timeoutS}`,
-      this.db.fn.now()
-    ]));
+    const rawRows = await this.db("tasks").where("status", "processing").andWhere(
+      "last_heartbeat_at",
+      "<=",
+      this.db.client.config.client.includes("sqlite3") ? this.db.raw(`datetime('now', ?)`, [`-${timeoutS} seconds`]) : this.db.raw(`dateadd('second', ?, ?)`, [
+        `-${timeoutS}`,
+        this.db.fn.now()
+      ])
+    );
    const tasks = rawRows.map((row) => ({
      taskId: row.id
    }));
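Note: the stale-task query stays dialect-aware: on SQLite the heartbeat cutoff is computed with datetime('now', '-N seconds'), otherwise with dateadd('second', -N, now()), both via knex raw bindings. A minimal sketch of the branch, assuming db is the knex instance used above:

  // Sketch (assumed context): pick the dialect-specific cutoff expression.
  const cutoff = db.client.config.client.includes("sqlite3")
    ? db.raw(`datetime('now', ?)`, [`-${timeoutS} seconds`])
    : db.raw(`dateadd('second', ?, ?)`, [`-${timeoutS}`, db.fn.now()]);
  const rows = await db("tasks")
    .where("status", "processing")
    .andWhere("last_heartbeat_at", "<=", cutoff);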
@@ -3400,7 +3721,9 @@ class DatabaseTaskStore {
     if (status === "failed" || status === "completed") {
       oldStatus = "processing";
     } else {
-      throw new Error(
+      throw new Error(
+        `Invalid status update of run '${taskId}' to status '${status}'`
+      );
     }
     await this.db.transaction(async (tx) => {
       const [task] = await tx("tasks").where({
@@ -3410,7 +3733,9 @@ class DatabaseTaskStore {
         throw new Error(`No task with taskId ${taskId} found`);
       }
       if (task.status !== oldStatus) {
-        throw new errors.ConflictError(
+        throw new errors.ConflictError(
+          `Refusing to update status of run '${taskId}' to status '${status}' as it is currently '${task.status}', expected '${oldStatus}'`
+        );
       }
       const updateCount = await tx("tasks").where({
         id: taskId,
@@ -3419,7 +3744,9 @@ class DatabaseTaskStore {
         status
       });
       if (updateCount !== 1) {
-        throw new errors.ConflictError(
+        throw new errors.ConflictError(
+          `Failed to update status to '${status}' for taskId ${taskId}`
+        );
       }
       await tx("task_events").insert({
         task_id: taskId,
@@ -3459,7 +3786,9 @@ class DatabaseTaskStore {
         createdAt: parseSqlDateToIsoString(event.created_at)
       };
     } catch (error) {
-      throw new Error(
+      throw new Error(
+        `Failed to parse event body from event taskId=${taskId} id=${event.id}, ${error}`
+      );
     }
   });
   return { events };
@@ -3520,7 +3849,10 @@ class TaskManager {
       this.startTimeout();
     } catch (error) {
       this.isDone = true;
-      this.logger.error(
+      this.logger.error(
+        `Heartbeat for task ${this.task.taskId} failed`,
+        error
+      );
     }
   }, 1e3);
 }
@@ -3541,7 +3873,9 @@ class StorageTaskBroker {
   }
   async list(options) {
     if (!this.storage.list) {
-      throw new Error(
+      throw new Error(
+        "TaskStore does not implement the list method. Please implement the list method to be able to list tasks"
+      );
     }
     return await this.storage.list({ createdBy: options == null ? void 0 : options.createdBy });
   }
@@ -3549,12 +3883,16 @@ class StorageTaskBroker {
     for (; ; ) {
       const pendingTask = await this.storage.claimTask();
       if (pendingTask) {
-        return TaskManager.create(
-
-
-
-
-
+        return TaskManager.create(
+          {
+            taskId: pendingTask.id,
+            spec: pendingTask.spec,
+            secrets: pendingTask.secrets,
+            createdBy: pendingTask.createdBy
+          },
+          this.storage,
+          this.logger
+        );
       }
       await this.waitForDispatch();
     }
@@ -3592,19 +3930,21 @@ class StorageTaskBroker {
   }
   async vacuumTasks(options) {
     const { tasks } = await this.storage.listStaleTasks(options);
-    await Promise.all(
-
-
-
-
-
-
-
-
-
-
-
-
+    await Promise.all(
+      tasks.map(async (task) => {
+        try {
+          await this.storage.completeTask({
+            taskId: task.taskId,
+            status: "failed",
+            eventBody: {
+              message: "The task was cancelled because the task worker lost connection to the task broker"
+            }
+          });
+        } catch (error) {
+          this.logger.warn(`Failed to cancel task '${task.taskId}', ${error}`);
+        }
+      })
+    );
   }
   waitForDispatch() {
     return this.deferredDispatch.promise;
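
Read together with the TaskManager hunk above (a heartbeat fired from setInterval every 1e3 ms), vacuumTasks is the reaper half of a liveness protocol: workers refresh last_heartbeat_at every second, and any "processing" task whose heartbeat is older than timeoutS is force-failed. A rough sketch of both halves under those assumptions; TaskStoreLike is a hypothetical, trimmed-down interface, not the package's actual TaskStore type.

// Hypothetical store interface, reduced to what the sketch needs.
interface TaskStoreLike {
  heartbeatTask(taskId: string): Promise<void>;
  listStaleTasks(opts: { timeoutS: number }): Promise<{ tasks: { taskId: string }[] }>;
  completeTask(opts: { taskId: string; status: 'failed'; eventBody: { message: string } }): Promise<void>;
}

// Worker side: keep the claimed task alive, as in the TaskManager hunk.
function startHeartbeat(store: TaskStoreLike, taskId: string): () => void {
  const handle = setInterval(() => {
    store.heartbeatTask(taskId).catch(error => {
      console.error(`Heartbeat for task ${taskId} failed`, error);
    });
  }, 1000);
  return () => clearInterval(handle); // stop once the task finishes
}

// Broker side: fail anything that stopped heartbeating, as in vacuumTasks.
async function vacuum(store: TaskStoreLike, timeoutS: number): Promise<void> {
  const { tasks } = await store.listStaleTasks({ timeoutS });
  await Promise.all(
    tasks.map(async task => {
      try {
        await store.completeTask({
          taskId: task.taskId,
          status: 'failed',
          eventBody: { message: 'The task was cancelled because the task worker lost connection to the task broker' },
        });
      } catch (error) {
        console.warn(`Failed to cancel task '${task.taskId}', ${error}`);
      }
    }),
  );
}
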
@@ -3625,10 +3965,12 @@ function generateExampleOutput(schema) {
     return examples[0];
   }
   if (schema.type === "object") {
-    return Object.fromEntries(
-      key,
-
-
+    return Object.fromEntries(
+      Object.entries((_a = schema.properties) != null ? _a : {}).map(([key, value]) => [
+        key,
+        generateExampleOutput(value)
+      ])
+    );
   } else if (schema.type === "array") {
     const [firstSchema] = (_b = [schema.items]) == null ? void 0 : _b.flat();
     if (firstSchema) {
@@ -3655,7 +3997,11 @@ const createStepLogger = ({
   const metadata = { stepId: step.id };
   const taskLogger = winston__namespace.createLogger({
     level: process.env.LOG_LEVEL || "info",
-    format: winston__namespace.format.combine(
+    format: winston__namespace.format.combine(
+      winston__namespace.format.colorize(),
+      winston__namespace.format.timestamp(),
+      winston__namespace.format.simple()
+    ),
     defaultMeta: {}
   });
   const streamLogger = new stream.PassThrough();
@@ -3675,13 +4021,17 @@ class NunjucksWorkflowRunner {
   isSingleTemplateString(input) {
     var _a, _b;
    const { parser, nodes } = nunjucks__default["default"];
-    const parsed = parser.parse(
-
-
-
-
+    const parsed = parser.parse(
+      input,
+      {},
+      {
+        autoescape: false,
+        tags: {
+          variableStart: "${{",
+          variableEnd: "}}"
+        }
       }
-
+    );
     return parsed.children.length === 1 && !(((_b = (_a = parsed.children[0]) == null ? void 0 : _a.children) == null ? void 0 : _b[0]) instanceof nodes.TemplateData);
   }
   render(input, context, renderTemplate) {
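
The isSingleTemplateString hunk parses the input with nunjucks configured for ${{ ... }} delimiters and then checks that the parse tree is a single non-literal node. A sketch of the same idea; the parser.parse(src, extensions, opts) call shape is taken from the diff itself, and parser/nodes are internal nunjucks exports absent from its public typings, hence the cast.

import nunjucks from 'nunjucks';

// Sketch: is a value exactly one `${{ ... }}` expression (so its evaluated
// type should be preserved), rather than a string with embedded templates?
function isSingleTemplateString(input: string): boolean {
  const { parser, nodes } = nunjucks as any; // internal APIs, not in typings
  const parsed = parser.parse(
    input,
    {},
    {
      autoescape: false,
      tags: { variableStart: '${{', variableEnd: '}}' },
    },
  );
  // A single child that is not plain TemplateData means the whole string
  // is one template expression.
  return (
    parsed.children.length === 1 &&
    !(parsed.children[0]?.children?.[0] instanceof nodes.TemplateData)
  );
}

// e.g. isSingleTemplateString('${{ parameters.name }}')      -> true
//      isSingleTemplateString('repo-${{ parameters.name }}') -> false
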
@@ -3690,7 +4040,10 @@ class NunjucksWorkflowRunner {
       if (typeof value === "string") {
         try {
           if (this.isSingleTemplateString(value)) {
-            const wrappedDumped = value.replace(
+            const wrappedDumped = value.replace(
+              /\${{(.+)}}/g,
+              "${{ ( $1 ) | dump }}"
+            );
             const templated2 = renderTemplate(wrappedDumped, context);
             if (templated2 === "") {
               return void 0;
@@ -3698,7 +4051,9 @@ class NunjucksWorkflowRunner {
             return JSON.parse(templated2);
           }
         } catch (ex) {
-          this.options.logger.error(
+          this.options.logger.error(
+            `Failed to parse template string: ${value} with error ${ex.message}`
+          );
         }
         const templated = renderTemplate(value, context);
         if (templated === "") {
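
The two render hunks above preserve non-string values through templating: a value that is a single ${{ ... }} expression is rewritten to ${{ ( expr ) | dump }}, so nunjucks serializes the result as JSON via its built-in dump filter, and JSON.parse then restores the original type (object, array, number, boolean). A small sketch of that round-trip; renderTemplate and isSingleTemplateString stand in for the secure renderer and the check from the diff.

// Sketch of the dump/parse round-trip used by render() above.
function renderValue(
  value: string,
  context: Record<string, unknown>,
  renderTemplate: (tmpl: string, ctx: Record<string, unknown>) => string,
  isSingleTemplateString: (s: string) => boolean,
): unknown {
  if (isSingleTemplateString(value)) {
    // "${{ parameters.tags }}" -> "${{ ( parameters.tags ) | dump }}"
    const wrapped = value.replace(/\${{(.+)}}/g, '${{ ( $1 ) | dump }}');
    const rendered = renderTemplate(wrapped, context);
    if (rendered === '') {
      return undefined; // the expression evaluated to nothing
    }
    return JSON.parse(rendered); // an array stays an array, a number a number
  }
  // Mixed strings ("prefix-${{ x }}") render as plain text instead.
  return renderTemplate(value, context);
}
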
@@ -3715,9 +4070,14 @@ class NunjucksWorkflowRunner {
   async execute(task) {
     var _a, _b, _c, _d, _e;
     if (!isValidTaskSpec(task.spec)) {
-      throw new errors.InputError(
+      throw new errors.InputError(
+        "Wrong template version executed with the workflow engine"
+      );
     }
-    const workspacePath = path__default["default"].join(
+    const workspacePath = path__default["default"].join(
+      this.options.workingDirectory,
+      await task.getWorkspaceName()
+    );
     const { integrations } = this.options;
     const renderTemplate = await SecureTemplater.loadRenderer({
       parseRepoUrl(url) {
@@ -3727,7 +4087,9 @@ class NunjucksWorkflowRunner {
     });
     try {
       await fs__default["default"].ensureDir(workspacePath);
-      await task.emitLog(
+      await task.emitLog(
+        `Starting up task with ${task.spec.steps.length} steps`
+      );
       const context = {
         parameters: task.spec.parameters,
         steps: {},
@@ -3736,9 +4098,16 @@ class NunjucksWorkflowRunner {
       for (const step of task.spec.steps) {
         try {
          if (step.if) {
-            const ifResult = await this.render(
+            const ifResult = await this.render(
+              step.if,
+              context,
+              renderTemplate
+            );
             if (!isTruthy(ifResult)) {
-              await task.emitLog(
+              await task.emitLog(
+                `Skipping step ${step.id} because it's if condition was false`,
+                { stepId: step.id, status: "skipped" }
+              );
               continue;
             }
           }
@@ -3749,10 +4118,13 @@ class NunjucksWorkflowRunner {
         const action = this.options.actionRegistry.get(step.action);
         const { taskLogger, streamLogger } = createStepLogger({ task, step });
         if (task.isDryRun && !action.supportsDryRun) {
-          task.emitLog(
-
-
-
+          task.emitLog(
+            `Skipping because ${action.id} does not support dry-run`,
+            {
+              stepId: step.id,
+              status: "skipped"
+            }
+          );
           const outputSchema = (_a = action.schema) == null ? void 0 : _a.output;
           if (outputSchema) {
             context.steps[step.id] = {
@@ -3763,12 +4135,21 @@ class NunjucksWorkflowRunner {
           }
           continue;
         }
-        const input = (_c = step.input && this.render(
+        const input = (_c = step.input && this.render(
+          step.input,
+          { ...context, secrets: (_b = task.secrets) != null ? _b : {} },
+          renderTemplate
+        )) != null ? _c : {};
         if ((_d = action.schema) == null ? void 0 : _d.input) {
-          const validateResult = jsonschema.validate(
+          const validateResult = jsonschema.validate(
+            input,
+            action.schema.input
+          );
           if (!validateResult.valid) {
             const errors$1 = validateResult.errors.join(", ");
-            throw new errors.InputError(
+            throw new errors.InputError(
+              `Invalid input passed to action ${action.id}, ${errors$1}`
+            );
           }
         }
         const tmpDirs = new Array();
@@ -3780,7 +4161,9 @@ class NunjucksWorkflowRunner {
           logStream: streamLogger,
           workspacePath,
          createTemporaryDirectory: async () => {
-            const tmpDir = await fs__default["default"].mkdtemp(
+            const tmpDir = await fs__default["default"].mkdtemp(
+              `${workspacePath}_step-${step.id}-`
+            );
             tmpDirs.push(tmpDir);
             return tmpDir;
           },
@@ -3851,9 +4234,13 @@ class TaskWorker {
   async runOneTask(task) {
     try {
       if (task.spec.apiVersion !== "scaffolder.backstage.io/v1beta3") {
-        throw new Error(
+        throw new Error(
+          `Unsupported Template apiVersion ${task.spec.apiVersion}`
+        );
       }
-      const { output } = await this.options.runners.workflowRunner.execute(
+      const { output } = await this.options.runners.workflowRunner.execute(
+        task
+      );
       await task.complete("completed", { output });
     } catch (error) {
       errors.assertError(error);
@@ -3900,7 +4287,10 @@ function createDryRunner(options) {
   });
   const dryRunId = uuid.v4();
   const log = new Array();
-  const contentsPath = backendCommon.resolveSafeChildPath(
+  const contentsPath = backendCommon.resolveSafeChildPath(
+    options.workingDirectory,
+    `dry-run-content-${dryRunId}`
+  );
   try {
     await deserializeDirectoryContents(contentsPath, input.directoryContents);
     const result = await workflowRunner.execute({
@@ -3916,7 +4306,9 @@ function createDryRunner(options) {
       ],
       templateInfo: {
         entityRef: "template:default/dry-run",
-        baseUrl: url.pathToFileURL(
+        baseUrl: url.pathToFileURL(
+          backendCommon.resolveSafeChildPath(contentsPath, "template.yaml")
+        ).toString()
       }
     },
     secrets: input.secrets,
@@ -3963,7 +4355,9 @@ async function getWorkingDirectory(config, logger) {
     logger.info(`using working directory: ${workingDirectory}`);
   } catch (err) {
     errors.assertError(err);
-    logger.error(
+    logger.error(
+      `working directory ${workingDirectory} ${err.code === "ENOENT" ? "does not exist" : "is not writable"}`
+    );
    throw err;
   }
   return workingDirectory;
@@ -3992,7 +4386,9 @@ async function findTemplate(options) {
   }
   const template = await catalogApi.getEntityByRef(entityRef, { token });
   if (!template) {
-    throw new errors.NotFoundError(
+    throw new errors.NotFoundError(
+      `Template ${catalogModel.stringifyEntityRef(entityRef)} not found`
+    );
   }
   return template;
 }
@@ -4054,31 +4450,36 @@ async function createRouter(options) {
     workingDirectory,
     additionalTemplateFilters
   });
-  router.get(
-
-
-
-
-
-
-
-
-
-    const parameters = [(_a = template.spec.parameters) != null ? _a : []].flat();
-    res.json({
-      title: (_b = template.metadata.title) != null ? _b : template.metadata.name,
-      steps: parameters.map((schema) => {
-        var _a2;
-        return {
-          title: (_a2 = schema.title) != null ? _a2 : "Fill in template parameters",
-          schema
-        };
-      })
+  router.get(
+    "/v2/templates/:namespace/:kind/:name/parameter-schema",
+    async (req, res) => {
+      var _a, _b;
+      const { namespace, kind, name } = req.params;
+      const { token } = parseBearerToken(req.headers.authorization);
+      const template = await findTemplate({
+        catalogApi: catalogClient,
+        entityRef: { kind, namespace, name },
+        token
       });
-
-
+      if (isSupportedTemplate(template)) {
+        const parameters = [(_a = template.spec.parameters) != null ? _a : []].flat();
+        res.json({
+          title: (_b = template.metadata.title) != null ? _b : template.metadata.name,
+          steps: parameters.map((schema) => {
+            var _a2;
+            return {
+              title: (_a2 = schema.title) != null ? _a2 : "Fill in template parameters",
+              schema
+            };
+          })
+        });
+      } else {
+        throw new errors.InputError(
+          `Unsupported apiVersion field in schema entity, ${template.apiVersion}`
+        );
+      }
     }
-
+  ).get("/v2/actions", async (_req, res) => {
     const actionsList = actionRegistry.list().map((action) => {
       return {
         id: action.id,
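
The rewritten /v2/templates/:namespace/:kind/:name/parameter-schema handler now rejects templates whose apiVersion fails isSupportedTemplate instead of assuming the entity shape. For a supported template, spec.parameters is flattened into one form step per schema, with "Fill in template parameters" as the fallback step title. An illustrative response for a hypothetical template with two parameter schemas (the values here are made up):

// Illustrative only: shape of the JSON returned by the route above.
const exampleParameterSchemaResponse = {
  title: 'Example Node.js Service', // metadata.title, falling back to metadata.name
  steps: [
    {
      title: 'Fill in template parameters', // default when a schema has no title
      schema: { type: 'object', properties: { name: { type: 'string' } } },
    },
    {
      title: 'Choose a location',
      schema: { type: 'object', properties: { repoUrl: { type: 'string' } } },
    },
  ],
};
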
@@ -4093,7 +4494,9 @@ async function createRouter(options) {
     const { kind, namespace, name } = catalogModel.parseEntityRef(templateRef, {
       defaultKind: "template"
     });
-    const { token, entityRef: userEntityRef } = parseBearerToken(
+    const { token, entityRef: userEntityRef } = parseBearerToken(
+      req.headers.authorization
+    );
     const userEntity = userEntityRef ? await catalogClient.getEntityByRef(userEntityRef, { token }) : void 0;
     let auditLog = `Scaffolding task for ${templateRef}`;
     if (userEntityRef) {
@@ -4107,7 +4510,9 @@ async function createRouter(options) {
       token
     });
     if (!isSupportedTemplate(template)) {
-      throw new errors.InputError(
+      throw new errors.InputError(
+        `Unsupported apiVersion field in schema entity, ${template.apiVersion}`
+      );
     }
     for (const parameters of [(_a = template.spec.parameters) != null ? _a : []].flat()) {
       const result2 = jsonschema.validate(values, parameters);
@@ -4157,7 +4562,9 @@ async function createRouter(options) {
       throw new errors.InputError("createdBy query parameter must be a string");
     }
     if (!taskBroker.list) {
-      throw new Error(
+      throw new Error(
+        "TaskBroker does not support listing tasks, please implement the list method on the TaskBroker."
+      );
     }
     const tasks = await taskBroker.list({
       createdBy: userEntityRef
@@ -4182,23 +4589,30 @@ async function createRouter(options) {
     });
     const subscription = taskBroker.event$({ taskId, after }).subscribe({
       error: (error) => {
-        logger.error(
+        logger.error(
+          `Received error from event stream when observing taskId '${taskId}', ${error}`
+        );
+        res.end();
       },
       next: ({ events }) => {
         var _a;
         let shouldUnsubscribe = false;
         for (const event of events) {
-          res.write(
+          res.write(
+            `event: ${event.type}
 data: ${JSON.stringify(event)}
 
-`
+`
+          );
           if (event.type === "completion") {
             shouldUnsubscribe = true;
           }
         }
         (_a = res.flush) == null ? void 0 : _a.call(res);
-        if (shouldUnsubscribe)
+        if (shouldUnsubscribe) {
           subscription.unsubscribe();
+          res.end();
+        }
       }
     });
     req.on("close", () => {
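
The event-stream hunk above writes Server-Sent Events frames by hand and, as of this diff, also ends the HTTP response: on stream error, and after the completion event once the subscription is dropped, so clients are no longer left with a hanging connection. The frame layout is the standard SSE "event:"/"data:" pair terminated by a blank line. A minimal sketch of that framing, independent of the task broker:

import type { ServerResponse } from 'http';

// Minimal SSE framing matching the layout written in the hunk above:
// each frame is "event: <type>\ndata: <json>\n\n".
function writeSseEvent(res: ServerResponse, event: { type: string; [key: string]: unknown }): void {
  res.write(`event: ${event.type}\ndata: ${JSON.stringify(event)}\n\n`);
}

// Usage mirroring the handler: close the stream once the task completes.
function onEvents(
  res: ServerResponse,
  events: Array<{ type: string }>,
  unsubscribe: () => void,
): void {
  let done = false;
  for (const event of events) {
    writeSseEvent(res, event);
    if (event.type === 'completion') {
      done = true;
    }
  }
  if (done) {
    unsubscribe();
    res.end(); // added in this diff so the client connection is released
  }
}
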
@@ -4213,7 +4627,9 @@ data: ${JSON.stringify(event)}
     }, 3e4);
     const subscription = taskBroker.event$({ taskId, after }).subscribe({
       error: (error) => {
-        logger.error(
+        logger.error(
+          `Received error from event stream when observing taskId '${taskId}', ${error}`
+        );
       },
       next: ({ events }) => {
         clearTimeout(timeout);
@@ -4231,7 +4647,9 @@ data: ${JSON.stringify(event)}
     template: zod.z.unknown(),
     values: zod.z.record(zod.z.unknown()),
     secrets: zod.z.record(zod.z.string()).optional(),
-    directoryContents: zod.z.array(
+    directoryContents: zod.z.array(
+      zod.z.object({ path: zod.z.string(), base64Content: zod.z.string() })
+    )
   });
   const body = await bodySchema.parseAsync(req.body).catch((e) => {
     throw new errors.InputError(`Malformed request: ${e}`);
@@ -4298,7 +4716,9 @@ function parseBearerToken(header) {
     throw new TypeError("Expected Bearer with JWT");
   }
   const [_header, rawPayload, _signature] = token.split(".");
-  const payload = JSON.parse(
+  const payload = JSON.parse(
+    Buffer.from(rawPayload, "base64").toString()
+  );
   if (typeof payload !== "object" || payload === null || Array.isArray(payload)) {
     throw new TypeError("Malformed JWT payload");
   }
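
parseBearerToken splits the JWT into header/payload/signature segments and base64-decodes only the payload; note that it never verifies the signature, it merely extracts claims for audit logging. A standalone sketch of that decode under the same caveat; the function name and the Bearer regex are illustrative:

// Sketch of the payload decode in parseBearerToken. This does NOT verify
// the token; it only reads claims, exactly as the hunk above does.
function decodeJwtPayload(authorization?: string): Record<string, unknown> {
  const token = authorization?.match(/^Bearer\s+(\S+)$/i)?.[1];
  if (!token || token.split('.').length !== 3) {
    throw new TypeError('Expected Bearer with JWT');
  }
  const [, rawPayload] = token.split('.');
  // Node's "base64" decoder also accepts the base64url alphabet used by JWTs.
  const payload = JSON.parse(Buffer.from(rawPayload, 'base64').toString());
  if (typeof payload !== 'object' || payload === null || Array.isArray(payload)) {
    throw new TypeError('Malformed JWT payload');
  }
  return payload;
}
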
@@ -4337,24 +4757,28 @@ class ScaffolderEntitiesProcessor {
           defaultKind: "Group",
           defaultNamespace: selfRef.namespace
         });
-        emit(
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+        emit(
+          pluginCatalogBackend.processingResult.relation({
+            source: selfRef,
+            type: catalogModel.RELATION_OWNED_BY,
+            target: {
+              kind: targetRef.kind,
+              namespace: targetRef.namespace,
+              name: targetRef.name
+            }
+          })
+        );
+        emit(
+          pluginCatalogBackend.processingResult.relation({
+            source: {
+              kind: targetRef.kind,
+              namespace: targetRef.namespace,
+              name: targetRef.name
+            },
+            type: catalogModel.RELATION_OWNER_OF,
+            target: selfRef
+          })
+        );
       }
     }
     return entity;
@@ -4370,7 +4794,9 @@ const scaffolderCatalogModule = backendPluginApi.createBackendModule({
     catalogProcessingExtensionPoint: pluginCatalogNode.catalogProcessingExtentionPoint
   },
   async init({ catalogProcessingExtensionPoint }) {
-    catalogProcessingExtensionPoint.addProcessor(
+    catalogProcessingExtensionPoint.addProcessor(
+      new ScaffolderEntitiesProcessor()
+    );
   }
 });
 }