@backstage/plugin-scaffolder-backend 1.5.1 → 1.6.0-next.2

This diff compares the contents of publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects the packages exactly as they appear in their public registries.
package/dist/index.cjs.js CHANGED
@@ -20,6 +20,7 @@ var azureDevopsNodeApi = require('azure-devops-node-api');
  var fetch = require('node-fetch');
  var crypto = require('crypto');
  var octokitPluginCreatePullRequest = require('octokit-plugin-create-pull-request');
+ var fs$1 = require('fs');
  var limiterFactory = require('p-limit');
  var node = require('@gitbeaker/node');
  var uuid = require('uuid');
@@ -283,13 +284,8 @@ async function recursiveReadDir(dir) {
  return files.reduce((a, f) => a.concat(f), []);
  }

- async function fetchContents({
- reader,
- integrations,
- baseUrl,
- fetchUrl = ".",
- outputPath
- }) {
+ async function fetchContents(options) {
+ const { reader, integrations, baseUrl, fetchUrl = ".", outputPath } = options;
  let fetchUrlIsAbsolute = false;
  try {
  new URL(fetchUrl);
@@ -1043,7 +1039,7 @@ async function getOctokitOptions(options) {
  previews: ["nebula-preview"]
  };
  }
- async function createGithubRepoWithCollaboratorsAndTopics(client, repo, owner, repoVisibility, description, deleteBranchOnMerge, allowMergeCommit, allowSquashMerge, allowRebaseMerge, access, collaborators, topics, logger) {
+ async function createGithubRepoWithCollaboratorsAndTopics(client, repo, owner, repoVisibility, description, homepage, deleteBranchOnMerge, allowMergeCommit, allowSquashMerge, allowRebaseMerge, access, collaborators, topics, logger) {
  const user = await client.rest.users.getByUsername({
  username: owner
  });
@@ -1056,7 +1052,8 @@ async function createGithubRepoWithCollaboratorsAndTopics(client, repo, owner, r
  delete_branch_on_merge: deleteBranchOnMerge,
  allow_merge_commit: allowMergeCommit,
  allow_squash_merge: allowSquashMerge,
- allow_rebase_merge: allowRebaseMerge
+ allow_rebase_merge: allowRebaseMerge,
+ homepage
  }) : client.rest.repos.createForAuthenticatedUser({
  name: repo,
  private: repoVisibility === "private",
@@ -1064,7 +1061,8 @@ async function createGithubRepoWithCollaboratorsAndTopics(client, repo, owner, r
  delete_branch_on_merge: deleteBranchOnMerge,
  allow_merge_commit: allowMergeCommit,
  allow_squash_merge: allowSquashMerge,
- allow_rebase_merge: allowRebaseMerge
+ allow_rebase_merge: allowRebaseMerge,
+ homepage
  });
  let newRepo;
  try {
@@ -1335,6 +1333,10 @@ const description = {
  title: "Repository Description",
  type: "string"
  };
+ const homepage = {
+ title: "Repository Homepage",
+ type: "string"
+ };
  const access = {
  title: "Repository Access",
  description: `Sets an admin collaborator on the repository. Can either be a user reference different from 'owner' in 'repoUrl' or team reference, eg. 'org/team-name'`,
@@ -1473,6 +1475,7 @@ function createGithubRepoCreateAction(options) {
  properties: {
  repoUrl: repoUrl,
  description: description,
+ homepage: homepage,
  access: access,
  requireCodeOwnerReviews: requireCodeOwnerReviews,
  requiredStatusCheckContexts: requiredStatusCheckContexts,
@@ -1498,6 +1501,7 @@ function createGithubRepoCreateAction(options) {
  const {
  repoUrl,
  description,
+ homepage,
  access,
  repoVisibility = "private",
  deleteBranchOnMerge = false,
@@ -1525,6 +1529,7 @@ function createGithubRepoCreateAction(options) {
  owner,
  repoVisibility,
  description,
+ homepage,
  deleteBranchOnMerge,
  allowMergeCommit,
  allowSquashMerge,
@@ -1800,6 +1805,10 @@ function createPublishAzureAction(options) {
  repoContentsUrl: {
  title: "A URL to the root of the repository",
  type: "string"
+ },
+ repositoryId: {
+ title: "The Id of the created repository",
+ type: "string"
  }
  }
  }
@@ -1849,6 +1858,10 @@ function createPublishAzureAction(options) {
  "No remote URL returned from create repository for Azure"
  );
  }
+ const repositoryId = returnedRepo.id;
+ if (!repositoryId) {
+ throw new errors.InputError("No Id returned from create repository for Azure");
+ }
  const repoContentsUrl = remoteUrl;
  const gitAuthorInfo = {
  name: gitAuthorName ? gitAuthorName : config.getOptionalString("scaffolder.defaultAuthor.name"),
@@ -1868,6 +1881,7 @@ function createPublishAzureAction(options) {
  });
  ctx.output("remoteUrl", remoteUrl);
  ctx.output("repoContentsUrl", repoContentsUrl);
+ ctx.output("repositoryId", repositoryId);
  }
  });
  }
@@ -2544,37 +2558,6 @@ function createPublishBitbucketServerAction(options) {
  });
  }

- function createPublishFileAction() {
- return createTemplateAction({
- id: "publish:file",
- description: "Writes contents of the workspace to a local directory",
- schema: {
- input: {
- type: "object",
- required: ["path"],
- properties: {
- path: {
- title: "Path to a directory where the output will be written",
- type: "string"
- }
- }
- }
- },
- async handler(ctx) {
- ctx.logger.warn(
- "[DEPRECATED] This action will be removed, prefer testing templates using the template editor instead."
- );
- const { path: path$1 } = ctx.input;
- const exists = await fs__default["default"].pathExists(path$1);
- if (exists) {
- throw new errors.InputError("Output path already exists");
- }
- await fs__default["default"].ensureDir(path.dirname(path$1));
- await fs__default["default"].copy(ctx.workspacePath, path$1);
- }
- });
- }
-
  const createGerritProject = async (config, options) => {
  const { projectName, parent, owner, description } = options;
  const fetchOptions = {
@@ -2843,6 +2826,7 @@ function createPublishGithubAction(options) {
  properties: {
  repoUrl: repoUrl,
  description: description,
+ homepage: homepage,
  access: access,
  requireCodeOwnerReviews: requireCodeOwnerReviews,
  requiredStatusCheckContexts: requiredStatusCheckContexts,
@@ -2875,6 +2859,7 @@ function createPublishGithubAction(options) {
  const {
  repoUrl,
  description,
+ homepage,
  access,
  requireCodeOwnerReviews = false,
  requiredStatusCheckContexts = [],
@@ -2910,6 +2895,7 @@ function createPublishGithubAction(options) {
  owner,
  repoVisibility,
  description,
+ homepage,
  deleteBranchOnMerge,
  allowMergeCommit,
  allowSquashMerge,
@@ -2955,6 +2941,10 @@ const isExecutable = (fileMode) => {
  const res = fileMode & executeBitMask;
  return res > 0;
  };
+ async function asyncFilter(array, callback) {
+ const filterMap = await Promise.all(array.map(callback));
+ return array.filter((_value, index) => filterMap[index]);
+ }
  async function serializeDirectoryContents(sourcePath, options) {
  var _a;
  const paths = await globby__default["default"]((_a = options == null ? void 0 : options.globPatterns) != null ? _a : DEFAULT_GLOB_PATTERNS, {
@@ -2962,17 +2952,36 @@ async function serializeDirectoryContents(sourcePath, options) {
  dot: true,
  gitignore: options == null ? void 0 : options.gitignore,
  followSymbolicLinks: false,
+ onlyFiles: false,
  objectMode: true,
  stats: true
  });
  const limiter = limiterFactory__default["default"](10);
+ const valid = await asyncFilter(paths, async ({ dirent, path }) => {
+ if (dirent.isDirectory())
+ return false;
+ if (!dirent.isSymbolicLink())
+ return true;
+ const safePath = backendCommon.resolveSafeChildPath(sourcePath, path);
+ try {
+ await fs$1.promises.stat(safePath);
+ return false;
+ } catch (e) {
+ return errors.isError(e) && e.code === "ENOENT";
+ }
+ });
  return Promise.all(
- paths.map(async ({ path: path$1, stats }) => ({
- path: path$1,
- content: await limiter(
- async () => fs__default["default"].readFile(path.join(sourcePath, path$1))
- ),
- executable: isExecutable(stats == null ? void 0 : stats.mode)
+ valid.map(async ({ dirent, path, stats }) => ({
+ path,
+ content: await limiter(async () => {
+ const absFilePath = backendCommon.resolveSafeChildPath(sourcePath, path);
+ if (dirent.isSymbolicLink()) {
+ return fs$1.promises.readlink(absFilePath, "buffer");
+ }
+ return fs$1.promises.readFile(absFilePath);
+ }),
+ executable: isExecutable(stats == null ? void 0 : stats.mode),
+ symlink: dirent.isSymbolicLink()
  }))
  );
  }
@@ -3005,7 +3014,10 @@ const defaultClientFactory = async ({
  token: providedToken
  });
  const OctokitPR = octokit.Octokit.plugin(octokitPluginCreatePullRequest.createPullRequest);
- return new OctokitPR(octokitOptions);
+ return new OctokitPR({
+ ...octokitOptions,
+ ...{ throttle: { enabled: false } }
+ });
  };
  const createPublishGithubPullRequestAction = ({
  integrations,
@@ -3125,13 +3137,21 @@ const createPublishGithubPullRequestAction = ({
  const directoryContents = await serializeDirectoryContents(fileRoot, {
  gitignore: true
  });
+ const determineFileMode = (file) => {
+ if (file.symlink)
+ return "120000";
+ if (file.executable)
+ return "100755";
+ return "100644";
+ };
+ const determineFileEncoding = (file) => file.symlink ? "utf-8" : "base64";
  const files = Object.fromEntries(
  directoryContents.map((file) => [
  targetPath ? path__default["default"].posix.join(targetPath, file.path) : file.path,
  {
- mode: file.executable ? "100755" : "100644",
- encoding: "base64",
- content: file.content.toString("base64")
+ mode: determineFileMode(file),
+ encoding: determineFileEncoding(file),
+ content: file.content.toString(determineFileEncoding(file))
  }
  ])
  );
@@ -3261,6 +3281,10 @@ function createPublishGitlabAction(options) {
  repoContentsUrl: {
  title: "A URL to the root of the repository",
  type: "string"
+ },
+ projectId: {
+ title: "The ID of the project",
+ type: "string"
  }
  }
  }
@@ -3333,6 +3357,7 @@ function createPublishGitlabAction(options) {
  });
  ctx.output("remoteUrl", remoteUrl);
  ctx.output("repoContentsUrl", repoContentsUrl);
+ ctx.output("projectId", projectId);
  }
  });
  }
@@ -3639,6 +3664,9 @@ const migrationsDir = backendCommon.resolvePackagePath(
  "@backstage/plugin-scaffolder-backend",
  "migrations"
  );
+ function isPluginDatabaseManager(opt) {
+ return opt.getClient !== void 0;
+ }
  const parseSqlDateToIsoString = (input) => {
  if (typeof input === "string") {
  return luxon.DateTime.fromSQL(input, { zone: "UTC" }).toISO();
@@ -3647,13 +3675,33 @@ const parseSqlDateToIsoString = (input) => {
  };
  class DatabaseTaskStore {
  static async create(options) {
- await options.database.migrate.latest({
- directory: migrationsDir
- });
- return new DatabaseTaskStore(options);
+ const { database } = options;
+ const client = await this.getClient(database);
+ await this.runMigrations(database, client);
+ return new DatabaseTaskStore(client);
  }
- constructor(options) {
- this.db = options.database;
+ static async getClient(database) {
+ if (isPluginDatabaseManager(database)) {
+ return database.getClient();
+ }
+ return database;
+ }
+ static async runMigrations(database, client) {
+ var _a;
+ if (!isPluginDatabaseManager(database)) {
+ await client.migrate.latest({
+ directory: migrationsDir
+ });
+ return;
+ }
+ if (!((_a = database.migrations) == null ? void 0 : _a.skip)) {
+ await client.migrate.latest({
+ directory: migrationsDir
+ });
+ }
+ }
+ constructor(client) {
+ this.db = client;
  }
  async list(options) {
  const queryBuilder = this.db("tasks");
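The reworked DatabaseTaskStore.create above accepts either a raw Knex client, as before, or a PluginDatabaseManager detected by the presence of getClient. A minimal sketch of both call forms, assuming env.database is a hypothetical PluginDatabaseManager from the caller's plugin environment:

// Sketch only; both forms are handled by the create() shown in the hunk above.
// Passing the manager lets migrations be skipped when database.migrations.skip is set.
const store = await DatabaseTaskStore.create({ database: env.database });
// Passing a resolved Knex client keeps the pre-1.6 behaviour and always runs migrations.
const legacyStore = await DatabaseTaskStore.create({ database: await env.database.getClient() });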
@@ -3752,7 +3800,8 @@ class DatabaseTaskStore {
  throw new errors.ConflictError(`No running task with taskId ${taskId} found`);
  }
  }
- async listStaleTasks({ timeoutS }) {
+ async listStaleTasks(options) {
+ const { timeoutS } = options;
  const rawRows = await this.db("tasks").where("status", "processing").andWhere(
  "last_heartbeat_at",
  "<=",
@@ -3766,11 +3815,8 @@ class DatabaseTaskStore {
  }));
  return { tasks };
  }
- async completeTask({
- taskId,
- status,
- eventBody
- }) {
+ async completeTask(options) {
+ const { taskId, status, eventBody } = options;
  let oldStatus;
  if (status === "failed" || status === "completed") {
  oldStatus = "processing";
@@ -3818,10 +3864,8 @@ class DatabaseTaskStore {
  body: serializedBody
  });
  }
- async listEvents({
- taskId,
- after
- }) {
+ async listEvents(options) {
+ const { taskId, after } = options;
  const rawEvents = await this.db("task_events").where({
  task_id: taskId
  }).andWhere((builder) => {
@@ -4450,6 +4494,48 @@ async function findTemplate(options) {
  function isSupportedTemplate(entity) {
  return entity.apiVersion === "scaffolder.backstage.io/v1beta3";
  }
+ function buildDefaultIdentityClient({
+ logger
+ }) {
+ return {
+ getIdentity: async ({ request }) => {
+ var _a;
+ const header = request.headers.authorization;
+ if (!header) {
+ return void 0;
+ }
+ try {
+ const token = (_a = header.match(/^Bearer\s(\S+\.\S+\.\S+)$/i)) == null ? void 0 : _a[1];
+ if (!token) {
+ throw new TypeError("Expected Bearer with JWT");
+ }
+ const [_header, rawPayload, _signature] = token.split(".");
+ const payload = JSON.parse(
+ Buffer.from(rawPayload, "base64").toString()
+ );
+ if (typeof payload !== "object" || payload === null || Array.isArray(payload)) {
+ throw new TypeError("Malformed JWT payload");
+ }
+ const sub = payload.sub;
+ if (typeof sub !== "string") {
+ throw new TypeError("Expected string sub claim");
+ }
+ catalogModel.parseEntityRef(sub);
+ return {
+ identity: {
+ userEntityRef: sub,
+ ownershipEntityRefs: [],
+ type: "user"
+ },
+ token
+ };
+ } catch (e) {
+ logger.error(`Invalid authorization header: ${errors.stringifyError(e)}`);
+ return void 0;
+ }
+ }
+ };
+ }
  async function createRouter(options) {
  const router = Router__default["default"]();
  router.use(express__default["default"].json());
@@ -4464,13 +4550,12 @@ async function createRouter(options) {
  additionalTemplateFilters
  } = options;
  const logger = parentLogger.child({ plugin: "scaffolder" });
+ const identity = options.identity || buildDefaultIdentityClient({ logger });
  const workingDirectory = await getWorkingDirectory(config, logger);
  const integrations = integration.ScmIntegrations.fromConfig(config);
  let taskBroker;
  if (!options.taskBroker) {
- const databaseTaskStore = await DatabaseTaskStore.create({
- database: await database.getClient()
- });
+ const databaseTaskStore = await DatabaseTaskStore.create({ database });
  taskBroker = new StorageTaskBroker(databaseTaskStore, logger);
  } else {
  taskBroker = options.taskBroker;
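As the hunk above shows, createRouter now resolves an identity client from options.identity and falls back to buildDefaultIdentityClient. A hedged sketch of passing one explicitly; DefaultIdentityClient from @backstage/plugin-auth-node and the discovery variable are assumptions here, not part of this diff:

// Sketch only; any object exposing getIdentity({ request }) satisfies the new option.
const { DefaultIdentityClient } = require('@backstage/plugin-auth-node');
const router = await createRouter({
  logger,
  config,
  database,
  reader,
  catalogClient,
  actions,
  identity: DefaultIdentityClient.create({ discovery }), // omit to fall back to the built-in JWT parsing shown above
});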
@@ -4509,10 +4594,10 @@ async function createRouter(options) {
  async (req, res) => {
  var _a, _b;
  const { namespace, kind, name } = req.params;
- const { token } = parseBearerToken({
- header: req.headers.authorization,
- logger
+ const userIdentity = await identity.getIdentity({
+ request: req
  });
+ const token = userIdentity == null ? void 0 : userIdentity.token;
  const template = await findTemplate({
  catalogApi: catalogClient,
  entityRef: { kind, namespace, name },
@@ -4553,10 +4638,11 @@ async function createRouter(options) {
  const { kind, namespace, name } = catalogModel.parseEntityRef(templateRef, {
  defaultKind: "template"
  });
- const { token, entityRef: userEntityRef } = parseBearerToken({
- header: req.headers.authorization,
- logger
+ const callerIdentity = await identity.getIdentity({
+ request: req
  });
+ const token = callerIdentity == null ? void 0 : callerIdentity.token;
+ const userEntityRef = callerIdentity == null ? void 0 : callerIdentity.identity.userEntityRef;
  const userEntity = userEntityRef ? await catalogClient.getEntityByRef(userEntityRef, { token }) : void 0;
  let auditLog = `Scaffolding task for ${templateRef}`;
  if (userEntityRef) {
@@ -4604,7 +4690,10 @@ async function createRouter(options) {
  namespace,
  name: (_c = template.metadata) == null ? void 0 : _c.name
  }),
- baseUrl
+ baseUrl,
+ entity: {
+ metadata: template.metadata
+ }
  }
  };
  const result = await taskBroker.dispatch({
@@ -4702,7 +4791,7 @@ data: ${JSON.stringify(event)}
  clearTimeout(timeout);
  });
  }).post("/v2/dry-run", async (req, res) => {
- var _a, _b, _c;
+ var _a, _b, _c, _d;
  const bodySchema = zod.z.object({
  template: zod.z.unknown(),
  values: zod.z.record(zod.z.unknown()),
@@ -4718,11 +4807,10 @@ data: ${JSON.stringify(event)}
  if (!await pluginScaffolderCommon.templateEntityV1beta3Validator.check(template)) {
  throw new errors.InputError("Input template is not a template");
  }
- const { token } = parseBearerToken({
- header: req.headers.authorization,
- logger
- });
- for (const parameters of [(_a = template.spec.parameters) != null ? _a : []].flat()) {
+ const token = (_a = await identity.getIdentity({
+ request: req
+ })) == null ? void 0 : _a.token;
+ for (const parameters of [(_b = template.spec.parameters) != null ? _b : []].flat()) {
  const result2 = jsonschema.validate(body.values, parameters);
  if (!result2.valid) {
  res.status(400).json({ errors: result2.errors });
@@ -4741,10 +4829,10 @@ data: ${JSON.stringify(event)}
  spec: {
  apiVersion: template.apiVersion,
  steps,
- output: (_b = template.spec.output) != null ? _b : {},
+ output: (_c = template.spec.output) != null ? _c : {},
  parameters: body.values
  },
- directoryContents: ((_c = body.directoryContents) != null ? _c : []).map((file) => ({
+ directoryContents: ((_d = body.directoryContents) != null ? _d : []).map((file) => ({
  path: file.path,
  content: Buffer.from(file.base64Content, "base64")
  })),
@@ -4768,37 +4856,6 @@ data: ${JSON.stringify(event)}
  app.use("/", router);
  return app;
  }
- function parseBearerToken({
- header,
- logger
- }) {
- var _a;
- if (!header) {
- return {};
- }
- try {
- const token = (_a = header.match(/^Bearer\s(\S+\.\S+\.\S+)$/i)) == null ? void 0 : _a[1];
- if (!token) {
- throw new TypeError("Expected Bearer with JWT");
- }
- const [_header, rawPayload, _signature] = token.split(".");
- const payload = JSON.parse(
- Buffer.from(rawPayload, "base64").toString()
- );
- if (typeof payload !== "object" || payload === null || Array.isArray(payload)) {
- throw new TypeError("Malformed JWT payload");
- }
- const sub = payload.sub;
- if (typeof sub !== "string") {
- throw new TypeError("Expected string sub claim");
- }
- catalogModel.parseEntityRef(sub);
- return { entityRef: sub, token };
- } catch (e) {
- logger.error(`Invalid authorization header: ${errors.stringifyError(e)}`);
- return {};
- }
- }

  class ScaffolderEntitiesProcessor {
  constructor() {
@@ -4868,6 +4925,94 @@ const scaffolderCatalogModule = backendPluginApi.createBackendModule({
  }
  });

+ var __accessCheck = (obj, member, msg) => {
+ if (!member.has(obj))
+ throw TypeError("Cannot " + msg);
+ };
+ var __privateGet = (obj, member, getter) => {
+ __accessCheck(obj, member, "read from private field");
+ return getter ? getter.call(obj) : member.get(obj);
+ };
+ var __privateAdd = (obj, member, value) => {
+ if (member.has(obj))
+ throw TypeError("Cannot add the same private member more than once");
+ member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
+ };
+ var _actions;
+ class ScaffolderActionsExtensionPointImpl {
+ constructor() {
+ __privateAdd(this, _actions, new Array());
+ }
+ addActions(...actions) {
+ __privateGet(this, _actions).push(...actions);
+ }
+ get actions() {
+ return __privateGet(this, _actions);
+ }
+ }
+ _actions = new WeakMap();
+ const scaffolderActionsExtensionPoint = backendPluginApi.createExtensionPoint({
+ id: "scaffolder.actions"
+ });
+ const scaffolderPlugin = backendPluginApi.createBackendPlugin({
+ id: "scaffolder",
+ register(env, options) {
+ const actionsExtensions = new ScaffolderActionsExtensionPointImpl();
+ env.registerExtensionPoint(
+ scaffolderActionsExtensionPoint,
+ actionsExtensions
+ );
+ env.registerInit({
+ deps: {
+ logger: backendPluginApi.loggerServiceRef,
+ config: backendPluginApi.configServiceRef,
+ reader: backendPluginApi.urlReaderServiceRef,
+ permissions: backendPluginApi.permissionsServiceRef,
+ database: backendPluginApi.databaseServiceRef,
+ httpRouter: backendPluginApi.httpRouterServiceRef,
+ catalogClient: pluginCatalogNode.catalogServiceRef
+ },
+ async init({
+ logger,
+ config,
+ reader,
+ database,
+ httpRouter,
+ catalogClient
+ }) {
+ const { additionalTemplateFilters, taskBroker, taskWorkers } = options;
+ const log = backendPluginApi.loggerToWinstonLogger(logger);
+ const actions = options.actions || [
+ ...actionsExtensions.actions,
+ ...createBuiltinActions({
+ integrations: integration.ScmIntegrations.fromConfig(config),
+ catalogClient,
+ reader,
+ config,
+ additionalTemplateFilters
+ })
+ ];
+ const actionIds = actions.map((action) => action.id).join(", ");
+ log.info(
+ `Starting scaffolder with the following actions enabled ${actionIds}`
+ );
+ const router = await createRouter({
+ logger: log,
+ config,
+ database,
+ catalogClient,
+ reader,
+ actions,
+ taskBroker,
+ taskWorkers,
+ additionalTemplateFilters
+ });
+ httpRouter.use(router);
+ }
+ });
+ }
+ });
+
  exports.DatabaseTaskStore = DatabaseTaskStore;
  exports.ScaffolderEntitiesProcessor = ScaffolderEntitiesProcessor;
  exports.TaskManager = TaskManager;
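The new scaffolderActionsExtensionPoint above exposes addActions so other backend modules can contribute custom actions. A rough sketch of such a module, assuming the alpha backend system's createBackendModule shape; the pluginId/moduleId fields and createMyCustomAction are illustrative assumptions, not part of this diff:

const backendPluginApi = require('@backstage/backend-plugin-api');
const customActionsModule = backendPluginApi.createBackendModule({
  pluginId: 'scaffolder', // assumed to match the plugin id registered above
  moduleId: 'custom-actions', // illustrative module id
  register(env) {
    env.registerInit({
      deps: { scaffolder: scaffolderActionsExtensionPoint },
      async init({ scaffolder }) {
        // addActions(...) is the method exposed by the extension point above
        scaffolder.addActions(createMyCustomAction()); // hypothetical action factory
      },
    });
  },
});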
@@ -4890,7 +5035,6 @@ exports.createPublishAzureAction = createPublishAzureAction;
  exports.createPublishBitbucketAction = createPublishBitbucketAction;
  exports.createPublishBitbucketCloudAction = createPublishBitbucketCloudAction;
  exports.createPublishBitbucketServerAction = createPublishBitbucketServerAction;
- exports.createPublishFileAction = createPublishFileAction;
  exports.createPublishGerritAction = createPublishGerritAction;
  exports.createPublishGerritReviewAction = createPublishGerritReviewAction;
  exports.createPublishGithubAction = createPublishGithubAction;
@@ -4902,4 +5046,5 @@ exports.createTemplateAction = createTemplateAction;
  exports.executeShellCommand = executeShellCommand;
  exports.fetchContents = fetchContents;
  exports.scaffolderCatalogModule = scaffolderCatalogModule;
+ exports.scaffolderPlugin = scaffolderPlugin;
  //# sourceMappingURL=index.cjs.js.map