@backstage/plugin-scaffolder-backend 0.15.8 → 0.15.12

package/CHANGELOG.md CHANGED
@@ -1,5 +1,91 @@
  # @backstage/plugin-scaffolder-backend
 
+ ## 0.15.12
+
+ ### Patch Changes
+
+ - 9990df8a1f: Expose some classes and interfaces publicly so TaskWorkers can run externally from the scaffolder API.
+ - b45a34fb15: Adds a new endpoint for consuming logs from the Scaffolder that uses long polling instead of Server-Sent Events.
+
+   This is useful if Backstage is accessed from an environment that doesn't support SSE correctly, which happens in combination with certain enterprise HTTP proxy servers.
+
+   The endpoint is intended to be switched globally for the whole instance.
+   If you want to use it, you can provide a reconfigured API to the `scaffolderApiRef`:
+
+   ```tsx
+   // packages/app/src/apis.ts
+
+   // ...
+   import {
+     scaffolderApiRef,
+     ScaffolderClient,
+   } from '@backstage/plugin-scaffolder';
+
+   export const apis: AnyApiFactory[] = [
+     // ...
+
+     createApiFactory({
+       api: scaffolderApiRef,
+       deps: {
+         discoveryApi: discoveryApiRef,
+         identityApi: identityApiRef,
+         scmIntegrationsApi: scmIntegrationsApiRef,
+       },
+       factory: ({ discoveryApi, identityApi, scmIntegrationsApi }) =>
+         new ScaffolderClient({
+           discoveryApi,
+           identityApi,
+           scmIntegrationsApi,
+           // use long polling instead of an eventsource
+           useLongPollingLogs: true,
+         }),
+     }),
+   ];
+   ```
+
+ - a794c341ca: Fix a bug where only file mode 775 was considered executable
+ - Updated dependencies
+   - @backstage/backend-common@0.9.9
+   - @backstage/catalog-client@0.5.1
+   - @backstage/plugin-catalog-backend@0.17.3
+   - @backstage/plugin-scaffolder-backend-module-cookiecutter@0.1.4
+
+ ## 0.15.11
+
+ ### Patch Changes
+
+ - 10615525f3: Switch to use the json and observable types from `@backstage/types`
+ - 41c49884d2: Start using the new `@backstage/types` package. Initially, this means using the `Observable` and `Json*` types from there. The types also remain in their old places, but are deprecated and will be removed in a future release.
+ - e55a5dea09: Fixed a bug where the mode of an executable file was ignored
+ - Updated dependencies
+   - @backstage/plugin-catalog-backend@0.17.2
+   - @backstage/config@0.1.11
+   - @backstage/errors@0.1.4
+   - @backstage/integration@0.6.9
+   - @backstage/backend-common@0.9.8
+   - @backstage/catalog-model@0.9.6
+   - @backstage/plugin-scaffolder-backend-module-cookiecutter@0.1.3
+   - @backstage/plugin-scaffolder-common@0.1.1
+
+ ## 0.15.10
+
+ ### Patch Changes
+
+ - b149e94290: Allow the `catalog:register` action to register optional locations
+ - 36e67d2f24: Internal updates to apply stricter checks when throwing errors.
+ - Updated dependencies
+   - @backstage/plugin-catalog-backend@0.17.1
+   - @backstage/backend-common@0.9.7
+   - @backstage/errors@0.1.3
+   - @backstage/catalog-model@0.9.5
+
+ ## 0.15.9
+
+ ### Patch Changes
+
+ - 0f99f1170e: Make sure `sourcePath` of `publish:github:pull-request` can only be used to
+   retrieve files from the workspace.
+
  ## 0.15.8
 
  ### Patch Changes
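
The `9990df8a1f` entry above is what the `dist/index.cjs.js` diff below implements: `TaskWorker`, `TaskManager` and `DatabaseTaskStore` are now exported, `TaskWorker` gained a static `create` factory, and `createRouter` accepts a pre-built `taskBroker`. The following is a minimal sketch of a worker process wired up the same way `createRouter` does it internally; the function name, the option shape and the loosely typed `taskBroker` are illustrative only, and the broker itself (a `StorageTaskBroker` over a shared `DatabaseTaskStore` in the stock setup) is assumed to be constructed elsewhere, since this diff's export list does not show it.

```ts
// hypothetical stand-alone scaffolder worker, mirroring TaskWorker.create in the diff below
import {
  TaskWorker,
  TemplateActionRegistry,
} from '@backstage/plugin-scaffolder-backend';
import { ScmIntegrations } from '@backstage/integration';
import { Config } from '@backstage/config';
import { Logger } from 'winston';

export async function startScaffolderWorker(options: {
  config: Config;
  logger: Logger;
  // must observe the same task store as the scaffolder API, e.g. a
  // StorageTaskBroker built on the now-exported DatabaseTaskStore
  taskBroker: any;
  workingDirectory: string;
}) {
  const { config, logger, taskBroker, workingDirectory } = options;
  const integrations = ScmIntegrations.fromConfig(config);

  // register whichever actions this worker should be able to execute,
  // e.g. the built-in actions plus any custom ones
  const actionRegistry = new TemplateActionRegistry();

  const worker = await TaskWorker.create({
    taskBroker,
    actionRegistry,
    integrations,
    logger,
    workingDirectory,
  });
  worker.start();
}
```

On the API side, the same broker can be handed to `createRouter` through its new optional `taskBroker` field (see the `createRouter` hunk further down), so the router only serves HTTP while task execution happens in the external workers.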
package/dist/index.cjs.js CHANGED
@@ -22,14 +22,14 @@ var lodash = require('lodash');
  var octokitPluginCreatePullRequest = require('octokit-plugin-create-pull-request');
  var node = require('@gitbeaker/node');
  var webhooks = require('@octokit/webhooks');
- var express = require('express');
- var Router = require('express-promise-router');
- var jsonschema = require('jsonschema');
  var uuid = require('uuid');
  var luxon = require('luxon');
- var os = require('os');
  var Handlebars = require('handlebars');
  var winston = require('winston');
+ var jsonschema = require('jsonschema');
+ var express = require('express');
+ var Router = require('express-promise-router');
+ var os = require('os');
  var pluginCatalogBackend = require('@backstage/plugin-catalog-backend');
  var pluginScaffolderCommon = require('@backstage/plugin-scaffolder-common');
 
@@ -62,11 +62,11 @@ var yaml__namespace = /*#__PURE__*/_interopNamespace(yaml);
  var globby__default = /*#__PURE__*/_interopDefaultLegacy(globby);
  var nunjucks__default = /*#__PURE__*/_interopDefaultLegacy(nunjucks);
  var fetch__default = /*#__PURE__*/_interopDefaultLegacy(fetch);
+ var Handlebars__namespace = /*#__PURE__*/_interopNamespace(Handlebars);
+ var winston__namespace = /*#__PURE__*/_interopNamespace(winston);
  var express__default = /*#__PURE__*/_interopDefaultLegacy(express);
  var Router__default = /*#__PURE__*/_interopDefaultLegacy(Router);
  var os__default = /*#__PURE__*/_interopDefaultLegacy(os);
- var Handlebars__namespace = /*#__PURE__*/_interopNamespace(Handlebars);
- var winston__namespace = /*#__PURE__*/_interopNamespace(winston);
 
  const createTemplateAction = (templateAction) => {
  return templateAction;
@@ -88,6 +88,11 @@ function createCatalogRegisterAction(options) {
  title: "Catalog Info URL",
  description: "An absolute URL pointing to the catalog info file location",
  type: "string"
+ },
+ optional: {
+ title: "Optional",
+ description: "Permit the registered location to optionally exist. Default: false",
+ type: "boolean"
  }
  }
  },
@@ -104,6 +109,11 @@ function createCatalogRegisterAction(options) {
  title: "Fetch URL",
  description: "A relative path from the repo root pointing to the catalog info file, defaults to /catalog-info.yaml",
  type: "string"
+ },
+ optional: {
+ title: "Optional",
+ description: "Permit the registered location to optionally exist. Default: false",
+ type: "boolean"
  }
  }
  }
@@ -132,15 +142,21 @@ function createCatalogRegisterAction(options) {
  type: "url",
  target: catalogInfoUrl
  }, ctx.token ? {token: ctx.token} : {});
- const result = await catalogClient.addLocation({
- dryRun: true,
- type: "url",
- target: catalogInfoUrl
- }, ctx.token ? {token: ctx.token} : {});
- if (result.entities.length > 0) {
- const {entities} = result;
- const entity = (_a = entities.find((e) => !e.metadata.name.startsWith("generated-"))) != null ? _a : entities[0];
- ctx.output("entityRef", catalogModel.stringifyEntityRef(entity));
+ try {
+ const result = await catalogClient.addLocation({
+ dryRun: true,
+ type: "url",
+ target: catalogInfoUrl
+ }, ctx.token ? {token: ctx.token} : {});
+ if (result.entities.length > 0) {
+ const {entities} = result;
+ const entity = (_a = entities.find((e) => !e.metadata.name.startsWith("generated-"))) != null ? _a : entities[0];
+ ctx.output("entityRef", catalogModel.stringifyEntityRef(entity));
+ }
+ } catch (e) {
+ if (!input.optional) {
+ throw e;
+ }
  }
  ctx.output("catalogInfoUrl", catalogInfoUrl);
  }
@@ -631,6 +647,7 @@ const enableBranchProtectionOnDefaultRepoBranch = async ({
  }
  });
  } catch (e) {
+ errors.assertError(e);
  if (e.message.includes("Upgrade to GitHub Pro or make this repository public to enable this feature")) {
  logger.warn("Branch protection was not enabled as it requires GitHub Pro for private repositories");
  } else {
@@ -652,7 +669,11 @@ const enableBranchProtectionOnDefaultRepoBranch = async ({
  const getRepoSourceDirectory = (workspacePath, sourcePath) => {
  if (sourcePath) {
  const safeSuffix = path.normalize(sourcePath).replace(/^(\.\.(\/|\\|$))+/, "");
- return path.join(workspacePath, safeSuffix);
+ const path$1 = path.join(workspacePath, safeSuffix);
+ if (!backendCommon.isChildPath(workspacePath, path$1)) {
+ throw new Error("Invalid source path");
+ }
+ return path$1;
  }
  return workspacePath;
  };
@@ -693,6 +714,11 @@ const parseRepoUrl = (repoUrl, integrations) => {
  }
  return {host, owner, repo, organization, workspace, project};
  };
+ const isExecutable = (fileMode) => {
+ const executeBitMask = 73;
+ const res = fileMode & executeBitMask;
+ return res > 0;
+ };
 
  function createPublishAzureAction(options) {
  const {integrations, config} = options;
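
The new `isExecutable` helper masks the file mode with `73`, which is `0o111`, i.e. any of the owner, group or other execute bits. That is the fix behind the 0.15.12 changelog entry about only mode `775` being treated as executable; an illustrative restatement of the check:

```ts
// illustrative only: the same check as the isExecutable helper above
const executeBitMask = 0o111; // 73 in decimal

const isExecutable = (fileMode: number) => (fileMode & executeBitMask) > 0;

isExecutable(0o775); // true  (owner, group and others have the execute bit)
isExecutable(0o744); // true  (only the owner has it -- previously missed)
isExecutable(0o644); // false (no execute bit set)
```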
@@ -1212,6 +1238,7 @@ function createPublishGithubAction(options) {
  permission
  });
  } catch (e) {
+ errors.assertError(e);
  ctx.logger.warn(`Skipping ${permission} access for ${team_slug}, ${e.message}`);
  }
  }
@@ -1224,6 +1251,7 @@ function createPublishGithubAction(options) {
  names: topics.map((t) => t.toLowerCase())
  });
  } catch (e) {
+ errors.assertError(e);
  ctx.logger.warn(`Skipping topics ${topics.join(" ")}, ${e.message}`);
  }
  }
@@ -1255,6 +1283,7 @@ function createPublishGithubAction(options) {
  requireCodeOwnerReviews
  });
  } catch (e) {
+ errors.assertError(e);
  ctx.logger.warn(`Skipping: default branch protection on '${newRepo.name}', ${e.message}`);
  }
  ctx.output("remoteUrl", remoteUrl);
@@ -1361,19 +1390,30 @@ const createPublishGithubPullRequestAction = ({
  throw new errors.InputError(`No owner provided for host: ${host}, and repo ${repo}`);
  }
  const client = await clientFactory({integrations, host, owner, repo});
- const fileRoot = sourcePath ? path__default['default'].resolve(ctx.workspacePath, sourcePath) : ctx.workspacePath;
+ const fileRoot = sourcePath ? backendCommon.resolveSafeChildPath(ctx.workspacePath, sourcePath) : ctx.workspacePath;
  const localFilePaths = await globby__default['default'](["./**", "./**/.*", "!.git"], {
  cwd: fileRoot,
  gitignore: true,
  dot: true
  });
- const fileContents = await Promise.all(localFilePaths.map((p) => fs.readFile(path__default['default'].resolve(fileRoot, p))));
+ const fileContents = await Promise.all(localFilePaths.map((filePath) => {
+ const absPath = path__default['default'].resolve(fileRoot, filePath);
+ const base64EncodedContent = fs__default['default'].readFileSync(absPath).toString("base64");
+ const fileStat = fs__default['default'].statSync(absPath);
+ const githubTreeItemMode = isExecutable(fileStat.mode) ? "100755" : "100644";
+ const encoding = "base64";
+ return {
+ encoding,
+ content: base64EncodedContent,
+ mode: githubTreeItemMode
+ };
+ }));
  const repoFilePaths = localFilePaths.map((repoFilePath) => {
  return targetPath ? `${targetPath}/${repoFilePath}` : repoFilePath;
  });
  const changes = [
  {
- files: lodash.zipObject(repoFilePaths, fileContents.map((buf) => buf.toString())),
+ files: lodash.zipObject(repoFilePaths, fileContents),
  commit: title
  }
  ];
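
Together with the `getRepoSourceDirectory` guard earlier in this diff, the switch to `resolveSafeChildPath` above is the 0.15.9 fix that confines `sourcePath` of `publish:github:pull-request` to the task workspace. `resolveSafeChildPath` is the `@backstage/backend-common` helper used here; roughly, and with made-up paths:

```ts
import { resolveSafeChildPath } from '@backstage/backend-common';

const workspacePath = '/tmp/scaffolder-task-123';

// resolves normally while the target stays inside the workspace
resolveSafeChildPath(workspacePath, 'packages/app');
// -> /tmp/scaffolder-task-123/packages/app

// throws instead of resolving to a location outside the workspace
resolveSafeChildPath(workspacePath, '../../etc/passwd');
```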
@@ -1633,6 +1673,7 @@ function createGithubWebhookAction(options) {
  });
  ctx.logger.info(`Webhook '${webhookUrl}' created successfully`);
  } catch (e) {
+ errors.assertError(e);
  ctx.logger.warn(`Failed: create webhook '${webhookUrl}' on repo: '${repo}', ${e.message}`);
  }
  }
@@ -1710,38 +1751,16 @@ class TemplateActionRegistry {
  }
  }
 
- class CatalogEntityClient {
- constructor(catalogClient) {
- this.catalogClient = catalogClient;
- }
- async findTemplate(templateName, options) {
- const {items: templates} = await this.catalogClient.getEntities({
- filter: {
- kind: "template",
- "metadata.name": templateName
- }
- }, options);
- if (templates.length !== 1) {
- if (templates.length > 1) {
- throw new errors.ConflictError("Templates lookup resulted in multiple matches");
- } else {
- throw new errors.NotFoundError("Template not found");
- }
- }
- return templates[0];
- }
- }
-
  const migrationsDir = backendCommon.resolvePackagePath("@backstage/plugin-scaffolder-backend", "migrations");
  class DatabaseTaskStore {
- constructor(db) {
- this.db = db;
- }
- static async create(knex) {
- await knex.migrate.latest({
+ static async create(options) {
+ await options.database.migrate.latest({
  directory: migrationsDir
  });
- return new DatabaseTaskStore(knex);
+ return new DatabaseTaskStore(options);
+ }
+ constructor(options) {
+ this.db = options.database;
  }
  async getTask(taskId) {
  const [result] = await this.db("tasks").where({id: taskId}).select();
@@ -1897,7 +1916,7 @@
  }
  }
 
- class TaskAgent {
+ class TaskManager {
  constructor(state, storage, logger) {
  this.state = state;
  this.storage = storage;
@@ -1905,7 +1924,7 @@
  this.isDone = false;
  }
  static create(state, storage, logger) {
- const agent = new TaskAgent(state, storage, logger);
+ const agent = new TaskManager(state, storage, logger);
  agent.startTimeout();
  return agent;
  }
@@ -1971,7 +1990,7 @@
  for (; ; ) {
  const pendingTask = await this.storage.claimTask();
  if (pendingTask) {
- return TaskAgent.create({
+ return TaskManager.create({
  taskId: pendingTask.id,
  spec: pendingTask.spec,
  secrets: pendingTask.secrets
@@ -2006,13 +2025,14 @@
  try {
  callback(void 0, result);
  } catch (error) {
+ errors.assertError(error);
  callback(error, {events: []});
  }
  }
  await new Promise((resolve) => setTimeout(resolve, 1e3));
  }
  })();
- return unsubscribe;
+ return {unsubscribe};
  }
  async vacuumTasks(timeoutS) {
  const {tasks} = await this.storage.listStaleTasks(timeoutS);
@@ -2039,68 +2059,12 @@
  }
  }
 
- class TaskWorker {
- constructor(options) {
- this.options = options;
- }
- start() {
- (async () => {
- for (; ; ) {
- const task = await this.options.taskBroker.claim();
- await this.runOneTask(task);
- }
- })();
- }
- async runOneTask(task) {
- try {
- const {output} = task.spec.apiVersion === "scaffolder.backstage.io/v1beta3" ? await this.options.runners.workflowRunner.execute(task) : await this.options.runners.legacyWorkflowRunner.execute(task);
- await task.complete("completed", {output});
- } catch (error) {
- await task.complete("failed", {
- error: {name: error.name, message: error.message}
- });
- }
- }
- }
-
- async function getWorkingDirectory(config, logger) {
- if (!config.has("backend.workingDirectory")) {
- return os__default['default'].tmpdir();
- }
- const workingDirectory = config.getString("backend.workingDirectory");
- try {
- await fs__default['default'].access(workingDirectory, fs__default['default'].constants.F_OK | fs__default['default'].constants.W_OK);
- logger.info(`using working directory: ${workingDirectory}`);
- } catch (err) {
- logger.error(`working directory ${workingDirectory} ${err.code === "ENOENT" ? "does not exist" : "is not writable"}`);
- throw err;
- }
- return workingDirectory;
- }
- function getEntityBaseUrl(entity) {
- var _a, _b;
- let location = (_a = entity.metadata.annotations) == null ? void 0 : _a[catalogModel.SOURCE_LOCATION_ANNOTATION];
- if (!location) {
- location = (_b = entity.metadata.annotations) == null ? void 0 : _b[catalogModel.LOCATION_ANNOTATION];
- }
- if (!location) {
- return void 0;
- }
- const {type, target} = catalogModel.parseLocationReference(location);
- if (type === "url") {
- return target;
- } else if (type === "file") {
- return `file://${target}`;
- }
- return void 0;
- }
-
  function isTruthy(value) {
  return lodash.isArray(value) ? value.length > 0 : !!value;
  }
 
  const isValidTaskSpec$1 = (taskSpec) => taskSpec.apiVersion === "backstage.io/v1beta2";
- class LegacyWorkflowRunner {
+ class HandlebarsWorkflowRunner {
  constructor(options) {
  this.options = options;
  this.handlebars = Handlebars__namespace.create();
@@ -2276,7 +2240,10 @@
  const isValidTaskSpec = (taskSpec) => {
  return taskSpec.apiVersion === "scaffolder.backstage.io/v1beta3";
  };
- const createStepLogger = ({task, step}) => {
+ const createStepLogger = ({
+ task,
+ step
+ }) => {
  const metadata = {stepId: step.id};
  const taskLogger = winston__namespace.createLogger({
  level: process.env.LOG_LEVEL || "info",
@@ -2293,7 +2260,7 @@
  taskLogger.add(new winston__namespace.transports.Stream({stream: streamLogger}));
  return {taskLogger, streamLogger};
  };
- class DefaultWorkflowRunner {
+ class NunjucksWorkflowRunner {
  constructor(options) {
  this.options = options;
  this.nunjucksOptions = {
@@ -2424,6 +2391,111 @@
  }
  }
 
+ class TaskWorker {
+ constructor(options) {
+ this.options = options;
+ }
+ static async create(options) {
+ const {
+ taskBroker,
+ logger,
+ actionRegistry,
+ integrations,
+ workingDirectory
+ } = options;
+ const legacyWorkflowRunner = new HandlebarsWorkflowRunner({
+ logger,
+ actionRegistry,
+ integrations,
+ workingDirectory
+ });
+ const workflowRunner = new NunjucksWorkflowRunner({
+ actionRegistry,
+ integrations,
+ logger,
+ workingDirectory
+ });
+ return new TaskWorker({
+ taskBroker,
+ runners: {legacyWorkflowRunner, workflowRunner}
+ });
+ }
+ start() {
+ (async () => {
+ for (; ; ) {
+ const task = await this.options.taskBroker.claim();
+ await this.runOneTask(task);
+ }
+ })();
+ }
+ async runOneTask(task) {
+ try {
+ const {output} = task.spec.apiVersion === "scaffolder.backstage.io/v1beta3" ? await this.options.runners.workflowRunner.execute(task) : await this.options.runners.legacyWorkflowRunner.execute(task);
+ await task.complete("completed", {output});
+ } catch (error) {
+ errors.assertError(error);
+ await task.complete("failed", {
+ error: {name: error.name, message: error.message}
+ });
+ }
+ }
+ }
+
+ class CatalogEntityClient {
+ constructor(catalogClient) {
+ this.catalogClient = catalogClient;
+ }
+ async findTemplate(templateName, options) {
+ const {items: templates} = await this.catalogClient.getEntities({
+ filter: {
+ kind: "template",
+ "metadata.name": templateName
+ }
+ }, options);
+ if (templates.length !== 1) {
+ if (templates.length > 1) {
+ throw new errors.ConflictError("Templates lookup resulted in multiple matches");
+ } else {
+ throw new errors.NotFoundError("Template not found");
+ }
+ }
+ return templates[0];
+ }
+ }
+
+ async function getWorkingDirectory(config, logger) {
+ if (!config.has("backend.workingDirectory")) {
+ return os__default['default'].tmpdir();
+ }
+ const workingDirectory = config.getString("backend.workingDirectory");
+ try {
+ await fs__default['default'].access(workingDirectory, fs__default['default'].constants.F_OK | fs__default['default'].constants.W_OK);
+ logger.info(`using working directory: ${workingDirectory}`);
+ } catch (err) {
+ errors.assertError(err);
+ logger.error(`working directory ${workingDirectory} ${err.code === "ENOENT" ? "does not exist" : "is not writable"}`);
+ throw err;
+ }
+ return workingDirectory;
+ }
+ function getEntityBaseUrl(entity) {
+ var _a, _b;
+ let location = (_a = entity.metadata.annotations) == null ? void 0 : _a[catalogModel.SOURCE_LOCATION_ANNOTATION];
+ if (!location) {
+ location = (_b = entity.metadata.annotations) == null ? void 0 : _b[catalogModel.LOCATION_ANNOTATION];
+ }
+ if (!location) {
+ return void 0;
+ }
+ const {type, target} = catalogModel.parseLocationReference(location);
+ if (type === "url") {
+ return target;
+ } else if (type === "file") {
+ return `file://${target}`;
+ }
+ return void 0;
+ }
+
  function isSupportedTemplate(entity) {
  return entity.apiVersion === "backstage.io/v1beta2" || entity.apiVersion === "scaffolder.backstage.io/v1beta3";
  }
@@ -2444,29 +2516,24 @@ async function createRouter(options) {
  const workingDirectory = await getWorkingDirectory(config, logger);
  const entityClient = new CatalogEntityClient(catalogClient);
  const integrations = integration.ScmIntegrations.fromConfig(config);
- const databaseTaskStore = await DatabaseTaskStore.create(await database.getClient());
- const taskBroker = new StorageTaskBroker(databaseTaskStore, logger);
+ let taskBroker;
+ if (!options.taskBroker) {
+ const databaseTaskStore = await DatabaseTaskStore.create({
+ database: await database.getClient()
+ });
+ taskBroker = new StorageTaskBroker(databaseTaskStore, logger);
+ } else {
+ taskBroker = options.taskBroker;
+ }
  const actionRegistry = new TemplateActionRegistry();
- const legacyWorkflowRunner = new LegacyWorkflowRunner({
- logger,
- actionRegistry,
- integrations,
- workingDirectory
- });
- const workflowRunner = new DefaultWorkflowRunner({
- actionRegistry,
- integrations,
- logger,
- workingDirectory
- });
  const workers = [];
  for (let i = 0; i < (taskWorkers || 1); i++) {
- const worker = new TaskWorker({
+ const worker = await TaskWorker.create({
  taskBroker,
- runners: {
- legacyWorkflowRunner,
- workflowRunner
- }
+ actionRegistry,
+ integrations,
+ logger,
+ workingDirectory
  });
  workers.push(worker);
  }
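
With this change `createRouter` only builds its own `DatabaseTaskStore` and `StorageTaskBroker` when no broker is supplied, so a deployment can hand in a shared broker and run the actual workers elsewhere (see the worker sketch after the CHANGELOG diff). Schematically, keeping whatever other options an existing scaffolder backend already passes:

```ts
// hypothetical: reuse an externally constructed broker in the scaffolder router
const router = await createRouter({
  logger,
  config,
  database,
  catalogClient,
  // ...any other options your scaffolder plugin already passes
  taskBroker: sharedTaskBroker, // the new optional field checked above
});
```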
@@ -2577,14 +2644,15 @@
  res.status(200).json(task);
  }).get("/v2/tasks/:taskId/eventstream", async (req, res) => {
  const {taskId} = req.params;
- const after = Number(req.query.after) || void 0;
+ const after = req.query.after !== void 0 ? Number(req.query.after) : void 0;
  logger.debug(`Event stream observing taskId '${taskId}' opened`);
  res.writeHead(200, {
  Connection: "keep-alive",
  "Cache-Control": "no-cache",
  "Content-Type": "text/event-stream"
  });
- const unsubscribe = taskBroker.observe({taskId, after}, (error, {events}) => {
+ const {unsubscribe} = taskBroker.observe({taskId, after}, (error, {events}) => {
+ var _a;
  if (error) {
  logger.error(`Received error from event stream when observing taskId '${taskId}', ${error}`);
  }
@@ -2598,7 +2666,7 @@ data: ${JSON.stringify(event)}
  shouldUnsubscribe = true;
  }
  }
- res.flush();
+ (_a = res.flush) == null ? void 0 : _a.call(res);
  if (shouldUnsubscribe)
  unsubscribe();
  });
@@ -2606,6 +2674,27 @@
  unsubscribe();
  logger.debug(`Event stream observing taskId '${taskId}' closed`);
  });
+ }).get("/v2/tasks/:taskId/events", async (req, res) => {
+ const {taskId} = req.params;
+ const after = Number(req.query.after) || void 0;
+ let unsubscribe = () => {
+ };
+ const timeout = setTimeout(() => {
+ unsubscribe();
+ res.json([]);
+ }, 3e4);
+ ({unsubscribe} = taskBroker.observe({taskId, after}, (error, {events}) => {
+ clearTimeout(timeout);
+ unsubscribe();
+ if (error) {
+ logger.error(`Received error from log when observing taskId '${taskId}', ${error}`);
+ }
+ res.json(events);
+ }));
+ req.on("close", () => {
+ unsubscribe();
+ clearTimeout(timeout);
+ });
  });
  const app = express__default['default']();
  app.set("logger", logger);
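
The new `/v2/tasks/:taskId/events` route above is the long-polling counterpart of the SSE `eventstream` route: it responds with any events recorded after the `after` offset, or with an empty array once the 30-second timer fires, and the caller is expected to poll it in a loop, which is what `ScaffolderClient` does when `useLongPollingLogs` is enabled (per the CHANGELOG entry). An illustrative stand-alone poller; the base URL and the exact event shape are assumptions based on how events are used elsewhere in this file:

```ts
// illustrative long-polling client for the endpoint added above
async function pollTaskEvents(baseUrl: string, taskId: string) {
  let after: number | undefined;
  for (;;) {
    const query = after !== undefined ? `?after=${after}` : '';
    const res = await fetch(
      `${baseUrl}/v2/tasks/${encodeURIComponent(taskId)}/events${query}`,
    );
    const events: Array<{ id: number; type: string; body: unknown }> =
      await res.json();
    for (const event of events) {
      after = event.id; // resume after the last seen event on the next request
      console.log(event.type, event.body);
      if (event.type === 'completion') {
        return; // assumed terminal event type, as in the SSE route
      }
    }
    // an empty response just means the 30s long poll timed out; poll again
  }
}
```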
@@ -2672,8 +2761,11 @@ Object.defineProperty(exports, 'createFetchCookiecutterAction', {
  }
  });
  exports.CatalogEntityClient = CatalogEntityClient;
+ exports.DatabaseTaskStore = DatabaseTaskStore;
  exports.OctokitProvider = OctokitProvider;
  exports.ScaffolderEntitiesProcessor = ScaffolderEntitiesProcessor;
+ exports.TaskManager = TaskManager;
+ exports.TaskWorker = TaskWorker;
  exports.TemplateActionRegistry = TemplateActionRegistry;
  exports.createBuiltinActions = createBuiltinActions;
  exports.createCatalogRegisterAction = createCatalogRegisterAction;