@backstage/plugin-scaffolder-backend 0.15.11 → 0.15.12
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +50 -0
- package/dist/index.cjs.js +175 -123
- package/dist/index.cjs.js.map +1 -1
- package/dist/index.d.ts +294 -1
- package/package.json +8 -8
package/CHANGELOG.md
CHANGED
@@ -1,5 +1,55 @@
 # @backstage/plugin-scaffolder-backend
 
+## 0.15.12
+
+### Patch Changes
+
+- 9990df8a1f: Expose some classes and interfaces public so TaskWorkers can run externally from the scaffolder API.
+- b45a34fb15: Adds a new endpoint for consuming logs from the Scaffolder that uses long polling instead of Server Sent Events.
+
+  This is useful if Backstage is accessed from an environment that doesn't support SSE correctly, which happens in combination with certain enterprise HTTP Proxy servers.
+
+  It is intended to switch the endpoint globally for the whole instance.
+  If you want to use it, you can provide a reconfigured API to the `scaffolderApiRef`:
+
+  ```tsx
+  // packages/app/src/apis.ts
+
+  // ...
+  import {
+    scaffolderApiRef,
+    ScaffolderClient,
+  } from '@backstage/plugin-scaffolder';
+
+  export const apis: AnyApiFactory[] = [
+    // ...
+
+    createApiFactory({
+      api: scaffolderApiRef,
+      deps: {
+        discoveryApi: discoveryApiRef,
+        identityApi: identityApiRef,
+        scmIntegrationsApi: scmIntegrationsApiRef,
+      },
+      factory: ({ discoveryApi, identityApi, scmIntegrationsApi }) =>
+        new ScaffolderClient({
+          discoveryApi,
+          identityApi,
+          scmIntegrationsApi,
+          // use long polling instead of an eventsource
+          useLongPollingLogs: true,
+        }),
+    }),
+  ];
+  ```
+
+- a794c341ca: Fix a bug where only file mode 775 is considered an executable
+- Updated dependencies
+  - @backstage/backend-common@0.9.9
+  - @backstage/catalog-client@0.5.1
+  - @backstage/plugin-catalog-backend@0.17.3
+  - @backstage/plugin-scaffolder-backend-module-cookiecutter@0.1.4
+
 ## 0.15.11
 
 ### Patch Changes
package/dist/index.cjs.js
CHANGED
@@ -22,14 +22,14 @@ var lodash = require('lodash');
 var octokitPluginCreatePullRequest = require('octokit-plugin-create-pull-request');
 var node = require('@gitbeaker/node');
 var webhooks = require('@octokit/webhooks');
-var express = require('express');
-var Router = require('express-promise-router');
-var jsonschema = require('jsonschema');
 var uuid = require('uuid');
 var luxon = require('luxon');
-var os = require('os');
 var Handlebars = require('handlebars');
 var winston = require('winston');
+var jsonschema = require('jsonschema');
+var express = require('express');
+var Router = require('express-promise-router');
+var os = require('os');
 var pluginCatalogBackend = require('@backstage/plugin-catalog-backend');
 var pluginScaffolderCommon = require('@backstage/plugin-scaffolder-common');
 
@@ -62,11 +62,11 @@ var yaml__namespace = /*#__PURE__*/_interopNamespace(yaml);
 var globby__default = /*#__PURE__*/_interopDefaultLegacy(globby);
 var nunjucks__default = /*#__PURE__*/_interopDefaultLegacy(nunjucks);
 var fetch__default = /*#__PURE__*/_interopDefaultLegacy(fetch);
+var Handlebars__namespace = /*#__PURE__*/_interopNamespace(Handlebars);
+var winston__namespace = /*#__PURE__*/_interopNamespace(winston);
 var express__default = /*#__PURE__*/_interopDefaultLegacy(express);
 var Router__default = /*#__PURE__*/_interopDefaultLegacy(Router);
 var os__default = /*#__PURE__*/_interopDefaultLegacy(os);
-var Handlebars__namespace = /*#__PURE__*/_interopNamespace(Handlebars);
-var winston__namespace = /*#__PURE__*/_interopNamespace(winston);
 
 const createTemplateAction = (templateAction) => {
   return templateAction;
@@ -714,6 +714,11 @@ const parseRepoUrl = (repoUrl, integrations) => {
   }
   return {host, owner, repo, organization, workspace, project};
 };
+const isExecutable = (fileMode) => {
+  const executeBitMask = 73;
+  const res = fileMode & executeBitMask;
+  return res > 0;
+};
 
 function createPublishAzureAction(options) {
   const {integrations, config} = options;
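For context on the `a794c341ca` changelog entry: `73` is `0o111`, the owner/group/other execute bits of a POSIX file mode, so a file now counts as executable whenever any execute bit is set rather than only when its mode is 775. A TypeScript restatement of the same check (illustrative only, not part of the package):

```ts
// 73 === 0o111: the execute bits for owner, group and other.
const isExecutable = (fileMode: number): boolean => (fileMode & 0o111) !== 0;

isExecutable(0o100755); // true  (rwxr-xr-x)
isExecutable(0o100744); // true  (rwxr--r--), previously missed
isExecutable(0o100644); // false (rw-r--r--)
```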
@@ -1395,8 +1400,7 @@ const createPublishGithubPullRequestAction = ({
       const absPath = path__default['default'].resolve(fileRoot, filePath);
       const base64EncodedContent = fs__default['default'].readFileSync(absPath).toString("base64");
       const fileStat = fs__default['default'].statSync(absPath);
-      const
-      const githubTreeItemMode = isExecutable ? "100755" : "100644";
+      const githubTreeItemMode = isExecutable(fileStat.mode) ? "100755" : "100644";
       const encoding = "base64";
       return {
         encoding,
@@ -1747,38 +1751,16 @@ class TemplateActionRegistry {
   }
 }
 
-class CatalogEntityClient {
-  constructor(catalogClient) {
-    this.catalogClient = catalogClient;
-  }
-  async findTemplate(templateName, options) {
-    const {items: templates} = await this.catalogClient.getEntities({
-      filter: {
-        kind: "template",
-        "metadata.name": templateName
-      }
-    }, options);
-    if (templates.length !== 1) {
-      if (templates.length > 1) {
-        throw new errors.ConflictError("Templates lookup resulted in multiple matches");
-      } else {
-        throw new errors.NotFoundError("Template not found");
-      }
-    }
-    return templates[0];
-  }
-}
-
 const migrationsDir = backendCommon.resolvePackagePath("@backstage/plugin-scaffolder-backend", "migrations");
 class DatabaseTaskStore {
-
-
-  }
-  static async create(knex) {
-    await knex.migrate.latest({
+  static async create(options) {
+    await options.database.migrate.latest({
       directory: migrationsDir
     });
-    return new DatabaseTaskStore(
+    return new DatabaseTaskStore(options);
+  }
+  constructor(options) {
+    this.db = options.database;
   }
   async getTask(taskId) {
     const [result] = await this.db("tasks").where({id: taskId}).select();
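Note the signature change here: `DatabaseTaskStore.create` now takes an options object with a `database` property (a Knex client) instead of the Knex instance directly, and the constructor mirrors that shape. A minimal sketch of the new call, assuming a Knex client is already at hand:

```ts
import { Knex } from 'knex'; // type-only; adjust to the knex version in use
import { DatabaseTaskStore } from '@backstage/plugin-scaffolder-backend';

// 0.15.11: DatabaseTaskStore.create(knexClient)
// 0.15.12: options object, matching what createRouter now does internally
async function createTaskStore(database: Knex) {
  return await DatabaseTaskStore.create({ database });
}
```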
@@ -1934,7 +1916,7 @@ class DatabaseTaskStore {
   }
 }
 
-class TaskAgent {
+class TaskManager {
   constructor(state, storage, logger) {
     this.state = state;
     this.storage = storage;
@@ -1942,7 +1924,7 @@ class TaskAgent {
     this.isDone = false;
   }
   static create(state, storage, logger) {
-    const agent = new TaskAgent(state, storage, logger);
+    const agent = new TaskManager(state, storage, logger);
     agent.startTimeout();
     return agent;
   }
@@ -2008,7 +1990,7 @@ class StorageTaskBroker {
     for (; ; ) {
       const pendingTask = await this.storage.claimTask();
       if (pendingTask) {
-        return TaskAgent.create({
+        return TaskManager.create({
           taskId: pendingTask.id,
           spec: pendingTask.spec,
           secrets: pendingTask.secrets
@@ -2050,7 +2032,7 @@ class StorageTaskBroker {
        await new Promise((resolve) => setTimeout(resolve, 1e3));
      }
    })();
-    return unsubscribe;
+    return {unsubscribe};
  }
  async vacuumTasks(timeoutS) {
    const {tasks} = await this.storage.listStaleTasks(timeoutS);
@@ -2077,70 +2059,12 @@ class StorageTaskBroker {
   }
 }
 
-class TaskWorker {
-  constructor(options) {
-    this.options = options;
-  }
-  start() {
-    (async () => {
-      for (; ; ) {
-        const task = await this.options.taskBroker.claim();
-        await this.runOneTask(task);
-      }
-    })();
-  }
-  async runOneTask(task) {
-    try {
-      const {output} = task.spec.apiVersion === "scaffolder.backstage.io/v1beta3" ? await this.options.runners.workflowRunner.execute(task) : await this.options.runners.legacyWorkflowRunner.execute(task);
-      await task.complete("completed", {output});
-    } catch (error) {
-      errors.assertError(error);
-      await task.complete("failed", {
-        error: {name: error.name, message: error.message}
-      });
-    }
-  }
-}
-
-async function getWorkingDirectory(config, logger) {
-  if (!config.has("backend.workingDirectory")) {
-    return os__default['default'].tmpdir();
-  }
-  const workingDirectory = config.getString("backend.workingDirectory");
-  try {
-    await fs__default['default'].access(workingDirectory, fs__default['default'].constants.F_OK | fs__default['default'].constants.W_OK);
-    logger.info(`using working directory: ${workingDirectory}`);
-  } catch (err) {
-    errors.assertError(err);
-    logger.error(`working directory ${workingDirectory} ${err.code === "ENOENT" ? "does not exist" : "is not writable"}`);
-    throw err;
-  }
-  return workingDirectory;
-}
-function getEntityBaseUrl(entity) {
-  var _a, _b;
-  let location = (_a = entity.metadata.annotations) == null ? void 0 : _a[catalogModel.SOURCE_LOCATION_ANNOTATION];
-  if (!location) {
-    location = (_b = entity.metadata.annotations) == null ? void 0 : _b[catalogModel.LOCATION_ANNOTATION];
-  }
-  if (!location) {
-    return void 0;
-  }
-  const {type, target} = catalogModel.parseLocationReference(location);
-  if (type === "url") {
-    return target;
-  } else if (type === "file") {
-    return `file://${target}`;
-  }
-  return void 0;
-}
-
 function isTruthy(value) {
   return lodash.isArray(value) ? value.length > 0 : !!value;
 }
 
 const isValidTaskSpec$1 = (taskSpec) => taskSpec.apiVersion === "backstage.io/v1beta2";
-class LegacyWorkflowRunner {
+class HandlebarsWorkflowRunner {
   constructor(options) {
     this.options = options;
     this.handlebars = Handlebars__namespace.create();
@@ -2316,7 +2240,10 @@ class LegacyWorkflowRunner {
 const isValidTaskSpec = (taskSpec) => {
   return taskSpec.apiVersion === "scaffolder.backstage.io/v1beta3";
 };
-const createStepLogger = ({task, step}) => {
+const createStepLogger = ({
+  task,
+  step
+}) => {
   const metadata = {stepId: step.id};
   const taskLogger = winston__namespace.createLogger({
     level: process.env.LOG_LEVEL || "info",
@@ -2333,7 +2260,7 @@ const createStepLogger = ({task, step}) => {
   taskLogger.add(new winston__namespace.transports.Stream({stream: streamLogger}));
   return {taskLogger, streamLogger};
 };
-class DefaultWorkflowRunner {
+class NunjucksWorkflowRunner {
   constructor(options) {
     this.options = options;
     this.nunjucksOptions = {
@@ -2464,6 +2391,111 @@ class DefaultWorkflowRunner {
   }
 }
 
+class TaskWorker {
+  constructor(options) {
+    this.options = options;
+  }
+  static async create(options) {
+    const {
+      taskBroker,
+      logger,
+      actionRegistry,
+      integrations,
+      workingDirectory
+    } = options;
+    const legacyWorkflowRunner = new HandlebarsWorkflowRunner({
+      logger,
+      actionRegistry,
+      integrations,
+      workingDirectory
+    });
+    const workflowRunner = new NunjucksWorkflowRunner({
+      actionRegistry,
+      integrations,
+      logger,
+      workingDirectory
+    });
+    return new TaskWorker({
+      taskBroker,
+      runners: {legacyWorkflowRunner, workflowRunner}
+    });
+  }
+  start() {
+    (async () => {
+      for (; ; ) {
+        const task = await this.options.taskBroker.claim();
+        await this.runOneTask(task);
+      }
+    })();
+  }
+  async runOneTask(task) {
+    try {
+      const {output} = task.spec.apiVersion === "scaffolder.backstage.io/v1beta3" ? await this.options.runners.workflowRunner.execute(task) : await this.options.runners.legacyWorkflowRunner.execute(task);
+      await task.complete("completed", {output});
+    } catch (error) {
+      errors.assertError(error);
+      await task.complete("failed", {
+        error: {name: error.name, message: error.message}
+      });
+    }
+  }
+}
+
+class CatalogEntityClient {
+  constructor(catalogClient) {
+    this.catalogClient = catalogClient;
+  }
+  async findTemplate(templateName, options) {
+    const {items: templates} = await this.catalogClient.getEntities({
+      filter: {
+        kind: "template",
+        "metadata.name": templateName
+      }
+    }, options);
+    if (templates.length !== 1) {
+      if (templates.length > 1) {
+        throw new errors.ConflictError("Templates lookup resulted in multiple matches");
+      } else {
+        throw new errors.NotFoundError("Template not found");
+      }
+    }
+    return templates[0];
+  }
+}
+
+async function getWorkingDirectory(config, logger) {
+  if (!config.has("backend.workingDirectory")) {
+    return os__default['default'].tmpdir();
+  }
+  const workingDirectory = config.getString("backend.workingDirectory");
+  try {
+    await fs__default['default'].access(workingDirectory, fs__default['default'].constants.F_OK | fs__default['default'].constants.W_OK);
+    logger.info(`using working directory: ${workingDirectory}`);
+  } catch (err) {
+    errors.assertError(err);
+    logger.error(`working directory ${workingDirectory} ${err.code === "ENOENT" ? "does not exist" : "is not writable"}`);
+    throw err;
+  }
+  return workingDirectory;
+}
+function getEntityBaseUrl(entity) {
+  var _a, _b;
+  let location = (_a = entity.metadata.annotations) == null ? void 0 : _a[catalogModel.SOURCE_LOCATION_ANNOTATION];
+  if (!location) {
+    location = (_b = entity.metadata.annotations) == null ? void 0 : _b[catalogModel.LOCATION_ANNOTATION];
+  }
+  if (!location) {
+    return void 0;
+  }
+  const {type, target} = catalogModel.parseLocationReference(location);
+  if (type === "url") {
+    return target;
+  } else if (type === "file") {
+    return `file://${target}`;
+  }
+  return void 0;
+}
+
 function isSupportedTemplate(entity) {
   return entity.apiVersion === "backstage.io/v1beta2" || entity.apiVersion === "scaffolder.backstage.io/v1beta3";
 }
@@ -2484,29 +2516,24 @@ async function createRouter(options) {
   const workingDirectory = await getWorkingDirectory(config, logger);
   const entityClient = new CatalogEntityClient(catalogClient);
   const integrations = integration.ScmIntegrations.fromConfig(config);
-
-
+  let taskBroker;
+  if (!options.taskBroker) {
+    const databaseTaskStore = await DatabaseTaskStore.create({
+      database: await database.getClient()
+    });
+    taskBroker = new StorageTaskBroker(databaseTaskStore, logger);
+  } else {
+    taskBroker = options.taskBroker;
+  }
   const actionRegistry = new TemplateActionRegistry();
-  const legacyWorkflowRunner = new LegacyWorkflowRunner({
-    logger,
-    actionRegistry,
-    integrations,
-    workingDirectory
-  });
-  const workflowRunner = new DefaultWorkflowRunner({
-    actionRegistry,
-    integrations,
-    logger,
-    workingDirectory
-  });
   const workers = [];
   for (let i = 0; i < (taskWorkers || 1); i++) {
-    const worker = new TaskWorker({
+    const worker = await TaskWorker.create({
       taskBroker,
-
-
-
-
+      actionRegistry,
+      integrations,
+      logger,
+      workingDirectory
     });
     workers.push(worker);
   }
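The broker setup is now conditional: `createRouter` only builds its own `DatabaseTaskStore`/`StorageTaskBroker` pair when `options.taskBroker` is not supplied. A sketch of handing in a broker from a standard backend plugin file, assuming the usual `PluginEnvironment` scaffolding and that the `TaskBroker` interface is among the newly exported types (the full `RouterOptions` shape lives in `index.d.ts`, which is not shown in this diff):

```ts
// packages/backend/src/plugins/scaffolder.ts (sketch)
import { SingleHostDiscovery } from '@backstage/backend-common';
import { CatalogClient } from '@backstage/catalog-client';
import { createRouter, TaskBroker } from '@backstage/plugin-scaffolder-backend';
import { Router } from 'express';
import { PluginEnvironment } from '../types';

export default async function createPlugin(
  env: PluginEnvironment,
  taskBroker?: TaskBroker, // e.g. shared with externally running TaskWorkers
): Promise<Router> {
  const discoveryApi = SingleHostDiscovery.fromConfig(env.config);
  const catalogClient = new CatalogClient({ discoveryApi });
  return await createRouter({
    logger: env.logger,
    config: env.config,
    database: env.database,
    reader: env.reader,
    catalogClient,
    // New in 0.15.12: when set, the built-in StorageTaskBroker is skipped.
    taskBroker,
  });
}
```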
@@ -2617,14 +2644,15 @@ async function createRouter(options) {
     res.status(200).json(task);
   }).get("/v2/tasks/:taskId/eventstream", async (req, res) => {
     const {taskId} = req.params;
-    const after = Number(req.query.after)
+    const after = req.query.after !== void 0 ? Number(req.query.after) : void 0;
     logger.debug(`Event stream observing taskId '${taskId}' opened`);
     res.writeHead(200, {
       Connection: "keep-alive",
       "Cache-Control": "no-cache",
       "Content-Type": "text/event-stream"
     });
-    const unsubscribe = taskBroker.observe({taskId, after}, (error, {events}) => {
+    const {unsubscribe} = taskBroker.observe({taskId, after}, (error, {events}) => {
+      var _a;
       if (error) {
         logger.error(`Received error from event stream when observing taskId '${taskId}', ${error}`);
       }
@@ -2638,7 +2666,7 @@ data: ${JSON.stringify(event)}
           shouldUnsubscribe = true;
         }
       }
-      res.flush();
+      (_a = res.flush) == null ? void 0 : _a.call(res);
       if (shouldUnsubscribe)
         unsubscribe();
     });
@@ -2646,6 +2674,27 @@ data: ${JSON.stringify(event)}
       unsubscribe();
       logger.debug(`Event stream observing taskId '${taskId}' closed`);
     });
+  }).get("/v2/tasks/:taskId/events", async (req, res) => {
+    const {taskId} = req.params;
+    const after = Number(req.query.after) || void 0;
+    let unsubscribe = () => {
+    };
+    const timeout = setTimeout(() => {
+      unsubscribe();
+      res.json([]);
+    }, 3e4);
+    ({unsubscribe} = taskBroker.observe({taskId, after}, (error, {events}) => {
+      clearTimeout(timeout);
+      unsubscribe();
+      if (error) {
+        logger.error(`Received error from log when observing taskId '${taskId}', ${error}`);
+      }
+      res.json(events);
+    }));
+    req.on("close", () => {
+      unsubscribe();
+      clearTimeout(timeout);
+    });
   });
   const app = express__default['default']();
   app.set("logger", logger);
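This is the long-polling counterpart to the SSE `eventstream` route described in the `b45a34fb15` changelog entry: the request is held for up to 30 seconds (`3e4` ms) and answered with either the next batch of events or an empty array. A rough consumer sketch, assuming each serialized event carries a numeric `id` (the value the `after` cursor refers to) plus a `type`/`body` payload; in an app this is handled by `ScaffolderClient` when `useLongPollingLogs` is enabled, so the helper below is purely illustrative:

```ts
// Hypothetical event shape; the real type lives in the scaffolder packages.
type TaskEvent = { id: number; type: string; body: unknown };

async function followTaskEvents(baseUrl: string, taskId: string): Promise<void> {
  let after: number | undefined;
  for (;;) {
    const url = new URL(`${baseUrl}/v2/tasks/${taskId}/events`);
    if (after !== undefined) {
      url.searchParams.set('after', String(after));
    }
    // The server replies within ~30s, with [] if nothing new happened.
    const response = await fetch(url.toString());
    const events: TaskEvent[] = await response.json();
    for (const event of events) {
      console.log(event.type, JSON.stringify(event.body));
      after = event.id;
      if (event.type === 'completion') {
        return; // assumed terminal event type
      }
    }
  }
}
```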
@@ -2712,8 +2761,11 @@ Object.defineProperty(exports, 'createFetchCookiecutterAction', {
   }
 });
 exports.CatalogEntityClient = CatalogEntityClient;
+exports.DatabaseTaskStore = DatabaseTaskStore;
 exports.OctokitProvider = OctokitProvider;
 exports.ScaffolderEntitiesProcessor = ScaffolderEntitiesProcessor;
+exports.TaskManager = TaskManager;
+exports.TaskWorker = TaskWorker;
 exports.TemplateActionRegistry = TemplateActionRegistry;
 exports.createBuiltinActions = createBuiltinActions;
 exports.createCatalogRegisterAction = createCatalogRegisterAction;