@backstage/plugin-scaffolder-backend 1.26.0-next.1 → 1.26.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +85 -0
- package/README.md +1 -1
- package/alpha/package.json +1 -1
- package/dist/ScaffolderPlugin.cjs.js +168 -0
- package/dist/ScaffolderPlugin.cjs.js.map +1 -0
- package/dist/alpha.cjs.js +8 -197
- package/dist/alpha.cjs.js.map +1 -1
- package/dist/alpha.d.ts +4 -8
- package/dist/deprecated.cjs.js +15 -0
- package/dist/deprecated.cjs.js.map +1 -0
- package/dist/index.cjs.js +60 -133
- package/dist/index.cjs.js.map +1 -1
- package/dist/index.d.ts +11 -3
- package/dist/lib/templating/SecureTemplater.cjs.js +169 -0
- package/dist/lib/templating/SecureTemplater.cjs.js.map +1 -0
- package/dist/lib/templating/filters.cjs.js +26 -0
- package/dist/lib/templating/filters.cjs.js.map +1 -0
- package/dist/lib/templating/helpers.cjs.js +13 -0
- package/dist/lib/templating/helpers.cjs.js.map +1 -0
- package/dist/scaffolder/actions/TemplateActionRegistry.cjs.js +30 -0
- package/dist/scaffolder/actions/TemplateActionRegistry.cjs.js.map +1 -0
- package/dist/scaffolder/actions/builtin/catalog/fetch.cjs.js +93 -0
- package/dist/scaffolder/actions/builtin/catalog/fetch.cjs.js.map +1 -0
- package/dist/scaffolder/actions/builtin/catalog/fetch.examples.cjs.js +43 -0
- package/dist/scaffolder/actions/builtin/catalog/fetch.examples.cjs.js.map +1 -0
- package/dist/scaffolder/actions/builtin/catalog/register.cjs.js +142 -0
- package/dist/scaffolder/actions/builtin/catalog/register.cjs.js.map +1 -0
- package/dist/scaffolder/actions/builtin/catalog/register.examples.cjs.js +28 -0
- package/dist/scaffolder/actions/builtin/catalog/register.examples.cjs.js.map +1 -0
- package/dist/scaffolder/actions/builtin/catalog/write.cjs.js +74 -0
- package/dist/scaffolder/actions/builtin/catalog/write.cjs.js.map +1 -0
- package/dist/scaffolder/actions/builtin/catalog/write.examples.cjs.js +56 -0
- package/dist/scaffolder/actions/builtin/catalog/write.examples.cjs.js.map +1 -0
- package/dist/scaffolder/actions/builtin/createBuiltinActions.cjs.js +156 -0
- package/dist/scaffolder/actions/builtin/createBuiltinActions.cjs.js.map +1 -0
- package/dist/scaffolder/actions/builtin/debug/log.cjs.js +66 -0
- package/dist/scaffolder/actions/builtin/debug/log.cjs.js.map +1 -0
- package/dist/scaffolder/actions/builtin/debug/log.examples.cjs.js +58 -0
- package/dist/scaffolder/actions/builtin/debug/log.examples.cjs.js.map +1 -0
- package/dist/scaffolder/actions/builtin/debug/wait.cjs.js +66 -0
- package/dist/scaffolder/actions/builtin/debug/wait.cjs.js.map +1 -0
- package/dist/scaffolder/actions/builtin/debug/wait.examples.cjs.js +58 -0
- package/dist/scaffolder/actions/builtin/debug/wait.examples.cjs.js.map +1 -0
- package/dist/scaffolder/actions/builtin/fetch/plain.cjs.js +56 -0
- package/dist/scaffolder/actions/builtin/fetch/plain.cjs.js.map +1 -0
- package/dist/scaffolder/actions/builtin/fetch/plain.examples.cjs.js +44 -0
- package/dist/scaffolder/actions/builtin/fetch/plain.examples.cjs.js.map +1 -0
- package/dist/scaffolder/actions/builtin/fetch/plainFile.cjs.js +56 -0
- package/dist/scaffolder/actions/builtin/fetch/plainFile.cjs.js.map +1 -0
- package/dist/scaffolder/actions/builtin/fetch/plainFile.examples.cjs.js +29 -0
- package/dist/scaffolder/actions/builtin/fetch/plainFile.examples.cjs.js.map +1 -0
- package/dist/scaffolder/actions/builtin/fetch/template.cjs.js +241 -0
- package/dist/scaffolder/actions/builtin/fetch/template.cjs.js.map +1 -0
- package/dist/scaffolder/actions/builtin/fetch/template.examples.cjs.js +35 -0
- package/dist/scaffolder/actions/builtin/fetch/template.examples.cjs.js.map +1 -0
- package/dist/scaffolder/actions/builtin/fetch/templateFile.cjs.js +119 -0
- package/dist/scaffolder/actions/builtin/fetch/templateFile.cjs.js.map +1 -0
- package/dist/scaffolder/actions/builtin/fetch/templateFile.examples.cjs.js +34 -0
- package/dist/scaffolder/actions/builtin/fetch/templateFile.examples.cjs.js.map +1 -0
- package/dist/scaffolder/actions/builtin/filesystem/delete.cjs.js +54 -0
- package/dist/scaffolder/actions/builtin/filesystem/delete.cjs.js.map +1 -0
- package/dist/scaffolder/actions/builtin/filesystem/delete.examples.cjs.js +44 -0
- package/dist/scaffolder/actions/builtin/filesystem/delete.examples.cjs.js.map +1 -0
- package/dist/scaffolder/actions/builtin/filesystem/rename.cjs.js +83 -0
- package/dist/scaffolder/actions/builtin/filesystem/rename.cjs.js.map +1 -0
- package/dist/scaffolder/actions/builtin/filesystem/rename.examples.cjs.js +48 -0
- package/dist/scaffolder/actions/builtin/filesystem/rename.examples.cjs.js.map +1 -0
- package/dist/scaffolder/actions/deprecated.cjs.js +74 -0
- package/dist/scaffolder/actions/deprecated.cjs.js.map +1 -0
- package/dist/scaffolder/dryrun/DecoratedActionsRegistry.cjs.js +57 -0
- package/dist/scaffolder/dryrun/DecoratedActionsRegistry.cjs.js.map +1 -0
- package/dist/scaffolder/dryrun/createDryRunner.cjs.js +97 -0
- package/dist/scaffolder/dryrun/createDryRunner.cjs.js.map +1 -0
- package/dist/scaffolder/tasks/DatabaseTaskStore.cjs.js +430 -0
- package/dist/scaffolder/tasks/DatabaseTaskStore.cjs.js.map +1 -0
- package/dist/scaffolder/tasks/DatabaseWorkspaceProvider.cjs.js +22 -0
- package/dist/scaffolder/tasks/DatabaseWorkspaceProvider.cjs.js.map +1 -0
- package/dist/scaffolder/tasks/NunjucksWorkflowRunner.cjs.js +545 -0
- package/dist/scaffolder/tasks/NunjucksWorkflowRunner.cjs.js.map +1 -0
- package/dist/scaffolder/tasks/StorageTaskBroker.cjs.js +318 -0
- package/dist/scaffolder/tasks/StorageTaskBroker.cjs.js.map +1 -0
- package/dist/scaffolder/tasks/TaskWorker.cjs.js +110 -0
- package/dist/scaffolder/tasks/TaskWorker.cjs.js.map +1 -0
- package/dist/scaffolder/tasks/WorkspaceService.cjs.js +50 -0
- package/dist/scaffolder/tasks/WorkspaceService.cjs.js.map +1 -0
- package/dist/scaffolder/tasks/dbUtil.cjs.js +20 -0
- package/dist/scaffolder/tasks/dbUtil.cjs.js.map +1 -0
- package/dist/scaffolder/tasks/helper.cjs.js +46 -0
- package/dist/scaffolder/tasks/helper.cjs.js.map +1 -0
- package/dist/scaffolder/tasks/logger.cjs.js +156 -0
- package/dist/scaffolder/tasks/logger.cjs.js.map +1 -0
- package/dist/scaffolder/tasks/taskRecoveryHelper.cjs.js +18 -0
- package/dist/scaffolder/tasks/taskRecoveryHelper.cjs.js.map +1 -0
- package/dist/service/conditionExports.cjs.js +26 -0
- package/dist/service/conditionExports.cjs.js.map +1 -0
- package/dist/service/helpers.cjs.js +92 -0
- package/dist/service/helpers.cjs.js.map +1 -0
- package/dist/service/router.cjs.js +640 -0
- package/dist/service/router.cjs.js.map +1 -0
- package/dist/service/rules.cjs.js +97 -0
- package/dist/service/rules.cjs.js.map +1 -0
- package/dist/util/checkPermissions.cjs.js +25 -0
- package/dist/util/checkPermissions.cjs.js.map +1 -0
- package/dist/util/metrics.cjs.js +24 -0
- package/dist/util/metrics.cjs.js.map +1 -0
- package/package.json +32 -31
- package/dist/cjs/router-BqZK9yax.cjs.js +0 -4101
- package/dist/cjs/router-BqZK9yax.cjs.js.map +0 -1
|
@@ -1,4101 +0,0 @@
|
|
|
1
|
-
'use strict';
|
|
2
|
-
|
|
3
|
-
var backendCommon = require('@backstage/backend-common');
|
|
4
|
-
var catalogModel = require('@backstage/catalog-model');
|
|
5
|
-
var config = require('@backstage/config');
|
|
6
|
-
var errors = require('@backstage/errors');
|
|
7
|
-
var integration = require('@backstage/integration');
|
|
8
|
-
var pluginScaffolderCommon = require('@backstage/plugin-scaffolder-common');
|
|
9
|
-
var alpha$1 = require('@backstage/plugin-scaffolder-common/alpha');
|
|
10
|
-
var express = require('express');
|
|
11
|
-
var Router = require('express-promise-router');
|
|
12
|
-
var jsonschema = require('jsonschema');
|
|
13
|
-
var zod = require('zod');
|
|
14
|
-
var pluginScaffolderNode = require('@backstage/plugin-scaffolder-node');
|
|
15
|
-
var yaml = require('yaml');
|
|
16
|
-
var fs = require('fs-extra');
|
|
17
|
-
var backendPluginApi = require('@backstage/backend-plugin-api');
|
|
18
|
-
var path = require('path');
|
|
19
|
-
var fs$1 = require('fs');
|
|
20
|
-
var luxon = require('luxon');
|
|
21
|
-
var globby = require('globby');
|
|
22
|
-
var isbinaryfile = require('isbinaryfile');
|
|
23
|
-
var isolatedVm = require('isolated-vm');
|
|
24
|
-
var get = require('lodash/get');
|
|
25
|
-
var github = require('@backstage/plugin-scaffolder-backend-module-github');
|
|
26
|
-
var azure = require('@backstage/plugin-scaffolder-backend-module-azure');
|
|
27
|
-
var bitbucket = require('@backstage/plugin-scaffolder-backend-module-bitbucket');
|
|
28
|
-
var bitbucketCloud = require('@backstage/plugin-scaffolder-backend-module-bitbucket-cloud');
|
|
29
|
-
var bitbucketServer = require('@backstage/plugin-scaffolder-backend-module-bitbucket-server');
|
|
30
|
-
var gerrit = require('@backstage/plugin-scaffolder-backend-module-gerrit');
|
|
31
|
-
var gitlab = require('@backstage/plugin-scaffolder-backend-module-gitlab');
|
|
32
|
-
var pluginScaffolderBackendModuleGitea = require('@backstage/plugin-scaffolder-backend-module-gitea');
|
|
33
|
-
var uuid = require('uuid');
|
|
34
|
-
var alpha = require('@backstage/plugin-scaffolder-node/alpha');
|
|
35
|
-
var os = require('os');
|
|
36
|
-
var ObservableImpl = require('zen-observable');
|
|
37
|
-
var lodash = require('lodash');
|
|
38
|
-
var PQueue = require('p-queue');
|
|
39
|
-
var winston = require('winston');
|
|
40
|
-
var nunjucks = require('nunjucks');
|
|
41
|
-
var stream = require('stream');
|
|
42
|
-
var api = require('@opentelemetry/api');
|
|
43
|
-
var pluginPermissionNode = require('@backstage/plugin-permission-node');
|
|
44
|
-
var promClient = require('prom-client');
|
|
45
|
-
var pluginPermissionCommon = require('@backstage/plugin-permission-common');
|
|
46
|
-
var Transport = require('winston-transport');
|
|
47
|
-
var tripleBeam = require('triple-beam');
|
|
48
|
-
var url = require('url');
|
|
49
|
-
|
|
50
|
-
function _interopDefaultCompat (e) { return e && typeof e === 'object' && 'default' in e ? e : { default: e }; }
|
|
51
|
-
|
|
52
|
-
function _interopNamespaceCompat(e) {
|
|
53
|
-
if (e && typeof e === 'object' && 'default' in e) return e;
|
|
54
|
-
var n = Object.create(null);
|
|
55
|
-
if (e) {
|
|
56
|
-
Object.keys(e).forEach(function (k) {
|
|
57
|
-
if (k !== 'default') {
|
|
58
|
-
var d = Object.getOwnPropertyDescriptor(e, k);
|
|
59
|
-
Object.defineProperty(n, k, d.get ? d : {
|
|
60
|
-
enumerable: true,
|
|
61
|
-
get: function () { return e[k]; }
|
|
62
|
-
});
|
|
63
|
-
}
|
|
64
|
-
});
|
|
65
|
-
}
|
|
66
|
-
n.default = e;
|
|
67
|
-
return Object.freeze(n);
|
|
68
|
-
}
|
|
69
|
-
|
|
70
|
-
var express__default = /*#__PURE__*/_interopDefaultCompat(express);
|
|
71
|
-
var Router__default = /*#__PURE__*/_interopDefaultCompat(Router);
|
|
72
|
-
var yaml__namespace = /*#__PURE__*/_interopNamespaceCompat(yaml);
|
|
73
|
-
var fs__default = /*#__PURE__*/_interopDefaultCompat(fs);
|
|
74
|
-
var path__default = /*#__PURE__*/_interopDefaultCompat(path);
|
|
75
|
-
var fs__default$1 = /*#__PURE__*/_interopDefaultCompat(fs$1);
|
|
76
|
-
var globby__default = /*#__PURE__*/_interopDefaultCompat(globby);
|
|
77
|
-
var get__default = /*#__PURE__*/_interopDefaultCompat(get);
|
|
78
|
-
var os__default = /*#__PURE__*/_interopDefaultCompat(os);
|
|
79
|
-
var ObservableImpl__default = /*#__PURE__*/_interopDefaultCompat(ObservableImpl);
|
|
80
|
-
var PQueue__default = /*#__PURE__*/_interopDefaultCompat(PQueue);
|
|
81
|
-
var winston__namespace = /*#__PURE__*/_interopNamespaceCompat(winston);
|
|
82
|
-
var nunjucks__default = /*#__PURE__*/_interopDefaultCompat(nunjucks);
|
|
83
|
-
var Transport__default = /*#__PURE__*/_interopDefaultCompat(Transport);
|
|
84
|
-
|
|
85
|
-
const examples$a = [
|
|
86
|
-
{
|
|
87
|
-
description: "Register with the catalog",
|
|
88
|
-
example: yaml__namespace.default.stringify({
|
|
89
|
-
steps: [
|
|
90
|
-
{
|
|
91
|
-
action: "catalog:register",
|
|
92
|
-
id: "register-with-catalog",
|
|
93
|
-
name: "Register with the catalog",
|
|
94
|
-
input: {
|
|
95
|
-
catalogInfoUrl: "http://github.com/backstage/backstage/blob/master/catalog-info.yaml"
|
|
96
|
-
}
|
|
97
|
-
}
|
|
98
|
-
]
|
|
99
|
-
})
|
|
100
|
-
}
|
|
101
|
-
];
|
|
102
|
-
|
|
103
|
-
const id$4 = "catalog:register";
|
|
104
|
-
function createCatalogRegisterAction(options) {
|
|
105
|
-
const { catalogClient, integrations, auth } = options;
|
|
106
|
-
return pluginScaffolderNode.createTemplateAction({
|
|
107
|
-
id: id$4,
|
|
108
|
-
description: "Registers entities from a catalog descriptor file in the workspace into the software catalog.",
|
|
109
|
-
examples: examples$a,
|
|
110
|
-
schema: {
|
|
111
|
-
input: {
|
|
112
|
-
oneOf: [
|
|
113
|
-
{
|
|
114
|
-
type: "object",
|
|
115
|
-
required: ["catalogInfoUrl"],
|
|
116
|
-
properties: {
|
|
117
|
-
catalogInfoUrl: {
|
|
118
|
-
title: "Catalog Info URL",
|
|
119
|
-
description: "An absolute URL pointing to the catalog info file location",
|
|
120
|
-
type: "string"
|
|
121
|
-
},
|
|
122
|
-
optional: {
|
|
123
|
-
title: "Optional",
|
|
124
|
-
description: "Permit the registered location to optionally exist. Default: false",
|
|
125
|
-
type: "boolean"
|
|
126
|
-
}
|
|
127
|
-
}
|
|
128
|
-
},
|
|
129
|
-
{
|
|
130
|
-
type: "object",
|
|
131
|
-
required: ["repoContentsUrl"],
|
|
132
|
-
properties: {
|
|
133
|
-
repoContentsUrl: {
|
|
134
|
-
title: "Repository Contents URL",
|
|
135
|
-
description: "An absolute URL pointing to the root of a repository directory tree",
|
|
136
|
-
type: "string"
|
|
137
|
-
},
|
|
138
|
-
catalogInfoPath: {
|
|
139
|
-
title: "Fetch URL",
|
|
140
|
-
description: "A relative path from the repo root pointing to the catalog info file, defaults to /catalog-info.yaml",
|
|
141
|
-
type: "string"
|
|
142
|
-
},
|
|
143
|
-
optional: {
|
|
144
|
-
title: "Optional",
|
|
145
|
-
description: "Permit the registered location to optionally exist. Default: false",
|
|
146
|
-
type: "boolean"
|
|
147
|
-
}
|
|
148
|
-
}
|
|
149
|
-
}
|
|
150
|
-
]
|
|
151
|
-
},
|
|
152
|
-
output: {
|
|
153
|
-
type: "object",
|
|
154
|
-
required: ["catalogInfoUrl"],
|
|
155
|
-
properties: {
|
|
156
|
-
entityRef: {
|
|
157
|
-
type: "string"
|
|
158
|
-
},
|
|
159
|
-
catalogInfoUrl: {
|
|
160
|
-
type: "string"
|
|
161
|
-
}
|
|
162
|
-
}
|
|
163
|
-
}
|
|
164
|
-
},
|
|
165
|
-
async handler(ctx) {
|
|
166
|
-
const { input } = ctx;
|
|
167
|
-
let catalogInfoUrl;
|
|
168
|
-
if ("catalogInfoUrl" in input) {
|
|
169
|
-
catalogInfoUrl = input.catalogInfoUrl;
|
|
170
|
-
} else {
|
|
171
|
-
const { repoContentsUrl, catalogInfoPath = "/catalog-info.yaml" } = input;
|
|
172
|
-
const integration = integrations.byUrl(repoContentsUrl);
|
|
173
|
-
if (!integration) {
|
|
174
|
-
throw new errors.InputError(
|
|
175
|
-
`No integration found for host ${repoContentsUrl}`
|
|
176
|
-
);
|
|
177
|
-
}
|
|
178
|
-
catalogInfoUrl = integration.resolveUrl({
|
|
179
|
-
base: repoContentsUrl,
|
|
180
|
-
url: catalogInfoPath
|
|
181
|
-
});
|
|
182
|
-
}
|
|
183
|
-
ctx.logger.info(`Registering ${catalogInfoUrl} in the catalog`);
|
|
184
|
-
const { token } = await auth?.getPluginRequestToken({
|
|
185
|
-
onBehalfOf: await ctx.getInitiatorCredentials(),
|
|
186
|
-
targetPluginId: "catalog"
|
|
187
|
-
}) ?? { token: ctx.secrets?.backstageToken };
|
|
188
|
-
try {
|
|
189
|
-
await catalogClient.addLocation(
|
|
190
|
-
{
|
|
191
|
-
type: "url",
|
|
192
|
-
target: catalogInfoUrl
|
|
193
|
-
},
|
|
194
|
-
token ? { token } : {}
|
|
195
|
-
);
|
|
196
|
-
} catch (e) {
|
|
197
|
-
if (!input.optional) {
|
|
198
|
-
throw e;
|
|
199
|
-
}
|
|
200
|
-
}
|
|
201
|
-
try {
|
|
202
|
-
const result = await catalogClient.addLocation(
|
|
203
|
-
{
|
|
204
|
-
dryRun: true,
|
|
205
|
-
type: "url",
|
|
206
|
-
target: catalogInfoUrl
|
|
207
|
-
},
|
|
208
|
-
token ? { token } : {}
|
|
209
|
-
);
|
|
210
|
-
if (result.entities.length) {
|
|
211
|
-
const { entities } = result;
|
|
212
|
-
let entity;
|
|
213
|
-
entity = entities.find(
|
|
214
|
-
(e) => !e.metadata.name.startsWith("generated-") && e.kind === "Component"
|
|
215
|
-
);
|
|
216
|
-
if (!entity) {
|
|
217
|
-
entity = entities.find(
|
|
218
|
-
(e) => !e.metadata.name.startsWith("generated-")
|
|
219
|
-
);
|
|
220
|
-
}
|
|
221
|
-
if (!entity) {
|
|
222
|
-
entity = entities[0];
|
|
223
|
-
}
|
|
224
|
-
ctx.output("entityRef", catalogModel.stringifyEntityRef(entity));
|
|
225
|
-
}
|
|
226
|
-
} catch (e) {
|
|
227
|
-
if (!input.optional) {
|
|
228
|
-
throw e;
|
|
229
|
-
}
|
|
230
|
-
}
|
|
231
|
-
ctx.output("catalogInfoUrl", catalogInfoUrl);
|
|
232
|
-
}
|
|
233
|
-
});
|
|
234
|
-
}
|
|
235
|
-
|
|
236
|
-
const examples$9 = [
|
|
237
|
-
{
|
|
238
|
-
description: "Write a catalog yaml file",
|
|
239
|
-
example: yaml__namespace.stringify({
|
|
240
|
-
steps: [
|
|
241
|
-
{
|
|
242
|
-
action: "catalog:write",
|
|
243
|
-
id: "create-catalog-info-file",
|
|
244
|
-
name: "Create catalog file",
|
|
245
|
-
input: {
|
|
246
|
-
entity: {
|
|
247
|
-
apiVersion: "backstage.io/v1alpha1",
|
|
248
|
-
kind: "Component",
|
|
249
|
-
metadata: {
|
|
250
|
-
name: "test",
|
|
251
|
-
annotations: {}
|
|
252
|
-
},
|
|
253
|
-
spec: {
|
|
254
|
-
type: "service",
|
|
255
|
-
lifecycle: "production",
|
|
256
|
-
owner: "default/owner"
|
|
257
|
-
}
|
|
258
|
-
}
|
|
259
|
-
}
|
|
260
|
-
}
|
|
261
|
-
]
|
|
262
|
-
})
|
|
263
|
-
}
|
|
264
|
-
];
|
|
265
|
-
|
|
266
|
-
const id$3 = "catalog:write";
|
|
267
|
-
function createCatalogWriteAction() {
|
|
268
|
-
return pluginScaffolderNode.createTemplateAction({
|
|
269
|
-
id: id$3,
|
|
270
|
-
description: "Writes the catalog-info.yaml for your template",
|
|
271
|
-
schema: {
|
|
272
|
-
input: zod.z.object({
|
|
273
|
-
filePath: zod.z.string().optional().describe("Defaults to catalog-info.yaml"),
|
|
274
|
-
// TODO: this should reference an zod entity validator if it existed.
|
|
275
|
-
entity: zod.z.record(zod.z.any()).describe(
|
|
276
|
-
"You can provide the same values used in the Entity schema."
|
|
277
|
-
)
|
|
278
|
-
})
|
|
279
|
-
},
|
|
280
|
-
examples: examples$9,
|
|
281
|
-
supportsDryRun: true,
|
|
282
|
-
async handler(ctx) {
|
|
283
|
-
const { filePath, entity } = ctx.input;
|
|
284
|
-
const entityRef = ctx.templateInfo?.entityRef;
|
|
285
|
-
const path = filePath ?? "catalog-info.yaml";
|
|
286
|
-
ctx.logger.info(`Writing ${path}`);
|
|
287
|
-
await fs__default.default.outputFile(
|
|
288
|
-
backendPluginApi.resolveSafeChildPath(ctx.workspacePath, path),
|
|
289
|
-
yaml__namespace.stringify({
|
|
290
|
-
...entity,
|
|
291
|
-
metadata: {
|
|
292
|
-
...entity.metadata,
|
|
293
|
-
...entityRef ? {
|
|
294
|
-
annotations: {
|
|
295
|
-
...entity.metadata.annotations,
|
|
296
|
-
"backstage.io/source-template": entityRef
|
|
297
|
-
}
|
|
298
|
-
} : void 0
|
|
299
|
-
}
|
|
300
|
-
})
|
|
301
|
-
);
|
|
302
|
-
}
|
|
303
|
-
});
|
|
304
|
-
}
|
|
305
|
-
|
|
306
|
-
const examples$8 = [
|
|
307
|
-
{
|
|
308
|
-
description: "Fetch entity by reference",
|
|
309
|
-
example: yaml__namespace.default.stringify({
|
|
310
|
-
steps: [
|
|
311
|
-
{
|
|
312
|
-
action: "catalog:fetch",
|
|
313
|
-
id: "fetch",
|
|
314
|
-
name: "Fetch catalog entity",
|
|
315
|
-
input: {
|
|
316
|
-
entityRef: "component:default/name"
|
|
317
|
-
}
|
|
318
|
-
}
|
|
319
|
-
]
|
|
320
|
-
})
|
|
321
|
-
},
|
|
322
|
-
{
|
|
323
|
-
description: "Fetch multiple entities by reference",
|
|
324
|
-
example: yaml__namespace.default.stringify({
|
|
325
|
-
steps: [
|
|
326
|
-
{
|
|
327
|
-
action: "catalog:fetch",
|
|
328
|
-
id: "fetchMultiple",
|
|
329
|
-
name: "Fetch catalog entities",
|
|
330
|
-
input: {
|
|
331
|
-
entityRefs: ["component:default/name"]
|
|
332
|
-
}
|
|
333
|
-
}
|
|
334
|
-
]
|
|
335
|
-
})
|
|
336
|
-
}
|
|
337
|
-
];
|
|
338
|
-
|
|
339
|
-
const id$2 = "catalog:fetch";
|
|
340
|
-
function createFetchCatalogEntityAction(options) {
|
|
341
|
-
const { catalogClient, auth } = options;
|
|
342
|
-
return pluginScaffolderNode.createTemplateAction({
|
|
343
|
-
id: id$2,
|
|
344
|
-
description: "Returns entity or entities from the catalog by entity reference(s)",
|
|
345
|
-
examples: examples$8,
|
|
346
|
-
supportsDryRun: true,
|
|
347
|
-
schema: {
|
|
348
|
-
input: zod.z.object({
|
|
349
|
-
entityRef: zod.z.string({
|
|
350
|
-
description: "Entity reference of the entity to get"
|
|
351
|
-
}).optional(),
|
|
352
|
-
entityRefs: zod.z.array(zod.z.string(), {
|
|
353
|
-
description: "Entity references of the entities to get"
|
|
354
|
-
}).optional(),
|
|
355
|
-
optional: zod.z.boolean({
|
|
356
|
-
description: "Allow the entity or entities to optionally exist. Default: false"
|
|
357
|
-
}).optional(),
|
|
358
|
-
defaultKind: zod.z.string({ description: "The default kind" }).optional(),
|
|
359
|
-
defaultNamespace: zod.z.string({ description: "The default namespace" }).optional()
|
|
360
|
-
}),
|
|
361
|
-
output: zod.z.object({
|
|
362
|
-
entity: zod.z.any({
|
|
363
|
-
description: "Object containing same values used in the Entity schema. Only when used with `entityRef` parameter."
|
|
364
|
-
}).optional(),
|
|
365
|
-
entities: zod.z.array(
|
|
366
|
-
zod.z.any({
|
|
367
|
-
description: "Array containing objects with same values used in the Entity schema. Only when used with `entityRefs` parameter."
|
|
368
|
-
})
|
|
369
|
-
).optional()
|
|
370
|
-
})
|
|
371
|
-
},
|
|
372
|
-
async handler(ctx) {
|
|
373
|
-
const { entityRef, entityRefs, optional, defaultKind, defaultNamespace } = ctx.input;
|
|
374
|
-
if (!entityRef && !entityRefs) {
|
|
375
|
-
if (optional) {
|
|
376
|
-
return;
|
|
377
|
-
}
|
|
378
|
-
throw new Error("Missing entity reference or references");
|
|
379
|
-
}
|
|
380
|
-
const { token } = await auth?.getPluginRequestToken({
|
|
381
|
-
onBehalfOf: await ctx.getInitiatorCredentials(),
|
|
382
|
-
targetPluginId: "catalog"
|
|
383
|
-
}) ?? { token: ctx.secrets?.backstageToken };
|
|
384
|
-
if (entityRef) {
|
|
385
|
-
const entity = await catalogClient.getEntityByRef(
|
|
386
|
-
catalogModel.stringifyEntityRef(
|
|
387
|
-
catalogModel.parseEntityRef(entityRef, { defaultKind, defaultNamespace })
|
|
388
|
-
),
|
|
389
|
-
{
|
|
390
|
-
token
|
|
391
|
-
}
|
|
392
|
-
);
|
|
393
|
-
if (!entity && !optional) {
|
|
394
|
-
throw new Error(`Entity ${entityRef} not found`);
|
|
395
|
-
}
|
|
396
|
-
ctx.output("entity", entity ?? null);
|
|
397
|
-
}
|
|
398
|
-
if (entityRefs) {
|
|
399
|
-
const entities = await catalogClient.getEntitiesByRefs(
|
|
400
|
-
{
|
|
401
|
-
entityRefs: entityRefs.map(
|
|
402
|
-
(ref) => catalogModel.stringifyEntityRef(
|
|
403
|
-
catalogModel.parseEntityRef(ref, { defaultKind, defaultNamespace })
|
|
404
|
-
)
|
|
405
|
-
)
|
|
406
|
-
},
|
|
407
|
-
{
|
|
408
|
-
token
|
|
409
|
-
}
|
|
410
|
-
);
|
|
411
|
-
const finalEntities = entities.items.map((e, i) => {
|
|
412
|
-
if (!e && !optional) {
|
|
413
|
-
throw new Error(`Entity ${entityRefs[i]} not found`);
|
|
414
|
-
}
|
|
415
|
-
return e ?? null;
|
|
416
|
-
});
|
|
417
|
-
ctx.output("entities", finalEntities);
|
|
418
|
-
}
|
|
419
|
-
}
|
|
420
|
-
});
|
|
421
|
-
}
|
|
422
|
-
|
|
423
|
-
const examples$7 = [
|
|
424
|
-
{
|
|
425
|
-
description: "Write a debug message",
|
|
426
|
-
example: yaml__namespace.default.stringify({
|
|
427
|
-
steps: [
|
|
428
|
-
{
|
|
429
|
-
action: "debug:log",
|
|
430
|
-
id: "write-debug-line",
|
|
431
|
-
name: 'Write "Hello Backstage!" log line',
|
|
432
|
-
input: {
|
|
433
|
-
message: "Hello Backstage!"
|
|
434
|
-
}
|
|
435
|
-
}
|
|
436
|
-
]
|
|
437
|
-
})
|
|
438
|
-
},
|
|
439
|
-
{
|
|
440
|
-
description: "List the workspace directory",
|
|
441
|
-
example: yaml__namespace.default.stringify({
|
|
442
|
-
steps: [
|
|
443
|
-
{
|
|
444
|
-
action: "debug:log",
|
|
445
|
-
id: "write-workspace-directory",
|
|
446
|
-
name: "List the workspace directory",
|
|
447
|
-
input: {
|
|
448
|
-
listWorkspace: true
|
|
449
|
-
}
|
|
450
|
-
}
|
|
451
|
-
]
|
|
452
|
-
})
|
|
453
|
-
},
|
|
454
|
-
{
|
|
455
|
-
description: "List the workspace directory with file contents",
|
|
456
|
-
example: yaml__namespace.default.stringify({
|
|
457
|
-
steps: [
|
|
458
|
-
{
|
|
459
|
-
action: "debug:log",
|
|
460
|
-
id: "write-workspace-directory",
|
|
461
|
-
name: "List the workspace directory with file contents",
|
|
462
|
-
input: {
|
|
463
|
-
listWorkspace: "with-contents"
|
|
464
|
-
}
|
|
465
|
-
}
|
|
466
|
-
]
|
|
467
|
-
})
|
|
468
|
-
}
|
|
469
|
-
];
|
|
470
|
-
|
|
471
|
-
const id$1 = "debug:log";
|
|
472
|
-
function createDebugLogAction() {
|
|
473
|
-
return pluginScaffolderNode.createTemplateAction({
|
|
474
|
-
id: id$1,
|
|
475
|
-
description: "Writes a message into the log and/or lists all files in the workspace.",
|
|
476
|
-
examples: examples$7,
|
|
477
|
-
schema: {
|
|
478
|
-
input: zod.z.object({
|
|
479
|
-
message: zod.z.string({ description: "Message to output." }).optional(),
|
|
480
|
-
listWorkspace: zod.z.union([zod.z.boolean(), zod.z.enum(["with-filenames", "with-contents"])], {
|
|
481
|
-
description: 'List all files in the workspace. If used with "with-contents", also the file contents are listed.'
|
|
482
|
-
}).optional()
|
|
483
|
-
})
|
|
484
|
-
},
|
|
485
|
-
supportsDryRun: true,
|
|
486
|
-
async handler(ctx) {
|
|
487
|
-
ctx.logger.info(JSON.stringify(ctx.input, null, 2));
|
|
488
|
-
if (ctx.input?.message) {
|
|
489
|
-
ctx.logger.info(ctx.input.message);
|
|
490
|
-
}
|
|
491
|
-
if (ctx.input?.listWorkspace) {
|
|
492
|
-
const files = await recursiveReadDir(ctx.workspacePath);
|
|
493
|
-
ctx.logger.info(
|
|
494
|
-
`Workspace:
|
|
495
|
-
${files.map((f) => {
|
|
496
|
-
const relativePath = path.relative(ctx.workspacePath, f);
|
|
497
|
-
if (ctx.input?.listWorkspace === "with-contents") {
|
|
498
|
-
const content = fs__default$1.default.readFileSync(f, "utf-8");
|
|
499
|
-
return ` - ${relativePath}:
|
|
500
|
-
|
|
501
|
-
${content}`;
|
|
502
|
-
}
|
|
503
|
-
return ` - ${relativePath}`;
|
|
504
|
-
}).join("\n")}`
|
|
505
|
-
);
|
|
506
|
-
}
|
|
507
|
-
}
|
|
508
|
-
});
|
|
509
|
-
}
|
|
510
|
-
async function recursiveReadDir(dir) {
|
|
511
|
-
const subdirs = await fs.readdir(dir);
|
|
512
|
-
const files = await Promise.all(
|
|
513
|
-
subdirs.map(async (subdir) => {
|
|
514
|
-
const res = path.join(dir, subdir);
|
|
515
|
-
return (await fs.stat(res)).isDirectory() ? recursiveReadDir(res) : [res];
|
|
516
|
-
})
|
|
517
|
-
);
|
|
518
|
-
return files.reduce((a, f) => a.concat(f), []);
|
|
519
|
-
}
|
|
520
|
-
|
|
521
|
-
const examples$6 = [
|
|
522
|
-
{
|
|
523
|
-
description: "Waiting for 50 milliseconds",
|
|
524
|
-
example: yaml__namespace.default.stringify({
|
|
525
|
-
steps: [
|
|
526
|
-
{
|
|
527
|
-
action: "debug:wait",
|
|
528
|
-
id: "wait-milliseconds",
|
|
529
|
-
name: "Waiting for 50 milliseconds",
|
|
530
|
-
input: {
|
|
531
|
-
milliseconds: 50
|
|
532
|
-
}
|
|
533
|
-
}
|
|
534
|
-
]
|
|
535
|
-
})
|
|
536
|
-
},
|
|
537
|
-
{
|
|
538
|
-
description: "Waiting for 5 seconds",
|
|
539
|
-
example: yaml__namespace.default.stringify({
|
|
540
|
-
steps: [
|
|
541
|
-
{
|
|
542
|
-
action: "debug:wait",
|
|
543
|
-
id: "wait-5sec",
|
|
544
|
-
name: "Waiting for 5 seconds",
|
|
545
|
-
input: {
|
|
546
|
-
seconds: 5
|
|
547
|
-
}
|
|
548
|
-
}
|
|
549
|
-
]
|
|
550
|
-
})
|
|
551
|
-
},
|
|
552
|
-
{
|
|
553
|
-
description: "Waiting for 1 minutes",
|
|
554
|
-
example: yaml__namespace.default.stringify({
|
|
555
|
-
steps: [
|
|
556
|
-
{
|
|
557
|
-
action: "debug:wait",
|
|
558
|
-
id: "wait-1min",
|
|
559
|
-
name: "Waiting for 1 minutes",
|
|
560
|
-
input: {
|
|
561
|
-
minutes: 1
|
|
562
|
-
}
|
|
563
|
-
}
|
|
564
|
-
]
|
|
565
|
-
})
|
|
566
|
-
}
|
|
567
|
-
];
|
|
568
|
-
|
|
569
|
-
const id = "debug:wait";
|
|
570
|
-
const MAX_WAIT_TIME_IN_ISO = "T00:10:00";
|
|
571
|
-
function createWaitAction(options) {
|
|
572
|
-
const toDuration = (maxWaitTime) => {
|
|
573
|
-
if (maxWaitTime) {
|
|
574
|
-
if (maxWaitTime instanceof luxon.Duration) {
|
|
575
|
-
return maxWaitTime;
|
|
576
|
-
}
|
|
577
|
-
return luxon.Duration.fromObject(maxWaitTime);
|
|
578
|
-
}
|
|
579
|
-
return luxon.Duration.fromISOTime(MAX_WAIT_TIME_IN_ISO);
|
|
580
|
-
};
|
|
581
|
-
return pluginScaffolderNode.createTemplateAction({
|
|
582
|
-
id,
|
|
583
|
-
description: "Waits for a certain period of time.",
|
|
584
|
-
examples: examples$6,
|
|
585
|
-
schema: {
|
|
586
|
-
input: {
|
|
587
|
-
type: "object",
|
|
588
|
-
properties: {
|
|
589
|
-
minutes: {
|
|
590
|
-
title: "Waiting period in minutes.",
|
|
591
|
-
type: "number"
|
|
592
|
-
},
|
|
593
|
-
seconds: {
|
|
594
|
-
title: "Waiting period in seconds.",
|
|
595
|
-
type: "number"
|
|
596
|
-
},
|
|
597
|
-
milliseconds: {
|
|
598
|
-
title: "Waiting period in milliseconds.",
|
|
599
|
-
type: "number"
|
|
600
|
-
}
|
|
601
|
-
}
|
|
602
|
-
}
|
|
603
|
-
},
|
|
604
|
-
async handler(ctx) {
|
|
605
|
-
const delayTime = luxon.Duration.fromObject(ctx.input);
|
|
606
|
-
const maxWait = toDuration(options?.maxWaitTime);
|
|
607
|
-
if (delayTime.minus(maxWait).toMillis() > 0) {
|
|
608
|
-
throw new Error(
|
|
609
|
-
`Waiting duration is longer than the maximum threshold of ${maxWait.toHuman()}`
|
|
610
|
-
);
|
|
611
|
-
}
|
|
612
|
-
await new Promise((resolve) => {
|
|
613
|
-
const controller = new AbortController();
|
|
614
|
-
const timeoutHandle = setTimeout(abort, delayTime.toMillis());
|
|
615
|
-
ctx.signal?.addEventListener("abort", abort);
|
|
616
|
-
function abort() {
|
|
617
|
-
ctx.signal?.removeEventListener("abort", abort);
|
|
618
|
-
clearTimeout(timeoutHandle);
|
|
619
|
-
controller.abort();
|
|
620
|
-
resolve("finished");
|
|
621
|
-
}
|
|
622
|
-
});
|
|
623
|
-
}
|
|
624
|
-
});
|
|
625
|
-
}
|
|
626
|
-
|
|
627
|
-
const examples$5 = [
|
|
628
|
-
{
|
|
629
|
-
description: "Downloads content and places it in the workspace.",
|
|
630
|
-
example: yaml__namespace.default.stringify({
|
|
631
|
-
steps: [
|
|
632
|
-
{
|
|
633
|
-
action: "fetch:plain",
|
|
634
|
-
id: "fetch-plain",
|
|
635
|
-
name: "Fetch plain",
|
|
636
|
-
input: {
|
|
637
|
-
url: "https://github.com/backstage/community/tree/main/backstage-community-sessions/assets"
|
|
638
|
-
}
|
|
639
|
-
}
|
|
640
|
-
]
|
|
641
|
-
})
|
|
642
|
-
},
|
|
643
|
-
{
|
|
644
|
-
description: "Optionally, if you would prefer the data to be downloaded to a subdirectory in the workspace you may specify the \u2018targetPath\u2019 input option.",
|
|
645
|
-
example: yaml__namespace.default.stringify({
|
|
646
|
-
steps: [
|
|
647
|
-
{
|
|
648
|
-
action: "fetch:plain",
|
|
649
|
-
id: "fetch-plain",
|
|
650
|
-
name: "Fetch plain",
|
|
651
|
-
input: {
|
|
652
|
-
url: "https://github.com/backstage/community/tree/main/backstage-community-sessions/assets",
|
|
653
|
-
targetPath: "fetched-data"
|
|
654
|
-
}
|
|
655
|
-
}
|
|
656
|
-
]
|
|
657
|
-
})
|
|
658
|
-
}
|
|
659
|
-
];
|
|
660
|
-
|
|
661
|
-
const ACTION_ID = "fetch:plain";
/**
 * Creates the `fetch:plain` scaffolder action.
 *
 * The action downloads a directory tree via the provided UrlReader and places
 * it in the workspace, or in a subdirectory when `targetPath` is given.
 *
 * @param {object} options - Action dependencies.
 * @param {object} options.reader - UrlReader used to fetch remote content.
 * @param {object} options.integrations - SCM integrations registry.
 * @returns The template action created by `createTemplateAction`.
 */
function createFetchPlainAction(options) {
  const { reader, integrations } = options;
  // JSON-schema for the action input, kept separate for readability.
  const inputSchema = {
    type: "object",
    required: ["url"],
    properties: {
      url: {
        title: "Fetch URL",
        description: "Relative path or absolute URL pointing to the directory tree to fetch",
        type: "string"
      },
      targetPath: {
        title: "Target Path",
        description: "Target path within the working directory to download the contents to.",
        type: "string"
      },
      token: {
        title: "Token",
        description: "An optional token to use for authentication when reading the resources.",
        type: "string"
      }
    }
  };
  return pluginScaffolderNode.createTemplateAction({
    id: ACTION_ID,
    examples: examples$5,
    description: "Downloads content and places it in the workspace, or optionally in a subdirectory specified by the `targetPath` input option.",
    schema: {
      input: inputSchema
    },
    supportsDryRun: true,
    async handler(ctx) {
      ctx.logger.info("Fetching plain content from remote URL");
      // Default to the workspace root when no targetPath was supplied.
      const destination = ctx.input.targetPath ?? "./";
      // resolveSafeChildPath guards against writing outside the workspace.
      const outputPath = backendPluginApi.resolveSafeChildPath(
        ctx.workspacePath,
        destination
      );
      await pluginScaffolderNode.fetchContents({
        reader,
        integrations,
        baseUrl: ctx.templateInfo?.baseUrl,
        fetchUrl: ctx.input.url,
        outputPath,
        token: ctx.input.token
      });
    }
  });
}
|
|
707
|
-
|
|
708
|
-
// Usage examples surfaced in the UI for the `fetch:plain:file` action.
// Note that `targetPath` is required for this action (single-file download).
const examples$4 = [
  {
    description: "Downloads a file and places it in the workspace.",
    example: yaml__namespace.default.stringify({
      steps: [
        {
          action: "fetch:plain:file",
          id: "fetch-plain-file",
          name: "Fetch plain file",
          input: {
            url: "https://github.com/backstage/community/tree/main/backstage-community-sessions/assets/Backstage%20Community%20Sessions.png",
            targetPath: "target-path"
          }
        }
      ]
    })
  }
];
|
|
726
|
-
|
|
727
|
-
/**
 * Creates the `fetch:plain:file` scaffolder action.
 *
 * Downloads a single file via the provided UrlReader and writes it to the
 * workspace at the (required) `targetPath`.
 *
 * @param {object} options - Action dependencies.
 * @param {object} options.reader - UrlReader used to fetch remote content.
 * @param {object} options.integrations - SCM integrations registry.
 * @returns The template action created by `createTemplateAction`.
 */
function createFetchPlainFileAction(options) {
  const { reader, integrations } = options;
  // JSON-schema for the action input, kept separate for readability.
  const inputSchema = {
    type: "object",
    required: ["url", "targetPath"],
    properties: {
      url: {
        title: "Fetch URL",
        description: "Relative path or absolute URL pointing to the single file to fetch.",
        type: "string"
      },
      targetPath: {
        title: "Target Path",
        description: "Target path within the working directory to download the file as.",
        type: "string"
      },
      token: {
        title: "Token",
        description: "An optional token to use for authentication when reading the resources.",
        type: "string"
      }
    }
  };
  return pluginScaffolderNode.createTemplateAction({
    id: "fetch:plain:file",
    description: "Downloads single file and places it in the workspace.",
    examples: examples$4,
    schema: {
      input: inputSchema
    },
    supportsDryRun: true,
    async handler(ctx) {
      ctx.logger.info("Fetching plain content from remote URL");
      // resolveSafeChildPath guards against writing outside the workspace.
      const outputPath = backendPluginApi.resolveSafeChildPath(
        ctx.workspacePath,
        ctx.input.targetPath
      );
      await pluginScaffolderNode.fetchFile({
        reader,
        integrations,
        baseUrl: ctx.templateInfo?.baseUrl,
        fetchUrl: ctx.input.url,
        outputPath,
        token: ctx.input.token
      });
    }
  });
}
|
|
774
|
-
|
|
775
|
-
/**
 * Checks whether the current process was started with `--no-node-snapshot`,
 * either via the NODE_OPTIONS environment variable or as a CLI argument.
 * Required by isolated-vm on Node 20+ (see SecureTemplater.loadRenderer).
 *
 * @returns {boolean} true when the flag is present in either location.
 */
function isNoNodeSnapshotOptionProvided() {
  const flag = "--no-node-snapshot";
  const inNodeOptions = process.env.NODE_OPTIONS?.includes(flag);
  return inNodeOptions || process.argv.includes(flag);
}
|
|
778
|
-
/**
 * Returns the major version of the running Node.js process as an integer,
 * e.g. 20 for "20.11.1".
 *
 * @returns {number} The major Node.js version.
 */
function getMajorNodeVersion() {
  const [major] = process.versions.node.split(".");
  return Number.parseInt(major, 10);
}
|
|
782
|
-
|
|
783
|
-
const mkScript = (nunjucksSource) => `
|
|
784
|
-
const { render, renderCompat } = (() => {
|
|
785
|
-
const module = {};
|
|
786
|
-
const process = { env: {} };
|
|
787
|
-
const require = (pkg) => { if (pkg === 'events') { return function (){}; }};
|
|
788
|
-
|
|
789
|
-
${nunjucksSource}
|
|
790
|
-
|
|
791
|
-
const env = module.exports.configure({
|
|
792
|
-
autoescape: false,
|
|
793
|
-
...JSON.parse(nunjucksConfigs),
|
|
794
|
-
tags: {
|
|
795
|
-
variableStart: '\${{',
|
|
796
|
-
variableEnd: '}}',
|
|
797
|
-
},
|
|
798
|
-
});
|
|
799
|
-
|
|
800
|
-
const compatEnv = module.exports.configure({
|
|
801
|
-
autoescape: false,
|
|
802
|
-
...JSON.parse(nunjucksConfigs),
|
|
803
|
-
tags: {
|
|
804
|
-
variableStart: '{{',
|
|
805
|
-
variableEnd: '}}',
|
|
806
|
-
},
|
|
807
|
-
});
|
|
808
|
-
compatEnv.addFilter('jsonify', compatEnv.getFilter('dump'));
|
|
809
|
-
|
|
810
|
-
for (const name of JSON.parse(availableTemplateFilters)) {
|
|
811
|
-
env.addFilter(name, (...args) => JSON.parse(callFilter(name, args)));
|
|
812
|
-
}
|
|
813
|
-
for (const [name, value] of Object.entries(JSON.parse(availableTemplateGlobals))) {
|
|
814
|
-
env.addGlobal(name, value);
|
|
815
|
-
}
|
|
816
|
-
for (const name of JSON.parse(availableTemplateCallbacks)) {
|
|
817
|
-
env.addGlobal(name, (...args) => JSON.parse(callGlobal(name, args)));
|
|
818
|
-
}
|
|
819
|
-
|
|
820
|
-
let uninstallCompat = undefined;
|
|
821
|
-
|
|
822
|
-
function render(str, values) {
|
|
823
|
-
try {
|
|
824
|
-
if (uninstallCompat) {
|
|
825
|
-
uninstallCompat();
|
|
826
|
-
uninstallCompat = undefined;
|
|
827
|
-
}
|
|
828
|
-
return env.renderString(str, JSON.parse(values));
|
|
829
|
-
} catch (error) {
|
|
830
|
-
// Make sure errors don't leak anything
|
|
831
|
-
throw new Error(String(error.message));
|
|
832
|
-
}
|
|
833
|
-
}
|
|
834
|
-
|
|
835
|
-
function renderCompat(str, values) {
|
|
836
|
-
try {
|
|
837
|
-
if (!uninstallCompat) {
|
|
838
|
-
uninstallCompat = module.exports.installJinjaCompat();
|
|
839
|
-
}
|
|
840
|
-
return compatEnv.renderString(str, JSON.parse(values));
|
|
841
|
-
} catch (error) {
|
|
842
|
-
// Make sure errors don't leak anything
|
|
843
|
-
throw new Error(String(error.message));
|
|
844
|
-
}
|
|
845
|
-
}
|
|
846
|
-
|
|
847
|
-
return { render, renderCompat };
|
|
848
|
-
})();
|
|
849
|
-
`;
|
|
850
|
-
/**
 * Renders nunjucks templates inside an isolated-vm sandbox so that untrusted
 * template expressions cannot reach the host process.
 */
class SecureTemplater {
  /**
   * Creates a render function backed by a fresh isolate.
   *
   * @param {object} [options]
   * @param {boolean} [options.cookiecutterCompat] - Use `{{ }}` tags and jinja compat.
   * @param {object} [options.templateFilters] - Host-side filter functions exposed to templates.
   * @param {object} [options.templateGlobals] - Host-side globals (values or functions).
   * @param {object} [options.nunjucksConfigs] - Extra nunjucks configure() options.
   * @returns {Promise<(template: string, values: object) => string>} A synchronous render function.
   * @throws When running on Node 20+ without the required --no-node-snapshot flag.
   */
  static async loadRenderer(options = {}) {
    const {
      cookiecutterCompat,
      templateFilters = {},
      templateGlobals = {},
      nunjucksConfigs = {}
    } = options;
    // isolated-vm is incompatible with the Node 20+ startup snapshot, so we
    // fail early with an actionable message rather than crashing later.
    const nodeVersion = getMajorNodeVersion();
    if (nodeVersion >= 20 && !isNoNodeSnapshotOptionProvided()) {
      throw new Error(
        `When using Node.js version 20 or newer, the scaffolder backend plugin requires that it be started with the --no-node-snapshot option.
Please make sure that you have NODE_OPTIONS=--no-node-snapshot in your environment.`
      );
    }
    // Dedicated isolate with a hard memory cap for template evaluation.
    const isolate = new isolatedVm.Isolate({ memoryLimit: 128 });
    const context = await isolate.createContext();
    const contextGlobal = context.global;
    // The nunjucks bundle is shipped as a text asset and compiled inside the
    // sandbox (see mkScript) rather than required in the host process.
    const nunjucksSource = await fs__default.default.readFile(
      backendPluginApi.resolvePackagePath(
        "@backstage/plugin-scaffolder-backend",
        "assets/nunjucks.js.txt"
      ),
      "utf-8"
    );
    const nunjucksScript = await isolate.compileScript(
      mkScript(nunjucksSource)
    );
    // All data crossing the isolate boundary is passed as JSON strings.
    await contextGlobal.set("nunjucksConfigs", JSON.stringify(nunjucksConfigs));
    const availableFilters = Object.keys(templateFilters);
    await contextGlobal.set(
      "availableTemplateFilters",
      JSON.stringify(availableFilters)
    );
    // Split globals into plain values (copied into the sandbox) and functions
    // (invoked back on the host via the callGlobal bridge).
    const globalCallbacks = [];
    const globalValues = {};
    for (const [name, value] of Object.entries(templateGlobals)) {
      if (typeof value === "function") {
        globalCallbacks.push(name);
      } else {
        globalValues[name] = value;
      }
    }
    await contextGlobal.set(
      "availableTemplateGlobals",
      JSON.stringify(globalValues)
    );
    await contextGlobal.set(
      "availableTemplateCallbacks",
      JSON.stringify(globalCallbacks)
    );
    // Host-side bridge: sandbox filter calls come through here. Unknown
    // filter names resolve to an empty string rather than throwing.
    await contextGlobal.set(
      "callFilter",
      (filterName, args) => {
        if (!Object.hasOwn(templateFilters, filterName)) {
          return "";
        }
        return JSON.stringify(templateFilters[filterName](...args));
      }
    );
    // Host-side bridge for function-valued globals; same defensive checks.
    await contextGlobal.set(
      "callGlobal",
      (globalName, args) => {
        if (!Object.hasOwn(templateGlobals, globalName)) {
          return "";
        }
        const global = templateGlobals[globalName];
        if (typeof global !== "function") {
          return "";
        }
        return JSON.stringify(global(...args));
      }
    );
    // Run the sandbox setup script; this defines render/renderCompat inside
    // the isolate, ready to be invoked via evalSync below.
    await nunjucksScript.run(context);
    const render = (template, values) => {
      if (!context) {
        throw new Error("SecureTemplater has not been initialized");
      }
      contextGlobal.setSync("templateStr", String(template));
      contextGlobal.setSync("templateValues", JSON.stringify(values));
      if (cookiecutterCompat) {
        return context.evalSync(`renderCompat(templateStr, templateValues)`);
      }
      return context.evalSync(`render(templateStr, templateValues)`);
    };
    return render;
  }
}
|
|
938
|
-
|
|
939
|
-
/**
 * Builds the default set of template filters available to scaffolder
 * templates: repo-URL and entity-ref parsing, lodash-style property pick,
 * and an "owner/repo" project slug helper.
 *
 * @param {object} deps
 * @param {object} deps.integrations - SCM integrations registry, used for repo URL parsing.
 * @returns {object} Map of filter name to filter function.
 */
const createDefaultFilters = ({ integrations }) => {
  const parseRepoUrl = (url) => pluginScaffolderNode.parseRepoUrl(url, integrations);
  const parseEntityRef = (ref, context) => catalogModel.parseEntityRef(ref, context);
  // Property lookup with dotted-path support (lodash get).
  const pick = (obj, key) => get__default.default(obj, key);
  // Derives the "owner/repo" slug from a repo URL.
  const projectSlug = (repoUrl) => {
    const parsed = pluginScaffolderNode.parseRepoUrl(repoUrl, integrations);
    return `${parsed.owner}/${parsed.repo}`;
  };
  return {
    parseRepoUrl,
    parseEntityRef,
    pick,
    projectSlug
  };
};
|
|
952
|
-
|
|
953
|
-
// Usage examples surfaced in the UI for the `fetch:template` action,
// showing a skeleton fetch with templated values of several types.
const examples$3 = [
  {
    description: "Downloads a skeleton directory that lives alongside the template file and fill it out with values.",
    example: yaml__namespace.default.stringify({
      steps: [
        {
          action: "fetch:template",
          id: "fetch-template",
          name: "Fetch template",
          input: {
            url: "./skeleton",
            targetPath: "./target",
            values: {
              name: "test-project",
              count: 1234,
              itemList: ["first", "second", "third"],
              showDummyFile: false
            }
          }
        }
      ]
    })
  }
];
|
|
977
|
-
|
|
978
|
-
/**
 * Creates the `fetch:template` scaffolder action.
 *
 * Downloads a skeleton directory, renders file/directory names and file
 * contents through the SecureTemplater (nunjucks in an isolate), and writes
 * the result into the workspace (or `targetPath`).
 *
 * @param {object} options - Action dependencies.
 * @param {object} options.reader - UrlReader used to fetch remote content.
 * @param {object} options.integrations - SCM integrations registry.
 * @param {object} [options.additionalTemplateFilters] - Extra filters merged over the defaults.
 * @param {object} [options.additionalTemplateGlobals] - Extra globals exposed to templates.
 * @returns The template action created by `createTemplateAction`.
 */
function createFetchTemplateAction(options) {
  const {
    reader,
    integrations,
    additionalTemplateFilters,
    additionalTemplateGlobals
  } = options;
  const defaultTemplateFilters = createDefaultFilters({ integrations });
  return pluginScaffolderNode.createTemplateAction({
    id: "fetch:template",
    description: "Downloads a skeleton, templates variables into file and directory names and content, and places the result in the workspace, or optionally in a subdirectory specified by the `targetPath` input option.",
    examples: examples$3,
    schema: {
      input: {
        type: "object",
        required: ["url"],
        properties: {
          url: {
            title: "Fetch URL",
            description: "Relative path or absolute URL pointing to the directory tree to fetch",
            type: "string"
          },
          targetPath: {
            title: "Target Path",
            description: "Target path within the working directory to download the contents to. Defaults to the working directory root.",
            type: "string"
          },
          values: {
            title: "Template Values",
            description: "Values to pass on to the templating engine",
            type: "object"
          },
          copyWithoutRender: {
            title: "[Deprecated] Copy Without Render",
            description: "An array of glob patterns. Any files or directories which match are copied without being processed as templates.",
            type: "array",
            items: {
              type: "string"
            }
          },
          copyWithoutTemplating: {
            title: "Copy Without Templating",
            description: "An array of glob patterns. Contents of matched files or directories are copied without being processed, but paths are subject to rendering.",
            type: "array",
            items: {
              type: "string"
            }
          },
          cookiecutterCompat: {
            title: "Cookiecutter compatibility mode",
            description: "Enable features to maximise compatibility with templates built for fetch:cookiecutter",
            type: "boolean"
          },
          templateFileExtension: {
            title: "Template File Extension",
            description: "If set, only files with the given extension will be templated. If set to `true`, the default extension `.njk` is used.",
            type: ["string", "boolean"]
          },
          replace: {
            title: "Replace files",
            description: "If set, replace files in targetPath instead of skipping existing ones.",
            type: "boolean"
          },
          token: {
            title: "Token",
            description: "An optional token to use for authentication when reading the resources.",
            type: "string"
          }
        }
      }
    },
    supportsDryRun: true,
    async handler(ctx) {
      ctx.logger.info("Fetching template content from remote URL");
      // The skeleton is fetched into a temp dir first, then rendered into the
      // workspace, so a partially-fetched template never pollutes the output.
      const workDir = await ctx.createTemporaryDirectory();
      const templateDir = backendPluginApi.resolveSafeChildPath(workDir, "template");
      const targetPath = ctx.input.targetPath ?? "./";
      const outputDir = backendPluginApi.resolveSafeChildPath(ctx.workspacePath, targetPath);
      // copyWithoutRender (deprecated) and copyWithoutTemplating are mutually
      // exclusive; they differ in whether file *names* still get rendered.
      if (ctx.input.copyWithoutRender && ctx.input.copyWithoutTemplating) {
        throw new errors.InputError(
          "Fetch action input copyWithoutRender and copyWithoutTemplating can not be used at the same time"
        );
      }
      let copyOnlyPatterns;
      let renderFilename;
      if (ctx.input.copyWithoutRender) {
        ctx.logger.warn(
          "[Deprecated] copyWithoutRender is deprecated Please use copyWithoutTemplating instead."
        );
        copyOnlyPatterns = ctx.input.copyWithoutRender;
        renderFilename = false;
      } else {
        copyOnlyPatterns = ctx.input.copyWithoutTemplating;
        renderFilename = true;
      }
      if (copyOnlyPatterns && !Array.isArray(copyOnlyPatterns)) {
        throw new errors.InputError(
          "Fetch action input copyWithoutRender/copyWithoutTemplating must be an Array"
        );
      }
      if (ctx.input.templateFileExtension && (copyOnlyPatterns || ctx.input.cookiecutterCompat)) {
        throw new errors.InputError(
          "Fetch action input extension incompatible with copyWithoutRender/copyWithoutTemplating and cookiecutterCompat"
        );
      }
      // extension === false means "template every file"; otherwise only
      // files with this extension are rendered (and the extension stripped).
      let extension = false;
      if (ctx.input.templateFileExtension) {
        extension = ctx.input.templateFileExtension === true ? ".njk" : ctx.input.templateFileExtension;
        if (!extension.startsWith(".")) {
          extension = `.${extension}`;
        }
      }
      await pluginScaffolderNode.fetchContents({
        reader,
        integrations,
        baseUrl: ctx.templateInfo?.baseUrl,
        fetchUrl: ctx.input.url,
        outputPath: templateDir,
        token: ctx.input.token
      });
      ctx.logger.info("Listing files and directories in template");
      // Directories are marked with a trailing "/" (markDirectories) so they
      // can be distinguished from files later in the loop.
      const allEntriesInTemplate = await globby__default.default(`**/*`, {
        cwd: templateDir,
        dot: true,
        onlyFiles: false,
        markDirectories: true,
        followSymbolicLinks: false
      });
      // Entries matching the copy-only globs have their *contents* copied
      // verbatim rather than rendered.
      const nonTemplatedEntries = new Set(
        await globby__default.default(copyOnlyPatterns || [], {
          cwd: templateDir,
          dot: true,
          onlyFiles: false,
          markDirectories: true,
          followSymbolicLinks: false
        })
      );
      const { cookiecutterCompat, values } = ctx.input;
      // Templates reference inputs as values.* (or cookiecutter.* in compat mode).
      const context = {
        [cookiecutterCompat ? "cookiecutter" : "values"]: values
      };
      ctx.logger.info(
        `Processing ${allEntriesInTemplate.length} template files/directories with input values`,
        ctx.input.values
      );
      const renderTemplate = await SecureTemplater.loadRenderer({
        cookiecutterCompat: ctx.input.cookiecutterCompat,
        templateFilters: {
          ...defaultTemplateFilters,
          ...additionalTemplateFilters
        },
        templateGlobals: additionalTemplateGlobals,
        nunjucksConfigs: {
          trimBlocks: ctx.input.trimBlocks,
          lstripBlocks: ctx.input.lstripBlocks
        }
      });
      for (const location of allEntriesInTemplate) {
        let renderContents;
        let localOutputPath = location;
        if (extension) {
          // Extension mode: only matching files get rendered, and the
          // template extension is stripped from the output name.
          renderContents = path.extname(localOutputPath) === extension;
          if (renderContents) {
            localOutputPath = localOutputPath.slice(0, -extension.length);
          }
          localOutputPath = renderTemplate(localOutputPath, context);
        } else {
          renderContents = !nonTemplatedEntries.has(location);
          if (renderFilename) {
            localOutputPath = renderTemplate(localOutputPath, context);
          } else {
            localOutputPath = renderContents ? renderTemplate(localOutputPath, context) : localOutputPath;
          }
        }
        // Paths that render to empty segments are deliberately skipped
        // (lets templates conditionally omit files/directories).
        if (containsSkippedContent(localOutputPath)) {
          continue;
        }
        const outputPath = backendPluginApi.resolveSafeChildPath(outputDir, localOutputPath);
        // Existing files are preserved unless `replace` was requested.
        if (fs__default.default.existsSync(outputPath) && !ctx.input.replace) {
          continue;
        }
        if (!renderContents && !extension) {
          ctx.logger.info(
            `Copying file/directory ${location} without processing.`
          );
        }
        if (location.endsWith("/")) {
          ctx.logger.info(
            `Writing directory ${location} to template output path.`
          );
          await fs__default.default.ensureDir(outputPath);
        } else {
          const inputFilePath = backendPluginApi.resolveSafeChildPath(templateDir, location);
          const stats = await fs__default.default.promises.lstat(inputFilePath);
          // Symlinks and binary files are copied as-is — rendering them as
          // text would corrupt them.
          if (stats.isSymbolicLink() || await isbinaryfile.isBinaryFile(inputFilePath)) {
            ctx.logger.info(
              `Copying file binary or symbolic link at ${location}, to template output path.`
            );
            await fs__default.default.copy(inputFilePath, outputPath);
          } else {
            const statsObj = await fs__default.default.stat(inputFilePath);
            ctx.logger.info(
              `Writing file ${location} to template output path with mode ${statsObj.mode}.`
            );
            const inputFileContents = await fs__default.default.readFile(inputFilePath, "utf-8");
            // File mode is preserved so e.g. executable scripts stay executable.
            await fs__default.default.outputFile(
              outputPath,
              renderContents ? renderTemplate(inputFileContents, context) : inputFileContents,
              { mode: statsObj.mode }
            );
          }
        }
      }
      ctx.logger.info(`Template result written to ${outputDir}`);
    }
  });
}
|
|
1195
|
-
/**
 * Determines whether a rendered output path should be skipped entirely.
 * A path is skipped when templating produced an empty name, an absolute
 * path, or an empty intermediate segment ("//") — all of which indicate a
 * path component that rendered to nothing.
 *
 * @param {string} localOutputPath - The path after template rendering.
 * @returns {boolean} true when the entry should not be written.
 */
function containsSkippedContent(localOutputPath) {
  if (localOutputPath === "") {
    return true;
  }
  return localOutputPath.startsWith("/") || localOutputPath.includes("//");
}
|
|
1198
|
-
|
|
1199
|
-
// Usage examples surfaced in the UI for the `fetch:template:file` action.
const examples$2 = [
  {
    description: "Downloads a template file and fill it out with values.",
    example: yaml__namespace.default.stringify({
      steps: [
        {
          action: "fetch:template:file",
          id: "fetch-template-file",
          name: "Fetch template file",
          input: {
            url: "./skeleton.txt",
            targetPath: "./target/skeleton.txt",
            values: {
              name: "test-project",
              count: 1234,
              itemList: ["first", "second", "third"]
            }
          }
        }
      ]
    })
  }
];
|
|
1222
|
-
|
|
1223
|
-
/**
 * Creates the `fetch:template:file` scaffolder action.
 *
 * Downloads a single file, renders its contents through the SecureTemplater,
 * and writes the result to `targetPath` in the workspace.
 *
 * @param {object} options - Action dependencies.
 * @param {object} options.reader - UrlReader used to fetch remote content.
 * @param {object} options.integrations - SCM integrations registry.
 * @param {object} [options.additionalTemplateFilters] - Extra filters merged over the defaults.
 * @param {object} [options.additionalTemplateGlobals] - Extra globals exposed to templates.
 * @returns The template action created by `createTemplateAction`.
 */
function createFetchTemplateFileAction(options) {
  const {
    reader,
    integrations,
    additionalTemplateFilters,
    additionalTemplateGlobals
  } = options;
  const defaultTemplateFilters = createDefaultFilters({ integrations });
  return pluginScaffolderNode.createTemplateAction({
    id: "fetch:template:file",
    description: "Downloads single file and places it in the workspace.",
    examples: examples$2,
    schema: {
      input: {
        type: "object",
        required: ["url", "targetPath"],
        properties: {
          url: {
            title: "Fetch URL",
            description: "Relative path or absolute URL pointing to the single file to fetch.",
            type: "string"
          },
          targetPath: {
            title: "Target Path",
            description: "Target path within the working directory to download the file as.",
            type: "string"
          },
          values: {
            title: "Template Values",
            description: "Values to pass on to the templating engine",
            type: "object"
          },
          cookiecutterCompat: {
            title: "Cookiecutter compatibility mode",
            description: "Enable features to maximise compatibility with templates built for fetch:cookiecutter",
            type: "boolean"
          },
          replace: {
            title: "Replace file",
            description: "If set, replace file in targetPath instead of overwriting existing one.",
            type: "boolean"
          },
          token: {
            title: "Token",
            description: "An optional token to use for authentication when reading the resources.",
            type: "string"
          }
        }
      }
    },
    supportsDryRun: true,
    async handler(ctx) {
      ctx.logger.info("Fetching template file content from remote URL");
      // The file is fetched into a temp location first, then rendered and
      // written to its final destination.
      const workDir = await ctx.createTemporaryDirectory();
      const tmpFilePath = path__default.default.join(workDir, "tmp");
      const outputPath = backendPluginApi.resolveSafeChildPath(
        ctx.workspacePath,
        ctx.input.targetPath
      );
      // Existing file is preserved unless `replace` was requested; checked
      // before fetching so no work is done in the skip case.
      if (fs__default.default.existsSync(outputPath) && !ctx.input.replace) {
        ctx.logger.info(
          `File ${ctx.input.targetPath} already exists in workspace, not replacing.`
        );
        return;
      }
      await pluginScaffolderNode.fetchFile({
        reader,
        integrations,
        baseUrl: ctx.templateInfo?.baseUrl,
        fetchUrl: ctx.input.url,
        outputPath: tmpFilePath,
        token: ctx.input.token
      });
      const { cookiecutterCompat, values } = ctx.input;
      // Templates reference inputs as values.* (or cookiecutter.* in compat mode).
      const context = {
        [cookiecutterCompat ? "cookiecutter" : "values"]: values
      };
      ctx.logger.info(
        `Processing template file with input values`,
        ctx.input.values
      );
      const renderTemplate = await SecureTemplater.loadRenderer({
        cookiecutterCompat,
        templateFilters: {
          ...defaultTemplateFilters,
          ...additionalTemplateFilters
        },
        templateGlobals: additionalTemplateGlobals,
        nunjucksConfigs: {
          trimBlocks: ctx.input.trimBlocks,
          lstripBlocks: ctx.input.lstripBlocks
        }
      });
      const contents = await fs__default.default.readFile(tmpFilePath, "utf-8");
      const result = renderTemplate(contents, context);
      await fs__default.default.ensureDir(path__default.default.dirname(outputPath));
      await fs__default.default.outputFile(outputPath, result);
      ctx.logger.info(`Template file has been written to ${outputPath}`);
    }
  });
}
|
|
1324
|
-
|
|
1325
|
-
// Usage examples surfaced in the UI for the `fs:delete` action.
// Note: this block uses yaml__namespace.stringify directly (not .default),
// unlike the fetch:* example blocks above.
const examples$1 = [
  {
    description: "Delete specified files",
    example: yaml__namespace.stringify({
      steps: [
        {
          action: "fs:delete",
          id: "deleteFiles",
          name: "Delete files",
          input: {
            files: ["file1.txt", "file2.txt"]
          }
        }
      ]
    })
  }
];
|
|
1342
|
-
|
|
1343
|
-
/**
 * Creates the `fs:delete` scaffolder action, which removes the given files
 * and directories from the workspace. Paths are resolved with
 * resolveSafeChildPath, so entries cannot escape the workspace root.
 *
 * @returns The template action created by `createTemplateAction`.
 */
const createFilesystemDeleteAction = () => {
  // JSON-schema for the action input, kept separate for readability.
  const inputSchema = {
    required: ["files"],
    type: "object",
    properties: {
      files: {
        title: "Files",
        description: "A list of files and directories that will be deleted",
        type: "array",
        items: {
          type: "string"
        }
      }
    }
  };
  return pluginScaffolderNode.createTemplateAction({
    id: "fs:delete",
    description: "Deletes files and directories from the workspace",
    examples: examples$1,
    schema: {
      input: inputSchema
    },
    supportsDryRun: true,
    async handler(ctx) {
      if (!Array.isArray(ctx.input?.files)) {
        throw new errors.InputError("files must be an Array");
      }
      for (const entry of ctx.input.files) {
        // Reject anything that would resolve outside the workspace.
        const filepath = backendPluginApi.resolveSafeChildPath(
          ctx.workspacePath,
          entry
        );
        try {
          await fs__default.default.remove(filepath);
          ctx.logger.info(`File ${filepath} deleted successfully`);
        } catch (err) {
          // Log with context, then propagate so the step fails.
          ctx.logger.error(`Failed to delete file ${filepath}:`, err);
          throw err;
        }
      }
    }
  });
};
|
|
1382
|
-
|
|
1383
|
-
// Usage examples surfaced in the UI for the `fs:rename` action, including
// the optional per-entry `overwrite` flag.
const examples = [
  {
    description: "Rename specified files ",
    example: yaml__namespace.stringify({
      steps: [
        {
          action: "fs:rename",
          id: "renameFiles",
          name: "Rename files",
          input: {
            files: [
              { from: "file1.txt", to: "file1Renamed.txt" },
              { from: "file2.txt", to: "file2Renamed.txt" },
              { from: "file3.txt", to: "file3Renamed.txt", overwrite: true }
            ]
          }
        }
      ]
    })
  }
];
|
|
1404
|
-
|
|
1405
|
-
/**
 * Creates the `fs:rename` scaffolder action, which moves/renames files and
 * directories within the workspace. Both source and destination are resolved
 * with resolveSafeChildPath, so entries cannot escape the workspace root.
 *
 * @returns The template action created by `createTemplateAction`.
 */
const createFilesystemRenameAction = () => {
  return pluginScaffolderNode.createTemplateAction({
    id: "fs:rename",
    description: "Renames files and directories within the workspace",
    examples,
    schema: {
      input: {
        required: ["files"],
        type: "object",
        properties: {
          files: {
            title: "Files",
            description: "A list of file and directory names that will be renamed",
            type: "array",
            items: {
              type: "object",
              required: ["from", "to"],
              properties: {
                from: {
                  type: "string",
                  title: "The source location of the file to be renamed"
                },
                to: {
                  type: "string",
                  title: "The destination of the new file"
                },
                overwrite: {
                  type: "boolean",
                  title: "Overwrite existing file or directory, default is false"
                }
              }
            }
          }
        }
      }
    },
    supportsDryRun: true,
    async handler(ctx) {
      if (!Array.isArray(ctx.input?.files)) {
        throw new errors.InputError("files must be an Array");
      }
      for (const file of ctx.input.files) {
        // Each entry must specify both endpoints of the rename.
        if (!file.from || !file.to) {
          throw new errors.InputError("each file must have a from and to property");
        }
        // Resolve both paths inside the workspace (path-traversal guard).
        const sourceFilepath = backendPluginApi.resolveSafeChildPath(
          ctx.workspacePath,
          file.from
        );
        const destFilepath = backendPluginApi.resolveSafeChildPath(ctx.workspacePath, file.to);
        try {
          // fs-extra move: errors if destination exists unless overwrite is set.
          await fs__default.default.move(sourceFilepath, destFilepath, {
            overwrite: file.overwrite ?? false
          });
          ctx.logger.info(
            `File ${sourceFilepath} renamed to ${destFilepath} successfully`
          );
        } catch (err) {
          // Log with context, then propagate so the step fails.
          ctx.logger.error(
            `Failed to rename file ${sourceFilepath} to ${destFilepath}:`,
            err
          );
          throw err;
        }
      }
    }
  });
};
|
|
1473
|
-
|
|
1474
|
-
/**
 * Assembles the default set of built-in scaffolder actions (fetch, publish,
 * catalog, filesystem, debug helpers) wired up with the provided reader,
 * integrations, catalog client, auth service, config, and any additional
 * template filters/globals.
 */
const createBuiltinActions = (options) => {
  const {
    reader,
    integrations,
    catalogClient,
    auth,
    config,
    additionalTemplateFilters,
    additionalTemplateGlobals
  } = options;
  // Shared GitHub credentials provider, derived once from the integrations.
  const githubCredentialsProvider = integration.DefaultGithubCredentialsProvider.fromIntegrations(integrations);
  return [
    createFetchPlainAction({ reader, integrations }),
    createFetchPlainFileAction({ reader, integrations }),
    createFetchTemplateAction({ integrations, reader, additionalTemplateFilters, additionalTemplateGlobals }),
    createFetchTemplateFileAction({ integrations, reader, additionalTemplateFilters, additionalTemplateGlobals }),
    gerrit.createPublishGerritAction({ integrations, config }),
    gerrit.createPublishGerritReviewAction({ integrations, config }),
    pluginScaffolderBackendModuleGitea.createPublishGiteaAction({ integrations, config }),
    github.createPublishGithubAction({ integrations, config, githubCredentialsProvider }),
    github.createPublishGithubPullRequestAction({ integrations, githubCredentialsProvider, config }),
    gitlab.createPublishGitlabAction({ integrations, config }),
    gitlab.createPublishGitlabMergeRequestAction({ integrations }),
    gitlab.createGitlabRepoPushAction({ integrations }),
    bitbucket.createPublishBitbucketAction({ integrations, config }),
    bitbucketCloud.createPublishBitbucketCloudAction({ integrations, config }),
    bitbucketCloud.createPublishBitbucketCloudPullRequestAction({ integrations, config }),
    bitbucketServer.createPublishBitbucketServerAction({ integrations, config }),
    bitbucketServer.createPublishBitbucketServerPullRequestAction({ integrations, config }),
    azure.createPublishAzureAction({ integrations, config }),
    createDebugLogAction(),
    createWaitAction(),
    createCatalogRegisterAction({ catalogClient, integrations, auth }),
    createFetchCatalogEntityAction({ catalogClient, auth }),
    createCatalogWriteAction(),
    createFilesystemDeleteAction(),
    createFilesystemRenameAction(),
    github.createGithubActionsDispatchAction({ integrations, githubCredentialsProvider }),
    github.createGithubWebhookAction({ integrations, githubCredentialsProvider }),
    github.createGithubIssuesLabelAction({ integrations, githubCredentialsProvider }),
    github.createGithubRepoCreateAction({ integrations, githubCredentialsProvider }),
    github.createGithubRepoPushAction({ integrations, config, githubCredentialsProvider }),
    github.createGithubEnvironmentAction({ integrations, catalogClient }),
    github.createGithubDeployKeyAction({ integrations }),
    github.createGithubAutolinksAction({ integrations, githubCredentialsProvider }),
    bitbucketCloud.createBitbucketPipelinesRunAction({ integrations })
  ];
};
|
|
1604
|
-
|
|
1605
|
-
/**
 * In-memory registry of scaffolder template actions, keyed by action ID.
 * IDs are unique: double registration is a conflict, lookup of an unknown
 * ID is a not-found error.
 */
class TemplateActionRegistry {
  actions = /* @__PURE__ */ new Map();
  /** Registers an action; throws errors.ConflictError on a duplicate ID. */
  register(action) {
    const { id } = action;
    if (this.actions.has(id)) {
      throw new errors.ConflictError(
        `Template action with ID '${id}' has already been registered`
      );
    }
    this.actions.set(id, action);
  }
  /** Looks up an action by ID; throws errors.NotFoundError when absent. */
  get(actionId) {
    const found = this.actions.get(actionId);
    if (found === undefined) {
      throw new errors.NotFoundError(
        `Template action with ID '${actionId}' is not registered.`
      );
    }
    return found;
  }
  /** Returns all registered actions as a fresh array. */
  list() {
    return Array.from(this.actions.values());
  }
}
|
|
1628
|
-
|
|
1629
|
-
/**
 * Trims a task's event list so consumers only see events since the most
 * recent "recovered" event when that recovery used the "startOver"
 * strategy; otherwise the list is returned unchanged.
 */
const trimEventsTillLastRecovery = (events) => {
  // Distance of the last "recovered" event from the end of the list.
  const fromEnd = events.slice().reverse().findIndex((e) => e.type === "recovered");
  if (fromEnd < 0) {
    return { events };
  }
  const lastRecoveryIndex = events.length - fromEnd - 1;
  const { recoverStrategy } = events[lastRecoveryIndex].body;
  if (recoverStrategy === "startOver") {
    // When recovery is the final event there is nothing after it to keep.
    return { events: fromEnd === 0 ? [] : events.slice(lastRecoveryIndex) };
  }
  return { events };
};
|
|
1642
|
-
|
|
1643
|
-
/**
 * Builds a raw SQL expression for "now minus timeoutS seconds", using the
 * dialect-appropriate syntax for the connected knex client (PostgreSQL-style
 * interval arithmetic by default, with MySQL and SQLite variants).
 */
const intervalFromNowTill = (timeoutS, knex) => {
  // Default: ANSI/Postgres-style interval subtraction with a bound now().
  let cutoff = knex.raw(`? - interval '${timeoutS} seconds'`, [
    knex.fn.now()
  ]);
  const clientName = knex.client.config.client;
  if (clientName.includes("mysql")) {
    cutoff = knex.raw(
      `date_sub(now(), interval ${timeoutS} second)`
    );
  } else if (clientName.includes("sqlite3")) {
    cutoff = knex.raw(`datetime('now', ?)`, [
      `-${timeoutS} seconds`
    ]);
  }
  return cutoff;
};
|
|
1658
|
-
|
|
1659
|
-
/**
 * Resolves the scaffolder working directory: the configured
 * `backend.workingDirectory` when present (validated for existence and
 * writability), otherwise the OS temp directory.
 * Throws the underlying fs error when the configured path is unusable.
 */
async function getWorkingDirectory(config, logger) {
  if (!config.has("backend.workingDirectory")) {
    return os__default.default.tmpdir();
  }
  const workingDirectory = config.getString("backend.workingDirectory");
  try {
    const { F_OK, W_OK } = fs__default.default.constants;
    // Verify the directory both exists and is writable before using it.
    await fs__default.default.access(workingDirectory, F_OK | W_OK);
    logger.info(`using working directory: ${workingDirectory}`);
    return workingDirectory;
  } catch (err) {
    errors.assertError(err);
    const reason = err.code === "ENOENT" ? "does not exist" : "is not writable";
    logger.error(
      `working directory ${workingDirectory} ${reason}`
    );
    throw err;
  }
}
|
|
1676
|
-
/**
 * Derives a base URL for an entity from its source-location annotation,
 * falling back to the plain location annotation. Returns undefined when no
 * annotation is present or the location type is neither "url" nor "file".
 */
function getEntityBaseUrl(entity) {
  const annotations = entity.metadata.annotations;
  // Prefer the source location; fall back to the managed location.
  // Falsy check (||) intentionally treats empty strings as absent.
  const location = annotations?.[catalogModel.ANNOTATION_SOURCE_LOCATION] || annotations?.[catalogModel.ANNOTATION_LOCATION];
  if (!location) {
    return void 0;
  }
  const { type, target } = catalogModel.parseLocationRef(location);
  switch (type) {
    case "url":
      return target;
    case "file":
      return `file://${target}`;
    default:
      return void 0;
  }
}
|
|
1692
|
-
/**
 * Fetches a Template entity from the catalog by ref.
 * Throws InputError for a non-Template kind and NotFoundError when the
 * catalog has no matching entity.
 */
async function findTemplate(options) {
  const { entityRef, token, catalogApi } = options;
  const normalizedKind = entityRef.kind.toLocaleLowerCase("en-US");
  if (normalizedKind !== "template") {
    throw new errors.InputError(`Invalid kind, only 'Template' kind is supported`);
  }
  const template = await catalogApi.getEntityByRef(entityRef, { token });
  if (!template) {
    throw new errors.NotFoundError(
      `Template ${catalogModel.stringifyEntityRef(entityRef)} not found`
    );
  }
  return template;
}
|
|
1705
|
-
/**
 * Normalizes a query parameter to an array of strings.
 * undefined stays undefined; a single string becomes a one-element array;
 * anything containing a non-string raises InputError.
 */
function parseStringsParam(param, paramName) {
  if (param === void 0) {
    return void 0;
  }
  const values = [param].flat();
  const allStrings = values.every((p) => typeof p === "string");
  if (!allStrings) {
    throw new errors.InputError(
      `Invalid ${paramName}, not a string or array of strings`
    );
  }
  return values;
}
|
|
1717
|
-
/**
 * Normalizes a query parameter to an array of integers by first running it
 * through parseStringsParam and then base-10 parsing each value.
 * Raises InputError for values that do not parse to a number.
 */
function parseNumberParam(param, paramName) {
  const strings = parseStringsParam(param, paramName);
  return strings?.map((val) => {
    const parsed = Number.parseInt(val, 10);
    if (Number.isNaN(parsed)) {
      throw new errors.InputError(
        `Invalid ${paramName} parameter "${val}", expected a number or array of numbers`
      );
    }
    return parsed;
  });
}
|
|
1728
|
-
/**
 * Flattens any mix of scalar and array arguments one level deep and drops
 * all falsy entries (undefined, null, '', 0, false).
 */
function flattenParams(...params) {
  return params.flat().filter(Boolean);
}
|
|
1731
|
-
|
|
1732
|
-
// Absolute path to this package's bundled database migration files, resolved
// from the installed package location so migrations run regardless of the
// install layout (workspace, hoisted node_modules, etc.).
const migrationsDir = backendPluginApi.resolvePackagePath(
  "@backstage/plugin-scaffolder-backend",
  "migrations"
);
|
|
1736
|
-
function isPluginDatabaseManager(opt) {
|
|
1737
|
-
return opt.getClient !== void 0;
|
|
1738
|
-
}
|
|
1739
|
-
/**
 * Converts a SQL timestamp string (interpreted as UTC) to an ISO-8601
 * string via luxon. Non-string inputs pass through unchanged; unparseable
 * strings raise an Error with luxon's diagnostic details.
 */
const parseSqlDateToIsoString = (input) => {
  if (typeof input !== "string") {
    return input;
  }
  const parsed = luxon.DateTime.fromSQL(input, { zone: "UTC" });
  if (!parsed.isValid) {
    throw new Error(
      `Failed to parse database timestamp '${input}', ${parsed.invalidReason}: ${parsed.invalidExplanation}`
    );
  }
  return parsed.toISO();
};
|
|
1751
|
-
/**
 * Knex-backed TaskStore that persists scaffolder tasks, their secrets,
 * checkpoint state, serialized workspaces and event logs in the `tasks`
 * and `task_events` tables.
 *
 * Fix: completeTask previously read `task.status` BEFORE checking that the
 * row exists, so a missing task produced a TypeError instead of the intended
 * "No task with taskId … found" error. The guards are now ordered correctly.
 */
class DatabaseTaskStore {
  // Knex client used for all queries.
  db;
  /** Async factory: resolves the Knex client and applies migrations. */
  static async create(options) {
    const { database } = options;
    const client = await this.getClient(database);
    await this.runMigrations(database, client);
    return new DatabaseTaskStore(client);
  }
  // A task is recoverable when its spec opts into the "startOver" strategy.
  isRecoverableTask(spec) {
    return ["startOver"].includes(
      spec.EXPERIMENTAL_recovery?.EXPERIMENTAL_strategy ?? "none"
    );
  }
  /** Parses the JSON `spec` column, wrapping parse failures with the task id. */
  parseSpec({ spec, id }) {
    try {
      return JSON.parse(spec);
    } catch (error) {
      throw new Error(`Failed to parse spec of task '${id}', ${error}`);
    }
  }
  /** Parses the JSON `secrets` column; undefined when no secrets are stored. */
  parseTaskSecrets(taskRow) {
    try {
      return taskRow.secrets ? JSON.parse(taskRow.secrets) : void 0;
    } catch (error) {
      throw new Error(
        `Failed to parse secrets of task '${taskRow.id}', ${error}`
      );
    }
  }
  // Accepts either a PluginDatabaseManager or an already-resolved Knex client.
  static async getClient(database) {
    if (isPluginDatabaseManager(database)) {
      return database.getClient();
    }
    return database;
  }
  static async runMigrations(database, client) {
    if (!isPluginDatabaseManager(database)) {
      await client.migrate.latest({
        directory: migrationsDir
      });
      return;
    }
    // Managed databases may opt out of plugin-run migrations.
    if (!database.migrations?.skip) {
      await client.migrate.latest({
        directory: migrationsDir
      });
    }
  }
  constructor(client) {
    this.db = client;
  }
  /**
   * Lists tasks with optional createdBy/status filters, ordering and
   * pagination. Returns { tasks, totalTasks } where totalTasks ignores
   * pagination limits.
   */
  async list(options) {
    const { createdBy, status, pagination, order, filters } = options ?? {};
    const queryBuilder = this.db("tasks");
    if (createdBy || filters?.createdBy) {
      const arr = flattenParams(
        createdBy,
        filters?.createdBy
      );
      queryBuilder.whereIn("created_by", [...new Set(arr)]);
    }
    if (status || filters?.status) {
      const arr = flattenParams(
        status,
        filters?.status
      );
      queryBuilder.whereIn("status", [...new Set(arr)]);
    }
    if (order) {
      order.forEach((f) => {
        queryBuilder.orderBy(f.field, f.order);
      });
    } else {
      queryBuilder.orderBy("created_at", "desc");
    }
    // Clone before limit/offset so the count reflects the unpaginated set.
    const countQuery = queryBuilder.clone();
    countQuery.count("tasks.id", { as: "count" });
    if (pagination?.limit !== void 0) {
      queryBuilder.limit(pagination.limit);
    }
    if (pagination?.offset !== void 0) {
      queryBuilder.offset(pagination.offset);
    }
    const [results, [{ count }]] = await Promise.all([
      queryBuilder.select(),
      countQuery
    ]);
    const tasks = results.map((result) => ({
      id: result.id,
      spec: JSON.parse(result.spec),
      status: result.status,
      createdBy: result.created_by ?? void 0,
      lastHeartbeatAt: parseSqlDateToIsoString(result.last_heartbeat_at),
      createdAt: parseSqlDateToIsoString(result.created_at)
    }));
    return { tasks, totalTasks: count };
  }
  /** Loads a single task by id; throws NotFoundError when missing. */
  async getTask(taskId) {
    const [result] = await this.db("tasks").where({ id: taskId }).select();
    if (!result) {
      throw new errors.NotFoundError(`No task with id '${taskId}' found`);
    }
    try {
      const spec = JSON.parse(result.spec);
      const secrets = result.secrets ? JSON.parse(result.secrets) : void 0;
      const state = result.state ? JSON.parse(result.state).state : void 0;
      return {
        id: result.id,
        spec,
        status: result.status,
        lastHeartbeatAt: parseSqlDateToIsoString(result.last_heartbeat_at),
        createdAt: parseSqlDateToIsoString(result.created_at),
        createdBy: result.created_by ?? void 0,
        secrets,
        state
      };
    } catch (error) {
      throw new Error(`Failed to parse spec of task '${taskId}', ${error}`);
    }
  }
  /** Inserts a new "open" task and returns its generated id. */
  async createTask(options) {
    const taskId = uuid.v4();
    await this.db("tasks").insert({
      id: taskId,
      spec: JSON.stringify(options.spec),
      secrets: options.secrets ? JSON.stringify(options.secrets) : void 0,
      created_by: options.createdBy ?? null,
      status: "open"
    });
    return { taskId };
  }
  /**
   * Atomically claims one open task, moving it to "processing".
   * Returns undefined when nothing is claimable or another worker won the
   * conditional-update race.
   */
  async claimTask() {
    return this.db.transaction(async (tx) => {
      const [task] = await tx("tasks").where({
        status: "open"
      }).limit(1).select();
      if (!task) {
        return void 0;
      }
      const spec = this.parseSpec(task);
      const updateCount = await tx("tasks").where({ id: task.id, status: "open" }).update({
        status: "processing",
        last_heartbeat_at: this.db.fn.now(),
        // remove the secrets for non-recoverable tasks when moving to processing state.
        secrets: this.isRecoverableTask(spec) ? task.secrets : null
      });
      if (updateCount < 1) {
        // Another worker claimed it between the select and the update.
        return void 0;
      }
      const getState = () => {
        try {
          return task.state ? JSON.parse(task.state).state : void 0;
        } catch (error) {
          throw new Error(
            `Failed to parse state of the task '${task.id}', ${error}`
          );
        }
      };
      const secrets = this.parseTaskSecrets(task);
      return {
        id: task.id,
        spec,
        status: "processing",
        lastHeartbeatAt: task.last_heartbeat_at,
        createdAt: task.created_at,
        createdBy: task.created_by ?? void 0,
        secrets,
        state: getState()
      };
    });
  }
  /** Refreshes the heartbeat of a processing task; ConflictError otherwise. */
  async heartbeatTask(taskId) {
    const updateCount = await this.db("tasks").where({ id: taskId, status: "processing" }).update({
      last_heartbeat_at: this.db.fn.now()
    });
    if (updateCount === 0) {
      throw new errors.ConflictError(`No running task with taskId ${taskId} found`);
    }
  }
  /** Lists processing tasks whose heartbeat is older than timeoutS seconds. */
  async listStaleTasks(options) {
    const { timeoutS } = options;
    const heartbeatInterval = intervalFromNowTill(timeoutS, this.db);
    const rawRows = await this.db("tasks").where("status", "processing").andWhere("last_heartbeat_at", "<=", heartbeatInterval);
    const tasks = rawRows.map((row) => ({
      recovery: JSON.parse(row.spec).EXPERIMENTAL_recovery,
      taskId: row.id
    }));
    return { tasks };
  }
  /**
   * Transitions a task to failed/completed/cancelled, clearing its secrets
   * and recording a "completion" event — all inside one transaction.
   * Throws for any other target status, for a missing task, or when the
   * current status does not match the expected "processing".
   */
  async completeTask(options) {
    const { taskId, status, eventBody } = options;
    let oldStatus;
    if (["failed", "completed", "cancelled"].includes(status)) {
      oldStatus = "processing";
    } else {
      throw new Error(
        `Invalid status update of run '${taskId}' to status '${status}'`
      );
    }
    await this.db.transaction(async (tx) => {
      const [task] = await tx("tasks").where({
        id: taskId
      }).limit(1).select();
      const updateTask = async (criteria) => {
        const updateCount = await tx("tasks").where(criteria).update({
          status,
          secrets: null
        });
        if (updateCount !== 1) {
          throw new errors.ConflictError(
            `Failed to update status to '${status}' for taskId ${taskId}`
          );
        }
        await tx("task_events").insert({
          task_id: taskId,
          event_type: "completion",
          body: JSON.stringify(eventBody)
        });
      };
      if (status === "cancelled") {
        await updateTask({
          id: taskId
        });
        return;
      }
      // Bug fix: verify the row exists BEFORE dereferencing task.status.
      // The original read task.status first, throwing a TypeError instead
      // of this intended error when no task was found.
      if (!task) {
        throw new Error(`No task with taskId ${taskId} found`);
      }
      if (task.status === "cancelled") {
        // Already cancelled elsewhere; nothing further to record.
        return;
      }
      if (task.status !== oldStatus) {
        throw new errors.ConflictError(
          `Refusing to update status of run '${taskId}' to status '${status}' as it is currently '${task.status}', expected '${oldStatus}'`
        );
      }
      await updateTask({
        id: taskId,
        status: oldStatus
      });
    });
  }
  /** Appends a "log" event for the task. */
  async emitLogEvent(options) {
    const { taskId, body } = options;
    const serializedBody = JSON.stringify(body);
    await this.db("task_events").insert({
      task_id: taskId,
      event_type: "log",
      body: serializedBody
    });
  }
  /** Returns the raw persisted state wrapper for the task, if any. */
  async getTaskState({ taskId }) {
    // NOTE(review): throws a TypeError when the task row is missing —
    // presumably only called for existing tasks; confirm against callers.
    const [result] = await this.db("tasks").where({ id: taskId }).select("state");
    return result.state ? JSON.parse(result.state) : void 0;
  }
  /** Persists checkpoint state for the task (no-op when state is falsy). */
  async saveTaskState(options) {
    if (options.state) {
      const serializedState = JSON.stringify({ state: options.state });
      await this.db("tasks").where({ id: options.taskId }).update({
        state: serializedState
      });
    }
  }
  /**
   * Lists events for a task, optionally only those after a given event id
   * (completion events are always included), trimmed to the latest recovery.
   */
  async listEvents(options) {
    const { isTaskRecoverable, taskId, after } = options;
    const rawEvents = await this.db("task_events").where({
      task_id: taskId
    }).andWhere((builder) => {
      if (typeof after === "number") {
        builder.where("id", ">", after).orWhere("event_type", "completion");
      }
    }).orderBy("id").select();
    const events = rawEvents.map((event) => {
      try {
        const body = JSON.parse(event.body);
        return {
          id: Number(event.id),
          isTaskRecoverable,
          taskId,
          body,
          type: event.event_type,
          createdAt: parseSqlDateToIsoString(event.created_at)
        };
      } catch (error) {
        throw new Error(
          `Failed to parse event body from event taskId=${taskId} id=${event.id}, ${error}`
        );
      }
    });
    return trimEventsTillLastRecovery(events);
  }
  /** Fails a stale task, first failing any steps still marked processing. */
  async shutdownTask(options) {
    const { taskId } = options;
    const message = `This task was marked as stale as it exceeded its timeout`;
    const statusStepEvents = (await this.listEvents({ taskId })).events.filter(
      ({ body }) => body?.stepId
    );
    const completedSteps = statusStepEvents.filter(
      ({ body: { status } }) => status === "failed" || status === "completed"
    ).map((step) => step.body.stepId);
    // Steps that began processing but never reached a terminal status.
    const hungProcessingSteps = statusStepEvents.filter(({ body: { status } }) => status === "processing").map((event) => event.body.stepId).filter((step) => !completedSteps.includes(step));
    for (const step of hungProcessingSteps) {
      await this.emitLogEvent({
        taskId,
        body: {
          message,
          stepId: step,
          status: "failed"
        }
      });
    }
    await this.completeTask({
      taskId,
      status: "failed",
      eventBody: {
        message
      }
    });
  }
  /** Restores a previously serialized workspace to targetPath. */
  async rehydrateWorkspace(options) {
    const [result] = await this.db("tasks").where({ id: options.taskId }).select("workspace");
    await alpha.restoreWorkspace({
      path: options.targetPath,
      buffer: result.workspace
    });
  }
  /** Drops the serialized workspace blob for the task. */
  async cleanWorkspace({ taskId }) {
    await this.db("tasks").where({ id: taskId }).update({
      workspace: null
    });
  }
  /** Serializes the workspace at options.path into the task row. */
  async serializeWorkspace(options) {
    if (options.path) {
      const workspace = (await alpha.serializeWorkspace(options)).contents;
      await this.db("tasks").where({ id: options.taskId }).update({
        workspace
      });
    }
  }
  /** Records a "cancelled" event for the task (status change happens elsewhere). */
  async cancelTask(options) {
    const { taskId, body } = options;
    const serializedBody = JSON.stringify(body);
    await this.db("task_events").insert({
      task_id: taskId,
      event_type: "cancelled",
      body: serializedBody
    });
  }
  /**
   * Reopens a task for retry: clears terminal events and records a
   * "recovered" event carrying the spec's recovery strategy.
   */
  async retryTask(options) {
    await this.db.transaction(async (tx) => {
      const result = await tx("tasks").where("id", options.taskId).update(
        {
          status: "open",
          last_heartbeat_at: this.db.fn.now()
        },
        ["id", "spec"]
      );
      for (const { id, spec } of result) {
        const taskSpec = JSON.parse(spec);
        await tx("task_events").where("task_id", id).andWhere((q) => q.whereIn("event_type", ["cancelled", "completion"])).del();
        await tx("task_events").insert({
          task_id: id,
          event_type: "recovered",
          body: JSON.stringify({
            recoverStrategy: taskSpec.EXPERIMENTAL_recovery?.EXPERIMENTAL_strategy ?? "none"
          })
        });
      }
    });
  }
  /** Reopens all stale processing tasks; returns their ids. */
  async recoverTasks(options) {
    const taskIdsToRecover = [];
    const timeoutS = luxon.Duration.fromObject(options.timeout).as("seconds");
    await this.db.transaction(async (tx) => {
      const heartbeatInterval = intervalFromNowTill(timeoutS, this.db);
      const result = await tx("tasks").where("status", "processing").andWhere("last_heartbeat_at", "<=", heartbeatInterval).update(
        {
          status: "open",
          last_heartbeat_at: this.db.fn.now()
        },
        ["id", "spec"]
      );
      taskIdsToRecover.push(...result.map((i) => i.id));
      for (const { id, spec } of result) {
        const taskSpec = JSON.parse(spec);
        await tx("task_events").insert({
          task_id: id,
          event_type: "recovered",
          body: JSON.stringify({
            recoverStrategy: taskSpec.EXPERIMENTAL_recovery?.EXPERIMENTAL_strategy ?? "none"
          })
        });
      }
    });
    return { ids: taskIdsToRecover };
  }
}
|
|
2148
|
-
|
|
2149
|
-
/**
 * Truthiness helper used by template conditionals: arrays are truthy only
 * when non-empty; everything else follows normal JS truthiness.
 */
function isTruthy(value) {
  if (Array.isArray(value)) {
    return value.length > 0;
  }
  return Boolean(value);
}
|
|
2152
|
-
/**
 * Recursively produces a placeholder example value for a JSON schema:
 * the first entry of `examples` when present, otherwise a type-appropriate
 * stub ("<example>", 0, false, recursed object/array, or "<unknown>").
 */
function generateExampleOutput(schema) {
  const { examples } = schema;
  if (Array.isArray(examples)) {
    return examples[0];
  }
  switch (schema.type) {
    case "object":
      return Object.fromEntries(
        Object.entries(schema.properties ?? {}).map(([key, value]) => [
          key,
          generateExampleOutput(value)
        ])
      );
    case "array": {
      // items may be a single schema or a tuple of schemas; use the first.
      const [firstSchema] = [schema.items]?.flat();
      return firstSchema ? [generateExampleOutput(firstSchema)] : [];
    }
    case "string":
      return "<example>";
    case "number":
      return 0;
    case "boolean":
      return false;
    default:
      return "<unknown>";
  }
}
|
|
2179
|
-
/**
 * Reads a duration value from config under `key`, falling back to
 * defaultValue when the config (or the key) is absent.
 */
const readDuration$1 = (config$1, key, defaultValue) => {
  if (!config$1?.has(key)) {
    return defaultValue;
  }
  return config.readDurationFromConfig(config$1, { key });
};
|
|
2185
|
-
|
|
2186
|
-
/**
 * WorkspaceProvider that delegates workspace serialization to the task
 * store. Each operation is optional on the store, so calls are no-ops when
 * the underlying store lacks the capability.
 */
class DatabaseWorkspaceProvider {
  static create(taskStore) {
    return new DatabaseWorkspaceProvider(taskStore);
  }
  constructor(taskStore) {
    this.storage = taskStore;
  }
  async serializeWorkspace(opts) {
    await this.storage.serializeWorkspace?.(opts);
  }
  async rehydrateWorkspace(opts) {
    return this.storage.rehydrateWorkspace?.(opts);
  }
  async cleanWorkspace(opts) {
    return this.storage.cleanWorkspace?.(opts);
  }
}
|
|
2203
|
-
|
|
2204
|
-
/**
 * Workspace service guarding all operations behind the experimental
 * `scaffolder.EXPERIMENTAL_workspaceSerialization` flag. The provider is
 * chosen by name from additionalWorkspaceProviders, defaulting to the
 * database-backed one.
 */
class DefaultWorkspaceService {
  static create(task, storage, additionalWorkspaceProviders, config) {
    const providerName = config?.getOptionalString(
      "scaffolder.EXPERIMENTAL_workspaceSerializationProvider"
    ) ?? "database";
    const provider = additionalWorkspaceProviders?.[providerName] ?? DatabaseWorkspaceProvider.create(storage);
    return new DefaultWorkspaceService(task, provider, config);
  }
  constructor(task, workspaceProvider, config) {
    this.task = task;
    this.workspaceProvider = workspaceProvider;
    this.config = config;
  }
  // Serialization is opt-in; absent config means disabled.
  isWorkspaceSerializationEnabled() {
    const enabled = this.config?.getOptionalBoolean(
      "scaffolder.EXPERIMENTAL_workspaceSerialization"
    );
    return enabled ?? false;
  }
  async serializeWorkspace(options) {
    if (!this.isWorkspaceSerializationEnabled()) {
      return;
    }
    await this.workspaceProvider.serializeWorkspace({
      path: options.path,
      taskId: this.task.taskId
    });
  }
  async cleanWorkspace() {
    if (!this.isWorkspaceSerializationEnabled()) {
      return;
    }
    await this.workspaceProvider.cleanWorkspace({ taskId: this.task.taskId });
  }
  async rehydrateWorkspace(options) {
    if (!this.isWorkspaceSerializationEnabled()) {
      return;
    }
    // Ensure the target directory exists before the provider writes into it.
    await fs__default.default.mkdirp(options.targetPath);
    await this.workspaceProvider.rehydrateWorkspace(options);
  }
}
|
|
2242
|
-
|
|
2243
|
-
/**
 * Per-task execution context handed to the scaffolder task runner.
 * Wraps the task row, the task store, the workspace service and the abort
 * signal, and keeps the task's heartbeat alive on a timer until completion
 * or heartbeat failure.
 */
class TaskManager {
  // Runs heartbeat internally
  constructor(task, storage, signal, logger, workspaceService, auth) {
    this.task = task;
    this.storage = storage;
    this.signal = signal;
    this.logger = logger;
    this.workspaceService = workspaceService;
    this.auth = auth;
  }
  // Set when the task reaches a terminal state or the heartbeat fails.
  isDone = false;
  // Timer handle for the recurring heartbeat; cleared on completion.
  heartbeatTimeoutId;
  /**
   * Factory that wires up the workspace service and immediately starts the
   * heartbeat loop for the new manager.
   */
  static create(task, storage, abortSignal, logger, auth, config, additionalWorkspaceProviders) {
    const workspaceService = DefaultWorkspaceService.create(
      task,
      storage,
      additionalWorkspaceProviders,
      config
    );
    const agent = new TaskManager(
      task,
      storage,
      abortSignal,
      logger,
      workspaceService,
      auth
    );
    agent.startTimeout();
    return agent;
  }
  get spec() {
    return this.task.spec;
  }
  get cancelSignal() {
    return this.signal;
  }
  get secrets() {
    return this.task.secrets;
  }
  get createdBy() {
    return this.task.createdBy;
  }
  // The workspace is keyed by task id.
  async getWorkspaceName() {
    return this.task.taskId;
  }
  async rehydrateWorkspace(options) {
    await this.workspaceService.rehydrateWorkspace(options);
  }
  get done() {
    return this.isDone;
  }
  /** Appends a log event for this task, merging any extra metadata. */
  async emitLog(message, logMetadata) {
    await this.storage.emitLogEvent({
      taskId: this.task.taskId,
      body: { message, ...logMetadata }
    });
  }
  // Optional on the store: returns undefined when unsupported.
  async getTaskState() {
    return this.storage.getTaskState?.({ taskId: this.task.taskId });
  }
  /**
   * Records a checkpoint under `key` in the in-memory task state and
   * persists the whole state (when the store supports it).
   */
  async updateCheckpoint(options) {
    const { key, ...value } = options;
    if (this.task.state) {
      this.task.state.checkpoints[key] = value;
    } else {
      this.task.state = { checkpoints: { [key]: value } };
    }
    await this.storage.saveTaskState?.({
      taskId: this.task.taskId,
      state: this.task.state
    });
  }
  async serializeWorkspace(options) {
    await this.workspaceService.serializeWorkspace(options);
  }
  async cleanWorkspace() {
    await this.workspaceService.cleanWorkspace();
  }
  /**
   * Marks the task completed or failed, stops the heartbeat timer, and
   * flags this manager as done.
   */
  async complete(result, metadata) {
    await this.storage.completeTask({
      taskId: this.task.taskId,
      status: result === "failed" ? "failed" : "completed",
      eventBody: {
        message: `Run completed with status: ${result}`,
        ...metadata
      }
    });
    this.isDone = true;
    if (this.heartbeatTimeoutId) {
      clearTimeout(this.heartbeatTimeoutId);
    }
  }
  /**
   * Schedules the next heartbeat one second out; reschedules itself on
   * success, and on failure marks the manager done so the runner stops.
   */
  startTimeout() {
    this.heartbeatTimeoutId = setTimeout(async () => {
      try {
        await this.storage.heartbeatTask(this.task.taskId);
        this.startTimeout();
      } catch (error) {
        this.isDone = true;
        this.logger.error(
          `Heartbeat for task ${this.task.taskId} failed`,
          error
        );
      }
    }, 1e3);
  }
  /**
   * Returns the credentials of the user that started the task when they
   * were captured in the task secrets; otherwise falls back to "none"
   * credentials from the auth service. Throws when no auth service was
   * provided to fall back to.
   */
  async getInitiatorCredentials() {
    const secrets = this.task.secrets;
    if (secrets && secrets.__initiatorCredentials) {
      return JSON.parse(secrets.__initiatorCredentials);
    }
    if (!this.auth) {
      throw new Error(
        "Failed to create none credentials in scaffolder task. The TaskManager has not been initialized with an auth service implementation"
      );
    }
    return this.auth.getNoneCredentials();
  }
}
|
|
2362
|
-
/**
 * Creates a deferred: a promise paired with its externally callable
 * resolve function. Used by the broker to let waiting workers be woken
 * up when a new task is dispatched.
 *
 * @returns {{promise: Promise<void>, resolve: () => void}}
 */
function defer() {
  const deferred = {};
  // The Promise executor runs synchronously, so `resolve` is always
  // assigned before the object is returned.
  deferred.promise = new Promise((res) => {
    deferred.resolve = res;
  });
  return { promise: deferred.promise, resolve: deferred.resolve };
}
|
|
2370
|
-
/**
 * A TaskBroker backed by a TaskStore. Workers claim tasks from storage,
 * and task events are surfaced by polling the store once per second.
 */
class StorageTaskBroker {
  constructor(storage, logger, config, auth, additionalWorkspaceProviders) {
    this.storage = storage;
    this.logger = logger;
    this.config = config;
    this.auth = auth;
    this.additionalWorkspaceProviders = additionalWorkspaceProviders;
  }
  // Lists tasks via the underlying store; the store's `list` is optional,
  // so a missing implementation is surfaced as an explicit error.
  async list(options) {
    if (!this.storage.list) {
      throw new Error(
        "TaskStore does not implement the list method. Please implement the list method to be able to list tasks"
      );
    }
    return await this.storage.list(options ?? {});
  }
  // Deferred that `claim()` awaits while the queue is empty; replaced on
  // every dispatch signal (see signalDispatch).
  deferredDispatch = defer();
  // Subscribes to the task's event stream and aborts the controller when a
  // "cancelled" event arrives. Unsubscribes on cancellation, on stream
  // error, or on "completion" of a non-recoverable task.
  async registerCancellable(taskId, abortController) {
    let shouldUnsubscribe = false;
    const subscription = this.event$({ taskId, after: void 0 }).subscribe({
      error: (_) => {
        subscription.unsubscribe();
      },
      next: ({ events }) => {
        for (const event of events) {
          if (event.type === "cancelled") {
            abortController.abort();
            shouldUnsubscribe = true;
          }
          if (event.type === "completion" && !event.isTaskRecoverable) {
            shouldUnsubscribe = true;
          }
        }
        if (shouldUnsubscribe) {
          subscription.unsubscribe();
        }
      }
    });
  }
  // Experimental: re-queues stale tasks when
  // `scaffolder.EXPERIMENTAL_recoverTasks` is enabled, then wakes waiting
  // workers if anything was recovered. `recoverTasks` on the store is
  // optional (falls back to an empty id list).
  async recoverTasks() {
    const enabled = (this.config && this.config.getOptionalBoolean(
      "scaffolder.EXPERIMENTAL_recoverTasks"
    )) ?? false;
    if (enabled) {
      const defaultTimeout = { seconds: 30 };
      const timeout = readDuration$1(
        this.config,
        "scaffolder.EXPERIMENTAL_recoverTasksTimeout",
        defaultTimeout
      );
      const { ids: recoveredTaskIds } = await this.storage.recoverTasks?.({
        timeout
      }) ?? { ids: [] };
      if (recoveredTaskIds.length > 0) {
        this.signalDispatch();
      }
    }
  }
  /**
   * {@inheritdoc TaskBroker.claim}
   */
  async claim() {
    // Loops forever: claim a pending task if one exists, otherwise block
    // until the next dispatch signal and try again.
    for (; ; ) {
      const pendingTask = await this.storage.claimTask();
      if (pendingTask) {
        const abortController = new AbortController();
        await this.registerCancellable(pendingTask.id, abortController);
        return TaskManager.create(
          {
            taskId: pendingTask.id,
            spec: pendingTask.spec,
            secrets: pendingTask.secrets,
            createdBy: pendingTask.createdBy,
            state: pendingTask.state
          },
          this.storage,
          abortController.signal,
          this.logger,
          this.auth,
          this.config,
          this.additionalWorkspaceProviders
        );
      }
      await this.waitForDispatch();
    }
  }
  /**
   * {@inheritdoc TaskBroker.dispatch}
   */
  async dispatch(options) {
    const taskRow = await this.storage.createTask(options);
    this.signalDispatch();
    return {
      taskId: taskRow.taskId
    };
  }
  /**
   * {@inheritdoc TaskBroker.get}
   */
  async get(taskId) {
    return this.storage.getTask(taskId);
  }
  /**
   * {@inheritdoc TaskBroker.event$}
   */
  event$(options) {
    // Polls the store for new events every second until the returned
    // teardown function flips `cancelled`. The async IIFE's rejection is
    // not handled here -- NOTE(review): a storage failure inside the loop
    // would surface as an unhandled rejection; confirm upstream handling.
    return new ObservableImpl__default.default((observer) => {
      const { taskId } = options;
      let after = options.after;
      let cancelled = false;
      (async () => {
        const task = await this.storage.getTask(taskId);
        const isTaskRecoverable = task.spec.EXPERIMENTAL_recovery?.EXPERIMENTAL_strategy === "startOver";
        while (!cancelled) {
          const result = await this.storage.listEvents({
            isTaskRecoverable,
            taskId,
            after
          });
          const { events } = result;
          if (events.length) {
            // Advance the cursor so only newer events are fetched next time.
            after = events[events.length - 1].id;
            observer.next(result);
          }
          await new Promise((resolve) => setTimeout(resolve, 1e3));
        }
      })();
      return () => {
        cancelled = true;
      };
    });
  }
  /**
   * {@inheritdoc TaskBroker.vacuumTasks}
   */
  async vacuumTasks(options) {
    // Marks all stale tasks as failed; individual failures are logged and
    // do not stop the remaining tasks from being processed.
    const { tasks } = await this.storage.listStaleTasks(options);
    await Promise.all(
      tasks.map(async (task) => {
        try {
          await this.storage.completeTask({
            taskId: task.taskId,
            status: "failed",
            eventBody: {
              message: "The task was cancelled because the task worker lost connection to the task broker"
            }
          });
        } catch (error) {
          this.logger.warn(`Failed to cancel task '${task.taskId}', ${error}`);
        }
      })
    );
  }
  waitForDispatch() {
    return this.deferredDispatch.promise;
  }
  // Resolves the current deferred (waking every waiting claim()) and
  // immediately installs a fresh one for the next round.
  signalDispatch() {
    this.deferredDispatch.resolve();
    this.deferredDispatch = defer();
  }
  async cancel(taskId) {
    // Finds the latest event that carries a stepId so the cancellation
    // message can reference the step that was running.
    // NOTE(review): if events exist but none has a body.stepId, the
    // reduce over the empty filtered array throws a TypeError -- confirm
    // whether that can occur in practice.
    const { events } = await this.storage.listEvents({ taskId });
    const currentStepId = events.length > 0 ? events.filter(({ body }) => body?.stepId).reduce((prev, curr) => prev.id > curr.id ? prev : curr).body.stepId : 0;
    await this.storage.cancelTask?.({
      taskId,
      body: {
        message: `Step ${currentStepId} has been cancelled.`,
        stepId: currentStepId,
        status: "cancelled"
      }
    });
  }
  async retry(taskId) {
    await this.storage.retryTask?.({ taskId });
    this.signalDispatch();
  }
}
|
|
2547
|
-
|
|
2548
|
-
/**
 * Returns the prom-client Counter registered under `config.name`,
 * creating and registering it on first use so repeated calls share a
 * single metric instance.
 */
function createCounterMetric(config) {
  const existing = promClient.register.getSingleMetric(config.name);
  if (existing) {
    return existing;
  }
  const counter = new promClient.Counter(config);
  promClient.register.registerMetric(counter);
  return counter;
}
|
|
2556
|
-
/**
 * Returns the prom-client Histogram registered under `config.name`,
 * creating and registering it on first use so repeated calls share a
 * single metric instance.
 */
function createHistogramMetric(config) {
  const existing = promClient.register.getSingleMetric(config.name);
  if (existing) {
    return existing;
  }
  const histogram = new promClient.Histogram(config);
  promClient.register.registerMetric(histogram);
  return histogram;
}
|
|
2564
|
-
|
|
2565
|
-
// Permission rules for scaffolder templates and actions. Each rule's
// `apply` runs against an in-memory resource, so `toQuery` is a no-op.
const createTemplatePermissionRule = pluginPermissionNode.makeCreatePermissionRule();
// Matches template parameters/steps that carry the given tag under the
// `backstage:permissions` annotation; missing annotations match nothing.
const hasTag = createTemplatePermissionRule({
  name: "HAS_TAG",
  resourceType: alpha$1.RESOURCE_TYPE_SCAFFOLDER_TEMPLATE,
  description: `Match parameters or steps with the given tag`,
  paramsSchema: zod.z.object({
    tag: zod.z.string().describe("Name of the tag to match on")
  }),
  apply: (resource, { tag }) => {
    return resource["backstage:permissions"]?.tags?.includes(tag) ?? false;
  },
  toQuery: () => ({})
});
const createActionPermissionRule = pluginPermissionNode.makeCreatePermissionRule();
// Matches an action invocation by its exact action id.
const hasActionId = createActionPermissionRule({
  name: "HAS_ACTION_ID",
  resourceType: alpha$1.RESOURCE_TYPE_SCAFFOLDER_ACTION,
  description: `Match actions with the given actionId`,
  paramsSchema: zod.z.object({
    actionId: zod.z.string().describe("Name of the actionId to match on")
  }),
  apply: (resource, { actionId }) => {
    return resource.action === actionId;
  },
  toQuery: () => ({})
});
// NOTE(review): the HAS_PROPERTY rule built here is discarded -- it is not
// stored in a variable and is absent from `scaffolderActionRules` below.
// Presumably the original (pre-bundling) source exports it elsewhere;
// confirm against the package's other entry points before relying on it.
buildHasProperty({
  name: "HAS_PROPERTY",
  valueSchema: zod.z.union([zod.z.string(), zod.z.number(), zod.z.boolean(), zod.z.null()]),
  validateProperty: false
});
const hasBooleanProperty = buildHasProperty({
  name: "HAS_BOOLEAN_PROPERTY",
  valueSchema: zod.z.boolean()
});
const hasNumberProperty = buildHasProperty({
  name: "HAS_NUMBER_PROPERTY",
  valueSchema: zod.z.number()
});
const hasStringProperty = buildHasProperty({
  name: "HAS_STRING_PROPERTY",
  valueSchema: zod.z.string()
});
/**
 * Builds an action permission rule that matches when the property at
 * `key` (a lodash path into the action's input) exists -- and, when a
 * `value` param is supplied, strictly equals it.
 *
 * When `validateProperty` is true, the found value must itself satisfy
 * `valueSchema`, which is how the typed HAS_*_PROPERTY variants constrain
 * the property's type.
 */
function buildHasProperty({
  name,
  valueSchema,
  validateProperty = true
}) {
  return createActionPermissionRule({
    name,
    description: `Allow actions with the specified property`,
    resourceType: alpha$1.RESOURCE_TYPE_SCAFFOLDER_ACTION,
    paramsSchema: zod.z.object({
      key: zod.z.string().describe(`Property within the action parameters to match on`),
      value: valueSchema.optional().describe(`Value of the given property to match on`)
    }),
    apply: (resource, { key, value }) => {
      const foundValue = lodash.get(resource.input, key);
      if (validateProperty && !valueSchema.safeParse(foundValue).success) {
        return false;
      }
      if (value !== void 0) {
        // A requested value that fails schema validation can never match.
        if (valueSchema.safeParse(value).success) {
          return value === foundValue;
        }
        return false;
      }
      // No value requested: mere presence of the property is enough.
      return foundValue !== void 0;
    },
    toQuery: () => ({})
  });
}
const scaffolderTemplateRules = { hasTag };
const scaffolderActionRules = {
  hasActionId,
  hasBooleanProperty,
  hasNumberProperty,
  hasStringProperty
};
|
|
2644
|
-
|
|
2645
|
-
/**
 * Escapes every regular-expression metacharacter in `text` so the result
 * matches the input literally when embedded in a RegExp.
 */
const escapeRegExp = (text) => {
  const metaChars = /[.*+?^${}(\)|[\]\\]/g;
  return text.replace(metaChars, String.raw`\$&`);
};
|
|
2648
|
-
/**
 * A winston Transport that forwards each log entry both to a Backstage
 * logger (at the matching level) and to the task's event log via
 * `taskContext.emitLog`, tagged with the current step id.
 */
class BackstageLoggerTransport extends Transport__default.default {
  constructor(backstageLogger, taskContext, stepId, opts) {
    super(opts);
    this.backstageLogger = backstageLogger;
    this.taskContext = taskContext;
    this.stepId = stepId;
  }
  log(info, callback) {
    // Non-object entries cannot carry the triple-beam symbols; skip them.
    if (typeof info !== "object" || info === null) {
      callback();
      return;
    }
    const message = info[tripleBeam.MESSAGE];
    const level = info[tripleBeam.LEVEL];
    const splat = info[tripleBeam.SPLAT];
    // NOTE(review): `...splat` assumes SPLAT is always an iterable on
    // these entries; spreading undefined would throw -- confirm winston
    // always populates it for this pipeline.
    switch (level) {
      case "error":
        this.backstageLogger.error(String(message), ...splat);
        break;
      case "warn":
        this.backstageLogger.warn(String(message), ...splat);
        break;
      case "info":
        this.backstageLogger.info(String(message), ...splat);
        break;
      case "debug":
        this.backstageLogger.debug(String(message), ...splat);
        break;
      default:
        // Unknown levels fall back to info rather than being dropped.
        this.backstageLogger.info(String(message), ...splat);
    }
    // Fire-and-forget: emitLog's promise is intentionally not awaited so
    // logging never blocks the transport; failures are not surfaced here.
    this.taskContext.emitLog(message, { stepId: this.stepId });
    callback();
  }
}
|
|
2683
|
-
/**
 * A Backstage-style logger wrapping a winston logger, with support for
 * redacting registered secret strings from every formatted message.
 */
class WinstonLogger {
  #winston;
  #addRedactions;
  /**
   * Creates a {@link WinstonLogger} instance.
   */
  static create(options) {
    const redacter = WinstonLogger.redacter();
    let logger = winston.createLogger({
      level: options.level,
      // The redaction format runs after the caller-supplied format so it
      // operates on the final rendered MESSAGE.
      format: winston.format.combine(options.format, redacter.format),
      transports: options.transports ?? new winston.transports.Console()
    });
    if (options.meta) {
      logger = logger.child(options.meta);
    }
    return new WinstonLogger(logger, redacter.add);
  }
  /**
   * Creates a winston log formatter for redacting secrets.
   */
  static redacter() {
    // Shared closure state: the set of secrets seen so far and the
    // compiled alternation pattern derived from it.
    const redactionSet = /* @__PURE__ */ new Set();
    let redactionPattern = void 0;
    return {
      format: winston.format((obj) => {
        if (!redactionPattern || !obj) {
          return obj;
        }
        obj[tripleBeam.MESSAGE] = obj[tripleBeam.MESSAGE]?.replace?.(redactionPattern, "***");
        return obj;
      })(),
      add(newRedactions) {
        let added = 0;
        for (const redactionToTrim of newRedactions) {
          const redaction = redactionToTrim.trim();
          // Single characters would redact far too aggressively; skip them.
          if (redaction.length <= 1) {
            continue;
          }
          if (!redactionSet.has(redaction)) {
            redactionSet.add(redaction);
            added += 1;
          }
        }
        // Only recompile the pattern when the set actually changed.
        if (added > 0) {
          const redactions = Array.from(redactionSet).map((r) => escapeRegExp(r)).join("|");
          redactionPattern = new RegExp(`(${redactions})`, "g");
        }
      }
    };
  }
  /**
   * Creates a pretty printed winston log formatter.
   */
  static colorFormat() {
    const colorizer = winston.format.colorize();
    return winston.format.combine(
      winston.format.timestamp(),
      winston.format.colorize({
        colors: {
          timestamp: "dim",
          prefix: "blue",
          field: "cyan",
          debug: "grey"
        }
      }),
      winston.format.printf((info) => {
        const { timestamp, plugin, service } = info;
        const message = info[tripleBeam.MESSAGE];
        const level = info[tripleBeam.LEVEL];
        const fields = info[tripleBeam.SPLAT];
        const prefix = plugin || service;
        const timestampColor = colorizer.colorize("timestamp", timestamp);
        const prefixColor = colorizer.colorize("prefix", prefix);
        const extraFields = Object.entries(fields).map(
          ([key, value]) => `${colorizer.colorize("field", `${key}`)}=${value}`
        ).join(" ");
        return `${timestampColor} ${prefixColor} ${level} ${message} ${extraFields}`;
      })
    );
  }
  // Note: the `winston` parameter shadows the module-level winston import
  // inside this constructor.
  constructor(winston, addRedactions) {
    this.#winston = winston;
    this.#addRedactions = addRedactions;
  }
  error(message, meta) {
    this.#winston.error(message, meta);
  }
  warn(message, meta) {
    this.#winston.warn(message, meta);
  }
  info(message, meta) {
    this.#winston.info(message, meta);
  }
  debug(message, meta) {
    this.#winston.debug(message, meta);
  }
  child(meta) {
    // NOTE(review): the child is constructed without passing
    // #addRedactions, so addRedactions() on a child is a silent no-op
    // (see the optional call below) -- confirm whether that is intended.
    return new WinstonLogger(this.#winston.child(meta));
  }
  addRedactions(redactions) {
    this.#addRedactions?.(redactions);
  }
}
|
|
2787
|
-
|
|
2788
|
-
// A task spec is only runnable by this workflow engine when it targets
// the v1beta3 scaffolder API version.
const isValidTaskSpec = (taskSpec) => {
  const SUPPORTED_API_VERSION = "scaffolder.backstage.io/v1beta3";
  return taskSpec.apiVersion === SUPPORTED_API_VERSION;
};
|
|
2791
|
-
/**
 * Builds the per-step loggers: a WinstonLogger that mirrors output to the
 * root logger and the task event log (via BackstageLoggerTransport), plus
 * a PassThrough stream whose chunks are forwarded line-wise to that
 * logger. Task secret values are registered as redactions up front.
 */
const createStepLogger = ({
  task,
  step,
  rootLogger
}) => {
  const taskLogger = WinstonLogger.create({
    level: process.env.LOG_LEVEL || "info",
    format: winston__namespace.format.combine(
      winston__namespace.format.colorize(),
      winston__namespace.format.simple()
    ),
    transports: [new BackstageLoggerTransport(rootLogger, task, step.id)]
  });
  // Every secret value from the task is masked in subsequent log output.
  taskLogger.addRedactions(Object.values(task.secrets ?? {}));
  const streamLogger = new stream.PassThrough();
  streamLogger.on("data", async (data) => {
    const message = data.toString().trim();
    // Skip empty / single-character chunks (e.g. bare newlines).
    if (message?.length > 1) {
      taskLogger.info(message);
    }
  });
  return { taskLogger, streamLogger };
};
|
|
2814
|
-
// Conditional-decision evaluator over all scaffolder action permission
// rules; used per action invocation in NunjucksWorkflowRunner.executeStep.
const isActionAuthorized = pluginPermissionNode.createConditionAuthorizer(
  Object.values(scaffolderActionRules)
);
|
|
2817
|
-
/**
 * Executes v1beta3 task specs step by step, templating step inputs with
 * Nunjucks (`${{ ... }}` syntax) inside a SecureTemplater sandbox, and
 * enforcing per-action permission decisions.
 */
class NunjucksWorkflowRunner {
  constructor(options) {
    this.options = options;
    this.defaultTemplateFilters = createDefaultFilters({
      integrations: this.options.integrations
    });
  }
  defaultTemplateFilters;
  tracker = scaffoldingTracker();
  // Returns true when `input` parses to exactly one Nunjucks node that is
  // a template expression (not literal text) -- i.e. the whole string is a
  // single `${{ ... }}` expression whose evaluated value (of any type)
  // should replace the string.
  isSingleTemplateString(input) {
    const { parser, nodes } = nunjucks__default.default;
    const parsed = parser.parse(
      input,
      {},
      {
        autoescape: false,
        tags: {
          variableStart: "${{",
          variableEnd: "}}"
        }
      }
    );
    return parsed.children.length === 1 && !(parsed.children[0]?.children?.[0] instanceof nodes.TemplateData);
  }
  // Recursively templates every string in `input` via a JSON round-trip.
  // Note: the JSON.stringify pass drops values JSON cannot represent
  // (undefined, functions) before templating. Empty render results map to
  // undefined so the key disappears from the revived object.
  render(input, context, renderTemplate) {
    return JSON.parse(JSON.stringify(input), (_key, value) => {
      try {
        if (typeof value === "string") {
          try {
            if (this.isSingleTemplateString(value)) {
              // Wrap the expression in `| dump` so the rendered output is
              // JSON and the original value's type is preserved on parse.
              const wrappedDumped = value.replace(
                /\${{(.+)}}/g,
                "${{ ( $1 ) | dump }}"
              );
              const templated2 = renderTemplate(wrappedDumped, context);
              if (templated2 === "") {
                return void 0;
              }
              return JSON.parse(templated2);
            }
          } catch (ex) {
            this.options.logger.error(
              `Failed to parse template string: ${value} with error ${ex.message}`
            );
          }
          // Fallback: plain string interpolation for mixed text/templates.
          const templated = renderTemplate(value, context);
          if (templated === "") {
            return void 0;
          }
          return templated;
        }
      } catch {
        return value;
      }
      return value;
    });
  }
  // Runs one step: evaluates `if`, handles dry-run, expands `each`
  // iterations, validates and authorizes each iteration's input, then
  // invokes the action handler per iteration.
  async executeStep(task, step, context, renderTemplate, taskTrack, workspacePath, decision) {
    const stepTrack = await this.tracker.stepStart(task, step);
    if (task.cancelSignal.aborted) {
      throw new Error(`Step ${step.name} has been cancelled.`);
    }
    try {
      if (step.if) {
        const ifResult = this.render(step.if, context, renderTemplate);
        if (!isTruthy(ifResult)) {
          await stepTrack.skipFalsy();
          return;
        }
      }
      const action = this.options.actionRegistry.get(step.action);
      const { taskLogger, streamLogger } = createStepLogger({
        task,
        step,
        rootLogger: this.options.logger
      });
      if (task.isDryRun) {
        // Log the templated input with every secret value replaced by ***.
        const redactedSecrets = Object.fromEntries(
          Object.entries(task.secrets ?? {}).map((secret) => [secret[0], "***"])
        );
        const debugInput = (step.input && this.render(
          step.input,
          {
            ...context,
            secrets: redactedSecrets
          },
          renderTemplate
        )) ?? {};
        taskLogger.info(
          `Running ${action.id} in dry-run mode with inputs (secrets redacted): ${JSON.stringify(
            debugInput,
            void 0,
            2
          )}`
        );
        if (!action.supportsDryRun) {
          // Skip execution but fabricate plausible output from the
          // action's output schema so later steps can still template.
          await taskTrack.skipDryRun(step, action);
          const outputSchema = action.schema?.output;
          if (outputSchema) {
            context.steps[step.id] = {
              output: generateExampleOutput(outputSchema)
            };
          } else {
            context.steps[step.id] = { output: {} };
          }
          return;
        }
      }
      // With `each`, one iteration per entry; otherwise a single iteration.
      const iterations = (step.each ? Object.entries(this.render(step.each, context, renderTemplate)).map(
        ([key, value]) => ({
          each: { key, value }
        })
      ) : [{}]).map((i) => ({
        ...i,
        // Secrets are only passed when templating the input to actions for security reasons
        input: step.input ? this.render(
          step.input,
          { ...context, secrets: task.secrets ?? {}, ...i },
          renderTemplate
        ) : {}
      }));
      // Validate and authorize every iteration before executing any.
      for (const iteration of iterations) {
        const actionId = `${action.id}${iteration.each ? `[${iteration.each.key}]` : ""}`;
        if (action.schema?.input) {
          const validateResult = jsonschema.validate(
            iteration.input,
            action.schema.input
          );
          if (!validateResult.valid) {
            const errors$1 = validateResult.errors.join(", ");
            throw new errors.InputError(
              `Invalid input passed to action ${actionId}, ${errors$1}`
            );
          }
        }
        if (!isActionAuthorized(decision, {
          action: action.id,
          input: iteration.input
        })) {
          throw new errors.NotAllowedError(
            `Unauthorized action: ${actionId}. The action is not allowed. Input: ${JSON.stringify(
              iteration.input,
              null,
              2
            )}`
          );
        }
      }
      const tmpDirs = new Array();
      const stepOutput = {};
      const prevTaskState = await task.getTaskState?.();
      for (const iteration of iterations) {
        if (iteration.each) {
          taskLogger.info(
            `Running step each: ${JSON.stringify(
              iteration.each,
              (k, v) => k ? v.toString() : v,
              0
            )}`
          );
        }
        await action.handler({
          input: iteration.input,
          secrets: task.secrets ?? {},
          // TODO(blam): move to LoggerService and away from Winston
          logger: backendCommon.loggerToWinstonLogger(taskLogger),
          logStream: streamLogger,
          workspacePath,
          // Checkpoint helper: reuse a previously successful value for the
          // same key, otherwise compute it via fn() and persist the result.
          async checkpoint(keySuffix, fn) {
            const key = `v1.task.checkpoint.${step.id}.${keySuffix}`;
            try {
              let prevValue;
              if (prevTaskState) {
                const prevState = prevTaskState.state?.checkpoints?.[key];
                if (prevState && prevState.status === "success") {
                  prevValue = prevState.value;
                }
              }
              // NOTE(review): the truthiness check means a cached falsy
              // value (0, "", false) re-runs fn() -- confirm intended.
              const value = prevValue ? prevValue : await fn();
              if (!prevValue) {
                // Fire-and-forget: updateCheckpoint is not awaited here.
                task.updateCheckpoint?.({
                  key,
                  status: "success",
                  value
                });
              }
              return value;
            } catch (err) {
              task.updateCheckpoint?.({
                key,
                status: "failed",
                reason: errors.stringifyError(err)
              });
              throw err;
            } finally {
              await task.serializeWorkspace?.({ path: workspacePath });
            }
          },
          createTemporaryDirectory: async () => {
            const tmpDir = await fs__default.default.mkdtemp(
              `${workspacePath}_step-${step.id}-`
            );
            tmpDirs.push(tmpDir);
            return tmpDir;
          },
          // Outputs from an `each` step are collected into arrays, one
          // entry per iteration; otherwise the last write wins.
          output(name, value) {
            if (step.each) {
              stepOutput[name] = stepOutput[name] || [];
              stepOutput[name].push(value);
            } else {
              stepOutput[name] = value;
            }
          },
          templateInfo: task.spec.templateInfo,
          user: task.spec.user,
          isDryRun: task.isDryRun,
          signal: task.cancelSignal,
          getInitiatorCredentials: () => task.getInitiatorCredentials()
        });
      }
      // Temp dirs are only cleaned up when every iteration succeeded; a
      // thrown handler skips this loop.
      for (const tmpDir of tmpDirs) {
        await fs__default.default.remove(tmpDir);
      }
      context.steps[step.id] = { output: stepOutput };
      if (task.cancelSignal.aborted) {
        throw new Error(`Step ${step.name} has been cancelled.`);
      }
      await stepTrack.markSuccessful();
    } catch (err) {
      await taskTrack.markFailed(step, err);
      await stepTrack.markFailed();
      throw err;
    } finally {
      await task.serializeWorkspace?.({ path: workspacePath });
    }
  }
  // Entry point: validates the spec version, prepares the sandboxed
  // renderer and workspace, authorizes action execution once for the whole
  // task, runs every step, and renders the task's final output. The
  // workspace directory is removed unconditionally on exit.
  async execute(task) {
    if (!isValidTaskSpec(task.spec)) {
      throw new errors.InputError(
        "Wrong template version executed with the workflow engine"
      );
    }
    const taskId = await task.getWorkspaceName();
    const workspacePath = path__default.default.join(this.options.workingDirectory, taskId);
    const { additionalTemplateFilters, additionalTemplateGlobals } = this.options;
    const renderTemplate = await SecureTemplater.loadRenderer({
      templateFilters: {
        ...this.defaultTemplateFilters,
        ...additionalTemplateFilters
      },
      templateGlobals: additionalTemplateGlobals
    });
    try {
      await task.rehydrateWorkspace?.({ taskId, targetPath: workspacePath });
      const taskTrack = await this.tracker.taskStart(task);
      await fs__default.default.ensureDir(workspacePath);
      const context = {
        parameters: task.spec.parameters,
        steps: {},
        user: task.spec.user
      };
      // Without a permissions service (or with zero steps), default to ALLOW.
      const [decision] = this.options.permissions && task.spec.steps.length ? await this.options.permissions.authorizeConditional(
        [{ permission: alpha$1.actionExecutePermission }],
        { credentials: await task.getInitiatorCredentials() }
      ) : [{ result: pluginPermissionCommon.AuthorizeResult.ALLOW }];
      for (const step of task.spec.steps) {
        await this.executeStep(
          task,
          step,
          context,
          renderTemplate,
          taskTrack,
          workspacePath,
          decision
        );
      }
      const output = this.render(task.spec.output, context, renderTemplate);
      await taskTrack.markSuccessful();
      await task.cleanWorkspace?.();
      return { output };
    } finally {
      if (workspacePath) {
        await fs__default.default.remove(workspacePath);
      }
    }
  }
}
|
|
3104
|
-
function scaffoldingTracker() {
|
|
3105
|
-
const promTaskCount = createCounterMetric({
|
|
3106
|
-
name: "scaffolder_task_count",
|
|
3107
|
-
help: "Count of task runs",
|
|
3108
|
-
labelNames: ["template", "user", "result"]
|
|
3109
|
-
});
|
|
3110
|
-
const promTaskDuration = createHistogramMetric({
|
|
3111
|
-
name: "scaffolder_task_duration",
|
|
3112
|
-
help: "Duration of a task run",
|
|
3113
|
-
labelNames: ["template", "result"]
|
|
3114
|
-
});
|
|
3115
|
-
const promtStepCount = createCounterMetric({
|
|
3116
|
-
name: "scaffolder_step_count",
|
|
3117
|
-
help: "Count of step runs",
|
|
3118
|
-
labelNames: ["template", "step", "result"]
|
|
3119
|
-
});
|
|
3120
|
-
const promStepDuration = createHistogramMetric({
|
|
3121
|
-
name: "scaffolder_step_duration",
|
|
3122
|
-
help: "Duration of a step runs",
|
|
3123
|
-
labelNames: ["template", "step", "result"]
|
|
3124
|
-
});
|
|
3125
|
-
const meter = api.metrics.getMeter("default");
|
|
3126
|
-
const taskCount = meter.createCounter("scaffolder.task.count", {
|
|
3127
|
-
description: "Count of task runs"
|
|
3128
|
-
});
|
|
3129
|
-
const taskDuration = meter.createHistogram("scaffolder.task.duration", {
|
|
3130
|
-
description: "Duration of a task run",
|
|
3131
|
-
unit: "seconds"
|
|
3132
|
-
});
|
|
3133
|
-
const stepCount = meter.createCounter("scaffolder.step.count", {
|
|
3134
|
-
description: "Count of step runs"
|
|
3135
|
-
});
|
|
3136
|
-
const stepDuration = meter.createHistogram("scaffolder.step.duration", {
|
|
3137
|
-
description: "Duration of a step runs",
|
|
3138
|
-
unit: "seconds"
|
|
3139
|
-
});
|
|
3140
|
-
async function taskStart(task) {
|
|
3141
|
-
await task.emitLog(`Starting up task with ${task.spec.steps.length} steps`);
|
|
3142
|
-
const template = task.spec.templateInfo?.entityRef || "";
|
|
3143
|
-
const user = task.spec.user?.ref || "";
|
|
3144
|
-
const startTime = process.hrtime();
|
|
3145
|
-
const taskTimer = promTaskDuration.startTimer({
|
|
3146
|
-
template
|
|
3147
|
-
});
|
|
3148
|
-
function endTime() {
|
|
3149
|
-
const delta = process.hrtime(startTime);
|
|
3150
|
-
return delta[0] + delta[1] / 1e9;
|
|
3151
|
-
}
|
|
3152
|
-
async function skipDryRun(step, action) {
|
|
3153
|
-
task.emitLog(`Skipping because ${action.id} does not support dry-run`, {
|
|
3154
|
-
stepId: step.id,
|
|
3155
|
-
status: "skipped"
|
|
3156
|
-
});
|
|
3157
|
-
}
|
|
3158
|
-
async function markSuccessful() {
|
|
3159
|
-
promTaskCount.inc({
|
|
3160
|
-
template,
|
|
3161
|
-
user,
|
|
3162
|
-
result: "ok"
|
|
3163
|
-
});
|
|
3164
|
-
taskTimer({ result: "ok" });
|
|
3165
|
-
taskCount.add(1, { template, user, result: "ok" });
|
|
3166
|
-
taskDuration.record(endTime(), {
|
|
3167
|
-
result: "ok"
|
|
3168
|
-
});
|
|
3169
|
-
}
|
|
3170
|
-
async function markFailed(step, err) {
|
|
3171
|
-
await task.emitLog(String(err.stack), {
|
|
3172
|
-
stepId: step.id,
|
|
3173
|
-
status: "failed"
|
|
3174
|
-
});
|
|
3175
|
-
promTaskCount.inc({
|
|
3176
|
-
template,
|
|
3177
|
-
user,
|
|
3178
|
-
result: "failed"
|
|
3179
|
-
});
|
|
3180
|
-
taskTimer({ result: "failed" });
|
|
3181
|
-
taskCount.add(1, { template, user, result: "failed" });
|
|
3182
|
-
taskDuration.record(endTime(), {
|
|
3183
|
-
result: "failed"
|
|
3184
|
-
});
|
|
3185
|
-
}
|
|
3186
|
-
async function markCancelled(step) {
|
|
3187
|
-
await task.emitLog(`Step ${step.id} has been cancelled.`, {
|
|
3188
|
-
stepId: step.id,
|
|
3189
|
-
status: "cancelled"
|
|
3190
|
-
});
|
|
3191
|
-
promTaskCount.inc({
|
|
3192
|
-
template,
|
|
3193
|
-
user,
|
|
3194
|
-
result: "cancelled"
|
|
3195
|
-
});
|
|
3196
|
-
taskTimer({ result: "cancelled" });
|
|
3197
|
-
taskCount.add(1, { template, user, result: "cancelled" });
|
|
3198
|
-
taskDuration.record(endTime(), {
|
|
3199
|
-
result: "cancelled"
|
|
3200
|
-
});
|
|
3201
|
-
}
|
|
3202
|
-
return {
|
|
3203
|
-
skipDryRun,
|
|
3204
|
-
markCancelled,
|
|
3205
|
-
markSuccessful,
|
|
3206
|
-
markFailed
|
|
3207
|
-
};
|
|
3208
|
-
}
|
|
3209
|
-
async function stepStart(task, step) {
|
|
3210
|
-
await task.emitLog(`Beginning step ${step.name}`, {
|
|
3211
|
-
stepId: step.id,
|
|
3212
|
-
status: "processing"
|
|
3213
|
-
});
|
|
3214
|
-
const template = task.spec.templateInfo?.entityRef || "";
|
|
3215
|
-
const startTime = process.hrtime();
|
|
3216
|
-
const stepTimer = promStepDuration.startTimer({
|
|
3217
|
-
template,
|
|
3218
|
-
step: step.name
|
|
3219
|
-
});
|
|
3220
|
-
function endTime() {
|
|
3221
|
-
const delta = process.hrtime(startTime);
|
|
3222
|
-
return delta[0] + delta[1] / 1e9;
|
|
3223
|
-
}
|
|
3224
|
-
async function markSuccessful() {
|
|
3225
|
-
await task.emitLog(`Finished step ${step.name}`, {
|
|
3226
|
-
stepId: step.id,
|
|
3227
|
-
status: "completed"
|
|
3228
|
-
});
|
|
3229
|
-
promtStepCount.inc({
|
|
3230
|
-
template,
|
|
3231
|
-
step: step.name,
|
|
3232
|
-
result: "ok"
|
|
3233
|
-
});
|
|
3234
|
-
stepTimer({ result: "ok" });
|
|
3235
|
-
stepCount.add(1, { template, step: step.name, result: "ok" });
|
|
3236
|
-
stepDuration.record(endTime(), {
|
|
3237
|
-
result: "ok"
|
|
3238
|
-
});
|
|
3239
|
-
}
|
|
3240
|
-
async function markCancelled() {
|
|
3241
|
-
promtStepCount.inc({
|
|
3242
|
-
template,
|
|
3243
|
-
step: step.name,
|
|
3244
|
-
result: "cancelled"
|
|
3245
|
-
});
|
|
3246
|
-
stepTimer({ result: "cancelled" });
|
|
3247
|
-
stepCount.add(1, { template, step: step.name, result: "cancelled" });
|
|
3248
|
-
stepDuration.record(endTime(), {
|
|
3249
|
-
result: "cancelled"
|
|
3250
|
-
});
|
|
3251
|
-
}
|
|
3252
|
-
async function markFailed() {
|
|
3253
|
-
promtStepCount.inc({
|
|
3254
|
-
template,
|
|
3255
|
-
step: step.name,
|
|
3256
|
-
result: "failed"
|
|
3257
|
-
});
|
|
3258
|
-
stepTimer({ result: "failed" });
|
|
3259
|
-
stepCount.add(1, { template, step: step.name, result: "failed" });
|
|
3260
|
-
stepDuration.record(endTime(), {
|
|
3261
|
-
result: "failed"
|
|
3262
|
-
});
|
|
3263
|
-
}
|
|
3264
|
-
async function skipFalsy() {
|
|
3265
|
-
await task.emitLog(
|
|
3266
|
-
`Skipping step ${step.id} because its if condition was false`,
|
|
3267
|
-
{ stepId: step.id, status: "skipped" }
|
|
3268
|
-
);
|
|
3269
|
-
stepTimer({ result: "skipped" });
|
|
3270
|
-
stepCount.add(1, { template, step: step.name, result: "skipped" });
|
|
3271
|
-
stepDuration.record(endTime(), {
|
|
3272
|
-
result: "skipped"
|
|
3273
|
-
});
|
|
3274
|
-
}
|
|
3275
|
-
return {
|
|
3276
|
-
markCancelled,
|
|
3277
|
-
markFailed,
|
|
3278
|
-
markSuccessful,
|
|
3279
|
-
skipFalsy
|
|
3280
|
-
};
|
|
3281
|
-
}
|
|
3282
|
-
return {
|
|
3283
|
-
taskStart,
|
|
3284
|
-
stepStart
|
|
3285
|
-
};
|
|
3286
|
-
}
|
|
3287
|
-
|
|
3288
|
-
/**
 * Polls the task broker for queued scaffolder tasks and executes them
 * through the configured workflow runner, running at most
 * `concurrentTasksLimit` tasks in parallel (enforced via a PQueue).
 */
class TaskWorker {
  constructor(options) {
    this.options = options;
    this.stopWorkers = false;
    this.logger = options.logger;
    // Bounds how many claimed tasks may execute simultaneously.
    this.taskQueue = new PQueue__default.default({
      concurrency: options.concurrentTasksLimit
    });
  }
  taskQueue;
  logger;
  stopWorkers;
  /**
   * Factory: builds a NunjucksWorkflowRunner from the given options and
   * returns a TaskWorker wired to it. `concurrentTasksLimit` defaults to 10.
   */
  static async create(options) {
    const {
      taskBroker,
      logger,
      actionRegistry,
      integrations,
      workingDirectory,
      additionalTemplateFilters,
      concurrentTasksLimit = 10,
      // from 1 to Infinity
      additionalTemplateGlobals,
      permissions
    } = options;
    const workflowRunner = new NunjucksWorkflowRunner({
      actionRegistry,
      integrations,
      logger,
      workingDirectory,
      additionalTemplateFilters,
      additionalTemplateGlobals,
      permissions
    });
    return new TaskWorker({
      taskBroker,
      runners: { workflowRunner },
      concurrentTasksLimit,
      permissions
    });
  }
  // Best-effort recovery of interrupted tasks via the broker's optional
  // recoverTasks hook; errors are logged, never thrown.
  async recoverTasks() {
    try {
      await this.options.taskBroker.recoverTasks?.();
    } catch (err) {
      this.logger?.error(errors.stringifyError(err));
    }
  }
  /**
   * Starts two fire-and-forget background loops: one that attempts task
   * recovery every 10 seconds, and one that claims tasks from the broker
   * whenever the queue has spare capacity. Both run until stop() is called.
   */
  start() {
    (async () => {
      while (!this.stopWorkers) {
        await new Promise((resolve) => setTimeout(resolve, 1e4));
        await this.recoverTasks();
      }
    })();
    (async () => {
      while (!this.stopWorkers) {
        await this.onReadyToClaimTask();
        // Re-check the stop flag: it may have flipped while we waited.
        if (!this.stopWorkers) {
          const task = await this.options.taskBroker.claim();
          // Intentionally not awaited; PQueue runs it with bounded concurrency.
          void this.taskQueue.add(() => this.runOneTask(task));
        }
      }
    })();
  }
  // Signals both background loops to exit after their current iteration.
  // In-flight tasks are not interrupted.
  stop() {
    this.stopWorkers = true;
  }
  // Resolves immediately when the queue has capacity; otherwise waits for
  // the queue's "next" event (a task starting) before allowing another claim.
  onReadyToClaimTask() {
    if (this.taskQueue.pending < this.options.concurrentTasksLimit) {
      return Promise.resolve();
    }
    return new Promise((resolve) => {
      this.taskQueue.once("next", () => {
        resolve();
      });
    });
  }
  // Runs one claimed task to completion and reports the outcome back to the
  // broker; any thrown error marks the task as failed with the error details.
  async runOneTask(task) {
    try {
      // Only the v1beta3 template apiVersion is executable by this worker.
      if (task.spec.apiVersion !== "scaffolder.backstage.io/v1beta3") {
        throw new Error(
          `Unsupported Template apiVersion ${task.spec.apiVersion}`
        );
      }
      const { output } = await this.options.runners.workflowRunner.execute(
        task
      );
      await task.complete("completed", { output });
    } catch (error) {
      errors.assertError(error);
      await task.complete("failed", {
        error: { name: error.name, message: error.message }
      });
    }
  }
}
|
|
3385
|
-
|
|
3386
|
-
/**
 * An action registry that layers a set of extra actions on top of an
 * existing registry: lookups resolve against the extra actions first,
 * and fall back to the wrapped (inner) registry on a miss.
 */
class DecoratedActionsRegistry extends TemplateActionRegistry {
  constructor(innerRegistry, extraActions) {
    super();
    this.innerRegistry = innerRegistry;
    // Register the overlay actions on this instance only; the wrapped
    // registry is left untouched.
    extraActions.forEach((action) => this.register(action));
  }
  get(actionId) {
    try {
      return super.get(actionId);
    } catch {
      // Not one of the extra actions — defer to the wrapped registry.
      return this.innerRegistry.get(actionId);
    }
  }
}
|
|
3402
|
-
|
|
3403
|
-
/**
 * Creates a function that executes a template spec in "dry-run" mode:
 * the caller-provided directory contents are materialized into a
 * temporary workspace, every step runs with `isDryRun: true`, and a
 * synthetic final step (`dry-run:extract`) serializes the resulting
 * workspace so it can be returned alongside the collected log and the
 * template output. Nothing is persisted to any task broker.
 */
function createDryRunner(options) {
  return async function dryRun(input) {
    let contentPromise;
    // Wrap the real action registry so the synthetic extraction action is
    // resolvable without mutating the shared registry.
    const workflowRunner = new NunjucksWorkflowRunner({
      ...options,
      actionRegistry: new DecoratedActionsRegistry(options.actionRegistry, [
        pluginScaffolderNode.createTemplateAction({
          id: "dry-run:extract",
          supportsDryRun: true,
          async handler(ctx) {
            contentPromise = pluginScaffolderNode.serializeDirectoryContents(ctx.workspacePath);
            // Swallow rejection here; the real error surfaces when the
            // promise is awaited further down.
            await contentPromise.catch(() => {
            });
          }
        })
      ])
    });
    const dryRunId = uuid.v4();
    const log = new Array();
    // resolveSafeChildPath guards against path traversal out of the
    // working directory.
    const contentsPath = backendPluginApi.resolveSafeChildPath(
      options.workingDirectory,
      `dry-run-content-${dryRunId}`
    );
    try {
      await pluginScaffolderNode.deserializeDirectoryContents(contentsPath, input.directoryContents);
      const abortSignal = new AbortController().signal;
      const result = await workflowRunner.execute({
        spec: {
          ...input.spec,
          steps: [
            ...input.spec.steps,
            // Appended last so it captures the final workspace state.
            {
              id: dryRunId,
              name: "dry-run:extract",
              action: "dry-run:extract"
            }
          ],
          templateInfo: {
            entityRef: "template:default/dry-run",
            baseUrl: url.pathToFileURL(
              backendPluginApi.resolveSafeChildPath(contentsPath, "template.yaml")
            ).toString()
          }
        },
        secrets: input.secrets,
        getInitiatorCredentials: () => Promise.resolve(input.credentials),
        // No need to update this at the end of the run, so just hard-code it
        done: false,
        isDryRun: true,
        getWorkspaceName: async () => `dry-run-${dryRunId}`,
        cancelSignal: abortSignal,
        // Collect all log lines except those emitted by the synthetic
        // extraction step itself.
        async emitLog(message, logMetadata) {
          if (logMetadata?.stepId === dryRunId) {
            return;
          }
          log.push({
            body: {
              ...logMetadata,
              message
            }
          });
        },
        // Dry runs never complete through the broker path.
        complete: async () => {
          throw new Error("Not implemented");
        }
      });
      // If the extraction step never ran (e.g. skipped), the dry run has no
      // workspace to report — treat that as an error.
      if (!contentPromise) {
        throw new Error("Content extraction step was skipped");
      }
      const directoryContents = await contentPromise;
      return {
        log,
        directoryContents,
        output: result.output
      };
    } finally {
      // Always remove the temporary workspace, even on failure.
      await fs__default.default.remove(contentsPath);
    }
  };
}
|
|
3483
|
-
|
|
3484
|
-
/**
 * Authorizes the given credentials against a list of permissions using the
 * optional permission service. Throws NotAllowedError if any permission is
 * denied; a no-op when no permission service is configured.
 */
async function checkPermission(options) {
  const { permissions, permissionService, credentials } = options;
  if (!permissionService) {
    return;
  }
  const permissionRequest = permissions.map((permission) => ({
    permission
  }));
  const authorizationResponses = await permissionService.authorize(
    permissionRequest,
    { credentials }
  );
  const denied = authorizationResponses.some(
    (response) => response.result === pluginPermissionCommon.AuthorizeResult.DENY
  );
  if (denied) {
    throw new errors.NotAllowedError();
  }
}
|
|
3501
|
-
|
|
3502
|
-
/**
 * Type guard: true when a permission rule targets scaffolder templates.
 */
function isTemplatePermissionRuleInput(permissionRule) {
  const { resourceType } = permissionRule;
  return resourceType === alpha$1.RESOURCE_TYPE_SCAFFOLDER_TEMPLATE;
}
|
|
3505
|
-
/**
 * Type guard: true when a permission rule targets scaffolder actions.
 */
function isActionPermissionRuleInput(permissionRule) {
  const { resourceType } = permissionRule;
  return resourceType === alpha$1.RESOURCE_TYPE_SCAFFOLDER_ACTION;
}
|
|
3508
|
-
/**
 * True when the entity uses the template apiVersion this backend can execute.
 */
function isSupportedTemplate(entity) {
  const supportedApiVersion = "scaffolder.backstage.io/v1beta3";
  return entity.apiVersion === supportedApiVersion;
}
|
|
3511
|
-
/**
 * Builds a fallback identity client used when no identity API is supplied
 * to createRouter. It derives the caller identity by decoding (NOT
 * cryptographically verifying) the JWT in the Authorization header —
 * signature validation is presumably handled upstream; TODO confirm.
 * Returns undefined (anonymous) when the header is missing, the token
 * belongs to the server principal, or any parse step fails.
 */
function buildDefaultIdentityClient(options) {
  return {
    getIdentity: async ({ request }) => {
      const header = request.headers.authorization;
      const { logger } = options;
      if (!header) {
        return void 0;
      }
      try {
        // Expect "Bearer <three dot-separated JWT segments>" (case-insensitive).
        const token = header.match(/^Bearer\s(\S+\.\S+\.\S+)$/i)?.[1];
        if (!token) {
          throw new TypeError("Expected Bearer with JWT");
        }
        // Decode only the payload segment; header and signature are ignored.
        // NOTE(review): JWT segments are base64url-encoded; this relies on
        // Node's "base64" decoder tolerating url-safe characters — confirm.
        const [_header, rawPayload, _signature] = token.split(".");
        const payload = JSON.parse(
          Buffer.from(rawPayload, "base64").toString()
        );
        if (typeof payload !== "object" || payload === null || Array.isArray(payload)) {
          throw new TypeError("Malformed JWT payload");
        }
        const sub = payload.sub;
        if (typeof sub !== "string") {
          throw new TypeError("Expected string sub claim");
        }
        // Server-to-server tokens carry no user identity.
        if (sub === "backstage-server") {
          return void 0;
        }
        // Throws if sub is not a valid entity ref, landing in the catch below.
        catalogModel.parseEntityRef(sub);
        return {
          identity: {
            userEntityRef: sub,
            ownershipEntityRefs: [],
            type: "user"
          },
          token
        };
      } catch (e) {
        // Any malformed header is treated as anonymous rather than failing
        // the request; the problem is logged for operators.
        logger.error(`Invalid authorization header: ${errors.stringifyError(e)}`);
        return void 0;
      }
    }
  };
}
|
|
3554
|
-
/**
 * Reads a duration from config at `key`, falling back to `defaultValue`
 * when the key is not present.
 */
const readDuration = (config$1, key, defaultValue) => {
  return config$1.has(key) ? config.readDurationFromConfig(config$1, { key }) : defaultValue;
};
|
|
3560
|
-
async function createRouter(options) {
|
|
3561
|
-
const router = Router__default.default();
|
|
3562
|
-
router.use(express__default.default.json({ limit: "10MB" }));
|
|
3563
|
-
const {
|
|
3564
|
-
logger: parentLogger,
|
|
3565
|
-
config,
|
|
3566
|
-
reader,
|
|
3567
|
-
database,
|
|
3568
|
-
catalogClient,
|
|
3569
|
-
actions,
|
|
3570
|
-
taskWorkers,
|
|
3571
|
-
scheduler,
|
|
3572
|
-
additionalTemplateFilters,
|
|
3573
|
-
additionalTemplateGlobals,
|
|
3574
|
-
additionalWorkspaceProviders,
|
|
3575
|
-
permissions,
|
|
3576
|
-
permissionRules,
|
|
3577
|
-
discovery = backendCommon.HostDiscovery.fromConfig(config),
|
|
3578
|
-
identity = buildDefaultIdentityClient(options),
|
|
3579
|
-
autocompleteHandlers = {}
|
|
3580
|
-
} = options;
|
|
3581
|
-
const { auth, httpAuth } = backendCommon.createLegacyAuthAdapters({
|
|
3582
|
-
...options,
|
|
3583
|
-
identity,
|
|
3584
|
-
discovery
|
|
3585
|
-
});
|
|
3586
|
-
const concurrentTasksLimit = options.concurrentTasksLimit ?? options.config.getOptionalNumber("scaffolder.concurrentTasksLimit");
|
|
3587
|
-
const logger = parentLogger.child({ plugin: "scaffolder" });
|
|
3588
|
-
const workingDirectory = await getWorkingDirectory(config, logger);
|
|
3589
|
-
const integrations = integration.ScmIntegrations.fromConfig(config);
|
|
3590
|
-
let taskBroker;
|
|
3591
|
-
if (!options.taskBroker) {
|
|
3592
|
-
const databaseTaskStore = await DatabaseTaskStore.create({ database });
|
|
3593
|
-
taskBroker = new StorageTaskBroker(
|
|
3594
|
-
databaseTaskStore,
|
|
3595
|
-
logger,
|
|
3596
|
-
config,
|
|
3597
|
-
auth,
|
|
3598
|
-
additionalWorkspaceProviders
|
|
3599
|
-
);
|
|
3600
|
-
if (scheduler && databaseTaskStore.listStaleTasks) {
|
|
3601
|
-
await scheduler.scheduleTask({
|
|
3602
|
-
id: "close_stale_tasks",
|
|
3603
|
-
frequency: readDuration(
|
|
3604
|
-
config,
|
|
3605
|
-
"scaffolder.taskTimeoutJanitorFrequency",
|
|
3606
|
-
{
|
|
3607
|
-
minutes: 5
|
|
3608
|
-
}
|
|
3609
|
-
),
|
|
3610
|
-
timeout: { minutes: 15 },
|
|
3611
|
-
fn: async () => {
|
|
3612
|
-
const { tasks } = await databaseTaskStore.listStaleTasks({
|
|
3613
|
-
timeoutS: luxon.Duration.fromObject(
|
|
3614
|
-
readDuration(config, "scaffolder.taskTimeout", {
|
|
3615
|
-
hours: 24
|
|
3616
|
-
})
|
|
3617
|
-
).as("seconds")
|
|
3618
|
-
});
|
|
3619
|
-
for (const task of tasks) {
|
|
3620
|
-
await databaseTaskStore.shutdownTask(task);
|
|
3621
|
-
logger.info(`Successfully closed stale task ${task.taskId}`);
|
|
3622
|
-
}
|
|
3623
|
-
}
|
|
3624
|
-
});
|
|
3625
|
-
}
|
|
3626
|
-
} else {
|
|
3627
|
-
taskBroker = options.taskBroker;
|
|
3628
|
-
}
|
|
3629
|
-
const actionRegistry = new TemplateActionRegistry();
|
|
3630
|
-
const workers = [];
|
|
3631
|
-
if (concurrentTasksLimit !== 0) {
|
|
3632
|
-
for (let i = 0; i < (taskWorkers || 1); i++) {
|
|
3633
|
-
const worker = await TaskWorker.create({
|
|
3634
|
-
taskBroker,
|
|
3635
|
-
actionRegistry,
|
|
3636
|
-
integrations,
|
|
3637
|
-
logger,
|
|
3638
|
-
workingDirectory,
|
|
3639
|
-
additionalTemplateFilters,
|
|
3640
|
-
additionalTemplateGlobals,
|
|
3641
|
-
concurrentTasksLimit,
|
|
3642
|
-
permissions
|
|
3643
|
-
});
|
|
3644
|
-
workers.push(worker);
|
|
3645
|
-
}
|
|
3646
|
-
}
|
|
3647
|
-
const actionsToRegister = Array.isArray(actions) ? actions : createBuiltinActions({
|
|
3648
|
-
integrations,
|
|
3649
|
-
catalogClient,
|
|
3650
|
-
reader,
|
|
3651
|
-
config,
|
|
3652
|
-
additionalTemplateFilters,
|
|
3653
|
-
additionalTemplateGlobals,
|
|
3654
|
-
auth
|
|
3655
|
-
});
|
|
3656
|
-
actionsToRegister.forEach((action) => actionRegistry.register(action));
|
|
3657
|
-
const launchWorkers = () => workers.forEach((worker) => worker.start());
|
|
3658
|
-
const shutdownWorkers = () => {
|
|
3659
|
-
workers.forEach((worker) => worker.stop());
|
|
3660
|
-
};
|
|
3661
|
-
if (options.lifecycle) {
|
|
3662
|
-
options.lifecycle.addStartupHook(launchWorkers);
|
|
3663
|
-
options.lifecycle.addShutdownHook(shutdownWorkers);
|
|
3664
|
-
} else {
|
|
3665
|
-
launchWorkers();
|
|
3666
|
-
}
|
|
3667
|
-
const dryRunner = createDryRunner({
|
|
3668
|
-
actionRegistry,
|
|
3669
|
-
integrations,
|
|
3670
|
-
logger,
|
|
3671
|
-
workingDirectory,
|
|
3672
|
-
additionalTemplateFilters,
|
|
3673
|
-
additionalTemplateGlobals,
|
|
3674
|
-
permissions
|
|
3675
|
-
});
|
|
3676
|
-
const templateRules = Object.values(
|
|
3677
|
-
scaffolderTemplateRules
|
|
3678
|
-
);
|
|
3679
|
-
const actionRules = Object.values(
|
|
3680
|
-
scaffolderActionRules
|
|
3681
|
-
);
|
|
3682
|
-
if (permissionRules) {
|
|
3683
|
-
templateRules.push(
|
|
3684
|
-
...permissionRules.filter(isTemplatePermissionRuleInput)
|
|
3685
|
-
);
|
|
3686
|
-
actionRules.push(...permissionRules.filter(isActionPermissionRuleInput));
|
|
3687
|
-
}
|
|
3688
|
-
const isAuthorized = pluginPermissionNode.createConditionAuthorizer(Object.values(templateRules));
|
|
3689
|
-
const permissionIntegrationRouter = pluginPermissionNode.createPermissionIntegrationRouter({
|
|
3690
|
-
resources: [
|
|
3691
|
-
{
|
|
3692
|
-
resourceType: alpha$1.RESOURCE_TYPE_SCAFFOLDER_TEMPLATE,
|
|
3693
|
-
permissions: alpha$1.scaffolderTemplatePermissions,
|
|
3694
|
-
rules: templateRules
|
|
3695
|
-
},
|
|
3696
|
-
{
|
|
3697
|
-
resourceType: alpha$1.RESOURCE_TYPE_SCAFFOLDER_ACTION,
|
|
3698
|
-
permissions: alpha$1.scaffolderActionPermissions,
|
|
3699
|
-
rules: actionRules
|
|
3700
|
-
}
|
|
3701
|
-
],
|
|
3702
|
-
permissions: alpha$1.scaffolderTaskPermissions
|
|
3703
|
-
});
|
|
3704
|
-
router.use(permissionIntegrationRouter);
|
|
3705
|
-
router.get(
|
|
3706
|
-
"/v2/templates/:namespace/:kind/:name/parameter-schema",
|
|
3707
|
-
async (req, res) => {
|
|
3708
|
-
const credentials = await httpAuth.credentials(req);
|
|
3709
|
-
const { token } = await auth.getPluginRequestToken({
|
|
3710
|
-
onBehalfOf: credentials,
|
|
3711
|
-
targetPluginId: "catalog"
|
|
3712
|
-
});
|
|
3713
|
-
const template = await authorizeTemplate(
|
|
3714
|
-
req.params,
|
|
3715
|
-
token,
|
|
3716
|
-
credentials
|
|
3717
|
-
);
|
|
3718
|
-
const parameters = [template.spec.parameters ?? []].flat();
|
|
3719
|
-
const presentation = template.spec.presentation;
|
|
3720
|
-
res.json({
|
|
3721
|
-
title: template.metadata.title ?? template.metadata.name,
|
|
3722
|
-
...presentation ? { presentation } : {},
|
|
3723
|
-
description: template.metadata.description,
|
|
3724
|
-
"ui:options": template.metadata["ui:options"],
|
|
3725
|
-
steps: parameters.map((schema) => ({
|
|
3726
|
-
title: schema.title ?? "Please enter the following information",
|
|
3727
|
-
description: schema.description,
|
|
3728
|
-
schema
|
|
3729
|
-
}))
|
|
3730
|
-
});
|
|
3731
|
-
}
|
|
3732
|
-
).get("/v2/actions", async (_req, res) => {
|
|
3733
|
-
const actionsList = actionRegistry.list().map((action) => {
|
|
3734
|
-
return {
|
|
3735
|
-
id: action.id,
|
|
3736
|
-
description: action.description,
|
|
3737
|
-
examples: action.examples,
|
|
3738
|
-
schema: action.schema
|
|
3739
|
-
};
|
|
3740
|
-
});
|
|
3741
|
-
res.json(actionsList);
|
|
3742
|
-
}).post("/v2/tasks", async (req, res) => {
|
|
3743
|
-
const templateRef = req.body.templateRef;
|
|
3744
|
-
const { kind, namespace, name } = catalogModel.parseEntityRef(templateRef, {
|
|
3745
|
-
defaultKind: "template"
|
|
3746
|
-
});
|
|
3747
|
-
const credentials = await httpAuth.credentials(req);
|
|
3748
|
-
await checkPermission({
|
|
3749
|
-
credentials,
|
|
3750
|
-
permissions: [alpha$1.taskCreatePermission],
|
|
3751
|
-
permissionService: permissions
|
|
3752
|
-
});
|
|
3753
|
-
const { token } = await auth.getPluginRequestToken({
|
|
3754
|
-
onBehalfOf: credentials,
|
|
3755
|
-
targetPluginId: "catalog"
|
|
3756
|
-
});
|
|
3757
|
-
const userEntityRef = auth.isPrincipal(credentials, "user") ? credentials.principal.userEntityRef : void 0;
|
|
3758
|
-
const userEntity = userEntityRef ? await catalogClient.getEntityByRef(userEntityRef, { token }) : void 0;
|
|
3759
|
-
let auditLog = `Scaffolding task for ${templateRef}`;
|
|
3760
|
-
if (userEntityRef) {
|
|
3761
|
-
auditLog += ` created by ${userEntityRef}`;
|
|
3762
|
-
}
|
|
3763
|
-
logger.info(auditLog);
|
|
3764
|
-
const values = req.body.values;
|
|
3765
|
-
const template = await authorizeTemplate(
|
|
3766
|
-
{ kind, namespace, name },
|
|
3767
|
-
token,
|
|
3768
|
-
credentials
|
|
3769
|
-
);
|
|
3770
|
-
for (const parameters of [template.spec.parameters ?? []].flat()) {
|
|
3771
|
-
const result2 = jsonschema.validate(values, parameters);
|
|
3772
|
-
if (!result2.valid) {
|
|
3773
|
-
res.status(400).json({ errors: result2.errors });
|
|
3774
|
-
return;
|
|
3775
|
-
}
|
|
3776
|
-
}
|
|
3777
|
-
const baseUrl = getEntityBaseUrl(template);
|
|
3778
|
-
const taskSpec = {
|
|
3779
|
-
apiVersion: template.apiVersion,
|
|
3780
|
-
steps: template.spec.steps.map((step, index) => ({
|
|
3781
|
-
...step,
|
|
3782
|
-
id: step.id ?? `step-${index + 1}`,
|
|
3783
|
-
name: step.name ?? step.action
|
|
3784
|
-
})),
|
|
3785
|
-
EXPERIMENTAL_recovery: template.spec.EXPERIMENTAL_recovery,
|
|
3786
|
-
output: template.spec.output ?? {},
|
|
3787
|
-
parameters: values,
|
|
3788
|
-
user: {
|
|
3789
|
-
entity: userEntity,
|
|
3790
|
-
ref: userEntityRef
|
|
3791
|
-
},
|
|
3792
|
-
templateInfo: {
|
|
3793
|
-
entityRef: catalogModel.stringifyEntityRef({ kind, name, namespace }),
|
|
3794
|
-
baseUrl,
|
|
3795
|
-
entity: {
|
|
3796
|
-
metadata: template.metadata
|
|
3797
|
-
}
|
|
3798
|
-
}
|
|
3799
|
-
};
|
|
3800
|
-
const secrets = {
|
|
3801
|
-
...req.body.secrets,
|
|
3802
|
-
backstageToken: token,
|
|
3803
|
-
__initiatorCredentials: JSON.stringify(credentials)
|
|
3804
|
-
};
|
|
3805
|
-
const result = await taskBroker.dispatch({
|
|
3806
|
-
spec: taskSpec,
|
|
3807
|
-
createdBy: userEntityRef,
|
|
3808
|
-
secrets
|
|
3809
|
-
});
|
|
3810
|
-
res.status(201).json({ id: result.taskId });
|
|
3811
|
-
}).get("/v2/tasks", async (req, res) => {
|
|
3812
|
-
const credentials = await httpAuth.credentials(req);
|
|
3813
|
-
await checkPermission({
|
|
3814
|
-
credentials,
|
|
3815
|
-
permissions: [alpha$1.taskReadPermission],
|
|
3816
|
-
permissionService: permissions
|
|
3817
|
-
});
|
|
3818
|
-
if (!taskBroker.list) {
|
|
3819
|
-
throw new Error(
|
|
3820
|
-
"TaskBroker does not support listing tasks, please implement the list method on the TaskBroker."
|
|
3821
|
-
);
|
|
3822
|
-
}
|
|
3823
|
-
const createdBy = parseStringsParam(req.query.createdBy, "createdBy");
|
|
3824
|
-
const status = parseStringsParam(req.query.status, "status");
|
|
3825
|
-
const order = parseStringsParam(req.query.order, "order")?.map((item) => {
|
|
3826
|
-
const match = item.match(/^(asc|desc):(.+)$/);
|
|
3827
|
-
if (!match) {
|
|
3828
|
-
throw new errors.InputError(
|
|
3829
|
-
`Invalid order parameter "${item}", expected "<asc or desc>:<field name>"`
|
|
3830
|
-
);
|
|
3831
|
-
}
|
|
3832
|
-
return {
|
|
3833
|
-
order: match[1],
|
|
3834
|
-
field: match[2]
|
|
3835
|
-
};
|
|
3836
|
-
});
|
|
3837
|
-
const limit = parseNumberParam(req.query.limit, "limit");
|
|
3838
|
-
const offset = parseNumberParam(req.query.offset, "offset");
|
|
3839
|
-
const tasks = await taskBroker.list({
|
|
3840
|
-
filters: {
|
|
3841
|
-
createdBy,
|
|
3842
|
-
status: status ? status : void 0
|
|
3843
|
-
},
|
|
3844
|
-
order,
|
|
3845
|
-
pagination: {
|
|
3846
|
-
limit: limit ? limit[0] : void 0,
|
|
3847
|
-
offset: offset ? offset[0] : void 0
|
|
3848
|
-
}
|
|
3849
|
-
});
|
|
3850
|
-
res.status(200).json(tasks);
|
|
3851
|
-
}).get("/v2/tasks/:taskId", async (req, res) => {
|
|
3852
|
-
const credentials = await httpAuth.credentials(req);
|
|
3853
|
-
await checkPermission({
|
|
3854
|
-
credentials,
|
|
3855
|
-
permissions: [alpha$1.taskReadPermission],
|
|
3856
|
-
permissionService: permissions
|
|
3857
|
-
});
|
|
3858
|
-
const { taskId } = req.params;
|
|
3859
|
-
const task = await taskBroker.get(taskId);
|
|
3860
|
-
if (!task) {
|
|
3861
|
-
throw new errors.NotFoundError(`Task with id ${taskId} does not exist`);
|
|
3862
|
-
}
|
|
3863
|
-
delete task.secrets;
|
|
3864
|
-
res.status(200).json(task);
|
|
3865
|
-
}).post("/v2/tasks/:taskId/cancel", async (req, res) => {
|
|
3866
|
-
const credentials = await httpAuth.credentials(req);
|
|
3867
|
-
await checkPermission({
|
|
3868
|
-
credentials,
|
|
3869
|
-
permissions: [alpha$1.taskCancelPermission, alpha$1.taskReadPermission],
|
|
3870
|
-
permissionService: permissions
|
|
3871
|
-
});
|
|
3872
|
-
const { taskId } = req.params;
|
|
3873
|
-
await taskBroker.cancel?.(taskId);
|
|
3874
|
-
res.status(200).json({ status: "cancelled" });
|
|
3875
|
-
}).post("/v2/tasks/:taskId/retry", async (req, res) => {
|
|
3876
|
-
const credentials = await httpAuth.credentials(req);
|
|
3877
|
-
await checkPermission({
|
|
3878
|
-
credentials,
|
|
3879
|
-
permissions: [alpha$1.taskCreatePermission, alpha$1.taskReadPermission],
|
|
3880
|
-
permissionService: permissions
|
|
3881
|
-
});
|
|
3882
|
-
const { taskId } = req.params;
|
|
3883
|
-
await taskBroker.retry?.(taskId);
|
|
3884
|
-
res.status(201).json({ id: taskId });
|
|
3885
|
-
}).get("/v2/tasks/:taskId/eventstream", async (req, res) => {
|
|
3886
|
-
const credentials = await httpAuth.credentials(req);
|
|
3887
|
-
await checkPermission({
|
|
3888
|
-
credentials,
|
|
3889
|
-
permissions: [alpha$1.taskReadPermission],
|
|
3890
|
-
permissionService: permissions
|
|
3891
|
-
});
|
|
3892
|
-
const { taskId } = req.params;
|
|
3893
|
-
const after = req.query.after !== void 0 ? Number(req.query.after) : void 0;
|
|
3894
|
-
logger.debug(`Event stream observing taskId '${taskId}' opened`);
|
|
3895
|
-
res.writeHead(200, {
|
|
3896
|
-
Connection: "keep-alive",
|
|
3897
|
-
"Cache-Control": "no-cache",
|
|
3898
|
-
"Content-Type": "text/event-stream"
|
|
3899
|
-
});
|
|
3900
|
-
const subscription = taskBroker.event$({ taskId, after }).subscribe({
|
|
3901
|
-
error: (error) => {
|
|
3902
|
-
logger.error(
|
|
3903
|
-
`Received error from event stream when observing taskId '${taskId}', ${error}`
|
|
3904
|
-
);
|
|
3905
|
-
res.end();
|
|
3906
|
-
},
|
|
3907
|
-
next: ({ events }) => {
|
|
3908
|
-
let shouldUnsubscribe = false;
|
|
3909
|
-
for (const event of events) {
|
|
3910
|
-
res.write(
|
|
3911
|
-
`event: ${event.type}
|
|
3912
|
-
data: ${JSON.stringify(event)}
|
|
3913
|
-
|
|
3914
|
-
`
|
|
3915
|
-
);
|
|
3916
|
-
if (event.type === "completion" && !event.isTaskRecoverable) {
|
|
3917
|
-
shouldUnsubscribe = true;
|
|
3918
|
-
}
|
|
3919
|
-
}
|
|
3920
|
-
res.flush?.();
|
|
3921
|
-
if (shouldUnsubscribe) {
|
|
3922
|
-
subscription.unsubscribe();
|
|
3923
|
-
res.end();
|
|
3924
|
-
}
|
|
3925
|
-
}
|
|
3926
|
-
});
|
|
3927
|
-
req.on("close", () => {
|
|
3928
|
-
subscription.unsubscribe();
|
|
3929
|
-
logger.debug(`Event stream observing taskId '${taskId}' closed`);
|
|
3930
|
-
});
|
|
3931
|
-
}).get("/v2/tasks/:taskId/events", async (req, res) => {
|
|
3932
|
-
const credentials = await httpAuth.credentials(req);
|
|
3933
|
-
await checkPermission({
|
|
3934
|
-
credentials,
|
|
3935
|
-
permissions: [alpha$1.taskReadPermission],
|
|
3936
|
-
permissionService: permissions
|
|
3937
|
-
});
|
|
3938
|
-
const { taskId } = req.params;
|
|
3939
|
-
const after = Number(req.query.after) || void 0;
|
|
3940
|
-
const timeout = setTimeout(() => {
|
|
3941
|
-
res.json([]);
|
|
3942
|
-
}, 3e4);
|
|
3943
|
-
const subscription = taskBroker.event$({ taskId, after }).subscribe({
|
|
3944
|
-
error: (error) => {
|
|
3945
|
-
logger.error(
|
|
3946
|
-
`Received error from event stream when observing taskId '${taskId}', ${error}`
|
|
3947
|
-
);
|
|
3948
|
-
},
|
|
3949
|
-
next: ({ events }) => {
|
|
3950
|
-
clearTimeout(timeout);
|
|
3951
|
-
subscription.unsubscribe();
|
|
3952
|
-
res.json(events);
|
|
3953
|
-
}
|
|
3954
|
-
});
|
|
3955
|
-
req.on("close", () => {
|
|
3956
|
-
subscription.unsubscribe();
|
|
3957
|
-
clearTimeout(timeout);
|
|
3958
|
-
});
|
|
3959
|
-
}).post("/v2/dry-run", async (req, res) => {
|
|
3960
|
-
const credentials = await httpAuth.credentials(req);
|
|
3961
|
-
await checkPermission({
|
|
3962
|
-
credentials,
|
|
3963
|
-
permissions: [alpha$1.taskCreatePermission],
|
|
3964
|
-
permissionService: permissions
|
|
3965
|
-
});
|
|
3966
|
-
const bodySchema = zod.z.object({
|
|
3967
|
-
template: zod.z.unknown(),
|
|
3968
|
-
values: zod.z.record(zod.z.unknown()),
|
|
3969
|
-
secrets: zod.z.record(zod.z.string()).optional(),
|
|
3970
|
-
directoryContents: zod.z.array(
|
|
3971
|
-
zod.z.object({ path: zod.z.string(), base64Content: zod.z.string() })
|
|
3972
|
-
)
|
|
3973
|
-
});
|
|
3974
|
-
const body = await bodySchema.parseAsync(req.body).catch((e) => {
|
|
3975
|
-
throw new errors.InputError(`Malformed request: ${e}`);
|
|
3976
|
-
});
|
|
3977
|
-
const template = body.template;
|
|
3978
|
-
if (!await pluginScaffolderCommon.templateEntityV1beta3Validator.check(template)) {
|
|
3979
|
-
throw new errors.InputError("Input template is not a template");
|
|
3980
|
-
}
|
|
3981
|
-
const { token } = await auth.getPluginRequestToken({
|
|
3982
|
-
onBehalfOf: credentials,
|
|
3983
|
-
targetPluginId: "catalog"
|
|
3984
|
-
});
|
|
3985
|
-
const userEntityRef = auth.isPrincipal(credentials, "user") ? credentials.principal.userEntityRef : void 0;
|
|
3986
|
-
const userEntity = userEntityRef ? await catalogClient.getEntityByRef(userEntityRef, { token }) : void 0;
|
|
3987
|
-
for (const parameters of [template.spec.parameters ?? []].flat()) {
|
|
3988
|
-
const result2 = jsonschema.validate(body.values, parameters);
|
|
3989
|
-
if (!result2.valid) {
|
|
3990
|
-
res.status(400).json({ errors: result2.errors });
|
|
3991
|
-
return;
|
|
3992
|
-
}
|
|
3993
|
-
}
|
|
3994
|
-
const steps = template.spec.steps.map((step, index) => ({
|
|
3995
|
-
...step,
|
|
3996
|
-
id: step.id ?? `step-${index + 1}`,
|
|
3997
|
-
name: step.name ?? step.action
|
|
3998
|
-
}));
|
|
3999
|
-
const result = await dryRunner({
|
|
4000
|
-
spec: {
|
|
4001
|
-
apiVersion: template.apiVersion,
|
|
4002
|
-
steps,
|
|
4003
|
-
output: template.spec.output ?? {},
|
|
4004
|
-
parameters: body.values,
|
|
4005
|
-
user: {
|
|
4006
|
-
entity: userEntity,
|
|
4007
|
-
ref: userEntityRef
|
|
4008
|
-
}
|
|
4009
|
-
},
|
|
4010
|
-
directoryContents: (body.directoryContents ?? []).map((file) => ({
|
|
4011
|
-
path: file.path,
|
|
4012
|
-
content: Buffer.from(file.base64Content, "base64")
|
|
4013
|
-
})),
|
|
4014
|
-
secrets: {
|
|
4015
|
-
...body.secrets,
|
|
4016
|
-
...token && { backstageToken: token }
|
|
4017
|
-
},
|
|
4018
|
-
credentials
|
|
4019
|
-
});
|
|
4020
|
-
res.status(200).json({
|
|
4021
|
-
...result,
|
|
4022
|
-
steps,
|
|
4023
|
-
directoryContents: result.directoryContents.map((file) => ({
|
|
4024
|
-
path: file.path,
|
|
4025
|
-
executable: file.executable,
|
|
4026
|
-
base64Content: file.content.toString("base64")
|
|
4027
|
-
}))
|
|
4028
|
-
});
|
|
4029
|
-
}).post("/v2/autocomplete/:provider/:resource", async (req, res) => {
|
|
4030
|
-
const { token, context } = req.body;
|
|
4031
|
-
const { provider, resource } = req.params;
|
|
4032
|
-
if (!token) throw new errors.InputError("Missing token query parameter");
|
|
4033
|
-
if (!autocompleteHandlers[provider]) {
|
|
4034
|
-
throw new errors.InputError(`Unsupported provider: ${provider}`);
|
|
4035
|
-
}
|
|
4036
|
-
const { results } = await autocompleteHandlers[provider]({
|
|
4037
|
-
resource,
|
|
4038
|
-
token,
|
|
4039
|
-
context
|
|
4040
|
-
});
|
|
4041
|
-
res.status(200).json({ results });
|
|
4042
|
-
});
|
|
4043
|
-
const app = express__default.default();
|
|
4044
|
-
app.set("logger", logger);
|
|
4045
|
-
app.use("/", router);
|
|
4046
|
-
async function authorizeTemplate(entityRef, token, credentials) {
  // Load the template entity and strip out any parameter sections and steps
  // the caller is not permitted to read. Mutates and returns the fetched
  // template.
  const template = await findTemplate({
    catalogApi: catalogClient,
    entityRef,
    token
  });

  if (!isSupportedTemplate(template)) {
    throw new errors.InputError(
      `Unsupported apiVersion field in schema entity, ${template.apiVersion}`
    );
  }

  // Without a permission service there is nothing to filter.
  if (!permissions) return template;

  const decisions = await permissions.authorizeConditional(
    [
      { permission: alpha$1.templateParameterReadPermission },
      { permission: alpha$1.templateStepReadPermission }
    ],
    { credentials }
  );
  const [paramDecision, stepDecision] = decisions;

  // Parameters may be a list of sections or a single section object; handle
  // both shapes.
  const { parameters } = template.spec;
  if (Array.isArray(parameters)) {
    template.spec.parameters = parameters.filter(
      (section) => isAuthorized(paramDecision, section)
    );
  } else if (parameters && !isAuthorized(paramDecision, parameters)) {
    template.spec.parameters = void 0;
  }

  template.spec.steps = template.spec.steps.filter(
    (candidate) => isAuthorized(stepDecision, candidate)
  );
  return template;
}
|
|
4079
|
-
return app;
|
|
4080
|
-
}
|
|
4081
|
-
|
|
4082
|
-
// Public CommonJS surface of this compiled module. Property set and order
// match the individual `exports.X = X` assignments this replaces.
Object.assign(exports, {
  DatabaseTaskStore,
  TaskManager,
  TaskWorker,
  TemplateActionRegistry,
  createBuiltinActions,
  createCatalogRegisterAction,
  createCatalogWriteAction,
  createDebugLogAction,
  createFetchCatalogEntityAction,
  createFetchPlainAction,
  createFetchPlainFileAction,
  createFetchTemplateAction,
  createFetchTemplateFileAction,
  createFilesystemDeleteAction,
  createFilesystemRenameAction,
  createRouter,
  createWaitAction,
  scaffolderActionRules,
  scaffolderTemplateRules
});
//# sourceMappingURL=router-BqZK9yax.cjs.js.map
|