@backstage/plugin-scaffolder-backend 1.2.0 → 1.3.0-next.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +66 -0
- package/dist/index.cjs.js +555 -87
- package/dist/index.cjs.js.map +1 -1
- package/dist/index.d.ts +50 -3
- package/package.json +14 -12
package/dist/index.cjs.js
CHANGED
@@ -16,9 +16,10 @@ var child_process = require('child_process');
 var stream = require('stream');
 var azureDevopsNodeApi = require('azure-devops-node-api');
 var fetch = require('node-fetch');
+var crypto = require('crypto');
 var octokit = require('octokit');
-var lodash = require('lodash');
 var octokitPluginCreatePullRequest = require('octokit-plugin-create-pull-request');
+var limiterFactory = require('p-limit');
 var node = require('@gitbeaker/node');
 var webhooks = require('@octokit/webhooks');
 var uuid = require('uuid');
@@ -26,12 +27,15 @@ var luxon = require('luxon');
 var ObservableImpl = require('zen-observable');
 var winston = require('winston');
 var nunjucks = require('nunjucks');
+var lodash = require('lodash');
 var jsonschema = require('jsonschema');
+var pluginScaffolderCommon = require('@backstage/plugin-scaffolder-common');
 var express = require('express');
 var Router = require('express-promise-router');
+var zod = require('zod');
+var url = require('url');
 var os = require('os');
 var pluginCatalogBackend = require('@backstage/plugin-catalog-backend');
-var pluginScaffolderCommon = require('@backstage/plugin-scaffolder-common');
 
 function _interopDefaultLegacy (e) { return e && typeof e === 'object' && 'default' in e ? e : { 'default': e }; }
 
@@ -58,6 +62,8 @@ var yaml__namespace = /*#__PURE__*/_interopNamespace(yaml);
 var path__default = /*#__PURE__*/_interopDefaultLegacy(path);
 var globby__default = /*#__PURE__*/_interopDefaultLegacy(globby);
 var fetch__default = /*#__PURE__*/_interopDefaultLegacy(fetch);
+var crypto__default = /*#__PURE__*/_interopDefaultLegacy(crypto);
+var limiterFactory__default = /*#__PURE__*/_interopDefaultLegacy(limiterFactory);
 var ObservableImpl__default = /*#__PURE__*/_interopDefaultLegacy(ObservableImpl);
 var winston__namespace = /*#__PURE__*/_interopNamespace(winston);
 var nunjucks__default = /*#__PURE__*/_interopDefaultLegacy(nunjucks);
@@ -115,6 +121,18 @@ function createCatalogRegisterAction(options) {
           }
         }
       ]
+      },
+      output: {
+        type: "object",
+        required: ["catalogInfoUrl"],
+        properties: {
+          entityRef: {
+            type: "string"
+          },
+          catalogInfoUrl: {
+            type: "string"
+          }
+        }
       }
     },
     async handler(ctx) {
@@ -188,6 +206,7 @@ function createCatalogWriteAction() {
         }
       }
     },
+    supportsDryRun: true,
    async handler(ctx) {
      ctx.logStream.write(`Writing catalog-info.yaml`);
      const { filePath, entity } = ctx.input;
@@ -219,6 +238,7 @@ function createDebugLogAction() {
        }
      }
    },
+    supportsDryRun: true,
    async handler(ctx) {
      var _a, _b;
      ctx.logger.info(JSON.stringify(ctx.input, null, 2));
@@ -304,6 +324,7 @@ function createFetchPlainAction(options) {
        }
      }
    },
+    supportsDryRun: true,
    async handler(ctx) {
      var _a, _b;
      ctx.logger.info("Fetching plain content from remote URL");
@@ -470,6 +491,7 @@ function createFetchTemplateAction(options) {
        }
      }
    },
+    supportsDryRun: true,
    async handler(ctx) {
      var _a, _b;
      ctx.logger.info("Fetching template content from remote URL");
@@ -502,13 +524,15 @@ function createFetchTemplateAction(options) {
        cwd: templateDir,
        dot: true,
        onlyFiles: false,
-        markDirectories: true
+        markDirectories: true,
+        followSymbolicLinks: false
      });
      const nonTemplatedEntries = new Set((await Promise.all((ctx.input.copyWithoutRender || []).map((pattern) => globby__default["default"](pattern, {
        cwd: templateDir,
        dot: true,
        onlyFiles: false,
-        markDirectories: true
+        markDirectories: true,
+        followSymbolicLinks: false
      })))).flat());
      const { cookiecutterCompat, values } = ctx.input;
      const context = {
@@ -583,6 +607,7 @@ const createFilesystemDeleteAction = () => {
        }
      }
    },
+    supportsDryRun: true,
    async handler(ctx) {
      var _a;
      if (!Array.isArray((_a = ctx.input) == null ? void 0 : _a.files)) {
@@ -637,6 +662,7 @@ const createFilesystemRenameAction = () => {
        }
      }
    },
+    supportsDryRun: true,
    async handler(ctx) {
      var _a, _b;
      if (!Array.isArray((_a = ctx.input) == null ? void 0 : _a.files)) {
@@ -825,11 +851,6 @@ const parseRepoUrl = (repoUrl, integrations) => {
   }
   return { host, owner, repo, organization, workspace, project };
 };
-const isExecutable = (fileMode) => {
-  const executeBitMask = 73;
-  const res = fileMode & executeBitMask;
-  return res > 0;
-};
 
 function createPublishAzureAction(options) {
   const { integrations, config } = options;
@@ -1578,6 +1599,138 @@ function createPublishFileAction() {
   });
 }
 
+const createGerritProject = async (config, options) => {
+  const { projectName, parent, owner, description } = options;
+  const fetchOptions = {
+    method: "PUT",
+    body: JSON.stringify({
+      parent,
+      description,
+      owners: [owner],
+      create_empty_commit: false
+    }),
+    headers: {
+      ...integration.getGerritRequestOptions(config).headers,
+      "Content-Type": "application/json"
+    }
+  };
+  const response = await fetch__default["default"](`${config.baseUrl}/a/projects/${encodeURIComponent(projectName)}`, fetchOptions);
+  if (response.status !== 201) {
+    throw new Error(`Unable to create repository, ${response.status} ${response.statusText}, ${await response.text()}`);
+  }
+};
+const generateCommitMessage = (config, commitSubject) => {
+  const changeId = crypto__default["default"].randomBytes(20).toString("hex");
+  const msg = `${config.getOptionalString("scaffolder.defaultCommitMessage") || commitSubject}
+
+Change-Id: I${changeId}`;
+  return msg;
+};
+function createPublishGerritAction(options) {
+  const { integrations, config } = options;
+  return createTemplateAction({
+    id: "publish:gerrit",
+    description: "Initializes a git repository of the content in the workspace, and publishes it to Gerrit.",
+    schema: {
+      input: {
+        type: "object",
+        required: ["repoUrl"],
+        properties: {
+          repoUrl: {
+            title: "Repository Location",
+            type: "string"
+          },
+          description: {
+            title: "Repository Description",
+            type: "string"
+          },
+          defaultBranch: {
+            title: "Default Branch",
+            type: "string",
+            description: `Sets the default branch on the repository. The default value is 'master'`
+          },
+          gitCommitMessage: {
+            title: "Git Commit Message",
+            type: "string",
+            description: `Sets the commit message on the repository. The default value is 'initial commit'`
+          },
+          gitAuthorName: {
+            title: "Default Author Name",
+            type: "string",
+            description: `Sets the default author name for the commit. The default value is 'Scaffolder'`
+          },
+          gitAuthorEmail: {
+            title: "Default Author Email",
+            type: "string",
+            description: `Sets the default author email for the commit.`
+          }
+        }
+      },
+      output: {
+        type: "object",
+        properties: {
+          remoteUrl: {
+            title: "A URL to the repository with the provider",
+            type: "string"
+          },
+          repoContentsUrl: {
+            title: "A URL to the root of the repository",
+            type: "string"
+          }
+        }
+      }
+    },
+    async handler(ctx) {
+      const {
+        repoUrl,
+        description,
+        defaultBranch = "master",
+        gitAuthorName,
+        gitAuthorEmail,
+        gitCommitMessage = "initial commit"
+      } = ctx.input;
+      const { repo, host, owner, workspace } = parseRepoUrl(repoUrl, integrations);
+      const integrationConfig = integrations.gerrit.byHost(host);
+      if (!integrationConfig) {
+        throw new errors.InputError(`No matching integration configuration for host ${host}, please check your integrations config`);
+      }
+      if (!owner) {
+        throw new errors.InputError(`Invalid URL provider was included in the repo URL to create ${ctx.input.repoUrl}, missing owner`);
+      }
+      if (!workspace) {
+        throw new errors.InputError(`Invalid URL provider was included in the repo URL to create ${ctx.input.repoUrl}, missing workspace`);
+      }
+      await createGerritProject(integrationConfig.config, {
+        description,
+        owner,
+        projectName: repo,
+        parent: workspace
+      });
+      const auth = {
+        username: integrationConfig.config.username,
+        password: integrationConfig.config.password
+      };
+      const gitAuthorInfo = {
+        name: gitAuthorName ? gitAuthorName : config.getOptionalString("scaffolder.defaultAuthor.name"),
+        email: gitAuthorEmail ? gitAuthorEmail : config.getOptionalString("scaffolder.defaultAuthor.email")
+      };
+      const remoteUrl = `${integrationConfig.config.cloneUrl}/a/${repo}`;
+      await initRepoAndPush({
+        dir: getRepoSourceDirectory(ctx.workspacePath, void 0),
+        remoteUrl,
+        auth,
+        defaultBranch,
+        logger: ctx.logger,
+        commitMessage: generateCommitMessage(config, gitCommitMessage),
+        gitAuthorInfo
+      });
+      const repoContentsUrl = `${integrationConfig.config.gitilesBaseUrl}/${repo}/+/refs/heads/${defaultBranch}`;
+      ctx.output("remoteUrl", remoteUrl);
+      ctx.output("repoContentsUrl", repoContentsUrl);
+    }
+  });
+}
+
 const DEFAULT_TIMEOUT_MS = 6e4;
 async function getOctokitOptions(options) {
   var _a;
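
For context on the new publish:gerrit action above, here is a minimal sketch of constructing it outside of createBuiltinActions. The host, credential values, and repoUrl format below are illustrative assumptions, not part of this diff; the config field names simply mirror what the handler reads from integrationConfig.config (baseUrl, cloneUrl, gitilesBaseUrl, username, password).

// Sketch only: wire up the new Gerrit publish action from integration config.
// Host, credentials, and URLs are placeholder values, not part of this diff.
const { ConfigReader } = require('@backstage/config');
const { ScmIntegrations } = require('@backstage/integration');
const { createPublishGerritAction } = require('@backstage/plugin-scaffolder-backend');

const config = new ConfigReader({
  integrations: {
    gerrit: [
      {
        host: 'gerrit.example.com',
        baseUrl: 'https://gerrit.example.com',
        cloneUrl: 'https://gerrit.example.com',
        gitilesBaseUrl: 'https://gerrit.example.com/plugins/gitiles',
        username: 'scaffolder-bot',
        password: 'example-http-password',
      },
    ],
  },
});
const integrations = ScmIntegrations.fromConfig(config);

// The handler resolves its target via parseRepoUrl, so a template would pass
// something like: repoUrl: gerrit.example.com?owner=group&workspace=parent&repo=my-project
const gerritAction = createPublishGerritAction({ integrations, config });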
@@ -1662,6 +1815,11 @@ function createPublishGithubAction(options) {
            type: "string",
            description: `Sets the default branch on the repository. The default value is 'master'`
          },
+          protectDefaultBranch: {
+            title: "Protect Default Branch",
+            type: "boolean",
+            description: `Protect the default branch after creating the repository. The default value is 'true'`
+          },
          deleteBranchOnMerge: {
            title: "Delete Branch On Merge",
            type: "boolean",
@@ -1704,22 +1862,36 @@ function createPublishGithubAction(options) {
          },
          collaborators: {
            title: "Collaborators",
-            description: "Provide additional users with permissions",
+            description: "Provide additional users or teams with permissions",
            type: "array",
            items: {
              type: "object",
-
+              additionalProperties: false,
+              required: ["access"],
              properties: {
                access: {
                  type: "string",
                  description: "The type of access for the user",
                  enum: ["push", "pull", "admin", "maintain", "triage"]
                },
+                user: {
+                  type: "string",
+                  description: "The name of the user that will be added as a collaborator"
+                },
                username: {
                  type: "string",
-                  description: "
+                  description: "Deprecated. Use the `team` or `user` field instead."
+                },
+                team: {
+                  type: "string",
+                  description: "The name of the team that will be added as a collaborator"
                }
-              }
+              },
+              oneOf: [
+                { required: ["user"] },
+                { required: ["username"] },
+                { required: ["team"] }
+              ]
            }
          },
          token: {
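
To illustrate the reworked collaborators schema above, a hedged sketch of a publish:github input it accepts; the repository, user, and team names are placeholders.

// Sketch only: each collaborator entry carries `access` plus exactly one of
// `user`, `team`, or the deprecated `username` (enforced by the oneOf above).
const exampleInput = {
  repoUrl: 'github.com?owner=acme&repo=new-service',
  collaborators: [
    { user: 'octocat', access: 'push' },
    { team: 'platform-team', access: 'maintain' },
    { username: 'legacy-team-slug', access: 'pull' }, // deprecated spelling
  ],
};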
@@ -1759,6 +1931,7 @@ function createPublishGithubAction(options) {
        requiredStatusCheckContexts = [],
        repoVisibility = "private",
        defaultBranch = "master",
+        protectDefaultBranch = true,
        deleteBranchOnMerge = false,
        gitCommitMessage = "initial commit",
        gitAuthorName,
@@ -1803,7 +1976,16 @@ function createPublishGithubAction(options) {
        allow_squash_merge: allowSquashMerge,
        allow_rebase_merge: allowRebaseMerge
      });
-
+      let newRepo;
+      try {
+        newRepo = (await repoCreationPromise).data;
+      } catch (e) {
+        errors.assertError(e);
+        if (e.message === "Resource not accessible by integration") {
+          ctx.logger.warn(`The GitHub app or token provided may not have the required permissions to create the ${user.data.type} repository ${owner}/${repo}.`);
+        }
+        throw new Error(`Failed to create the ${user.data.type} repository ${owner}/${repo}, ${e.message}`);
+      }
      if (access == null ? void 0 : access.startsWith(`${owner}/`)) {
        const [, team] = access.split("/");
        await client.rest.teams.addOrUpdateRepoPermissionsInOrg({
@@ -1822,21 +2004,37 @@ function createPublishGithubAction(options) {
        });
      }
      if (collaborators) {
-        for (const {
-          access: permission,
-          username: team_slug
-        } of collaborators) {
+        for (const collaborator of collaborators) {
          try {
-
-
-
-
-
-
-
+            if ("user" in collaborator) {
+              await client.rest.repos.addCollaborator({
+                owner,
+                repo,
+                username: collaborator.user,
+                permission: collaborator.access
+              });
+            } else if ("username" in collaborator) {
+              ctx.logger.warn("The field `username` is deprecated in favor of `team` and will be removed in the future.");
+              await client.rest.teams.addOrUpdateRepoPermissionsInOrg({
+                org: owner,
+                team_slug: collaborator.username,
+                owner,
+                repo,
+                permission: collaborator.access
+              });
+            } else if ("team" in collaborator) {
+              await client.rest.teams.addOrUpdateRepoPermissionsInOrg({
+                org: owner,
+                team_slug: collaborator.team,
+                owner,
+                repo,
+                permission: collaborator.access
+              });
+            }
          } catch (e) {
            errors.assertError(e);
-
+            const name = extractCollaboratorName(collaborator);
+            ctx.logger.warn(`Skipping ${collaborator.access} access for ${name}, ${e.message}`);
          }
        }
      }
@@ -1870,25 +2068,69 @@ function createPublishGithubAction(options) {
        commitMessage: gitCommitMessage ? gitCommitMessage : config.getOptionalString("scaffolder.defaultCommitMessage"),
        gitAuthorInfo
      });
-
-
-
-
-
-
-
-
-
-
-
-
-
+      if (protectDefaultBranch) {
+        try {
+          await enableBranchProtectionOnDefaultRepoBranch({
+            owner,
+            client,
+            repoName: newRepo.name,
+            logger: ctx.logger,
+            defaultBranch,
+            requireCodeOwnerReviews,
+            requiredStatusCheckContexts
+          });
+        } catch (e) {
+          errors.assertError(e);
+          ctx.logger.warn(`Skipping: default branch protection on '${newRepo.name}', ${e.message}`);
+        }
      }
      ctx.output("remoteUrl", remoteUrl);
      ctx.output("repoContentsUrl", repoContentsUrl);
    }
  });
}
+function extractCollaboratorName(collaborator) {
+  if ("username" in collaborator)
+    return collaborator.username;
+  if ("user" in collaborator)
+    return collaborator.user;
+  return collaborator.team;
+}
+
+const DEFAULT_GLOB_PATTERNS = ["./**", "!.git"];
+const isExecutable = (fileMode) => {
+  if (!fileMode) {
+    return false;
+  }
+  const executeBitMask = 73;
+  const res = fileMode & executeBitMask;
+  return res > 0;
+};
+async function serializeDirectoryContents(sourcePath, options) {
+  var _a;
+  const paths = await globby__default["default"]((_a = options == null ? void 0 : options.globPatterns) != null ? _a : DEFAULT_GLOB_PATTERNS, {
+    cwd: sourcePath,
+    dot: true,
+    gitignore: options == null ? void 0 : options.gitignore,
+    followSymbolicLinks: false,
+    objectMode: true,
+    stats: true
+  });
+  const limiter = limiterFactory__default["default"](10);
+  return Promise.all(paths.map(async ({ path: path$1, stats }) => ({
+    path: path$1,
+    content: await limiter(async () => fs__default["default"].readFile(path.join(sourcePath, path$1))),
+    executable: isExecutable(stats == null ? void 0 : stats.mode)
+  })));
+}
+
+async function deserializeDirectoryContents(targetPath, files) {
+  for (const file of files) {
+    const filePath = backendCommon.resolveSafeChildPath(targetPath, file.path);
+    await fs__default["default"].ensureDir(path.dirname(filePath));
+    await fs__default["default"].writeFile(filePath, file.content);
+  }
+}
 
 class GithubResponseError extends errors.CustomErrorBase {
 }
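
The executeBitMask of 73 in the new isExecutable helper is 0o111, i.e. the owner, group, and other execute bits. A small sketch of the same check and how it maps onto the GitHub tree modes used further down; the mode values are illustrative.

// Sketch: 73 === 0o111, so any set execute bit marks the file as executable.
const isExecutableMode = (fileMode) => !!fileMode && (fileMode & 0o111) > 0;

console.log(isExecutableMode(0o755));     // true  -> serialized as mode "100755"
console.log(isExecutableMode(0o644));     // false -> serialized as mode "100644"
console.log(isExecutableMode(undefined)); // false, stats may be missing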
@@ -2005,38 +2247,28 @@ const createPublishGithubPullRequestAction = ({
        token: providedToken
      });
      const fileRoot = sourcePath ? backendCommon.resolveSafeChildPath(ctx.workspacePath, sourcePath) : ctx.workspacePath;
-      const
-
-        gitignore: true,
-        dot: true
-      });
-      const fileContents = await Promise.all(localFilePaths.map((filePath) => {
-        const absPath = backendCommon.resolveSafeChildPath(fileRoot, filePath);
-        const base64EncodedContent = fs__default["default"].readFileSync(absPath).toString("base64");
-        const fileStat = fs__default["default"].statSync(absPath);
-        const githubTreeItemMode = isExecutable(fileStat.mode) ? "100755" : "100644";
-        const encoding = "base64";
-        return {
-          encoding,
-          content: base64EncodedContent,
-          mode: githubTreeItemMode
-        };
-      }));
-      const repoFilePaths = localFilePaths.map((repoFilePath) => {
-        return targetPath ? `${targetPath}/${repoFilePath}` : repoFilePath;
+      const directoryContents = await serializeDirectoryContents(fileRoot, {
+        gitignore: true
      });
-      const
+      const files = Object.fromEntries(directoryContents.map((file) => [
+        targetPath ? path__default["default"].posix.join(targetPath, file.path) : file.path,
        {
-
-
+          mode: file.executable ? "100755" : "100644",
+          encoding: "base64",
+          content: file.content.toString("base64")
        }
-      ];
+      ]));
      try {
        const response = await client.createPullRequest({
          owner,
          repo,
          title,
-          changes
+          changes: [
+            {
+              files,
+              commit: title
+            }
+          ],
          body: description,
          head: branchName,
          draft
@@ -2244,7 +2476,6 @@ const createPublishGitlabMergeRequestAction = (options) => {
      const repoUrl = ctx.input.repoUrl;
      const { host } = parseRepoUrl(repoUrl, integrations);
      const integrationConfig = integrations.gitlab.byHost(host);
-      const actions = [];
      const destinationBranch = ctx.input.branchName;
      if (!integrationConfig) {
        throw new errors.InputError(`No matching integration configuration for host ${host}, please check your integrations config`);
@@ -2258,23 +2489,17 @@ const createPublishGitlabMergeRequestAction = (options) => {
        host: integrationConfig.config.baseUrl,
        [tokenType]: token
      });
-      const
-      const
-
-        gitignore: true,
-        dot: true
+      const targetPath = backendCommon.resolveSafeChildPath(ctx.workspacePath, ctx.input.targetPath);
+      const fileContents = await serializeDirectoryContents(targetPath, {
+        gitignore: true
      });
-      const
-
-
-
-
-
-
-        filePath: repoFilePaths[i],
-        content: fileContents[i].toString()
-      });
-      }
+      const actions = fileContents.map((file) => ({
+        action: "create",
+        filePath: path__default["default"].posix.join(ctx.input.targetPath, file.path),
+        encoding: "base64",
+        content: file.content.toString("base64"),
+        execute_filemode: file.executable
+      }));
      const projects = await api.Projects.show(ctx.input.projectid);
      const { default_branch: defaultBranch } = projects;
      try {
@@ -2566,6 +2791,10 @@ const createBuiltinActions = (options) => {
      reader,
      additionalTemplateFilters
    }),
+    createPublishGerritAction({
+      integrations,
+      config
+    }),
    createPublishGithubAction({
      integrations,
      config,
@@ -2642,6 +2871,12 @@ class TemplateActionRegistry {
 }
 
 const migrationsDir = backendCommon.resolvePackagePath("@backstage/plugin-scaffolder-backend", "migrations");
+const parseSqlDateToIsoString = (input) => {
+  if (typeof input === "string") {
+    return luxon.DateTime.fromSQL(input, { zone: "UTC" }).toISO();
+  }
+  return input;
+};
 class DatabaseTaskStore {
   static async create(options) {
     await options.database.migrate.latest({
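
A short sketch of what the new parseSqlDateToIsoString helper normalizes: string timestamps returned by SQL drivers (SQLite, for example) are converted to ISO 8601 in UTC, while anything else passes through unchanged. The timestamp value below is illustrative.

// Sketch: mirrors parseSqlDateToIsoString for a SQL-formatted timestamp string.
const { DateTime } = require('luxon');

const fromSqlite = '2022-06-01 12:34:56'; // placeholder value
console.log(DateTime.fromSQL(fromSqlite, { zone: 'UTC' }).toISO());
// e.g. "2022-06-01T12:34:56.000Z"

const fromPostgres = new Date(); // non-string inputs are returned unchanged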
@@ -2652,6 +2887,27 @@ class DatabaseTaskStore {
  constructor(options) {
    this.db = options.database;
  }
+  async list(options) {
+    const queryBuilder = this.db("tasks");
+    if (options.createdBy) {
+      queryBuilder.where({
+        created_by: options.createdBy
+      });
+    }
+    const results = await queryBuilder.orderBy("created_at", "desc").select();
+    const tasks = results.map((result) => {
+      var _a;
+      return {
+        id: result.id,
+        spec: JSON.parse(result.spec),
+        status: result.status,
+        createdBy: (_a = result.created_by) != null ? _a : void 0,
+        lastHeartbeatAt: parseSqlDateToIsoString(result.last_heartbeat_at),
+        createdAt: parseSqlDateToIsoString(result.created_at)
+      };
+    });
+    return { tasks };
+  }
  async getTask(taskId) {
    var _a;
    const [result] = await this.db("tasks").where({ id: taskId }).select();
@@ -2665,8 +2921,8 @@ class DatabaseTaskStore {
      id: result.id,
      spec,
      status: result.status,
-      lastHeartbeatAt: result.last_heartbeat_at,
-      createdAt: result.created_at,
+      lastHeartbeatAt: parseSqlDateToIsoString(result.last_heartbeat_at),
+      createdAt: parseSqlDateToIsoString(result.created_at),
      createdBy: (_a = result.created_by) != null ? _a : void 0,
      secrets
    };
@@ -2803,7 +3059,7 @@ class DatabaseTaskStore {
        taskId,
        body,
        type: event.event_type,
-        createdAt:
+        createdAt: parseSqlDateToIsoString(event.created_at)
      };
    } catch (error) {
      throw new Error(`Failed to parse event body from event taskId=${taskId} id=${event.id}, ${error}`);
@@ -2886,6 +3142,12 @@ class StorageTaskBroker {
    this.logger = logger;
    this.deferredDispatch = defer();
  }
+  async list(options) {
+    if (!this.storage.list) {
+      throw new Error("TaskStore does not implement the list method. Please implement the list method to be able to list tasks");
+    }
+    return await this.storage.list({ createdBy: options == null ? void 0 : options.createdBy });
+  }
  async claim() {
    for (; ; ) {
      const pendingTask = await this.storage.claimTask();
@@ -2959,6 +3221,32 @@ class StorageTaskBroker {
 function isTruthy(value) {
   return lodash.isArray(value) ? value.length > 0 : !!value;
 }
+function generateExampleOutput(schema) {
+  var _a, _b;
+  const { examples } = schema;
+  if (examples && Array.isArray(examples)) {
+    return examples[0];
+  }
+  if (schema.type === "object") {
+    return Object.fromEntries(Object.entries((_a = schema.properties) != null ? _a : {}).map(([key, value]) => [
+      key,
+      generateExampleOutput(value)
+    ]));
+  } else if (schema.type === "array") {
+    const [firstSchema] = (_b = [schema.items]) == null ? void 0 : _b.flat();
+    if (firstSchema) {
+      return [generateExampleOutput(firstSchema)];
+    }
+    return [];
+  } else if (schema.type === "string") {
+    return "<example>";
+  } else if (schema.type === "number") {
+    return 0;
+  } else if (schema.type === "boolean") {
+    return false;
+  }
+  return "<unknown>";
+}
 
 const isValidTaskSpec = (taskSpec) => {
   return taskSpec.apiVersion === "scaffolder.backstage.io/v1beta3";
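
To show what generateExampleOutput yields when a dry run skips a step (see the workflow-runner change below), here is a sketch with a made-up output schema.

// Sketch only: hypothetical action output schema, not taken from this diff.
const exampleSchema = {
  type: 'object',
  properties: {
    remoteUrl: { type: 'string' },
    issueNumbers: { type: 'array', items: { type: 'number' } },
    draft: { type: 'boolean' },
  },
};

console.log(generateExampleOutput(exampleSchema));
// { remoteUrl: '<example>', issueNumbers: [ 0 ], draft: false }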
@@ -3028,7 +3316,7 @@ class NunjucksWorkflowRunner {
    });
  }
  async execute(task) {
-    var _a, _b, _c, _d;
+    var _a, _b, _c, _d, _e;
    if (!isValidTaskSpec(task.spec)) {
      throw new errors.InputError("Wrong template version executed with the workflow engine");
    }
@@ -3063,8 +3351,23 @@ class NunjucksWorkflowRunner {
      });
      const action = this.options.actionRegistry.get(step.action);
      const { taskLogger, streamLogger } = createStepLogger({ task, step });
-
-
+      if (task.isDryRun && !action.supportsDryRun) {
+        task.emitLog(`Skipping because ${action.id} does not support dry-run`, {
+          stepId: step.id,
+          status: "skipped"
+        });
+        const outputSchema = (_a = action.schema) == null ? void 0 : _a.output;
+        if (outputSchema) {
+          context.steps[step.id] = {
+            output: generateExampleOutput(outputSchema)
+          };
+        } else {
+          context.steps[step.id] = { output: {} };
+        }
+        continue;
+      }
+      const input = (_c = step.input && this.render(step.input, { ...context, secrets: (_b = task.secrets) != null ? _b : {} }, renderTemplate)) != null ? _c : {};
+      if ((_d = action.schema) == null ? void 0 : _d.input) {
        const validateResult = jsonschema.validate(input, action.schema.input);
        if (!validateResult.valid) {
          const errors$1 = validateResult.errors.join(", ");
@@ -3075,7 +3378,7 @@ class NunjucksWorkflowRunner {
      const stepOutput = {};
      await action.handler({
        input,
-        secrets: (
+        secrets: (_e = task.secrets) != null ? _e : {},
        logger: taskLogger,
        logStream: streamLogger,
        workspacePath,
@@ -3164,6 +3467,95 @@ class TaskWorker {
   }
 }
 
+class DecoratedActionsRegistry extends TemplateActionRegistry {
+  constructor(innerRegistry, extraActions) {
+    super();
+    this.innerRegistry = innerRegistry;
+    for (const action of extraActions) {
+      this.register(action);
+    }
+  }
+  get(actionId) {
+    try {
+      return super.get(actionId);
+    } catch {
+      return this.innerRegistry.get(actionId);
+    }
+  }
+}
+
+function createDryRunner(options) {
+  return async function dryRun(input) {
+    let contentPromise;
+    const workflowRunner = new NunjucksWorkflowRunner({
+      ...options,
+      actionRegistry: new DecoratedActionsRegistry(options.actionRegistry, [
+        createTemplateAction({
+          id: "dry-run:extract",
+          supportsDryRun: true,
+          async handler(ctx) {
+            contentPromise = serializeDirectoryContents(ctx.workspacePath);
+            await contentPromise.catch(() => {
+            });
+          }
+        })
+      ])
+    });
+    const dryRunId = uuid.v4();
+    const log = new Array();
+    const contentsPath = backendCommon.resolveSafeChildPath(options.workingDirectory, `dry-run-content-${dryRunId}`);
+    try {
+      await deserializeDirectoryContents(contentsPath, input.directoryContents);
+      const result = await workflowRunner.execute({
+        spec: {
+          ...input.spec,
+          steps: [
+            ...input.spec.steps,
+            {
+              id: dryRunId,
+              name: "dry-run:extract",
+              action: "dry-run:extract"
+            }
+          ],
+          templateInfo: {
+            entityRef: "template:default/dry-run",
+            baseUrl: url.pathToFileURL(backendCommon.resolveSafeChildPath(contentsPath, "template.yaml")).toString()
+          }
+        },
+        secrets: input.secrets,
+        done: false,
+        isDryRun: true,
+        getWorkspaceName: async () => `dry-run-${dryRunId}`,
+        async emitLog(message, logMetadata) {
+          if ((logMetadata == null ? void 0 : logMetadata.stepId) === dryRunId) {
+            return;
+          }
+          log.push({
+            body: {
+              ...logMetadata,
+              message
+            }
+          });
+        },
+        async complete() {
+          throw new Error("Not implemented");
+        }
+      });
+      if (!contentPromise) {
+        throw new Error("Content extraction step was skipped");
+      }
+      const directoryContents = await contentPromise;
+      return {
+        log,
+        directoryContents,
+        output: result.output
+      };
+    } finally {
+      await fs__default["default"].remove(contentsPath);
+    }
+  };
+}
+
 async function getWorkingDirectory(config, logger) {
   if (!config.has("backend.workingDirectory")) {
     return os__default["default"].tmpdir();
@@ -3261,6 +3653,13 @@ async function createRouter(options) {
  });
  actionsToRegister.forEach((action) => actionRegistry.register(action));
  workers.forEach((worker) => worker.start());
+  const dryRunner = createDryRunner({
+    actionRegistry,
+    integrations,
+    logger,
+    workingDirectory,
+    additionalTemplateFilters
+  });
  router.get("/v2/templates/:namespace/:kind/:name/parameter-schema", async (req, res) => {
    var _a, _b;
    const { namespace, kind, name } = req.params;
@@ -3353,6 +3752,18 @@ async function createRouter(options) {
      }
    });
    res.status(201).json({ id: result.taskId });
+  }).get("/v2/tasks", async (req, res) => {
+    const [userEntityRef] = [req.query.createdBy].flat();
+    if (typeof userEntityRef !== "string" && typeof userEntityRef !== "undefined") {
+      throw new errors.InputError("createdBy query parameter must be a string");
+    }
+    if (!taskBroker.list) {
+      throw new Error("TaskBroker does not support listing tasks, please implement the list method on the TaskBroker.");
+    }
+    const tasks = await taskBroker.list({
+      createdBy: userEntityRef
+    });
+    res.status(200).json(tasks);
  }).get("/v2/tasks/:taskId", async (req, res) => {
    const { taskId } = req.params;
    const task = await taskBroker.get(taskId);
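
A hedged sketch of calling the new task-listing route from a client; the base URL, bearer token, and entity ref are placeholders (the scaffolder backend is typically mounted under /api/scaffolder).

// Sketch only: list tasks created by a given user entity ref.
const response = await fetch(
  'https://backstage.example.com/api/scaffolder/v2/tasks?createdBy=' +
    encodeURIComponent('user:default/jane.doe'),
  { headers: { Authorization: `Bearer ${exampleToken}` } },
);
const { tasks } = await response.json();
// Each entry mirrors DatabaseTaskStore.list():
// { id, spec, status, createdBy, lastHeartbeatAt, createdAt }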
@@ -3415,6 +3826,62 @@ data: ${JSON.stringify(event)}
      subscription.unsubscribe();
      clearTimeout(timeout);
    });
+  }).post("/v2/dry-run", async (req, res) => {
+    var _a, _b, _c;
+    const bodySchema = zod.z.object({
+      template: zod.z.unknown(),
+      values: zod.z.record(zod.z.unknown()),
+      secrets: zod.z.record(zod.z.string()).optional(),
+      directoryContents: zod.z.array(zod.z.object({ path: zod.z.string(), base64Content: zod.z.string() }))
+    });
+    const body = await bodySchema.parseAsync(req.body).catch((e) => {
+      throw new errors.InputError(`Malformed request: ${e}`);
+    });
+    const template = body.template;
+    if (!await pluginScaffolderCommon.templateEntityV1beta3Validator.check(template)) {
+      throw new errors.InputError("Input template is not a template");
+    }
+    const { token } = parseBearerToken(req.headers.authorization);
+    for (const parameters of [(_a = template.spec.parameters) != null ? _a : []].flat()) {
+      const result2 = jsonschema.validate(body.values, parameters);
+      if (!result2.valid) {
+        res.status(400).json({ errors: result2.errors });
+        return;
+      }
+    }
+    const steps = template.spec.steps.map((step, index) => {
+      var _a2, _b2;
+      return {
+        ...step,
+        id: (_a2 = step.id) != null ? _a2 : `step-${index + 1}`,
+        name: (_b2 = step.name) != null ? _b2 : step.action
+      };
+    });
+    const result = await dryRunner({
+      spec: {
+        apiVersion: template.apiVersion,
+        steps,
+        output: (_b = template.spec.output) != null ? _b : {},
+        parameters: body.values
+      },
+      directoryContents: ((_c = body.directoryContents) != null ? _c : []).map((file) => ({
+        path: file.path,
+        content: Buffer.from(file.base64Content, "base64")
+      })),
+      secrets: {
+        ...body.secrets,
+        ...token && { backstageToken: token }
+      }
+    });
+    res.status(200).json({
+      ...result,
+      steps,
+      directoryContents: result.directoryContents.map((file) => ({
+        path: file.path,
+        executable: file.executable,
+        base64Content: file.content.toString("base64")
+      }))
+    });
  });
  const app = express__default["default"]();
  app.set("logger", logger);
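
For reference, a hedged sketch of a request body that satisfies the zod schema of the new dry-run route; the template entity, step, and file content are minimal placeholders, and the template must still pass templateEntityV1beta3Validator.

// Sketch only: minimal POST /v2/dry-run payload; all values are placeholders.
const payload = {
  template: {
    apiVersion: 'scaffolder.backstage.io/v1beta3',
    kind: 'Template',
    metadata: { name: 'dry-run-demo' },
    spec: {
      type: 'service',
      steps: [{ action: 'debug:log', input: { message: 'hello' } }],
    },
  },
  values: {},
  secrets: {},
  directoryContents: [
    {
      path: 'catalog-info.yaml',
      base64Content: Buffer.from('apiVersion: backstage.io/v1alpha1\n').toString('base64'),
    },
  ],
};

const res = await fetch('https://backstage.example.com/api/scaffolder/v2/dry-run', {
  method: 'POST',
  headers: { 'Content-Type': 'application/json', Authorization: `Bearer ${exampleToken}` },
  body: JSON.stringify(payload),
});
// Response body: { log, steps, output, directoryContents: [{ path, executable, base64Content }] }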
@@ -3504,6 +3971,7 @@ exports.createPublishBitbucketAction = createPublishBitbucketAction;
 exports.createPublishBitbucketCloudAction = createPublishBitbucketCloudAction;
 exports.createPublishBitbucketServerAction = createPublishBitbucketServerAction;
 exports.createPublishFileAction = createPublishFileAction;
+exports.createPublishGerritAction = createPublishGerritAction;
 exports.createPublishGithubAction = createPublishGithubAction;
 exports.createPublishGithubPullRequestAction = createPublishGithubPullRequestAction;
 exports.createPublishGitlabAction = createPublishGitlabAction;