@backstage/plugin-scaffolder-backend 1.2.0-next.1 → 1.3.0-next.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +69 -0
- package/dist/index.cjs.js +454 -68
- package/dist/index.cjs.js.map +1 -1
- package/dist/index.d.ts +26 -1
- package/package.json +15 -13
package/dist/index.cjs.js
CHANGED
@@ -16,9 +16,10 @@ var child_process = require('child_process');
 var stream = require('stream');
 var azureDevopsNodeApi = require('azure-devops-node-api');
 var fetch = require('node-fetch');
+var crypto = require('crypto');
 var octokit = require('octokit');
-var lodash = require('lodash');
 var octokitPluginCreatePullRequest = require('octokit-plugin-create-pull-request');
+var limiterFactory = require('p-limit');
 var node = require('@gitbeaker/node');
 var webhooks = require('@octokit/webhooks');
 var uuid = require('uuid');
@@ -26,12 +27,15 @@ var luxon = require('luxon');
 var ObservableImpl = require('zen-observable');
 var winston = require('winston');
 var nunjucks = require('nunjucks');
+var lodash = require('lodash');
 var jsonschema = require('jsonschema');
+var pluginScaffolderCommon = require('@backstage/plugin-scaffolder-common');
 var express = require('express');
 var Router = require('express-promise-router');
+var zod = require('zod');
+var url = require('url');
 var os = require('os');
 var pluginCatalogBackend = require('@backstage/plugin-catalog-backend');
-var pluginScaffolderCommon = require('@backstage/plugin-scaffolder-common');
 
 function _interopDefaultLegacy (e) { return e && typeof e === 'object' && 'default' in e ? e : { 'default': e }; }
 
@@ -58,6 +62,8 @@ var yaml__namespace = /*#__PURE__*/_interopNamespace(yaml);
 var path__default = /*#__PURE__*/_interopDefaultLegacy(path);
 var globby__default = /*#__PURE__*/_interopDefaultLegacy(globby);
 var fetch__default = /*#__PURE__*/_interopDefaultLegacy(fetch);
+var crypto__default = /*#__PURE__*/_interopDefaultLegacy(crypto);
+var limiterFactory__default = /*#__PURE__*/_interopDefaultLegacy(limiterFactory);
 var ObservableImpl__default = /*#__PURE__*/_interopDefaultLegacy(ObservableImpl);
 var winston__namespace = /*#__PURE__*/_interopNamespace(winston);
 var nunjucks__default = /*#__PURE__*/_interopDefaultLegacy(nunjucks);
@@ -115,6 +121,18 @@ function createCatalogRegisterAction(options) {
 }
 }
 ]
+},
+output: {
+type: "object",
+required: ["catalogInfoUrl"],
+properties: {
+entityRef: {
+type: "string"
+},
+catalogInfoUrl: {
+type: "string"
+}
+}
 }
 },
 async handler(ctx) {
@@ -188,6 +206,7 @@ function createCatalogWriteAction() {
 }
 }
 },
+supportsDryRun: true,
 async handler(ctx) {
 ctx.logStream.write(`Writing catalog-info.yaml`);
 const { filePath, entity } = ctx.input;
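The `supportsDryRun: true` flag added here to `catalog:write`, and in the following hunks to `debug:log`, `fetch:plain`, `fetch:template`, `fs:delete`, and `fs:rename`, marks an action as safe to execute during the dry-run flow introduced later in this diff. Below is a minimal sketch of a custom action opting in the same way; only `createTemplateAction` comes from this package, the action id, schema, and handler are illustrative.

```ts
import { createTemplateAction } from '@backstage/plugin-scaffolder-backend';

// Hypothetical custom action that is harmless to run in a dry run.
export const createAcmeReadmeAction = () =>
  createTemplateAction<{ name: string }>({
    id: 'acme:readme',
    schema: {
      input: {
        type: 'object',
        required: ['name'],
        properties: {
          name: { type: 'string', title: 'Project name' },
        },
      },
    },
    // Without this flag the dry-run workflow runner skips the step and
    // substitutes example output instead of calling the handler.
    supportsDryRun: true,
    async handler(ctx) {
      ctx.logger.info(`Would write a README for ${ctx.input.name}`);
    },
  });
```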
@@ -219,6 +238,7 @@ function createDebugLogAction() {
 }
 }
 },
+supportsDryRun: true,
 async handler(ctx) {
 var _a, _b;
 ctx.logger.info(JSON.stringify(ctx.input, null, 2));
@@ -304,6 +324,7 @@ function createFetchPlainAction(options) {
 }
 }
 },
+supportsDryRun: true,
 async handler(ctx) {
 var _a, _b;
 ctx.logger.info("Fetching plain content from remote URL");
@@ -470,6 +491,7 @@ function createFetchTemplateAction(options) {
 }
 }
 },
+supportsDryRun: true,
 async handler(ctx) {
 var _a, _b;
 ctx.logger.info("Fetching template content from remote URL");
@@ -502,13 +524,15 @@ function createFetchTemplateAction(options) {
 cwd: templateDir,
 dot: true,
 onlyFiles: false,
-markDirectories: true
+markDirectories: true,
+followSymbolicLinks: false
 });
 const nonTemplatedEntries = new Set((await Promise.all((ctx.input.copyWithoutRender || []).map((pattern) => globby__default["default"](pattern, {
 cwd: templateDir,
 dot: true,
 onlyFiles: false,
-markDirectories: true
+markDirectories: true,
+followSymbolicLinks: false
 })))).flat());
 const { cookiecutterCompat, values } = ctx.input;
 const context = {
@@ -583,6 +607,7 @@ const createFilesystemDeleteAction = () => {
 }
 }
 },
+supportsDryRun: true,
 async handler(ctx) {
 var _a;
 if (!Array.isArray((_a = ctx.input) == null ? void 0 : _a.files)) {
@@ -637,6 +662,7 @@ const createFilesystemRenameAction = () => {
 }
 }
 },
+supportsDryRun: true,
 async handler(ctx) {
 var _a, _b;
 if (!Array.isArray((_a = ctx.input) == null ? void 0 : _a.files)) {
@@ -825,11 +851,6 @@ const parseRepoUrl = (repoUrl, integrations) => {
 }
 return { host, owner, repo, organization, workspace, project };
 };
-const isExecutable = (fileMode) => {
-const executeBitMask = 73;
-const res = fileMode & executeBitMask;
-return res > 0;
-};
 
 function createPublishAzureAction(options) {
 const { integrations, config } = options;
@@ -1578,6 +1599,138 @@ function createPublishFileAction() {
 });
 }
 
+const createGerritProject = async (config, options) => {
+const { projectName, parent, owner, description } = options;
+const fetchOptions = {
+method: "PUT",
+body: JSON.stringify({
+parent,
+description,
+owners: [owner],
+create_empty_commit: false
+}),
+headers: {
+...integration.getGerritRequestOptions(config).headers,
+"Content-Type": "application/json"
+}
+};
+const response = await fetch__default["default"](`${config.baseUrl}/a/projects/${encodeURIComponent(projectName)}`, fetchOptions);
+if (response.status !== 201) {
+throw new Error(`Unable to create repository, ${response.status} ${response.statusText}, ${await response.text()}`);
+}
+};
+const generateCommitMessage = (config, commitSubject) => {
+const changeId = crypto__default["default"].randomBytes(20).toString("hex");
+const msg = `${config.getOptionalString("scaffolder.defaultCommitMessage") || commitSubject}
+
+Change-Id: I${changeId}`;
+return msg;
+};
+function createPublishGerritAction(options) {
+const { integrations, config } = options;
+return createTemplateAction({
+id: "publish:gerrit",
+description: "Initializes a git repository of the content in the workspace, and publishes it to Gerrit.",
+schema: {
+input: {
+type: "object",
+required: ["repoUrl"],
+properties: {
+repoUrl: {
+title: "Repository Location",
+type: "string"
+},
+description: {
+title: "Repository Description",
+type: "string"
+},
+defaultBranch: {
+title: "Default Branch",
+type: "string",
+description: `Sets the default branch on the repository. The default value is 'master'`
+},
+gitCommitMessage: {
+title: "Git Commit Message",
+type: "string",
+description: `Sets the commit message on the repository. The default value is 'initial commit'`
+},
+gitAuthorName: {
+title: "Default Author Name",
+type: "string",
+description: `Sets the default author name for the commit. The default value is 'Scaffolder'`
+},
+gitAuthorEmail: {
+title: "Default Author Email",
+type: "string",
+description: `Sets the default author email for the commit.`
+}
+}
+},
+output: {
+type: "object",
+properties: {
+remoteUrl: {
+title: "A URL to the repository with the provider",
+type: "string"
+},
+repoContentsUrl: {
+title: "A URL to the root of the repository",
+type: "string"
+}
+}
+}
+},
+async handler(ctx) {
+const {
+repoUrl,
+description,
+defaultBranch = "master",
+gitAuthorName,
+gitAuthorEmail,
+gitCommitMessage = "initial commit"
+} = ctx.input;
+const { repo, host, owner, workspace } = parseRepoUrl(repoUrl, integrations);
+const integrationConfig = integrations.gerrit.byHost(host);
+if (!integrationConfig) {
+throw new errors.InputError(`No matching integration configuration for host ${host}, please check your integrations config`);
+}
+if (!owner) {
+throw new errors.InputError(`Invalid URL provider was included in the repo URL to create ${ctx.input.repoUrl}, missing owner`);
+}
+if (!workspace) {
+throw new errors.InputError(`Invalid URL provider was included in the repo URL to create ${ctx.input.repoUrl}, missing workspace`);
+}
+await createGerritProject(integrationConfig.config, {
+description,
+owner,
+projectName: repo,
+parent: workspace
+});
+const auth = {
+username: integrationConfig.config.username,
+password: integrationConfig.config.password
+};
+const gitAuthorInfo = {
+name: gitAuthorName ? gitAuthorName : config.getOptionalString("scaffolder.defaultAuthor.name"),
+email: gitAuthorEmail ? gitAuthorEmail : config.getOptionalString("scaffolder.defaultAuthor.email")
+};
+const remoteUrl = `${integrationConfig.config.cloneUrl}/a/${repo}`;
+await initRepoAndPush({
+dir: getRepoSourceDirectory(ctx.workspacePath, void 0),
+remoteUrl,
+auth,
+defaultBranch,
+logger: ctx.logger,
+commitMessage: generateCommitMessage(config, gitCommitMessage),
+gitAuthorInfo
+});
+const repoContentsUrl = `${integrationConfig.config.gitilesBaseUrl}/${repo}/+/refs/heads/${defaultBranch}`;
+ctx.output("remoteUrl", remoteUrl);
+ctx.output("repoContentsUrl", repoContentsUrl);
+}
+});
+}
+
 const DEFAULT_TIMEOUT_MS = 6e4;
 async function getOctokitOptions(options) {
 var _a;
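The new `publish:gerrit` action above is also exported as `createPublishGerritAction` and registered automatically through `createBuiltinActions` later in this diff. A minimal sketch of constructing it directly, assuming a standard backend setup where `ScmIntegrations` is built from the app config and the config carries a `gerrit` entry under `integrations` (baseUrl, cloneUrl, gitilesBaseUrl, username, password, as read by the handler above):

```ts
import { ScmIntegrations } from '@backstage/integration';
import { Config } from '@backstage/config';
import { createPublishGerritAction } from '@backstage/plugin-scaffolder-backend';

export function buildGerritAction(config: Config) {
  const integrations = ScmIntegrations.fromConfig(config);
  // The returned action can be passed to createRouter's `actions` option,
  // though in this release it is already part of the built-in action set.
  return createPublishGerritAction({ integrations, config });
}
```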
@@ -1662,6 +1815,11 @@ function createPublishGithubAction(options) {
 type: "string",
 description: `Sets the default branch on the repository. The default value is 'master'`
 },
+protectDefaultBranch: {
+title: "Protect Default Branch",
+type: "boolean",
+description: `Protect the default branch after creating the repository. The default value is 'true'`
+},
 deleteBranchOnMerge: {
 title: "Delete Branch On Merge",
 type: "boolean",
@@ -1759,6 +1917,7 @@ function createPublishGithubAction(options) {
 requiredStatusCheckContexts = [],
 repoVisibility = "private",
 defaultBranch = "master",
+protectDefaultBranch = true,
 deleteBranchOnMerge = false,
 gitCommitMessage = "initial commit",
 gitAuthorName,
@@ -1803,7 +1962,16 @@ function createPublishGithubAction(options) {
 allow_squash_merge: allowSquashMerge,
 allow_rebase_merge: allowRebaseMerge
 });
-
+let newRepo;
+try {
+newRepo = (await repoCreationPromise).data;
+} catch (e) {
+errors.assertError(e);
+if (e.message === "Resource not accessible by integration") {
+ctx.logger.warn(`The GitHub app or token provided may not have the required permissions to create the ${user.data.type} repository ${owner}/${repo}.`);
+}
+throw new Error(`Failed to create the ${user.data.type} repository ${owner}/${repo}, ${e.message}`);
+}
 if (access == null ? void 0 : access.startsWith(`${owner}/`)) {
 const [, team] = access.split("/");
 await client.rest.teams.addOrUpdateRepoPermissionsInOrg({
@@ -1870,19 +2038,21 @@ function createPublishGithubAction(options) {
 commitMessage: gitCommitMessage ? gitCommitMessage : config.getOptionalString("scaffolder.defaultCommitMessage"),
 gitAuthorInfo
 });
-
-
-
-
-
-
-
-
-
-
-
-
-
+if (protectDefaultBranch) {
+try {
+await enableBranchProtectionOnDefaultRepoBranch({
+owner,
+client,
+repoName: newRepo.name,
+logger: ctx.logger,
+defaultBranch,
+requireCodeOwnerReviews,
+requiredStatusCheckContexts
+});
+} catch (e) {
+errors.assertError(e);
+ctx.logger.warn(`Skipping: default branch protection on '${newRepo.name}', ${e.message}`);
+}
 }
 ctx.output("remoteUrl", remoteUrl);
 ctx.output("repoContentsUrl", repoContentsUrl);
@@ -1890,6 +2060,41 @@ function createPublishGithubAction(options) {
 });
 }
 
+const DEFAULT_GLOB_PATTERNS = ["./**", "!.git"];
+const isExecutable = (fileMode) => {
+if (!fileMode) {
+return false;
+}
+const executeBitMask = 73;
+const res = fileMode & executeBitMask;
+return res > 0;
+};
+async function serializeDirectoryContents(sourcePath, options) {
+var _a;
+const paths = await globby__default["default"]((_a = options == null ? void 0 : options.globPatterns) != null ? _a : DEFAULT_GLOB_PATTERNS, {
+cwd: sourcePath,
+dot: true,
+gitignore: options == null ? void 0 : options.gitignore,
+followSymbolicLinks: false,
+objectMode: true,
+stats: true
+});
+const limiter = limiterFactory__default["default"](10);
+return Promise.all(paths.map(async ({ path: path$1, stats }) => ({
+path: path$1,
+content: await limiter(async () => fs__default["default"].readFile(path.join(sourcePath, path$1))),
+executable: isExecutable(stats == null ? void 0 : stats.mode)
+})));
+}
+
+async function deserializeDirectoryContents(targetPath, files) {
+for (const file of files) {
+const filePath = backendCommon.resolveSafeChildPath(targetPath, file.path);
+await fs__default["default"].ensureDir(path.dirname(filePath));
+await fs__default["default"].writeFile(filePath, file.content);
+}
+}
+
 class GithubResponseError extends errors.CustomErrorBase {
 }
 const defaultClientFactory = async ({
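The new `serializeDirectoryContents` helper above walks the workspace with globby in object mode (collecting stats to preserve the executable bit) and uses `p-limit` to cap the number of concurrent file reads at 10. A stand-alone sketch of that concurrency pattern, with a hypothetical `readAll` helper; only the `p-limit` and `fs/promises` APIs are assumed:

```ts
import { readFile } from 'fs/promises';
import pLimit from 'p-limit';

// At most 10 reads are in flight at any time; p-limit queues the rest and
// starts the next callback as soon as a slot frees up.
async function readAll(paths: string[]): Promise<Buffer[]> {
  const limit = pLimit(10);
  return Promise.all(paths.map(p => limit(() => readFile(p))));
}
```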
@@ -2005,38 +2210,28 @@ const createPublishGithubPullRequestAction = ({
 token: providedToken
 });
 const fileRoot = sourcePath ? backendCommon.resolveSafeChildPath(ctx.workspacePath, sourcePath) : ctx.workspacePath;
-const
-
-gitignore: true,
-dot: true
-});
-const fileContents = await Promise.all(localFilePaths.map((filePath) => {
-const absPath = backendCommon.resolveSafeChildPath(fileRoot, filePath);
-const base64EncodedContent = fs__default["default"].readFileSync(absPath).toString("base64");
-const fileStat = fs__default["default"].statSync(absPath);
-const githubTreeItemMode = isExecutable(fileStat.mode) ? "100755" : "100644";
-const encoding = "base64";
-return {
-encoding,
-content: base64EncodedContent,
-mode: githubTreeItemMode
-};
-}));
-const repoFilePaths = localFilePaths.map((repoFilePath) => {
-return targetPath ? `${targetPath}/${repoFilePath}` : repoFilePath;
+const directoryContents = await serializeDirectoryContents(fileRoot, {
+gitignore: true
 });
-const
+const files = Object.fromEntries(directoryContents.map((file) => [
+targetPath ? path__default["default"].posix.join(targetPath, file.path) : file.path,
 {
-
-
+mode: file.executable ? "100755" : "100644",
+encoding: "base64",
+content: file.content.toString("base64")
 }
-];
+]));
 try {
 const response = await client.createPullRequest({
 owner,
 repo,
 title,
-changes
+changes: [
+{
+files,
+commit: title
+}
+],
 body: description,
 head: branchName,
 draft
@@ -2244,7 +2439,6 @@ const createPublishGitlabMergeRequestAction = (options) => {
 const repoUrl = ctx.input.repoUrl;
 const { host } = parseRepoUrl(repoUrl, integrations);
 const integrationConfig = integrations.gitlab.byHost(host);
-const actions = [];
 const destinationBranch = ctx.input.branchName;
 if (!integrationConfig) {
 throw new errors.InputError(`No matching integration configuration for host ${host}, please check your integrations config`);
@@ -2258,23 +2452,17 @@ const createPublishGitlabMergeRequestAction = (options) => {
 host: integrationConfig.config.baseUrl,
 [tokenType]: token
 });
-const
-const
-
-gitignore: true,
-dot: true
+const targetPath = backendCommon.resolveSafeChildPath(ctx.workspacePath, ctx.input.targetPath);
+const fileContents = await serializeDirectoryContents(targetPath, {
+gitignore: true
 });
-const
-
-
-
-
-
-
-filePath: repoFilePaths[i],
-content: fileContents[i].toString()
-});
-}
+const actions = fileContents.map((file) => ({
+action: "create",
+filePath: path__default["default"].posix.join(ctx.input.targetPath, file.path),
+encoding: "base64",
+content: file.content.toString("base64"),
+execute_filemode: file.executable
+}));
 const projects = await api.Projects.show(ctx.input.projectid);
 const { default_branch: defaultBranch } = projects;
 try {
@@ -2566,6 +2754,10 @@ const createBuiltinActions = (options) => {
 reader,
 additionalTemplateFilters
 }),
+createPublishGerritAction({
+integrations,
+config
+}),
 createPublishGithubAction({
 integrations,
 config,
@@ -2959,6 +3151,32 @@ class StorageTaskBroker {
 function isTruthy(value) {
 return lodash.isArray(value) ? value.length > 0 : !!value;
 }
+function generateExampleOutput(schema) {
+var _a, _b;
+const { examples } = schema;
+if (examples && Array.isArray(examples)) {
+return examples[0];
+}
+if (schema.type === "object") {
+return Object.fromEntries(Object.entries((_a = schema.properties) != null ? _a : {}).map(([key, value]) => [
+key,
+generateExampleOutput(value)
+]));
+} else if (schema.type === "array") {
+const [firstSchema] = (_b = [schema.items]) == null ? void 0 : _b.flat();
+if (firstSchema) {
+return [generateExampleOutput(firstSchema)];
+}
+return [];
+} else if (schema.type === "string") {
+return "<example>";
+} else if (schema.type === "number") {
+return 0;
+} else if (schema.type === "boolean") {
+return false;
+}
+return "<unknown>";
+}
 
 const isValidTaskSpec = (taskSpec) => {
 return taskSpec.apiVersion === "scaffolder.backstage.io/v1beta3";
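During a dry run, steps whose action does not declare `supportsDryRun` are skipped and their outputs are faked from the action's output schema via `generateExampleOutput` (see the workflow-runner hunk below). A worked example derived from the function above; the schema and the `prNumber` property are illustrative:

```ts
// Hypothetical publish-style output schema.
const outputSchema = {
  type: 'object',
  properties: {
    remoteUrl: { type: 'string' },
    repoContentsUrl: { type: 'string' },
    prNumber: { type: 'number', examples: [42] }, // `examples[0]` wins when present
  },
};

// generateExampleOutput(outputSchema) evaluates to:
//   { remoteUrl: '<example>', repoContentsUrl: '<example>', prNumber: 42 }
```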
@@ -3028,7 +3246,7 @@ class NunjucksWorkflowRunner {
 });
 }
 async execute(task) {
-var _a, _b, _c, _d;
+var _a, _b, _c, _d, _e;
 if (!isValidTaskSpec(task.spec)) {
 throw new errors.InputError("Wrong template version executed with the workflow engine");
 }
@@ -3063,8 +3281,23 @@ class NunjucksWorkflowRunner {
 });
 const action = this.options.actionRegistry.get(step.action);
 const { taskLogger, streamLogger } = createStepLogger({ task, step });
-
-
+if (task.isDryRun && !action.supportsDryRun) {
+task.emitLog(`Skipping because ${action.id} does not support dry-run`, {
+stepId: step.id,
+status: "skipped"
+});
+const outputSchema = (_a = action.schema) == null ? void 0 : _a.output;
+if (outputSchema) {
+context.steps[step.id] = {
+output: generateExampleOutput(outputSchema)
+};
+} else {
+context.steps[step.id] = { output: {} };
+}
+continue;
+}
+const input = (_c = step.input && this.render(step.input, { ...context, secrets: (_b = task.secrets) != null ? _b : {} }, renderTemplate)) != null ? _c : {};
+if ((_d = action.schema) == null ? void 0 : _d.input) {
 const validateResult = jsonschema.validate(input, action.schema.input);
 if (!validateResult.valid) {
 const errors$1 = validateResult.errors.join(", ");
@@ -3075,7 +3308,7 @@ class NunjucksWorkflowRunner {
 const stepOutput = {};
 await action.handler({
 input,
-secrets: (
+secrets: (_e = task.secrets) != null ? _e : {},
 logger: taskLogger,
 logStream: streamLogger,
 workspacePath,
@@ -3164,6 +3397,95 @@ class TaskWorker {
 }
 }
 
+class DecoratedActionsRegistry extends TemplateActionRegistry {
+constructor(innerRegistry, extraActions) {
+super();
+this.innerRegistry = innerRegistry;
+for (const action of extraActions) {
+this.register(action);
+}
+}
+get(actionId) {
+try {
+return super.get(actionId);
+} catch {
+return this.innerRegistry.get(actionId);
+}
+}
+}
+
+function createDryRunner(options) {
+return async function dryRun(input) {
+let contentPromise;
+const workflowRunner = new NunjucksWorkflowRunner({
+...options,
+actionRegistry: new DecoratedActionsRegistry(options.actionRegistry, [
+createTemplateAction({
+id: "dry-run:extract",
+supportsDryRun: true,
+async handler(ctx) {
+contentPromise = serializeDirectoryContents(ctx.workspacePath);
+await contentPromise.catch(() => {
+});
+}
+})
+])
+});
+const dryRunId = uuid.v4();
+const log = new Array();
+const contentsPath = backendCommon.resolveSafeChildPath(options.workingDirectory, `dry-run-content-${dryRunId}`);
+try {
+await deserializeDirectoryContents(contentsPath, input.directoryContents);
+const result = await workflowRunner.execute({
+spec: {
+...input.spec,
+steps: [
+...input.spec.steps,
+{
+id: dryRunId,
+name: "dry-run:extract",
+action: "dry-run:extract"
+}
+],
+templateInfo: {
+entityRef: "template:default/dry-run",
+baseUrl: url.pathToFileURL(backendCommon.resolveSafeChildPath(contentsPath, "template.yaml")).toString()
+}
+},
+secrets: input.secrets,
+done: false,
+isDryRun: true,
+getWorkspaceName: async () => `dry-run-${dryRunId}`,
+async emitLog(message, logMetadata) {
+if ((logMetadata == null ? void 0 : logMetadata.stepId) === dryRunId) {
+return;
+}
+log.push({
+body: {
+...logMetadata,
+message
+}
+});
+},
+async complete() {
+throw new Error("Not implemented");
+}
+});
+if (!contentPromise) {
+throw new Error("Content extraction step was skipped");
+}
+const directoryContents = await contentPromise;
+return {
+log,
+directoryContents,
+output: result.output
+};
+} finally {
+await fs__default["default"].remove(contentsPath);
+}
+};
+}
+
 async function getWorkingDirectory(config, logger) {
 if (!config.has("backend.workingDirectory")) {
 return os__default["default"].tmpdir();
@@ -3261,6 +3583,13 @@ async function createRouter(options) {
 });
 actionsToRegister.forEach((action) => actionRegistry.register(action));
 workers.forEach((worker) => worker.start());
+const dryRunner = createDryRunner({
+actionRegistry,
+integrations,
+logger,
+workingDirectory,
+additionalTemplateFilters
+});
 router.get("/v2/templates/:namespace/:kind/:name/parameter-schema", async (req, res) => {
 var _a, _b;
 const { namespace, kind, name } = req.params;
@@ -3415,6 +3744,62 @@ data: ${JSON.stringify(event)}
 subscription.unsubscribe();
 clearTimeout(timeout);
 });
+}).post("/v2/dry-run", async (req, res) => {
+var _a, _b, _c;
+const bodySchema = zod.z.object({
+template: zod.z.unknown(),
+values: zod.z.record(zod.z.unknown()),
+secrets: zod.z.record(zod.z.string()).optional(),
+directoryContents: zod.z.array(zod.z.object({ path: zod.z.string(), base64Content: zod.z.string() }))
+});
+const body = await bodySchema.parseAsync(req.body).catch((e) => {
+throw new errors.InputError(`Malformed request: ${e}`);
+});
+const template = body.template;
+if (!await pluginScaffolderCommon.templateEntityV1beta3Validator.check(template)) {
+throw new errors.InputError("Input template is not a template");
+}
+const { token } = parseBearerToken(req.headers.authorization);
+for (const parameters of [(_a = template.spec.parameters) != null ? _a : []].flat()) {
+const result2 = jsonschema.validate(body.values, parameters);
+if (!result2.valid) {
+res.status(400).json({ errors: result2.errors });
+return;
+}
+}
+const steps = template.spec.steps.map((step, index) => {
+var _a2, _b2;
+return {
+...step,
+id: (_a2 = step.id) != null ? _a2 : `step-${index + 1}`,
+name: (_b2 = step.name) != null ? _b2 : step.action
+};
+});
+const result = await dryRunner({
+spec: {
+apiVersion: template.apiVersion,
+steps,
+output: (_b = template.spec.output) != null ? _b : {},
+parameters: body.values
+},
+directoryContents: ((_c = body.directoryContents) != null ? _c : []).map((file) => ({
+path: file.path,
+content: Buffer.from(file.base64Content, "base64")
+})),
+secrets: {
+...body.secrets,
+...token && { backstageToken: token }
+}
+});
+res.status(200).json({
+...result,
+steps,
+directoryContents: result.directoryContents.map((file) => ({
+path: file.path,
+executable: file.executable,
+base64Content: file.content.toString("base64")
+}))
+});
 });
 const app = express__default["default"]();
 app.set("logger", logger);
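The new `POST /v2/dry-run` route above accepts a template entity, parameter values, optional secrets, and base64-encoded workspace files, and returns the logs, resolved steps, output, and resulting directory contents. A hedged client-side sketch, assuming the scaffolder backend is mounted at its usual `/api/scaffolder` path and that a Backstage token is available; the request and response shapes mirror the zod schema and JSON response in the handler:

```ts
// Illustrative client call; `baseUrl`, `token`, and the parameter values
// are assumptions, the payload shape comes from the route above.
async function dryRunTemplate(baseUrl: string, token: string, template: unknown) {
  const response = await fetch(`${baseUrl}/api/scaffolder/v2/dry-run`, {
    method: 'POST',
    headers: {
      'Content-Type': 'application/json',
      Authorization: `Bearer ${token}`,
    },
    body: JSON.stringify({
      template,
      values: { name: 'my-service' },
      secrets: {},
      // Files to seed the dry-run workspace with, base64 encoded.
      directoryContents: [
        {
          path: 'catalog-info.yaml',
          base64Content: Buffer.from('# placeholder').toString('base64'),
        },
      ],
    }),
  });
  if (!response.ok) {
    throw new Error(`Dry run failed: ${response.status}`);
  }
  // Resolves to { log, steps, output, directoryContents: [{ path, executable, base64Content }] }
  return response.json();
}
```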
@@ -3504,6 +3889,7 @@ exports.createPublishBitbucketAction = createPublishBitbucketAction;
 exports.createPublishBitbucketCloudAction = createPublishBitbucketCloudAction;
 exports.createPublishBitbucketServerAction = createPublishBitbucketServerAction;
 exports.createPublishFileAction = createPublishFileAction;
+exports.createPublishGerritAction = createPublishGerritAction;
 exports.createPublishGithubAction = createPublishGithubAction;
 exports.createPublishGithubPullRequestAction = createPublishGithubPullRequestAction;
 exports.createPublishGitlabAction = createPublishGitlabAction;