@backstage/cli 0.10.0 → 0.10.4
This diff compares publicly released versions of the package as they appear in the supported public registries. It is provided for informational purposes only.
- package/CHANGELOG.md +51 -0
- package/config/jest.js +5 -3
- package/dist/cjs/{Lockfile-80f0eec4.cjs.js → Lockfile-35661afa.cjs.js} +18 -18
- package/dist/cjs/{build-555a7349.cjs.js → build-16f16d70.cjs.js} +9 -9
- package/dist/cjs/{build-8652aa69.cjs.js → build-ba4675b9.cjs.js} +8 -7
- package/dist/cjs/{build-be463597.cjs.js → build-e7dbdd80.cjs.js} +23 -23
- package/dist/cjs/{build-b5fb2b35.cjs.js → build-f3921423.cjs.js} +8 -7
- package/dist/cjs/{buildWorkspace-79c93924.cjs.js → buildWorkspace-a573304e.cjs.js} +6 -6
- package/dist/cjs/{bump-53a961f8.cjs.js → bump-0acacde9.cjs.js} +36 -36
- package/dist/cjs/{bundle-64a797f5.cjs.js → bundle-8bcde9c4.cjs.js} +15 -15
- package/dist/cjs/{clean-a28705b5.cjs.js → clean-f5d3a3ff.cjs.js} +6 -6
- package/dist/cjs/{config-3816244a.cjs.js → config-02308249.cjs.js} +17 -9
- package/dist/cjs/{create-d5e65154.cjs.js → create-2c1328b1.cjs.js} +41 -41
- package/dist/cjs/{createPlugin-77a95bbd.cjs.js → createPlugin-33636b2b.cjs.js} +31 -31
- package/dist/cjs/{dev-3b68b882.cjs.js → dev-c74959e4.cjs.js} +8 -8
- package/dist/cjs/{diff-7238cc6c.cjs.js → diff-d393a7d6.cjs.js} +39 -32
- package/dist/cjs/{docs-f46d6945.cjs.js → docs-43abe831.cjs.js} +6 -6
- package/dist/cjs/{index-8a512334.cjs.js → index-60792ea9.cjs.js} +65 -68
- package/dist/cjs/{index-05328067.cjs.js → index-d1f94b33.cjs.js} +15 -15
- package/dist/cjs/{index-c8a3517a.cjs.js → index-f5dcae68.cjs.js} +14 -14
- package/dist/cjs/{info-a069f3b4.cjs.js → info-f260605b.cjs.js} +6 -6
- package/dist/cjs/{install-43a8076b.cjs.js → install-db22d18d.cjs.js} +60 -33
- package/dist/cjs/{lint-60658813.cjs.js → lint-0c8dd45e.cjs.js} +5 -5
- package/dist/cjs/{lint-0053aded.cjs.js → lint-f4a40773.cjs.js} +7 -7
- package/dist/cjs/{pack-a4246579.cjs.js → pack-f5669e4f.cjs.js} +6 -6
- package/dist/cjs/{packager-b1a183ad.cjs.js → packager-ce069fd1.cjs.js} +37 -35
- package/dist/cjs/{packages-562fcd14.cjs.js → packages-587c99dd.cjs.js} +5 -5
- package/dist/cjs/{paths-f77bdf66.cjs.js → paths-8d6dfec1.cjs.js} +62 -46
- package/dist/cjs/{print-db876002.cjs.js → print-5db03c66.cjs.js} +5 -5
- package/dist/cjs/{removePlugin-1379b63a.cjs.js → removePlugin-5984ee26.cjs.js} +30 -30
- package/dist/cjs/{run-bacdaaf1.cjs.js → run-8b1e277a.cjs.js} +5 -5
- package/dist/cjs/{schema-3fe57ce7.cjs.js → schema-37d6a972.cjs.js} +5 -5
- package/dist/cjs/{serve-e89d4ea4.cjs.js → serve-aaec7087.cjs.js} +15 -15
- package/dist/cjs/{serve-a9cf4184.cjs.js → serve-bfb3e982.cjs.js} +10 -10
- package/dist/cjs/{server-4f0e40d3.cjs.js → server-d9492b36.cjs.js} +6 -6
- package/dist/cjs/{svgrTemplate-2d0d15cf.cjs.js → svgrTemplate-f19e974c.cjs.js} +3 -3
- package/dist/cjs/{tasks-29b7d09e.cjs.js → tasks-94b21b51.cjs.js} +29 -29
- package/dist/cjs/{testCommand-5602a58f.cjs.js → testCommand-0c162b80.cjs.js} +4 -4
- package/dist/cjs/{validate-9b1a5456.cjs.js → validate-89d918a7.cjs.js} +4 -4
- package/dist/index.cjs.js +1 -1
- package/package.json +18 -21
- package/templates/default-plugin/package.json.hbs +3 -2
- package/templates/serve_index.html +1 -1

package/dist/cjs/{index-05328067.cjs.js → index-d1f94b33.cjs.js}

@@ -4,8 +4,8 @@ var fs = require('fs-extra');
 var path = require('path');
 var os = require('os');
 var tar = require('tar');
-var index = require('./index-8a512334.cjs.js');
-var run = require('./run-bacdaaf1.cjs.js');
+var index = require('./index-60792ea9.cjs.js');
+var run = require('./run-8b1e277a.cjs.js');
 
 function _interopDefaultLegacy (e) { return e && typeof e === 'object' && 'default' in e ? e : { 'default': e }; }
 
@@ -18,7 +18,7 @@ const UNSAFE_PACKAGES = [
 ];
 async function createDistWorkspace(packageNames, options = {}) {
 var _a, _b, _c;
-const targetDir = (_a = options.targetDir) != null ? _a : await fs__default['default'].mkdtemp(path.resolve(os.tmpdir(), "dist-workspace"));
+const targetDir = (_a = options.targetDir) != null ? _a : await fs__default["default"].mkdtemp(path.resolve(os.tmpdir(), "dist-workspace"));
 const targets = await findTargetPackages(packageNames);
 if (options.buildDependencies) {
 const exclude = (_b = options.buildExcludes) != null ? _b : [];
@@ -36,14 +36,14 @@ async function createDistWorkspace(packageNames, options = {}) {
 for (const file of files) {
 const src = typeof file === "string" ? file : file.src;
 const dest = typeof file === "string" ? file : file.dest;
-await fs__default['default'].copy(index.paths.resolveTargetRoot(src), path.resolve(targetDir, dest));
+await fs__default["default"].copy(index.paths.resolveTargetRoot(src), path.resolve(targetDir, dest));
 }
 if (options.skeleton) {
 const skeletonFiles = targets.map((target) => {
 const dir = path.relative(index.paths.targetRoot, target.location);
 return path.join(dir, "package.json");
 });
-await tar__default['default'].create({
+await tar__default["default"].create({
 file: path.resolve(targetDir, options.skeleton),
 cwd: targetDir,
 portable: true,
@@ -61,24 +61,24 @@ async function moveToDistWorkspace(workspaceDir, localPackages) {
 cwd: target.location
 });
 if (target.scripts.postpack) {
-await run.run("yarn", ["postpack"], {cwd: target.location});
+await run.run("yarn", ["postpack"], { cwd: target.location });
 }
 const outputDir = path.relative(index.paths.targetRoot, target.location);
 const absoluteOutputPath = path.resolve(workspaceDir, outputDir);
-await fs__default['default'].ensureDir(absoluteOutputPath);
-await tar__default['default'].extract({
+await fs__default["default"].ensureDir(absoluteOutputPath);
+await tar__default["default"].extract({
 file: archivePath,
 cwd: absoluteOutputPath,
 strip: 1
 });
-await fs__default['default'].remove(archivePath);
+await fs__default["default"].remove(archivePath);
 if (target.get("bundled")) {
-const pkgJson = await fs__default['default'].readJson(path.resolve(absoluteOutputPath, "package.json"));
+const pkgJson = await fs__default["default"].readJson(path.resolve(absoluteOutputPath, "package.json"));
 delete pkgJson.dependencies;
 delete pkgJson.devDependencies;
 delete pkgJson.peerDependencies;
 delete pkgJson.optionalDependencies;
-await fs__default['default'].writeJson(path.resolve(absoluteOutputPath, "package.json"), pkgJson, {
+await fs__default["default"].writeJson(path.resolve(absoluteOutputPath, "package.json"), pkgJson, {
 spaces: 2
 });
 }
@@ -92,12 +92,12 @@ async function moveToDistWorkspace(workspaceDir, localPackages) {
 }
 async function findTargetPackages(pkgNames) {
 var _a;
-const {Project} = require("@lerna/project");
-const {PackageGraph} = require("@lerna/package-graph");
+const { Project } = require("@lerna/project");
+const { PackageGraph } = require("@lerna/package-graph");
 const project = new Project(index.paths.targetDir);
 const packages = await project.getPackages();
 const graph = new PackageGraph(packages);
-const targets = new Map();
+const targets = /* @__PURE__ */ new Map();
 const searchNames = pkgNames.slice();
 while (searchNames.length) {
 const name = searchNames.pop();
@@ -120,4 +120,4 @@ async function findTargetPackages(pkgNames) {
 }
 
 exports.createDistWorkspace = createDistWorkspace;
-//# sourceMappingURL=index-05328067.cjs.js.map
+//# sourceMappingURL=index-d1f94b33.cjs.js.map
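
Most of the churn in this chunk is mechanical: the hashed require() paths changed, and the bundler output now indexes the CommonJS interop wrapper with double quotes and annotates Map/Set construction with /* @__PURE__ */. The standalone sketch below (using the core os module instead of the chunk's fs-extra/tar imports) illustrates what the _interopDefaultLegacy helper shown at line 10 does, and why fs__default['default'] and fs__default["default"] resolve to the same object.

    // Same helper as emitted at line 10 of this chunk.
    function _interopDefaultLegacy(e) {
      return e && typeof e === 'object' && 'default' in e ? e : { 'default': e };
    }

    // A CommonJS module without a `default` export gets wrapped...
    const os = require('os');
    const os__default = _interopDefaultLegacy(os);

    // ...so both spellings below reach the same module object; the 0.10.4
    // output only changes the quote style used for the "default" key.
    console.log(os__default['default'] === os__default["default"]); // true
    console.log(os__default['default'].platform());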

package/dist/cjs/{index-c8a3517a.cjs.js → index-f5dcae68.cjs.js}

@@ -3,7 +3,7 @@
 var fs = require('fs-extra');
 var chalk = require('chalk');
 var yaml = require('yaml');
-var index$1 = require('./index-8a512334.cjs.js');
+var index$1 = require('./index-60792ea9.cjs.js');
 var crypto = require('crypto');
 var openBrowser = require('react-dev-utils/openBrowser');
 var request = require('@octokit/request');
@@ -42,7 +42,7 @@ const FORM_PAGE = `
 </form>
 <script>
 document.getElementById("form").submit()
-</script>
+<\/script>
 </body>
 </html>
 `;
@@ -69,21 +69,21 @@ class GithubCreateAppServer {
 res.setHeader("content-type", "text/html");
 res.send(body);
 };
-const webhookId = crypto__default['default'].randomBytes(15).toString("base64").replace(/[\+\/]/g, "");
+const webhookId = crypto__default["default"].randomBytes(15).toString("base64").replace(/[\+\/]/g, "");
 this.webhookUrl = `https://smee.io/${webhookId}`;
 }
-static async run({org}) {
+static async run({ org }) {
 const encodedOrg = encodeURIComponent(org);
 const actionUrl = `https://github.com/organizations/${encodedOrg}/settings/apps/new`;
 const server = new GithubCreateAppServer(actionUrl);
 return server.start();
 }
 async start() {
-const app = express__default['default']();
+const app = express__default["default"]();
 app.get("/", this.formHandler);
 const callPromise = new Promise((resolve, reject) => {
 app.get("/callback", (req, res) => {
-request.request(`POST /app-manifests/${encodeURIComponent(req.query.code)}/conversions`).then(({data}) => {
+request.request(`POST /app-manifests/${encodeURIComponent(req.query.code)}/conversions`).then(({ data }) => {
 resolve({
 name: data.name,
 slug: data.slug,
@@ -99,7 +99,7 @@ class GithubCreateAppServer {
 });
 });
 this.baseUrl = await this.listen(app);
-openBrowser__default['default'](this.baseUrl);
+openBrowser__default["default"](this.baseUrl);
 return callPromise;
 }
 async listen(app) {
@@ -110,7 +110,7 @@ class GithubCreateAppServer {
 reject(new Error(`Unexpected listener info '${info}'`));
 return;
 }
-const {port} = info;
+const { port } = info;
 resolve(`http://localhost:${port}`);
 });
 });
@@ -118,14 +118,14 @@
 }
 
 var index = async (org) => {
-const {slug, name, ...config} = await GithubCreateAppServer.run({org});
+const { slug, name, ...config } = await GithubCreateAppServer.run({ org });
 const fileName = `github-app-${slug}-credentials.yaml`;
 const content = `# Name: ${name}
 ${yaml.stringify(config)}`;
-await fs__default['default'].writeFile(index$1.paths.resolveTargetRoot(fileName), content);
-console.log(`GitHub App configuration written to ${chalk__default['default'].cyan(fileName)}`);
-console.log(chalk__default['default'].yellow("This file contains sensitive credentials, it should not be committed to version control and handled with care!"));
+await fs__default["default"].writeFile(index$1.paths.resolveTargetRoot(fileName), content);
+console.log(`GitHub App configuration written to ${chalk__default["default"].cyan(fileName)}`);
+console.log(chalk__default["default"].yellow("This file contains sensitive credentials, it should not be committed to version control and handled with care!"));
 };
 
-exports['default'] = index;
-//# sourceMappingURL=index-c8a3517a.cjs.js.map
+exports["default"] = index;
+//# sourceMappingURL=index-f5dcae68.cjs.js.map
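
The GitHub App creation chunk is likewise mostly quote and spacing churn. One piece worth calling out is the webhook proxy setup visible at lines 72-73 above: a random, URL-safe id is generated and turned into a smee.io channel URL. A standalone sketch of just that step:

    const crypto = require('crypto');

    // 15 random bytes, base64-encoded, with '+' and '/' stripped so the id is
    // safe to embed in a URL path (mirrors lines 72-73 of the new chunk).
    const webhookId = crypto.randomBytes(15).toString('base64').replace(/[\+\/]/g, '');
    const webhookUrl = `https://smee.io/${webhookId}`;
    console.log(webhookUrl);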

package/dist/cjs/{info-a069f3b4.cjs.js → info-f260605b.cjs.js}

@@ -1,9 +1,9 @@
 'use strict';
 
-var index = require('./index-8a512334.cjs.js');
+var index = require('./index-60792ea9.cjs.js');
 var os = require('os');
-var run = require('./run-bacdaaf1.cjs.js');
-var Lockfile = require('./Lockfile-80f0eec4.cjs.js');
+var run = require('./run-8b1e277a.cjs.js');
+var Lockfile = require('./Lockfile-35661afa.cjs.js');
 require('chalk');
 require('commander');
 require('fs-extra');
@@ -23,7 +23,7 @@ var info = async () => {
 await new Promise(async () => {
 const yarnVersion = await run.runPlain("yarn --version");
 const isLocal = require("fs").existsSync(index.paths.resolveOwn("./src"));
-console.log(`OS: ${os__default['default'].type} ${os__default['default'].release} - ${os__default['default'].platform}/${os__default['default'].arch}`);
+console.log(`OS: ${os__default["default"].type} ${os__default["default"].release} - ${os__default["default"].platform}/${os__default["default"].arch}`);
 console.log(`node: ${process.version}`);
 console.log(`yarn: ${yarnVersion}`);
 console.log(`cli: ${index.version$1} (${isLocal ? "local" : "installed"})`);
@@ -40,5 +40,5 @@ var info = async () => {
 });
 };
 
-exports['default'] = info;
-//# sourceMappingURL=info-a069f3b4.cjs.js.map
+exports["default"] = info;
+//# sourceMappingURL=info-f260605b.cjs.js.map

package/dist/cjs/{install-43a8076b.cjs.js → install-db22d18d.cjs.js}

@@ -3,12 +3,12 @@
 var fs = require('fs-extra');
 require('semver');
 require('@yarnpkg/lockfile');
-var packages = require('./packages-562fcd14.cjs.js');
-var index = require('./index-8a512334.cjs.js');
+var packages = require('./packages-587c99dd.cjs.js');
+var index = require('./index-60792ea9.cjs.js');
 var chalk = require('chalk');
 var sortBy = require('lodash/sortBy');
 var groupBy = require('lodash/groupBy');
-var run = require('./run-bacdaaf1.cjs.js');
+var run = require('./run-8b1e277a.cjs.js');
 require('commander');
 require('@backstage/cli-common');
 require('@backstage/config/package.json');
@@ -33,9 +33,9 @@ class AppRouteStep {
 }
 async run() {
 var _a;
-const {path, element, packageName} = this.data;
+const { path, element, packageName } = this.data;
 const appTsxPath = index.paths.resolveTargetRoot("packages/app/src/App.tsx");
-const contents = await fs__default['default'].readFile(appTsxPath, "utf-8");
+const contents = await fs__default["default"].readFile(appTsxPath, "utf-8");
 let failed = false;
 const contentsWithRoute = contents.replace(/(\s*)<\/FlatRoutes>/, `$1 <Route path="${path}" element={${element}} />$1</FlatRoutes>`);
 if (contentsWithRoute === contents) {
@@ -56,21 +56,21 @@ import `);
 console.log(`1. import { ${componentName} } from '${packageName}';`);
 console.log(`2. <Route path="${path}" element={${element}} />`);
 } else {
-await fs__default['default'].writeFile(appTsxPath, contentsWithImport);
+await fs__default["default"].writeFile(appTsxPath, contentsWithImport);
 }
 }
 }
 const appRoute = createStepDefinition({
 type: "app-route",
 deserialize(obj, pkg) {
-const {path, element} = obj;
+const { path, element } = obj;
 if (!path || typeof path !== "string") {
 throw new Error("Invalid install step, 'path' must be a string");
 }
 if (!element || typeof element !== "string") {
 throw new Error("Invalid install step, 'element' must be a string");
 }
-return new AppRouteStep({path, element, packageName: pkg.name});
+return new AppRouteStep({ path, element, packageName: pkg.name });
 },
 create(data) {
 return new AppRouteStep(data);
@@ -82,23 +82,23 @@ class DependenciesStep {
 this.data = data;
 }
 async run() {
-const {dependencies: dependencies2} = this.data;
-const byTarget = groupBy__default['default'](dependencies2, "target");
+const { dependencies: dependencies2 } = this.data;
+const byTarget = groupBy__default["default"](dependencies2, "target");
 for (const [target, deps] of Object.entries(byTarget)) {
 const pkgPath = index.paths.resolveTargetRoot(target, "package.json");
-const pkgJson = await fs__default['default'].readJson(pkgPath);
-const depTypes = new Set();
+const pkgJson = await fs__default["default"].readJson(pkgPath);
+const depTypes = /* @__PURE__ */ new Set();
 for (const dep of deps) {
 depTypes.add(dep.type);
 pkgJson[dep.type][dep.name] = dep.query;
 }
 for (const depType of depTypes) {
-pkgJson[depType] = Object.fromEntries(sortBy__default['default'](Object.entries(pkgJson[depType]), ([key]) => key));
+pkgJson[depType] = Object.fromEntries(sortBy__default["default"](Object.entries(pkgJson[depType]), ([key]) => key));
 }
-await fs__default['default'].writeJson(pkgPath, pkgJson, {spaces: 2});
+await fs__default["default"].writeJson(pkgPath, pkgJson, { spaces: 2 });
 }
 console.log();
-console.log(`Running ${chalk__default['default'].blue("yarn install")} to install new versions`);
+console.log(`Running ${chalk__default["default"].blue("yarn install")} to install new versions`);
 console.log();
 await run.run("yarn", ["install"]);
 }
@@ -124,11 +124,11 @@ class MessageStep {
 const message = createStepDefinition({
 type: "message",
 deserialize(obj) {
-const {message: msg} = obj;
+const { message: msg } = obj;
 if (!msg || typeof msg !== "string" && !Array.isArray(msg)) {
 throw new Error("Invalid install step, 'message' must be a string or array");
 }
-return new MessageStep({message: [msg].flat().join("")});
+return new MessageStep({ message: [msg].flat().join("") });
 },
 create(data) {
 return new MessageStep(data);
@@ -161,7 +161,7 @@ class PluginInstaller {
 constructor(steps) {
 this.steps = steps;
 }
-static async resolveSteps(pkg) {
+static async resolveSteps(pkg, versionToInstall) {
 var _a, _b;
 const steps = [];
 const dependencies$1 = [];
@@ -169,14 +169,14 @@
 target: "packages/app",
 type: "dependencies",
 name: pkg.name,
-query: `^${pkg.version}`
+query: versionToInstall || `^${pkg.version}`
 });
 steps.push({
 type: "dependencies",
-step: dependencies.create({dependencies: dependencies$1})
+step: dependencies.create({ dependencies: dependencies$1 })
 });
 for (const step of (_b = (_a = pkg.experimentalInstallationRecipe) == null ? void 0 : _a.steps) != null ? _b : []) {
-const {type} = step;
+const { type } = step;
 const definition = stepDefinitions.find((d) => d.type === type);
 if (definition) {
 steps.push({
@@ -190,25 +190,52 @@ class PluginInstaller {
 return steps;
 }
 async run() {
-for (const {type, step} of this.steps) {
+for (const { type, step } of this.steps) {
 console.log(`Running step ${type}`);
 await step.run();
 }
 }
 }
+async function installPluginAndPeerPlugins(pkg) {
+const pluginDeps = /* @__PURE__ */ new Map();
+pluginDeps.set(pkg.name, { pkg });
+await loadPeerPluginDeps(pkg, pluginDeps);
+console.log(`Installing ${pkg.name} AND any peer plugin dependencies.`);
+for (const [_pluginDepName, pluginDep] of pluginDeps.entries()) {
+const { pkg: pluginDepPkg, versionToInstall } = pluginDep;
+console.log(`Installing plugin: ${pluginDepPkg.name}: ${versionToInstall || pluginDepPkg.version}`);
+const steps = await PluginInstaller.resolveSteps(pluginDepPkg, versionToInstall);
+const installer = new PluginInstaller(steps);
+await installer.run();
+}
+}
+async function loadPackageJson(plugin) {
+if (plugin.endsWith("package.json")) {
+return await fs__default["default"].readJson(plugin);
+}
+return await fetchPluginPackage(plugin);
+}
+async function loadPeerPluginDeps(pkg, pluginMap) {
+var _a, _b;
+for (const [pluginId, pluginVersion] of Object.entries((_b = (_a = pkg.experimentalInstallationRecipe) == null ? void 0 : _a.peerPluginDependencies) != null ? _b : {})) {
+const depPkg = await loadPackageJson(pluginId);
+if (!pluginMap.get(depPkg.name)) {
+pluginMap.set(depPkg.name, {
+pkg: depPkg,
+versionToInstall: pluginVersion
+});
+await loadPeerPluginDeps(depPkg, pluginMap);
+}
+}
+}
 var install = async (pluginId, cmd) => {
-let pkg;
-if (pluginId) {
-pkg = await fetchPluginPackage(pluginId);
-} else if (cmd == null ? void 0 : cmd.from) {
-pkg = await fs__default['default'].readJson(cmd.from);
-} else {
+const from = pluginId || (cmd == null ? void 0 : cmd.from);
+if (!from) {
 throw new Error("Missing both <plugin-id> or a package.json file path in the --from flag.");
 }
-const steps = await PluginInstaller.resolveSteps(pkg);
-const installer = new PluginInstaller(steps);
-await installer.run();
+const pkg = await loadPackageJson(from);
+await installPluginAndPeerPlugins(pkg);
 };
 
-exports['default'] = install;
-//# sourceMappingURL=install-43a8076b.cjs.js.map
+exports["default"] = install;
+//# sourceMappingURL=install-db22d18d.cjs.js.map
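
This is the main functional change in the range: the install command now walks experimentalInstallationRecipe.peerPluginDependencies recursively, loading each referenced plugin's package.json (by id, or by a path ending in package.json) and pinning it to the declared version via the new versionToInstall argument to resolveSteps. A hypothetical plugin manifest that would exercise this path; the field names come from the diff above, but the package names and versions are illustrative only:

    // Hypothetical shape only.
    const examplePluginPkg = {
      name: '@example/plugin-todo',
      version: '1.2.3',
      experimentalInstallationRecipe: {
        steps: [],
        peerPluginDependencies: {
          // key: plugin id (or a path ending in package.json),
          // value: the version passed through as versionToInstall
          '@example/plugin-todo-backend': '^1.2.0',
        },
      },
    };

    // installPluginAndPeerPlugins(examplePluginPkg) would add
    // @example/plugin-todo at ^1.2.3 and @example/plugin-todo-backend at ^1.2.0
    // to packages/app, running `yarn install` once per dependencies step.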

package/dist/cjs/{lint-60658813.cjs.js → lint-0c8dd45e.cjs.js}

@@ -1,7 +1,7 @@
 'use strict';
 
-var run = require('./run-bacdaaf1.cjs.js');
-var index = require('./index-8a512334.cjs.js');
+var run = require('./run-8b1e277a.cjs.js');
+var index = require('./index-60792ea9.cjs.js');
 require('child_process');
 require('util');
 require('@backstage/errors');
@@ -14,7 +14,7 @@ require('@backstage/config/package.json');
 
 var lint = async (cmd, cmdArgs) => {
 const args = [
-"--ext=js,jsx,ts,tsx",
+"--ext=js,jsx,ts,tsx,mjs,cjs",
 "--max-warnings=0",
 `--format=${cmd.format}`,
 ...cmdArgs != null ? cmdArgs : [index.paths.targetDir]
@@ -25,5 +25,5 @@ var lint = async (cmd, cmdArgs) => {
 await run.run("eslint", args);
 };
 
-exports['default'] = lint;
-//# sourceMappingURL=lint-60658813.cjs.js.map
+exports["default"] = lint;
+//# sourceMappingURL=lint-0c8dd45e.cjs.js.map

package/dist/cjs/{lint-0053aded.cjs.js → lint-f4a40773.cjs.js}

@@ -1,9 +1,9 @@
 'use strict';
 
-var Lockfile = require('./Lockfile-80f0eec4.cjs.js');
-require('./run-bacdaaf1.cjs.js');
+var Lockfile = require('./Lockfile-35661afa.cjs.js');
+require('./run-8b1e277a.cjs.js');
 require('chalk');
-var index = require('./index-8a512334.cjs.js');
+var index = require('./index-60792ea9.cjs.js');
 var partition = require('lodash/partition');
 require('fs-extra');
 require('semver');
@@ -42,14 +42,14 @@ var lint = async (cmd) => {
 lockfile.replaceVersions(result.newVersions);
 await lockfile.save();
 } else {
-const [newVersionsForbidden, newVersionsAllowed] = partition__default['default'](result.newVersions, ({name}) => forbiddenDuplicatesFilter(name));
+const [newVersionsForbidden, newVersionsAllowed] = partition__default["default"](result.newVersions, ({ name }) => forbiddenDuplicatesFilter(name));
 if (newVersionsForbidden.length && !fix) {
 success = false;
 }
 logArray(newVersionsForbidden, "The following packages must be deduplicated, this can be done automatically with --fix", (e) => ` ${e.name} @ ${e.range} bumped from ${e.oldVersion} to ${e.newVersion}`);
 logArray(newVersionsAllowed, "The following packages can be deduplicated, this can be done automatically with --fix", (e) => ` ${e.name} @ ${e.range} bumped from ${e.oldVersion} to ${e.newVersion}`);
 }
-const [newRangesForbidden, newRangesAllowed] = partition__default['default'](result.newRanges, ({name}) => forbiddenDuplicatesFilter(name));
+const [newRangesForbidden, newRangesAllowed] = partition__default["default"](result.newRanges, ({ name }) => forbiddenDuplicatesFilter(name));
 if (newRangesForbidden.length) {
 success = false;
 }
@@ -71,7 +71,7 @@ function logArray(arr, header, each) {
 console.log();
 }
 
-exports['default'] = lint;
+exports["default"] = lint;
 exports.forbiddenDuplicatesFilter = forbiddenDuplicatesFilter;
 exports.includedFilter = includedFilter;
-//# sourceMappingURL=lint-0053aded.cjs.js.map
+//# sourceMappingURL=lint-f4a40773.cjs.js.map
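
The versions-lint chunk uses lodash's partition to split the duplicates reported by the lockfile analysis into a set that must be deduplicated and a set that is merely reported (lines 45 and 52 above). A small sketch of that split; the sample entries and the filter implementation are assumptions, since forbiddenDuplicatesFilter is defined elsewhere in the chunk:

    const partition = require('lodash/partition');

    // Assumed stand-in for the real forbiddenDuplicatesFilter.
    const forbiddenDuplicatesFilter = (name) => name.startsWith('@backstage/');

    // Illustrative entries in the shape consumed by logArray().
    const newVersions = [
      { name: '@backstage/theme', range: '^0.2.0', oldVersion: '0.2.0', newVersion: '0.2.1' },
      { name: 'left-pad', range: '^1.0.0', oldVersion: '1.1.0', newVersion: '1.3.0' },
    ];

    const [forbidden, allowed] = partition(newVersions, ({ name }) =>
      forbiddenDuplicatesFilter(name)
    );
    console.log(forbidden.map((e) => e.name)); // [ '@backstage/theme' ]
    console.log(allowed.map((e) => e.name));   // [ 'left-pad' ]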

package/dist/cjs/{pack-a4246579.cjs.js → pack-f5669e4f.cjs.js}

@@ -1,7 +1,7 @@
 'use strict';
 
 var fs = require('fs-extra');
-var index = require('./index-8a512334.cjs.js');
+var index = require('./index-60792ea9.cjs.js');
 require('commander');
 require('chalk');
 require('semver');
@@ -19,19 +19,19 @@ const PKG_BACKUP_PATH = "package.json-prepack";
 const pre = async () => {
 var _a;
 const pkgPath = index.paths.resolveTarget(PKG_PATH);
-const pkgContent = await fs__default['default'].readFile(pkgPath, "utf8");
+const pkgContent = await fs__default["default"].readFile(pkgPath, "utf8");
 const pkg = JSON.parse(pkgContent);
-await fs__default['default'].writeFile(PKG_BACKUP_PATH, pkgContent);
+await fs__default["default"].writeFile(PKG_BACKUP_PATH, pkgContent);
 for (const key of Object.keys((_a = pkg.publishConfig) != null ? _a : {})) {
 if (!SKIPPED_KEYS.includes(key)) {
 pkg[key] = pkg.publishConfig[key];
 }
 }
-await fs__default['default'].writeJson(pkgPath, pkg, {encoding: "utf8", spaces: 2});
+await fs__default["default"].writeJson(pkgPath, pkg, { encoding: "utf8", spaces: 2 });
 };
 const post = async () => {
 try {
-await fs__default['default'].move(PKG_BACKUP_PATH, PKG_PATH, {overwrite: true});
+await fs__default["default"].move(PKG_BACKUP_PATH, PKG_PATH, { overwrite: true });
 } catch (error) {
 console.warn(`Failed to restore package.json during postpack, ${error}. Your package will be fine but you may have ended up with some garbage in the repo.`);
 }
@@ -39,4 +39,4 @@ const post = async () => {
 
 exports.post = post;
 exports.pre = pre;
-//# sourceMappingURL=pack-a4246579.cjs.js.map
+//# sourceMappingURL=pack-f5669e4f.cjs.js.map
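
For context on the pack chunk: pre() backs up package.json, hoists every publishConfig entry that is not in SKIPPED_KEYS to the top level, and post() later restores the backup. A before/after illustration; the field values are hypothetical, and SKIPPED_KEYS is not shown in this diff, so it is assumed not to cover these keys:

    // package.json as committed (hypothetical values).
    const before = {
      name: '@example/plugin-todo',
      main: 'src/index.ts',
      types: 'src/index.ts',
      publishConfig: { main: 'dist/index.cjs.js', types: 'dist/index.d.ts' },
    };

    // What pre() effectively writes back before packing: publishConfig keys
    // overwrite their top-level counterparts so the published tarball points
    // at the built output.
    const afterPre = { ...before };
    for (const key of Object.keys(before.publishConfig)) {
      afterPre[key] = before.publishConfig[key];
    }
    console.log(afterPre.main);  // 'dist/index.cjs.js'
    console.log(afterPre.types); // 'dist/index.d.ts'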