@backstage/cli-node 0.2.8-next.0 → 0.2.9-next.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +20 -0
- package/dist/git/GitUtils.cjs.js +68 -0
- package/dist/git/GitUtils.cjs.js.map +1 -0
- package/dist/index.cjs.js +10 -562
- package/dist/index.cjs.js.map +1 -1
- package/dist/index.d.ts +4 -0
- package/dist/monorepo/Lockfile.cjs.js +183 -0
- package/dist/monorepo/Lockfile.cjs.js.map +1 -0
- package/dist/monorepo/PackageGraph.cjs.js +209 -0
- package/dist/monorepo/PackageGraph.cjs.js.map +1 -0
- package/dist/monorepo/isMonoRepo.cjs.js +21 -0
- package/dist/monorepo/isMonoRepo.cjs.js.map +1 -0
- package/dist/paths.cjs.js +8 -0
- package/dist/paths.cjs.js.map +1 -0
- package/dist/roles/PackageRoles.cjs.js +155 -0
- package/dist/roles/PackageRoles.cjs.js.map +1 -0
- package/dist/run.cjs.js +9 -0
- package/dist/run.cjs.js.map +1 -0
- package/package.json +6 -6
package/CHANGELOG.md
CHANGED
@@ -1,5 +1,25 @@
 # @backstage/cli-node
 
+## 0.2.9-next.0
+
+### Patch Changes
+
+- fec7278: Added new `lockfile.getDependencyTreeHash(name)` utility.
+- Updated dependencies
+  - @backstage/cli-common@0.1.14
+  - @backstage/errors@1.2.4
+  - @backstage/types@1.1.1
+
+## 0.2.8
+
+### Patch Changes
+
+- 0c70f43: Add definition for the new `backstage.inline` field in `package.json`.
+- Updated dependencies
+  - @backstage/cli-common@0.1.14
+  - @backstage/errors@1.2.4
+  - @backstage/types@1.1.1
+
 ## 0.2.8-next.0
 
 ### Patch Changes
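
The `getDependencyTreeHash` implementation itself lands in `package/dist/monorepo/Lockfile.cjs.js` rather than in the hunks shown here, so the sketch below is hypothetical: only the method name and its single `name` argument come from the changelog entry above, while `Lockfile.load` is the loader already present in this package.

```js
// Hypothetical usage sketch; the method's exact return type is an assumption.
const { Lockfile } = require('@backstage/cli-node');

async function main() {
  const lockfile = await Lockfile.load('yarn.lock');
  // Assumption: yields a stable hash that changes whenever any transitive
  // dependency of the named package changes, e.g. for use as a cache key.
  const hash = await lockfile.getDependencyTreeHash('@backstage/cli-common');
  console.log(hash);
}

main();
```
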
package/dist/git/GitUtils.cjs.js
ADDED
@@ -0,0 +1,68 @@
+'use strict';
+
+var errors = require('@backstage/errors');
+var paths = require('../paths.cjs.js');
+var run = require('../run.cjs.js');
+
+async function runGit(...args) {
+  try {
+    const { stdout } = await run.execFile("git", args, {
+      shell: true,
+      cwd: paths.paths.targetRoot
+    });
+    return stdout.trim().split(/\r\n|\r|\n/);
+  } catch (error) {
+    errors.assertError(error);
+    if (error.stderr || typeof error.code === "number") {
+      const stderr = error.stderr?.toString("utf8");
+      const msg = stderr?.trim() ?? `with exit code ${error.code}`;
+      throw new Error(`git ${args[0]} failed, ${msg}`);
+    }
+    throw new errors.ForwardedError("Unknown execution error", error);
+  }
+}
+class GitUtils {
+  /**
+   * Returns a sorted list of all files that have changed since the merge base
+   * of the provided `ref` and HEAD, as well as all files that are not tracked by git.
+   */
+  static async listChangedFiles(ref) {
+    if (!ref) {
+      throw new Error("ref is required");
+    }
+    let diffRef = ref;
+    try {
+      const [base] = await runGit("merge-base", "HEAD", ref);
+      diffRef = base;
+    } catch {
+    }
+    const tracked = await runGit("diff", "--name-only", diffRef);
+    const untracked = await runGit(
+      "ls-files",
+      "--others",
+      "--exclude-standard"
+    );
+    return Array.from(/* @__PURE__ */ new Set([...tracked, ...untracked]));
+  }
+  /**
+   * Returns the contents of a file at a specific ref.
+   */
+  static async readFileAtRef(path, ref) {
+    let showRef = ref;
+    try {
+      const [base] = await runGit("merge-base", "HEAD", ref);
+      showRef = base;
+    } catch {
+    }
+    const { stdout } = await run.execFile("git", ["show", `${showRef}:${path}`], {
+      shell: true,
+      cwd: paths.paths.targetRoot,
+      maxBuffer: 1024 * 1024 * 50
+    });
+    return stdout;
+  }
+}
+
+exports.GitUtils = GitUtils;
+exports.runGit = runGit;
+//# sourceMappingURL=GitUtils.cjs.js.map

package/dist/git/GitUtils.cjs.js.map
ADDED
@@ -0,0 +1 @@
+{"version":3,"file":"GitUtils.cjs.js","sources":["../../src/git/GitUtils.ts"],"sourcesContent":["/*\n * Copyright 2022 The Backstage Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { assertError, ForwardedError } from '@backstage/errors';\nimport { paths } from '../paths';\nimport { execFile } from '../run';\n\n/**\n * Run a git command, trimming the output splitting it into lines.\n */\nexport async function runGit(...args: string[]) {\n  try {\n    const { stdout } = await execFile('git', args, {\n      shell: true,\n      cwd: paths.targetRoot,\n    });\n    return stdout.trim().split(/\\r\\n|\\r|\\n/);\n  } catch (error) {\n    assertError(error);\n    if (error.stderr || typeof error.code === 'number') {\n      const stderr = (error.stderr as undefined | Buffer)?.toString('utf8');\n      const msg = stderr?.trim() ?? `with exit code ${error.code}`;\n      throw new Error(`git ${args[0]} failed, ${msg}`);\n    }\n    throw new ForwardedError('Unknown execution error', error);\n  }\n}\n\n/**\n * Utilities for working with git.\n *\n * @public\n */\nexport class GitUtils {\n  /**\n   * Returns a sorted list of all files that have changed since the merge base\n   * of the provided `ref` and HEAD, as well as all files that are not tracked by git.\n   */\n  static async listChangedFiles(ref: string) {\n    if (!ref) {\n      throw new Error('ref is required');\n    }\n\n    let diffRef = ref;\n    try {\n      const [base] = await runGit('merge-base', 'HEAD', ref);\n      diffRef = base;\n    } catch {\n      // silently fall back to using the ref directly if merge base is not available\n    }\n\n    const tracked = await runGit('diff', '--name-only', diffRef);\n    const untracked = await runGit(\n      'ls-files',\n      '--others',\n      '--exclude-standard',\n    );\n\n    return Array.from(new Set([...tracked, ...untracked]));\n  }\n\n  /**\n   * Returns the contents of a file at a specific ref.\n   */\n  static async readFileAtRef(path: string, ref: string) {\n    let showRef = ref;\n    try {\n      const [base] = await runGit('merge-base', 'HEAD', ref);\n      showRef = base;\n    } catch {\n      // silently fall back to using the ref directly if merge base is not available\n    }\n\n    const { stdout } = await execFile('git', ['show', `${showRef}:${path}`], {\n      shell: true,\n      cwd: paths.targetRoot,\n      maxBuffer: 1024 * 1024 * 50,\n    });\n    return stdout;\n  }\n}\n"],"names":["execFile","paths","assertError","ForwardedError"],"mappings":";;;;;;AAuBA,eAAsB,UAAU,IAAgB,EAAA;AAC9C,EAAI,IAAA;AACF,IAAA,MAAM,EAAE,MAAO,EAAA,GAAI,MAAMA,YAAA,CAAS,OAAO,IAAM,EAAA;AAAA,MAC7C,KAAO,EAAA,IAAA;AAAA,MACP,KAAKC,WAAM,CAAA,UAAA;AAAA,KACZ,CAAA,CAAA;AACD,IAAA,OAAO,MAAO,CAAA,IAAA,EAAO,CAAA,KAAA,CAAM,YAAY,CAAA,CAAA;AAAA,WAChC,KAAO,EAAA;AACd,IAAAC,kBAAA,CAAY,KAAK,CAAA,CAAA;AACjB,IAAA,IAAI,KAAM,CAAA,MAAA,IAAU,OAAO,KAAA,CAAM,SAAS,QAAU,EAAA;AAClD,MAAA,MAAM,MAAU,GAAA,KAAA,CAAM,MAA+B,EAAA,QAAA,CAAS,MAAM,CAAA,CAAA;AACpE,MAAA,MAAM,MAAM,MAAQ,EAAA,IAAA,EAAU,IAAA,CAAA,eAAA,EAAkB,MAAM,IAAI,CAAA,CAAA,CAAA;AAC1D,MAAM,MAAA,IAAI,MAAM,CAAO,IAAA,EAAA,IAAA,CAAK,CAAC,CAAC,CAAA,SAAA,EAAY,GAAG,CAAE,CAAA,CAAA,CAAA;AAAA,KACjD;AACA,IAAM,MAAA,IAAIC,qBAAe,CAAA,yBAAA,EAA2B,KAAK,CAAA,CAAA;AAAA,GAC3D;AACF,CAAA;AAOO,MAAM,QAAS,CAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAKpB,aAAa,iBAAiB,GAAa,EAAA;AACzC,IAAA,IAAI,CAAC,GAAK,EAAA;AACR,MAAM,MAAA,IAAI,MAAM,iBAAiB,CAAA,CAAA;AAAA,KACnC;AAEA,IAAA,IAAI,OAAU,GAAA,GAAA,CAAA;AACd,IAAI,IAAA;AACF,MAAA,MAAM,CAAC,IAAI,CAAA,GAAI,MAAM,MAAO,CAAA,YAAA,EAAc,QAAQ,GAAG,CAAA,CAAA;AACrD,MAAU,OAAA,GAAA,IAAA,CAAA;AAAA,KACJ,CAAA,MAAA;AAAA,KAER;AAEA,IAAA,MAAM,OAAU,GAAA,MAAM,MAAO,CAAA,MAAA,EAAQ,eAAe,OAAO,CAAA,CAAA;AAC3D,IAAA,MAAM,YAAY,MAAM,MAAA;AAAA,MACtB,UAAA;AAAA,MACA,UAAA;AAAA,MACA,oBAAA;AAAA,KACF,CAAA;AAEA,IAAO,OAAA,KAAA,CAAM,IAAK,iBAAA,IAAI,GAAI,CAAA,CAAC,GAAG,OAAS,EAAA,GAAG,SAAS,CAAC,CAAC,CAAA,CAAA;AAAA,GACvD;AAAA;AAAA;AAAA;AAAA,EAKA,aAAa,aAAc,CAAA,IAAA,EAAc,GAAa,EAAA;AACpD,IAAA,IAAI,OAAU,GAAA,GAAA,CAAA;AACd,IAAI,IAAA;AACF,MAAA,MAAM,CAAC,IAAI,CAAA,GAAI,MAAM,MAAO,CAAA,YAAA,EAAc,QAAQ,GAAG,CAAA,CAAA;AACrD,MAAU,OAAA,GAAA,IAAA,CAAA;AAAA,KACJ,CAAA,MAAA;AAAA,KAER;AAEA,IAAA,MAAM,EAAE,MAAA,EAAW,GAAA,MAAMA,YAAS,CAAA,KAAA,EAAO,CAAC,MAAA,EAAQ,CAAG,EAAA,OAAO,CAAI,CAAA,EAAA,IAAI,EAAE,CAAG,EAAA;AAAA,MACvE,KAAO,EAAA,IAAA;AAAA,MACP,KAAKC,WAAM,CAAA,UAAA;AAAA,MACX,SAAA,EAAW,OAAO,IAAO,GAAA,EAAA;AAAA,KAC1B,CAAA,CAAA;AACD,IAAO,OAAA,MAAA,CAAA;AAAA,GACT;AACF;;;;;"}
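
Both `GitUtils` methods first resolve the merge base of `HEAD` and the given ref, silently falling back to the ref itself when no merge base exists, so callers typically pass a branch name. A short usage sketch against the exports shown above (the ref name is illustrative):

```js
const { GitUtils } = require('@backstage/cli-node');

async function main() {
  // Tracked files changed since the merge base of HEAD and the ref,
  // plus any untracked (but not gitignored) files, de-duplicated.
  const changed = await GitUtils.listChangedFiles('origin/master');
  console.log(changed);

  // Contents of yarn.lock as it looked at that same merge base.
  const oldLock = await GitUtils.readFileAtRef('yarn.lock', 'origin/master');
  console.log(oldLock.length);
}

main();
```
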
package/dist/index.cjs.js
CHANGED
@@ -1,568 +1,16 @@
 'use strict';
 
-var
-var
-var
-var
-var
-var path = require('path');
-var getPackages = require('@manypkg/get-packages');
-var parsers = require('@yarnpkg/parsers');
-var zod = require('zod');
+var GitUtils = require('./git/GitUtils.cjs.js');
+var isMonoRepo = require('./monorepo/isMonoRepo.cjs.js');
+var PackageGraph = require('./monorepo/PackageGraph.cjs.js');
+var Lockfile = require('./monorepo/Lockfile.cjs.js');
+var PackageRoles = require('./roles/PackageRoles.cjs.js');
 
-function _interopDefaultCompat (e) { return e && typeof e === 'object' && 'default' in e ? e : { default: e }; }
 
-var fs__default = /*#__PURE__*/_interopDefaultCompat(fs);
-var path__default = /*#__PURE__*/_interopDefaultCompat(path);
 
-
-
-
-
-
-  try {
-    const { stdout } = await execFile("git", args, {
-      shell: true,
-      cwd: paths.targetRoot
-    });
-    return stdout.trim().split(/\r\n|\r|\n/);
-  } catch (error) {
-    errors.assertError(error);
-    if (error.stderr || typeof error.code === "number") {
-      const stderr = error.stderr?.toString("utf8");
-      const msg = stderr?.trim() ?? `with exit code ${error.code}`;
-      throw new Error(`git ${args[0]} failed, ${msg}`);
-    }
-    throw new errors.ForwardedError("Unknown execution error", error);
-  }
-}
-class GitUtils {
-  /**
-   * Returns a sorted list of all files that have changed since the merge base
-   * of the provided `ref` and HEAD, as well as all files that are not tracked by git.
-   */
-  static async listChangedFiles(ref) {
-    if (!ref) {
-      throw new Error("ref is required");
-    }
-    let diffRef = ref;
-    try {
-      const [base] = await runGit("merge-base", "HEAD", ref);
-      diffRef = base;
-    } catch {
-    }
-    const tracked = await runGit("diff", "--name-only", diffRef);
-    const untracked = await runGit(
-      "ls-files",
-      "--others",
-      "--exclude-standard"
-    );
-    return Array.from(/* @__PURE__ */ new Set([...tracked, ...untracked]));
-  }
-  /**
-   * Returns the contents of a file at a specific ref.
-   */
-  static async readFileAtRef(path, ref) {
-    let showRef = ref;
-    try {
-      const [base] = await runGit("merge-base", "HEAD", ref);
-      showRef = base;
-    } catch {
-    }
-    const { stdout } = await execFile("git", ["show", `${showRef}:${path}`], {
-      shell: true,
-      cwd: paths.targetRoot,
-      maxBuffer: 1024 * 1024 * 50
-    });
-    return stdout;
-  }
-}
-
-async function isMonoRepo() {
-  const rootPackageJsonPath = paths.resolveTargetRoot("package.json");
-  try {
-    const pkg = await fs__default.default.readJson(rootPackageJsonPath);
-    return Boolean(pkg?.workspaces?.packages);
-  } catch (error) {
-    return false;
-  }
-}
-
-const ENTRY_PATTERN = /^((?:@[^/]+\/)?[^@/]+)@(.+)$/;
-const SPECIAL_OBJECT_KEYS = [
-  `__metadata`,
-  `version`,
-  `resolution`,
-  `dependencies`,
-  `peerDependencies`,
-  `dependenciesMeta`,
-  `peerDependenciesMeta`,
-  `binaries`
-];
-class Lockfile {
-  constructor(packages, data) {
-    this.packages = packages;
-    this.data = data;
-  }
-  /**
-   * Load a {@link Lockfile} from a file path.
-   */
-  static async load(path) {
-    const lockfileContents = await fs__default.default.readFile(path, "utf8");
-    return Lockfile.parse(lockfileContents);
-  }
-  /**
-   * Parse lockfile contents into a {@link Lockfile}.
-   *
-   * @public
-   */
-  static parse(content) {
-    let data;
-    try {
-      data = parsers.parseSyml(content);
-    } catch (err) {
-      throw new Error(`Failed yarn.lock parse, ${err}`);
-    }
-    const packages = /* @__PURE__ */ new Map();
-    for (const [key, value] of Object.entries(data)) {
-      if (SPECIAL_OBJECT_KEYS.includes(key)) continue;
-      const [, name, ranges] = ENTRY_PATTERN.exec(key) ?? [];
-      if (!name) {
-        throw new Error(`Failed to parse yarn.lock entry '${key}'`);
-      }
-      let queries = packages.get(name);
-      if (!queries) {
-        queries = [];
-        packages.set(name, queries);
-      }
-      for (let range of ranges.split(/\s*,\s*/)) {
-        if (range.startsWith(`${name}@`)) {
-          range = range.slice(`${name}@`.length);
-        }
-        if (range.startsWith("npm:")) {
-          range = range.slice("npm:".length);
-        }
-        queries.push({ range, version: value.version, dataKey: key });
-      }
-    }
-    return new Lockfile(packages, data);
-  }
-  /**
-   * Creates a simplified dependency graph from the lockfile data, where each
-   * key is a package, and the value is a set of all packages that it depends on
-   * across all versions.
-   */
-  createSimplifiedDependencyGraph() {
-    const graph = /* @__PURE__ */ new Map();
-    for (const [name, entries] of this.packages) {
-      const dependencies = new Set(
-        entries.flatMap((e) => {
-          const data = this.data[e.dataKey];
-          return [
-            ...Object.keys(data?.dependencies ?? {}),
-            ...Object.keys(data?.peerDependencies ?? {})
-          ];
-        })
-      );
-      graph.set(name, dependencies);
-    }
-    return graph;
-  }
-  /**
-   * Diff with another lockfile, returning entries that have been
-   * added, changed, and removed compared to the other lockfile.
-   */
-  diff(otherLockfile) {
-    const diff = {
-      added: new Array(),
-      changed: new Array(),
-      removed: new Array()
-    };
-    const remainingOldNames = new Set(this.packages.keys());
-    for (const [name, otherQueries] of otherLockfile.packages) {
-      remainingOldNames.delete(name);
-      const thisQueries = this.packages.get(name);
-      if (!thisQueries) {
-        diff.removed.push(...otherQueries.map((q) => ({ name, range: q.range })));
-        continue;
-      }
-      const remainingOldRanges = new Set(thisQueries.map((q) => q.range));
-      for (const otherQuery of otherQueries) {
-        remainingOldRanges.delete(otherQuery.range);
-        const thisQuery = thisQueries.find((q) => q.range === otherQuery.range);
-        if (!thisQuery) {
-          diff.removed.push({ name, range: otherQuery.range });
-          continue;
-        }
-        const otherPkg = otherLockfile.data[otherQuery.dataKey];
-        const thisPkg = this.data[thisQuery.dataKey];
-        if (otherPkg && thisPkg) {
-          const thisCheck = thisPkg.integrity || thisPkg.checksum;
-          const otherCheck = otherPkg.integrity || otherPkg.checksum;
-          if (thisCheck !== otherCheck) {
-            diff.changed.push({ name, range: otherQuery.range });
-          }
-        }
-      }
-      for (const thisRange of remainingOldRanges) {
-        diff.added.push({ name, range: thisRange });
-      }
-    }
-    for (const name of remainingOldNames) {
-      const queries = this.packages.get(name) ?? [];
-      diff.added.push(...queries.map((q) => ({ name, range: q.range })));
-    }
-    return diff;
-  }
-}
-
-class PackageGraph extends Map {
-  /**
-   * Lists all local packages in a monorepo.
-   */
-  static async listTargetPackages() {
-    const { packages } = await getPackages.getPackages(paths.targetDir);
-    return packages;
-  }
-  /**
-   * Creates a package graph from a list of local packages.
-   */
-  static fromPackages(packages) {
-    const graph = new PackageGraph();
-    for (const pkg of packages) {
-      const name = pkg.packageJson.name;
-      const existingPkg = graph.get(name);
-      if (existingPkg) {
-        throw new Error(
-          `Duplicate package name '${name}' at ${pkg.dir} and ${existingPkg.dir}`
-        );
-      }
-      graph.set(name, {
-        name,
-        dir: pkg.dir,
-        packageJson: pkg.packageJson,
-        allLocalDependencies: /* @__PURE__ */ new Map(),
-        publishedLocalDependencies: /* @__PURE__ */ new Map(),
-        localDependencies: /* @__PURE__ */ new Map(),
-        localDevDependencies: /* @__PURE__ */ new Map(),
-        localOptionalDependencies: /* @__PURE__ */ new Map(),
-        allLocalDependents: /* @__PURE__ */ new Map(),
-        publishedLocalDependents: /* @__PURE__ */ new Map(),
-        localDependents: /* @__PURE__ */ new Map(),
-        localDevDependents: /* @__PURE__ */ new Map(),
-        localOptionalDependents: /* @__PURE__ */ new Map()
-      });
-    }
-    for (const node of graph.values()) {
-      for (const depName of Object.keys(node.packageJson.dependencies || {})) {
-        const depPkg = graph.get(depName);
-        if (depPkg) {
-          node.allLocalDependencies.set(depName, depPkg);
-          node.publishedLocalDependencies.set(depName, depPkg);
-          node.localDependencies.set(depName, depPkg);
-          depPkg.allLocalDependents.set(node.name, node);
-          depPkg.publishedLocalDependents.set(node.name, node);
-          depPkg.localDependents.set(node.name, node);
-        }
-      }
-      for (const depName of Object.keys(
-        node.packageJson.devDependencies || {}
-      )) {
-        const depPkg = graph.get(depName);
-        if (depPkg) {
-          node.allLocalDependencies.set(depName, depPkg);
-          node.localDevDependencies.set(depName, depPkg);
-          depPkg.allLocalDependents.set(node.name, node);
-          depPkg.localDevDependents.set(node.name, node);
-        }
-      }
-      for (const depName of Object.keys(
-        node.packageJson.optionalDependencies || {}
-      )) {
-        const depPkg = graph.get(depName);
-        if (depPkg) {
-          node.allLocalDependencies.set(depName, depPkg);
-          node.publishedLocalDependencies.set(depName, depPkg);
-          node.localOptionalDependencies.set(depName, depPkg);
-          depPkg.allLocalDependents.set(node.name, node);
-          depPkg.publishedLocalDependents.set(node.name, node);
-          depPkg.localOptionalDependents.set(node.name, node);
-        }
-      }
-    }
-    return graph;
-  }
-  /**
-   * Traverses the package graph and collects a set of package names.
-   *
-   * The traversal starts at the provided list names, and continues
-   * throughout all the names returned by the `collectFn`, which is
-   * called once for each seen package.
-   */
-  collectPackageNames(startingPackageNames, collectFn) {
-    const targets = /* @__PURE__ */ new Set();
-    const searchNames = startingPackageNames.slice();
-    while (searchNames.length) {
-      const name = searchNames.pop();
-      if (targets.has(name)) {
-        continue;
-      }
-      const node = this.get(name);
-      if (!node) {
-        throw new Error(`Package '${name}' not found`);
-      }
-      targets.add(name);
-      const collected = collectFn(node);
-      if (collected) {
-        searchNames.push(...collected);
-      }
-    }
-    return targets;
-  }
-  /**
-   * Lists all packages that have changed since a given git ref.
-   *
-   * @remarks
-   *
-   * If the `analyzeLockfile` option is set to true, the change detection will
-   * also consider changes to the dependency management lockfile.
-   */
-  async listChangedPackages(options) {
-    const changedFiles = await GitUtils.listChangedFiles(options.ref);
-    const dirMap = new Map(
-      Array.from(this.values()).map((pkg) => [
-        // relative from root, convert to posix, and add a / at the end
-        path__default.default.relative(paths.targetRoot, pkg.dir).split(path__default.default.sep).join(path__default.default.posix.sep) + path__default.default.posix.sep,
-        pkg
-      ])
-    );
-    const packageDirs = Array.from(dirMap.keys());
-    const result = new Array();
-    let searchIndex = 0;
-    changedFiles.sort();
-    packageDirs.sort();
-    for (const packageDir of packageDirs) {
-      while (searchIndex < changedFiles.length && changedFiles[searchIndex] < packageDir) {
-        searchIndex += 1;
-      }
-      if (changedFiles[searchIndex]?.startsWith(packageDir)) {
-        searchIndex += 1;
-        result.push(dirMap.get(packageDir));
-        while (changedFiles[searchIndex]?.startsWith(packageDir)) {
-          searchIndex += 1;
-        }
-      }
-    }
-    if (changedFiles.includes("yarn.lock") && options.analyzeLockfile) {
-      let thisLockfile;
-      let otherLockfile;
-      try {
-        thisLockfile = await Lockfile.load(
-          paths.resolveTargetRoot("yarn.lock")
-        );
-        otherLockfile = Lockfile.parse(
-          await GitUtils.readFileAtRef("yarn.lock", options.ref)
-        );
-      } catch (error) {
-        console.warn(
-          `Failed to read lockfiles, assuming all packages have changed, ${error}`
-        );
-        return Array.from(this.values());
-      }
-      const diff = thisLockfile.diff(otherLockfile);
-      const graph = thisLockfile.createSimplifiedDependencyGraph();
-      {
-        const otherGraph = thisLockfile.createSimplifiedDependencyGraph();
-        for (const [name, dependencies] of otherGraph) {
-          const node = graph.get(name);
-          if (node) {
-            dependencies.forEach((d) => node.add(d));
-          } else {
-            graph.set(name, dependencies);
-          }
-        }
-      }
-      const changedPackages = new Set(
-        [...diff.added, ...diff.changed, ...diff.removed].map((e) => e.name)
-      );
-      let changed = false;
-      do {
-        changed = false;
-        for (const [name, dependencies] of graph) {
-          if (changedPackages.has(name)) {
-            continue;
-          }
-          for (const dep of dependencies) {
-            if (changedPackages.has(dep)) {
-              changed = true;
-              changedPackages.add(name);
-              break;
-            }
-          }
-        }
-      } while (changed);
-      for (const node of this.values()) {
-        if (changedPackages.has(node.name) && !result.includes(node)) {
-          result.push(node);
-        }
-      }
-    }
-    return result;
-  }
-}
-
-const packageRoleInfos = [
-  {
-    role: "frontend",
-    platform: "web",
-    output: ["bundle"]
-  },
-  {
-    role: "backend",
-    platform: "node",
-    output: ["bundle"]
-  },
-  {
-    role: "cli",
-    platform: "node",
-    output: ["cjs"]
-  },
-  {
-    role: "web-library",
-    platform: "web",
-    output: ["types", "esm"]
-  },
-  {
-    role: "node-library",
-    platform: "node",
-    output: ["types", "cjs"]
-  },
-  {
-    role: "common-library",
-    platform: "common",
-    output: ["types", "esm", "cjs"]
-  },
-  {
-    role: "frontend-plugin",
-    platform: "web",
-    output: ["types", "esm"]
-  },
-  {
-    role: "frontend-plugin-module",
-    platform: "web",
-    output: ["types", "esm"]
-  },
-  {
-    role: "frontend-dynamic-container",
-    // experimental
-    platform: "web",
-    output: ["bundle"]
-  },
-  {
-    role: "backend-plugin",
-    platform: "node",
-    output: ["types", "cjs"]
-  },
-  {
-    role: "backend-plugin-module",
-    platform: "node",
-    output: ["types", "cjs"]
-  }
-];
-const readSchema = zod.z.object({
-  name: zod.z.string().optional(),
-  backstage: zod.z.object({
-    role: zod.z.string().optional()
-  }).optional()
-});
-const detectionSchema = zod.z.object({
-  name: zod.z.string().optional(),
-  scripts: zod.z.object({
-    start: zod.z.string().optional(),
-    build: zod.z.string().optional()
-  }).optional(),
-  publishConfig: zod.z.object({
-    main: zod.z.string().optional(),
-    types: zod.z.string().optional(),
-    module: zod.z.string().optional()
-  }).optional(),
-  main: zod.z.string().optional(),
-  types: zod.z.string().optional(),
-  module: zod.z.string().optional()
-});
-class PackageRoles {
-  /**
-   * Get the associated info for a package role.
-   */
-  static getRoleInfo(role) {
-    const roleInfo = packageRoleInfos.find((r) => r.role === role);
-    if (!roleInfo) {
-      throw new Error(`Unknown package role '${role}'`);
-    }
-    return roleInfo;
-  }
-  /**
-   * Given package JSON data, get the package role.
-   */
-  static getRoleFromPackage(pkgJson) {
-    const pkg = readSchema.parse(pkgJson);
-    if (pkg.backstage) {
-      const { role } = pkg.backstage;
-      if (!role) {
-        throw new Error(
-          `Package ${pkg.name} must specify a role in the "backstage" field`
-        );
-      }
-      return this.getRoleInfo(role).role;
-    }
-    return void 0;
-  }
-  /**
-   * Attempt to detect the role of a package from its package.json.
-   */
-  static detectRoleFromPackage(pkgJson) {
-    const pkg = detectionSchema.parse(pkgJson);
-    if (pkg.scripts?.start?.includes("app:serve")) {
-      return "frontend";
-    }
-    if (pkg.scripts?.build?.includes("backend:bundle")) {
-      return "backend";
-    }
-    if (pkg.name?.includes("plugin-") && pkg.name?.includes("-backend-module-")) {
-      return "backend-plugin-module";
-    }
-    if (pkg.name?.includes("plugin-") && pkg.name?.includes("-module-")) {
-      return "frontend-plugin-module";
-    }
-    if (pkg.scripts?.start?.includes("plugin:serve")) {
-      return "frontend-plugin";
-    }
-    if (pkg.scripts?.start?.includes("backend:dev")) {
-      return "backend-plugin";
-    }
-    const mainEntry = pkg.publishConfig?.main || pkg.main;
-    const moduleEntry = pkg.publishConfig?.module || pkg.module;
-    const typesEntry = pkg.publishConfig?.types || pkg.types;
-    if (typesEntry) {
-      if (mainEntry && moduleEntry) {
-        return "common-library";
-      }
-      if (moduleEntry || mainEntry?.endsWith(".esm.js")) {
-        return "web-library";
-      }
-      if (mainEntry) {
-        return "node-library";
-      }
-    } else if (mainEntry) {
-      return "cli";
-    }
-    return void 0;
-  }
-}
-
-exports.GitUtils = GitUtils;
-exports.Lockfile = Lockfile;
-exports.PackageGraph = PackageGraph;
-exports.PackageRoles = PackageRoles;
-exports.isMonoRepo = isMonoRepo;
+exports.GitUtils = GitUtils.GitUtils;
+exports.isMonoRepo = isMonoRepo.isMonoRepo;
+exports.PackageGraph = PackageGraph.PackageGraph;
+exports.Lockfile = Lockfile.Lockfile;
+exports.PackageRoles = PackageRoles.PackageRoles;
 //# sourceMappingURL=index.cjs.js.map
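
With this change `index.cjs.js` is reduced to re-exports, and the implementation above moves into the per-module files listed at the top (Lockfile additionally gains the `getDependencyTreeHash` utility noted in the changelog), so the public API surface stays the same. A sketch tying the re-exported pieces together, based on the signatures visible in the removed code (the ref name is illustrative):

```js
const { PackageGraph, PackageRoles } = require('@backstage/cli-node');

async function main() {
  // Discover the local packages of the target monorepo and build the graph.
  const packages = await PackageGraph.listTargetPackages();
  const graph = PackageGraph.fromPackages(packages);

  // Packages changed since the ref; with analyzeLockfile set, a changed
  // yarn.lock also flags packages whose transitive dependencies changed.
  const changed = await graph.listChangedPackages({
    ref: 'origin/master',
    analyzeLockfile: true,
  });

  for (const pkg of changed) {
    // Heuristic role detection from package.json scripts and entry points.
    console.log(pkg.name, PackageRoles.detectRoleFromPackage(pkg.packageJson));
  }
}

main();
```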