@mui/internal-bundle-size-checker 1.0.9-canary.5 → 1.0.9-canary.50
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +5 -2
- package/build/browser.d.ts +2 -0
- package/build/builder.d.ts +46 -0
- package/build/cli.d.ts +1 -0
- package/build/configLoader.d.ts +23 -0
- package/build/constants.d.ts +1 -0
- package/build/defineConfig.d.ts +8 -0
- package/build/fetchSnapshot.d.ts +7 -0
- package/build/fetchSnapshotWithFallback.d.ts +11 -0
- package/build/formatUtils.d.ts +6 -0
- package/build/git.d.ts +23 -0
- package/build/github.d.ts +2 -0
- package/build/index.d.ts +4 -0
- package/build/notifyPr.d.ts +12 -0
- package/build/renderMarkdownReport.d.ts +45 -0
- package/build/sizeDiff.d.ts +59 -0
- package/build/strings.d.ts +23 -0
- package/build/uploadSnapshot.d.ts +10 -0
- package/build/worker.d.ts +12 -0
- package/package.json +21 -27
- package/src/{viteBuilder.js → builder.js} +99 -34
- package/src/cli.js +169 -43
- package/src/configLoader.js +122 -43
- package/src/constants.js +1 -0
- package/src/fetchSnapshot.js +3 -61
- package/src/fetchSnapshotWithFallback.js +34 -0
- package/src/git.js +50 -0
- package/src/github.js +4 -1
- package/src/index.js +3 -9
- package/src/notifyPr.js +81 -0
- package/src/renderMarkdownReport.js +18 -24
- package/src/renderMarkdownReport.test.js +97 -80
- package/src/sizeDiff.js +1 -5
- package/src/strings.js +38 -0
- package/src/types.d.ts +12 -23
- package/src/uploadSnapshot.js +2 -2
- package/src/worker.js +13 -20
- package/tsconfig.build.json +15 -0
- package/tsconfig.json +2 -2
- package/src/webpackBuilder.js +0 -267
package/src/configLoader.js
CHANGED
```diff
@@ -2,9 +2,11 @@
  * Utility to load the bundle-size-checker configuration
  */

-import fs from 'fs';
-import path from 'path';
+import fs from 'node:fs/promises';
+import path from 'node:path';
 import envCi from 'env-ci';
+import * as module from 'node:module';
+import * as url from 'node:url';

 /**
  * Attempts to load and parse a single config file
@@ -14,10 +16,6 @@ import envCi from 'env-ci';
  */
 async function loadConfigFile(configPath) {
   try {
-    if (!fs.existsSync(configPath)) {
-      return null;
-    }
-
     // Dynamic import for ESM
     const configUrl = new URL(`file://${configPath}`);
     const { default: config } = await import(configUrl.href);
@@ -29,12 +27,18 @@ async function loadConfigFile(configPath) {
       resolvedConfig = await config;
     } else if (typeof config === 'function') {
       resolvedConfig = await config();
+    } else {
+      // Handle plain config objects
+      resolvedConfig = config;
     }

     return resolvedConfig;
-  } catch (error) {
-
-
+  } catch (/** @type {any} */ error) {
+    if (error.code === 'ERR_MODULE_NOT_FOUND') {
+      return null;
+    }
+
+    throw error;
   }
 }

@@ -77,59 +81,132 @@ export function applyUploadConfigDefaults(uploadConfig, ciInfo) {
   };
 }

+/**
+ * @param {{ [s: string]: any; } | ArrayLike<any>} exportsObj
+ * @returns {string[]} Array of export paths
+ */
+function findExports(exportsObj) {
+  const paths = [];
+  for (const [key, value] of Object.entries(exportsObj)) {
+    // ignore null values
+    if (!value) {
+      continue;
+    }
+    if (key.startsWith('.')) {
+      paths.push(key);
+    } else {
+      paths.push(...findExports(value));
+    }
+  }
+  return paths;
+}
+
+/**
+ * @param {import("fs").PathLike | fs.FileHandle} pkgJson
+ * @returns {Promise<string[]>}
+ */
+async function findExportedPaths(pkgJson) {
+  const pkgContent = await fs.readFile(pkgJson, 'utf8');
+  const { exports = {} } = JSON.parse(pkgContent);
+  return findExports(exports);
+}
+
+/**
+ * Checks if the given import source is a top-level package
+ * @param {string} importSrc - The import source string
+ * @returns {boolean} - True if it's a top-level package, false otherwise
+ */
+function isPackageTopLevel(importSrc) {
+  const parts = importSrc.split('/');
+  return parts.length === 1 || (parts.length === 2 && parts[0].startsWith('@'));
+}
+
 /**
  * Normalizes entries to ensure they have a consistent format and ids are unique
  * @param {EntryPoint[]} entries - The array of entries from the config
- * @
+ * @param {string} configPath - The path to the configuration file
+ * @returns {Promise<ObjectEntry[]>} - Normalized entries with uniqueness enforced
  */
-function normalizeEntries(entries) {
+async function normalizeEntries(entries, configPath) {
   const usedIds = new Set();

-
-
-
-
-
-
-      entry = {
-        id: entry,
-        import: importSrc,
-        importedNames: [importName],
-      };
-    } else {
-      // For entries like '@mui/material', create an object with import only
-      entry = {
-        id: entry,
-        import: importSrc,
-      };
-    }
-  }
+  const result = (
+    await Promise.all(
+      entries.map(async (entry) => {
+        if (typeof entry === 'string') {
+          entry = { id: entry };
+        }

-
-      throw new Error('Object entries must have an id property');
-    }
+        entry = { ...entry };

-
-
-
+        if (!entry.id) {
+          throw new Error('Object entries must have an id property');
+        }

+        if (!entry.code && !entry.import) {
+          // Transform string entries into object entries
+          const [importSrc, importName] = entry.id.split('#');
+          entry.import = importSrc;
+          if (importName) {
+            entry.importedNames = [importName];
+          }
+          if (isPackageTopLevel(entry.import) && !entry.importedNames) {
+            entry.track = true;
+          }
+        }
+
+        if (entry.expand) {
+          if (!entry.import || !isPackageTopLevel(entry.import)) {
+            throw new Error(
+              `Entry "${entry.id}": expand can only be used with top-level package imports`,
+            );
+          }
+          if (!module.findPackageJSON) {
+            throw new Error(
+              "Your Node.js version doesn't support `module.findPackageJSON`, which is required to expand entries.",
+            );
+          }
+          const pkgJson = module.findPackageJSON(entry.import, url.pathToFileURL(configPath));
+          if (!pkgJson) {
+            throw new Error(`Can't find package.json for entry "${entry.id}".`);
+          }
+          const exportedPaths = await findExportedPaths(pkgJson);
+
+          const expandedEntries = [];
+          for (const exportPath of exportedPaths) {
+            const importSrc = entry.import + exportPath.slice(1);
+            expandedEntries.push({
+              id: importSrc,
+              import: importSrc,
+              track: isPackageTopLevel(importSrc),
+            });
+          }
+          return expandedEntries;
+        }
+
+        return [entry];
+      }),
+    )
+  ).flat();
+
+  for (const entry of result) {
     if (usedIds.has(entry.id)) {
       throw new Error(`Duplicate entry id found: "${entry.id}". Entry ids must be unique.`);
     }
-
     usedIds.add(entry.id);
+  }

-
-  });
+  return result;
 }

 /**
  * Apply default values to the configuration using CI environment
  * @param {BundleSizeCheckerConfigObject} config - The loaded configuration
- * @
+ * @param {string} configPath - The path to the configuration file
+ * @returns {Promise<NormalizedBundleSizeCheckerConfig>} Configuration with defaults applied
  * @throws {Error} If required fields are missing
  */
-function applyConfigDefaults(config) {
+async function applyConfigDefaults(config, configPath) {
   // Get environment CI information
   /** @type {{ branch?: string, isPr?: boolean, prBranch?: string, slug?: string}} */
   const ciInfo = envCi();
@@ -145,8 +222,10 @@ function applyConfigDefaults(config) {
   // Clone the config to avoid mutating the original
   /** @type {NormalizedBundleSizeCheckerConfig} */
   const result = {
-    entrypoints: normalizeEntries(config.entrypoints),
+    entrypoints: await normalizeEntries(config.entrypoints, configPath),
     upload: null, // Default to disabled
+    comment: config.comment !== undefined ? config.comment : true, // Default to enabled
+    replace: config.replace || {}, // String replacements, default to empty object
   };

   // Handle different types of upload value
@@ -195,7 +274,7 @@ export async function loadConfig(rootDir) {
     const config = await loadConfigFile(configPath);
     if (config) {
       // Apply defaults and return the config
-      return applyConfigDefaults(config);
+      return applyConfigDefaults(config, configPath);
     }
   }

```
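For orientation, here is a minimal sketch of a config exercising the entry shapes the new `normalizeEntries` accepts: a string id with a `#Name` suffix (turned into `importedNames`), and an object entry using the new `expand` flag, which fans a top-level package out into one entry per path in its `exports` map (and requires a Node.js version that provides `module.findPackageJSON`). The config file name and the chosen packages are assumptions for illustration, not taken from this diff.

```js
// Hypothetical bundle-size-checker config file (name assumed).
import { defineConfig } from '@mui/internal-bundle-size-checker';

export default defineConfig({
  entrypoints: [
    '@mui/material#Button', // '#' suffix becomes importedNames: ['Button']
    { id: '@mui/material', expand: true }, // one entry per path in the package's `exports`
  ],
  comment: true, // applyConfigDefaults also defaults this to true when omitted
});
```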
package/src/constants.js
ADDED
```diff
@@ -0,0 +1 @@
+export const DASHBOARD_ORIGIN = 'https://frontend-public.mui.com';
```
package/src/fetchSnapshot.js
CHANGED
```diff
@@ -1,10 +1,10 @@
-
+// This file must be importable in the browser

 /**
  *
  * @param {string} repo - The name of the repository e.g. 'mui/material-ui'
  * @param {string} sha - The commit SHA
- * @returns {Promise<import('./sizeDiff').SizeSnapshot>} - The size snapshot data
+ * @returns {Promise<import('./sizeDiff.js').SizeSnapshot>} - The size snapshot data
  */
 export async function fetchSnapshot(repo, sha) {
   const urlsToTry = [
@@ -27,7 +27,7 @@
         continue;
       }

-      return response.json();
+      return /** @type {Promise<any>} */ (response.json());
     } catch (error) {
       lastError = error;
       continue;
@@ -36,61 +36,3 @@

   throw new Error(`Failed to fetch snapshot`, { cause: lastError });
 }
-
-/**
- * Gets parent commits for a given commit SHA using GitHub API
- * @param {string} repo - Repository name (e.g., 'mui/material-ui')
- * @param {string} commit - The commit SHA to start from
- * @param {number} depth - How many commits to retrieve (including the starting commit)
- * @returns {Promise<string[]>} Array of commit SHAs in chronological order (excluding the starting commit)
- */
-async function getParentCommits(repo, commit, depth = 4) {
-  try {
-    const [owner, repoName] = repo.split('/');
-
-    const { data: commits } = await octokit.repos.listCommits({
-      owner,
-      repo: repoName,
-      sha: commit,
-      per_page: depth,
-    });
-
-    // Skip the first commit (which is the starting commit) and return the rest
-    return commits.slice(1).map((commitDetails) => commitDetails.sha);
-  } catch (/** @type {any} */ error) {
-    console.warn(`Failed to get parent commits for ${commit}: ${error.message}`);
-    return [];
-  }
-}
-
-/**
- * Attempts to fetch a snapshot with fallback to parent commits
- * @param {string} repo - Repository name
- * @param {string} commit - The commit SHA to start from
- * @param {number} [fallbackDepth=3] - How many parent commits to try as fallback
- * @returns {Promise<{snapshot: import('./sizeDiff').SizeSnapshot | null, actualCommit: string | null}>}
- */
-export async function fetchSnapshotWithFallback(repo, commit, fallbackDepth = 3) {
-  // Try the original commit first
-  try {
-    const snapshot = await fetchSnapshot(repo, commit);
-    return { snapshot, actualCommit: commit };
-  } catch (/** @type {any} */ error) {
-    // fallthrough to parent commits if the snapshot for the original commit fails
-  }
-
-  // Get parent commits and try each one
-  const parentCommits = await getParentCommits(repo, commit, fallbackDepth + 1);
-
-  for (const parentCommit of parentCommits) {
-    try {
-      // eslint-disable-next-line no-await-in-loop
-      const snapshot = await fetchSnapshot(repo, parentCommit);
-      return { snapshot, actualCommit: parentCommit };
-    } catch {
-      // fallthrough to the next parent commit if fetching fails
-    }
-  }
-
-  return { snapshot: null, actualCommit: null };
-}
```
package/src/fetchSnapshotWithFallback.js
ADDED
```diff
@@ -0,0 +1,34 @@
+import { fetchSnapshot } from './fetchSnapshot.js';
+import { getParentCommits } from './git.js';
+
+/**
+ * Attempts to fetch a snapshot with fallback to parent commits
+ * @param {string} repo - Repository name
+ * @param {string} commit - The commit SHA to start from
+ * @param {number} [fallbackDepth=3] - How many parent commits to try as fallback
+ * @returns {Promise<{snapshot: import('./sizeDiff.js').SizeSnapshot | null, actualCommit: string | null}>}
+ */
+export async function fetchSnapshotWithFallback(repo, commit, fallbackDepth = 3) {
+  // Try the original commit first
+  try {
+    const snapshot = await fetchSnapshot(repo, commit);
+    return { snapshot, actualCommit: commit };
+  } catch (/** @type {any} */ error) {
+    // fallthrough to parent commits if the snapshot for the original commit fails
+  }
+
+  // Get parent commits and try each one
+  const parentCommits = await getParentCommits(repo, commit, fallbackDepth);
+
+  for (const parentCommit of parentCommits) {
+    try {
+      // eslint-disable-next-line no-await-in-loop
+      const snapshot = await fetchSnapshot(repo, parentCommit);
+      return { snapshot, actualCommit: parentCommit };
+    } catch {
+      // fallthrough to the next parent commit if fetching fails
+    }
+  }
+
+  return { snapshot: null, actualCommit: null };
+}
```
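A short usage sketch of the new module; the repository and commit SHA are placeholders, and the return shape follows the JSDoc above.

```js
import { fetchSnapshotWithFallback } from './fetchSnapshotWithFallback.js';

// 'abc1234' is a placeholder SHA; in practice this is the merge-base commit.
const { snapshot, actualCommit } = await fetchSnapshotWithFallback(
  'mui/material-ui',
  'abc1234',
  3, // try up to 3 parent commits when the exact snapshot is missing
);

if (snapshot === null) {
  console.warn('No base snapshot found for the commit or its recent parents');
} else if (actualCommit !== 'abc1234') {
  console.log(`Fell back to parent commit ${actualCommit}`);
}
```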
package/src/git.js
ADDED
```diff
@@ -0,0 +1,50 @@
+import { execa } from 'execa';
+import gitUrlParse from 'git-url-parse';
+
+/**
+ * Gets parent commits for a given commit SHA using git CLI
+ * @param {string} repo - Repository name (e.g., 'mui/material-ui') - ignored for git CLI
+ * @param {string} commit - The commit SHA to start from
+ * @param {number} depth - How many commits to retrieve (including the starting commit)
+ * @returns {Promise<string[]>} Array of commit SHAs in chronological order (excluding the starting commit)
+ */
+export async function getParentCommits(repo, commit, depth = 3) {
+  const { stdout } = await execa('git', ['rev-list', `--max-count=${depth}`, '--skip=1', commit]);
+  return stdout.trim().split('\n').filter(Boolean);
+}
+
+/**
+ * Compares two commits and returns merge base information using git CLI
+ * @param {string} base - Base commit SHA
+ * @param {string} head - Head commit SHA
+ * @returns {Promise<string>} Object with merge base commit info
+ */
+export async function getMergeBase(base, head) {
+  const { stdout } = await execa('git', ['merge-base', base, head]);
+  return stdout.trim();
+}
+
+/**
+ * Gets the current repository owner and name from git remote
+ * @returns {Promise<string | null>}
+ */
+async function getRemoteUrl(remote = 'origin') {
+  try {
+    const { stdout } = await execa('git', ['remote', 'get-url', remote]);
+    return stdout.trim();
+  } catch {
+    return null;
+  }
+}
+
+/**
+ * Gets the current repository owner and name from git remote
+ * @returns {Promise<{owner: string | null, name: string | null}>}
+ */
+export async function getCurrentRepoInfo() {
+  const remoteUrl = (await getRemoteUrl('upstream')) || (await getRemoteUrl('origin'));
+  if (!remoteUrl) {
+    return { owner: null, name: null };
+  }
+  return gitUrlParse(remoteUrl);
+}
```
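These helpers shell out to the local `git` binary via `execa`, so they only work inside a checkout that has the relevant commits fetched. A usage sketch with placeholder refs follows.

```js
import { getCurrentRepoInfo, getMergeBase, getParentCommits } from './git.js';

// Placeholder refs; getMergeBase is what renderMarkdownReport uses by default.
const baseCommit = await getMergeBase('origin/master', 'HEAD');
// The repo argument is ignored by the git CLI implementation (see JSDoc above).
const parents = await getParentCommits('mui/material-ui', baseCommit, 3);
const { owner, name } = await getCurrentRepoInfo();

console.log({ baseCommit, parents, owner, name });
```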
package/src/github.js
CHANGED
```diff
@@ -4,4 +4,7 @@ import { Octokit } from '@octokit/rest';

 // Create and export Octokit instance
 /** @type {import('@octokit/rest').Octokit} */
-export const octokit = new Octokit(
+export const octokit = new Octokit({
+  auth: process.env.DANGER_GITHUB_API_TOKEN,
+  userAgent: 'bundle-size-checker',
+});
```
package/src/index.js
CHANGED
```diff
@@ -1,13 +1,7 @@
+/// <reference types="./types.d.ts" />
+
 import defineConfig from './defineConfig.js';
 import { loadConfig } from './configLoader.js';
-import { calculateSizeDiff } from './sizeDiff.js';
 import { renderMarkdownReport } from './renderMarkdownReport.js';
-import { fetchSnapshot } from './fetchSnapshot.js';
-
-export { defineConfig, loadConfig, calculateSizeDiff, renderMarkdownReport, fetchSnapshot };

-
- * @typedef {import('./sizeDiff.js').Size} Size
- * @typedef {import('./sizeDiff.js').SizeSnapshot} SizeSnapshot
- * @typedef {import('./sizeDiff.js').ComparisonResult} ComparisonResult
- */
+export { defineConfig, loadConfig, renderMarkdownReport };
```
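After this change the package's index narrows to three named exports; `calculateSizeDiff` and `fetchSnapshot` remain in their own modules but are no longer re-exported. A consumer-side sketch:

```js
import { defineConfig, loadConfig, renderMarkdownReport } from '@mui/internal-bundle-size-checker';

// loadConfig(rootDir) resolves and normalizes the config as shown in configLoader.js above.
const config = await loadConfig(process.cwd());
console.log(config.entrypoints.map((entry) => entry.id));
```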
package/src/notifyPr.js
ADDED
```diff
@@ -0,0 +1,81 @@
+// @ts-check
+
+import { octokit } from './github.js';
+
+/**
+ * Recursively searches for a comment containing the specified marker.
+ * Searches page-by-page (newest first) and stops when found or no more pages exist.
+ *
+ * @param {string} owner - Repository owner
+ * @param {string} repoName - Repository name
+ * @param {number} prNumber - Pull request number
+ * @param {string} marker - HTML comment marker to search for
+ * @param {number} page - Current page number (default: 1)
+ */
+async function findCommentByMarker(owner, repoName, prNumber, marker, page = 1) {
+  const { data: comments } = await octokit.issues.listComments({
+    owner,
+    repo: repoName,
+    issue_number: prNumber,
+    sort: 'updated',
+    direction: 'desc',
+    per_page: 100,
+    page,
+  });
+
+  // Base case: no comments on this page
+  if (comments.length <= 0) {
+    return null;
+  }
+
+  // Success case: found comment with marker
+  const foundComment = comments.find((comment) => comment.body && comment.body.includes(marker));
+  if (foundComment) {
+    return foundComment;
+  }
+
+  return findCommentByMarker(owner, repoName, prNumber, marker, page + 1);
+}
+
+/**
+ * Creates or updates a comment on a pull request with the specified content.
+ * Uses an HTML comment marker to identify and update existing comments.
+ * Searches page-by-page (newest first) and stops early when comment is found.
+ *
+ * @param {string} repo - The repository in format "owner/repo"
+ * @param {number} prNumber - The pull request number
+ * @param {string} id - Unique identifier to mark the comment for future updates
+ * @param {string} content - The content to post or update in the comment
+ * @returns {Promise<void>}
+ */
+export async function notifyPr(repo, prNumber, id, content) {
+  const [owner, repoName] = repo.split('/');
+
+  if (!owner || !repoName) {
+    throw new Error(`Invalid repo format. Expected "owner/repo", got "${repo}"`);
+  }
+
+  const marker = `<!-- bundle-size-checker-id: ${id} -->`;
+  const commentBody = `${marker}\n${content}`;
+
+  // Search for existing comment with our marker
+  const existingComment = await findCommentByMarker(owner, repoName, prNumber, marker);
+
+  if (existingComment) {
+    // Update existing comment
+    await octokit.issues.updateComment({
+      owner,
+      repo: repoName,
+      comment_id: existingComment.id,
+      body: commentBody,
+    });
+  } else {
+    // Create new comment
+    await octokit.issues.createComment({
+      owner,
+      repo: repoName,
+      issue_number: prNumber,
+      body: commentBody,
+    });
+  }
+}
```
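A sketch of calling the new helper; it needs a `DANGER_GITHUB_API_TOKEN` in the environment (see github.js above), and the PR number and comment id below are placeholders.

```js
import { notifyPr } from './notifyPr.js';

// Posts a new comment on first run, then edits the same comment on later runs
// thanks to the `<!-- bundle-size-checker-id: ... -->` marker.
await notifyPr(
  'mui/material-ui',    // "owner/repo"
  12345,                // placeholder PR number
  'bundle-size-report', // placeholder id embedded in the HTML marker
  '## Bundle size report\n\nNo changes detected.',
);
```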
package/src/renderMarkdownReport.js
CHANGED
```diff
@@ -5,9 +5,11 @@
  */

 import { calculateSizeDiff } from './sizeDiff.js';
-import { fetchSnapshot
+import { fetchSnapshot } from './fetchSnapshot.js';
 import { displayPercentFormatter, byteSizeChangeFormatter } from './formatUtils.js';
-import {
+import { getMergeBase } from './git.js';
+import { fetchSnapshotWithFallback } from './fetchSnapshotWithFallback.js';
+import { DASHBOARD_ORIGIN } from './constants.js';

 /**
  * Generates a symbol based on the relative change value.
@@ -97,11 +99,13 @@ function formatMarkdownTable(columns, data) {
   const separators = alignments.map((align) => {
     switch (align) {
       case 'center':
-        return '
+        return ':---------:';
       case 'right':
         return '----------:';
+      case 'left':
+        return ':----------';
       default:
-        return '
+        return '-----------';
     }
   });
   table += `|${separators.join('|')}|\n`;
@@ -141,9 +145,9 @@ export function renderMarkdownReportContent(

   markdownContent += formatMarkdownTable(
     [
-      { field: 'id', header: 'Bundle' },
-      { field: 'parsed', header: 'Parsed
-      { field: 'gzip', header: 'Gzip
+      { field: 'id', header: 'Bundle', align: 'left' },
+      { field: 'parsed', header: 'Parsed size', align: 'right' },
+      { field: 'gzip', header: 'Gzip size', align: 'right' },
     ],
     trackedEntries.map(({ id, parsed, gzip }) => ({
       id,
@@ -193,50 +197,40 @@ export function renderMarkdownReportContent(
  *
  * @param {PrInfo} prInfo
  * @param {Object} [options] - Optional parameters
- * @param {string | null} [options.circleciBuildNumber] - The CircleCI build number
 * @param {string | null} [options.actualBaseCommit] - The actual commit SHA used for comparison (may differ from prInfo.base.sha)
 * @returns {URL}
 */
 function getDetailsUrl(prInfo, options = {}) {
-  const {
+  const { actualBaseCommit } = options;
   const detailedComparisonUrl = new URL(
-
+    `${DASHBOARD_ORIGIN}/size-comparison/${prInfo.base.repo.full_name}/diff`,
   );
   detailedComparisonUrl.searchParams.set('prNumber', String(prInfo.number));
   detailedComparisonUrl.searchParams.set('baseRef', prInfo.base.ref);
   detailedComparisonUrl.searchParams.set('baseCommit', actualBaseCommit || prInfo.base.sha);
   detailedComparisonUrl.searchParams.set('headCommit', prInfo.head.sha);
-  if (circleciBuildNumber) {
-    detailedComparisonUrl.searchParams.set('circleCIBuildNumber', circleciBuildNumber);
-  }
   return detailedComparisonUrl;
 }

 /**
  *
  * @param {PrInfo} prInfo
- * @param {string} [circleciBuildNumber] - The CircleCI build number
 * @param {Object} [options] - Additional options
 * @param {string[]} [options.track] - Array of bundle IDs to track
 * @param {number} [options.fallbackDepth=3] - How many parent commits to try as fallback when base snapshot is missing
 * @param {number} [options.maxDetailsLines=100] - Maximum number of bundles to show in details section
+ * @param {(base: string, head: string) => Promise<string>} [options.getMergeBase] - Custom function to get merge base commit
 * @returns {Promise<string>} Markdown report
 */
-export async function renderMarkdownReport(prInfo,
+export async function renderMarkdownReport(prInfo, options = {}) {
   let markdownContent = '';

   const prCommit = prInfo.head.sha;
   const repo = prInfo.base.repo.full_name;
   const { fallbackDepth = 3 } = options;

-  const
-  const
-    owner,
-    repo: repoName,
-    base: prInfo.base.sha,
-    head: prCommit,
-  });
-  const baseCommit = data.merge_base_commit.sha;
+  const getMergeBaseFn = options.getMergeBase || getMergeBase;
+  const baseCommit = await getMergeBaseFn(prInfo.base.sha, prCommit);

   const [baseResult, prSnapshot] = await Promise.all([
     fetchSnapshotWithFallback(repo, baseCommit, fallbackDepth),
@@ -257,7 +251,7 @@ export async function renderMarkdownReport(prInfo, circleciBuildNumber, options

   markdownContent += report;

-  markdownContent += `\n\n[Details of bundle changes](${getDetailsUrl(prInfo, {
+  markdownContent += `\n\n[Details of bundle changes](${getDetailsUrl(prInfo, { actualBaseCommit })})`;

   return markdownContent;
 }
```
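Finally, a sketch tying the report and the comment helper together under the updated signature (the `circleciBuildNumber` positional argument is gone). The `prInfo` object only mirrors the fields this file reads; all values are placeholders.

```js
import { renderMarkdownReport } from './renderMarkdownReport.js';
import { notifyPr } from './notifyPr.js';

// Placeholder PR metadata shaped after the PrInfo fields used above.
const prInfo = {
  number: 12345,
  head: { sha: 'feedfacecafe' },
  base: { ref: 'master', sha: 'abc1234def', repo: { full_name: 'mui/material-ui' } },
};

// By default getMergeBase shells out to git, so this needs a local checkout
// unless options.getMergeBase is provided.
const report = await renderMarkdownReport(prInfo, { fallbackDepth: 3 });
await notifyPr(prInfo.base.repo.full_name, prInfo.number, 'bundle-size-report', report);
```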