bun-doctor 0.0.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,749 @@
1
+ import path from "node:path";
2
+ import fs from "node:fs";
3
//#region src/constants.ts
// Tool version string.
const VERSION = "0.0.1";
// Canonical Bun documentation URLs cited as diagnostic sources by the rules below.
const BUN_DOCS = {
	autoInstall: "https://bun.com/docs/runtime/auto-install",
	bunfig: "https://bun.com/docs/runtime/bunfig",
	ci: "https://bun.com/docs/guides/install/cicd",
	catalogs: "https://bun.com/docs/pm/catalogs",
	environmentVariables: "https://bun.com/docs/runtime/environment-variables",
	hashing: "https://bun.com/docs/runtime/hashing",
	lifecycle: "https://bun.com/docs/pm/lifecycle",
	lockfile: "https://bun.com/docs/pm/lockfile",
	nodeCompatibility: "https://bun.com/docs/runtime/nodejs-compat",
	securityScanner: "https://bun.com/docs/pm/security-scanner-api",
	sqlite: "https://bun.com/docs/runtime/sqlite",
	testConfiguration: "https://bun.com/docs/test/configuration",
	testRunner: "https://bun.com/docs/test",
	typescript: "https://bun.com/docs/typescript",
	workspaces: "https://bun.com/docs/pm/workspaces"
};
// Directory names skipped during the recursive file walk (build output,
// dependency trees, VCS metadata). Dot-directories are skipped separately
// in collectFiles.
const IGNORED_DIRECTORIES = new Set([
	".git",
	".next",
	".turbo",
	"build",
	"coverage",
	"dist",
	"node_modules",
	"vendor"
]);
// File extensions treated as scannable JavaScript/TypeScript sources.
const SOURCE_FILE_EXTENSIONS = new Set([
	".cjs",
	".cts",
	".js",
	".jsx",
	".mjs",
	".mts",
	".ts",
	".tsx"
]);
//#endregion
43
//#region src/utils.ts
/** True only for object-literal-like values: not null and not an array. */
const isPlainObject = (value) => {
	if (value === null) return false;
	if (Array.isArray(value)) return false;
	return typeof value === "object";
};
45
/**
 * Returns true when filePath names an existing regular file.
 * Any fs error (missing path, permissions) is treated as "does not exist".
 */
const fileExists = (filePath) => {
	try {
		const stats = fs.statSync(filePath);
		return stats.isFile();
	} catch {
		return false;
	}
};
52
/**
 * Reads and parses filePath as JSON.
 * Returns null on any read or parse failure instead of throwing.
 */
const readJsonFile = (filePath) => {
	try {
		const raw = fs.readFileSync(filePath, "utf8");
		return JSON.parse(raw);
	} catch {
		return null;
	}
};
59
/**
 * Walks rootDirectory depth-first and returns the sorted list of file paths
 * for which predicate(filePath) returns true. Hidden directories, names in
 * IGNORED_DIRECTORIES, and unreadable directories are skipped silently.
 */
const collectFiles = (rootDirectory, predicate) => {
	const matches = [];
	const pending = [rootDirectory];
	while (pending.length > 0) {
		const directory = pending.pop();
		if (!directory) continue;
		let entries;
		try {
			entries = fs.readdirSync(directory, { withFileTypes: true });
		} catch {
			continue; // unreadable directory (permissions, race) — treat as empty
		}
		for (const entry of entries) {
			const fullPath = path.join(directory, entry.name);
			if (entry.isDirectory()) {
				const skip = entry.name.startsWith(".") || IGNORED_DIRECTORIES.has(entry.name);
				if (!skip) pending.push(fullPath);
			} else if (entry.isFile() && predicate(fullPath)) {
				matches.push(fullPath);
			}
		}
	}
	return matches.sort();
};
82
/**
 * Recognizes scannable source files by extension.
 * TypeScript declaration files (.d.ts) are never treated as sources.
 */
const isSourceFilePath = (filePath) =>
	!filePath.endsWith(".d.ts") && SOURCE_FILE_EXTENSIONS.has(path.extname(filePath));
86
/**
 * Normalizes filePath relative to rootDirectory using forward slashes,
 * returning "." for the root itself.
 */
const toRelativePath = (filePath, rootDirectory) => {
	const relative = path.relative(rootDirectory, filePath).replaceAll(path.sep, "/");
	return relative === "" ? "." : relative;
};
87
/**
 * Returns the 1-based number of the first line of content matching pattern,
 * or 1 when no line matches. The "g" flag is stripped so the probe regex is
 * stateless across lines.
 */
const findLineNumber = (content, pattern) => {
	const probe = new RegExp(pattern.source, pattern.flags.replace("g", ""));
	let lineNumber = 1;
	for (const line of content.split(/\r?\n/)) {
		if (probe.test(line)) return lineNumber;
		lineNumber += 1;
	}
	return 1;
};
94
/**
 * Converts an ignore-file wildcard pattern into an anchored RegExp.
 * Supported wildcards: `*` matches within one path segment, `**` matches
 * across segments. Every other character is literal.
 *
 * Fix: `?` is now escaped along with the other regex metacharacters.
 * Previously a literal `?` in a pattern acted as a regex quantifier
 * (e.g. "a?.js" matched ".js"), since `?` was missing from the escape class.
 */
const wildcardToRegExp = (pattern) => {
	const escaped = pattern
		.replace(/[.+?^${}()|[\]\\]/g, "\\$&") // escape metacharacters, incl. `?`
		.replaceAll("**", "__DOUBLE_STAR__") // protect `**` before rewriting `*`
		.replaceAll("*", "[^/]*")
		.replaceAll("__DOUBLE_STAR__", ".*");
	return new RegExp(`^${escaped}$`);
};
//#endregion
99
//#region src/config.ts
const CONFIG_FILE_NAME = "bun-doctor.config.json";
/**
 * Loads doctor configuration with this precedence: a bun-doctor.config.json
 * in the project root, then the package.json "bunDoctor" field, then {}.
 * An unparseable config file degrades to {} rather than falling through.
 */
const loadConfig = (rootDirectory, packageJson) => {
	const configPath = path.join(rootDirectory, CONFIG_FILE_NAME);
	if (!fs.existsSync(configPath)) return packageJson.bunDoctor ?? {};
	return readJsonFile(configPath) ?? {};
};
106
/**
 * Removes diagnostics suppressed by config.ignore: any whose rule id is in
 * ignore.rules, or whose file path (relative to rootDirectory) matches an
 * ignore.files wildcard pattern.
 */
const filterIgnoredDiagnostics = (diagnostics, config, rootDirectory) => {
	const ruleDenyList = new Set(config.ignore?.rules ?? []);
	const fileMatchers = (config.ignore?.files ?? []).map(wildcardToRegExp);
	const isIgnored = (diagnostic) => {
		if (ruleDenyList.has(diagnostic.ruleId)) return true;
		const relativePath = toRelativePath(diagnostic.filePath, rootDirectory);
		return fileMatchers.some((matcher) => matcher.test(relativePath));
	};
	return diagnostics.filter((diagnostic) => !isIgnored(diagnostic));
};
//#endregion
116
//#region src/project.ts
// Lockfile names Bun itself writes: the text format and the legacy binary format.
const LOCKFILE_NAMES = ["bun.lock", "bun.lockb"];
// Lockfiles owned by other package managers; their presence alongside a Bun
// lockfile is flagged by the mixed-lockfiles rule.
const LEGACY_LOCKFILE_NAMES = [
	"package-lock.json",
	"pnpm-lock.yaml",
	"yarn.lock"
];
// File extensions for GitHub Actions workflow definitions.
const WORKFLOW_EXTENSIONS = new Set([".yml", ".yaml"]);
124
/**
 * Flattens every dependency section of a package.json into one map.
 * Spread/assign order makes runtime "dependencies" win over dev, peer,
 * and optional entries when a package appears in multiple sections.
 */
const collectDependencies = (packageJson) => {
	const merged = {};
	Object.assign(
		merged,
		packageJson.optionalDependencies,
		packageJson.peerDependencies,
		packageJson.devDependencies,
		packageJson.dependencies
	);
	return merged;
};
/** Builds the internal manifest record for one package.json file. */
const toPackageManifest = (packageJsonPath, packageJson) => {
	// Fall back to the containing directory name when "name" is absent.
	const fallbackName = path.basename(path.dirname(packageJsonPath));
	return {
		packageJsonPath,
		packageJson,
		packageName: packageJson.name ?? fallbackName,
		dependencies: collectDependencies(packageJson),
		trustedDependencies: new Set(packageJson.trustedDependencies ?? [])
	};
};
137
/**
 * Gathers manifests for the root package plus every nested package.json
 * found under rootDirectory (directories pruned by collectFiles excluded).
 * Nested manifests that do not parse into a plain object are skipped.
 */
const collectPackageManifests = (rootDirectory, rootPackageJsonPath, rootPackageJson) => {
	const manifests = [toPackageManifest(rootPackageJsonPath, rootPackageJson)];
	const isNestedManifest = (filePath) =>
		path.basename(filePath) === "package.json" && filePath !== rootPackageJsonPath;
	for (const manifestPath of collectFiles(rootDirectory, isNestedManifest)) {
		const packageJson = readJsonFile(manifestPath);
		if (!packageJson || typeof packageJson !== "object" || Array.isArray(packageJson)) continue;
		manifests.push(toPackageManifest(manifestPath, packageJson));
	}
	return manifests;
};
144
/** Merges dependency maps across all manifests; later manifests win on conflicts. */
const mergeDependencies = (manifests) =>
	manifests.reduce((merged, manifest) => Object.assign(merged, manifest.dependencies), {});
149
/** Unions the trustedDependencies sets of every manifest. */
const mergeTrustedDependencies = (manifests) => {
	const union = new Set();
	for (const manifest of manifests) {
		for (const packageName of manifest.trustedDependencies) {
			union.add(packageName);
		}
	}
	return union;
};
154
/**
 * Best-effort lookup of a `key = true|false` line in TOML text; undefined
 * when absent. NOTE(review): this is a line-oriented regex scan, not a TOML
 * parser — it does not track table sections.
 */
const parseBooleanTomlValue = (content, key) => {
	const pattern = new RegExp(`^\\s*${key}\\s*=\\s*(true|false)\\s*$`, "m");
	const value = content.match(pattern)?.[1];
	if (value === undefined) return undefined;
	return value === "true";
};
159
/**
 * Best-effort lookup of a `key = "value"` line (single or double quotes)
 * in TOML text; undefined when absent. Same regex-scan caveat as
 * parseBooleanTomlValue.
 */
const parseStringTomlValue = (content, key) => {
	const pattern = new RegExp(`^\\s*${key}\\s*=\\s*["']([^"']+)["']\\s*$`, "m");
	return content.match(pattern)?.[1];
};
162
/**
 * Reads bunfig.toml from the project root, returning null when absent.
 * Extracts only the install-related settings the rules consume, via
 * line-oriented regex lookups rather than a full TOML parse.
 */
const parseBunfig = (rootDirectory) => {
	const filePath = path.join(rootDirectory, "bunfig.toml");
	if (!fileExists(filePath)) return null;
	const content = fs.readFileSync(filePath, "utf8");
	const bunfig = { filePath, content };
	bunfig.installIgnoreScripts = parseBooleanTomlValue(content, "ignoreScripts");
	bunfig.installFrozenLockfile = parseBooleanTomlValue(content, "frozenLockfile");
	bunfig.installAuto = parseStringTomlValue(content, "auto");
	bunfig.installSecurityScanner = parseStringTomlValue(content, "scanner");
	return bunfig;
};
175
/** Loads every recognized source file under rootDirectory into memory. */
const readSourceFiles = (rootDirectory) => {
	const sourceFiles = [];
	for (const filePath of collectFiles(rootDirectory, isSourceFilePath)) {
		sourceFiles.push({ filePath, content: fs.readFileSync(filePath, "utf8") });
	}
	return sourceFiles;
};
179
/**
 * Loads GitHub Actions workflow files (.yml/.yaml) from .github/workflows,
 * or returns [] when that directory does not exist.
 */
const readWorkflowFiles = (rootDirectory) => {
	const workflowDirectory = path.join(rootDirectory, ".github", "workflows");
	if (!fs.existsSync(workflowDirectory)) return [];
	const isWorkflow = (filePath) => WORKFLOW_EXTENSIONS.has(path.extname(filePath));
	return collectFiles(workflowDirectory, isWorkflow).map((filePath) => ({
		filePath,
		content: fs.readFileSync(filePath, "utf8")
	}));
};
187
/**
 * Locates and parses the root tsconfig.json.
 * Returns { path: null, config: null } when the file is missing; when the
 * file exists but fails to parse, path is set and config is null.
 */
const readTsconfig = (rootDirectory) => {
	const tsconfigPath = path.join(rootDirectory, "tsconfig.json");
	if (!fileExists(tsconfigPath)) {
		return { path: null, config: null };
	}
	return { path: tsconfigPath, config: readJsonFile(tsconfigPath) };
};
198
/**
 * Returns the package.json path inside startDirectory.
 * @throws {Error} when no package.json file exists there.
 */
const findPackageJsonPath = (startDirectory) => {
	const candidate = path.join(startDirectory, "package.json");
	if (!fileExists(candidate)) {
		throw new Error(`No package.json found in ${startDirectory}`);
	}
	return candidate;
};
203
/**
 * Builds the full project model for one directory: parses the root
 * package.json, discovers lockfiles, nested workspace manifests, bunfig,
 * tsconfig, CI workflows, and all scannable source files.
 * @throws {Error} when package.json is missing or does not parse into a
 *   plain object.
 */
const discoverProject = (directory) => {
	const rootDirectory = path.resolve(directory);
	const packageJsonPath = findPackageJsonPath(rootDirectory);
	const packageJson = readJsonFile(packageJsonPath);
	// readJsonFile returns null on parse failure; also reject non-object JSON.
	if (!packageJson || typeof packageJson !== "object" || Array.isArray(packageJson)) throw new Error(`Could not parse ${packageJsonPath}`);
	const lockfiles = LOCKFILE_NAMES.filter((lockfileName) => fileExists(path.join(rootDirectory, lockfileName)));
	const legacyLockfiles = LEGACY_LOCKFILE_NAMES.filter((lockfileName) => fileExists(path.join(rootDirectory, lockfileName)));
	const packageManifests = collectPackageManifests(rootDirectory, packageJsonPath, packageJson);
	const tsconfig = readTsconfig(rootDirectory);
	const pnpmWorkspacePath = path.join(rootDirectory, "pnpm-workspace.yaml");
	return {
		rootDirectory,
		packageJsonPath,
		packageJson,
		// Fall back to the directory name when package.json has no "name".
		packageName: packageJson.name ?? path.basename(rootDirectory),
		// Dependencies/trusted sets merged across root + nested manifests.
		dependencies: mergeDependencies(packageManifests),
		trustedDependencies: mergeTrustedDependencies(packageManifests),
		packageManifests,
		lockfiles,
		legacyLockfiles,
		bunfig: parseBunfig(rootDirectory),
		tsconfigPath: tsconfig.path,
		tsconfig: tsconfig.config,
		workflows: readWorkflowFiles(rootDirectory),
		sourceFiles: readSourceFiles(rootDirectory),
		// Only kept when the file actually exists (used by the pnpm rule).
		pnpmWorkspacePath: fileExists(pnpmWorkspacePath) ? pnpmWorkspacePath : null
	};
};
//#endregion
232
//#region src/compat-db.ts
/**
 * Curated package-compatibility database. Each entry describes how a popular
 * npm package interacts with Bun:
 * - severity: diagnostic level ("risk" | "win" | "migration")
 * - affectedRanges / bunVersions / platforms / confidence / lastVerified:
 *   applicability metadata. NOTE(review): runPackageRules currently matches
 *   only on packageName and does not filter on these fields — confirm
 *   intended use before relying on them.
 * - requiresTrustedDependency: additionally triggers the
 *   trustedDependencies lifecycle-script check in runPackageRules.
 */
const COMPAT_DB = [
	// Native SQLite drivers — Bun ships bun:sqlite.
	{
		packageName: "sqlite3",
		severity: "risk",
		affectedRanges: ["*"],
		bunVersions: [">=1.0.0"],
		platforms: ["all"],
		confidence: "medium",
		reason: "Native SQLite packages can depend on lifecycle scripts and Node native addon behavior. Bun ships a native SQLite driver.",
		sources: [BUN_DOCS.sqlite, BUN_DOCS.lifecycle],
		lastVerified: "2026-05-12",
		replacement: "bun:sqlite",
		migrationHint: "Evaluate replacing sqlite3 usage with Database from bun:sqlite.",
		requiresTrustedDependency: true
	},
	{
		packageName: "better-sqlite3",
		severity: "risk",
		affectedRanges: ["*"],
		bunVersions: [">=1.0.0"],
		platforms: ["all"],
		confidence: "medium",
		reason: "Native SQLite packages can be sensitive to lifecycle script and Node-API behavior. Bun ships a native SQLite driver.",
		sources: [
			BUN_DOCS.sqlite,
			BUN_DOCS.lifecycle,
			BUN_DOCS.nodeCompatibility
		],
		lastVerified: "2026-05-12",
		replacement: "bun:sqlite",
		workaround: "If you keep better-sqlite3, verify install and runtime on every target platform.",
		migrationHint: "Evaluate replacing better-sqlite3 usage with Database from bun:sqlite.",
		requiresTrustedDependency: true
	},
	// Native build tooling that relies on install scripts.
	{
		packageName: "node-sass",
		severity: "risk",
		affectedRanges: ["*"],
		bunVersions: [">=1.0.0"],
		platforms: ["all"],
		confidence: "high",
		reason: "node-sass commonly relies on native install/build behavior. Bun does not run arbitrary lifecycle scripts by default.",
		sources: [BUN_DOCS.lifecycle],
		lastVerified: "2026-05-12",
		replacement: "sass",
		workaround: "Prefer Dart Sass. If you keep node-sass, explicitly trust it and verify platform installs.",
		migrationHint: "Replace node-sass with sass where possible.",
		requiresTrustedDependency: true
	},
	// Packages made redundant by Bun built-ins ("win" entries).
	{
		packageName: "dotenv",
		severity: "win",
		affectedRanges: ["*"],
		bunVersions: [">=1.0.0"],
		platforms: ["all"],
		confidence: "high",
		reason: "Bun automatically loads .env files, so many dotenv usages can be removed.",
		sources: [BUN_DOCS.environmentVariables],
		lastVerified: "2026-05-12",
		replacement: "Bun built-in .env loading",
		migrationHint: "Remove dotenv/config bootstrap code when Bun's automatic .env loading covers the same files."
	},
	{
		packageName: "node-fetch",
		severity: "win",
		affectedRanges: ["*"],
		bunVersions: [">=1.0.0"],
		platforms: ["all"],
		confidence: "high",
		reason: "Bun provides a native fetch implementation through Web APIs.",
		sources: [BUN_DOCS.nodeCompatibility],
		lastVerified: "2026-05-12",
		replacement: "global fetch",
		migrationHint: "Prefer global fetch for new Bun-targeted code."
	},
	{
		packageName: "cross-fetch",
		severity: "win",
		affectedRanges: ["*"],
		bunVersions: [">=1.0.0"],
		platforms: ["all"],
		confidence: "high",
		reason: "Bun provides native Request, Response, Headers, and fetch Web APIs.",
		sources: [BUN_DOCS.nodeCompatibility],
		lastVerified: "2026-05-12",
		replacement: "global fetch",
		migrationHint: "Prefer global fetch when all target runtimes provide it."
	},
	{
		packageName: "bcrypt",
		severity: "win",
		affectedRanges: ["*"],
		bunVersions: [">=1.0.0"],
		platforms: ["all"],
		confidence: "medium",
		reason: "Bun ships password hashing APIs, which can remove native bcrypt install complexity in some apps.",
		sources: [BUN_DOCS.hashing, BUN_DOCS.lifecycle],
		lastVerified: "2026-05-12",
		replacement: "Bun.password",
		migrationHint: "Evaluate Bun.password for new Bun-only password hashing code.",
		requiresTrustedDependency: true
	},
	// Tooling superseded by Bun's runtime features ("migration" entries).
	{
		packageName: "ts-node",
		severity: "migration",
		affectedRanges: ["*"],
		bunVersions: [">=1.0.0"],
		platforms: ["all"],
		confidence: "high",
		reason: "Bun runs TypeScript files directly, so ts-node is often unnecessary after migration.",
		sources: [BUN_DOCS.typescript],
		lastVerified: "2026-05-12",
		replacement: "bun run file.ts",
		migrationHint: "Replace ts-node scripts with bun run or direct bun execution where possible."
	},
	{
		packageName: "tsx",
		severity: "migration",
		affectedRanges: ["*"],
		bunVersions: [">=1.0.0"],
		platforms: ["all"],
		confidence: "high",
		reason: "Bun runs TypeScript files directly and has watch/hot modes.",
		sources: [BUN_DOCS.typescript],
		lastVerified: "2026-05-12",
		replacement: "bun run file.ts",
		migrationHint: "Replace tsx scripts with bun run where Bun is the target runtime."
	},
	{
		packageName: "nodemon",
		severity: "migration",
		affectedRanges: ["*"],
		bunVersions: [">=1.0.0"],
		platforms: ["all"],
		confidence: "high",
		reason: "Bun has built-in watch and hot reload modes for many runtime workflows.",
		sources: ["https://bun.com/docs/runtime/watch-mode"],
		lastVerified: "2026-05-12",
		replacement: "bun --watch or bun --hot",
		migrationHint: "Replace nodemon development scripts with bun --watch or bun --hot when behavior matches."
	},
	{
		packageName: "jest",
		severity: "migration",
		affectedRanges: ["*"],
		bunVersions: [">=1.0.0"],
		platforms: ["all"],
		confidence: "medium",
		reason: "Bun ships a Jest-compatible test runner, but config/setup behavior needs migration review.",
		sources: [BUN_DOCS.testRunner, BUN_DOCS.testConfiguration],
		lastVerified: "2026-05-12",
		replacement: "bun test",
		workaround: "Keep Jest if you rely on unsupported Jest-specific behavior.",
		migrationHint: "Audit jest.config and setup files before replacing test scripts with bun test."
	},
	// Bundlers with a Bun-native alternative.
	{
		packageName: "webpack",
		severity: "win",
		affectedRanges: ["*"],
		bunVersions: [">=1.0.0"],
		platforms: ["all"],
		confidence: "medium",
		reason: "Bun includes a native bundler for many application and library workflows.",
		sources: ["https://bun.com/docs/bundler"],
		lastVerified: "2026-05-12",
		replacement: "bun build",
		migrationHint: "Evaluate bun build for simple library or app bundles before keeping webpack by default."
	},
	{
		packageName: "esbuild",
		severity: "win",
		affectedRanges: ["*"],
		bunVersions: [">=1.0.0"],
		platforms: ["all"],
		confidence: "medium",
		reason: "Bun includes a native bundler and provides an esbuild migration guide.",
		sources: ["https://bun.com/docs/bundler/esbuild"],
		lastVerified: "2026-05-12",
		replacement: "bun build",
		migrationHint: "Evaluate bun build for build scripts that only need basic esbuild behavior."
	}
];
//#endregion
416
//#region src/rules.ts
/** Human-readable report section for each diagnostic level. */
const CATEGORY_BY_LEVEL = {
	blocker: "Blockers",
	risk: "Risks",
	migration: "Migration work",
	win: "Bun wins"
};
/**
 * Normalizes rule output into a diagnostic record.
 * Every diagnostic must cite at least one documentation source; the line
 * number defaults to 1 when the rule cannot pinpoint one.
 * @throws {Error} when input.sources is empty.
 */
const createDiagnostic = (input) => {
	const { ruleId, title, level, message, filePath, sources, help, packageName } = input;
	if (sources.length === 0) {
		throw new Error(`Rule ${ruleId} is missing a source`);
	}
	return {
		ruleId,
		title,
		level,
		category: CATEGORY_BY_LEVEL[level],
		message,
		filePath,
		line: input.line ?? 1,
		sources,
		help,
		packageName
	};
};
438
/** Safe accessor for tsconfig compilerOptions; {} when absent or malformed. */
const getCompilerOptions = (project) => {
	const compilerOptions = project.tsconfig?.compilerOptions;
	if (isPlainObject(compilerOptions)) return compilerOptions;
	return {};
};
442
/** True when any source file references Bun APIs or imports the "bun" module. */
const usesBunGlobal = (project) => {
	const bunReference = /\bBun\.|from\s+["']bun["']|require\(["']bun["']\)/;
	return project.sourceFiles.some((sourceFile) => bunReference.test(sourceFile.content));
};
/** True when packageName appears in the project's merged dependency map. */
const hasDependency = (project, packageName) => Boolean(project.dependencies[packageName]);
/** All workspace manifests that declare packageName as a dependency. */
const findDependencyManifests = (project, packageName) =>
	project.packageManifests.filter((manifest) => Boolean(manifest.dependencies[packageName]));
/** True when the root package.json declares a workspaces field. */
const hasPackageJsonWorkspaces = (project) => Boolean(project.packageJson.workspaces);
/** True when any dependency version string uses the catalog: protocol. */
const hasCatalogReference = (project) => {
	const versions = Object.values(project.dependencies);
	return versions.some((version) => version.startsWith("catalog:"));
};
447
/**
 * True when Bun catalog definitions exist in package.json: either top-level
 * catalog/catalogs, or nested under an object-form workspaces field.
 */
const hasPackageJsonCatalog = (project) => {
	const { packageJson } = project;
	if (packageJson.catalog || packageJson.catalogs) return true;
	if (!isPlainObject(packageJson.workspaces)) return false;
	return Boolean(packageJson.workspaces.catalog) || Boolean(packageJson.workspaces.catalogs);
};
448
/** Matches bun subcommands a CI workflow would run. */
const workflowUsesBun = (content) => /\bbun\s+(install|run|test|build|x)\b/.test(content);
/** Detects the official oven-sh/setup-bun GitHub Action. */
const workflowUsesSetupBun = (content) => /oven-sh\/setup-bun@/.test(content);
/** Detects installs performed by npm, pnpm, or yarn. */
const workflowUsesLegacyInstall = (content) => /\b(npm ci|npm install|pnpm install|yarn install|yarn --frozen-lockfile)\b/.test(content);
/** Detects a bun install invocation without --frozen-lockfile on the same line. */
const workflowUsesUnfrozenBunInstall = (content) => {
	if (/^\s*(-\s*)?run:\s*bun install\s*$/m.test(content)) return true;
	return /\bbun install\b(?![^\n]*--frozen-lockfile)/.test(content);
};
452
/**
 * Runs every project-level rule (lockfiles, workspaces, catalogs, CI
 * workflows, types, bunfig, compat database) against the project model and
 * returns the diagnostics in a fixed emission order.
 */
const runPackageRules = (project) => {
	const diagnostics = [];
	const packageJsonPath = project.packageJsonPath;
	const hasBunLock = project.lockfiles.includes("bun.lock");
	const hasBunLockb = project.lockfiles.includes("bun.lockb");
	const hasAnyBunLock = hasBunLock || hasBunLockb;
	// Rule: a Bun project should commit some Bun lockfile.
	if (!hasAnyBunLock) diagnostics.push(createDiagnostic({
		ruleId: "bun/lockfile-missing",
		title: "Missing Bun lockfile",
		level: "migration",
		message: "This project has no bun.lock. Commit Bun's lockfile before treating installs or CI as reproducible under Bun.",
		filePath: packageJsonPath,
		sources: [BUN_DOCS.lockfile],
		help: "Run bun install and commit bun.lock."
	}));
	// Rule: the binary bun.lockb format is superseded by text bun.lock.
	if (hasBunLockb) diagnostics.push(createDiagnostic({
		ruleId: "bun/legacy-lockb",
		title: "Legacy binary Bun lockfile",
		level: "migration",
		message: "bun.lockb is the legacy binary lockfile format. Bun v1.2 defaults to the text-based bun.lock.",
		filePath: path.join(project.rootDirectory, "bun.lockb"),
		sources: [BUN_DOCS.lockfile],
		help: "Migrate with bun install --save-text-lockfile --frozen-lockfile --lockfile-only, then remove bun.lockb after verification."
	}));
	// Rule: Bun lockfile coexisting with npm/pnpm/yarn lockfiles is ambiguous.
	if (hasAnyBunLock && project.legacyLockfiles.length > 0) diagnostics.push(createDiagnostic({
		ruleId: "bun/mixed-lockfiles",
		title: "Mixed package-manager lockfiles",
		level: "risk",
		message: `Bun lockfile exists alongside ${project.legacyLockfiles.join(", ")}. Mixed lockfiles make it unclear which package manager owns dependency resolution.`,
		filePath: packageJsonPath,
		sources: [BUN_DOCS.lockfile],
		help: "Keep legacy lockfiles only if another supported workflow still owns them; otherwise remove them after validating bun.lock."
	}));
	// Rule: packageManager should pin a bun@x.y.z version.
	if (!project.packageJson.packageManager?.startsWith("bun@")) diagnostics.push(createDiagnostic({
		ruleId: "bun/package-manager-field",
		title: "packageManager does not pin Bun",
		level: "migration",
		message: "package.json does not pin Bun in packageManager, so contributors and CI may use different package managers.",
		filePath: packageJsonPath,
		sources: [BUN_DOCS.lockfile],
		help: "Set packageManager to the Bun version used by the project, for example bun@1.3.11."
	}));
	// Rule: pnpm-workspace.yaml alone does not define workspaces for Bun.
	if (project.pnpmWorkspacePath && !hasPackageJsonWorkspaces(project)) diagnostics.push(createDiagnostic({
		ruleId: "bun/pnpm-workspace-only",
		title: "Workspaces only declared for pnpm",
		level: "blocker",
		message: "Bun reads workspaces from package.json. A pnpm-workspace.yaml without package.json workspaces will not define Bun workspaces.",
		filePath: project.pnpmWorkspacePath,
		sources: [BUN_DOCS.workspaces],
		help: "Move workspace globs into package.json workspaces before relying on bun install at the repo root."
	}));
	// Rule: catalog: version specifiers require a catalog definition.
	if (hasCatalogReference(project) && !hasPackageJsonCatalog(project)) diagnostics.push(createDiagnostic({
		ruleId: "bun/catalog-without-package-json-catalog",
		title: "Catalog references need Bun catalog definitions",
		level: "blocker",
		message: "This project uses catalog: dependency references, but no Bun catalog or catalogs definition was found in package.json.",
		filePath: packageJsonPath,
		sources: [BUN_DOCS.catalogs],
		help: "Define catalog or catalogs in package.json, preferably under workspaces for monorepos."
	}));
	// CI workflow rules, evaluated per workflow file.
	for (const workflow of project.workflows) {
		// Rule: workflows running bun must install it via oven-sh/setup-bun.
		if (workflowUsesBun(workflow.content) && !workflowUsesSetupBun(workflow.content)) diagnostics.push(createDiagnostic({
			ruleId: "bun/ci-missing-setup-bun",
			title: "CI uses Bun without setup-bun",
			level: "blocker",
			message: "This workflow runs bun commands but does not install Bun with oven-sh/setup-bun.",
			filePath: workflow.filePath,
			line: findLineNumber(workflow.content, /\bbun\s+(install|run|test|build|x)\b/),
			sources: [BUN_DOCS.ci],
			help: "Add oven-sh/setup-bun before bun commands in GitHub Actions."
		}));
		// Rule: with a Bun lockfile present, CI should not install via npm/pnpm/yarn.
		if (hasAnyBunLock && workflowUsesLegacyInstall(workflow.content)) diagnostics.push(createDiagnostic({
			ruleId: "bun/ci-uses-legacy-package-manager",
			title: "CI still installs with another package manager",
			level: "migration",
			message: "This workflow uses npm, pnpm, or yarn install even though the project has a Bun lockfile.",
			filePath: workflow.filePath,
			line: findLineNumber(workflow.content, /\b(npm ci|npm install|pnpm install|yarn install|yarn --frozen-lockfile)\b/),
			sources: [BUN_DOCS.ci],
			help: "Switch Bun-owned CI jobs to bun install --frozen-lockfile."
		}));
		// Rule: CI installs should verify the lockfile, not rewrite it.
		if (workflowUsesUnfrozenBunInstall(workflow.content)) diagnostics.push(createDiagnostic({
			ruleId: "bun/ci-install-not-frozen",
			title: "CI Bun install is not frozen",
			level: "risk",
			message: "This workflow runs bun install without --frozen-lockfile, so CI can update bun.lock instead of verifying it.",
			filePath: workflow.filePath,
			line: findLineNumber(workflow.content, /\bbun install\b/),
			sources: [BUN_DOCS.bunfig, BUN_DOCS.ci],
			help: "Use bun install --frozen-lockfile in CI."
		}));
	}
	// Rule: code referencing Bun APIs should have @types/bun installed.
	if (usesBunGlobal(project) && !hasDependency(project, "@types/bun")) diagnostics.push(createDiagnostic({
		ruleId: "bun/types-package-missing",
		title: "Bun types are missing",
		level: "migration",
		message: "Source files reference Bun APIs, but @types/bun is not installed.",
		filePath: packageJsonPath,
		sources: [BUN_DOCS.typescript],
		help: "Install @types/bun as a dev dependency."
	}));
	// Rule: an explicit compilerOptions.types list must include "bun".
	const compilerTypes = getCompilerOptions(project).types;
	if (hasDependency(project, "@types/bun") && Array.isArray(compilerTypes) && !compilerTypes.includes("bun")) diagnostics.push(createDiagnostic({
		ruleId: "bun/tsconfig-types-missing-bun",
		title: "tsconfig types excludes Bun",
		level: "risk",
		message: "@types/bun is installed, but compilerOptions.types does not include bun. TypeScript 6+ requires explicit Bun types in this mode.",
		filePath: project.tsconfigPath ?? packageJsonPath,
		sources: [BUN_DOCS.typescript],
		help: "Add \"bun\" to compilerOptions.types or remove types if you do not need to restrict global type packages."
	}));
	// Rule: bunfig install.auto other than "disable" allows implicit installs.
	if (project.bunfig?.installAuto && project.bunfig.installAuto !== "disable") diagnostics.push(createDiagnostic({
		ruleId: "bun/auto-install-enabled",
		title: "Bun auto-install is enabled",
		level: "risk",
		message: `bunfig.toml sets install.auto to ${project.bunfig.installAuto}. Auto-install can fetch packages during execution when node_modules is absent.`,
		filePath: project.bunfig.filePath,
		line: findLineNumber(project.bunfig.content, /auto\s*=/),
		sources: [BUN_DOCS.autoInstall, BUN_DOCS.bunfig],
		help: "For application repos and CI, consider install.auto = \"disable\" for more predictable execution."
	}));
	// Compat database: one diagnostic per (entry, manifest) match, plus a
	// trustedDependencies follow-up when the entry requires install scripts.
	for (const entry of COMPAT_DB) for (const manifest of findDependencyManifests(project, entry.packageName)) {
		diagnostics.push(createDiagnostic({
			ruleId: `compat/${entry.packageName}`,
			title: `${entry.packageName} compatibility note`,
			level: entry.severity,
			message: entry.reason,
			filePath: manifest.packageJsonPath,
			sources: entry.sources,
			packageName: entry.packageName,
			help: [
				entry.replacement ? `Replacement: ${entry.replacement}.` : "",
				entry.workaround,
				entry.migrationHint
			].filter(Boolean).join(" ")
		}));
		if (entry.requiresTrustedDependency && !manifest.trustedDependencies.has(entry.packageName)) diagnostics.push(createDiagnostic({
			ruleId: `bun/trusted-dependency/${entry.packageName}`,
			title: `${entry.packageName} may need trustedDependencies`,
			level: "risk",
			message: `${entry.packageName} can require lifecycle scripts, but it is not listed in trustedDependencies. Bun does not run arbitrary lifecycle scripts by default.`,
			filePath: manifest.packageJsonPath,
			sources: [BUN_DOCS.lifecycle],
			packageName: entry.packageName,
			help: `If you keep ${entry.packageName}, verify whether it needs install scripts and add it to trustedDependencies only after review.`
		}));
	}
	return diagnostics;
};
601
/**
 * Source-level compatibility rules: each entry pairs a regex with the
 * diagnostic metadata emitted when a source file matches (see runCodeRules).
 * Patterns carry no /g flag, so .test() reuse is stateless.
 */
const CODE_RULES = [
	{
		ruleId: "code/node-repl",
		title: "node:repl is not implemented in Bun",
		level: "blocker",
		pattern: /(?:from\s+["']node:repl["']|require\(["']node:repl["']\))/,
		message: "Bun's Node compatibility table marks node:repl as not implemented.",
		sources: [BUN_DOCS.nodeCompatibility]
	},
	{
		ruleId: "code/node-trace-events",
		title: "node:trace_events is not implemented in Bun",
		level: "blocker",
		pattern: /(?:from\s+["']node:trace_events["']|require\(["']node:trace_events["']\))/,
		message: "Bun's Node compatibility table marks node:trace_events as not implemented.",
		sources: [BUN_DOCS.nodeCompatibility]
	},
	{
		ruleId: "code/node-sqlite",
		title: "node:sqlite is not implemented in Bun",
		level: "blocker",
		pattern: /(?:from\s+["']node:sqlite["']|require\(["']node:sqlite["']\))/,
		message: "Bun does not implement node:sqlite. Bun provides bun:sqlite instead.",
		sources: [BUN_DOCS.nodeCompatibility, BUN_DOCS.sqlite],
		help: "Use bun:sqlite for Bun-targeted SQLite code."
	},
	{
		ruleId: "code/process-binding",
		title: "process.binding usage is compatibility-sensitive",
		level: "risk",
		pattern: /\bprocess\.binding\s*\(/,
		message: "process.binding is an internal Node API and only partially implemented by Bun.",
		sources: [BUN_DOCS.nodeCompatibility],
		help: "Replace internal Node binding access with public APIs before migrating."
	},
	{
		ruleId: "code/process-missing-api",
		title: "Node process API is not implemented in Bun",
		level: "blocker",
		pattern: /\bprocess\.(loadEnvFile|getBuiltinModule)\s*\(/,
		message: "Bun's compatibility docs list process.loadEnvFile and process.getBuiltinModule as not implemented.",
		sources: [BUN_DOCS.nodeCompatibility]
	},
	{
		ruleId: "code/module-register",
		title: "module.register is not implemented in Bun",
		level: "blocker",
		pattern: /\bmodule\.register\s*\(/,
		message: "Bun's compatibility docs list module.register as not implemented and recommend Bun.plugin in the meantime.",
		sources: [BUN_DOCS.nodeCompatibility, "https://bun.com/docs/runtime/plugins"],
		help: "Evaluate Bun.plugin or avoid runtime module loader hooks in Bun-targeted code."
	},
	{
		ruleId: "code/node-test",
		title: "node:test is only partly implemented in Bun",
		level: "migration",
		pattern: /(?:from\s+["']node:test["']|require\(["']node:test["']\))/,
		message: "Bun's compatibility docs mark node:test as partly implemented. Bun has its own bun:test runner.",
		sources: [BUN_DOCS.nodeCompatibility, BUN_DOCS.testRunner],
		help: "Prefer bun:test when migrating the test runner to Bun."
	},
	{
		ruleId: "code/v8-specific-api",
		title: "V8-specific APIs are compatibility-sensitive",
		level: "risk",
		pattern: /(?:from\s+["']node:v8["']|require\(["']node:v8["']\)|\bv8\.(serialize|deserialize|setFlagsFromString|cachedDataVersionTag)\s*\()/,
		message: "Bun runs on JavaScriptCore, and its Node v8 compatibility is partial.",
		sources: [BUN_DOCS.nodeCompatibility],
		help: "Avoid V8-specific runtime assumptions in Bun-targeted code."
	},
	{
		ruleId: "code/worker-resource-limits",
		title: "worker_threads resource limits are unsupported",
		level: "risk",
		pattern: /\bresourceLimits\s*:/,
		message: "Bun's worker_threads compatibility notes list resourceLimits as unsupported.",
		sources: [BUN_DOCS.nodeCompatibility],
		help: "Verify worker behavior under Bun or avoid Node-specific Worker options."
	}
];
681
/**
 * Scans every loaded source file against CODE_RULES, emitting one diagnostic
 * per (file, rule) match, pinned to the first matching line. The file-outer,
 * rule-inner loop order is intentional: it fixes the diagnostic ordering.
 */
const runCodeRules = (project) => {
	const diagnostics = [];
	for (const sourceFile of project.sourceFiles) {
		const { filePath, content } = sourceFile;
		for (const rule of CODE_RULES) {
			if (!rule.pattern.test(content)) continue;
			diagnostics.push(createDiagnostic({
				ruleId: rule.ruleId,
				title: rule.title,
				level: rule.level,
				message: rule.message,
				filePath,
				line: findLineNumber(content, rule.pattern),
				sources: rule.sources,
				help: rule.help
			}));
		}
	}
	return diagnostics;
};
//#endregion
699
//#region src/score.ts
/** Maps a 0-100 readiness score onto its report label. */
const getScoreLabel = (score) => {
	if (score < 50) return "Blocked";
	if (score < 75) return "Risky";
	if (score < 90) return "Close";
	return "Ready";
};
706
/**
 * Buckets distinct rule ids by diagnostic level, so repeated findings from
 * the same rule are penalized only once in calculateScore.
 */
const collectUniqueRuleCounts = (diagnostics) => {
	const counts = {
		blocker: new Set(),
		risk: new Set(),
		migration: new Set(),
		win: new Set()
	};
	for (const { level, ruleId } of diagnostics) {
		counts[level].add(ruleId);
	}
	return counts;
};
716
/**
 * Derives the migration-readiness score. Each distinct rule (not each
 * diagnostic) is penalized once — blockers 12, risks 5, migrations 2; wins
 * carry no penalty. The score is clamped at 0 from below.
 */
const calculateScore = (diagnostics) => {
	const counts = collectUniqueRuleCounts(diagnostics);
	const penalty =
		12 * counts.blocker.size +
		5 * counts.risk.size +
		2 * counts.migration.size;
	const score = Math.max(0, Math.round(100 - penalty));
	return { score, label: getScoreLabel(score) };
};
725
/** Tallies diagnostics per level for the report summary. */
const summarizeDiagnostics = (diagnostics) => {
	const summary = { blockers: 0, risks: 0, migrations: 0, wins: 0 };
	const keyByLevel = { blocker: "blockers", risk: "risks", migration: "migrations", win: "wins" };
	for (const diagnostic of diagnostics) {
		const key = keyByLevel[diagnostic.level];
		if (key) summary[key] += 1; // unknown levels are ignored, as before
	}
	return summary;
};
//#endregion
732
//#region src/scan.ts
/**
 * Entry point: discovers the project at `directory`, resolves configuration,
 * runs the enabled rule groups, filters ignored diagnostics, and returns the
 * project model plus diagnostics, score, and summary.
 *
 * Check-enable precedence: explicit options, then config flags
 * (package/code), then enabled by default.
 * NOTE(review): the function awaits nothing today; async presumably keeps
 * the public API promise-based — confirm before changing.
 */
const scan = async (directory, options = {}) => {
	const project = discoverProject(directory);
	const loadedConfig = options.configOverride ?? loadConfig(project.rootDirectory, project.packageJson);
	const shouldRunPackageChecks = options.packageChecks ?? loadedConfig.package ?? true;
	const shouldRunCodeChecks = options.codeChecks ?? loadedConfig.code ?? true;
	const filteredDiagnostics = filterIgnoredDiagnostics([...shouldRunPackageChecks ? runPackageRules(project) : [], ...shouldRunCodeChecks ? runCodeRules(project) : []], loadedConfig, project.rootDirectory);
	return {
		project,
		diagnostics: filteredDiagnostics,
		score: calculateScore(filteredDiagnostics),
		summary: summarizeDiagnostics(filteredDiagnostics)
	};
};
//#endregion
747
// Bundler-minified export aliases. NOTE(review): presumably re-exported
// under their public names (VERSION, toRelativePath, calculateScore,
// summarizeDiagnostics, scan) by the package entry module — verify there.
export { VERSION as a, toRelativePath as i, calculateScore as n, summarizeDiagnostics as r, scan as t };

//# sourceMappingURL=scan-BVcJTreL.mjs.map