@openrewrite/rewrite 8.68.0-20251202-044649 → 8.68.0-20251202-154952

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (45)
  1. package/dist/javascript/assertions.d.ts +1 -0
  2. package/dist/javascript/assertions.d.ts.map +1 -1
  3. package/dist/javascript/assertions.js +82 -11
  4. package/dist/javascript/assertions.js.map +1 -1
  5. package/dist/javascript/dependency-workspace.d.ts +46 -5
  6. package/dist/javascript/dependency-workspace.d.ts.map +1 -1
  7. package/dist/javascript/dependency-workspace.js +70 -35
  8. package/dist/javascript/dependency-workspace.js.map +1 -1
  9. package/dist/javascript/index.d.ts +2 -0
  10. package/dist/javascript/index.d.ts.map +1 -1
  11. package/dist/javascript/index.js +2 -0
  12. package/dist/javascript/index.js.map +1 -1
  13. package/dist/javascript/node-resolution-result.d.ts +204 -0
  14. package/dist/javascript/node-resolution-result.d.ts.map +1 -0
  15. package/dist/javascript/node-resolution-result.js +723 -0
  16. package/dist/javascript/node-resolution-result.js.map +1 -0
  17. package/dist/javascript/package-json-parser.d.ts +143 -0
  18. package/dist/javascript/package-json-parser.d.ts.map +1 -0
  19. package/dist/javascript/package-json-parser.js +773 -0
  20. package/dist/javascript/package-json-parser.js.map +1 -0
  21. package/dist/javascript/templating/engine.js +1 -1
  22. package/dist/javascript/templating/engine.js.map +1 -1
  23. package/dist/json/parser.js +10 -1
  24. package/dist/json/parser.js.map +1 -1
  25. package/dist/json/tree.d.ts +1 -1
  26. package/dist/json/tree.js +1 -1
  27. package/dist/json/tree.js.map +1 -1
  28. package/dist/parser.d.ts +1 -1
  29. package/dist/parser.d.ts.map +1 -1
  30. package/dist/rpc/request/parse.d.ts +4 -0
  31. package/dist/rpc/request/parse.d.ts.map +1 -1
  32. package/dist/rpc/request/parse.js +17 -1
  33. package/dist/rpc/request/parse.js.map +1 -1
  34. package/dist/version.txt +1 -1
  35. package/package.json +5 -2
  36. package/src/javascript/assertions.ts +73 -15
  37. package/src/javascript/dependency-workspace.ts +124 -46
  38. package/src/javascript/index.ts +2 -0
  39. package/src/javascript/node-resolution-result.ts +905 -0
  40. package/src/javascript/package-json-parser.ts +845 -0
  41. package/src/javascript/templating/engine.ts +1 -1
  42. package/src/json/parser.ts +18 -1
  43. package/src/json/tree.ts +1 -1
  44. package/src/parser.ts +1 -1
  45. package/src/rpc/request/parse.ts +20 -2
package/src/javascript/package-json-parser.ts (new file)
@@ -0,0 +1,845 @@
+/*
+ * Copyright 2025 the original author or authors.
+ * <p>
+ * Licensed under the Moderne Source Available License (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * https://docs.moderne.io/licensing/moderne-source-available-license
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+import {Parser, ParserInput, parserInputFile, parserInputRead, ParserOptions, Parsers} from "../parser";
+import {SourceFile} from "../tree";
+import {Json, JsonParser} from "../json";
+import {
+    createNodeResolutionResultMarker,
+    NodeResolutionResult,
+    PackageLockContent,
+    PackageLockEntry,
+    PackageManager,
+    readNpmrcConfigs
+} from "./node-resolution-result";
+import * as fs from "fs";
+import * as fsp from "fs/promises";
+import * as path from "path";
+import * as YAML from "yaml";
+import {spawnSync} from "child_process";
+
+/**
+ * Bun.lock package entry metadata.
+ */
+interface BunLockMetadata {
+    readonly dependencies?: Record<string, string>;
+    readonly devDependencies?: Record<string, string>;
+    readonly peerDependencies?: Record<string, string>;
+    readonly optionalDependencies?: Record<string, string>;
+}
+
+/**
+ * Bun.lock package entry: [name@version, url, metadata, integrity]
+ * Note: Using unknown for first element since we need runtime validation of parsed JSON.
+ */
+type BunLockPackageEntry = [unknown, string?, BunLockMetadata?, string?];
+
+/**
+ * Parsed bun.lock content structure.
+ */
+interface BunLockContent {
+    readonly packages?: Record<string, BunLockPackageEntry>;
+}
+
+export interface PackageJsonParserOptions extends ParserOptions {
+    /**
+     * If true, skips reading and parsing lock files for dependency resolution.
+     * The NodeResolutionResult marker will still be created, but without resolved dependencies.
+     */
+    skipDependencyResolution?: boolean;
+}
+
+/**
+ * A parser for package.json files that wraps the JsonParser.
+ *
+ * Similar to how MavenParser wraps XmlParser in Java, this parser:
+ * - Parses package.json files as JSON documents
+ * - Attaches NodeResolutionResult markers with dependency information
+ * - Optionally reads corresponding lock files (package-lock.json, yarn.lock, etc.)
+ *   to provide resolved dependency versions
+ */
+export class PackageJsonParser extends Parser {
+    private readonly jsonParser: JsonParser;
+    private readonly skipDependencyResolution: boolean;
+
+    /** Fields to copy from package.json that contain dependency maps */
+    private static readonly DEPENDENCY_FIELDS = [
+        'dependencies',
+        'devDependencies',
+        'peerDependencies',
+        'optionalDependencies'
+    ] as const;
+
+    constructor(options: PackageJsonParserOptions = {}) {
+        super(options);
+        this.jsonParser = new JsonParser(options);
+        this.skipDependencyResolution = options.skipDependencyResolution ?? false;
+    }
+
+    /**
+     * Extracts package metadata from a package.json object into a lock file entry format.
+     * Copies version, dependency fields, engines, and license.
+     */
+    private static extractPackageMetadata(pkgJson: any, fallbackVersion?: string): Record<string, any> {
+        const entry: Record<string, any> = {
+            version: pkgJson.version || fallbackVersion,
+        };
+
+        for (const field of PackageJsonParser.DEPENDENCY_FIELDS) {
+            if (pkgJson[field] && Object.keys(pkgJson[field]).length > 0) {
+                entry[field] = pkgJson[field];
+            }
+        }
+
+        if (pkgJson.engines) {
+            entry.engines = pkgJson.engines;
+        }
+        if (pkgJson.license) {
+            entry.license = pkgJson.license;
+        }
+
+        return entry;
+    }
+
+    /**
+     * Accepts package.json files.
+     */
+    accept(sourcePath: string): boolean {
+        const fileName = path.basename(sourcePath);
+        return fileName === 'package.json';
+    }
+
+    async *parse(...inputs: ParserInput[]): AsyncGenerator<SourceFile> {
+        // Group inputs by directory to share NodeResolutionResult markers
+        const inputsByDir = new Map<string, ParserInput[]>();
+
+        for (const input of inputs) {
+            const filePath = parserInputFile(input);
+            const dir = path.dirname(filePath);
+
+            if (!inputsByDir.has(dir)) {
+                inputsByDir.set(dir, []);
+            }
+            inputsByDir.get(dir)!.push(input);
+        }
+
+        // Process each directory's package.json files
+        for (const [dir, dirInputs] of inputsByDir) {
+            // Create a shared marker for this directory
+            let marker: NodeResolutionResult | null = null;
+
+            for (const input of dirInputs) {
+                // Parse as JSON first
+                const jsonGenerator = this.jsonParser.parse(input);
+                const jsonResult = await jsonGenerator.next();
+
+                if (jsonResult.done || !jsonResult.value) {
+                    continue;
+                }
+
+                const jsonDoc = jsonResult.value as Json.Document;
+
+                // Create NodeResolutionResult marker if not already created for this directory
+                if (!marker) {
+                    marker = await this.createMarker(input, dir);
+                }
+
+                // Attach the marker to the JSON document
+                if (marker) {
+                    yield {
+                        ...jsonDoc,
+                        markers: {
+                            ...jsonDoc.markers,
+                            markers: [...jsonDoc.markers.markers, marker]
+                        }
+                    };
+                } else {
+                    yield jsonDoc;
+                }
+            }
+        }
+    }
+
+    /**
+     * Creates a NodeResolutionResult marker from the package.json content and optional lock file.
+     */
+    private async createMarker(input: ParserInput, dir: string): Promise<NodeResolutionResult | null> {
+        try {
+            const content = parserInputRead(input);
+            const packageJson = JSON.parse(content);
+
+            // Determine the relative path for the marker
+            const filePath = parserInputFile(input);
+            const relativePath = this.relativeTo
+                ? path.relative(this.relativeTo, filePath)
+                : filePath;
+
+            // Try to read lock file if dependency resolution is not skipped
+            // Use relativeTo directory if available (for tests), otherwise use the directory from input path
+            let lockContent: PackageLockContent | undefined = undefined;
+            let packageManager: PackageManager | undefined = undefined;
+            if (!this.skipDependencyResolution) {
+                const lockDir = this.relativeTo || dir;
+                const lockResult = await this.tryReadLockFile(lockDir);
+                lockContent = lockResult?.content;
+                packageManager = lockResult?.packageManager;
+            }
+
+            // Read .npmrc configurations from all scopes
+            const projectDir = this.relativeTo || dir;
+            const npmrcConfigs = await readNpmrcConfigs(projectDir);
+
+            return createNodeResolutionResultMarker(
+                relativePath,
+                packageJson,
+                lockContent,
+                undefined,
+                packageManager,
+                npmrcConfigs.length > 0 ? npmrcConfigs : undefined
+            );
+        } catch (error) {
+            console.warn(`Failed to create NodeResolutionResult marker: ${error}`);
+            return null;
+        }
+    }
+
+    /**
+     * Lock file detection configuration.
+     * Priority order determines which package manager is detected when multiple lock files exist.
+     */
+    private static readonly LOCK_FILE_CONFIG: ReadonlyArray<{
+        filename: string;
+        packageManager: PackageManager | ((content: string) => PackageManager);
+        /** If true, prefer walking node_modules over parsing lock file */
+        preferNodeModules?: boolean;
+    }> = [
+        { filename: 'package-lock.json', packageManager: PackageManager.Npm },
+        { filename: 'bun.lock', packageManager: PackageManager.Bun },
+        { filename: 'pnpm-lock.yaml', packageManager: PackageManager.Pnpm, preferNodeModules: true },
+        // yarn.lock omits transitive dependency details (engines/license), so prefer node_modules
+        { filename: 'yarn.lock', packageManager: (content) =>
+            content.includes('__metadata:') ? PackageManager.YarnBerry : PackageManager.YarnClassic,
+            preferNodeModules: true
+        },
+    ];
+
+    /**
+     * Attempts to read and parse a lock file from the given directory.
+     * Supports npm (package-lock.json), bun (bun.lock), pnpm, and yarn.
+     *
+     * @returns Object with parsed lock file content and detected package manager, or undefined if no lock file found
+     */
+    private async tryReadLockFile(dir: string): Promise<{ content: PackageLockContent; packageManager: PackageManager } | undefined> {
+        // Detect which lock file exists (first match wins based on priority)
+        for (const config of PackageJsonParser.LOCK_FILE_CONFIG) {
+            const lockPath = path.join(dir, config.filename);
+            if (!fs.existsSync(lockPath)) {
+                continue;
+            }
+
+            try {
+                const fileContent = await fsp.readFile(lockPath, 'utf-8');
+                const packageManager = typeof config.packageManager === 'function'
+                    ? config.packageManager(fileContent)
+                    : config.packageManager;
+
+                // For package managers where lock file omits details, prefer node_modules
+                if (config.preferNodeModules) {
+                    const parsed = await this.walkNodeModules(dir);
+                    if (parsed) {
+                        return { content: parsed, packageManager };
+                    }
+                }
+
+                // Parse lock file based on package manager
+                const content = await this.parseLockFileContent(config.filename, fileContent, dir);
+                if (content) {
+                    return { content, packageManager };
+                }
+            } catch (error) {
+                console.debug?.(`Failed to parse ${config.filename}: ${error}`);
+            }
+        }
+
+        return undefined;
+    }
+
+    /**
+     * Parses lock file content based on the lock file type.
+     */
+    private async parseLockFileContent(
+        filename: string,
+        content: string,
+        dir: string
+    ): Promise<PackageLockContent | undefined> {
+        switch (filename) {
+            case 'package-lock.json':
+                return JSON.parse(content);
+            case 'bun.lock':
+                return this.convertBunLockToNpmFormat(this.parseJsonc(content) as BunLockContent);
+            case 'pnpm-lock.yaml':
+                // Fall back to pnpm CLI when node_modules unavailable
+                return this.getPnpmDependencies(dir);
+            case 'yarn.lock':
+                return this.parseYarnLock(content);
+            default:
+                return undefined;
+        }
+    }
+
+    /**
+     * Parses JSONC (JSON with Comments and trailing commas) content.
+     *
+     * Note: This is a simple regex-based approach that works for bun.lock files but doesn't
+     * handle edge cases like comment-like sequences inside strings (e.g., "// not a comment").
+     * For lock files this is acceptable since they don't contain such patterns. If broader
+     * JSONC support is needed, consider using a proper parser like `jsonc-parser`.
+     */
+    private parseJsonc(content: string): Record<string, any> {
+        // Remove single-line comments (// ...)
+        let stripped = content.replace(/\/\/.*$/gm, '');
+        // Remove multi-line comments (/* ... */)
+        stripped = stripped.replace(/\/\*[\s\S]*?\*\//g, '');
+        // Remove trailing commas before ] or }
+        stripped = stripped.replace(/,(\s*[}\]])/g, '$1');
+        return JSON.parse(stripped);
+    }
+
+    /**
+     * Walks the node_modules directory to build an npm-format packages structure.
+     * This provides 100% accurate resolution for all package managers since it reads
+     * the actual installed packages rather than trying to interpret lock file formats.
+     *
+     * @param dir The project directory containing node_modules
+     * @returns npm package-lock.json format with packages map, or undefined if node_modules doesn't exist
+     */
+    private async walkNodeModules(dir: string): Promise<any> {
+        const nodeModulesPath = path.join(dir, 'node_modules');
+        if (!fs.existsSync(nodeModulesPath)) {
+            return undefined;
+        }
+
+        const packages: Record<string, any> = {
+            "": {} // Root package placeholder
+        };
+
+        // Check if this is a pnpm project (has .pnpm directory)
+        const pnpmPath = path.join(nodeModulesPath, '.pnpm');
+        if (fs.existsSync(pnpmPath)) {
+            await this.walkPnpmNodeModules(pnpmPath, packages);
+        } else {
+            await this.walkNodeModulesRecursive(nodeModulesPath, 'node_modules', packages);
+        }
+
+        return Object.keys(packages).length > 1 ? {
+            lockfileVersion: 3,
+            packages
+        } : undefined;
+    }
+
+    /**
+     * Walks pnpm's .pnpm directory structure to build packages map.
+     * pnpm stores packages in .pnpm/<name>@<version>/node_modules/<name>/
+     */
+    private async walkPnpmNodeModules(pnpmPath: string, packages: Record<string, any>): Promise<void> {
+        let entries: fs.Dirent[];
+        try {
+            entries = await fsp.readdir(pnpmPath, { withFileTypes: true });
+        } catch {
+            return;
+        }
+
+        // Process entries in parallel for better performance
+        await Promise.all(entries.map(async (entry) => {
+            // Skip non-directories and special files
+            if (!entry.isDirectory() || entry.name === 'node_modules') {
+                return;
+            }
+
+            // Parse name@version from directory name
+            // Handle scoped packages: @scope+name@version
+            const atIndex = entry.name.lastIndexOf('@');
+            if (atIndex <= 0) return;
+
+            let name = entry.name.substring(0, atIndex);
+            const version = entry.name.substring(atIndex + 1);
+
+            // pnpm encodes @ as + in scoped packages: @scope+name -> @scope/name
+            if (name.startsWith('@') && name.includes('+')) {
+                name = name.replace('+', '/');
+            }
+
+            // The actual package is at .pnpm/<name>@<version>/node_modules/<name>/
+            const pkgDir = path.join(pnpmPath, entry.name, 'node_modules', name.replace('/', path.sep));
+            const packageJsonPath = path.join(pkgDir, 'package.json');
+
+            let pkgJson: any;
+            try {
+                const content = await fsp.readFile(packageJsonPath, 'utf-8');
+                pkgJson = JSON.parse(content);
+            } catch {
+                return;
+            }
+
+            // Use name@version as the key for pnpm (flat structure with version)
+            const pkgKey = `node_modules/${name}@${version}`;
+            packages[pkgKey] = PackageJsonParser.extractPackageMetadata(pkgJson, version);
+        }));
+    }
+
+    /**
+     * Recursively walks a node_modules directory, reading package.json files
+     * and building the packages map.
+     *
+     * @param nodeModulesPath Absolute path to the node_modules directory
+     * @param relativePath Relative path from project root (e.g., "node_modules" or "node_modules/foo/node_modules")
+     * @param packages The packages map to populate
+     */
+    private async walkNodeModulesRecursive(
+        nodeModulesPath: string,
+        relativePath: string,
+        packages: Record<string, any>
+    ): Promise<void> {
+        let entries: fs.Dirent[];
+        try {
+            entries = await fsp.readdir(nodeModulesPath, { withFileTypes: true });
+        } catch {
+            return; // Directory not readable
+        }
+
+        // Process entries in parallel for better performance
+        await Promise.all(entries.map(async (entry) => {
+            // Skip hidden files
+            if (entry.name.startsWith('.')) {
+                return;
+            }
+
+            // Accept directories and symlinks (pnpm uses symlinks)
+            const isDirectoryOrSymlink = entry.isDirectory() || entry.isSymbolicLink();
+            if (!isDirectoryOrSymlink) {
+                return;
+            }
+
+            // Handle scoped packages (@scope/name)
+            if (entry.name.startsWith('@')) {
+                const scopePath = path.join(nodeModulesPath, entry.name);
+                let scopeEntries: fs.Dirent[];
+                try {
+                    scopeEntries = await fsp.readdir(scopePath, { withFileTypes: true });
+                } catch {
+                    return;
+                }
+
+                await Promise.all(scopeEntries.map(async (scopeEntry) => {
+                    // Accept directories and symlinks for scoped packages too
+                    if (!scopeEntry.isDirectory() && !scopeEntry.isSymbolicLink()) return;
+
+                    const scopedName = `${entry.name}/${scopeEntry.name}`;
+                    const pkgPath = path.join(scopePath, scopeEntry.name);
+                    await this.processPackage(pkgPath, `${relativePath}/${scopedName}`, packages);
+                }));
+            } else {
+                const pkgPath = path.join(nodeModulesPath, entry.name);
+                await this.processPackage(pkgPath, `${relativePath}/${entry.name}`, packages);
+            }
+        }));
+    }
+
+    /**
+     * Processes a single package directory, reading its package.json and
+     * recursively processing nested node_modules.
+     */
+    private async processPackage(
+        pkgPath: string,
+        relativePath: string,
+        packages: Record<string, any>
+    ): Promise<void> {
+        const packageJsonPath = path.join(pkgPath, 'package.json');
+
+        // Read and parse the package's package.json
+        let pkgJson: any;
+        try {
+            const content = await fsp.readFile(packageJsonPath, 'utf-8');
+            pkgJson = JSON.parse(content);
+        } catch {
+            return; // Not a valid package
+        }
+
+        packages[relativePath] = PackageJsonParser.extractPackageMetadata(pkgJson);
+
+        // Recursively process nested node_modules
+        const nestedNodeModules = path.join(pkgPath, 'node_modules');
+        try {
+            await fsp.access(nestedNodeModules);
+            await this.walkNodeModulesRecursive(nestedNodeModules, `${relativePath}/node_modules`, packages);
+        } catch {
+            // No nested node_modules, that's fine
+        }
+    }
+
+    /**
+     * Converts bun.lock format to npm package-lock.json format for unified processing.
+     *
+     * bun.lock format (v1):
+     * - Keys are package names or paths like "is-even/is-odd" for nested deps
+     * - Values are arrays: [name@version, url, metadata, integrity]
+     * - metadata can have: { dependencies: {...}, devDependencies: {...}, ... }
+     */
+    private convertBunLockToNpmFormat(bunLock: BunLockContent): PackageLockContent | undefined {
+        if (!bunLock.packages) {
+            return undefined;
+        }
+
+        const packages: Record<string, PackageLockEntry> = {
+            "": {} // Root package placeholder
+        };
+
+        for (const [key, value] of Object.entries(bunLock.packages)) {
+            // bun.lock array format: [name@version, url, metadata, integrity]
+            const [nameAtVersion, , metadata] = value;
+
+            if (typeof nameAtVersion !== 'string') continue;
+
+            // Parse name@version from first element
+            const atIndex = nameAtVersion.lastIndexOf('@');
+            if (atIndex <= 0) continue;
+
+            const name = nameAtVersion.substring(0, atIndex);
+            const version = nameAtVersion.substring(atIndex + 1);
+
+            const pkgEntry: PackageLockEntry = {
+                version,
+                dependencies: metadata?.dependencies && Object.keys(metadata.dependencies).length > 0
+                    ? metadata.dependencies : undefined,
+                devDependencies: metadata?.devDependencies && Object.keys(metadata.devDependencies).length > 0
+                    ? metadata.devDependencies : undefined,
+                peerDependencies: metadata?.peerDependencies && Object.keys(metadata.peerDependencies).length > 0
+                    ? metadata.peerDependencies : undefined,
+                optionalDependencies: metadata?.optionalDependencies && Object.keys(metadata.optionalDependencies).length > 0
+                    ? metadata.optionalDependencies : undefined,
+            };
+
+            // Convert bun's path format to npm's node_modules format
+            // bun uses "parent/child" for nested deps, npm uses "node_modules/parent/node_modules/child"
+            let pkgPath: string;
+            if (key.includes('/')) {
+                // Nested dependency - convert "is-even/is-odd" to "node_modules/is-even/node_modules/is-odd"
+                const parts = key.split('/');
+                pkgPath = parts.map(p => `node_modules/${p}`).join('/');
+            } else {
+                pkgPath = `node_modules/${name}`;
+            }
+
+            packages[pkgPath] = pkgEntry;
+        }
+
+        return {
+            lockfileVersion: 3,
+            packages
+        };
+    }
+
+    /**
+     * Gets dependency information from pnpm using its CLI.
+     * Uses `pnpm list --json --depth=Infinity` to get the full dependency tree.
+     */
+    private getPnpmDependencies(dir: string): Record<string, any> | undefined {
+        // Use spawnSync with array args to avoid shell injection risks
+        const result = spawnSync('pnpm', ['list', '--json', '--depth=Infinity'], {
+            cwd: dir,
+            encoding: 'utf-8',
+            stdio: ['pipe', 'pipe', 'pipe'],
+            timeout: 30000
+        });
+
+        if (result.error || result.status !== 0) {
+            return undefined;
+        }
+
+        const pnpmList = JSON.parse(result.stdout);
+        return this.convertPnpmListToNpmFormat(pnpmList);
+    }
+
+    /**
+     * Converts pnpm list --json output to npm package-lock.json format.
+     */
+    private convertPnpmListToNpmFormat(pnpmList: any): Record<string, any> | undefined {
+        const packages: Record<string, any> = {
+            "": {} // Root package placeholder
+        };
+
+        // pnpm list returns an array of projects (for workspaces) or a single object
+        const projects = Array.isArray(pnpmList) ? pnpmList : [pnpmList];
+
+        for (const project of projects) {
+            this.extractPnpmDependencies(project.dependencies, packages);
+            this.extractPnpmDependencies(project.devDependencies, packages);
+            this.extractPnpmDependencies(project.optionalDependencies, packages);
+        }
+
+        return Object.keys(packages).length > 1 ? {
+            lockfileVersion: 3,
+            packages
+        } : undefined;
+    }
+
+    /**
+     * Recursively extracts dependencies from pnpm list output.
+     * Uses name@version as key to handle multiple versions of the same package.
+     */
+    private extractPnpmDependencies(deps: any, packages: Record<string, any>): void {
+        if (!deps) return;
+
+        for (const [name, info] of Object.entries(deps as Record<string, any>)) {
+            const version = info.version;
+            if (!version) continue;
+
+            const pkgKey = `node_modules/${name}@${version}`;
+
+            if (!packages[pkgKey]) {
+                const pkgEntry: any = { version };
+
+                // Extract nested dependency version constraints
+                if (info.dependencies) {
+                    const nestedDeps: Record<string, string> = {};
+                    for (const [depName, depInfo] of Object.entries(info.dependencies as Record<string, any>)) {
+                        nestedDeps[depName] = (depInfo as any).version || '*';
+                    }
+                    if (Object.keys(nestedDeps).length > 0) {
+                        pkgEntry.dependencies = nestedDeps;
+                    }
+                }
+
+                packages[pkgKey] = pkgEntry;
+            }
+
+            // Recursively process nested dependencies
+            if (info.dependencies) {
+                this.extractPnpmDependencies(info.dependencies, packages);
+            }
+        }
+    }
+
+    /**
+     * Parses yarn.lock file and returns npm-format content.
+     * Detects whether it's Yarn Classic (v1) or Yarn Berry (v2+) format.
+     */
+    private parseYarnLock(content: string): any {
+        // Yarn Berry (v2+) has __metadata section at the start
+        if (content.includes('__metadata:')) {
+            return this.parseYarnBerryLock(content);
+        }
+
+        // Yarn Classic (v1) starts with "# yarn lockfile v1"
+        if (content.includes('# yarn lockfile v1')) {
+            return this.parseYarnClassicLock(content);
+        }
+
+        return undefined;
+    }
+
+    /**
+     * Parses Yarn Berry (v2+) yarn.lock file directly.
+     * Format is standard YAML with package entries like:
+     *   "is-odd@npm:^3.0.1":
+     *     version: 3.0.1
+     *     resolution: "is-odd@npm:3.0.1"
+     *     dependencies:
+     *       is-number: "npm:^6.0.0"
+     */
+    private parseYarnBerryLock(content: string): any {
+        const lock = YAML.parse(content);
+        if (!lock) return undefined;
+
+        const packages: Record<string, any> = {
+            "": {} // Root package placeholder
+        };
+
+        for (const [key, entry] of Object.entries(lock as Record<string, any>)) {
+            // Skip metadata and workspace entries
+            if (key === '__metadata' || key.includes('@workspace:')) continue;
+            if (!entry || typeof entry !== 'object') continue;
+
+            const version = entry.version;
+            if (!version) continue;
+
+            // Extract package name from resolution like "is-odd@npm:3.0.1"
+            let name: string;
+            if (entry.resolution) {
+                const npmIndex = entry.resolution.indexOf('@npm:');
+                if (npmIndex > 0) {
+                    name = entry.resolution.substring(0, npmIndex);
+                } else {
+                    continue;
+                }
+            } else {
+                // Fallback: parse from key like "is-odd@npm:^3.0.1"
+                const npmIndex = key.indexOf('@npm:');
+                if (npmIndex > 0) {
+                    name = key.substring(0, npmIndex);
+                } else {
+                    continue;
+                }
+            }
+
+            const pkgKey = `node_modules/${name}@${version}`;
+
+            // Skip if already processed (multiple version constraints can resolve to same version)
+            if (packages[pkgKey]) continue;
+
+            const pkgEntry: any = { version };
+
+            // Parse dependencies
+            if (entry.dependencies && typeof entry.dependencies === 'object') {
+                const deps: Record<string, string> = {};
+                for (const [depName, depConstraint] of Object.entries(entry.dependencies as Record<string, string>)) {
+                    // Constraint is like "npm:^6.0.0" - strip the "npm:" prefix
+                    deps[depName] = depConstraint.startsWith('npm:')
+                        ? depConstraint.substring(4)
+                        : depConstraint;
+                }
+                if (Object.keys(deps).length > 0) {
+                    pkgEntry.dependencies = deps;
+                }
+            }
+
+            packages[pkgKey] = pkgEntry;
+        }
+
+        return Object.keys(packages).length > 1 ? {
+            lockfileVersion: 3,
+            packages
+        } : undefined;
+    }
+
+    /**
+     * Parses Yarn Classic (v1) yarn.lock file directly.
+     * Format is a custom format (not standard YAML):
+     *
+     *   is-odd@^3.0.1:
+     *     version "3.0.1"
+     *     resolved "https://..."
+     *     integrity sha512-...
+     *     dependencies:
+     *       is-number "^6.0.0"
+     */
+    private parseYarnClassicLock(content: string): any {
+        const packages: Record<string, any> = {
+            "": {} // Root package placeholder
+        };
+
+        // Split into package blocks - each block starts with an unindented line ending with ":"
+        // and may span multiple version constraints (e.g., "pkg@^1.0.0, pkg@^1.2.0:")
+        const lines = content.split('\n');
+        let currentNames: string[] = [];
+        let currentVersion: string | null = null;
+        let currentDeps: Record<string, string> = {};
+        let inDependencies = false;
+
+        for (const line of lines) {
+            // Skip comments and empty lines
+            if (line.startsWith('#') || line.trim() === '') {
+                continue;
+            }
+
+            // New package block (unindented line ending with ":")
+            if (!line.startsWith(' ') && line.endsWith(':')) {
+                // Save previous package if exists
+                if (currentNames.length > 0 && currentVersion) {
+                    const pkgKey = `node_modules/${currentNames[0]}@${currentVersion}`;
+                    if (!packages[pkgKey]) {
+                        const pkgEntry: any = { version: currentVersion };
+                        if (Object.keys(currentDeps).length > 0) {
+                            pkgEntry.dependencies = currentDeps;
+                        }
+                        packages[pkgKey] = pkgEntry;
+                    }
+                }
+
+                // Parse new package names from line like 'is-odd@^3.0.1, is-odd@^3.0.0:'
+                // or '"@babel/core@^7.0.0":'
+                const namesStr = line.slice(0, -1); // Remove trailing ":"
+                currentNames = [];
+
+                // Split by ", " but handle quoted strings
+                const parts = namesStr.split(/,\s*(?=(?:[^"]*"[^"]*")*[^"]*$)/);
+                for (const part of parts) {
+                    // Remove surrounding quotes if present
+                    let cleaned = part.trim();
+                    if (cleaned.startsWith('"') && cleaned.endsWith('"')) {
+                        cleaned = cleaned.slice(1, -1);
+                    }
+                    // Extract package name (everything before last @)
+                    const atIndex = cleaned.lastIndexOf('@');
+                    if (atIndex > 0) {
+                        currentNames.push(cleaned.substring(0, atIndex));
+                    }
+                }
+
+                currentVersion = null;
+                currentDeps = {};
+                inDependencies = false;
+                continue;
+            }
+
+            // Version line: '  version "3.0.1"'
+            const versionMatch = line.match(/^\s+version\s+"([^"]+)"/);
+            if (versionMatch) {
+                currentVersion = versionMatch[1];
+                continue;
+            }
+
+            // Dependencies section start
+            if (line.match(/^\s+dependencies:\s*$/)) {
+                inDependencies = true;
+                continue;
+            }
+
+            // Other section (resolved, integrity, etc.) - ends dependencies section
+            if (line.match(/^\s+\w+:/) && !line.match(/^\s{4}/)) {
+                inDependencies = false;
+                continue;
+            }
+
+            // Dependency entry: '    is-number "^6.0.0"'
+            if (inDependencies) {
+                const depMatch = line.match(/^\s{4}(.+?)\s+"([^"]+)"/);
+                if (depMatch) {
+                    currentDeps[depMatch[1]] = depMatch[2];
+                }
+            }
+        }
+
+        // Save last package
+        if (currentNames.length > 0 && currentVersion) {
+            const pkgKey = `node_modules/${currentNames[0]}@${currentVersion}`;
+            if (!packages[pkgKey]) {
+                const pkgEntry: any = { version: currentVersion };
+                if (Object.keys(currentDeps).length > 0) {
+                    pkgEntry.dependencies = currentDeps;
+                }
+                packages[pkgKey] = pkgEntry;
+            }
+        }
+
+        return Object.keys(packages).length > 1 ? {
+            lockfileVersion: 3,
+            packages
+        } : undefined;
+    }
+}
+
+// Register with the Parsers registry for RPC support
+Parsers.registerParser("packageJson", PackageJsonParser);
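
Usage sketch (not part of the published diff): a minimal example of how the new PackageJsonParser might be driven, based only on the API visible in the hunk above. The import path, the assumption that a ParserInput can be a plain file path, and passing relativeTo through the options object are unverified assumptions.

import {PackageJsonParser} from "@openrewrite/rewrite/javascript";

async function parseProject(projectDir: string) {
    // relativeTo (assumed to be a ParserOptions field) is where lock files and .npmrc are resolved from;
    // skipDependencyResolution: true would skip lock file / node_modules / pnpm CLI resolution entirely.
    const parser = new PackageJsonParser({relativeTo: projectDir, skipDependencyResolution: false});

    // parse() is an async generator; each package.json yields a Json.Document whose
    // markers.markers array carries the attached NodeResolutionResult marker.
    for await (const doc of parser.parse(`${projectDir}/package.json`)) {
        console.log(doc.markers.markers.length);
    }
}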