@openrewrite/rewrite 8.70.2 → 8.70.4

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (60)
  1. package/dist/javascript/add-import.d.ts +5 -0
  2. package/dist/javascript/add-import.d.ts.map +1 -1
  3. package/dist/javascript/add-import.js +22 -9
  4. package/dist/javascript/add-import.js.map +1 -1
  5. package/dist/javascript/assertions.d.ts.map +1 -1
  6. package/dist/javascript/assertions.js +45 -13
  7. package/dist/javascript/assertions.js.map +1 -1
  8. package/dist/javascript/dependency-workspace.d.ts +5 -0
  9. package/dist/javascript/dependency-workspace.d.ts.map +1 -1
  10. package/dist/javascript/dependency-workspace.js +47 -13
  11. package/dist/javascript/dependency-workspace.js.map +1 -1
  12. package/dist/javascript/package-json-parser.d.ts +24 -0
  13. package/dist/javascript/package-json-parser.d.ts.map +1 -1
  14. package/dist/javascript/package-json-parser.js +147 -34
  15. package/dist/javascript/package-json-parser.js.map +1 -1
  16. package/dist/javascript/package-manager.d.ts +45 -7
  17. package/dist/javascript/package-manager.d.ts.map +1 -1
  18. package/dist/javascript/package-manager.js +83 -45
  19. package/dist/javascript/package-manager.js.map +1 -1
  20. package/dist/javascript/project-parser.d.ts +7 -0
  21. package/dist/javascript/project-parser.d.ts.map +1 -1
  22. package/dist/javascript/project-parser.js +10 -9
  23. package/dist/javascript/project-parser.js.map +1 -1
  24. package/dist/javascript/recipes/add-dependency.d.ts +7 -3
  25. package/dist/javascript/recipes/add-dependency.d.ts.map +1 -1
  26. package/dist/javascript/recipes/add-dependency.js +71 -13
  27. package/dist/javascript/recipes/add-dependency.js.map +1 -1
  28. package/dist/javascript/recipes/upgrade-dependency-version.d.ts +7 -3
  29. package/dist/javascript/recipes/upgrade-dependency-version.d.ts.map +1 -1
  30. package/dist/javascript/recipes/upgrade-dependency-version.js +71 -13
  31. package/dist/javascript/recipes/upgrade-dependency-version.js.map +1 -1
  32. package/dist/javascript/recipes/upgrade-transitive-dependency-version.d.ts +7 -3
  33. package/dist/javascript/recipes/upgrade-transitive-dependency-version.d.ts.map +1 -1
  34. package/dist/javascript/recipes/upgrade-transitive-dependency-version.js +71 -13
  35. package/dist/javascript/recipes/upgrade-transitive-dependency-version.js.map +1 -1
  36. package/dist/path-utils.d.ts +45 -0
  37. package/dist/path-utils.d.ts.map +1 -0
  38. package/dist/path-utils.js +210 -0
  39. package/dist/path-utils.js.map +1 -0
  40. package/dist/rpc/request/parse-project.d.ts +13 -1
  41. package/dist/rpc/request/parse-project.d.ts.map +1 -1
  42. package/dist/rpc/request/parse-project.js +18 -9
  43. package/dist/rpc/request/parse-project.js.map +1 -1
  44. package/dist/run.d.ts.map +1 -1
  45. package/dist/run.js +4 -0
  46. package/dist/run.js.map +1 -1
  47. package/dist/version.txt +1 -1
  48. package/package.json +1 -1
  49. package/src/javascript/add-import.ts +28 -7
  50. package/src/javascript/assertions.ts +48 -6
  51. package/src/javascript/dependency-workspace.ts +66 -13
  52. package/src/javascript/package-json-parser.ts +181 -42
  53. package/src/javascript/package-manager.ts +120 -52
  54. package/src/javascript/project-parser.ts +18 -9
  55. package/src/javascript/recipes/add-dependency.ts +89 -17
  56. package/src/javascript/recipes/upgrade-dependency-version.ts +89 -17
  57. package/src/javascript/recipes/upgrade-transitive-dependency-version.ts +89 -17
  58. package/src/path-utils.ts +208 -0
  59. package/src/rpc/request/parse-project.ts +17 -9
  60. package/src/run.ts +4 -0
@@ -17,11 +17,15 @@
17
17
  import {Option, ScanningRecipe} from "../../recipe";
18
18
  import {ExecutionContext} from "../../execution";
19
19
  import {TreeVisitor} from "../../visitor";
20
- import {getMemberKeyName, isLiteral, Json, JsonVisitor} from "../../json";
20
+ import {Tree} from "../../tree";
21
+ import {getMemberKeyName, isJson, isLiteral, Json, JsonVisitor} from "../../json";
22
+ import {isDocuments, isYaml, Yaml} from "../../yaml";
23
+ import {isPlainText, PlainText} from "../../text";
21
24
  import {
22
25
  allDependencyScopes,
23
26
  DependencyScope,
24
27
  findNodeResolutionResult,
28
+ NpmrcScope,
25
29
  PackageManager
26
30
  } from "../node-resolution-result";
27
31
  import * as path from "path";
@@ -33,6 +37,7 @@ import {
33
37
  createLockFileEditor,
34
38
  DependencyRecipeAccumulator,
35
39
  getAllLockFileNames,
40
+ getLockFileName,
36
41
  parseLockFileContent,
37
42
  runInstallIfNeeded,
38
43
  runInstallInTempDir,
@@ -44,8 +49,6 @@ import {
44
49
  * Information about a project that needs updating
45
50
  */
46
51
  interface ProjectUpdateInfo {
47
- /** Absolute path to the project directory */
48
- projectDir: string;
49
52
  /** Relative path to package.json (from source root) */
50
53
  packageJsonPath: string;
51
54
  /** Original package.json content */
@@ -63,9 +66,14 @@ interface ProjectUpdateInfo {
63
66
  * already satisfies the new constraint. Only package.json needs updating.
64
67
  */
65
68
  skipInstall: boolean;
69
+ /** Config file contents extracted from the project (e.g., .npmrc) */
70
+ configFiles?: Record<string, string>;
66
71
  }
67
72
 
68
- type Accumulator = DependencyRecipeAccumulator<ProjectUpdateInfo>;
73
+ interface Accumulator extends DependencyRecipeAccumulator<ProjectUpdateInfo> {
74
+ /** Original lock file content, keyed by lock file path */
75
+ originalLockFiles: Map<string, string>;
76
+ }
69
77
 
70
78
  /**
71
79
  * Upgrades the version of a direct dependency in package.json and updates the lock file.
@@ -101,7 +109,10 @@ export class UpgradeDependencyVersion extends ScanningRecipe<Accumulator> {
101
109
  newVersion!: string;
102
110
 
103
111
  initialValue(_ctx: ExecutionContext): Accumulator {
104
- return createDependencyRecipeAccumulator();
112
+ return {
113
+ ...createDependencyRecipeAccumulator<ProjectUpdateInfo>(),
114
+ originalLockFiles: new Map()
115
+ };
105
116
  }
106
117
 
107
118
  /**
@@ -139,10 +150,38 @@ export class UpgradeDependencyVersion extends ScanningRecipe<Accumulator> {
139
150
 
140
151
  async scanner(acc: Accumulator): Promise<TreeVisitor<any, ExecutionContext>> {
141
152
  const recipe = this;
153
+ const LOCK_FILE_NAMES = getAllLockFileNames();
154
+
155
+ return new class extends TreeVisitor<Tree, ExecutionContext> {
156
+ protected async accept(tree: Tree, ctx: ExecutionContext): Promise<Tree | undefined> {
157
+ // Handle JSON documents (package.json and JSON lock files)
158
+ if (isJson(tree) && tree.kind === Json.Kind.Document) {
159
+ return this.handleJsonDocument(tree as Json.Document, ctx);
160
+ }
161
+
162
+ // Handle YAML documents (pnpm-lock.yaml)
163
+ if (isYaml(tree) && isDocuments(tree)) {
164
+ return this.handleYamlDocument(tree, ctx);
165
+ }
166
+
167
+ // Handle PlainText files (yarn.lock for Yarn Classic)
168
+ if (isPlainText(tree)) {
169
+ return this.handlePlainTextDocument(tree as PlainText, ctx);
170
+ }
171
+
172
+ return tree;
173
+ }
174
+
175
+ private async handleJsonDocument(doc: Json.Document, _ctx: ExecutionContext): Promise<Json | undefined> {
176
+ const basename = path.basename(doc.sourcePath);
142
177
 
143
- return new class extends JsonVisitor<ExecutionContext> {
144
- protected async visitDocument(doc: Json.Document, _ctx: ExecutionContext): Promise<Json | undefined> {
145
- // Only process package.json files
178
+ // Capture JSON lock file content (package-lock.json, bun.lock)
179
+ if (LOCK_FILE_NAMES.includes(basename)) {
180
+ acc.originalLockFiles.set(doc.sourcePath, await TreePrinters.print(doc));
181
+ return doc;
182
+ }
183
+
184
+ // Only process package.json files for dependency analysis
146
185
  if (!doc.sourcePath.endsWith('package.json')) {
147
186
  return doc;
148
187
  }
@@ -152,8 +191,6 @@ export class UpgradeDependencyVersion extends ScanningRecipe<Accumulator> {
152
191
  return doc;
153
192
  }
154
193
 
155
- // Get the project directory and package manager
156
- const projectDir = path.dirname(path.resolve(doc.sourcePath));
157
194
  const pm = marker.packageManager ?? PackageManager.Npm;
158
195
 
159
196
  // Check each dependency scope for the target package
@@ -189,22 +226,43 @@ export class UpgradeDependencyVersion extends ScanningRecipe<Accumulator> {
189
226
  const skipInstall = resolvedDep !== undefined &&
190
227
  semver.satisfies(resolvedDep.version, recipe.newVersion);
191
228
 
229
+ // Extract project-level .npmrc config from marker
230
+ const configFiles: Record<string, string> = {};
231
+ const projectNpmrc = marker.npmrcConfigs?.find(c => c.scope === NpmrcScope.Project);
232
+ if (projectNpmrc) {
233
+ const lines = Object.entries(projectNpmrc.properties)
234
+ .map(([key, value]) => `${key}=${value}`);
235
+ configFiles['.npmrc'] = lines.join('\n');
236
+ }
237
+
192
238
  acc.projectsToUpdate.set(doc.sourcePath, {
193
- projectDir,
194
239
  packageJsonPath: doc.sourcePath,
195
- originalPackageJson: await this.printDocument(doc),
240
+ originalPackageJson: await TreePrinters.print(doc),
196
241
  dependencyScope: foundScope,
197
242
  currentVersion,
198
243
  newVersion: recipe.newVersion,
199
244
  packageManager: pm,
200
- skipInstall
245
+ skipInstall,
246
+ configFiles: Object.keys(configFiles).length > 0 ? configFiles : undefined
201
247
  });
202
248
 
203
249
  return doc;
204
250
  }
205
251
 
206
- private async printDocument(doc: Json.Document): Promise<string> {
207
- return TreePrinters.print(doc);
252
+ private async handleYamlDocument(docs: Yaml.Documents, _ctx: ExecutionContext): Promise<Yaml.Documents | undefined> {
253
+ const basename = path.basename(docs.sourcePath);
254
+ if (LOCK_FILE_NAMES.includes(basename)) {
255
+ acc.originalLockFiles.set(docs.sourcePath, await TreePrinters.print(docs));
256
+ }
257
+ return docs;
258
+ }
259
+
260
+ private async handlePlainTextDocument(text: PlainText, _ctx: ExecutionContext): Promise<PlainText | undefined> {
261
+ const basename = path.basename(text.sourcePath);
262
+ if (LOCK_FILE_NAMES.includes(basename)) {
263
+ acc.originalLockFiles.set(text.sourcePath, await TreePrinters.print(text));
264
+ }
265
+ return text;
208
266
  }
209
267
  };
210
268
  }
@@ -281,6 +339,7 @@ export class UpgradeDependencyVersion extends ScanningRecipe<Accumulator> {
281
339
  /**
282
340
  * Runs the package manager in a temporary directory to update the lock file.
283
341
  * Writes a modified package.json with the new version, then runs install to update the lock file.
342
+ * All file contents are provided from in-memory sources (SourceFiles), not read from disk.
284
343
  */
285
344
  private async runPackageManagerInstall(
286
345
  acc: Accumulator,
@@ -294,10 +353,23 @@ export class UpgradeDependencyVersion extends ScanningRecipe<Accumulator> {
294
353
  updateInfo.newVersion
295
354
  );
296
355
 
356
+ // Get the lock file path based on package manager
357
+ const lockFileName = getLockFileName(updateInfo.packageManager);
358
+ const packageJsonDir = path.dirname(updateInfo.packageJsonPath);
359
+ const lockFilePath = packageJsonDir === '.'
360
+ ? lockFileName
361
+ : path.join(packageJsonDir, lockFileName);
362
+
363
+ // Look up the original lock file content from captured SourceFiles
364
+ const originalLockFileContent = acc.originalLockFiles.get(lockFilePath);
365
+
297
366
  const result = await runInstallInTempDir(
298
- updateInfo.projectDir,
299
367
  updateInfo.packageManager,
300
- modifiedPackageJson
368
+ modifiedPackageJson,
369
+ {
370
+ originalLockFileContent,
371
+ configFiles: updateInfo.configFiles
372
+ }
301
373
  );
302
374
 
303
375
  storeInstallResult(result, acc, updateInfo, modifiedPackageJson);
@@ -17,11 +17,15 @@
17
17
  import {Option, ScanningRecipe} from "../../recipe";
18
18
  import {ExecutionContext} from "../../execution";
19
19
  import {TreeVisitor} from "../../visitor";
20
- import {Json, JsonParser, JsonVisitor} from "../../json";
20
+ import {Tree} from "../../tree";
21
+ import {isJson, Json, JsonParser, JsonVisitor} from "../../json";
22
+ import {isDocuments, isYaml, Yaml} from "../../yaml";
23
+ import {isPlainText, PlainText} from "../../text";
21
24
  import {
22
25
  allDependencyScopes,
23
26
  findNodeResolutionResult,
24
27
  NodeResolutionResultQueries,
28
+ NpmrcScope,
25
29
  PackageManager
26
30
  } from "../node-resolution-result";
27
31
  import * as path from "path";
@@ -33,6 +37,7 @@ import {
33
37
  createLockFileEditor,
34
38
  DependencyRecipeAccumulator,
35
39
  getAllLockFileNames,
40
+ getLockFileName,
36
41
  parseLockFileContent,
37
42
  runInstallIfNeeded,
38
43
  runInstallInTempDir,
@@ -45,8 +50,6 @@ import {applyOverrideToPackageJson, DependencyPathSegment, parseDependencyPath}
45
50
  * Information about a project that needs updating
46
51
  */
47
52
  interface ProjectUpdateInfo {
48
- /** Absolute path to the project directory */
49
- projectDir: string;
50
53
  /** Relative path to package.json (from source root) */
51
54
  packageJsonPath: string;
52
55
  /** Original package.json content */
@@ -62,9 +65,14 @@ interface ProjectUpdateInfo {
62
65
  skipInstall: boolean;
63
66
  /** Parsed dependency path for scoped overrides (if specified) */
64
67
  dependencyPathSegments?: DependencyPathSegment[];
68
+ /** Config file contents extracted from the project (e.g., .npmrc) */
69
+ configFiles?: Record<string, string>;
65
70
  }
66
71
 
67
- type Accumulator = DependencyRecipeAccumulator<ProjectUpdateInfo>;
72
+ interface Accumulator extends DependencyRecipeAccumulator<ProjectUpdateInfo> {
73
+ /** Original lock file content, keyed by lock file path */
74
+ originalLockFiles: Map<string, string>;
75
+ }
68
76
 
69
77
  /**
70
78
  * Upgrades the version of a transitive dependency by adding override entries to package.json.
@@ -108,15 +116,46 @@ export class UpgradeTransitiveDependencyVersion extends ScanningRecipe<Accumulat
108
116
  dependencyPath?: string;
109
117
 
110
118
  initialValue(_ctx: ExecutionContext): Accumulator {
111
- return createDependencyRecipeAccumulator();
119
+ return {
120
+ ...createDependencyRecipeAccumulator<ProjectUpdateInfo>(),
121
+ originalLockFiles: new Map()
122
+ };
112
123
  }
113
124
 
114
125
  async scanner(acc: Accumulator): Promise<TreeVisitor<any, ExecutionContext>> {
115
126
  const recipe = this;
127
+ const LOCK_FILE_NAMES = getAllLockFileNames();
128
+
129
+ return new class extends TreeVisitor<Tree, ExecutionContext> {
130
+ protected async accept(tree: Tree, ctx: ExecutionContext): Promise<Tree | undefined> {
131
+ // Handle JSON documents (package.json and JSON lock files)
132
+ if (isJson(tree) && tree.kind === Json.Kind.Document) {
133
+ return this.handleJsonDocument(tree as Json.Document, ctx);
134
+ }
135
+
136
+ // Handle YAML documents (pnpm-lock.yaml)
137
+ if (isYaml(tree) && isDocuments(tree)) {
138
+ return this.handleYamlDocument(tree, ctx);
139
+ }
116
140
 
117
- return new class extends JsonVisitor<ExecutionContext> {
118
- protected async visitDocument(doc: Json.Document, _ctx: ExecutionContext): Promise<Json | undefined> {
119
- // Only process package.json files
141
+ // Handle PlainText files (yarn.lock for Yarn Classic)
142
+ if (isPlainText(tree)) {
143
+ return this.handlePlainTextDocument(tree as PlainText, ctx);
144
+ }
145
+
146
+ return tree;
147
+ }
148
+
149
+ private async handleJsonDocument(doc: Json.Document, _ctx: ExecutionContext): Promise<Json | undefined> {
150
+ const basename = path.basename(doc.sourcePath);
151
+
152
+ // Capture JSON lock file content (package-lock.json, bun.lock)
153
+ if (LOCK_FILE_NAMES.includes(basename)) {
154
+ acc.originalLockFiles.set(doc.sourcePath, await TreePrinters.print(doc));
155
+ return doc;
156
+ }
157
+
158
+ // Only process package.json files for dependency analysis
120
159
  if (!doc.sourcePath.endsWith('package.json')) {
121
160
  return doc;
122
161
  }
@@ -126,8 +165,6 @@ export class UpgradeTransitiveDependencyVersion extends ScanningRecipe<Accumulat
126
165
  return doc;
127
166
  }
128
167
 
129
- // Get the project directory and package manager
130
- const projectDir = path.dirname(path.resolve(doc.sourcePath));
131
168
  const pm = marker.packageManager ?? PackageManager.Npm;
132
169
 
133
170
  // Check if package is a direct dependency - if so, skip (use UpgradeDependencyVersion instead)
@@ -167,21 +204,42 @@ export class UpgradeTransitiveDependencyVersion extends ScanningRecipe<Accumulat
167
204
  ? parseDependencyPath(recipe.dependencyPath)
168
205
  : undefined;
169
206
 
207
+ // Extract project-level .npmrc config from marker
208
+ const configFiles: Record<string, string> = {};
209
+ const projectNpmrc = marker.npmrcConfigs?.find(c => c.scope === NpmrcScope.Project);
210
+ if (projectNpmrc) {
211
+ const lines = Object.entries(projectNpmrc.properties)
212
+ .map(([key, value]) => `${key}=${value}`);
213
+ configFiles['.npmrc'] = lines.join('\n');
214
+ }
215
+
170
216
  acc.projectsToUpdate.set(doc.sourcePath, {
171
- projectDir,
172
217
  packageJsonPath: doc.sourcePath,
173
- originalPackageJson: await this.printDocument(doc),
218
+ originalPackageJson: await TreePrinters.print(doc),
174
219
  newVersion: recipe.newVersion,
175
220
  packageManager: pm,
176
221
  skipInstall: false, // Always need to run install for overrides
177
- dependencyPathSegments
222
+ dependencyPathSegments,
223
+ configFiles: Object.keys(configFiles).length > 0 ? configFiles : undefined
178
224
  });
179
225
 
180
226
  return doc;
181
227
  }
182
228
 
183
- private async printDocument(doc: Json.Document): Promise<string> {
184
- return TreePrinters.print(doc);
229
+ private async handleYamlDocument(docs: Yaml.Documents, _ctx: ExecutionContext): Promise<Yaml.Documents | undefined> {
230
+ const basename = path.basename(docs.sourcePath);
231
+ if (LOCK_FILE_NAMES.includes(basename)) {
232
+ acc.originalLockFiles.set(docs.sourcePath, await TreePrinters.print(docs));
233
+ }
234
+ return docs;
235
+ }
236
+
237
+ private async handlePlainTextDocument(text: PlainText, _ctx: ExecutionContext): Promise<PlainText | undefined> {
238
+ const basename = path.basename(text.sourcePath);
239
+ if (LOCK_FILE_NAMES.includes(basename)) {
240
+ acc.originalLockFiles.set(text.sourcePath, await TreePrinters.print(text));
241
+ }
242
+ return text;
185
243
  }
186
244
  };
187
245
  }
@@ -289,6 +347,7 @@ export class UpgradeTransitiveDependencyVersion extends ScanningRecipe<Accumulat
289
347
 
290
348
  /**
291
349
  * Runs the package manager in a temporary directory to update the lock file.
350
+ * All file contents are provided from in-memory sources (SourceFiles), not read from disk.
292
351
  */
293
352
  private async runPackageManagerInstall(
294
353
  acc: Accumulator,
@@ -301,10 +360,23 @@ export class UpgradeTransitiveDependencyVersion extends ScanningRecipe<Accumulat
301
360
  updateInfo
302
361
  );
303
362
 
363
+ // Get the lock file path based on package manager
364
+ const lockFileName = getLockFileName(updateInfo.packageManager);
365
+ const packageJsonDir = path.dirname(updateInfo.packageJsonPath);
366
+ const lockFilePath = packageJsonDir === '.'
367
+ ? lockFileName
368
+ : path.join(packageJsonDir, lockFileName);
369
+
370
+ // Look up the original lock file content from captured SourceFiles
371
+ const originalLockFileContent = acc.originalLockFiles.get(lockFilePath);
372
+
304
373
  const result = await runInstallInTempDir(
305
- updateInfo.projectDir,
306
374
  updateInfo.packageManager,
307
- modifiedPackageJson
375
+ modifiedPackageJson,
376
+ {
377
+ originalLockFileContent,
378
+ configFiles: updateInfo.configFiles
379
+ }
308
380
  );
309
381
 
310
382
  storeInstallResult(result, acc, updateInfo, modifiedPackageJson);
@@ -0,0 +1,208 @@
1
+ /*
2
+ * Copyright 2025 the original author or authors.
3
+ * <p>
4
+ * Licensed under the Moderne Source Available License (the "License");
5
+ * you may not use this file except in compliance with the License.
6
+ * You may obtain a copy of the License at
7
+ * <p>
8
+ * https://docs.moderne.io/licensing/moderne-source-available-license
9
+ * <p>
10
+ * Unless required by applicable law or agreed to in writing, software
11
+ * distributed under the License is distributed on an "AS IS" BASIS,
12
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13
+ * See the License for the specific language governing permissions and
14
+ * limitations under the License.
15
+ */
16
+ import * as fs from "fs";
17
+ import * as fsp from "fs/promises";
18
+ import * as path from "path";
19
+ import {spawnSync} from "child_process";
20
+
21
+ /**
22
+ * Default directory exclusions for file/directory discovery.
23
+ */
24
+ export const DEFAULT_DIR_EXCLUSIONS = new Set([
25
+ "node_modules",
26
+ ".git",
27
+ ".svn",
28
+ ".hg",
29
+ "dist",
30
+ "build",
31
+ "out",
32
+ "coverage",
33
+ ".next",
34
+ ".nuxt",
35
+ ".output"
36
+ ]);
37
+
38
+ /**
39
+ * Checks if a path is ignored by Git.
40
+ * Returns true if the path is ignored, false otherwise.
41
+ * Falls back to false if not in a git repository.
42
+ */
43
+ export function isGitIgnored(filePath: string, cwd?: string): boolean {
44
+ const result = spawnSync("git", ["check-ignore", "-q", filePath], {
45
+ cwd: cwd || path.dirname(filePath),
46
+ encoding: "utf8"
47
+ });
48
+ // Exit code 0 means the file IS ignored
49
+ // Exit code 1 means the file is NOT ignored
50
+ // Other exit codes indicate git errors (not a repo, etc.)
51
+ return result.status === 0;
52
+ }
53
+
54
+ /**
55
+ * Checks if we're in a git repository.
56
+ */
57
+ export function isInGitRepo(dir: string): boolean {
58
+ const result = spawnSync("git", ["rev-parse", "--git-dir"], {
59
+ cwd: dir,
60
+ encoding: "utf8"
61
+ });
62
+ return result.status === 0;
63
+ }
64
+
65
+ export interface WalkDirsOptions {
66
+ /**
67
+ * Maximum depth to descend. 0 means only immediate children.
68
+ * undefined means unlimited.
69
+ */
70
+ maxDepth?: number;
71
+
72
+ /**
73
+ * Directory names to exclude. Defaults to DEFAULT_DIR_EXCLUSIONS.
74
+ */
75
+ excludeDirs?: Set<string>;
76
+
77
+ /**
78
+ * Whether to respect .gitignore. Defaults to false.
79
+ */
80
+ respectGitIgnore?: boolean;
81
+
82
+ /**
83
+ * Only include directories containing a file with this name.
84
+ * Useful for finding package directories by looking for package.json.
85
+ */
86
+ mustContainFile?: string;
87
+ }
88
+
89
+ /**
90
+ * Recursively walks directories starting from a given path.
91
+ * Returns all subdirectory paths (not including the root).
92
+ */
93
+ export async function walkDirs(
94
+ rootDir: string,
95
+ options: WalkDirsOptions = {}
96
+ ): Promise<string[]> {
97
+ const {
98
+ maxDepth,
99
+ excludeDirs = DEFAULT_DIR_EXCLUSIONS,
100
+ respectGitIgnore = false,
101
+ mustContainFile
102
+ } = options;
103
+
104
+ const results: string[] = [];
105
+ const inGitRepo = respectGitIgnore && isInGitRepo(rootDir);
106
+
107
+ async function walk(dir: string, depth: number): Promise<void> {
108
+ if (maxDepth !== undefined && depth > maxDepth) {
109
+ return;
110
+ }
111
+
112
+ let entries: fs.Dirent[];
113
+ try {
114
+ entries = await fsp.readdir(dir, {withFileTypes: true});
115
+ } catch {
116
+ return;
117
+ }
118
+
119
+ for (const entry of entries) {
120
+ if (!entry.isDirectory()) {
121
+ continue;
122
+ }
123
+
124
+ // Skip excluded directories
125
+ if (excludeDirs.has(entry.name)) {
126
+ continue;
127
+ }
128
+
129
+ // Skip hidden directories
130
+ if (entry.name.startsWith('.')) {
131
+ continue;
132
+ }
133
+
134
+ const fullPath = path.join(dir, entry.name);
135
+
136
+ // Check git ignore if enabled
137
+ if (inGitRepo && isGitIgnored(fullPath, rootDir)) {
138
+ continue;
139
+ }
140
+
141
+ // Check for required file if specified
142
+ if (mustContainFile) {
143
+ const requiredFile = path.join(fullPath, mustContainFile);
144
+ if (fs.existsSync(requiredFile)) {
145
+ results.push(fullPath);
146
+ }
147
+ } else {
148
+ results.push(fullPath);
149
+ }
150
+
151
+ // Recurse into subdirectory
152
+ await walk(fullPath, depth + 1);
153
+ }
154
+ }
155
+
156
+ await walk(rootDir, 0);
157
+ return results;
158
+ }
159
+
160
+ /**
161
+ * Gets files tracked by git (and untracked but not ignored files).
162
+ * Falls back to empty array if not in a git repository.
163
+ */
164
+ export function getGitTrackedFiles(dir: string): string[] {
165
+ const files: string[] = [];
166
+
167
+ // Get tracked files
168
+ const tracked = spawnSync("git", ["ls-files"], {
169
+ cwd: dir,
170
+ encoding: "utf8"
171
+ });
172
+
173
+ if (tracked.status !== 0 || tracked.error) {
174
+ return [];
175
+ }
176
+
177
+ if (tracked.stdout) {
178
+ for (const line of tracked.stdout.split("\n")) {
179
+ const trimmed = line.trim();
180
+ if (trimmed) {
181
+ const fullPath = path.join(dir, trimmed);
182
+ if (fs.existsSync(fullPath)) {
183
+ files.push(fullPath);
184
+ }
185
+ }
186
+ }
187
+ }
188
+
189
+ // Get untracked but not ignored files
190
+ const untracked = spawnSync("git", ["ls-files", "--others", "--exclude-standard"], {
191
+ cwd: dir,
192
+ encoding: "utf8"
193
+ });
194
+
195
+ if (untracked.stdout) {
196
+ for (const line of untracked.stdout.split("\n")) {
197
+ const trimmed = line.trim();
198
+ if (trimmed) {
199
+ const fullPath = path.join(dir, trimmed);
200
+ if (fs.existsSync(fullPath)) {
201
+ files.push(fullPath);
202
+ }
203
+ }
204
+ }
205
+ }
206
+
207
+ return files;
208
+ }
@@ -43,7 +43,13 @@ export interface ParseProjectResponseItem {
43
43
  export class ParseProject {
44
44
  constructor(
45
45
  private readonly projectPath: string,
46
- private readonly exclusions?: string[]
46
+ private readonly exclusions?: string[],
47
+ /**
48
+ * Optional path to make source file paths relative to.
49
+ * If not specified, paths are relative to projectPath.
50
+ * Use this when parsing a subdirectory but wanting paths relative to the repository root.
51
+ */
52
+ private readonly relativeTo?: string
47
53
  ) {}
48
54
 
49
55
  static handle(
@@ -61,6 +67,8 @@ export class ParseProject {
61
67
 
62
68
  const projectPath = path.resolve(request.projectPath);
63
69
  const exclusions = request.exclusions ?? DEFAULT_EXCLUSIONS;
70
+ // Use relativeTo if specified, otherwise default to projectPath
71
+ const relativeTo = request.relativeTo ? path.resolve(request.relativeTo) : projectPath;
64
72
 
65
73
  // Use ProjectParser for file discovery and Prettier detection
66
74
  const projectParser = new ProjectParser(projectPath, {exclusions});
@@ -74,7 +82,7 @@ export class ParseProject {
74
82
  if (discovered.packageJsonFiles.length > 0) {
75
83
  const parser = Parsers.createParser("packageJson", {
76
84
  ctx,
77
- relativeTo: projectPath
85
+ relativeTo
78
86
  });
79
87
  const generator = parser.parse(...discovered.packageJsonFiles);
80
88
 
@@ -93,7 +101,7 @@ export class ParseProject {
93
101
 
94
102
  // Parse JSON lock files
95
103
  if (discovered.lockFiles.json.length > 0) {
96
- const parser = Parsers.createParser("json", {ctx, relativeTo: projectPath});
104
+ const parser = Parsers.createParser("json", {ctx, relativeTo});
97
105
  const generator = parser.parse(...discovered.lockFiles.json);
98
106
 
99
107
  for (const _ of discovered.lockFiles.json) {
@@ -111,7 +119,7 @@ export class ParseProject {
111
119
 
112
120
  // Parse YAML lock files
113
121
  if (discovered.lockFiles.yaml.length > 0) {
114
- const parser = Parsers.createParser("yaml", {ctx, relativeTo: projectPath});
122
+ const parser = Parsers.createParser("yaml", {ctx, relativeTo});
115
123
  const generator = parser.parse(...discovered.lockFiles.yaml);
116
124
 
117
125
  for (const _ of discovered.lockFiles.yaml) {
@@ -129,7 +137,7 @@ export class ParseProject {
129
137
 
130
138
  // Parse text lock files (yarn.lock Classic)
131
139
  if (discovered.lockFiles.text.length > 0) {
132
- const parser = Parsers.createParser("plainText", {ctx, relativeTo: projectPath});
140
+ const parser = Parsers.createParser("plainText", {ctx, relativeTo});
133
141
  const generator = parser.parse(...discovered.lockFiles.text);
134
142
 
135
143
  for (const _ of discovered.lockFiles.text) {
@@ -149,7 +157,7 @@ export class ParseProject {
149
157
  if (discovered.jsFiles.length > 0) {
150
158
  const parser = Parsers.createParser("javascript", {
151
159
  ctx,
152
- relativeTo: projectPath
160
+ relativeTo
153
161
  });
154
162
 
155
163
  // Check if Prettier is available
@@ -211,7 +219,7 @@ export class ParseProject {
211
219
 
212
220
  // Parse other YAML files
213
221
  if (discovered.yamlFiles.length > 0) {
214
- const parser = Parsers.createParser("yaml", {ctx, relativeTo: projectPath});
222
+ const parser = Parsers.createParser("yaml", {ctx, relativeTo});
215
223
  const generator = parser.parse(...discovered.yamlFiles);
216
224
 
217
225
  for (const _ of discovered.yamlFiles) {
@@ -229,7 +237,7 @@ export class ParseProject {
229
237
 
230
238
  // Parse other JSON files
231
239
  if (discovered.jsonFiles.length > 0) {
232
- const parser = Parsers.createParser("json", {ctx, relativeTo: projectPath});
240
+ const parser = Parsers.createParser("json", {ctx, relativeTo});
233
241
  const generator = parser.parse(...discovered.jsonFiles);
234
242
 
235
243
  for (const _ of discovered.jsonFiles) {
@@ -247,7 +255,7 @@ export class ParseProject {
247
255
 
248
256
  // Parse text config files (.prettierignore, .gitignore, etc.)
249
257
  if (discovered.textFiles.length > 0) {
250
- const parser = Parsers.createParser("plainText", {ctx, relativeTo: projectPath});
258
+ const parser = Parsers.createParser("plainText", {ctx, relativeTo});
251
259
  const generator = parser.parse(...discovered.textFiles);
252
260
 
253
261
  for (const _ of discovered.textFiles) {