@chappibunny/repolens 1.3.1 → 1.5.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -6,7 +6,7 @@ import { executeAIRequest } from "../utils/rate-limit.js";
6
6
  const DEFAULT_TIMEOUT_MS = 60000;
7
7
  const DEFAULT_MAX_TOKENS = 2500;
8
8
 
9
- export async function generateText({ system, user, temperature, maxTokens, config }) {
9
+ export async function generateText({ system, user, temperature, maxTokens, config, jsonMode, jsonSchema }) {
10
10
  // Check if AI is enabled (env var takes precedence, then config)
11
11
  const aiConfig = config?.ai || {};
12
12
  const enabled = process.env.REPOLENS_AI_ENABLED === "true" || aiConfig.enabled === true;
@@ -43,18 +43,58 @@ export async function generateText({ system, user, temperature, maxTokens, confi
43
43
  if (!baseUrl && provider === "openai_compatible") {
44
44
  warn("REPOLENS_AI_BASE_URL not set. Using OpenAI default.");
45
45
  }
46
+
47
+ // Select provider adapter
48
+ const adapter = getProviderAdapter(provider);
46
49
 
47
50
  try {
48
- const result = await callOpenAICompatibleAPI({
49
- baseUrl: baseUrl || "https://api.openai.com/v1",
51
+ const result = await adapter({
52
+ baseUrl: baseUrl || getDefaultBaseUrl(provider),
50
53
  apiKey,
51
54
  model,
52
55
  system,
53
56
  user,
54
57
  temperature: resolvedTemp,
55
58
  maxTokens: resolvedMaxTokens,
56
- timeoutMs
59
+ timeoutMs,
60
+ jsonMode,
57
61
  });
62
+
63
+ // Validate JSON schema if provided
64
+ if (jsonMode && jsonSchema && result) {
65
+ const parsed = safeParseJSON(result);
66
+ if (!parsed) {
67
+ warn("AI returned invalid JSON, re-prompting once...");
68
+ const retryResult = await adapter({
69
+ baseUrl: baseUrl || getDefaultBaseUrl(provider),
70
+ apiKey,
71
+ model,
72
+ system,
73
+ user: user + "\n\nIMPORTANT: Your previous response was not valid JSON. Respond ONLY with a valid JSON object.",
74
+ temperature: resolvedTemp,
75
+ maxTokens: resolvedMaxTokens,
76
+ timeoutMs,
77
+ jsonMode,
78
+ });
79
+ const retryParsed = safeParseJSON(retryResult);
80
+ if (!retryParsed) {
81
+ warn("AI JSON re-prompt also failed, falling back to deterministic.");
82
+ return { success: false, error: "Invalid JSON from AI after retry", fallback: true };
83
+ }
84
+ const schemaError = validateSchema(retryParsed, jsonSchema);
85
+ if (schemaError) {
86
+ warn(`AI JSON schema mismatch after retry: ${schemaError}`);
87
+ return { success: false, error: schemaError, fallback: true };
88
+ }
89
+ return { success: true, text: retryResult, parsed: retryParsed, fallback: false };
90
+ }
91
+ const schemaError = validateSchema(parsed, jsonSchema);
92
+ if (schemaError) {
93
+ warn(`AI JSON schema mismatch: ${schemaError}`);
94
+ return { success: false, error: schemaError, fallback: true };
95
+ }
96
+ return { success: true, text: result, parsed, fallback: false };
97
+ }
58
98
 
59
99
  return {
60
100
  success: true,
@@ -72,7 +112,59 @@ export async function generateText({ system, user, temperature, maxTokens, confi
72
112
  }
73
113
  }
74
114
 
75
- async function callOpenAICompatibleAPI({ baseUrl, apiKey, model, system, user, temperature, maxTokens, timeoutMs }) {
115
/**
 * Parse JSON safely, returning null on failure.
 * Falls back to extracting the first fenced markdown code block
 * (``` or ```json) when the raw text is not valid JSON.
 * @param {string|null|undefined} text - Candidate JSON text.
 * @returns {*} The parsed value, or null when nothing parses.
 */
function safeParseJSON(text) {
  // First attempt: the text is raw JSON.
  try {
    return JSON.parse(text);
  } catch {
    // Second attempt: the model wrapped its JSON in a markdown fence.
    const fenced = text?.match(/```(?:json)?\s*([\s\S]*?)```/);
    if (!fenced) return null;
    try {
      return JSON.parse(fenced[1].trim());
    } catch {
      return null;
    }
  }
}
130
+
131
/**
 * Validate a parsed JSON value against a simple schema (required fields).
 * @param {*} obj - The parsed JSON value to validate.
 * @param {{required?: string[]}} schema - Schema listing required field names.
 * @returns {string|null} An error message, or null if valid.
 */
function validateSchema(obj, schema) {
  // Ignore schemas without a usable `required` array (a string here would
  // otherwise be iterated character-by-character).
  if (!schema || !Array.isArray(schema.required)) return null;
  // `field in obj` throws a TypeError on primitives and on null, which
  // JSON.parse can legitimately produce — report instead of throwing.
  if (obj === null || typeof obj !== "object") {
    return `Expected a JSON object but got ${obj === null ? "null" : typeof obj}`;
  }
  for (const field of schema.required) {
    if (!(field in obj)) return `Missing required field: ${field}`;
  }
  return null;
}
142
+
143
/**
 * Resolve the default API base URL for a provider.
 * @param {string} provider - Provider identifier ("anthropic", "azure", "google", ...).
 * @returns {string} The base URL to use when none is configured.
 */
function getDefaultBaseUrl(provider) {
  if (provider === "anthropic") return "https://api.anthropic.com";
  if (provider === "google") return "https://generativelanguage.googleapis.com";
  if (provider === "azure") {
    // Azure endpoints are account-specific, so prefer the env override.
    return process.env.REPOLENS_AI_BASE_URL || "https://api.openai.com/v1";
  }
  // "openai_compatible" and anything unknown fall back to OpenAI.
  return "https://api.openai.com/v1";
}
154
+
155
/**
 * Pick the request adapter for the configured provider.
 * Anthropic and Google get dedicated adapters; everything else
 * ("openai_compatible", "azure") speaks the OpenAI chat format.
 * @param {string} provider - Provider identifier.
 * @returns {Function} The adapter function to call.
 */
function getProviderAdapter(provider) {
  if (provider === "anthropic") return callAnthropicAPI;
  if (provider === "google") return callGoogleAPI;
  return callOpenAICompatibleAPI;
}
166
+
167
+ async function callOpenAICompatibleAPI({ baseUrl, apiKey, model, system, user, temperature, maxTokens, timeoutMs, jsonMode }) {
76
168
  return await executeAIRequest(async () => {
77
169
  const url = `${baseUrl}/chat/completions`;
78
170
 
@@ -94,6 +186,9 @@ async function callOpenAICompatibleAPI({ baseUrl, apiKey, model, system, user, t
94
186
  if (temperature != null) {
95
187
  body.temperature = temperature;
96
188
  }
189
+ if (jsonMode) {
190
+ body.response_format = { type: "json_object" };
191
+ }
97
192
 
98
193
  const response = await fetch(url, {
99
194
  method: "POST",
@@ -132,15 +227,126 @@ async function callOpenAICompatibleAPI({ baseUrl, apiKey, model, system, user, t
132
227
  });
133
228
  }
134
229
 
230
/**
 * Anthropic Messages API adapter.
 * Posts a single-turn request to /v1/messages and returns the text of the
 * first content block. The request is aborted after `timeoutMs`.
 * @returns {Promise<string>} Generated text.
 * @throws {Error} On non-2xx responses, empty content, or timeout.
 */
async function callAnthropicAPI({ baseUrl, apiKey, model, system, user, temperature, maxTokens, timeoutMs }) {
  return await executeAIRequest(async () => {
    const endpoint = `${baseUrl}/v1/messages`;

    // Abort the fetch if it runs past the configured timeout.
    const controller = new AbortController();
    const timer = setTimeout(() => controller.abort(), timeoutMs);

    try {
      const payload = {
        model: model || "claude-sonnet-4-20250514",
        max_tokens: maxTokens,
        system,
        messages: [{ role: "user", content: user }],
        // Only send temperature when explicitly configured.
        ...(temperature != null ? { temperature } : {}),
      };

      const response = await fetch(endpoint, {
        method: "POST",
        headers: {
          "Content-Type": "application/json",
          "x-api-key": apiKey,
          "anthropic-version": "2023-06-01",
        },
        body: JSON.stringify(payload),
        signal: controller.signal,
      });

      // Headers arrived — the timeout no longer applies (body reads are not timed).
      clearTimeout(timer);

      if (!response.ok) {
        const errorText = await response.text();
        throw new Error(`Anthropic API error (${response.status}): ${errorText}`);
      }

      const data = await response.json();

      const [first] = data.content ?? [];
      if (first === undefined) {
        throw new Error("No content returned from Anthropic API");
      }

      return first.text;
    } catch (error) {
      clearTimeout(timer);
      if (error.name === "AbortError") {
        throw new Error(`Request timeout after ${timeoutMs}ms`);
      }
      throw error;
    }
  });
}
285
+
286
/**
 * Google Gemini API adapter.
 * System and user prompts are concatenated into a single content turn for the
 * v1beta generateContent endpoint. The request is aborted after `timeoutMs`.
 * @param {object} opts - Request options; `jsonMode` asks Gemini for JSON output
 *   (the caller passes it to every adapter; it was previously dropped here).
 * @returns {Promise<string>} Text of the first candidate.
 * @throws {Error} On non-2xx responses, an empty candidate list, or timeout.
 */
async function callGoogleAPI({ baseUrl, apiKey, model, system, user, temperature, maxTokens, timeoutMs, jsonMode }) {
  return await executeAIRequest(async () => {
    const geminiModel = model || "gemini-pro";
    // Gemini's REST convention puts the API key in the query string.
    const url = `${baseUrl}/v1beta/models/${geminiModel}:generateContent?key=${encodeURIComponent(apiKey)}`;

    const controller = new AbortController();
    const timeoutId = setTimeout(() => controller.abort(), timeoutMs);

    try {
      const body = {
        contents: [{ parts: [{ text: `${system}\n\n${user}` }] }],
        generationConfig: { maxOutputTokens: maxTokens },
      };
      if (temperature != null) {
        body.generationConfig.temperature = temperature;
      }
      if (jsonMode) {
        // Gemini's equivalent of OpenAI's response_format json_object.
        body.generationConfig.responseMimeType = "application/json";
      }

      const response = await fetch(url, {
        method: "POST",
        headers: { "Content-Type": "application/json" },
        body: JSON.stringify(body),
        signal: controller.signal,
      });

      // Headers arrived — stop the abort timer before reading the body.
      clearTimeout(timeoutId);

      if (!response.ok) {
        const errorText = await response.text();
        throw new Error(`Google API error (${response.status}): ${errorText}`);
      }

      const data = await response.json();

      if (!data.candidates || data.candidates.length === 0) {
        throw new Error("No candidates returned from Google API");
      }

      return data.candidates[0].content.parts[0].text;
    } catch (error) {
      clearTimeout(timeoutId);
      if (error.name === "AbortError") {
        throw new Error(`Request timeout after ${timeoutMs}ms`);
      }
      throw error;
    }
  });
}
336
+
135
337
/**
 * Whether AI features are enabled via the REPOLENS_AI_ENABLED env var.
 * @returns {boolean} True only when the variable is exactly "true".
 */
export function isAIEnabled() {
  const flag = process.env.REPOLENS_AI_ENABLED;
  return flag === "true";
}
138
340
 
139
341
  export function getAIConfig() {
342
+ const provider = process.env.REPOLENS_AI_PROVIDER || "openai_compatible";
343
+ const defaultModel = provider === "anthropic" ? "claude-sonnet-4-20250514"
344
+ : provider === "google" ? "gemini-pro"
345
+ : "gpt-5-mini";
140
346
  return {
141
347
  enabled: isAIEnabled(),
142
- provider: process.env.REPOLENS_AI_PROVIDER || "openai_compatible",
143
- model: process.env.REPOLENS_AI_MODEL || "gpt-5-mini",
348
+ provider,
349
+ model: process.env.REPOLENS_AI_MODEL || defaultModel,
144
350
  hasApiKey: !!process.env.REPOLENS_AI_API_KEY,
145
351
  temperature: process.env.REPOLENS_AI_TEMPERATURE ? parseFloat(process.env.REPOLENS_AI_TEMPERATURE) : undefined,
146
352
  maxTokens: parseInt(process.env.REPOLENS_AI_MAX_TOKENS || DEFAULT_MAX_TOKENS)
@@ -0,0 +1,146 @@
1
+ // CODEOWNERS file parser
2
+ // Maps file paths to team/individual owners
3
+
4
+ import fs from "node:fs/promises";
5
+ import path from "node:path";
6
+ import { info } from "../utils/logger.js";
7
+
8
/**
 * Parse CODEOWNERS file and return ownership rules.
 * Checks the standard locations in order: repo root, .github/, docs/.
 * The first location that yields at least one rule wins.
 * @param {string} repoRoot - Absolute path of the repository root.
 * @returns {Promise<{found: boolean, file: string|null, rules: Array}>}
 */
export async function parseCodeowners(repoRoot) {
  const candidates = [
    "CODEOWNERS",
    path.join(".github", "CODEOWNERS"),
    path.join("docs", "CODEOWNERS"),
  ];

  for (const candidate of candidates) {
    const absolute = path.join(repoRoot, candidate);
    let content;
    try {
      content = await fs.readFile(absolute, "utf8");
    } catch {
      continue; // Missing or unreadable — try the next standard location.
    }

    const rules = parseRules(content);
    if (rules.length > 0) {
      const relative = path.relative(repoRoot, absolute);
      info(`CODEOWNERS loaded from ${relative} (${rules.length} rules)`);
      return { found: true, file: relative, rules };
    }
  }

  return { found: false, file: null, rules: [] };
}
34
+
35
/**
 * Parse CODEOWNERS content into pattern→owners rules.
 * Blank lines and #-comment lines are skipped; a rule needs a pattern
 * followed by at least one owner token (@handle or email).
 * @param {string} content - Raw CODEOWNERS text.
 * @returns {Array<{pattern: string, owners: string[]}>}
 */
function parseRules(content) {
  const rules = [];

  for (const rawLine of content.split("\n")) {
    const line = rawLine.trim();
    if (line === "" || line.startsWith("#")) continue;

    const [pattern, ...tokens] = line.split(/\s+/);
    if (tokens.length === 0) continue;

    // Owner tokens are @handles/@org-teams or email addresses.
    const owners = tokens.filter((token) => token.startsWith("@") || token.includes("@"));
    if (owners.length > 0) {
      rules.push({ pattern, owners });
    }
  }

  return rules;
}
58
+
59
/**
 * Find owners for a given file path using CODEOWNERS rules.
 * The LAST rule that matches wins, mirroring GitHub's behavior.
 * @param {string} filePath - Repo-relative file path.
 * @param {Array<{pattern: string, owners: string[]}>} rules - Parsed rules.
 * @returns {string[]} Owners of the last matching rule, or [] if none match.
 */
export function findOwners(filePath, rules) {
  let owners = [];

  for (const { pattern, owners: ruleOwners } of rules) {
    if (matchPattern(filePath, pattern)) {
      owners = ruleOwners;
    }
  }

  return owners;
}
74
+
75
/**
 * Match a file path against a CODEOWNERS pattern.
 * Supports: *, **, ?, directory patterns ("src/"), exact matches.
 * NOTE(review): a leading "/" is stripped and the pattern still matches
 * anywhere in the path (original behavior kept); GitHub anchors such
 * patterns to the repo root — confirm before tightening.
 * @param {string} filePath - Repo-relative path (either slash style).
 * @param {string} pattern - CODEOWNERS pattern.
 * @returns {boolean} True when the pattern matches the path.
 */
function matchPattern(filePath, pattern) {
  const normalized = filePath.replace(/\\/g, "/");

  // Remove leading slash for consistency
  const cleanPattern = pattern.startsWith("/") ? pattern.slice(1) : pattern;

  // Directory pattern (e.g., "src/") matches anything under that directory.
  if (cleanPattern.endsWith("/")) {
    return normalized.startsWith(cleanPattern) || normalized.includes(`/${cleanPattern}`);
  }

  // Escape regex metacharacters first (the original escaped only "."),
  // then translate glob tokens. "*" and "?" are handled as glob syntax.
  let regex = cleanPattern
    .replace(/[.+^${}()|[\]]/g, "\\$&")
    .replace(/\*\*/g, "<<DOUBLESTAR>>")
    .replace(/\*/g, "[^/]*")
    .replace(/\?/g, "[^/]")
    .replace(/<<DOUBLESTAR>>/g, ".*");

  // A pattern without a path separator may match at any directory depth.
  if (!cleanPattern.includes("/")) {
    regex = `(^|/)${regex}($|/)`;
  } else {
    regex = `(^|/)${regex}$`;
  }

  try {
    return new RegExp(regex).test(normalized);
  } catch {
    return false;
  }
}
110
+
111
/**
 * Build an ownership summary for modules.
 * For each module, owners are ranked by how many of that module's files
 * they cover (most files first).
 * @param {Array<{key: string}>} modules - Module descriptors.
 * @param {string[]} files - Repo-relative file paths.
 * @param {Array} rules - Parsed CODEOWNERS rules.
 * @returns {Object<string, string[]>} Map of modulePath → ranked owners[].
 */
export function buildOwnershipMap(modules, files, rules) {
  if (!rules || rules.length === 0) return {};

  const ownershipMap = {};

  for (const mod of modules) {
    // Files living inside this module's directory (or the module itself).
    const moduleFiles = files.filter((file) => {
      const normalized = file.replace(/\\/g, "/");
      return normalized === mod.key || normalized.startsWith(`${mod.key}/`);
    });

    // Tally how many of the module's files each owner covers.
    const counts = new Map();
    for (const file of moduleFiles) {
      for (const owner of findOwners(file, rules)) {
        counts.set(owner, (counts.get(owner) ?? 0) + 1);
      }
    }

    // Rank owners by coverage, highest first (stable for ties).
    const ranked = [...counts.entries()]
      .sort(([, a], [, b]) => b - a)
      .map(([owner]) => owner);

    if (ranked.length > 0) {
      ownershipMap[mod.key] = ranked;
    }
  }

  return ownershipMap;
}
@@ -80,7 +80,17 @@ export function buildAIContext(scanResult, config) {
80
80
 
81
81
  patterns,
82
82
 
83
- repoRoots: config.module_roots || []
83
+ repoRoots: config.module_roots || [],
84
+
85
+ // Monorepo workspace metadata (if detected)
86
+ monorepo: scanResult.monorepo?.isMonorepo ? {
87
+ tool: scanResult.monorepo.tool,
88
+ packageCount: scanResult.monorepo.packages.length,
89
+ packages: scanResult.monorepo.packages.slice(0, 20).map(p => ({
90
+ name: p.name,
91
+ path: p.path,
92
+ })),
93
+ } : undefined,
84
94
  };
85
95
  }
86
96
 
@@ -2,42 +2,42 @@
2
2
 
3
3
  const DEFAULT_DOMAIN_HINTS = [
4
4
  {
5
- match: ["auth", "login", "signup", "session", "user", "account"],
6
- domain: "Authentication",
7
- description: "User authentication and identity flows"
5
+ match: ["auth", "login", "signup", "session", "user", "account", "oauth", "sso"],
6
+ domain: "Authentication & Identity",
7
+ description: "User authentication, authorization, and identity management"
8
8
  },
9
9
  {
10
- match: ["stock", "chart", "price", "market", "watchlist", "ticker", "quote"],
11
- domain: "Market Data & Analysis",
12
- description: "Market data retrieval, analysis, and visualization"
10
+ match: ["dashboard", "analytics", "chart", "report", "metric", "stat", "insight"],
11
+ domain: "Analytics & Reporting",
12
+ description: "Data visualization, reporting, and analytics dashboards"
13
13
  },
14
14
  {
15
- match: ["article", "newsletter", "news", "research", "content", "blog", "post"],
16
- domain: "Content & Research",
17
- description: "Content publishing, research, and insight delivery"
15
+ match: ["article", "newsletter", "news", "research", "content", "blog", "post", "cms"],
16
+ domain: "Content Management",
17
+ description: "Content publishing, management, and delivery"
18
18
  },
19
19
  {
20
- match: ["portfolio", "positions", "holdings", "trades", "orders"],
21
- domain: "Portfolio Management",
22
- description: "Portfolio tracking and trading functionality"
20
+ match: ["search", "filter", "query", "index", "catalog", "browse"],
21
+ domain: "Search & Discovery",
22
+ description: "Search functionality, filtering, and content discovery"
23
23
  },
24
24
  {
25
- match: ["alert", "notification", "email", "sms", "webhook"],
26
- domain: "Alerts & Notifications",
27
- description: "User notification and alerting system"
25
+ match: ["alert", "notification", "email", "sms", "webhook", "push", "message"],
26
+ domain: "Notifications",
27
+ description: "User notifications and messaging system"
28
28
  },
29
29
  {
30
- match: ["payment", "subscription", "billing", "stripe", "checkout"],
30
+ match: ["payment", "subscription", "billing", "stripe", "checkout", "invoice"],
31
31
  domain: "Payments & Billing",
32
32
  description: "Payment processing and subscription management"
33
33
  },
34
34
  {
35
- match: ["api", "endpoint", "route", "handler"],
35
+ match: ["api", "endpoint", "route", "handler", "controller", "middleware"],
36
36
  domain: "API Layer",
37
37
  description: "Backend API endpoints and request handling"
38
38
  },
39
39
  {
40
- match: ["component", "ui", "button", "form", "modal", "dialog"],
40
+ match: ["component", "ui", "button", "form", "modal", "dialog", "layout", "widget"],
41
41
  domain: "UI Components",
42
42
  description: "Reusable user interface components"
43
43
  },
@@ -47,19 +47,34 @@ const DEFAULT_DOMAIN_HINTS = [
47
47
  description: "Custom React hooks for state and behavior"
48
48
  },
49
49
  {
50
- match: ["store", "state", "redux", "zustand", "context"],
50
+ match: ["store", "state", "redux", "zustand", "context", "atom"],
51
51
  domain: "State Management",
52
52
  description: "Application state management"
53
53
  },
54
54
  {
55
- match: ["lib", "util", "helper", "common", "shared"],
55
+ match: ["lib", "util", "helper", "common", "shared", "tool"],
56
56
  domain: "Shared Utilities",
57
57
  description: "Common utilities and helper functions"
58
58
  },
59
59
  {
60
- match: ["data", "database", "db", "prisma", "sql"],
60
+ match: ["data", "database", "db", "prisma", "sql", "model", "schema", "migration", "seed"],
61
61
  domain: "Data Layer",
62
- description: "Database access and data persistence"
62
+ description: "Database access, models, and data persistence"
63
+ },
64
+ {
65
+ match: ["config", "setting", "env", "constant"],
66
+ domain: "Configuration",
67
+ description: "Application configuration and environment settings"
68
+ },
69
+ {
70
+ match: ["test", "spec", "fixture", "mock", "stub", "e2e", "cypress", "playwright"],
71
+ domain: "Testing",
72
+ description: "Test suites, fixtures, and testing utilities"
73
+ },
74
+ {
75
+ match: ["job", "queue", "worker", "cron", "task", "scheduler", "background"],
76
+ domain: "Background Jobs",
77
+ description: "Background processing, job queues, and scheduled tasks"
63
78
  }
64
79
  ];
65
80
 
@@ -0,0 +1,155 @@
1
+ // Monorepo workspace detection
2
+ // Detects npm/yarn workspaces, pnpm workspaces, and Lerna configurations
3
+
4
+ import fs from "node:fs/promises";
5
+ import path from "node:path";
6
+ import { info } from "../utils/logger.js";
7
+
8
+ /**
9
+ * Detect monorepo workspaces in a repository.
10
+ * Returns { isMonorepo, tool, packages[] } where each package has { name, path, packageJson }.
11
+ */
12
+ export async function detectMonorepo(repoRoot) {
13
+ const result = { isMonorepo: false, tool: null, packages: [] };
14
+
15
+ // 1. Check package.json workspaces (npm/yarn)
16
+ const npmWorkspaces = await detectNpmWorkspaces(repoRoot);
17
+ if (npmWorkspaces.length > 0) {
18
+ result.isMonorepo = true;
19
+ result.tool = "npm/yarn workspaces";
20
+ result.packages = npmWorkspaces;
21
+ info(`Monorepo detected: ${result.tool} with ${result.packages.length} packages`);
22
+ return result;
23
+ }
24
+
25
+ // 2. Check pnpm-workspace.yaml
26
+ const pnpmWorkspaces = await detectPnpmWorkspaces(repoRoot);
27
+ if (pnpmWorkspaces.length > 0) {
28
+ result.isMonorepo = true;
29
+ result.tool = "pnpm workspaces";
30
+ result.packages = pnpmWorkspaces;
31
+ info(`Monorepo detected: ${result.tool} with ${result.packages.length} packages`);
32
+ return result;
33
+ }
34
+
35
+ // 3. Check lerna.json
36
+ const lernaPackages = await detectLerna(repoRoot);
37
+ if (lernaPackages.length > 0) {
38
+ result.isMonorepo = true;
39
+ result.tool = "Lerna";
40
+ result.packages = lernaPackages;
41
+ info(`Monorepo detected: ${result.tool} with ${result.packages.length} packages`);
42
+ return result;
43
+ }
44
+
45
+ return result;
46
+ }
47
+
48
/**
 * Detect npm/yarn workspaces declared in the root package.json.
 * Returns [] when package.json is missing, unreadable, or has no workspaces.
 */
async function detectNpmWorkspaces(repoRoot) {
  try {
    const raw = await fs.readFile(path.join(repoRoot, "package.json"), "utf8");
    const { workspaces } = JSON.parse(raw);
    if (!workspaces) return [];

    // "workspaces" is either an array of globs or { packages: [...] }.
    const patterns = Array.isArray(workspaces) ? workspaces : workspaces.packages || [];

    return await resolveWorkspacePatterns(repoRoot, patterns);
  } catch {
    return [];
  }
}
66
+
67
/**
 * Detect pnpm workspaces declared in pnpm-workspace.yaml.
 * Returns [] when the file is missing, unreadable, or declares no packages.
 * @param {string} repoRoot - Absolute path of the repository root.
 * @returns {Promise<Array>} Resolved package descriptors.
 */
async function detectPnpmWorkspaces(repoRoot) {
  try {
    const yamlPath = path.join(repoRoot, "pnpm-workspace.yaml");
    const raw = await fs.readFile(yamlPath, "utf8");

    // Minimal YAML parsing for the top-level "packages:" list
    // (avoids adding a js-yaml dependency just for this).
    const patterns = [];
    let inPackages = false;
    for (const line of raw.split("\n")) {
      const trimmed = line.trim();
      if (trimmed === "packages:") {
        inPackages = true;
        continue;
      }
      if (inPackages) {
        if (trimmed.startsWith("- ")) {
          patterns.push(trimmed.slice(2).replace(/['"]/g, "").trim());
        } else if (trimmed && !trimmed.startsWith("#")) {
          break; // End of packages list
        }
      }
    }

    // pnpm supports negative patterns like "!**/test/**" which EXCLUDE
    // directories — they must not be resolved as package locations.
    // Empty entries (malformed YAML) are dropped for the same reason.
    const includes = patterns.filter((p) => p && !p.startsWith("!"));

    return await resolveWorkspacePatterns(repoRoot, includes);
  } catch {
    return [];
  }
}
95
+
96
/**
 * Detect Lerna-managed packages via lerna.json.
 * Returns [] when lerna.json is missing or unreadable.
 */
async function detectLerna(repoRoot) {
  try {
    const raw = await fs.readFile(path.join(repoRoot, "lerna.json"), "utf8");
    const { packages } = JSON.parse(raw);

    // Lerna's documented default when no "packages" key is present.
    return await resolveWorkspacePatterns(repoRoot, packages || ["packages/*"]);
  } catch {
    return [];
  }
}
108
+
109
/**
 * Resolve workspace glob patterns to actual package directories.
 * Glob patterns ("packages/*") list the children of the static prefix;
 * exact patterns ("docs") treat the named directory ITSELF as the package
 * (the original scanned its children instead, so exact workspace entries
 * were never found). Only one glob level is expanded — "a/*​/b"-style or
 * deep "**" patterns resolve from the prefix before the first "*".
 * @param {string} repoRoot - Absolute path of the repository root.
 * @param {string[]} patterns - Workspace patterns from the tool config.
 * @returns {Promise<Array<{name, path, version, dependencies, devDependencies}>>}
 */
async function resolveWorkspacePatterns(repoRoot, patterns) {
  const packages = [];
  const seen = new Set();

  // Record pkgDir as a package when it holds a readable package.json.
  const addPackage = async (pkgDir) => {
    try {
      const raw = await fs.readFile(path.join(pkgDir, "package.json"), "utf8");
      const pkg = JSON.parse(raw);
      const relativePath = path.relative(repoRoot, pkgDir).replace(/\\/g, "/");

      if (!seen.has(relativePath)) {
        seen.add(relativePath);
        packages.push({
          name: pkg.name || path.basename(pkgDir),
          path: relativePath,
          version: pkg.version,
          dependencies: Object.keys(pkg.dependencies || {}),
          devDependencies: Object.keys(pkg.devDependencies || {}),
        });
      }
    } catch {
      // No readable/parsable package.json — not a package directory.
    }
  };

  for (const pattern of patterns) {
    if (!pattern.includes("*")) {
      // Exact directory pattern: the directory itself is the package.
      await addPackage(path.join(repoRoot, pattern));
      continue;
    }

    // Glob pattern: strip everything from the first "*" and list the
    // child directories of the remaining static prefix.
    const basePath = pattern.replace(/\/?\*.*$/, "");
    const searchDir = path.join(repoRoot, basePath);

    try {
      const entries = await fs.readdir(searchDir, { withFileTypes: true });
      for (const entry of entries) {
        if (entry.isDirectory()) {
          await addPackage(path.join(searchDir, entry.name));
        }
      }
    } catch {
      // Base directory doesn't exist — skip this pattern.
    }
  }

  return packages;
}