@chappibunny/repolens 0.6.4 → 0.8.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,226 @@
1
+ // Architecture drift detection
2
+ // Compares current scan snapshot against a stored reference baseline
3
+ // Flags structural changes: new/removed modules, dependency shifts, API surface changes
4
+
5
+ import fs from "node:fs/promises";
6
+ import path from "node:path";
7
+ import { info, warn } from "../utils/logger.js";
8
+
9
// Name of the baseline file written inside the output directory (e.g. .repolens/).
const BASELINE_FILENAME = "architecture-baseline.json";
10
+
11
/**
 * Save the current architecture state as a baseline for future drift detection.
 * The baseline is written as pretty-printed JSON so it diffs cleanly in VCS.
 * @param {object} snapshot - The architecture snapshot to save (see buildSnapshot)
 * @param {string} outputDir - Directory to save the baseline (typically .repolens/)
 * @returns {Promise<string>} Path of the written baseline file
 */
export async function saveBaseline(snapshot, outputDir) {
  const baselinePath = path.join(outputDir, BASELINE_FILENAME);
  // Spread the snapshot FIRST so the bookkeeping fields below can never be
  // clobbered by stray `version`/`timestamp` keys inside the snapshot itself.
  const baseline = {
    ...snapshot,
    version: 1,
    timestamp: new Date().toISOString(),
  };
  // Ensure the output directory exists before writing.
  await fs.mkdir(outputDir, { recursive: true });
  await fs.writeFile(baselinePath, JSON.stringify(baseline, null, 2), "utf8");
  info(`Architecture baseline saved to ${baselinePath}`);
  return baselinePath;
}
28
+
29
/**
 * Load a previously saved baseline. Returns null if none exists.
 * A corrupt (unparseable) baseline file is reported via warn() before falling
 * back to null, instead of being silently treated as "no baseline".
 * @param {string} outputDir - Directory the baseline was saved into
 * @returns {Promise<object|null>} Parsed baseline, or null when absent/corrupt
 */
export async function loadBaseline(outputDir) {
  const baselinePath = path.join(outputDir, BASELINE_FILENAME);
  let raw;
  try {
    raw = await fs.readFile(baselinePath, "utf8");
  } catch {
    // Missing or unreadable file simply means no baseline has been established.
    return null;
  }
  try {
    return JSON.parse(raw);
  } catch (err) {
    // A file that exists but does not parse is worth surfacing — it likely
    // means a bad write or manual edit, not a fresh repository.
    warn(`Ignoring corrupt architecture baseline at ${baselinePath}: ${err.message}`);
    return null;
  }
}
41
+
42
/**
 * Build an architecture snapshot from scan results and analysis outputs.
 * This is the canonical representation used for drift comparison. All lists
 * are sorted so snapshots are stable across runs and diff cleanly.
 *
 * @param {object} scanResult - Repository scan output (modules, api, pages, …)
 * @param {object} [depGraph] - Dependency graph analysis (edges, cycles, externalDeps)
 * @param {object} [graphqlResult] - GraphQL analysis output (types/queries/mutations)
 * @param {object} [tsResult] - TypeScript analysis output (interfaces/classes)
 * @returns {object} Snapshot suitable for saveBaseline() / detectDrift()
 */
export function buildSnapshot(scanResult, depGraph, graphqlResult, tsResult) {
  const modules = scanResult.modules || [];
  return {
    modules: modules.map(m => m.key).sort(),
    moduleFileCounts: Object.fromEntries(modules.map(m => [m.key, m.fileCount])),
    filesCount: scanResult.filesCount || 0,
    // Endpoint identity is "METHODS:path" (e.g. "GET,POST:/users"). Tolerate
    // entries with no methods array rather than throwing mid-snapshot.
    apiEndpoints: (scanResult.api || []).map(a => `${(a.methods || []).join(",")}:${a.path}`).sort(),
    pages: (scanResult.pages || []).map(p => p.path).sort(),
    externalApis: (scanResult.externalApis || []).map(e => e.name).sort(),
    frameworks: scanResult.metadata?.frameworks || [],
    depGraphEdgeCount: depGraph?.edges?.length || 0,
    depGraphCycleCount: depGraph?.cycles?.length || 0,
    externalDeps: depGraph?.externalDeps || [],
    // GraphQL/TS sections collapse to empty lists when the analyzer did not
    // detect the technology at all.
    graphqlTypes: graphqlResult?.detected ? graphqlResult.types.map(t => t.name).sort() : [],
    graphqlQueries: graphqlResult?.detected ? graphqlResult.queries.map(q => q.name).sort() : [],
    graphqlMutations: graphqlResult?.detected ? graphqlResult.mutations.map(m => m.name).sort() : [],
    tsInterfaces: tsResult?.detected ? tsResult.interfaces.map(i => i.name).sort() : [],
    tsClasses: tsResult?.detected ? tsResult.classes.map(c => c.name).sort() : [],
  };
}
67
+
68
/**
 * Compare a current snapshot against a stored baseline and produce drift analysis.
 *
 * @param {object|null} baseline - Previously saved baseline (see loadBaseline), or null
 * @param {object} current - Snapshot built from the latest scan (see buildSnapshot)
 * @returns {{hasBaseline: boolean, baselineTimestamp?: string, drifts: object[], summary: string}}
 */
export function detectDrift(baseline, current) {
  if (!baseline) {
    return {
      hasBaseline: false,
      drifts: [],
      summary: "No baseline found. Run `repolens publish` once to establish a baseline.",
    };
  }

  const drifts = [];

  // Compute added/removed items between two string lists (treated as sets).
  const diff = (baseItems, currItems) => {
    const base = new Set(baseItems || []);
    const curr = new Set(currItems || []);
    return {
      added: [...curr].filter(x => !base.has(x)),
      removed: [...base].filter(x => !curr.has(x)),
    };
  };

  // Push the standard added/removed drift entries for one category.
  // Additions are informational; removals default to "warning".
  const pushAddRemove = (category, baseItems, currItems, removedSeverity = "warning") => {
    const { added, removed } = diff(baseItems, currItems);
    if (added.length > 0) {
      drifts.push({ category, type: "added", items: added, severity: "info" });
    }
    if (removed.length > 0) {
      drifts.push({ category, type: "removed", items: removed, severity: removedSeverity });
    }
  };

  // Module drift
  pushAddRemove("modules", baseline.modules, current.modules);

  // Significant module size changes (>50% file count change)
  for (const mod of current.modules || []) {
    const baseCount = baseline.moduleFileCounts?.[mod] || 0;
    const currCount = current.moduleFileCounts?.[mod] || 0;
    if (baseCount > 0 && currCount > 0) {
      const changeRatio = Math.abs(currCount - baseCount) / baseCount;
      if (changeRatio > 0.5) {
        // Signed percentage so shrinking modules read as negative. (The old
        // code compared the *absolute* ratio to 0, so it always printed "+",
        // even when a module lost files.)
        const pct = Math.round(((currCount - baseCount) / baseCount) * 100);
        drifts.push({
          category: "modules",
          type: "size-change",
          items: [`${mod}: ${baseCount} → ${currCount} files (${pct > 0 ? "+" : ""}${pct}%)`],
          // Only growth can exceed a 1.0 ratio, so shrinkage stays "info".
          severity: changeRatio > 1.0 ? "warning" : "info",
        });
      }
    }
  }

  // API endpoint, page, and external dependency drift share the same shape.
  pushAddRemove("api", baseline.apiEndpoints, current.apiEndpoints);
  pushAddRemove("pages", baseline.pages, current.pages);
  pushAddRemove("dependencies", baseline.externalDeps, current.externalDeps);

  // Framework changes: losing a framework is treated as critical.
  pushAddRemove("frameworks", baseline.frameworks, current.frameworks, "critical");

  // Circular dependency drift
  const baseCycles = baseline.depGraphCycleCount || 0;
  const currCycles = current.depGraphCycleCount || 0;
  if (currCycles !== baseCycles) {
    drifts.push({
      category: "cycles",
      type: currCycles > baseCycles ? "increased" : "decreased",
      items: [`${baseCycles} → ${currCycles} circular dependencies`],
      severity: currCycles > baseCycles ? "warning" : "info",
    });
  }

  // GraphQL schema drift (type names only; removals are the riskier direction)
  const { added: addedTypes, removed: removedTypes } = diff(baseline.graphqlTypes, current.graphqlTypes);
  if (addedTypes.length > 0 || removedTypes.length > 0) {
    drifts.push({
      category: "graphql",
      type: "schema-changed",
      items: [
        ...(addedTypes.length ? [`+${addedTypes.length} types: ${addedTypes.join(", ")}`] : []),
        ...(removedTypes.length ? [`-${removedTypes.length} types: ${removedTypes.join(", ")}`] : []),
      ],
      severity: removedTypes.length > 0 ? "warning" : "info",
    });
  }

  // Overall file count change (>20% swing is worth surfacing)
  const baseFiles = baseline.filesCount || 0;
  const currFiles = current.filesCount || 0;
  if (baseFiles > 0 && currFiles > 0) {
    const fileChange = Math.abs(currFiles - baseFiles) / baseFiles;
    if (fileChange > 0.2) {
      // Signed percentage, consistent with the module size-change message.
      const filePct = Math.round(((currFiles - baseFiles) / baseFiles) * 100);
      drifts.push({
        category: "scale",
        type: "file-count-change",
        items: [`${baseFiles} → ${currFiles} files (${filePct > 0 ? "+" : ""}${filePct}%)`],
        severity: fileChange > 0.5 ? "warning" : "info",
      });
    }
  }

  const bySeverity = sev => drifts.filter(d => d.severity === sev).length;
  const summary = drifts.length === 0
    ? "No architecture drift detected since last baseline."
    : `${drifts.length} drift(s) detected: ${bySeverity("critical")} critical, ${bySeverity("warning")} warning(s), ${bySeverity("info")} informational`;

  if (drifts.length > 0) {
    warn(`Architecture drift: ${summary}`);
  } else {
    info("Architecture drift: no drift detected");
  }

  return {
    hasBaseline: true,
    baselineTimestamp: baseline.timestamp,
    drifts,
    summary,
  };
}
@@ -0,0 +1,261 @@
1
+ // GraphQL schema detection — discovers schemas, queries, mutations, subscriptions, and resolvers
2
+
3
+ import fs from "node:fs/promises";
4
+ import path from "node:path";
5
+ import { info } from "../utils/logger.js";
6
+
7
// File extensions that mark dedicated GraphQL schema documents.
const GRAPHQL_EXTENSIONS = [".graphql", ".gql"];

// SDL construct patterns.
// NOTE(review): TYPE_PATTERNS is not referenced anywhere in this module —
// extractSchemaTypes builds its own fresh regex literals instead. Candidate
// for removal or for extractSchemaTypes to consume. If reused, fresh copies
// are needed: these are /g (stateful lastIndex) regexes shared at module scope.
const TYPE_PATTERNS = {
  query: /type\s+Query\s*\{([^}]*)\}/gs,
  mutation: /type\s+Mutation\s*\{([^}]*)\}/gs,
  subscription: /type\s+Subscription\s*\{([^}]*)\}/gs,
  objectType: /type\s+(\w+)(?:\s+implements\s+[\w&\s]+)?\s*\{([^}]*)\}/gs,
  inputType: /input\s+(\w+)\s*\{([^}]*)\}/gs,
  enumType: /enum\s+(\w+)\s*\{([^}]*)\}/gs,
  interfaceType: /interface\s+(\w+)\s*\{([^}]*)\}/gs,
  unionType: /union\s+(\w+)\s*=\s*([^;\n]+)/g,
  scalarType: /scalar\s+(\w+)/g,
  directive: /directive\s+@(\w+)/g,
};

// Heuristics marking a JS/TS file as containing GraphQL resolver code.
// Any single match classifies the file as a resolver file.
const RESOLVER_PATTERNS = [
  /(?:Query|Mutation|Subscription)\s*:\s*\{/,
  /resolvers?\s*=\s*\{/,
  /createResolversMap/,
  /\bresolveType\b/,
  /fieldResolver/,
];

// Fingerprints of well-known GraphQL server/client libraries, matched against
// raw file content (imports, identifiers, decorators).
// NOTE(review): `useQuery`/`useMutation` also appear in non-GraphQL libraries
// (e.g. TanStack Query), so "Apollo Client" may be a false positive — verify.
const SCHEMA_LIBRARY_PATTERNS = [
  { name: "Apollo Server", pattern: /ApolloServer|@apollo\/server|apollo-server/ },
  { name: "GraphQL Yoga", pattern: /graphql-yoga|createYoga/ },
  { name: "Mercurius", pattern: /mercurius/ },
  { name: "graphql-js", pattern: /graphql\b.*buildSchema|GraphQLSchema|GraphQLObjectType/ },
  { name: "type-graphql", pattern: /type-graphql|@Resolver|@Query|@Mutation/ },
  { name: "Nexus", pattern: /nexus|makeSchema|objectType\(/ },
  { name: "Pothos", pattern: /pothos|SchemaBuilder/ },
  { name: "Hasura", pattern: /hasura/ },
  { name: "Relay", pattern: /relay-runtime|RelayEnvironment/ },
  { name: "urql", pattern: /urql|@urql/ },
  { name: "Apollo Client", pattern: /ApolloClient|@apollo\/client|apollo-client|useQuery|useMutation/ },
];
43
+
44
// Parse the body of an SDL block (the text between { and }) into field
// descriptors. Blank lines and full-line `#` comments are skipped; trailing
// `#` comments on a field's type are stripped.
function parseFieldsFromBlock(block) {
  const FIELD_RE = /^(\w+)\s*(?:\([^)]*\))?\s*:\s*(.+)/;
  return block
    .split("\n")
    .map(raw => raw.trim())
    .filter(line => line.length > 0 && !line.startsWith("#"))
    .flatMap(line => {
      const parsed = FIELD_RE.exec(line);
      if (!parsed) return [];
      const [, name, rawType] = parsed;
      return [{ name, type: rawType.replace(/\s*#.*/, "").trim() }];
    });
}
55
+
56
// Read a file as UTF-8, resolving to "" when it is missing or unreadable —
// callers treat empty content as "skip this file".
async function readFileSafe(filePath) {
  return fs.readFile(filePath, "utf8").catch(() => "");
}
63
+
64
/**
 * Analyze a repository file list for GraphQL usage.
 *
 * Two-phase scan: dedicated .graphql/.gql documents are parsed directly;
 * JS/TS files are then scanned for library fingerprints, gql-tagged inline
 * SDL, raw SDL-looking text, and resolver heuristics. All findings accumulate
 * into one shared `result` object (extractSchemaTypes dedupes by name).
 *
 * @param {string[]} files - Repo-relative file paths to consider
 * @param {string} repoRoot - Absolute root the paths are resolved against
 * @returns {Promise<object>} result with detected flag, type/field lists, and summary
 */
export async function analyzeGraphQL(files, repoRoot) {
  const result = {
    detected: false,
    schemaFiles: [],
    types: [],
    queries: [],
    mutations: [],
    subscriptions: [],
    enums: [],
    inputs: [],
    interfaces: [],
    unions: [],
    scalars: [],
    directives: [],
    resolverFiles: [],
    libraries: [],
    summary: null,
  };

  // Phase 1: Find dedicated .graphql/.gql schema files
  const schemaFiles = files.filter(f => GRAPHQL_EXTENSIONS.some(ext => f.endsWith(ext)));

  // Phase 2: Find JS/TS files with inline schema or resolvers
  const codeFiles = files.filter(f =>
    f.endsWith(".js") || f.endsWith(".ts") || f.endsWith(".jsx") || f.endsWith(".tsx")
  );

  // Parse .graphql/.gql files
  // (unreadable files come back as "" from readFileSafe and are skipped)
  for (const file of schemaFiles) {
    const content = await readFileSafe(path.join(repoRoot, file));
    if (!content) continue;
    result.schemaFiles.push(file);
    extractSchemaTypes(content, file, result);
  }

  // Scan code files for inline schemas, resolvers, and libraries
  for (const file of codeFiles) {
    const content = await readFileSafe(path.join(repoRoot, file));
    if (!content) continue;

    // Detect GraphQL libraries
    for (const { name, pattern } of SCHEMA_LIBRARY_PATTERNS) {
      if (pattern.test(content) && !result.libraries.includes(name)) {
        result.libraries.push(name);
      }
    }

    // Detect inline SDL (template literals with gql tag or type definitions)
    // Fresh /g regex per file, so lastIndex never leaks between files.
    const inlineSdlPattern = /(?:gql|graphql)\s*`([^`]+)`/gs;
    let match;
    while ((match = inlineSdlPattern.exec(content)) !== null) {
      extractSchemaTypes(match[1], file, result);
    }

    // Also detect raw type definitions in string literals
    // NOTE(review): this feeds the WHOLE code file to the SDL regexes, so a
    // .ts file that happens to contain "type Query {" may also have its TS
    // `interface`/`enum` declarations picked up as GraphQL constructs — verify.
    if (/type\s+(?:Query|Mutation|Subscription)\s*\{/.test(content)) {
      extractSchemaTypes(content, file, result);
    }

    // Detect resolver files
    const isResolver = RESOLVER_PATTERNS.some(p => p.test(content));
    if (isResolver && !result.resolverFiles.includes(file)) {
      result.resolverFiles.push(file);
    }
  }

  // Any one signal (schema file, extracted type, resolver, or library) counts
  // as "GraphQL present".
  result.detected = result.schemaFiles.length > 0 ||
    result.types.length > 0 ||
    result.resolverFiles.length > 0 ||
    result.libraries.length > 0;

  if (result.detected) {
    result.summary = buildSummary(result);
    info(`GraphQL detected: ${result.types.length} types, ${result.queries.length} queries, ${result.mutations.length} mutations`);
  }

  return result;
}
142
+
143
/**
 * Extract GraphQL SDL constructs from `content` and accumulate them into the
 * shared `result` object (deduplicated by name across all sources).
 *
 * @param {string} content - SDL text (a schema file or an inline gql template body)
 * @param {string} sourceFile - File the content came from, recorded on each entry
 * @param {object} result - Shared accumulator from analyzeGraphQL
 */
function extractSchemaTypes(content, sourceFile, result) {
  // Run a fresh /g regex over the content, invoking `onMatch` per match.
  const scan = (regex, onMatch) => {
    let m;
    while ((m = regex.exec(content)) !== null) onMatch(m);
  };

  const ROOT_TYPES = ["Query", "Mutation", "Subscription"];

  // Object types (root operation types are collected separately below).
  scan(/type\s+(\w+)(?:\s+implements\s+([\w&\s]+))?\s*\{([^}]*)\}/gs, (m) => {
    const name = m[1];
    if (ROOT_TYPES.includes(name)) return;
    if (result.types.some(t => t.name === name)) return;
    result.types.push({
      name,
      fields: parseFieldsFromBlock(m[3]),
      implements: m[2] ? m[2].split("&").map(s => s.trim()) : [],
      source: sourceFile,
    });
  });

  // Root operation fields → queries / mutations / subscriptions.
  const rootTargets = [
    ["Query", result.queries],
    ["Mutation", result.mutations],
    ["Subscription", result.subscriptions],
  ];
  for (const [rootName, target] of rootTargets) {
    scan(new RegExp(`type\\s+${rootName}\\s*\\{([^}]*)\\}`, "gs"), (m) => {
      for (const field of parseFieldsFromBlock(m[1])) {
        if (!target.some(existing => existing.name === field.name)) {
          target.push({ ...field, source: sourceFile });
        }
      }
    });
  }

  // Enums: values are the non-empty, non-comment lines of the body.
  scan(/enum\s+(\w+)\s*\{([^}]*)\}/gs, (m) => {
    if (result.enums.some(e => e.name === m[1])) return;
    const values = m[2].split("\n").map(l => l.trim()).filter(l => l && !l.startsWith("#"));
    result.enums.push({ name: m[1], values, source: sourceFile });
  });

  // Input types and interfaces share the { name, fields, source } shape.
  const fieldBearing = [
    [/input\s+(\w+)\s*\{([^}]*)\}/gs, result.inputs],
    [/interface\s+(\w+)\s*\{([^}]*)\}/gs, result.interfaces],
  ];
  for (const [regex, target] of fieldBearing) {
    scan(regex, (m) => {
      if (!target.some(entry => entry.name === m[1])) {
        target.push({ name: m[1], fields: parseFieldsFromBlock(m[2]), source: sourceFile });
      }
    });
  }

  // Unions: `union Foo = A | B`
  scan(/union\s+(\w+)\s*=\s*([^;\n]+)/g, (m) => {
    if (!result.unions.some(u => u.name === m[1])) {
      result.unions.push({
        name: m[1],
        members: m[2].split("|").map(s => s.trim()),
        source: sourceFile,
      });
    }
  });

  // Custom scalars and directives are tracked as plain name lists.
  scan(/scalar\s+(\w+)/g, (m) => {
    if (!result.scalars.includes(m[1])) result.scalars.push(m[1]);
  });
  scan(/directive\s+@(\w+)/g, (m) => {
    if (!result.directives.includes(m[1])) result.directives.push(m[1]);
  });
}
247
+
248
// Render a one-line, " · "-separated summary of the GraphQL findings.
// Zero-count sections are omitted; libraries are listed by name at the end.
function buildSummary(result) {
  const counted = [
    [result.schemaFiles.length, n => `${n} schema file(s)`],
    [result.types.length, n => `${n} object type(s)`],
    [result.queries.length, n => `${n} quer${n === 1 ? "y" : "ies"}`],
    [result.mutations.length, n => `${n} mutation(s)`],
    [result.subscriptions.length, n => `${n} subscription(s)`],
    [result.enums.length, n => `${n} enum(s)`],
    [result.inputs.length, n => `${n} input type(s)`],
    [result.interfaces.length, n => `${n} interface(s)`],
    [result.resolverFiles.length, n => `${n} resolver file(s)`],
  ];
  const parts = counted.filter(([n]) => n > 0).map(([n, fmt]) => fmt(n));
  if (result.libraries.length > 0) {
    parts.push(`libraries: ${result.libraries.join(", ")}`);
  }
  return parts.join(" · ");
}
@@ -0,0 +1,171 @@
1
+ // TypeScript type graph analysis — maps interfaces, types, classes, and their relationships
2
+
3
+ import fs from "node:fs/promises";
4
+ import path from "node:path";
5
+ import { info } from "../utils/logger.js";
6
+
7
// File extensions considered TypeScript sources.
const TS_EXTENSIONS = [".ts", ".tsx"];

// Patterns for extracting type declarations
// All are /g regexes; analyzeTypeScript clones each one (new RegExp(src, "g"))
// per file, so stateful lastIndex never leaks between scans.
const INTERFACE_PATTERN = /(?:export\s+)?interface\s+(\w+)(?:\s+extends\s+([\w,\s<>]+))?\s*\{/g;
// NOTE(review): `[^;]+;` stops at the FIRST semicolon, so aliases whose body
// contains `;` (e.g. object types `{ a: string; b: number }`) are truncated —
// confirm this is an acceptable approximation.
const TYPE_ALIAS_PATTERN = /(?:export\s+)?type\s+(\w+)(?:<[^>]*>)?\s*=\s*([^;]+);/g;
const CLASS_PATTERN = /(?:export\s+)?(?:abstract\s+)?class\s+(\w+)(?:\s+extends\s+(\w+))?(?:\s+implements\s+([\w,\s]+))?\s*\{/g;
const ENUM_PATTERN = /(?:export\s+)?(?:const\s+)?enum\s+(\w+)\s*\{/g;
const GENERIC_CONSTRAINT = /<(\w+)\s+extends\s+(\w+)/g;
15
+
16
// Read a file as UTF-8, resolving to "" when it is missing or unreadable —
// callers treat empty content as "skip this file".
async function readFileSafe(filePath) {
  return fs.readFile(filePath, "utf8").catch(() => "");
}
23
+
24
/**
 * Extract named type references from a TS type expression, skipping primitive
 * keywords and common built-in/utility generics. Only PascalCase-looking
 * identifiers (leading capital) are considered candidates.
 *
 * @param {string} typeExpression - Right-hand side of a type alias or a constraint
 * @returns {string[]} Unique referenced type names, in first-seen order
 */
function extractTypeReferences(typeExpression) {
  // Lowercase primitive keywords; matched case-insensitively so "String",
  // "Number" etc. are also excluded (as in the original comparison chain).
  const primitives = new Set([
    "string", "number", "boolean", "void", "null", "undefined", "never", "any",
    "unknown", "object", "bigint", "symbol", "true", "false",
  ]);
  // Built-in containers and utility types that are not project-declared types.
  const builtins = new Set([
    "Array", "Promise", "Record", "Partial", "Required", "Omit", "Pick",
    "Readonly", "Map", "Set", "Date", "RegExp", "Error", "Buffer",
  ]);
  const refs = new Set();
  // Candidate names: word starting with an uppercase letter.
  const namePattern = /\b([A-Z]\w*)\b/g;
  let match;
  while ((match = namePattern.exec(typeExpression)) !== null) {
    const name = match[1];
    if (!primitives.has(name.toLowerCase()) && !builtins.has(name)) {
      refs.add(name);
    }
  }
  return [...refs];
}
45
+
46
/**
 * Build a lightweight TypeScript type graph from a repository file list.
 *
 * Each .ts/.tsx file is scanned with regex heuristics (not a real parser) for
 * interfaces, type aliases, classes, enums, and generic constraints. Candidate
 * relationships are accumulated eagerly, then filtered to edges where BOTH
 * endpoints are types declared somewhere in the scanned files, and deduped.
 *
 * @param {string[]} files - Repo-relative file paths to consider
 * @param {string} repoRoot - Absolute root the paths are resolved against
 * @returns {Promise<object>} result with declarations, relationships, and summary
 */
export async function analyzeTypeScript(files, repoRoot) {
  const result = {
    detected: false,
    interfaces: [],
    typeAliases: [],
    classes: [],
    enums: [],
    relationships: [], // { from, to, type: extends|implements|references }
    files: [],
    summary: null,
  };

  const tsFiles = files.filter(f => TS_EXTENSIONS.some(ext => f.endsWith(ext)));
  if (tsFiles.length === 0) return result;

  result.detected = true;
  const typeNames = new Set(); // track all declared type names for relationship resolution

  for (const file of tsFiles) {
    const content = await readFileSafe(path.join(repoRoot, file));
    if (!content) continue;

    let hasTypes = false;

    // Interfaces
    // Each module-level /g pattern is cloned per file so lastIndex state
    // never carries over between scans.
    let match;
    const ifaceRegex = new RegExp(INTERFACE_PATTERN.source, "g");
    while ((match = ifaceRegex.exec(content)) !== null) {
      const name = match[1];
      // Strip generic arguments from parents: "Base<T>" → "Base".
      const extends_ = match[2] ? match[2].split(",").map(s => s.replace(/<.*/, "").trim()) : [];
      typeNames.add(name);
      result.interfaces.push({ name, extends: extends_, source: file });
      hasTypes = true;

      for (const parent of extends_) {
        result.relationships.push({ from: name, to: parent, type: "extends" });
      }
    }

    // Type aliases
    const typeRegex = new RegExp(TYPE_ALIAS_PATTERN.source, "g");
    while ((match = typeRegex.exec(content)) !== null) {
      const name = match[1];
      const definition = match[2].trim();
      const refs = extractTypeReferences(definition);
      typeNames.add(name);
      result.typeAliases.push({ name, refs, source: file });
      hasTypes = true;

      for (const ref of refs) {
        result.relationships.push({ from: name, to: ref, type: "references" });
      }
    }

    // Classes
    const classRegex = new RegExp(CLASS_PATTERN.source, "g");
    while ((match = classRegex.exec(content)) !== null) {
      const name = match[1];
      const extends_ = match[2] || null;
      const implements_ = match[3] ? match[3].split(",").map(s => s.trim()) : [];
      typeNames.add(name);
      result.classes.push({ name, extends: extends_, implements: implements_, source: file });
      hasTypes = true;

      if (extends_) {
        result.relationships.push({ from: name, to: extends_, type: "extends" });
      }
      for (const iface of implements_) {
        result.relationships.push({ from: name, to: iface, type: "implements" });
      }
    }

    // Enums
    const enumRegex = new RegExp(ENUM_PATTERN.source, "g");
    while ((match = enumRegex.exec(content)) !== null) {
      typeNames.add(match[1]);
      result.enums.push({ name: match[1], source: file });
      hasTypes = true;
    }

    // Generic constraints (e.g., <T extends SomeType>)
    // NOTE(review): `from` here is the generic parameter name (e.g. "T"),
    // which is rarely in typeNames, so most constrained-by edges are dropped
    // by the filter below — confirm whether `from` should be the declaring type.
    const constraintRegex = new RegExp(GENERIC_CONSTRAINT.source, "g");
    while ((match = constraintRegex.exec(content)) !== null) {
      // Only track if the constraint references a known (non-primitive) type
      const refs = extractTypeReferences(match[2]);
      for (const ref of refs) {
        result.relationships.push({ from: match[1], to: ref, type: "constrained-by" });
      }
    }

    if (hasTypes) {
      result.files.push(file);
    }
  }

  // Filter relationships to only include edges where both sides are project-declared types
  result.relationships = result.relationships.filter(
    r => typeNames.has(r.from) && typeNames.has(r.to) && r.from !== r.to
  );

  // Deduplicate relationships
  const relKey = r => `${r.from}→${r.to}:${r.type}`;
  const seen = new Set();
  result.relationships = result.relationships.filter(r => {
    const k = relKey(r);
    if (seen.has(k)) return false;
    seen.add(k);
    return true;
  });

  result.summary = buildSummary(result);
  info(`TypeScript types: ${result.interfaces.length} interfaces, ${result.typeAliases.length} aliases, ${result.classes.length} classes, ${result.enums.length} enums`);

  return result;
}
161
+
162
// Render a one-line, " · "-separated summary of the TypeScript findings.
// Zero-count sections are omitted.
function buildSummary(result) {
  const counts = [
    [result.files.length, "file(s) with type declarations"],
    [result.interfaces.length, "interface(s)"],
    [result.typeAliases.length, "type alias(es)"],
    [result.classes.length, "class(es)"],
    [result.enums.length, "enum(s)"],
    [result.relationships.length, "relationship(s)"],
  ];
  return counts
    .filter(([n]) => n > 0)
    .map(([n, label]) => `${n} ${label}`)
    .join(" · ");
}