@aiready/context-analyzer 0.5.3 → 0.7.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.js CHANGED
@@ -21,20 +21,252 @@ var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: tru
  var index_exports = {};
  __export(index_exports, {
  analyzeContext: () => analyzeContext,
+ buildCoUsageMatrix: () => buildCoUsageMatrix,
+ buildTypeGraph: () => buildTypeGraph,
+ calculateDomainConfidence: () => calculateDomainConfidence,
+ findConsolidationCandidates: () => findConsolidationCandidates,
+ findSemanticClusters: () => findSemanticClusters,
  generateSummary: () => generateSummary,
- getSmartDefaults: () => getSmartDefaults
+ getCoUsageData: () => getCoUsageData,
+ getSmartDefaults: () => getSmartDefaults,
+ inferDomainFromSemantics: () => inferDomainFromSemantics
  });
  module.exports = __toCommonJS(index_exports);
  var import_core2 = require("@aiready/core");
 
  // src/analyzer.ts
  var import_core = require("@aiready/core");
+
+ // src/semantic-analysis.ts
+ function buildCoUsageMatrix(graph) {
+ const coUsageMatrix = /* @__PURE__ */ new Map();
+ for (const [sourceFile, node] of graph.nodes) {
+ const imports = node.imports;
+ for (let i = 0; i < imports.length; i++) {
+ const fileA = imports[i];
+ if (!coUsageMatrix.has(fileA)) {
+ coUsageMatrix.set(fileA, /* @__PURE__ */ new Map());
+ }
+ for (let j = i + 1; j < imports.length; j++) {
+ const fileB = imports[j];
+ const fileAUsage = coUsageMatrix.get(fileA);
+ fileAUsage.set(fileB, (fileAUsage.get(fileB) || 0) + 1);
+ if (!coUsageMatrix.has(fileB)) {
+ coUsageMatrix.set(fileB, /* @__PURE__ */ new Map());
+ }
+ const fileBUsage = coUsageMatrix.get(fileB);
+ fileBUsage.set(fileA, (fileBUsage.get(fileA) || 0) + 1);
+ }
+ }
+ }
+ return coUsageMatrix;
+ }
+ function buildTypeGraph(graph) {
+ const typeGraph = /* @__PURE__ */ new Map();
+ for (const [file, node] of graph.nodes) {
+ for (const exp of node.exports) {
+ if (exp.typeReferences) {
+ for (const typeRef of exp.typeReferences) {
+ if (!typeGraph.has(typeRef)) {
+ typeGraph.set(typeRef, /* @__PURE__ */ new Set());
+ }
+ typeGraph.get(typeRef).add(file);
+ }
+ }
+ }
+ }
+ return typeGraph;
+ }
+ function findSemanticClusters(coUsageMatrix, minCoUsage = 3) {
+ const clusters = /* @__PURE__ */ new Map();
+ const visited = /* @__PURE__ */ new Set();
+ for (const [file, coUsages] of coUsageMatrix) {
+ if (visited.has(file)) continue;
+ const cluster = [file];
+ visited.add(file);
+ for (const [relatedFile, count] of coUsages) {
+ if (count >= minCoUsage && !visited.has(relatedFile)) {
+ cluster.push(relatedFile);
+ visited.add(relatedFile);
+ }
+ }
+ if (cluster.length > 1) {
+ clusters.set(file, cluster);
+ }
+ }
+ return clusters;
+ }
+ function calculateDomainConfidence(signals) {
+ const weights = {
+ coUsage: 0.35,
+ // Strongest signal: actual usage patterns
+ typeReference: 0.3,
+ // Strong signal: shared types
+ exportName: 0.15,
+ // Medium signal: identifier semantics
+ importPath: 0.1,
+ // Weaker signal: path structure
+ folderStructure: 0.1
+ // Weakest signal: organization convention
+ };
+ let confidence = 0;
+ if (signals.coUsage) confidence += weights.coUsage;
+ if (signals.typeReference) confidence += weights.typeReference;
+ if (signals.exportName) confidence += weights.exportName;
+ if (signals.importPath) confidence += weights.importPath;
+ if (signals.folderStructure) confidence += weights.folderStructure;
+ return confidence;
+ }
+ function inferDomainFromSemantics(file, exportName, graph, coUsageMatrix, typeGraph, exportTypeRefs) {
+ const assignments = [];
+ const domainSignals = /* @__PURE__ */ new Map();
+ const coUsages = coUsageMatrix.get(file) || /* @__PURE__ */ new Map();
+ const strongCoUsages = Array.from(coUsages.entries()).filter(([_, count]) => count >= 3).map(([coFile]) => coFile);
+ for (const coFile of strongCoUsages) {
+ const coNode = graph.nodes.get(coFile);
+ if (coNode) {
+ for (const exp of coNode.exports) {
+ if (exp.inferredDomain && exp.inferredDomain !== "unknown") {
+ const domain = exp.inferredDomain;
+ if (!domainSignals.has(domain)) {
+ domainSignals.set(domain, {
+ coUsage: false,
+ typeReference: false,
+ exportName: false,
+ importPath: false,
+ folderStructure: false
+ });
+ }
+ domainSignals.get(domain).coUsage = true;
+ }
+ }
+ }
+ }
+ if (exportTypeRefs) {
+ for (const typeRef of exportTypeRefs) {
+ const filesWithType = typeGraph.get(typeRef);
+ if (filesWithType) {
+ for (const typeFile of filesWithType) {
+ if (typeFile !== file) {
+ const typeNode = graph.nodes.get(typeFile);
+ if (typeNode) {
+ for (const exp of typeNode.exports) {
+ if (exp.inferredDomain && exp.inferredDomain !== "unknown") {
+ const domain = exp.inferredDomain;
+ if (!domainSignals.has(domain)) {
+ domainSignals.set(domain, {
+ coUsage: false,
+ typeReference: false,
+ exportName: false,
+ importPath: false,
+ folderStructure: false
+ });
+ }
+ domainSignals.get(domain).typeReference = true;
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ for (const [domain, signals] of domainSignals) {
+ const confidence = calculateDomainConfidence(signals);
+ if (confidence >= 0.3) {
+ assignments.push({ domain, confidence, signals });
+ }
+ }
+ assignments.sort((a, b) => b.confidence - a.confidence);
+ return assignments;
+ }
+ function getCoUsageData(file, coUsageMatrix) {
+ const coImportedWith = coUsageMatrix.get(file) || /* @__PURE__ */ new Map();
+ const sharedImporters = [];
+ return {
+ file,
+ coImportedWith,
+ sharedImporters
+ };
+ }
+ function findConsolidationCandidates(graph, coUsageMatrix, typeGraph, minCoUsage = 5, minSharedTypes = 2) {
+ const candidates = [];
+ for (const [fileA, coUsages] of coUsageMatrix) {
+ const nodeA = graph.nodes.get(fileA);
+ if (!nodeA) continue;
+ for (const [fileB, coUsageCount] of coUsages) {
+ if (fileB <= fileA) continue;
+ if (coUsageCount < minCoUsage) continue;
+ const nodeB = graph.nodes.get(fileB);
+ if (!nodeB) continue;
+ const typesA = new Set(nodeA.exports.flatMap((e) => e.typeReferences || []));
+ const typesB = new Set(nodeB.exports.flatMap((e) => e.typeReferences || []));
+ const sharedTypes = Array.from(typesA).filter((t) => typesB.has(t));
+ if (sharedTypes.length >= minSharedTypes) {
+ const strength = coUsageCount / 10 + sharedTypes.length / 5;
+ candidates.push({
+ files: [fileA, fileB],
+ reason: `High co-usage (${coUsageCount}x) and ${sharedTypes.length} shared types`,
+ strength
+ });
+ } else if (coUsageCount >= minCoUsage * 2) {
+ const strength = coUsageCount / 10;
+ candidates.push({
+ files: [fileA, fileB],
+ reason: `Very high co-usage (${coUsageCount}x)`,
+ strength
+ });
+ }
+ }
+ }
+ candidates.sort((a, b) => b.strength - a.strength);
+ return candidates;
+ }
+
+ // src/analyzer.ts
+ function extractDomainKeywordsFromPaths(files) {
+ const folderNames = /* @__PURE__ */ new Set();
+ for (const { file } of files) {
+ const segments = file.split("/");
+ const skipFolders = /* @__PURE__ */ new Set(["src", "lib", "dist", "build", "node_modules", "test", "tests", "__tests__", "spec", "e2e", "scripts", "components", "utils", "helpers", "util", "helper", "api", "apis"]);
+ for (const segment of segments) {
+ const normalized = segment.toLowerCase();
+ if (normalized && !skipFolders.has(normalized) && !normalized.includes(".")) {
+ const singular = singularize(normalized);
+ folderNames.add(singular);
+ }
+ }
+ }
+ return Array.from(folderNames);
+ }
+ function singularize(word) {
+ const irregulars = {
+ people: "person",
+ children: "child",
+ men: "man",
+ women: "woman"
+ };
+ if (irregulars[word]) {
+ return irregulars[word];
+ }
+ if (word.endsWith("ies")) {
+ return word.slice(0, -3) + "y";
+ }
+ if (word.endsWith("ses")) {
+ return word.slice(0, -2);
+ }
+ if (word.endsWith("s") && word.length > 3) {
+ return word.slice(0, -1);
+ }
+ return word;
+ }
  function buildDependencyGraph(files) {
  const nodes = /* @__PURE__ */ new Map();
  const edges = /* @__PURE__ */ new Map();
+ const autoDetectedKeywords = extractDomainKeywordsFromPaths(files);
  for (const { file, content } of files) {
  const imports = extractImportsFromContent(content);
- const exports2 = extractExports(content);
+ const exports2 = extractExportsWithAST(content, file, { domainKeywords: autoDetectedKeywords }, imports);
  const tokenCost = (0, import_core.estimateTokens)(content);
  const linesOfCode = content.split("\n").length;
  nodes.set(file, {
@@ -46,7 +278,28 @@ function buildDependencyGraph(files) {
  });
  edges.set(file, new Set(imports));
  }
- return { nodes, edges };
+ const graph = { nodes, edges };
+ const coUsageMatrix = buildCoUsageMatrix(graph);
+ const typeGraph = buildTypeGraph(graph);
+ graph.coUsageMatrix = coUsageMatrix;
+ graph.typeGraph = typeGraph;
+ for (const [file, node] of nodes) {
+ for (const exp of node.exports) {
+ const semanticAssignments = inferDomainFromSemantics(
+ file,
+ exp.name,
+ graph,
+ coUsageMatrix,
+ typeGraph,
+ exp.typeReferences
+ );
+ exp.domains = semanticAssignments;
+ if (semanticAssignments.length > 0) {
+ exp.inferredDomain = semanticAssignments[0].domain;
+ }
+ }
+ }
+ return graph;
  }
  function extractImportsFromContent(content) {
  const imports = [];
@@ -62,7 +315,7 @@ function extractImportsFromContent(content) {
  let match;
  while ((match = pattern.exec(content)) !== null) {
  const importPath = match[1];
- if (importPath && !importPath.startsWith("@") && !importPath.startsWith("node:")) {
+ if (importPath && !importPath.startsWith("node:")) {
  imports.push(importPath);
  }
  }
@@ -149,26 +402,7 @@ function detectCircularDependencies(graph) {
  return cycles;
  }
  function calculateCohesion(exports2, filePath) {
- if (exports2.length === 0) return 1;
- if (exports2.length === 1) return 1;
- if (filePath && isTestFile(filePath)) {
- return 1;
- }
- const domains = exports2.map((e) => e.inferredDomain || "unknown");
- const domainCounts = /* @__PURE__ */ new Map();
- for (const domain of domains) {
- domainCounts.set(domain, (domainCounts.get(domain) || 0) + 1);
- }
- const total = domains.length;
- let entropy = 0;
- for (const count of domainCounts.values()) {
- const p = count / total;
- if (p > 0) {
- entropy -= p * Math.log2(p);
- }
- }
- const maxEntropy = Math.log2(total);
- return maxEntropy > 0 ? 1 - entropy / maxEntropy : 1;
+ return calculateEnhancedCohesion(exports2, filePath);
  }
  function isTestFile(filePath) {
  const lower = filePath.toLowerCase();
@@ -221,7 +455,7 @@ function detectModuleClusters(graph) {
  }
  return clusters.sort((a, b) => b.fragmentationScore - a.fragmentationScore);
  }
- function extractExports(content) {
+ function extractExports(content, filePath, domainOptions, fileImports) {
  const exports2 = [];
  const patterns = [
  /export\s+function\s+(\w+)/g,
@@ -244,15 +478,20 @@ function extractExports(content) {
  while ((match = pattern.exec(content)) !== null) {
  const name = match[1] || "default";
  const type = types[index];
- const inferredDomain = inferDomain(name);
+ const inferredDomain = inferDomain(name, filePath, domainOptions, fileImports);
  exports2.push({ name, type, inferredDomain });
  }
  });
  return exports2;
  }
- function inferDomain(name) {
+ function inferDomain(name, filePath, domainOptions, fileImports) {
  const lower = name.toLowerCase();
- const domainKeywords = [
+ const tokens = Array.from(
+ new Set(
+ lower.replace(/([a-z0-9])([A-Z])/g, "$1 $2").replace(/[^a-z0-9]+/gi, " ").split(" ").filter(Boolean)
+ )
+ );
+ const defaultKeywords = [
  "authentication",
  "authorization",
  "payment",
@@ -269,14 +508,11 @@ function inferDomain(name) {
  "config",
  "model",
  "view",
- "auth",
- "api",
- "helper",
- "util"
+ "auth"
  ];
+ const domainKeywords = domainOptions?.domainKeywords && domainOptions.domainKeywords.length ? [...domainOptions.domainKeywords, ...defaultKeywords] : defaultKeywords;
  for (const keyword of domainKeywords) {
- const wordBoundaryPattern = new RegExp(`\\b${keyword}\\b`, "i");
- if (wordBoundaryPattern.test(name)) {
+ if (tokens.includes(keyword)) {
  return keyword;
  }
  }
@@ -285,6 +521,37 @@ function inferDomain(name) {
  return keyword;
  }
  }
+ if (fileImports && fileImports.length > 0) {
+ for (const importPath of fileImports) {
+ const allSegments = importPath.split("/");
+ const relevantSegments = allSegments.filter((s) => {
+ if (!s) return false;
+ if (s === "." || s === "..") return false;
+ if (s.startsWith("@") && s.length === 1) return false;
+ return true;
+ }).map((s) => s.startsWith("@") ? s.slice(1) : s);
+ for (const segment of relevantSegments) {
+ const segLower = segment.toLowerCase();
+ const singularSegment = singularize(segLower);
+ for (const keyword of domainKeywords) {
+ if (singularSegment === keyword || segLower === keyword || segLower.includes(keyword)) {
+ return keyword;
+ }
+ }
+ }
+ }
+ }
+ if (filePath) {
+ const pathSegments = filePath.toLowerCase().split("/");
+ for (const segment of pathSegments) {
+ const singularSegment = singularize(segment);
+ for (const keyword of domainKeywords) {
+ if (singularSegment === keyword || segment === keyword || segment.includes(keyword)) {
+ return keyword;
+ }
+ }
+ }
+ }
  return "unknown";
  }
  function generateConsolidationPlan(domain, files, targetFiles) {
@@ -313,6 +580,78 @@ function generateConsolidationPlan(domain, files, targetFiles) {
  );
  return plan;
  }
+ function extractExportsWithAST(content, filePath, domainOptions, fileImports) {
+ try {
+ const { exports: astExports } = (0, import_core.parseFileExports)(content, filePath);
+ return astExports.map((exp) => ({
+ name: exp.name,
+ type: exp.type,
+ inferredDomain: inferDomain(exp.name, filePath, domainOptions, fileImports),
+ imports: exp.imports,
+ dependencies: exp.dependencies
+ }));
+ } catch (error) {
+ return extractExports(content, filePath, domainOptions, fileImports);
+ }
+ }
+ function calculateEnhancedCohesion(exports2, filePath) {
+ if (exports2.length === 0) return 1;
+ if (exports2.length === 1) return 1;
+ if (filePath && isTestFile(filePath)) {
+ return 1;
+ }
+ const domainCohesion = calculateDomainCohesion(exports2);
+ const hasImportData = exports2.some((e) => e.imports && e.imports.length > 0);
+ if (!hasImportData) {
+ return domainCohesion;
+ }
+ const importCohesion = calculateImportBasedCohesion(exports2);
+ return importCohesion * 0.6 + domainCohesion * 0.4;
+ }
+ function calculateImportBasedCohesion(exports2) {
+ const exportsWithImports = exports2.filter((e) => e.imports && e.imports.length > 0);
+ if (exportsWithImports.length < 2) {
+ return 1;
+ }
+ let totalSimilarity = 0;
+ let comparisons = 0;
+ for (let i = 0; i < exportsWithImports.length; i++) {
+ for (let j = i + 1; j < exportsWithImports.length; j++) {
+ const exp1 = exportsWithImports[i];
+ const exp2 = exportsWithImports[j];
+ const similarity = calculateJaccardSimilarity(exp1.imports, exp2.imports);
+ totalSimilarity += similarity;
+ comparisons++;
+ }
+ }
+ return comparisons > 0 ? totalSimilarity / comparisons : 1;
+ }
+ function calculateJaccardSimilarity(arr1, arr2) {
+ if (arr1.length === 0 && arr2.length === 0) return 1;
+ if (arr1.length === 0 || arr2.length === 0) return 0;
+ const set1 = new Set(arr1);
+ const set2 = new Set(arr2);
+ const intersection = new Set([...set1].filter((x) => set2.has(x)));
+ const union = /* @__PURE__ */ new Set([...set1, ...set2]);
+ return intersection.size / union.size;
+ }
+ function calculateDomainCohesion(exports2) {
+ const domains = exports2.map((e) => e.inferredDomain || "unknown");
+ const domainCounts = /* @__PURE__ */ new Map();
+ for (const domain of domains) {
+ domainCounts.set(domain, (domainCounts.get(domain) || 0) + 1);
+ }
+ const total = domains.length;
+ let entropy = 0;
+ for (const count of domainCounts.values()) {
+ const p = count / total;
+ if (p > 0) {
+ entropy -= p * Math.log2(p);
+ }
+ }
+ const maxEntropy = Math.log2(total);
+ return maxEntropy > 0 ? 1 - entropy / maxEntropy : 1;
+ }
 
  // src/index.ts
  async function getSmartDefaults(directory, userOptions) {
@@ -646,6 +985,13 @@ function downgradeSeverity(s) {
  // Annotate the CommonJS export names for ESM import in node:
  0 && (module.exports = {
  analyzeContext,
+ buildCoUsageMatrix,
+ buildTypeGraph,
+ calculateDomainConfidence,
+ findConsolidationCandidates,
+ findSemanticClusters,
  generateSummary,
- getSmartDefaults
+ getCoUsageData,
+ getSmartDefaults,
+ inferDomainFromSemantics
  });
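
Note: the semantic-analysis helpers added above are now exported from the package entry point, but this diff carries no documentation for them. The TypeScript sketch below shows one way they might be composed; the graph shape is inferred from buildDependencyGraph in the compiled output, and the file names, exports, and thresholds are hypothetical illustration only.

// Sketch only: shapes inferred from the compiled output above; files and exports are made up.
import {
  buildCoUsageMatrix,
  buildTypeGraph,
  findSemanticClusters,
  findConsolidationCandidates,
} from "@aiready/context-analyzer";

// Minimal stand-in for the structure buildDependencyGraph produces:
// nodes maps file -> { imports: string[], exports: { name, typeReferences?: string[] }[] }.
const graph: any = {
  nodes: new Map([
    // Two modules that end up co-imported and that share a type reference.
    ["src/orders/types.ts", { imports: [], exports: [{ name: "Order", typeReferences: ["Money"] }] }],
    ["src/payments/charge.ts", { imports: [], exports: [{ name: "charge", typeReferences: ["Money"] }] }],
    // Two modules that each import both of the above.
    ["src/orders/create.ts", { imports: ["src/orders/types.ts", "src/payments/charge.ts"], exports: [{ name: "createOrder" }] }],
    ["src/orders/cancel.ts", { imports: ["src/orders/types.ts", "src/payments/charge.ts"], exports: [{ name: "cancelOrder" }] }],
  ]),
  edges: new Map(),
};

const coUsage = buildCoUsageMatrix(graph);          // types.ts <-> charge.ts co-imported twice
const typeGraph = buildTypeGraph(graph);            // "Money" -> { types.ts, charge.ts }
const clusters = findSemanticClusters(coUsage, 2);  // thresholds lowered for this tiny example
const candidates = findConsolidationCandidates(graph, coUsage, typeGraph, 2, 1);
// candidates[0].files === ["src/orders/types.ts", "src/payments/charge.ts"]
console.log(clusters, candidates);
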
package/dist/index.mjs CHANGED
@@ -1,10 +1,24 @@
  import {
  analyzeContext,
+ buildCoUsageMatrix,
+ buildTypeGraph,
+ calculateDomainConfidence,
+ findConsolidationCandidates,
+ findSemanticClusters,
  generateSummary,
- getSmartDefaults
- } from "./chunk-EX7HCWAO.mjs";
+ getCoUsageData,
+ getSmartDefaults,
+ inferDomainFromSemantics
+ } from "./chunk-DMRZMS2U.mjs";
  export {
  analyzeContext,
+ buildCoUsageMatrix,
+ buildTypeGraph,
+ calculateDomainConfidence,
+ findConsolidationCandidates,
+ findSemanticClusters,
  generateSummary,
- getSmartDefaults
+ getCoUsageData,
+ getSmartDefaults,
+ inferDomainFromSemantics
  };
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@aiready/context-analyzer",
- "version": "0.5.3",
+ "version": "0.7.0",
  "description": "AI context window cost analysis - detect fragmented code, deep import chains, and expensive context budgets",
  "main": "./dist/index.js",
  "module": "./dist/index.mjs",
@@ -50,7 +50,7 @@
  "commander": "^12.1.0",
  "chalk": "^5.3.0",
  "prompts": "^2.4.2",
- "@aiready/core": "0.5.6"
+ "@aiready/core": "0.7.0"
  },
  "devDependencies": {
  "@types/node": "^22.10.2",
@@ -0,0 +1,156 @@
+ import { describe, it, expect } from 'vitest';
+ import { buildDependencyGraph } from '../analyzer';
+
+ describe('Auto-detection from folder structure', () => {
+ it('should auto-detect domain keywords from folder paths', () => {
+ const files = [
+ {
+ file: 'src/payments/process.ts',
+ content: 'export function processPayment() { return 1; }',
+ },
+ {
+ file: 'src/orders/create.ts',
+ content: 'export function createOrder() { return 2; }',
+ },
+ ];
+
+ const graph = buildDependencyGraph(files);
+ const paymentsNode = graph.nodes.get('src/payments/process.ts');
+ const ordersNode = graph.nodes.get('src/orders/create.ts');
+
+ // Should detect 'payment' from processPayment (now part of auto-detected keywords)
+ expect(paymentsNode?.exports[0].inferredDomain).toBe('payment');
+
+ // Should detect 'order' from createOrder
+ expect(ordersNode?.exports[0].inferredDomain).toBe('order');
+ });
+
+ it('should detect domains from nested folders', () => {
+ const files = [
+ {
+ file: 'src/api/invoices/handler.ts',
+ content: 'export function handleRequest() { return 1; }',
+ },
+ ];
+
+ const graph = buildDependencyGraph(files);
+ const node = graph.nodes.get('src/api/invoices/handler.ts');
+
+ // Should detect 'invoice' from path (invoices folder)
+ expect(node?.exports[0].inferredDomain).toBe('invoice');
+ });
+
+ it('should skip common infrastructure folders', () => {
+ const files = [
+ {
+ file: 'src/utils/helpers/format.ts',
+ content: 'export function formatData() { return 1; }',
+ },
+ ];
+
+ const graph = buildDependencyGraph(files);
+ const node = graph.nodes.get('src/utils/helpers/format.ts');
+
+ // 'utils' and 'helpers' should be skipped, no domain detected
+ expect(node?.exports[0].inferredDomain).toBe('unknown');
+ });
+
+ it('should merge auto-detected with custom keywords', () => {
+ const files = [
+ {
+ file: 'src/receipts/scan.ts',
+ content: 'export function scanReceipt() { return 1; }',
+ },
+ ];
+
+ const graph = buildDependencyGraph(files, {
+ domainKeywords: ['receipt'], // Custom keyword
+ });
+ const node = graph.nodes.get('src/receipts/scan.ts');
+
+ // Should detect 'receipt' from both auto-detection AND custom keywords
+ expect(node?.exports[0].inferredDomain).toBe('receipt');
+ });
+ });
+
+ describe('Import-path domain inference', () => {
+ it('should infer domain from import paths', () => {
+ const files = [
+ {
+ file: 'src/lib/session.ts',
+ content: `
+ import { processPayment } from '../payments/processor';
+ export function createSession() { return 1; }
+ `,
+ },
+ {
+ file: 'src/payments/processor.ts',
+ content: 'export function processPayment() { return 2; }',
+ },
+ ];
+
+ const graph = buildDependencyGraph(files);
+ const sessionNode = graph.nodes.get('src/lib/session.ts');
+
+ // session.ts imports from '../payments/...' so should infer 'payment' domain
+ expect(sessionNode?.exports[0].inferredDomain).toBe('payment');
+ });
+
+ it('should infer domain from absolute import paths', () => {
+ const files = [
+ {
+ file: 'src/components/nav-links.ts',
+ content: `
+ import { getOrders } from '@/orders/service';
+ export function NavLinks() { return 'nav'; }
+ `,
+ },
+ {
+ file: 'src/orders/service.ts',
+ content: 'export function getOrders() { return []; }',
+ },
+ ];
+
+ const graph = buildDependencyGraph(files);
+ const navNode = graph.nodes.get('src/components/nav-links.ts');
+
+ // nav-links.ts imports from '@/orders/...' so should infer 'order' domain
+ expect(navNode?.exports[0].inferredDomain).toBe('order');
+ });
+
+ it('should use identifier name first before import-path fallback', () => {
+ const files = [
+ {
+ file: 'src/lib/handler.ts',
+ content: `
+ import { processPayment } from '../payments/processor';
+ export function processInvoice() { return 1; }
+ `,
+ },
+ ];
+
+ const graph = buildDependencyGraph(files);
+ const node = graph.nodes.get('src/lib/handler.ts');
+
+ // processInvoice should match 'invoice' from identifier, not 'payment' from imports
+ expect(node?.exports[0].inferredDomain).toBe('invoice');
+ });
+
+ it('should fall back to import-path when identifier is generic', () => {
+ const files = [
+ {
+ file: 'src/lib/dynamodb.ts',
+ content: `
+ import { Customer } from '../customers/model';
+ export function connect() { return 1; }
+ `,
+ },
+ ];
+
+ const graph = buildDependencyGraph(files);
+ const node = graph.nodes.get('src/lib/dynamodb.ts');
+
+ // 'connect' is generic, should infer 'customer' from import path
+ expect(node?.exports[0].inferredDomain).toBe('customer');
+ });
+ });
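
As a quick sanity check on the new scoring: calculateDomainConfidence sums fixed weights for whichever signals are set, and inferDomainFromSemantics keeps domains scoring at least 0.3. A minimal sketch, using only values visible in this diff (the import specifier assumes the package's public entry point):

// Sketch only: the weights and the 0.3 cutoff are taken from the compiled source above.
import { calculateDomainConfidence } from "@aiready/context-analyzer";

const confidence = calculateDomainConfidence({
  coUsage: true,        // +0.35 (strongest signal: actual usage patterns)
  typeReference: true,  // +0.30 (shared types)
  exportName: false,
  importPath: false,
  folderStructure: false,
});
// ~0.65, above the 0.3 threshold inferDomainFromSemantics uses, so this domain would be assigned.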