@getmikk/core 1.3.2 → 1.5.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,6 +1,73 @@
1
1
  import * as path from 'node:path'
2
2
  import type { DependencyGraph, ModuleCluster, GraphNode } from './types.js'
3
3
 
4
+ // ─── Domain keyword maps for semantic naming ────────────────────────
5
+ // Each entry maps a human-readable domain label to keywords found in
6
+ // function names and file basenames. The highest-scoring domain wins; ties favor earlier entries.
7
+ const DOMAIN_KEYWORDS: [string, string[]][] = [
8
+ // Core backends
9
+ ['Authentication', ['auth', 'login', 'logout', 'signin', 'signup', 'session', 'jwt', 'token', 'credential', 'password', 'oauth', 'sso']],
10
+ ['Encryption', ['encrypt', 'decrypt', 'cipher', 'aes', 'argon', 'derive', 'salt', 'envelope', 'hmac']],
11
+ ['Database', ['prisma', 'query', 'queries', 'db', 'database', 'repository', 'knex', 'sequelize', 'drizzle', 'typeorm', 'migration', 'seed']],
12
+ ['API', ['api', 'endpoint', 'middleware', 'handler', 'route', 'controller', 'request', 'response', 'rest', 'openapi']],
13
+ ['Validation', ['validate', 'validator', 'schema', 'assert', 'sanitize', 'zod', 'yup', 'joi']],
14
+ ['Config', ['config', 'env', 'settings', 'constants', 'options', 'feature', 'flag']],
15
+ ['Utils', ['util', 'utils', 'helper', 'helpers', 'format', 'convert', 'transform', 'lib', 'common', 'shared']],
16
+ ['Secrets', ['secret', 'vault', 'credential', 'key', 'keychain', 'encrypt', 'kms']],
17
+ ['Testing', ['test', 'spec', 'mock', 'fixture', 'stub', 'fake', 'factory', 'seed']],
18
+
19
+ // Frontend / UI
20
+ ['Navigation', ['sidebar', 'header', 'footer', 'nav', 'breadcrumb', 'menu', 'topbar', 'toolbar', 'appbar']],
21
+ ['Layout', ['layout', 'shell', 'frame', 'wrapper', 'page', 'container', 'grid', 'template']],
22
+ ['Forms', ['form', 'input', 'select', 'checkbox', 'radio', 'textarea', 'field', 'datepicker']],
23
+ ['Hooks', ['hook', 'useauth', 'usestate', 'useeffect', 'usememo', 'usequery', 'usemutation', 'useform', 'composable']],
24
+ ['Providers', ['provider', 'context', 'theme', 'store', 'reducer', 'zustand', 'pinia']],
25
+ ['Components', ['component', 'button', 'modal', 'dialog', 'card', 'toast', 'toggle', 'badge', 'tab', 'alert', 'avatar', 'widget']],
26
+ ['Dashboard', ['dashboard', 'chart', 'metric', 'stat', 'analytics', 'widget', 'overview', 'report']],
27
+ ['Media', ['image', 'video', 'audio', 'upload', 'gallery', 'zoom', 'embed', 'asset']],
28
+ ['Notifications', ['notification', 'toast', 'alert', 'snackbar', 'banner', 'push']],
29
+
30
+ // Business domains
31
+ ['Project Management', ['project', 'member', 'team', 'workspace', 'organization', 'invite', 'role', 'permission']],
32
+ ['Portfolio', ['portfolio', 'resume', 'experience', 'certification', 'award', 'testimonial', 'social', 'profile', 'bio']],
33
+ ['Blog', ['blog', 'post', 'article', 'mdx', 'markdown', 'rss', 'feed', 'author', 'category', 'tag', 'comment']],
34
+ ['Sponsors', ['sponsor', 'donation', 'patron', 'tier', 'backer']],
35
+ ['Search', ['search', 'filter', 'sort', 'query', 'autocomplete', 'fuzzy', 'index', 'algolia']],
36
+ ['Payments', ['payment', 'stripe', 'billing', 'invoice', 'subscription', 'checkout', 'cart', 'price', 'order']],
37
+
38
+ // CLI / Tooling
39
+ ['CLI', ['command', 'arg', 'flag', 'prompt', 'subcommand', 'repl', 'cli', 'yargs', 'commander', 'inquirer']],
40
+
41
+ // AI / ML
42
+ ['AI & ML', ['model', 'train', 'predict', 'inference', 'pipeline', 'tokenizer', 'embedding', 'llm', 'openai', 'anthropic', 'vector']],
43
+
44
+ // Messaging / Queue
45
+ ['Messaging', ['queue', 'worker', 'consumer', 'producer', 'broker', 'pubsub', 'event', 'subscriber', 'publisher', 'bullmq', 'kafka', 'rabbitmq']],
46
+
47
+ // Caching
48
+ ['Caching', ['cache', 'redis', 'memcached', 'ttl', 'invalidate', 'lru']],
49
+
50
+ // Logging / Monitoring
51
+ ['Logging', ['logger', 'log', 'trace', 'metric', 'telemetry', 'sentry', 'monitor', 'span']],
52
+
53
+ // Scheduling
54
+ ['Scheduling', ['cron', 'job', 'scheduler', 'background', 'recurring', 'interval']],
55
+
56
+ // Storage / Files
57
+ ['Storage', ['storage', 's3', 'bucket', 'blob', 'upload', 'download', 'stream', 'file', 'archive']],
58
+
59
+ // Email
60
+ ['Email', ['email', 'mail', 'smtp', 'sendgrid', 'mailer', 'template', 'newsletter']],
61
+
62
+ // GraphQL / gRPC
63
+ ['GraphQL', ['resolver', 'mutation', 'subscription', 'typedef', 'graphql', 'gql', 'apollo']],
64
+ ['gRPC', ['grpc', 'rpc', 'protobuf', 'service', 'stub', 'proto']],
65
+
66
+ // i18n / a11y
67
+ ['Internationalization', ['i18n', 'locale', 'translation', 'intl', 'language', 'l10n']],
68
+ ['Accessibility', ['a11y', 'aria', 'screenreader', 'focus', 'keyboard']],
69
+ ]
70
+
4
71
  /**
5
72
  * ClusterDetector — analyzes the dependency graph and groups files
6
73
  * into natural module clusters using greedy agglomeration with coupling scores.
@@ -39,7 +106,7 @@ export class ClusterDetector {
39
106
 
40
107
  // Start a new cluster with this file as seed
41
108
  const cluster: string[] = [seedFile]
42
- assigned.add(seedFile)
109
+ const tentative = new Set<string>([seedFile])
43
110
 
44
111
  // Expand: find files strongly coupled to any file in this cluster
45
112
  let expanded = true
@@ -50,7 +117,7 @@ export class ClusterDetector {
50
117
  const partners = couplingMatrix.get(clusterFile) || new Map()
51
118
 
52
119
  for (const [candidate, score] of partners) {
53
- if (assigned.has(candidate)) continue
120
+ if (assigned.has(candidate) || tentative.has(candidate)) continue
54
121
  if (score < this.minCouplingScore) continue
55
122
 
56
123
  // Is this candidate more coupled to this cluster than to others?
@@ -63,7 +130,7 @@ export class ClusterDetector {
63
130
 
64
131
  if (clusterAffinity > bestOutsideAffinity) {
65
132
  cluster.push(candidate)
66
- assigned.add(candidate)
133
+ tentative.add(candidate)
67
134
  expanded = true
68
135
  }
69
136
  }
@@ -71,13 +138,16 @@ export class ClusterDetector {
71
138
  }
72
139
 
73
140
  if (cluster.length >= this.minClusterSize) {
141
+ // Mark all files in this cluster as assigned
142
+ for (const f of cluster) assigned.add(f)
74
143
  const filePathsForCluster = cluster.map(id => this.getNodeFile(id))
144
+ const functionIds = this.getFunctionIdsForFiles(cluster)
75
145
  clusters.push({
76
146
  id: this.inferClusterId(filePathsForCluster),
77
147
  files: filePathsForCluster,
78
148
  confidence: this.computeClusterConfidence(cluster),
79
- suggestedName: this.inferClusterName(filePathsForCluster),
80
- functions: this.getFunctionIdsForFiles(cluster),
149
+ suggestedName: this.inferSemanticName(filePathsForCluster, functionIds),
150
+ functions: functionIds,
81
151
  })
82
152
  }
83
153
  }
@@ -86,17 +156,59 @@ export class ClusterDetector {
86
156
  for (const file of files) {
87
157
  if (!assigned.has(file)) {
88
158
  const filePath = this.getNodeFile(file)
159
+ const functionIds = this.getFunctionIdsForFiles([file])
89
160
  clusters.push({
90
161
  id: this.inferClusterId([filePath]),
91
162
  files: [filePath],
92
163
  confidence: 0.3,
93
- suggestedName: this.inferClusterName([filePath]),
94
- functions: this.getFunctionIdsForFiles([file]),
164
+ suggestedName: this.inferSemanticName([filePath], functionIds),
165
+ functions: functionIds,
95
166
  })
96
167
  }
97
168
  }
98
169
 
99
- return clusters.sort((a, b) => b.confidence - a.confidence)
170
+ // ── Post-process: merge clusters with the same base directory ──
171
+ // Without this, a directory like `lib/` often fragments into
172
+ // "Lib", "Lib (2)", "Lib (3)" which is useless for AI.
173
+ const merged = this.mergeSiblingClusters(clusters)
174
+
175
+ // Deduplicate cluster IDs — append numeric suffix if collision
176
+ const seenIds = new Map<string, number>()
177
+ for (const cluster of merged) {
178
+ const baseId = cluster.id
179
+ const count = seenIds.get(baseId) || 0
180
+ seenIds.set(baseId, count + 1)
181
+ if (count > 0) {
182
+ cluster.id = `${baseId}-${count + 1}`
183
+ }
184
+ }
185
+
186
+ // ── Disambiguate duplicate module names ──
187
+ // When semantic naming produces the same label for different clusters
188
+ // (e.g. "Search" × 3), append the distinctive directory segment.
189
+ const nameCount = new Map<string, ModuleCluster[]>()
190
+ for (const cluster of merged) {
191
+ const existing = nameCount.get(cluster.suggestedName) || []
192
+ existing.push(cluster)
193
+ nameCount.set(cluster.suggestedName, existing)
194
+ }
195
+ for (const [name, dupes] of nameCount) {
196
+ if (dupes.length <= 1) continue
197
+ for (const cluster of dupes) {
198
+ // Try to find a distinctive directory segment from the cluster ID
199
+ // e.g. "packages-diagram-generator" → "Diagram Generator"
200
+ const segments = cluster.id.split('-')
201
+ .filter(s => s !== 'packages' && s !== 'apps' && s !== 'src')
202
+ const suffix = segments
203
+ .map(s => s.charAt(0).toUpperCase() + s.slice(1))
204
+ .join(' ')
205
+ if (suffix && suffix !== name) {
206
+ cluster.suggestedName = `${name} (${suffix})`
207
+ }
208
+ }
209
+ }
210
+
211
+ return merged.sort((a, b) => b.confidence - a.confidence)
100
212
  }
101
213
 
102
214
  // ─── Coupling Matrix ──────────────────────────────────────────
@@ -276,7 +388,7 @@ export class ClusterDetector {
276
388
  private inferClusterId(filePaths: string[]): string {
277
389
  if (filePaths.length === 0) return 'unknown'
278
390
  if (filePaths.length === 1) {
279
- return this.getDirSegment(filePaths[0])
391
+ return this.getDirSegments(filePaths[0])
280
392
  }
281
393
  // Find the longest common directory prefix
282
394
  const segments = filePaths.map(f => f.split('/'))
@@ -290,23 +402,179 @@ export class ClusterDetector {
290
402
  }
291
403
  }
292
404
  const commonPath = firstSegments.slice(0, commonLen).join('/')
293
- return this.getDirSegment(commonPath || filePaths[0])
405
+ return this.getDirSegments(commonPath || filePaths[0])
294
406
  }
295
407
 
296
- /** Get the most meaningful directory segment from a path */
297
- private getDirSegment(filePath: string): string {
408
+ /**
409
+ * Build a hyphenated module ID from the meaningful directory segments.
410
+ * Skips "src" since it's a trivial container. Returns at most 3 segments.
411
+ * e.g. "src/components/ui/button.tsx" → "components-ui"
412
+ * "src/lib/hooks/use-auth.ts" → "lib-hooks"
413
+ * "features/auth/api/route.ts" → "features-auth-api"
414
+ */
415
+ private getDirSegments(filePath: string): string {
298
416
  const parts = filePath.split('/')
299
- // Skip 'src' if present
300
- if (parts[0] === 'src' && parts.length >= 2) return parts[1]
301
- if (parts.length > 1) return parts[0]
302
- return path.basename(filePath, path.extname(filePath))
417
+ // Remove filename (last part with an extension)
418
+ const dirs = parts.filter((p, i) => i < parts.length - 1 || !p.includes('.'))
419
+ // Drop the 'src' prefix — it carries no semantic meaning
420
+ const meaningful = dirs.filter(d => d !== 'src' && d !== '')
421
+ if (meaningful.length === 0) {
422
+ // Fallback: use the filename without extension
423
+ const last = parts[parts.length - 1]
424
+ return last.replace(/\.[^.]+$/, '') || 'unknown'
425
+ }
426
+ // Take up to 3 segments for a unique but concise ID
427
+ return meaningful.slice(0, 3).join('-')
428
+ }
429
+
430
+ // ─── Cluster Merging ──────────────────────────────────────────
431
+
432
+ /**
433
+ * Merge clusters that share the same base directory (first 1-2 segments).
434
+ * This prevents fragmentation like "Lib", "Lib (2)", "Lib (3)" from
435
+ * clumsy coupling-based splitting of files in the same directory.
436
+ */
437
+ private mergeSiblingClusters(clusters: ModuleCluster[]): ModuleCluster[] {
438
+ const byBaseDir = new Map<string, ModuleCluster[]>()
439
+
440
+ for (const cluster of clusters) {
441
+ const base = this.getBaseDir(cluster.files)
442
+ const existing = byBaseDir.get(base) || []
443
+ existing.push(cluster)
444
+ byBaseDir.set(base, existing)
445
+ }
446
+
447
+ const result: ModuleCluster[] = []
448
+ for (const [baseDir, siblings] of byBaseDir) {
449
+ if (siblings.length <= 1) {
450
+ result.push(...siblings)
451
+ continue
452
+ }
453
+
454
+ // Merge all siblings into one cluster
455
+ const allFiles = siblings.flatMap(c => c.files)
456
+ const allFunctions = siblings.flatMap(c => c.functions)
457
+ const avgConfidence = siblings.reduce((sum, c) => sum + c.confidence, 0) / siblings.length
458
+ const uniqueFiles = [...new Set(allFiles)]
459
+ const uniqueFunctions = [...new Set(allFunctions)]
460
+
461
+ result.push({
462
+ id: this.getDirSegments(uniqueFiles[0]),
463
+ files: uniqueFiles,
464
+ confidence: avgConfidence,
465
+ suggestedName: this.inferSemanticName(uniqueFiles, uniqueFunctions),
466
+ functions: uniqueFunctions,
467
+ })
468
+ }
469
+
470
+ return result
471
+ }
472
+
473
+ /** Get the base directory (first 1-2 meaningful segments) for a set of files */
474
+ private getBaseDir(files: string[]): string {
475
+ if (files.length === 0) return 'unknown'
476
+ // Find common prefix of all file paths
477
+ const segments = files.map(f => f.split('/'))
478
+ const first = segments[0]
479
+ let commonLen = 0
480
+ for (let i = 0; i < first.length - 1; i++) {
481
+ if (segments.every(s => s[i] === first[i])) {
482
+ commonLen = i + 1
483
+ } else {
484
+ break
485
+ }
486
+ }
487
+ const common = first.slice(0, commonLen)
488
+ .filter(d => d !== 'src' && d !== '')
489
+ // Use the first 2 meaningful path segments as the "base"
490
+ return common.slice(0, 2).join('/') || first.filter(d => d !== 'src' && d !== '')[0] || 'root'
491
+ }
492
+
493
+ // ─── Semantic Naming ──────────────────────────────────────────
494
+
495
+ /**
496
+ * Produce a human-meaningful module name by analyzing function names
497
+ * and file basenames. Falls back to title-cased directory name.
498
+ *
499
+ * Algorithm:
500
+ * 1. Collect all words from function labels and file basenames
501
+ * 2. Score each domain from DOMAIN_KEYWORDS against the word bag
502
+ * 3. Pick top 1–2 domains above threshold; combine them
503
+ * 4. If no domain matches, fall back to directory-based name
504
+ */
505
+ private inferSemanticName(filePaths: string[], functionIds: string[]): string {
506
+ // Collect words from function names
507
+ const fnLabels = functionIds
508
+ .map(id => this.graph.nodes.get(id)?.label ?? '')
509
+ .filter(Boolean)
510
+
511
+ // Collect file basenames without extension
512
+ const fileNames = filePaths.map(f => {
513
+ const basename = f.split('/').pop() || ''
514
+ return basename.replace(/\.[^.]+$/, '')
515
+ })
516
+
517
+ // Also include directory segments (e.g. "blog" from "features/blog/hooks")
518
+ const dirNames = filePaths.flatMap(f => {
519
+ const parts = f.split('/')
520
+ return parts.slice(0, -1).filter(d => d !== 'src' && d !== '')
521
+ })
522
+
523
+ // Build a lowercased word bag from all sources
524
+ const wordBag = this.buildWordBag([...fnLabels, ...fileNames, ...dirNames])
525
+
526
+ // Score each domain
527
+ const scores: [string, number][] = []
528
+ for (const [domain, keywords] of DOMAIN_KEYWORDS) {
529
+ let score = 0
530
+ for (const kw of keywords) {
531
+ for (const word of wordBag) {
532
+ // Exact match or word contains keyword (e.g. "hooks" contains "hook")
533
+ // Do NOT check kw.includes(word) — too loose ("use" would match "usequery")
534
+ if (word === kw || word.includes(kw)) {
535
+ score++
536
+ }
537
+ }
538
+ }
539
+ if (score > 0) scores.push([domain, score])
540
+ }
541
+
542
+ scores.sort((a, b) => b[1] - a[1])
543
+
544
+ if (scores.length >= 2 && scores[0][1] > 1 && scores[1][1] > 1 &&
545
+ scores[1][1] >= scores[0][1] * 0.5) {
546
+ // Two strong domains — combine them
547
+ return `${scores[0][0]} & ${scores[1][0]}`
548
+ }
549
+ if (scores.length >= 1 && scores[0][1] > 0) {
550
+ return scores[0][0]
551
+ }
552
+
553
+ // Fallback: directory-based name
554
+ return this.inferClusterNameFromDir(filePaths)
303
555
  }
304
556
 
305
- /** Infer a human-readable cluster name */
306
- private inferClusterName(filePaths: string[]): string {
557
+ /** Fallback: infer a human-readable cluster name from directory paths */
558
+ private inferClusterNameFromDir(filePaths: string[]): string {
307
559
  const dir = this.inferClusterId(filePaths)
308
560
  return dir
309
561
  .replace(/[-_]/g, ' ')
310
562
  .replace(/\b\w/g, c => c.toUpperCase())
311
563
  }
564
+
565
+ /** Split identifiers and file names into lowercase words */
566
+ private buildWordBag(identifiers: string[]): string[] {
567
+ const words: string[] = []
568
+ for (const id of identifiers) {
569
+ // Split camelCase/PascalCase
570
+ const split = id
571
+ .replace(/([a-z0-9])([A-Z])/g, '$1 $2')
572
+ .replace(/([A-Z]+)([A-Z][a-z])/g, '$1 $2')
573
+ .split(/[\s_\-\.]+/)
574
+ .map(w => w.toLowerCase())
575
+ .filter(w => w.length > 1)
576
+ words.push(...split)
577
+ }
578
+ return words
579
+ }
312
580
  }
@@ -66,6 +66,8 @@ export class GraphBuilder {
66
66
  isAsync: fn.isAsync,
67
67
  hash: fn.hash,
68
68
  purpose: fn.purpose,
69
+ params: fn.params?.map(p => ({ name: p.name, type: p.type, ...(p.optional ? { optional: true } : {}) })),
70
+ returnType: fn.returnType !== 'void' ? fn.returnType : undefined,
69
71
  edgeCasesHandled: fn.edgeCasesHandled,
70
72
  errorHandling: fn.errorHandling,
71
73
  detailedLines: fn.detailedLines,
@@ -19,6 +19,8 @@ export interface GraphNode {
19
19
  isAsync?: boolean
20
20
  hash?: string
21
21
  purpose?: string
22
+ params?: { name: string; type: string; optional?: boolean }[]
23
+ returnType?: string
22
24
  edgeCasesHandled?: string[]
23
25
  errorHandling?: { line: number; type: 'try-catch' | 'throw'; detail: string }[]
24
26
  detailedLines?: { startLine: number; endLine: number; blockType: string }[]
package/src/index.ts CHANGED
@@ -7,6 +7,7 @@ export * from './contract/index.js'
7
7
  export * from './hash/index.js'
8
8
  export * from './utils/errors.js'
9
9
  export * from './utils/logger.js'
10
- export { discoverFiles, readFileContent, writeFileContent, fileExists, setupMikkDirectory } from './utils/fs.js'
10
+ export { discoverFiles, discoverContextFiles, readFileContent, writeFileContent, fileExists, setupMikkDirectory, readMikkIgnore, parseMikkIgnore, detectProjectLanguage, getDiscoveryPatterns, generateMikkIgnore } from './utils/fs.js'
11
+ export type { ContextFile, ContextFileType, ProjectLanguage } from './utils/fs.js'
11
12
  export { minimatch } from './utils/minimatch.js'
12
13
  export { scoreFunctions, findFuzzyMatches, levenshtein, splitCamelCase, extractKeywords } from './utils/fuzzy-match.js'
@@ -115,7 +115,7 @@ export class BoundaryChecker {
115
115
  check(): BoundaryCheckResult {
116
116
  const violations: BoundaryViolation[] = []
117
117
 
118
- // Collect all cross-module calls
118
+ // Pass 1: Check cross-module function calls
119
119
  for (const fn of Object.values(this.lock.functions)) {
120
120
  for (const calleeId of fn.calls) {
121
121
  const callee = this.lock.functions[calleeId]
@@ -128,11 +128,24 @@ export class BoundaryChecker {
128
128
  }
129
129
  }
130
130
 
131
+ // Pass 2: Check cross-module file-level imports
132
+ for (const file of Object.values(this.lock.files)) {
133
+ if (!file.imports || file.imports.length === 0) continue
134
+ for (const importedPath of file.imports) {
135
+ const importedFile = this.lock.files[importedPath]
136
+ if (!importedFile) continue
137
+ if (file.moduleId === importedFile.moduleId) continue // same module — fine
138
+
139
+ const violation = this.checkFileImport(file, importedFile)
140
+ if (violation) violations.push(violation)
141
+ }
142
+ }
143
+
131
144
  const errorCount = violations.filter(v => v.severity === 'error').length
132
145
  const warnCount = violations.filter(v => v.severity === 'warning').length
133
146
 
134
147
  const summary = violations.length === 0
135
- ? `✓ All module boundaries respected (${Object.keys(this.lock.functions).length} functions checked)`
148
+ ? `✓ All module boundaries respected (${Object.keys(this.lock.functions).length} functions, ${Object.keys(this.lock.files).length} files checked)`
136
149
  : `✗ ${errorCount} boundary error(s), ${warnCount} warning(s) found`
137
150
 
138
151
  return {
@@ -191,9 +204,56 @@ export class BoundaryChecker {
191
204
  return null
192
205
  }
193
206
 
207
+ /**
208
+ * Check a single cross-module file import against parsed rules.
209
+ * Returns a violation if the import is forbidden, null if it's allowed.
210
+ */
211
+ private checkFileImport(
212
+ sourceFile: { path: string; moduleId: string },
213
+ targetFile: { path: string; moduleId: string }
214
+ ): BoundaryViolation | null {
215
+ for (const rule of this.rules) {
216
+ if (rule.fromModuleId !== sourceFile.moduleId) continue
217
+
218
+ let forbidden = false
219
+
220
+ if (rule.type === 'isolated') {
221
+ forbidden = true
222
+ } else if (rule.type === 'deny') {
223
+ forbidden = rule.toModuleIds.includes(targetFile.moduleId)
224
+ } else if (rule.type === 'allow_only') {
225
+ forbidden = !rule.toModuleIds.includes(targetFile.moduleId)
226
+ }
227
+
228
+ if (forbidden) {
229
+ return {
230
+ from: {
231
+ functionId: `file:${sourceFile.path}`,
232
+ functionName: path.basename(sourceFile.path),
233
+ file: sourceFile.path,
234
+ moduleId: sourceFile.moduleId,
235
+ moduleName: this.moduleNames.get(sourceFile.moduleId) ?? sourceFile.moduleId,
236
+ },
237
+ to: {
238
+ functionId: `file:${targetFile.path}`,
239
+ functionName: path.basename(targetFile.path),
240
+ file: targetFile.path,
241
+ moduleId: targetFile.moduleId,
242
+ moduleName: this.moduleNames.get(targetFile.moduleId) ?? targetFile.moduleId,
243
+ },
244
+ rule: rule.raw,
245
+ severity: 'error',
246
+ }
247
+ }
248
+ }
249
+ return null
250
+ }
251
+
194
252
  /** Return all cross-module call pairs (useful for generating allow rules) */
195
253
  allCrossModuleCalls(): { from: string; to: string; count: number }[] {
196
254
  const counts = new Map<string, number>()
255
+
256
+ // Count function-level cross-module calls
197
257
  for (const fn of Object.values(this.lock.functions)) {
198
258
  for (const calleeId of fn.calls) {
199
259
  const callee = this.lock.functions[calleeId]
@@ -202,6 +262,18 @@ export class BoundaryChecker {
202
262
  counts.set(key, (counts.get(key) ?? 0) + 1)
203
263
  }
204
264
  }
265
+
266
+ // Count file-level cross-module imports
267
+ for (const file of Object.values(this.lock.files)) {
268
+ if (!file.imports) continue
269
+ for (const importedPath of file.imports) {
270
+ const importedFile = this.lock.files[importedPath]
271
+ if (!importedFile || file.moduleId === importedFile.moduleId) continue
272
+ const key = `${file.moduleId}→${importedFile.moduleId}`
273
+ counts.set(key, (counts.get(key) ?? 0) + 1)
274
+ }
275
+ }
276
+
205
277
  return [...counts.entries()]
206
278
  .map(([key, count]) => {
207
279
  const [from, to] = key.split('→')
@@ -63,6 +63,16 @@ export interface ParsedClass {
63
63
  errorHandling?: { line: number, type: 'try-catch' | 'throw', detail: string }[]
64
64
  }
65
65
 
66
+ /** A detected HTTP route registration (Express/Koa/Hono style) */
67
+ export interface ParsedRoute {
68
+ method: string // "GET", "POST", "PUT", "DELETE", "USE", etc.
69
+ path: string // "/upload", "/:shortId", "/api"
70
+ handler: string // "createZap" or "anonymous"
71
+ middlewares: string[] // ["uploadLimiter", "upload.single"]
72
+ file: string // "src/Routes/zap.routes.ts"
73
+ line: number // 15
74
+ }
75
+
66
76
  /** A generic declaration like interface, type, or constant with metadata */
67
77
  export interface ParsedGeneric {
68
78
  id: string
@@ -85,6 +95,7 @@ export interface ParsedFile {
85
95
  generics: ParsedGeneric[]
86
96
  imports: ParsedImport[]
87
97
  exports: ParsedExport[]
98
+ routes: ParsedRoute[] // Detected HTTP route registrations
88
99
  hash: string // SHA-256 of the entire file content
89
100
  parsedAt: number // Date.now()
90
101
  }