@getmikk/core 2.0.14 → 2.0.15

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (64)
  1. package/README.md +4 -4
  2. package/package.json +2 -1
  3. package/src/analysis/type-flow.ts +1 -1
  4. package/src/cache/incremental-cache.ts +86 -80
  5. package/src/contract/contract-reader.ts +1 -0
  6. package/src/contract/lock-compiler.ts +95 -13
  7. package/src/contract/schema.ts +2 -0
  8. package/src/error-handler.ts +2 -1
  9. package/src/graph/cluster-detector.ts +2 -4
  10. package/src/graph/dead-code-detector.ts +303 -117
  11. package/src/graph/graph-builder.ts +21 -161
  12. package/src/graph/impact-analyzer.ts +1 -0
  13. package/src/graph/index.ts +2 -0
  14. package/src/graph/rich-function-index.ts +1080 -0
  15. package/src/graph/symbol-table.ts +252 -0
  16. package/src/hash/hash-store.ts +1 -0
  17. package/src/index.ts +2 -0
  18. package/src/parser/base-extractor.ts +19 -0
  19. package/src/parser/boundary-checker.ts +31 -12
  20. package/src/parser/error-recovery.ts +5 -4
  21. package/src/parser/function-body-extractor.ts +248 -0
  22. package/src/parser/go/go-extractor.ts +249 -676
  23. package/src/parser/index.ts +132 -318
  24. package/src/parser/language-registry.ts +57 -0
  25. package/src/parser/oxc-parser.ts +166 -28
  26. package/src/parser/oxc-resolver.ts +179 -11
  27. package/src/parser/parser-constants.ts +1 -0
  28. package/src/parser/rust/rust-extractor.ts +109 -0
  29. package/src/parser/tree-sitter/parser.ts +369 -62
  30. package/src/parser/tree-sitter/queries.ts +106 -10
  31. package/src/parser/types.ts +20 -1
  32. package/src/search/bm25.ts +21 -8
  33. package/src/search/direct-search.ts +472 -0
  34. package/src/search/embedding-provider.ts +249 -0
  35. package/src/search/index.ts +12 -0
  36. package/src/search/semantic-search.ts +435 -0
  37. package/src/utils/artifact-transaction.ts +1 -0
  38. package/src/utils/atomic-write.ts +1 -0
  39. package/src/utils/errors.ts +89 -4
  40. package/src/utils/fs.ts +104 -50
  41. package/src/utils/json.ts +1 -0
  42. package/src/utils/language-registry.ts +84 -6
  43. package/src/utils/path.ts +26 -0
  44. package/tests/dead-code.test.ts +3 -2
  45. package/tests/direct-search.test.ts +435 -0
  46. package/tests/error-recovery.test.ts +143 -0
  47. package/tests/fixtures/simple-api/src/index.ts +1 -1
  48. package/tests/go-parser.test.ts +19 -335
  49. package/tests/js-parser.test.ts +18 -1089
  50. package/tests/language-registry-all.test.ts +276 -0
  51. package/tests/language-registry.test.ts +6 -4
  52. package/tests/parse-diagnostics.test.ts +9 -96
  53. package/tests/parser.test.ts +42 -771
  54. package/tests/polyglot-parser.test.ts +117 -0
  55. package/tests/rich-function-index.test.ts +703 -0
  56. package/tests/tree-sitter-parser.test.ts +108 -80
  57. package/tests/ts-parser.test.ts +8 -8
  58. package/tests/verification.test.ts +175 -0
  59. package/src/parser/base-parser.ts +0 -16
  60. package/src/parser/go/go-parser.ts +0 -43
  61. package/src/parser/javascript/js-extractor.ts +0 -278
  62. package/src/parser/javascript/js-parser.ts +0 -101
  63. package/src/parser/typescript/ts-extractor.ts +0 -447
  64. package/src/parser/typescript/ts-parser.ts +0 -36
package/README.md CHANGED
@@ -75,10 +75,10 @@ One root hash comparison = instant full drift detection. Persisted in SQLite wit
75
75
 
76
76
  Compiles a `DependencyGraph` + `MikkContract` + parsed files into a `MikkLock`. The lock file is the single source of truth for all MCP tools and CLI commands.
77
77
 
78
- Lock format v1.7.0:
79
- - Integer-based function index (`fnIndex`) — call graph edges stored as integer references, not repeated strings
80
- - Compact JSON output — no pretty-printing
81
- - Backward-compatible hydration for older formats
78
+ Lock format:
79
+ - Integer-based function index (`fnIndex`) — call graph edges stored as integer references
80
+ - Compact JSON output
81
+ - Backward-compatible hydration
82
82
 
83
83
  ### ContractReader / ContractWriter / LockReader
84
84
 
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@getmikk/core",
3
- "version": "2.0.14",
3
+ "version": "2.0.15",
4
4
  "publishConfig": {
5
5
  "access": "public",
6
6
  "registry": "https://registry.npmjs.org/"
@@ -33,6 +33,7 @@
33
33
  "eslint": "^9.39.2"
34
34
  },
35
35
  "dependencies": {
36
+ "@google/generative-ai": "^0.21.0",
36
37
  "better-sqlite3": "^12.6.2",
37
38
  "fast-glob": "^3.3.0",
38
39
  "tree-sitter-wasms": "^0.1.13",
@@ -4,7 +4,7 @@
4
4
  */
5
5
 
6
6
  import type { MikkLock, MikkLockFunction } from '../contract/schema.js'
7
- import type { DependencyGraph, GraphEdge } from '../graph/types.js'
7
+ import type { DependencyGraph } from '../graph/types.js'
8
8
 
9
9
  // ---------------------------------------------------------------------------
10
10
  // Types
@@ -1,15 +1,10 @@
1
- import * as fs from 'node:fs'
1
+ import * as fs from 'node:fs/promises'
2
2
  import * as path from 'node:path'
3
3
  import type { ParsedFile } from '../parser/types.js'
4
4
 
5
- // ---------------------------------------------------------------------------
6
- // Incremental Analysis Cache — avoids re-parsing unchanged files
7
- // ---------------------------------------------------------------------------
8
-
9
5
  interface CacheEntry {
10
6
  hash: string
11
7
  parsedAt: string
12
- // Store lightweight metadata instead of full ParsedFile to prevent metadata bloat
13
8
  size: number
14
9
  lastAccessed: number
15
10
  }
@@ -21,75 +16,91 @@ interface CacheMetadata {
21
16
  }
22
17
 
23
18
  const CACHE_VERSION = 1
24
- const MAX_CACHE_SIZE = 5000 // Max entries before LRU eviction
25
- const CACHE_TTL_MS = 24 * 60 * 60 * 1000 // 24 hours
19
+ const MAX_CACHE_SIZE = 5000
20
+ const CACHE_TTL_MS = 24 * 60 * 60 * 1000
26
21
 
27
22
  export class IncrementalCache {
28
23
  private cacheDir: string
29
24
  private metadata: CacheMetadata
30
25
  private hits = 0
31
26
  private misses = 0
32
- // Mutex to prevent race conditions during concurrent access
33
- private mutex = Promise.resolve()
27
+ private queue: Array<() => void> = []
28
+ private running = false
29
+ private initialized = false
30
+ private pendingInit: Promise<void> | null = null
34
31
 
35
- constructor(projectRoot: string) {
36
- this.cacheDir = path.join(projectRoot, '.mikk', 'cache')
37
- this.metadata = {
38
- version: CACHE_VERSION,
39
- entries: new Map(),
40
- lastPruned: Date.now(),
41
- }
42
- this.loadMetadata()
43
- }
32
+ constructor(projectRoot: string) {
33
+ this.cacheDir = path.join(projectRoot, '.mikk', 'cache')
34
+ this.metadata = {
35
+ version: CACHE_VERSION,
36
+ entries: new Map(),
37
+ lastPruned: Date.now(),
38
+ }
39
+ this.pendingInit = this.loadMetadata()
40
+ }
44
41
 
45
- /**
46
- * Simple mutex-like protection using a flag for basic race condition prevention
47
- * Note: For production, a proper mutex library would be better
48
- */
49
- private isLocked = false
50
- private async withMutex<T>(fn: () => Promise<T>): Promise<T> {
51
- while (this.isLocked) {
52
- await new Promise(resolve => setTimeout(resolve, 1))
53
- }
54
- this.isLocked = true
55
- try {
56
- return await fn()
57
- } finally {
58
- this.isLocked = false
59
- }
60
- }
42
+ private async ensureInitialized(): Promise<void> {
43
+ if (this.initialized) return
44
+ if (this.pendingInit) {
45
+ await this.pendingInit
46
+ }
47
+ }
48
+
49
+ private async withMutex<T>(fn: () => Promise<T>): Promise<T> {
50
+ return new Promise((resolve, reject) => {
51
+ this.queue.push(async () => {
52
+ try {
53
+ await this.ensureInitialized()
54
+ const result = await fn()
55
+ resolve(result)
56
+ } catch (err) {
57
+ reject(err)
58
+ }
59
+ })
60
+ if (!this.running) {
61
+ this.processQueue()
62
+ }
63
+ })
64
+ }
65
+
66
+ private async processQueue(): Promise<void> {
67
+ this.running = true
68
+ while (this.queue.length > 0) {
69
+ const fn = this.queue.shift()!
70
+ await fn()
71
+ }
72
+ this.running = false
73
+ }
61
74
 
62
75
  private getCacheFilePath(hash: string): string {
63
76
  return path.join(this.cacheDir, `${hash}.json`)
64
77
  }
65
78
 
66
- private loadMetadata(): void {
79
+ private async loadMetadata(): Promise<void> {
67
80
  const metaPath = path.join(this.cacheDir, 'metadata.json')
68
81
  try {
69
- if (fs.existsSync(metaPath)) {
70
- const raw = fs.readFileSync(metaPath, 'utf-8')
71
- const data = JSON.parse(raw)
72
- this.metadata.version = data.version ?? CACHE_VERSION
73
- this.metadata.lastPruned = data.lastPruned ?? Date.now()
74
- // Rebuild entries map
75
- this.metadata.entries = new Map(Object.entries(data.entries ?? {}))
76
- }
82
+ const raw = await fs.readFile(metaPath, 'utf-8')
83
+ const data = JSON.parse(raw)
84
+ this.metadata.version = data.version ?? CACHE_VERSION
85
+ this.metadata.lastPruned = data.lastPruned ?? Date.now()
86
+ this.metadata.entries = new Map(Object.entries(data.entries ?? {}))
87
+ this.initialized = true
77
88
  } catch {
78
- // Corrupted metadata — start fresh
79
89
  this.metadata.entries = new Map()
90
+ this.initialized = true
80
91
  }
81
92
  }
82
93
 
83
- private saveMetadata(): void {
94
+ private async saveMetadata(): Promise<void> {
84
95
  try {
85
- fs.mkdirSync(this.cacheDir, { recursive: true })
96
+ await fs.mkdir(this.cacheDir, { recursive: true })
86
97
  const metaPath = path.join(this.cacheDir, 'metadata.json')
87
98
  const serializable = {
88
99
  version: this.metadata.version,
89
100
  lastPruned: this.metadata.lastPruned,
90
101
  entries: Object.fromEntries(this.metadata.entries),
91
102
  }
92
- fs.writeFileSync(metaPath, JSON.stringify(serializable), 'utf-8')
103
+ await fs.writeFile(metaPath, JSON.stringify(serializable), 'utf-8')
93
104
  } catch {
94
105
  // Silently fail — cache is non-critical
95
106
  }
@@ -112,7 +123,6 @@ export class IncrementalCache {
112
123
  return null
113
124
  }
114
125
 
115
- // Check TTL
116
126
  const parsedAt = new Date(entry.parsedAt).getTime()
117
127
  if (Date.now() - parsedAt > CACHE_TTL_MS) {
118
128
  this.metadata.entries.delete(filePath)
@@ -120,19 +130,14 @@ export class IncrementalCache {
120
130
  return null
121
131
  }
122
132
 
123
- // Load from disk and reconstruct ParsedFile
124
133
  const cacheFile = this.getCacheFilePath(contentHash)
125
134
  try {
126
- if (fs.existsSync(cacheFile)) {
127
- const raw = fs.readFileSync(cacheFile, 'utf-8')
128
- const parsed = JSON.parse(raw) as ParsedFile
129
- this.hits++
130
- // Update last accessed time
131
- entry.lastAccessed = Date.now()
132
- return parsed
133
- }
135
+ const raw = await fs.readFile(cacheFile, 'utf-8')
136
+ const parsed = JSON.parse(raw) as ParsedFile
137
+ this.hits++
138
+ entry.lastAccessed = Date.now()
139
+ return parsed
134
140
  } catch (err) {
135
- // Corrupted cache entry
136
141
  console.warn(`Corrupted cache entry for ${filePath}:`, err)
137
142
  this.metadata.entries.delete(filePath)
138
143
  }
@@ -147,26 +152,23 @@ export class IncrementalCache {
147
152
  */
148
153
  async set(filePath: string, contentHash: string, parsed: ParsedFile): Promise<void> {
149
154
  return this.withMutex(async () => {
150
- // Evict if cache is full
151
155
  if (this.metadata.entries.size >= MAX_CACHE_SIZE) {
152
- this.evictLRU()
156
+ await this.evictLRU()
153
157
  }
154
158
 
155
159
  const entry: CacheEntry = {
156
160
  hash: contentHash,
157
161
  parsedAt: new Date().toISOString(),
158
- // Store file size for lightweight tracking instead of full ParsedFile
159
162
  size: JSON.stringify(parsed).length,
160
163
  lastAccessed: Date.now()
161
164
  }
162
165
 
163
166
  this.metadata.entries.set(filePath, entry)
164
167
 
165
- // Write to disk
166
168
  try {
167
- fs.mkdirSync(this.cacheDir, { recursive: true })
169
+ await fs.mkdir(this.cacheDir, { recursive: true })
168
170
  const cacheFile = this.getCacheFilePath(contentHash)
169
- fs.writeFileSync(cacheFile, JSON.stringify(parsed), 'utf-8')
171
+ await fs.writeFile(cacheFile, JSON.stringify(parsed), 'utf-8')
170
172
  } catch {
171
173
  // Silently fail — cache is non-critical
172
174
  }
@@ -182,9 +184,7 @@ export class IncrementalCache {
182
184
  if (entry) {
183
185
  const cacheFile = this.getCacheFilePath(entry.hash)
184
186
  try {
185
- if (fs.existsSync(cacheFile)) {
186
- fs.unlinkSync(cacheFile)
187
- }
187
+ await fs.unlink(cacheFile)
188
188
  } catch { /* ignore */ }
189
189
  this.metadata.entries.delete(filePath)
190
190
  }
@@ -199,13 +199,11 @@ export class IncrementalCache {
199
199
  for (const [, entry] of this.metadata.entries) {
200
200
  const cacheFile = this.getCacheFilePath(entry.hash)
201
201
  try {
202
- if (fs.existsSync(cacheFile)) {
203
- fs.unlinkSync(cacheFile)
204
- }
202
+ await fs.unlink(cacheFile)
205
203
  } catch { /* ignore */ }
206
204
  }
207
205
  this.metadata.entries.clear()
208
- this.saveMetadata()
206
+ await this.saveMetadata()
209
207
  })
210
208
  }
211
209
 
@@ -228,15 +226,14 @@ export class IncrementalCache {
228
226
  */
229
227
  async flush(): Promise<void> {
230
228
  return this.withMutex(async () => {
231
- this.saveMetadata()
229
+ await this.saveMetadata()
232
230
  })
233
231
  }
234
232
 
235
233
  /**
236
234
  * Evict least recently used entries when cache is full.
237
235
  */
238
- private evictLRU(): void {
239
- // Sort by parsedAt (oldest first) and remove oldest 20%
236
+ private async evictLRU(): Promise<void> {
240
237
  const sorted = [...this.metadata.entries.entries()].sort(
241
238
  (a, b) => new Date(a[1].parsedAt).getTime() - new Date(b[1].parsedAt).getTime()
242
239
  )
@@ -245,9 +242,7 @@ export class IncrementalCache {
245
242
  const [filePath, entry] = sorted[i]
246
243
  const cacheFile = this.getCacheFilePath(entry.hash)
247
244
  try {
248
- if (fs.existsSync(cacheFile)) {
249
- fs.unlinkSync(cacheFile)
250
- }
245
+ await fs.unlink(cacheFile)
251
246
  } catch { /* ignore */ }
252
247
  this.metadata.entries.delete(filePath)
253
248
  }
@@ -259,14 +254,25 @@ export class IncrementalCache {
259
254
  async prune(): Promise<void> {
260
255
  return this.withMutex(async () => {
261
256
  const now = Date.now()
257
+ const toDelete: string[] = []
262
258
  for (const [filePath, entry] of this.metadata.entries) {
263
259
  const parsedAt = new Date(entry.parsedAt).getTime()
264
260
  if (now - parsedAt > CACHE_TTL_MS) {
265
- this.invalidate(filePath)
261
+ toDelete.push(filePath)
262
+ }
263
+ }
264
+ for (const filePath of toDelete) {
265
+ const entry = this.metadata.entries.get(filePath)
266
+ if (entry) {
267
+ const cacheFile = this.getCacheFilePath(entry.hash)
268
+ try {
269
+ await fs.unlink(cacheFile)
270
+ } catch { /* ignore */ }
271
+ this.metadata.entries.delete(filePath)
266
272
  }
267
273
  }
268
274
  this.metadata.lastPruned = now
269
- this.saveMetadata()
275
+ await this.saveMetadata()
270
276
  })
271
277
  }
272
278
  }
@@ -1,3 +1,4 @@
1
+ /* eslint-disable @typescript-eslint/no-explicit-any */
1
2
  import { MikkContractSchema, type MikkContract } from './schema.js'
2
3
  import { ContractNotFoundError } from '../utils/errors.js'
3
4
  import { readJsonSafe } from '../utils/json.js'
@@ -123,6 +123,7 @@ function capitalise(s: string): string {
123
123
  */
124
124
  export class LockCompiler {
125
125
  private projectRootPath: string | null = null
126
+
126
127
  /** Main entry -- compile full lock from graph + contract + parsed files */
127
128
  compile(
128
129
  graph: DependencyGraph,
@@ -132,6 +133,7 @@ export class LockCompiler {
132
133
  projectRoot?: string
133
134
  ): MikkLock {
134
135
  this.projectRootPath = projectRoot ? nodePath.resolve(projectRoot) : null
136
+
135
137
  const functions = this.compileFunctions(graph, contract)
136
138
  const classes = this.compileClasses(graph, contract)
137
139
  const generics = this.compileGenerics(graph, contract)
@@ -204,6 +206,11 @@ export class LockCompiler {
204
206
  const inEdges = graph.inEdges.get(id) || []
205
207
  const outEdges = graph.outEdges.get(id) || []
206
208
 
209
+ const params = metadata.params || []
210
+ const returnType = metadata.returnType || 'void'
211
+ const signatureHash = hashContent(`${displayName}(${params.map(p => p.type).join(',')}):${returnType}`)
212
+ const tokenVector = this.generateTokenVector(displayName, params, returnType, metadata.purpose)
213
+
207
214
  result[id] = {
208
215
  id,
209
216
  name: displayName,
@@ -214,26 +221,94 @@ export class LockCompiler {
214
221
  calls: outEdges.filter(e => e.type === 'calls').map(e => e.to),
215
222
  calledBy: inEdges.filter(e => e.type === 'calls').map(e => e.from),
216
223
  moduleId: moduleId || 'unknown',
217
- ...(metadata.params && metadata.params.length > 0
218
- ? { params: metadata.params }
219
- : {}),
224
+ ...(params.length > 0 ? { params } : {}),
220
225
  ...(metadata.returnType ? { returnType: metadata.returnType } : {}),
221
226
  ...(metadata.isAsync ? { isAsync: true } : {}),
222
227
  ...(metadata.isExported ? { isExported: true } : {}),
223
228
  purpose: metadata.purpose || inferPurpose(
224
229
  displayName,
225
- metadata.params,
226
- metadata.returnType,
230
+ params,
231
+ returnType,
227
232
  metadata.isAsync,
228
233
  ),
229
234
  edgeCasesHandled: metadata.edgeCasesHandled,
230
235
  errorHandling: metadata.errorHandling,
236
+ signatureHash,
237
+ tokenVector,
231
238
  }
232
239
  }
233
240
 
234
241
  return result
235
242
  }
236
243
 
244
+ private generateTokenVector(
245
+ name: string,
246
+ params: Array<{ name: string; type: string; optional?: boolean }>,
247
+ returnType: string,
248
+ purpose?: string
249
+ ): number[] {
250
+ const tokens: string[] = []
251
+
252
+ tokens.push(...name.match(/[A-Z][a-z]+|[a-z]+/g)?.map(t => t.toLowerCase()) || [])
253
+
254
+ for (const param of params) {
255
+ tokens.push(...param.name.match(/[A-Z][a-z]+|[a-z]+/g)?.map(t => t.toLowerCase()) || [])
256
+ }
257
+
258
+ tokens.push(...returnType.match(/[A-Z][a-z]+|[a-z]+/g)?.map(t => t.toLowerCase()) || [])
259
+
260
+ if (purpose) {
261
+ tokens.push(...purpose.match(/[a-z]{3,}/g)?.map(t => t.toLowerCase()) || [])
262
+ }
263
+
264
+ const vocabulary = this.buildVocabulary()
265
+ const vector = new Array(64).fill(0)
266
+
267
+ for (const token of tokens) {
268
+ if (vocabulary.has(token)) {
269
+ const idx = vocabulary.get(token)!
270
+ const hash = this.simpleHash(token)
271
+ vector[idx % 64] += hash
272
+ }
273
+ }
274
+
275
+ const magnitude = Math.sqrt(vector.reduce((sum, v) => sum + v * v, 0))
276
+ if (magnitude > 0) {
277
+ for (let i = 0; i < vector.length; i++) {
278
+ vector[i] /= magnitude
279
+ }
280
+ }
281
+
282
+ return vector
283
+ }
284
+
285
+ private buildVocabulary(): Map<string, number> {
286
+ const common = [
287
+ 'get', 'set', 'add', 'remove', 'create', 'delete', 'update', 'find',
288
+ 'load', 'save', 'parse', 'format', 'validate', 'check', 'handle',
289
+ 'process', 'render', 'display', 'build', 'make', 'init', 'setup',
290
+ 'config', 'user', 'auth', 'login', 'logout', 'token', 'data', 'file',
291
+ 'path', 'config', 'options', 'params', 'args', 'error', 'result',
292
+ 'async', 'promise', 'callback', 'event', 'handler', 'middleware',
293
+ 'database', 'query', 'insert', 'update', 'delete', 'select', 'transaction',
294
+ 'string', 'number', 'boolean', 'array', 'object', 'function', 'class',
295
+ 'interface', 'type', 'enum', 'const', 'var', 'let', 'return', 'void',
296
+ ]
297
+
298
+ const vocab = new Map<string, number>()
299
+ common.forEach((word, idx) => vocab.set(word, idx))
300
+ return vocab
301
+ }
302
+
303
+ private simpleHash(str: string): number {
304
+ let hash = 0
305
+ for (let i = 0; i < str.length; i++) {
306
+ hash = ((hash << 5) - hash) + str.charCodeAt(i)
307
+ hash = hash & hash
308
+ }
309
+ return Math.abs(hash % 10)
310
+ }
311
+
237
312
  private compileClasses(
238
313
  graph: DependencyGraph,
239
314
  contract: MikkContract
@@ -321,15 +396,22 @@ export class LockCompiler {
321
396
  ): Record<string, MikkLock['modules'][string]> {
322
397
  const result: Record<string, MikkLock['modules'][string]> = {}
323
398
 
399
+ // Build a map for fast file lookups - O(1) instead of O(n) per module
400
+ const fileHashMap = new Map<string, string>()
401
+ for (const file of parsedFiles) {
402
+ fileHashMap.set(file.path, file.hash)
403
+ }
404
+
324
405
  for (const module of contract.declared.modules) {
325
- const moduleFiles = parsedFiles
326
- .filter(f => this.fileMatchesModule(f.path, module.paths))
327
- .map(f => f.path)
328
-
329
- const fileHashes = moduleFiles.map(f => {
330
- const parsed = parsedFiles.find(pf => pf.path === f)
331
- return parsed?.hash ?? ''
332
- })
406
+ const moduleFiles: string[] = []
407
+
408
+ for (const file of parsedFiles) {
409
+ if (this.fileMatchesModule(file.path, module.paths)) {
410
+ moduleFiles.push(file.path)
411
+ }
412
+ }
413
+
414
+ const fileHashes = moduleFiles.map(f => fileHashMap.get(f) ?? '')
333
415
 
334
416
  result[module.id] = {
335
417
  id: module.id,
@@ -93,6 +93,8 @@ export const MikkLockFunctionSchema = z.object({
93
93
  })).optional(),
94
94
  confidence: z.number().optional(),
95
95
  riskScore: z.number().optional(),
96
+ signatureHash: z.string().optional(),
97
+ tokenVector: z.array(z.number()).optional(),
96
98
  })
97
99
 
98
100
  export const MikkLockModuleSchema = z.object({
@@ -1,3 +1,4 @@
1
+ /* eslint-disable @typescript-eslint/no-explicit-any */
1
2
  /**
2
3
  * Standardized Error Handling System
3
4
  *
@@ -269,7 +270,7 @@ export function createFileNotFoundError(filePath: string): FileSystemError {
269
270
  /**
270
271
  * Create a file too large error
271
272
  */
272
- export function createFileTooLargeError(filePath: string, size: number, limit: number): FileSystemError {
273
+ export function createFileTooLargeError(filePath: string, _size: number, _limit: number): FileSystemError {
273
274
  return new FileSystemError('FILE_TOO_LARGE', filePath)
274
275
  }
275
276
 
@@ -1,5 +1,4 @@
1
- import * as path from 'node:path'
2
- import type { DependencyGraph, ModuleCluster, GraphNode } from './types.js'
1
+ import type { DependencyGraph, ModuleCluster } from './types.js'
3
2
 
4
3
  // ─── Domain keyword maps for semantic naming ────────────────────────
5
4
  // Each entry maps a human-readable domain label to keywords found in
@@ -227,7 +226,6 @@ export class ClusterDetector {
227
226
  private computeCouplingMatrix(files: string[]): Map<string, Map<string, number>> {
228
227
  const matrix = new Map<string, Map<string, number>>()
229
228
  const fileEdgeCounts = new Map<string, number>()
230
- const pairCounts = new Map<string, number>()
231
229
 
232
230
  // Count total edges per file
233
231
  for (const fileId of files) {
@@ -450,7 +448,7 @@ export class ClusterDetector {
450
448
  }
451
449
 
452
450
  const result: ModuleCluster[] = []
453
- for (const [baseDir, siblings] of byBaseDir) {
451
+ for (const siblings of byBaseDir.values()) {
454
452
  if (siblings.length <= 1) {
455
453
  result.push(...siblings)
456
454
  continue