@getmikk/core 2.0.12 → 2.0.14
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +12 -3
- package/package.json +1 -1
- package/src/analysis/index.ts +9 -0
- package/src/analysis/taint-analysis.ts +419 -0
- package/src/analysis/type-flow.ts +247 -0
- package/src/cache/incremental-cache.ts +272 -0
- package/src/cache/index.ts +1 -0
- package/src/contract/adr-manager.ts +5 -4
- package/src/contract/contract-generator.ts +31 -3
- package/src/contract/contract-writer.ts +3 -2
- package/src/contract/lock-compiler.ts +34 -0
- package/src/contract/lock-reader.ts +62 -5
- package/src/contract/schema.ts +10 -0
- package/src/index.ts +14 -1
- package/src/parser/error-recovery.ts +646 -0
- package/src/parser/index.ts +330 -74
- package/src/parser/oxc-parser.ts +3 -2
- package/src/parser/tree-sitter/parser.ts +59 -9
- package/src/parser/tree-sitter/queries.ts +27 -0
- package/src/parser/types.ts +1 -1
- package/src/security/index.ts +1 -0
- package/src/security/scanner.ts +342 -0
- package/src/utils/artifact-transaction.ts +176 -0
- package/src/utils/atomic-write.ts +131 -0
- package/src/utils/fs.ts +76 -25
- package/src/utils/language-registry.ts +95 -0
- package/src/utils/minimatch.ts +49 -6
- package/tests/adr-manager.test.ts +6 -0
- package/tests/artifact-transaction.test.ts +73 -0
- package/tests/contract.test.ts +12 -0
- package/tests/dead-code.test.ts +12 -0
- package/tests/esm-resolver.test.ts +6 -0
- package/tests/fs.test.ts +22 -1
- package/tests/fuzzy-match.test.ts +6 -0
- package/tests/go-parser.test.ts +7 -0
- package/tests/graph.test.ts +10 -0
- package/tests/hash.test.ts +6 -0
- package/tests/impact-classified.test.ts +13 -0
- package/tests/js-parser.test.ts +10 -0
- package/tests/language-registry.test.ts +64 -0
- package/tests/parse-diagnostics.test.ts +115 -0
- package/tests/parser.test.ts +36 -0
- package/tests/tree-sitter-parser.test.ts +201 -0
- package/tests/ts-parser.test.ts +6 -0
|
@@ -0,0 +1,272 @@
|
|
|
1
|
+
import * as fs from 'node:fs'
|
|
2
|
+
import * as path from 'node:path'
|
|
3
|
+
import type { ParsedFile } from '../parser/types.js'
|
|
4
|
+
|
|
5
|
+
// ---------------------------------------------------------------------------
|
|
6
|
+
// Incremental Analysis Cache — avoids re-parsing unchanged files
|
|
7
|
+
// ---------------------------------------------------------------------------
|
|
8
|
+
|
|
9
|
+
interface CacheEntry {
|
|
10
|
+
hash: string
|
|
11
|
+
parsedAt: string
|
|
12
|
+
// Store lightweight metadata instead of full ParsedFile to prevent metadata bloat
|
|
13
|
+
size: number
|
|
14
|
+
lastAccessed: number
|
|
15
|
+
}
|
|
16
|
+
|
|
17
|
+
interface CacheMetadata {
|
|
18
|
+
version: number
|
|
19
|
+
entries: Map<string, CacheEntry>
|
|
20
|
+
lastPruned: number
|
|
21
|
+
}
|
|
22
|
+
|
|
23
|
+
const CACHE_VERSION = 1
|
|
24
|
+
const MAX_CACHE_SIZE = 5000 // Max entries before LRU eviction
|
|
25
|
+
const CACHE_TTL_MS = 24 * 60 * 60 * 1000 // 24 hours
|
|
26
|
+
|
|
27
|
+
export class IncrementalCache {
|
|
28
|
+
private cacheDir: string
|
|
29
|
+
private metadata: CacheMetadata
|
|
30
|
+
private hits = 0
|
|
31
|
+
private misses = 0
|
|
32
|
+
// Mutex to prevent race conditions during concurrent access
|
|
33
|
+
private mutex = Promise.resolve()
|
|
34
|
+
|
|
35
|
+
constructor(projectRoot: string) {
|
|
36
|
+
this.cacheDir = path.join(projectRoot, '.mikk', 'cache')
|
|
37
|
+
this.metadata = {
|
|
38
|
+
version: CACHE_VERSION,
|
|
39
|
+
entries: new Map(),
|
|
40
|
+
lastPruned: Date.now(),
|
|
41
|
+
}
|
|
42
|
+
this.loadMetadata()
|
|
43
|
+
}
|
|
44
|
+
|
|
45
|
+
/**
|
|
46
|
+
* Simple mutex-like protection using a flag for basic race condition prevention
|
|
47
|
+
* Note: For production, a proper mutex library would be better
|
|
48
|
+
*/
|
|
49
|
+
private isLocked = false
|
|
50
|
+
private async withMutex<T>(fn: () => Promise<T>): Promise<T> {
|
|
51
|
+
while (this.isLocked) {
|
|
52
|
+
await new Promise(resolve => setTimeout(resolve, 1))
|
|
53
|
+
}
|
|
54
|
+
this.isLocked = true
|
|
55
|
+
try {
|
|
56
|
+
return await fn()
|
|
57
|
+
} finally {
|
|
58
|
+
this.isLocked = false
|
|
59
|
+
}
|
|
60
|
+
}
|
|
61
|
+
|
|
62
|
+
private getCacheFilePath(hash: string): string {
|
|
63
|
+
return path.join(this.cacheDir, `${hash}.json`)
|
|
64
|
+
}
|
|
65
|
+
|
|
66
|
+
private loadMetadata(): void {
|
|
67
|
+
const metaPath = path.join(this.cacheDir, 'metadata.json')
|
|
68
|
+
try {
|
|
69
|
+
if (fs.existsSync(metaPath)) {
|
|
70
|
+
const raw = fs.readFileSync(metaPath, 'utf-8')
|
|
71
|
+
const data = JSON.parse(raw)
|
|
72
|
+
this.metadata.version = data.version ?? CACHE_VERSION
|
|
73
|
+
this.metadata.lastPruned = data.lastPruned ?? Date.now()
|
|
74
|
+
// Rebuild entries map
|
|
75
|
+
this.metadata.entries = new Map(Object.entries(data.entries ?? {}))
|
|
76
|
+
}
|
|
77
|
+
} catch {
|
|
78
|
+
// Corrupted metadata — start fresh
|
|
79
|
+
this.metadata.entries = new Map()
|
|
80
|
+
}
|
|
81
|
+
}
|
|
82
|
+
|
|
83
|
+
private saveMetadata(): void {
|
|
84
|
+
try {
|
|
85
|
+
fs.mkdirSync(this.cacheDir, { recursive: true })
|
|
86
|
+
const metaPath = path.join(this.cacheDir, 'metadata.json')
|
|
87
|
+
const serializable = {
|
|
88
|
+
version: this.metadata.version,
|
|
89
|
+
lastPruned: this.metadata.lastPruned,
|
|
90
|
+
entries: Object.fromEntries(this.metadata.entries),
|
|
91
|
+
}
|
|
92
|
+
fs.writeFileSync(metaPath, JSON.stringify(serializable), 'utf-8')
|
|
93
|
+
} catch {
|
|
94
|
+
// Silently fail — cache is non-critical
|
|
95
|
+
}
|
|
96
|
+
}
|
|
97
|
+
|
|
98
|
+
/**
|
|
99
|
+
* Get cached parse result if content hash matches.
|
|
100
|
+
* Returns null if cache miss or stale.
|
|
101
|
+
*/
|
|
102
|
+
async get(filePath: string, contentHash: string): Promise<ParsedFile | null> {
|
|
103
|
+
return this.withMutex(async () => {
|
|
104
|
+
const entry = this.metadata.entries.get(filePath)
|
|
105
|
+
if (!entry) {
|
|
106
|
+
this.misses++
|
|
107
|
+
return null
|
|
108
|
+
}
|
|
109
|
+
|
|
110
|
+
if (entry.hash !== contentHash) {
|
|
111
|
+
this.misses++
|
|
112
|
+
return null
|
|
113
|
+
}
|
|
114
|
+
|
|
115
|
+
// Check TTL
|
|
116
|
+
const parsedAt = new Date(entry.parsedAt).getTime()
|
|
117
|
+
if (Date.now() - parsedAt > CACHE_TTL_MS) {
|
|
118
|
+
this.metadata.entries.delete(filePath)
|
|
119
|
+
this.misses++
|
|
120
|
+
return null
|
|
121
|
+
}
|
|
122
|
+
|
|
123
|
+
// Load from disk and reconstruct ParsedFile
|
|
124
|
+
const cacheFile = this.getCacheFilePath(contentHash)
|
|
125
|
+
try {
|
|
126
|
+
if (fs.existsSync(cacheFile)) {
|
|
127
|
+
const raw = fs.readFileSync(cacheFile, 'utf-8')
|
|
128
|
+
const parsed = JSON.parse(raw) as ParsedFile
|
|
129
|
+
this.hits++
|
|
130
|
+
// Update last accessed time
|
|
131
|
+
entry.lastAccessed = Date.now()
|
|
132
|
+
return parsed
|
|
133
|
+
}
|
|
134
|
+
} catch (err) {
|
|
135
|
+
// Corrupted cache entry
|
|
136
|
+
console.warn(`Corrupted cache entry for ${filePath}:`, err)
|
|
137
|
+
this.metadata.entries.delete(filePath)
|
|
138
|
+
}
|
|
139
|
+
|
|
140
|
+
this.misses++
|
|
141
|
+
return null
|
|
142
|
+
})
|
|
143
|
+
}
|
|
144
|
+
|
|
145
|
+
/**
|
|
146
|
+
* Store parse result in cache.
|
|
147
|
+
*/
|
|
148
|
+
async set(filePath: string, contentHash: string, parsed: ParsedFile): Promise<void> {
|
|
149
|
+
return this.withMutex(async () => {
|
|
150
|
+
// Evict if cache is full
|
|
151
|
+
if (this.metadata.entries.size >= MAX_CACHE_SIZE) {
|
|
152
|
+
this.evictLRU()
|
|
153
|
+
}
|
|
154
|
+
|
|
155
|
+
const entry: CacheEntry = {
|
|
156
|
+
hash: contentHash,
|
|
157
|
+
parsedAt: new Date().toISOString(),
|
|
158
|
+
// Store file size for lightweight tracking instead of full ParsedFile
|
|
159
|
+
size: JSON.stringify(parsed).length,
|
|
160
|
+
lastAccessed: Date.now()
|
|
161
|
+
}
|
|
162
|
+
|
|
163
|
+
this.metadata.entries.set(filePath, entry)
|
|
164
|
+
|
|
165
|
+
// Write to disk
|
|
166
|
+
try {
|
|
167
|
+
fs.mkdirSync(this.cacheDir, { recursive: true })
|
|
168
|
+
const cacheFile = this.getCacheFilePath(contentHash)
|
|
169
|
+
fs.writeFileSync(cacheFile, JSON.stringify(parsed), 'utf-8')
|
|
170
|
+
} catch {
|
|
171
|
+
// Silently fail — cache is non-critical
|
|
172
|
+
}
|
|
173
|
+
})
|
|
174
|
+
}
|
|
175
|
+
|
|
176
|
+
/**
|
|
177
|
+
* Invalidate cache for a specific file.
|
|
178
|
+
*/
|
|
179
|
+
async invalidate(filePath: string): Promise<void> {
|
|
180
|
+
return this.withMutex(async () => {
|
|
181
|
+
const entry = this.metadata.entries.get(filePath)
|
|
182
|
+
if (entry) {
|
|
183
|
+
const cacheFile = this.getCacheFilePath(entry.hash)
|
|
184
|
+
try {
|
|
185
|
+
if (fs.existsSync(cacheFile)) {
|
|
186
|
+
fs.unlinkSync(cacheFile)
|
|
187
|
+
}
|
|
188
|
+
} catch { /* ignore */ }
|
|
189
|
+
this.metadata.entries.delete(filePath)
|
|
190
|
+
}
|
|
191
|
+
})
|
|
192
|
+
}
|
|
193
|
+
|
|
194
|
+
/**
|
|
195
|
+
* Clear entire cache.
|
|
196
|
+
*/
|
|
197
|
+
async clear(): Promise<void> {
|
|
198
|
+
return this.withMutex(async () => {
|
|
199
|
+
for (const [, entry] of this.metadata.entries) {
|
|
200
|
+
const cacheFile = this.getCacheFilePath(entry.hash)
|
|
201
|
+
try {
|
|
202
|
+
if (fs.existsSync(cacheFile)) {
|
|
203
|
+
fs.unlinkSync(cacheFile)
|
|
204
|
+
}
|
|
205
|
+
} catch { /* ignore */ }
|
|
206
|
+
}
|
|
207
|
+
this.metadata.entries.clear()
|
|
208
|
+
this.saveMetadata()
|
|
209
|
+
})
|
|
210
|
+
}
|
|
211
|
+
|
|
212
|
+
/**
|
|
213
|
+
* Get cache statistics.
|
|
214
|
+
*/
|
|
215
|
+
getStats(): { hits: number; misses: number; hitRate: number; size: number } {
|
|
216
|
+
const total = this.hits + this.misses
|
|
217
|
+
return {
|
|
218
|
+
hits: this.hits,
|
|
219
|
+
misses: this.misses,
|
|
220
|
+
hitRate: total > 0 ? this.hits / total : 0,
|
|
221
|
+
size: this.metadata.entries.size,
|
|
222
|
+
}
|
|
223
|
+
}
|
|
224
|
+
|
|
225
|
+
/**
|
|
226
|
+
* Persist cache metadata to disk.
|
|
227
|
+
* Call this after batch operations.
|
|
228
|
+
*/
|
|
229
|
+
async flush(): Promise<void> {
|
|
230
|
+
return this.withMutex(async () => {
|
|
231
|
+
this.saveMetadata()
|
|
232
|
+
})
|
|
233
|
+
}
|
|
234
|
+
|
|
235
|
+
/**
|
|
236
|
+
* Evict least recently used entries when cache is full.
|
|
237
|
+
*/
|
|
238
|
+
private evictLRU(): void {
|
|
239
|
+
// Sort by parsedAt (oldest first) and remove oldest 20%
|
|
240
|
+
const sorted = [...this.metadata.entries.entries()].sort(
|
|
241
|
+
(a, b) => new Date(a[1].parsedAt).getTime() - new Date(b[1].parsedAt).getTime()
|
|
242
|
+
)
|
|
243
|
+
const toRemove = Math.ceil(sorted.length * 0.2)
|
|
244
|
+
for (let i = 0; i < toRemove; i++) {
|
|
245
|
+
const [filePath, entry] = sorted[i]
|
|
246
|
+
const cacheFile = this.getCacheFilePath(entry.hash)
|
|
247
|
+
try {
|
|
248
|
+
if (fs.existsSync(cacheFile)) {
|
|
249
|
+
fs.unlinkSync(cacheFile)
|
|
250
|
+
}
|
|
251
|
+
} catch { /* ignore */ }
|
|
252
|
+
this.metadata.entries.delete(filePath)
|
|
253
|
+
}
|
|
254
|
+
}
|
|
255
|
+
|
|
256
|
+
/**
|
|
257
|
+
* Prune expired entries from cache.
|
|
258
|
+
*/
|
|
259
|
+
async prune(): Promise<void> {
|
|
260
|
+
return this.withMutex(async () => {
|
|
261
|
+
const now = Date.now()
|
|
262
|
+
for (const [filePath, entry] of this.metadata.entries) {
|
|
263
|
+
const parsedAt = new Date(entry.parsedAt).getTime()
|
|
264
|
+
if (now - parsedAt > CACHE_TTL_MS) {
|
|
265
|
+
this.invalidate(filePath)
|
|
266
|
+
}
|
|
267
|
+
}
|
|
268
|
+
this.metadata.lastPruned = now
|
|
269
|
+
this.saveMetadata()
|
|
270
|
+
})
|
|
271
|
+
}
|
|
272
|
+
}
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
export { IncrementalCache } from './incremental-cache.js'
|
|
@@ -1,6 +1,7 @@
|
|
|
1
|
-
import * as fs from 'node:fs/promises'
|
|
2
1
|
import type { MikkContract, MikkDecision } from './schema.js'
|
|
3
2
|
import { MikkContractSchema } from './schema.js'
|
|
3
|
+
import { readJsonSafe } from '../utils/json.js'
|
|
4
|
+
import { writeFileAtomic } from '../utils/atomic-write.js'
|
|
4
5
|
|
|
5
6
|
/**
|
|
6
7
|
* AdrManager — CRUD operations on Architectural Decision Records
|
|
@@ -65,11 +66,11 @@ export class AdrManager {
|
|
|
65
66
|
// ─── Helpers ───────────────────────────────────────────────────
|
|
66
67
|
|
|
67
68
|
private async readContract(): Promise<MikkContract> {
|
|
68
|
-
const
|
|
69
|
-
return MikkContractSchema.parse(
|
|
69
|
+
const json = await readJsonSafe(this.contractPath, 'mikk.json')
|
|
70
|
+
return MikkContractSchema.parse(json)
|
|
70
71
|
}
|
|
71
72
|
|
|
72
73
|
private async writeContract(contract: MikkContract): Promise<void> {
|
|
73
|
-
await
|
|
74
|
+
await writeFileAtomic(this.contractPath, JSON.stringify(contract, null, 2), { encoding: 'utf-8' })
|
|
74
75
|
}
|
|
75
76
|
}
|
|
@@ -1,6 +1,26 @@
|
|
|
1
1
|
import type { MikkContract } from './schema.js'
|
|
2
2
|
import type { ModuleCluster } from '../graph/types.js'
|
|
3
3
|
import type { ParsedFile } from '../parser/types.js'
|
|
4
|
+
import { minimatch } from '../utils/minimatch.js'
|
|
5
|
+
|
|
6
|
+
/** Common vendor directories to exclude from contract generation */
|
|
7
|
+
const VENDOR_PATTERNS = [
|
|
8
|
+
'**/node_modules/**',
|
|
9
|
+
'**/venv/**',
|
|
10
|
+
'**/.venv/**',
|
|
11
|
+
'**/__pycache__/**',
|
|
12
|
+
'**/vendor/**',
|
|
13
|
+
'**/dist/**',
|
|
14
|
+
'**/build/**',
|
|
15
|
+
'**/.next/**',
|
|
16
|
+
'**/target/**',
|
|
17
|
+
]
|
|
18
|
+
|
|
19
|
+
/** Check if a path is from a vendor directory */
|
|
20
|
+
function isVendorPath(filePath: string): boolean {
|
|
21
|
+
const normalized = filePath.replace(/\\/g, '/')
|
|
22
|
+
return VENDOR_PATTERNS.some(pattern => minimatch(normalized, pattern))
|
|
23
|
+
}
|
|
4
24
|
|
|
5
25
|
/** Common entry point filenames across ecosystems (without extensions) */
|
|
6
26
|
const ENTRY_BASENAMES = ['index', 'main', 'app', 'server', 'mod', 'lib', '__init__', 'manage', 'program', 'startup']
|
|
@@ -39,7 +59,13 @@ export class ContractGenerator {
|
|
|
39
59
|
projectName: string,
|
|
40
60
|
packageJsonDescription?: string
|
|
41
61
|
): MikkContract {
|
|
42
|
-
|
|
62
|
+
// Filter out vendor files from clusters
|
|
63
|
+
const filteredClusters = clusters.map(cluster => ({
|
|
64
|
+
...cluster,
|
|
65
|
+
files: cluster.files.filter(f => !isVendorPath(f)),
|
|
66
|
+
})).filter(cluster => cluster.files.length > 0)
|
|
67
|
+
|
|
68
|
+
const modules = filteredClusters.map(cluster => ({
|
|
43
69
|
id: cluster.id,
|
|
44
70
|
name: cluster.suggestedName,
|
|
45
71
|
description: this.inferModuleDescription(cluster, parsedFiles),
|
|
@@ -50,14 +76,16 @@ export class ContractGenerator {
|
|
|
50
76
|
|
|
51
77
|
// Detect entry points — language-agnostic basename matching
|
|
52
78
|
const entryPoints = parsedFiles
|
|
79
|
+
.filter(f => !isVendorPath(f.path))
|
|
53
80
|
.filter(f => {
|
|
54
81
|
const basename = (f.path.split('/').pop() || '').replace(/\.[^.]+$/, '')
|
|
55
82
|
return ENTRY_BASENAMES.includes(basename)
|
|
56
83
|
})
|
|
57
84
|
.map(f => f.path)
|
|
58
85
|
|
|
59
|
-
const
|
|
60
|
-
const
|
|
86
|
+
const filteredParsedFiles = parsedFiles.filter(f => !isVendorPath(f.path))
|
|
87
|
+
const detectedLanguage = inferLanguageFromFiles(filteredParsedFiles)
|
|
88
|
+
const fallbackEntry = filteredParsedFiles[0]?.path ?? 'src/index'
|
|
61
89
|
|
|
62
90
|
return {
|
|
63
91
|
version: '1.0.0',
|
|
@@ -1,6 +1,7 @@
|
|
|
1
1
|
import * as fs from 'node:fs/promises'
|
|
2
2
|
import * as path from 'node:path'
|
|
3
3
|
import type { MikkContract } from './schema.js'
|
|
4
|
+
import { writeFileAtomic } from '../utils/atomic-write.js'
|
|
4
5
|
|
|
5
6
|
const VERSION = '@getmikk/cli@1.2.1'
|
|
6
7
|
|
|
@@ -20,7 +21,7 @@ export class ContractWriter {
|
|
|
20
21
|
async writeNew(contract: MikkContract, outputPath: string): Promise<void> {
|
|
21
22
|
await fs.mkdir(path.dirname(outputPath), { recursive: true })
|
|
22
23
|
const json = JSON.stringify(contract, null, 2)
|
|
23
|
-
await
|
|
24
|
+
await writeFileAtomic(outputPath, json, { encoding: 'utf-8' })
|
|
24
25
|
}
|
|
25
26
|
|
|
26
27
|
/** Update an existing mikk.json respecting overwrite mode */
|
|
@@ -108,6 +109,6 @@ export class ContractWriter {
|
|
|
108
109
|
})
|
|
109
110
|
|
|
110
111
|
await fs.mkdir(path.dirname(historyPath), { recursive: true })
|
|
111
|
-
await
|
|
112
|
+
await writeFileAtomic(historyPath, JSON.stringify(history, null, 2), { encoding: 'utf-8' })
|
|
112
113
|
}
|
|
113
114
|
}
|
|
@@ -6,6 +6,7 @@ import * as nodePath from 'node:path'
|
|
|
6
6
|
import { hashContent } from '../hash/file-hasher.js'
|
|
7
7
|
import { computeModuleHash, computeRootHash } from '../hash/tree-hasher.js'
|
|
8
8
|
import { minimatch } from '../utils/minimatch.js'
|
|
9
|
+
import { randomUUID } from 'node:crypto'
|
|
9
10
|
|
|
10
11
|
const VERSION = '@getmikk/cli@1.2.1'
|
|
11
12
|
|
|
@@ -153,6 +154,8 @@ export class LockCompiler {
|
|
|
153
154
|
lastSyncAt: new Date().toISOString(),
|
|
154
155
|
lockHash: '',
|
|
155
156
|
contractHash: hashContent(JSON.stringify(contract)),
|
|
157
|
+
generationId: randomUUID(),
|
|
158
|
+
writeVersion: 0,
|
|
156
159
|
},
|
|
157
160
|
modules,
|
|
158
161
|
functions,
|
|
@@ -192,6 +195,9 @@ export class LockCompiler {
|
|
|
192
195
|
for (const [id, node] of graph.nodes) {
|
|
193
196
|
if (node.type !== 'function') continue
|
|
194
197
|
|
|
198
|
+
// Skip vendor files
|
|
199
|
+
if (this.isVendorPath(node.file)) continue
|
|
200
|
+
|
|
195
201
|
const moduleId = this.findModule(node.file, contract.declared.modules)
|
|
196
202
|
const displayName = node.name ?? ''
|
|
197
203
|
const metadata = node.metadata ?? {}
|
|
@@ -235,6 +241,8 @@ export class LockCompiler {
|
|
|
235
241
|
const result: Record<string, any> = {}
|
|
236
242
|
for (const [id, node] of graph.nodes) {
|
|
237
243
|
if (node.type !== 'class') continue
|
|
244
|
+
if (this.isVendorPath(node.file)) continue
|
|
245
|
+
|
|
238
246
|
const moduleId = this.findModule(node.file, contract.declared.modules)
|
|
239
247
|
const className = node.name ?? ''
|
|
240
248
|
const metadata = node.metadata ?? {}
|
|
@@ -264,6 +272,8 @@ export class LockCompiler {
|
|
|
264
272
|
// Only include exported generics — non-exported types/interfaces are
|
|
265
273
|
// internal implementation details that add noise without value.
|
|
266
274
|
if (!(node.metadata?.isExported)) continue
|
|
275
|
+
if (this.isVendorPath(node.file)) continue
|
|
276
|
+
|
|
267
277
|
const moduleId = this.findModule(node.file, contract.declared.modules)
|
|
268
278
|
const genericName = node.name ?? ''
|
|
269
279
|
const metadata = node.metadata ?? {}
|
|
@@ -326,6 +336,7 @@ export class LockCompiler {
|
|
|
326
336
|
files: moduleFiles,
|
|
327
337
|
hash: computeModuleHash(fileHashes),
|
|
328
338
|
fragmentPath: `.mikk/fragments/${module.id}.lock`,
|
|
339
|
+
...(module.parentId ? { parentId: module.parentId } : {}),
|
|
329
340
|
}
|
|
330
341
|
}
|
|
331
342
|
|
|
@@ -341,6 +352,9 @@ export class LockCompiler {
|
|
|
341
352
|
const result: Record<string, MikkLock['files'][string]> = {}
|
|
342
353
|
|
|
343
354
|
for (const file of parsedFiles) {
|
|
355
|
+
// Skip vendor files entirely
|
|
356
|
+
if (this.isVendorPath(file.path)) continue
|
|
357
|
+
|
|
344
358
|
const moduleId = this.findModule(file.path, contract.declared.modules)
|
|
345
359
|
|
|
346
360
|
// Collect file-level imports from the parsed file info directly
|
|
@@ -397,6 +411,9 @@ export class LockCompiler {
|
|
|
397
411
|
|
|
398
412
|
/** Check if a file path matches any of the module's path patterns */
|
|
399
413
|
private fileMatchesModule(filePath: string, patterns: string[]): boolean {
|
|
414
|
+
// Skip vendor paths - never match them to any module
|
|
415
|
+
if (this.isVendorPath(filePath)) return false
|
|
416
|
+
|
|
400
417
|
const relativePath = getModuleMatchPath(filePath, this.projectRootPath)
|
|
401
418
|
const normalizedRelative = relativePath.replace(/\\/g, '/').toLowerCase()
|
|
402
419
|
const normalizedAbsolute = filePath.replace(/\\/g, '/').toLowerCase()
|
|
@@ -416,4 +433,21 @@ export class LockCompiler {
|
|
|
416
433
|
}
|
|
417
434
|
return false
|
|
418
435
|
}
|
|
436
|
+
|
|
437
|
+
/** Check if a path is from a vendor directory */
|
|
438
|
+
private isVendorPath(filePath: string): boolean {
|
|
439
|
+
const normalized = filePath.replace(/\\/g, '/')
|
|
440
|
+
const vendorPatterns = [
|
|
441
|
+
'**/node_modules/**',
|
|
442
|
+
'**/venv/**',
|
|
443
|
+
'**/.venv/**',
|
|
444
|
+
'**/__pycache__/**',
|
|
445
|
+
'**/vendor/**',
|
|
446
|
+
'**/dist/**',
|
|
447
|
+
'**/build/**',
|
|
448
|
+
'**/.next/**',
|
|
449
|
+
'**/target/**',
|
|
450
|
+
]
|
|
451
|
+
return vendorPatterns.some(pattern => minimatch(normalized, pattern))
|
|
452
|
+
}
|
|
419
453
|
}
|
|
@@ -1,7 +1,13 @@
|
|
|
1
|
-
import * as fs from 'node:fs/promises'
|
|
2
1
|
import { MikkLockSchema, type MikkLock } from './schema.js'
|
|
3
2
|
import { LockNotFoundError } from '../utils/errors.js'
|
|
4
3
|
import { readJsonSafe } from '../utils/json.js'
|
|
4
|
+
import { writeFileAtomic } from '../utils/atomic-write.js'
|
|
5
|
+
import { randomUUID } from 'node:crypto'
|
|
6
|
+
|
|
7
|
+
export interface LockWriteOptions {
|
|
8
|
+
expectedGenerationId?: string
|
|
9
|
+
expectedWriteVersion?: number
|
|
10
|
+
}
|
|
5
11
|
|
|
6
12
|
/**
|
|
7
13
|
* LockReader -- reads and validates mikk.lock.json from disk.
|
|
@@ -32,11 +38,62 @@ export class LockReader {
|
|
|
32
38
|
return result.data
|
|
33
39
|
}
|
|
34
40
|
|
|
35
|
-
|
|
36
|
-
async write(lock: MikkLock, lockPath: string): Promise<void> {
|
|
41
|
+
serialize(lock: MikkLock): string {
|
|
37
42
|
const compact = compactifyLock(lock)
|
|
38
|
-
|
|
39
|
-
|
|
43
|
+
return JSON.stringify(compact)
|
|
44
|
+
}
|
|
45
|
+
|
|
46
|
+
async prepareForWrite(lock: MikkLock, lockPath: string, options: LockWriteOptions = {}): Promise<MikkLock> {
|
|
47
|
+
let existing: MikkLock | null = null
|
|
48
|
+
try {
|
|
49
|
+
existing = await this.read(lockPath)
|
|
50
|
+
} catch (err: any) {
|
|
51
|
+
if (!(err instanceof LockNotFoundError)) {
|
|
52
|
+
throw err
|
|
53
|
+
}
|
|
54
|
+
}
|
|
55
|
+
|
|
56
|
+
const prepared: MikkLock = {
|
|
57
|
+
...lock,
|
|
58
|
+
syncState: {
|
|
59
|
+
...lock.syncState,
|
|
60
|
+
},
|
|
61
|
+
}
|
|
62
|
+
|
|
63
|
+
if (existing) {
|
|
64
|
+
const existingGeneration = existing.syncState.generationId
|
|
65
|
+
const existingWriteVersion = existing.syncState.writeVersion ?? 0
|
|
66
|
+
|
|
67
|
+
if (
|
|
68
|
+
options.expectedGenerationId &&
|
|
69
|
+
existingGeneration &&
|
|
70
|
+
options.expectedGenerationId !== existingGeneration
|
|
71
|
+
) {
|
|
72
|
+
throw new Error('Lock write rejected: generation mismatch (stale writer).')
|
|
73
|
+
}
|
|
74
|
+
|
|
75
|
+
if (
|
|
76
|
+
typeof options.expectedWriteVersion === 'number' &&
|
|
77
|
+
options.expectedWriteVersion !== existingWriteVersion
|
|
78
|
+
) {
|
|
79
|
+
throw new Error('Lock write rejected: writeVersion mismatch (stale writer).')
|
|
80
|
+
}
|
|
81
|
+
|
|
82
|
+
prepared.syncState.generationId = existingGeneration || prepared.syncState.generationId || randomUUID()
|
|
83
|
+
prepared.syncState.writeVersion = existingWriteVersion + 1
|
|
84
|
+
} else {
|
|
85
|
+
prepared.syncState.generationId = prepared.syncState.generationId || randomUUID()
|
|
86
|
+
prepared.syncState.writeVersion = prepared.syncState.writeVersion ?? 0
|
|
87
|
+
}
|
|
88
|
+
|
|
89
|
+
return prepared
|
|
90
|
+
}
|
|
91
|
+
|
|
92
|
+
/** Write lock file to disk in compact format */
|
|
93
|
+
async write(lock: MikkLock, lockPath: string, options: LockWriteOptions = {}): Promise<void> {
|
|
94
|
+
const prepared = await this.prepareForWrite(lock, lockPath, options)
|
|
95
|
+
const json = this.serialize(prepared)
|
|
96
|
+
await writeFileAtomic(lockPath, json, { encoding: 'utf-8' })
|
|
40
97
|
}
|
|
41
98
|
}
|
|
42
99
|
|
package/src/contract/schema.ts
CHANGED
|
@@ -10,6 +10,7 @@ export const MikkModuleSchema = z.object({
|
|
|
10
10
|
owners: z.array(z.string()).optional(),
|
|
11
11
|
paths: z.array(z.string()),
|
|
12
12
|
entryFunctions: z.array(z.string()).optional(),
|
|
13
|
+
parentId: z.string().optional(),
|
|
13
14
|
})
|
|
14
15
|
|
|
15
16
|
export const MikkDecisionSchema = z.object({
|
|
@@ -99,6 +100,7 @@ export const MikkLockModuleSchema = z.object({
|
|
|
99
100
|
files: z.array(z.string()),
|
|
100
101
|
hash: z.string(),
|
|
101
102
|
fragmentPath: z.string(),
|
|
103
|
+
parentId: z.string().optional(),
|
|
102
104
|
})
|
|
103
105
|
|
|
104
106
|
export const MikkLockImportSchema = z.object({
|
|
@@ -174,6 +176,14 @@ export const MikkLockSchema = z.object({
|
|
|
174
176
|
lastSyncAt: z.string(),
|
|
175
177
|
lockHash: z.string(),
|
|
176
178
|
contractHash: z.string(),
|
|
179
|
+
generationId: z.string().optional(),
|
|
180
|
+
writeVersion: z.number().int().nonnegative().optional(),
|
|
181
|
+
parseDiagnostics: z.object({
|
|
182
|
+
requestedFiles: z.number().int().nonnegative(),
|
|
183
|
+
parsedFiles: z.number().int().nonnegative(),
|
|
184
|
+
fallbackFiles: z.number().int().nonnegative(),
|
|
185
|
+
diagnostics: z.number().int().nonnegative(),
|
|
186
|
+
}).optional(),
|
|
177
187
|
}),
|
|
178
188
|
modules: z.record(MikkLockModuleSchema),
|
|
179
189
|
functions: z.record(MikkLockFunctionSchema),
|
package/src/index.ts
CHANGED
|
@@ -6,6 +6,8 @@ export * from './graph/index.js'
|
|
|
6
6
|
export * from './contract/index.js'
|
|
7
7
|
export * from './hash/index.js'
|
|
8
8
|
export * from './search/index.js'
|
|
9
|
+
export * from './cache/index.js'
|
|
10
|
+
export * from './security/index.js'
|
|
9
11
|
export * from './utils/errors.js'
|
|
10
12
|
export * from './utils/logger.js'
|
|
11
13
|
export { MikkError, ErrorHandler, ErrorBuilder, ErrorCategory, FileSystemError, ModuleLoadError, GraphError, TokenBudgetError, ValidationError, createDefaultErrorListener, createFileNotFoundError, createFileTooLargeError, createPermissionDeniedError, createModuleNotFoundError, createModuleLoadFailedError, createGraphBuildFailedError, createNodeNotFoundError, createTokenBudgetExceededError, createValidationError, isMikkError, getRootCause, toMikkError } from './error-handler.js'
|
|
@@ -13,4 +15,15 @@ export type { } from './error-handler.js'
|
|
|
13
15
|
export { discoverFiles, discoverContextFiles, readFileContent, writeFileContent, fileExists, setupMikkDirectory, readMikkIgnore, parseMikkIgnore, detectProjectLanguage, getDiscoveryPatterns, generateMikkIgnore, updateGitIgnore, cleanupGitIgnore } from './utils/fs.js'
|
|
14
16
|
export type { ContextFile, ContextFileType, ProjectLanguage } from './utils/fs.js'
|
|
15
17
|
export { minimatch } from './utils/minimatch.js'
|
|
16
|
-
export { scoreFunctions, findFuzzyMatches, levenshtein, splitCamelCase, extractKeywords } from './utils/fuzzy-match.js'
|
|
18
|
+
export { scoreFunctions, findFuzzyMatches, levenshtein, splitCamelCase, extractKeywords } from './utils/fuzzy-match.js'
|
|
19
|
+
export { writeFileAtomic, writeJsonAtomic } from './utils/atomic-write.js'
|
|
20
|
+
export type { AtomicWriteOptions } from './utils/atomic-write.js'
|
|
21
|
+
export {
|
|
22
|
+
runArtifactWriteTransaction,
|
|
23
|
+
recoverArtifactWriteTransactions,
|
|
24
|
+
} from './utils/artifact-transaction.js'
|
|
25
|
+
export type {
|
|
26
|
+
ArtifactWriteInput,
|
|
27
|
+
ArtifactTransactionOptions,
|
|
28
|
+
RecoverySummary,
|
|
29
|
+
} from './utils/artifact-transaction.js'
|