@bfra.me/workspace-analyzer 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +402 -0
- package/lib/chunk-4LSFAAZW.js +1 -0
- package/lib/chunk-JDF7DQ4V.js +27 -0
- package/lib/chunk-WOJ4C7N7.js +7122 -0
- package/lib/cli.d.ts +1 -0
- package/lib/cli.js +318 -0
- package/lib/index.d.ts +3701 -0
- package/lib/index.js +1262 -0
- package/lib/types/index.d.ts +146 -0
- package/lib/types/index.js +28 -0
- package/package.json +89 -0
- package/src/analyzers/analyzer.ts +201 -0
- package/src/analyzers/architectural-analyzer.ts +304 -0
- package/src/analyzers/build-config-analyzer.ts +334 -0
- package/src/analyzers/circular-import-analyzer.ts +463 -0
- package/src/analyzers/config-consistency-analyzer.ts +335 -0
- package/src/analyzers/dead-code-analyzer.ts +565 -0
- package/src/analyzers/duplicate-code-analyzer.ts +626 -0
- package/src/analyzers/duplicate-dependency-analyzer.ts +381 -0
- package/src/analyzers/eslint-config-analyzer.ts +281 -0
- package/src/analyzers/exports-field-analyzer.ts +324 -0
- package/src/analyzers/index.ts +388 -0
- package/src/analyzers/large-dependency-analyzer.ts +535 -0
- package/src/analyzers/package-json-analyzer.ts +349 -0
- package/src/analyzers/peer-dependency-analyzer.ts +275 -0
- package/src/analyzers/tree-shaking-analyzer.ts +623 -0
- package/src/analyzers/tsconfig-analyzer.ts +382 -0
- package/src/analyzers/unused-dependency-analyzer.ts +356 -0
- package/src/analyzers/version-alignment-analyzer.ts +308 -0
- package/src/api/analyze-workspace.ts +245 -0
- package/src/api/index.ts +11 -0
- package/src/cache/cache-manager.ts +495 -0
- package/src/cache/cache-schema.ts +247 -0
- package/src/cache/change-detector.ts +169 -0
- package/src/cache/file-hasher.ts +65 -0
- package/src/cache/index.ts +47 -0
- package/src/cli/commands/analyze.ts +240 -0
- package/src/cli/commands/index.ts +5 -0
- package/src/cli/index.ts +61 -0
- package/src/cli/types.ts +65 -0
- package/src/cli/ui.ts +213 -0
- package/src/cli.ts +9 -0
- package/src/config/defaults.ts +183 -0
- package/src/config/index.ts +81 -0
- package/src/config/loader.ts +270 -0
- package/src/config/merger.ts +229 -0
- package/src/config/schema.ts +263 -0
- package/src/core/incremental-analyzer.ts +462 -0
- package/src/core/index.ts +34 -0
- package/src/core/orchestrator.ts +416 -0
- package/src/graph/dependency-graph.ts +408 -0
- package/src/graph/index.ts +19 -0
- package/src/index.ts +417 -0
- package/src/parser/config-parser.ts +491 -0
- package/src/parser/import-extractor.ts +340 -0
- package/src/parser/index.ts +54 -0
- package/src/parser/typescript-parser.ts +95 -0
- package/src/performance/bundle-estimator.ts +444 -0
- package/src/performance/index.ts +27 -0
- package/src/reporters/console-reporter.ts +355 -0
- package/src/reporters/index.ts +49 -0
- package/src/reporters/json-reporter.ts +273 -0
- package/src/reporters/markdown-reporter.ts +349 -0
- package/src/reporters/reporter.ts +399 -0
- package/src/rules/builtin-rules.ts +709 -0
- package/src/rules/index.ts +52 -0
- package/src/rules/rule-engine.ts +409 -0
- package/src/scanner/index.ts +18 -0
- package/src/scanner/workspace-scanner.ts +403 -0
- package/src/types/index.ts +176 -0
- package/src/types/result.ts +19 -0
- package/src/utils/index.ts +7 -0
- package/src/utils/pattern-matcher.ts +48 -0
|
@@ -0,0 +1,495 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Cache manager for workspace analysis results.
|
|
3
|
+
*
|
|
4
|
+
* Provides file-based storage for analysis caches with incremental updates,
|
|
5
|
+
* compression support, and automatic invalidation.
|
|
6
|
+
*/
|
|
7
|
+
|
|
8
|
+
import type {Issue} from '../types/index'
import type {Result} from '../types/result'
import type {
  AnalysisCache,
  CachedFileState,
  CacheOptions,
  CacheStatistics,
  CacheValidationResult,
} from './cache-schema'
import {Buffer} from 'node:buffer'
import {mkdir, readFile, rm, stat, writeFile} from 'node:fs/promises'
import {join, sep} from 'node:path'
import {promisify} from 'node:util'
import {gunzip, gzip} from 'node:zlib'
import {err, ok} from '../types/result'
import {
  CACHE_SCHEMA_VERSION,
  CONFIG_FILE_PATTERNS,
  createEmptyCache,
  createFileAnalysisEntry,
  createPackageAnalysisEntry,
  DEFAULT_CACHE_OPTIONS,
} from './cache-schema'
import {createAnalysisChangeDetector} from './change-detector'
import {createWorkspaceHasher} from './file-hasher'
|
|
33
|
+
|
|
34
|
+
const gzipAsync = promisify(gzip)
|
|
35
|
+
const gunzipAsync = promisify(gunzip)
|
|
36
|
+
|
|
37
|
+
/**
|
|
38
|
+
* Cache file name.
|
|
39
|
+
*/
|
|
40
|
+
const CACHE_FILE_NAME = 'analysis-cache.json'
|
|
41
|
+
const COMPRESSED_CACHE_FILE_NAME = 'analysis-cache.json.gz'
|
|
42
|
+
|
|
43
|
+
/**
|
|
44
|
+
* Error codes for cache operations.
|
|
45
|
+
*/
|
|
46
|
+
export type CacheErrorCode =
|
|
47
|
+
| 'CACHE_NOT_FOUND'
|
|
48
|
+
| 'CACHE_CORRUPTED'
|
|
49
|
+
| 'CACHE_EXPIRED'
|
|
50
|
+
| 'CACHE_VERSION_MISMATCH'
|
|
51
|
+
| 'CACHE_WRITE_FAILED'
|
|
52
|
+
| 'CACHE_READ_FAILED'
|
|
53
|
+
| 'CACHE_INVALID'
|
|
54
|
+
|
|
55
|
+
/**
|
|
56
|
+
* Error for cache operations.
|
|
57
|
+
*/
|
|
58
|
+
export interface CacheError {
|
|
59
|
+
readonly code: CacheErrorCode
|
|
60
|
+
readonly message: string
|
|
61
|
+
readonly cause?: Error
|
|
62
|
+
}
|
|
63
|
+
|
|
64
|
+
/**
|
|
65
|
+
* Cache manager for workspace analysis.
|
|
66
|
+
*/
|
|
67
|
+
export interface CacheManager {
|
|
68
|
+
/** Load cache from disk */
|
|
69
|
+
readonly load: () => Promise<Result<AnalysisCache, CacheError>>
|
|
70
|
+
/** Save cache to disk */
|
|
71
|
+
readonly save: (cache: AnalysisCache) => Promise<Result<void, CacheError>>
|
|
72
|
+
/** Validate cache against current workspace state */
|
|
73
|
+
readonly validate: (
|
|
74
|
+
cache: AnalysisCache,
|
|
75
|
+
currentFiles: readonly string[],
|
|
76
|
+
) => Promise<CacheValidationResult>
|
|
77
|
+
/** Update cache with new file analysis */
|
|
78
|
+
readonly updateFile: (
|
|
79
|
+
cache: AnalysisCache,
|
|
80
|
+
path: string,
|
|
81
|
+
issues: readonly Issue[],
|
|
82
|
+
analyzersRun: readonly string[],
|
|
83
|
+
) => Promise<Result<AnalysisCache, CacheError>>
|
|
84
|
+
/** Update cache with new package analysis */
|
|
85
|
+
readonly updatePackage: (
|
|
86
|
+
cache: AnalysisCache,
|
|
87
|
+
packageName: string,
|
|
88
|
+
packagePath: string,
|
|
89
|
+
issues: readonly Issue[],
|
|
90
|
+
analyzersRun: readonly string[],
|
|
91
|
+
) => Promise<Result<AnalysisCache, CacheError>>
|
|
92
|
+
/** Clear all cache files */
|
|
93
|
+
readonly clear: () => Promise<Result<void, CacheError>>
|
|
94
|
+
/** Get cache statistics */
|
|
95
|
+
readonly getStatistics: (cache: AnalysisCache) => CacheStatistics
|
|
96
|
+
/** Check if cache is valid without full validation */
|
|
97
|
+
readonly quickValidate: (cache: AnalysisCache, configHash: string) => boolean
|
|
98
|
+
}
|
|
99
|
+
|
|
100
|
+
/**
|
|
101
|
+
* Options for creating a cache manager.
|
|
102
|
+
*/
|
|
103
|
+
export interface CacheManagerOptions extends CacheOptions {
|
|
104
|
+
/** Workspace root path */
|
|
105
|
+
readonly workspacePath: string
|
|
106
|
+
/** Current analyzer version */
|
|
107
|
+
readonly analyzerVersion: string
|
|
108
|
+
}
|
|
109
|
+
|
|
110
|
+
/**
|
|
111
|
+
* Creates a cache manager for workspace analysis.
|
|
112
|
+
*
|
|
113
|
+
* @param options - Cache manager configuration
|
|
114
|
+
* @returns A CacheManager instance
|
|
115
|
+
*
|
|
116
|
+
* @example
|
|
117
|
+
* ```ts
|
|
118
|
+
* const manager = createCacheManager({
|
|
119
|
+
* workspacePath: '/path/to/workspace',
|
|
120
|
+
* analyzerVersion: '1.0.0',
|
|
121
|
+
* })
|
|
122
|
+
*
|
|
123
|
+
* const result = await manager.load()
|
|
124
|
+
* if (isOk(result)) {
|
|
125
|
+
* const validation = await manager.validate(result.data, currentFiles)
|
|
126
|
+
* if (!validation.isValid) {
|
|
127
|
+
* // Re-analyze changed files
|
|
128
|
+
* }
|
|
129
|
+
* }
|
|
130
|
+
* ```
|
|
131
|
+
*/
|
|
132
|
+
export function createCacheManager(options: CacheManagerOptions): CacheManager {
|
|
133
|
+
const {
|
|
134
|
+
workspacePath,
|
|
135
|
+
analyzerVersion,
|
|
136
|
+
cacheDir = DEFAULT_CACHE_OPTIONS.cacheDir,
|
|
137
|
+
maxAge = DEFAULT_CACHE_OPTIONS.maxAge,
|
|
138
|
+
compress = DEFAULT_CACHE_OPTIONS.compress,
|
|
139
|
+
hashAlgorithm = DEFAULT_CACHE_OPTIONS.hashAlgorithm,
|
|
140
|
+
} = options
|
|
141
|
+
|
|
142
|
+
const cachePath = join(workspacePath, cacheDir)
|
|
143
|
+
const cacheFile = join(cachePath, compress ? COMPRESSED_CACHE_FILE_NAME : CACHE_FILE_NAME)
|
|
144
|
+
const hasher = createWorkspaceHasher({algorithm: hashAlgorithm})
|
|
145
|
+
const changeDetector = createAnalysisChangeDetector({algorithm: hashAlgorithm})
|
|
146
|
+
|
|
147
|
+
async function ensureCacheDir(): Promise<void> {
|
|
148
|
+
await mkdir(cachePath, {recursive: true})
|
|
149
|
+
}
|
|
150
|
+
|
|
151
|
+
async function readCacheFile(): Promise<Result<string, CacheError>> {
|
|
152
|
+
try {
|
|
153
|
+
const content = await readFile(cacheFile)
|
|
154
|
+
if (compress) {
|
|
155
|
+
const decompressed = await gunzipAsync(content)
|
|
156
|
+
return ok(decompressed.toString('utf-8'))
|
|
157
|
+
}
|
|
158
|
+
return ok(content.toString('utf-8'))
|
|
159
|
+
} catch (error) {
|
|
160
|
+
if ((error as NodeJS.ErrnoException).code === 'ENOENT') {
|
|
161
|
+
return err({code: 'CACHE_NOT_FOUND', message: 'Cache file not found'})
|
|
162
|
+
}
|
|
163
|
+
return err({
|
|
164
|
+
code: 'CACHE_READ_FAILED',
|
|
165
|
+
message: `Failed to read cache file: ${(error as Error).message}`,
|
|
166
|
+
cause: error as Error,
|
|
167
|
+
})
|
|
168
|
+
}
|
|
169
|
+
}
|
|
170
|
+
|
|
171
|
+
async function writeCacheFile(content: string): Promise<Result<void, CacheError>> {
|
|
172
|
+
try {
|
|
173
|
+
await ensureCacheDir()
|
|
174
|
+
const buffer = Buffer.from(content, 'utf-8')
|
|
175
|
+
const data = compress ? await gzipAsync(buffer) : buffer
|
|
176
|
+
await writeFile(cacheFile, data)
|
|
177
|
+
return ok(undefined)
|
|
178
|
+
} catch (error) {
|
|
179
|
+
return err({
|
|
180
|
+
code: 'CACHE_WRITE_FAILED',
|
|
181
|
+
message: `Failed to write cache file: ${(error as Error).message}`,
|
|
182
|
+
cause: error as Error,
|
|
183
|
+
})
|
|
184
|
+
}
|
|
185
|
+
}
|
|
186
|
+
|
|
187
|
+
async function getFileState(path: string): Promise<CachedFileState> {
|
|
188
|
+
const stats = await stat(path)
|
|
189
|
+
const contentHash = await hasher.hash(path)
|
|
190
|
+
return {
|
|
191
|
+
path,
|
|
192
|
+
contentHash,
|
|
193
|
+
modifiedAt: stats.mtime.toISOString(),
|
|
194
|
+
size: stats.size,
|
|
195
|
+
}
|
|
196
|
+
}
|
|
197
|
+
|
|
198
|
+
return {
|
|
199
|
+
async load(): Promise<Result<AnalysisCache, CacheError>> {
|
|
200
|
+
const readResult = await readCacheFile()
|
|
201
|
+
if (readResult.success === false) {
|
|
202
|
+
return readResult
|
|
203
|
+
}
|
|
204
|
+
|
|
205
|
+
try {
|
|
206
|
+
const cache = JSON.parse(readResult.data) as AnalysisCache
|
|
207
|
+
const cacheVersion = cache.metadata.version
|
|
208
|
+
|
|
209
|
+
// Version check
|
|
210
|
+
if (cacheVersion !== CACHE_SCHEMA_VERSION) {
|
|
211
|
+
return err({
|
|
212
|
+
code: 'CACHE_VERSION_MISMATCH',
|
|
213
|
+
message: `Cache version ${String(cacheVersion)} does not match current version ${String(CACHE_SCHEMA_VERSION)}`,
|
|
214
|
+
})
|
|
215
|
+
}
|
|
216
|
+
|
|
217
|
+
// Age check
|
|
218
|
+
const cacheAge = Date.now() - new Date(cache.metadata.createdAt).getTime()
|
|
219
|
+
if (cacheAge > maxAge) {
|
|
220
|
+
return err({
|
|
221
|
+
code: 'CACHE_EXPIRED',
|
|
222
|
+
message: `Cache is ${Math.round(cacheAge / 1000 / 60 / 60 / 24)} days old (max: ${Math.round(maxAge / 1000 / 60 / 60 / 24)} days)`,
|
|
223
|
+
})
|
|
224
|
+
}
|
|
225
|
+
|
|
226
|
+
return ok(cache)
|
|
227
|
+
} catch (error) {
|
|
228
|
+
return err({
|
|
229
|
+
code: 'CACHE_CORRUPTED',
|
|
230
|
+
message: `Failed to parse cache file: ${(error as Error).message}`,
|
|
231
|
+
cause: error as Error,
|
|
232
|
+
})
|
|
233
|
+
}
|
|
234
|
+
},
|
|
235
|
+
|
|
236
|
+
async save(cache: AnalysisCache): Promise<Result<void, CacheError>> {
|
|
237
|
+
const updatedCache: AnalysisCache = {
|
|
238
|
+
...cache,
|
|
239
|
+
metadata: {
|
|
240
|
+
...cache.metadata,
|
|
241
|
+
updatedAt: new Date().toISOString(),
|
|
242
|
+
},
|
|
243
|
+
}
|
|
244
|
+
const content = JSON.stringify(updatedCache, null, compress ? 0 : 2)
|
|
245
|
+
return writeCacheFile(content)
|
|
246
|
+
},
|
|
247
|
+
|
|
248
|
+
async validate(
|
|
249
|
+
cache: AnalysisCache,
|
|
250
|
+
currentFiles: readonly string[],
|
|
251
|
+
): Promise<CacheValidationResult> {
|
|
252
|
+
// Check for config file changes first (full invalidation)
|
|
253
|
+
const configChanged = await changeDetector.hasConfigChanged(cache.configFiles)
|
|
254
|
+
if (configChanged) {
|
|
255
|
+
return {
|
|
256
|
+
isValid: false,
|
|
257
|
+
changedFiles: [],
|
|
258
|
+
newFiles: [],
|
|
259
|
+
deletedFiles: [],
|
|
260
|
+
invalidatedPackages: [],
|
|
261
|
+
changedConfigFiles: cache.configFiles.map(f => f.path),
|
|
262
|
+
invalidationReason: 'Configuration files changed',
|
|
263
|
+
}
|
|
264
|
+
}
|
|
265
|
+
|
|
266
|
+
// Validate individual files
|
|
267
|
+
const cachedFileStates = Object.values(cache.files).map(f => f.fileState)
|
|
268
|
+
const validation = await changeDetector.validateCache(cachedFileStates, currentFiles)
|
|
269
|
+
|
|
270
|
+
// Determine which packages are affected by changed files
|
|
271
|
+
const affectedPackages = new Set<string>()
|
|
272
|
+
const allChangedPaths = [
|
|
273
|
+
...validation.changedFiles,
|
|
274
|
+
...validation.newFiles,
|
|
275
|
+
...validation.deletedFiles,
|
|
276
|
+
]
|
|
277
|
+
|
|
278
|
+
for (const changedPath of allChangedPaths) {
|
|
279
|
+
// Find package containing this file
|
|
280
|
+
for (const [pkgName, pkg] of Object.entries(cache.packages)) {
|
|
281
|
+
const pkgFullPath = join(workspacePath, pkg.packagePath)
|
|
282
|
+
if (changedPath.startsWith(pkgFullPath)) {
|
|
283
|
+
affectedPackages.add(pkgName)
|
|
284
|
+
break
|
|
285
|
+
}
|
|
286
|
+
}
|
|
287
|
+
}
|
|
288
|
+
|
|
289
|
+
return {
|
|
290
|
+
...validation,
|
|
291
|
+
invalidatedPackages: Array.from(affectedPackages),
|
|
292
|
+
}
|
|
293
|
+
},
|
|
294
|
+
|
|
295
|
+
async updateFile(
|
|
296
|
+
cache: AnalysisCache,
|
|
297
|
+
path: string,
|
|
298
|
+
issues: readonly Issue[],
|
|
299
|
+
analyzersRun: readonly string[],
|
|
300
|
+
): Promise<Result<AnalysisCache, CacheError>> {
|
|
301
|
+
try {
|
|
302
|
+
const fileState = await getFileState(path)
|
|
303
|
+
const entry = createFileAnalysisEntry(
|
|
304
|
+
path,
|
|
305
|
+
fileState.contentHash,
|
|
306
|
+
new Date(fileState.modifiedAt),
|
|
307
|
+
fileState.size,
|
|
308
|
+
issues,
|
|
309
|
+
analyzersRun,
|
|
310
|
+
)
|
|
311
|
+
|
|
312
|
+
return ok({
|
|
313
|
+
...cache,
|
|
314
|
+
files: {
|
|
315
|
+
...cache.files,
|
|
316
|
+
[path]: entry,
|
|
317
|
+
},
|
|
318
|
+
metadata: {
|
|
319
|
+
...cache.metadata,
|
|
320
|
+
updatedAt: new Date().toISOString(),
|
|
321
|
+
},
|
|
322
|
+
})
|
|
323
|
+
} catch (error) {
|
|
324
|
+
return err({
|
|
325
|
+
code: 'CACHE_WRITE_FAILED',
|
|
326
|
+
message: `Failed to update cache for file ${path}: ${(error as Error).message}`,
|
|
327
|
+
cause: error as Error,
|
|
328
|
+
})
|
|
329
|
+
}
|
|
330
|
+
},
|
|
331
|
+
|
|
332
|
+
async updatePackage(
|
|
333
|
+
cache: AnalysisCache,
|
|
334
|
+
packageName: string,
|
|
335
|
+
packagePath: string,
|
|
336
|
+
issues: readonly Issue[],
|
|
337
|
+
analyzersRun: readonly string[],
|
|
338
|
+
): Promise<Result<AnalysisCache, CacheError>> {
|
|
339
|
+
try {
|
|
340
|
+
const packageJsonPath = join(workspacePath, packagePath, 'package.json')
|
|
341
|
+
const packageJsonHash = await hasher.hash(packageJsonPath)
|
|
342
|
+
const entry = createPackageAnalysisEntry(
|
|
343
|
+
packageName,
|
|
344
|
+
packagePath,
|
|
345
|
+
packageJsonHash,
|
|
346
|
+
issues,
|
|
347
|
+
analyzersRun,
|
|
348
|
+
)
|
|
349
|
+
|
|
350
|
+
return ok({
|
|
351
|
+
...cache,
|
|
352
|
+
packages: {
|
|
353
|
+
...cache.packages,
|
|
354
|
+
[packageName]: entry,
|
|
355
|
+
},
|
|
356
|
+
metadata: {
|
|
357
|
+
...cache.metadata,
|
|
358
|
+
updatedAt: new Date().toISOString(),
|
|
359
|
+
},
|
|
360
|
+
})
|
|
361
|
+
} catch (error) {
|
|
362
|
+
return err({
|
|
363
|
+
code: 'CACHE_WRITE_FAILED',
|
|
364
|
+
message: `Failed to update cache for package ${packageName}: ${(error as Error).message}`,
|
|
365
|
+
cause: error as Error,
|
|
366
|
+
})
|
|
367
|
+
}
|
|
368
|
+
},
|
|
369
|
+
|
|
370
|
+
async clear(): Promise<Result<void, CacheError>> {
|
|
371
|
+
try {
|
|
372
|
+
await rm(cachePath, {recursive: true, force: true})
|
|
373
|
+
return ok(undefined)
|
|
374
|
+
} catch (error) {
|
|
375
|
+
return err({
|
|
376
|
+
code: 'CACHE_WRITE_FAILED',
|
|
377
|
+
message: `Failed to clear cache: ${(error as Error).message}`,
|
|
378
|
+
cause: error as Error,
|
|
379
|
+
})
|
|
380
|
+
}
|
|
381
|
+
},
|
|
382
|
+
|
|
383
|
+
getStatistics(cache: AnalysisCache): CacheStatistics {
|
|
384
|
+
const cachedFiles = Object.keys(cache.files).length
|
|
385
|
+
const cachedPackages = Object.keys(cache.packages).length
|
|
386
|
+
const ageMs = Date.now() - new Date(cache.metadata.createdAt).getTime()
|
|
387
|
+
|
|
388
|
+
// Calculate total cache size (approximate)
|
|
389
|
+
const fileIssueCount = Object.values(cache.files).reduce((sum, f) => sum + f.issues.length, 0)
|
|
390
|
+
const packageIssueCount = Object.values(cache.packages).reduce(
|
|
391
|
+
(sum, p) => sum + p.issues.length,
|
|
392
|
+
0,
|
|
393
|
+
)
|
|
394
|
+
const totalSizeBytes = (fileIssueCount + packageIssueCount) * 200 + cachedFiles * 100
|
|
395
|
+
|
|
396
|
+
return {
|
|
397
|
+
cachedFiles,
|
|
398
|
+
cachedPackages,
|
|
399
|
+
totalSizeBytes,
|
|
400
|
+
ageMs,
|
|
401
|
+
hitCount: 0, // Updated during analysis
|
|
402
|
+
missCount: 0, // Updated during analysis
|
|
403
|
+
hitRate: 0, // Calculated: hitCount / (hitCount + missCount)
|
|
404
|
+
}
|
|
405
|
+
},
|
|
406
|
+
|
|
407
|
+
quickValidate(cache: AnalysisCache, configHash: string): boolean {
|
|
408
|
+
// Quick checks without file system access
|
|
409
|
+
if (cache.metadata.version !== CACHE_SCHEMA_VERSION) return false
|
|
410
|
+
if (cache.metadata.workspacePath !== workspacePath) return false
|
|
411
|
+
if (cache.metadata.analyzerVersion !== analyzerVersion) return false
|
|
412
|
+
if (cache.metadata.configHash !== configHash) return false
|
|
413
|
+
|
|
414
|
+
const cacheAge = Date.now() - new Date(cache.metadata.createdAt).getTime()
|
|
415
|
+
if (cacheAge > maxAge) return false
|
|
416
|
+
|
|
417
|
+
return true
|
|
418
|
+
},
|
|
419
|
+
}
|
|
420
|
+
}
|
|
421
|
+
|
|
422
|
+
/**
|
|
423
|
+
* Creates an empty cache for a workspace.
|
|
424
|
+
*
|
|
425
|
+
* @param workspacePath - Workspace root path
|
|
426
|
+
* @param configHash - Hash of the current configuration
|
|
427
|
+
* @param analyzerVersion - Current analyzer version
|
|
428
|
+
* @returns A new empty AnalysisCache
|
|
429
|
+
*/
|
|
430
|
+
export function initializeCache(
|
|
431
|
+
workspacePath: string,
|
|
432
|
+
configHash: string,
|
|
433
|
+
analyzerVersion: string,
|
|
434
|
+
): AnalysisCache {
|
|
435
|
+
return createEmptyCache(workspacePath, configHash, analyzerVersion)
|
|
436
|
+
}
|
|
437
|
+
|
|
438
|
+
/**
|
|
439
|
+
* Collects configuration file states for cache invalidation tracking.
|
|
440
|
+
*
|
|
441
|
+
* @param workspacePath - Workspace root path
|
|
442
|
+
* @param packagePaths - Package directory paths (relative to workspace)
|
|
443
|
+
* @returns Array of configuration file states
|
|
444
|
+
*/
|
|
445
|
+
export async function collectConfigFileStates(
|
|
446
|
+
workspacePath: string,
|
|
447
|
+
packagePaths: readonly string[],
|
|
448
|
+
): Promise<CachedFileState[]> {
|
|
449
|
+
const hasher = createWorkspaceHasher()
|
|
450
|
+
const configFiles: CachedFileState[] = []
|
|
451
|
+
|
|
452
|
+
// Collect workspace-level config files
|
|
453
|
+
for (const pattern of CONFIG_FILE_PATTERNS) {
|
|
454
|
+
// Simple pattern matching (no glob wildcards for now)
|
|
455
|
+
if (!pattern.includes('*')) {
|
|
456
|
+
const configPath = join(workspacePath, pattern)
|
|
457
|
+
try {
|
|
458
|
+
const stats = await stat(configPath)
|
|
459
|
+
const contentHash = await hasher.hash(configPath)
|
|
460
|
+
configFiles.push({
|
|
461
|
+
path: configPath,
|
|
462
|
+
contentHash,
|
|
463
|
+
modifiedAt: stats.mtime.toISOString(),
|
|
464
|
+
size: stats.size,
|
|
465
|
+
})
|
|
466
|
+
} catch {
|
|
467
|
+
// Config file doesn't exist, skip
|
|
468
|
+
}
|
|
469
|
+
}
|
|
470
|
+
}
|
|
471
|
+
|
|
472
|
+
// Collect package-level config files
|
|
473
|
+
for (const pkgPath of packagePaths) {
|
|
474
|
+
const fullPkgPath = join(workspacePath, pkgPath)
|
|
475
|
+
for (const pattern of CONFIG_FILE_PATTERNS) {
|
|
476
|
+
if (!pattern.includes('*')) {
|
|
477
|
+
const configPath = join(fullPkgPath, pattern)
|
|
478
|
+
try {
|
|
479
|
+
const stats = await stat(configPath)
|
|
480
|
+
const contentHash = await hasher.hash(configPath)
|
|
481
|
+
configFiles.push({
|
|
482
|
+
path: configPath,
|
|
483
|
+
contentHash,
|
|
484
|
+
modifiedAt: stats.mtime.toISOString(),
|
|
485
|
+
size: stats.size,
|
|
486
|
+
})
|
|
487
|
+
} catch {
|
|
488
|
+
// Config file doesn't exist, skip
|
|
489
|
+
}
|
|
490
|
+
}
|
|
491
|
+
}
|
|
492
|
+
}
|
|
493
|
+
|
|
494
|
+
return configFiles
|
|
495
|
+
}
|