@bfra.me/workspace-analyzer 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +402 -0
- package/lib/chunk-4LSFAAZW.js +1 -0
- package/lib/chunk-JDF7DQ4V.js +27 -0
- package/lib/chunk-WOJ4C7N7.js +7122 -0
- package/lib/cli.d.ts +1 -0
- package/lib/cli.js +318 -0
- package/lib/index.d.ts +3701 -0
- package/lib/index.js +1262 -0
- package/lib/types/index.d.ts +146 -0
- package/lib/types/index.js +28 -0
- package/package.json +89 -0
- package/src/analyzers/analyzer.ts +201 -0
- package/src/analyzers/architectural-analyzer.ts +304 -0
- package/src/analyzers/build-config-analyzer.ts +334 -0
- package/src/analyzers/circular-import-analyzer.ts +463 -0
- package/src/analyzers/config-consistency-analyzer.ts +335 -0
- package/src/analyzers/dead-code-analyzer.ts +565 -0
- package/src/analyzers/duplicate-code-analyzer.ts +626 -0
- package/src/analyzers/duplicate-dependency-analyzer.ts +381 -0
- package/src/analyzers/eslint-config-analyzer.ts +281 -0
- package/src/analyzers/exports-field-analyzer.ts +324 -0
- package/src/analyzers/index.ts +388 -0
- package/src/analyzers/large-dependency-analyzer.ts +535 -0
- package/src/analyzers/package-json-analyzer.ts +349 -0
- package/src/analyzers/peer-dependency-analyzer.ts +275 -0
- package/src/analyzers/tree-shaking-analyzer.ts +623 -0
- package/src/analyzers/tsconfig-analyzer.ts +382 -0
- package/src/analyzers/unused-dependency-analyzer.ts +356 -0
- package/src/analyzers/version-alignment-analyzer.ts +308 -0
- package/src/api/analyze-workspace.ts +245 -0
- package/src/api/index.ts +11 -0
- package/src/cache/cache-manager.ts +495 -0
- package/src/cache/cache-schema.ts +247 -0
- package/src/cache/change-detector.ts +169 -0
- package/src/cache/file-hasher.ts +65 -0
- package/src/cache/index.ts +47 -0
- package/src/cli/commands/analyze.ts +240 -0
- package/src/cli/commands/index.ts +5 -0
- package/src/cli/index.ts +61 -0
- package/src/cli/types.ts +65 -0
- package/src/cli/ui.ts +213 -0
- package/src/cli.ts +9 -0
- package/src/config/defaults.ts +183 -0
- package/src/config/index.ts +81 -0
- package/src/config/loader.ts +270 -0
- package/src/config/merger.ts +229 -0
- package/src/config/schema.ts +263 -0
- package/src/core/incremental-analyzer.ts +462 -0
- package/src/core/index.ts +34 -0
- package/src/core/orchestrator.ts +416 -0
- package/src/graph/dependency-graph.ts +408 -0
- package/src/graph/index.ts +19 -0
- package/src/index.ts +417 -0
- package/src/parser/config-parser.ts +491 -0
- package/src/parser/import-extractor.ts +340 -0
- package/src/parser/index.ts +54 -0
- package/src/parser/typescript-parser.ts +95 -0
- package/src/performance/bundle-estimator.ts +444 -0
- package/src/performance/index.ts +27 -0
- package/src/reporters/console-reporter.ts +355 -0
- package/src/reporters/index.ts +49 -0
- package/src/reporters/json-reporter.ts +273 -0
- package/src/reporters/markdown-reporter.ts +349 -0
- package/src/reporters/reporter.ts +399 -0
- package/src/rules/builtin-rules.ts +709 -0
- package/src/rules/index.ts +52 -0
- package/src/rules/rule-engine.ts +409 -0
- package/src/scanner/index.ts +18 -0
- package/src/scanner/workspace-scanner.ts +403 -0
- package/src/types/index.ts +176 -0
- package/src/types/result.ts +19 -0
- package/src/utils/index.ts +7 -0
- package/src/utils/pattern-matcher.ts +48 -0
|
@@ -0,0 +1,462 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Incremental analyzer orchestrator for workspace analysis.
|
|
3
|
+
*
|
|
4
|
+
* Provides efficient incremental analysis by leveraging file change detection,
|
|
5
|
+
* cache management, and parallel execution for large codebase performance.
|
|
6
|
+
*/
|
|
7
|
+
|
|
8
|
+
import type {
|
|
9
|
+
Analyzer,
|
|
10
|
+
AnalyzerError,
|
|
11
|
+
AnalysisContext as BaseAnalysisContext,
|
|
12
|
+
} from '../analyzers/analyzer'
|
|
13
|
+
import type {AnalysisCache, CacheValidationResult} from '../cache/cache-schema'
|
|
14
|
+
import type {WorkspacePackage} from '../scanner/workspace-scanner'
|
|
15
|
+
import type {AnalysisProgress, Issue, Severity} from '../types/index'
|
|
16
|
+
import type {Result} from '../types/result'
|
|
17
|
+
import process from 'node:process'
|
|
18
|
+
import {pLimit} from '@bfra.me/es/async'
|
|
19
|
+
import {consola} from 'consola'
|
|
20
|
+
import {collectConfigFileStates, createCacheManager, initializeCache} from '../cache/cache-manager'
|
|
21
|
+
import {createWorkspaceHasher} from '../cache/file-hasher'
|
|
22
|
+
import {err, ok} from '../types/result'
|
|
23
|
+
|
|
24
|
+
/**
 * Options for incremental analysis.
 *
 * Optional fields fall back to the values in `DEFAULT_INCREMENTAL_OPTIONS`
 * when omitted (see `createIncrementalAnalyzer`).
 */
export interface IncrementalAnalysisOptions {
  /** Workspace root path */
  readonly workspacePath: string
  /** Current analyzer version; a version change invalidates the cache */
  readonly analyzerVersion: string
  /** Whether to use caching (default: true) */
  readonly useCache?: boolean
  /** Maximum number of concurrently running analyzers (default: 4) */
  readonly concurrency?: number
  /** Maximum cache age in milliseconds (default: 7 days) */
  readonly maxCacheAge?: number
  /** Minimum severity to report; lower-severity issues are filtered out (default: 'info') */
  readonly minSeverity?: Severity
  /** Progress callback invoked for each phase/step of the analysis */
  readonly onProgress?: (progress: AnalysisProgress) => void
  /** Cache directory (default: .workspace-analyzer-cache) */
  readonly cacheDir?: string
  /** Hash algorithm for file content (default: sha256) */
  readonly hashAlgorithm?: 'sha256' | 'md5'
}
|
|
47
|
+
|
|
48
|
+
/**
 * Error codes for incremental analysis.
 *
 * Of these, only 'CACHE_ERROR' is produced within this module (cache
 * clear/stats failures); the others are presumably produced by callers or
 * the wider orchestration layer — verify against their call sites.
 */
export type IncrementalAnalysisErrorCode =
  | 'SCAN_FAILED'
  | 'ANALYSIS_FAILED'
  | 'CACHE_ERROR'
  | 'ANALYZER_ERROR'
  | 'TIMEOUT'
|
|
57
|
+
|
|
58
|
+
/**
 * Error for incremental analysis operations.
 */
export interface IncrementalAnalysisError {
  /** Machine-readable error category */
  readonly code: IncrementalAnalysisErrorCode
  /** Human-readable description of the failure */
  readonly message: string
  /** Underlying error, when one triggered this failure */
  readonly cause?: Error
}
|
|
66
|
+
|
|
67
|
+
/**
 * Result of incremental analysis.
 */
export interface IncrementalAnalysisResult {
  /** All issues found, already filtered to the configured minimum severity */
  readonly issues: readonly Issue[]
  /** Number of files analyzed fresh (not served from cache) */
  readonly filesAnalyzed: number
  /** Number of files whose results were loaded from cache */
  readonly filesFromCache: number
  /** Number of packages analyzed */
  readonly packagesAnalyzed: number
  /** Analysis duration in milliseconds */
  readonly durationMs: number
  /** Whether a valid cache was loaded and used for this run */
  readonly cacheUsed: boolean
  /** Cache statistics after analysis; present only when cacheUsed is true */
  readonly cacheStats?: {
    /** hitCount / (hitCount + missCount), in [0, 1]; 0 when no files were seen */
    readonly hitRate: number
    readonly hitCount: number
    readonly missCount: number
  }
}
|
|
90
|
+
|
|
91
|
+
/**
 * Extended analysis context with incremental analysis metadata.
 *
 * NOTE(review): exported for consumers but not referenced elsewhere in this
 * file — presumably consumed by other modules; verify before removing.
 */
export interface IncrementalAnalysisContext {
  /** Workspace root path */
  readonly workspacePath: string
  /** All source files in the workspace */
  readonly files: readonly string[]
  /** Package paths (relative to workspace root) */
  readonly packagePaths: readonly string[]
  /** Configuration hash for cache invalidation */
  readonly configHash: string
  /** Current cache state (if available) */
  readonly cache?: AnalysisCache
  /** Files that need fresh analysis */
  readonly filesToAnalyze: readonly string[]
  /** Files that can use cached results */
  readonly cachedFiles: readonly string[]
  /** Progress reporter */
  readonly reportProgress: (current: string, processed: number, total?: number) => void
}
|
|
112
|
+
|
|
113
|
+
/**
 * Incremental analyzer orchestrator.
 *
 * Create instances with `createIncrementalAnalyzer`.
 */
export interface IncrementalAnalyzer {
  /** Run incremental analysis over the given files with the given analyzers */
  readonly analyze: (
    files: readonly string[],
    packages: readonly WorkspacePackage[],
    analyzers: readonly Analyzer[],
  ) => Promise<Result<IncrementalAnalysisResult, IncrementalAnalysisError>>
  /** Drop cached results for specific files so they are re-analyzed next run */
  readonly invalidateFiles: (paths: readonly string[]) => Promise<void>
  /** Clear the entire cache */
  readonly clearCache: () => Promise<Result<void, IncrementalAnalysisError>>
  /** Get current cache statistics (zeros when no cache exists yet) */
  readonly getCacheStats: () => Promise<
    Result<{cachedFiles: number; cachedPackages: number; ageMs: number}, IncrementalAnalysisError>
  >
}
|
|
132
|
+
|
|
133
|
+
/**
 * Default options for incremental analysis.
 *
 * Destructured by `createIncrementalAnalyzer` to fill in any option the
 * caller omits.
 */
export const DEFAULT_INCREMENTAL_OPTIONS = {
  useCache: true,
  concurrency: 4,
  maxCacheAge: 7 * 24 * 60 * 60 * 1000, // 7 days
  minSeverity: 'info' as Severity, // widened so callers may override with any Severity
  cacheDir: '.workspace-analyzer-cache',
  hashAlgorithm: 'sha256' as const,
} as const
|
|
144
|
+
|
|
145
|
+
/**
|
|
146
|
+
* Creates an incremental analyzer orchestrator.
|
|
147
|
+
*
|
|
148
|
+
* @param options - Incremental analysis configuration
|
|
149
|
+
* @returns An IncrementalAnalyzer instance
|
|
150
|
+
*
|
|
151
|
+
* @example
|
|
152
|
+
* ```ts
|
|
153
|
+
* const analyzer = createIncrementalAnalyzer({
|
|
154
|
+
* workspacePath: '/path/to/workspace',
|
|
155
|
+
* analyzerVersion: '1.0.0',
|
|
156
|
+
* onProgress: (progress) => {
|
|
157
|
+
* console.error(`${progress.phase}: ${progress.processed}/${progress.total}`)
|
|
158
|
+
* },
|
|
159
|
+
* })
|
|
160
|
+
*
|
|
161
|
+
* const result = await analyzer.analyze(files, packages, analyzers)
|
|
162
|
+
* if (result.success) {
|
|
163
|
+
* console.error(`Found ${result.data.issues.length} issues`)
|
|
164
|
+
* }
|
|
165
|
+
* ```
|
|
166
|
+
*/
|
|
167
|
+
export function createIncrementalAnalyzer(
|
|
168
|
+
options: IncrementalAnalysisOptions,
|
|
169
|
+
): IncrementalAnalyzer {
|
|
170
|
+
const {
|
|
171
|
+
workspacePath,
|
|
172
|
+
analyzerVersion,
|
|
173
|
+
useCache = DEFAULT_INCREMENTAL_OPTIONS.useCache,
|
|
174
|
+
concurrency = DEFAULT_INCREMENTAL_OPTIONS.concurrency,
|
|
175
|
+
maxCacheAge = DEFAULT_INCREMENTAL_OPTIONS.maxCacheAge,
|
|
176
|
+
minSeverity = DEFAULT_INCREMENTAL_OPTIONS.minSeverity,
|
|
177
|
+
onProgress,
|
|
178
|
+
cacheDir = DEFAULT_INCREMENTAL_OPTIONS.cacheDir,
|
|
179
|
+
hashAlgorithm = DEFAULT_INCREMENTAL_OPTIONS.hashAlgorithm,
|
|
180
|
+
} = options
|
|
181
|
+
|
|
182
|
+
const hasher = createWorkspaceHasher({algorithm: hashAlgorithm})
|
|
183
|
+
const cacheManager = createCacheManager({
|
|
184
|
+
workspacePath,
|
|
185
|
+
analyzerVersion,
|
|
186
|
+
cacheDir,
|
|
187
|
+
maxAge: maxCacheAge,
|
|
188
|
+
hashAlgorithm,
|
|
189
|
+
})
|
|
190
|
+
|
|
191
|
+
const limit = pLimit(concurrency)
|
|
192
|
+
|
|
193
|
+
function reportProgress(
|
|
194
|
+
phase: AnalysisProgress['phase'],
|
|
195
|
+
current: string,
|
|
196
|
+
processed: number,
|
|
197
|
+
total?: number,
|
|
198
|
+
): void {
|
|
199
|
+
onProgress?.({phase, current, processed, total})
|
|
200
|
+
}
|
|
201
|
+
|
|
202
|
+
async function computeConfigHash(packagePaths: readonly string[]): Promise<string> {
|
|
203
|
+
try {
|
|
204
|
+
const configFiles = await collectConfigFileStates(workspacePath, packagePaths)
|
|
205
|
+
const hashes = configFiles.map(f => f.contentHash)
|
|
206
|
+
return hasher.hashContent(hashes.join(':'))
|
|
207
|
+
} catch {
|
|
208
|
+
return hasher.hashContent(Date.now().toString())
|
|
209
|
+
}
|
|
210
|
+
}
|
|
211
|
+
|
|
212
|
+
return {
|
|
213
|
+
async analyze(
|
|
214
|
+
files: readonly string[],
|
|
215
|
+
packages: readonly WorkspacePackage[],
|
|
216
|
+
analyzers: readonly Analyzer[],
|
|
217
|
+
): Promise<Result<IncrementalAnalysisResult, IncrementalAnalysisError>> {
|
|
218
|
+
const startTime = Date.now()
|
|
219
|
+
let cache: AnalysisCache | undefined
|
|
220
|
+
let validation: CacheValidationResult | undefined
|
|
221
|
+
let cacheUsed = false
|
|
222
|
+
let hitCount = 0
|
|
223
|
+
let missCount = 0
|
|
224
|
+
|
|
225
|
+
const packagePaths = packages.map(p => p.packagePath)
|
|
226
|
+
reportProgress('scanning', workspacePath, 0, files.length)
|
|
227
|
+
|
|
228
|
+
const configHash = await computeConfigHash(packagePaths)
|
|
229
|
+
|
|
230
|
+
// Load and validate cache
|
|
231
|
+
if (useCache) {
|
|
232
|
+
const cacheResult = await cacheManager.load()
|
|
233
|
+
if (cacheResult.success && cacheManager.quickValidate(cacheResult.data, configHash)) {
|
|
234
|
+
cache = cacheResult.data
|
|
235
|
+
validation = await cacheManager.validate(cache, files as string[])
|
|
236
|
+
cacheUsed = true
|
|
237
|
+
}
|
|
238
|
+
}
|
|
239
|
+
|
|
240
|
+
// Determine files needing analysis
|
|
241
|
+
const filesToAnalyze: string[] = []
|
|
242
|
+
const cachedFiles: string[] = []
|
|
243
|
+
|
|
244
|
+
if (validation?.isValid && cache != null) {
|
|
245
|
+
for (const file of files) {
|
|
246
|
+
if (file in cache.files) {
|
|
247
|
+
cachedFiles.push(file)
|
|
248
|
+
hitCount++
|
|
249
|
+
} else {
|
|
250
|
+
filesToAnalyze.push(file)
|
|
251
|
+
missCount++
|
|
252
|
+
}
|
|
253
|
+
}
|
|
254
|
+
} else {
|
|
255
|
+
filesToAnalyze.push(...files)
|
|
256
|
+
missCount = files.length
|
|
257
|
+
|
|
258
|
+
if (useCache) {
|
|
259
|
+
cache = initializeCache(workspacePath, configHash, analyzerVersion)
|
|
260
|
+
const configFiles = await collectConfigFileStates(workspacePath, packagePaths)
|
|
261
|
+
cache = {...cache, configFiles}
|
|
262
|
+
}
|
|
263
|
+
}
|
|
264
|
+
|
|
265
|
+
reportProgress('parsing', '', 0, filesToAnalyze.length)
|
|
266
|
+
|
|
267
|
+
// Collect cached issues
|
|
268
|
+
const allIssues: Issue[] = []
|
|
269
|
+
if (cache != null) {
|
|
270
|
+
for (const file of cachedFiles) {
|
|
271
|
+
const cached = cache.files[file]
|
|
272
|
+
if (cached != null) {
|
|
273
|
+
allIssues.push(...cached.issues)
|
|
274
|
+
}
|
|
275
|
+
}
|
|
276
|
+
allIssues.push(...cache.workspaceIssues)
|
|
277
|
+
}
|
|
278
|
+
|
|
279
|
+
// Build analysis context for analyzers
|
|
280
|
+
const analyzerContext: BaseAnalysisContext = {
|
|
281
|
+
workspacePath,
|
|
282
|
+
packages,
|
|
283
|
+
config: {
|
|
284
|
+
minSeverity,
|
|
285
|
+
include: [],
|
|
286
|
+
exclude: [],
|
|
287
|
+
},
|
|
288
|
+
reportProgress: (message: string) => {
|
|
289
|
+
reportProgress('analyzing', message, 0)
|
|
290
|
+
},
|
|
291
|
+
}
|
|
292
|
+
|
|
293
|
+
// Run analyzers with parallel execution
|
|
294
|
+
reportProgress('analyzing', '', 0, analyzers.length)
|
|
295
|
+
|
|
296
|
+
const analyzerResults = await Promise.all(
|
|
297
|
+
analyzers.map(async (analyzer, index) =>
|
|
298
|
+
limit(async () => {
|
|
299
|
+
const analyzerId = analyzer.metadata.id
|
|
300
|
+
reportProgress('analyzing', analyzerId, index + 1, analyzers.length)
|
|
301
|
+
try {
|
|
302
|
+
const result = await analyzer.analyze(analyzerContext)
|
|
303
|
+
return result
|
|
304
|
+
} catch (error) {
|
|
305
|
+
const analyzerError: AnalyzerError = {
|
|
306
|
+
code: 'ANALYZER_ERROR',
|
|
307
|
+
message: `Analyzer ${analyzerId} failed: ${(error as Error).message}`,
|
|
308
|
+
}
|
|
309
|
+
return {success: false as const, error: analyzerError}
|
|
310
|
+
}
|
|
311
|
+
}),
|
|
312
|
+
),
|
|
313
|
+
)
|
|
314
|
+
|
|
315
|
+
// Collect results
|
|
316
|
+
for (const result of analyzerResults) {
|
|
317
|
+
if (!result.success) {
|
|
318
|
+
consola.warn(`Analyzer error: ${result.error.message}`)
|
|
319
|
+
continue
|
|
320
|
+
}
|
|
321
|
+
allIssues.push(...result.data)
|
|
322
|
+
}
|
|
323
|
+
|
|
324
|
+
// Filter by severity
|
|
325
|
+
const severityOrder: Severity[] = ['info', 'warning', 'error', 'critical']
|
|
326
|
+
const minSeverityIndex = severityOrder.indexOf(minSeverity)
|
|
327
|
+
const filteredIssues = allIssues.filter(
|
|
328
|
+
issue => severityOrder.indexOf(issue.severity) >= minSeverityIndex,
|
|
329
|
+
)
|
|
330
|
+
|
|
331
|
+
// Update cache
|
|
332
|
+
if (useCache && cache != null) {
|
|
333
|
+
const issuesByFile = new Map<string, Issue[]>()
|
|
334
|
+
for (const issue of filteredIssues) {
|
|
335
|
+
const existing = issuesByFile.get(issue.location.filePath) ?? []
|
|
336
|
+
existing.push(issue)
|
|
337
|
+
issuesByFile.set(issue.location.filePath, existing)
|
|
338
|
+
}
|
|
339
|
+
|
|
340
|
+
let updatedCache = cache
|
|
341
|
+
for (const file of filesToAnalyze) {
|
|
342
|
+
const fileIssues = issuesByFile.get(file) ?? []
|
|
343
|
+
const updateResult = await cacheManager.updateFile(
|
|
344
|
+
updatedCache,
|
|
345
|
+
file,
|
|
346
|
+
fileIssues,
|
|
347
|
+
analyzers.map(a => a.metadata.id),
|
|
348
|
+
)
|
|
349
|
+
if (updateResult.success) {
|
|
350
|
+
updatedCache = updateResult.data
|
|
351
|
+
}
|
|
352
|
+
}
|
|
353
|
+
|
|
354
|
+
reportProgress('reporting', 'Saving cache', 0)
|
|
355
|
+
await cacheManager.save(updatedCache)
|
|
356
|
+
}
|
|
357
|
+
|
|
358
|
+
reportProgress('reporting', '', files.length, files.length)
|
|
359
|
+
|
|
360
|
+
const durationMs = Date.now() - startTime
|
|
361
|
+
|
|
362
|
+
return ok({
|
|
363
|
+
issues: filteredIssues,
|
|
364
|
+
filesAnalyzed: filesToAnalyze.length,
|
|
365
|
+
filesFromCache: cachedFiles.length,
|
|
366
|
+
packagesAnalyzed: packagePaths.length,
|
|
367
|
+
durationMs,
|
|
368
|
+
cacheUsed,
|
|
369
|
+
cacheStats: cacheUsed
|
|
370
|
+
? {
|
|
371
|
+
hitRate: hitCount + missCount > 0 ? hitCount / (hitCount + missCount) : 0,
|
|
372
|
+
hitCount,
|
|
373
|
+
missCount,
|
|
374
|
+
}
|
|
375
|
+
: undefined,
|
|
376
|
+
})
|
|
377
|
+
},
|
|
378
|
+
|
|
379
|
+
async invalidateFiles(paths: readonly string[]): Promise<void> {
|
|
380
|
+
const cacheResult = await cacheManager.load()
|
|
381
|
+
if (!cacheResult.success) return
|
|
382
|
+
|
|
383
|
+
const cache = cacheResult.data
|
|
384
|
+
const updatedFiles = {...cache.files}
|
|
385
|
+
for (const path of paths) {
|
|
386
|
+
delete updatedFiles[path]
|
|
387
|
+
}
|
|
388
|
+
|
|
389
|
+
await cacheManager.save({
|
|
390
|
+
...cache,
|
|
391
|
+
files: updatedFiles,
|
|
392
|
+
})
|
|
393
|
+
},
|
|
394
|
+
|
|
395
|
+
async clearCache(): Promise<Result<void, IncrementalAnalysisError>> {
|
|
396
|
+
const result = await cacheManager.clear()
|
|
397
|
+
if (!result.success) {
|
|
398
|
+
return err({
|
|
399
|
+
code: 'CACHE_ERROR',
|
|
400
|
+
message: result.error.message,
|
|
401
|
+
})
|
|
402
|
+
}
|
|
403
|
+
return ok(undefined)
|
|
404
|
+
},
|
|
405
|
+
|
|
406
|
+
async getCacheStats(): Promise<
|
|
407
|
+
Result<{cachedFiles: number; cachedPackages: number; ageMs: number}, IncrementalAnalysisError>
|
|
408
|
+
> {
|
|
409
|
+
const cacheResult = await cacheManager.load()
|
|
410
|
+
if (!cacheResult.success) {
|
|
411
|
+
if (cacheResult.error.code === 'CACHE_NOT_FOUND') {
|
|
412
|
+
return ok({cachedFiles: 0, cachedPackages: 0, ageMs: 0})
|
|
413
|
+
}
|
|
414
|
+
return err({
|
|
415
|
+
code: 'CACHE_ERROR',
|
|
416
|
+
message: cacheResult.error.message,
|
|
417
|
+
})
|
|
418
|
+
}
|
|
419
|
+
|
|
420
|
+
const stats = cacheManager.getStatistics(cacheResult.data)
|
|
421
|
+
return ok({
|
|
422
|
+
cachedFiles: stats.cachedFiles,
|
|
423
|
+
cachedPackages: stats.cachedPackages,
|
|
424
|
+
ageMs: stats.ageMs,
|
|
425
|
+
})
|
|
426
|
+
},
|
|
427
|
+
}
|
|
428
|
+
}
|
|
429
|
+
|
|
430
|
+
/**
|
|
431
|
+
* Creates a progress callback that logs to stderr.
|
|
432
|
+
*/
|
|
433
|
+
export function createConsoleProgressCallback(): (progress: AnalysisProgress) => void {
|
|
434
|
+
let lastPhase: AnalysisProgress['phase'] | undefined
|
|
435
|
+
|
|
436
|
+
return (progress: AnalysisProgress) => {
|
|
437
|
+
if (progress.phase !== lastPhase) {
|
|
438
|
+
lastPhase = progress.phase
|
|
439
|
+
const phaseNames = {
|
|
440
|
+
scanning: 'Scanning workspace',
|
|
441
|
+
parsing: 'Parsing source files',
|
|
442
|
+
analyzing: 'Running analyzers',
|
|
443
|
+
reporting: 'Generating report',
|
|
444
|
+
}
|
|
445
|
+
console.error(`\n${phaseNames[progress.phase]}...`)
|
|
446
|
+
}
|
|
447
|
+
|
|
448
|
+
if (progress.total != null && progress.total > 0) {
|
|
449
|
+
const percent = Math.round((progress.processed / progress.total) * 100)
|
|
450
|
+
process.stderr.write(`\r ${progress.processed}/${progress.total} (${percent}%)`)
|
|
451
|
+
}
|
|
452
|
+
}
|
|
453
|
+
}
|
|
454
|
+
|
|
455
|
+
/**
|
|
456
|
+
* Creates a no-op progress callback.
|
|
457
|
+
*/
|
|
458
|
+
export function createSilentProgressCallback(): (progress: AnalysisProgress) => void {
|
|
459
|
+
return () => {
|
|
460
|
+
// Silent operation
|
|
461
|
+
}
|
|
462
|
+
}
|
|
@@ -0,0 +1,34 @@
|
|
|
1
|
+
/**
 * Core module exports for workspace analyzer.
 *
 * Provides the main orchestration layer for incremental analysis
 * with caching, parallel execution, and progress reporting.
 */

// Incremental analyzer: factory, progress-callback helpers, and defaults.
export {
  createConsoleProgressCallback,
  createIncrementalAnalyzer,
  createSilentProgressCallback,
  DEFAULT_INCREMENTAL_OPTIONS,
} from './incremental-analyzer'

// Type-only re-exports (erased at compile time).
export type {
  IncrementalAnalysisContext,
  IncrementalAnalysisError,
  IncrementalAnalysisErrorCode,
  IncrementalAnalysisOptions,
  IncrementalAnalysisResult,
  IncrementalAnalyzer,
} from './incremental-analyzer'

// Analysis orchestrator
export {createOrchestrator} from './orchestrator'

export type {
  AnalysisContext,
  AnalysisOrchestrator,
  OrchestratorError,
  OrchestratorErrorCode,
  OrchestratorOptions,
} from './orchestrator'
|