@getmikk/core 1.8.2 → 1.9.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (43)
  1. package/package.json +3 -1
  2. package/src/constants.ts +285 -0
  3. package/src/contract/contract-generator.ts +7 -0
  4. package/src/contract/index.ts +2 -3
  5. package/src/contract/lock-compiler.ts +74 -42
  6. package/src/contract/lock-reader.ts +24 -4
  7. package/src/contract/schema.ts +27 -1
  8. package/src/error-handler.ts +430 -0
  9. package/src/graph/cluster-detector.ts +45 -20
  10. package/src/graph/confidence-engine.ts +60 -0
  11. package/src/graph/dead-code-detector.ts +27 -5
  12. package/src/graph/graph-builder.ts +298 -238
  13. package/src/graph/impact-analyzer.ts +131 -114
  14. package/src/graph/index.ts +4 -0
  15. package/src/graph/memory-manager.ts +345 -0
  16. package/src/graph/query-engine.ts +79 -0
  17. package/src/graph/risk-engine.ts +86 -0
  18. package/src/graph/types.ts +89 -64
  19. package/src/parser/boundary-checker.ts +3 -1
  20. package/src/parser/change-detector.ts +99 -0
  21. package/src/parser/go/go-extractor.ts +28 -9
  22. package/src/parser/go/go-parser.ts +2 -0
  23. package/src/parser/index.ts +88 -38
  24. package/src/parser/javascript/js-extractor.ts +1 -1
  25. package/src/parser/javascript/js-parser.ts +2 -0
  26. package/src/parser/oxc-parser.ts +675 -0
  27. package/src/parser/oxc-resolver.ts +83 -0
  28. package/src/parser/tree-sitter/parser.ts +27 -15
  29. package/src/parser/types.ts +100 -73
  30. package/src/parser/typescript/ts-extractor.ts +241 -537
  31. package/src/parser/typescript/ts-parser.ts +16 -171
  32. package/src/parser/typescript/ts-resolver.ts +11 -1
  33. package/src/search/bm25.ts +5 -2
  34. package/src/utils/minimatch.ts +1 -1
  35. package/tests/contract.test.ts +2 -2
  36. package/tests/dead-code.test.ts +7 -7
  37. package/tests/esm-resolver.test.ts +75 -0
  38. package/tests/graph.test.ts +20 -20
  39. package/tests/helpers.ts +11 -6
  40. package/tests/impact-classified.test.ts +37 -41
  41. package/tests/parser.test.ts +7 -5
  42. package/tests/ts-parser.test.ts +27 -52
  43. package/test-output.txt +0 -373
@@ -1,138 +1,155 @@
1
- import type { DependencyGraph, GraphEdge, ImpactResult, ClassifiedImpact, RiskLevel } from './types.js'
1
+ import type { DependencyGraph, ImpactResult, ClassifiedImpact } from './types.js'
2
+ import { RiskEngine } from './risk-engine.js'
3
+ import { ConfidenceEngine } from './confidence-engine.js'
2
4
 
3
5
  /**
4
- * ImpactAnalyzer Given changed nodes, walks the graph backwards (BFS)
5
- * to find everything that depends on them.
6
- * Powers "what breaks if I change X?"
7
- *
8
- * Risk classification:
9
- * CRITICAL = direct caller (depth 1) that crosses a module boundary
10
- * HIGH = direct caller (depth 1) within the same module
11
- * MEDIUM = depth 2
12
- * LOW = depth 3+
13
- *
14
- * Confidence is derived from the quality of resolved edges in the traversal
15
- * path, not from the size of the result set. A small impact set built from
16
- * low-confidence (unresolved/fuzzy) edges is still LOW confidence.
6
+ * Mikk 2.0: Impact Analyzer
7
+ * Given changed nodes, walks the graph backwards (reverse dependency)
8
+ * to find everything impacted, computing quantitative risk and confidence.
17
9
  */
18
10
  export class ImpactAnalyzer {
19
- constructor(private graph: DependencyGraph) { }
11
+ private riskEngine: RiskEngine;
12
+ private confidenceEngine: ConfidenceEngine;
13
+
14
+ constructor(private graph: DependencyGraph) {
15
+ this.riskEngine = new RiskEngine(graph);
16
+ this.confidenceEngine = new ConfidenceEngine(graph);
17
+ }
20
18
 
21
19
  /** Given a list of changed node IDs, find everything impacted */
22
- analyze(changedNodeIds: string[]): ImpactResult {
23
- const visited = new Set<string>()
24
- const depthMap = new Map<string, number>()
25
- // Track the minimum confidence seen along the path to each node
26
- const pathConfidence = new Map<string, number>()
27
-
28
- const queue: { id: string; depth: number; confidence: number }[] =
29
- changedNodeIds.map(id => ({ id, depth: 0, confidence: 1.0 }))
30
- let maxDepth = 0
31
-
32
- const changedSet = new Set(changedNodeIds)
33
-
34
- // Collect module IDs of the changed nodes
35
- const changedModules = new Set<string | undefined>()
36
- for (const id of changedNodeIds) {
37
- const node = this.graph.nodes.get(id)
38
- if (node) changedModules.add(node.moduleId)
39
- }
20
+ public analyze(changedNodeIds: string[]): ImpactResult {
21
+ // depth and shortest-path tracking per visited node
22
+ const visited = new Map<string, { depth: number, paths: string[][] }>();
23
+ // Use an index pointer instead of queue.shift() to avoid O(n) dequeue cost.
24
+ const queue: { id: string, depth: number, path: string[], pathSet: Set<string> }[] =
25
+ changedNodeIds.map(id => ({ id, depth: 0, path: [id], pathSet: new Set([id]) }));
26
+ let queueHead = 0;
27
+
28
+ let maxDepth = 0;
29
+ const entryPoints = new Set<string>();
30
+ const criticalModules = new Set<string>();
31
+
32
+ while (queueHead < queue.length) {
33
+ const { id: current, depth, path, pathSet } = queue[queueHead++];
34
+
35
+ if (!visited.has(current)) {
36
+ visited.set(current, { depth, paths: [path] });
37
+ } else {
38
+ visited.get(current)!.paths.push(path);
39
+ if (depth < visited.get(current)!.depth) {
40
+ visited.get(current)!.depth = depth;
41
+ }
42
+ }
43
+
44
+ maxDepth = Math.max(maxDepth, depth);
45
+ const node = this.graph.nodes.get(current);
40
46
 
41
- while (queue.length > 0) {
42
- const { id: current, depth, confidence: pathConf } = queue.shift()!
43
- if (visited.has(current)) continue
44
- visited.add(current)
45
- depthMap.set(current, depth)
46
- pathConfidence.set(current, pathConf)
47
- maxDepth = Math.max(maxDepth, depth)
47
+ if (node?.metadata?.isExported) {
48
+ entryPoints.add(current);
49
+ }
48
50
 
49
- // Find everything that depends on current (incoming edges)
50
- const dependents = this.graph.inEdges.get(current) || []
51
+ const dependents = this.graph.inEdges.get(current) || [];
51
52
  for (const edge of dependents) {
52
- if (!visited.has(edge.source) && edge.type !== 'contains') {
53
- // Propagate the minimum confidence seen so far on this path.
54
- // A chain is only as trustworthy as its weakest link.
55
- const edgeConf = edge.confidence ?? 1.0
56
- const newPathConf = Math.min(pathConf, edgeConf)
57
- queue.push({ id: edge.source, depth: depth + 1, confidence: newPathConf })
53
+ if (edge.type === 'contains') continue;
54
+ // Use pathSet (O(1) lookup) instead of path.includes() (O(depth))
55
+ if (!pathSet.has(edge.from)) {
56
+ const newPathSet = new Set(pathSet);
57
+ newPathSet.add(edge.from);
58
+ queue.push({
59
+ id: edge.from,
60
+ depth: depth + 1,
61
+ path: [...path, edge.from],
62
+ pathSet: newPathSet,
63
+ });
58
64
  }
59
65
  }
60
66
  }
61
67
 
62
- const impacted = [...visited].filter(id => !changedSet.has(id))
63
-
64
- // Classify each impacted node by risk level
65
- const classified: ImpactResult['classified'] = {
66
- critical: [],
67
- high: [],
68
- medium: [],
69
- low: [],
70
- }
71
-
72
- for (const id of impacted) {
73
- const node = this.graph.nodes.get(id)
74
- if (!node) continue
75
-
76
- const depth = depthMap.get(id) ?? 999
77
- const crossesBoundary = !changedModules.has(node.moduleId)
68
+ const impactedIds = Array.from(visited.keys()).filter(id => !changedNodeIds.includes(id));
69
+
70
+ let totalRisk = 0;
71
+ let totalConfidence = 0;
72
+
73
+ const classified = {
74
+ critical: [] as ClassifiedImpact[],
75
+ high: [] as ClassifiedImpact[],
76
+ medium: [] as ClassifiedImpact[],
77
+ low: [] as ClassifiedImpact[]
78
+ };
79
+
80
+ for (const id of impactedIds) {
81
+ const context = visited.get(id)!;
82
+ const node = this.graph.nodes.get(id);
83
+ let risk = this.riskEngine.scoreNode(id);
84
+
85
+ // Path reversal for confidence calculation (since BFS walks backwards)
86
+ const reversedPaths = context.paths.map(p => [...p].reverse());
87
+ const confidence = this.confidenceEngine.calculateNodeAggregatedConfidence(reversedPaths);
88
+
89
+ // Mikk 2.0 Hybrid Risk: Boost if boundary crossed at depth 1
90
+ // Check if ANY changed node crosses module boundary (not just first one)
91
+ if (context.depth === 1 && node?.moduleId) {
92
+ const crossesBoundary = changedNodeIds.some(id => {
93
+ const changedNode = this.graph.nodes.get(id);
94
+ // Add proper null checks for module IDs
95
+ if (!changedNode?.moduleId || !node.moduleId) {
96
+ return false;
97
+ }
98
+ return changedNode.moduleId !== node.moduleId;
99
+ });
100
+ if (crossesBoundary) {
101
+ risk = Math.max(risk, 80);
102
+ }
103
+ }
78
104
 
79
- const risk: RiskLevel =
80
- depth === 1 && crossesBoundary ? 'critical' :
81
- depth === 1 ? 'high' :
82
- depth === 2 ? 'medium' :
83
- 'low'
105
+ totalRisk += risk;
106
+ totalConfidence += confidence;
84
107
 
85
- const entry: ClassifiedImpact = {
108
+ const impactEntry: ClassifiedImpact = {
86
109
  nodeId: id,
87
- label: node.label,
88
- file: node.file,
89
- moduleId: node.moduleId,
90
- risk,
91
- depth,
110
+ label: node?.name || 'unknown',
111
+ file: node?.file || 'unknown',
112
+ risk: (risk >= 80 ? 'CRITICAL' : risk >= 60 ? 'HIGH' : risk >= 40 ? 'MEDIUM' : 'LOW'),
113
+ riskScore: risk,
114
+ depth: context.depth
115
+ };
116
+
117
+ if (risk >= 80) classified.critical.push(impactEntry);
118
+ else if (risk >= 60) classified.high.push(impactEntry);
119
+ else if (risk >= 40) classified.medium.push(impactEntry);
120
+ else classified.low.push(impactEntry);
121
+
122
+ if (risk > 70 && node?.moduleId) {
123
+ criticalModules.add(node.moduleId);
92
124
  }
93
-
94
- classified[risk].push(entry)
95
- }
96
-
97
- return {
98
- changed: changedNodeIds,
99
- impacted,
100
- depth: maxDepth,
101
- confidence: this.computeConfidence(impacted, pathConfidence),
102
- classified,
103
125
  }
104
- }
105
126
 
106
- /**
107
- * Derive confidence from the actual quality of edges traversed, not from
108
- * result size. A small result built from fuzzy/unresolved edges is LOW
109
- * confidence; a large result built from high-confidence AST edges is HIGH.
110
- *
111
- * Algorithm:
112
- * - Compute the average minimum-path-confidence across all impacted nodes.
113
- * - Penalise for deep chains (they amplify uncertainty).
114
- * - Map the combined score to HIGH / MEDIUM / LOW.
115
- */
116
- private computeConfidence(
117
- impacted: string[],
118
- pathConfidence: Map<string, number>,
119
- ): 'high' | 'medium' | 'low' {
120
- if (impacted.length === 0) return 'high'
121
-
122
- // Average path confidence across all impacted nodes
123
- let total = 0
124
- for (const id of impacted) {
125
- total += pathConfidence.get(id) ?? 1.0
126
- }
127
- const avgConf = total / impacted.length
127
+ const avgConfidence = impactedIds.length > 0
128
+ ? totalConfidence / impactedIds.length
129
+ : 1.0;
128
130
 
129
- // Penalise for large impact sets: confidence erodes with result size
130
- const sizePenalty = impacted.length > 20 ? 0.15 : impacted.length > 10 ? 0.08 : 0
131
+ const riskScore = impactedIds.length > 0
132
+ ? Math.min(Math.max(totalRisk / impactedIds.length, 0), 100)
133
+ : 0;
131
134
 
132
- const score = avgConf - sizePenalty
135
+ const allImpacted: ClassifiedImpact[] = [
136
+ ...classified.critical,
137
+ ...classified.high,
138
+ ...classified.medium,
139
+ ...classified.low
140
+ ];
133
141
 
134
- if (score >= 0.75) return 'high'
135
- if (score >= 0.50) return 'medium'
136
- return 'low'
142
+ return {
143
+ changed: changedNodeIds,
144
+ impacted: impactedIds,
145
+ allImpacted,
146
+ depth: maxDepth,
147
+ entryPoints: Array.from(entryPoints),
148
+ criticalModules: Array.from(criticalModules),
149
+ paths: Array.from(visited.values()).flatMap(v => v.paths),
150
+ confidence: Number(avgConfidence.toFixed(3)),
151
+ riskScore: Math.round(riskScore),
152
+ classified
153
+ };
137
154
  }
138
155
  }
@@ -4,4 +4,8 @@ export { ImpactAnalyzer } from './impact-analyzer.js'
4
4
  export { ClusterDetector } from './cluster-detector.js'
5
5
  export { DeadCodeDetector } from './dead-code-detector.js'
6
6
  export type { DeadCodeResult, DeadCodeEntry } from './dead-code-detector.js'
7
+ export { RiskEngine } from './risk-engine.js'
8
+ export type { RiskContext, RiskModifiers } from './risk-engine.js'
9
+ export { ConfidenceEngine } from './confidence-engine.js'
10
+ export { QueryEngine } from './query-engine.js'
7
11
 
@@ -0,0 +1,345 @@
1
+ /**
2
+ * Memory Manager for Large Graph Operations
3
+ *
4
+ * Provides memory monitoring, cleanup, and optimization for graph operations
5
+ * that can consume significant amounts of memory in large codebases.
6
+ */
7
+
8
+ // Memory thresholds in bytes
9
+ const MEMORY_THRESHOLDS = {
10
+ WARNING: 100 * 1024 * 1024, // 100MB
11
+ CRITICAL: 200 * 1024 * 1024, // 200MB
12
+ EMERGENCY: 400 * 1024 * 1024, // 400MB
13
+ }
14
+
15
+ // Default cleanup configuration
16
+ const DEFAULT_CLEANUP_CONFIG = {
17
+ maxAge: 30 * 60 * 1000, // 30 minutes
18
+ maxNodes: 10000, // Maximum nodes to keep in memory
19
+ gcInterval: 60 * 1000, // GC check interval (1 minute)
20
+ }
21
+
22
+ export interface MemoryStats {
23
+ heapUsed: number
24
+ heapTotal: number
25
+ external: number
26
+ rss: number
27
+ percentage: number
28
+ status: 'normal' | 'warning' | 'critical' | 'emergency'
29
+ }
30
+
31
+ export interface MemoryManagerConfig {
32
+ maxAge?: number
33
+ maxNodes?: number
34
+ gcInterval?: number
35
+ enableAutoGC?: boolean
36
+ }
37
+
38
+ /**
39
+ * Memory Manager for graph operations
40
+ */
41
+ export class MemoryManager {
42
+ private config: Required<MemoryManagerConfig>
43
+ private lastGC = Date.now()
44
+ private nodeCache = new Map<string, { data: any; timestamp: number }>()
45
+ private gcTimer?: NodeJS.Timeout
46
+
47
+ constructor(config: MemoryManagerConfig = {}) {
48
+ this.config = {
49
+ maxAge: config.maxAge ?? DEFAULT_CLEANUP_CONFIG.maxAge,
50
+ maxNodes: config.maxNodes ?? DEFAULT_CLEANUP_CONFIG.maxNodes,
51
+ gcInterval: config.gcInterval ?? DEFAULT_CLEANUP_CONFIG.gcInterval,
52
+ enableAutoGC: config.enableAutoGC ?? true,
53
+ }
54
+
55
+ if (this.config.enableAutoGC) {
56
+ this.startAutoGC()
57
+ }
58
+ }
59
+
60
+ /**
61
+ * Get current memory statistics
62
+ */
63
+ getMemoryStats(): MemoryStats {
64
+ const usage = process.memoryUsage()
65
+ const percentage = (usage.heapUsed / usage.heapTotal) * 100
66
+
67
+ let status: MemoryStats['status'] = 'normal'
68
+ if (usage.heapUsed > MEMORY_THRESHOLDS.EMERGENCY) {
69
+ status = 'emergency'
70
+ } else if (usage.heapUsed > MEMORY_THRESHOLDS.CRITICAL) {
71
+ status = 'critical'
72
+ } else if (usage.heapUsed > MEMORY_THRESHOLDS.WARNING) {
73
+ status = 'warning'
74
+ }
75
+
76
+ return {
77
+ heapUsed: usage.heapUsed,
78
+ heapTotal: usage.heapTotal,
79
+ external: usage.external,
80
+ rss: usage.rss,
81
+ percentage,
82
+ status,
83
+ }
84
+ }
85
+
86
+ /**
87
+ * Check if memory usage is critical
88
+ */
89
+ isMemoryCritical(): boolean {
90
+ const stats = this.getMemoryStats()
91
+ return stats.status === 'critical' || stats.status === 'emergency'
92
+ }
93
+
94
+ /**
95
+ * Force garbage collection if available
96
+ */
97
+ forceGC(): void {
98
+ if (global.gc) {
99
+ global.gc()
100
+ this.lastGC = Date.now()
101
+ }
102
+ }
103
+
104
+ /**
105
+ * Cache a node with automatic cleanup
106
+ */
107
+ cacheNode(id: string, data: any): void {
108
+ // If we're at the node limit, remove oldest entries
109
+ if (this.nodeCache.size >= this.config.maxNodes) {
110
+ this.evictOldestNodes(Math.floor(this.config.maxNodes * 0.1)) // Remove 10%
111
+ }
112
+
113
+ this.nodeCache.set(id, {
114
+ data,
115
+ timestamp: Date.now(),
116
+ })
117
+ }
118
+
119
+ /**
120
+ * Get cached node data
121
+ */
122
+ getCachedNode(id: string): any | null {
123
+ const cached = this.nodeCache.get(id)
124
+ if (!cached) return null
125
+
126
+ // Check if expired
127
+ if (Date.now() - cached.timestamp > this.config.maxAge) {
128
+ this.nodeCache.delete(id)
129
+ return null
130
+ }
131
+
132
+ return cached.data
133
+ }
134
+
135
+ /**
136
+ * Clear node cache
137
+ */
138
+ clearCache(): void {
139
+ this.nodeCache.clear()
140
+ }
141
+
142
+ /**
143
+ * Perform comprehensive memory cleanup
144
+ */
145
+ cleanup(): void {
146
+ // Clear expired cache entries
147
+ const now = Date.now()
148
+ for (const [id, cached] of this.nodeCache.entries()) {
149
+ if (now - cached.timestamp > this.config.maxAge) {
150
+ this.nodeCache.delete(id)
151
+ }
152
+ }
153
+
154
+ // Force garbage collection
155
+ this.forceGC()
156
+ }
157
+
158
+ /**
159
+ * Evict oldest nodes from cache
160
+ */
161
+ private evictOldestNodes(count: number): void {
162
+ const entries = Array.from(this.nodeCache.entries())
163
+ .sort((a, b) => a[1].timestamp - b[1].timestamp)
164
+
165
+ for (let i = 0; i < Math.min(count, entries.length); i++) {
166
+ this.nodeCache.delete(entries[i][0])
167
+ }
168
+ }
169
+
170
+ /**
171
+ * Start automatic garbage collection
172
+ */
173
+ private startAutoGC(): void {
174
+ this.gcTimer = setInterval(() => {
175
+ const stats = this.getMemoryStats()
176
+
177
+ // If memory usage is high, perform cleanup
178
+ if (stats.status !== 'normal') {
179
+ this.cleanup()
180
+ }
181
+
182
+ // Periodic cleanup regardless of memory pressure
183
+ if (Date.now() - this.lastGC > this.config.gcInterval) {
184
+ this.cleanup()
185
+ }
186
+ }, this.config.gcInterval)
187
+ }
188
+
189
+ /**
190
+ * Stop automatic garbage collection
191
+ */
192
+ stopAutoGC(): void {
193
+ if (this.gcTimer) {
194
+ clearInterval(this.gcTimer)
195
+ this.gcTimer = undefined
196
+ }
197
+ }
198
+
199
+ /**
200
+ * Dispose of memory manager
201
+ */
202
+ dispose(): void {
203
+ this.stopAutoGC()
204
+ this.clearCache()
205
+ this.forceGC()
206
+ }
207
+ }
208
+
209
+ /**
210
+ * Memory-aware graph builder wrapper
211
+ */
212
+ export class MemoryAwareGraphBuilder {
213
+ private memoryManager: MemoryManager
214
+
215
+ constructor(config?: MemoryManagerConfig) {
216
+ this.memoryManager = new MemoryManager(config)
217
+ }
218
+
219
+ /**
220
+ * Build graph with memory monitoring
221
+ */
222
+ buildGraph(lock: any): any {
223
+ const stats = this.memoryManager.getMemoryStats()
224
+
225
+ // Check memory before starting
226
+ if (this.memoryManager.isMemoryCritical()) {
227
+ console.warn('Memory usage is critical, performing cleanup before graph build')
228
+ this.memoryManager.cleanup()
229
+ }
230
+
231
+ try {
232
+ // Build graph implementation here
233
+ return this.buildGraphInternal(lock)
234
+ } finally {
235
+ // Cleanup after build
236
+ this.memoryManager.cleanup()
237
+ }
238
+ }
239
+
240
+ /**
241
+ * Internal graph building implementation
242
+ */
243
+ private buildGraphInternal(lock: any): any {
244
+ const nodes = new Map<string, any>()
245
+ const edges: any[] = []
246
+ const outEdges = new Map<string, any[]>()
247
+ const inEdges = new Map<string, any[]>()
248
+
249
+ // Process functions with memory monitoring
250
+ for (const [id, fn] of Object.entries(lock.functions || {})) {
251
+ // Check memory periodically
252
+ if (nodes.size % 1000 === 0) {
253
+ if (this.memoryManager.isMemoryCritical()) {
254
+ console.warn('Memory pressure detected during graph build, forcing cleanup')
255
+ this.memoryManager.cleanup()
256
+ }
257
+ }
258
+
259
+ const node = {
260
+ id,
261
+ name: (fn as any).name,
262
+ file: (fn as any).file,
263
+ type: 'function',
264
+ moduleId: (fn as any).moduleId,
265
+ metadata: {
266
+ isExported: (fn as any).isExported,
267
+ isAsync: (fn as any).isAsync,
268
+ },
269
+ }
270
+
271
+ nodes.set(id, node)
272
+ outEdges.set(id, [])
273
+ inEdges.set(id, [])
274
+ }
275
+
276
+ // Process edges
277
+ for (const [id, fn] of Object.entries(lock.functions || {})) {
278
+ const calls = (fn as any).calls || []
279
+ for (const targetId of calls) {
280
+ if (nodes.has(targetId)) {
281
+ const edge = {
282
+ from: id,
283
+ to: targetId,
284
+ type: 'calls',
285
+ }
286
+ edges.push(edge)
287
+ outEdges.get(id)?.push(edge)
288
+ inEdges.get(targetId)?.push(edge)
289
+ }
290
+ }
291
+ }
292
+
293
+ return {
294
+ nodes,
295
+ edges,
296
+ outEdges,
297
+ inEdges,
298
+ }
299
+ }
300
+
301
+ /**
302
+ * Get memory statistics
303
+ */
304
+ getMemoryStats(): MemoryStats {
305
+ return this.memoryManager.getMemoryStats()
306
+ }
307
+
308
+ /**
309
+ * Dispose of the graph builder
310
+ */
311
+ dispose(): void {
312
+ this.memoryManager.dispose()
313
+ }
314
+ }
315
+
316
+ /**
317
+ * Utility function to monitor memory usage during operations
318
+ */
319
+ export function withMemoryMonitoring<T>(
320
+ operation: () => T,
321
+ memoryManager?: MemoryManager
322
+ ): T {
323
+ const manager = memoryManager || new MemoryManager({ enableAutoGC: false })
324
+
325
+ const initialStats = manager.getMemoryStats()
326
+ console.log(`Memory before operation: ${(initialStats.heapUsed / 1024 / 1024).toFixed(1)}MB`)
327
+
328
+ try {
329
+ const result = operation()
330
+
331
+ const finalStats = manager.getMemoryStats()
332
+ const delta = finalStats.heapUsed - initialStats.heapUsed
333
+ console.log(`Memory after operation: ${(finalStats.heapUsed / 1024 / 1024).toFixed(1)}MB (${delta >= 0 ? '+' : ''}${(delta / 1024 / 1024).toFixed(1)}MB)`)
334
+
335
+ if (finalStats.status !== 'normal') {
336
+ console.warn(`Memory status: ${finalStats.status}`)
337
+ }
338
+
339
+ return result
340
+ } finally {
341
+ if (!memoryManager) {
342
+ manager.dispose()
343
+ }
344
+ }
345
+ }