@stackmemoryai/stackmemory 0.3.7 → 0.3.8
This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between those versions as they appear in their public registry.
- package/dist/agents/core/agent-task-manager.js +5 -5
- package/dist/agents/core/agent-task-manager.js.map +2 -2
- package/dist/agents/verifiers/base-verifier.js +2 -2
- package/dist/agents/verifiers/base-verifier.js.map +2 -2
- package/dist/cli/claude-sm.js +0 -11
- package/dist/cli/claude-sm.js.map +2 -2
- package/dist/cli/codex-sm.js +0 -11
- package/dist/cli/codex-sm.js.map +2 -2
- package/dist/cli/commands/chromadb.js +64 -34
- package/dist/cli/commands/chromadb.js.map +2 -2
- package/dist/cli/commands/clear.js +9 -13
- package/dist/cli/commands/clear.js.map +2 -2
- package/dist/cli/commands/config.js +43 -33
- package/dist/cli/commands/config.js.map +2 -2
- package/dist/cli/commands/context.js.map +2 -2
- package/dist/cli/commands/dashboard.js +41 -13
- package/dist/cli/commands/dashboard.js.map +2 -2
- package/dist/cli/commands/gc.js +69 -20
- package/dist/cli/commands/gc.js.map +2 -2
- package/dist/cli/commands/handoff.js.map +2 -2
- package/dist/cli/commands/infinite-storage.js +60 -19
- package/dist/cli/commands/infinite-storage.js.map +2 -2
- package/dist/cli/commands/linear-create.js +36 -8
- package/dist/cli/commands/linear-create.js.map +2 -2
- package/dist/cli/commands/linear-list.js +33 -10
- package/dist/cli/commands/linear-list.js.map +2 -2
- package/dist/cli/commands/linear-migrate.js +17 -4
- package/dist/cli/commands/linear-migrate.js.map +2 -2
- package/dist/cli/commands/linear-test.js +14 -6
- package/dist/cli/commands/linear-test.js.map +2 -2
- package/dist/cli/commands/linear-unified.js +123 -35
- package/dist/cli/commands/linear-unified.js.map +2 -2
- package/dist/cli/commands/linear.js.map +2 -2
- package/dist/cli/commands/monitor.js.map +2 -2
- package/dist/cli/commands/onboard.js +35 -8
- package/dist/cli/commands/onboard.js.map +2 -2
- package/dist/cli/commands/quality.js +2 -7
- package/dist/cli/commands/quality.js.map +2 -2
- package/dist/cli/commands/session.js +23 -6
- package/dist/cli/commands/session.js.map +2 -2
- package/dist/cli/commands/skills.js +72 -27
- package/dist/cli/commands/skills.js.map +2 -2
- package/dist/cli/commands/storage.js +108 -38
- package/dist/cli/commands/storage.js.map +2 -2
- package/dist/cli/commands/tui.js.map +2 -2
- package/dist/cli/commands/webhook.js +57 -18
- package/dist/cli/commands/webhook.js.map +2 -2
- package/dist/cli/commands/workflow.js +8 -15
- package/dist/cli/commands/workflow.js.map +2 -2
- package/dist/cli/commands/worktree.js +34 -13
- package/dist/cli/commands/worktree.js.map +2 -2
- package/dist/cli/index.js +0 -11
- package/dist/cli/index.js.map +2 -2
- package/dist/core/config/types.js.map +1 -1
- package/dist/core/context/auto-context.js +10 -6
- package/dist/core/context/auto-context.js.map +2 -2
- package/dist/core/context/context-bridge.js.map +2 -2
- package/dist/core/context/frame-database.js +13 -3
- package/dist/core/context/frame-database.js.map +2 -2
- package/dist/core/context/frame-digest.js +7 -5
- package/dist/core/context/frame-digest.js.map +2 -2
- package/dist/core/context/frame-manager.js.map +2 -2
- package/dist/core/context/frame-stack.js +16 -5
- package/dist/core/context/frame-stack.js.map +2 -2
- package/dist/core/context/incremental-gc.js +10 -3
- package/dist/core/context/incremental-gc.js.map +2 -2
- package/dist/core/context/index.js.map +1 -1
- package/dist/core/context/permission-manager.js.map +2 -2
- package/dist/core/context/refactored-frame-manager.js +12 -3
- package/dist/core/context/refactored-frame-manager.js.map +2 -2
- package/dist/core/context/shared-context-layer.js +4 -2
- package/dist/core/context/shared-context-layer.js.map +2 -2
- package/dist/core/database/batch-operations.js +112 -86
- package/dist/core/database/batch-operations.js.map +2 -2
- package/dist/core/database/query-cache.js +19 -9
- package/dist/core/database/query-cache.js.map +2 -2
- package/dist/core/database/sqlite-adapter.js +1 -1
- package/dist/core/database/sqlite-adapter.js.map +2 -2
- package/dist/core/digest/enhanced-hybrid-digest.js +8 -2
- package/dist/core/digest/enhanced-hybrid-digest.js.map +2 -2
- package/dist/core/errors/recovery.js +9 -2
- package/dist/core/errors/recovery.js.map +2 -2
- package/dist/core/frame/workflow-templates-stub.js.map +1 -1
- package/dist/core/frame/workflow-templates.js +40 -1
- package/dist/core/frame/workflow-templates.js.map +2 -2
- package/dist/core/monitoring/logger.js +6 -1
- package/dist/core/monitoring/logger.js.map +2 -2
- package/dist/core/monitoring/metrics.js.map +2 -2
- package/dist/core/monitoring/progress-tracker.js.map +2 -2
- package/dist/core/performance/context-cache.js.map +2 -2
- package/dist/core/performance/lazy-context-loader.js +24 -20
- package/dist/core/performance/lazy-context-loader.js.map +2 -2
- package/dist/core/performance/optimized-frame-context.js +27 -12
- package/dist/core/performance/optimized-frame-context.js.map +2 -2
- package/dist/core/performance/performance-benchmark.js +10 -6
- package/dist/core/performance/performance-benchmark.js.map +2 -2
- package/dist/core/performance/performance-profiler.js +51 -14
- package/dist/core/performance/performance-profiler.js.map +2 -2
- package/dist/core/performance/streaming-jsonl-parser.js +5 -1
- package/dist/core/performance/streaming-jsonl-parser.js.map +2 -2
- package/dist/core/projects/project-manager.js +14 -20
- package/dist/core/projects/project-manager.js.map +2 -2
- package/dist/core/retrieval/context-retriever.js.map +1 -1
- package/dist/core/retrieval/llm-context-retrieval.js.map +2 -2
- package/dist/core/session/clear-survival-stub.js +5 -1
- package/dist/core/session/clear-survival-stub.js.map +2 -2
- package/dist/core/session/clear-survival.js +35 -0
- package/dist/core/session/clear-survival.js.map +2 -2
- package/dist/core/session/index.js.map +1 -1
- package/dist/core/session/session-manager.js.map +2 -2
- package/dist/core/storage/chromadb-adapter.js +6 -2
- package/dist/core/storage/chromadb-adapter.js.map +2 -2
- package/dist/core/storage/chromadb-simple.js +17 -5
- package/dist/core/storage/chromadb-simple.js.map +2 -2
- package/dist/core/storage/infinite-storage.js +109 -46
- package/dist/core/storage/infinite-storage.js.map +2 -2
- package/dist/core/storage/railway-optimized-storage.js +48 -22
- package/dist/core/storage/railway-optimized-storage.js.map +2 -2
- package/dist/core/storage/remote-storage.js +41 -23
- package/dist/core/storage/remote-storage.js.map +2 -2
- package/dist/core/trace/cli-trace-wrapper.js +9 -2
- package/dist/core/trace/cli-trace-wrapper.js.map +2 -2
- package/dist/core/trace/db-trace-wrapper.js +96 -68
- package/dist/core/trace/db-trace-wrapper.js.map +2 -2
- package/dist/core/trace/debug-trace.js +25 -8
- package/dist/core/trace/debug-trace.js.map +2 -2
- package/dist/core/trace/index.js +6 -2
- package/dist/core/trace/index.js.map +2 -2
- package/dist/core/trace/linear-api-wrapper.js +10 -5
- package/dist/core/trace/linear-api-wrapper.js.map +2 -2
- package/dist/core/trace/trace-demo.js +14 -10
- package/dist/core/trace/trace-demo.js.map +2 -2
- package/dist/core/trace/trace-detector.js +9 -2
- package/dist/core/trace/trace-detector.js.map +2 -2
- package/dist/core/trace/types.js.map +1 -1
- package/dist/core/utils/compression.js.map +1 -1
- package/dist/core/utils/update-checker.js.map +1 -1
- package/dist/core/worktree/worktree-manager.js +18 -7
- package/dist/core/worktree/worktree-manager.js.map +2 -2
- package/dist/features/analytics/core/analytics-service.js.map +2 -2
- package/dist/features/analytics/queries/metrics-queries.js +1 -1
- package/dist/features/analytics/queries/metrics-queries.js.map +2 -2
- package/dist/features/tasks/pebbles-task-store.js.map +1 -1
- package/dist/features/tui/components/analytics-panel.js +36 -15
- package/dist/features/tui/components/analytics-panel.js.map +2 -2
- package/dist/features/tui/components/pr-tracker.js +19 -7
- package/dist/features/tui/components/pr-tracker.js.map +2 -2
- package/dist/features/tui/components/session-monitor.js +22 -9
- package/dist/features/tui/components/session-monitor.js.map +2 -2
- package/dist/features/tui/components/subagent-fleet.js +20 -13
- package/dist/features/tui/components/subagent-fleet.js.map +2 -2
- package/dist/features/tui/components/task-board.js +26 -10
- package/dist/features/tui/components/task-board.js.map +2 -2
- package/dist/features/tui/index.js.map +2 -2
- package/dist/features/tui/services/data-service.js +6 -2
- package/dist/features/tui/services/data-service.js.map +2 -2
- package/dist/features/tui/services/linear-task-reader.js +3 -1
- package/dist/features/tui/services/linear-task-reader.js.map +2 -2
- package/dist/features/tui/services/websocket-client.js +3 -1
- package/dist/features/tui/services/websocket-client.js.map +2 -2
- package/dist/features/tui/terminal-compat.js +6 -2
- package/dist/features/tui/terminal-compat.js.map +2 -2
- package/dist/features/web/client/stores/task-store.js.map +2 -2
- package/dist/features/web/server/index.js +18 -10
- package/dist/features/web/server/index.js.map +2 -2
- package/dist/integrations/linear/sync-service.js +12 -13
- package/dist/integrations/linear/sync-service.js.map +2 -2
- package/dist/integrations/linear/sync.js +174 -12
- package/dist/integrations/linear/sync.js.map +2 -2
- package/dist/integrations/linear/unified-sync.js +1 -1
- package/dist/integrations/linear/unified-sync.js.map +1 -1
- package/dist/integrations/linear/webhook-server.js +15 -16
- package/dist/integrations/linear/webhook-server.js.map +2 -2
- package/dist/mcp/stackmemory-mcp-server.js +0 -11
- package/dist/mcp/stackmemory-mcp-server.js.map +2 -2
- package/dist/servers/production/auth-middleware.js.map +2 -2
- package/dist/servers/railway/index.js.map +2 -2
- package/dist/services/config-service.js +6 -7
- package/dist/services/config-service.js.map +2 -2
- package/dist/services/context-service.js +11 -12
- package/dist/services/context-service.js.map +2 -2
- package/dist/skills/claude-skills.js +4 -2
- package/dist/skills/claude-skills.js.map +2 -2
- package/dist/skills/dashboard-launcher.js.map +2 -2
- package/dist/skills/repo-ingestion-skill.js.map +2 -2
- package/dist/utils/env.js +46 -0
- package/dist/utils/env.js.map +7 -0
- package/dist/utils/logger.js +0 -11
- package/dist/utils/logger.js.map +2 -2
- package/package.json +1 -1
package/dist/core/performance/optimized-frame-context.js.map

@@ -1,7 +1,7 @@
 {
 "version": 3,
 "sources": ["../../../src/core/performance/optimized-frame-context.ts"],
-
"sourcesContent": ["/**\n * Optimized Frame Context Assembly\n * High-performance context retrieval with caching and batching\n */\n\nimport Database from 'better-sqlite3';\nimport { getQueryCache, createCacheKey } from '../database/query-cache.js';\nimport { logger } from '../monitoring/logger.js';\nimport { Frame, FrameContext, Anchor, Event } from '../context/frame-manager.js';\n\nexport interface ContextAssemblyOptions {\n maxEvents?: number;\n includeClosed?: boolean;\n enableCaching?: boolean;\n batchSize?: number;\n}\n\nexport interface OptimizedFrameContext extends FrameContext {\n performance: {\n assemblyTimeMs: number;\n cacheHits: number;\n dbQueries: number;\n totalRows: number;\n };\n}\n\n/**\n * Optimized context assembly with caching and batching\n */\nexport class OptimizedContextAssembler {\n private db: Database.Database;\n private cache = getQueryCache();\n private preparedStatements = new Map<string, Database.Statement>();\n\n constructor(db: Database.Database) {\n this.db = db;\n this.initializePreparedStatements();\n }\n\n /**\n * Get hot stack context with optimizations\n */\n async getHotStackContext(\n activeStack: string[],\n options: ContextAssemblyOptions = {}\n ): Promise<OptimizedFrameContext[]> {\n const startTime = performance.now();\n const stats = {\n cacheHits: 0,\n dbQueries: 0,\n totalRows: 0,\n };\n\n const {\n maxEvents = 20,\n includeClosed = false,\n enableCaching = true,\n batchSize = 10,\n } = options;\n\n try {\n // Batch process frames for better performance\n const contexts: OptimizedFrameContext[] = [];\n \n for (let i = 0; i < activeStack.length; i += batchSize) {\n const batch = activeStack.slice(i, i + batchSize);\n const batchContexts = await this.processBatch(\n batch,\n maxEvents,\n includeClosed,\n enableCaching,\n stats\n );\n contexts.push(...batchContexts);\n }\n\n const assemblyTimeMs = performance.now() - startTime;\n\n // Add performance stats to each context\n return contexts.map((context: any) => ({\n ...context,\n performance: {\n assemblyTimeMs: assemblyTimeMs / contexts.length,\n ...stats,\n },\n }));\n\n } catch (error: unknown) {\n logger.error('Failed to assemble hot stack context', error as Error, {\n activeStack,\n options,\n });\n throw error;\n }\n }\n\n /**\n * Get single frame context with full optimization\n */\n async getFrameContext(\n frameId: string,\n options: ContextAssemblyOptions = {}\n ): Promise<OptimizedFrameContext | null> {\n const startTime = performance.now();\n const stats = { cacheHits: 0, dbQueries: 0, totalRows: 0 };\n\n const {\n maxEvents = 50,\n enableCaching = true,\n } = options;\n\n // Check cache first\n const cacheKey = createCacheKey('frame_context', [frameId, maxEvents]);\n if (enableCaching) {\n const cached = this.cache.getFrameContext(cacheKey);\n if (cached) {\n stats.cacheHits++;\n return {\n ...cached,\n performance: {\n assemblyTimeMs: performance.now() - startTime,\n ...stats,\n },\n };\n }\n }\n\n try {\n const context = await this.assembleFrameContext(frameId, maxEvents, stats);\n \n if (!context) return null;\n\n // Cache the result\n if (enableCaching) {\n this.cache.cacheFrameContext(cacheKey, context);\n }\n\n const result: OptimizedFrameContext = {\n ...context,\n performance: {\n assemblyTimeMs: performance.now() - startTime,\n ...stats,\n },\n };\n\n return result;\n\n } catch (error: unknown) {\n logger.error('Failed to get frame context', error as Error, { frameId });\n throw error;\n }\n }\n\n /**\n * Process a batch of frames efficiently\n */\n private async 
processBatch(\n frameIds: string[],\n maxEvents: number,\n includeClosed: boolean,\n enableCaching: boolean,\n stats: { cacheHits: number; dbQueries: number; totalRows: number }\n ): Promise<OptimizedFrameContext[]> {\n const contexts: OptimizedFrameContext[] = [];\n \n // Get cached contexts first\n const uncachedIds = [];\n for (const frameId of frameIds) {\n const cacheKey = createCacheKey('frame_context', [frameId, maxEvents]);\n if (enableCaching) {\n const cached = this.cache.getFrameContext(cacheKey);\n if (cached) {\n stats.cacheHits++;\n contexts.push(cached);\n continue;\n }\n }\n uncachedIds.push(frameId);\n }\n\n if (uncachedIds.length === 0) {\n return contexts;\n }\n\n // Batch fetch uncached frames\n const frames = await this.batchGetFrames(uncachedIds, stats);\n const allEvents = await this.batchGetEvents(uncachedIds, maxEvents, stats);\n const allAnchors = await this.batchGetAnchors(uncachedIds, stats);\n const allArtifacts = await this.batchGetArtifacts(uncachedIds, stats);\n\n // Assemble contexts from batched data\n for (const frameId of uncachedIds) {\n const frame = frames.get(frameId);\n if (!frame || (!includeClosed && frame.state === 'closed')) {\n continue;\n }\n\n const context: FrameContext = {\n frameId,\n header: {\n goal: frame.name,\n constraints: this.extractConstraints(frame.inputs),\n definitions: frame.inputs.definitions,\n },\n anchors: allAnchors.get(frameId) || [],\n recentEvents: allEvents.get(frameId) || [],\n activeArtifacts: allArtifacts.get(frameId) || [],\n };\n\n // Cache the context\n if (enableCaching) {\n const cacheKey = createCacheKey('frame_context', [frameId, maxEvents]);\n this.cache.cacheFrameContext(cacheKey, context);\n }\n\n contexts.push(context as OptimizedFrameContext);\n }\n\n return contexts;\n }\n\n /**\n * Batch get frames with single query\n */\n private async batchGetFrames(\n frameIds: string[],\n stats: { dbQueries: number; totalRows: number }\n ): Promise<Map<string, Frame>> {\n if (frameIds.length === 0) return new Map();\n\n const stmt = this.preparedStatements.get('batch_frames');\n if (!stmt) throw new Error('Prepared statement not found: batch_frames');\n\n const placeholders = frameIds.map(() => '?').join(',');\n const query = `SELECT * FROM frames WHERE frame_id IN (${placeholders})`;\n \n stats.dbQueries++;\n const rows = this.db.prepare(query).all(...frameIds) as any[];\n stats.totalRows += rows.length;\n\n const frameMap = new Map<string, Frame>();\n for (const row of rows) {\n frameMap.set(row.frame_id, {\n ...row,\n inputs: JSON.parse(row.inputs || '{}'),\n outputs: JSON.parse(row.outputs || '{}'),\n digest_json: JSON.parse(row.digest_json || '{}'),\n });\n }\n\n return frameMap;\n }\n\n /**\n * Batch get events for multiple frames\n */\n private async batchGetEvents(\n frameIds: string[],\n maxEvents: number,\n stats: { dbQueries: number; totalRows: number }\n ): Promise<Map<string, Event[]>> {\n if (frameIds.length === 0) return new Map();\n\n const placeholders = frameIds.map(() => '?').join(',');\n const query = `\n SELECT *, ROW_NUMBER() OVER (PARTITION BY frame_id ORDER BY seq DESC) as rn\n FROM events \n WHERE frame_id IN (${placeholders}) \n AND rn <= ${maxEvents}\n ORDER BY frame_id, seq DESC\n `;\n\n stats.dbQueries++;\n const rows = this.db.prepare(query).all(...frameIds) as any[];\n stats.totalRows += rows.length;\n\n const eventMap = new Map<string, Event[]>();\n for (const row of rows) {\n if (!eventMap.has(row.frame_id)) {\n eventMap.set(row.frame_id, []);\n }\n 
eventMap.get(row.frame_id)!.push({\n ...row,\n payload: JSON.parse(row.payload),\n });\n }\n\n return eventMap;\n }\n\n /**\n * Batch get anchors for multiple frames\n */\n private async batchGetAnchors(\n frameIds: string[],\n stats: { dbQueries: number; totalRows: number }\n ): Promise<Map<string, Anchor[]>> {\n if (frameIds.length === 0) return new Map();\n\n const placeholders = frameIds.map(() => '?').join(',');\n const query = `\n SELECT * FROM anchors \n WHERE frame_id IN (${placeholders}) \n ORDER BY frame_id, priority DESC, created_at ASC\n `;\n\n stats.dbQueries++;\n const rows = this.db.prepare(query).all(...frameIds) as any[];\n stats.totalRows += rows.length;\n\n const anchorMap = new Map<string, Anchor[]>();\n for (const row of rows) {\n if (!anchorMap.has(row.frame_id)) {\n anchorMap.set(row.frame_id, []);\n }\n anchorMap.get(row.frame_id)!.push({\n ...row,\n metadata: JSON.parse(row.metadata || '{}'),\n });\n }\n\n return anchorMap;\n }\n\n /**\n * Batch get active artifacts for multiple frames\n */\n private async batchGetArtifacts(\n frameIds: string[],\n stats: { dbQueries: number; totalRows: number }\n ): Promise<Map<string, string[]>> {\n if (frameIds.length === 0) return new Map();\n\n const placeholders = frameIds.map(() => '?').join(',');\n const query = `\n SELECT frame_id, payload\n FROM events \n WHERE frame_id IN (${placeholders}) \n AND event_type = 'artifact'\n ORDER BY frame_id, ts DESC\n `;\n\n stats.dbQueries++;\n const rows = this.db.prepare(query).all(...frameIds) as any[];\n stats.totalRows += rows.length;\n\n const artifactMap = new Map<string, string[]>();\n for (const row of rows) {\n const payload = JSON.parse(row.payload);\n if (!artifactMap.has(row.frame_id)) {\n artifactMap.set(row.frame_id, []);\n }\n if (payload.path) {\n artifactMap.get(row.frame_id)!.push(payload.path);\n }\n }\n\n return artifactMap;\n }\n\n /**\n * Assemble single frame context\n */\n private async assembleFrameContext(\n frameId: string,\n maxEvents: number,\n stats: { dbQueries: number; totalRows: number }\n ): Promise<FrameContext | null> {\n // Single frame operations - these could be further optimized with prepared statements\n const frame = await this.batchGetFrames([frameId], stats).then(map => map.get(frameId));\n if (!frame) return null;\n\n const [events, anchors, artifacts] = await Promise.all([\n this.batchGetEvents([frameId], maxEvents, stats).then(map => map.get(frameId) || []),\n this.batchGetAnchors([frameId], stats).then(map => map.get(frameId) || []),\n this.batchGetArtifacts([frameId], stats).then(map => map.get(frameId) || []),\n ]);\n\n return {\n frameId,\n header: {\n goal: frame.name,\n constraints: this.extractConstraints(frame.inputs),\n definitions: frame.inputs.definitions,\n },\n anchors,\n recentEvents: events,\n activeArtifacts: artifacts,\n };\n }\n\n /**\n * Extract constraints from frame inputs\n */\n private extractConstraints(inputs: Record<string, any>): string[] {\n const constraints: string[] = [];\n \n if (inputs.constraints && Array.isArray(inputs.constraints)) {\n constraints.push(...inputs.constraints);\n }\n \n if (inputs.requirements && Array.isArray(inputs.requirements)) {\n constraints.push(...inputs.requirements);\n }\n \n if (inputs.limitations && Array.isArray(inputs.limitations)) {\n constraints.push(...inputs.limitations);\n }\n\n return constraints;\n }\n\n /**\n * Initialize prepared statements for common queries\n */\n private initializePreparedStatements(): void {\n try {\n // Single frame query\n 
this.preparedStatements.set(\n 'single_frame',\n this.db.prepare('SELECT * FROM frames WHERE frame_id = ?')\n );\n\n // Frame events with limit\n this.preparedStatements.set(\n 'frame_events',\n this.db.prepare('SELECT * FROM events WHERE frame_id = ? ORDER BY seq DESC LIMIT ?')\n );\n\n // Frame anchors\n this.preparedStatements.set(\n 'frame_anchors',\n this.db.prepare('SELECT * FROM anchors WHERE frame_id = ? ORDER BY priority DESC, created_at ASC')\n );\n\n logger.info('Prepared statements initialized for optimized context assembly');\n } catch (error: unknown) {\n logger.error('Failed to initialize prepared statements', error as Error);\n throw error;\n }\n }\n\n /**\n * Clear cache and reset prepared statements\n */\n cleanup(): void {\n this.cache.clear();\n // Modern better-sqlite3 automatically handles cleanup\n this.preparedStatements.clear();\n }\n}"],
-
"mappings": "AAMA,SAAS,eAAe,sBAAsB;AAC9C,SAAS,cAAc;
+
"sourcesContent": ["/**\n * Optimized Frame Context Assembly\n * High-performance context retrieval with caching and batching\n */\n\nimport Database from 'better-sqlite3';\nimport { getQueryCache, createCacheKey } from '../database/query-cache.js';\nimport { logger } from '../monitoring/logger.js';\nimport {\n Frame,\n FrameContext,\n Anchor,\n Event,\n} from '../context/frame-manager.js';\n\nexport interface ContextAssemblyOptions {\n maxEvents?: number;\n includeClosed?: boolean;\n enableCaching?: boolean;\n batchSize?: number;\n}\n\nexport interface OptimizedFrameContext extends FrameContext {\n performance: {\n assemblyTimeMs: number;\n cacheHits: number;\n dbQueries: number;\n totalRows: number;\n };\n}\n\n/**\n * Optimized context assembly with caching and batching\n */\nexport class OptimizedContextAssembler {\n private db: Database.Database;\n private cache = getQueryCache();\n private preparedStatements = new Map<string, Database.Statement>();\n\n constructor(db: Database.Database) {\n this.db = db;\n this.initializePreparedStatements();\n }\n\n /**\n * Get hot stack context with optimizations\n */\n async getHotStackContext(\n activeStack: string[],\n options: ContextAssemblyOptions = {}\n ): Promise<OptimizedFrameContext[]> {\n const startTime = performance.now();\n const stats = {\n cacheHits: 0,\n dbQueries: 0,\n totalRows: 0,\n };\n\n const {\n maxEvents = 20,\n includeClosed = false,\n enableCaching = true,\n batchSize = 10,\n } = options;\n\n try {\n // Batch process frames for better performance\n const contexts: OptimizedFrameContext[] = [];\n\n for (let i = 0; i < activeStack.length; i += batchSize) {\n const batch = activeStack.slice(i, i + batchSize);\n const batchContexts = await this.processBatch(\n batch,\n maxEvents,\n includeClosed,\n enableCaching,\n stats\n );\n contexts.push(...batchContexts);\n }\n\n const assemblyTimeMs = performance.now() - startTime;\n\n // Add performance stats to each context\n return contexts.map((context: any) => ({\n ...context,\n performance: {\n assemblyTimeMs: assemblyTimeMs / contexts.length,\n ...stats,\n },\n }));\n } catch (error: unknown) {\n logger.error('Failed to assemble hot stack context', error as Error, {\n activeStack,\n options,\n });\n throw error;\n }\n }\n\n /**\n * Get single frame context with full optimization\n */\n async getFrameContext(\n frameId: string,\n options: ContextAssemblyOptions = {}\n ): Promise<OptimizedFrameContext | null> {\n const startTime = performance.now();\n const stats = { cacheHits: 0, dbQueries: 0, totalRows: 0 };\n\n const { maxEvents = 50, enableCaching = true } = options;\n\n // Check cache first\n const cacheKey = createCacheKey('frame_context', [frameId, maxEvents]);\n if (enableCaching) {\n const cached = this.cache.getFrameContext(cacheKey);\n if (cached) {\n stats.cacheHits++;\n return {\n ...cached,\n performance: {\n assemblyTimeMs: performance.now() - startTime,\n ...stats,\n },\n };\n }\n }\n\n try {\n const context = await this.assembleFrameContext(\n frameId,\n maxEvents,\n stats\n );\n\n if (!context) return null;\n\n // Cache the result\n if (enableCaching) {\n this.cache.cacheFrameContext(cacheKey, context);\n }\n\n const result: OptimizedFrameContext = {\n ...context,\n performance: {\n assemblyTimeMs: performance.now() - startTime,\n ...stats,\n },\n };\n\n return result;\n } catch (error: unknown) {\n logger.error('Failed to get frame context', error as Error, { frameId });\n throw error;\n }\n }\n\n /**\n * Process a batch of frames efficiently\n */\n private async 
processBatch(\n frameIds: string[],\n maxEvents: number,\n includeClosed: boolean,\n enableCaching: boolean,\n stats: { cacheHits: number; dbQueries: number; totalRows: number }\n ): Promise<OptimizedFrameContext[]> {\n const contexts: OptimizedFrameContext[] = [];\n\n // Get cached contexts first\n const uncachedIds = [];\n for (const frameId of frameIds) {\n const cacheKey = createCacheKey('frame_context', [frameId, maxEvents]);\n if (enableCaching) {\n const cached = this.cache.getFrameContext(cacheKey);\n if (cached) {\n stats.cacheHits++;\n contexts.push(cached);\n continue;\n }\n }\n uncachedIds.push(frameId);\n }\n\n if (uncachedIds.length === 0) {\n return contexts;\n }\n\n // Batch fetch uncached frames\n const frames = await this.batchGetFrames(uncachedIds, stats);\n const allEvents = await this.batchGetEvents(uncachedIds, maxEvents, stats);\n const allAnchors = await this.batchGetAnchors(uncachedIds, stats);\n const allArtifacts = await this.batchGetArtifacts(uncachedIds, stats);\n\n // Assemble contexts from batched data\n for (const frameId of uncachedIds) {\n const frame = frames.get(frameId);\n if (!frame || (!includeClosed && frame.state === 'closed')) {\n continue;\n }\n\n const context: FrameContext = {\n frameId,\n header: {\n goal: frame.name,\n constraints: this.extractConstraints(frame.inputs),\n definitions: frame.inputs.definitions,\n },\n anchors: allAnchors.get(frameId) || [],\n recentEvents: allEvents.get(frameId) || [],\n activeArtifacts: allArtifacts.get(frameId) || [],\n };\n\n // Cache the context\n if (enableCaching) {\n const cacheKey = createCacheKey('frame_context', [frameId, maxEvents]);\n this.cache.cacheFrameContext(cacheKey, context);\n }\n\n contexts.push(context as OptimizedFrameContext);\n }\n\n return contexts;\n }\n\n /**\n * Batch get frames with single query\n */\n private async batchGetFrames(\n frameIds: string[],\n stats: { dbQueries: number; totalRows: number }\n ): Promise<Map<string, Frame>> {\n if (frameIds.length === 0) return new Map();\n\n const stmt = this.preparedStatements.get('batch_frames');\n if (!stmt) throw new Error('Prepared statement not found: batch_frames');\n\n const placeholders = frameIds.map(() => '?').join(',');\n const query = `SELECT * FROM frames WHERE frame_id IN (${placeholders})`;\n\n stats.dbQueries++;\n const rows = this.db.prepare(query).all(...frameIds) as any[];\n stats.totalRows += rows.length;\n\n const frameMap = new Map<string, Frame>();\n for (const row of rows) {\n frameMap.set(row.frame_id, {\n ...row,\n inputs: JSON.parse(row.inputs || '{}'),\n outputs: JSON.parse(row.outputs || '{}'),\n digest_json: JSON.parse(row.digest_json || '{}'),\n });\n }\n\n return frameMap;\n }\n\n /**\n * Batch get events for multiple frames\n */\n private async batchGetEvents(\n frameIds: string[],\n maxEvents: number,\n stats: { dbQueries: number; totalRows: number }\n ): Promise<Map<string, Event[]>> {\n if (frameIds.length === 0) return new Map();\n\n const placeholders = frameIds.map(() => '?').join(',');\n const query = `\n SELECT *, ROW_NUMBER() OVER (PARTITION BY frame_id ORDER BY seq DESC) as rn\n FROM events \n WHERE frame_id IN (${placeholders}) \n AND rn <= ${maxEvents}\n ORDER BY frame_id, seq DESC\n `;\n\n stats.dbQueries++;\n const rows = this.db.prepare(query).all(...frameIds) as any[];\n stats.totalRows += rows.length;\n\n const eventMap = new Map<string, Event[]>();\n for (const row of rows) {\n if (!eventMap.has(row.frame_id)) {\n eventMap.set(row.frame_id, []);\n }\n 
eventMap.get(row.frame_id)!.push({\n ...row,\n payload: JSON.parse(row.payload),\n });\n }\n\n return eventMap;\n }\n\n /**\n * Batch get anchors for multiple frames\n */\n private async batchGetAnchors(\n frameIds: string[],\n stats: { dbQueries: number; totalRows: number }\n ): Promise<Map<string, Anchor[]>> {\n if (frameIds.length === 0) return new Map();\n\n const placeholders = frameIds.map(() => '?').join(',');\n const query = `\n SELECT * FROM anchors \n WHERE frame_id IN (${placeholders}) \n ORDER BY frame_id, priority DESC, created_at ASC\n `;\n\n stats.dbQueries++;\n const rows = this.db.prepare(query).all(...frameIds) as any[];\n stats.totalRows += rows.length;\n\n const anchorMap = new Map<string, Anchor[]>();\n for (const row of rows) {\n if (!anchorMap.has(row.frame_id)) {\n anchorMap.set(row.frame_id, []);\n }\n anchorMap.get(row.frame_id)!.push({\n ...row,\n metadata: JSON.parse(row.metadata || '{}'),\n });\n }\n\n return anchorMap;\n }\n\n /**\n * Batch get active artifacts for multiple frames\n */\n private async batchGetArtifacts(\n frameIds: string[],\n stats: { dbQueries: number; totalRows: number }\n ): Promise<Map<string, string[]>> {\n if (frameIds.length === 0) return new Map();\n\n const placeholders = frameIds.map(() => '?').join(',');\n const query = `\n SELECT frame_id, payload\n FROM events \n WHERE frame_id IN (${placeholders}) \n AND event_type = 'artifact'\n ORDER BY frame_id, ts DESC\n `;\n\n stats.dbQueries++;\n const rows = this.db.prepare(query).all(...frameIds) as any[];\n stats.totalRows += rows.length;\n\n const artifactMap = new Map<string, string[]>();\n for (const row of rows) {\n const payload = JSON.parse(row.payload);\n if (!artifactMap.has(row.frame_id)) {\n artifactMap.set(row.frame_id, []);\n }\n if (payload.path) {\n artifactMap.get(row.frame_id)!.push(payload.path);\n }\n }\n\n return artifactMap;\n }\n\n /**\n * Assemble single frame context\n */\n private async assembleFrameContext(\n frameId: string,\n maxEvents: number,\n stats: { dbQueries: number; totalRows: number }\n ): Promise<FrameContext | null> {\n // Single frame operations - these could be further optimized with prepared statements\n const frame = await this.batchGetFrames([frameId], stats).then((map) =>\n map.get(frameId)\n );\n if (!frame) return null;\n\n const [events, anchors, artifacts] = await Promise.all([\n this.batchGetEvents([frameId], maxEvents, stats).then(\n (map) => map.get(frameId) || []\n ),\n this.batchGetAnchors([frameId], stats).then(\n (map) => map.get(frameId) || []\n ),\n this.batchGetArtifacts([frameId], stats).then(\n (map) => map.get(frameId) || []\n ),\n ]);\n\n return {\n frameId,\n header: {\n goal: frame.name,\n constraints: this.extractConstraints(frame.inputs),\n definitions: frame.inputs.definitions,\n },\n anchors,\n recentEvents: events,\n activeArtifacts: artifacts,\n };\n }\n\n /**\n * Extract constraints from frame inputs\n */\n private extractConstraints(inputs: Record<string, any>): string[] {\n const constraints: string[] = [];\n\n if (inputs.constraints && Array.isArray(inputs.constraints)) {\n constraints.push(...inputs.constraints);\n }\n\n if (inputs.requirements && Array.isArray(inputs.requirements)) {\n constraints.push(...inputs.requirements);\n }\n\n if (inputs.limitations && Array.isArray(inputs.limitations)) {\n constraints.push(...inputs.limitations);\n }\n\n return constraints;\n }\n\n /**\n * Initialize prepared statements for common queries\n */\n private initializePreparedStatements(): void {\n try {\n // Single frame 
query\n this.preparedStatements.set(\n 'single_frame',\n this.db.prepare('SELECT * FROM frames WHERE frame_id = ?')\n );\n\n // Frame events with limit\n this.preparedStatements.set(\n 'frame_events',\n this.db.prepare(\n 'SELECT * FROM events WHERE frame_id = ? ORDER BY seq DESC LIMIT ?'\n )\n );\n\n // Frame anchors\n this.preparedStatements.set(\n 'frame_anchors',\n this.db.prepare(\n 'SELECT * FROM anchors WHERE frame_id = ? ORDER BY priority DESC, created_at ASC'\n )\n );\n\n logger.info(\n 'Prepared statements initialized for optimized context assembly'\n );\n } catch (error: unknown) {\n logger.error('Failed to initialize prepared statements', error as Error);\n throw error;\n }\n }\n\n /**\n * Clear cache and reset prepared statements\n */\n cleanup(): void {\n this.cache.clear();\n // Modern better-sqlite3 automatically handles cleanup\n this.preparedStatements.clear();\n }\n}\n"],
+
"mappings": "AAMA,SAAS,eAAe,sBAAsB;AAC9C,SAAS,cAAc;AA2BhB,MAAM,0BAA0B;AAAA,EAC7B;AAAA,EACA,QAAQ,cAAc;AAAA,EACtB,qBAAqB,oBAAI,IAAgC;AAAA,EAEjE,YAAY,IAAuB;AACjC,SAAK,KAAK;AACV,SAAK,6BAA6B;AAAA,EACpC;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,mBACJ,aACA,UAAkC,CAAC,GACD;AAClC,UAAM,YAAY,YAAY,IAAI;AAClC,UAAM,QAAQ;AAAA,MACZ,WAAW;AAAA,MACX,WAAW;AAAA,MACX,WAAW;AAAA,IACb;AAEA,UAAM;AAAA,MACJ,YAAY;AAAA,MACZ,gBAAgB;AAAA,MAChB,gBAAgB;AAAA,MAChB,YAAY;AAAA,IACd,IAAI;AAEJ,QAAI;AAEF,YAAM,WAAoC,CAAC;AAE3C,eAAS,IAAI,GAAG,IAAI,YAAY,QAAQ,KAAK,WAAW;AACtD,cAAM,QAAQ,YAAY,MAAM,GAAG,IAAI,SAAS;AAChD,cAAM,gBAAgB,MAAM,KAAK;AAAA,UAC/B;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,QACF;AACA,iBAAS,KAAK,GAAG,aAAa;AAAA,MAChC;AAEA,YAAM,iBAAiB,YAAY,IAAI,IAAI;AAG3C,aAAO,SAAS,IAAI,CAAC,aAAkB;AAAA,QACrC,GAAG;AAAA,QACH,aAAa;AAAA,UACX,gBAAgB,iBAAiB,SAAS;AAAA,UAC1C,GAAG;AAAA,QACL;AAAA,MACF,EAAE;AAAA,IACJ,SAAS,OAAgB;AACvB,aAAO,MAAM,wCAAwC,OAAgB;AAAA,QACnE;AAAA,QACA;AAAA,MACF,CAAC;AACD,YAAM;AAAA,IACR;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,gBACJ,SACA,UAAkC,CAAC,GACI;AACvC,UAAM,YAAY,YAAY,IAAI;AAClC,UAAM,QAAQ,EAAE,WAAW,GAAG,WAAW,GAAG,WAAW,EAAE;AAEzD,UAAM,EAAE,YAAY,IAAI,gBAAgB,KAAK,IAAI;AAGjD,UAAM,WAAW,eAAe,iBAAiB,CAAC,SAAS,SAAS,CAAC;AACrE,QAAI,eAAe;AACjB,YAAM,SAAS,KAAK,MAAM,gBAAgB,QAAQ;AAClD,UAAI,QAAQ;AACV,cAAM;AACN,eAAO;AAAA,UACL,GAAG;AAAA,UACH,aAAa;AAAA,YACX,gBAAgB,YAAY,IAAI,IAAI;AAAA,YACpC,GAAG;AAAA,UACL;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAEA,QAAI;AACF,YAAM,UAAU,MAAM,KAAK;AAAA,QACzB;AAAA,QACA;AAAA,QACA;AAAA,MACF;AAEA,UAAI,CAAC,QAAS,QAAO;AAGrB,UAAI,eAAe;AACjB,aAAK,MAAM,kBAAkB,UAAU,OAAO;AAAA,MAChD;AAEA,YAAM,SAAgC;AAAA,QACpC,GAAG;AAAA,QACH,aAAa;AAAA,UACX,gBAAgB,YAAY,IAAI,IAAI;AAAA,UACpC,GAAG;AAAA,QACL;AAAA,MACF;AAEA,aAAO;AAAA,IACT,SAAS,OAAgB;AACvB,aAAO,MAAM,+BAA+B,OAAgB,EAAE,QAAQ,CAAC;AACvE,YAAM;AAAA,IACR;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,aACZ,UACA,WACA,eACA,eACA,OACkC;AAClC,UAAM,WAAoC,CAAC;AAG3C,UAAM,cAAc,CAAC;AACrB,eAAW,WAAW,UAAU;AAC9B,YAAM,WAAW,eAAe,iBAAiB,CAAC,SAAS,SAAS,CAAC;AACrE,UAAI,eAAe;AACjB,cAAM,SAAS,KAAK,MAAM,gBAAgB,QAAQ;AAClD,YAAI,QAAQ;AACV,gBAAM;AACN,mBAAS,KAAK,MAAM;AACpB;AAAA,QACF;AAAA,MACF;AACA,kBAAY,KAAK,OAAO;AAAA,IAC1B;AAEA,QAAI,YAAY,WAAW,GAAG;AAC5B,aAAO;AAAA,IACT;AAGA,UAAM,SAAS,MAAM,KAAK,eAAe,aAAa,KAAK;AAC3D,UAAM,YAAY,MAAM,KAAK,eAAe,aAAa,WAAW,KAAK;AACzE,UAAM,aAAa,MAAM,KAAK,gBAAgB,aAAa,KAAK;AAChE,UAAM,eAAe,MAAM,KAAK,kBAAkB,aAAa,KAAK;AAGpE,eAAW,WAAW,aAAa;AACjC,YAAM,QAAQ,OAAO,IAAI,OAAO;AAChC,UAAI,CAAC,SAAU,CAAC,iBAAiB,MAAM,UAAU,UAAW;AAC1D;AAAA,MACF;AAEA,YAAM,UAAwB;AAAA,QAC5B;AAAA,QACA,QAAQ;AAAA,UACN,MAAM,MAAM;AAAA,UACZ,aAAa,KAAK,mBAAmB,MAAM,MAAM;AAAA,UACjD,aAAa,MAAM,OAAO;AAAA,QAC5B;AAAA,QACA,SAAS,WAAW,IAAI,OAAO,KAAK,CAAC;AAAA,QACrC,cAAc,UAAU,IAAI,OAAO,KAAK,CAAC;AAAA,QACzC,iBAAiB,aAAa,IAAI,OAAO,KAAK,CAAC;AAAA,MACjD;AAGA,UAAI,eAAe;AACjB,cAAM,WAAW,eAAe,iBAAiB,CAAC,SAAS,SAAS,CAAC;AACrE,aAAK,MAAM,kBAAkB,UAAU,OAAO;AAAA,MAChD;AAEA,eAAS,KAAK,OAAgC;AAAA,IAChD;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,eACZ,UACA,OAC6B;AAC7B,QAAI,SAAS,WAAW,EAAG,QAAO,oBAAI,IAAI;AAE1C,UAAM,OAAO,KAAK,mBAAmB,IAAI,cAAc;AACvD,QAAI,CAAC,KAAM,OAAM,IAAI,MAAM,4CAA4C;AAEvE,UAAM,eAAe,SAAS,IAAI,MAAM,GAAG,EAAE,KAAK,GAAG;AACrD,UAAM,QAAQ,2CAA2C,YAAY;AAErE,UAAM;AACN,UAAM,OAAO,KAAK,GAAG,QAAQ,KAAK,EAAE,IAAI,GAAG,QAAQ;AACnD,UAAM,aAAa,KAAK;AAExB,UAAM,WAAW,oBAAI,IAAmB;AACxC,eAAW,OAAO,MAAM;AACtB,eAAS,IAAI,IAAI,UAAU;AAAA,QACzB,GAAG;AAAA,QACH,QAAQ,KAAK,MAAM,IAAI,UAAU,IAAI;AAAA,QACrC,SAAS,KAAK,MAAM,IAAI,WAAW,IAAI;AAAA,QACvC,aAAa,KAAK,MAAM,IAAI,eAAe,IAAI;AAAA,MACjD,CAAC;AAAA,IACH;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,eACZ,UACA,WACA,OAC+B;AAC/B,QAAI,SAAS,WAAW,EAAG,QAAO,
oBAAI,IAAI;AAE1C,UAAM,eAAe,SAAS,IAAI,MAAM,GAAG,EAAE,KAAK,GAAG;AACrD,UAAM,QAAQ;AAAA;AAAA;AAAA,2BAGS,YAAY;AAAA,kBACrB,SAAS;AAAA;AAAA;AAIvB,UAAM;AACN,UAAM,OAAO,KAAK,GAAG,QAAQ,KAAK,EAAE,IAAI,GAAG,QAAQ;AACnD,UAAM,aAAa,KAAK;AAExB,UAAM,WAAW,oBAAI,IAAqB;AAC1C,eAAW,OAAO,MAAM;AACtB,UAAI,CAAC,SAAS,IAAI,IAAI,QAAQ,GAAG;AAC/B,iBAAS,IAAI,IAAI,UAAU,CAAC,CAAC;AAAA,MAC/B;AACA,eAAS,IAAI,IAAI,QAAQ,EAAG,KAAK;AAAA,QAC/B,GAAG;AAAA,QACH,SAAS,KAAK,MAAM,IAAI,OAAO;AAAA,MACjC,CAAC;AAAA,IACH;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,gBACZ,UACA,OACgC;AAChC,QAAI,SAAS,WAAW,EAAG,QAAO,oBAAI,IAAI;AAE1C,UAAM,eAAe,SAAS,IAAI,MAAM,GAAG,EAAE,KAAK,GAAG;AACrD,UAAM,QAAQ;AAAA;AAAA,2BAES,YAAY;AAAA;AAAA;AAInC,UAAM;AACN,UAAM,OAAO,KAAK,GAAG,QAAQ,KAAK,EAAE,IAAI,GAAG,QAAQ;AACnD,UAAM,aAAa,KAAK;AAExB,UAAM,YAAY,oBAAI,IAAsB;AAC5C,eAAW,OAAO,MAAM;AACtB,UAAI,CAAC,UAAU,IAAI,IAAI,QAAQ,GAAG;AAChC,kBAAU,IAAI,IAAI,UAAU,CAAC,CAAC;AAAA,MAChC;AACA,gBAAU,IAAI,IAAI,QAAQ,EAAG,KAAK;AAAA,QAChC,GAAG;AAAA,QACH,UAAU,KAAK,MAAM,IAAI,YAAY,IAAI;AAAA,MAC3C,CAAC;AAAA,IACH;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,kBACZ,UACA,OACgC;AAChC,QAAI,SAAS,WAAW,EAAG,QAAO,oBAAI,IAAI;AAE1C,UAAM,eAAe,SAAS,IAAI,MAAM,GAAG,EAAE,KAAK,GAAG;AACrD,UAAM,QAAQ;AAAA;AAAA;AAAA,2BAGS,YAAY;AAAA;AAAA;AAAA;AAKnC,UAAM;AACN,UAAM,OAAO,KAAK,GAAG,QAAQ,KAAK,EAAE,IAAI,GAAG,QAAQ;AACnD,UAAM,aAAa,KAAK;AAExB,UAAM,cAAc,oBAAI,IAAsB;AAC9C,eAAW,OAAO,MAAM;AACtB,YAAM,UAAU,KAAK,MAAM,IAAI,OAAO;AACtC,UAAI,CAAC,YAAY,IAAI,IAAI,QAAQ,GAAG;AAClC,oBAAY,IAAI,IAAI,UAAU,CAAC,CAAC;AAAA,MAClC;AACA,UAAI,QAAQ,MAAM;AAChB,oBAAY,IAAI,IAAI,QAAQ,EAAG,KAAK,QAAQ,IAAI;AAAA,MAClD;AAAA,IACF;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,qBACZ,SACA,WACA,OAC8B;AAE9B,UAAM,QAAQ,MAAM,KAAK,eAAe,CAAC,OAAO,GAAG,KAAK,EAAE;AAAA,MAAK,CAAC,QAC9D,IAAI,IAAI,OAAO;AAAA,IACjB;AACA,QAAI,CAAC,MAAO,QAAO;AAEnB,UAAM,CAAC,QAAQ,SAAS,SAAS,IAAI,MAAM,QAAQ,IAAI;AAAA,MACrD,KAAK,eAAe,CAAC,OAAO,GAAG,WAAW,KAAK,EAAE;AAAA,QAC/C,CAAC,QAAQ,IAAI,IAAI,OAAO,KAAK,CAAC;AAAA,MAChC;AAAA,MACA,KAAK,gBAAgB,CAAC,OAAO,GAAG,KAAK,EAAE;AAAA,QACrC,CAAC,QAAQ,IAAI,IAAI,OAAO,KAAK,CAAC;AAAA,MAChC;AAAA,MACA,KAAK,kBAAkB,CAAC,OAAO,GAAG,KAAK,EAAE;AAAA,QACvC,CAAC,QAAQ,IAAI,IAAI,OAAO,KAAK,CAAC;AAAA,MAChC;AAAA,IACF,CAAC;AAED,WAAO;AAAA,MACL;AAAA,MACA,QAAQ;AAAA,QACN,MAAM,MAAM;AAAA,QACZ,aAAa,KAAK,mBAAmB,MAAM,MAAM;AAAA,QACjD,aAAa,MAAM,OAAO;AAAA,MAC5B;AAAA,MACA;AAAA,MACA,cAAc;AAAA,MACd,iBAAiB;AAAA,IACnB;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,mBAAmB,QAAuC;AAChE,UAAM,cAAwB,CAAC;AAE/B,QAAI,OAAO,eAAe,MAAM,QAAQ,OAAO,WAAW,GAAG;AAC3D,kBAAY,KAAK,GAAG,OAAO,WAAW;AAAA,IACxC;AAEA,QAAI,OAAO,gBAAgB,MAAM,QAAQ,OAAO,YAAY,GAAG;AAC7D,kBAAY,KAAK,GAAG,OAAO,YAAY;AAAA,IACzC;AAEA,QAAI,OAAO,eAAe,MAAM,QAAQ,OAAO,WAAW,GAAG;AAC3D,kBAAY,KAAK,GAAG,OAAO,WAAW;AAAA,IACxC;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKQ,+BAAqC;AAC3C,QAAI;AAEF,WAAK,mBAAmB;AAAA,QACtB;AAAA,QACA,KAAK,GAAG,QAAQ,yCAAyC;AAAA,MAC3D;AAGA,WAAK,mBAAmB;AAAA,QACtB;AAAA,QACA,KAAK,GAAG;AAAA,UACN;AAAA,QACF;AAAA,MACF;AAGA,WAAK,mBAAmB;AAAA,QACtB;AAAA,QACA,KAAK,GAAG;AAAA,UACN;AAAA,QACF;AAAA,MACF;AAEA,aAAO;AAAA,QACL;AAAA,MACF;AAAA,IACF,SAAS,OAAgB;AACvB,aAAO,MAAM,4CAA4C,KAAc;AACvE,YAAM;AAAA,IACR;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,UAAgB;AACd,SAAK,MAAM,MAAM;AAEjB,SAAK,mBAAmB,MAAM;AAAA,EAChC;AACF;",
 "names": []
 }
package/dist/core/performance/performance-benchmark.js

@@ -129,9 +129,7 @@ class PerformanceBenchmark {
     const loader = new LazyContextLoader(db, projectId);
     let frames = [];
     try {
-      frames = db.prepare(
-        "SELECT id FROM frames ORDER BY updated_at DESC LIMIT ?"
-      ).all(frameCount);
+      frames = db.prepare("SELECT id FROM frames ORDER BY updated_at DESC LIMIT ?").all(frameCount);
     } catch (error) {
       logger.warn("Frames table not found, using mock data for benchmark");
       frames = Array.from({ length: Math.min(frameCount, 10) }, (_, i) => ({

@@ -227,17 +225,23 @@ class PerformanceBenchmark {
     for (const result of suite.results) {
       console.log(`\u{1F4CA} ${result.name}`);
       console.log(` Duration: ${result.duration.toFixed(2)}ms`);
-      console.log(
+      console.log(
+        ` Memory: ${(result.memoryUsed / 1024 / 1024).toFixed(2)}MB`
+      );
       console.log(` Throughput: ${result.throughput.toFixed(0)} items/sec`);
       if (result.improvement !== void 0) {
         const icon = result.improvement > 0 ? "\u{1F680}" : "\u26A0\uFE0F";
-        console.log(
+        console.log(
+          ` ${icon} Improvement: ${result.improvement.toFixed(1)}%`
+        );
       }
       console.log("");
     }
console.log("\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550");
     console.log(`\u23F1\uFE0F Total Duration: ${suite.totalDuration.toFixed(2)}ms`);
-    console.log(
+    console.log(
+      `\u{1F4C8} Average Improvement: ${suite.averageImprovement.toFixed(1)}%`
+    );
     console.log("");
     logger.info("Performance Benchmark Complete", {
       suite: suite.name,
package/dist/core/performance/performance-benchmark.js.map

@@ -1,7 +1,7 @@
 {
 "version": 3,
 "sources": ["../../../src/core/performance/performance-benchmark.ts"],
-
"sourcesContent": ["/**\n * Performance Benchmark Suite\n * Measure improvements from optimization efforts\n */\n\nimport { performance } from 'perf_hooks';\nimport { logger } from '../monitoring/logger.js';\nimport { StreamingJSONLParser } from './streaming-jsonl-parser.js';\nimport { ContextCache } from './context-cache.js';\nimport { LazyContextLoader } from './lazy-context-loader.js';\nimport { readFileSync } from 'fs';\nimport { join } from 'path';\n\nexport interface BenchmarkResult {\n name: string;\n duration: number;\n memoryUsed: number;\n itemsProcessed: number;\n throughput: number;\n improvement?: number;\n}\n\nexport interface BenchmarkSuite {\n name: string;\n results: BenchmarkResult[];\n totalDuration: number;\n averageImprovement: number;\n}\n\nexport class PerformanceBenchmark {\n private results: BenchmarkResult[] = [];\n\n /**\n * Benchmark JSONL parsing performance\n */\n async benchmarkJSONLParsing(\n filePath: string,\n iterations = 3\n ): Promise<BenchmarkResult> {\n const parser = new StreamingJSONLParser();\n \n // Baseline: traditional sync parsing\n const baselineStart = performance.now();\n const baselineMemStart = process.memoryUsage().heapUsed;\n \n let baselineCount = 0;\n for (let i = 0; i < iterations; i++) {\n const content = readFileSync(filePath, 'utf8');\n const lines = content.split('\\n').filter((l: any) => l.trim());\n for (const line of lines) {\n try {\n JSON.parse(line);\n baselineCount++;\n } catch {}\n }\n }\n \n const baselineDuration = performance.now() - baselineStart;\n const baselineMemUsed = process.memoryUsage().heapUsed - baselineMemStart;\n \n // Optimized: streaming parser\n const optimizedStart = performance.now();\n const optimizedMemStart = process.memoryUsage().heapUsed;\n \n let optimizedCount = 0;\n for (let i = 0; i < iterations; i++) {\n for await (const batch of parser.parseStream(filePath)) {\n optimizedCount += batch.length;\n }\n }\n \n const optimizedDuration = performance.now() - optimizedStart;\n const optimizedMemUsed = process.memoryUsage().heapUsed - optimizedMemStart;\n \n const improvement = ((baselineDuration - optimizedDuration) / baselineDuration) * 100;\n const memImprovement = ((baselineMemUsed - optimizedMemUsed) / baselineMemUsed) * 100;\n \n const result: BenchmarkResult = {\n name: 'JSONL Parsing',\n duration: optimizedDuration / iterations,\n memoryUsed: optimizedMemUsed,\n itemsProcessed: optimizedCount / iterations,\n throughput: (optimizedCount / iterations) / (optimizedDuration / 1000 / iterations),\n improvement,\n };\n \n logger.info('JSONL Parsing Benchmark', {\n baseline: {\n duration: baselineDuration / iterations,\n memory: baselineMemUsed,\n throughput: (baselineCount / iterations) / (baselineDuration / 1000 / iterations),\n },\n optimized: result,\n improvements: {\n speed: `${improvement.toFixed(1)}%`,\n memory: `${memImprovement.toFixed(1)}%`,\n },\n });\n \n this.results.push(result);\n return result;\n }\n\n /**\n * Benchmark context caching performance\n */\n async benchmarkContextCache(\n itemCount = 1000,\n accessPatterns = 10000\n ): Promise<BenchmarkResult> {\n const cache = new ContextCache<any>({\n maxSize: 50 * 1024 * 1024,\n maxItems: itemCount,\n });\n \n // Prepare test data\n const testData = Array.from({ length: itemCount }, (_, i) => ({\n key: `item-${i}`,\n value: { \n id: i, \n data: 'x'.repeat(Math.floor(Math.random() * 1000)),\n timestamp: Date.now(),\n },\n }));\n \n // Populate cache\n const populateStart = performance.now();\n for (const item of testData) {\n 
cache.set(item.key, item.value);\n }\n const populateDuration = performance.now() - populateStart;\n \n // Benchmark cache access\n const accessStart = performance.now();\n let hits = 0;\n let misses = 0;\n \n for (let i = 0; i < accessPatterns; i++) {\n const index = Math.floor(Math.random() * itemCount * 1.2); // Some will miss\n const key = `item-${index}`;\n const result = cache.get(key);\n if (result) hits++;\n else misses++;\n }\n \n const accessDuration = performance.now() - accessStart;\n const stats = cache.getStats();\n \n const result: BenchmarkResult = {\n name: 'Context Cache',\n duration: accessDuration,\n memoryUsed: cache.getSize().bytes,\n itemsProcessed: accessPatterns,\n throughput: accessPatterns / (accessDuration / 1000),\n improvement: stats.hitRate * 100,\n };\n \n logger.info('Context Cache Benchmark', {\n populate: {\n duration: populateDuration,\n items: itemCount,\n },\n access: {\n duration: accessDuration,\n patterns: accessPatterns,\n hitRate: `${(stats.hitRate * 100).toFixed(1)}%`,\n },\n performance: {\n throughput: `${result.throughput.toFixed(0)} ops/sec`,\n avgAccessTime: `${stats.avgAccessTime.toFixed(2)}ms`,\n },\n });\n \n this.results.push(result);\n return result;\n }\n\n /**\n * Benchmark lazy loading performance\n */\n async benchmarkLazyLoading(\n db: any,\n projectId: string,\n frameCount = 100\n ): Promise<BenchmarkResult> {\n const loader = new LazyContextLoader(db, projectId);\n \n // Check if frames table exists, if not use a mock test\n let frames: any[] = [];\n try {\n frames = db.prepare(\n 'SELECT id FROM frames ORDER BY updated_at DESC LIMIT ?'\n ).all(frameCount) as any[];\n } catch (error: unknown) {\n // Create mock frame IDs if table doesn't exist\n logger.warn('Frames table not found, using mock data for benchmark');\n frames = Array.from({ length: Math.min(frameCount, 10) }, (_, i) => ({\n id: `frame-${i}`,\n }));\n }\n \n const frameIds = frames.map((f: any) => f.id);\n \n // Benchmark eager loading (baseline)\n const eagerStart = performance.now();\n const eagerMemStart = process.memoryUsage().heapUsed;\n \n const eagerData = [];\n for (const id of frameIds) {\n try {\n const frame = db.prepare('SELECT * FROM frames WHERE id = ?').get(id);\n const anchors = db.prepare('SELECT * FROM anchors WHERE frame_id = ?').all(id);\n const events = db.prepare('SELECT * FROM events WHERE frame_id = ? 
LIMIT 10').all(id);\n eagerData.push({ frame, anchors, events });\n } catch {\n // Use mock data if tables don't exist\n eagerData.push({\n frame: { id, type: 'mock', name: `Mock ${id}` },\n anchors: [],\n events: [],\n });\n }\n }\n \n const eagerDuration = performance.now() - eagerStart;\n const eagerMemUsed = process.memoryUsage().heapUsed - eagerMemStart;\n \n // Benchmark lazy loading\n const lazyStart = performance.now();\n const lazyMemStart = process.memoryUsage().heapUsed;\n \n // Preload with lazy loading\n await loader.preloadContext(frameIds, { parallel: true, depth: 2 });\n \n // Access data lazily\n let accessedCount = 0;\n for (const id of frameIds.slice(0, frameCount / 2)) {\n const frame = await loader.lazyFrame(id).get();\n if (frame) accessedCount++;\n }\n \n const lazyDuration = performance.now() - lazyStart;\n const lazyMemUsed = process.memoryUsage().heapUsed - lazyMemStart;\n \n const improvement = ((eagerDuration - lazyDuration) / eagerDuration) * 100;\n const memImprovement = ((eagerMemUsed - lazyMemUsed) / eagerMemUsed) * 100;\n \n const result: BenchmarkResult = {\n name: 'Lazy Loading',\n duration: lazyDuration,\n memoryUsed: lazyMemUsed,\n itemsProcessed: frameCount,\n throughput: frameCount / (lazyDuration / 1000),\n improvement,\n };\n \n logger.info('Lazy Loading Benchmark', {\n eager: {\n duration: eagerDuration,\n memory: eagerMemUsed,\n },\n lazy: {\n duration: lazyDuration,\n memory: lazyMemUsed,\n accessed: accessedCount,\n },\n improvements: {\n speed: `${improvement.toFixed(1)}%`,\n memory: `${memImprovement.toFixed(1)}%`,\n },\n });\n \n this.results.push(result);\n return result;\n }\n\n /**\n * Run full benchmark suite\n */\n async runFullSuite(\n projectRoot: string,\n db: any,\n projectId: string\n ): Promise<BenchmarkSuite> {\n const suiteStart = performance.now();\n \n logger.info('Starting Performance Benchmark Suite');\n \n // Run benchmarks\n const tasksFile = join(projectRoot, '.stackmemory', 'tasks.jsonl');\n \n const jsonlResult = await this.benchmarkJSONLParsing(tasksFile);\n const cacheResult = await this.benchmarkContextCache();\n const lazyResult = await this.benchmarkLazyLoading(db, projectId);\n \n const totalDuration = performance.now() - suiteStart;\n const averageImprovement = this.results\n .filter((r: any) => r.improvement !== undefined)\n .reduce((sum, r) => sum + (r.improvement || 0), 0) / \n this.results.filter((r: any) => r.improvement !== undefined).length;\n \n const suite: BenchmarkSuite = {\n name: 'Performance Optimization Suite',\n results: this.results,\n totalDuration,\n averageImprovement,\n };\n \n // Generate summary report\n this.generateReport(suite);\n \n return suite;\n }\n\n /**\n * Generate performance report\n */\n private generateReport(suite: BenchmarkSuite): void {\n console.log('\\n\u2554\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2557');\n console.log('\u2551 Performance Benchmark Results \u2551');\n console.log('\u255A\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u255D\\n');\n \n for (const result of suite.results) {\n console.log(`\uD83D\uDCCA ${result.name}`);\n 
console.log(` Duration: ${result.duration.toFixed(2)}ms`);\n console.log(` Memory: ${(result.memoryUsed / 1024 / 1024).toFixed(2)}MB`);\n console.log(` Throughput: ${result.throughput.toFixed(0)} items/sec`);\n if (result.improvement !== undefined) {\n const icon = result.improvement > 0 ? '\uD83D\uDE80' : '\u26A0\uFE0F';\n console.log(` ${icon} Improvement: ${result.improvement.toFixed(1)}%`);\n }\n console.log('');\n }\n \n console.log('\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550');\n console.log(`\u23F1\uFE0F Total Duration: ${suite.totalDuration.toFixed(2)}ms`);\n console.log(`\uD83D\uDCC8 Average Improvement: ${suite.averageImprovement.toFixed(1)}%`);\n console.log('');\n \n logger.info('Performance Benchmark Complete', {\n suite: suite.name,\n duration: suite.totalDuration,\n avgImprovement: suite.averageImprovement,\n results: suite.results.map((r: any) => ({\n name: r.name,\n improvement: r.improvement,\n throughput: r.throughput,\n })),\n });\n }\n\n /**\n * Get benchmark results\n */\n getResults(): BenchmarkResult[] {\n return this.results;\n }\n\n /**\n * Clear results\n */\n clearResults(): void {\n this.results = [];\n }\n}"],
-
"mappings": "AAKA,SAAS,mBAAmB;AAC5B,SAAS,cAAc;AACvB,SAAS,4BAA4B;AACrC,SAAS,oBAAoB;AAC7B,SAAS,yBAAyB;AAClC,SAAS,oBAAoB;AAC7B,SAAS,YAAY;AAkBd,MAAM,qBAAqB;AAAA,EACxB,UAA6B,CAAC;AAAA;AAAA;AAAA;AAAA,EAKtC,MAAM,sBACJ,UACA,aAAa,GACa;AAC1B,UAAM,SAAS,IAAI,qBAAqB;AAGxC,UAAM,gBAAgB,YAAY,IAAI;AACtC,UAAM,mBAAmB,QAAQ,YAAY,EAAE;AAE/C,QAAI,gBAAgB;AACpB,aAAS,IAAI,GAAG,IAAI,YAAY,KAAK;AACnC,YAAM,UAAU,aAAa,UAAU,MAAM;AAC7C,YAAM,QAAQ,QAAQ,MAAM,IAAI,EAAE,OAAO,CAAC,MAAW,EAAE,KAAK,CAAC;AAC7D,iBAAW,QAAQ,OAAO;AACxB,YAAI;AACF,eAAK,MAAM,IAAI;AACf;AAAA,QACF,QAAQ;AAAA,QAAC;AAAA,MACX;AAAA,IACF;AAEA,UAAM,mBAAmB,YAAY,IAAI,IAAI;AAC7C,UAAM,kBAAkB,QAAQ,YAAY,EAAE,WAAW;AAGzD,UAAM,iBAAiB,YAAY,IAAI;AACvC,UAAM,oBAAoB,QAAQ,YAAY,EAAE;AAEhD,QAAI,iBAAiB;AACrB,aAAS,IAAI,GAAG,IAAI,YAAY,KAAK;AACnC,uBAAiB,SAAS,OAAO,YAAY,QAAQ,GAAG;AACtD,0BAAkB,MAAM;AAAA,MAC1B;AAAA,IACF;AAEA,UAAM,oBAAoB,YAAY,IAAI,IAAI;AAC9C,UAAM,mBAAmB,QAAQ,YAAY,EAAE,WAAW;AAE1D,UAAM,
+
"sourcesContent": ["/**\n * Performance Benchmark Suite\n * Measure improvements from optimization efforts\n */\n\nimport { performance } from 'perf_hooks';\nimport { logger } from '../monitoring/logger.js';\nimport { StreamingJSONLParser } from './streaming-jsonl-parser.js';\nimport { ContextCache } from './context-cache.js';\nimport { LazyContextLoader } from './lazy-context-loader.js';\nimport { readFileSync } from 'fs';\nimport { join } from 'path';\n\nexport interface BenchmarkResult {\n name: string;\n duration: number;\n memoryUsed: number;\n itemsProcessed: number;\n throughput: number;\n improvement?: number;\n}\n\nexport interface BenchmarkSuite {\n name: string;\n results: BenchmarkResult[];\n totalDuration: number;\n averageImprovement: number;\n}\n\nexport class PerformanceBenchmark {\n private results: BenchmarkResult[] = [];\n\n /**\n * Benchmark JSONL parsing performance\n */\n async benchmarkJSONLParsing(\n filePath: string,\n iterations = 3\n ): Promise<BenchmarkResult> {\n const parser = new StreamingJSONLParser();\n\n // Baseline: traditional sync parsing\n const baselineStart = performance.now();\n const baselineMemStart = process.memoryUsage().heapUsed;\n\n let baselineCount = 0;\n for (let i = 0; i < iterations; i++) {\n const content = readFileSync(filePath, 'utf8');\n const lines = content.split('\\n').filter((l: any) => l.trim());\n for (const line of lines) {\n try {\n JSON.parse(line);\n baselineCount++;\n } catch {}\n }\n }\n\n const baselineDuration = performance.now() - baselineStart;\n const baselineMemUsed = process.memoryUsage().heapUsed - baselineMemStart;\n\n // Optimized: streaming parser\n const optimizedStart = performance.now();\n const optimizedMemStart = process.memoryUsage().heapUsed;\n\n let optimizedCount = 0;\n for (let i = 0; i < iterations; i++) {\n for await (const batch of parser.parseStream(filePath)) {\n optimizedCount += batch.length;\n }\n }\n\n const optimizedDuration = performance.now() - optimizedStart;\n const optimizedMemUsed = process.memoryUsage().heapUsed - optimizedMemStart;\n\n const improvement =\n ((baselineDuration - optimizedDuration) / baselineDuration) * 100;\n const memImprovement =\n ((baselineMemUsed - optimizedMemUsed) / baselineMemUsed) * 100;\n\n const result: BenchmarkResult = {\n name: 'JSONL Parsing',\n duration: optimizedDuration / iterations,\n memoryUsed: optimizedMemUsed,\n itemsProcessed: optimizedCount / iterations,\n throughput:\n optimizedCount / iterations / (optimizedDuration / 1000 / iterations),\n improvement,\n };\n\n logger.info('JSONL Parsing Benchmark', {\n baseline: {\n duration: baselineDuration / iterations,\n memory: baselineMemUsed,\n throughput:\n baselineCount / iterations / (baselineDuration / 1000 / iterations),\n },\n optimized: result,\n improvements: {\n speed: `${improvement.toFixed(1)}%`,\n memory: `${memImprovement.toFixed(1)}%`,\n },\n });\n\n this.results.push(result);\n return result;\n }\n\n /**\n * Benchmark context caching performance\n */\n async benchmarkContextCache(\n itemCount = 1000,\n accessPatterns = 10000\n ): Promise<BenchmarkResult> {\n const cache = new ContextCache<any>({\n maxSize: 50 * 1024 * 1024,\n maxItems: itemCount,\n });\n\n // Prepare test data\n const testData = Array.from({ length: itemCount }, (_, i) => ({\n key: `item-${i}`,\n value: {\n id: i,\n data: 'x'.repeat(Math.floor(Math.random() * 1000)),\n timestamp: Date.now(),\n },\n }));\n\n // Populate cache\n const populateStart = performance.now();\n for (const item of testData) {\n 
cache.set(item.key, item.value);\n }\n const populateDuration = performance.now() - populateStart;\n\n // Benchmark cache access\n const accessStart = performance.now();\n let hits = 0;\n let misses = 0;\n\n for (let i = 0; i < accessPatterns; i++) {\n const index = Math.floor(Math.random() * itemCount * 1.2); // Some will miss\n const key = `item-${index}`;\n const result = cache.get(key);\n if (result) hits++;\n else misses++;\n }\n\n const accessDuration = performance.now() - accessStart;\n const stats = cache.getStats();\n\n const result: BenchmarkResult = {\n name: 'Context Cache',\n duration: accessDuration,\n memoryUsed: cache.getSize().bytes,\n itemsProcessed: accessPatterns,\n throughput: accessPatterns / (accessDuration / 1000),\n improvement: stats.hitRate * 100,\n };\n\n logger.info('Context Cache Benchmark', {\n populate: {\n duration: populateDuration,\n items: itemCount,\n },\n access: {\n duration: accessDuration,\n patterns: accessPatterns,\n hitRate: `${(stats.hitRate * 100).toFixed(1)}%`,\n },\n performance: {\n throughput: `${result.throughput.toFixed(0)} ops/sec`,\n avgAccessTime: `${stats.avgAccessTime.toFixed(2)}ms`,\n },\n });\n\n this.results.push(result);\n return result;\n }\n\n /**\n * Benchmark lazy loading performance\n */\n async benchmarkLazyLoading(\n db: any,\n projectId: string,\n frameCount = 100\n ): Promise<BenchmarkResult> {\n const loader = new LazyContextLoader(db, projectId);\n\n // Check if frames table exists, if not use a mock test\n let frames: any[] = [];\n try {\n frames = db\n .prepare('SELECT id FROM frames ORDER BY updated_at DESC LIMIT ?')\n .all(frameCount) as any[];\n } catch (error: unknown) {\n // Create mock frame IDs if table doesn't exist\n logger.warn('Frames table not found, using mock data for benchmark');\n frames = Array.from({ length: Math.min(frameCount, 10) }, (_, i) => ({\n id: `frame-${i}`,\n }));\n }\n\n const frameIds = frames.map((f: any) => f.id);\n\n // Benchmark eager loading (baseline)\n const eagerStart = performance.now();\n const eagerMemStart = process.memoryUsage().heapUsed;\n\n const eagerData = [];\n for (const id of frameIds) {\n try {\n const frame = db.prepare('SELECT * FROM frames WHERE id = ?').get(id);\n const anchors = db\n .prepare('SELECT * FROM anchors WHERE frame_id = ?')\n .all(id);\n const events = db\n .prepare('SELECT * FROM events WHERE frame_id = ? 
LIMIT 10')\n .all(id);\n eagerData.push({ frame, anchors, events });\n } catch {\n // Use mock data if tables don't exist\n eagerData.push({\n frame: { id, type: 'mock', name: `Mock ${id}` },\n anchors: [],\n events: [],\n });\n }\n }\n\n const eagerDuration = performance.now() - eagerStart;\n const eagerMemUsed = process.memoryUsage().heapUsed - eagerMemStart;\n\n // Benchmark lazy loading\n const lazyStart = performance.now();\n const lazyMemStart = process.memoryUsage().heapUsed;\n\n // Preload with lazy loading\n await loader.preloadContext(frameIds, { parallel: true, depth: 2 });\n\n // Access data lazily\n let accessedCount = 0;\n for (const id of frameIds.slice(0, frameCount / 2)) {\n const frame = await loader.lazyFrame(id).get();\n if (frame) accessedCount++;\n }\n\n const lazyDuration = performance.now() - lazyStart;\n const lazyMemUsed = process.memoryUsage().heapUsed - lazyMemStart;\n\n const improvement = ((eagerDuration - lazyDuration) / eagerDuration) * 100;\n const memImprovement = ((eagerMemUsed - lazyMemUsed) / eagerMemUsed) * 100;\n\n const result: BenchmarkResult = {\n name: 'Lazy Loading',\n duration: lazyDuration,\n memoryUsed: lazyMemUsed,\n itemsProcessed: frameCount,\n throughput: frameCount / (lazyDuration / 1000),\n improvement,\n };\n\n logger.info('Lazy Loading Benchmark', {\n eager: {\n duration: eagerDuration,\n memory: eagerMemUsed,\n },\n lazy: {\n duration: lazyDuration,\n memory: lazyMemUsed,\n accessed: accessedCount,\n },\n improvements: {\n speed: `${improvement.toFixed(1)}%`,\n memory: `${memImprovement.toFixed(1)}%`,\n },\n });\n\n this.results.push(result);\n return result;\n }\n\n /**\n * Run full benchmark suite\n */\n async runFullSuite(\n projectRoot: string,\n db: any,\n projectId: string\n ): Promise<BenchmarkSuite> {\n const suiteStart = performance.now();\n\n logger.info('Starting Performance Benchmark Suite');\n\n // Run benchmarks\n const tasksFile = join(projectRoot, '.stackmemory', 'tasks.jsonl');\n\n const jsonlResult = await this.benchmarkJSONLParsing(tasksFile);\n const cacheResult = await this.benchmarkContextCache();\n const lazyResult = await this.benchmarkLazyLoading(db, projectId);\n\n const totalDuration = performance.now() - suiteStart;\n const averageImprovement =\n this.results\n .filter((r: any) => r.improvement !== undefined)\n .reduce((sum, r) => sum + (r.improvement || 0), 0) /\n this.results.filter((r: any) => r.improvement !== undefined).length;\n\n const suite: BenchmarkSuite = {\n name: 'Performance Optimization Suite',\n results: this.results,\n totalDuration,\n averageImprovement,\n };\n\n // Generate summary report\n this.generateReport(suite);\n\n return suite;\n }\n\n /**\n * Generate performance report\n */\n private generateReport(suite: BenchmarkSuite): void {\n console.log('\\n\u2554\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2557');\n console.log('\u2551 Performance Benchmark Results \u2551');\n console.log('\u255A\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u255D\\n');\n\n for (const result of suite.results) {\n console.log(`\uD83D\uDCCA ${result.name}`);\n console.log(` 
Duration: ${result.duration.toFixed(2)}ms`);\n console.log(\n ` Memory: ${(result.memoryUsed / 1024 / 1024).toFixed(2)}MB`\n );\n console.log(` Throughput: ${result.throughput.toFixed(0)} items/sec`);\n if (result.improvement !== undefined) {\n const icon = result.improvement > 0 ? '\uD83D\uDE80' : '\u26A0\uFE0F';\n console.log(\n ` ${icon} Improvement: ${result.improvement.toFixed(1)}%`\n );\n }\n console.log('');\n }\n\n console.log('\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550');\n console.log(`\u23F1\uFE0F Total Duration: ${suite.totalDuration.toFixed(2)}ms`);\n console.log(\n `\uD83D\uDCC8 Average Improvement: ${suite.averageImprovement.toFixed(1)}%`\n );\n console.log('');\n\n logger.info('Performance Benchmark Complete', {\n suite: suite.name,\n duration: suite.totalDuration,\n avgImprovement: suite.averageImprovement,\n results: suite.results.map((r: any) => ({\n name: r.name,\n improvement: r.improvement,\n throughput: r.throughput,\n })),\n });\n }\n\n /**\n * Get benchmark results\n */\n getResults(): BenchmarkResult[] {\n return this.results;\n }\n\n /**\n * Clear results\n */\n clearResults(): void {\n this.results = [];\n }\n}\n"],
5 | +
"mappings": "AAKA,SAAS,mBAAmB;AAC5B,SAAS,cAAc;AACvB,SAAS,4BAA4B;AACrC,SAAS,oBAAoB;AAC7B,SAAS,yBAAyB;AAClC,SAAS,oBAAoB;AAC7B,SAAS,YAAY;AAkBd,MAAM,qBAAqB;AAAA,EACxB,UAA6B,CAAC;AAAA;AAAA;AAAA;AAAA,EAKtC,MAAM,sBACJ,UACA,aAAa,GACa;AAC1B,UAAM,SAAS,IAAI,qBAAqB;AAGxC,UAAM,gBAAgB,YAAY,IAAI;AACtC,UAAM,mBAAmB,QAAQ,YAAY,EAAE;AAE/C,QAAI,gBAAgB;AACpB,aAAS,IAAI,GAAG,IAAI,YAAY,KAAK;AACnC,YAAM,UAAU,aAAa,UAAU,MAAM;AAC7C,YAAM,QAAQ,QAAQ,MAAM,IAAI,EAAE,OAAO,CAAC,MAAW,EAAE,KAAK,CAAC;AAC7D,iBAAW,QAAQ,OAAO;AACxB,YAAI;AACF,eAAK,MAAM,IAAI;AACf;AAAA,QACF,QAAQ;AAAA,QAAC;AAAA,MACX;AAAA,IACF;AAEA,UAAM,mBAAmB,YAAY,IAAI,IAAI;AAC7C,UAAM,kBAAkB,QAAQ,YAAY,EAAE,WAAW;AAGzD,UAAM,iBAAiB,YAAY,IAAI;AACvC,UAAM,oBAAoB,QAAQ,YAAY,EAAE;AAEhD,QAAI,iBAAiB;AACrB,aAAS,IAAI,GAAG,IAAI,YAAY,KAAK;AACnC,uBAAiB,SAAS,OAAO,YAAY,QAAQ,GAAG;AACtD,0BAAkB,MAAM;AAAA,MAC1B;AAAA,IACF;AAEA,UAAM,oBAAoB,YAAY,IAAI,IAAI;AAC9C,UAAM,mBAAmB,QAAQ,YAAY,EAAE,WAAW;AAE1D,UAAM,eACF,mBAAmB,qBAAqB,mBAAoB;AAChE,UAAM,kBACF,kBAAkB,oBAAoB,kBAAmB;AAE7D,UAAM,SAA0B;AAAA,MAC9B,MAAM;AAAA,MACN,UAAU,oBAAoB;AAAA,MAC9B,YAAY;AAAA,MACZ,gBAAgB,iBAAiB;AAAA,MACjC,YACE,iBAAiB,cAAc,oBAAoB,MAAO;AAAA,MAC5D;AAAA,IACF;AAEA,WAAO,KAAK,2BAA2B;AAAA,MACrC,UAAU;AAAA,QACR,UAAU,mBAAmB;AAAA,QAC7B,QAAQ;AAAA,QACR,YACE,gBAAgB,cAAc,mBAAmB,MAAO;AAAA,MAC5D;AAAA,MACA,WAAW;AAAA,MACX,cAAc;AAAA,QACZ,OAAO,GAAG,YAAY,QAAQ,CAAC,CAAC;AAAA,QAChC,QAAQ,GAAG,eAAe,QAAQ,CAAC,CAAC;AAAA,MACtC;AAAA,IACF,CAAC;AAED,SAAK,QAAQ,KAAK,MAAM;AACxB,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,sBACJ,YAAY,KACZ,iBAAiB,KACS;AAC1B,UAAM,QAAQ,IAAI,aAAkB;AAAA,MAClC,SAAS,KAAK,OAAO;AAAA,MACrB,UAAU;AAAA,IACZ,CAAC;AAGD,UAAM,WAAW,MAAM,KAAK,EAAE,QAAQ,UAAU,GAAG,CAAC,GAAG,OAAO;AAAA,MAC5D,KAAK,QAAQ,CAAC;AAAA,MACd,OAAO;AAAA,QACL,IAAI;AAAA,QACJ,MAAM,IAAI,OAAO,KAAK,MAAM,KAAK,OAAO,IAAI,GAAI,CAAC;AAAA,QACjD,WAAW,KAAK,IAAI;AAAA,MACtB;AAAA,IACF,EAAE;AAGF,UAAM,gBAAgB,YAAY,IAAI;AACtC,eAAW,QAAQ,UAAU;AAC3B,YAAM,IAAI,KAAK,KAAK,KAAK,KAAK;AAAA,IAChC;AACA,UAAM,mBAAmB,YAAY,IAAI,IAAI;AAG7C,UAAM,cAAc,YAAY,IAAI;AACpC,QAAI,OAAO;AACX,QAAI,SAAS;AAEb,aAAS,IAAI,GAAG,IAAI,gBAAgB,KAAK;AACvC,YAAM,QAAQ,KAAK,MAAM,KAAK,OAAO,IAAI,YAAY,GAAG;AACxD,YAAM,MAAM,QAAQ,KAAK;AACzB,YAAMA,UAAS,MAAM,IAAI,GAAG;AAC5B,UAAIA,QAAQ;AAAA,UACP;AAAA,IACP;AAEA,UAAM,iBAAiB,YAAY,IAAI,IAAI;AAC3C,UAAM,QAAQ,MAAM,SAAS;AAE7B,UAAM,SAA0B;AAAA,MAC9B,MAAM;AAAA,MACN,UAAU;AAAA,MACV,YAAY,MAAM,QAAQ,EAAE;AAAA,MAC5B,gBAAgB;AAAA,MAChB,YAAY,kBAAkB,iBAAiB;AAAA,MAC/C,aAAa,MAAM,UAAU;AAAA,IAC/B;AAEA,WAAO,KAAK,2BAA2B;AAAA,MACrC,UAAU;AAAA,QACR,UAAU;AAAA,QACV,OAAO;AAAA,MACT;AAAA,MACA,QAAQ;AAAA,QACN,UAAU;AAAA,QACV,UAAU;AAAA,QACV,SAAS,IAAI,MAAM,UAAU,KAAK,QAAQ,CAAC,CAAC;AAAA,MAC9C;AAAA,MACA,aAAa;AAAA,QACX,YAAY,GAAG,OAAO,WAAW,QAAQ,CAAC,CAAC;AAAA,QAC3C,eAAe,GAAG,MAAM,cAAc,QAAQ,CAAC,CAAC;AAAA,MAClD;AAAA,IACF,CAAC;AAED,SAAK,QAAQ,KAAK,MAAM;AACxB,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,qBACJ,IACA,WACA,aAAa,KACa;AAC1B,UAAM,SAAS,IAAI,kBAAkB,IAAI,SAAS;AAGlD,QAAI,SAAgB,CAAC;AACrB,QAAI;AACF,eAAS,GACN,QAAQ,wDAAwD,EAChE,IAAI,UAAU;AAAA,IACnB,SAAS,OAAgB;AAEvB,aAAO,KAAK,uDAAuD;AACnE,eAAS,MAAM,KAAK,EAAE,QAAQ,KAAK,IAAI,YAAY,EAAE,EAAE,GAAG,CAAC,GAAG,OAAO;AAAA,QACnE,IAAI,SAAS,CAAC;AAAA,MAChB,EAAE;AAAA,IACJ;AAEA,UAAM,WAAW,OAAO,IAAI,CAAC,MAAW,EAAE,EAAE;AAG5C,UAAM,aAAa,YAAY,IAAI;AACnC,UAAM,gBAAgB,QAAQ,YAAY,EAAE;AAE5C,UAAM,YAAY,CAAC;AACnB,eAAW,MAAM,UAAU;AACzB,UAAI;AACF,cAAM,QAAQ,GAAG,QAAQ,mCAAmC,EAAE,IAAI,EAAE;AACpE,cAAM,UAAU,GACb,QAAQ,0CAA0C,EAClD,IAAI,EAAE;AACT,cAAM,SAAS,GACZ,QAAQ,kDAAkD,EAC1D,IAAI,EAAE;AACT,kBAAU,KAAK,EAAE,OAAO,SAAS,OAAO,CAAC;AAAA,MAC3C,QAAQ;AAEN,kBAAU,KAAK;AAAA,UACb,OAAO,EAAE,IAAI,MAAM,QAAQ,MAAM,QAAQ,EAAE,GAAG;AAAA,UAC9C,SAAS,CAAC;A
AAA,UACV,QAAQ,CAAC;AAAA,QACX,CAAC;AAAA,MACH;AAAA,IACF;AAEA,UAAM,gBAAgB,YAAY,IAAI,IAAI;AAC1C,UAAM,eAAe,QAAQ,YAAY,EAAE,WAAW;AAGtD,UAAM,YAAY,YAAY,IAAI;AAClC,UAAM,eAAe,QAAQ,YAAY,EAAE;AAG3C,UAAM,OAAO,eAAe,UAAU,EAAE,UAAU,MAAM,OAAO,EAAE,CAAC;AAGlE,QAAI,gBAAgB;AACpB,eAAW,MAAM,SAAS,MAAM,GAAG,aAAa,CAAC,GAAG;AAClD,YAAM,QAAQ,MAAM,OAAO,UAAU,EAAE,EAAE,IAAI;AAC7C,UAAI,MAAO;AAAA,IACb;AAEA,UAAM,eAAe,YAAY,IAAI,IAAI;AACzC,UAAM,cAAc,QAAQ,YAAY,EAAE,WAAW;AAErD,UAAM,eAAgB,gBAAgB,gBAAgB,gBAAiB;AACvE,UAAM,kBAAmB,eAAe,eAAe,eAAgB;AAEvE,UAAM,SAA0B;AAAA,MAC9B,MAAM;AAAA,MACN,UAAU;AAAA,MACV,YAAY;AAAA,MACZ,gBAAgB;AAAA,MAChB,YAAY,cAAc,eAAe;AAAA,MACzC;AAAA,IACF;AAEA,WAAO,KAAK,0BAA0B;AAAA,MACpC,OAAO;AAAA,QACL,UAAU;AAAA,QACV,QAAQ;AAAA,MACV;AAAA,MACA,MAAM;AAAA,QACJ,UAAU;AAAA,QACV,QAAQ;AAAA,QACR,UAAU;AAAA,MACZ;AAAA,MACA,cAAc;AAAA,QACZ,OAAO,GAAG,YAAY,QAAQ,CAAC,CAAC;AAAA,QAChC,QAAQ,GAAG,eAAe,QAAQ,CAAC,CAAC;AAAA,MACtC;AAAA,IACF,CAAC;AAED,SAAK,QAAQ,KAAK,MAAM;AACxB,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,aACJ,aACA,IACA,WACyB;AACzB,UAAM,aAAa,YAAY,IAAI;AAEnC,WAAO,KAAK,sCAAsC;AAGlD,UAAM,YAAY,KAAK,aAAa,gBAAgB,aAAa;AAEjE,UAAM,cAAc,MAAM,KAAK,sBAAsB,SAAS;AAC9D,UAAM,cAAc,MAAM,KAAK,sBAAsB;AACrD,UAAM,aAAa,MAAM,KAAK,qBAAqB,IAAI,SAAS;AAEhE,UAAM,gBAAgB,YAAY,IAAI,IAAI;AAC1C,UAAM,qBACJ,KAAK,QACF,OAAO,CAAC,MAAW,EAAE,gBAAgB,MAAS,EAC9C,OAAO,CAAC,KAAK,MAAM,OAAO,EAAE,eAAe,IAAI,CAAC,IACnD,KAAK,QAAQ,OAAO,CAAC,MAAW,EAAE,gBAAgB,MAAS,EAAE;AAE/D,UAAM,QAAwB;AAAA,MAC5B,MAAM;AAAA,MACN,SAAS,KAAK;AAAA,MACd;AAAA,MACA;AAAA,IACF;AAGA,SAAK,eAAe,KAAK;AAEzB,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKQ,eAAe,OAA6B;AAClD,YAAQ,IAAI,wRAAkD;AAC9D,YAAQ,IAAI,yDAA+C;AAC3D,YAAQ,IAAI,wRAAkD;AAE9D,eAAW,UAAU,MAAM,SAAS;AAClC,cAAQ,IAAI,aAAM,OAAO,IAAI,EAAE;AAC/B,cAAQ,IAAI,gBAAgB,OAAO,SAAS,QAAQ,CAAC,CAAC,IAAI;AAC1D,cAAQ;AAAA,QACN,eAAe,OAAO,aAAa,OAAO,MAAM,QAAQ,CAAC,CAAC;AAAA,MAC5D;AACA,cAAQ,IAAI,kBAAkB,OAAO,WAAW,QAAQ,CAAC,CAAC,YAAY;AACtE,UAAI,OAAO,gBAAgB,QAAW;AACpC,cAAM,OAAO,OAAO,cAAc,IAAI,cAAO;AAC7C,gBAAQ;AAAA,UACN,MAAM,IAAI,iBAAiB,OAAO,YAAY,QAAQ,CAAC,CAAC;AAAA,QAC1D;AAAA,MACF;AACA,cAAQ,IAAI,EAAE;AAAA,IAChB;AAEA,YAAQ,IAAI,oQAA6C;AACzD,YAAQ,IAAI,iCAAuB,MAAM,cAAc,QAAQ,CAAC,CAAC,IAAI;AACrE,YAAQ;AAAA,MACN,kCAA2B,MAAM,mBAAmB,QAAQ,CAAC,CAAC;AAAA,IAChE;AACA,YAAQ,IAAI,EAAE;AAEd,WAAO,KAAK,kCAAkC;AAAA,MAC5C,OAAO,MAAM;AAAA,MACb,UAAU,MAAM;AAAA,MAChB,gBAAgB,MAAM;AAAA,MACtB,SAAS,MAAM,QAAQ,IAAI,CAAC,OAAY;AAAA,QACtC,MAAM,EAAE;AAAA,QACR,aAAa,EAAE;AAAA,QACf,YAAY,EAAE;AAAA,MAChB,EAAE;AAAA,IACJ,CAAC;AAAA,EACH;AAAA;AAAA;AAAA;AAAA,EAKA,aAAgC;AAC9B,WAAO,KAAK;AAAA,EACd;AAAA;AAAA;AAAA;AAAA,EAKA,eAAqB;AACnB,SAAK,UAAU,CAAC;AAAA,EAClB;AACF;",
6 6 | "names": ["result"]
7 7 | }
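The map above embeds the new performance-benchmark.ts source, which exposes a PerformanceBenchmark class with benchmarkJSONLParsing, benchmarkContextCache, benchmarkLazyLoading, and runFullSuite(projectRoot, db, projectId). A minimal usage sketch follows; the import path, database file, and project id are assumptions for illustration only, not verified exports of this release:

// Sketch only: import path and the better-sqlite3 database file are assumed.
import Database from 'better-sqlite3';
import { PerformanceBenchmark } from '@stackmemoryai/stackmemory/dist/core/performance/performance-benchmark.js';

async function main(): Promise<void> {
  const db = new Database('.stackmemory/stackmemory.db'); // hypothetical database path
  const benchmark = new PerformanceBenchmark();
  // runFullSuite reads <projectRoot>/.stackmemory/tasks.jsonl and prints a summary report.
  const suite = await benchmark.runFullSuite(process.cwd(), db, 'demo-project'); // projectId assumed
  console.log(`avg improvement: ${suite.averageImprovement.toFixed(1)}%`);
}

main().catch(console.error);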
@@ -227,21 +227,31 @@ class PerformanceProfiler {
227 227 | for (const hotPath of hotPaths.slice(0, 5)) {
228 228 | const impact = hotPath.frequency * hotPath.avgDuration;
229 229 | if (hotPath.path.includes("getFrameContext") && hotPath.avgDuration > 10) {
230 | - recommendations.push(
230 | + recommendations.push(
231 | + `Consider caching frame context for ${hotPath.path} (avg: ${hotPath.avgDuration.toFixed(1)}ms)`
232 | + );
231 233 | }
232 234 | if (hotPath.path.includes("getFrameEvents") && hotPath.frequency > 100) {
233 | - recommendations.push(
235 | + recommendations.push(
236 | + `High frequency event queries detected in ${hotPath.path} (${hotPath.frequency} calls). Consider pagination or caching.`
237 | + );
234 238 | }
235 239 | if (hotPath.path.includes("bulkInsert") && hotPath.avgDuration > 50) {
236 | - recommendations.push(
240 | + recommendations.push(
241 | + `Slow bulk insertion in ${hotPath.path}. Consider increasing batch size or using prepared statements.`
242 | + );
237 243 | }
238 244 | if (impact > 1e3) {
239 | - recommendations.push(
245 | + recommendations.push(
246 | + `High impact operation: ${hotPath.path} (${impact.toFixed(0)}ms total impact). Consider optimization.`
247 | + );
240 248 | }
241 249 | }
242 250 | const memUsage = process.memoryUsage();
243 251 | if (memUsage.heapUsed / memUsage.heapTotal > 0.8) {
244 | - recommendations.push(
252 | + recommendations.push(
253 | + "High memory usage detected. Consider implementing cleanup routines or reducing cache sizes."
254 | + );
245 255 | }
246 256 | if (recommendations.length === 0) {
247 257 | recommendations.push("No significant performance issues detected.");
@@ -266,7 +276,10 @@ function performanceMonitor(operationName) {
266 276 | const originalMethod = descriptor.value;
267 277 | const finalOperationName = operationName || `${target.constructor.name}.${propertyKey}`;
268 278 | descriptor.value = async function(...args) {
269 | - return getProfiler().timeFunction(
279 | + return getProfiler().timeFunction(
280 | + finalOperationName,
281 | + () => originalMethod.apply(this, args)
282 | + );
270 283 | };
271 284 | return descriptor;
272 285 | };
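This hunk only re-wraps the call inside the performanceMonitor decorator across multiple lines; its behaviour is unchanged. For orientation, a sketch of how the decorator and the timeOperation helper defined in the same module are meant to be applied; the relative import path and the legacy experimentalDecorators setting are assumptions:

// Sketch: assumes TypeScript legacy decorators (experimentalDecorators: true)
// and a relative import of the compiled module.
import { performanceMonitor, timeOperation, getProfiler } from './performance-profiler.js';

class FrameService {
  // Timings are recorded under "FrameService.loadFrame" by the global profiler.
  @performanceMonitor()
  async loadFrame(id: string): Promise<string> {
    return `frame:${id}`; // placeholder work
  }
}

async function demo(): Promise<void> {
  await new FrameService().loadFrame('f1');
  // One-off timing without a decorator:
  await timeOperation('demo.sort', () => [3, 1, 2].sort((a, b) => a - b));
  console.log(getProfiler().getHotPaths(5)); // slow operations, sorted by impact
}

demo().catch(console.error);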
@@ -278,12 +291,24 @@ class StackMemoryPerformanceMonitor {
278 291 | */
279 292 | monitorFrameOperations(frameManager) {
280 293 | this.wrapMethod(frameManager, "getFrame", "FrameManager.getFrame");
281 | - this.wrapMethod(
282 | -
283 | -
294 | + this.wrapMethod(
295 | + frameManager,
296 | + "getFrameEvents",
297 | + "FrameManager.getFrameEvents"
298 | + );
299 | + this.wrapMethod(
300 | + frameManager,
301 | + "getFrameAnchors",
302 | + "FrameManager.getFrameAnchors"
303 | + );
304 | + this.wrapMethod(
305 | + frameManager,
306 | + "getHotStackContext",
307 | + "FrameManager.getHotStackContext"
308 | + );
284 309 | }
285 310 | /**
286 | - * Monitor database operations
311 | + * Monitor database operations
287 312 | */
288 313 | monitorDatabaseOperations(db) {
289 314 | const originalPrepare = db.prepare;
@@ -299,7 +324,10 @@ class StackMemoryPerformanceMonitor {
299 324 | const original = obj[methodName];
300 325 | if (typeof original !== "function") return;
301 326 | obj[methodName] = async function(...args) {
302 | - return getProfiler().timeFunction(
327 | + return getProfiler().timeFunction(
328 | + operationName,
329 | + () => original.apply(this, args)
330 | + );
303 331 | };
304 332 | }
305 333 | }
@@ -309,13 +337,22 @@ function wrapStatement(stmt, sql) {
309 337 | const originalGet = stmt.get;
310 338 | const originalAll = stmt.all;
311 339 | stmt.run = function(...args) {
312 | - return getProfiler().timeFunction(
340 | + return getProfiler().timeFunction(
341 | + `${operationName}.run`,
342 | + () => originalRun.apply(this, args)
343 | + );
313 344 | };
314 345 | stmt.get = function(...args) {
315 | - return getProfiler().timeFunction(
346 | + return getProfiler().timeFunction(
347 | + `${operationName}.get`,
348 | + () => originalGet.apply(this, args)
349 | + );
316 350 | };
317 351 | stmt.all = function(...args) {
318 | - return getProfiler().timeFunction(
352 | + return getProfiler().timeFunction(
353 | + `${operationName}.all`,
354 | + () => originalAll.apply(this, args)
355 | + );
319 356 | };
320 357 | return stmt;
321 358 | }
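The wrapMethod and wrapStatement hunks above are likewise formatting-only. As context for what this instrumentation does at runtime, a sketch of wiring StackMemoryPerformanceMonitor to a better-sqlite3 handle; the import path is assumed, and note that the wrappers are async, so wrapped calls resolve through promises:

// Sketch: import path assumed. After monitorDatabaseOperations(db), every
// statement produced by db.prepare() reports timings as SQL.<VERB>.run/.get/.all.
import Database from 'better-sqlite3';
import { StackMemoryPerformanceMonitor, getProfiler } from './performance-profiler.js';

async function demo(): Promise<void> {
  const db = new Database(':memory:');
  db.exec('CREATE TABLE frames (id TEXT PRIMARY KEY, name TEXT)');

  const monitor = new StackMemoryPerformanceMonitor();
  monitor.monitorDatabaseOperations(db);

  const insert = db.prepare('INSERT INTO frames (id, name) VALUES (?, ?)');
  await insert.run('f1', 'demo');                                                // recorded as SQL.INSERT.run
  const row = await db.prepare('SELECT name FROM frames WHERE id = ?').get('f1'); // recorded as SQL.SELECT.get
  console.log(row, JSON.parse(getProfiler().exportMetrics()).metrics);
}

demo().catch(console.error);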
@@ -1,7 +1,7 @@
1 1 | {
2 2 | "version": 3,
3 3 | "sources": ["../../../src/core/performance/performance-profiler.ts"],
4 | -
"sourcesContent": ["/**\n * Performance Profiler\n * Identifies and optimizes hot paths in StackMemory operations\n */\n\nimport { logger } from '../monitoring/logger.js';\nimport { getQueryStatistics } from '../trace/db-trace-wrapper.js';\nimport Database from 'better-sqlite3';\n// Type-safe environment variable access\nfunction getEnv(key: string, defaultValue?: string): string {\n const value = process.env[key];\n if (value === undefined) {\n if (defaultValue !== undefined) return defaultValue;\n throw new Error(`Environment variable ${key} is required`);\n }\n return value;\n}\n\nfunction getOptionalEnv(key: string): string | undefined {\n return process.env[key];\n}\n\n\nexport interface PerformanceMetrics {\n operationName: string;\n callCount: number;\n totalTimeMs: number;\n avgTimeMs: number;\n minTimeMs: number;\n maxTimeMs: number;\n p95TimeMs: number;\n lastExecuted: number;\n}\n\nexport interface HotPath {\n path: string;\n frequency: number;\n avgDuration: number;\n totalDuration: number;\n lastSeen: number;\n samples: PerformanceSample[];\n}\n\nexport interface PerformanceSample {\n timestamp: number;\n duration: number;\n metadata?: Record<string, any>;\n}\n\nexport interface SystemPerformanceReport {\n timestamp: number;\n hotPaths: HotPath[];\n databaseMetrics: any;\n memoryUsage: {\n heapUsed: number;\n heapTotal: number;\n external: number;\n };\n cacheMetrics: any;\n recommendations: string[];\n}\n\n/**\n * Performance profiler with hot path detection\n */\nexport class PerformanceProfiler {\n private metrics = new Map<string, PerformanceMetrics>();\n private hotPaths = new Map<string, HotPath>();\n private samples = new Map<string, PerformanceSample[]>();\n private isEnabled = true;\n private sampleLimit = 1000;\n private hotPathThreshold = 5; // Operations taking > 5ms are considered hot\n\n constructor(\n options: {\n enabled?: boolean;\n sampleLimit?: number;\n hotPathThreshold?: number;\n } = {}\n ) {\n this.isEnabled = options.enabled ?? true;\n this.sampleLimit = options.sampleLimit ?? 1000;\n this.hotPathThreshold = options.hotPathThreshold ?? 
5;\n }\n\n /**\n * Start timing an operation\n */\n startTiming(operationName: string): (metadata?: Record<string, any>) => void {\n if (!this.isEnabled) {\n return () => {}; // No-op\n }\n\n const startTime = performance.now();\n \n return (metadata?: Record<string, any>) => {\n this.endTiming(operationName, startTime, metadata);\n };\n }\n\n /**\n * Time a function execution\n */\n async timeFunction<T>(\n operationName: string,\n fn: () => T | Promise<T>,\n metadata?: Record<string, any>\n ): Promise<T> {\n if (!this.isEnabled) {\n return await fn();\n }\n\n const endTimer = this.startTiming(operationName);\n try {\n const result = await fn();\n endTimer(metadata);\n return result;\n } catch (error: unknown) {\n endTimer({ ...metadata, error: true });\n throw error;\n }\n }\n\n /**\n * Record timing manually\n */\n recordTiming(operationName: string, durationMs: number, metadata?: Record<string, any>): void {\n if (!this.isEnabled) return;\n\n this.endTiming(operationName, performance.now() - durationMs, metadata);\n }\n\n /**\n * Get performance metrics for an operation\n */\n getMetrics(operationName: string): PerformanceMetrics | undefined {\n return this.metrics.get(operationName);\n }\n\n /**\n * Get all performance metrics\n */\n getAllMetrics(): Map<string, PerformanceMetrics> {\n return new Map(this.metrics);\n }\n\n /**\n * Get hot paths sorted by impact\n */\n getHotPaths(limit = 10): HotPath[] {\n return Array.from(this.hotPaths.values())\n .sort((a, b) => (b.frequency * b.avgDuration) - (a.frequency * a.avgDuration))\n .slice(0, limit);\n }\n\n /**\n * Generate comprehensive performance report\n */\n generateReport(db?: Database.Database): SystemPerformanceReport {\n const hotPaths = this.getHotPaths(20);\n const recommendations = this.generateRecommendations(hotPaths);\n \n const report: SystemPerformanceReport = {\n timestamp: Date.now(),\n hotPaths,\n databaseMetrics: db ? getQueryStatistics(db) : null,\n memoryUsage: process.memoryUsage(),\n cacheMetrics: null, // Will be filled by query cache if available\n recommendations,\n };\n\n logger.info('Performance report generated', {\n hotPathsCount: hotPaths.length,\n recommendationsCount: recommendations.length,\n topHotPath: hotPaths[0]?.path,\n });\n\n return report;\n }\n\n /**\n * Reset all metrics\n */\n reset(): void {\n this.metrics.clear();\n this.hotPaths.clear();\n this.samples.clear();\n logger.info('Performance metrics reset');\n }\n\n /**\n * Export metrics to JSON\n */\n exportMetrics(): string {\n const data = {\n timestamp: Date.now(),\n metrics: Object.fromEntries(this.metrics),\n hotPaths: Object.fromEntries(this.hotPaths),\n config: {\n sampleLimit: this.sampleLimit,\n hotPathThreshold: this.hotPathThreshold,\n enabled: this.isEnabled,\n },\n };\n\n return JSON.stringify(data, null, 2);\n }\n\n /**\n * Enable/disable profiling\n */\n setEnabled(enabled: boolean): void {\n this.isEnabled = enabled;\n logger.info(`Performance profiling ${enabled ? 
'enabled' : 'disabled'}`);\n }\n\n /**\n * End timing for an operation\n */\n private endTiming(operationName: string, startTime: number, metadata?: Record<string, any>): void {\n const duration = performance.now() - startTime;\n const timestamp = Date.now();\n\n // Update metrics\n this.updateMetrics(operationName, duration, timestamp);\n\n // Track hot paths\n if (duration > this.hotPathThreshold) {\n this.trackHotPath(operationName, duration, timestamp, metadata);\n }\n\n // Store sample\n this.storeSample(operationName, duration, timestamp, metadata);\n }\n\n /**\n * Update performance metrics for an operation\n */\n private updateMetrics(operationName: string, duration: number, timestamp: number): void {\n const existing = this.metrics.get(operationName);\n \n if (!existing) {\n this.metrics.set(operationName, {\n operationName,\n callCount: 1,\n totalTimeMs: duration,\n avgTimeMs: duration,\n minTimeMs: duration,\n maxTimeMs: duration,\n p95TimeMs: duration,\n lastExecuted: timestamp,\n });\n } else {\n existing.callCount++;\n existing.totalTimeMs += duration;\n existing.avgTimeMs = existing.totalTimeMs / existing.callCount;\n existing.minTimeMs = Math.min(existing.minTimeMs, duration);\n existing.maxTimeMs = Math.max(existing.maxTimeMs, duration);\n existing.lastExecuted = timestamp;\n\n // Update p95 from samples\n existing.p95TimeMs = this.calculateP95(operationName);\n }\n }\n\n /**\n * Track hot path\n */\n private trackHotPath(\n operationName: string,\n duration: number,\n timestamp: number,\n metadata?: Record<string, any>\n ): void {\n const existing = this.hotPaths.get(operationName);\n \n if (!existing) {\n this.hotPaths.set(operationName, {\n path: operationName,\n frequency: 1,\n avgDuration: duration,\n totalDuration: duration,\n lastSeen: timestamp,\n samples: [{ timestamp, duration, metadata }],\n });\n } else {\n existing.frequency++;\n existing.totalDuration += duration;\n existing.avgDuration = existing.totalDuration / existing.frequency;\n existing.lastSeen = timestamp;\n \n // Keep limited samples\n existing.samples.push({ timestamp, duration, metadata });\n if (existing.samples.length > 100) {\n existing.samples = existing.samples.slice(-100);\n }\n }\n }\n\n /**\n * Store performance sample\n */\n private storeSample(\n operationName: string,\n duration: number,\n timestamp: number,\n metadata?: Record<string, any>\n ): void {\n if (!this.samples.has(operationName)) {\n this.samples.set(operationName, []);\n }\n\n const samples = this.samples.get(operationName)!;\n samples.push({ timestamp, duration, metadata });\n\n // Limit samples to prevent memory growth\n if (samples.length > this.sampleLimit) {\n samples.splice(0, samples.length - this.sampleLimit);\n }\n }\n\n /**\n * Calculate 95th percentile from samples\n */\n private calculateP95(operationName: string): number {\n const samples = this.samples.get(operationName);\n if (!samples || samples.length === 0) return 0;\n\n const durations = samples.map((s: any) => s.duration).sort((a, b) => a - b);\n const index = Math.floor(durations.length * 0.95);\n return durations[index] || 0;\n }\n\n /**\n * Generate optimization recommendations\n */\n private generateRecommendations(hotPaths: HotPath[]): string[] {\n const recommendations: string[] = [];\n\n for (const hotPath of hotPaths.slice(0, 5)) {\n const impact = hotPath.frequency * hotPath.avgDuration;\n \n if (hotPath.path.includes('getFrameContext') && hotPath.avgDuration > 10) {\n recommendations.push(`Consider caching frame context for ${hotPath.path} 
(avg: ${hotPath.avgDuration.toFixed(1)}ms)`);\n }\n \n if (hotPath.path.includes('getFrameEvents') && hotPath.frequency > 100) {\n recommendations.push(`High frequency event queries detected in ${hotPath.path} (${hotPath.frequency} calls). Consider pagination or caching.`);\n }\n \n if (hotPath.path.includes('bulkInsert') && hotPath.avgDuration > 50) {\n recommendations.push(`Slow bulk insertion in ${hotPath.path}. Consider increasing batch size or using prepared statements.`);\n }\n \n if (impact > 1000) {\n recommendations.push(`High impact operation: ${hotPath.path} (${impact.toFixed(0)}ms total impact). Consider optimization.`);\n }\n }\n\n // Memory recommendations\n const memUsage = process.memoryUsage();\n if (memUsage.heapUsed / memUsage.heapTotal > 0.8) {\n recommendations.push('High memory usage detected. Consider implementing cleanup routines or reducing cache sizes.');\n }\n\n if (recommendations.length === 0) {\n recommendations.push('No significant performance issues detected.');\n }\n\n return recommendations;\n }\n}\n\n// Global profiler instance\nlet globalProfiler: PerformanceProfiler | null = null;\n\n/**\n * Get or create global profiler\n */\nexport function getProfiler(): PerformanceProfiler {\n if (!globalProfiler) {\n globalProfiler = new PerformanceProfiler({\n enabled: process.env['NODE_ENV'] !== 'production' || process.env['STACKMEMORY_PROFILING'] === 'true',\n });\n }\n return globalProfiler;\n}\n\n/**\n * Convenience function to time operations\n */\nexport async function timeOperation<T>(\n operationName: string,\n fn: () => T | Promise<T>,\n metadata?: Record<string, any>\n): Promise<T> {\n return getProfiler().timeFunction(operationName, fn, metadata);\n}\n\n/**\n * Create a performance monitoring decorator\n */\nexport function performanceMonitor(operationName?: string) {\n return function (target: any, propertyKey: string, descriptor: PropertyDescriptor) {\n const originalMethod = descriptor.value;\n const finalOperationName = operationName || `${target.constructor.name}.${propertyKey}`;\n\n descriptor.value = async function (...args: any[]) {\n return getProfiler().timeFunction(finalOperationName, () => originalMethod.apply(this, args));\n };\n\n return descriptor;\n };\n}\n\n/**\n * Monitor specific StackMemory operations\n */\nexport class StackMemoryPerformanceMonitor {\n private profiler = getProfiler();\n \n /**\n * Monitor frame operations\n */\n monitorFrameOperations(frameManager: any): void {\n this.wrapMethod(frameManager, 'getFrame', 'FrameManager.getFrame');\n this.wrapMethod(frameManager, 'getFrameEvents', 'FrameManager.getFrameEvents');\n this.wrapMethod(frameManager, 'getFrameAnchors', 'FrameManager.getFrameAnchors');\n this.wrapMethod(frameManager, 'getHotStackContext', 'FrameManager.getHotStackContext');\n }\n\n /**\n * Monitor database operations \n */\n monitorDatabaseOperations(db: Database.Database): void {\n const originalPrepare = db.prepare;\n db.prepare = function<T = any>(sql: string): Database.Statement<T[], T> {\n const stmt = originalPrepare.call(this, sql);\n return wrapStatement(stmt, sql);\n };\n }\n\n /**\n * Wrap a method with performance monitoring\n */\n private wrapMethod(obj: any, methodName: string, operationName: string): void {\n const original = obj[methodName];\n if (typeof original !== 'function') return;\n\n obj[methodName] = async function (...args: any[]) {\n return getProfiler().timeFunction(operationName, () => original.apply(this, args));\n };\n }\n}\n\n/**\n * Wrap a database statement with performance 
monitoring\n */\nfunction wrapStatement(stmt: Database.Statement, sql: string): Database.Statement {\n const operationName = `SQL.${sql.trim().split(' ')[0].toUpperCase()}`;\n \n const originalRun = stmt.run;\n const originalGet = stmt.get;\n const originalAll = stmt.all;\n\n stmt.run = function (this: Database.Statement, ...args: any[]) {\n return getProfiler().timeFunction(`${operationName}.run`, () => originalRun.apply(this, args));\n } as any;\n\n stmt.get = function (this: Database.Statement, ...args: any[]) {\n return getProfiler().timeFunction(`${operationName}.get`, () => originalGet.apply(this, args));\n } as any;\n\n stmt.all = function (this: Database.Statement, ...args: any[]) {\n return getProfiler().timeFunction(`${operationName}.all`, () => originalAll.apply(this, args));\n } as any;\n\n return stmt;\n}"],
5 | -
"mappings": "AAKA,SAAS,cAAc;AACvB,SAAS,0BAA0B;AAGnC,SAAS,OAAO,KAAa,cAA+B;AAC1D,QAAM,QAAQ,QAAQ,IAAI,GAAG;AAC7B,MAAI,UAAU,QAAW;AACvB,QAAI,iBAAiB,OAAW,QAAO;AACvC,UAAM,IAAI,MAAM,wBAAwB,GAAG,cAAc;AAAA,EAC3D;AACA,SAAO;AACT;AAEA,SAAS,eAAe,KAAiC;AACvD,SAAO,QAAQ,IAAI,GAAG;AACxB;
4 | +
"sourcesContent": ["/**\n * Performance Profiler\n * Identifies and optimizes hot paths in StackMemory operations\n */\n\nimport { logger } from '../monitoring/logger.js';\nimport { getQueryStatistics } from '../trace/db-trace-wrapper.js';\nimport Database from 'better-sqlite3';\n// Type-safe environment variable access\nfunction getEnv(key: string, defaultValue?: string): string {\n const value = process.env[key];\n if (value === undefined) {\n if (defaultValue !== undefined) return defaultValue;\n throw new Error(`Environment variable ${key} is required`);\n }\n return value;\n}\n\nfunction getOptionalEnv(key: string): string | undefined {\n return process.env[key];\n}\n\nexport interface PerformanceMetrics {\n operationName: string;\n callCount: number;\n totalTimeMs: number;\n avgTimeMs: number;\n minTimeMs: number;\n maxTimeMs: number;\n p95TimeMs: number;\n lastExecuted: number;\n}\n\nexport interface HotPath {\n path: string;\n frequency: number;\n avgDuration: number;\n totalDuration: number;\n lastSeen: number;\n samples: PerformanceSample[];\n}\n\nexport interface PerformanceSample {\n timestamp: number;\n duration: number;\n metadata?: Record<string, any>;\n}\n\nexport interface SystemPerformanceReport {\n timestamp: number;\n hotPaths: HotPath[];\n databaseMetrics: any;\n memoryUsage: {\n heapUsed: number;\n heapTotal: number;\n external: number;\n };\n cacheMetrics: any;\n recommendations: string[];\n}\n\n/**\n * Performance profiler with hot path detection\n */\nexport class PerformanceProfiler {\n private metrics = new Map<string, PerformanceMetrics>();\n private hotPaths = new Map<string, HotPath>();\n private samples = new Map<string, PerformanceSample[]>();\n private isEnabled = true;\n private sampleLimit = 1000;\n private hotPathThreshold = 5; // Operations taking > 5ms are considered hot\n\n constructor(\n options: {\n enabled?: boolean;\n sampleLimit?: number;\n hotPathThreshold?: number;\n } = {}\n ) {\n this.isEnabled = options.enabled ?? true;\n this.sampleLimit = options.sampleLimit ?? 1000;\n this.hotPathThreshold = options.hotPathThreshold ?? 
5;\n }\n\n /**\n * Start timing an operation\n */\n startTiming(operationName: string): (metadata?: Record<string, any>) => void {\n if (!this.isEnabled) {\n return () => {}; // No-op\n }\n\n const startTime = performance.now();\n\n return (metadata?: Record<string, any>) => {\n this.endTiming(operationName, startTime, metadata);\n };\n }\n\n /**\n * Time a function execution\n */\n async timeFunction<T>(\n operationName: string,\n fn: () => T | Promise<T>,\n metadata?: Record<string, any>\n ): Promise<T> {\n if (!this.isEnabled) {\n return await fn();\n }\n\n const endTimer = this.startTiming(operationName);\n try {\n const result = await fn();\n endTimer(metadata);\n return result;\n } catch (error: unknown) {\n endTimer({ ...metadata, error: true });\n throw error;\n }\n }\n\n /**\n * Record timing manually\n */\n recordTiming(\n operationName: string,\n durationMs: number,\n metadata?: Record<string, any>\n ): void {\n if (!this.isEnabled) return;\n\n this.endTiming(operationName, performance.now() - durationMs, metadata);\n }\n\n /**\n * Get performance metrics for an operation\n */\n getMetrics(operationName: string): PerformanceMetrics | undefined {\n return this.metrics.get(operationName);\n }\n\n /**\n * Get all performance metrics\n */\n getAllMetrics(): Map<string, PerformanceMetrics> {\n return new Map(this.metrics);\n }\n\n /**\n * Get hot paths sorted by impact\n */\n getHotPaths(limit = 10): HotPath[] {\n return Array.from(this.hotPaths.values())\n .sort((a, b) => b.frequency * b.avgDuration - a.frequency * a.avgDuration)\n .slice(0, limit);\n }\n\n /**\n * Generate comprehensive performance report\n */\n generateReport(db?: Database.Database): SystemPerformanceReport {\n const hotPaths = this.getHotPaths(20);\n const recommendations = this.generateRecommendations(hotPaths);\n\n const report: SystemPerformanceReport = {\n timestamp: Date.now(),\n hotPaths,\n databaseMetrics: db ? getQueryStatistics(db) : null,\n memoryUsage: process.memoryUsage(),\n cacheMetrics: null, // Will be filled by query cache if available\n recommendations,\n };\n\n logger.info('Performance report generated', {\n hotPathsCount: hotPaths.length,\n recommendationsCount: recommendations.length,\n topHotPath: hotPaths[0]?.path,\n });\n\n return report;\n }\n\n /**\n * Reset all metrics\n */\n reset(): void {\n this.metrics.clear();\n this.hotPaths.clear();\n this.samples.clear();\n logger.info('Performance metrics reset');\n }\n\n /**\n * Export metrics to JSON\n */\n exportMetrics(): string {\n const data = {\n timestamp: Date.now(),\n metrics: Object.fromEntries(this.metrics),\n hotPaths: Object.fromEntries(this.hotPaths),\n config: {\n sampleLimit: this.sampleLimit,\n hotPathThreshold: this.hotPathThreshold,\n enabled: this.isEnabled,\n },\n };\n\n return JSON.stringify(data, null, 2);\n }\n\n /**\n * Enable/disable profiling\n */\n setEnabled(enabled: boolean): void {\n this.isEnabled = enabled;\n logger.info(`Performance profiling ${enabled ? 
'enabled' : 'disabled'}`);\n }\n\n /**\n * End timing for an operation\n */\n private endTiming(\n operationName: string,\n startTime: number,\n metadata?: Record<string, any>\n ): void {\n const duration = performance.now() - startTime;\n const timestamp = Date.now();\n\n // Update metrics\n this.updateMetrics(operationName, duration, timestamp);\n\n // Track hot paths\n if (duration > this.hotPathThreshold) {\n this.trackHotPath(operationName, duration, timestamp, metadata);\n }\n\n // Store sample\n this.storeSample(operationName, duration, timestamp, metadata);\n }\n\n /**\n * Update performance metrics for an operation\n */\n private updateMetrics(\n operationName: string,\n duration: number,\n timestamp: number\n ): void {\n const existing = this.metrics.get(operationName);\n\n if (!existing) {\n this.metrics.set(operationName, {\n operationName,\n callCount: 1,\n totalTimeMs: duration,\n avgTimeMs: duration,\n minTimeMs: duration,\n maxTimeMs: duration,\n p95TimeMs: duration,\n lastExecuted: timestamp,\n });\n } else {\n existing.callCount++;\n existing.totalTimeMs += duration;\n existing.avgTimeMs = existing.totalTimeMs / existing.callCount;\n existing.minTimeMs = Math.min(existing.minTimeMs, duration);\n existing.maxTimeMs = Math.max(existing.maxTimeMs, duration);\n existing.lastExecuted = timestamp;\n\n // Update p95 from samples\n existing.p95TimeMs = this.calculateP95(operationName);\n }\n }\n\n /**\n * Track hot path\n */\n private trackHotPath(\n operationName: string,\n duration: number,\n timestamp: number,\n metadata?: Record<string, any>\n ): void {\n const existing = this.hotPaths.get(operationName);\n\n if (!existing) {\n this.hotPaths.set(operationName, {\n path: operationName,\n frequency: 1,\n avgDuration: duration,\n totalDuration: duration,\n lastSeen: timestamp,\n samples: [{ timestamp, duration, metadata }],\n });\n } else {\n existing.frequency++;\n existing.totalDuration += duration;\n existing.avgDuration = existing.totalDuration / existing.frequency;\n existing.lastSeen = timestamp;\n\n // Keep limited samples\n existing.samples.push({ timestamp, duration, metadata });\n if (existing.samples.length > 100) {\n existing.samples = existing.samples.slice(-100);\n }\n }\n }\n\n /**\n * Store performance sample\n */\n private storeSample(\n operationName: string,\n duration: number,\n timestamp: number,\n metadata?: Record<string, any>\n ): void {\n if (!this.samples.has(operationName)) {\n this.samples.set(operationName, []);\n }\n\n const samples = this.samples.get(operationName)!;\n samples.push({ timestamp, duration, metadata });\n\n // Limit samples to prevent memory growth\n if (samples.length > this.sampleLimit) {\n samples.splice(0, samples.length - this.sampleLimit);\n }\n }\n\n /**\n * Calculate 95th percentile from samples\n */\n private calculateP95(operationName: string): number {\n const samples = this.samples.get(operationName);\n if (!samples || samples.length === 0) return 0;\n\n const durations = samples.map((s: any) => s.duration).sort((a, b) => a - b);\n const index = Math.floor(durations.length * 0.95);\n return durations[index] || 0;\n }\n\n /**\n * Generate optimization recommendations\n */\n private generateRecommendations(hotPaths: HotPath[]): string[] {\n const recommendations: string[] = [];\n\n for (const hotPath of hotPaths.slice(0, 5)) {\n const impact = hotPath.frequency * hotPath.avgDuration;\n\n if (\n hotPath.path.includes('getFrameContext') &&\n hotPath.avgDuration > 10\n ) {\n recommendations.push(\n `Consider caching frame 
context for ${hotPath.path} (avg: ${hotPath.avgDuration.toFixed(1)}ms)`\n );\n }\n\n if (hotPath.path.includes('getFrameEvents') && hotPath.frequency > 100) {\n recommendations.push(\n `High frequency event queries detected in ${hotPath.path} (${hotPath.frequency} calls). Consider pagination or caching.`\n );\n }\n\n if (hotPath.path.includes('bulkInsert') && hotPath.avgDuration > 50) {\n recommendations.push(\n `Slow bulk insertion in ${hotPath.path}. Consider increasing batch size or using prepared statements.`\n );\n }\n\n if (impact > 1000) {\n recommendations.push(\n `High impact operation: ${hotPath.path} (${impact.toFixed(0)}ms total impact). Consider optimization.`\n );\n }\n }\n\n // Memory recommendations\n const memUsage = process.memoryUsage();\n if (memUsage.heapUsed / memUsage.heapTotal > 0.8) {\n recommendations.push(\n 'High memory usage detected. Consider implementing cleanup routines or reducing cache sizes.'\n );\n }\n\n if (recommendations.length === 0) {\n recommendations.push('No significant performance issues detected.');\n }\n\n return recommendations;\n }\n}\n\n// Global profiler instance\nlet globalProfiler: PerformanceProfiler | null = null;\n\n/**\n * Get or create global profiler\n */\nexport function getProfiler(): PerformanceProfiler {\n if (!globalProfiler) {\n globalProfiler = new PerformanceProfiler({\n enabled:\n process.env['NODE_ENV'] !== 'production' ||\n process.env['STACKMEMORY_PROFILING'] === 'true',\n });\n }\n return globalProfiler;\n}\n\n/**\n * Convenience function to time operations\n */\nexport async function timeOperation<T>(\n operationName: string,\n fn: () => T | Promise<T>,\n metadata?: Record<string, any>\n): Promise<T> {\n return getProfiler().timeFunction(operationName, fn, metadata);\n}\n\n/**\n * Create a performance monitoring decorator\n */\nexport function performanceMonitor(operationName?: string) {\n return function (\n target: any,\n propertyKey: string,\n descriptor: PropertyDescriptor\n ) {\n const originalMethod = descriptor.value;\n const finalOperationName =\n operationName || `${target.constructor.name}.${propertyKey}`;\n\n descriptor.value = async function (...args: any[]) {\n return getProfiler().timeFunction(finalOperationName, () =>\n originalMethod.apply(this, args)\n );\n };\n\n return descriptor;\n };\n}\n\n/**\n * Monitor specific StackMemory operations\n */\nexport class StackMemoryPerformanceMonitor {\n private profiler = getProfiler();\n\n /**\n * Monitor frame operations\n */\n monitorFrameOperations(frameManager: any): void {\n this.wrapMethod(frameManager, 'getFrame', 'FrameManager.getFrame');\n this.wrapMethod(\n frameManager,\n 'getFrameEvents',\n 'FrameManager.getFrameEvents'\n );\n this.wrapMethod(\n frameManager,\n 'getFrameAnchors',\n 'FrameManager.getFrameAnchors'\n );\n this.wrapMethod(\n frameManager,\n 'getHotStackContext',\n 'FrameManager.getHotStackContext'\n );\n }\n\n /**\n * Monitor database operations\n */\n monitorDatabaseOperations(db: Database.Database): void {\n const originalPrepare = db.prepare;\n db.prepare = function <T = any>(sql: string): Database.Statement<T[], T> {\n const stmt = originalPrepare.call(this, sql);\n return wrapStatement(stmt, sql);\n };\n }\n\n /**\n * Wrap a method with performance monitoring\n */\n private wrapMethod(\n obj: any,\n methodName: string,\n operationName: string\n ): void {\n const original = obj[methodName];\n if (typeof original !== 'function') return;\n\n obj[methodName] = async function (...args: any[]) {\n return 
getProfiler().timeFunction(operationName, () =>\n original.apply(this, args)\n );\n };\n }\n}\n\n/**\n * Wrap a database statement with performance monitoring\n */\nfunction wrapStatement(\n stmt: Database.Statement,\n sql: string\n): Database.Statement {\n const operationName = `SQL.${sql.trim().split(' ')[0].toUpperCase()}`;\n\n const originalRun = stmt.run;\n const originalGet = stmt.get;\n const originalAll = stmt.all;\n\n stmt.run = function (this: Database.Statement, ...args: any[]) {\n return getProfiler().timeFunction(`${operationName}.run`, () =>\n originalRun.apply(this, args)\n );\n } as any;\n\n stmt.get = function (this: Database.Statement, ...args: any[]) {\n return getProfiler().timeFunction(`${operationName}.get`, () =>\n originalGet.apply(this, args)\n );\n } as any;\n\n stmt.all = function (this: Database.Statement, ...args: any[]) {\n return getProfiler().timeFunction(`${operationName}.all`, () =>\n originalAll.apply(this, args)\n );\n } as any;\n\n return stmt;\n}\n"],
5 | +
"mappings": "AAKA,SAAS,cAAc;AACvB,SAAS,0BAA0B;AAGnC,SAAS,OAAO,KAAa,cAA+B;AAC1D,QAAM,QAAQ,QAAQ,IAAI,GAAG;AAC7B,MAAI,UAAU,QAAW;AACvB,QAAI,iBAAiB,OAAW,QAAO;AACvC,UAAM,IAAI,MAAM,wBAAwB,GAAG,cAAc;AAAA,EAC3D;AACA,SAAO;AACT;AAEA,SAAS,eAAe,KAAiC;AACvD,SAAO,QAAQ,IAAI,GAAG;AACxB;AA4CO,MAAM,oBAAoB;AAAA,EACvB,UAAU,oBAAI,IAAgC;AAAA,EAC9C,WAAW,oBAAI,IAAqB;AAAA,EACpC,UAAU,oBAAI,IAAiC;AAAA,EAC/C,YAAY;AAAA,EACZ,cAAc;AAAA,EACd,mBAAmB;AAAA;AAAA,EAE3B,YACE,UAII,CAAC,GACL;AACA,SAAK,YAAY,QAAQ,WAAW;AACpC,SAAK,cAAc,QAAQ,eAAe;AAC1C,SAAK,mBAAmB,QAAQ,oBAAoB;AAAA,EACtD;AAAA;AAAA;AAAA;AAAA,EAKA,YAAY,eAAiE;AAC3E,QAAI,CAAC,KAAK,WAAW;AACnB,aAAO,MAAM;AAAA,MAAC;AAAA,IAChB;AAEA,UAAM,YAAY,YAAY,IAAI;AAElC,WAAO,CAAC,aAAmC;AACzC,WAAK,UAAU,eAAe,WAAW,QAAQ;AAAA,IACnD;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,aACJ,eACA,IACA,UACY;AACZ,QAAI,CAAC,KAAK,WAAW;AACnB,aAAO,MAAM,GAAG;AAAA,IAClB;AAEA,UAAM,WAAW,KAAK,YAAY,aAAa;AAC/C,QAAI;AACF,YAAM,SAAS,MAAM,GAAG;AACxB,eAAS,QAAQ;AACjB,aAAO;AAAA,IACT,SAAS,OAAgB;AACvB,eAAS,EAAE,GAAG,UAAU,OAAO,KAAK,CAAC;AACrC,YAAM;AAAA,IACR;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,aACE,eACA,YACA,UACM;AACN,QAAI,CAAC,KAAK,UAAW;AAErB,SAAK,UAAU,eAAe,YAAY,IAAI,IAAI,YAAY,QAAQ;AAAA,EACxE;AAAA;AAAA;AAAA;AAAA,EAKA,WAAW,eAAuD;AAChE,WAAO,KAAK,QAAQ,IAAI,aAAa;AAAA,EACvC;AAAA;AAAA;AAAA;AAAA,EAKA,gBAAiD;AAC/C,WAAO,IAAI,IAAI,KAAK,OAAO;AAAA,EAC7B;AAAA;AAAA;AAAA;AAAA,EAKA,YAAY,QAAQ,IAAe;AACjC,WAAO,MAAM,KAAK,KAAK,SAAS,OAAO,CAAC,EACrC,KAAK,CAAC,GAAG,MAAM,EAAE,YAAY,EAAE,cAAc,EAAE,YAAY,EAAE,WAAW,EACxE,MAAM,GAAG,KAAK;AAAA,EACnB;AAAA;AAAA;AAAA;AAAA,EAKA,eAAe,IAAiD;AAC9D,UAAM,WAAW,KAAK,YAAY,EAAE;AACpC,UAAM,kBAAkB,KAAK,wBAAwB,QAAQ;AAE7D,UAAM,SAAkC;AAAA,MACtC,WAAW,KAAK,IAAI;AAAA,MACpB;AAAA,MACA,iBAAiB,KAAK,mBAAmB,EAAE,IAAI;AAAA,MAC/C,aAAa,QAAQ,YAAY;AAAA,MACjC,cAAc;AAAA;AAAA,MACd;AAAA,IACF;AAEA,WAAO,KAAK,gCAAgC;AAAA,MAC1C,eAAe,SAAS;AAAA,MACxB,sBAAsB,gBAAgB;AAAA,MACtC,YAAY,SAAS,CAAC,GAAG;AAAA,IAC3B,CAAC;AAED,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,QAAc;AACZ,SAAK,QAAQ,MAAM;AACnB,SAAK,SAAS,MAAM;AACpB,SAAK,QAAQ,MAAM;AACnB,WAAO,KAAK,2BAA2B;AAAA,EACzC;AAAA;AAAA;AAAA;AAAA,EAKA,gBAAwB;AACtB,UAAM,OAAO;AAAA,MACX,WAAW,KAAK,IAAI;AAAA,MACpB,SAAS,OAAO,YAAY,KAAK,OAAO;AAAA,MACxC,UAAU,OAAO,YAAY,KAAK,QAAQ;AAAA,MAC1C,QAAQ;AAAA,QACN,aAAa,KAAK;AAAA,QAClB,kBAAkB,KAAK;AAAA,QACvB,SAAS,KAAK;AAAA,MAChB;AAAA,IACF;AAEA,WAAO,KAAK,UAAU,MAAM,MAAM,CAAC;AAAA,EACrC;AAAA;AAAA;AAAA;AAAA,EAKA,WAAW,SAAwB;AACjC,SAAK,YAAY;AACjB,WAAO,KAAK,yBAAyB,UAAU,YAAY,UAAU,EAAE;AAAA,EACzE;AAAA;AAAA;AAAA;AAAA,EAKQ,UACN,eACA,WACA,UACM;AACN,UAAM,WAAW,YAAY,IAAI,IAAI;AACrC,UAAM,YAAY,KAAK,IAAI;AAG3B,SAAK,cAAc,eAAe,UAAU,SAAS;AAGrD,QAAI,WAAW,KAAK,kBAAkB;AACpC,WAAK,aAAa,eAAe,UAAU,WAAW,QAAQ;AAAA,IAChE;AAGA,SAAK,YAAY,eAAe,UAAU,WAAW,QAAQ;AAAA,EAC/D;AAAA;AAAA;AAAA;AAAA,EAKQ,cACN,eACA,UACA,WACM;AACN,UAAM,WAAW,KAAK,QAAQ,IAAI,aAAa;AAE/C,QAAI,CAAC,UAAU;AACb,WAAK,QAAQ,IAAI,eAAe;AAAA,QAC9B;AAAA,QACA,WAAW;AAAA,QACX,aAAa;AAAA,QACb,WAAW;AAAA,QACX,WAAW;AAAA,QACX,WAAW;AAAA,QACX,WAAW;AAAA,QACX,cAAc;AAAA,MAChB,CAAC;AAAA,IACH,OAAO;AACL,eAAS;AACT,eAAS,eAAe;AACxB,eAAS,YAAY,SAAS,cAAc,SAAS;AACrD,eAAS,YAAY,KAAK,IAAI,SAAS,WAAW,QAAQ;AAC1D,eAAS,YAAY,KAAK,IAAI,SAAS,WAAW,QAAQ;AAC1D,eAAS,eAAe;AAGxB,eAAS,YAAY,KAAK,aAAa,aAAa;AAAA,IACtD;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,aACN,eACA,UACA,WACA,UACM;AACN,UAAM,WAAW,KAAK,SAAS,IAAI,aAAa;AAEhD,QAAI,CAAC,UAAU;AACb,WAAK,SAAS,IAAI,eAAe;AAAA,QAC/B,MAAM;AAAA,QACN,WAAW;AAAA,QACX,aAAa;AAAA,QACb,eAAe;AAAA,QACf,UAAU;AAAA,QACV,SAAS,CAAC,EAAE,WAAW,UAAU,SAAS,CAAC;AAAA,MAC7C,CAAC;AAAA,IACH,OAAO;AACL,eAAS;AACT,eAAS,iBAAiB;AAC1B,eAAS,cAAc,SAAS,gBAAgB,SAAS;AACzD,eAAS,WAAW;AAGpB,eAAS,QAAQ,KAAK,EAAE,WAAW,UAAU,SAAS,CAAC;
AACvD,UAAI,SAAS,QAAQ,SAAS,KAAK;AACjC,iBAAS,UAAU,SAAS,QAAQ,MAAM,IAAI;AAAA,MAChD;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,YACN,eACA,UACA,WACA,UACM;AACN,QAAI,CAAC,KAAK,QAAQ,IAAI,aAAa,GAAG;AACpC,WAAK,QAAQ,IAAI,eAAe,CAAC,CAAC;AAAA,IACpC;AAEA,UAAM,UAAU,KAAK,QAAQ,IAAI,aAAa;AAC9C,YAAQ,KAAK,EAAE,WAAW,UAAU,SAAS,CAAC;AAG9C,QAAI,QAAQ,SAAS,KAAK,aAAa;AACrC,cAAQ,OAAO,GAAG,QAAQ,SAAS,KAAK,WAAW;AAAA,IACrD;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,aAAa,eAA+B;AAClD,UAAM,UAAU,KAAK,QAAQ,IAAI,aAAa;AAC9C,QAAI,CAAC,WAAW,QAAQ,WAAW,EAAG,QAAO;AAE7C,UAAM,YAAY,QAAQ,IAAI,CAAC,MAAW,EAAE,QAAQ,EAAE,KAAK,CAAC,GAAG,MAAM,IAAI,CAAC;AAC1E,UAAM,QAAQ,KAAK,MAAM,UAAU,SAAS,IAAI;AAChD,WAAO,UAAU,KAAK,KAAK;AAAA,EAC7B;AAAA;AAAA;AAAA;AAAA,EAKQ,wBAAwB,UAA+B;AAC7D,UAAM,kBAA4B,CAAC;AAEnC,eAAW,WAAW,SAAS,MAAM,GAAG,CAAC,GAAG;AAC1C,YAAM,SAAS,QAAQ,YAAY,QAAQ;AAE3C,UACE,QAAQ,KAAK,SAAS,iBAAiB,KACvC,QAAQ,cAAc,IACtB;AACA,wBAAgB;AAAA,UACd,sCAAsC,QAAQ,IAAI,UAAU,QAAQ,YAAY,QAAQ,CAAC,CAAC;AAAA,QAC5F;AAAA,MACF;AAEA,UAAI,QAAQ,KAAK,SAAS,gBAAgB,KAAK,QAAQ,YAAY,KAAK;AACtE,wBAAgB;AAAA,UACd,4CAA4C,QAAQ,IAAI,KAAK,QAAQ,SAAS;AAAA,QAChF;AAAA,MACF;AAEA,UAAI,QAAQ,KAAK,SAAS,YAAY,KAAK,QAAQ,cAAc,IAAI;AACnE,wBAAgB;AAAA,UACd,0BAA0B,QAAQ,IAAI;AAAA,QACxC;AAAA,MACF;AAEA,UAAI,SAAS,KAAM;AACjB,wBAAgB;AAAA,UACd,0BAA0B,QAAQ,IAAI,KAAK,OAAO,QAAQ,CAAC,CAAC;AAAA,QAC9D;AAAA,MACF;AAAA,IACF;AAGA,UAAM,WAAW,QAAQ,YAAY;AACrC,QAAI,SAAS,WAAW,SAAS,YAAY,KAAK;AAChD,sBAAgB;AAAA,QACd;AAAA,MACF;AAAA,IACF;AAEA,QAAI,gBAAgB,WAAW,GAAG;AAChC,sBAAgB,KAAK,6CAA6C;AAAA,IACpE;AAEA,WAAO;AAAA,EACT;AACF;AAGA,IAAI,iBAA6C;AAK1C,SAAS,cAAmC;AACjD,MAAI,CAAC,gBAAgB;AACnB,qBAAiB,IAAI,oBAAoB;AAAA,MACvC,SACE,QAAQ,IAAI,UAAU,MAAM,gBAC5B,QAAQ,IAAI,uBAAuB,MAAM;AAAA,IAC7C,CAAC;AAAA,EACH;AACA,SAAO;AACT;AAKA,eAAsB,cACpB,eACA,IACA,UACY;AACZ,SAAO,YAAY,EAAE,aAAa,eAAe,IAAI,QAAQ;AAC/D;AAKO,SAAS,mBAAmB,eAAwB;AACzD,SAAO,SACL,QACA,aACA,YACA;AACA,UAAM,iBAAiB,WAAW;AAClC,UAAM,qBACJ,iBAAiB,GAAG,OAAO,YAAY,IAAI,IAAI,WAAW;AAE5D,eAAW,QAAQ,kBAAmB,MAAa;AACjD,aAAO,YAAY,EAAE;AAAA,QAAa;AAAA,QAAoB,MACpD,eAAe,MAAM,MAAM,IAAI;AAAA,MACjC;AAAA,IACF;AAEA,WAAO;AAAA,EACT;AACF;AAKO,MAAM,8BAA8B;AAAA,EACjC,WAAW,YAAY;AAAA;AAAA;AAAA;AAAA,EAK/B,uBAAuB,cAAyB;AAC9C,SAAK,WAAW,cAAc,YAAY,uBAAuB;AACjE,SAAK;AAAA,MACH;AAAA,MACA;AAAA,MACA;AAAA,IACF;AACA,SAAK;AAAA,MACH;AAAA,MACA;AAAA,MACA;AAAA,IACF;AACA,SAAK;AAAA,MACH;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,0BAA0B,IAA6B;AACrD,UAAM,kBAAkB,GAAG;AAC3B,OAAG,UAAU,SAAmB,KAAyC;AACvE,YAAM,OAAO,gBAAgB,KAAK,MAAM,GAAG;AAC3C,aAAO,cAAc,MAAM,GAAG;AAAA,IAChC;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,WACN,KACA,YACA,eACM;AACN,UAAM,WAAW,IAAI,UAAU;AAC/B,QAAI,OAAO,aAAa,WAAY;AAEpC,QAAI,UAAU,IAAI,kBAAmB,MAAa;AAChD,aAAO,YAAY,EAAE;AAAA,QAAa;AAAA,QAAe,MAC/C,SAAS,MAAM,MAAM,IAAI;AAAA,MAC3B;AAAA,IACF;AAAA,EACF;AACF;AAKA,SAAS,cACP,MACA,KACoB;AACpB,QAAM,gBAAgB,OAAO,IAAI,KAAK,EAAE,MAAM,GAAG,EAAE,CAAC,EAAE,YAAY,CAAC;AAEnE,QAAM,cAAc,KAAK;AACzB,QAAM,cAAc,KAAK;AACzB,QAAM,cAAc,KAAK;AAEzB,OAAK,MAAM,YAAuC,MAAa;AAC7D,WAAO,YAAY,EAAE;AAAA,MAAa,GAAG,aAAa;AAAA,MAAQ,MACxD,YAAY,MAAM,MAAM,IAAI;AAAA,IAC9B;AAAA,EACF;AAEA,OAAK,MAAM,YAAuC,MAAa;AAC7D,WAAO,YAAY,EAAE;AAAA,MAAa,GAAG,aAAa;AAAA,MAAQ,MACxD,YAAY,MAAM,MAAM,IAAI;AAAA,IAC9B;AAAA,EACF;AAEA,OAAK,MAAM,YAAuC,MAAa;AAC7D,WAAO,YAAY,EAAE;AAAA,MAAa,GAAG,aAAa;AAAA,MAAQ,MACxD,YAAY,MAAM,MAAM,IAAI;AAAA,IAC9B;AAAA,EACF;AAEA,SAAO;AACT;",
6 6 | "names": []
7 7 | }
@@ -110,7 +110,11 @@ class StreamingJSONLParser {
110 110 | * Create a transform stream for JSONL parsing
111 111 | */
112 112 | createTransformStream(options = {}) {
113 | - const {
113 | + const {
114 | + filter,
115 | + transform,
116 | + maxLineLength = this.DEFAULT_MAX_LINE_LENGTH
117 | + } = options;
114 118 | let buffer = "";
115 119 | let lineCount = 0;
116 120 | return new Transform({
@@ -1,7 +1,7 @@
1 1 | {
2 2 | "version": 3,
3 3 | "sources": ["../../../src/core/performance/streaming-jsonl-parser.ts"],
4 | -
"sourcesContent": ["/**\n * Optimized Streaming JSONL Parser\n * Memory-efficient parsing for large JSONL files with async streaming\n */\n\nimport { createReadStream } from 'fs';\nimport { createInterface } from 'readline';\nimport { Transform, pipeline } from 'stream';\nimport { promisify } from 'util';\nimport { logger } from '../monitoring/logger.js';\n\nconst pipelineAsync = promisify(pipeline);\n\nexport interface ParseOptions {\n maxLineLength?: number;\n batchSize?: number;\n filter?: (obj: any) => boolean;\n transform?: (obj: any) => any;\n onProgress?: (processed: number, total?: number) => void;\n}\n\nexport class StreamingJSONLParser {\n private readonly DEFAULT_BATCH_SIZE = 100;\n private readonly DEFAULT_MAX_LINE_LENGTH = 1024 * 1024; // 1MB per line\n\n /**\n * Stream-parse a JSONL file with batching and backpressure handling\n */\n async*
5 | -
"mappings": "AAKA,SAAS,wBAAwB;AACjC,SAAS,uBAAuB;AAChC,SAAS,WAAW,gBAAgB;AACpC,SAAS,iBAAiB;AAC1B,SAAS,cAAc;AAEvB,MAAM,gBAAgB,UAAU,QAAQ;AAUjC,MAAM,qBAAqB;AAAA,EACf,qBAAqB;AAAA,EACrB,0BAA0B,OAAO;AAAA;AAAA;AAAA;AAAA;AAAA,EAKlD,OAAO,YACL,UACA,UAAwB,CAAC,GACW;AACpC,UAAM;AAAA,MACJ,YAAY,KAAK;AAAA,MACjB,gBAAgB,KAAK;AAAA,MACrB;AAAA,MACA;AAAA,MACA;AAAA,IACF,IAAI;AAEJ,UAAM,SAAS,iBAAiB,UAAU;AAAA,MACxC,UAAU;AAAA,MACV,eAAe,KAAK;AAAA;AAAA,IACtB,CAAC;AAED,UAAM,KAAK,gBAAgB;AAAA,MACzB,OAAO;AAAA,MACP,WAAW;AAAA,MACX,aAAa;AAAA;AAAA,IACf,CAAC;AAED,QAAI,QAAa,CAAC;AAClB,QAAI,YAAY;AAChB,QAAI,iBAAiB;AACrB,QAAI,aAAa;AAEjB,QAAI;AACF,uBAAiB,QAAQ,IAAI;AAC3B;AAEA,YAAI,KAAK,SAAS,eAAe;AAC/B,iBAAO,KAAK,2BAA2B;AAAA,YACrC,YAAY;AAAA,YACZ,QAAQ,KAAK;AAAA,YACb,WAAW;AAAA,UACb,CAAC;AACD;AACA;AAAA,QACF;AAEA,YAAI,CAAC,KAAK,KAAK,EAAG;AAElB,YAAI;AACF,cAAI,MAAM,KAAK,MAAM,IAAI;AAEzB,cAAI,UAAU,CAAC,OAAO,GAAG,EAAG;AAC5B,cAAI,UAAW,OAAM,UAAU,GAAG;AAElC,gBAAM,KAAK,GAAQ;AACnB;AAEA,cAAI,MAAM,UAAU,WAAW;AAC7B,kBAAM;AACN,oBAAQ,CAAC;AACT,yBAAa,cAAc;AAAA,UAC7B;AAAA,QACF,SAAS,YAAqB;AAC5B;AACA,iBAAO,MAAM,8BAA8B;AAAA,YACzC,YAAY;AAAA,YACZ,OAAO;AAAA,YACP,SAAS,KAAK,UAAU,GAAG,GAAG;AAAA,UAChC,CAAC;AAAA,QACH;AAAA,MACF;AAGA,UAAI,MAAM,SAAS,GAAG;AACpB,cAAM;AACN,qBAAa,cAAc;AAAA,MAC7B;AAAA,IACF,UAAE;AACA,SAAG,MAAM;AACT,aAAO,QAAQ;AAEf,aAAO,MAAM,0BAA0B;AAAA,QACrC;AAAA,QACA,YAAY;AAAA,QACZ,WAAW;AAAA,QACX,QAAQ;AAAA,MACV,CAAC;AAAA,IACH;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,SACJ,UACA,UAA2C,CAAC,GAC9B;AACd,UAAM,UAAe,CAAC;AAEtB,qBAAiB,SAAS,KAAK,YAAe,UAAU;AAAA,MACtD,GAAG;AAAA,MACH,WAAW,OAAO;AAAA,IACpB,CAAC,GAAG;AACF,cAAQ,KAAK,GAAG,KAAK;AAAA,IACvB;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,QACJ,UACA,WACA,UAAwB,CAAC,GACX;AACd,UAAM,UAAe,CAAC;AAEtB,qBAAiB,SAAS,KAAK,YAAe,UAAU,OAAO,GAAG;AAChE,YAAM,SAAS,MAAM,UAAU,KAAK;AACpC,cAAQ,KAAK,MAAM;AAAA,IACrB;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,
4 | +
"sourcesContent": ["/**\n * Optimized Streaming JSONL Parser\n * Memory-efficient parsing for large JSONL files with async streaming\n */\n\nimport { createReadStream } from 'fs';\nimport { createInterface } from 'readline';\nimport { Transform, pipeline } from 'stream';\nimport { promisify } from 'util';\nimport { logger } from '../monitoring/logger.js';\n\nconst pipelineAsync = promisify(pipeline);\n\nexport interface ParseOptions {\n maxLineLength?: number;\n batchSize?: number;\n filter?: (obj: any) => boolean;\n transform?: (obj: any) => any;\n onProgress?: (processed: number, total?: number) => void;\n}\n\nexport class StreamingJSONLParser {\n private readonly DEFAULT_BATCH_SIZE = 100;\n private readonly DEFAULT_MAX_LINE_LENGTH = 1024 * 1024; // 1MB per line\n\n /**\n * Stream-parse a JSONL file with batching and backpressure handling\n */\n async *parseStream<T = any>(\n filePath: string,\n options: ParseOptions = {}\n ): AsyncGenerator<T[], void, unknown> {\n const {\n batchSize = this.DEFAULT_BATCH_SIZE,\n maxLineLength = this.DEFAULT_MAX_LINE_LENGTH,\n filter,\n transform,\n onProgress,\n } = options;\n\n const stream = createReadStream(filePath, {\n encoding: 'utf8',\n highWaterMark: 64 * 1024, // 64KB chunks\n });\n\n const rl = createInterface({\n input: stream,\n crlfDelay: Infinity,\n historySize: 0, // Disable history for memory efficiency\n });\n\n let batch: T[] = [];\n let lineCount = 0;\n let processedCount = 0;\n let errorCount = 0;\n\n try {\n for await (const line of rl) {\n lineCount++;\n\n if (line.length > maxLineLength) {\n logger.warn('Skipping oversized line', {\n lineNumber: lineCount,\n length: line.length,\n maxLength: maxLineLength,\n });\n errorCount++;\n continue;\n }\n\n if (!line.trim()) continue;\n\n try {\n let obj = JSON.parse(line);\n\n if (filter && !filter(obj)) continue;\n if (transform) obj = transform(obj);\n\n batch.push(obj as T);\n processedCount++;\n\n if (batch.length >= batchSize) {\n yield batch;\n batch = [];\n onProgress?.(processedCount);\n }\n } catch (parseError: unknown) {\n errorCount++;\n logger.debug('Failed to parse JSONL line', {\n lineNumber: lineCount,\n error: parseError,\n preview: line.substring(0, 100),\n });\n }\n }\n\n // Yield remaining items\n if (batch.length > 0) {\n yield batch;\n onProgress?.(processedCount);\n }\n } finally {\n rl.close();\n stream.destroy();\n\n logger.debug('JSONL parsing complete', {\n filePath,\n totalLines: lineCount,\n processed: processedCount,\n errors: errorCount,\n });\n }\n }\n\n /**\n * Parse entire file into memory (use for smaller files)\n */\n async parseAll<T = any>(\n filePath: string,\n options: Omit<ParseOptions, 'batchSize'> = {}\n ): Promise<T[]> {\n const results: T[] = [];\n\n for await (const batch of this.parseStream<T>(filePath, {\n ...options,\n batchSize: Number.MAX_SAFE_INTEGER,\n })) {\n results.push(...batch);\n }\n\n return results;\n }\n\n /**\n * Process JSONL file with a custom processor function\n */\n async process<T = any, R = void>(\n filePath: string,\n processor: (items: T[]) => Promise<R>,\n options: ParseOptions = {}\n ): Promise<R[]> {\n const results: R[] = [];\n\n for await (const batch of this.parseStream<T>(filePath, options)) {\n const result = await processor(batch);\n results.push(result);\n }\n\n return results;\n }\n\n /**\n * Create a transform stream for JSONL parsing\n */\n createTransformStream<T = any>(options: ParseOptions = {}): Transform {\n const {\n filter,\n transform,\n maxLineLength = this.DEFAULT_MAX_LINE_LENGTH,\n } = options;\n 
let buffer = '';\n let lineCount = 0;\n\n return new Transform({\n objectMode: true,\n transform(chunk: Buffer | string, encoding, callback) {\n buffer += chunk.toString();\n const lines = buffer.split('\\n');\n\n // Keep incomplete line in buffer\n buffer = lines.pop() || '';\n\n for (const line of lines) {\n lineCount++;\n\n if (!line.trim()) continue;\n if (line.length > maxLineLength) {\n logger.warn('Skipping oversized line in transform', { lineCount });\n continue;\n }\n\n try {\n let obj = JSON.parse(line);\n\n if (filter && !filter(obj)) continue;\n if (transform) obj = transform(obj);\n\n this.push(obj);\n } catch (error: unknown) {\n logger.debug('Transform parse error', { lineCount, error });\n }\n }\n\n callback();\n },\n\n flush(callback) {\n // Process any remaining data\n if (buffer.trim()) {\n try {\n let obj = JSON.parse(buffer);\n if (!filter || filter(obj)) {\n if (transform) obj = transform(obj);\n this.push(obj);\n }\n } catch (error: unknown) {\n logger.debug('Flush parse error', { error });\n }\n }\n callback();\n },\n });\n }\n\n /**\n * Count lines in JSONL file without parsing\n */\n async countLines(filePath: string): Promise<number> {\n const stream = createReadStream(filePath, { encoding: 'utf8' });\n const rl = createInterface({ input: stream, historySize: 0 });\n\n let count = 0;\n for await (const _ of rl) {\n count++;\n }\n\n return count;\n }\n\n /**\n * Sample random lines from JSONL file\n */\n async *sampleLines<T = any>(\n filePath: string,\n sampleRate: number,\n options: Omit<ParseOptions, 'batchSize'> = {}\n ): AsyncGenerator<T, void, unknown> {\n if (sampleRate <= 0 || sampleRate > 1) {\n throw new Error('Sample rate must be between 0 and 1');\n }\n\n for await (const batch of this.parseStream<T>(filePath, options)) {\n for (const item of batch) {\n if (Math.random() < sampleRate) {\n yield item;\n }\n }\n }\n }\n}\n"],
"mappings": "AAKA,SAAS,wBAAwB;AACjC,SAAS,uBAAuB;AAChC,SAAS,WAAW,gBAAgB;AACpC,SAAS,iBAAiB;AAC1B,SAAS,cAAc;AAEvB,MAAM,gBAAgB,UAAU,QAAQ;AAUjC,MAAM,qBAAqB;AAAA,EACf,qBAAqB;AAAA,EACrB,0BAA0B,OAAO;AAAA;AAAA;AAAA;AAAA;AAAA,EAKlD,OAAO,YACL,UACA,UAAwB,CAAC,GACW;AACpC,UAAM;AAAA,MACJ,YAAY,KAAK;AAAA,MACjB,gBAAgB,KAAK;AAAA,MACrB;AAAA,MACA;AAAA,MACA;AAAA,IACF,IAAI;AAEJ,UAAM,SAAS,iBAAiB,UAAU;AAAA,MACxC,UAAU;AAAA,MACV,eAAe,KAAK;AAAA;AAAA,IACtB,CAAC;AAED,UAAM,KAAK,gBAAgB;AAAA,MACzB,OAAO;AAAA,MACP,WAAW;AAAA,MACX,aAAa;AAAA;AAAA,IACf,CAAC;AAED,QAAI,QAAa,CAAC;AAClB,QAAI,YAAY;AAChB,QAAI,iBAAiB;AACrB,QAAI,aAAa;AAEjB,QAAI;AACF,uBAAiB,QAAQ,IAAI;AAC3B;AAEA,YAAI,KAAK,SAAS,eAAe;AAC/B,iBAAO,KAAK,2BAA2B;AAAA,YACrC,YAAY;AAAA,YACZ,QAAQ,KAAK;AAAA,YACb,WAAW;AAAA,UACb,CAAC;AACD;AACA;AAAA,QACF;AAEA,YAAI,CAAC,KAAK,KAAK,EAAG;AAElB,YAAI;AACF,cAAI,MAAM,KAAK,MAAM,IAAI;AAEzB,cAAI,UAAU,CAAC,OAAO,GAAG,EAAG;AAC5B,cAAI,UAAW,OAAM,UAAU,GAAG;AAElC,gBAAM,KAAK,GAAQ;AACnB;AAEA,cAAI,MAAM,UAAU,WAAW;AAC7B,kBAAM;AACN,oBAAQ,CAAC;AACT,yBAAa,cAAc;AAAA,UAC7B;AAAA,QACF,SAAS,YAAqB;AAC5B;AACA,iBAAO,MAAM,8BAA8B;AAAA,YACzC,YAAY;AAAA,YACZ,OAAO;AAAA,YACP,SAAS,KAAK,UAAU,GAAG,GAAG;AAAA,UAChC,CAAC;AAAA,QACH;AAAA,MACF;AAGA,UAAI,MAAM,SAAS,GAAG;AACpB,cAAM;AACN,qBAAa,cAAc;AAAA,MAC7B;AAAA,IACF,UAAE;AACA,SAAG,MAAM;AACT,aAAO,QAAQ;AAEf,aAAO,MAAM,0BAA0B;AAAA,QACrC;AAAA,QACA,YAAY;AAAA,QACZ,WAAW;AAAA,QACX,QAAQ;AAAA,MACV,CAAC;AAAA,IACH;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,SACJ,UACA,UAA2C,CAAC,GAC9B;AACd,UAAM,UAAe,CAAC;AAEtB,qBAAiB,SAAS,KAAK,YAAe,UAAU;AAAA,MACtD,GAAG;AAAA,MACH,WAAW,OAAO;AAAA,IACpB,CAAC,GAAG;AACF,cAAQ,KAAK,GAAG,KAAK;AAAA,IACvB;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,QACJ,UACA,WACA,UAAwB,CAAC,GACX;AACd,UAAM,UAAe,CAAC;AAEtB,qBAAiB,SAAS,KAAK,YAAe,UAAU,OAAO,GAAG;AAChE,YAAM,SAAS,MAAM,UAAU,KAAK;AACpC,cAAQ,KAAK,MAAM;AAAA,IACrB;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,sBAA+B,UAAwB,CAAC,GAAc;AACpE,UAAM;AAAA,MACJ;AAAA,MACA;AAAA,MACA,gBAAgB,KAAK;AAAA,IACvB,IAAI;AACJ,QAAI,SAAS;AACb,QAAI,YAAY;AAEhB,WAAO,IAAI,UAAU;AAAA,MACnB,YAAY;AAAA,MACZ,UAAU,OAAwB,UAAU,UAAU;AACpD,kBAAU,MAAM,SAAS;AACzB,cAAM,QAAQ,OAAO,MAAM,IAAI;AAG/B,iBAAS,MAAM,IAAI,KAAK;AAExB,mBAAW,QAAQ,OAAO;AACxB;AAEA,cAAI,CAAC,KAAK,KAAK,EAAG;AAClB,cAAI,KAAK,SAAS,eAAe;AAC/B,mBAAO,KAAK,wCAAwC,EAAE,UAAU,CAAC;AACjE;AAAA,UACF;AAEA,cAAI;AACF,gBAAI,MAAM,KAAK,MAAM,IAAI;AAEzB,gBAAI,UAAU,CAAC,OAAO,GAAG,EAAG;AAC5B,gBAAI,UAAW,OAAM,UAAU,GAAG;AAElC,iBAAK,KAAK,GAAG;AAAA,UACf,SAAS,OAAgB;AACvB,mBAAO,MAAM,yBAAyB,EAAE,WAAW,MAAM,CAAC;AAAA,UAC5D;AAAA,QACF;AAEA,iBAAS;AAAA,MACX;AAAA,MAEA,MAAM,UAAU;AAEd,YAAI,OAAO,KAAK,GAAG;AACjB,cAAI;AACF,gBAAI,MAAM,KAAK,MAAM,MAAM;AAC3B,gBAAI,CAAC,UAAU,OAAO,GAAG,GAAG;AAC1B,kBAAI,UAAW,OAAM,UAAU,GAAG;AAClC,mBAAK,KAAK,GAAG;AAAA,YACf;AAAA,UACF,SAAS,OAAgB;AACvB,mBAAO,MAAM,qBAAqB,EAAE,MAAM,CAAC;AAAA,UAC7C;AAAA,QACF;AACA,iBAAS;AAAA,MACX;AAAA,IACF,CAAC;AAAA,EACH;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,WAAW,UAAmC;AAClD,UAAM,SAAS,iBAAiB,UAAU,EAAE,UAAU,OAAO,CAAC;AAC9D,UAAM,KAAK,gBAAgB,EAAE,OAAO,QAAQ,aAAa,EAAE,CAAC;AAE5D,QAAI,QAAQ;AACZ,qBAAiB,KAAK,IAAI;AACxB;AAAA,IACF;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,OAAO,YACL,UACA,YACA,UAA2C,CAAC,GACV;AAClC,QAAI,cAAc,KAAK,aAAa,GAAG;AACrC,YAAM,IAAI,MAAM,qCAAqC;AAAA,IACvD;AAEA,qBAAiB,SAAS,KAAK,YAAe,UAAU,OAAO,GAAG;AAChE,iBAAW,QAAQ,OAAO;AACxB,YAAI,KAAK,OAAO,IAAI,YAAY;AAC9B,gBAAM;AAAA,QACR;AAAA,MACF;AAAA,IACF;AAAA,EACF;AACF;",
"names": []
}
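The source embedded in the map above defines a `StreamingJSONLParser` with batched async iteration (`parseStream`), whole-file parsing (`parseAll`), a batch processor (`process`), a `Transform` stream factory, line counting, and random sampling. A minimal consumption sketch, assuming an illustrative import path and record shape (neither appears in this diff):

```ts
// Sketch only: the import path and LogEntry shape are assumptions, not part of the diff.
import { StreamingJSONLParser } from './streaming-jsonl-parser.js';

interface LogEntry {
  level: string;
  message: string;
}

async function countErrors(file: string): Promise<number> {
  const parser = new StreamingJSONLParser();
  let errors = 0;

  // parseStream yields batches (arrays) of parsed objects; filter runs per line before batching.
  for await (const batch of parser.parseStream<LogEntry>(file, {
    batchSize: 500,                             // yield every 500 accepted objects
    filter: (obj) => obj.level === 'error',     // keep only error-level records
    onProgress: (processed) => console.error(`parsed ${processed} records`),
  })) {
    errors += batch.length;
  }
  return errors;
}
```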
@@ -56,19 +56,16 @@ class ProjectManager {
       }
       project.primaryLanguage = this.detectPrimaryLanguage(path);
       project.framework = this.detectFramework(path);
-      await retry(
-
-
-
-
-
-
-
-          error: error instanceof Error ? error.message : String(error)
-        });
-      }
+      await retry(() => Promise.resolve(this.saveProject(project)), {
+        maxAttempts: 3,
+        initialDelay: 100,
+        onRetry: (attempt, error) => {
+          logger.warn(`Retrying project save (attempt ${attempt})`, {
+            projectId: project.id,
+            error: error instanceof Error ? error.message : String(error)
+          });
         }
-      );
+      });
       this.projectCache.set(path, project);
       this.currentProject = project;
       logger.info("Project auto-detected", {
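The hunk above routes `saveProject` through a `retry` wrapper configured with `maxAttempts`, `initialDelay`, and an `onRetry` callback. A minimal sketch of a helper compatible with that call site, assuming simple exponential backoff; the package's actual util may differ:

```ts
// Assumed option names mirror the call site shown in the hunk; defaults are illustrative.
interface RetryOptions {
  maxAttempts?: number;
  initialDelay?: number;                               // ms before the first retry
  onRetry?: (attempt: number, error: unknown) => void; // observe each failed attempt
}

async function retry<T>(fn: () => Promise<T>, opts: RetryOptions = {}): Promise<T> {
  const { maxAttempts = 3, initialDelay = 100, onRetry } = opts;
  let delay = initialDelay;
  for (let attempt = 1; ; attempt++) {
    try {
      return await fn();
    } catch (error) {
      if (attempt >= maxAttempts) throw error; // attempts exhausted: surface the last error
      onRetry?.(attempt, error);
      await new Promise((resolve) => setTimeout(resolve, delay));
      delay *= 2; // exponential backoff between attempts (assumption)
    }
  }
}
```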
@@ -647,14 +644,11 @@ class ProjectManager {
         await this.detectProject(projectPath);
         logger.info(`Discovered project: ${projectPath}`);
       } catch (error) {
-        logger.warn(
-
-
-
-
-          operation: "scanAndCategorizeAllProjects"
-        }
-      );
+        logger.warn(`Failed to analyze project: ${projectPath}`, {
+          projectPath,
+          error: error instanceof Error ? error.message : String(error),
+          operation: "scanAndCategorizeAllProjects"
+        });
       }
     }
   } catch (error) {