@stackmemoryai/stackmemory 0.5.33 → 0.5.35
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/agents/core/agent-task-manager.js.map +1 -1
- package/dist/cli/commands/clear.js +1 -1
- package/dist/cli/commands/clear.js.map +1 -1
- package/dist/cli/commands/context.js +1 -1
- package/dist/cli/commands/context.js.map +1 -1
- package/dist/cli/commands/dashboard.js.map +1 -1
- package/dist/cli/commands/discovery.js +1 -1
- package/dist/cli/commands/discovery.js.map +1 -1
- package/dist/cli/commands/handoff.js +1 -1
- package/dist/cli/commands/handoff.js.map +1 -1
- package/dist/cli/commands/monitor.js +1 -1
- package/dist/cli/commands/monitor.js.map +1 -1
- package/dist/cli/commands/quality.js +1 -1
- package/dist/cli/commands/quality.js.map +1 -1
- package/dist/cli/commands/skills.js +1 -1
- package/dist/cli/commands/skills.js.map +1 -1
- package/dist/cli/commands/workflow.js +1 -1
- package/dist/cli/commands/workflow.js.map +1 -1
- package/dist/cli/commands/worktree.js +1 -1
- package/dist/cli/commands/worktree.js.map +1 -1
- package/dist/cli/index.js +1 -1
- package/dist/cli/index.js.map +1 -1
- package/dist/core/context/auto-context.js.map +1 -1
- package/dist/core/context/compaction-handler.js.map +2 -2
- package/dist/core/context/context-bridge.js.map +2 -2
- package/dist/core/context/dual-stack-manager.js +1 -1
- package/dist/core/context/dual-stack-manager.js.map +1 -1
- package/dist/core/context/enhanced-rehydration.js.map +1 -1
- package/dist/core/context/frame-database.js +43 -10
- package/dist/core/context/frame-database.js.map +2 -2
- package/dist/core/context/frame-handoff-manager.js.map +1 -1
- package/dist/core/context/frame-lifecycle-hooks.js +119 -0
- package/dist/core/context/frame-lifecycle-hooks.js.map +7 -0
- package/dist/core/context/frame-stack.js +36 -7
- package/dist/core/context/frame-stack.js.map +2 -2
- package/dist/core/context/incremental-gc.js.map +2 -2
- package/dist/core/context/index.js +4 -22
- package/dist/core/context/index.js.map +2 -2
- package/dist/core/context/refactored-frame-manager.js +170 -37
- package/dist/core/context/refactored-frame-manager.js.map +3 -3
- package/dist/core/context/shared-context-layer.js.map +1 -1
- package/dist/core/context/stack-merge-resolver.js.map +1 -1
- package/dist/core/database/database-adapter.js.map +1 -1
- package/dist/core/database/paradedb-adapter.js.map +1 -1
- package/dist/core/database/query-router.js.map +1 -1
- package/dist/core/database/sqlite-adapter.js.map +1 -1
- package/dist/core/digest/frame-digest-integration.js.map +1 -1
- package/dist/core/digest/hybrid-digest-generator.js.map +1 -1
- package/dist/core/digest/types.js.map +1 -1
- package/dist/core/errors/index.js +249 -0
- package/dist/core/errors/index.js.map +2 -2
- package/dist/core/frame/workflow-templates.js.map +2 -2
- package/dist/core/merge/conflict-detector.js.map +1 -1
- package/dist/core/merge/resolution-engine.js.map +1 -1
- package/dist/core/merge/stack-diff.js.map +1 -1
- package/dist/core/models/model-router.js +10 -1
- package/dist/core/models/model-router.js.map +2 -2
- package/dist/core/monitoring/error-handler.js +37 -270
- package/dist/core/monitoring/error-handler.js.map +3 -3
- package/dist/core/monitoring/session-monitor.js.map +1 -1
- package/dist/core/performance/lazy-context-loader.js.map +1 -1
- package/dist/core/performance/optimized-frame-context.js.map +1 -1
- package/dist/core/retrieval/context-retriever.js.map +1 -1
- package/dist/core/retrieval/graph-retrieval.js.map +1 -1
- package/dist/core/retrieval/hierarchical-retrieval.js.map +1 -1
- package/dist/core/retrieval/llm-context-retrieval.js.map +1 -1
- package/dist/core/retrieval/retrieval-benchmarks.js.map +1 -1
- package/dist/core/retrieval/summary-generator.js.map +1 -1
- package/dist/core/retrieval/types.js.map +1 -1
- package/dist/core/storage/chromadb-adapter.js.map +1 -1
- package/dist/core/storage/infinite-storage.js.map +1 -1
- package/dist/core/storage/two-tier-storage.js.map +1 -1
- package/dist/features/tasks/task-aware-context.js.map +1 -1
- package/dist/features/web/server/index.js +1 -1
- package/dist/features/web/server/index.js.map +1 -1
- package/dist/hooks/schemas.js +50 -0
- package/dist/hooks/schemas.js.map +2 -2
- package/dist/hooks/sms-action-runner.js +47 -1
- package/dist/hooks/sms-action-runner.js.map +2 -2
- package/dist/hooks/sms-notify.js +63 -1
- package/dist/hooks/sms-notify.js.map +2 -2
- package/dist/hooks/sms-webhook.js +10 -3
- package/dist/hooks/sms-webhook.js.map +2 -2
- package/dist/hooks/whatsapp-commands.js +172 -69
- package/dist/hooks/whatsapp-commands.js.map +2 -2
- package/dist/hooks/whatsapp-sync.js +34 -0
- package/dist/hooks/whatsapp-sync.js.map +2 -2
- package/dist/index.js +1 -1
- package/dist/index.js.map +1 -1
- package/dist/integrations/mcp/handlers/context-handlers.js.map +1 -1
- package/dist/integrations/mcp/handlers/discovery-handlers.js.map +1 -1
- package/dist/integrations/mcp/server.js +1 -1
- package/dist/integrations/mcp/server.js.map +1 -1
- package/dist/integrations/ralph/bridge/ralph-stackmemory-bridge.js +1 -1
- package/dist/integrations/ralph/bridge/ralph-stackmemory-bridge.js.map +1 -1
- package/dist/integrations/ralph/context/stackmemory-context-loader.js +1 -1
- package/dist/integrations/ralph/context/stackmemory-context-loader.js.map +1 -1
- package/dist/integrations/ralph/learning/pattern-learner.js +1 -1
- package/dist/integrations/ralph/learning/pattern-learner.js.map +1 -1
- package/dist/integrations/ralph/orchestration/multi-loop-orchestrator.js +1 -1
- package/dist/integrations/ralph/orchestration/multi-loop-orchestrator.js.map +1 -1
- package/dist/integrations/ralph/swarm/swarm-coordinator.js +1 -1
- package/dist/integrations/ralph/swarm/swarm-coordinator.js.map +1 -1
- package/dist/integrations/ralph/visualization/ralph-debugger.js +1 -1
- package/dist/integrations/ralph/visualization/ralph-debugger.js.map +1 -1
- package/dist/mcp/stackmemory-mcp-server.js +1 -1
- package/dist/mcp/stackmemory-mcp-server.js.map +1 -1
- package/dist/skills/claude-skills.js.map +1 -1
- package/dist/skills/recursive-agent-orchestrator.js.map +1 -1
- package/dist/skills/unified-rlm-orchestrator.js.map +1 -1
- package/package.json +1 -1
|
@@ -1,7 +1,7 @@
|
|
|
1
1
|
{
|
|
2
2
|
"version": 3,
|
|
3
3
|
"sources": ["../../../src/core/digest/hybrid-digest-generator.ts"],
|
|
4
|
-
"sourcesContent": ["/**\n * Hybrid Digest Generator\n * Implements 80/20 split: deterministic extraction + AI review/insights\n */\n\nimport Database from 'better-sqlite3';\nimport {\n HybridDigest,\n DeterministicDigest,\n AIGeneratedDigest,\n DigestConfig,\n DigestInput,\n DigestGenerationRequest,\n DigestQueueStats,\n DigestLLMProvider,\n DigestStatus,\n FileModification,\n TestResult,\n ErrorInfo,\n DEFAULT_DIGEST_CONFIG,\n} from './types.js';\nimport { Frame, Anchor, Event } from '../context/frame-manager.js';\nimport { logger } from '../monitoring/logger.js';\n\n/**\n * Hybrid Digest Generator\n * Generates 80% deterministic + 20% AI review for frames\n */\nexport class HybridDigestGenerator {\n protected db: Database.Database;\n protected config: DigestConfig;\n protected llmProvider?: DigestLLMProvider;\n private queue: DigestGenerationRequest[] = [];\n private processing: boolean = false;\n private idleTimer?: NodeJS.Timeout;\n private stats: DigestQueueStats = {\n pending: 0,\n processing: 0,\n completed: 0,\n failed: 0,\n avgProcessingTimeMs: 0,\n };\n\n constructor(\n db: Database.Database,\n config: Partial<DigestConfig> = {},\n llmProvider?: DigestLLMProvider\n ) {\n this.db = db;\n this.config = { ...DEFAULT_DIGEST_CONFIG, ...config };\n this.llmProvider = llmProvider;\n this.initializeSchema();\n }\n\n private initializeSchema(): void {\n this.db.exec(`\n CREATE TABLE IF NOT EXISTS digest_queue (\n id INTEGER PRIMARY KEY AUTOINCREMENT,\n frame_id TEXT NOT NULL UNIQUE,\n frame_name TEXT NOT NULL,\n frame_type TEXT NOT NULL,\n priority TEXT DEFAULT 'normal',\n status TEXT DEFAULT 'pending',\n retry_count INTEGER DEFAULT 0,\n created_at INTEGER DEFAULT (unixepoch()),\n updated_at INTEGER DEFAULT (unixepoch()),\n error_message TEXT\n );\n\n CREATE INDEX IF NOT EXISTS idx_digest_queue_status ON digest_queue(status);\n CREATE INDEX IF NOT EXISTS idx_digest_queue_priority ON digest_queue(priority, created_at);\n `);\n }\n\n /**\n * Generate digest for a frame (immediate deterministic, queued AI)\n */\n public generateDigest(input: DigestInput): HybridDigest {\n const startTime = Date.now();\n\n // 1. Generate deterministic fields (60%) - always immediate\n const deterministic = this.extractDeterministicFields(input);\n\n // 2. Generate initial text summary from deterministic data\n const text = this.generateDeterministicText(input.frame, deterministic);\n\n // 3. Create the hybrid digest\n const digest: HybridDigest = {\n frameId: input.frame.frame_id,\n frameName: input.frame.name,\n frameType: input.frame.type,\n deterministic,\n status: 'deterministic_only',\n text,\n version: 1,\n createdAt: Date.now(),\n updatedAt: Date.now(),\n };\n\n // 4. 
Queue for AI generation if enabled\n if (this.config.enableAIGeneration && this.llmProvider) {\n this.queueForAIGeneration({\n frameId: input.frame.frame_id,\n frameName: input.frame.name,\n frameType: input.frame.type,\n priority: this.determinePriority(input),\n createdAt: Date.now(),\n retryCount: 0,\n maxRetries: this.config.maxRetries,\n });\n digest.status = 'ai_pending';\n }\n\n logger.debug('Generated deterministic digest', {\n frameId: input.frame.frame_id,\n durationMs: Date.now() - startTime,\n aiQueued: digest.status === 'ai_pending',\n });\n\n return digest;\n }\n\n /**\n * Extract deterministic fields from frame data (60%)\n */\n private extractDeterministicFields(input: DigestInput): DeterministicDigest {\n const { frame, anchors, events } = input;\n\n // Extract files modified from events\n const filesModified = this.extractFilesModified(events);\n\n // Extract test results\n const testsRun = this.extractTestResults(events);\n\n // Extract errors\n const errorsEncountered = this.extractErrors(events);\n\n // Count tool calls by type\n const toolCalls = events.filter((e) => e.event_type === 'tool_call');\n const toolCallsByType: Record<string, number> = {};\n for (const tc of toolCalls) {\n const toolName = tc.payload?.tool_name || 'unknown';\n toolCallsByType[toolName] = (toolCallsByType[toolName] || 0) + 1;\n }\n\n // Count anchors by type\n const anchorCounts: Record<string, number> = {};\n for (const anchor of anchors) {\n anchorCounts[anchor.type] = (anchorCounts[anchor.type] || 0) + 1;\n }\n\n // Extract decisions, constraints, risks\n const decisions = anchors\n .filter((a) => a.type === 'DECISION')\n .map((a) => a.text);\n const constraints = anchors\n .filter((a) => a.type === 'CONSTRAINT')\n .map((a) => a.text);\n const risks = anchors.filter((a) => a.type === 'RISK').map((a) => a.text);\n\n // Calculate duration\n const durationSeconds = frame.closed_at\n ? 
frame.closed_at - frame.created_at\n : Math.floor(Date.now() / 1000 - frame.created_at);\n\n // Determine exit status\n const exitStatus = this.determineExitStatus(frame, errorsEncountered);\n\n return {\n filesModified,\n testsRun,\n errorsEncountered,\n toolCallCount: toolCalls.length,\n toolCallsByType,\n durationSeconds,\n exitStatus,\n anchorCounts,\n decisions,\n constraints,\n risks,\n };\n }\n\n private extractFilesModified(events: Event[]): FileModification[] {\n const fileMap = new Map<string, FileModification>();\n\n for (const event of events) {\n if (\n event.event_type === 'tool_call' ||\n event.event_type === 'tool_result'\n ) {\n const payload = event.payload || {};\n\n // Handle various tool patterns\n const filePath = payload.file_path || payload.path || payload.file;\n if (filePath && typeof filePath === 'string') {\n const toolName = payload.tool_name || '';\n let operation: FileModification['operation'] = 'read';\n\n if (\n toolName.includes('write') ||\n toolName.includes('edit') ||\n toolName.includes('create')\n ) {\n operation = 'modify';\n } else if (\n toolName.includes('delete') ||\n toolName.includes('remove')\n ) {\n operation = 'delete';\n } else if (\n toolName.includes('read') ||\n toolName.includes('cat') ||\n toolName.includes('view')\n ) {\n operation = 'read';\n }\n\n const existing = fileMap.get(filePath);\n if (\n !existing ||\n this.operationPriority(operation) >\n this.operationPriority(existing.operation)\n ) {\n fileMap.set(filePath, {\n path: filePath,\n operation,\n linesChanged: payload.lines_changed,\n });\n }\n }\n\n // Handle filesAffected array\n const filesAffected = payload.filesAffected || payload.files_affected;\n if (Array.isArray(filesAffected)) {\n for (const f of filesAffected) {\n if (typeof f === 'string' && !fileMap.has(f)) {\n fileMap.set(f, { path: f, operation: 'modify' });\n }\n }\n }\n }\n }\n\n return Array.from(fileMap.values());\n }\n\n private operationPriority(op: FileModification['operation']): number {\n const priorities = { delete: 4, create: 3, modify: 2, read: 1 };\n return priorities[op] || 0;\n }\n\n private extractTestResults(events: Event[]): TestResult[] {\n const tests: TestResult[] = [];\n\n for (const event of events) {\n const payload = event.payload || {};\n\n // Look for test-related events\n if (\n payload.tool_name?.includes('test') ||\n payload.command?.includes('test') ||\n payload.test_name\n ) {\n const testName = payload.test_name || payload.command || 'unknown test';\n const success = payload.success !== false && !payload.error;\n\n tests.push({\n name: testName,\n status: success ? 'passed' : 'failed',\n duration: payload.duration,\n });\n }\n\n // Parse test output for results\n const output = payload.output || payload.result;\n if (typeof output === 'string') {\n // Match common test output patterns\n const passMatch = output.match(/(\\d+)\\s*(?:tests?\\s*)?passed/i);\n const failMatch = output.match(/(\\d+)\\s*(?:tests?\\s*)?failed/i);\n\n if (passMatch || failMatch) {\n const passed = passMatch ? parseInt(passMatch[1], 10) : 0;\n const failed = failMatch ? 
parseInt(failMatch[1], 10) : 0;\n\n if (passed > 0) {\n tests.push({ name: `${passed} tests`, status: 'passed' });\n }\n if (failed > 0) {\n tests.push({ name: `${failed} tests`, status: 'failed' });\n }\n }\n }\n }\n\n return tests;\n }\n\n private extractErrors(events: Event[]): ErrorInfo[] {\n const errorMap = new Map<string, ErrorInfo>();\n\n for (const event of events) {\n const payload = event.payload || {};\n\n // Check for explicit errors\n if (payload.error || payload.success === false) {\n const errorType = payload.error_type || 'UnknownError';\n const message =\n payload.error?.message || payload.error || 'Unknown error';\n\n const key = `${errorType}:${message.substring(0, 50)}`;\n const existing = errorMap.get(key);\n\n if (existing) {\n existing.count++;\n } else {\n errorMap.set(key, {\n type: errorType,\n message: String(message).substring(0, 200),\n resolved: false,\n count: 1,\n });\n }\n }\n }\n\n // Mark errors as resolved if there's a subsequent success\n // (simplified heuristic)\n return Array.from(errorMap.values());\n }\n\n private determineExitStatus(\n frame: Frame,\n errors: ErrorInfo[]\n ): DeterministicDigest['exitStatus'] {\n // Frame state is 'active' or 'closed', check outputs for cancellation\n const outputs = frame.outputs || {};\n if (outputs.cancelled || outputs.status === 'cancelled') return 'cancelled';\n if (errors.length === 0) return 'success';\n if (errors.some((e) => !e.resolved)) return 'failure';\n return 'partial';\n }\n\n /**\n * Generate text summary from deterministic data\n */\n private generateDeterministicText(\n frame: Frame,\n det: DeterministicDigest\n ): string {\n const parts: string[] = [];\n\n // Header\n parts.push(`## ${frame.name} (${frame.type})`);\n parts.push(`Status: ${det.exitStatus}`);\n\n // Duration\n if (det.durationSeconds > 0) {\n const mins = Math.floor(det.durationSeconds / 60);\n const secs = det.durationSeconds % 60;\n parts.push(`Duration: ${mins}m ${secs}s`);\n }\n\n // Files\n if (det.filesModified.length > 0) {\n parts.push(`\\n### Files Modified (${det.filesModified.length})`);\n for (const f of det.filesModified.slice(0, 10)) {\n parts.push(`- ${f.operation}: ${f.path}`);\n }\n if (det.filesModified.length > 10) {\n parts.push(` ...and ${det.filesModified.length - 10} more`);\n }\n }\n\n // Tool calls\n if (det.toolCallCount > 0) {\n parts.push(`\\n### Tool Calls (${det.toolCallCount})`);\n const sorted = Object.entries(det.toolCallsByType)\n .sort((a, b) => b[1] - a[1])\n .slice(0, 5);\n for (const [tool, count] of sorted) {\n parts.push(`- ${tool}: ${count}`);\n }\n }\n\n // Decisions\n if (det.decisions.length > 0) {\n parts.push(`\\n### Decisions (${det.decisions.length})`);\n for (const d of det.decisions.slice(0, 5)) {\n parts.push(`- ${d}`);\n }\n }\n\n // Constraints\n if (det.constraints.length > 0) {\n parts.push(`\\n### Constraints (${det.constraints.length})`);\n for (const c of det.constraints.slice(0, 3)) {\n parts.push(`- ${c}`);\n }\n }\n\n // Errors\n if (det.errorsEncountered.length > 0) {\n parts.push(`\\n### Errors (${det.errorsEncountered.length})`);\n for (const e of det.errorsEncountered.slice(0, 3)) {\n parts.push(`- ${e.type}: ${e.message.substring(0, 80)}`);\n }\n }\n\n // Tests\n if (det.testsRun.length > 0) {\n const passed = det.testsRun.filter((t) => t.status === 'passed').length;\n const failed = det.testsRun.filter((t) => t.status === 'failed').length;\n parts.push(`\\n### Tests: ${passed} passed, ${failed} failed`);\n }\n\n return parts.join('\\n');\n }\n\n /**\n * Queue 
frame for AI generation\n */\n private queueForAIGeneration(request: DigestGenerationRequest): void {\n try {\n this.db\n .prepare(\n `\n INSERT OR REPLACE INTO digest_queue \n (frame_id, frame_name, frame_type, priority, status, retry_count, created_at)\n VALUES (?, ?, ?, ?, 'pending', ?, ?)\n `\n )\n .run(\n request.frameId,\n request.frameName,\n request.frameType,\n request.priority,\n request.retryCount,\n Math.floor(request.createdAt / 1000)\n );\n\n this.stats.pending++;\n this.scheduleIdleProcessing();\n\n logger.debug('Queued frame for AI digest generation', {\n frameId: request.frameId,\n priority: request.priority,\n });\n } catch (error: any) {\n logger.error('Failed to queue digest generation', error);\n }\n }\n\n /**\n * Determine priority based on frame characteristics\n */\n private determinePriority(\n input: DigestInput\n ): DigestGenerationRequest['priority'] {\n const { frame, anchors, events } = input;\n\n // High priority for frames with many decisions or errors\n const decisionCount = anchors.filter((a) => a.type === 'DECISION').length;\n const errorCount = events.filter(\n (e) => e.payload?.error || e.payload?.success === false\n ).length;\n\n if (decisionCount >= 3 || errorCount >= 2) return 'high';\n if (decisionCount >= 1 || events.length >= 20) return 'normal';\n return 'low';\n }\n\n /**\n * Schedule idle-time processing\n */\n private scheduleIdleProcessing(): void {\n if (this.idleTimer) {\n clearTimeout(this.idleTimer);\n }\n\n this.idleTimer = setTimeout(() => {\n this.processQueue();\n }, this.config.idleThresholdMs);\n }\n\n /**\n * Process queued AI generation requests\n */\n public async processQueue(): Promise<void> {\n if (this.processing || !this.llmProvider) return;\n\n this.processing = true;\n\n try {\n // Get pending items ordered by priority and age\n const pending = this.db\n .prepare(\n `\n SELECT * FROM digest_queue \n WHERE status = 'pending' \n ORDER BY \n CASE priority \n WHEN 'high' THEN 1 \n WHEN 'normal' THEN 2 \n WHEN 'low' THEN 3 \n END,\n created_at ASC\n LIMIT ?\n `\n )\n .all(this.config.batchSize) as any[];\n\n for (const item of pending) {\n await this.processQueueItem(item);\n }\n } finally {\n this.processing = false;\n }\n }\n\n private async processQueueItem(item: any): Promise<void> {\n const startTime = Date.now();\n\n try {\n // Mark as processing\n this.db\n .prepare(\n `UPDATE digest_queue SET status = 'processing', updated_at = unixepoch() WHERE frame_id = ?`\n )\n .run(item.frame_id);\n\n this.stats.processing++;\n this.stats.pending--;\n\n // Get frame data\n const frame = this.db\n .prepare(`SELECT * FROM frames WHERE frame_id = ?`)\n .get(item.frame_id) as any;\n\n if (!frame) {\n throw new Error(`Frame not found: ${item.frame_id}`);\n }\n\n const anchors = this.db\n .prepare(`SELECT * FROM anchors WHERE frame_id = ?`)\n .all(item.frame_id) as Anchor[];\n\n const events = this.db\n .prepare(`SELECT * FROM events WHERE frame_id = ? 
ORDER BY ts ASC`)\n .all(item.frame_id) as Event[];\n\n // Parse JSON fields\n const parsedFrame: Frame = {\n ...frame,\n inputs: JSON.parse(frame.inputs || '{}'),\n outputs: JSON.parse(frame.outputs || '{}'),\n digest_json: JSON.parse(frame.digest_json || '{}'),\n };\n\n const input: DigestInput = {\n frame: parsedFrame,\n anchors: anchors.map((a: any) => ({\n ...a,\n metadata: JSON.parse(a.metadata || '{}'),\n })),\n events: events.map((e: any) => ({\n ...e,\n payload: JSON.parse(e.payload || '{}'),\n })),\n };\n\n // Generate deterministic first (needed for AI context)\n const deterministic = this.extractDeterministicFields(input);\n\n // Generate AI summary\n const aiGenerated = await this.llmProvider!.generateSummary(\n input,\n deterministic,\n this.config.maxTokens\n );\n\n // Update digest in frames table\n const existingDigest = parsedFrame.digest_json || {};\n const updatedDigest = {\n ...existingDigest,\n aiGenerated,\n status: 'complete',\n updatedAt: Date.now(),\n };\n\n // Generate enhanced text with AI summary\n const enhancedText = this.generateEnhancedText(\n parsedFrame,\n deterministic,\n aiGenerated\n );\n\n this.db\n .prepare(\n `\n UPDATE frames \n SET digest_json = ?, digest_text = ?\n WHERE frame_id = ?\n `\n )\n .run(JSON.stringify(updatedDigest), enhancedText, item.frame_id);\n\n // Mark as completed\n this.db\n .prepare(\n `UPDATE digest_queue SET status = 'completed', updated_at = unixepoch() WHERE frame_id = ?`\n )\n .run(item.frame_id);\n\n this.stats.processing--;\n this.stats.completed++;\n\n // Update average processing time\n const processingTime = Date.now() - startTime;\n this.stats.avgProcessingTimeMs =\n (this.stats.avgProcessingTimeMs * (this.stats.completed - 1) +\n processingTime) /\n this.stats.completed;\n\n logger.info('Generated AI digest', {\n frameId: item.frame_id,\n processingTimeMs: processingTime,\n });\n } catch (error: any) {\n // Handle retry logic\n const newRetryCount = item.retry_count + 1;\n\n if (newRetryCount < this.config.maxRetries) {\n this.db\n .prepare(\n `\n UPDATE digest_queue \n SET status = 'pending', retry_count = ?, error_message = ?, updated_at = unixepoch()\n WHERE frame_id = ?\n `\n )\n .run(newRetryCount, error.message, item.frame_id);\n\n this.stats.processing--;\n this.stats.pending++;\n\n logger.warn('AI digest generation failed, will retry', {\n frameId: item.frame_id,\n retryCount: newRetryCount,\n error: error.message,\n });\n } else {\n // Mark as failed\n this.db\n .prepare(\n `\n UPDATE digest_queue \n SET status = 'failed', error_message = ?, updated_at = unixepoch()\n WHERE frame_id = ?\n `\n )\n .run(error.message, item.frame_id);\n\n this.stats.processing--;\n this.stats.failed++;\n\n logger.error('AI digest generation failed permanently', error, {\n frameId: item.frame_id,\n });\n }\n }\n }\n\n /**\n * Generate enhanced text with AI review (20%)\n */\n private generateEnhancedText(\n frame: Frame,\n det: DeterministicDigest,\n ai: AIGeneratedDigest\n ): string {\n const parts: string[] = [];\n\n // Deterministic content first (80%)\n parts.push(this.generateDeterministicText(frame, det));\n\n // AI review section (20%) - compact\n parts.push(`\\n---`);\n parts.push(`**AI Review**: ${ai.summary}`);\n\n if (ai.insight) {\n parts.push(`**Insight**: ${ai.insight}`);\n }\n\n if (ai.flaggedIssue) {\n parts.push(`**Flag**: ${ai.flaggedIssue}`);\n }\n\n return parts.join('\\n');\n }\n\n /**\n * Get queue statistics\n */\n public getStats(): DigestQueueStats {\n return { ...this.stats };\n }\n\n /**\n * Set LLM 
provider\n */\n public setLLMProvider(provider: DigestLLMProvider): void {\n this.llmProvider = provider;\n }\n\n /**\n * Force process queue (for testing or manual trigger)\n */\n public async forceProcessQueue(): Promise<void> {\n if (this.idleTimer) {\n clearTimeout(this.idleTimer);\n }\n await this.processQueue();\n }\n\n /**\n * Get digest for a frame\n */\n public getDigest(frameId: string): HybridDigest | null {\n const frame = this.db\n .prepare(`SELECT * FROM frames WHERE frame_id = ?`)\n .get(frameId) as any;\n\n if (!frame) return null;\n\n const digestJson = JSON.parse(frame.digest_json || '{}');\n const anchors = this.db\n .prepare(`SELECT * FROM anchors WHERE frame_id = ?`)\n .all(frameId) as any[];\n const events = this.db\n .prepare(`SELECT * FROM events WHERE frame_id = ?`)\n .all(frameId) as any[];\n\n const parsedFrame: Frame = {\n ...frame,\n inputs: JSON.parse(frame.inputs || '{}'),\n outputs: JSON.parse(frame.outputs || '{}'),\n digest_json: digestJson,\n };\n\n const input: DigestInput = {\n frame: parsedFrame,\n anchors: anchors.map((a) => ({\n ...a,\n metadata: JSON.parse(a.metadata || '{}'),\n })),\n events: events.map((e) => ({\n ...e,\n payload: JSON.parse(e.payload || '{}'),\n })),\n };\n\n const deterministic = this.extractDeterministicFields(input);\n\n // Check queue status\n const queueItem = this.db\n .prepare(`SELECT status FROM digest_queue WHERE frame_id = ?`)\n .get(frameId) as any;\n\n let status: DigestStatus = 'deterministic_only';\n if (digestJson.aiGenerated) {\n status = 'complete';\n } else if (queueItem) {\n status =\n queueItem.status === 'processing'\n ? 'ai_processing'\n : queueItem.status === 'failed'\n ? 'ai_failed'\n : 'ai_pending';\n }\n\n return {\n frameId: frame.frame_id,\n frameName: frame.name,\n frameType: frame.type,\n deterministic,\n aiGenerated: digestJson.aiGenerated,\n status,\n text:\n frame.digest_text ||\n this.generateDeterministicText(parsedFrame, deterministic),\n version: 1,\n createdAt: frame.created_at * 1000,\n updatedAt: digestJson.updatedAt || frame.created_at * 1000,\n };\n }\n}\n"],
|
|
4
|
+
"sourcesContent": ["/**\n * Hybrid Digest Generator\n * Implements 80/20 split: deterministic extraction + AI review/insights\n */\n\nimport Database from 'better-sqlite3';\nimport {\n HybridDigest,\n DeterministicDigest,\n AIGeneratedDigest,\n DigestConfig,\n DigestInput,\n DigestGenerationRequest,\n DigestQueueStats,\n DigestLLMProvider,\n DigestStatus,\n FileModification,\n TestResult,\n ErrorInfo,\n DEFAULT_DIGEST_CONFIG,\n} from './types.js';\nimport { Frame, Anchor, Event } from '../context/index.js';\nimport { logger } from '../monitoring/logger.js';\n\n/**\n * Hybrid Digest Generator\n * Generates 80% deterministic + 20% AI review for frames\n */\nexport class HybridDigestGenerator {\n protected db: Database.Database;\n protected config: DigestConfig;\n protected llmProvider?: DigestLLMProvider;\n private queue: DigestGenerationRequest[] = [];\n private processing: boolean = false;\n private idleTimer?: NodeJS.Timeout;\n private stats: DigestQueueStats = {\n pending: 0,\n processing: 0,\n completed: 0,\n failed: 0,\n avgProcessingTimeMs: 0,\n };\n\n constructor(\n db: Database.Database,\n config: Partial<DigestConfig> = {},\n llmProvider?: DigestLLMProvider\n ) {\n this.db = db;\n this.config = { ...DEFAULT_DIGEST_CONFIG, ...config };\n this.llmProvider = llmProvider;\n this.initializeSchema();\n }\n\n private initializeSchema(): void {\n this.db.exec(`\n CREATE TABLE IF NOT EXISTS digest_queue (\n id INTEGER PRIMARY KEY AUTOINCREMENT,\n frame_id TEXT NOT NULL UNIQUE,\n frame_name TEXT NOT NULL,\n frame_type TEXT NOT NULL,\n priority TEXT DEFAULT 'normal',\n status TEXT DEFAULT 'pending',\n retry_count INTEGER DEFAULT 0,\n created_at INTEGER DEFAULT (unixepoch()),\n updated_at INTEGER DEFAULT (unixepoch()),\n error_message TEXT\n );\n\n CREATE INDEX IF NOT EXISTS idx_digest_queue_status ON digest_queue(status);\n CREATE INDEX IF NOT EXISTS idx_digest_queue_priority ON digest_queue(priority, created_at);\n `);\n }\n\n /**\n * Generate digest for a frame (immediate deterministic, queued AI)\n */\n public generateDigest(input: DigestInput): HybridDigest {\n const startTime = Date.now();\n\n // 1. Generate deterministic fields (60%) - always immediate\n const deterministic = this.extractDeterministicFields(input);\n\n // 2. Generate initial text summary from deterministic data\n const text = this.generateDeterministicText(input.frame, deterministic);\n\n // 3. Create the hybrid digest\n const digest: HybridDigest = {\n frameId: input.frame.frame_id,\n frameName: input.frame.name,\n frameType: input.frame.type,\n deterministic,\n status: 'deterministic_only',\n text,\n version: 1,\n createdAt: Date.now(),\n updatedAt: Date.now(),\n };\n\n // 4. 
Queue for AI generation if enabled\n if (this.config.enableAIGeneration && this.llmProvider) {\n this.queueForAIGeneration({\n frameId: input.frame.frame_id,\n frameName: input.frame.name,\n frameType: input.frame.type,\n priority: this.determinePriority(input),\n createdAt: Date.now(),\n retryCount: 0,\n maxRetries: this.config.maxRetries,\n });\n digest.status = 'ai_pending';\n }\n\n logger.debug('Generated deterministic digest', {\n frameId: input.frame.frame_id,\n durationMs: Date.now() - startTime,\n aiQueued: digest.status === 'ai_pending',\n });\n\n return digest;\n }\n\n /**\n * Extract deterministic fields from frame data (60%)\n */\n private extractDeterministicFields(input: DigestInput): DeterministicDigest {\n const { frame, anchors, events } = input;\n\n // Extract files modified from events\n const filesModified = this.extractFilesModified(events);\n\n // Extract test results\n const testsRun = this.extractTestResults(events);\n\n // Extract errors\n const errorsEncountered = this.extractErrors(events);\n\n // Count tool calls by type\n const toolCalls = events.filter((e) => e.event_type === 'tool_call');\n const toolCallsByType: Record<string, number> = {};\n for (const tc of toolCalls) {\n const toolName = tc.payload?.tool_name || 'unknown';\n toolCallsByType[toolName] = (toolCallsByType[toolName] || 0) + 1;\n }\n\n // Count anchors by type\n const anchorCounts: Record<string, number> = {};\n for (const anchor of anchors) {\n anchorCounts[anchor.type] = (anchorCounts[anchor.type] || 0) + 1;\n }\n\n // Extract decisions, constraints, risks\n const decisions = anchors\n .filter((a) => a.type === 'DECISION')\n .map((a) => a.text);\n const constraints = anchors\n .filter((a) => a.type === 'CONSTRAINT')\n .map((a) => a.text);\n const risks = anchors.filter((a) => a.type === 'RISK').map((a) => a.text);\n\n // Calculate duration\n const durationSeconds = frame.closed_at\n ? 
frame.closed_at - frame.created_at\n : Math.floor(Date.now() / 1000 - frame.created_at);\n\n // Determine exit status\n const exitStatus = this.determineExitStatus(frame, errorsEncountered);\n\n return {\n filesModified,\n testsRun,\n errorsEncountered,\n toolCallCount: toolCalls.length,\n toolCallsByType,\n durationSeconds,\n exitStatus,\n anchorCounts,\n decisions,\n constraints,\n risks,\n };\n }\n\n private extractFilesModified(events: Event[]): FileModification[] {\n const fileMap = new Map<string, FileModification>();\n\n for (const event of events) {\n if (\n event.event_type === 'tool_call' ||\n event.event_type === 'tool_result'\n ) {\n const payload = event.payload || {};\n\n // Handle various tool patterns\n const filePath = payload.file_path || payload.path || payload.file;\n if (filePath && typeof filePath === 'string') {\n const toolName = payload.tool_name || '';\n let operation: FileModification['operation'] = 'read';\n\n if (\n toolName.includes('write') ||\n toolName.includes('edit') ||\n toolName.includes('create')\n ) {\n operation = 'modify';\n } else if (\n toolName.includes('delete') ||\n toolName.includes('remove')\n ) {\n operation = 'delete';\n } else if (\n toolName.includes('read') ||\n toolName.includes('cat') ||\n toolName.includes('view')\n ) {\n operation = 'read';\n }\n\n const existing = fileMap.get(filePath);\n if (\n !existing ||\n this.operationPriority(operation) >\n this.operationPriority(existing.operation)\n ) {\n fileMap.set(filePath, {\n path: filePath,\n operation,\n linesChanged: payload.lines_changed,\n });\n }\n }\n\n // Handle filesAffected array\n const filesAffected = payload.filesAffected || payload.files_affected;\n if (Array.isArray(filesAffected)) {\n for (const f of filesAffected) {\n if (typeof f === 'string' && !fileMap.has(f)) {\n fileMap.set(f, { path: f, operation: 'modify' });\n }\n }\n }\n }\n }\n\n return Array.from(fileMap.values());\n }\n\n private operationPriority(op: FileModification['operation']): number {\n const priorities = { delete: 4, create: 3, modify: 2, read: 1 };\n return priorities[op] || 0;\n }\n\n private extractTestResults(events: Event[]): TestResult[] {\n const tests: TestResult[] = [];\n\n for (const event of events) {\n const payload = event.payload || {};\n\n // Look for test-related events\n if (\n payload.tool_name?.includes('test') ||\n payload.command?.includes('test') ||\n payload.test_name\n ) {\n const testName = payload.test_name || payload.command || 'unknown test';\n const success = payload.success !== false && !payload.error;\n\n tests.push({\n name: testName,\n status: success ? 'passed' : 'failed',\n duration: payload.duration,\n });\n }\n\n // Parse test output for results\n const output = payload.output || payload.result;\n if (typeof output === 'string') {\n // Match common test output patterns\n const passMatch = output.match(/(\\d+)\\s*(?:tests?\\s*)?passed/i);\n const failMatch = output.match(/(\\d+)\\s*(?:tests?\\s*)?failed/i);\n\n if (passMatch || failMatch) {\n const passed = passMatch ? parseInt(passMatch[1], 10) : 0;\n const failed = failMatch ? 
parseInt(failMatch[1], 10) : 0;\n\n if (passed > 0) {\n tests.push({ name: `${passed} tests`, status: 'passed' });\n }\n if (failed > 0) {\n tests.push({ name: `${failed} tests`, status: 'failed' });\n }\n }\n }\n }\n\n return tests;\n }\n\n private extractErrors(events: Event[]): ErrorInfo[] {\n const errorMap = new Map<string, ErrorInfo>();\n\n for (const event of events) {\n const payload = event.payload || {};\n\n // Check for explicit errors\n if (payload.error || payload.success === false) {\n const errorType = payload.error_type || 'UnknownError';\n const message =\n payload.error?.message || payload.error || 'Unknown error';\n\n const key = `${errorType}:${message.substring(0, 50)}`;\n const existing = errorMap.get(key);\n\n if (existing) {\n existing.count++;\n } else {\n errorMap.set(key, {\n type: errorType,\n message: String(message).substring(0, 200),\n resolved: false,\n count: 1,\n });\n }\n }\n }\n\n // Mark errors as resolved if there's a subsequent success\n // (simplified heuristic)\n return Array.from(errorMap.values());\n }\n\n private determineExitStatus(\n frame: Frame,\n errors: ErrorInfo[]\n ): DeterministicDigest['exitStatus'] {\n // Frame state is 'active' or 'closed', check outputs for cancellation\n const outputs = frame.outputs || {};\n if (outputs.cancelled || outputs.status === 'cancelled') return 'cancelled';\n if (errors.length === 0) return 'success';\n if (errors.some((e) => !e.resolved)) return 'failure';\n return 'partial';\n }\n\n /**\n * Generate text summary from deterministic data\n */\n private generateDeterministicText(\n frame: Frame,\n det: DeterministicDigest\n ): string {\n const parts: string[] = [];\n\n // Header\n parts.push(`## ${frame.name} (${frame.type})`);\n parts.push(`Status: ${det.exitStatus}`);\n\n // Duration\n if (det.durationSeconds > 0) {\n const mins = Math.floor(det.durationSeconds / 60);\n const secs = det.durationSeconds % 60;\n parts.push(`Duration: ${mins}m ${secs}s`);\n }\n\n // Files\n if (det.filesModified.length > 0) {\n parts.push(`\\n### Files Modified (${det.filesModified.length})`);\n for (const f of det.filesModified.slice(0, 10)) {\n parts.push(`- ${f.operation}: ${f.path}`);\n }\n if (det.filesModified.length > 10) {\n parts.push(` ...and ${det.filesModified.length - 10} more`);\n }\n }\n\n // Tool calls\n if (det.toolCallCount > 0) {\n parts.push(`\\n### Tool Calls (${det.toolCallCount})`);\n const sorted = Object.entries(det.toolCallsByType)\n .sort((a, b) => b[1] - a[1])\n .slice(0, 5);\n for (const [tool, count] of sorted) {\n parts.push(`- ${tool}: ${count}`);\n }\n }\n\n // Decisions\n if (det.decisions.length > 0) {\n parts.push(`\\n### Decisions (${det.decisions.length})`);\n for (const d of det.decisions.slice(0, 5)) {\n parts.push(`- ${d}`);\n }\n }\n\n // Constraints\n if (det.constraints.length > 0) {\n parts.push(`\\n### Constraints (${det.constraints.length})`);\n for (const c of det.constraints.slice(0, 3)) {\n parts.push(`- ${c}`);\n }\n }\n\n // Errors\n if (det.errorsEncountered.length > 0) {\n parts.push(`\\n### Errors (${det.errorsEncountered.length})`);\n for (const e of det.errorsEncountered.slice(0, 3)) {\n parts.push(`- ${e.type}: ${e.message.substring(0, 80)}`);\n }\n }\n\n // Tests\n if (det.testsRun.length > 0) {\n const passed = det.testsRun.filter((t) => t.status === 'passed').length;\n const failed = det.testsRun.filter((t) => t.status === 'failed').length;\n parts.push(`\\n### Tests: ${passed} passed, ${failed} failed`);\n }\n\n return parts.join('\\n');\n }\n\n /**\n * Queue 
frame for AI generation\n */\n private queueForAIGeneration(request: DigestGenerationRequest): void {\n try {\n this.db\n .prepare(\n `\n INSERT OR REPLACE INTO digest_queue \n (frame_id, frame_name, frame_type, priority, status, retry_count, created_at)\n VALUES (?, ?, ?, ?, 'pending', ?, ?)\n `\n )\n .run(\n request.frameId,\n request.frameName,\n request.frameType,\n request.priority,\n request.retryCount,\n Math.floor(request.createdAt / 1000)\n );\n\n this.stats.pending++;\n this.scheduleIdleProcessing();\n\n logger.debug('Queued frame for AI digest generation', {\n frameId: request.frameId,\n priority: request.priority,\n });\n } catch (error: any) {\n logger.error('Failed to queue digest generation', error);\n }\n }\n\n /**\n * Determine priority based on frame characteristics\n */\n private determinePriority(\n input: DigestInput\n ): DigestGenerationRequest['priority'] {\n const { frame, anchors, events } = input;\n\n // High priority for frames with many decisions or errors\n const decisionCount = anchors.filter((a) => a.type === 'DECISION').length;\n const errorCount = events.filter(\n (e) => e.payload?.error || e.payload?.success === false\n ).length;\n\n if (decisionCount >= 3 || errorCount >= 2) return 'high';\n if (decisionCount >= 1 || events.length >= 20) return 'normal';\n return 'low';\n }\n\n /**\n * Schedule idle-time processing\n */\n private scheduleIdleProcessing(): void {\n if (this.idleTimer) {\n clearTimeout(this.idleTimer);\n }\n\n this.idleTimer = setTimeout(() => {\n this.processQueue();\n }, this.config.idleThresholdMs);\n }\n\n /**\n * Process queued AI generation requests\n */\n public async processQueue(): Promise<void> {\n if (this.processing || !this.llmProvider) return;\n\n this.processing = true;\n\n try {\n // Get pending items ordered by priority and age\n const pending = this.db\n .prepare(\n `\n SELECT * FROM digest_queue \n WHERE status = 'pending' \n ORDER BY \n CASE priority \n WHEN 'high' THEN 1 \n WHEN 'normal' THEN 2 \n WHEN 'low' THEN 3 \n END,\n created_at ASC\n LIMIT ?\n `\n )\n .all(this.config.batchSize) as any[];\n\n for (const item of pending) {\n await this.processQueueItem(item);\n }\n } finally {\n this.processing = false;\n }\n }\n\n private async processQueueItem(item: any): Promise<void> {\n const startTime = Date.now();\n\n try {\n // Mark as processing\n this.db\n .prepare(\n `UPDATE digest_queue SET status = 'processing', updated_at = unixepoch() WHERE frame_id = ?`\n )\n .run(item.frame_id);\n\n this.stats.processing++;\n this.stats.pending--;\n\n // Get frame data\n const frame = this.db\n .prepare(`SELECT * FROM frames WHERE frame_id = ?`)\n .get(item.frame_id) as any;\n\n if (!frame) {\n throw new Error(`Frame not found: ${item.frame_id}`);\n }\n\n const anchors = this.db\n .prepare(`SELECT * FROM anchors WHERE frame_id = ?`)\n .all(item.frame_id) as Anchor[];\n\n const events = this.db\n .prepare(`SELECT * FROM events WHERE frame_id = ? 
ORDER BY ts ASC`)\n .all(item.frame_id) as Event[];\n\n // Parse JSON fields\n const parsedFrame: Frame = {\n ...frame,\n inputs: JSON.parse(frame.inputs || '{}'),\n outputs: JSON.parse(frame.outputs || '{}'),\n digest_json: JSON.parse(frame.digest_json || '{}'),\n };\n\n const input: DigestInput = {\n frame: parsedFrame,\n anchors: anchors.map((a: any) => ({\n ...a,\n metadata: JSON.parse(a.metadata || '{}'),\n })),\n events: events.map((e: any) => ({\n ...e,\n payload: JSON.parse(e.payload || '{}'),\n })),\n };\n\n // Generate deterministic first (needed for AI context)\n const deterministic = this.extractDeterministicFields(input);\n\n // Generate AI summary\n const aiGenerated = await this.llmProvider!.generateSummary(\n input,\n deterministic,\n this.config.maxTokens\n );\n\n // Update digest in frames table\n const existingDigest = parsedFrame.digest_json || {};\n const updatedDigest = {\n ...existingDigest,\n aiGenerated,\n status: 'complete',\n updatedAt: Date.now(),\n };\n\n // Generate enhanced text with AI summary\n const enhancedText = this.generateEnhancedText(\n parsedFrame,\n deterministic,\n aiGenerated\n );\n\n this.db\n .prepare(\n `\n UPDATE frames \n SET digest_json = ?, digest_text = ?\n WHERE frame_id = ?\n `\n )\n .run(JSON.stringify(updatedDigest), enhancedText, item.frame_id);\n\n // Mark as completed\n this.db\n .prepare(\n `UPDATE digest_queue SET status = 'completed', updated_at = unixepoch() WHERE frame_id = ?`\n )\n .run(item.frame_id);\n\n this.stats.processing--;\n this.stats.completed++;\n\n // Update average processing time\n const processingTime = Date.now() - startTime;\n this.stats.avgProcessingTimeMs =\n (this.stats.avgProcessingTimeMs * (this.stats.completed - 1) +\n processingTime) /\n this.stats.completed;\n\n logger.info('Generated AI digest', {\n frameId: item.frame_id,\n processingTimeMs: processingTime,\n });\n } catch (error: any) {\n // Handle retry logic\n const newRetryCount = item.retry_count + 1;\n\n if (newRetryCount < this.config.maxRetries) {\n this.db\n .prepare(\n `\n UPDATE digest_queue \n SET status = 'pending', retry_count = ?, error_message = ?, updated_at = unixepoch()\n WHERE frame_id = ?\n `\n )\n .run(newRetryCount, error.message, item.frame_id);\n\n this.stats.processing--;\n this.stats.pending++;\n\n logger.warn('AI digest generation failed, will retry', {\n frameId: item.frame_id,\n retryCount: newRetryCount,\n error: error.message,\n });\n } else {\n // Mark as failed\n this.db\n .prepare(\n `\n UPDATE digest_queue \n SET status = 'failed', error_message = ?, updated_at = unixepoch()\n WHERE frame_id = ?\n `\n )\n .run(error.message, item.frame_id);\n\n this.stats.processing--;\n this.stats.failed++;\n\n logger.error('AI digest generation failed permanently', error, {\n frameId: item.frame_id,\n });\n }\n }\n }\n\n /**\n * Generate enhanced text with AI review (20%)\n */\n private generateEnhancedText(\n frame: Frame,\n det: DeterministicDigest,\n ai: AIGeneratedDigest\n ): string {\n const parts: string[] = [];\n\n // Deterministic content first (80%)\n parts.push(this.generateDeterministicText(frame, det));\n\n // AI review section (20%) - compact\n parts.push(`\\n---`);\n parts.push(`**AI Review**: ${ai.summary}`);\n\n if (ai.insight) {\n parts.push(`**Insight**: ${ai.insight}`);\n }\n\n if (ai.flaggedIssue) {\n parts.push(`**Flag**: ${ai.flaggedIssue}`);\n }\n\n return parts.join('\\n');\n }\n\n /**\n * Get queue statistics\n */\n public getStats(): DigestQueueStats {\n return { ...this.stats };\n }\n\n /**\n * Set LLM 
provider\n */\n public setLLMProvider(provider: DigestLLMProvider): void {\n this.llmProvider = provider;\n }\n\n /**\n * Force process queue (for testing or manual trigger)\n */\n public async forceProcessQueue(): Promise<void> {\n if (this.idleTimer) {\n clearTimeout(this.idleTimer);\n }\n await this.processQueue();\n }\n\n /**\n * Get digest for a frame\n */\n public getDigest(frameId: string): HybridDigest | null {\n const frame = this.db\n .prepare(`SELECT * FROM frames WHERE frame_id = ?`)\n .get(frameId) as any;\n\n if (!frame) return null;\n\n const digestJson = JSON.parse(frame.digest_json || '{}');\n const anchors = this.db\n .prepare(`SELECT * FROM anchors WHERE frame_id = ?`)\n .all(frameId) as any[];\n const events = this.db\n .prepare(`SELECT * FROM events WHERE frame_id = ?`)\n .all(frameId) as any[];\n\n const parsedFrame: Frame = {\n ...frame,\n inputs: JSON.parse(frame.inputs || '{}'),\n outputs: JSON.parse(frame.outputs || '{}'),\n digest_json: digestJson,\n };\n\n const input: DigestInput = {\n frame: parsedFrame,\n anchors: anchors.map((a) => ({\n ...a,\n metadata: JSON.parse(a.metadata || '{}'),\n })),\n events: events.map((e) => ({\n ...e,\n payload: JSON.parse(e.payload || '{}'),\n })),\n };\n\n const deterministic = this.extractDeterministicFields(input);\n\n // Check queue status\n const queueItem = this.db\n .prepare(`SELECT status FROM digest_queue WHERE frame_id = ?`)\n .get(frameId) as any;\n\n let status: DigestStatus = 'deterministic_only';\n if (digestJson.aiGenerated) {\n status = 'complete';\n } else if (queueItem) {\n status =\n queueItem.status === 'processing'\n ? 'ai_processing'\n : queueItem.status === 'failed'\n ? 'ai_failed'\n : 'ai_pending';\n }\n\n return {\n frameId: frame.frame_id,\n frameName: frame.name,\n frameType: frame.type,\n deterministic,\n aiGenerated: digestJson.aiGenerated,\n status,\n text:\n frame.digest_text ||\n this.generateDeterministicText(parsedFrame, deterministic),\n version: 1,\n createdAt: frame.created_at * 1000,\n updatedAt: digestJson.updatedAt || frame.created_at * 1000,\n };\n }\n}\n"],
|
|
5
5
|
"mappings": ";;;;AAMA;AAAA,EAaE;AAAA,OACK;AAEP,SAAS,cAAc;AAMhB,MAAM,sBAAsB;AAAA,EACvB;AAAA,EACA;AAAA,EACA;AAAA,EACF,QAAmC,CAAC;AAAA,EACpC,aAAsB;AAAA,EACtB;AAAA,EACA,QAA0B;AAAA,IAChC,SAAS;AAAA,IACT,YAAY;AAAA,IACZ,WAAW;AAAA,IACX,QAAQ;AAAA,IACR,qBAAqB;AAAA,EACvB;AAAA,EAEA,YACE,IACA,SAAgC,CAAC,GACjC,aACA;AACA,SAAK,KAAK;AACV,SAAK,SAAS,EAAE,GAAG,uBAAuB,GAAG,OAAO;AACpD,SAAK,cAAc;AACnB,SAAK,iBAAiB;AAAA,EACxB;AAAA,EAEQ,mBAAyB;AAC/B,SAAK,GAAG,KAAK;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,KAgBZ;AAAA,EACH;AAAA;AAAA;AAAA;AAAA,EAKO,eAAe,OAAkC;AACtD,UAAM,YAAY,KAAK,IAAI;AAG3B,UAAM,gBAAgB,KAAK,2BAA2B,KAAK;AAG3D,UAAM,OAAO,KAAK,0BAA0B,MAAM,OAAO,aAAa;AAGtE,UAAM,SAAuB;AAAA,MAC3B,SAAS,MAAM,MAAM;AAAA,MACrB,WAAW,MAAM,MAAM;AAAA,MACvB,WAAW,MAAM,MAAM;AAAA,MACvB;AAAA,MACA,QAAQ;AAAA,MACR;AAAA,MACA,SAAS;AAAA,MACT,WAAW,KAAK,IAAI;AAAA,MACpB,WAAW,KAAK,IAAI;AAAA,IACtB;AAGA,QAAI,KAAK,OAAO,sBAAsB,KAAK,aAAa;AACtD,WAAK,qBAAqB;AAAA,QACxB,SAAS,MAAM,MAAM;AAAA,QACrB,WAAW,MAAM,MAAM;AAAA,QACvB,WAAW,MAAM,MAAM;AAAA,QACvB,UAAU,KAAK,kBAAkB,KAAK;AAAA,QACtC,WAAW,KAAK,IAAI;AAAA,QACpB,YAAY;AAAA,QACZ,YAAY,KAAK,OAAO;AAAA,MAC1B,CAAC;AACD,aAAO,SAAS;AAAA,IAClB;AAEA,WAAO,MAAM,kCAAkC;AAAA,MAC7C,SAAS,MAAM,MAAM;AAAA,MACrB,YAAY,KAAK,IAAI,IAAI;AAAA,MACzB,UAAU,OAAO,WAAW;AAAA,IAC9B,CAAC;AAED,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKQ,2BAA2B,OAAyC;AAC1E,UAAM,EAAE,OAAO,SAAS,OAAO,IAAI;AAGnC,UAAM,gBAAgB,KAAK,qBAAqB,MAAM;AAGtD,UAAM,WAAW,KAAK,mBAAmB,MAAM;AAG/C,UAAM,oBAAoB,KAAK,cAAc,MAAM;AAGnD,UAAM,YAAY,OAAO,OAAO,CAAC,MAAM,EAAE,eAAe,WAAW;AACnE,UAAM,kBAA0C,CAAC;AACjD,eAAW,MAAM,WAAW;AAC1B,YAAM,WAAW,GAAG,SAAS,aAAa;AAC1C,sBAAgB,QAAQ,KAAK,gBAAgB,QAAQ,KAAK,KAAK;AAAA,IACjE;AAGA,UAAM,eAAuC,CAAC;AAC9C,eAAW,UAAU,SAAS;AAC5B,mBAAa,OAAO,IAAI,KAAK,aAAa,OAAO,IAAI,KAAK,KAAK;AAAA,IACjE;AAGA,UAAM,YAAY,QACf,OAAO,CAAC,MAAM,EAAE,SAAS,UAAU,EACnC,IAAI,CAAC,MAAM,EAAE,IAAI;AACpB,UAAM,cAAc,QACjB,OAAO,CAAC,MAAM,EAAE,SAAS,YAAY,EACrC,IAAI,CAAC,MAAM,EAAE,IAAI;AACpB,UAAM,QAAQ,QAAQ,OAAO,CAAC,MAAM,EAAE,SAAS,MAAM,EAAE,IAAI,CAAC,MAAM,EAAE,IAAI;AAGxE,UAAM,kBAAkB,MAAM,YAC1B,MAAM,YAAY,MAAM,aACxB,KAAK,MAAM,KAAK,IAAI,IAAI,MAAO,MAAM,UAAU;AAGnD,UAAM,aAAa,KAAK,oBAAoB,OAAO,iBAAiB;AAEpE,WAAO;AAAA,MACL;AAAA,MACA;AAAA,MACA;AAAA,MACA,eAAe,UAAU;AAAA,MACzB;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAAA,EAEQ,qBAAqB,QAAqC;AAChE,UAAM,UAAU,oBAAI,IAA8B;AAElD,eAAW,SAAS,QAAQ;AAC1B,UACE,MAAM,eAAe,eACrB,MAAM,eAAe,eACrB;AACA,cAAM,UAAU,MAAM,WAAW,CAAC;AAGlC,cAAM,WAAW,QAAQ,aAAa,QAAQ,QAAQ,QAAQ;AAC9D,YAAI,YAAY,OAAO,aAAa,UAAU;AAC5C,gBAAM,WAAW,QAAQ,aAAa;AACtC,cAAI,YAA2C;AAE/C,cACE,SAAS,SAAS,OAAO,KACzB,SAAS,SAAS,MAAM,KACxB,SAAS,SAAS,QAAQ,GAC1B;AACA,wBAAY;AAAA,UACd,WACE,SAAS,SAAS,QAAQ,KAC1B,SAAS,SAAS,QAAQ,GAC1B;AACA,wBAAY;AAAA,UACd,WACE,SAAS,SAAS,MAAM,KACxB,SAAS,SAAS,KAAK,KACvB,SAAS,SAAS,MAAM,GACxB;AACA,wBAAY;AAAA,UACd;AAEA,gBAAM,WAAW,QAAQ,IAAI,QAAQ;AACrC,cACE,CAAC,YACD,KAAK,kBAAkB,SAAS,IAC9B,KAAK,kBAAkB,SAAS,SAAS,GAC3C;AACA,oBAAQ,IAAI,UAAU;AAAA,cACpB,MAAM;AAAA,cACN;AAAA,cACA,cAAc,QAAQ;AAAA,YACxB,CAAC;AAAA,UACH;AAAA,QACF;AAGA,cAAM,gBAAgB,QAAQ,iBAAiB,QAAQ;AACvD,YAAI,MAAM,QAAQ,aAAa,GAAG;AAChC,qBAAW,KAAK,eAAe;AAC7B,gBAAI,OAAO,MAAM,YAAY,CAAC,QAAQ,IAAI,CAAC,GAAG;AAC5C,sBAAQ,IAAI,GAAG,EAAE,MAAM,GAAG,WAAW,SAAS,CAAC;AAAA,YACjD;AAAA,UACF;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAEA,WAAO,MAAM,KAAK,QAAQ,OAAO,CAAC;AAAA,EACpC;AAAA,EAEQ,kBAAkB,IAA2C;AACnE,UAAM,aAAa,EAAE,QAAQ,GAAG,QAAQ,GAAG,QAAQ,GAAG,MAAM,EAAE;AAC9D,WAAO,WAAW,EAAE,KAAK;AAAA,EAC3B;AAAA,EAEQ,mBAAmB,QAA+B;AACxD,UAAM,QAAsB,CAAC;AAE7B,eAAW,SAAS,QAAQ;AAC1B,YAAM,UAAU,MAAM,WAAW,CAAC;AAGlC,UACE,QAAQ,WAAW,SAAS,MAAM,KAClC,QA
AQ,SAAS,SAAS,MAAM,KAChC,QAAQ,WACR;AACA,cAAM,WAAW,QAAQ,aAAa,QAAQ,WAAW;AACzD,cAAM,UAAU,QAAQ,YAAY,SAAS,CAAC,QAAQ;AAEtD,cAAM,KAAK;AAAA,UACT,MAAM;AAAA,UACN,QAAQ,UAAU,WAAW;AAAA,UAC7B,UAAU,QAAQ;AAAA,QACpB,CAAC;AAAA,MACH;AAGA,YAAM,SAAS,QAAQ,UAAU,QAAQ;AACzC,UAAI,OAAO,WAAW,UAAU;AAE9B,cAAM,YAAY,OAAO,MAAM,+BAA+B;AAC9D,cAAM,YAAY,OAAO,MAAM,+BAA+B;AAE9D,YAAI,aAAa,WAAW;AAC1B,gBAAM,SAAS,YAAY,SAAS,UAAU,CAAC,GAAG,EAAE,IAAI;AACxD,gBAAM,SAAS,YAAY,SAAS,UAAU,CAAC,GAAG,EAAE,IAAI;AAExD,cAAI,SAAS,GAAG;AACd,kBAAM,KAAK,EAAE,MAAM,GAAG,MAAM,UAAU,QAAQ,SAAS,CAAC;AAAA,UAC1D;AACA,cAAI,SAAS,GAAG;AACd,kBAAM,KAAK,EAAE,MAAM,GAAG,MAAM,UAAU,QAAQ,SAAS,CAAC;AAAA,UAC1D;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAEA,WAAO;AAAA,EACT;AAAA,EAEQ,cAAc,QAA8B;AAClD,UAAM,WAAW,oBAAI,IAAuB;AAE5C,eAAW,SAAS,QAAQ;AAC1B,YAAM,UAAU,MAAM,WAAW,CAAC;AAGlC,UAAI,QAAQ,SAAS,QAAQ,YAAY,OAAO;AAC9C,cAAM,YAAY,QAAQ,cAAc;AACxC,cAAM,UACJ,QAAQ,OAAO,WAAW,QAAQ,SAAS;AAE7C,cAAM,MAAM,GAAG,SAAS,IAAI,QAAQ,UAAU,GAAG,EAAE,CAAC;AACpD,cAAM,WAAW,SAAS,IAAI,GAAG;AAEjC,YAAI,UAAU;AACZ,mBAAS;AAAA,QACX,OAAO;AACL,mBAAS,IAAI,KAAK;AAAA,YAChB,MAAM;AAAA,YACN,SAAS,OAAO,OAAO,EAAE,UAAU,GAAG,GAAG;AAAA,YACzC,UAAU;AAAA,YACV,OAAO;AAAA,UACT,CAAC;AAAA,QACH;AAAA,MACF;AAAA,IACF;AAIA,WAAO,MAAM,KAAK,SAAS,OAAO,CAAC;AAAA,EACrC;AAAA,EAEQ,oBACN,OACA,QACmC;AAEnC,UAAM,UAAU,MAAM,WAAW,CAAC;AAClC,QAAI,QAAQ,aAAa,QAAQ,WAAW,YAAa,QAAO;AAChE,QAAI,OAAO,WAAW,EAAG,QAAO;AAChC,QAAI,OAAO,KAAK,CAAC,MAAM,CAAC,EAAE,QAAQ,EAAG,QAAO;AAC5C,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKQ,0BACN,OACA,KACQ;AACR,UAAM,QAAkB,CAAC;AAGzB,UAAM,KAAK,MAAM,MAAM,IAAI,KAAK,MAAM,IAAI,GAAG;AAC7C,UAAM,KAAK,WAAW,IAAI,UAAU,EAAE;AAGtC,QAAI,IAAI,kBAAkB,GAAG;AAC3B,YAAM,OAAO,KAAK,MAAM,IAAI,kBAAkB,EAAE;AAChD,YAAM,OAAO,IAAI,kBAAkB;AACnC,YAAM,KAAK,aAAa,IAAI,KAAK,IAAI,GAAG;AAAA,IAC1C;AAGA,QAAI,IAAI,cAAc,SAAS,GAAG;AAChC,YAAM,KAAK;AAAA,sBAAyB,IAAI,cAAc,MAAM,GAAG;AAC/D,iBAAW,KAAK,IAAI,cAAc,MAAM,GAAG,EAAE,GAAG;AAC9C,cAAM,KAAK,KAAK,EAAE,SAAS,KAAK,EAAE,IAAI,EAAE;AAAA,MAC1C;AACA,UAAI,IAAI,cAAc,SAAS,IAAI;AACjC,cAAM,KAAK,YAAY,IAAI,cAAc,SAAS,EAAE,OAAO;AAAA,MAC7D;AAAA,IACF;AAGA,QAAI,IAAI,gBAAgB,GAAG;AACzB,YAAM,KAAK;AAAA,kBAAqB,IAAI,aAAa,GAAG;AACpD,YAAM,SAAS,OAAO,QAAQ,IAAI,eAAe,EAC9C,KAAK,CAAC,GAAG,MAAM,EAAE,CAAC,IAAI,EAAE,CAAC,CAAC,EAC1B,MAAM,GAAG,CAAC;AACb,iBAAW,CAAC,MAAM,KAAK,KAAK,QAAQ;AAClC,cAAM,KAAK,KAAK,IAAI,KAAK,KAAK,EAAE;AAAA,MAClC;AAAA,IACF;AAGA,QAAI,IAAI,UAAU,SAAS,GAAG;AAC5B,YAAM,KAAK;AAAA,iBAAoB,IAAI,UAAU,MAAM,GAAG;AACtD,iBAAW,KAAK,IAAI,UAAU,MAAM,GAAG,CAAC,GAAG;AACzC,cAAM,KAAK,KAAK,CAAC,EAAE;AAAA,MACrB;AAAA,IACF;AAGA,QAAI,IAAI,YAAY,SAAS,GAAG;AAC9B,YAAM,KAAK;AAAA,mBAAsB,IAAI,YAAY,MAAM,GAAG;AAC1D,iBAAW,KAAK,IAAI,YAAY,MAAM,GAAG,CAAC,GAAG;AAC3C,cAAM,KAAK,KAAK,CAAC,EAAE;AAAA,MACrB;AAAA,IACF;AAGA,QAAI,IAAI,kBAAkB,SAAS,GAAG;AACpC,YAAM,KAAK;AAAA,cAAiB,IAAI,kBAAkB,MAAM,GAAG;AAC3D,iBAAW,KAAK,IAAI,kBAAkB,MAAM,GAAG,CAAC,GAAG;AACjD,cAAM,KAAK,KAAK,EAAE,IAAI,KAAK,EAAE,QAAQ,UAAU,GAAG,EAAE,CAAC,EAAE;AAAA,MACzD;AAAA,IACF;AAGA,QAAI,IAAI,SAAS,SAAS,GAAG;AAC3B,YAAM,SAAS,IAAI,SAAS,OAAO,CAAC,MAAM,EAAE,WAAW,QAAQ,EAAE;AACjE,YAAM,SAAS,IAAI,SAAS,OAAO,CAAC,MAAM,EAAE,WAAW,QAAQ,EAAE;AACjE,YAAM,KAAK;AAAA,aAAgB,MAAM,YAAY,MAAM,SAAS;AAAA,IAC9D;AAEA,WAAO,MAAM,KAAK,IAAI;AAAA,EACxB;AAAA;AAAA;AAAA;AAAA,EAKQ,qBAAqB,SAAwC;AACnE,QAAI;AACF,WAAK,GACF;AAAA,QACC;AAAA;AAAA;AAAA;AAAA;AAAA,MAKF,EACC;AAAA,QACC,QAAQ;AAAA,QACR,QAAQ;AAAA,QACR,QAAQ;AAAA,QACR,QAAQ;AAAA,QACR,QAAQ;AAAA,QACR,KAAK,MAAM,QAAQ,YAAY,GAAI;AAAA,MACrC;AAEF,WAAK,MAAM;AACX,WAAK,uBAAuB;AAE5B,aAAO,MAAM,yCAAyC;AAAA,QACpD,SAAS,QAAQ;AAAA,QACjB,UAAU,QAAQ;AAAA,MACpB,CAAC;AAAA,IACH,SAAS,OAAY;AACnB,aAAO,MAAM,qCAAqC,KAAK;AAAA,IACzD;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,kB
ACN,OACqC;AACrC,UAAM,EAAE,OAAO,SAAS,OAAO,IAAI;AAGnC,UAAM,gBAAgB,QAAQ,OAAO,CAAC,MAAM,EAAE,SAAS,UAAU,EAAE;AACnE,UAAM,aAAa,OAAO;AAAA,MACxB,CAAC,MAAM,EAAE,SAAS,SAAS,EAAE,SAAS,YAAY;AAAA,IACpD,EAAE;AAEF,QAAI,iBAAiB,KAAK,cAAc,EAAG,QAAO;AAClD,QAAI,iBAAiB,KAAK,OAAO,UAAU,GAAI,QAAO;AACtD,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKQ,yBAA+B;AACrC,QAAI,KAAK,WAAW;AAClB,mBAAa,KAAK,SAAS;AAAA,IAC7B;AAEA,SAAK,YAAY,WAAW,MAAM;AAChC,WAAK,aAAa;AAAA,IACpB,GAAG,KAAK,OAAO,eAAe;AAAA,EAChC;AAAA;AAAA;AAAA;AAAA,EAKA,MAAa,eAA8B;AACzC,QAAI,KAAK,cAAc,CAAC,KAAK,YAAa;AAE1C,SAAK,aAAa;AAElB,QAAI;AAEF,YAAM,UAAU,KAAK,GAClB;AAAA,QACC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,MAYF,EACC,IAAI,KAAK,OAAO,SAAS;AAE5B,iBAAW,QAAQ,SAAS;AAC1B,cAAM,KAAK,iBAAiB,IAAI;AAAA,MAClC;AAAA,IACF,UAAE;AACA,WAAK,aAAa;AAAA,IACpB;AAAA,EACF;AAAA,EAEA,MAAc,iBAAiB,MAA0B;AACvD,UAAM,YAAY,KAAK,IAAI;AAE3B,QAAI;AAEF,WAAK,GACF;AAAA,QACC;AAAA,MACF,EACC,IAAI,KAAK,QAAQ;AAEpB,WAAK,MAAM;AACX,WAAK,MAAM;AAGX,YAAM,QAAQ,KAAK,GAChB,QAAQ,yCAAyC,EACjD,IAAI,KAAK,QAAQ;AAEpB,UAAI,CAAC,OAAO;AACV,cAAM,IAAI,MAAM,oBAAoB,KAAK,QAAQ,EAAE;AAAA,MACrD;AAEA,YAAM,UAAU,KAAK,GAClB,QAAQ,0CAA0C,EAClD,IAAI,KAAK,QAAQ;AAEpB,YAAM,SAAS,KAAK,GACjB,QAAQ,yDAAyD,EACjE,IAAI,KAAK,QAAQ;AAGpB,YAAM,cAAqB;AAAA,QACzB,GAAG;AAAA,QACH,QAAQ,KAAK,MAAM,MAAM,UAAU,IAAI;AAAA,QACvC,SAAS,KAAK,MAAM,MAAM,WAAW,IAAI;AAAA,QACzC,aAAa,KAAK,MAAM,MAAM,eAAe,IAAI;AAAA,MACnD;AAEA,YAAM,QAAqB;AAAA,QACzB,OAAO;AAAA,QACP,SAAS,QAAQ,IAAI,CAAC,OAAY;AAAA,UAChC,GAAG;AAAA,UACH,UAAU,KAAK,MAAM,EAAE,YAAY,IAAI;AAAA,QACzC,EAAE;AAAA,QACF,QAAQ,OAAO,IAAI,CAAC,OAAY;AAAA,UAC9B,GAAG;AAAA,UACH,SAAS,KAAK,MAAM,EAAE,WAAW,IAAI;AAAA,QACvC,EAAE;AAAA,MACJ;AAGA,YAAM,gBAAgB,KAAK,2BAA2B,KAAK;AAG3D,YAAM,cAAc,MAAM,KAAK,YAAa;AAAA,QAC1C;AAAA,QACA;AAAA,QACA,KAAK,OAAO;AAAA,MACd;AAGA,YAAM,iBAAiB,YAAY,eAAe,CAAC;AACnD,YAAM,gBAAgB;AAAA,QACpB,GAAG;AAAA,QACH;AAAA,QACA,QAAQ;AAAA,QACR,WAAW,KAAK,IAAI;AAAA,MACtB;AAGA,YAAM,eAAe,KAAK;AAAA,QACxB;AAAA,QACA;AAAA,QACA;AAAA,MACF;AAEA,WAAK,GACF;AAAA,QACC;AAAA;AAAA;AAAA;AAAA;AAAA,MAKF,EACC,IAAI,KAAK,UAAU,aAAa,GAAG,cAAc,KAAK,QAAQ;AAGjE,WAAK,GACF;AAAA,QACC;AAAA,MACF,EACC,IAAI,KAAK,QAAQ;AAEpB,WAAK,MAAM;AACX,WAAK,MAAM;AAGX,YAAM,iBAAiB,KAAK,IAAI,IAAI;AACpC,WAAK,MAAM,uBACR,KAAK,MAAM,uBAAuB,KAAK,MAAM,YAAY,KACxD,kBACF,KAAK,MAAM;AAEb,aAAO,KAAK,uBAAuB;AAAA,QACjC,SAAS,KAAK;AAAA,QACd,kBAAkB;AAAA,MACpB,CAAC;AAAA,IACH,SAAS,OAAY;AAEnB,YAAM,gBAAgB,KAAK,cAAc;AAEzC,UAAI,gBAAgB,KAAK,OAAO,YAAY;AAC1C,aAAK,GACF;AAAA,UACC;AAAA;AAAA;AAAA;AAAA;AAAA,QAKF,EACC,IAAI,eAAe,MAAM,SAAS,KAAK,QAAQ;AAElD,aAAK,MAAM;AACX,aAAK,MAAM;AAEX,eAAO,KAAK,2CAA2C;AAAA,UACrD,SAAS,KAAK;AAAA,UACd,YAAY;AAAA,UACZ,OAAO,MAAM;AAAA,QACf,CAAC;AAAA,MACH,OAAO;AAEL,aAAK,GACF;AAAA,UACC;AAAA;AAAA;AAAA;AAAA;AAAA,QAKF,EACC,IAAI,MAAM,SAAS,KAAK,QAAQ;AAEnC,aAAK,MAAM;AACX,aAAK,MAAM;AAEX,eAAO,MAAM,2CAA2C,OAAO;AAAA,UAC7D,SAAS,KAAK;AAAA,QAChB,CAAC;AAAA,MACH;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,qBACN,OACA,KACA,IACQ;AACR,UAAM,QAAkB,CAAC;AAGzB,UAAM,KAAK,KAAK,0BAA0B,OAAO,GAAG,CAAC;AAGrD,UAAM,KAAK;AAAA,IAAO;AAClB,UAAM,KAAK,kBAAkB,GAAG,OAAO,EAAE;AAEzC,QAAI,GAAG,SAAS;AACd,YAAM,KAAK,gBAAgB,GAAG,OAAO,EAAE;AAAA,IACzC;AAEA,QAAI,GAAG,cAAc;AACnB,YAAM,KAAK,aAAa,GAAG,YAAY,EAAE;AAAA,IAC3C;AAEA,WAAO,MAAM,KAAK,IAAI;AAAA,EACxB;AAAA;AAAA;AAAA;AAAA,EAKO,WAA6B;AAClC,WAAO,EAAE,GAAG,KAAK,MAAM;AAAA,EACzB;AAAA;AAAA;AAAA;AAAA,EAKO,eAAe,UAAmC;AACvD,SAAK,cAAc;AAAA,EACrB;AAAA;AAAA;AAAA;AAAA,EAKA,MAAa,oBAAmC;AAC9C,QAAI,KAAK,WAAW;AAClB,mBAAa,KAAK,SAAS;AAAA,IAC7B;AACA,UAAM,KAAK,aAAa;AAAA,EAC1B;AAAA;AAAA;AAAA;AAAA,EAKO,UAAU,SAAsC;AACrD,UAAM,QAAQ,KAAK,GAChB,QAAQ,yCAAyC,EACjD,IAAI,OAAO;AAEd,QAAI,CAAC,MAAO,QAAO;AAEnB,UAAM,aAAa
,KAAK,MAAM,MAAM,eAAe,IAAI;AACvD,UAAM,UAAU,KAAK,GAClB,QAAQ,0CAA0C,EAClD,IAAI,OAAO;AACd,UAAM,SAAS,KAAK,GACjB,QAAQ,yCAAyC,EACjD,IAAI,OAAO;AAEd,UAAM,cAAqB;AAAA,MACzB,GAAG;AAAA,MACH,QAAQ,KAAK,MAAM,MAAM,UAAU,IAAI;AAAA,MACvC,SAAS,KAAK,MAAM,MAAM,WAAW,IAAI;AAAA,MACzC,aAAa;AAAA,IACf;AAEA,UAAM,QAAqB;AAAA,MACzB,OAAO;AAAA,MACP,SAAS,QAAQ,IAAI,CAAC,OAAO;AAAA,QAC3B,GAAG;AAAA,QACH,UAAU,KAAK,MAAM,EAAE,YAAY,IAAI;AAAA,MACzC,EAAE;AAAA,MACF,QAAQ,OAAO,IAAI,CAAC,OAAO;AAAA,QACzB,GAAG;AAAA,QACH,SAAS,KAAK,MAAM,EAAE,WAAW,IAAI;AAAA,MACvC,EAAE;AAAA,IACJ;AAEA,UAAM,gBAAgB,KAAK,2BAA2B,KAAK;AAG3D,UAAM,YAAY,KAAK,GACpB,QAAQ,oDAAoD,EAC5D,IAAI,OAAO;AAEd,QAAI,SAAuB;AAC3B,QAAI,WAAW,aAAa;AAC1B,eAAS;AAAA,IACX,WAAW,WAAW;AACpB,eACE,UAAU,WAAW,eACjB,kBACA,UAAU,WAAW,WACnB,cACA;AAAA,IACV;AAEA,WAAO;AAAA,MACL,SAAS,MAAM;AAAA,MACf,WAAW,MAAM;AAAA,MACjB,WAAW,MAAM;AAAA,MACjB;AAAA,MACA,aAAa,WAAW;AAAA,MACxB;AAAA,MACA,MACE,MAAM,eACN,KAAK,0BAA0B,aAAa,aAAa;AAAA,MAC3D,SAAS;AAAA,MACT,WAAW,MAAM,aAAa;AAAA,MAC9B,WAAW,WAAW,aAAa,MAAM,aAAa;AAAA,IACxD;AAAA,EACF;AACF;",
"names": []
}
@@ -1,7 +1,7 @@
{
"version": 3,
"sources": ["../../../src/core/digest/types.ts"],
-
"sourcesContent": ["/**\n * Types for Hybrid Digest Generation System\n * 80% deterministic extraction, 20% AI-generated review/insights\n */\n\nimport { Frame, Anchor, Event } from '../context/
+
"sourcesContent": ["/**\n * Types for Hybrid Digest Generation System\n * 80% deterministic extraction, 20% AI-generated review/insights\n */\n\nimport { Frame, Anchor, Event } from '../context/index.js';\n\n/**\n * Deterministic fields extracted directly from frame data (60%)\n */\nexport interface DeterministicDigest {\n /** Files that were modified during this frame */\n filesModified: FileModification[];\n /** Tests that were run */\n testsRun: TestResult[];\n /** Errors encountered */\n errorsEncountered: ErrorInfo[];\n /** Number of tool calls made */\n toolCallCount: number;\n /** Tool calls by type */\n toolCallsByType: Record<string, number>;\n /** Frame duration in seconds */\n durationSeconds: number;\n /** Exit status */\n exitStatus: 'success' | 'failure' | 'partial' | 'cancelled';\n /** Anchors by type */\n anchorCounts: Record<string, number>;\n /** Key decisions made (extracted from DECISION anchors) */\n decisions: string[];\n /** Constraints established (extracted from CONSTRAINT anchors) */\n constraints: string[];\n /** Risks identified (extracted from RISK anchors) */\n risks: string[];\n}\n\nexport interface FileModification {\n path: string;\n operation: 'create' | 'modify' | 'delete' | 'read';\n linesChanged?: number;\n}\n\nexport interface TestResult {\n name: string;\n status: 'passed' | 'failed' | 'skipped';\n duration?: number;\n}\n\nexport interface ErrorInfo {\n type: string;\n message: string;\n resolved: boolean;\n count: number;\n}\n\n/**\n * AI-generated review fields (20%)\n * Focused on high-value insights only\n */\nexport interface AIGeneratedDigest {\n /** One-line summary of what was accomplished */\n summary: string;\n /** Key insight or learning (if any) */\n insight?: string;\n /** Potential issue or risk spotted */\n flaggedIssue?: string;\n /** Generated at timestamp */\n generatedAt: number;\n /** Model used for generation */\n modelUsed?: string;\n /** Tokens used */\n tokensUsed?: number;\n}\n\n/**\n * Complete hybrid digest combining both approaches\n */\nexport interface HybridDigest {\n /** Frame identifier */\n frameId: string;\n /** Frame name/goal */\n frameName: string;\n /** Frame type */\n frameType: string;\n /** Deterministic fields (always available) */\n deterministic: DeterministicDigest;\n /** AI-generated fields (may be pending) */\n aiGenerated?: AIGeneratedDigest;\n /** Processing status */\n status: DigestStatus;\n /** Human-readable text representation */\n text: string;\n /** Version for schema evolution */\n version: number;\n /** Created timestamp */\n createdAt: number;\n /** Last updated timestamp */\n updatedAt: number;\n}\n\nexport type DigestStatus =\n | 'deterministic_only' // Only deterministic fields populated\n | 'ai_pending' // Queued for AI generation\n | 'ai_processing' // Currently being processed by AI\n | 'complete' // Both deterministic and AI fields populated\n | 'ai_failed'; // AI generation failed, falling back to deterministic\n\n/**\n * Digest generation request for the queue\n */\nexport interface DigestGenerationRequest {\n frameId: string;\n frameName: string;\n frameType: string;\n priority: 'low' | 'normal' | 'high';\n createdAt: number;\n retryCount: number;\n maxRetries: number;\n}\n\n/**\n * Configuration for the digest generator\n */\nexport interface DigestConfig {\n /** Enable AI generation (can be disabled for deterministic-only mode) */\n enableAIGeneration: boolean;\n /** Maximum tokens for AI summary */\n maxTokens: number;\n /** Batch size for idle processing */\n batchSize: 
number;\n /** Idle threshold in ms before processing queue */\n idleThresholdMs: number;\n /** Maximum retries for failed AI generation */\n maxRetries: number;\n /** Retry delay in ms */\n retryDelayMs: number;\n /** LLM provider configuration */\n llmConfig: {\n provider: 'anthropic' | 'openai' | 'local' | 'none';\n model: string;\n temperature: number;\n };\n}\n\nexport const DEFAULT_DIGEST_CONFIG: DigestConfig = {\n enableAIGeneration: true,\n maxTokens: 100, // Reduced for 20% AI contribution\n batchSize: 10, // Process more at once since smaller\n idleThresholdMs: 3000, // 3 seconds of idle time\n maxRetries: 2,\n retryDelayMs: 1000,\n llmConfig: {\n provider: 'anthropic',\n model: 'claude-3-haiku-20240307',\n temperature: 0.2, // Lower for more consistent output\n },\n};\n\n/**\n * Input for digest generation\n */\nexport interface DigestInput {\n frame: Frame;\n anchors: Anchor[];\n events: Event[];\n parentDigest?: HybridDigest;\n}\n\n/**\n * LLM provider interface for AI digest generation\n */\nexport interface DigestLLMProvider {\n generateSummary(\n input: DigestInput,\n deterministic: DeterministicDigest,\n maxTokens: number\n ): Promise<AIGeneratedDigest>;\n}\n\n/**\n * Digest queue statistics\n */\nexport interface DigestQueueStats {\n pending: number;\n processing: number;\n completed: number;\n failed: number;\n avgProcessingTimeMs: number;\n}\n"],
"mappings": ";;;;AA+IO,MAAM,wBAAsC;AAAA,EACjD,oBAAoB;AAAA,EACpB,WAAW;AAAA;AAAA,EACX,WAAW;AAAA;AAAA,EACX,iBAAiB;AAAA;AAAA,EACjB,YAAY;AAAA,EACZ,cAAc;AAAA,EACd,WAAW;AAAA,IACT,UAAU;AAAA,IACV,OAAO;AAAA,IACP,aAAa;AAAA;AAAA,EACf;AACF;",
"names": []
}
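The types.ts source restored above is almost entirely type declarations; the one runtime value is DEFAULT_DIGEST_CONFIG (Anthropic claude-3-haiku, 100-token summaries, 10-item batches, 3 s idle threshold). A minimal sketch of overriding it for deterministic-only operation; the relative import path is an assumption taken from the file layout in this diff, not an API the package documents here:

import { DEFAULT_DIGEST_CONFIG, DigestConfig } from './dist/core/digest/types.js'; // path assumed from this diff

// Keep the deterministic extraction pipeline but skip the AI pass entirely.
const deterministicOnly: DigestConfig = {
  ...DEFAULT_DIGEST_CONFIG,
  enableAIGeneration: false,
  llmConfig: { ...DEFAULT_DIGEST_CONFIG.llmConfig, provider: 'none' },
};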
@@ -12,6 +12,7 @@ var ErrorCode = /* @__PURE__ */ ((ErrorCode2) => {
   ErrorCode2["DB_INSERT_FAILED"] = "DB_007";
   ErrorCode2["DB_UPDATE_FAILED"] = "DB_008";
   ErrorCode2["DB_DELETE_FAILED"] = "DB_009";
+  ErrorCode2["DB_CORRUPTION"] = "DB_010";
   ErrorCode2["FRAME_NOT_FOUND"] = "FRAME_001";
   ErrorCode2["FRAME_INVALID_STATE"] = "FRAME_002";
   ErrorCode2["FRAME_PARENT_NOT_FOUND"] = "FRAME_003";
@@ -47,6 +48,18 @@ var ErrorCode = /* @__PURE__ */ ((ErrorCode2) => {
   ErrorCode2["RESOURCE_EXHAUSTED"] = "SYS_006";
   ErrorCode2["SERVICE_UNAVAILABLE"] = "SYS_007";
   ErrorCode2["SYSTEM_INIT_FAILED"] = "SYS_008";
+  ErrorCode2["UNKNOWN_ERROR"] = "SYS_009";
+  ErrorCode2["OPERATION_TIMEOUT"] = "SYS_010";
+  ErrorCode2["AUTH_FAILED"] = "AUTH_001";
+  ErrorCode2["TOKEN_EXPIRED"] = "AUTH_002";
+  ErrorCode2["INVALID_CREDENTIALS"] = "AUTH_003";
+  ErrorCode2["FILE_NOT_FOUND"] = "FS_001";
+  ErrorCode2["DISK_FULL"] = "FS_002";
+  ErrorCode2["NOT_GIT_REPO"] = "GIT_001";
+  ErrorCode2["GIT_COMMAND_FAILED"] = "GIT_002";
+  ErrorCode2["INVALID_BRANCH"] = "GIT_003";
+  ErrorCode2["NETWORK_ERROR"] = "NET_001";
+  ErrorCode2["API_ERROR"] = "NET_002";
   ErrorCode2["STACK_CONTEXT_NOT_FOUND"] = "COLLAB_001";
   ErrorCode2["HANDOFF_REQUEST_EXPIRED"] = "COLLAB_002";
   ErrorCode2["MERGE_CONFLICT_UNRESOLVABLE"] = "COLLAB_003";
@@ -243,9 +256,241 @@ function createErrorHandler(defaultContext) {
     );
   };
 }
+function getUserFriendlyMessage(code) {
+  switch (code) {
+    // Auth errors
+    case "AUTH_001" /* AUTH_FAILED */:
+      return "Authentication failed. Please check your credentials and try again.";
+    case "AUTH_002" /* TOKEN_EXPIRED */:
+      return "Your session has expired. Please log in again.";
+    case "AUTH_003" /* INVALID_CREDENTIALS */:
+      return "Invalid credentials provided. Please check and try again.";
+    // File system errors
+    case "FS_001" /* FILE_NOT_FOUND */:
+      return "The requested file or directory was not found.";
+    case "SYS_005" /* PERMISSION_DENIED */:
+      return "Permission denied. Please check file permissions or run with appropriate privileges.";
+    case "FS_002" /* DISK_FULL */:
+      return "Insufficient disk space. Please free up space and try again.";
+    // Git errors
+    case "GIT_001" /* NOT_GIT_REPO */:
+      return "This command requires a git repository. Please run it from within a git repository.";
+    case "GIT_002" /* GIT_COMMAND_FAILED */:
+      return "Git operation failed. Please ensure your repository is in a valid state.";
+    case "GIT_003" /* INVALID_BRANCH */:
+      return "Invalid branch specified. Please check the branch name and try again.";
+    // Database errors
+    case "DB_001" /* DB_CONNECTION_FAILED */:
+      return "Database connection failed. Please try again or contact support if the issue persists.";
+    case "DB_002" /* DB_QUERY_FAILED */:
+      return "Database query failed. Please try again.";
+    case "DB_010" /* DB_CORRUPTION */:
+      return "Database appears to be corrupted. Please contact support.";
+    // Network errors
+    case "NET_001" /* NETWORK_ERROR */:
+      return "Network error. Please check your internet connection and try again.";
+    case "NET_002" /* API_ERROR */:
+      return "API request failed. Please try again later.";
+    case "SYS_010" /* OPERATION_TIMEOUT */:
+      return "The operation timed out. Please try again.";
+    // Validation errors
+    case "VAL_002" /* INVALID_INPUT */:
+      return "Invalid input provided. Please check your command and try again.";
+    case "VAL_001" /* VALIDATION_FAILED */:
+      return "Validation failed. Please check your input and try again.";
+    case "VAL_003" /* MISSING_REQUIRED_FIELD */:
+      return "A required field is missing. Please provide all required information.";
+    // System errors
+    case "SYS_004" /* CONFIGURATION_ERROR */:
+      return "Configuration error. Please check your settings.";
+    case "SYS_007" /* SERVICE_UNAVAILABLE */:
+      return "Service is temporarily unavailable. Please try again later.";
+    // Default
+    default:
+      return "An unexpected error occurred. Please try again or contact support.";
+  }
+}
+class ErrorHandler {
+  static retryMap = /* @__PURE__ */ new Map();
+  static MAX_RETRIES = 3;
+  /**
+   * Handle an error and exit the process
+   */
+  static handle(error, operation) {
+    if (error instanceof StackMemoryError) {
+      const userMessage = getUserFriendlyMessage(error.code);
+      console.error(`\u274C ${userMessage}`);
+      if (error.isRetryable) {
+        console.error("\u{1F4A1} This error may be recoverable. Please try again.");
+      }
+      process.exit(1);
+    }
+    if (error instanceof Error) {
+      let stackMemoryError;
+      if ("code" in error && typeof error.code === "string") {
+        stackMemoryError = ErrorHandler.fromNodeError(
+          error,
+          { operation }
+        );
+      } else {
+        stackMemoryError = wrapError(error, error.message, "COLLAB_005" /* OPERATION_FAILED */, {
+          operation
+        });
+      }
+      const userMessage = getUserFriendlyMessage(stackMemoryError.code);
+      console.error(`\u274C ${userMessage}`);
+      if (stackMemoryError.isRetryable) {
+        console.error("\u{1F4A1} This error may be recoverable. Please try again.");
+      }
+      process.exit(1);
+    }
+    console.error("\u274C An unexpected error occurred.");
+    process.exit(1);
+  }
+  /**
+   * Convert Node.js error to StackMemoryError
+   */
+  static fromNodeError(nodeError, context = {}) {
+    const code = nodeError.code;
+    switch (code) {
+      case "ENOENT":
+        return new SystemError(
+          `File or directory not found: ${nodeError.path}`,
+          "FS_001" /* FILE_NOT_FOUND */,
+          { ...context, path: nodeError.path },
+          nodeError
+        );
+      case "EACCES":
+      case "EPERM":
+        return new SystemError(
+          `Permission denied: ${nodeError.path}`,
+          "SYS_005" /* PERMISSION_DENIED */,
+          { ...context, path: nodeError.path },
+          nodeError
+        );
+      case "ENOSPC":
+        return new SystemError(
+          "No space left on device",
+          "FS_002" /* DISK_FULL */,
+          context,
+          nodeError
+        );
+      case "ETIMEDOUT":
+        return new SystemError(
+          "Operation timed out",
+          "SYS_010" /* OPERATION_TIMEOUT */,
+          context,
+          nodeError
+        );
+      default:
+        return new SystemError(
+          nodeError.message,
+          "SYS_009" /* UNKNOWN_ERROR */,
+          { ...context, nodeErrorCode: code },
+          nodeError
+        );
+    }
+  }
+  /**
+   * Safely execute an operation with optional fallback
+   */
+  static async safeExecute(operation, operationName, fallback) {
+    try {
+      return await operation();
+    } catch (error) {
+      if (fallback !== void 0) {
+        return fallback;
+      }
+      ErrorHandler.handle(error, operationName);
+    }
+  }
+  /**
+   * Execute with automatic retry and exponential backoff
+   */
+  static async withRetry(operation, operationName, maxRetries = ErrorHandler.MAX_RETRIES) {
+    let lastError;
+    for (let attempt = 1; attempt <= maxRetries; attempt++) {
+      try {
+        const result = await operation();
+        ErrorHandler.retryMap.delete(operationName);
+        return result;
+      } catch (error) {
+        lastError = error;
+        if (error instanceof StackMemoryError && !error.isRetryable) {
+          ErrorHandler.handle(error, operationName);
+        }
+        if (attempt === maxRetries) {
+          break;
+        }
+        const delay = Math.min(1e3 * Math.pow(2, attempt - 1), 5e3);
+        await new Promise((resolve) => setTimeout(resolve, delay));
+      }
+    }
+    ErrorHandler.handle(lastError, `${operationName} (after ${maxRetries} attempts)`);
+  }
+  /**
+   * Create a circuit breaker for an operation
+   */
+  static createCircuitBreaker(operation, operationName, threshold = 5) {
+    let failures = 0;
+    let lastFailure = 0;
+    const resetTimeout = 3e4;
+    return async () => {
+      const now = Date.now();
+      if (now - lastFailure > resetTimeout) {
+        failures = 0;
+      }
+      if (failures >= threshold) {
+        throw new SystemError(
+          `Circuit breaker open for '${operationName}'`,
+          "SYS_007" /* SERVICE_UNAVAILABLE */,
+          { operationName, failures, threshold }
+        );
+      }
+      try {
+        const result = await operation();
+        failures = 0;
+        return result;
+      } catch (error) {
+        failures++;
+        lastFailure = now;
+        throw error;
+      }
+    };
+  }
+}
+const validateInput = (value, name, validator) => {
+  if (!validator(value)) {
+    throw new ValidationError(
+      `Invalid ${name}: ${String(value)}`,
+      "VAL_002" /* INVALID_INPUT */,
+      { name, value }
+    );
+  }
+};
+const validateEmail = (email) => {
+  const emailRegex = /^[^\s@]+@[^\s@]+\.[^\s@]+$/;
+  if (!emailRegex.test(email) || email.length > 254) {
+    throw new ValidationError(
+      `Invalid email format: ${email}`,
+      "VAL_002" /* INVALID_INPUT */,
+      { email }
+    );
+  }
+};
+const validatePath = (filePath) => {
+  if (!filePath || filePath.includes("..") || filePath.includes("\0")) {
+    throw new ValidationError(
+      `Invalid path: ${filePath}`,
+      "VAL_002" /* INVALID_INPUT */,
+      { path: filePath }
+    );
+  }
+};
 export {
   DatabaseError,
   ErrorCode,
+  ErrorHandler,
   FrameError,
   IntegrationError,
   MCPError,
@@ -256,8 +501,12 @@ export {
   ValidationError,
   createErrorHandler,
   getErrorMessage,
+  getUserFriendlyMessage,
   isRetryableError,
   isStackMemoryError,
+  validateEmail,
+  validateInput,
+  validatePath,
   wrapError
 };
 //# sourceMappingURL=index.js.map
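The net effect of this hunk is that dist/core/errors/index.js now ships a CLI-oriented ErrorHandler alongside the existing error classes: handle() prints a user-friendly message and calls process.exit(1), withRetry() retries up to 3 times with exponential backoff (1 s, then 2 s, capped at 5 s), and createCircuitBreaker() opens after 5 consecutive failures and resets 30 s after the last one. A minimal usage sketch, assuming the module is consumed from the built path shown in this diff; fetchStatus is a hypothetical stand-in for a real operation:

import { ErrorHandler, ErrorCode, getUserFriendlyMessage } from './dist/core/errors/index.js'; // path assumed from this diff

const fetchStatus = async (): Promise<string> => 'ok'; // hypothetical stand-in operation

// Retried with 1 s / 2 s backoff; a non-retryable StackMemoryError or exhausted
// retries goes through ErrorHandler.handle(), which prints and exits the process.
const status = await ErrorHandler.withRetry(fetchStatus, 'fetch-status');
console.log(status);

// The same operation behind a circuit breaker: after 5 consecutive failures it
// throws SERVICE_UNAVAILABLE until 30 s have passed since the last failure.
const guardedFetch = ErrorHandler.createCircuitBreaker(fetchStatus, 'fetch-status');
await guardedFetch();

console.log(getUserFriendlyMessage(ErrorCode.OPERATION_TIMEOUT));
// "The operation timed out. Please try again."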
@@ -1,7 +1,7 @@
{
"version": 3,
"sources": ["../../../src/core/errors/index.ts"],
-
"sourcesContent": ["/**\n * Custom error classes for StackMemory\n * Provides a hierarchy of error types for better error handling and debugging\n */\n\nexport enum ErrorCode {\n // Database errors (1000-1999)\n DB_CONNECTION_FAILED = 'DB_001',\n DB_QUERY_FAILED = 'DB_002',\n DB_TRANSACTION_FAILED = 'DB_003',\n DB_MIGRATION_FAILED = 'DB_004',\n DB_CONSTRAINT_VIOLATION = 'DB_005',\n DB_SCHEMA_ERROR = 'DB_006',\n DB_INSERT_FAILED = 'DB_007',\n DB_UPDATE_FAILED = 'DB_008',\n DB_DELETE_FAILED = 'DB_009',\n\n // Frame errors (2000-2999)\n FRAME_NOT_FOUND = 'FRAME_001',\n FRAME_INVALID_STATE = 'FRAME_002',\n FRAME_PARENT_NOT_FOUND = 'FRAME_003',\n FRAME_CYCLE_DETECTED = 'FRAME_004',\n FRAME_ALREADY_CLOSED = 'FRAME_005',\n FRAME_INIT_FAILED = 'FRAME_006',\n FRAME_INVALID_INPUT = 'FRAME_007',\n FRAME_STACK_OVERFLOW = 'FRAME_008',\n\n // Task errors (3000-3999)\n TASK_NOT_FOUND = 'TASK_001',\n TASK_INVALID_STATE = 'TASK_002',\n TASK_DEPENDENCY_CONFLICT = 'TASK_003',\n TASK_CIRCULAR_DEPENDENCY = 'TASK_004',\n\n // Integration errors (4000-4999)\n LINEAR_AUTH_FAILED = 'LINEAR_001',\n LINEAR_API_ERROR = 'LINEAR_002',\n LINEAR_SYNC_FAILED = 'LINEAR_003',\n LINEAR_WEBHOOK_FAILED = 'LINEAR_004',\n\n // MCP errors (5000-5999)\n MCP_TOOL_NOT_FOUND = 'MCP_001',\n MCP_INVALID_PARAMS = 'MCP_002',\n MCP_EXECUTION_FAILED = 'MCP_003',\n MCP_RATE_LIMITED = 'MCP_004',\n\n // Project errors (6000-6999)\n PROJECT_NOT_FOUND = 'PROJECT_001',\n PROJECT_INVALID_PATH = 'PROJECT_002',\n PROJECT_GIT_ERROR = 'PROJECT_003',\n\n // Validation errors (7000-7999)\n VALIDATION_FAILED = 'VAL_001',\n INVALID_INPUT = 'VAL_002',\n MISSING_REQUIRED_FIELD = 'VAL_003',\n TYPE_MISMATCH = 'VAL_004',\n\n // System errors (8000-8999)\n INITIALIZATION_ERROR = 'SYS_001',\n NOT_FOUND = 'SYS_002',\n INTERNAL_ERROR = 'SYS_003',\n CONFIGURATION_ERROR = 'SYS_004',\n PERMISSION_DENIED = 'SYS_005',\n RESOURCE_EXHAUSTED = 'SYS_006',\n SERVICE_UNAVAILABLE = 'SYS_007',\n SYSTEM_INIT_FAILED = 'SYS_008',\n\n // Collaboration errors (9000-9999)\n STACK_CONTEXT_NOT_FOUND = 'COLLAB_001',\n HANDOFF_REQUEST_EXPIRED = 'COLLAB_002',\n MERGE_CONFLICT_UNRESOLVABLE = 'COLLAB_003',\n PERMISSION_VIOLATION = 'COLLAB_004',\n OPERATION_FAILED = 'COLLAB_005',\n OPERATION_EXPIRED = 'COLLAB_006',\n INVALID_STATE = 'COLLAB_007',\n RESOURCE_NOT_FOUND = 'COLLAB_008',\n HANDOFF_ALREADY_EXISTS = 'COLLAB_009',\n MERGE_SESSION_INVALID = 'COLLAB_010',\n STACK_SWITCH_FAILED = 'COLLAB_011',\n APPROVAL_TIMEOUT = 'COLLAB_012',\n CONFLICT_RESOLUTION_FAILED = 'COLLAB_013',\n TEAM_ACCESS_DENIED = 'COLLAB_014',\n STACK_LIMIT_EXCEEDED = 'COLLAB_015',\n}\n\nexport interface ErrorContext {\n [key: string]: unknown;\n}\n\nexport interface StackMemoryErrorOptions {\n code: ErrorCode;\n message: string;\n context?: ErrorContext;\n cause?: Error;\n isRetryable?: boolean;\n httpStatus?: number;\n}\n\n/**\n * Base error class for all StackMemory errors\n */\nexport class StackMemoryError extends Error {\n public readonly code: ErrorCode;\n public readonly context?: ErrorContext;\n public readonly cause?: Error;\n public readonly isRetryable: boolean;\n public readonly httpStatus: number;\n public readonly timestamp: Date;\n\n constructor(options: StackMemoryErrorOptions) {\n super(options.message);\n this.name = this.constructor.name;\n this.code = options.code;\n this.context = options.context;\n this.cause = options.cause;\n this.isRetryable = options.isRetryable ?? false;\n this.httpStatus = options.httpStatus ?? 
500;\n this.timestamp = new Date();\n\n // Maintains proper stack trace for where our error was thrown\n if (Error.captureStackTrace) {\n Error.captureStackTrace(this, this.constructor);\n }\n }\n\n toJSON(): Record<string, unknown> {\n return {\n name: this.name,\n code: this.code,\n message: this.message,\n context: this.context,\n isRetryable: this.isRetryable,\n httpStatus: this.httpStatus,\n timestamp: this.timestamp.toISOString(),\n stack: this.stack,\n cause: this.cause?.message,\n };\n }\n}\n\n/**\n * Database-related errors\n */\nexport class DatabaseError extends StackMemoryError {\n constructor(\n message: string,\n code: ErrorCode = ErrorCode.DB_QUERY_FAILED,\n context?: ErrorContext,\n cause?: Error\n ) {\n super({\n code,\n message,\n context,\n cause,\n isRetryable: code === ErrorCode.DB_CONNECTION_FAILED,\n httpStatus: 503,\n });\n }\n}\n\n/**\n * Frame-related errors\n */\nexport class FrameError extends StackMemoryError {\n constructor(\n message: string,\n code: ErrorCode = ErrorCode.FRAME_INVALID_STATE,\n context?: ErrorContext\n ) {\n super({\n code,\n message,\n context,\n isRetryable: false,\n httpStatus: 400,\n });\n }\n}\n\n/**\n * Task-related errors\n */\nexport class TaskError extends StackMemoryError {\n constructor(\n message: string,\n code: ErrorCode = ErrorCode.TASK_INVALID_STATE,\n context?: ErrorContext\n ) {\n super({\n code,\n message,\n context,\n isRetryable: false,\n httpStatus: 400,\n });\n }\n}\n\n/**\n * Integration errors (Linear, etc.)\n */\nexport class IntegrationError extends StackMemoryError {\n constructor(\n message: string,\n code: ErrorCode = ErrorCode.LINEAR_API_ERROR,\n context?: ErrorContext,\n cause?: Error\n ) {\n super({\n code,\n message,\n context,\n cause,\n isRetryable: true,\n httpStatus: 502,\n });\n }\n}\n\n/**\n * MCP-related errors\n */\nexport class MCPError extends StackMemoryError {\n constructor(\n message: string,\n code: ErrorCode = ErrorCode.MCP_EXECUTION_FAILED,\n context?: ErrorContext\n ) {\n super({\n code,\n message,\n context,\n isRetryable: code === ErrorCode.MCP_RATE_LIMITED,\n httpStatus: code === ErrorCode.MCP_RATE_LIMITED ? 
429 : 400,\n });\n }\n}\n\n/**\n * Validation errors\n */\nexport class ValidationError extends StackMemoryError {\n constructor(\n message: string,\n code: ErrorCode = ErrorCode.VALIDATION_FAILED,\n context?: ErrorContext\n ) {\n super({\n code,\n message,\n context,\n isRetryable: false,\n httpStatus: 400,\n });\n }\n}\n\n/**\n * Project-related errors\n */\nexport class ProjectError extends StackMemoryError {\n constructor(\n message: string,\n code: ErrorCode = ErrorCode.PROJECT_NOT_FOUND,\n context?: ErrorContext\n ) {\n super({\n code,\n message,\n context,\n isRetryable: false,\n httpStatus: 404,\n });\n }\n}\n\n/**\n * System/Internal errors\n */\nexport class SystemError extends StackMemoryError {\n constructor(\n message: string,\n code: ErrorCode = ErrorCode.INTERNAL_ERROR,\n context?: ErrorContext,\n cause?: Error\n ) {\n super({\n code,\n message,\n context,\n cause,\n isRetryable: code === ErrorCode.SERVICE_UNAVAILABLE,\n httpStatus: 500,\n });\n }\n}\n\n/**\n * Helper function to determine if an error is retryable\n */\nexport function isRetryableError(error: unknown): boolean {\n if (error instanceof StackMemoryError) {\n return error.isRetryable;\n }\n // Check for common retryable error patterns\n if (error instanceof Error) {\n const message = error.message.toLowerCase();\n return (\n message.includes('econnrefused') ||\n message.includes('timeout') ||\n message.includes('enotfound') ||\n message.includes('socket hang up')\n );\n }\n return false;\n}\n\n/**\n * Helper function to safely extract error message\n */\nexport function getErrorMessage(error: unknown): string {\n if (error instanceof Error) {\n return error.message;\n }\n if (typeof error === 'string') {\n return error;\n }\n if (error && typeof error === 'object' && 'message' in error) {\n return String(error.message);\n }\n return 'An unknown error occurred';\n}\n\n/**\n * Helper function to wrap unknown errors in StackMemoryError\n */\nexport function wrapError(\n error: unknown,\n defaultMessage: string,\n code: ErrorCode = ErrorCode.INTERNAL_ERROR,\n context?: ErrorContext\n): StackMemoryError {\n if (error instanceof StackMemoryError) {\n return error;\n }\n\n const cause = error instanceof Error ? error : undefined;\n const message = error instanceof Error ? error.message : defaultMessage;\n\n return new SystemError(message, code, context, cause);\n}\n\n/**\n * Type guard to check if error is a StackMemoryError\n */\nexport function isStackMemoryError(error: unknown): error is StackMemoryError {\n return error instanceof StackMemoryError;\n}\n\n/**\n * Create context-aware error handler\n */\nexport function createErrorHandler(defaultContext: ErrorContext) {\n return (error: unknown, additionalContext?: ErrorContext) => {\n const context = { ...defaultContext, ...additionalContext };\n\n if (error instanceof StackMemoryError) {\n // Create a new error with merged context since context is readonly\n return new StackMemoryError({\n code: error.code,\n message: error.message,\n context: { ...error.context, ...context },\n cause: error.cause,\n isRetryable: error.isRetryable,\n httpStatus: error.httpStatus,\n });\n }\n\n return wrapError(\n error,\n getErrorMessage(error),\n ErrorCode.INTERNAL_ERROR,\n context\n );\n };\n}\n"],
-
"mappings": ";;;;AAKO,IAAK,YAAL,kBAAKA,eAAL;AAEL,EAAAA,WAAA,0BAAuB;AACvB,EAAAA,WAAA,qBAAkB;AAClB,EAAAA,WAAA,2BAAwB;AACxB,EAAAA,WAAA,yBAAsB;AACtB,EAAAA,WAAA,6BAA0B;AAC1B,EAAAA,WAAA,qBAAkB;AAClB,EAAAA,WAAA,sBAAmB;AACnB,EAAAA,WAAA,sBAAmB;AACnB,EAAAA,WAAA,sBAAmB;
+
"sourcesContent": ["/**\n * Custom error classes for StackMemory\n * Provides a hierarchy of error types for better error handling and debugging\n */\n\nexport enum ErrorCode {\n // Database errors (DB_*)\n DB_CONNECTION_FAILED = 'DB_001',\n DB_QUERY_FAILED = 'DB_002',\n DB_TRANSACTION_FAILED = 'DB_003',\n DB_MIGRATION_FAILED = 'DB_004',\n DB_CONSTRAINT_VIOLATION = 'DB_005',\n DB_SCHEMA_ERROR = 'DB_006',\n DB_INSERT_FAILED = 'DB_007',\n DB_UPDATE_FAILED = 'DB_008',\n DB_DELETE_FAILED = 'DB_009',\n DB_CORRUPTION = 'DB_010',\n\n // Frame errors (FRAME_*)\n FRAME_NOT_FOUND = 'FRAME_001',\n FRAME_INVALID_STATE = 'FRAME_002',\n FRAME_PARENT_NOT_FOUND = 'FRAME_003',\n FRAME_CYCLE_DETECTED = 'FRAME_004',\n FRAME_ALREADY_CLOSED = 'FRAME_005',\n FRAME_INIT_FAILED = 'FRAME_006',\n FRAME_INVALID_INPUT = 'FRAME_007',\n FRAME_STACK_OVERFLOW = 'FRAME_008',\n\n // Task errors (TASK_*)\n TASK_NOT_FOUND = 'TASK_001',\n TASK_INVALID_STATE = 'TASK_002',\n TASK_DEPENDENCY_CONFLICT = 'TASK_003',\n TASK_CIRCULAR_DEPENDENCY = 'TASK_004',\n\n // Integration errors (LINEAR_*)\n LINEAR_AUTH_FAILED = 'LINEAR_001',\n LINEAR_API_ERROR = 'LINEAR_002',\n LINEAR_SYNC_FAILED = 'LINEAR_003',\n LINEAR_WEBHOOK_FAILED = 'LINEAR_004',\n\n // MCP errors (MCP_*)\n MCP_TOOL_NOT_FOUND = 'MCP_001',\n MCP_INVALID_PARAMS = 'MCP_002',\n MCP_EXECUTION_FAILED = 'MCP_003',\n MCP_RATE_LIMITED = 'MCP_004',\n\n // Project errors (PROJECT_*)\n PROJECT_NOT_FOUND = 'PROJECT_001',\n PROJECT_INVALID_PATH = 'PROJECT_002',\n PROJECT_GIT_ERROR = 'PROJECT_003',\n\n // Validation errors (VAL_*)\n VALIDATION_FAILED = 'VAL_001',\n INVALID_INPUT = 'VAL_002',\n MISSING_REQUIRED_FIELD = 'VAL_003',\n TYPE_MISMATCH = 'VAL_004',\n\n // System errors (SYS_*)\n INITIALIZATION_ERROR = 'SYS_001',\n NOT_FOUND = 'SYS_002',\n INTERNAL_ERROR = 'SYS_003',\n CONFIGURATION_ERROR = 'SYS_004',\n PERMISSION_DENIED = 'SYS_005',\n RESOURCE_EXHAUSTED = 'SYS_006',\n SERVICE_UNAVAILABLE = 'SYS_007',\n SYSTEM_INIT_FAILED = 'SYS_008',\n UNKNOWN_ERROR = 'SYS_009',\n OPERATION_TIMEOUT = 'SYS_010',\n\n // Authentication errors (AUTH_*)\n AUTH_FAILED = 'AUTH_001',\n TOKEN_EXPIRED = 'AUTH_002',\n INVALID_CREDENTIALS = 'AUTH_003',\n\n // File system errors (FS_*)\n FILE_NOT_FOUND = 'FS_001',\n DISK_FULL = 'FS_002',\n\n // Git errors (GIT_*)\n NOT_GIT_REPO = 'GIT_001',\n GIT_COMMAND_FAILED = 'GIT_002',\n INVALID_BRANCH = 'GIT_003',\n\n // Network errors (NET_*)\n NETWORK_ERROR = 'NET_001',\n API_ERROR = 'NET_002',\n\n // Collaboration errors (COLLAB_*)\n STACK_CONTEXT_NOT_FOUND = 'COLLAB_001',\n HANDOFF_REQUEST_EXPIRED = 'COLLAB_002',\n MERGE_CONFLICT_UNRESOLVABLE = 'COLLAB_003',\n PERMISSION_VIOLATION = 'COLLAB_004',\n OPERATION_FAILED = 'COLLAB_005',\n OPERATION_EXPIRED = 'COLLAB_006',\n INVALID_STATE = 'COLLAB_007',\n RESOURCE_NOT_FOUND = 'COLLAB_008',\n HANDOFF_ALREADY_EXISTS = 'COLLAB_009',\n MERGE_SESSION_INVALID = 'COLLAB_010',\n STACK_SWITCH_FAILED = 'COLLAB_011',\n APPROVAL_TIMEOUT = 'COLLAB_012',\n CONFLICT_RESOLUTION_FAILED = 'COLLAB_013',\n TEAM_ACCESS_DENIED = 'COLLAB_014',\n STACK_LIMIT_EXCEEDED = 'COLLAB_015',\n}\n\nexport interface ErrorContext {\n [key: string]: unknown;\n}\n\nexport interface StackMemoryErrorOptions {\n code: ErrorCode;\n message: string;\n context?: ErrorContext;\n cause?: Error;\n isRetryable?: boolean;\n httpStatus?: number;\n}\n\n/**\n * Base error class for all StackMemory errors\n */\nexport class StackMemoryError extends Error {\n public readonly code: ErrorCode;\n public readonly context?: ErrorContext;\n public readonly cause?: Error;\n 
public readonly isRetryable: boolean;\n public readonly httpStatus: number;\n public readonly timestamp: Date;\n\n constructor(options: StackMemoryErrorOptions) {\n super(options.message);\n this.name = this.constructor.name;\n this.code = options.code;\n this.context = options.context;\n this.cause = options.cause;\n this.isRetryable = options.isRetryable ?? false;\n this.httpStatus = options.httpStatus ?? 500;\n this.timestamp = new Date();\n\n // Maintains proper stack trace for where our error was thrown\n if (Error.captureStackTrace) {\n Error.captureStackTrace(this, this.constructor);\n }\n }\n\n toJSON(): Record<string, unknown> {\n return {\n name: this.name,\n code: this.code,\n message: this.message,\n context: this.context,\n isRetryable: this.isRetryable,\n httpStatus: this.httpStatus,\n timestamp: this.timestamp.toISOString(),\n stack: this.stack,\n cause: this.cause?.message,\n };\n }\n}\n\n/**\n * Database-related errors\n */\nexport class DatabaseError extends StackMemoryError {\n constructor(\n message: string,\n code: ErrorCode = ErrorCode.DB_QUERY_FAILED,\n context?: ErrorContext,\n cause?: Error\n ) {\n super({\n code,\n message,\n context,\n cause,\n isRetryable: code === ErrorCode.DB_CONNECTION_FAILED,\n httpStatus: 503,\n });\n }\n}\n\n/**\n * Frame-related errors\n */\nexport class FrameError extends StackMemoryError {\n constructor(\n message: string,\n code: ErrorCode = ErrorCode.FRAME_INVALID_STATE,\n context?: ErrorContext\n ) {\n super({\n code,\n message,\n context,\n isRetryable: false,\n httpStatus: 400,\n });\n }\n}\n\n/**\n * Task-related errors\n */\nexport class TaskError extends StackMemoryError {\n constructor(\n message: string,\n code: ErrorCode = ErrorCode.TASK_INVALID_STATE,\n context?: ErrorContext\n ) {\n super({\n code,\n message,\n context,\n isRetryable: false,\n httpStatus: 400,\n });\n }\n}\n\n/**\n * Integration errors (Linear, etc.)\n */\nexport class IntegrationError extends StackMemoryError {\n constructor(\n message: string,\n code: ErrorCode = ErrorCode.LINEAR_API_ERROR,\n context?: ErrorContext,\n cause?: Error\n ) {\n super({\n code,\n message,\n context,\n cause,\n isRetryable: true,\n httpStatus: 502,\n });\n }\n}\n\n/**\n * MCP-related errors\n */\nexport class MCPError extends StackMemoryError {\n constructor(\n message: string,\n code: ErrorCode = ErrorCode.MCP_EXECUTION_FAILED,\n context?: ErrorContext\n ) {\n super({\n code,\n message,\n context,\n isRetryable: code === ErrorCode.MCP_RATE_LIMITED,\n httpStatus: code === ErrorCode.MCP_RATE_LIMITED ? 
429 : 400,\n });\n }\n}\n\n/**\n * Validation errors\n */\nexport class ValidationError extends StackMemoryError {\n constructor(\n message: string,\n code: ErrorCode = ErrorCode.VALIDATION_FAILED,\n context?: ErrorContext\n ) {\n super({\n code,\n message,\n context,\n isRetryable: false,\n httpStatus: 400,\n });\n }\n}\n\n/**\n * Project-related errors\n */\nexport class ProjectError extends StackMemoryError {\n constructor(\n message: string,\n code: ErrorCode = ErrorCode.PROJECT_NOT_FOUND,\n context?: ErrorContext\n ) {\n super({\n code,\n message,\n context,\n isRetryable: false,\n httpStatus: 404,\n });\n }\n}\n\n/**\n * System/Internal errors\n */\nexport class SystemError extends StackMemoryError {\n constructor(\n message: string,\n code: ErrorCode = ErrorCode.INTERNAL_ERROR,\n context?: ErrorContext,\n cause?: Error\n ) {\n super({\n code,\n message,\n context,\n cause,\n isRetryable: code === ErrorCode.SERVICE_UNAVAILABLE,\n httpStatus: 500,\n });\n }\n}\n\n/**\n * Helper function to determine if an error is retryable\n */\nexport function isRetryableError(error: unknown): boolean {\n if (error instanceof StackMemoryError) {\n return error.isRetryable;\n }\n // Check for common retryable error patterns\n if (error instanceof Error) {\n const message = error.message.toLowerCase();\n return (\n message.includes('econnrefused') ||\n message.includes('timeout') ||\n message.includes('enotfound') ||\n message.includes('socket hang up')\n );\n }\n return false;\n}\n\n/**\n * Helper function to safely extract error message\n */\nexport function getErrorMessage(error: unknown): string {\n if (error instanceof Error) {\n return error.message;\n }\n if (typeof error === 'string') {\n return error;\n }\n if (error && typeof error === 'object' && 'message' in error) {\n return String(error.message);\n }\n return 'An unknown error occurred';\n}\n\n/**\n * Helper function to wrap unknown errors in StackMemoryError\n */\nexport function wrapError(\n error: unknown,\n defaultMessage: string,\n code: ErrorCode = ErrorCode.INTERNAL_ERROR,\n context?: ErrorContext\n): StackMemoryError {\n if (error instanceof StackMemoryError) {\n return error;\n }\n\n const cause = error instanceof Error ? error : undefined;\n const message = error instanceof Error ? error.message : defaultMessage;\n\n return new SystemError(message, code, context, cause);\n}\n\n/**\n * Type guard to check if error is a StackMemoryError\n */\nexport function isStackMemoryError(error: unknown): error is StackMemoryError {\n return error instanceof StackMemoryError;\n}\n\n/**\n * Create context-aware error handler\n */\nexport function createErrorHandler(defaultContext: ErrorContext) {\n return (error: unknown, additionalContext?: ErrorContext) => {\n const context = { ...defaultContext, ...additionalContext };\n\n if (error instanceof StackMemoryError) {\n // Create a new error with merged context since context is readonly\n return new StackMemoryError({\n code: error.code,\n message: error.message,\n context: { ...error.context, ...context },\n cause: error.cause,\n isRetryable: error.isRetryable,\n httpStatus: error.httpStatus,\n });\n }\n\n return wrapError(\n error,\n getErrorMessage(error),\n ErrorCode.INTERNAL_ERROR,\n context\n );\n };\n}\n\n/**\n * User-friendly error messages for each error code\n */\nexport function getUserFriendlyMessage(code: ErrorCode): string {\n switch (code) {\n // Auth errors\n case ErrorCode.AUTH_FAILED:\n return 'Authentication failed. 
Please check your credentials and try again.';\n case ErrorCode.TOKEN_EXPIRED:\n return 'Your session has expired. Please log in again.';\n case ErrorCode.INVALID_CREDENTIALS:\n return 'Invalid credentials provided. Please check and try again.';\n\n // File system errors\n case ErrorCode.FILE_NOT_FOUND:\n return 'The requested file or directory was not found.';\n case ErrorCode.PERMISSION_DENIED:\n return 'Permission denied. Please check file permissions or run with appropriate privileges.';\n case ErrorCode.DISK_FULL:\n return 'Insufficient disk space. Please free up space and try again.';\n\n // Git errors\n case ErrorCode.NOT_GIT_REPO:\n return 'This command requires a git repository. Please run it from within a git repository.';\n case ErrorCode.GIT_COMMAND_FAILED:\n return 'Git operation failed. Please ensure your repository is in a valid state.';\n case ErrorCode.INVALID_BRANCH:\n return 'Invalid branch specified. Please check the branch name and try again.';\n\n // Database errors\n case ErrorCode.DB_CONNECTION_FAILED:\n return 'Database connection failed. Please try again or contact support if the issue persists.';\n case ErrorCode.DB_QUERY_FAILED:\n return 'Database query failed. Please try again.';\n case ErrorCode.DB_CORRUPTION:\n return 'Database appears to be corrupted. Please contact support.';\n\n // Network errors\n case ErrorCode.NETWORK_ERROR:\n return 'Network error. Please check your internet connection and try again.';\n case ErrorCode.API_ERROR:\n return 'API request failed. Please try again later.';\n case ErrorCode.OPERATION_TIMEOUT:\n return 'The operation timed out. Please try again.';\n\n // Validation errors\n case ErrorCode.INVALID_INPUT:\n return 'Invalid input provided. Please check your command and try again.';\n case ErrorCode.VALIDATION_FAILED:\n return 'Validation failed. Please check your input and try again.';\n case ErrorCode.MISSING_REQUIRED_FIELD:\n return 'A required field is missing. Please provide all required information.';\n\n // System errors\n case ErrorCode.CONFIGURATION_ERROR:\n return 'Configuration error. Please check your settings.';\n case ErrorCode.SERVICE_UNAVAILABLE:\n return 'Service is temporarily unavailable. Please try again later.';\n\n // Default\n default:\n return 'An unexpected error occurred. Please try again or contact support.';\n }\n}\n\n/**\n * ErrorHandler provides utilities for handling errors in CLI context\n */\nexport class ErrorHandler {\n private static retryMap = new Map<string, number>();\n private static readonly MAX_RETRIES = 3;\n\n /**\n * Handle an error and exit the process\n */\n static handle(error: unknown, operation: string): never {\n if (error instanceof StackMemoryError) {\n const userMessage = getUserFriendlyMessage(error.code);\n console.error(`\u274C ${userMessage}`);\n\n if (error.isRetryable) {\n console.error('\uD83D\uDCA1 This error may be recoverable. Please try again.');\n }\n\n process.exit(1);\n }\n\n if (error instanceof Error) {\n let stackMemoryError: StackMemoryError;\n\n if ('code' in error && typeof error.code === 'string') {\n stackMemoryError = ErrorHandler.fromNodeError(\n error as NodeJS.ErrnoException,\n { operation }\n );\n } else {\n stackMemoryError = wrapError(error, error.message, ErrorCode.OPERATION_FAILED, {\n operation,\n });\n }\n\n const userMessage = getUserFriendlyMessage(stackMemoryError.code);\n console.error(`\u274C ${userMessage}`);\n\n if (stackMemoryError.isRetryable) {\n console.error('\uD83D\uDCA1 This error may be recoverable. 
Please try again.');\n }\n\n process.exit(1);\n }\n\n // Unknown error type\n console.error('\u274C An unexpected error occurred.');\n process.exit(1);\n }\n\n /**\n * Convert Node.js error to StackMemoryError\n */\n static fromNodeError(\n nodeError: NodeJS.ErrnoException,\n context: ErrorContext = {}\n ): StackMemoryError {\n const code = nodeError.code;\n\n switch (code) {\n case 'ENOENT':\n return new SystemError(\n `File or directory not found: ${nodeError.path}`,\n ErrorCode.FILE_NOT_FOUND,\n { ...context, path: nodeError.path },\n nodeError\n );\n\n case 'EACCES':\n case 'EPERM':\n return new SystemError(\n `Permission denied: ${nodeError.path}`,\n ErrorCode.PERMISSION_DENIED,\n { ...context, path: nodeError.path },\n nodeError\n );\n\n case 'ENOSPC':\n return new SystemError(\n 'No space left on device',\n ErrorCode.DISK_FULL,\n context,\n nodeError\n );\n\n case 'ETIMEDOUT':\n return new SystemError(\n 'Operation timed out',\n ErrorCode.OPERATION_TIMEOUT,\n context,\n nodeError\n );\n\n default:\n return new SystemError(\n nodeError.message,\n ErrorCode.UNKNOWN_ERROR,\n { ...context, nodeErrorCode: code },\n nodeError\n );\n }\n }\n\n /**\n * Safely execute an operation with optional fallback\n */\n static async safeExecute<T>(\n operation: () => Promise<T> | T,\n operationName: string,\n fallback?: T\n ): Promise<T | undefined> {\n try {\n return await operation();\n } catch (error: unknown) {\n if (fallback !== undefined) {\n return fallback;\n }\n ErrorHandler.handle(error, operationName);\n }\n }\n\n /**\n * Execute with automatic retry and exponential backoff\n */\n static async withRetry<T>(\n operation: () => Promise<T> | T,\n operationName: string,\n maxRetries: number = ErrorHandler.MAX_RETRIES\n ): Promise<T> {\n let lastError: unknown;\n\n for (let attempt = 1; attempt <= maxRetries; attempt++) {\n try {\n const result = await operation();\n ErrorHandler.retryMap.delete(operationName);\n return result;\n } catch (error: unknown) {\n lastError = error;\n\n if (error instanceof StackMemoryError && !error.isRetryable) {\n ErrorHandler.handle(error, operationName);\n }\n\n if (attempt === maxRetries) {\n break;\n }\n\n const delay = Math.min(1000 * Math.pow(2, attempt - 1), 5000);\n await new Promise((resolve) => setTimeout(resolve, delay));\n }\n }\n\n ErrorHandler.handle(lastError, `${operationName} (after ${maxRetries} attempts)`);\n }\n\n /**\n * Create a circuit breaker for an operation\n */\n static createCircuitBreaker<T>(\n operation: () => Promise<T> | T,\n operationName: string,\n threshold: number = 5\n ) {\n let failures = 0;\n let lastFailure = 0;\n const resetTimeout = 30000;\n\n return async (): Promise<T> => {\n const now = Date.now();\n\n if (now - lastFailure > resetTimeout) {\n failures = 0;\n }\n\n if (failures >= threshold) {\n throw new SystemError(\n `Circuit breaker open for '${operationName}'`,\n ErrorCode.SERVICE_UNAVAILABLE,\n { operationName, failures, threshold }\n );\n }\n\n try {\n const result = await operation();\n failures = 0;\n return result;\n } catch (error: unknown) {\n failures++;\n lastFailure = now;\n throw error;\n }\n };\n }\n}\n\n/**\n * Validation utilities\n */\nexport const validateInput = (\n value: unknown,\n name: string,\n validator: (val: unknown) => boolean\n): asserts value is NonNullable<unknown> => {\n if (!validator(value)) {\n throw new ValidationError(\n `Invalid ${name}: ${String(value)}`,\n ErrorCode.INVALID_INPUT,\n { name, value }\n );\n }\n};\n\nexport const validateEmail = (email: string): asserts email is 
string => {\n const emailRegex = /^[^\\s@]+@[^\\s@]+\\.[^\\s@]+$/;\n if (!emailRegex.test(email) || email.length > 254) {\n throw new ValidationError(\n `Invalid email format: ${email}`,\n ErrorCode.INVALID_INPUT,\n { email }\n );\n }\n};\n\nexport const validatePath = (filePath: string): asserts filePath is string => {\n if (!filePath || filePath.includes('..') || filePath.includes('\\0')) {\n throw new ValidationError(\n `Invalid path: ${filePath}`,\n ErrorCode.INVALID_INPUT,\n { path: filePath }\n );\n }\n};\n"],
+
"mappings": ";;;;AAKO,IAAK,YAAL,kBAAKA,eAAL;AAEL,EAAAA,WAAA,0BAAuB;AACvB,EAAAA,WAAA,qBAAkB;AAClB,EAAAA,WAAA,2BAAwB;AACxB,EAAAA,WAAA,yBAAsB;AACtB,EAAAA,WAAA,6BAA0B;AAC1B,EAAAA,WAAA,qBAAkB;AAClB,EAAAA,WAAA,sBAAmB;AACnB,EAAAA,WAAA,sBAAmB;AACnB,EAAAA,WAAA,sBAAmB;AACnB,EAAAA,WAAA,mBAAgB;AAGhB,EAAAA,WAAA,qBAAkB;AAClB,EAAAA,WAAA,yBAAsB;AACtB,EAAAA,WAAA,4BAAyB;AACzB,EAAAA,WAAA,0BAAuB;AACvB,EAAAA,WAAA,0BAAuB;AACvB,EAAAA,WAAA,uBAAoB;AACpB,EAAAA,WAAA,yBAAsB;AACtB,EAAAA,WAAA,0BAAuB;AAGvB,EAAAA,WAAA,oBAAiB;AACjB,EAAAA,WAAA,wBAAqB;AACrB,EAAAA,WAAA,8BAA2B;AAC3B,EAAAA,WAAA,8BAA2B;AAG3B,EAAAA,WAAA,wBAAqB;AACrB,EAAAA,WAAA,sBAAmB;AACnB,EAAAA,WAAA,wBAAqB;AACrB,EAAAA,WAAA,2BAAwB;AAGxB,EAAAA,WAAA,wBAAqB;AACrB,EAAAA,WAAA,wBAAqB;AACrB,EAAAA,WAAA,0BAAuB;AACvB,EAAAA,WAAA,sBAAmB;AAGnB,EAAAA,WAAA,uBAAoB;AACpB,EAAAA,WAAA,0BAAuB;AACvB,EAAAA,WAAA,uBAAoB;AAGpB,EAAAA,WAAA,uBAAoB;AACpB,EAAAA,WAAA,mBAAgB;AAChB,EAAAA,WAAA,4BAAyB;AACzB,EAAAA,WAAA,mBAAgB;AAGhB,EAAAA,WAAA,0BAAuB;AACvB,EAAAA,WAAA,eAAY;AACZ,EAAAA,WAAA,oBAAiB;AACjB,EAAAA,WAAA,yBAAsB;AACtB,EAAAA,WAAA,uBAAoB;AACpB,EAAAA,WAAA,wBAAqB;AACrB,EAAAA,WAAA,yBAAsB;AACtB,EAAAA,WAAA,wBAAqB;AACrB,EAAAA,WAAA,mBAAgB;AAChB,EAAAA,WAAA,uBAAoB;AAGpB,EAAAA,WAAA,iBAAc;AACd,EAAAA,WAAA,mBAAgB;AAChB,EAAAA,WAAA,yBAAsB;AAGtB,EAAAA,WAAA,oBAAiB;AACjB,EAAAA,WAAA,eAAY;AAGZ,EAAAA,WAAA,kBAAe;AACf,EAAAA,WAAA,wBAAqB;AACrB,EAAAA,WAAA,oBAAiB;AAGjB,EAAAA,WAAA,mBAAgB;AAChB,EAAAA,WAAA,eAAY;AAGZ,EAAAA,WAAA,6BAA0B;AAC1B,EAAAA,WAAA,6BAA0B;AAC1B,EAAAA,WAAA,iCAA8B;AAC9B,EAAAA,WAAA,0BAAuB;AACvB,EAAAA,WAAA,sBAAmB;AACnB,EAAAA,WAAA,uBAAoB;AACpB,EAAAA,WAAA,mBAAgB;AAChB,EAAAA,WAAA,wBAAqB;AACrB,EAAAA,WAAA,4BAAyB;AACzB,EAAAA,WAAA,2BAAwB;AACxB,EAAAA,WAAA,yBAAsB;AACtB,EAAAA,WAAA,sBAAmB;AACnB,EAAAA,WAAA,gCAA6B;AAC7B,EAAAA,WAAA,wBAAqB;AACrB,EAAAA,WAAA,0BAAuB;AAjGb,SAAAA;AAAA,GAAA;AAoHL,MAAM,yBAAyB,MAAM;AAAA,EAC1B;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EAEhB,YAAY,SAAkC;AAC5C,UAAM,QAAQ,OAAO;AACrB,SAAK,OAAO,KAAK,YAAY;AAC7B,SAAK,OAAO,QAAQ;AACpB,SAAK,UAAU,QAAQ;AACvB,SAAK,QAAQ,QAAQ;AACrB,SAAK,cAAc,QAAQ,eAAe;AAC1C,SAAK,aAAa,QAAQ,cAAc;AACxC,SAAK,YAAY,oBAAI,KAAK;AAG1B,QAAI,MAAM,mBAAmB;AAC3B,YAAM,kBAAkB,MAAM,KAAK,WAAW;AAAA,IAChD;AAAA,EACF;AAAA,EAEA,SAAkC;AAChC,WAAO;AAAA,MACL,MAAM,KAAK;AAAA,MACX,MAAM,KAAK;AAAA,MACX,SAAS,KAAK;AAAA,MACd,SAAS,KAAK;AAAA,MACd,aAAa,KAAK;AAAA,MAClB,YAAY,KAAK;AAAA,MACjB,WAAW,KAAK,UAAU,YAAY;AAAA,MACtC,OAAO,KAAK;AAAA,MACZ,OAAO,KAAK,OAAO;AAAA,IACrB;AAAA,EACF;AACF;AAKO,MAAM,sBAAsB,iBAAiB;AAAA,EAClD,YACE,SACA,OAAkB,gCAClB,SACA,OACA;AACA,UAAM;AAAA,MACJ;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA,aAAa,SAAS;AAAA,MACtB,YAAY;AAAA,IACd,CAAC;AAAA,EACH;AACF;AAKO,MAAM,mBAAmB,iBAAiB;AAAA,EAC/C,YACE,SACA,OAAkB,uCAClB,SACA;AACA,UAAM;AAAA,MACJ;AAAA,MACA;AAAA,MACA;AAAA,MACA,aAAa;AAAA,MACb,YAAY;AAAA,IACd,CAAC;AAAA,EACH;AACF;AAKO,MAAM,kBAAkB,iBAAiB;AAAA,EAC9C,YACE,SACA,OAAkB,qCAClB,SACA;AACA,UAAM;AAAA,MACJ;AAAA,MACA;AAAA,MACA;AAAA,MACA,aAAa;AAAA,MACb,YAAY;AAAA,IACd,CAAC;AAAA,EACH;AACF;AAKO,MAAM,yBAAyB,iBAAiB;AAAA,EACrD,YACE,SACA,OAAkB,qCAClB,SACA,OACA;AACA,UAAM;AAAA,MACJ;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA,aAAa;AAAA,MACb,YAAY;AAAA,IACd,CAAC;AAAA,EACH;AACF;AAKO,MAAM,iBAAiB,iBAAiB;AAAA,EAC7C,YACE,SACA,OAAkB,sCAClB,SACA;AACA,UAAM;AAAA,MACJ;AAAA,MACA;AAAA,MACA;AAAA,MACA,aAAa,SAAS;AAAA,MACtB,YAAY,SAAS,mCAA6B,MAAM;AAAA,IAC1D,CAAC;AAAA,EACH;AACF;AAKO,MAAM,wBAAwB,iBAAiB;AAAA,EACpD,YACE,SACA,OAAkB,mCAClB,SACA;AACA,UAAM;AAAA,MACJ;AAAA,MACA;AAAA,MACA;AAAA,MACA,aAAa;AAAA,MACb,YAAY;AAAA,IACd,CAAC;AAAA,EACH;AACF;AAKO,MAAM,qBAAqB,iBAAiB;AAAA,EACjD,YACE,SACA,OAAkB,uCAClB,SACA;AACA,UAAM;AAAA,MACJ;AAAA,MACA;AAAA,MACA;AAAA,MACA,a
AAa;AAAA,MACb,YAAY;AAAA,IACd,CAAC;AAAA,EACH;AACF;AAKO,MAAM,oBAAoB,iBAAiB;AAAA,EAChD,YACE,SACA,OAAkB,gCAClB,SACA,OACA;AACA,UAAM;AAAA,MACJ;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA,aAAa,SAAS;AAAA,MACtB,YAAY;AAAA,IACd,CAAC;AAAA,EACH;AACF;AAKO,SAAS,iBAAiB,OAAyB;AACxD,MAAI,iBAAiB,kBAAkB;AACrC,WAAO,MAAM;AAAA,EACf;AAEA,MAAI,iBAAiB,OAAO;AAC1B,UAAM,UAAU,MAAM,QAAQ,YAAY;AAC1C,WACE,QAAQ,SAAS,cAAc,KAC/B,QAAQ,SAAS,SAAS,KAC1B,QAAQ,SAAS,WAAW,KAC5B,QAAQ,SAAS,gBAAgB;AAAA,EAErC;AACA,SAAO;AACT;AAKO,SAAS,gBAAgB,OAAwB;AACtD,MAAI,iBAAiB,OAAO;AAC1B,WAAO,MAAM;AAAA,EACf;AACA,MAAI,OAAO,UAAU,UAAU;AAC7B,WAAO;AAAA,EACT;AACA,MAAI,SAAS,OAAO,UAAU,YAAY,aAAa,OAAO;AAC5D,WAAO,OAAO,MAAM,OAAO;AAAA,EAC7B;AACA,SAAO;AACT;AAKO,SAAS,UACd,OACA,gBACA,OAAkB,gCAClB,SACkB;AAClB,MAAI,iBAAiB,kBAAkB;AACrC,WAAO;AAAA,EACT;AAEA,QAAM,QAAQ,iBAAiB,QAAQ,QAAQ;AAC/C,QAAM,UAAU,iBAAiB,QAAQ,MAAM,UAAU;AAEzD,SAAO,IAAI,YAAY,SAAS,MAAM,SAAS,KAAK;AACtD;AAKO,SAAS,mBAAmB,OAA2C;AAC5E,SAAO,iBAAiB;AAC1B;AAKO,SAAS,mBAAmB,gBAA8B;AAC/D,SAAO,CAAC,OAAgB,sBAAqC;AAC3D,UAAM,UAAU,EAAE,GAAG,gBAAgB,GAAG,kBAAkB;AAE1D,QAAI,iBAAiB,kBAAkB;AAErC,aAAO,IAAI,iBAAiB;AAAA,QAC1B,MAAM,MAAM;AAAA,QACZ,SAAS,MAAM;AAAA,QACf,SAAS,EAAE,GAAG,MAAM,SAAS,GAAG,QAAQ;AAAA,QACxC,OAAO,MAAM;AAAA,QACb,aAAa,MAAM;AAAA,QACnB,YAAY,MAAM;AAAA,MACpB,CAAC;AAAA,IACH;AAEA,WAAO;AAAA,MACL;AAAA,MACA,gBAAgB,KAAK;AAAA,MACrB;AAAA,MACA;AAAA,IACF;AAAA,EACF;AACF;AAKO,SAAS,uBAAuB,MAAyB;AAC9D,UAAQ,MAAM;AAAA;AAAA,IAEZ,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AACH,aAAO;AAAA;AAAA,IAGT,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AACH,aAAO;AAAA;AAAA,IAGT,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AACH,aAAO;AAAA;AAAA,IAGT,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AACH,aAAO;AAAA;AAAA,IAGT,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AACH,aAAO;AAAA;AAAA,IAGT,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AACH,aAAO;AAAA;AAAA,IAGT,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AACH,aAAO;AAAA;AAAA,IAGT;AACE,aAAO;AAAA,EACX;AACF;AAKO,MAAM,aAAa;AAAA,EACxB,OAAe,WAAW,oBAAI,IAAoB;AAAA,EAClD,OAAwB,cAAc;AAAA;AAAA;AAAA;AAAA,EAKtC,OAAO,OAAO,OAAgB,WAA0B;AACtD,QAAI,iBAAiB,kBAAkB;AACrC,YAAM,cAAc,uBAAuB,MAAM,IAAI;AACrD,cAAQ,MAAM,UAAK,WAAW,EAAE;AAEhC,UAAI,MAAM,aAAa;AACrB,gBAAQ,MAAM,4DAAqD;AAAA,MACrE;AAEA,cAAQ,KAAK,CAAC;AAAA,IAChB;AAEA,QAAI,iBAAiB,OAAO;AAC1B,UAAI;AAEJ,UAAI,UAAU,SAAS,OAAO,MAAM,SAAS,UAAU;AACrD,2BAAmB,aAAa;AAAA,UAC9B;AAAA,UACA,EAAE,UAAU;AAAA,QACd;AAAA,MACF,OAAO;AACL,2BAAmB,UAAU,OAAO,MAAM,SAAS,qCAA4B;AAAA,UAC7E;AAAA,QACF,CAAC;AAAA,MACH;AAEA,YAAM,cAAc,uBAAuB,iBAAiB,IAAI;AAChE,cAAQ,MAAM,UAAK,WAAW,EAAE;AAEhC,UAAI,iBAAiB,aAAa;AAChC,gBAAQ,MAAM,4DAAqD;AAAA,MACrE;AAEA,cAAQ,KAAK,CAAC;AAAA,IAChB;AAGA,YAAQ,MAAM,sCAAiC;AAC/C,YAAQ,KAAK,CAAC;AAAA,EAChB;AAAA;AAAA;AAAA;AAAA,EAKA,OAAO,cACL,WACA,UAAwB,CAAC,GACP;AAClB,UAAM,OAAO,UAAU;AAEvB,YAAQ,MAAM;AAAA,MACZ,KAAK;AACH,eAAO,IAAI;AAAA,UACT,gCAAgC,UAAU,IAAI;AAAA,UAC9C;AAAA,UACA,EAAE,GAAG,SAAS,MAAM,UAAU,KAAK;AAAA,UACnC;AAAA,QACF;AAAA,MAEF,KAAK;AAAA,MACL,KAAK;AACH,eAAO,IAAI;AAAA,UACT,sBAAsB,UAAU,IAAI;AAAA,UACpC;AAAA,UACA,EAAE,GAAG,SAAS,MAAM,UAAU,KAAK;AAAA,UACnC;AAAA,QACF;AAAA,MAEF,KAAK;AACH,eAAO,IAAI;AAAA,UACT;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,QACF;AAAA,MAEF,KAAK;AACH,eAAO,IAAI;AAAA,UACT;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,QACF;AAAA,MAEF;AACE,eAAO,IAAI;AAAA,UACT,UAAU;AAAA,UACV;AAAA,UACA,EAAE,GAAG,SAAS,eAAe,KAAK;AAAA,UAClC;AAAA,QACF;AAAA,IACJ;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,aAAa,YACX,WACA,eACA,UACwB;AACxB,QAAI;AACF,aAAO,MAAM,UAAU;AAAA,IACzB,SAAS,OAAgB;AACvB,UAAI,aAAa,QAAW;AAC1B,eAAO;AAAA,MACT;AACA,mBAAa,OAAO,OAAO,aAAa;AAAA,IAC1C;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EA
KA,aAAa,UACX,WACA,eACA,aAAqB,aAAa,aACtB;AACZ,QAAI;AAEJ,aAAS,UAAU,GAAG,WAAW,YAAY,WAAW;AACtD,UAAI;AACF,cAAM,SAAS,MAAM,UAAU;AAC/B,qBAAa,SAAS,OAAO,aAAa;AAC1C,eAAO;AAAA,MACT,SAAS,OAAgB;AACvB,oBAAY;AAEZ,YAAI,iBAAiB,oBAAoB,CAAC,MAAM,aAAa;AAC3D,uBAAa,OAAO,OAAO,aAAa;AAAA,QAC1C;AAEA,YAAI,YAAY,YAAY;AAC1B;AAAA,QACF;AAEA,cAAM,QAAQ,KAAK,IAAI,MAAO,KAAK,IAAI,GAAG,UAAU,CAAC,GAAG,GAAI;AAC5D,cAAM,IAAI,QAAQ,CAAC,YAAY,WAAW,SAAS,KAAK,CAAC;AAAA,MAC3D;AAAA,IACF;AAEA,iBAAa,OAAO,WAAW,GAAG,aAAa,WAAW,UAAU,YAAY;AAAA,EAClF;AAAA;AAAA;AAAA;AAAA,EAKA,OAAO,qBACL,WACA,eACA,YAAoB,GACpB;AACA,QAAI,WAAW;AACf,QAAI,cAAc;AAClB,UAAM,eAAe;AAErB,WAAO,YAAwB;AAC7B,YAAM,MAAM,KAAK,IAAI;AAErB,UAAI,MAAM,cAAc,cAAc;AACpC,mBAAW;AAAA,MACb;AAEA,UAAI,YAAY,WAAW;AACzB,cAAM,IAAI;AAAA,UACR,6BAA6B,aAAa;AAAA,UAC1C;AAAA,UACA,EAAE,eAAe,UAAU,UAAU;AAAA,QACvC;AAAA,MACF;AAEA,UAAI;AACF,cAAM,SAAS,MAAM,UAAU;AAC/B,mBAAW;AACX,eAAO;AAAA,MACT,SAAS,OAAgB;AACvB;AACA,sBAAc;AACd,cAAM;AAAA,MACR;AAAA,IACF;AAAA,EACF;AACF;AAKO,MAAM,gBAAgB,CAC3B,OACA,MACA,cAC0C;AAC1C,MAAI,CAAC,UAAU,KAAK,GAAG;AACrB,UAAM,IAAI;AAAA,MACR,WAAW,IAAI,KAAK,OAAO,KAAK,CAAC;AAAA,MACjC;AAAA,MACA,EAAE,MAAM,MAAM;AAAA,IAChB;AAAA,EACF;AACF;AAEO,MAAM,gBAAgB,CAAC,UAA2C;AACvE,QAAM,aAAa;AACnB,MAAI,CAAC,WAAW,KAAK,KAAK,KAAK,MAAM,SAAS,KAAK;AACjD,UAAM,IAAI;AAAA,MACR,yBAAyB,KAAK;AAAA,MAC9B;AAAA,MACA,EAAE,MAAM;AAAA,IACV;AAAA,EACF;AACF;AAEO,MAAM,eAAe,CAAC,aAAiD;AAC5E,MAAI,CAAC,YAAY,SAAS,SAAS,IAAI,KAAK,SAAS,SAAS,IAAI,GAAG;AACnE,UAAM,IAAI;AAAA,MACR,iBAAiB,QAAQ;AAAA,MACzB;AAAA,MACA,EAAE,MAAM,SAAS;AAAA,IACnB;AAAA,EACF;AACF;",
"names": ["ErrorCode"]
}
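The validate* helpers added in the same module are assertion-style guards: each throws a ValidationError with code VAL_002 (INVALID_INPUT) rather than returning a boolean. A short sketch under the same path assumption as above:

import { validateEmail, validatePath, validateInput, ValidationError } from './dist/core/errors/index.js'; // path assumed from this diff

try {
  validateEmail('dev@example.com');        // simple regex check plus a 254-character cap
  validatePath('frames/stack.db');         // rejects empty paths, '..' segments, and NUL bytes
  validateInput(8, 'batchSize', (v) => typeof v === 'number' && v > 0);
} catch (err) {
  if (err instanceof ValidationError) {
    console.error(err.code, err.message);  // "VAL_002" plus the specific message
  }
}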