@stackmemoryai/stackmemory 0.5.31 → 0.5.34

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (148)
  1. package/dist/agents/core/agent-task-manager.js.map +1 -1
  2. package/dist/cli/claude-sm.js +199 -16
  3. package/dist/cli/claude-sm.js.map +2 -2
  4. package/dist/cli/commands/clear.js +1 -1
  5. package/dist/cli/commands/clear.js.map +1 -1
  6. package/dist/cli/commands/context.js +1 -12
  7. package/dist/cli/commands/context.js.map +2 -2
  8. package/dist/cli/commands/dashboard.js.map +1 -1
  9. package/dist/cli/commands/discovery.js +1 -1
  10. package/dist/cli/commands/discovery.js.map +1 -1
  11. package/dist/cli/commands/handoff.js +1 -1
  12. package/dist/cli/commands/handoff.js.map +1 -1
  13. package/dist/cli/commands/linear.js +1 -14
  14. package/dist/cli/commands/linear.js.map +2 -2
  15. package/dist/cli/commands/login.js +32 -10
  16. package/dist/cli/commands/login.js.map +2 -2
  17. package/dist/cli/commands/migrate.js +80 -22
  18. package/dist/cli/commands/migrate.js.map +2 -2
  19. package/dist/cli/commands/model.js +533 -0
  20. package/dist/cli/commands/model.js.map +7 -0
  21. package/dist/cli/commands/monitor.js +1 -1
  22. package/dist/cli/commands/monitor.js.map +1 -1
  23. package/dist/cli/commands/quality.js +1 -1
  24. package/dist/cli/commands/quality.js.map +1 -1
  25. package/dist/cli/commands/ralph.js +93 -28
  26. package/dist/cli/commands/ralph.js.map +2 -2
  27. package/dist/cli/commands/service.js +10 -3
  28. package/dist/cli/commands/service.js.map +2 -2
  29. package/dist/cli/commands/skills.js +61 -11
  30. package/dist/cli/commands/skills.js.map +2 -2
  31. package/dist/cli/commands/sms-notify.js +342 -22
  32. package/dist/cli/commands/sms-notify.js.map +3 -3
  33. package/dist/cli/commands/workflow.js +1 -1
  34. package/dist/cli/commands/workflow.js.map +1 -1
  35. package/dist/cli/commands/worktree.js +1 -1
  36. package/dist/cli/commands/worktree.js.map +1 -1
  37. package/dist/cli/index.js +3 -1
  38. package/dist/cli/index.js.map +2 -2
  39. package/dist/core/context/auto-context.js.map +1 -1
  40. package/dist/core/context/compaction-handler.js.map +2 -2
  41. package/dist/core/context/context-bridge.js.map +2 -2
  42. package/dist/core/context/dual-stack-manager.js +24 -8
  43. package/dist/core/context/dual-stack-manager.js.map +2 -2
  44. package/dist/core/context/enhanced-rehydration.js.map +1 -1
  45. package/dist/core/context/frame-database.js +41 -5
  46. package/dist/core/context/frame-database.js.map +2 -2
  47. package/dist/core/context/frame-digest.js +6 -1
  48. package/dist/core/context/frame-digest.js.map +2 -2
  49. package/dist/core/context/frame-handoff-manager.js.map +1 -1
  50. package/dist/core/context/frame-lifecycle-hooks.js +119 -0
  51. package/dist/core/context/frame-lifecycle-hooks.js.map +7 -0
  52. package/dist/core/context/frame-manager.js +56 -9
  53. package/dist/core/context/frame-manager.js.map +2 -2
  54. package/dist/core/context/frame-stack.js +29 -0
  55. package/dist/core/context/frame-stack.js.map +2 -2
  56. package/dist/core/context/incremental-gc.js.map +2 -2
  57. package/dist/core/context/index.js +4 -22
  58. package/dist/core/context/index.js.map +2 -2
  59. package/dist/core/context/permission-manager.js +0 -11
  60. package/dist/core/context/permission-manager.js.map +2 -2
  61. package/dist/core/context/recursive-context-manager.js +15 -9
  62. package/dist/core/context/recursive-context-manager.js.map +2 -2
  63. package/dist/core/context/refactored-frame-manager.js +140 -34
  64. package/dist/core/context/refactored-frame-manager.js.map +3 -3
  65. package/dist/core/context/shared-context-layer.js +0 -11
  66. package/dist/core/context/shared-context-layer.js.map +2 -2
  67. package/dist/core/context/stack-merge-resolver.js.map +1 -1
  68. package/dist/core/context/validation.js +6 -1
  69. package/dist/core/context/validation.js.map +2 -2
  70. package/dist/core/database/database-adapter.js.map +1 -1
  71. package/dist/core/database/paradedb-adapter.js.map +1 -1
  72. package/dist/core/database/query-router.js.map +1 -1
  73. package/dist/core/database/sqlite-adapter.js.map +1 -1
  74. package/dist/core/digest/frame-digest-integration.js.map +1 -1
  75. package/dist/core/digest/hybrid-digest-generator.js.map +1 -1
  76. package/dist/core/digest/types.js.map +1 -1
  77. package/dist/core/errors/index.js +249 -0
  78. package/dist/core/errors/index.js.map +2 -2
  79. package/dist/core/frame/workflow-templates.js.map +2 -2
  80. package/dist/core/merge/conflict-detector.js.map +1 -1
  81. package/dist/core/merge/resolution-engine.js.map +1 -1
  82. package/dist/core/merge/stack-diff.js.map +1 -1
  83. package/dist/core/models/fallback-monitor.js +229 -0
  84. package/dist/core/models/fallback-monitor.js.map +7 -0
  85. package/dist/core/models/model-router.js +340 -0
  86. package/dist/core/models/model-router.js.map +7 -0
  87. package/dist/core/monitoring/error-handler.js +37 -270
  88. package/dist/core/monitoring/error-handler.js.map +3 -3
  89. package/dist/core/monitoring/session-monitor.js.map +1 -1
  90. package/dist/core/performance/lazy-context-loader.js.map +1 -1
  91. package/dist/core/performance/optimized-frame-context.js.map +1 -1
  92. package/dist/core/retrieval/context-retriever.js.map +1 -1
  93. package/dist/core/retrieval/graph-retrieval.js.map +1 -1
  94. package/dist/core/retrieval/hierarchical-retrieval.js.map +1 -1
  95. package/dist/core/retrieval/llm-context-retrieval.js.map +1 -1
  96. package/dist/core/retrieval/retrieval-benchmarks.js.map +1 -1
  97. package/dist/core/retrieval/summary-generator.js.map +1 -1
  98. package/dist/core/retrieval/types.js.map +1 -1
  99. package/dist/core/storage/chromadb-adapter.js.map +1 -1
  100. package/dist/core/storage/infinite-storage.js.map +1 -1
  101. package/dist/core/storage/two-tier-storage.js.map +1 -1
  102. package/dist/features/tasks/task-aware-context.js.map +1 -1
  103. package/dist/features/web/server/index.js +1 -1
  104. package/dist/features/web/server/index.js.map +1 -1
  105. package/dist/hooks/claude-code-whatsapp-hook.js +197 -0
  106. package/dist/hooks/claude-code-whatsapp-hook.js.map +7 -0
  107. package/dist/hooks/linear-task-picker.js +1 -1
  108. package/dist/hooks/linear-task-picker.js.map +2 -2
  109. package/dist/hooks/schemas.js +105 -1
  110. package/dist/hooks/schemas.js.map +2 -2
  111. package/dist/hooks/session-summary.js +5 -1
  112. package/dist/hooks/session-summary.js.map +2 -2
  113. package/dist/hooks/sms-action-runner.js +16 -1
  114. package/dist/hooks/sms-action-runner.js.map +2 -2
  115. package/dist/hooks/sms-notify.js +4 -2
  116. package/dist/hooks/sms-notify.js.map +2 -2
  117. package/dist/hooks/sms-webhook.js +23 -2
  118. package/dist/hooks/sms-webhook.js.map +2 -2
  119. package/dist/hooks/whatsapp-commands.js +516 -0
  120. package/dist/hooks/whatsapp-commands.js.map +7 -0
  121. package/dist/hooks/whatsapp-scheduler.js +317 -0
  122. package/dist/hooks/whatsapp-scheduler.js.map +7 -0
  123. package/dist/hooks/whatsapp-sync.js +409 -0
  124. package/dist/hooks/whatsapp-sync.js.map +7 -0
  125. package/dist/index.js +1 -1
  126. package/dist/index.js.map +1 -1
  127. package/dist/integrations/mcp/handlers/context-handlers.js.map +1 -1
  128. package/dist/integrations/mcp/handlers/discovery-handlers.js.map +1 -1
  129. package/dist/integrations/mcp/server.js +1 -1
  130. package/dist/integrations/mcp/server.js.map +1 -1
  131. package/dist/integrations/ralph/bridge/ralph-stackmemory-bridge.js +1 -1
  132. package/dist/integrations/ralph/bridge/ralph-stackmemory-bridge.js.map +1 -1
  133. package/dist/integrations/ralph/context/stackmemory-context-loader.js +1 -1
  134. package/dist/integrations/ralph/context/stackmemory-context-loader.js.map +1 -1
  135. package/dist/integrations/ralph/learning/pattern-learner.js +1 -1
  136. package/dist/integrations/ralph/learning/pattern-learner.js.map +1 -1
  137. package/dist/integrations/ralph/orchestration/multi-loop-orchestrator.js +1 -1
  138. package/dist/integrations/ralph/orchestration/multi-loop-orchestrator.js.map +1 -1
  139. package/dist/integrations/ralph/swarm/swarm-coordinator.js +1 -1
  140. package/dist/integrations/ralph/swarm/swarm-coordinator.js.map +1 -1
  141. package/dist/integrations/ralph/visualization/ralph-debugger.js +1 -1
  142. package/dist/integrations/ralph/visualization/ralph-debugger.js.map +1 -1
  143. package/dist/mcp/stackmemory-mcp-server.js +1 -1
  144. package/dist/mcp/stackmemory-mcp-server.js.map +1 -1
  145. package/dist/skills/claude-skills.js.map +1 -1
  146. package/dist/skills/recursive-agent-orchestrator.js.map +1 -1
  147. package/dist/skills/unified-rlm-orchestrator.js.map +1 -1
  148. package/package.json +2 -3
package/dist/core/retrieval/context-retriever.js.map
@@ -1,7 +1,7 @@
  {
  "version": 3,
  "sources": ["../../../src/core/retrieval/context-retriever.ts"],
- "sourcesContent": ["/**\n * LLM-driven Context Retrieval System\n * Intelligently retrieves relevant context using ParadeDB search capabilities\n */\n\nimport {\n DatabaseAdapter,\n SearchOptions,\n} from '../database/database-adapter.js';\nimport { Frame } from '../context/frame-manager.js';\nimport { logger } from '../monitoring/logger.js';\n\nexport interface ContextQuery {\n text: string;\n type?: 'semantic' | 'keyword' | 'hybrid';\n maxResults?: number;\n timeRange?: {\n start?: Date;\n end?: Date;\n };\n frameTypes?: string[];\n scoreThreshold?: number;\n includeDigests?: boolean;\n}\n\nexport interface RetrievedContext {\n frame: Frame;\n score: number;\n relevanceReason: string;\n retrievalMethod: 'bm25' | 'vector' | 'hybrid';\n matchedFields: string[];\n}\n\nexport interface ContextRetrievalResult {\n contexts: RetrievedContext[];\n totalMatches: number;\n retrievalTimeMs: number;\n strategy: string;\n queryAnalysis: {\n intent: string;\n concepts: string[];\n complexity: 'simple' | 'moderate' | 'complex';\n };\n}\n\nexport interface RetrievalStrategy {\n name: string;\n searchType: 'text' | 'vector' | 'hybrid';\n weights?: { text: number; vector: number };\n boost?: Record<string, number>;\n fallbackStrategy?: string;\n}\n\nexport class ContextRetriever {\n private readonly adapter: DatabaseAdapter;\n private readonly strategies: Map<string, RetrievalStrategy> = new Map();\n private queryCache = new Map<string, ContextRetrievalResult>();\n private cacheMaxSize = 100;\n private cacheExpiryMs = 300000; // 5 minutes\n\n constructor(adapter: DatabaseAdapter) {\n this.adapter = adapter;\n this.initializeStrategies();\n }\n\n private initializeStrategies(): void {\n // Keyword-based search for specific terms\n this.strategies.set('keyword', {\n name: 'Keyword Search',\n searchType: 'text',\n boost: {\n name: 2.0,\n digest_text: 1.5,\n inputs: 1.2,\n outputs: 1.2,\n },\n fallbackStrategy: 'semantic',\n });\n\n // Semantic search using vector embeddings\n this.strategies.set('semantic', {\n name: 'Semantic Search',\n searchType: 'vector',\n fallbackStrategy: 'hybrid',\n });\n\n // Hybrid approach combining text and vector search\n this.strategies.set('hybrid', {\n name: 'Hybrid Search',\n searchType: 'hybrid',\n weights: { text: 0.6, vector: 0.4 },\n boost: {\n name: 2.0,\n digest_text: 1.5,\n },\n fallbackStrategy: 'keyword',\n });\n\n // Recent activity search\n this.strategies.set('recent', {\n name: 'Recent Activity',\n searchType: 'text',\n boost: {\n created_at: 3.0,\n closed_at: 2.0,\n },\n fallbackStrategy: 'hybrid',\n });\n\n // Error and debugging context\n this.strategies.set('debug', {\n name: 'Debug Context',\n searchType: 'hybrid',\n weights: { text: 0.8, vector: 0.2 },\n boost: {\n type: 2.5, // Boost error frames\n digest_text: 2.0,\n outputs: 1.8,\n },\n fallbackStrategy: 'keyword',\n });\n }\n\n async retrieveContext(query: ContextQuery): Promise<ContextRetrievalResult> {\n const startTime = Date.now();\n\n // Handle empty query gracefully\n if (!query.text || query.text.trim().length === 0) {\n logger.debug('Empty query provided, returning empty result');\n return {\n contexts: [],\n totalMatches: 0,\n retrievalTimeMs: Date.now() - startTime,\n strategy: 'empty_query',\n queryAnalysis: {\n intent: 'general',\n concepts: [],\n complexity: 'simple',\n },\n };\n }\n\n const cacheKey = this.generateCacheKey(query);\n\n // Check cache first\n const cached = this.getCachedResult(cacheKey);\n if (cached) {\n logger.debug('Context retrieval cache hit');\n return cached;\n 
}\n\n try {\n logger.info('Starting LLM-driven context retrieval', {\n query: query.text,\n });\n\n // Analyze query to determine best strategy\n const queryAnalysis = await this.analyzeQuery(query);\n const strategy = this.selectStrategy(queryAnalysis, query);\n\n logger.debug('Selected retrieval strategy', {\n strategy: strategy.name,\n analysis: queryAnalysis,\n });\n\n // Execute retrieval with selected strategy\n const contexts = await this.executeRetrieval(\n query,\n strategy,\n queryAnalysis\n );\n\n // Post-process and rank results\n const rankedContexts = await this.rankAndFilter(\n contexts,\n query,\n queryAnalysis\n );\n\n const result: ContextRetrievalResult = {\n contexts: rankedContexts,\n totalMatches: contexts.length,\n retrievalTimeMs: Date.now() - startTime,\n strategy: strategy.name,\n queryAnalysis,\n };\n\n // Cache result\n this.cacheResult(cacheKey, result);\n\n logger.info('Context retrieval completed', {\n resultsCount: rankedContexts.length,\n timeMs: result.retrievalTimeMs,\n strategy: strategy.name,\n });\n\n return result;\n } catch (error: unknown) {\n logger.error('Context retrieval failed:', error);\n\n // Return fallback empty result\n return {\n contexts: [],\n totalMatches: 0,\n retrievalTimeMs: Date.now() - startTime,\n strategy: 'fallback',\n queryAnalysis: {\n intent: 'unknown',\n concepts: [],\n complexity: 'simple',\n },\n };\n }\n }\n\n private async analyzeQuery(query: ContextQuery): Promise<{\n intent: string;\n concepts: string[];\n complexity: 'simple' | 'moderate' | 'complex';\n }> {\n const text = query.text.toLowerCase().trim();\n const words = text.split(/\\s+/);\n\n // Determine intent based on keywords\n let intent = 'general';\n if (\n this.containsKeywords(text, [\n 'error',\n 'exception',\n 'fail',\n 'bug',\n 'issue',\n 'problem',\n 'debug',\n ])\n ) {\n intent = 'debug';\n } else if (\n this.containsKeywords(text, ['how', 'what', 'why', 'when', 'where'])\n ) {\n intent = 'explanation';\n } else if (\n this.containsKeywords(text, [\n 'implement',\n 'create',\n 'build',\n 'add',\n 'develop',\n ])\n ) {\n intent = 'implementation';\n } else if (\n this.containsKeywords(text, [\n 'recent',\n 'latest',\n 'last',\n 'current',\n 'happened',\n ])\n ) {\n intent = 'recent_activity';\n }\n\n // Extract concepts (simplified - in production would use NLP)\n const concepts = this.extractConcepts(text);\n\n // Determine complexity\n let complexity: 'simple' | 'moderate' | 'complex' = 'simple';\n if (words.length > 10 || concepts.length > 5) {\n complexity = 'complex';\n } else if (words.length > 5 || concepts.length > 2) {\n complexity = 'moderate';\n }\n\n return { intent, concepts, complexity };\n }\n\n private containsKeywords(text: string, keywords: string[]): boolean {\n return keywords.some((keyword) =>\n text.toLowerCase().includes(keyword.toLowerCase())\n );\n }\n\n private extractConcepts(text: string): string[] {\n // Simplified concept extraction - in production would use NLP/embeddings\n const technicalTerms = [\n 'database',\n 'sql',\n 'query',\n 'index',\n 'migration',\n 'adapter',\n 'frame',\n 'event',\n 'anchor',\n 'digest',\n 'context',\n 'search',\n 'vector',\n 'embedding',\n 'similarity',\n 'score',\n 'rank',\n 'performance',\n 'optimization',\n 'cache',\n 'pool',\n 'connection',\n 'error',\n 'exception',\n 'debug',\n 'trace',\n 'log',\n 'monitor',\n ];\n\n const concepts: string[] = [];\n const words = text.split(/\\W+/).map((w) => w.toLowerCase());\n\n for (const term of technicalTerms) {\n if (words.includes(term)) {\n 
concepts.push(term);\n }\n }\n\n // Add bigrams for common technical phrases\n const bigrams = this.extractBigrams(words);\n const technicalBigrams = [\n 'database adapter',\n 'query router',\n 'connection pool',\n 'vector search',\n ];\n\n for (const bigram of bigrams) {\n if (technicalBigrams.includes(bigram)) {\n concepts.push(bigram);\n }\n }\n\n return [...new Set(concepts)]; // Remove duplicates\n }\n\n private extractBigrams(words: string[]): string[] {\n const bigrams: string[] = [];\n for (let i = 0; i < words.length - 1; i++) {\n bigrams.push(`${words[i]} ${words[i + 1]}`);\n }\n return bigrams;\n }\n\n private selectStrategy(\n analysis: { intent: string; complexity: string },\n query: ContextQuery\n ): RetrievalStrategy {\n // Override with explicit query type\n if (query.type) {\n return (\n this.strategies.get(\n query.type === 'keyword'\n ? 'keyword'\n : query.type === 'semantic'\n ? 'semantic'\n : 'hybrid'\n ) || this.strategies.get('hybrid')!\n );\n }\n\n // Select based on intent and complexity\n switch (analysis.intent) {\n case 'debug':\n return this.strategies.get('debug')!;\n case 'recent_activity':\n return this.strategies.get('recent')!;\n case 'explanation':\n return analysis.complexity === 'simple'\n ? this.strategies.get('keyword')!\n : this.strategies.get('semantic')!;\n case 'implementation':\n return this.strategies.get('hybrid')!;\n default:\n return analysis.complexity === 'complex'\n ? this.strategies.get('semantic')!\n : this.strategies.get('keyword')!;\n }\n }\n\n private async executeRetrieval(\n query: ContextQuery,\n strategy: RetrievalStrategy,\n analysis: { intent: string; concepts: string[] }\n ): Promise<RetrievedContext[]> {\n const searchOptions: SearchOptions = {\n query: query.text,\n searchType: strategy.searchType,\n limit: query.maxResults || 20,\n scoreThreshold: query.scoreThreshold || 0.1,\n boost: strategy.boost,\n };\n\n // Add field filtering based on query type\n if (query.frameTypes) {\n searchOptions.fields = ['type', 'name', 'digest_text'];\n }\n\n let rawResults: Array<Frame & { score: number }> = [];\n\n try {\n if (strategy.searchType === 'hybrid' && strategy.weights) {\n // Use hybrid search with embeddings (placeholder - would need actual embeddings)\n const embedding = await this.generateEmbedding(query.text);\n rawResults = await this.adapter.searchHybrid(\n query.text,\n embedding,\n strategy.weights\n );\n } else {\n // Use text or vector search\n rawResults = await this.adapter.search(searchOptions);\n }\n } catch (error: unknown) {\n logger.warn(`Strategy ${strategy.name} failed, trying fallback:`, error);\n\n if (strategy.fallbackStrategy) {\n const fallbackStrategy = this.strategies.get(strategy.fallbackStrategy);\n if (fallbackStrategy) {\n return this.executeRetrieval(query, fallbackStrategy, analysis);\n }\n }\n\n // Return empty results instead of throwing to prevent cascading failures\n return [];\n }\n\n // Convert to RetrievedContext objects\n return rawResults.map((result) => ({\n frame: result,\n score: result.score,\n relevanceReason: this.generateRelevanceReason(result, query, analysis),\n retrievalMethod: strategy.searchType as 'bm25' | 'vector' | 'hybrid',\n matchedFields: this.identifyMatchedFields(result, query),\n }));\n }\n\n private async generateEmbedding(text: string): Promise<number[]> {\n // Placeholder - in production would use actual embedding service\n // For now, return a mock embedding\n const hash = this.simpleHash(text);\n return Array.from(\n { length: 384 },\n (_, i) => ((hash + i) % 100) 
/ 100 - 0.5\n );\n }\n\n private simpleHash(str: string): number {\n let hash = 0;\n for (let i = 0; i < str.length; i++) {\n const char = str.charCodeAt(i);\n hash = (hash << 5) - hash + char;\n hash = hash & hash; // Convert to 32-bit integer\n }\n return Math.abs(hash);\n }\n\n private generateRelevanceReason(\n frame: Frame,\n query: ContextQuery,\n analysis: { intent: string; concepts: string[] }\n ): string {\n const reasons: string[] = [];\n\n // Check for direct matches\n if (frame.name.toLowerCase().includes(query.text.toLowerCase())) {\n reasons.push('Frame name matches query');\n }\n\n if (frame.digest_text?.toLowerCase().includes(query.text.toLowerCase())) {\n reasons.push('Content contains query terms');\n }\n\n // Check for concept matches\n for (const concept of analysis.concepts) {\n if (\n frame.digest_text?.toLowerCase().includes(concept.toLowerCase()) ||\n frame.name.toLowerCase().includes(concept.toLowerCase())\n ) {\n reasons.push(`Related to ${concept}`);\n }\n }\n\n // Frame type relevance\n if (analysis.intent === 'debug' && frame.type.includes('error')) {\n reasons.push('Error context for debugging');\n }\n\n return reasons.length > 0\n ? reasons.join('; ')\n : 'General semantic similarity';\n }\n\n private identifyMatchedFields(frame: Frame, query: ContextQuery): string[] {\n const matched: string[] = [];\n const queryLower = query.text.toLowerCase();\n\n if (frame.name.toLowerCase().includes(queryLower)) {\n matched.push('name');\n }\n\n if (frame.digest_text?.toLowerCase().includes(queryLower)) {\n matched.push('digest_text');\n }\n\n if (frame.type.toLowerCase().includes(queryLower)) {\n matched.push('type');\n }\n\n return matched;\n }\n\n private async rankAndFilter(\n contexts: RetrievedContext[],\n query: ContextQuery,\n analysis: { intent: string; complexity: string }\n ): Promise<RetrievedContext[]> {\n // Apply additional filtering\n let filtered = contexts;\n\n // Filter by time range\n if (query.timeRange) {\n filtered = filtered.filter((ctx) => {\n const frameTime = new Date(ctx.frame.created_at);\n const start = query.timeRange?.start;\n const end = query.timeRange?.end;\n\n return (!start || frameTime >= start) && (!end || frameTime <= end);\n });\n }\n\n // Filter by frame types\n if (query.frameTypes) {\n filtered = filtered.filter((ctx) =>\n query.frameTypes!.includes(ctx.frame.type)\n );\n }\n\n // Apply score threshold\n if (query.scoreThreshold) {\n filtered = filtered.filter((ctx) => ctx.score >= query.scoreThreshold!);\n }\n\n // Enhanced ranking based on multiple factors\n const ranked = filtered.map((ctx) => ({\n ...ctx,\n score: this.calculateEnhancedScore(ctx, query, analysis),\n }));\n\n // Sort by enhanced score\n ranked.sort((a, b) => b.score - a.score);\n\n // Limit results\n const maxResults = query.maxResults || 20;\n return ranked.slice(0, maxResults);\n }\n\n private calculateEnhancedScore(\n context: RetrievedContext,\n query: ContextQuery,\n analysis: { intent: string; concepts: string[] }\n ): number {\n let score = context.score;\n\n // Boost recent frames\n const ageHours = (Date.now() - context.frame.created_at) / (1000 * 60 * 60);\n if (ageHours < 24) {\n score *= 1.2; // 20% boost for frames from last 24 hours\n } else if (ageHours < 168) {\n // 1 week\n score *= 1.1; // 10% boost for frames from last week\n }\n\n // Boost based on frame completeness\n if (context.frame.closed_at) {\n score *= 1.1; // Completed frames are more valuable\n }\n\n // Boost based on intent matching\n if (analysis.intent === 'debug' && 
context.frame.type.includes('error')) {\n score *= 1.5;\n }\n\n // Boost based on matched fields\n if (context.matchedFields.includes('name')) {\n score *= 1.3; // Name matches are highly relevant\n }\n\n if (context.matchedFields.length > 1) {\n score *= 1.1; // Multiple field matches\n }\n\n // Penalize very old frames for recent queries\n if (analysis.intent === 'recent_activity' && ageHours > 168) {\n score *= 0.5;\n }\n\n return score;\n }\n\n private generateCacheKey(query: ContextQuery): string {\n return JSON.stringify({\n text: query.text,\n type: query.type,\n maxResults: query.maxResults,\n frameTypes: query.frameTypes,\n scoreThreshold: query.scoreThreshold,\n });\n }\n\n private getCachedResult(cacheKey: string): ContextRetrievalResult | null {\n const entry = this.queryCache.get(cacheKey);\n if (!entry) return null;\n\n // Check expiry (simplified - would include timestamp in real implementation)\n return entry;\n }\n\n private cacheResult(cacheKey: string, result: ContextRetrievalResult): void {\n // Implement LRU eviction if cache is full\n if (this.queryCache.size >= this.cacheMaxSize) {\n const firstKey = this.queryCache.keys().next().value;\n this.queryCache.delete(firstKey);\n }\n\n this.queryCache.set(cacheKey, result);\n }\n\n // Utility methods for integration\n async findSimilarFrames(\n frameId: string,\n limit = 10\n ): Promise<RetrievedContext[]> {\n const frame = await this.adapter.getFrame(frameId);\n if (!frame) {\n throw new Error(`Frame not found: ${frameId}`);\n }\n\n const query: ContextQuery = {\n text: frame.digest_text || frame.name,\n type: 'semantic',\n maxResults: limit,\n scoreThreshold: 0.3,\n };\n\n const result = await this.retrieveContext(query);\n\n // Filter out the original frame\n return result.contexts.filter((ctx) => ctx.frame.frame_id !== frameId);\n }\n\n async findContextForError(\n errorMessage: string,\n stackTrace?: string\n ): Promise<RetrievedContext[]> {\n const query: ContextQuery = {\n text: `${errorMessage} ${stackTrace || ''}`.trim(),\n type: 'hybrid',\n maxResults: 15,\n frameTypes: ['error', 'debug', 'function'],\n scoreThreshold: 0.2,\n };\n\n const result = await this.retrieveContext(query);\n return result.contexts;\n }\n\n async getRecentContext(\n hours = 24,\n frameTypes?: string[]\n ): Promise<RetrievedContext[]> {\n const query: ContextQuery = {\n text: 'recent activity context',\n type: 'keyword',\n maxResults: 50,\n timeRange: {\n start: new Date(Date.now() - hours * 60 * 60 * 1000),\n },\n frameTypes,\n scoreThreshold: 0.1,\n };\n\n const result = await this.retrieveContext(query);\n return result.contexts;\n }\n\n // Analytics and insights\n getRetrievalStats() {\n return {\n cacheSize: this.queryCache.size,\n strategiesCount: this.strategies.size,\n availableStrategies: Array.from(this.strategies.keys()),\n };\n }\n\n clearCache(): void {\n this.queryCache.clear();\n logger.info('Context retrieval cache cleared');\n }\n}\n"],
+ "sourcesContent": ["/**\n * LLM-driven Context Retrieval System\n * Intelligently retrieves relevant context using ParadeDB search capabilities\n */\n\nimport {\n DatabaseAdapter,\n SearchOptions,\n} from '../database/database-adapter.js';\nimport { Frame } from '../context/index.js';\nimport { logger } from '../monitoring/logger.js';\n\nexport interface ContextQuery {\n text: string;\n type?: 'semantic' | 'keyword' | 'hybrid';\n maxResults?: number;\n timeRange?: {\n start?: Date;\n end?: Date;\n };\n frameTypes?: string[];\n scoreThreshold?: number;\n includeDigests?: boolean;\n}\n\nexport interface RetrievedContext {\n frame: Frame;\n score: number;\n relevanceReason: string;\n retrievalMethod: 'bm25' | 'vector' | 'hybrid';\n matchedFields: string[];\n}\n\nexport interface ContextRetrievalResult {\n contexts: RetrievedContext[];\n totalMatches: number;\n retrievalTimeMs: number;\n strategy: string;\n queryAnalysis: {\n intent: string;\n concepts: string[];\n complexity: 'simple' | 'moderate' | 'complex';\n };\n}\n\nexport interface RetrievalStrategy {\n name: string;\n searchType: 'text' | 'vector' | 'hybrid';\n weights?: { text: number; vector: number };\n boost?: Record<string, number>;\n fallbackStrategy?: string;\n}\n\nexport class ContextRetriever {\n private readonly adapter: DatabaseAdapter;\n private readonly strategies: Map<string, RetrievalStrategy> = new Map();\n private queryCache = new Map<string, ContextRetrievalResult>();\n private cacheMaxSize = 100;\n private cacheExpiryMs = 300000; // 5 minutes\n\n constructor(adapter: DatabaseAdapter) {\n this.adapter = adapter;\n this.initializeStrategies();\n }\n\n private initializeStrategies(): void {\n // Keyword-based search for specific terms\n this.strategies.set('keyword', {\n name: 'Keyword Search',\n searchType: 'text',\n boost: {\n name: 2.0,\n digest_text: 1.5,\n inputs: 1.2,\n outputs: 1.2,\n },\n fallbackStrategy: 'semantic',\n });\n\n // Semantic search using vector embeddings\n this.strategies.set('semantic', {\n name: 'Semantic Search',\n searchType: 'vector',\n fallbackStrategy: 'hybrid',\n });\n\n // Hybrid approach combining text and vector search\n this.strategies.set('hybrid', {\n name: 'Hybrid Search',\n searchType: 'hybrid',\n weights: { text: 0.6, vector: 0.4 },\n boost: {\n name: 2.0,\n digest_text: 1.5,\n },\n fallbackStrategy: 'keyword',\n });\n\n // Recent activity search\n this.strategies.set('recent', {\n name: 'Recent Activity',\n searchType: 'text',\n boost: {\n created_at: 3.0,\n closed_at: 2.0,\n },\n fallbackStrategy: 'hybrid',\n });\n\n // Error and debugging context\n this.strategies.set('debug', {\n name: 'Debug Context',\n searchType: 'hybrid',\n weights: { text: 0.8, vector: 0.2 },\n boost: {\n type: 2.5, // Boost error frames\n digest_text: 2.0,\n outputs: 1.8,\n },\n fallbackStrategy: 'keyword',\n });\n }\n\n async retrieveContext(query: ContextQuery): Promise<ContextRetrievalResult> {\n const startTime = Date.now();\n\n // Handle empty query gracefully\n if (!query.text || query.text.trim().length === 0) {\n logger.debug('Empty query provided, returning empty result');\n return {\n contexts: [],\n totalMatches: 0,\n retrievalTimeMs: Date.now() - startTime,\n strategy: 'empty_query',\n queryAnalysis: {\n intent: 'general',\n concepts: [],\n complexity: 'simple',\n },\n };\n }\n\n const cacheKey = this.generateCacheKey(query);\n\n // Check cache first\n const cached = this.getCachedResult(cacheKey);\n if (cached) {\n logger.debug('Context retrieval cache hit');\n return cached;\n }\n\n try 
{\n logger.info('Starting LLM-driven context retrieval', {\n query: query.text,\n });\n\n // Analyze query to determine best strategy\n const queryAnalysis = await this.analyzeQuery(query);\n const strategy = this.selectStrategy(queryAnalysis, query);\n\n logger.debug('Selected retrieval strategy', {\n strategy: strategy.name,\n analysis: queryAnalysis,\n });\n\n // Execute retrieval with selected strategy\n const contexts = await this.executeRetrieval(\n query,\n strategy,\n queryAnalysis\n );\n\n // Post-process and rank results\n const rankedContexts = await this.rankAndFilter(\n contexts,\n query,\n queryAnalysis\n );\n\n const result: ContextRetrievalResult = {\n contexts: rankedContexts,\n totalMatches: contexts.length,\n retrievalTimeMs: Date.now() - startTime,\n strategy: strategy.name,\n queryAnalysis,\n };\n\n // Cache result\n this.cacheResult(cacheKey, result);\n\n logger.info('Context retrieval completed', {\n resultsCount: rankedContexts.length,\n timeMs: result.retrievalTimeMs,\n strategy: strategy.name,\n });\n\n return result;\n } catch (error: unknown) {\n logger.error('Context retrieval failed:', error);\n\n // Return fallback empty result\n return {\n contexts: [],\n totalMatches: 0,\n retrievalTimeMs: Date.now() - startTime,\n strategy: 'fallback',\n queryAnalysis: {\n intent: 'unknown',\n concepts: [],\n complexity: 'simple',\n },\n };\n }\n }\n\n private async analyzeQuery(query: ContextQuery): Promise<{\n intent: string;\n concepts: string[];\n complexity: 'simple' | 'moderate' | 'complex';\n }> {\n const text = query.text.toLowerCase().trim();\n const words = text.split(/\\s+/);\n\n // Determine intent based on keywords\n let intent = 'general';\n if (\n this.containsKeywords(text, [\n 'error',\n 'exception',\n 'fail',\n 'bug',\n 'issue',\n 'problem',\n 'debug',\n ])\n ) {\n intent = 'debug';\n } else if (\n this.containsKeywords(text, ['how', 'what', 'why', 'when', 'where'])\n ) {\n intent = 'explanation';\n } else if (\n this.containsKeywords(text, [\n 'implement',\n 'create',\n 'build',\n 'add',\n 'develop',\n ])\n ) {\n intent = 'implementation';\n } else if (\n this.containsKeywords(text, [\n 'recent',\n 'latest',\n 'last',\n 'current',\n 'happened',\n ])\n ) {\n intent = 'recent_activity';\n }\n\n // Extract concepts (simplified - in production would use NLP)\n const concepts = this.extractConcepts(text);\n\n // Determine complexity\n let complexity: 'simple' | 'moderate' | 'complex' = 'simple';\n if (words.length > 10 || concepts.length > 5) {\n complexity = 'complex';\n } else if (words.length > 5 || concepts.length > 2) {\n complexity = 'moderate';\n }\n\n return { intent, concepts, complexity };\n }\n\n private containsKeywords(text: string, keywords: string[]): boolean {\n return keywords.some((keyword) =>\n text.toLowerCase().includes(keyword.toLowerCase())\n );\n }\n\n private extractConcepts(text: string): string[] {\n // Simplified concept extraction - in production would use NLP/embeddings\n const technicalTerms = [\n 'database',\n 'sql',\n 'query',\n 'index',\n 'migration',\n 'adapter',\n 'frame',\n 'event',\n 'anchor',\n 'digest',\n 'context',\n 'search',\n 'vector',\n 'embedding',\n 'similarity',\n 'score',\n 'rank',\n 'performance',\n 'optimization',\n 'cache',\n 'pool',\n 'connection',\n 'error',\n 'exception',\n 'debug',\n 'trace',\n 'log',\n 'monitor',\n ];\n\n const concepts: string[] = [];\n const words = text.split(/\\W+/).map((w) => w.toLowerCase());\n\n for (const term of technicalTerms) {\n if (words.includes(term)) {\n 
concepts.push(term);\n }\n }\n\n // Add bigrams for common technical phrases\n const bigrams = this.extractBigrams(words);\n const technicalBigrams = [\n 'database adapter',\n 'query router',\n 'connection pool',\n 'vector search',\n ];\n\n for (const bigram of bigrams) {\n if (technicalBigrams.includes(bigram)) {\n concepts.push(bigram);\n }\n }\n\n return [...new Set(concepts)]; // Remove duplicates\n }\n\n private extractBigrams(words: string[]): string[] {\n const bigrams: string[] = [];\n for (let i = 0; i < words.length - 1; i++) {\n bigrams.push(`${words[i]} ${words[i + 1]}`);\n }\n return bigrams;\n }\n\n private selectStrategy(\n analysis: { intent: string; complexity: string },\n query: ContextQuery\n ): RetrievalStrategy {\n // Override with explicit query type\n if (query.type) {\n return (\n this.strategies.get(\n query.type === 'keyword'\n ? 'keyword'\n : query.type === 'semantic'\n ? 'semantic'\n : 'hybrid'\n ) || this.strategies.get('hybrid')!\n );\n }\n\n // Select based on intent and complexity\n switch (analysis.intent) {\n case 'debug':\n return this.strategies.get('debug')!;\n case 'recent_activity':\n return this.strategies.get('recent')!;\n case 'explanation':\n return analysis.complexity === 'simple'\n ? this.strategies.get('keyword')!\n : this.strategies.get('semantic')!;\n case 'implementation':\n return this.strategies.get('hybrid')!;\n default:\n return analysis.complexity === 'complex'\n ? this.strategies.get('semantic')!\n : this.strategies.get('keyword')!;\n }\n }\n\n private async executeRetrieval(\n query: ContextQuery,\n strategy: RetrievalStrategy,\n analysis: { intent: string; concepts: string[] }\n ): Promise<RetrievedContext[]> {\n const searchOptions: SearchOptions = {\n query: query.text,\n searchType: strategy.searchType,\n limit: query.maxResults || 20,\n scoreThreshold: query.scoreThreshold || 0.1,\n boost: strategy.boost,\n };\n\n // Add field filtering based on query type\n if (query.frameTypes) {\n searchOptions.fields = ['type', 'name', 'digest_text'];\n }\n\n let rawResults: Array<Frame & { score: number }> = [];\n\n try {\n if (strategy.searchType === 'hybrid' && strategy.weights) {\n // Use hybrid search with embeddings (placeholder - would need actual embeddings)\n const embedding = await this.generateEmbedding(query.text);\n rawResults = await this.adapter.searchHybrid(\n query.text,\n embedding,\n strategy.weights\n );\n } else {\n // Use text or vector search\n rawResults = await this.adapter.search(searchOptions);\n }\n } catch (error: unknown) {\n logger.warn(`Strategy ${strategy.name} failed, trying fallback:`, error);\n\n if (strategy.fallbackStrategy) {\n const fallbackStrategy = this.strategies.get(strategy.fallbackStrategy);\n if (fallbackStrategy) {\n return this.executeRetrieval(query, fallbackStrategy, analysis);\n }\n }\n\n // Return empty results instead of throwing to prevent cascading failures\n return [];\n }\n\n // Convert to RetrievedContext objects\n return rawResults.map((result) => ({\n frame: result,\n score: result.score,\n relevanceReason: this.generateRelevanceReason(result, query, analysis),\n retrievalMethod: strategy.searchType as 'bm25' | 'vector' | 'hybrid',\n matchedFields: this.identifyMatchedFields(result, query),\n }));\n }\n\n private async generateEmbedding(text: string): Promise<number[]> {\n // Placeholder - in production would use actual embedding service\n // For now, return a mock embedding\n const hash = this.simpleHash(text);\n return Array.from(\n { length: 384 },\n (_, i) => ((hash + i) % 100) 
/ 100 - 0.5\n );\n }\n\n private simpleHash(str: string): number {\n let hash = 0;\n for (let i = 0; i < str.length; i++) {\n const char = str.charCodeAt(i);\n hash = (hash << 5) - hash + char;\n hash = hash & hash; // Convert to 32-bit integer\n }\n return Math.abs(hash);\n }\n\n private generateRelevanceReason(\n frame: Frame,\n query: ContextQuery,\n analysis: { intent: string; concepts: string[] }\n ): string {\n const reasons: string[] = [];\n\n // Check for direct matches\n if (frame.name.toLowerCase().includes(query.text.toLowerCase())) {\n reasons.push('Frame name matches query');\n }\n\n if (frame.digest_text?.toLowerCase().includes(query.text.toLowerCase())) {\n reasons.push('Content contains query terms');\n }\n\n // Check for concept matches\n for (const concept of analysis.concepts) {\n if (\n frame.digest_text?.toLowerCase().includes(concept.toLowerCase()) ||\n frame.name.toLowerCase().includes(concept.toLowerCase())\n ) {\n reasons.push(`Related to ${concept}`);\n }\n }\n\n // Frame type relevance\n if (analysis.intent === 'debug' && frame.type.includes('error')) {\n reasons.push('Error context for debugging');\n }\n\n return reasons.length > 0\n ? reasons.join('; ')\n : 'General semantic similarity';\n }\n\n private identifyMatchedFields(frame: Frame, query: ContextQuery): string[] {\n const matched: string[] = [];\n const queryLower = query.text.toLowerCase();\n\n if (frame.name.toLowerCase().includes(queryLower)) {\n matched.push('name');\n }\n\n if (frame.digest_text?.toLowerCase().includes(queryLower)) {\n matched.push('digest_text');\n }\n\n if (frame.type.toLowerCase().includes(queryLower)) {\n matched.push('type');\n }\n\n return matched;\n }\n\n private async rankAndFilter(\n contexts: RetrievedContext[],\n query: ContextQuery,\n analysis: { intent: string; complexity: string }\n ): Promise<RetrievedContext[]> {\n // Apply additional filtering\n let filtered = contexts;\n\n // Filter by time range\n if (query.timeRange) {\n filtered = filtered.filter((ctx) => {\n const frameTime = new Date(ctx.frame.created_at);\n const start = query.timeRange?.start;\n const end = query.timeRange?.end;\n\n return (!start || frameTime >= start) && (!end || frameTime <= end);\n });\n }\n\n // Filter by frame types\n if (query.frameTypes) {\n filtered = filtered.filter((ctx) =>\n query.frameTypes!.includes(ctx.frame.type)\n );\n }\n\n // Apply score threshold\n if (query.scoreThreshold) {\n filtered = filtered.filter((ctx) => ctx.score >= query.scoreThreshold!);\n }\n\n // Enhanced ranking based on multiple factors\n const ranked = filtered.map((ctx) => ({\n ...ctx,\n score: this.calculateEnhancedScore(ctx, query, analysis),\n }));\n\n // Sort by enhanced score\n ranked.sort((a, b) => b.score - a.score);\n\n // Limit results\n const maxResults = query.maxResults || 20;\n return ranked.slice(0, maxResults);\n }\n\n private calculateEnhancedScore(\n context: RetrievedContext,\n query: ContextQuery,\n analysis: { intent: string; concepts: string[] }\n ): number {\n let score = context.score;\n\n // Boost recent frames\n const ageHours = (Date.now() - context.frame.created_at) / (1000 * 60 * 60);\n if (ageHours < 24) {\n score *= 1.2; // 20% boost for frames from last 24 hours\n } else if (ageHours < 168) {\n // 1 week\n score *= 1.1; // 10% boost for frames from last week\n }\n\n // Boost based on frame completeness\n if (context.frame.closed_at) {\n score *= 1.1; // Completed frames are more valuable\n }\n\n // Boost based on intent matching\n if (analysis.intent === 'debug' && 
context.frame.type.includes('error')) {\n score *= 1.5;\n }\n\n // Boost based on matched fields\n if (context.matchedFields.includes('name')) {\n score *= 1.3; // Name matches are highly relevant\n }\n\n if (context.matchedFields.length > 1) {\n score *= 1.1; // Multiple field matches\n }\n\n // Penalize very old frames for recent queries\n if (analysis.intent === 'recent_activity' && ageHours > 168) {\n score *= 0.5;\n }\n\n return score;\n }\n\n private generateCacheKey(query: ContextQuery): string {\n return JSON.stringify({\n text: query.text,\n type: query.type,\n maxResults: query.maxResults,\n frameTypes: query.frameTypes,\n scoreThreshold: query.scoreThreshold,\n });\n }\n\n private getCachedResult(cacheKey: string): ContextRetrievalResult | null {\n const entry = this.queryCache.get(cacheKey);\n if (!entry) return null;\n\n // Check expiry (simplified - would include timestamp in real implementation)\n return entry;\n }\n\n private cacheResult(cacheKey: string, result: ContextRetrievalResult): void {\n // Implement LRU eviction if cache is full\n if (this.queryCache.size >= this.cacheMaxSize) {\n const firstKey = this.queryCache.keys().next().value;\n this.queryCache.delete(firstKey);\n }\n\n this.queryCache.set(cacheKey, result);\n }\n\n // Utility methods for integration\n async findSimilarFrames(\n frameId: string,\n limit = 10\n ): Promise<RetrievedContext[]> {\n const frame = await this.adapter.getFrame(frameId);\n if (!frame) {\n throw new Error(`Frame not found: ${frameId}`);\n }\n\n const query: ContextQuery = {\n text: frame.digest_text || frame.name,\n type: 'semantic',\n maxResults: limit,\n scoreThreshold: 0.3,\n };\n\n const result = await this.retrieveContext(query);\n\n // Filter out the original frame\n return result.contexts.filter((ctx) => ctx.frame.frame_id !== frameId);\n }\n\n async findContextForError(\n errorMessage: string,\n stackTrace?: string\n ): Promise<RetrievedContext[]> {\n const query: ContextQuery = {\n text: `${errorMessage} ${stackTrace || ''}`.trim(),\n type: 'hybrid',\n maxResults: 15,\n frameTypes: ['error', 'debug', 'function'],\n scoreThreshold: 0.2,\n };\n\n const result = await this.retrieveContext(query);\n return result.contexts;\n }\n\n async getRecentContext(\n hours = 24,\n frameTypes?: string[]\n ): Promise<RetrievedContext[]> {\n const query: ContextQuery = {\n text: 'recent activity context',\n type: 'keyword',\n maxResults: 50,\n timeRange: {\n start: new Date(Date.now() - hours * 60 * 60 * 1000),\n },\n frameTypes,\n scoreThreshold: 0.1,\n };\n\n const result = await this.retrieveContext(query);\n return result.contexts;\n }\n\n // Analytics and insights\n getRetrievalStats() {\n return {\n cacheSize: this.queryCache.size,\n strategiesCount: this.strategies.size,\n availableStrategies: Array.from(this.strategies.keys()),\n };\n }\n\n clearCache(): void {\n this.queryCache.clear();\n logger.info('Context retrieval cache cleared');\n }\n}\n"],
  "mappings": ";;;;AAUA,SAAS,cAAc;AA2ChB,MAAM,iBAAiB;AAAA,EACX;AAAA,EACA,aAA6C,oBAAI,IAAI;AAAA,EAC9D,aAAa,oBAAI,IAAoC;AAAA,EACrD,eAAe;AAAA,EACf,gBAAgB;AAAA;AAAA,EAExB,YAAY,SAA0B;AACpC,SAAK,UAAU;AACf,SAAK,qBAAqB;AAAA,EAC5B;AAAA,EAEQ,uBAA6B;AAEnC,SAAK,WAAW,IAAI,WAAW;AAAA,MAC7B,MAAM;AAAA,MACN,YAAY;AAAA,MACZ,OAAO;AAAA,QACL,MAAM;AAAA,QACN,aAAa;AAAA,QACb,QAAQ;AAAA,QACR,SAAS;AAAA,MACX;AAAA,MACA,kBAAkB;AAAA,IACpB,CAAC;AAGD,SAAK,WAAW,IAAI,YAAY;AAAA,MAC9B,MAAM;AAAA,MACN,YAAY;AAAA,MACZ,kBAAkB;AAAA,IACpB,CAAC;AAGD,SAAK,WAAW,IAAI,UAAU;AAAA,MAC5B,MAAM;AAAA,MACN,YAAY;AAAA,MACZ,SAAS,EAAE,MAAM,KAAK,QAAQ,IAAI;AAAA,MAClC,OAAO;AAAA,QACL,MAAM;AAAA,QACN,aAAa;AAAA,MACf;AAAA,MACA,kBAAkB;AAAA,IACpB,CAAC;AAGD,SAAK,WAAW,IAAI,UAAU;AAAA,MAC5B,MAAM;AAAA,MACN,YAAY;AAAA,MACZ,OAAO;AAAA,QACL,YAAY;AAAA,QACZ,WAAW;AAAA,MACb;AAAA,MACA,kBAAkB;AAAA,IACpB,CAAC;AAGD,SAAK,WAAW,IAAI,SAAS;AAAA,MAC3B,MAAM;AAAA,MACN,YAAY;AAAA,MACZ,SAAS,EAAE,MAAM,KAAK,QAAQ,IAAI;AAAA,MAClC,OAAO;AAAA,QACL,MAAM;AAAA;AAAA,QACN,aAAa;AAAA,QACb,SAAS;AAAA,MACX;AAAA,MACA,kBAAkB;AAAA,IACpB,CAAC;AAAA,EACH;AAAA,EAEA,MAAM,gBAAgB,OAAsD;AAC1E,UAAM,YAAY,KAAK,IAAI;AAG3B,QAAI,CAAC,MAAM,QAAQ,MAAM,KAAK,KAAK,EAAE,WAAW,GAAG;AACjD,aAAO,MAAM,8CAA8C;AAC3D,aAAO;AAAA,QACL,UAAU,CAAC;AAAA,QACX,cAAc;AAAA,QACd,iBAAiB,KAAK,IAAI,IAAI;AAAA,QAC9B,UAAU;AAAA,QACV,eAAe;AAAA,UACb,QAAQ;AAAA,UACR,UAAU,CAAC;AAAA,UACX,YAAY;AAAA,QACd;AAAA,MACF;AAAA,IACF;AAEA,UAAM,WAAW,KAAK,iBAAiB,KAAK;AAG5C,UAAM,SAAS,KAAK,gBAAgB,QAAQ;AAC5C,QAAI,QAAQ;AACV,aAAO,MAAM,6BAA6B;AAC1C,aAAO;AAAA,IACT;AAEA,QAAI;AACF,aAAO,KAAK,yCAAyC;AAAA,QACnD,OAAO,MAAM;AAAA,MACf,CAAC;AAGD,YAAM,gBAAgB,MAAM,KAAK,aAAa,KAAK;AACnD,YAAM,WAAW,KAAK,eAAe,eAAe,KAAK;AAEzD,aAAO,MAAM,+BAA+B;AAAA,QAC1C,UAAU,SAAS;AAAA,QACnB,UAAU;AAAA,MACZ,CAAC;AAGD,YAAM,WAAW,MAAM,KAAK;AAAA,QAC1B;AAAA,QACA;AAAA,QACA;AAAA,MACF;AAGA,YAAM,iBAAiB,MAAM,KAAK;AAAA,QAChC;AAAA,QACA;AAAA,QACA;AAAA,MACF;AAEA,YAAM,SAAiC;AAAA,QACrC,UAAU;AAAA,QACV,cAAc,SAAS;AAAA,QACvB,iBAAiB,KAAK,IAAI,IAAI;AAAA,QAC9B,UAAU,SAAS;AAAA,QACnB;AAAA,MACF;AAGA,WAAK,YAAY,UAAU,MAAM;AAEjC,aAAO,KAAK,+BAA+B;AAAA,QACzC,cAAc,eAAe;AAAA,QAC7B,QAAQ,OAAO;AAAA,QACf,UAAU,SAAS;AAAA,MACrB,CAAC;AAED,aAAO;AAAA,IACT,SAAS,OAAgB;AACvB,aAAO,MAAM,6BAA6B,KAAK;AAG/C,aAAO;AAAA,QACL,UAAU,CAAC;AAAA,QACX,cAAc;AAAA,QACd,iBAAiB,KAAK,IAAI,IAAI;AAAA,QAC9B,UAAU;AAAA,QACV,eAAe;AAAA,UACb,QAAQ;AAAA,UACR,UAAU,CAAC;AAAA,UACX,YAAY;AAAA,QACd;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAc,aAAa,OAIxB;AACD,UAAM,OAAO,MAAM,KAAK,YAAY,EAAE,KAAK;AAC3C,UAAM,QAAQ,KAAK,MAAM,KAAK;AAG9B,QAAI,SAAS;AACb,QACE,KAAK,iBAAiB,MAAM;AAAA,MAC1B;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF,CAAC,GACD;AACA,eAAS;AAAA,IACX,WACE,KAAK,iBAAiB,MAAM,CAAC,OAAO,QAAQ,OAAO,QAAQ,OAAO,CAAC,GACnE;AACA,eAAS;AAAA,IACX,WACE,KAAK,iBAAiB,MAAM;AAAA,MAC1B;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF,CAAC,GACD;AACA,eAAS;AAAA,IACX,WACE,KAAK,iBAAiB,MAAM;AAAA,MAC1B;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF,CAAC,GACD;AACA,eAAS;AAAA,IACX;AAGA,UAAM,WAAW,KAAK,gBAAgB,IAAI;AAG1C,QAAI,aAAgD;AACpD,QAAI,MAAM,SAAS,MAAM,SAAS,SAAS,GAAG;AAC5C,mBAAa;AAAA,IACf,WAAW,MAAM,SAAS,KAAK,SAAS,SAAS,GAAG;AAClD,mBAAa;AAAA,IACf;AAEA,WAAO,EAAE,QAAQ,UAAU,WAAW;AAAA,EACxC;AAAA,EAEQ,iBAAiB,MAAc,UAA6B;AAClE,WAAO,SAAS;AAAA,MAAK,CAAC,YACpB,KAAK,YAAY,EAAE,SAAS,QAAQ,YAAY,CAAC;AAAA,IACnD;AAAA,EACF;AAAA,EAEQ,gBAAgB,MAAwB;AAE9C,UAAM,iBAAiB;AAAA,MACrB;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MAC
A;AAAA,MACA;AAAA,IACF;AAEA,UAAM,WAAqB,CAAC;AAC5B,UAAM,QAAQ,KAAK,MAAM,KAAK,EAAE,IAAI,CAAC,MAAM,EAAE,YAAY,CAAC;AAE1D,eAAW,QAAQ,gBAAgB;AACjC,UAAI,MAAM,SAAS,IAAI,GAAG;AACxB,iBAAS,KAAK,IAAI;AAAA,MACpB;AAAA,IACF;AAGA,UAAM,UAAU,KAAK,eAAe,KAAK;AACzC,UAAM,mBAAmB;AAAA,MACvB;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAEA,eAAW,UAAU,SAAS;AAC5B,UAAI,iBAAiB,SAAS,MAAM,GAAG;AACrC,iBAAS,KAAK,MAAM;AAAA,MACtB;AAAA,IACF;AAEA,WAAO,CAAC,GAAG,IAAI,IAAI,QAAQ,CAAC;AAAA,EAC9B;AAAA,EAEQ,eAAe,OAA2B;AAChD,UAAM,UAAoB,CAAC;AAC3B,aAAS,IAAI,GAAG,IAAI,MAAM,SAAS,GAAG,KAAK;AACzC,cAAQ,KAAK,GAAG,MAAM,CAAC,CAAC,IAAI,MAAM,IAAI,CAAC,CAAC,EAAE;AAAA,IAC5C;AACA,WAAO;AAAA,EACT;AAAA,EAEQ,eACN,UACA,OACmB;AAEnB,QAAI,MAAM,MAAM;AACd,aACE,KAAK,WAAW;AAAA,QACd,MAAM,SAAS,YACX,YACA,MAAM,SAAS,aACb,aACA;AAAA,MACR,KAAK,KAAK,WAAW,IAAI,QAAQ;AAAA,IAErC;AAGA,YAAQ,SAAS,QAAQ;AAAA,MACvB,KAAK;AACH,eAAO,KAAK,WAAW,IAAI,OAAO;AAAA,MACpC,KAAK;AACH,eAAO,KAAK,WAAW,IAAI,QAAQ;AAAA,MACrC,KAAK;AACH,eAAO,SAAS,eAAe,WAC3B,KAAK,WAAW,IAAI,SAAS,IAC7B,KAAK,WAAW,IAAI,UAAU;AAAA,MACpC,KAAK;AACH,eAAO,KAAK,WAAW,IAAI,QAAQ;AAAA,MACrC;AACE,eAAO,SAAS,eAAe,YAC3B,KAAK,WAAW,IAAI,UAAU,IAC9B,KAAK,WAAW,IAAI,SAAS;AAAA,IACrC;AAAA,EACF;AAAA,EAEA,MAAc,iBACZ,OACA,UACA,UAC6B;AAC7B,UAAM,gBAA+B;AAAA,MACnC,OAAO,MAAM;AAAA,MACb,YAAY,SAAS;AAAA,MACrB,OAAO,MAAM,cAAc;AAAA,MAC3B,gBAAgB,MAAM,kBAAkB;AAAA,MACxC,OAAO,SAAS;AAAA,IAClB;AAGA,QAAI,MAAM,YAAY;AACpB,oBAAc,SAAS,CAAC,QAAQ,QAAQ,aAAa;AAAA,IACvD;AAEA,QAAI,aAA+C,CAAC;AAEpD,QAAI;AACF,UAAI,SAAS,eAAe,YAAY,SAAS,SAAS;AAExD,cAAM,YAAY,MAAM,KAAK,kBAAkB,MAAM,IAAI;AACzD,qBAAa,MAAM,KAAK,QAAQ;AAAA,UAC9B,MAAM;AAAA,UACN;AAAA,UACA,SAAS;AAAA,QACX;AAAA,MACF,OAAO;AAEL,qBAAa,MAAM,KAAK,QAAQ,OAAO,aAAa;AAAA,MACtD;AAAA,IACF,SAAS,OAAgB;AACvB,aAAO,KAAK,YAAY,SAAS,IAAI,6BAA6B,KAAK;AAEvE,UAAI,SAAS,kBAAkB;AAC7B,cAAM,mBAAmB,KAAK,WAAW,IAAI,SAAS,gBAAgB;AACtE,YAAI,kBAAkB;AACpB,iBAAO,KAAK,iBAAiB,OAAO,kBAAkB,QAAQ;AAAA,QAChE;AAAA,MACF;AAGA,aAAO,CAAC;AAAA,IACV;AAGA,WAAO,WAAW,IAAI,CAAC,YAAY;AAAA,MACjC,OAAO;AAAA,MACP,OAAO,OAAO;AAAA,MACd,iBAAiB,KAAK,wBAAwB,QAAQ,OAAO,QAAQ;AAAA,MACrE,iBAAiB,SAAS;AAAA,MAC1B,eAAe,KAAK,sBAAsB,QAAQ,KAAK;AAAA,IACzD,EAAE;AAAA,EACJ;AAAA,EAEA,MAAc,kBAAkB,MAAiC;AAG/D,UAAM,OAAO,KAAK,WAAW,IAAI;AACjC,WAAO,MAAM;AAAA,MACX,EAAE,QAAQ,IAAI;AAAA,MACd,CAAC,GAAG,OAAQ,OAAO,KAAK,MAAO,MAAM;AAAA,IACvC;AAAA,EACF;AAAA,EAEQ,WAAW,KAAqB;AACtC,QAAI,OAAO;AACX,aAAS,IAAI,GAAG,IAAI,IAAI,QAAQ,KAAK;AACnC,YAAM,OAAO,IAAI,WAAW,CAAC;AAC7B,cAAQ,QAAQ,KAAK,OAAO;AAC5B,aAAO,OAAO;AAAA,IAChB;AACA,WAAO,KAAK,IAAI,IAAI;AAAA,EACtB;AAAA,EAEQ,wBACN,OACA,OACA,UACQ;AACR,UAAM,UAAoB,CAAC;AAG3B,QAAI,MAAM,KAAK,YAAY,EAAE,SAAS,MAAM,KAAK,YAAY,CAAC,GAAG;AAC/D,cAAQ,KAAK,0BAA0B;AAAA,IACzC;AAEA,QAAI,MAAM,aAAa,YAAY,EAAE,SAAS,MAAM,KAAK,YAAY,CAAC,GAAG;AACvE,cAAQ,KAAK,8BAA8B;AAAA,IAC7C;AAGA,eAAW,WAAW,SAAS,UAAU;AACvC,UACE,MAAM,aAAa,YAAY,EAAE,SAAS,QAAQ,YAAY,CAAC,KAC/D,MAAM,KAAK,YAAY,EAAE,SAAS,QAAQ,YAAY,CAAC,GACvD;AACA,gBAAQ,KAAK,cAAc,OAAO,EAAE;AAAA,MACtC;AAAA,IACF;AAGA,QAAI,SAAS,WAAW,WAAW,MAAM,KAAK,SAAS,OAAO,GAAG;AAC/D,cAAQ,KAAK,6BAA6B;AAAA,IAC5C;AAEA,WAAO,QAAQ,SAAS,IACpB,QAAQ,KAAK,IAAI,IACjB;AAAA,EACN;AAAA,EAEQ,sBAAsB,OAAc,OAA+B;AACzE,UAAM,UAAoB,CAAC;AAC3B,UAAM,aAAa,MAAM,KAAK,YAAY;AAE1C,QAAI,MAAM,KAAK,YAAY,EAAE,SAAS,UAAU,GAAG;AACjD,cAAQ,KAAK,MAAM;AAAA,IACrB;AAEA,QAAI,MAAM,aAAa,YAAY,EAAE,SAAS,UAAU,GAAG;AACzD,cAAQ,KAAK,aAAa;AAAA,IAC5B;AAEA,QAAI,MAAM,KAAK,YAAY,EAAE,SAAS,UAAU,GAAG;AACjD,cAAQ,KAAK,MAAM;AAAA,IACrB;AAEA,WAAO;AAAA,EACT;AAAA,EAEA,MAAc,cACZ,UACA,OACA,UAC6B;AAE7B,QAAI,WAAW;AAGf,QAAI,MAAM,WAAW;AACnB,iBAAW,SAAS,OAAO,CAAC,QAAQ;AAClC,cAAM,YAAY,IAAI,KAAK,IAAI,MAAM,UAAU;AAC/C,cAAM,QAAQ,MAAM,WAAW;AAC/B,cAAM,MAAM,MAAM,WAAW;AAE7B,gBAAQ,C
AAC,SAAS,aAAa,WAAW,CAAC,OAAO,aAAa;AAAA,MACjE,CAAC;AAAA,IACH;AAGA,QAAI,MAAM,YAAY;AACpB,iBAAW,SAAS;AAAA,QAAO,CAAC,QAC1B,MAAM,WAAY,SAAS,IAAI,MAAM,IAAI;AAAA,MAC3C;AAAA,IACF;AAGA,QAAI,MAAM,gBAAgB;AACxB,iBAAW,SAAS,OAAO,CAAC,QAAQ,IAAI,SAAS,MAAM,cAAe;AAAA,IACxE;AAGA,UAAM,SAAS,SAAS,IAAI,CAAC,SAAS;AAAA,MACpC,GAAG;AAAA,MACH,OAAO,KAAK,uBAAuB,KAAK,OAAO,QAAQ;AAAA,IACzD,EAAE;AAGF,WAAO,KAAK,CAAC,GAAG,MAAM,EAAE,QAAQ,EAAE,KAAK;AAGvC,UAAM,aAAa,MAAM,cAAc;AACvC,WAAO,OAAO,MAAM,GAAG,UAAU;AAAA,EACnC;AAAA,EAEQ,uBACN,SACA,OACA,UACQ;AACR,QAAI,QAAQ,QAAQ;AAGpB,UAAM,YAAY,KAAK,IAAI,IAAI,QAAQ,MAAM,eAAe,MAAO,KAAK;AACxE,QAAI,WAAW,IAAI;AACjB,eAAS;AAAA,IACX,WAAW,WAAW,KAAK;AAEzB,eAAS;AAAA,IACX;AAGA,QAAI,QAAQ,MAAM,WAAW;AAC3B,eAAS;AAAA,IACX;AAGA,QAAI,SAAS,WAAW,WAAW,QAAQ,MAAM,KAAK,SAAS,OAAO,GAAG;AACvE,eAAS;AAAA,IACX;AAGA,QAAI,QAAQ,cAAc,SAAS,MAAM,GAAG;AAC1C,eAAS;AAAA,IACX;AAEA,QAAI,QAAQ,cAAc,SAAS,GAAG;AACpC,eAAS;AAAA,IACX;AAGA,QAAI,SAAS,WAAW,qBAAqB,WAAW,KAAK;AAC3D,eAAS;AAAA,IACX;AAEA,WAAO;AAAA,EACT;AAAA,EAEQ,iBAAiB,OAA6B;AACpD,WAAO,KAAK,UAAU;AAAA,MACpB,MAAM,MAAM;AAAA,MACZ,MAAM,MAAM;AAAA,MACZ,YAAY,MAAM;AAAA,MAClB,YAAY,MAAM;AAAA,MAClB,gBAAgB,MAAM;AAAA,IACxB,CAAC;AAAA,EACH;AAAA,EAEQ,gBAAgB,UAAiD;AACvE,UAAM,QAAQ,KAAK,WAAW,IAAI,QAAQ;AAC1C,QAAI,CAAC,MAAO,QAAO;AAGnB,WAAO;AAAA,EACT;AAAA,EAEQ,YAAY,UAAkB,QAAsC;AAE1E,QAAI,KAAK,WAAW,QAAQ,KAAK,cAAc;AAC7C,YAAM,WAAW,KAAK,WAAW,KAAK,EAAE,KAAK,EAAE;AAC/C,WAAK,WAAW,OAAO,QAAQ;AAAA,IACjC;AAEA,SAAK,WAAW,IAAI,UAAU,MAAM;AAAA,EACtC;AAAA;AAAA,EAGA,MAAM,kBACJ,SACA,QAAQ,IACqB;AAC7B,UAAM,QAAQ,MAAM,KAAK,QAAQ,SAAS,OAAO;AACjD,QAAI,CAAC,OAAO;AACV,YAAM,IAAI,MAAM,oBAAoB,OAAO,EAAE;AAAA,IAC/C;AAEA,UAAM,QAAsB;AAAA,MAC1B,MAAM,MAAM,eAAe,MAAM;AAAA,MACjC,MAAM;AAAA,MACN,YAAY;AAAA,MACZ,gBAAgB;AAAA,IAClB;AAEA,UAAM,SAAS,MAAM,KAAK,gBAAgB,KAAK;AAG/C,WAAO,OAAO,SAAS,OAAO,CAAC,QAAQ,IAAI,MAAM,aAAa,OAAO;AAAA,EACvE;AAAA,EAEA,MAAM,oBACJ,cACA,YAC6B;AAC7B,UAAM,QAAsB;AAAA,MAC1B,MAAM,GAAG,YAAY,IAAI,cAAc,EAAE,GAAG,KAAK;AAAA,MACjD,MAAM;AAAA,MACN,YAAY;AAAA,MACZ,YAAY,CAAC,SAAS,SAAS,UAAU;AAAA,MACzC,gBAAgB;AAAA,IAClB;AAEA,UAAM,SAAS,MAAM,KAAK,gBAAgB,KAAK;AAC/C,WAAO,OAAO;AAAA,EAChB;AAAA,EAEA,MAAM,iBACJ,QAAQ,IACR,YAC6B;AAC7B,UAAM,QAAsB;AAAA,MAC1B,MAAM;AAAA,MACN,MAAM;AAAA,MACN,YAAY;AAAA,MACZ,WAAW;AAAA,QACT,OAAO,IAAI,KAAK,KAAK,IAAI,IAAI,QAAQ,KAAK,KAAK,GAAI;AAAA,MACrD;AAAA,MACA;AAAA,MACA,gBAAgB;AAAA,IAClB;AAEA,UAAM,SAAS,MAAM,KAAK,gBAAgB,KAAK;AAC/C,WAAO,OAAO;AAAA,EAChB;AAAA;AAAA,EAGA,oBAAoB;AAClB,WAAO;AAAA,MACL,WAAW,KAAK,WAAW;AAAA,MAC3B,iBAAiB,KAAK,WAAW;AAAA,MACjC,qBAAqB,MAAM,KAAK,KAAK,WAAW,KAAK,CAAC;AAAA,IACxD;AAAA,EACF;AAAA,EAEA,aAAmB;AACjB,SAAK,WAAW,MAAM;AACtB,WAAO,KAAK,iCAAiC;AAAA,EAC/C;AACF;",
  "names": []
  }
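For quick reference, the single source-level change buried in the minified sourcesContent strings above is the Frame import path (the mappings and names entries appear only as unchanged context, and the rest of the embedded source is identical):

- import { Frame } from '../context/frame-manager.js';
+ import { Frame } from '../context/index.js';

Frame now resolves through the context module's index rather than frame-manager.js directly, presumably in step with the export changes listed above for package/dist/core/context/index.js (+4 −22).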
package/dist/core/retrieval/graph-retrieval.js.map
@@ -1,7 +1,7 @@
  {
  "version": 3,
  "sources": ["../../../src/core/retrieval/graph-retrieval.ts"],
- "sourcesContent": ["/**\n * Graph-Based Retrieval with Explicit Semantic Navigation\n * Models documents as nodes with explicit relationships\n * Query traverses edges instead of embedding space\n *\n * The \"nuclear option\" for next-gen RAG\n */\n\nimport Database from 'better-sqlite3';\nimport { logger } from '../monitoring/logger.js';\nimport { Trace, CompressedTrace } from '../trace/types.js';\nimport { Frame, Anchor } from '../context/frame-manager.js';\nimport crypto from 'crypto';\n\nexport type NodeType =\n | 'document'\n | 'concept'\n | 'entity'\n | 'event'\n | 'decision'\n | 'constraint'\n | 'topic';\n\nexport type EdgeType =\n | 'citation' // Document cites another\n | 'topic_overlap' // Shares topics\n | 'temporal' // Time sequence\n | 'causal' // Causality chain\n | 'semantic' // Semantic similarity\n | 'structural' // Code structure relation\n | 'dependency' // Depends on\n | 'evolution' // Evolves from\n | 'contradiction' // Contradicts\n | 'implementation'; // Implements concept\n\nexport interface GraphNode {\n id: string;\n type: NodeType;\n label: string;\n content?: string;\n importance: number; // 0-1, affects node size\n metadata: {\n created: number;\n modified: number;\n accessed: number;\n accessCount: number;\n traceIds?: string[];\n frameIds?: string[];\n tags?: string[];\n embeddings?: number[];\n };\n}\n\nexport interface GraphEdge {\n id: string;\n source: string;\n target: string;\n type: EdgeType;\n weight: number; // 0-1, relationship strength\n metadata: {\n created: number;\n evidence?: string[];\n bidirectional?: boolean;\n };\n}\n\nexport interface TraversalPath {\n nodes: GraphNode[];\n edges: GraphEdge[];\n score: number;\n reasoning: string;\n}\n\nexport interface GraphQuery {\n startNodes?: string[]; // Start from specific nodes\n targetNodes?: string[]; // Find paths to targets\n edgeTypes?: EdgeType[]; // Allowed edge types\n maxHops?: number; // Max traversal depth\n minWeight?: number; // Min edge weight\n nodeTypes?: NodeType[]; // Filter node types\n}\n\nexport interface GraphConfig {\n maxNodes: number;\n maxEdges: number;\n minEdgeWeight: number;\n importanceThreshold: number;\n traversalTimeout: number;\n enableBidirectional: boolean;\n}\n\nexport const DEFAULT_GRAPH_CONFIG: GraphConfig = {\n maxNodes: 10000,\n maxEdges: 50000,\n minEdgeWeight: 0.1,\n importanceThreshold: 0.3,\n traversalTimeout: 5000,\n enableBidirectional: true,\n};\n\n/**\n * Graph-based retrieval system with explicit semantic relationships\n */\nexport class GraphRetrieval {\n private db: Database.Database;\n private config: GraphConfig;\n private nodeIndex: Map<string, GraphNode> = new Map();\n private adjacencyList: Map<string, GraphEdge[]> = new Map();\n private reverseAdjacencyList: Map<string, GraphEdge[]> = new Map();\n\n constructor(db: Database.Database, config: Partial<GraphConfig> = {}) {\n this.db = db;\n this.config = { ...DEFAULT_GRAPH_CONFIG, ...config };\n this.initializeSchema();\n }\n\n private initializeSchema(): void {\n // Nodes table\n this.db.exec(`\n CREATE TABLE IF NOT EXISTS graph_nodes (\n id TEXT PRIMARY KEY,\n type TEXT NOT NULL,\n label TEXT NOT NULL,\n content TEXT,\n importance REAL DEFAULT 0.5,\n created INTEGER DEFAULT (unixepoch() * 1000),\n modified INTEGER DEFAULT (unixepoch() * 1000),\n accessed INTEGER DEFAULT (unixepoch() * 1000),\n access_count INTEGER DEFAULT 0,\n trace_ids TEXT,\n frame_ids TEXT,\n tags TEXT,\n embeddings BLOB\n )\n `);\n\n // Edges table\n this.db.exec(`\n CREATE TABLE IF NOT EXISTS graph_edges (\n id TEXT 
PRIMARY KEY,\n source TEXT NOT NULL,\n target TEXT NOT NULL,\n type TEXT NOT NULL,\n weight REAL DEFAULT 0.5,\n created INTEGER DEFAULT (unixepoch() * 1000),\n evidence TEXT,\n bidirectional BOOLEAN DEFAULT 0,\n FOREIGN KEY (source) REFERENCES graph_nodes(id) ON DELETE CASCADE,\n FOREIGN KEY (target) REFERENCES graph_nodes(id) ON DELETE CASCADE,\n UNIQUE(source, target, type)\n )\n `);\n\n // Indexes for efficient traversal\n this.db.exec(`\n CREATE INDEX IF NOT EXISTS idx_graph_nodes_type ON graph_nodes(type);\n CREATE INDEX IF NOT EXISTS idx_graph_nodes_importance ON graph_nodes(importance DESC);\n CREATE INDEX IF NOT EXISTS idx_graph_edges_source ON graph_edges(source);\n CREATE INDEX IF NOT EXISTS idx_graph_edges_target ON graph_edges(target);\n CREATE INDEX IF NOT EXISTS idx_graph_edges_type ON graph_edges(type);\n CREATE INDEX IF NOT EXISTS idx_graph_edges_weight ON graph_edges(weight DESC);\n `);\n }\n\n /**\n * Build graph from traces and frames\n */\n async buildGraph(traces: Trace[], frames: Frame[]): Promise<void> {\n logger.info('Building semantic graph', {\n traceCount: traces.length,\n frameCount: frames.length,\n });\n\n // Create document nodes from traces\n for (const trace of traces) {\n await this.createDocumentNode(trace);\n }\n\n // Create concept nodes from frames\n for (const frame of frames) {\n await this.createConceptNode(frame);\n }\n\n // Establish edges based on relationships\n await this.establishTemporalEdges(traces);\n await this.establishCausalEdges(traces);\n await this.establishSemanticEdges(traces);\n await this.establishTopicEdges(traces);\n\n // Load graph into memory for fast traversal\n await this.loadGraphIntoMemory();\n\n logger.info('Graph built successfully', {\n nodes: this.nodeIndex.size,\n edges: this.adjacencyList.size,\n });\n }\n\n /**\n * Create document node from trace\n */\n private async createDocumentNode(trace: Trace): Promise<GraphNode> {\n const node: GraphNode = {\n id: `doc_${trace.id}`,\n type: 'document',\n label: trace.summary.substring(0, 100),\n content: JSON.stringify(trace),\n importance: trace.score,\n metadata: {\n created: trace.metadata.startTime,\n modified: trace.metadata.endTime,\n accessed: Date.now(),\n accessCount: 0,\n traceIds: [trace.id],\n tags: [trace.type, ...trace.metadata.filesModified.slice(0, 3)],\n },\n };\n\n await this.insertNode(node);\n return node;\n }\n\n /**\n * Create concept node from frame\n */\n private async createConceptNode(frame: Frame): Promise<GraphNode> {\n const node: GraphNode = {\n id: `concept_${frame.id}`,\n type: 'concept',\n label: frame.name,\n importance: frame.score,\n metadata: {\n created: frame.created_at,\n modified: frame.updated_at || frame.created_at,\n accessed: Date.now(),\n accessCount: 0,\n frameIds: [frame.id],\n tags: [frame.type],\n },\n };\n\n await this.insertNode(node);\n return node;\n }\n\n /**\n * Establish temporal edges between traces\n */\n private async establishTemporalEdges(traces: Trace[]): Promise<void> {\n // Sort by time\n const sorted = [...traces].sort(\n (a, b) => a.metadata.startTime - b.metadata.startTime\n );\n\n for (let i = 0; i < sorted.length - 1; i++) {\n const current = sorted[i];\n const next = sorted[i + 1];\n\n // Only link if within reasonable time window (1 hour)\n const timeDiff = next.metadata.startTime - current.metadata.endTime;\n if (timeDiff < 60 * 60 * 1000) {\n const weight = 1 / (1 + timeDiff / (1000 * 60)); // Decay by minutes\n\n await this.createEdge({\n id: this.generateId('edge'),\n source: `doc_${current.id}`,\n 
target: `doc_${next.id}`,\n type: 'temporal',\n weight,\n metadata: {\n created: Date.now(),\n evidence: [`${timeDiff}ms gap`],\n },\n });\n }\n }\n }\n\n /**\n * Establish causal edges based on trace relationships\n */\n private async establishCausalEdges(traces: Trace[]): Promise<void> {\n for (const trace of traces) {\n if (trace.metadata.causalChain && trace.metadata.causalChain.length > 0) {\n for (const parentId of trace.metadata.causalChain) {\n const parentExists = traces.find((t) => t.id === parentId);\n if (parentExists) {\n await this.createEdge({\n id: this.generateId('edge'),\n source: `doc_${parentId}`,\n target: `doc_${trace.id}`,\n type: 'causal',\n weight: 0.9, // Strong causal relationship\n metadata: {\n created: Date.now(),\n evidence: ['explicit causal chain'],\n },\n });\n }\n }\n }\n }\n }\n\n /**\n * Establish semantic edges based on similarity\n */\n private async establishSemanticEdges(traces: Trace[]): Promise<void> {\n // Compare each pair (expensive but thorough)\n for (let i = 0; i < traces.length - 1; i++) {\n for (let j = i + 1; j < traces.length; j++) {\n const similarity = this.calculateSimilarity(traces[i], traces[j]);\n\n if (similarity > this.config.minEdgeWeight) {\n await this.createEdge({\n id: this.generateId('edge'),\n source: `doc_${traces[i].id}`,\n target: `doc_${traces[j].id}`,\n type: 'semantic',\n weight: similarity,\n metadata: {\n created: Date.now(),\n evidence: [`similarity: ${similarity.toFixed(2)}`],\n bidirectional: true,\n },\n });\n }\n }\n }\n }\n\n /**\n * Establish topic overlap edges\n */\n private async establishTopicEdges(traces: Trace[]): Promise<void> {\n // Group by topic\n const topicGroups: Map<string, Trace[]> = new Map();\n\n for (const trace of traces) {\n const topic = trace.type;\n if (!topicGroups.has(topic)) {\n topicGroups.set(topic, []);\n }\n topicGroups.get(topic)!.push(trace);\n }\n\n // Connect traces within same topic\n for (const [topic, group] of topicGroups) {\n if (group.length < 2) continue;\n\n // Create topic hub node\n const topicNode: GraphNode = {\n id: `topic_${topic}`,\n type: 'topic',\n label: topic,\n importance: 0.7,\n metadata: {\n created: Date.now(),\n modified: Date.now(),\n accessed: Date.now(),\n accessCount: 0,\n tags: [topic],\n },\n };\n\n await this.insertNode(topicNode);\n\n // Connect all traces to topic hub\n for (const trace of group) {\n await this.createEdge({\n id: this.generateId('edge'),\n source: `doc_${trace.id}`,\n target: topicNode.id,\n type: 'topic_overlap',\n weight: 0.6,\n metadata: {\n created: Date.now(),\n bidirectional: true,\n },\n });\n }\n }\n }\n\n /**\n * Traverse graph to find relevant paths\n */\n async traverse(query: string, config?: GraphQuery): Promise<TraversalPath[]> {\n const startTime = Date.now();\n const queryConfig = config || {};\n const maxHops = queryConfig.maxHops || 3;\n const paths: TraversalPath[] = [];\n\n // Find starting nodes based on query\n const startNodes = await this.findStartNodes(query, queryConfig);\n\n if (startNodes.length === 0) {\n logger.warn('No starting nodes found for query', { query });\n return [];\n }\n\n // Perform BFS/DFS traversal from each start node\n for (const startNode of startNodes) {\n const nodePaths = await this.traverseFromNode(\n startNode,\n query,\n maxHops,\n queryConfig\n );\n paths.push(...nodePaths);\n }\n\n // Sort by score and limit results\n paths.sort((a, b) => b.score - a.score);\n const topPaths = paths.slice(0, 10);\n\n logger.info('Graph traversal complete', {\n query: query.substring(0, 
50),\n startNodes: startNodes.length,\n pathsFound: paths.length,\n timeMs: Date.now() - startTime,\n });\n\n return topPaths;\n }\n\n /**\n * Find starting nodes for traversal\n */\n private async findStartNodes(\n query: string,\n config: GraphQuery\n ): Promise<GraphNode[]> {\n if (config.startNodes) {\n return config.startNodes\n .map((id) => this.nodeIndex.get(id))\n .filter((n) => n !== undefined) as GraphNode[];\n }\n\n // Find nodes matching query\n const queryWords = query.toLowerCase().split(/\\s+/);\n const candidates: Array<{ node: GraphNode; score: number }> = [];\n\n for (const node of this.nodeIndex.values()) {\n if (config.nodeTypes && !config.nodeTypes.includes(node.type)) {\n continue;\n }\n\n const label = node.label.toLowerCase();\n const tags = (node.metadata.tags || []).join(' ').toLowerCase();\n\n let score = 0;\n for (const word of queryWords) {\n if (label.includes(word)) score += 2;\n if (tags.includes(word)) score += 1;\n }\n\n if (score > 0) {\n score *= node.importance; // Weight by importance\n candidates.push({ node, score });\n }\n }\n\n // Sort and return top candidates\n candidates.sort((a, b) => b.score - a.score);\n return candidates.slice(0, 5).map((c) => c.node);\n }\n\n /**\n * Traverse from a specific node\n */\n private async traverseFromNode(\n startNode: GraphNode,\n query: string,\n maxHops: number,\n config: GraphQuery\n ): Promise<TraversalPath[]> {\n const paths: TraversalPath[] = [];\n const visited = new Set<string>();\n\n // BFS queue: [node, path, depth]\n const queue: Array<{\n node: GraphNode;\n path: TraversalPath;\n depth: number;\n }> = [\n {\n node: startNode,\n path: {\n nodes: [startNode],\n edges: [],\n score: startNode.importance,\n reasoning: `Starting from ${startNode.type}: ${startNode.label}`,\n },\n depth: 0,\n },\n ];\n\n while (queue.length > 0) {\n const current = queue.shift()!;\n\n if (current.depth >= maxHops) {\n paths.push(current.path);\n continue;\n }\n\n if (visited.has(current.node.id)) {\n continue;\n }\n visited.add(current.node.id);\n\n // Get outgoing edges\n const edges = this.adjacencyList.get(current.node.id) || [];\n\n for (const edge of edges) {\n // Filter by edge type if specified\n if (config.edgeTypes && !config.edgeTypes.includes(edge.type)) {\n continue;\n }\n\n // Filter by minimum weight\n if (config.minWeight && edge.weight < config.minWeight) {\n continue;\n }\n\n const targetNode = this.nodeIndex.get(edge.target);\n if (!targetNode) continue;\n\n // Calculate path score\n const pathScore = this.calculatePathScore(\n current.path,\n edge,\n targetNode,\n query\n );\n\n // Create new path\n const newPath: TraversalPath = {\n nodes: [...current.path.nodes, targetNode],\n edges: [...current.path.edges, edge],\n score: pathScore,\n reasoning: `${current.path.reasoning} \u2192 ${edge.type} \u2192 ${targetNode.label}`,\n };\n\n queue.push({\n node: targetNode,\n path: newPath,\n depth: current.depth + 1,\n });\n }\n }\n\n return paths;\n }\n\n /**\n * Calculate path score\n */\n private calculatePathScore(\n currentPath: TraversalPath,\n edge: GraphEdge,\n targetNode: GraphNode,\n query: string\n ): number {\n // Base score from current path\n let score = currentPath.score;\n\n // Edge weight contribution\n score *= edge.weight;\n\n // Target node importance\n score *= targetNode.importance;\n\n // Query relevance\n const queryWords = query.toLowerCase().split(/\\s+/);\n const targetLabel = targetNode.label.toLowerCase();\n let relevance = 0;\n for (const word of queryWords) {\n if 
(targetLabel.includes(word)) relevance += 1;\n }\n score *= 1 + relevance * 0.2;\n\n // Path length penalty (prefer shorter paths)\n score *= Math.pow(0.9, currentPath.nodes.length);\n\n return score;\n }\n\n /**\n * Calculate similarity between traces\n */\n private calculateSimilarity(a: Trace, b: Trace): number {\n // Type similarity\n const typeSim = a.type === b.type ? 0.3 : 0;\n\n // File overlap\n const filesA = new Set(a.metadata.filesModified);\n const filesB = new Set(b.metadata.filesModified);\n const intersection = new Set([...filesA].filter((x) => filesB.has(x)));\n const union = new Set([...filesA, ...filesB]);\n const fileSim = union.size > 0 ? (intersection.size / union.size) * 0.3 : 0;\n\n // Tool overlap\n const toolsA = new Set(a.tools.map((t) => t.tool));\n const toolsB = new Set(b.tools.map((t) => t.tool));\n const toolIntersection = new Set([...toolsA].filter((x) => toolsB.has(x)));\n const toolUnion = new Set([...toolsA, ...toolsB]);\n const toolSim =\n toolUnion.size > 0 ? (toolIntersection.size / toolUnion.size) * 0.2 : 0;\n\n // Summary text similarity (simple word overlap)\n const wordsA = new Set(a.summary.toLowerCase().split(/\\s+/));\n const wordsB = new Set(b.summary.toLowerCase().split(/\\s+/));\n const wordIntersection = new Set([...wordsA].filter((x) => wordsB.has(x)));\n const wordUnion = new Set([...wordsA, ...wordsB]);\n const textSim =\n wordUnion.size > 0 ? (wordIntersection.size / wordUnion.size) * 0.2 : 0;\n\n return typeSim + fileSim + toolSim + textSim;\n }\n\n /**\n * Insert node into database\n */\n private async insertNode(node: GraphNode): Promise<void> {\n this.db\n .prepare(\n `\n INSERT OR REPLACE INTO graph_nodes (\n id, type, label, content, importance,\n created, modified, accessed, access_count,\n trace_ids, frame_ids, tags, embeddings\n ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)\n `\n )\n .run(\n node.id,\n node.type,\n node.label,\n node.content || null,\n node.importance,\n node.metadata.created,\n node.metadata.modified,\n node.metadata.accessed,\n node.metadata.accessCount,\n JSON.stringify(node.metadata.traceIds || []),\n JSON.stringify(node.metadata.frameIds || []),\n JSON.stringify(node.metadata.tags || []),\n node.metadata.embeddings ? Buffer.from(node.metadata.embeddings) : null\n );\n }\n\n /**\n * Create edge in database\n */\n private async createEdge(edge: GraphEdge): Promise<void> {\n try {\n this.db\n .prepare(\n `\n INSERT OR IGNORE INTO graph_edges (\n id, source, target, type, weight,\n created, evidence, bidirectional\n ) VALUES (?, ?, ?, ?, ?, ?, ?, ?)\n `\n )\n .run(\n edge.id,\n edge.source,\n edge.target,\n edge.type,\n edge.weight,\n edge.metadata.created,\n JSON.stringify(edge.metadata.evidence || []),\n edge.metadata.bidirectional ? 
1 : 0\n );\n } catch (error: unknown) {\n // Ignore duplicate edges\n }\n }\n\n /**\n * Load graph into memory for fast traversal\n */\n private async loadGraphIntoMemory(): Promise<void> {\n // Load nodes\n const nodes = this.db.prepare('SELECT * FROM graph_nodes').all() as any[];\n\n for (const row of nodes) {\n const node: GraphNode = {\n id: row.id,\n type: row.type as NodeType,\n label: row.label,\n content: row.content,\n importance: row.importance,\n metadata: {\n created: row.created,\n modified: row.modified,\n accessed: row.accessed,\n accessCount: row.access_count,\n traceIds: JSON.parse(row.trace_ids || '[]'),\n frameIds: JSON.parse(row.frame_ids || '[]'),\n tags: JSON.parse(row.tags || '[]'),\n },\n };\n this.nodeIndex.set(node.id, node);\n }\n\n // Load edges\n const edges = this.db.prepare('SELECT * FROM graph_edges').all() as any[];\n\n for (const row of edges) {\n const edge: GraphEdge = {\n id: row.id,\n source: row.source,\n target: row.target,\n type: row.type as EdgeType,\n weight: row.weight,\n metadata: {\n created: row.created,\n evidence: JSON.parse(row.evidence || '[]'),\n bidirectional: row.bidirectional === 1,\n },\n };\n\n // Add to adjacency list\n if (!this.adjacencyList.has(edge.source)) {\n this.adjacencyList.set(edge.source, []);\n }\n this.adjacencyList.get(edge.source)!.push(edge);\n\n // Add to reverse adjacency list\n if (!this.reverseAdjacencyList.has(edge.target)) {\n this.reverseAdjacencyList.set(edge.target, []);\n }\n this.reverseAdjacencyList.get(edge.target)!.push(edge);\n\n // If bidirectional, add reverse edge\n if (edge.metadata.bidirectional) {\n const reverseEdge = {\n ...edge,\n source: edge.target,\n target: edge.source,\n };\n if (!this.adjacencyList.has(reverseEdge.source)) {\n this.adjacencyList.set(reverseEdge.source, []);\n }\n this.adjacencyList.get(reverseEdge.source)!.push(reverseEdge);\n }\n }\n }\n\n /**\n * Generate unique ID\n */\n private generateId(prefix: string): string {\n return `${prefix}_${crypto.randomBytes(8).toString('hex')}`;\n }\n\n /**\n * Get graph statistics\n */\n getStatistics(): any {\n const nodeStats = this.db\n .prepare(\n `\n SELECT \n type,\n COUNT(*) as count,\n AVG(importance) as avg_importance,\n MAX(importance) as max_importance\n FROM graph_nodes\n GROUP BY type\n `\n )\n .all();\n\n const edgeStats = this.db\n .prepare(\n `\n SELECT \n type,\n COUNT(*) as count,\n AVG(weight) as avg_weight,\n MAX(weight) as max_weight\n FROM graph_edges\n GROUP BY type\n `\n )\n .all();\n\n return {\n nodes: {\n total: this.nodeIndex.size,\n byType: nodeStats,\n inMemory: this.nodeIndex.size,\n },\n edges: {\n total: edgeStats.reduce((sum: number, e: any) => sum + e.count, 0),\n byType: edgeStats,\n adjacencyListSize: this.adjacencyList.size,\n },\n connectivity: {\n avgDegree: this.calculateAverageDegree(),\n maxDegree: this.calculateMaxDegree(),\n },\n };\n }\n\n /**\n * Calculate average node degree\n */\n private calculateAverageDegree(): number {\n if (this.nodeIndex.size === 0) return 0;\n\n let totalDegree = 0;\n for (const nodeId of this.nodeIndex.keys()) {\n const outgoing = this.adjacencyList.get(nodeId)?.length || 0;\n const incoming = this.reverseAdjacencyList.get(nodeId)?.length || 0;\n totalDegree += outgoing + incoming;\n }\n\n return totalDegree / this.nodeIndex.size;\n }\n\n /**\n * Calculate maximum node degree\n */\n private calculateMaxDegree(): number {\n let maxDegree = 0;\n\n for (const nodeId of this.nodeIndex.keys()) {\n const outgoing = this.adjacencyList.get(nodeId)?.length || 0;\n const 
incoming = this.reverseAdjacencyList.get(nodeId)?.length || 0;\n maxDegree = Math.max(maxDegree, outgoing + incoming);\n }\n\n return maxDegree;\n }\n\n /**\n * Export graph for visualization\n */\n exportForVisualization(): any {\n const nodes = Array.from(this.nodeIndex.values()).map((node) => ({\n id: node.id,\n label: node.label,\n type: node.type,\n size: node.importance * 10,\n color: this.getNodeColor(node.type),\n }));\n\n const edges = [];\n for (const edgeList of this.adjacencyList.values()) {\n for (const edge of edgeList) {\n edges.push({\n source: edge.source,\n target: edge.target,\n type: edge.type,\n weight: edge.weight,\n color: this.getEdgeColor(edge.type),\n });\n }\n }\n\n return { nodes, edges };\n }\n\n /**\n * Get node color for visualization\n */\n private getNodeColor(type: NodeType): string {\n const colors: Record<NodeType, string> = {\n document: '#4A90E2',\n concept: '#7ED321',\n entity: '#F5A623',\n event: '#D0021B',\n decision: '#9013FE',\n constraint: '#50E3C2',\n topic: '#B8E986',\n };\n return colors[type] || '#CCCCCC';\n }\n\n /**\n * Get edge color for visualization\n */\n private getEdgeColor(type: EdgeType): string {\n const colors: Record<EdgeType, string> = {\n citation: '#4A90E2',\n topic_overlap: '#7ED321',\n temporal: '#F5A623',\n causal: '#D0021B',\n semantic: '#9013FE',\n structural: '#50E3C2',\n dependency: '#B8E986',\n evolution: '#417505',\n contradiction: '#FF0000',\n implementation: '#0099FF',\n };\n return colors[type] || '#999999';\n }\n}\n"],
+ "sourcesContent": ["/**\n * Graph-Based Retrieval with Explicit Semantic Navigation\n * Models documents as nodes with explicit relationships\n * Query traverses edges instead of embedding space\n *\n * The \"nuclear option\" for next-gen RAG\n */\n\nimport Database from 'better-sqlite3';\nimport { logger } from '../monitoring/logger.js';\nimport { Trace, CompressedTrace } from '../trace/types.js';\nimport { Frame, Anchor } from '../context/index.js';\nimport crypto from 'crypto';\n\nexport type NodeType =\n | 'document'\n | 'concept'\n | 'entity'\n | 'event'\n | 'decision'\n | 'constraint'\n | 'topic';\n\nexport type EdgeType =\n | 'citation' // Document cites another\n | 'topic_overlap' // Shares topics\n | 'temporal' // Time sequence\n | 'causal' // Causality chain\n | 'semantic' // Semantic similarity\n | 'structural' // Code structure relation\n | 'dependency' // Depends on\n | 'evolution' // Evolves from\n | 'contradiction' // Contradicts\n | 'implementation'; // Implements concept\n\nexport interface GraphNode {\n id: string;\n type: NodeType;\n label: string;\n content?: string;\n importance: number; // 0-1, affects node size\n metadata: {\n created: number;\n modified: number;\n accessed: number;\n accessCount: number;\n traceIds?: string[];\n frameIds?: string[];\n tags?: string[];\n embeddings?: number[];\n };\n}\n\nexport interface GraphEdge {\n id: string;\n source: string;\n target: string;\n type: EdgeType;\n weight: number; // 0-1, relationship strength\n metadata: {\n created: number;\n evidence?: string[];\n bidirectional?: boolean;\n };\n}\n\nexport interface TraversalPath {\n nodes: GraphNode[];\n edges: GraphEdge[];\n score: number;\n reasoning: string;\n}\n\nexport interface GraphQuery {\n startNodes?: string[]; // Start from specific nodes\n targetNodes?: string[]; // Find paths to targets\n edgeTypes?: EdgeType[]; // Allowed edge types\n maxHops?: number; // Max traversal depth\n minWeight?: number; // Min edge weight\n nodeTypes?: NodeType[]; // Filter node types\n}\n\nexport interface GraphConfig {\n maxNodes: number;\n maxEdges: number;\n minEdgeWeight: number;\n importanceThreshold: number;\n traversalTimeout: number;\n enableBidirectional: boolean;\n}\n\nexport const DEFAULT_GRAPH_CONFIG: GraphConfig = {\n maxNodes: 10000,\n maxEdges: 50000,\n minEdgeWeight: 0.1,\n importanceThreshold: 0.3,\n traversalTimeout: 5000,\n enableBidirectional: true,\n};\n\n/**\n * Graph-based retrieval system with explicit semantic relationships\n */\nexport class GraphRetrieval {\n private db: Database.Database;\n private config: GraphConfig;\n private nodeIndex: Map<string, GraphNode> = new Map();\n private adjacencyList: Map<string, GraphEdge[]> = new Map();\n private reverseAdjacencyList: Map<string, GraphEdge[]> = new Map();\n\n constructor(db: Database.Database, config: Partial<GraphConfig> = {}) {\n this.db = db;\n this.config = { ...DEFAULT_GRAPH_CONFIG, ...config };\n this.initializeSchema();\n }\n\n private initializeSchema(): void {\n // Nodes table\n this.db.exec(`\n CREATE TABLE IF NOT EXISTS graph_nodes (\n id TEXT PRIMARY KEY,\n type TEXT NOT NULL,\n label TEXT NOT NULL,\n content TEXT,\n importance REAL DEFAULT 0.5,\n created INTEGER DEFAULT (unixepoch() * 1000),\n modified INTEGER DEFAULT (unixepoch() * 1000),\n accessed INTEGER DEFAULT (unixepoch() * 1000),\n access_count INTEGER DEFAULT 0,\n trace_ids TEXT,\n frame_ids TEXT,\n tags TEXT,\n embeddings BLOB\n )\n `);\n\n // Edges table\n this.db.exec(`\n CREATE TABLE IF NOT EXISTS graph_edges (\n id TEXT PRIMARY 
KEY,\n source TEXT NOT NULL,\n target TEXT NOT NULL,\n type TEXT NOT NULL,\n weight REAL DEFAULT 0.5,\n created INTEGER DEFAULT (unixepoch() * 1000),\n evidence TEXT,\n bidirectional BOOLEAN DEFAULT 0,\n FOREIGN KEY (source) REFERENCES graph_nodes(id) ON DELETE CASCADE,\n FOREIGN KEY (target) REFERENCES graph_nodes(id) ON DELETE CASCADE,\n UNIQUE(source, target, type)\n )\n `);\n\n // Indexes for efficient traversal\n this.db.exec(`\n CREATE INDEX IF NOT EXISTS idx_graph_nodes_type ON graph_nodes(type);\n CREATE INDEX IF NOT EXISTS idx_graph_nodes_importance ON graph_nodes(importance DESC);\n CREATE INDEX IF NOT EXISTS idx_graph_edges_source ON graph_edges(source);\n CREATE INDEX IF NOT EXISTS idx_graph_edges_target ON graph_edges(target);\n CREATE INDEX IF NOT EXISTS idx_graph_edges_type ON graph_edges(type);\n CREATE INDEX IF NOT EXISTS idx_graph_edges_weight ON graph_edges(weight DESC);\n `);\n }\n\n /**\n * Build graph from traces and frames\n */\n async buildGraph(traces: Trace[], frames: Frame[]): Promise<void> {\n logger.info('Building semantic graph', {\n traceCount: traces.length,\n frameCount: frames.length,\n });\n\n // Create document nodes from traces\n for (const trace of traces) {\n await this.createDocumentNode(trace);\n }\n\n // Create concept nodes from frames\n for (const frame of frames) {\n await this.createConceptNode(frame);\n }\n\n // Establish edges based on relationships\n await this.establishTemporalEdges(traces);\n await this.establishCausalEdges(traces);\n await this.establishSemanticEdges(traces);\n await this.establishTopicEdges(traces);\n\n // Load graph into memory for fast traversal\n await this.loadGraphIntoMemory();\n\n logger.info('Graph built successfully', {\n nodes: this.nodeIndex.size,\n edges: this.adjacencyList.size,\n });\n }\n\n /**\n * Create document node from trace\n */\n private async createDocumentNode(trace: Trace): Promise<GraphNode> {\n const node: GraphNode = {\n id: `doc_${trace.id}`,\n type: 'document',\n label: trace.summary.substring(0, 100),\n content: JSON.stringify(trace),\n importance: trace.score,\n metadata: {\n created: trace.metadata.startTime,\n modified: trace.metadata.endTime,\n accessed: Date.now(),\n accessCount: 0,\n traceIds: [trace.id],\n tags: [trace.type, ...trace.metadata.filesModified.slice(0, 3)],\n },\n };\n\n await this.insertNode(node);\n return node;\n }\n\n /**\n * Create concept node from frame\n */\n private async createConceptNode(frame: Frame): Promise<GraphNode> {\n const node: GraphNode = {\n id: `concept_${frame.id}`,\n type: 'concept',\n label: frame.name,\n importance: frame.score,\n metadata: {\n created: frame.created_at,\n modified: frame.updated_at || frame.created_at,\n accessed: Date.now(),\n accessCount: 0,\n frameIds: [frame.id],\n tags: [frame.type],\n },\n };\n\n await this.insertNode(node);\n return node;\n }\n\n /**\n * Establish temporal edges between traces\n */\n private async establishTemporalEdges(traces: Trace[]): Promise<void> {\n // Sort by time\n const sorted = [...traces].sort(\n (a, b) => a.metadata.startTime - b.metadata.startTime\n );\n\n for (let i = 0; i < sorted.length - 1; i++) {\n const current = sorted[i];\n const next = sorted[i + 1];\n\n // Only link if within reasonable time window (1 hour)\n const timeDiff = next.metadata.startTime - current.metadata.endTime;\n if (timeDiff < 60 * 60 * 1000) {\n const weight = 1 / (1 + timeDiff / (1000 * 60)); // Decay by minutes\n\n await this.createEdge({\n id: this.generateId('edge'),\n source: `doc_${current.id}`,\n target: 
`doc_${next.id}`,\n type: 'temporal',\n weight,\n metadata: {\n created: Date.now(),\n evidence: [`${timeDiff}ms gap`],\n },\n });\n }\n }\n }\n\n /**\n * Establish causal edges based on trace relationships\n */\n private async establishCausalEdges(traces: Trace[]): Promise<void> {\n for (const trace of traces) {\n if (trace.metadata.causalChain && trace.metadata.causalChain.length > 0) {\n for (const parentId of trace.metadata.causalChain) {\n const parentExists = traces.find((t) => t.id === parentId);\n if (parentExists) {\n await this.createEdge({\n id: this.generateId('edge'),\n source: `doc_${parentId}`,\n target: `doc_${trace.id}`,\n type: 'causal',\n weight: 0.9, // Strong causal relationship\n metadata: {\n created: Date.now(),\n evidence: ['explicit causal chain'],\n },\n });\n }\n }\n }\n }\n }\n\n /**\n * Establish semantic edges based on similarity\n */\n private async establishSemanticEdges(traces: Trace[]): Promise<void> {\n // Compare each pair (expensive but thorough)\n for (let i = 0; i < traces.length - 1; i++) {\n for (let j = i + 1; j < traces.length; j++) {\n const similarity = this.calculateSimilarity(traces[i], traces[j]);\n\n if (similarity > this.config.minEdgeWeight) {\n await this.createEdge({\n id: this.generateId('edge'),\n source: `doc_${traces[i].id}`,\n target: `doc_${traces[j].id}`,\n type: 'semantic',\n weight: similarity,\n metadata: {\n created: Date.now(),\n evidence: [`similarity: ${similarity.toFixed(2)}`],\n bidirectional: true,\n },\n });\n }\n }\n }\n }\n\n /**\n * Establish topic overlap edges\n */\n private async establishTopicEdges(traces: Trace[]): Promise<void> {\n // Group by topic\n const topicGroups: Map<string, Trace[]> = new Map();\n\n for (const trace of traces) {\n const topic = trace.type;\n if (!topicGroups.has(topic)) {\n topicGroups.set(topic, []);\n }\n topicGroups.get(topic)!.push(trace);\n }\n\n // Connect traces within same topic\n for (const [topic, group] of topicGroups) {\n if (group.length < 2) continue;\n\n // Create topic hub node\n const topicNode: GraphNode = {\n id: `topic_${topic}`,\n type: 'topic',\n label: topic,\n importance: 0.7,\n metadata: {\n created: Date.now(),\n modified: Date.now(),\n accessed: Date.now(),\n accessCount: 0,\n tags: [topic],\n },\n };\n\n await this.insertNode(topicNode);\n\n // Connect all traces to topic hub\n for (const trace of group) {\n await this.createEdge({\n id: this.generateId('edge'),\n source: `doc_${trace.id}`,\n target: topicNode.id,\n type: 'topic_overlap',\n weight: 0.6,\n metadata: {\n created: Date.now(),\n bidirectional: true,\n },\n });\n }\n }\n }\n\n /**\n * Traverse graph to find relevant paths\n */\n async traverse(query: string, config?: GraphQuery): Promise<TraversalPath[]> {\n const startTime = Date.now();\n const queryConfig = config || {};\n const maxHops = queryConfig.maxHops || 3;\n const paths: TraversalPath[] = [];\n\n // Find starting nodes based on query\n const startNodes = await this.findStartNodes(query, queryConfig);\n\n if (startNodes.length === 0) {\n logger.warn('No starting nodes found for query', { query });\n return [];\n }\n\n // Perform BFS/DFS traversal from each start node\n for (const startNode of startNodes) {\n const nodePaths = await this.traverseFromNode(\n startNode,\n query,\n maxHops,\n queryConfig\n );\n paths.push(...nodePaths);\n }\n\n // Sort by score and limit results\n paths.sort((a, b) => b.score - a.score);\n const topPaths = paths.slice(0, 10);\n\n logger.info('Graph traversal complete', {\n query: query.substring(0, 50),\n 
startNodes: startNodes.length,\n pathsFound: paths.length,\n timeMs: Date.now() - startTime,\n });\n\n return topPaths;\n }\n\n /**\n * Find starting nodes for traversal\n */\n private async findStartNodes(\n query: string,\n config: GraphQuery\n ): Promise<GraphNode[]> {\n if (config.startNodes) {\n return config.startNodes\n .map((id) => this.nodeIndex.get(id))\n .filter((n) => n !== undefined) as GraphNode[];\n }\n\n // Find nodes matching query\n const queryWords = query.toLowerCase().split(/\\s+/);\n const candidates: Array<{ node: GraphNode; score: number }> = [];\n\n for (const node of this.nodeIndex.values()) {\n if (config.nodeTypes && !config.nodeTypes.includes(node.type)) {\n continue;\n }\n\n const label = node.label.toLowerCase();\n const tags = (node.metadata.tags || []).join(' ').toLowerCase();\n\n let score = 0;\n for (const word of queryWords) {\n if (label.includes(word)) score += 2;\n if (tags.includes(word)) score += 1;\n }\n\n if (score > 0) {\n score *= node.importance; // Weight by importance\n candidates.push({ node, score });\n }\n }\n\n // Sort and return top candidates\n candidates.sort((a, b) => b.score - a.score);\n return candidates.slice(0, 5).map((c) => c.node);\n }\n\n /**\n * Traverse from a specific node\n */\n private async traverseFromNode(\n startNode: GraphNode,\n query: string,\n maxHops: number,\n config: GraphQuery\n ): Promise<TraversalPath[]> {\n const paths: TraversalPath[] = [];\n const visited = new Set<string>();\n\n // BFS queue: [node, path, depth]\n const queue: Array<{\n node: GraphNode;\n path: TraversalPath;\n depth: number;\n }> = [\n {\n node: startNode,\n path: {\n nodes: [startNode],\n edges: [],\n score: startNode.importance,\n reasoning: `Starting from ${startNode.type}: ${startNode.label}`,\n },\n depth: 0,\n },\n ];\n\n while (queue.length > 0) {\n const current = queue.shift()!;\n\n if (current.depth >= maxHops) {\n paths.push(current.path);\n continue;\n }\n\n if (visited.has(current.node.id)) {\n continue;\n }\n visited.add(current.node.id);\n\n // Get outgoing edges\n const edges = this.adjacencyList.get(current.node.id) || [];\n\n for (const edge of edges) {\n // Filter by edge type if specified\n if (config.edgeTypes && !config.edgeTypes.includes(edge.type)) {\n continue;\n }\n\n // Filter by minimum weight\n if (config.minWeight && edge.weight < config.minWeight) {\n continue;\n }\n\n const targetNode = this.nodeIndex.get(edge.target);\n if (!targetNode) continue;\n\n // Calculate path score\n const pathScore = this.calculatePathScore(\n current.path,\n edge,\n targetNode,\n query\n );\n\n // Create new path\n const newPath: TraversalPath = {\n nodes: [...current.path.nodes, targetNode],\n edges: [...current.path.edges, edge],\n score: pathScore,\n reasoning: `${current.path.reasoning} \u2192 ${edge.type} \u2192 ${targetNode.label}`,\n };\n\n queue.push({\n node: targetNode,\n path: newPath,\n depth: current.depth + 1,\n });\n }\n }\n\n return paths;\n }\n\n /**\n * Calculate path score\n */\n private calculatePathScore(\n currentPath: TraversalPath,\n edge: GraphEdge,\n targetNode: GraphNode,\n query: string\n ): number {\n // Base score from current path\n let score = currentPath.score;\n\n // Edge weight contribution\n score *= edge.weight;\n\n // Target node importance\n score *= targetNode.importance;\n\n // Query relevance\n const queryWords = query.toLowerCase().split(/\\s+/);\n const targetLabel = targetNode.label.toLowerCase();\n let relevance = 0;\n for (const word of queryWords) {\n if 
(targetLabel.includes(word)) relevance += 1;\n }\n score *= 1 + relevance * 0.2;\n\n // Path length penalty (prefer shorter paths)\n score *= Math.pow(0.9, currentPath.nodes.length);\n\n return score;\n }\n\n /**\n * Calculate similarity between traces\n */\n private calculateSimilarity(a: Trace, b: Trace): number {\n // Type similarity\n const typeSim = a.type === b.type ? 0.3 : 0;\n\n // File overlap\n const filesA = new Set(a.metadata.filesModified);\n const filesB = new Set(b.metadata.filesModified);\n const intersection = new Set([...filesA].filter((x) => filesB.has(x)));\n const union = new Set([...filesA, ...filesB]);\n const fileSim = union.size > 0 ? (intersection.size / union.size) * 0.3 : 0;\n\n // Tool overlap\n const toolsA = new Set(a.tools.map((t) => t.tool));\n const toolsB = new Set(b.tools.map((t) => t.tool));\n const toolIntersection = new Set([...toolsA].filter((x) => toolsB.has(x)));\n const toolUnion = new Set([...toolsA, ...toolsB]);\n const toolSim =\n toolUnion.size > 0 ? (toolIntersection.size / toolUnion.size) * 0.2 : 0;\n\n // Summary text similarity (simple word overlap)\n const wordsA = new Set(a.summary.toLowerCase().split(/\\s+/));\n const wordsB = new Set(b.summary.toLowerCase().split(/\\s+/));\n const wordIntersection = new Set([...wordsA].filter((x) => wordsB.has(x)));\n const wordUnion = new Set([...wordsA, ...wordsB]);\n const textSim =\n wordUnion.size > 0 ? (wordIntersection.size / wordUnion.size) * 0.2 : 0;\n\n return typeSim + fileSim + toolSim + textSim;\n }\n\n /**\n * Insert node into database\n */\n private async insertNode(node: GraphNode): Promise<void> {\n this.db\n .prepare(\n `\n INSERT OR REPLACE INTO graph_nodes (\n id, type, label, content, importance,\n created, modified, accessed, access_count,\n trace_ids, frame_ids, tags, embeddings\n ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)\n `\n )\n .run(\n node.id,\n node.type,\n node.label,\n node.content || null,\n node.importance,\n node.metadata.created,\n node.metadata.modified,\n node.metadata.accessed,\n node.metadata.accessCount,\n JSON.stringify(node.metadata.traceIds || []),\n JSON.stringify(node.metadata.frameIds || []),\n JSON.stringify(node.metadata.tags || []),\n node.metadata.embeddings ? Buffer.from(node.metadata.embeddings) : null\n );\n }\n\n /**\n * Create edge in database\n */\n private async createEdge(edge: GraphEdge): Promise<void> {\n try {\n this.db\n .prepare(\n `\n INSERT OR IGNORE INTO graph_edges (\n id, source, target, type, weight,\n created, evidence, bidirectional\n ) VALUES (?, ?, ?, ?, ?, ?, ?, ?)\n `\n )\n .run(\n edge.id,\n edge.source,\n edge.target,\n edge.type,\n edge.weight,\n edge.metadata.created,\n JSON.stringify(edge.metadata.evidence || []),\n edge.metadata.bidirectional ? 
1 : 0\n );\n } catch (error: unknown) {\n // Ignore duplicate edges\n }\n }\n\n /**\n * Load graph into memory for fast traversal\n */\n private async loadGraphIntoMemory(): Promise<void> {\n // Load nodes\n const nodes = this.db.prepare('SELECT * FROM graph_nodes').all() as any[];\n\n for (const row of nodes) {\n const node: GraphNode = {\n id: row.id,\n type: row.type as NodeType,\n label: row.label,\n content: row.content,\n importance: row.importance,\n metadata: {\n created: row.created,\n modified: row.modified,\n accessed: row.accessed,\n accessCount: row.access_count,\n traceIds: JSON.parse(row.trace_ids || '[]'),\n frameIds: JSON.parse(row.frame_ids || '[]'),\n tags: JSON.parse(row.tags || '[]'),\n },\n };\n this.nodeIndex.set(node.id, node);\n }\n\n // Load edges\n const edges = this.db.prepare('SELECT * FROM graph_edges').all() as any[];\n\n for (const row of edges) {\n const edge: GraphEdge = {\n id: row.id,\n source: row.source,\n target: row.target,\n type: row.type as EdgeType,\n weight: row.weight,\n metadata: {\n created: row.created,\n evidence: JSON.parse(row.evidence || '[]'),\n bidirectional: row.bidirectional === 1,\n },\n };\n\n // Add to adjacency list\n if (!this.adjacencyList.has(edge.source)) {\n this.adjacencyList.set(edge.source, []);\n }\n this.adjacencyList.get(edge.source)!.push(edge);\n\n // Add to reverse adjacency list\n if (!this.reverseAdjacencyList.has(edge.target)) {\n this.reverseAdjacencyList.set(edge.target, []);\n }\n this.reverseAdjacencyList.get(edge.target)!.push(edge);\n\n // If bidirectional, add reverse edge\n if (edge.metadata.bidirectional) {\n const reverseEdge = {\n ...edge,\n source: edge.target,\n target: edge.source,\n };\n if (!this.adjacencyList.has(reverseEdge.source)) {\n this.adjacencyList.set(reverseEdge.source, []);\n }\n this.adjacencyList.get(reverseEdge.source)!.push(reverseEdge);\n }\n }\n }\n\n /**\n * Generate unique ID\n */\n private generateId(prefix: string): string {\n return `${prefix}_${crypto.randomBytes(8).toString('hex')}`;\n }\n\n /**\n * Get graph statistics\n */\n getStatistics(): any {\n const nodeStats = this.db\n .prepare(\n `\n SELECT \n type,\n COUNT(*) as count,\n AVG(importance) as avg_importance,\n MAX(importance) as max_importance\n FROM graph_nodes\n GROUP BY type\n `\n )\n .all();\n\n const edgeStats = this.db\n .prepare(\n `\n SELECT \n type,\n COUNT(*) as count,\n AVG(weight) as avg_weight,\n MAX(weight) as max_weight\n FROM graph_edges\n GROUP BY type\n `\n )\n .all();\n\n return {\n nodes: {\n total: this.nodeIndex.size,\n byType: nodeStats,\n inMemory: this.nodeIndex.size,\n },\n edges: {\n total: edgeStats.reduce((sum: number, e: any) => sum + e.count, 0),\n byType: edgeStats,\n adjacencyListSize: this.adjacencyList.size,\n },\n connectivity: {\n avgDegree: this.calculateAverageDegree(),\n maxDegree: this.calculateMaxDegree(),\n },\n };\n }\n\n /**\n * Calculate average node degree\n */\n private calculateAverageDegree(): number {\n if (this.nodeIndex.size === 0) return 0;\n\n let totalDegree = 0;\n for (const nodeId of this.nodeIndex.keys()) {\n const outgoing = this.adjacencyList.get(nodeId)?.length || 0;\n const incoming = this.reverseAdjacencyList.get(nodeId)?.length || 0;\n totalDegree += outgoing + incoming;\n }\n\n return totalDegree / this.nodeIndex.size;\n }\n\n /**\n * Calculate maximum node degree\n */\n private calculateMaxDegree(): number {\n let maxDegree = 0;\n\n for (const nodeId of this.nodeIndex.keys()) {\n const outgoing = this.adjacencyList.get(nodeId)?.length || 0;\n const 
incoming = this.reverseAdjacencyList.get(nodeId)?.length || 0;\n maxDegree = Math.max(maxDegree, outgoing + incoming);\n }\n\n return maxDegree;\n }\n\n /**\n * Export graph for visualization\n */\n exportForVisualization(): any {\n const nodes = Array.from(this.nodeIndex.values()).map((node) => ({\n id: node.id,\n label: node.label,\n type: node.type,\n size: node.importance * 10,\n color: this.getNodeColor(node.type),\n }));\n\n const edges = [];\n for (const edgeList of this.adjacencyList.values()) {\n for (const edge of edgeList) {\n edges.push({\n source: edge.source,\n target: edge.target,\n type: edge.type,\n weight: edge.weight,\n color: this.getEdgeColor(edge.type),\n });\n }\n }\n\n return { nodes, edges };\n }\n\n /**\n * Get node color for visualization\n */\n private getNodeColor(type: NodeType): string {\n const colors: Record<NodeType, string> = {\n document: '#4A90E2',\n concept: '#7ED321',\n entity: '#F5A623',\n event: '#D0021B',\n decision: '#9013FE',\n constraint: '#50E3C2',\n topic: '#B8E986',\n };\n return colors[type] || '#CCCCCC';\n }\n\n /**\n * Get edge color for visualization\n */\n private getEdgeColor(type: EdgeType): string {\n const colors: Record<EdgeType, string> = {\n citation: '#4A90E2',\n topic_overlap: '#7ED321',\n temporal: '#F5A623',\n causal: '#D0021B',\n semantic: '#9013FE',\n structural: '#50E3C2',\n dependency: '#B8E986',\n evolution: '#417505',\n contradiction: '#FF0000',\n implementation: '#0099FF',\n };\n return colors[type] || '#999999';\n }\n}\n"],
  "mappings": ";;;;AASA,SAAS,cAAc;AAGvB,OAAO,YAAY;AA+EZ,MAAM,uBAAoC;AAAA,EAC/C,UAAU;AAAA,EACV,UAAU;AAAA,EACV,eAAe;AAAA,EACf,qBAAqB;AAAA,EACrB,kBAAkB;AAAA,EAClB,qBAAqB;AACvB;AAKO,MAAM,eAAe;AAAA,EAClB;AAAA,EACA;AAAA,EACA,YAAoC,oBAAI,IAAI;AAAA,EAC5C,gBAA0C,oBAAI,IAAI;AAAA,EAClD,uBAAiD,oBAAI,IAAI;AAAA,EAEjE,YAAY,IAAuB,SAA+B,CAAC,GAAG;AACpE,SAAK,KAAK;AACV,SAAK,SAAS,EAAE,GAAG,sBAAsB,GAAG,OAAO;AACnD,SAAK,iBAAiB;AAAA,EACxB;AAAA,EAEQ,mBAAyB;AAE/B,SAAK,GAAG,KAAK;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,KAgBZ;AAGD,SAAK,GAAG,KAAK;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,KAcZ;AAGD,SAAK,GAAG,KAAK;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,KAOZ;AAAA,EACH;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,WAAW,QAAiB,QAAgC;AAChE,WAAO,KAAK,2BAA2B;AAAA,MACrC,YAAY,OAAO;AAAA,MACnB,YAAY,OAAO;AAAA,IACrB,CAAC;AAGD,eAAW,SAAS,QAAQ;AAC1B,YAAM,KAAK,mBAAmB,KAAK;AAAA,IACrC;AAGA,eAAW,SAAS,QAAQ;AAC1B,YAAM,KAAK,kBAAkB,KAAK;AAAA,IACpC;AAGA,UAAM,KAAK,uBAAuB,MAAM;AACxC,UAAM,KAAK,qBAAqB,MAAM;AACtC,UAAM,KAAK,uBAAuB,MAAM;AACxC,UAAM,KAAK,oBAAoB,MAAM;AAGrC,UAAM,KAAK,oBAAoB;AAE/B,WAAO,KAAK,4BAA4B;AAAA,MACtC,OAAO,KAAK,UAAU;AAAA,MACtB,OAAO,KAAK,cAAc;AAAA,IAC5B,CAAC;AAAA,EACH;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,mBAAmB,OAAkC;AACjE,UAAM,OAAkB;AAAA,MACtB,IAAI,OAAO,MAAM,EAAE;AAAA,MACnB,MAAM;AAAA,MACN,OAAO,MAAM,QAAQ,UAAU,GAAG,GAAG;AAAA,MACrC,SAAS,KAAK,UAAU,KAAK;AAAA,MAC7B,YAAY,MAAM;AAAA,MAClB,UAAU;AAAA,QACR,SAAS,MAAM,SAAS;AAAA,QACxB,UAAU,MAAM,SAAS;AAAA,QACzB,UAAU,KAAK,IAAI;AAAA,QACnB,aAAa;AAAA,QACb,UAAU,CAAC,MAAM,EAAE;AAAA,QACnB,MAAM,CAAC,MAAM,MAAM,GAAG,MAAM,SAAS,cAAc,MAAM,GAAG,CAAC,CAAC;AAAA,MAChE;AAAA,IACF;AAEA,UAAM,KAAK,WAAW,IAAI;AAC1B,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,kBAAkB,OAAkC;AAChE,UAAM,OAAkB;AAAA,MACtB,IAAI,WAAW,MAAM,EAAE;AAAA,MACvB,MAAM;AAAA,MACN,OAAO,MAAM;AAAA,MACb,YAAY,MAAM;AAAA,MAClB,UAAU;AAAA,QACR,SAAS,MAAM;AAAA,QACf,UAAU,MAAM,cAAc,MAAM;AAAA,QACpC,UAAU,KAAK,IAAI;AAAA,QACnB,aAAa;AAAA,QACb,UAAU,CAAC,MAAM,EAAE;AAAA,QACnB,MAAM,CAAC,MAAM,IAAI;AAAA,MACnB;AAAA,IACF;AAEA,UAAM,KAAK,WAAW,IAAI;AAC1B,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,uBAAuB,QAAgC;AAEnE,UAAM,SAAS,CAAC,GAAG,MAAM,EAAE;AAAA,MACzB,CAAC,GAAG,MAAM,EAAE,SAAS,YAAY,EAAE,SAAS;AAAA,IAC9C;AAEA,aAAS,IAAI,GAAG,IAAI,OAAO,SAAS,GAAG,KAAK;AAC1C,YAAM,UAAU,OAAO,CAAC;AACxB,YAAM,OAAO,OAAO,IAAI,CAAC;AAGzB,YAAM,WAAW,KAAK,SAAS,YAAY,QAAQ,SAAS;AAC5D,UAAI,WAAW,KAAK,KAAK,KAAM;AAC7B,cAAM,SAAS,KAAK,IAAI,YAAY,MAAO;AAE3C,cAAM,KAAK,WAAW;AAAA,UACpB,IAAI,KAAK,WAAW,MAAM;AAAA,UAC1B,QAAQ,OAAO,QAAQ,EAAE;AAAA,UACzB,QAAQ,OAAO,KAAK,EAAE;AAAA,UACtB,MAAM;AAAA,UACN;AAAA,UACA,UAAU;AAAA,YACR,SAAS,KAAK,IAAI;AAAA,YAClB,UAAU,CAAC,GAAG,QAAQ,QAAQ;AAAA,UAChC;AAAA,QACF,CAAC;AAAA,MACH;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,qBAAqB,QAAgC;AACjE,eAAW,SAAS,QAAQ;AAC1B,UAAI,MAAM,SAAS,eAAe,MAAM,SAAS,YAAY,SAAS,GAAG;AACvE,mBAAW,YAAY,MAAM,SAAS,aAAa;AACjD,gBAAM,eAAe,OAAO,KAAK,CAAC,MAAM,EAAE,OAAO,QAAQ;AACzD,cAAI,cAAc;AAChB,kBAAM,KAAK,WAAW;AAAA,cACpB,IAAI,KAAK,WAAW,MAAM;AAAA,cAC1B,QAAQ,OAAO,QAAQ;AAAA,cACvB,QAAQ,OAAO,MAAM,EAAE;AAAA,cACvB,MAAM;AAAA,cACN,QAAQ;AAAA;AAAA,cACR,UAAU;AAAA,gBACR,SAAS,KAAK,IAAI;AAAA,gBAClB,UAAU,CAAC,uBAAuB;AAAA,cACpC;AAAA,YACF,CAAC;AAAA,UACH;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,uBAAuB,QAAgC;AAEnE,aAAS,IAAI,GAAG,IAAI,OAAO,SAAS,GAAG,KAAK;AAC1C,eAAS,IAAI,IAAI,GAAG,IAAI,OAAO,QAAQ,KAAK;AAC1C,cAAM,aAAa,KAAK,oBAAoB,OAAO,CAAC,GAAG,OAAO,CAAC,CAAC;AAEhE,YAAI,aAAa,KAAK,OAAO,eAAe;AAC1C,gBAAM,KAAK,WAAW;AAAA,YACpB,IAAI,KAAK,WAAW,MAAM;AAAA,YAC1B,QAAQ,OAAO,OAAO,CAAC,EAAE,EAAE;AAAA,YAC3B,QAAQ,OAAO,OAAO,CAAC,EAAE,EAAE;AAAA,YAC3B,MAAM;AA
AA,YACN,QAAQ;AAAA,YACR,UAAU;AAAA,cACR,SAAS,KAAK,IAAI;AAAA,cAClB,UAAU,CAAC,eAAe,WAAW,QAAQ,CAAC,CAAC,EAAE;AAAA,cACjD,eAAe;AAAA,YACjB;AAAA,UACF,CAAC;AAAA,QACH;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,oBAAoB,QAAgC;AAEhE,UAAM,cAAoC,oBAAI,IAAI;AAElD,eAAW,SAAS,QAAQ;AAC1B,YAAM,QAAQ,MAAM;AACpB,UAAI,CAAC,YAAY,IAAI,KAAK,GAAG;AAC3B,oBAAY,IAAI,OAAO,CAAC,CAAC;AAAA,MAC3B;AACA,kBAAY,IAAI,KAAK,EAAG,KAAK,KAAK;AAAA,IACpC;AAGA,eAAW,CAAC,OAAO,KAAK,KAAK,aAAa;AACxC,UAAI,MAAM,SAAS,EAAG;AAGtB,YAAM,YAAuB;AAAA,QAC3B,IAAI,SAAS,KAAK;AAAA,QAClB,MAAM;AAAA,QACN,OAAO;AAAA,QACP,YAAY;AAAA,QACZ,UAAU;AAAA,UACR,SAAS,KAAK,IAAI;AAAA,UAClB,UAAU,KAAK,IAAI;AAAA,UACnB,UAAU,KAAK,IAAI;AAAA,UACnB,aAAa;AAAA,UACb,MAAM,CAAC,KAAK;AAAA,QACd;AAAA,MACF;AAEA,YAAM,KAAK,WAAW,SAAS;AAG/B,iBAAW,SAAS,OAAO;AACzB,cAAM,KAAK,WAAW;AAAA,UACpB,IAAI,KAAK,WAAW,MAAM;AAAA,UAC1B,QAAQ,OAAO,MAAM,EAAE;AAAA,UACvB,QAAQ,UAAU;AAAA,UAClB,MAAM;AAAA,UACN,QAAQ;AAAA,UACR,UAAU;AAAA,YACR,SAAS,KAAK,IAAI;AAAA,YAClB,eAAe;AAAA,UACjB;AAAA,QACF,CAAC;AAAA,MACH;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,SAAS,OAAe,QAA+C;AAC3E,UAAM,YAAY,KAAK,IAAI;AAC3B,UAAM,cAAc,UAAU,CAAC;AAC/B,UAAM,UAAU,YAAY,WAAW;AACvC,UAAM,QAAyB,CAAC;AAGhC,UAAM,aAAa,MAAM,KAAK,eAAe,OAAO,WAAW;AAE/D,QAAI,WAAW,WAAW,GAAG;AAC3B,aAAO,KAAK,qCAAqC,EAAE,MAAM,CAAC;AAC1D,aAAO,CAAC;AAAA,IACV;AAGA,eAAW,aAAa,YAAY;AAClC,YAAM,YAAY,MAAM,KAAK;AAAA,QAC3B;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,MACF;AACA,YAAM,KAAK,GAAG,SAAS;AAAA,IACzB;AAGA,UAAM,KAAK,CAAC,GAAG,MAAM,EAAE,QAAQ,EAAE,KAAK;AACtC,UAAM,WAAW,MAAM,MAAM,GAAG,EAAE;AAElC,WAAO,KAAK,4BAA4B;AAAA,MACtC,OAAO,MAAM,UAAU,GAAG,EAAE;AAAA,MAC5B,YAAY,WAAW;AAAA,MACvB,YAAY,MAAM;AAAA,MAClB,QAAQ,KAAK,IAAI,IAAI;AAAA,IACvB,CAAC;AAED,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,eACZ,OACA,QACsB;AACtB,QAAI,OAAO,YAAY;AACrB,aAAO,OAAO,WACX,IAAI,CAAC,OAAO,KAAK,UAAU,IAAI,EAAE,CAAC,EAClC,OAAO,CAAC,MAAM,MAAM,MAAS;AAAA,IAClC;AAGA,UAAM,aAAa,MAAM,YAAY,EAAE,MAAM,KAAK;AAClD,UAAM,aAAwD,CAAC;AAE/D,eAAW,QAAQ,KAAK,UAAU,OAAO,GAAG;AAC1C,UAAI,OAAO,aAAa,CAAC,OAAO,UAAU,SAAS,KAAK,IAAI,GAAG;AAC7D;AAAA,MACF;AAEA,YAAM,QAAQ,KAAK,MAAM,YAAY;AACrC,YAAM,QAAQ,KAAK,SAAS,QAAQ,CAAC,GAAG,KAAK,GAAG,EAAE,YAAY;AAE9D,UAAI,QAAQ;AACZ,iBAAW,QAAQ,YAAY;AAC7B,YAAI,MAAM,SAAS,IAAI,EAAG,UAAS;AACnC,YAAI,KAAK,SAAS,IAAI,EAAG,UAAS;AAAA,MACpC;AAEA,UAAI,QAAQ,GAAG;AACb,iBAAS,KAAK;AACd,mBAAW,KAAK,EAAE,MAAM,MAAM,CAAC;AAAA,MACjC;AAAA,IACF;AAGA,eAAW,KAAK,CAAC,GAAG,MAAM,EAAE,QAAQ,EAAE,KAAK;AAC3C,WAAO,WAAW,MAAM,GAAG,CAAC,EAAE,IAAI,CAAC,MAAM,EAAE,IAAI;AAAA,EACjD;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,iBACZ,WACA,OACA,SACA,QAC0B;AAC1B,UAAM,QAAyB,CAAC;AAChC,UAAM,UAAU,oBAAI,IAAY;AAGhC,UAAM,QAID;AAAA,MACH;AAAA,QACE,MAAM;AAAA,QACN,MAAM;AAAA,UACJ,OAAO,CAAC,SAAS;AAAA,UACjB,OAAO,CAAC;AAAA,UACR,OAAO,UAAU;AAAA,UACjB,WAAW,iBAAiB,UAAU,IAAI,KAAK,UAAU,KAAK;AAAA,QAChE;AAAA,QACA,OAAO;AAAA,MACT;AAAA,IACF;AAEA,WAAO,MAAM,SAAS,GAAG;AACvB,YAAM,UAAU,MAAM,MAAM;AAE5B,UAAI,QAAQ,SAAS,SAAS;AAC5B,cAAM,KAAK,QAAQ,IAAI;AACvB;AAAA,MACF;AAEA,UAAI,QAAQ,IAAI,QAAQ,KAAK,EAAE,GAAG;AAChC;AAAA,MACF;AACA,cAAQ,IAAI,QAAQ,KAAK,EAAE;AAG3B,YAAM,QAAQ,KAAK,cAAc,IAAI,QAAQ,KAAK,EAAE,KAAK,CAAC;AAE1D,iBAAW,QAAQ,OAAO;AAExB,YAAI,OAAO,aAAa,CAAC,OAAO,UAAU,SAAS,KAAK,IAAI,GAAG;AAC7D;AAAA,QACF;AAGA,YAAI,OAAO,aAAa,KAAK,SAAS,OAAO,WAAW;AACtD;AAAA,QACF;AAEA,cAAM,aAAa,KAAK,UAAU,IAAI,KAAK,MAAM;AACjD,YAAI,CAAC,WAAY;AAGjB,cAAM,YAAY,KAAK;AAAA,UACrB,QAAQ;AAAA,UACR;AAAA,UACA;AAAA,UACA;AAAA,QACF;AAGA,cAAM,UAAyB;AAAA,UAC7B,OAAO,CAAC,GAAG,QAAQ,KAAK,OAAO,UAAU;AAAA,UACzC,OAAO,CAAC,GAAG,QAAQ,KAAK,OAAO,IAAI;AAAA,UACnC,OAAO;AAAA,UACP,WAAW,GAAG,QAAQ,KAAK,SAAS,WAAM,KAAK,IAAI,WAAM,WAAW,KAAK;AAAA,QAC3E;AAEA,cAAM,KAAK;AAAA,UACT,MAAM;AA
AA,UACN,MAAM;AAAA,UACN,OAAO,QAAQ,QAAQ;AAAA,QACzB,CAAC;AAAA,MACH;AAAA,IACF;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKQ,mBACN,aACA,MACA,YACA,OACQ;AAER,QAAI,QAAQ,YAAY;AAGxB,aAAS,KAAK;AAGd,aAAS,WAAW;AAGpB,UAAM,aAAa,MAAM,YAAY,EAAE,MAAM,KAAK;AAClD,UAAM,cAAc,WAAW,MAAM,YAAY;AACjD,QAAI,YAAY;AAChB,eAAW,QAAQ,YAAY;AAC7B,UAAI,YAAY,SAAS,IAAI,EAAG,cAAa;AAAA,IAC/C;AACA,aAAS,IAAI,YAAY;AAGzB,aAAS,KAAK,IAAI,KAAK,YAAY,MAAM,MAAM;AAE/C,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKQ,oBAAoB,GAAU,GAAkB;AAEtD,UAAM,UAAU,EAAE,SAAS,EAAE,OAAO,MAAM;AAG1C,UAAM,SAAS,IAAI,IAAI,EAAE,SAAS,aAAa;AAC/C,UAAM,SAAS,IAAI,IAAI,EAAE,SAAS,aAAa;AAC/C,UAAM,eAAe,IAAI,IAAI,CAAC,GAAG,MAAM,EAAE,OAAO,CAAC,MAAM,OAAO,IAAI,CAAC,CAAC,CAAC;AACrE,UAAM,QAAQ,oBAAI,IAAI,CAAC,GAAG,QAAQ,GAAG,MAAM,CAAC;AAC5C,UAAM,UAAU,MAAM,OAAO,IAAK,aAAa,OAAO,MAAM,OAAQ,MAAM;AAG1E,UAAM,SAAS,IAAI,IAAI,EAAE,MAAM,IAAI,CAAC,MAAM,EAAE,IAAI,CAAC;AACjD,UAAM,SAAS,IAAI,IAAI,EAAE,MAAM,IAAI,CAAC,MAAM,EAAE,IAAI,CAAC;AACjD,UAAM,mBAAmB,IAAI,IAAI,CAAC,GAAG,MAAM,EAAE,OAAO,CAAC,MAAM,OAAO,IAAI,CAAC,CAAC,CAAC;AACzE,UAAM,YAAY,oBAAI,IAAI,CAAC,GAAG,QAAQ,GAAG,MAAM,CAAC;AAChD,UAAM,UACJ,UAAU,OAAO,IAAK,iBAAiB,OAAO,UAAU,OAAQ,MAAM;AAGxE,UAAM,SAAS,IAAI,IAAI,EAAE,QAAQ,YAAY,EAAE,MAAM,KAAK,CAAC;AAC3D,UAAM,SAAS,IAAI,IAAI,EAAE,QAAQ,YAAY,EAAE,MAAM,KAAK,CAAC;AAC3D,UAAM,mBAAmB,IAAI,IAAI,CAAC,GAAG,MAAM,EAAE,OAAO,CAAC,MAAM,OAAO,IAAI,CAAC,CAAC,CAAC;AACzE,UAAM,YAAY,oBAAI,IAAI,CAAC,GAAG,QAAQ,GAAG,MAAM,CAAC;AAChD,UAAM,UACJ,UAAU,OAAO,IAAK,iBAAiB,OAAO,UAAU,OAAQ,MAAM;AAExE,WAAO,UAAU,UAAU,UAAU;AAAA,EACvC;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,WAAW,MAAgC;AACvD,SAAK,GACF;AAAA,MACC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,IAOF,EACC;AAAA,MACC,KAAK;AAAA,MACL,KAAK;AAAA,MACL,KAAK;AAAA,MACL,KAAK,WAAW;AAAA,MAChB,KAAK;AAAA,MACL,KAAK,SAAS;AAAA,MACd,KAAK,SAAS;AAAA,MACd,KAAK,SAAS;AAAA,MACd,KAAK,SAAS;AAAA,MACd,KAAK,UAAU,KAAK,SAAS,YAAY,CAAC,CAAC;AAAA,MAC3C,KAAK,UAAU,KAAK,SAAS,YAAY,CAAC,CAAC;AAAA,MAC3C,KAAK,UAAU,KAAK,SAAS,QAAQ,CAAC,CAAC;AAAA,MACvC,KAAK,SAAS,aAAa,OAAO,KAAK,KAAK,SAAS,UAAU,IAAI;AAAA,IACrE;AAAA,EACJ;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,WAAW,MAAgC;AACvD,QAAI;AACF,WAAK,GACF;AAAA,QACC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,MAMF,EACC;AAAA,QACC,KAAK;AAAA,QACL,KAAK;AAAA,QACL,KAAK;AAAA,QACL,KAAK;AAAA,QACL,KAAK;AAAA,QACL,KAAK,SAAS;AAAA,QACd,KAAK,UAAU,KAAK,SAAS,YAAY,CAAC,CAAC;AAAA,QAC3C,KAAK,SAAS,gBAAgB,IAAI;AAAA,MACpC;AAAA,IACJ,SAAS,OAAgB;AAAA,IAEzB;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,sBAAqC;AAEjD,UAAM,QAAQ,KAAK,GAAG,QAAQ,2BAA2B,EAAE,IAAI;AAE/D,eAAW,OAAO,OAAO;AACvB,YAAM,OAAkB;AAAA,QACtB,IAAI,IAAI;AAAA,QACR,MAAM,IAAI;AAAA,QACV,OAAO,IAAI;AAAA,QACX,SAAS,IAAI;AAAA,QACb,YAAY,IAAI;AAAA,QAChB,UAAU;AAAA,UACR,SAAS,IAAI;AAAA,UACb,UAAU,IAAI;AAAA,UACd,UAAU,IAAI;AAAA,UACd,aAAa,IAAI;AAAA,UACjB,UAAU,KAAK,MAAM,IAAI,aAAa,IAAI;AAAA,UAC1C,UAAU,KAAK,MAAM,IAAI,aAAa,IAAI;AAAA,UAC1C,MAAM,KAAK,MAAM,IAAI,QAAQ,IAAI;AAAA,QACnC;AAAA,MACF;AACA,WAAK,UAAU,IAAI,KAAK,IAAI,IAAI;AAAA,IAClC;AAGA,UAAM,QAAQ,KAAK,GAAG,QAAQ,2BAA2B,EAAE,IAAI;AAE/D,eAAW,OAAO,OAAO;AACvB,YAAM,OAAkB;AAAA,QACtB,IAAI,IAAI;AAAA,QACR,QAAQ,IAAI;AAAA,QACZ,QAAQ,IAAI;AAAA,QACZ,MAAM,IAAI;AAAA,QACV,QAAQ,IAAI;AAAA,QACZ,UAAU;AAAA,UACR,SAAS,IAAI;AAAA,UACb,UAAU,KAAK,MAAM,IAAI,YAAY,IAAI;AAAA,UACzC,eAAe,IAAI,kBAAkB;AAAA,QACvC;AAAA,MACF;AAGA,UAAI,CAAC,KAAK,cAAc,IAAI,KAAK,MAAM,GAAG;AACxC,aAAK,cAAc,IAAI,KAAK,QAAQ,CAAC,CAAC;AAAA,MACxC;AACA,WAAK,cAAc,IAAI,KAAK,MAAM,EAAG,KAAK,IAAI;AAG9C,UAAI,CAAC,KAAK,qBAAqB,IAAI,KAAK,MAAM,GAAG;AAC/C,aAAK,qBAAqB,IAAI,KAAK,QAAQ,CAAC,CAAC;AAAA,MAC/C;AACA,WAAK,qBAAqB,IAAI,KAAK,MAAM,EAAG,KAAK,IAAI;AAGrD,UAAI,KAAK,SAAS,eAAe;AAC/B,cAAM,cAAc;AAAA,UAClB,GAAG;AAAA,UACH,QAAQ,KAAK;AAAA,UACb,QAAQ,KAAK;AAAA,QACf;AACA,YAAI,
CAAC,KAAK,cAAc,IAAI,YAAY,MAAM,GAAG;AAC/C,eAAK,cAAc,IAAI,YAAY,QAAQ,CAAC,CAAC;AAAA,QAC/C;AACA,aAAK,cAAc,IAAI,YAAY,MAAM,EAAG,KAAK,WAAW;AAAA,MAC9D;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,WAAW,QAAwB;AACzC,WAAO,GAAG,MAAM,IAAI,OAAO,YAAY,CAAC,EAAE,SAAS,KAAK,CAAC;AAAA,EAC3D;AAAA;AAAA;AAAA;AAAA,EAKA,gBAAqB;AACnB,UAAM,YAAY,KAAK,GACpB;AAAA,MACC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,IASF,EACC,IAAI;AAEP,UAAM,YAAY,KAAK,GACpB;AAAA,MACC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,IASF,EACC,IAAI;AAEP,WAAO;AAAA,MACL,OAAO;AAAA,QACL,OAAO,KAAK,UAAU;AAAA,QACtB,QAAQ;AAAA,QACR,UAAU,KAAK,UAAU;AAAA,MAC3B;AAAA,MACA,OAAO;AAAA,QACL,OAAO,UAAU,OAAO,CAAC,KAAa,MAAW,MAAM,EAAE,OAAO,CAAC;AAAA,QACjE,QAAQ;AAAA,QACR,mBAAmB,KAAK,cAAc;AAAA,MACxC;AAAA,MACA,cAAc;AAAA,QACZ,WAAW,KAAK,uBAAuB;AAAA,QACvC,WAAW,KAAK,mBAAmB;AAAA,MACrC;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,yBAAiC;AACvC,QAAI,KAAK,UAAU,SAAS,EAAG,QAAO;AAEtC,QAAI,cAAc;AAClB,eAAW,UAAU,KAAK,UAAU,KAAK,GAAG;AAC1C,YAAM,WAAW,KAAK,cAAc,IAAI,MAAM,GAAG,UAAU;AAC3D,YAAM,WAAW,KAAK,qBAAqB,IAAI,MAAM,GAAG,UAAU;AAClE,qBAAe,WAAW;AAAA,IAC5B;AAEA,WAAO,cAAc,KAAK,UAAU;AAAA,EACtC;AAAA;AAAA;AAAA;AAAA,EAKQ,qBAA6B;AACnC,QAAI,YAAY;AAEhB,eAAW,UAAU,KAAK,UAAU,KAAK,GAAG;AAC1C,YAAM,WAAW,KAAK,cAAc,IAAI,MAAM,GAAG,UAAU;AAC3D,YAAM,WAAW,KAAK,qBAAqB,IAAI,MAAM,GAAG,UAAU;AAClE,kBAAY,KAAK,IAAI,WAAW,WAAW,QAAQ;AAAA,IACrD;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,yBAA8B;AAC5B,UAAM,QAAQ,MAAM,KAAK,KAAK,UAAU,OAAO,CAAC,EAAE,IAAI,CAAC,UAAU;AAAA,MAC/D,IAAI,KAAK;AAAA,MACT,OAAO,KAAK;AAAA,MACZ,MAAM,KAAK;AAAA,MACX,MAAM,KAAK,aAAa;AAAA,MACxB,OAAO,KAAK,aAAa,KAAK,IAAI;AAAA,IACpC,EAAE;AAEF,UAAM,QAAQ,CAAC;AACf,eAAW,YAAY,KAAK,cAAc,OAAO,GAAG;AAClD,iBAAW,QAAQ,UAAU;AAC3B,cAAM,KAAK;AAAA,UACT,QAAQ,KAAK;AAAA,UACb,QAAQ,KAAK;AAAA,UACb,MAAM,KAAK;AAAA,UACX,QAAQ,KAAK;AAAA,UACb,OAAO,KAAK,aAAa,KAAK,IAAI;AAAA,QACpC,CAAC;AAAA,MACH;AAAA,IACF;AAEA,WAAO,EAAE,OAAO,MAAM;AAAA,EACxB;AAAA;AAAA;AAAA;AAAA,EAKQ,aAAa,MAAwB;AAC3C,UAAM,SAAmC;AAAA,MACvC,UAAU;AAAA,MACV,SAAS;AAAA,MACT,QAAQ;AAAA,MACR,OAAO;AAAA,MACP,UAAU;AAAA,MACV,YAAY;AAAA,MACZ,OAAO;AAAA,IACT;AACA,WAAO,OAAO,IAAI,KAAK;AAAA,EACzB;AAAA;AAAA;AAAA;AAAA,EAKQ,aAAa,MAAwB;AAC3C,UAAM,SAAmC;AAAA,MACvC,UAAU;AAAA,MACV,eAAe;AAAA,MACf,UAAU;AAAA,MACV,QAAQ;AAAA,MACR,UAAU;AAAA,MACV,YAAY;AAAA,MACZ,YAAY;AAAA,MACZ,WAAW;AAAA,MACX,eAAe;AAAA,MACf,gBAAgB;AAAA,IAClB;AACA,WAAO,OAAO,IAAI,KAAK;AAAA,EACzB;AACF;",
  "names": []
  }
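
For orientation, the GraphRetrieval class embedded in the sourcesContent above can be exercised roughly as follows. This is a minimal sketch in an ESM context, not a documented entry point of @stackmemoryai/stackmemory: the deep dist import path and the pared-down Trace objects are assumptions inferred from this diff, carrying only the fields the class actually reads.

    // Minimal usage sketch of the GraphRetrieval class shown above.
    // ASSUMPTIONS: the deep dist import path is reachable (the package may
    // restrict it via an exports map), and the Trace objects below are
    // hypothetical stand-ins for the package's real Trace type.
    import Database from 'better-sqlite3';
    import { GraphRetrieval } from '@stackmemoryai/stackmemory/dist/core/retrieval/graph-retrieval.js';

    const db = new Database(':memory:');
    const graph = new GraphRetrieval(db, { minEdgeWeight: 0.2 });

    const now = Date.now();
    const traces = [
      {
        id: 't1',
        type: 'refactor',
        summary: 'Extract frame lifecycle hooks',
        score: 0.8,
        tools: [{ tool: 'Edit' }],
        metadata: {
          startTime: now - 60_000,
          endTime: now - 30_000,
          filesModified: ['src/core/context/frame-lifecycle-hooks.ts'],
        },
      },
      {
        id: 't2',
        type: 'refactor',
        summary: 'Wire lifecycle hooks into the frame manager',
        score: 0.7,
        tools: [{ tool: 'Edit' }, { tool: 'Bash' }],
        metadata: {
          startTime: now - 25_000,
          endTime: now - 10_000,
          filesModified: ['src/core/context/frame-manager.ts'],
        },
      },
    ] as any[];

    // buildGraph creates a document node per trace, a shared topic hub for
    // the two 'refactor' traces, and temporal/semantic edges between them.
    await graph.buildGraph(traces, /* frames */ []);

    // traverse() seeds a BFS from label/tag matches on the query, follows
    // edges up to maxHops, and returns the ten best-scoring paths.
    const paths = await graph.traverse('frame lifecycle', { maxHops: 2 });
    for (const p of paths) {
      console.log(p.score.toFixed(3), p.reasoning);
    }

Per calculatePathScore in the source above, each hop multiplies in the edge weight and the target node's importance and applies a 0.9^length penalty, so shorter paths over heavier edges rank first.
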
package/dist/core/retrieval/hierarchical-retrieval.js.map
@@ -1,7 +1,7 @@
  {
  "version": 3,
  "sources": ["../../../src/core/retrieval/hierarchical-retrieval.ts"],
- "sourcesContent": ["/**\n * Hierarchical Retrieval System with Progressive Summarization\n * Implements multi-level tree structure to prevent semantic collapse at scale\n *\n * Based on: Encyclopedia \u2192 Chapter \u2192 Section \u2192 Paragraph model\n * Reduces search space from 50K to ~200 at each hop\n */\n\nimport Database from 'better-sqlite3';\nimport { logger } from '../monitoring/logger.js';\nimport { Trace, CompressedTrace } from '../trace/types.js';\nimport { Frame, Anchor, Event } from '../context/frame-manager.js';\nimport * as zlib from 'zlib';\nimport { promisify } from 'util';\nimport crypto from 'crypto';\n\nconst gzipAsync = promisify(zlib.gzip);\nconst gunzipAsync = promisify(zlib.gunzip);\n\nexport interface HierarchyLevel {\n level: 'encyclopedia' | 'chapter' | 'section' | 'paragraph' | 'atom';\n id: string;\n parentId?: string;\n title: string;\n summary: string;\n embeddings?: number[];\n childCount: number;\n tokenCount: number;\n score: number;\n timeRange: {\n start: number;\n end: number;\n };\n metadata: {\n compressionRatio?: number;\n semanticDensity?: number;\n accessPattern?: 'hot' | 'warm' | 'cold';\n lastAccessed?: number;\n };\n}\n\nexport interface RetrievalNode {\n id: string;\n level: HierarchyLevel;\n children?: RetrievalNode[];\n content?: string; // Only for leaf nodes\n compressed?: boolean;\n}\n\nexport interface HierarchicalConfig {\n maxEncyclopediaSize: number; // Total documents (~50K)\n maxChapterSize: number; // Documents per chapter (~6K)\n maxSectionSize: number; // Docs per section (~250)\n maxParagraphSize: number; // Docs per paragraph (~10-20)\n compressionThreshold: number; // Token threshold for compression\n semanticThreshold: number; // Similarity threshold for grouping\n}\n\nexport const DEFAULT_HIERARCHY_CONFIG: HierarchicalConfig = {\n maxEncyclopediaSize: 50000,\n maxChapterSize: 6000,\n maxSectionSize: 250,\n maxParagraphSize: 20,\n compressionThreshold: 1000,\n semanticThreshold: 0.7,\n};\n\n/**\n * Manages hierarchical retrieval with progressive summarization\n */\nexport class HierarchicalRetrieval {\n private db: Database.Database;\n private config: HierarchicalConfig;\n private hierarchyCache: Map<string, RetrievalNode> = new Map();\n private summaryCache: Map<string, string> = new Map();\n\n constructor(db: Database.Database, config: Partial<HierarchicalConfig> = {}) {\n this.db = db;\n this.config = { ...DEFAULT_HIERARCHY_CONFIG, ...config };\n this.initializeSchema();\n }\n\n private initializeSchema(): void {\n this.db.exec(`\n CREATE TABLE IF NOT EXISTS hierarchy_nodes (\n id TEXT PRIMARY KEY,\n level TEXT NOT NULL,\n parent_id TEXT,\n title TEXT NOT NULL,\n summary TEXT,\n embeddings BLOB,\n child_count INTEGER DEFAULT 0,\n token_count INTEGER DEFAULT 0,\n score REAL DEFAULT 0,\n time_start INTEGER,\n time_end INTEGER,\n compression_ratio REAL,\n semantic_density REAL,\n access_pattern TEXT DEFAULT 'cold',\n last_accessed INTEGER,\n created_at INTEGER DEFAULT (unixepoch() * 1000),\n FOREIGN KEY (parent_id) REFERENCES hierarchy_nodes(id) ON DELETE CASCADE\n )\n `);\n\n this.db.exec(`\n CREATE INDEX IF NOT EXISTS idx_hierarchy_level ON hierarchy_nodes(level);\n CREATE INDEX IF NOT EXISTS idx_hierarchy_parent ON hierarchy_nodes(parent_id);\n CREATE INDEX IF NOT EXISTS idx_hierarchy_score ON hierarchy_nodes(score DESC);\n CREATE INDEX IF NOT EXISTS idx_hierarchy_time ON hierarchy_nodes(time_start, time_end);\n `);\n\n // Content storage for leaf nodes\n this.db.exec(`\n CREATE TABLE IF NOT EXISTS hierarchy_content 
(\n node_id TEXT PRIMARY KEY,\n content TEXT,\n compressed BOOLEAN DEFAULT 0,\n original_size INTEGER,\n compressed_size INTEGER,\n FOREIGN KEY (node_id) REFERENCES hierarchy_nodes(id) ON DELETE CASCADE\n )\n `);\n }\n\n /**\n * Build hierarchical structure from traces\n */\n async buildHierarchy(traces: Trace[]): Promise<RetrievalNode> {\n logger.info('Building hierarchical retrieval structure', {\n traceCount: traces.length,\n });\n\n // Sort traces by time and score\n traces.sort((a, b) => {\n const timeDiff = b.metadata.startTime - a.metadata.startTime;\n return timeDiff !== 0 ? timeDiff : b.score - a.score;\n });\n\n // Create root encyclopedia node\n const encyclopediaId = this.generateId('encyclopedia');\n const encyclopedia: RetrievalNode = {\n id: encyclopediaId,\n level: {\n level: 'encyclopedia',\n id: encyclopediaId,\n title: 'Knowledge Base',\n summary: await this.generateSummary(traces, 'encyclopedia'),\n childCount: 0,\n tokenCount: traces.reduce((sum, t) => sum + (t.tokenCount || 0), 0),\n score: Math.max(...traces.map((t) => t.score)),\n timeRange: {\n start: Math.min(...traces.map((t) => t.metadata.startTime)),\n end: Math.max(...traces.map((t) => t.metadata.endTime)),\n },\n metadata: {\n semanticDensity: 1.0,\n accessPattern: 'hot',\n },\n },\n children: [],\n };\n\n // Partition into chapters\n const chapters = await this.partitionIntoChapters(traces);\n\n for (const chapterTraces of chapters) {\n const chapter = await this.buildChapter(chapterTraces, encyclopediaId);\n encyclopedia.children!.push(chapter);\n }\n\n // Update child count\n encyclopedia.level.childCount = encyclopedia.children!.length;\n\n // Store in database\n await this.storeNode(encyclopedia);\n\n return encyclopedia;\n }\n\n /**\n * Partition traces into chapters based on semantic similarity and time\n */\n private async partitionIntoChapters(traces: Trace[]): Promise<Trace[][]> {\n const chapters: Trace[][] = [];\n let currentChapter: Trace[] = [];\n\n for (const trace of traces) {\n if (currentChapter.length >= this.config.maxChapterSize) {\n chapters.push(currentChapter);\n currentChapter = [trace];\n } else if (currentChapter.length > 0) {\n // Check semantic similarity with chapter\n const similarity = await this.calculateSimilarity(\n trace,\n currentChapter[currentChapter.length - 1]\n );\n\n if (similarity < this.config.semanticThreshold) {\n // Start new chapter if semantically different\n chapters.push(currentChapter);\n currentChapter = [trace];\n } else {\n currentChapter.push(trace);\n }\n } else {\n currentChapter.push(trace);\n }\n }\n\n if (currentChapter.length > 0) {\n chapters.push(currentChapter);\n }\n\n return chapters;\n }\n\n /**\n * Build a chapter node\n */\n private async buildChapter(\n traces: Trace[],\n parentId: string\n ): Promise<RetrievalNode> {\n const chapterId = this.generateId('chapter');\n\n const chapter: RetrievalNode = {\n id: chapterId,\n level: {\n level: 'chapter',\n id: chapterId,\n parentId,\n title: this.generateChapterTitle(traces),\n summary: await this.generateSummary(traces, 'chapter'),\n childCount: 0,\n tokenCount: traces.reduce((sum, t) => sum + (t.tokenCount || 0), 0),\n score: Math.max(...traces.map((t) => t.score)),\n timeRange: {\n start: Math.min(...traces.map((t) => t.metadata.startTime)),\n end: Math.max(...traces.map((t) => t.metadata.endTime)),\n },\n metadata: {\n compressionRatio: 0.8,\n semanticDensity: 0.8,\n accessPattern: 'warm',\n },\n },\n children: [],\n };\n\n // Partition into sections\n const sections = await 
this.partitionIntoSections(traces);\n\n for (const sectionTraces of sections) {\n const section = await this.buildSection(sectionTraces, chapterId);\n chapter.children!.push(section);\n }\n\n chapter.level.childCount = chapter.children!.length;\n return chapter;\n }\n\n /**\n * Build a section node\n */\n private async buildSection(\n traces: Trace[],\n parentId: string\n ): Promise<RetrievalNode> {\n const sectionId = this.generateId('section');\n\n const section: RetrievalNode = {\n id: sectionId,\n level: {\n level: 'section',\n id: sectionId,\n parentId,\n title: this.generateSectionTitle(traces),\n summary: await this.generateSummary(traces, 'section'),\n childCount: 0,\n tokenCount: traces.reduce((sum, t) => sum + (t.tokenCount || 0), 0),\n score: Math.max(...traces.map((t) => t.score)),\n timeRange: {\n start: Math.min(...traces.map((t) => t.metadata.startTime)),\n end: Math.max(...traces.map((t) => t.metadata.endTime)),\n },\n metadata: {\n compressionRatio: 0.6,\n semanticDensity: 0.6,\n accessPattern: 'cold',\n },\n },\n children: [],\n };\n\n // Partition into paragraphs\n const paragraphs = await this.partitionIntoParagraphs(traces);\n\n for (const paragraphTraces of paragraphs) {\n const paragraph = await this.buildParagraph(paragraphTraces, sectionId);\n section.children!.push(paragraph);\n }\n\n section.level.childCount = section.children!.length;\n return section;\n }\n\n /**\n * Build a paragraph (leaf) node\n */\n private async buildParagraph(\n traces: Trace[],\n parentId: string\n ): Promise<RetrievalNode> {\n const paragraphId = this.generateId('paragraph');\n\n // Combine trace content\n const content = traces\n .map((t) => {\n return `[${new Date(t.metadata.startTime).toISOString()}] ${t.type}: ${t.summary}`;\n })\n .join('\\n\\n');\n\n // Compress if large\n let storedContent = content;\n let compressed = false;\n\n if (content.length > this.config.compressionThreshold) {\n const compressedData = await gzipAsync(content);\n storedContent = compressedData.toString('base64');\n compressed = true;\n }\n\n const paragraph: RetrievalNode = {\n id: paragraphId,\n level: {\n level: 'paragraph',\n id: paragraphId,\n parentId,\n title: this.generateParagraphTitle(traces),\n summary: await this.generateSummary(traces, 'paragraph'),\n childCount: traces.length,\n tokenCount: traces.reduce((sum, t) => sum + (t.tokenCount || 0), 0),\n score: Math.max(...traces.map((t) => t.score)),\n timeRange: {\n start: Math.min(...traces.map((t) => t.metadata.startTime)),\n end: Math.max(...traces.map((t) => t.metadata.endTime)),\n },\n metadata: {\n compressionRatio: compressed ? 
0.3 : 1.0,\n semanticDensity: 0.4,\n accessPattern: 'cold',\n },\n },\n content: storedContent,\n compressed,\n };\n\n return paragraph;\n }\n\n /**\n * Partition traces into sections\n */\n private async partitionIntoSections(traces: Trace[]): Promise<Trace[][]> {\n const sections: Trace[][] = [];\n const sectionSize = Math.ceil(\n traces.length / Math.ceil(traces.length / this.config.maxSectionSize)\n );\n\n for (let i = 0; i < traces.length; i += sectionSize) {\n sections.push(traces.slice(i, i + sectionSize));\n }\n\n return sections;\n }\n\n /**\n * Partition traces into paragraphs\n */\n private async partitionIntoParagraphs(traces: Trace[]): Promise<Trace[][]> {\n const paragraphs: Trace[][] = [];\n const paragraphSize = Math.ceil(\n traces.length / Math.ceil(traces.length / this.config.maxParagraphSize)\n );\n\n for (let i = 0; i < traces.length; i += paragraphSize) {\n paragraphs.push(traces.slice(i, i + paragraphSize));\n }\n\n return paragraphs;\n }\n\n /**\n * Traverse hierarchy to retrieve relevant content\n */\n async retrieve(\n query: string,\n maxDepth: number = 4,\n tokenBudget: number = 4000\n ): Promise<string> {\n logger.info('Hierarchical retrieval', { query, maxDepth, tokenBudget });\n\n // Start from encyclopedia\n const encyclopedia = await this.loadRootNode();\n if (!encyclopedia) {\n return 'No content available';\n }\n\n const path: RetrievalNode[] = [encyclopedia];\n let currentNode = encyclopedia;\n let tokensUsed = 0;\n\n // Traverse down the hierarchy\n for (let depth = 1; depth < maxDepth && tokensUsed < tokenBudget; depth++) {\n if (!currentNode.children || currentNode.children.length === 0) {\n break;\n }\n\n // Select best matching child\n const bestChild = await this.selectBestChild(\n currentNode.children,\n query,\n tokenBudget - tokensUsed\n );\n\n if (!bestChild) break;\n\n path.push(bestChild);\n currentNode = bestChild;\n tokensUsed += bestChild.level.tokenCount;\n\n // Update access pattern\n await this.updateAccessPattern(bestChild.id);\n }\n\n // Build context from path\n return this.buildContextFromPath(path, tokenBudget);\n }\n\n /**\n * Select best matching child node\n */\n private async selectBestChild(\n children: RetrievalNode[],\n query: string,\n remainingBudget: number\n ): Promise<RetrievalNode | null> {\n let bestChild: RetrievalNode | null = null;\n let bestScore = 0;\n\n for (const child of children) {\n if (child.level.tokenCount > remainingBudget) {\n continue;\n }\n\n // Calculate relevance score\n const score = await this.calculateRelevance(child, query);\n\n if (score > bestScore) {\n bestScore = score;\n bestChild = child;\n }\n }\n\n return bestChild;\n }\n\n /**\n * Calculate relevance of node to query\n */\n private async calculateRelevance(\n node: RetrievalNode,\n query: string\n ): Promise<number> {\n // Simple keyword matching for now\n // In production, use embeddings\n const queryWords = query.toLowerCase().split(/\\s+/);\n const nodeText = `${node.level.title} ${node.level.summary}`.toLowerCase();\n\n let matches = 0;\n for (const word of queryWords) {\n if (nodeText.includes(word)) {\n matches++;\n }\n }\n\n const keywordScore = matches / queryWords.length;\n const recencyScore =\n 1 / (1 + (Date.now() - node.level.timeRange.end) / (1000 * 60 * 60 * 24));\n const importanceScore = node.level.score;\n\n return keywordScore * 0.5 + recencyScore * 0.3 + importanceScore * 0.2;\n }\n\n /**\n * Build context string from retrieval path\n */\n private async buildContextFromPath(\n path: RetrievalNode[],\n tokenBudget: 
number\n ): Promise<string> {\n const sections: string[] = [];\n\n sections.push('## Retrieval Path');\n sections.push(path.map((n) => n.level.title).join(' \u2192 '));\n sections.push('');\n\n // Add summaries from each level\n for (const node of path) {\n sections.push(`### ${node.level.level}: ${node.level.title}`);\n sections.push(node.level.summary);\n\n if (node.content) {\n // Decompress if needed\n let content = node.content;\n if (node.compressed) {\n const compressed = Buffer.from(content, 'base64');\n const decompressed = await gunzipAsync(compressed);\n content = decompressed.toString();\n }\n sections.push('');\n sections.push('**Content:**');\n sections.push(content);\n }\n sections.push('');\n }\n\n // Add statistics\n sections.push('## Retrieval Statistics');\n sections.push(`- Levels traversed: ${path.length}`);\n sections.push(\n `- Search space reduction: ${this.calculateReduction(path)}x`\n );\n sections.push(\n `- Semantic density: ${this.calculateDensity(path).toFixed(2)}`\n );\n\n return sections.join('\\n');\n }\n\n /**\n * Calculate search space reduction\n */\n private calculateReduction(path: RetrievalNode[]): number {\n if (path.length < 2) return 1;\n\n const initial = path[0].level.childCount;\n const final = path[path.length - 1].level.childCount || 1;\n\n return Math.round(initial / final);\n }\n\n /**\n * Calculate semantic density along path\n */\n private calculateDensity(path: RetrievalNode[]): number {\n const densities = path.map((n) => n.level.metadata.semanticDensity || 1);\n return densities.reduce((sum, d) => sum + d, 0) / densities.length;\n }\n\n /**\n * Generate summary for a level\n */\n private async generateSummary(\n traces: Trace[],\n level: string\n ): Promise<string> {\n // Cache key\n const cacheKey = `${level}:${traces.map((t) => t.id).join(',')}`;\n\n if (this.summaryCache.has(cacheKey)) {\n return this.summaryCache.get(cacheKey)!;\n }\n\n // Generate summary based on level\n let summary: string;\n\n switch (level) {\n case 'encyclopedia':\n summary = `Complete knowledge base with ${traces.length} traces covering ${this.getTopics(traces).join(', ')}`;\n break;\n case 'chapter':\n summary = `${traces.length} operations focused on ${this.getDominantOperation(traces)}`;\n break;\n case 'section':\n summary = `${traces.length} traces: ${this.getKeyActivities(traces).join(', ')}`;\n break;\n case 'paragraph':\n summary = traces\n .slice(0, 3)\n .map((t) => t.summary)\n .join('. 
');\n break;\n default:\n summary = `${traces.length} items`;\n }\n\n this.summaryCache.set(cacheKey, summary);\n return summary;\n }\n\n /**\n * Extract topics from traces\n */\n private getTopics(traces: Trace[]): string[] {\n const topics = new Set<string>();\n\n for (const trace of traces) {\n topics.add(trace.type);\n }\n\n return Array.from(topics).slice(0, 5);\n }\n\n /**\n * Get dominant operation type\n */\n private getDominantOperation(traces: Trace[]): string {\n const counts: Record<string, number> = {};\n\n for (const trace of traces) {\n counts[trace.type] = (counts[trace.type] || 0) + 1;\n }\n\n const sorted = Object.entries(counts).sort((a, b) => b[1] - a[1]);\n return sorted[0]?.[0] || 'mixed operations';\n }\n\n /**\n * Get key activities\n */\n private getKeyActivities(traces: Trace[]): string[] {\n return traces.slice(0, 3).map((t) => t.type);\n }\n\n /**\n * Generate titles\n */\n private generateChapterTitle(traces: Trace[]): string {\n const start = new Date(traces[0].metadata.startTime);\n const operation = this.getDominantOperation(traces);\n return `${operation} (${start.toLocaleDateString()})`;\n }\n\n private generateSectionTitle(traces: Trace[]): string {\n const start = new Date(traces[0].metadata.startTime);\n return `Section ${start.toLocaleTimeString()}`;\n }\n\n private generateParagraphTitle(traces: Trace[]): string {\n return `${traces.length} traces`;\n }\n\n /**\n * Calculate similarity between traces\n */\n private async calculateSimilarity(a: Trace, b: Trace): Promise<number> {\n // Simple similarity based on type and time\n const typeSimilarity = a.type === b.type ? 1 : 0;\n const timeDiff = Math.abs(a.metadata.startTime - b.metadata.startTime);\n const timeSimilarity = 1 / (1 + timeDiff / (1000 * 60 * 60)); // Hour scale\n\n return typeSimilarity * 0.5 + timeSimilarity * 0.5;\n }\n\n /**\n * Store node in database\n */\n private async storeNode(node: RetrievalNode): Promise<void> {\n const stmt = this.db.prepare(`\n INSERT OR REPLACE INTO hierarchy_nodes (\n id, level, parent_id, title, summary,\n child_count, token_count, score,\n time_start, time_end,\n compression_ratio, semantic_density, access_pattern\n ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)\n `);\n\n stmt.run(\n node.id,\n node.level.level,\n node.level.parentId || null,\n node.level.title,\n node.level.summary,\n node.level.childCount,\n node.level.tokenCount,\n node.level.score,\n node.level.timeRange.start,\n node.level.timeRange.end,\n node.level.metadata.compressionRatio || 1,\n node.level.metadata.semanticDensity || 1,\n node.level.metadata.accessPattern || 'cold'\n );\n\n // Store content if present\n if (node.content) {\n this.db\n .prepare(\n `\n INSERT OR REPLACE INTO hierarchy_content (\n node_id, content, compressed, original_size, compressed_size\n ) VALUES (?, ?, ?, ?, ?)\n `\n )\n .run(\n node.id,\n node.content,\n node.compressed ? 1 : 0,\n node.compressed ? 
node.content.length * 2 : node.content.length,\n node.content.length\n );\n }\n\n // Recursively store children\n if (node.children) {\n for (const child of node.children) {\n await this.storeNode(child);\n }\n }\n }\n\n /**\n * Load root encyclopedia node\n */\n private async loadRootNode(): Promise<RetrievalNode | null> {\n const row = this.db\n .prepare(\n `\n SELECT * FROM hierarchy_nodes\n WHERE level = 'encyclopedia'\n ORDER BY created_at DESC\n LIMIT 1\n `\n )\n .get() as any;\n\n if (!row) return null;\n\n return this.loadNode(row.id);\n }\n\n /**\n * Load node from database\n */\n private async loadNode(nodeId: string): Promise<RetrievalNode | null> {\n // Check cache\n if (this.hierarchyCache.has(nodeId)) {\n return this.hierarchyCache.get(nodeId)!;\n }\n\n const nodeRow = this.db\n .prepare(\n `\n SELECT * FROM hierarchy_nodes WHERE id = ?\n `\n )\n .get(nodeId) as any;\n\n if (!nodeRow) return null;\n\n // Load content if exists\n const contentRow = this.db\n .prepare(\n `\n SELECT * FROM hierarchy_content WHERE node_id = ?\n `\n )\n .get(nodeId) as any;\n\n // Load children\n const childRows = this.db\n .prepare(\n `\n SELECT id FROM hierarchy_nodes WHERE parent_id = ?\n `\n )\n .all(nodeId) as any[];\n\n const children: RetrievalNode[] = [];\n for (const childRow of childRows) {\n const child = await this.loadNode(childRow.id);\n if (child) children.push(child);\n }\n\n const node: RetrievalNode = {\n id: nodeRow.id,\n level: {\n level: nodeRow.level as any,\n id: nodeRow.id,\n parentId: nodeRow.parent_id,\n title: nodeRow.title,\n summary: nodeRow.summary,\n childCount: nodeRow.child_count,\n tokenCount: nodeRow.token_count,\n score: nodeRow.score,\n timeRange: {\n start: nodeRow.time_start,\n end: nodeRow.time_end,\n },\n metadata: {\n compressionRatio: nodeRow.compression_ratio,\n semanticDensity: nodeRow.semantic_density,\n accessPattern: nodeRow.access_pattern as any,\n lastAccessed: nodeRow.last_accessed,\n },\n },\n children: children.length > 0 ? children : undefined,\n content: contentRow?.content,\n compressed: contentRow?.compressed === 1,\n };\n\n // Cache the node\n this.hierarchyCache.set(nodeId, node);\n\n return node;\n }\n\n /**\n * Update access pattern for a node\n */\n private async updateAccessPattern(nodeId: string): Promise<void> {\n this.db\n .prepare(\n `\n UPDATE hierarchy_nodes\n SET last_accessed = ?, access_pattern = 'hot'\n WHERE id = ?\n `\n )\n .run(Date.now(), nodeId);\n }\n\n /**\n * Generate unique ID\n */\n private generateId(prefix: string): string {\n return `${prefix}_${crypto.randomBytes(8).toString('hex')}`;\n }\n\n /**\n * Get hierarchy statistics\n */\n getStatistics(): any {\n const stats = this.db\n .prepare(\n `\n SELECT \n level,\n COUNT(*) as count,\n AVG(token_count) as avg_tokens,\n AVG(child_count) as avg_children,\n AVG(compression_ratio) as avg_compression,\n AVG(semantic_density) as avg_density\n FROM hierarchy_nodes\n GROUP BY level\n `\n )\n .all();\n\n const totalNodes = this.db\n .prepare(\n `\n SELECT COUNT(*) as count FROM hierarchy_nodes\n `\n )\n .get() as any;\n\n const totalContent = this.db\n .prepare(\n `\n SELECT \n SUM(original_size) as original,\n SUM(compressed_size) as compressed\n FROM hierarchy_content\n `\n )\n .get() as any;\n\n return {\n nodesByLevel: stats,\n totalNodes: totalNodes.count,\n totalContent: {\n original: totalContent?.original || 0,\n compressed: totalContent?.compressed || 0,\n ratio: totalContent?.original\n ? 
(1 - totalContent.compressed / totalContent.original).toFixed(2)\n : 0,\n },\n cacheSize: this.hierarchyCache.size,\n };\n }\n}\n"],
+ "sourcesContent": ["/**\n * Hierarchical Retrieval System with Progressive Summarization\n * Implements multi-level tree structure to prevent semantic collapse at scale\n *\n * Based on: Encyclopedia \u2192 Chapter \u2192 Section \u2192 Paragraph model\n * Reduces search space from 50K to ~200 at each hop\n */\n\nimport Database from 'better-sqlite3';\nimport { logger } from '../monitoring/logger.js';\nimport { Trace, CompressedTrace } from '../trace/types.js';\nimport { Frame, Anchor, Event } from '../context/index.js';\nimport * as zlib from 'zlib';\nimport { promisify } from 'util';\nimport crypto from 'crypto';\n\nconst gzipAsync = promisify(zlib.gzip);\nconst gunzipAsync = promisify(zlib.gunzip);\n\nexport interface HierarchyLevel {\n level: 'encyclopedia' | 'chapter' | 'section' | 'paragraph' | 'atom';\n id: string;\n parentId?: string;\n title: string;\n summary: string;\n embeddings?: number[];\n childCount: number;\n tokenCount: number;\n score: number;\n timeRange: {\n start: number;\n end: number;\n };\n metadata: {\n compressionRatio?: number;\n semanticDensity?: number;\n accessPattern?: 'hot' | 'warm' | 'cold';\n lastAccessed?: number;\n };\n}\n\nexport interface RetrievalNode {\n id: string;\n level: HierarchyLevel;\n children?: RetrievalNode[];\n content?: string; // Only for leaf nodes\n compressed?: boolean;\n}\n\nexport interface HierarchicalConfig {\n maxEncyclopediaSize: number; // Total documents (~50K)\n maxChapterSize: number; // Documents per chapter (~6K)\n maxSectionSize: number; // Docs per section (~250)\n maxParagraphSize: number; // Docs per paragraph (~10-20)\n compressionThreshold: number; // Token threshold for compression\n semanticThreshold: number; // Similarity threshold for grouping\n}\n\nexport const DEFAULT_HIERARCHY_CONFIG: HierarchicalConfig = {\n maxEncyclopediaSize: 50000,\n maxChapterSize: 6000,\n maxSectionSize: 250,\n maxParagraphSize: 20,\n compressionThreshold: 1000,\n semanticThreshold: 0.7,\n};\n\n/**\n * Manages hierarchical retrieval with progressive summarization\n */\nexport class HierarchicalRetrieval {\n private db: Database.Database;\n private config: HierarchicalConfig;\n private hierarchyCache: Map<string, RetrievalNode> = new Map();\n private summaryCache: Map<string, string> = new Map();\n\n constructor(db: Database.Database, config: Partial<HierarchicalConfig> = {}) {\n this.db = db;\n this.config = { ...DEFAULT_HIERARCHY_CONFIG, ...config };\n this.initializeSchema();\n }\n\n private initializeSchema(): void {\n this.db.exec(`\n CREATE TABLE IF NOT EXISTS hierarchy_nodes (\n id TEXT PRIMARY KEY,\n level TEXT NOT NULL,\n parent_id TEXT,\n title TEXT NOT NULL,\n summary TEXT,\n embeddings BLOB,\n child_count INTEGER DEFAULT 0,\n token_count INTEGER DEFAULT 0,\n score REAL DEFAULT 0,\n time_start INTEGER,\n time_end INTEGER,\n compression_ratio REAL,\n semantic_density REAL,\n access_pattern TEXT DEFAULT 'cold',\n last_accessed INTEGER,\n created_at INTEGER DEFAULT (unixepoch() * 1000),\n FOREIGN KEY (parent_id) REFERENCES hierarchy_nodes(id) ON DELETE CASCADE\n )\n `);\n\n this.db.exec(`\n CREATE INDEX IF NOT EXISTS idx_hierarchy_level ON hierarchy_nodes(level);\n CREATE INDEX IF NOT EXISTS idx_hierarchy_parent ON hierarchy_nodes(parent_id);\n CREATE INDEX IF NOT EXISTS idx_hierarchy_score ON hierarchy_nodes(score DESC);\n CREATE INDEX IF NOT EXISTS idx_hierarchy_time ON hierarchy_nodes(time_start, time_end);\n `);\n\n // Content storage for leaf nodes\n this.db.exec(`\n CREATE TABLE IF NOT EXISTS hierarchy_content (\n 
node_id TEXT PRIMARY KEY,\n content TEXT,\n compressed BOOLEAN DEFAULT 0,\n original_size INTEGER,\n compressed_size INTEGER,\n FOREIGN KEY (node_id) REFERENCES hierarchy_nodes(id) ON DELETE CASCADE\n )\n `);\n }\n\n /**\n * Build hierarchical structure from traces\n */\n async buildHierarchy(traces: Trace[]): Promise<RetrievalNode> {\n logger.info('Building hierarchical retrieval structure', {\n traceCount: traces.length,\n });\n\n // Sort traces by time and score\n traces.sort((a, b) => {\n const timeDiff = b.metadata.startTime - a.metadata.startTime;\n return timeDiff !== 0 ? timeDiff : b.score - a.score;\n });\n\n // Create root encyclopedia node\n const encyclopediaId = this.generateId('encyclopedia');\n const encyclopedia: RetrievalNode = {\n id: encyclopediaId,\n level: {\n level: 'encyclopedia',\n id: encyclopediaId,\n title: 'Knowledge Base',\n summary: await this.generateSummary(traces, 'encyclopedia'),\n childCount: 0,\n tokenCount: traces.reduce((sum, t) => sum + (t.tokenCount || 0), 0),\n score: Math.max(...traces.map((t) => t.score)),\n timeRange: {\n start: Math.min(...traces.map((t) => t.metadata.startTime)),\n end: Math.max(...traces.map((t) => t.metadata.endTime)),\n },\n metadata: {\n semanticDensity: 1.0,\n accessPattern: 'hot',\n },\n },\n children: [],\n };\n\n // Partition into chapters\n const chapters = await this.partitionIntoChapters(traces);\n\n for (const chapterTraces of chapters) {\n const chapter = await this.buildChapter(chapterTraces, encyclopediaId);\n encyclopedia.children!.push(chapter);\n }\n\n // Update child count\n encyclopedia.level.childCount = encyclopedia.children!.length;\n\n // Store in database\n await this.storeNode(encyclopedia);\n\n return encyclopedia;\n }\n\n /**\n * Partition traces into chapters based on semantic similarity and time\n */\n private async partitionIntoChapters(traces: Trace[]): Promise<Trace[][]> {\n const chapters: Trace[][] = [];\n let currentChapter: Trace[] = [];\n\n for (const trace of traces) {\n if (currentChapter.length >= this.config.maxChapterSize) {\n chapters.push(currentChapter);\n currentChapter = [trace];\n } else if (currentChapter.length > 0) {\n // Check semantic similarity with chapter\n const similarity = await this.calculateSimilarity(\n trace,\n currentChapter[currentChapter.length - 1]\n );\n\n if (similarity < this.config.semanticThreshold) {\n // Start new chapter if semantically different\n chapters.push(currentChapter);\n currentChapter = [trace];\n } else {\n currentChapter.push(trace);\n }\n } else {\n currentChapter.push(trace);\n }\n }\n\n if (currentChapter.length > 0) {\n chapters.push(currentChapter);\n }\n\n return chapters;\n }\n\n /**\n * Build a chapter node\n */\n private async buildChapter(\n traces: Trace[],\n parentId: string\n ): Promise<RetrievalNode> {\n const chapterId = this.generateId('chapter');\n\n const chapter: RetrievalNode = {\n id: chapterId,\n level: {\n level: 'chapter',\n id: chapterId,\n parentId,\n title: this.generateChapterTitle(traces),\n summary: await this.generateSummary(traces, 'chapter'),\n childCount: 0,\n tokenCount: traces.reduce((sum, t) => sum + (t.tokenCount || 0), 0),\n score: Math.max(...traces.map((t) => t.score)),\n timeRange: {\n start: Math.min(...traces.map((t) => t.metadata.startTime)),\n end: Math.max(...traces.map((t) => t.metadata.endTime)),\n },\n metadata: {\n compressionRatio: 0.8,\n semanticDensity: 0.8,\n accessPattern: 'warm',\n },\n },\n children: [],\n };\n\n // Partition into sections\n const sections = await 
this.partitionIntoSections(traces);\n\n for (const sectionTraces of sections) {\n const section = await this.buildSection(sectionTraces, chapterId);\n chapter.children!.push(section);\n }\n\n chapter.level.childCount = chapter.children!.length;\n return chapter;\n }\n\n /**\n * Build a section node\n */\n private async buildSection(\n traces: Trace[],\n parentId: string\n ): Promise<RetrievalNode> {\n const sectionId = this.generateId('section');\n\n const section: RetrievalNode = {\n id: sectionId,\n level: {\n level: 'section',\n id: sectionId,\n parentId,\n title: this.generateSectionTitle(traces),\n summary: await this.generateSummary(traces, 'section'),\n childCount: 0,\n tokenCount: traces.reduce((sum, t) => sum + (t.tokenCount || 0), 0),\n score: Math.max(...traces.map((t) => t.score)),\n timeRange: {\n start: Math.min(...traces.map((t) => t.metadata.startTime)),\n end: Math.max(...traces.map((t) => t.metadata.endTime)),\n },\n metadata: {\n compressionRatio: 0.6,\n semanticDensity: 0.6,\n accessPattern: 'cold',\n },\n },\n children: [],\n };\n\n // Partition into paragraphs\n const paragraphs = await this.partitionIntoParagraphs(traces);\n\n for (const paragraphTraces of paragraphs) {\n const paragraph = await this.buildParagraph(paragraphTraces, sectionId);\n section.children!.push(paragraph);\n }\n\n section.level.childCount = section.children!.length;\n return section;\n }\n\n /**\n * Build a paragraph (leaf) node\n */\n private async buildParagraph(\n traces: Trace[],\n parentId: string\n ): Promise<RetrievalNode> {\n const paragraphId = this.generateId('paragraph');\n\n // Combine trace content\n const content = traces\n .map((t) => {\n return `[${new Date(t.metadata.startTime).toISOString()}] ${t.type}: ${t.summary}`;\n })\n .join('\\n\\n');\n\n // Compress if large\n let storedContent = content;\n let compressed = false;\n\n if (content.length > this.config.compressionThreshold) {\n const compressedData = await gzipAsync(content);\n storedContent = compressedData.toString('base64');\n compressed = true;\n }\n\n const paragraph: RetrievalNode = {\n id: paragraphId,\n level: {\n level: 'paragraph',\n id: paragraphId,\n parentId,\n title: this.generateParagraphTitle(traces),\n summary: await this.generateSummary(traces, 'paragraph'),\n childCount: traces.length,\n tokenCount: traces.reduce((sum, t) => sum + (t.tokenCount || 0), 0),\n score: Math.max(...traces.map((t) => t.score)),\n timeRange: {\n start: Math.min(...traces.map((t) => t.metadata.startTime)),\n end: Math.max(...traces.map((t) => t.metadata.endTime)),\n },\n metadata: {\n compressionRatio: compressed ? 
0.3 : 1.0,\n semanticDensity: 0.4,\n accessPattern: 'cold',\n },\n },\n content: storedContent,\n compressed,\n };\n\n return paragraph;\n }\n\n /**\n * Partition traces into sections\n */\n private async partitionIntoSections(traces: Trace[]): Promise<Trace[][]> {\n const sections: Trace[][] = [];\n const sectionSize = Math.ceil(\n traces.length / Math.ceil(traces.length / this.config.maxSectionSize)\n );\n\n for (let i = 0; i < traces.length; i += sectionSize) {\n sections.push(traces.slice(i, i + sectionSize));\n }\n\n return sections;\n }\n\n /**\n * Partition traces into paragraphs\n */\n private async partitionIntoParagraphs(traces: Trace[]): Promise<Trace[][]> {\n const paragraphs: Trace[][] = [];\n const paragraphSize = Math.ceil(\n traces.length / Math.ceil(traces.length / this.config.maxParagraphSize)\n );\n\n for (let i = 0; i < traces.length; i += paragraphSize) {\n paragraphs.push(traces.slice(i, i + paragraphSize));\n }\n\n return paragraphs;\n }\n\n /**\n * Traverse hierarchy to retrieve relevant content\n */\n async retrieve(\n query: string,\n maxDepth: number = 4,\n tokenBudget: number = 4000\n ): Promise<string> {\n logger.info('Hierarchical retrieval', { query, maxDepth, tokenBudget });\n\n // Start from encyclopedia\n const encyclopedia = await this.loadRootNode();\n if (!encyclopedia) {\n return 'No content available';\n }\n\n const path: RetrievalNode[] = [encyclopedia];\n let currentNode = encyclopedia;\n let tokensUsed = 0;\n\n // Traverse down the hierarchy\n for (let depth = 1; depth < maxDepth && tokensUsed < tokenBudget; depth++) {\n if (!currentNode.children || currentNode.children.length === 0) {\n break;\n }\n\n // Select best matching child\n const bestChild = await this.selectBestChild(\n currentNode.children,\n query,\n tokenBudget - tokensUsed\n );\n\n if (!bestChild) break;\n\n path.push(bestChild);\n currentNode = bestChild;\n tokensUsed += bestChild.level.tokenCount;\n\n // Update access pattern\n await this.updateAccessPattern(bestChild.id);\n }\n\n // Build context from path\n return this.buildContextFromPath(path, tokenBudget);\n }\n\n /**\n * Select best matching child node\n */\n private async selectBestChild(\n children: RetrievalNode[],\n query: string,\n remainingBudget: number\n ): Promise<RetrievalNode | null> {\n let bestChild: RetrievalNode | null = null;\n let bestScore = 0;\n\n for (const child of children) {\n if (child.level.tokenCount > remainingBudget) {\n continue;\n }\n\n // Calculate relevance score\n const score = await this.calculateRelevance(child, query);\n\n if (score > bestScore) {\n bestScore = score;\n bestChild = child;\n }\n }\n\n return bestChild;\n }\n\n /**\n * Calculate relevance of node to query\n */\n private async calculateRelevance(\n node: RetrievalNode,\n query: string\n ): Promise<number> {\n // Simple keyword matching for now\n // In production, use embeddings\n const queryWords = query.toLowerCase().split(/\\s+/);\n const nodeText = `${node.level.title} ${node.level.summary}`.toLowerCase();\n\n let matches = 0;\n for (const word of queryWords) {\n if (nodeText.includes(word)) {\n matches++;\n }\n }\n\n const keywordScore = matches / queryWords.length;\n const recencyScore =\n 1 / (1 + (Date.now() - node.level.timeRange.end) / (1000 * 60 * 60 * 24));\n const importanceScore = node.level.score;\n\n return keywordScore * 0.5 + recencyScore * 0.3 + importanceScore * 0.2;\n }\n\n /**\n * Build context string from retrieval path\n */\n private async buildContextFromPath(\n path: RetrievalNode[],\n tokenBudget: 
number\n ): Promise<string> {\n const sections: string[] = [];\n\n sections.push('## Retrieval Path');\n sections.push(path.map((n) => n.level.title).join(' \u2192 '));\n sections.push('');\n\n // Add summaries from each level\n for (const node of path) {\n sections.push(`### ${node.level.level}: ${node.level.title}`);\n sections.push(node.level.summary);\n\n if (node.content) {\n // Decompress if needed\n let content = node.content;\n if (node.compressed) {\n const compressed = Buffer.from(content, 'base64');\n const decompressed = await gunzipAsync(compressed);\n content = decompressed.toString();\n }\n sections.push('');\n sections.push('**Content:**');\n sections.push(content);\n }\n sections.push('');\n }\n\n // Add statistics\n sections.push('## Retrieval Statistics');\n sections.push(`- Levels traversed: ${path.length}`);\n sections.push(\n `- Search space reduction: ${this.calculateReduction(path)}x`\n );\n sections.push(\n `- Semantic density: ${this.calculateDensity(path).toFixed(2)}`\n );\n\n return sections.join('\\n');\n }\n\n /**\n * Calculate search space reduction\n */\n private calculateReduction(path: RetrievalNode[]): number {\n if (path.length < 2) return 1;\n\n const initial = path[0].level.childCount;\n const final = path[path.length - 1].level.childCount || 1;\n\n return Math.round(initial / final);\n }\n\n /**\n * Calculate semantic density along path\n */\n private calculateDensity(path: RetrievalNode[]): number {\n const densities = path.map((n) => n.level.metadata.semanticDensity || 1);\n return densities.reduce((sum, d) => sum + d, 0) / densities.length;\n }\n\n /**\n * Generate summary for a level\n */\n private async generateSummary(\n traces: Trace[],\n level: string\n ): Promise<string> {\n // Cache key\n const cacheKey = `${level}:${traces.map((t) => t.id).join(',')}`;\n\n if (this.summaryCache.has(cacheKey)) {\n return this.summaryCache.get(cacheKey)!;\n }\n\n // Generate summary based on level\n let summary: string;\n\n switch (level) {\n case 'encyclopedia':\n summary = `Complete knowledge base with ${traces.length} traces covering ${this.getTopics(traces).join(', ')}`;\n break;\n case 'chapter':\n summary = `${traces.length} operations focused on ${this.getDominantOperation(traces)}`;\n break;\n case 'section':\n summary = `${traces.length} traces: ${this.getKeyActivities(traces).join(', ')}`;\n break;\n case 'paragraph':\n summary = traces\n .slice(0, 3)\n .map((t) => t.summary)\n .join('. 
');\n break;\n default:\n summary = `${traces.length} items`;\n }\n\n this.summaryCache.set(cacheKey, summary);\n return summary;\n }\n\n /**\n * Extract topics from traces\n */\n private getTopics(traces: Trace[]): string[] {\n const topics = new Set<string>();\n\n for (const trace of traces) {\n topics.add(trace.type);\n }\n\n return Array.from(topics).slice(0, 5);\n }\n\n /**\n * Get dominant operation type\n */\n private getDominantOperation(traces: Trace[]): string {\n const counts: Record<string, number> = {};\n\n for (const trace of traces) {\n counts[trace.type] = (counts[trace.type] || 0) + 1;\n }\n\n const sorted = Object.entries(counts).sort((a, b) => b[1] - a[1]);\n return sorted[0]?.[0] || 'mixed operations';\n }\n\n /**\n * Get key activities\n */\n private getKeyActivities(traces: Trace[]): string[] {\n return traces.slice(0, 3).map((t) => t.type);\n }\n\n /**\n * Generate titles\n */\n private generateChapterTitle(traces: Trace[]): string {\n const start = new Date(traces[0].metadata.startTime);\n const operation = this.getDominantOperation(traces);\n return `${operation} (${start.toLocaleDateString()})`;\n }\n\n private generateSectionTitle(traces: Trace[]): string {\n const start = new Date(traces[0].metadata.startTime);\n return `Section ${start.toLocaleTimeString()}`;\n }\n\n private generateParagraphTitle(traces: Trace[]): string {\n return `${traces.length} traces`;\n }\n\n /**\n * Calculate similarity between traces\n */\n private async calculateSimilarity(a: Trace, b: Trace): Promise<number> {\n // Simple similarity based on type and time\n const typeSimilarity = a.type === b.type ? 1 : 0;\n const timeDiff = Math.abs(a.metadata.startTime - b.metadata.startTime);\n const timeSimilarity = 1 / (1 + timeDiff / (1000 * 60 * 60)); // Hour scale\n\n return typeSimilarity * 0.5 + timeSimilarity * 0.5;\n }\n\n /**\n * Store node in database\n */\n private async storeNode(node: RetrievalNode): Promise<void> {\n const stmt = this.db.prepare(`\n INSERT OR REPLACE INTO hierarchy_nodes (\n id, level, parent_id, title, summary,\n child_count, token_count, score,\n time_start, time_end,\n compression_ratio, semantic_density, access_pattern\n ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)\n `);\n\n stmt.run(\n node.id,\n node.level.level,\n node.level.parentId || null,\n node.level.title,\n node.level.summary,\n node.level.childCount,\n node.level.tokenCount,\n node.level.score,\n node.level.timeRange.start,\n node.level.timeRange.end,\n node.level.metadata.compressionRatio || 1,\n node.level.metadata.semanticDensity || 1,\n node.level.metadata.accessPattern || 'cold'\n );\n\n // Store content if present\n if (node.content) {\n this.db\n .prepare(\n `\n INSERT OR REPLACE INTO hierarchy_content (\n node_id, content, compressed, original_size, compressed_size\n ) VALUES (?, ?, ?, ?, ?)\n `\n )\n .run(\n node.id,\n node.content,\n node.compressed ? 1 : 0,\n node.compressed ? 
node.content.length * 2 : node.content.length,\n node.content.length\n );\n }\n\n // Recursively store children\n if (node.children) {\n for (const child of node.children) {\n await this.storeNode(child);\n }\n }\n }\n\n /**\n * Load root encyclopedia node\n */\n private async loadRootNode(): Promise<RetrievalNode | null> {\n const row = this.db\n .prepare(\n `\n SELECT * FROM hierarchy_nodes\n WHERE level = 'encyclopedia'\n ORDER BY created_at DESC\n LIMIT 1\n `\n )\n .get() as any;\n\n if (!row) return null;\n\n return this.loadNode(row.id);\n }\n\n /**\n * Load node from database\n */\n private async loadNode(nodeId: string): Promise<RetrievalNode | null> {\n // Check cache\n if (this.hierarchyCache.has(nodeId)) {\n return this.hierarchyCache.get(nodeId)!;\n }\n\n const nodeRow = this.db\n .prepare(\n `\n SELECT * FROM hierarchy_nodes WHERE id = ?\n `\n )\n .get(nodeId) as any;\n\n if (!nodeRow) return null;\n\n // Load content if exists\n const contentRow = this.db\n .prepare(\n `\n SELECT * FROM hierarchy_content WHERE node_id = ?\n `\n )\n .get(nodeId) as any;\n\n // Load children\n const childRows = this.db\n .prepare(\n `\n SELECT id FROM hierarchy_nodes WHERE parent_id = ?\n `\n )\n .all(nodeId) as any[];\n\n const children: RetrievalNode[] = [];\n for (const childRow of childRows) {\n const child = await this.loadNode(childRow.id);\n if (child) children.push(child);\n }\n\n const node: RetrievalNode = {\n id: nodeRow.id,\n level: {\n level: nodeRow.level as any,\n id: nodeRow.id,\n parentId: nodeRow.parent_id,\n title: nodeRow.title,\n summary: nodeRow.summary,\n childCount: nodeRow.child_count,\n tokenCount: nodeRow.token_count,\n score: nodeRow.score,\n timeRange: {\n start: nodeRow.time_start,\n end: nodeRow.time_end,\n },\n metadata: {\n compressionRatio: nodeRow.compression_ratio,\n semanticDensity: nodeRow.semantic_density,\n accessPattern: nodeRow.access_pattern as any,\n lastAccessed: nodeRow.last_accessed,\n },\n },\n children: children.length > 0 ? children : undefined,\n content: contentRow?.content,\n compressed: contentRow?.compressed === 1,\n };\n\n // Cache the node\n this.hierarchyCache.set(nodeId, node);\n\n return node;\n }\n\n /**\n * Update access pattern for a node\n */\n private async updateAccessPattern(nodeId: string): Promise<void> {\n this.db\n .prepare(\n `\n UPDATE hierarchy_nodes\n SET last_accessed = ?, access_pattern = 'hot'\n WHERE id = ?\n `\n )\n .run(Date.now(), nodeId);\n }\n\n /**\n * Generate unique ID\n */\n private generateId(prefix: string): string {\n return `${prefix}_${crypto.randomBytes(8).toString('hex')}`;\n }\n\n /**\n * Get hierarchy statistics\n */\n getStatistics(): any {\n const stats = this.db\n .prepare(\n `\n SELECT \n level,\n COUNT(*) as count,\n AVG(token_count) as avg_tokens,\n AVG(child_count) as avg_children,\n AVG(compression_ratio) as avg_compression,\n AVG(semantic_density) as avg_density\n FROM hierarchy_nodes\n GROUP BY level\n `\n )\n .all();\n\n const totalNodes = this.db\n .prepare(\n `\n SELECT COUNT(*) as count FROM hierarchy_nodes\n `\n )\n .get() as any;\n\n const totalContent = this.db\n .prepare(\n `\n SELECT \n SUM(original_size) as original,\n SUM(compressed_size) as compressed\n FROM hierarchy_content\n `\n )\n .get() as any;\n\n return {\n nodesByLevel: stats,\n totalNodes: totalNodes.count,\n totalContent: {\n original: totalContent?.original || 0,\n compressed: totalContent?.compressed || 0,\n ratio: totalContent?.original\n ? 
(1 - totalContent.compressed / totalContent.original).toFixed(2)\n : 0,\n },\n cacheSize: this.hierarchyCache.size,\n };\n }\n}\n"],
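The only substantive change in this file's embedded source is the import path: Frame, Anchor, and Event are now imported from the context barrel (../context/index.js) rather than directly from ../context/frame-manager.js; the rest of the sourcesContent string is unchanged. A minimal sketch of the kind of barrel module this implies, assuming (it is not shown in this diff) that index.ts simply re-exports the frame-manager types:

    // src/core/context/index.ts -- hypothetical barrel module, not taken
    // from the package. Re-exporting the context types from one entry
    // point lets consumers such as the hierarchical retrieval source keep
    // compiling when the frame-manager internals move.
    export type { Frame, Anchor, Event } from './frame-manager.js';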
  "mappings": ";;;;AASA,SAAS,cAAc;AAGvB,YAAY,UAAU;AACtB,SAAS,iBAAiB;AAC1B,OAAO,YAAY;AAEnB,MAAM,YAAY,UAAU,KAAK,IAAI;AACrC,MAAM,cAAc,UAAU,KAAK,MAAM;AAyClC,MAAM,2BAA+C;AAAA,EAC1D,qBAAqB;AAAA,EACrB,gBAAgB;AAAA,EAChB,gBAAgB;AAAA,EAChB,kBAAkB;AAAA,EAClB,sBAAsB;AAAA,EACtB,mBAAmB;AACrB;AAKO,MAAM,sBAAsB;AAAA,EACzB;AAAA,EACA;AAAA,EACA,iBAA6C,oBAAI,IAAI;AAAA,EACrD,eAAoC,oBAAI,IAAI;AAAA,EAEpD,YAAY,IAAuB,SAAsC,CAAC,GAAG;AAC3E,SAAK,KAAK;AACV,SAAK,SAAS,EAAE,GAAG,0BAA0B,GAAG,OAAO;AACvD,SAAK,iBAAiB;AAAA,EACxB;AAAA,EAEQ,mBAAyB;AAC/B,SAAK,GAAG,KAAK;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,KAoBZ;AAED,SAAK,GAAG,KAAK;AAAA;AAAA;AAAA;AAAA;AAAA,KAKZ;AAGD,SAAK,GAAG,KAAK;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,KASZ;AAAA,EACH;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,eAAe,QAAyC;AAC5D,WAAO,KAAK,6CAA6C;AAAA,MACvD,YAAY,OAAO;AAAA,IACrB,CAAC;AAGD,WAAO,KAAK,CAAC,GAAG,MAAM;AACpB,YAAM,WAAW,EAAE,SAAS,YAAY,EAAE,SAAS;AACnD,aAAO,aAAa,IAAI,WAAW,EAAE,QAAQ,EAAE;AAAA,IACjD,CAAC;AAGD,UAAM,iBAAiB,KAAK,WAAW,cAAc;AACrD,UAAM,eAA8B;AAAA,MAClC,IAAI;AAAA,MACJ,OAAO;AAAA,QACL,OAAO;AAAA,QACP,IAAI;AAAA,QACJ,OAAO;AAAA,QACP,SAAS,MAAM,KAAK,gBAAgB,QAAQ,cAAc;AAAA,QAC1D,YAAY;AAAA,QACZ,YAAY,OAAO,OAAO,CAAC,KAAK,MAAM,OAAO,EAAE,cAAc,IAAI,CAAC;AAAA,QAClE,OAAO,KAAK,IAAI,GAAG,OAAO,IAAI,CAAC,MAAM,EAAE,KAAK,CAAC;AAAA,QAC7C,WAAW;AAAA,UACT,OAAO,KAAK,IAAI,GAAG,OAAO,IAAI,CAAC,MAAM,EAAE,SAAS,SAAS,CAAC;AAAA,UAC1D,KAAK,KAAK,IAAI,GAAG,OAAO,IAAI,CAAC,MAAM,EAAE,SAAS,OAAO,CAAC;AAAA,QACxD;AAAA,QACA,UAAU;AAAA,UACR,iBAAiB;AAAA,UACjB,eAAe;AAAA,QACjB;AAAA,MACF;AAAA,MACA,UAAU,CAAC;AAAA,IACb;AAGA,UAAM,WAAW,MAAM,KAAK,sBAAsB,MAAM;AAExD,eAAW,iBAAiB,UAAU;AACpC,YAAM,UAAU,MAAM,KAAK,aAAa,eAAe,cAAc;AACrE,mBAAa,SAAU,KAAK,OAAO;AAAA,IACrC;AAGA,iBAAa,MAAM,aAAa,aAAa,SAAU;AAGvD,UAAM,KAAK,UAAU,YAAY;AAEjC,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,sBAAsB,QAAqC;AACvE,UAAM,WAAsB,CAAC;AAC7B,QAAI,iBAA0B,CAAC;AAE/B,eAAW,SAAS,QAAQ;AAC1B,UAAI,eAAe,UAAU,KAAK,OAAO,gBAAgB;AACvD,iBAAS,KAAK,cAAc;AAC5B,yBAAiB,CAAC,KAAK;AAAA,MACzB,WAAW,eAAe,SAAS,GAAG;AAEpC,cAAM,aAAa,MAAM,KAAK;AAAA,UAC5B;AAAA,UACA,eAAe,eAAe,SAAS,CAAC;AAAA,QAC1C;AAEA,YAAI,aAAa,KAAK,OAAO,mBAAmB;AAE9C,mBAAS,KAAK,cAAc;AAC5B,2BAAiB,CAAC,KAAK;AAAA,QACzB,OAAO;AACL,yBAAe,KAAK,KAAK;AAAA,QAC3B;AAAA,MACF,OAAO;AACL,uBAAe,KAAK,KAAK;AAAA,MAC3B;AAAA,IACF;AAEA,QAAI,eAAe,SAAS,GAAG;AAC7B,eAAS,KAAK,cAAc;AAAA,IAC9B;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,aACZ,QACA,UACwB;AACxB,UAAM,YAAY,KAAK,WAAW,SAAS;AAE3C,UAAM,UAAyB;AAAA,MAC7B,IAAI;AAAA,MACJ,OAAO;AAAA,QACL,OAAO;AAAA,QACP,IAAI;AAAA,QACJ;AAAA,QACA,OAAO,KAAK,qBAAqB,MAAM;AAAA,QACvC,SAAS,MAAM,KAAK,gBAAgB,QAAQ,SAAS;AAAA,QACrD,YAAY;AAAA,QACZ,YAAY,OAAO,OAAO,CAAC,KAAK,MAAM,OAAO,EAAE,cAAc,IAAI,CAAC;AAAA,QAClE,OAAO,KAAK,IAAI,GAAG,OAAO,IAAI,CAAC,MAAM,EAAE,KAAK,CAAC;AAAA,QAC7C,WAAW;AAAA,UACT,OAAO,KAAK,IAAI,GAAG,OAAO,IAAI,CAAC,MAAM,EAAE,SAAS,SAAS,CAAC;AAAA,UAC1D,KAAK,KAAK,IAAI,GAAG,OAAO,IAAI,CAAC,MAAM,EAAE,SAAS,OAAO,CAAC;AAAA,QACxD;AAAA,QACA,UAAU;AAAA,UACR,kBAAkB;AAAA,UAClB,iBAAiB;AAAA,UACjB,eAAe;AAAA,QACjB;AAAA,MACF;AAAA,MACA,UAAU,CAAC;AAAA,IACb;AAGA,UAAM,WAAW,MAAM,KAAK,sBAAsB,MAAM;AAExD,eAAW,iBAAiB,UAAU;AACpC,YAAM,UAAU,MAAM,KAAK,aAAa,eAAe,SAAS;AAChE,cAAQ,SAAU,KAAK,OAAO;AAAA,IAChC;AAEA,YAAQ,MAAM,aAAa,QAAQ,SAAU;AAC7C,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,aACZ,QACA,UACwB;AACxB,UAAM,YAAY,KAAK,WAAW,SAAS;AAE3C,UAAM,UAAyB;AAAA,MAC7B,IAAI;AAAA,MACJ,OAAO;AAAA,QACL,OAAO;AAAA,QACP,IAAI;AAAA,QACJ;AAAA,QACA,OAAO,KAAK,qBAAqB,MAAM;AAAA,QACvC,SAAS,MAAM,KAAK,gBAAgB,QAAQ,SAAS;AAAA,QACrD,YAAY;AAAA,QACZ,YAAY,OAAO,OAAO,CAAC,KAAK,MAAM,OAAO,EAAE,cAAc,IAAI,CAAC;AAAA,QAClE,OAA
O,KAAK,IAAI,GAAG,OAAO,IAAI,CAAC,MAAM,EAAE,KAAK,CAAC;AAAA,QAC7C,WAAW;AAAA,UACT,OAAO,KAAK,IAAI,GAAG,OAAO,IAAI,CAAC,MAAM,EAAE,SAAS,SAAS,CAAC;AAAA,UAC1D,KAAK,KAAK,IAAI,GAAG,OAAO,IAAI,CAAC,MAAM,EAAE,SAAS,OAAO,CAAC;AAAA,QACxD;AAAA,QACA,UAAU;AAAA,UACR,kBAAkB;AAAA,UAClB,iBAAiB;AAAA,UACjB,eAAe;AAAA,QACjB;AAAA,MACF;AAAA,MACA,UAAU,CAAC;AAAA,IACb;AAGA,UAAM,aAAa,MAAM,KAAK,wBAAwB,MAAM;AAE5D,eAAW,mBAAmB,YAAY;AACxC,YAAM,YAAY,MAAM,KAAK,eAAe,iBAAiB,SAAS;AACtE,cAAQ,SAAU,KAAK,SAAS;AAAA,IAClC;AAEA,YAAQ,MAAM,aAAa,QAAQ,SAAU;AAC7C,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,eACZ,QACA,UACwB;AACxB,UAAM,cAAc,KAAK,WAAW,WAAW;AAG/C,UAAM,UAAU,OACb,IAAI,CAAC,MAAM;AACV,aAAO,IAAI,IAAI,KAAK,EAAE,SAAS,SAAS,EAAE,YAAY,CAAC,KAAK,EAAE,IAAI,KAAK,EAAE,OAAO;AAAA,IAClF,CAAC,EACA,KAAK,MAAM;AAGd,QAAI,gBAAgB;AACpB,QAAI,aAAa;AAEjB,QAAI,QAAQ,SAAS,KAAK,OAAO,sBAAsB;AACrD,YAAM,iBAAiB,MAAM,UAAU,OAAO;AAC9C,sBAAgB,eAAe,SAAS,QAAQ;AAChD,mBAAa;AAAA,IACf;AAEA,UAAM,YAA2B;AAAA,MAC/B,IAAI;AAAA,MACJ,OAAO;AAAA,QACL,OAAO;AAAA,QACP,IAAI;AAAA,QACJ;AAAA,QACA,OAAO,KAAK,uBAAuB,MAAM;AAAA,QACzC,SAAS,MAAM,KAAK,gBAAgB,QAAQ,WAAW;AAAA,QACvD,YAAY,OAAO;AAAA,QACnB,YAAY,OAAO,OAAO,CAAC,KAAK,MAAM,OAAO,EAAE,cAAc,IAAI,CAAC;AAAA,QAClE,OAAO,KAAK,IAAI,GAAG,OAAO,IAAI,CAAC,MAAM,EAAE,KAAK,CAAC;AAAA,QAC7C,WAAW;AAAA,UACT,OAAO,KAAK,IAAI,GAAG,OAAO,IAAI,CAAC,MAAM,EAAE,SAAS,SAAS,CAAC;AAAA,UAC1D,KAAK,KAAK,IAAI,GAAG,OAAO,IAAI,CAAC,MAAM,EAAE,SAAS,OAAO,CAAC;AAAA,QACxD;AAAA,QACA,UAAU;AAAA,UACR,kBAAkB,aAAa,MAAM;AAAA,UACrC,iBAAiB;AAAA,UACjB,eAAe;AAAA,QACjB;AAAA,MACF;AAAA,MACA,SAAS;AAAA,MACT;AAAA,IACF;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,sBAAsB,QAAqC;AACvE,UAAM,WAAsB,CAAC;AAC7B,UAAM,cAAc,KAAK;AAAA,MACvB,OAAO,SAAS,KAAK,KAAK,OAAO,SAAS,KAAK,OAAO,cAAc;AAAA,IACtE;AAEA,aAAS,IAAI,GAAG,IAAI,OAAO,QAAQ,KAAK,aAAa;AACnD,eAAS,KAAK,OAAO,MAAM,GAAG,IAAI,WAAW,CAAC;AAAA,IAChD;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,wBAAwB,QAAqC;AACzE,UAAM,aAAwB,CAAC;AAC/B,UAAM,gBAAgB,KAAK;AAAA,MACzB,OAAO,SAAS,KAAK,KAAK,OAAO,SAAS,KAAK,OAAO,gBAAgB;AAAA,IACxE;AAEA,aAAS,IAAI,GAAG,IAAI,OAAO,QAAQ,KAAK,eAAe;AACrD,iBAAW,KAAK,OAAO,MAAM,GAAG,IAAI,aAAa,CAAC;AAAA,IACpD;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,SACJ,OACA,WAAmB,GACnB,cAAsB,KACL;AACjB,WAAO,KAAK,0BAA0B,EAAE,OAAO,UAAU,YAAY,CAAC;AAGtE,UAAM,eAAe,MAAM,KAAK,aAAa;AAC7C,QAAI,CAAC,cAAc;AACjB,aAAO;AAAA,IACT;AAEA,UAAM,OAAwB,CAAC,YAAY;AAC3C,QAAI,cAAc;AAClB,QAAI,aAAa;AAGjB,aAAS,QAAQ,GAAG,QAAQ,YAAY,aAAa,aAAa,SAAS;AACzE,UAAI,CAAC,YAAY,YAAY,YAAY,SAAS,WAAW,GAAG;AAC9D;AAAA,MACF;AAGA,YAAM,YAAY,MAAM,KAAK;AAAA,QAC3B,YAAY;AAAA,QACZ;AAAA,QACA,cAAc;AAAA,MAChB;AAEA,UAAI,CAAC,UAAW;AAEhB,WAAK,KAAK,SAAS;AACnB,oBAAc;AACd,oBAAc,UAAU,MAAM;AAG9B,YAAM,KAAK,oBAAoB,UAAU,EAAE;AAAA,IAC7C;AAGA,WAAO,KAAK,qBAAqB,MAAM,WAAW;AAAA,EACpD;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,gBACZ,UACA,OACA,iBAC+B;AAC/B,QAAI,YAAkC;AACtC,QAAI,YAAY;AAEhB,eAAW,SAAS,UAAU;AAC5B,UAAI,MAAM,MAAM,aAAa,iBAAiB;AAC5C;AAAA,MACF;AAGA,YAAM,QAAQ,MAAM,KAAK,mBAAmB,OAAO,KAAK;AAExD,UAAI,QAAQ,WAAW;AACrB,oBAAY;AACZ,oBAAY;AAAA,MACd;AAAA,IACF;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,mBACZ,MACA,OACiB;AAGjB,UAAM,aAAa,MAAM,YAAY,EAAE,MAAM,KAAK;AAClD,UAAM,WAAW,GAAG,KAAK,MAAM,KAAK,IAAI,KAAK,MAAM,OAAO,GAAG,YAAY;AAEzE,QAAI,UAAU;AACd,eAAW,QAAQ,YAAY;AAC7B,UAAI,SAAS,SAAS,IAAI,GAAG;AAC3B;AAAA,MACF;AAAA,IACF;AAEA,UAAM,eAAe,UAAU,WAAW;AAC1C,UAAM,eACJ,KAAK,KAAK,KAAK,IAAI,IAAI,KAAK,MAAM,UAAU,QAAQ,MAAO,KAAK,KAAK;AACvE,UAAM,kBAAkB,KAAK,MAAM;AAEnC,WAAO,eAAe,MAAM,eAAe,MAAM,kBAAkB;AAAA,EACrE;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,qBACZ,MACA,aACiB;AACjB,UAAM,WAAqB,CAAC;AAE5B,aAAS,KAAK,mBAAmB;AACjC,aAAS,KAAK,KAAK,IAAI,CAAC,MAAM,EAAE,MAAM,KAAK,EAAE,KAAK,UAAK,C
AAC;AACxD,aAAS,KAAK,EAAE;AAGhB,eAAW,QAAQ,MAAM;AACvB,eAAS,KAAK,OAAO,KAAK,MAAM,KAAK,KAAK,KAAK,MAAM,KAAK,EAAE;AAC5D,eAAS,KAAK,KAAK,MAAM,OAAO;AAEhC,UAAI,KAAK,SAAS;AAEhB,YAAI,UAAU,KAAK;AACnB,YAAI,KAAK,YAAY;AACnB,gBAAM,aAAa,OAAO,KAAK,SAAS,QAAQ;AAChD,gBAAM,eAAe,MAAM,YAAY,UAAU;AACjD,oBAAU,aAAa,SAAS;AAAA,QAClC;AACA,iBAAS,KAAK,EAAE;AAChB,iBAAS,KAAK,cAAc;AAC5B,iBAAS,KAAK,OAAO;AAAA,MACvB;AACA,eAAS,KAAK,EAAE;AAAA,IAClB;AAGA,aAAS,KAAK,yBAAyB;AACvC,aAAS,KAAK,uBAAuB,KAAK,MAAM,EAAE;AAClD,aAAS;AAAA,MACP,6BAA6B,KAAK,mBAAmB,IAAI,CAAC;AAAA,IAC5D;AACA,aAAS;AAAA,MACP,uBAAuB,KAAK,iBAAiB,IAAI,EAAE,QAAQ,CAAC,CAAC;AAAA,IAC/D;AAEA,WAAO,SAAS,KAAK,IAAI;AAAA,EAC3B;AAAA;AAAA;AAAA;AAAA,EAKQ,mBAAmB,MAA+B;AACxD,QAAI,KAAK,SAAS,EAAG,QAAO;AAE5B,UAAM,UAAU,KAAK,CAAC,EAAE,MAAM;AAC9B,UAAM,QAAQ,KAAK,KAAK,SAAS,CAAC,EAAE,MAAM,cAAc;AAExD,WAAO,KAAK,MAAM,UAAU,KAAK;AAAA,EACnC;AAAA;AAAA;AAAA;AAAA,EAKQ,iBAAiB,MAA+B;AACtD,UAAM,YAAY,KAAK,IAAI,CAAC,MAAM,EAAE,MAAM,SAAS,mBAAmB,CAAC;AACvE,WAAO,UAAU,OAAO,CAAC,KAAK,MAAM,MAAM,GAAG,CAAC,IAAI,UAAU;AAAA,EAC9D;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,gBACZ,QACA,OACiB;AAEjB,UAAM,WAAW,GAAG,KAAK,IAAI,OAAO,IAAI,CAAC,MAAM,EAAE,EAAE,EAAE,KAAK,GAAG,CAAC;AAE9D,QAAI,KAAK,aAAa,IAAI,QAAQ,GAAG;AACnC,aAAO,KAAK,aAAa,IAAI,QAAQ;AAAA,IACvC;AAGA,QAAI;AAEJ,YAAQ,OAAO;AAAA,MACb,KAAK;AACH,kBAAU,gCAAgC,OAAO,MAAM,oBAAoB,KAAK,UAAU,MAAM,EAAE,KAAK,IAAI,CAAC;AAC5G;AAAA,MACF,KAAK;AACH,kBAAU,GAAG,OAAO,MAAM,0BAA0B,KAAK,qBAAqB,MAAM,CAAC;AACrF;AAAA,MACF,KAAK;AACH,kBAAU,GAAG,OAAO,MAAM,YAAY,KAAK,iBAAiB,MAAM,EAAE,KAAK,IAAI,CAAC;AAC9E;AAAA,MACF,KAAK;AACH,kBAAU,OACP,MAAM,GAAG,CAAC,EACV,IAAI,CAAC,MAAM,EAAE,OAAO,EACpB,KAAK,IAAI;AACZ;AAAA,MACF;AACE,kBAAU,GAAG,OAAO,MAAM;AAAA,IAC9B;AAEA,SAAK,aAAa,IAAI,UAAU,OAAO;AACvC,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKQ,UAAU,QAA2B;AAC3C,UAAM,SAAS,oBAAI,IAAY;AAE/B,eAAW,SAAS,QAAQ;AAC1B,aAAO,IAAI,MAAM,IAAI;AAAA,IACvB;AAEA,WAAO,MAAM,KAAK,MAAM,EAAE,MAAM,GAAG,CAAC;AAAA,EACtC;AAAA;AAAA;AAAA;AAAA,EAKQ,qBAAqB,QAAyB;AACpD,UAAM,SAAiC,CAAC;AAExC,eAAW,SAAS,QAAQ;AAC1B,aAAO,MAAM,IAAI,KAAK,OAAO,MAAM,IAAI,KAAK,KAAK;AAAA,IACnD;AAEA,UAAM,SAAS,OAAO,QAAQ,MAAM,EAAE,KAAK,CAAC,GAAG,MAAM,EAAE,CAAC,IAAI,EAAE,CAAC,CAAC;AAChE,WAAO,OAAO,CAAC,IAAI,CAAC,KAAK;AAAA,EAC3B;AAAA;AAAA;AAAA;AAAA,EAKQ,iBAAiB,QAA2B;AAClD,WAAO,OAAO,MAAM,GAAG,CAAC,EAAE,IAAI,CAAC,MAAM,EAAE,IAAI;AAAA,EAC7C;AAAA;AAAA;AAAA;AAAA,EAKQ,qBAAqB,QAAyB;AACpD,UAAM,QAAQ,IAAI,KAAK,OAAO,CAAC,EAAE,SAAS,SAAS;AACnD,UAAM,YAAY,KAAK,qBAAqB,MAAM;AAClD,WAAO,GAAG,SAAS,KAAK,MAAM,mBAAmB,CAAC;AAAA,EACpD;AAAA,EAEQ,qBAAqB,QAAyB;AACpD,UAAM,QAAQ,IAAI,KAAK,OAAO,CAAC,EAAE,SAAS,SAAS;AACnD,WAAO,WAAW,MAAM,mBAAmB,CAAC;AAAA,EAC9C;AAAA,EAEQ,uBAAuB,QAAyB;AACtD,WAAO,GAAG,OAAO,MAAM;AAAA,EACzB;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,oBAAoB,GAAU,GAA2B;AAErE,UAAM,iBAAiB,EAAE,SAAS,EAAE,OAAO,IAAI;AAC/C,UAAM,WAAW,KAAK,IAAI,EAAE,SAAS,YAAY,EAAE,SAAS,SAAS;AACrE,UAAM,iBAAiB,KAAK,IAAI,YAAY,MAAO,KAAK;AAExD,WAAO,iBAAiB,MAAM,iBAAiB;AAAA,EACjD;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,UAAU,MAAoC;AAC1D,UAAM,OAAO,KAAK,GAAG,QAAQ;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,KAO5B;AAED,SAAK;AAAA,MACH,KAAK;AAAA,MACL,KAAK,MAAM;AAAA,MACX,KAAK,MAAM,YAAY;AAAA,MACvB,KAAK,MAAM;AAAA,MACX,KAAK,MAAM;AAAA,MACX,KAAK,MAAM;AAAA,MACX,KAAK,MAAM;AAAA,MACX,KAAK,MAAM;AAAA,MACX,KAAK,MAAM,UAAU;AAAA,MACrB,KAAK,MAAM,UAAU;AAAA,MACrB,KAAK,MAAM,SAAS,oBAAoB;AAAA,MACxC,KAAK,MAAM,SAAS,mBAAmB;AAAA,MACvC,KAAK,MAAM,SAAS,iBAAiB;AAAA,IACvC;AAGA,QAAI,KAAK,SAAS;AAChB,WAAK,GACF;AAAA,QACC;AAAA;AAAA;AAAA;AAAA;AAAA,MAKF,EACC;AAAA,QACC,KAAK;AAAA,QACL,KAAK;AAAA,QACL,KAAK,aAAa,IAAI;AAAA,QACtB,KAAK,aAAa,KAAK,QAAQ,SAAS,IAAI,KAAK,QAAQ;AAAA,QACzD,KAAK,QAAQ;AAAA,MACf;AAAA,IACJ;AAGA,QAAI,KAAK,UAAU;AACjB,iBAAW,SAAS,KAA
K,UAAU;AACjC,cAAM,KAAK,UAAU,KAAK;AAAA,MAC5B;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,eAA8C;AAC1D,UAAM,MAAM,KAAK,GACd;AAAA,MACC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,IAMF,EACC,IAAI;AAEP,QAAI,CAAC,IAAK,QAAO;AAEjB,WAAO,KAAK,SAAS,IAAI,EAAE;AAAA,EAC7B;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,SAAS,QAA+C;AAEpE,QAAI,KAAK,eAAe,IAAI,MAAM,GAAG;AACnC,aAAO,KAAK,eAAe,IAAI,MAAM;AAAA,IACvC;AAEA,UAAM,UAAU,KAAK,GAClB;AAAA,MACC;AAAA;AAAA;AAAA,IAGF,EACC,IAAI,MAAM;AAEb,QAAI,CAAC,QAAS,QAAO;AAGrB,UAAM,aAAa,KAAK,GACrB;AAAA,MACC;AAAA;AAAA;AAAA,IAGF,EACC,IAAI,MAAM;AAGb,UAAM,YAAY,KAAK,GACpB;AAAA,MACC;AAAA;AAAA;AAAA,IAGF,EACC,IAAI,MAAM;AAEb,UAAM,WAA4B,CAAC;AACnC,eAAW,YAAY,WAAW;AAChC,YAAM,QAAQ,MAAM,KAAK,SAAS,SAAS,EAAE;AAC7C,UAAI,MAAO,UAAS,KAAK,KAAK;AAAA,IAChC;AAEA,UAAM,OAAsB;AAAA,MAC1B,IAAI,QAAQ;AAAA,MACZ,OAAO;AAAA,QACL,OAAO,QAAQ;AAAA,QACf,IAAI,QAAQ;AAAA,QACZ,UAAU,QAAQ;AAAA,QAClB,OAAO,QAAQ;AAAA,QACf,SAAS,QAAQ;AAAA,QACjB,YAAY,QAAQ;AAAA,QACpB,YAAY,QAAQ;AAAA,QACpB,OAAO,QAAQ;AAAA,QACf,WAAW;AAAA,UACT,OAAO,QAAQ;AAAA,UACf,KAAK,QAAQ;AAAA,QACf;AAAA,QACA,UAAU;AAAA,UACR,kBAAkB,QAAQ;AAAA,UAC1B,iBAAiB,QAAQ;AAAA,UACzB,eAAe,QAAQ;AAAA,UACvB,cAAc,QAAQ;AAAA,QACxB;AAAA,MACF;AAAA,MACA,UAAU,SAAS,SAAS,IAAI,WAAW;AAAA,MAC3C,SAAS,YAAY;AAAA,MACrB,YAAY,YAAY,eAAe;AAAA,IACzC;AAGA,SAAK,eAAe,IAAI,QAAQ,IAAI;AAEpC,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,oBAAoB,QAA+B;AAC/D,SAAK,GACF;AAAA,MACC;AAAA;AAAA;AAAA;AAAA;AAAA,IAKF,EACC,IAAI,KAAK,IAAI,GAAG,MAAM;AAAA,EAC3B;AAAA;AAAA;AAAA;AAAA,EAKQ,WAAW,QAAwB;AACzC,WAAO,GAAG,MAAM,IAAI,OAAO,YAAY,CAAC,EAAE,SAAS,KAAK,CAAC;AAAA,EAC3D;AAAA;AAAA;AAAA;AAAA,EAKA,gBAAqB;AACnB,UAAM,QAAQ,KAAK,GAChB;AAAA,MACC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,IAWF,EACC,IAAI;AAEP,UAAM,aAAa,KAAK,GACrB;AAAA,MACC;AAAA;AAAA;AAAA,IAGF,EACC,IAAI;AAEP,UAAM,eAAe,KAAK,GACvB;AAAA,MACC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,IAMF,EACC,IAAI;AAEP,WAAO;AAAA,MACL,cAAc;AAAA,MACd,YAAY,WAAW;AAAA,MACvB,cAAc;AAAA,QACZ,UAAU,cAAc,YAAY;AAAA,QACpC,YAAY,cAAc,cAAc;AAAA,QACxC,OAAO,cAAc,YAChB,IAAI,aAAa,aAAa,aAAa,UAAU,QAAQ,CAAC,IAC/D;AAAA,MACN;AAAA,MACA,WAAW,KAAK,eAAe;AAAA,IACjC;AAAA,EACF;AACF;",
  "names": []
  }
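Because the hunk above carries the entire TypeScript module as one JSON string, here is a hedged usage sketch of the HierarchicalRetrieval class it embeds. The module path is an assumption (the file name is not visible in this diff), and the trace literals carry only the fields buildHierarchy() actually reads (id, type, summary, score, tokenCount, metadata.startTime/endTime); the real Trace type lives in ../trace/types.js and may require more.

    // usage-sketch.ts (ESM, so top-level await is available)
    import Database from 'better-sqlite3';
    import { HierarchicalRetrieval } from './hierarchical-retrieval.js'; // assumed path

    const now = Date.now();
    // Stand-ins for Trace[]; cast because only the fields the class
    // reads are populated here.
    const traces = [
      { id: 't1', type: 'file_edit', summary: 'refactor context exports',
        score: 0.9, tokenCount: 120,
        metadata: { startTime: now - 3_600_000, endTime: now - 3_500_000 } },
      { id: 't2', type: 'test_run', summary: 'retrieval integration tests',
        score: 0.7, tokenCount: 80,
        metadata: { startTime: now - 60_000, endTime: now } },
    ] as any[];

    // The constructor creates the hierarchy_nodes / hierarchy_content
    // tables on whatever database it is handed.
    const retrieval = new HierarchicalRetrieval(new Database(':memory:'));

    // Build encyclopedia -> chapter -> section -> paragraph, persist it,
    // then descend with a keyword query under a 4,000-token budget.
    const root = await retrieval.buildHierarchy(traces);
    console.log(`chapters: ${root.level.childCount}`);
    console.log(await retrieval.retrieve('retrieval tests', 4, 4000));

Per the embedded source, each descent step discards children that exceed the remaining token budget and picks the one maximizing 0.5 x keyword match + 0.3 x recency + 0.2 x stored score, which is how each hop keeps the candidate set to a few hundred nodes rather than the full corpus.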