@stackmemoryai/stackmemory 0.5.33 → 0.5.34
This diff shows the content changes between publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the versions as they appear in the public registry.
- package/dist/agents/core/agent-task-manager.js.map +1 -1
- package/dist/cli/commands/clear.js +1 -1
- package/dist/cli/commands/clear.js.map +1 -1
- package/dist/cli/commands/context.js +1 -1
- package/dist/cli/commands/context.js.map +1 -1
- package/dist/cli/commands/dashboard.js.map +1 -1
- package/dist/cli/commands/discovery.js +1 -1
- package/dist/cli/commands/discovery.js.map +1 -1
- package/dist/cli/commands/handoff.js +1 -1
- package/dist/cli/commands/handoff.js.map +1 -1
- package/dist/cli/commands/monitor.js +1 -1
- package/dist/cli/commands/monitor.js.map +1 -1
- package/dist/cli/commands/quality.js +1 -1
- package/dist/cli/commands/quality.js.map +1 -1
- package/dist/cli/commands/skills.js +1 -1
- package/dist/cli/commands/skills.js.map +1 -1
- package/dist/cli/commands/workflow.js +1 -1
- package/dist/cli/commands/workflow.js.map +1 -1
- package/dist/cli/commands/worktree.js +1 -1
- package/dist/cli/commands/worktree.js.map +1 -1
- package/dist/cli/index.js +1 -1
- package/dist/cli/index.js.map +1 -1
- package/dist/core/context/auto-context.js.map +1 -1
- package/dist/core/context/compaction-handler.js.map +2 -2
- package/dist/core/context/context-bridge.js.map +2 -2
- package/dist/core/context/dual-stack-manager.js +1 -1
- package/dist/core/context/dual-stack-manager.js.map +1 -1
- package/dist/core/context/enhanced-rehydration.js.map +1 -1
- package/dist/core/context/frame-database.js +8 -0
- package/dist/core/context/frame-database.js.map +2 -2
- package/dist/core/context/frame-handoff-manager.js.map +1 -1
- package/dist/core/context/frame-lifecycle-hooks.js +119 -0
- package/dist/core/context/frame-lifecycle-hooks.js.map +7 -0
- package/dist/core/context/frame-stack.js +29 -0
- package/dist/core/context/frame-stack.js.map +2 -2
- package/dist/core/context/incremental-gc.js.map +2 -2
- package/dist/core/context/index.js +4 -22
- package/dist/core/context/index.js.map +2 -2
- package/dist/core/context/refactored-frame-manager.js +140 -34
- package/dist/core/context/refactored-frame-manager.js.map +3 -3
- package/dist/core/context/shared-context-layer.js.map +1 -1
- package/dist/core/context/stack-merge-resolver.js.map +1 -1
- package/dist/core/database/database-adapter.js.map +1 -1
- package/dist/core/database/paradedb-adapter.js.map +1 -1
- package/dist/core/database/query-router.js.map +1 -1
- package/dist/core/database/sqlite-adapter.js.map +1 -1
- package/dist/core/digest/frame-digest-integration.js.map +1 -1
- package/dist/core/digest/hybrid-digest-generator.js.map +1 -1
- package/dist/core/digest/types.js.map +1 -1
- package/dist/core/errors/index.js +249 -0
- package/dist/core/errors/index.js.map +2 -2
- package/dist/core/frame/workflow-templates.js.map +2 -2
- package/dist/core/merge/conflict-detector.js.map +1 -1
- package/dist/core/merge/resolution-engine.js.map +1 -1
- package/dist/core/merge/stack-diff.js.map +1 -1
- package/dist/core/models/model-router.js +10 -1
- package/dist/core/models/model-router.js.map +2 -2
- package/dist/core/monitoring/error-handler.js +37 -270
- package/dist/core/monitoring/error-handler.js.map +3 -3
- package/dist/core/monitoring/session-monitor.js.map +1 -1
- package/dist/core/performance/lazy-context-loader.js.map +1 -1
- package/dist/core/performance/optimized-frame-context.js.map +1 -1
- package/dist/core/retrieval/context-retriever.js.map +1 -1
- package/dist/core/retrieval/graph-retrieval.js.map +1 -1
- package/dist/core/retrieval/hierarchical-retrieval.js.map +1 -1
- package/dist/core/retrieval/llm-context-retrieval.js.map +1 -1
- package/dist/core/retrieval/retrieval-benchmarks.js.map +1 -1
- package/dist/core/retrieval/summary-generator.js.map +1 -1
- package/dist/core/retrieval/types.js.map +1 -1
- package/dist/core/storage/chromadb-adapter.js.map +1 -1
- package/dist/core/storage/infinite-storage.js.map +1 -1
- package/dist/core/storage/two-tier-storage.js.map +1 -1
- package/dist/features/tasks/task-aware-context.js.map +1 -1
- package/dist/features/web/server/index.js +1 -1
- package/dist/features/web/server/index.js.map +1 -1
- package/dist/hooks/schemas.js +50 -0
- package/dist/hooks/schemas.js.map +2 -2
- package/dist/hooks/sms-action-runner.js +4 -0
- package/dist/hooks/sms-action-runner.js.map +2 -2
- package/dist/hooks/whatsapp-commands.js +142 -2
- package/dist/hooks/whatsapp-commands.js.map +2 -2
- package/dist/hooks/whatsapp-sync.js +34 -0
- package/dist/hooks/whatsapp-sync.js.map +2 -2
- package/dist/index.js +1 -1
- package/dist/index.js.map +1 -1
- package/dist/integrations/mcp/handlers/context-handlers.js.map +1 -1
- package/dist/integrations/mcp/handlers/discovery-handlers.js.map +1 -1
- package/dist/integrations/mcp/server.js +1 -1
- package/dist/integrations/mcp/server.js.map +1 -1
- package/dist/integrations/ralph/bridge/ralph-stackmemory-bridge.js +1 -1
- package/dist/integrations/ralph/bridge/ralph-stackmemory-bridge.js.map +1 -1
- package/dist/integrations/ralph/context/stackmemory-context-loader.js +1 -1
- package/dist/integrations/ralph/context/stackmemory-context-loader.js.map +1 -1
- package/dist/integrations/ralph/learning/pattern-learner.js +1 -1
- package/dist/integrations/ralph/learning/pattern-learner.js.map +1 -1
- package/dist/integrations/ralph/orchestration/multi-loop-orchestrator.js +1 -1
- package/dist/integrations/ralph/orchestration/multi-loop-orchestrator.js.map +1 -1
- package/dist/integrations/ralph/swarm/swarm-coordinator.js +1 -1
- package/dist/integrations/ralph/swarm/swarm-coordinator.js.map +1 -1
- package/dist/integrations/ralph/visualization/ralph-debugger.js +1 -1
- package/dist/integrations/ralph/visualization/ralph-debugger.js.map +1 -1
- package/dist/mcp/stackmemory-mcp-server.js +1 -1
- package/dist/mcp/stackmemory-mcp-server.js.map +1 -1
- package/dist/skills/claude-skills.js.map +1 -1
- package/dist/skills/recursive-agent-orchestrator.js.map +1 -1
- package/dist/skills/unified-rlm-orchestrator.js.map +1 -1
- package/package.json +1 -1
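Most of the changed files are compiled output and source maps under `package/dist/`. A diff like the hunks excerpted below can be reproduced locally with `npm diff --diff=@stackmemoryai/stackmemory@0.5.33 --diff=@stackmemoryai/stackmemory@0.5.34`, or programmatically via `libnpmdiff`, the library behind that command. A minimal sketch, assuming `libnpmdiff` is installed and using its spec-pair calling convention; the option value is an illustrative choice, not taken from this page:

```ts
// Sketch: reproduce this registry diff locally with libnpmdiff, the library
// that powers `npm diff`. The package specs come from the header above; the
// diffUnified value is an assumption about a sensible amount of context.
import diff from 'libnpmdiff';

const patch: string = await diff(
  [
    '@stackmemoryai/stackmemory@0.5.33',
    '@stackmemoryai/stackmemory@0.5.34',
  ],
  { diffUnified: 3 } // lines of context around each change
);

console.log(patch); // unified diff text, like the hunks excerpted below
```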
package/dist/core/context/stack-merge-resolver.js.map
@@ -1,7 +1,7 @@
{
"version": 3,
"sources": ["../../../src/core/context/stack-merge-resolver.ts"],
-
"sourcesContent": ["/**\n * Stack Merge Conflict Resolution - STA-101\n * Advanced conflict resolution for frame merging between individual and shared stacks\n */\n\nimport type { Frame, Event, Anchor } from './frame-manager.js';\nimport {\n DualStackManager,\n type StackSyncResult,\n} from './dual-stack-manager.js';\nimport { logger } from '../monitoring/logger.js';\nimport { ValidationError, DatabaseError, ErrorCode } from '../errors/index.js';\nimport {\n validateInput,\n StartMergeSessionSchema,\n CreateMergePolicySchema,\n ConflictResolutionSchema,\n type StartMergeSessionInput,\n type CreateMergePolicyInput,\n type ConflictResolutionInput,\n} from './validation.js';\n\nexport interface MergeConflict {\n frameId: string;\n conflictType:\n | 'content'\n | 'metadata'\n | 'sequence'\n | 'dependency'\n | 'permission';\n sourceFrame: Frame;\n targetFrame: Frame;\n conflictDetails: {\n field: string;\n sourceValue: any;\n targetValue: any;\n lastModified: {\n source: Date;\n target: Date;\n };\n }[];\n severity: 'low' | 'medium' | 'high' | 'critical';\n autoResolvable: boolean;\n}\n\nexport interface MergeResolution {\n conflictId: string;\n strategy: 'source_wins' | 'target_wins' | 'merge_both' | 'manual' | 'skip';\n resolutionData?: Record<string, any>;\n resolvedBy: string;\n resolvedAt: Date;\n notes?: string;\n}\n\nexport interface MergePolicy {\n name: string;\n description: string;\n rules: Array<{\n condition: string; // JSONPath expression\n action: 'source_wins' | 'target_wins' | 'merge_both' | 'require_manual';\n priority: number;\n }>;\n autoApplyThreshold: 'low' | 'medium' | 'high' | 'never';\n}\n\nexport interface MergeSession {\n sessionId: string;\n sourceStackId: string;\n targetStackId: string;\n conflicts: MergeConflict[];\n resolutions: MergeResolution[];\n policy: MergePolicy;\n status: 'analyzing' | 'resolving' | 'completed' | 'failed' | 'manual_review';\n startedAt: Date;\n completedAt?: Date;\n metadata: {\n totalFrames: number;\n conflictFrames: number;\n autoResolvedConflicts: number;\n manualResolvedConflicts: number;\n };\n}\n\nexport class StackMergeResolver {\n private dualStackManager: DualStackManager;\n private activeSessions: Map<string, MergeSession> = new Map();\n private mergePolicies: Map<string, MergePolicy> = new Map();\n\n constructor(dualStackManager: DualStackManager) {\n this.dualStackManager = dualStackManager;\n this.initializeDefaultPolicies();\n logger.debug('StackMergeResolver initialized', {\n policies: Array.from(this.mergePolicies.keys()),\n });\n }\n\n /**\n * Start a merge session with conflict analysis\n */\n async startMergeSession(\n sourceStackId: string,\n targetStackId: string,\n frameIds?: string[],\n policyName: string = 'default'\n ): Promise<string> {\n // Validate input parameters\n const input = validateInput(StartMergeSessionSchema, {\n sourceStackId,\n targetStackId,\n frameIds,\n policyName,\n });\n const sessionId = `merge-${Date.now()}-${Math.random().toString(36).substr(2, 9)}`;\n\n logger.debug('Looking for merge policy', {\n policyName: input.policyName,\n availablePolicies: Array.from(this.mergePolicies.keys()),\n });\n const policy = this.mergePolicies.get(input.policyName);\n if (!policy) {\n logger.error('Merge policy not found', {\n requested: input.policyName,\n available: Array.from(this.mergePolicies.keys()),\n });\n throw new ValidationError(\n `Merge policy not found: ${input.policyName}`,\n ErrorCode.RESOURCE_NOT_FOUND\n );\n }\n\n try {\n // Check merge permissions on both stacks\n const currentUserId =\n 
this.dualStackManager.getCurrentContext().ownerId || 'unknown';\n await this.dualStackManager\n .getPermissionManager()\n .enforcePermission(\n this.dualStackManager\n .getPermissionManager()\n .createContext(currentUserId, 'merge', 'stack', input.sourceStackId)\n );\n\n await this.dualStackManager\n .getPermissionManager()\n .enforcePermission(\n this.dualStackManager\n .getPermissionManager()\n .createContext(currentUserId, 'merge', 'stack', input.targetStackId)\n );\n\n // Create merge session\n const session: MergeSession = {\n sessionId,\n sourceStackId: input.sourceStackId,\n targetStackId: input.targetStackId,\n conflicts: [],\n resolutions: [],\n policy,\n status: 'analyzing',\n startedAt: new Date(),\n metadata: {\n totalFrames: 0,\n conflictFrames: 0,\n autoResolvedConflicts: 0,\n manualResolvedConflicts: 0,\n },\n };\n\n this.activeSessions.set(sessionId, session);\n\n // Analyze conflicts\n await this.analyzeConflicts(sessionId, frameIds);\n\n // Auto-resolve conflicts where possible\n await this.autoResolveConflicts(sessionId);\n\n logger.info(`Merge session started: ${sessionId}`, {\n sourceStack: sourceStackId,\n targetStack: targetStackId,\n conflicts: session.conflicts.length,\n policy: policyName,\n });\n\n return sessionId;\n } catch (error: unknown) {\n logger.error('Failed to start merge session', {\n error: error instanceof Error ? error.message : error,\n sourceStackId: input.sourceStackId,\n targetStackId: input.targetStackId,\n policyName: input.policyName,\n });\n throw new DatabaseError(\n 'Failed to start merge session',\n ErrorCode.OPERATION_FAILED,\n { sourceStackId, targetStackId },\n error instanceof Error ? error : undefined\n );\n }\n }\n\n /**\n * Analyze conflicts between source and target stacks\n */\n private async analyzeConflicts(\n sessionId: string,\n frameIds?: string[]\n ): Promise<void> {\n const session = this.activeSessions.get(sessionId);\n if (!session) {\n throw new DatabaseError(\n `Merge session not found: ${sessionId}`,\n ErrorCode.RESOURCE_NOT_FOUND\n );\n }\n\n try {\n const sourceStack = this.getStackManager(session.sourceStackId);\n const targetStack = this.getStackManager(session.targetStackId);\n\n // Get frames to analyze\n const framesToAnalyze =\n frameIds ||\n (await sourceStack.getActiveFrames()).map((f) => f.frame_id);\n\n session.metadata.totalFrames = framesToAnalyze.length;\n\n for (const frameId of framesToAnalyze) {\n const sourceFrame = await sourceStack.getFrame(frameId);\n if (!sourceFrame) continue;\n\n const targetFrame = await targetStack.getFrame(frameId);\n if (!targetFrame) continue; // No conflict if target doesn't exist\n\n // Analyze frame-level conflicts\n const conflicts = await this.analyzeFrameConflicts(\n sourceFrame,\n targetFrame\n );\n session.conflicts.push(...conflicts);\n }\n\n session.metadata.conflictFrames = new Set(\n session.conflicts.map((c) => c.frameId)\n ).size;\n session.status = 'resolving';\n\n this.activeSessions.set(sessionId, session);\n\n logger.info(`Conflict analysis completed: ${sessionId}`, {\n totalConflicts: session.conflicts.length,\n conflictFrames: session.metadata.conflictFrames,\n });\n } catch (error: unknown) {\n session.status = 'failed';\n this.activeSessions.set(sessionId, session);\n throw error;\n }\n }\n\n /**\n * Analyze conflicts within a single frame\n */\n private async analyzeFrameConflicts(\n sourceFrame: Frame,\n targetFrame: Frame\n ): Promise<MergeConflict[]> {\n const conflicts: MergeConflict[] = [];\n\n // Content conflicts\n if (sourceFrame.name !== 
targetFrame.name) {\n conflicts.push({\n frameId: sourceFrame.frame_id,\n conflictType: 'content',\n sourceFrame,\n targetFrame,\n conflictDetails: [\n {\n field: 'name',\n sourceValue: sourceFrame.name,\n targetValue: targetFrame.name,\n lastModified: {\n source: new Date(sourceFrame.created_at * 1000),\n target: new Date(targetFrame.created_at * 1000),\n },\n },\n ],\n severity: 'medium',\n autoResolvable: false,\n });\n }\n\n // State conflicts\n if (sourceFrame.state !== targetFrame.state) {\n conflicts.push({\n frameId: sourceFrame.frame_id,\n conflictType: 'metadata',\n sourceFrame,\n targetFrame,\n conflictDetails: [\n {\n field: 'state',\n sourceValue: sourceFrame.state,\n targetValue: targetFrame.state,\n lastModified: {\n source: new Date(sourceFrame.created_at * 1000),\n target: new Date(targetFrame.created_at * 1000),\n },\n },\n ],\n severity: 'high',\n autoResolvable: true, // Can auto-resolve based on timestamps\n });\n }\n\n // Input/Output conflicts\n if (\n JSON.stringify(sourceFrame.inputs) !== JSON.stringify(targetFrame.inputs)\n ) {\n conflicts.push({\n frameId: sourceFrame.frame_id,\n conflictType: 'content',\n sourceFrame,\n targetFrame,\n conflictDetails: [\n {\n field: 'inputs',\n sourceValue: sourceFrame.inputs,\n targetValue: targetFrame.inputs,\n lastModified: {\n source: new Date(sourceFrame.created_at * 1000),\n target: new Date(targetFrame.created_at * 1000),\n },\n },\n ],\n severity: 'medium',\n autoResolvable: false,\n });\n }\n\n // Analyze event conflicts\n const eventConflicts = await this.analyzeEventConflicts(\n sourceFrame,\n targetFrame\n );\n conflicts.push(...eventConflicts);\n\n // Analyze anchor conflicts\n const anchorConflicts = await this.analyzeAnchorConflicts(\n sourceFrame,\n targetFrame\n );\n conflicts.push(...anchorConflicts);\n\n return conflicts;\n }\n\n /**\n * Analyze conflicts in frame events\n */\n private async analyzeEventConflicts(\n sourceFrame: Frame,\n targetFrame: Frame\n ): Promise<MergeConflict[]> {\n const conflicts: MergeConflict[] = [];\n\n try {\n const sourceStack = this.getStackManager(sourceFrame.project_id);\n const targetStack = this.getStackManager(targetFrame.project_id);\n\n const sourceEvents = await sourceStack.getFrameEvents(\n sourceFrame.frame_id\n );\n const targetEvents = await targetStack.getFrameEvents(\n targetFrame.frame_id\n );\n\n // Check for sequence conflicts\n if (sourceEvents.length !== targetEvents.length) {\n conflicts.push({\n frameId: sourceFrame.frame_id,\n conflictType: 'sequence',\n sourceFrame,\n targetFrame,\n conflictDetails: [\n {\n field: 'event_count',\n sourceValue: sourceEvents.length,\n targetValue: targetEvents.length,\n lastModified: {\n source: new Date(),\n target: new Date(),\n },\n },\n ],\n severity: 'high',\n autoResolvable: true, // Can merge events\n });\n }\n\n // Check for content conflicts in matching events\n const minLength = Math.min(sourceEvents.length, targetEvents.length);\n for (let i = 0; i < minLength; i++) {\n const sourceEvent = sourceEvents[i];\n const targetEvent = targetEvents[i];\n\n if (\n sourceEvent.text !== targetEvent.text ||\n JSON.stringify(sourceEvent.metadata) !==\n JSON.stringify(targetEvent.metadata)\n ) {\n conflicts.push({\n frameId: sourceFrame.frame_id,\n conflictType: 'content',\n sourceFrame,\n targetFrame,\n conflictDetails: [\n {\n field: `event_${i}`,\n sourceValue: {\n text: sourceEvent.text,\n metadata: sourceEvent.metadata,\n },\n targetValue: {\n text: targetEvent.text,\n metadata: targetEvent.metadata,\n },\n lastModified: 
{\n source: new Date(),\n target: new Date(),\n },\n },\n ],\n severity: 'medium',\n autoResolvable: false,\n });\n }\n }\n } catch (error: unknown) {\n logger.warn(\n `Failed to analyze event conflicts for frame: ${sourceFrame.frame_id}`,\n error\n );\n }\n\n return conflicts;\n }\n\n /**\n * Analyze conflicts in frame anchors\n */\n private async analyzeAnchorConflicts(\n sourceFrame: Frame,\n targetFrame: Frame\n ): Promise<MergeConflict[]> {\n const conflicts: MergeConflict[] = [];\n\n try {\n const sourceStack = this.getStackManager(sourceFrame.project_id);\n const targetStack = this.getStackManager(targetFrame.project_id);\n\n const sourceAnchors = await sourceStack.getFrameAnchors(\n sourceFrame.frame_id\n );\n const targetAnchors = await targetStack.getFrameAnchors(\n targetFrame.frame_id\n );\n\n // Group anchors by type for comparison\n const sourceAnchorsByType = this.groupAnchorsByType(sourceAnchors);\n const targetAnchorsByType = this.groupAnchorsByType(targetAnchors);\n\n const allTypes = new Set([\n ...Object.keys(sourceAnchorsByType),\n ...Object.keys(targetAnchorsByType),\n ]);\n\n for (const type of allTypes) {\n const sourceTypeAnchors = sourceAnchorsByType[type] || [];\n const targetTypeAnchors = targetAnchorsByType[type] || [];\n\n if (\n sourceTypeAnchors.length !== targetTypeAnchors.length ||\n !this.anchorsEqual(sourceTypeAnchors, targetTypeAnchors)\n ) {\n conflicts.push({\n frameId: sourceFrame.frame_id,\n conflictType: 'content',\n sourceFrame,\n targetFrame,\n conflictDetails: [\n {\n field: `anchors_${type}`,\n sourceValue: sourceTypeAnchors,\n targetValue: targetTypeAnchors,\n lastModified: {\n source: new Date(),\n target: new Date(),\n },\n },\n ],\n severity: 'low',\n autoResolvable: true, // Can merge anchors\n });\n }\n }\n } catch (error: unknown) {\n logger.warn(\n `Failed to analyze anchor conflicts for frame: ${sourceFrame.frame_id}`,\n error\n );\n }\n\n return conflicts;\n }\n\n /**\n * Auto-resolve conflicts based on merge policy\n */\n private async autoResolveConflicts(sessionId: string): Promise<void> {\n const session = this.activeSessions.get(sessionId);\n if (!session) return;\n\n const autoResolvableConflicts = session.conflicts.filter(\n (c) => c.autoResolvable\n );\n\n for (const conflict of autoResolvableConflicts) {\n const resolution = await this.applyMergePolicy(conflict, session.policy);\n if (resolution) {\n session.resolutions.push(resolution);\n session.metadata.autoResolvedConflicts++;\n\n logger.debug(`Auto-resolved conflict: ${conflict.frameId}`, {\n type: conflict.conflictType,\n strategy: resolution.strategy,\n });\n }\n }\n\n // Update session status\n const remainingConflicts = session.conflicts.filter(\n (c) => !session.resolutions.find((r) => r.conflictId === c.frameId)\n );\n\n if (remainingConflicts.length === 0) {\n session.status = 'completed';\n session.completedAt = new Date();\n } else if (remainingConflicts.every((c) => !c.autoResolvable)) {\n session.status = 'manual_review';\n }\n\n this.activeSessions.set(sessionId, session);\n }\n\n /**\n * Apply merge policy to resolve conflicts automatically\n */\n private async applyMergePolicy(\n conflict: MergeConflict,\n policy: MergePolicy\n ): Promise<MergeResolution | null> {\n // Sort rules by priority\n const sortedRules = policy.rules.sort((a, b) => b.priority - a.priority);\n\n for (const rule of sortedRules) {\n if (this.evaluateRuleCondition(conflict, rule.condition)) {\n return {\n conflictId: conflict.frameId,\n strategy:\n rule.action === 'require_manual' ? 
'manual' : (rule.action as any),\n resolvedBy: 'system',\n resolvedAt: new Date(),\n notes: `Auto-resolved by policy: ${policy.name}`,\n };\n }\n }\n\n return null;\n }\n\n /**\n * Manually resolve a specific conflict\n */\n async resolveConflict(\n sessionId: string,\n conflictId: string,\n resolution: Omit<MergeResolution, 'conflictId' | 'resolvedAt'>\n ): Promise<void> {\n // Validate input parameters\n const input = validateInput(ConflictResolutionSchema, {\n strategy: resolution.strategy,\n resolvedBy: resolution.resolvedBy,\n notes: resolution.notes,\n });\n const session = this.activeSessions.get(sessionId);\n if (!session) {\n throw new ValidationError(\n `Merge session not found: ${sessionId}`,\n ErrorCode.MERGE_SESSION_INVALID\n );\n }\n\n const conflict = session.conflicts.find((c) => c.frameId === conflictId);\n if (!conflict) {\n throw new ValidationError(\n `Conflict not found: ${conflictId}`,\n ErrorCode.MERGE_CONFLICT_UNRESOLVABLE\n );\n }\n\n const fullResolution: MergeResolution = {\n ...input,\n conflictId,\n resolvedAt: new Date(),\n };\n\n session.resolutions.push(fullResolution);\n session.metadata.manualResolvedConflicts++;\n\n // Check if all conflicts are resolved\n const resolvedConflictIds = new Set(\n session.resolutions.map((r) => r.conflictId)\n );\n const allResolved = session.conflicts.every((c) =>\n resolvedConflictIds.has(c.frameId)\n );\n\n if (allResolved) {\n session.status = 'completed';\n session.completedAt = new Date();\n }\n\n this.activeSessions.set(sessionId, session);\n\n logger.info(`Conflict manually resolved: ${conflictId}`, {\n strategy: resolution.strategy,\n resolvedBy: resolution.resolvedBy,\n });\n }\n\n /**\n * Execute merge with resolved conflicts\n */\n async executeMerge(sessionId: string): Promise<StackSyncResult> {\n const session = this.activeSessions.get(sessionId);\n if (!session) {\n throw new DatabaseError(\n `Merge session not found: ${sessionId}`,\n ErrorCode.RESOURCE_NOT_FOUND\n );\n }\n\n if (session.status !== 'completed') {\n throw new DatabaseError(\n `Merge session not ready for execution: ${session.status}`,\n ErrorCode.INVALID_STATE\n );\n }\n\n try {\n // Build resolution map\n const resolutionMap = new Map(\n session.resolutions.map((r) => [r.conflictId, r])\n );\n\n // Execute sync with custom conflict resolution\n const result = await this.dualStackManager.syncStacks(\n session.sourceStackId,\n session.targetStackId,\n {\n conflictResolution: 'merge', // Will be overridden by our resolution map\n frameIds: session.conflicts.map((c) => c.frameId),\n }\n );\n\n logger.info(`Merge executed: ${sessionId}`, {\n mergedFrames: result.mergedFrames.length,\n conflicts: result.conflictFrames.length,\n errors: result.errors.length,\n });\n\n return result;\n } catch (error: unknown) {\n throw new DatabaseError(\n 'Failed to execute merge',\n ErrorCode.OPERATION_FAILED,\n { sessionId },\n error instanceof Error ? 
error : undefined\n );\n }\n }\n\n /**\n * Get merge session details\n */\n async getMergeSession(sessionId: string): Promise<MergeSession | null> {\n return this.activeSessions.get(sessionId) || null;\n }\n\n /**\n * Create custom merge policy\n */\n async createMergePolicy(policy: MergePolicy): Promise<void> {\n // Validate input parameters\n const input = validateInput(CreateMergePolicySchema, policy);\n\n this.mergePolicies.set(input.name, input);\n logger.info(`Created merge policy: ${input.name}`, {\n rules: input.rules.length,\n autoApplyThreshold: input.autoApplyThreshold,\n });\n }\n\n /**\n * Initialize default merge policies\n */\n private initializeDefaultPolicies(): void {\n // Conservative policy - prefer manual resolution\n this.mergePolicies.set('conservative', {\n name: 'conservative',\n description: 'Prefer manual resolution for most conflicts',\n rules: [\n {\n condition: '$.conflictType == \"metadata\" && $.severity == \"low\"',\n action: 'target_wins',\n priority: 1,\n },\n {\n condition: '$.severity == \"critical\"',\n action: 'require_manual',\n priority: 10,\n },\n ],\n autoApplyThreshold: 'never',\n });\n\n // Aggressive policy - auto-resolve when possible\n this.mergePolicies.set('aggressive', {\n name: 'aggressive',\n description: 'Auto-resolve conflicts when safe',\n rules: [\n {\n condition: '$.conflictType == \"sequence\"',\n action: 'merge_both',\n priority: 5,\n },\n {\n condition: '$.severity == \"low\"',\n action: 'source_wins',\n priority: 2,\n },\n {\n condition: '$.severity == \"medium\" && $.autoResolvable',\n action: 'merge_both',\n priority: 4,\n },\n ],\n autoApplyThreshold: 'medium',\n });\n\n // Default policy - balanced approach\n this.mergePolicies.set('default', {\n name: 'default',\n description: 'Balanced conflict resolution',\n rules: [\n {\n condition: '$.conflictType == \"sequence\" && $.severity == \"low\"',\n action: 'merge_both',\n priority: 3,\n },\n {\n condition: '$.conflictType == \"metadata\" && $.autoResolvable',\n action: 'target_wins',\n priority: 2,\n },\n {\n condition: '$.severity == \"critical\"',\n action: 'require_manual',\n priority: 10,\n },\n ],\n autoApplyThreshold: 'low',\n });\n }\n\n // Helper methods\n private getStackManager(stackId: string): any {\n // Use DualStackManager's getStackManager method to get the right stack\n return this.dualStackManager.getStackManager(stackId);\n }\n\n private groupAnchorsByType(anchors: Anchor[]): Record<string, Anchor[]> {\n return anchors.reduce(\n (groups, anchor) => {\n if (!groups[anchor.type]) groups[anchor.type] = [];\n groups[anchor.type].push(anchor);\n return groups;\n },\n {} as Record<string, Anchor[]>\n );\n }\n\n private anchorsEqual(anchors1: Anchor[], anchors2: Anchor[]): boolean {\n if (anchors1.length !== anchors2.length) return false;\n\n // Sort by text for comparison\n const sorted1 = [...anchors1].sort((a, b) => a.text.localeCompare(b.text));\n const sorted2 = [...anchors2].sort((a, b) => a.text.localeCompare(b.text));\n\n return sorted1.every(\n (anchor, i) =>\n anchor.text === sorted2[i].text &&\n anchor.priority === sorted2[i].priority\n );\n }\n\n private evaluateRuleCondition(\n conflict: MergeConflict,\n condition: string\n ): boolean {\n // Simple condition evaluation - in real implementation would use JSONPath\n return (\n condition.includes(conflict.conflictType) ||\n condition.includes(conflict.severity)\n );\n }\n}\n"],
+
"sourcesContent": ["/**\n * Stack Merge Conflict Resolution - STA-101\n * Advanced conflict resolution for frame merging between individual and shared stacks\n */\n\nimport type { Frame, Event, Anchor } from './frame-types.js';\nimport {\n DualStackManager,\n type StackSyncResult,\n} from './dual-stack-manager.js';\nimport { logger } from '../monitoring/logger.js';\nimport { ValidationError, DatabaseError, ErrorCode } from '../errors/index.js';\nimport {\n validateInput,\n StartMergeSessionSchema,\n CreateMergePolicySchema,\n ConflictResolutionSchema,\n type StartMergeSessionInput,\n type CreateMergePolicyInput,\n type ConflictResolutionInput,\n} from './validation.js';\n\nexport interface MergeConflict {\n frameId: string;\n conflictType:\n | 'content'\n | 'metadata'\n | 'sequence'\n | 'dependency'\n | 'permission';\n sourceFrame: Frame;\n targetFrame: Frame;\n conflictDetails: {\n field: string;\n sourceValue: any;\n targetValue: any;\n lastModified: {\n source: Date;\n target: Date;\n };\n }[];\n severity: 'low' | 'medium' | 'high' | 'critical';\n autoResolvable: boolean;\n}\n\nexport interface MergeResolution {\n conflictId: string;\n strategy: 'source_wins' | 'target_wins' | 'merge_both' | 'manual' | 'skip';\n resolutionData?: Record<string, any>;\n resolvedBy: string;\n resolvedAt: Date;\n notes?: string;\n}\n\nexport interface MergePolicy {\n name: string;\n description: string;\n rules: Array<{\n condition: string; // JSONPath expression\n action: 'source_wins' | 'target_wins' | 'merge_both' | 'require_manual';\n priority: number;\n }>;\n autoApplyThreshold: 'low' | 'medium' | 'high' | 'never';\n}\n\nexport interface MergeSession {\n sessionId: string;\n sourceStackId: string;\n targetStackId: string;\n conflicts: MergeConflict[];\n resolutions: MergeResolution[];\n policy: MergePolicy;\n status: 'analyzing' | 'resolving' | 'completed' | 'failed' | 'manual_review';\n startedAt: Date;\n completedAt?: Date;\n metadata: {\n totalFrames: number;\n conflictFrames: number;\n autoResolvedConflicts: number;\n manualResolvedConflicts: number;\n };\n}\n\nexport class StackMergeResolver {\n private dualStackManager: DualStackManager;\n private activeSessions: Map<string, MergeSession> = new Map();\n private mergePolicies: Map<string, MergePolicy> = new Map();\n\n constructor(dualStackManager: DualStackManager) {\n this.dualStackManager = dualStackManager;\n this.initializeDefaultPolicies();\n logger.debug('StackMergeResolver initialized', {\n policies: Array.from(this.mergePolicies.keys()),\n });\n }\n\n /**\n * Start a merge session with conflict analysis\n */\n async startMergeSession(\n sourceStackId: string,\n targetStackId: string,\n frameIds?: string[],\n policyName: string = 'default'\n ): Promise<string> {\n // Validate input parameters\n const input = validateInput(StartMergeSessionSchema, {\n sourceStackId,\n targetStackId,\n frameIds,\n policyName,\n });\n const sessionId = `merge-${Date.now()}-${Math.random().toString(36).substr(2, 9)}`;\n\n logger.debug('Looking for merge policy', {\n policyName: input.policyName,\n availablePolicies: Array.from(this.mergePolicies.keys()),\n });\n const policy = this.mergePolicies.get(input.policyName);\n if (!policy) {\n logger.error('Merge policy not found', {\n requested: input.policyName,\n available: Array.from(this.mergePolicies.keys()),\n });\n throw new ValidationError(\n `Merge policy not found: ${input.policyName}`,\n ErrorCode.RESOURCE_NOT_FOUND\n );\n }\n\n try {\n // Check merge permissions on both stacks\n const currentUserId =\n 
this.dualStackManager.getCurrentContext().ownerId || 'unknown';\n await this.dualStackManager\n .getPermissionManager()\n .enforcePermission(\n this.dualStackManager\n .getPermissionManager()\n .createContext(currentUserId, 'merge', 'stack', input.sourceStackId)\n );\n\n await this.dualStackManager\n .getPermissionManager()\n .enforcePermission(\n this.dualStackManager\n .getPermissionManager()\n .createContext(currentUserId, 'merge', 'stack', input.targetStackId)\n );\n\n // Create merge session\n const session: MergeSession = {\n sessionId,\n sourceStackId: input.sourceStackId,\n targetStackId: input.targetStackId,\n conflicts: [],\n resolutions: [],\n policy,\n status: 'analyzing',\n startedAt: new Date(),\n metadata: {\n totalFrames: 0,\n conflictFrames: 0,\n autoResolvedConflicts: 0,\n manualResolvedConflicts: 0,\n },\n };\n\n this.activeSessions.set(sessionId, session);\n\n // Analyze conflicts\n await this.analyzeConflicts(sessionId, frameIds);\n\n // Auto-resolve conflicts where possible\n await this.autoResolveConflicts(sessionId);\n\n logger.info(`Merge session started: ${sessionId}`, {\n sourceStack: sourceStackId,\n targetStack: targetStackId,\n conflicts: session.conflicts.length,\n policy: policyName,\n });\n\n return sessionId;\n } catch (error: unknown) {\n logger.error('Failed to start merge session', {\n error: error instanceof Error ? error.message : error,\n sourceStackId: input.sourceStackId,\n targetStackId: input.targetStackId,\n policyName: input.policyName,\n });\n throw new DatabaseError(\n 'Failed to start merge session',\n ErrorCode.OPERATION_FAILED,\n { sourceStackId, targetStackId },\n error instanceof Error ? error : undefined\n );\n }\n }\n\n /**\n * Analyze conflicts between source and target stacks\n */\n private async analyzeConflicts(\n sessionId: string,\n frameIds?: string[]\n ): Promise<void> {\n const session = this.activeSessions.get(sessionId);\n if (!session) {\n throw new DatabaseError(\n `Merge session not found: ${sessionId}`,\n ErrorCode.RESOURCE_NOT_FOUND\n );\n }\n\n try {\n const sourceStack = this.getStackManager(session.sourceStackId);\n const targetStack = this.getStackManager(session.targetStackId);\n\n // Get frames to analyze\n const framesToAnalyze =\n frameIds ||\n (await sourceStack.getActiveFrames()).map((f) => f.frame_id);\n\n session.metadata.totalFrames = framesToAnalyze.length;\n\n for (const frameId of framesToAnalyze) {\n const sourceFrame = await sourceStack.getFrame(frameId);\n if (!sourceFrame) continue;\n\n const targetFrame = await targetStack.getFrame(frameId);\n if (!targetFrame) continue; // No conflict if target doesn't exist\n\n // Analyze frame-level conflicts\n const conflicts = await this.analyzeFrameConflicts(\n sourceFrame,\n targetFrame\n );\n session.conflicts.push(...conflicts);\n }\n\n session.metadata.conflictFrames = new Set(\n session.conflicts.map((c) => c.frameId)\n ).size;\n session.status = 'resolving';\n\n this.activeSessions.set(sessionId, session);\n\n logger.info(`Conflict analysis completed: ${sessionId}`, {\n totalConflicts: session.conflicts.length,\n conflictFrames: session.metadata.conflictFrames,\n });\n } catch (error: unknown) {\n session.status = 'failed';\n this.activeSessions.set(sessionId, session);\n throw error;\n }\n }\n\n /**\n * Analyze conflicts within a single frame\n */\n private async analyzeFrameConflicts(\n sourceFrame: Frame,\n targetFrame: Frame\n ): Promise<MergeConflict[]> {\n const conflicts: MergeConflict[] = [];\n\n // Content conflicts\n if (sourceFrame.name !== 
targetFrame.name) {\n conflicts.push({\n frameId: sourceFrame.frame_id,\n conflictType: 'content',\n sourceFrame,\n targetFrame,\n conflictDetails: [\n {\n field: 'name',\n sourceValue: sourceFrame.name,\n targetValue: targetFrame.name,\n lastModified: {\n source: new Date(sourceFrame.created_at * 1000),\n target: new Date(targetFrame.created_at * 1000),\n },\n },\n ],\n severity: 'medium',\n autoResolvable: false,\n });\n }\n\n // State conflicts\n if (sourceFrame.state !== targetFrame.state) {\n conflicts.push({\n frameId: sourceFrame.frame_id,\n conflictType: 'metadata',\n sourceFrame,\n targetFrame,\n conflictDetails: [\n {\n field: 'state',\n sourceValue: sourceFrame.state,\n targetValue: targetFrame.state,\n lastModified: {\n source: new Date(sourceFrame.created_at * 1000),\n target: new Date(targetFrame.created_at * 1000),\n },\n },\n ],\n severity: 'high',\n autoResolvable: true, // Can auto-resolve based on timestamps\n });\n }\n\n // Input/Output conflicts\n if (\n JSON.stringify(sourceFrame.inputs) !== JSON.stringify(targetFrame.inputs)\n ) {\n conflicts.push({\n frameId: sourceFrame.frame_id,\n conflictType: 'content',\n sourceFrame,\n targetFrame,\n conflictDetails: [\n {\n field: 'inputs',\n sourceValue: sourceFrame.inputs,\n targetValue: targetFrame.inputs,\n lastModified: {\n source: new Date(sourceFrame.created_at * 1000),\n target: new Date(targetFrame.created_at * 1000),\n },\n },\n ],\n severity: 'medium',\n autoResolvable: false,\n });\n }\n\n // Analyze event conflicts\n const eventConflicts = await this.analyzeEventConflicts(\n sourceFrame,\n targetFrame\n );\n conflicts.push(...eventConflicts);\n\n // Analyze anchor conflicts\n const anchorConflicts = await this.analyzeAnchorConflicts(\n sourceFrame,\n targetFrame\n );\n conflicts.push(...anchorConflicts);\n\n return conflicts;\n }\n\n /**\n * Analyze conflicts in frame events\n */\n private async analyzeEventConflicts(\n sourceFrame: Frame,\n targetFrame: Frame\n ): Promise<MergeConflict[]> {\n const conflicts: MergeConflict[] = [];\n\n try {\n const sourceStack = this.getStackManager(sourceFrame.project_id);\n const targetStack = this.getStackManager(targetFrame.project_id);\n\n const sourceEvents = await sourceStack.getFrameEvents(\n sourceFrame.frame_id\n );\n const targetEvents = await targetStack.getFrameEvents(\n targetFrame.frame_id\n );\n\n // Check for sequence conflicts\n if (sourceEvents.length !== targetEvents.length) {\n conflicts.push({\n frameId: sourceFrame.frame_id,\n conflictType: 'sequence',\n sourceFrame,\n targetFrame,\n conflictDetails: [\n {\n field: 'event_count',\n sourceValue: sourceEvents.length,\n targetValue: targetEvents.length,\n lastModified: {\n source: new Date(),\n target: new Date(),\n },\n },\n ],\n severity: 'high',\n autoResolvable: true, // Can merge events\n });\n }\n\n // Check for content conflicts in matching events\n const minLength = Math.min(sourceEvents.length, targetEvents.length);\n for (let i = 0; i < minLength; i++) {\n const sourceEvent = sourceEvents[i];\n const targetEvent = targetEvents[i];\n\n if (\n sourceEvent.text !== targetEvent.text ||\n JSON.stringify(sourceEvent.metadata) !==\n JSON.stringify(targetEvent.metadata)\n ) {\n conflicts.push({\n frameId: sourceFrame.frame_id,\n conflictType: 'content',\n sourceFrame,\n targetFrame,\n conflictDetails: [\n {\n field: `event_${i}`,\n sourceValue: {\n text: sourceEvent.text,\n metadata: sourceEvent.metadata,\n },\n targetValue: {\n text: targetEvent.text,\n metadata: targetEvent.metadata,\n },\n lastModified: 
{\n source: new Date(),\n target: new Date(),\n },\n },\n ],\n severity: 'medium',\n autoResolvable: false,\n });\n }\n }\n } catch (error: unknown) {\n logger.warn(\n `Failed to analyze event conflicts for frame: ${sourceFrame.frame_id}`,\n error\n );\n }\n\n return conflicts;\n }\n\n /**\n * Analyze conflicts in frame anchors\n */\n private async analyzeAnchorConflicts(\n sourceFrame: Frame,\n targetFrame: Frame\n ): Promise<MergeConflict[]> {\n const conflicts: MergeConflict[] = [];\n\n try {\n const sourceStack = this.getStackManager(sourceFrame.project_id);\n const targetStack = this.getStackManager(targetFrame.project_id);\n\n const sourceAnchors = await sourceStack.getFrameAnchors(\n sourceFrame.frame_id\n );\n const targetAnchors = await targetStack.getFrameAnchors(\n targetFrame.frame_id\n );\n\n // Group anchors by type for comparison\n const sourceAnchorsByType = this.groupAnchorsByType(sourceAnchors);\n const targetAnchorsByType = this.groupAnchorsByType(targetAnchors);\n\n const allTypes = new Set([\n ...Object.keys(sourceAnchorsByType),\n ...Object.keys(targetAnchorsByType),\n ]);\n\n for (const type of allTypes) {\n const sourceTypeAnchors = sourceAnchorsByType[type] || [];\n const targetTypeAnchors = targetAnchorsByType[type] || [];\n\n if (\n sourceTypeAnchors.length !== targetTypeAnchors.length ||\n !this.anchorsEqual(sourceTypeAnchors, targetTypeAnchors)\n ) {\n conflicts.push({\n frameId: sourceFrame.frame_id,\n conflictType: 'content',\n sourceFrame,\n targetFrame,\n conflictDetails: [\n {\n field: `anchors_${type}`,\n sourceValue: sourceTypeAnchors,\n targetValue: targetTypeAnchors,\n lastModified: {\n source: new Date(),\n target: new Date(),\n },\n },\n ],\n severity: 'low',\n autoResolvable: true, // Can merge anchors\n });\n }\n }\n } catch (error: unknown) {\n logger.warn(\n `Failed to analyze anchor conflicts for frame: ${sourceFrame.frame_id}`,\n error\n );\n }\n\n return conflicts;\n }\n\n /**\n * Auto-resolve conflicts based on merge policy\n */\n private async autoResolveConflicts(sessionId: string): Promise<void> {\n const session = this.activeSessions.get(sessionId);\n if (!session) return;\n\n const autoResolvableConflicts = session.conflicts.filter(\n (c) => c.autoResolvable\n );\n\n for (const conflict of autoResolvableConflicts) {\n const resolution = await this.applyMergePolicy(conflict, session.policy);\n if (resolution) {\n session.resolutions.push(resolution);\n session.metadata.autoResolvedConflicts++;\n\n logger.debug(`Auto-resolved conflict: ${conflict.frameId}`, {\n type: conflict.conflictType,\n strategy: resolution.strategy,\n });\n }\n }\n\n // Update session status\n const remainingConflicts = session.conflicts.filter(\n (c) => !session.resolutions.find((r) => r.conflictId === c.frameId)\n );\n\n if (remainingConflicts.length === 0) {\n session.status = 'completed';\n session.completedAt = new Date();\n } else if (remainingConflicts.every((c) => !c.autoResolvable)) {\n session.status = 'manual_review';\n }\n\n this.activeSessions.set(sessionId, session);\n }\n\n /**\n * Apply merge policy to resolve conflicts automatically\n */\n private async applyMergePolicy(\n conflict: MergeConflict,\n policy: MergePolicy\n ): Promise<MergeResolution | null> {\n // Sort rules by priority\n const sortedRules = policy.rules.sort((a, b) => b.priority - a.priority);\n\n for (const rule of sortedRules) {\n if (this.evaluateRuleCondition(conflict, rule.condition)) {\n return {\n conflictId: conflict.frameId,\n strategy:\n rule.action === 'require_manual' ? 
'manual' : (rule.action as any),\n resolvedBy: 'system',\n resolvedAt: new Date(),\n notes: `Auto-resolved by policy: ${policy.name}`,\n };\n }\n }\n\n return null;\n }\n\n /**\n * Manually resolve a specific conflict\n */\n async resolveConflict(\n sessionId: string,\n conflictId: string,\n resolution: Omit<MergeResolution, 'conflictId' | 'resolvedAt'>\n ): Promise<void> {\n // Validate input parameters\n const input = validateInput(ConflictResolutionSchema, {\n strategy: resolution.strategy,\n resolvedBy: resolution.resolvedBy,\n notes: resolution.notes,\n });\n const session = this.activeSessions.get(sessionId);\n if (!session) {\n throw new ValidationError(\n `Merge session not found: ${sessionId}`,\n ErrorCode.MERGE_SESSION_INVALID\n );\n }\n\n const conflict = session.conflicts.find((c) => c.frameId === conflictId);\n if (!conflict) {\n throw new ValidationError(\n `Conflict not found: ${conflictId}`,\n ErrorCode.MERGE_CONFLICT_UNRESOLVABLE\n );\n }\n\n const fullResolution: MergeResolution = {\n ...input,\n conflictId,\n resolvedAt: new Date(),\n };\n\n session.resolutions.push(fullResolution);\n session.metadata.manualResolvedConflicts++;\n\n // Check if all conflicts are resolved\n const resolvedConflictIds = new Set(\n session.resolutions.map((r) => r.conflictId)\n );\n const allResolved = session.conflicts.every((c) =>\n resolvedConflictIds.has(c.frameId)\n );\n\n if (allResolved) {\n session.status = 'completed';\n session.completedAt = new Date();\n }\n\n this.activeSessions.set(sessionId, session);\n\n logger.info(`Conflict manually resolved: ${conflictId}`, {\n strategy: resolution.strategy,\n resolvedBy: resolution.resolvedBy,\n });\n }\n\n /**\n * Execute merge with resolved conflicts\n */\n async executeMerge(sessionId: string): Promise<StackSyncResult> {\n const session = this.activeSessions.get(sessionId);\n if (!session) {\n throw new DatabaseError(\n `Merge session not found: ${sessionId}`,\n ErrorCode.RESOURCE_NOT_FOUND\n );\n }\n\n if (session.status !== 'completed') {\n throw new DatabaseError(\n `Merge session not ready for execution: ${session.status}`,\n ErrorCode.INVALID_STATE\n );\n }\n\n try {\n // Build resolution map\n const resolutionMap = new Map(\n session.resolutions.map((r) => [r.conflictId, r])\n );\n\n // Execute sync with custom conflict resolution\n const result = await this.dualStackManager.syncStacks(\n session.sourceStackId,\n session.targetStackId,\n {\n conflictResolution: 'merge', // Will be overridden by our resolution map\n frameIds: session.conflicts.map((c) => c.frameId),\n }\n );\n\n logger.info(`Merge executed: ${sessionId}`, {\n mergedFrames: result.mergedFrames.length,\n conflicts: result.conflictFrames.length,\n errors: result.errors.length,\n });\n\n return result;\n } catch (error: unknown) {\n throw new DatabaseError(\n 'Failed to execute merge',\n ErrorCode.OPERATION_FAILED,\n { sessionId },\n error instanceof Error ? 
error : undefined\n );\n }\n }\n\n /**\n * Get merge session details\n */\n async getMergeSession(sessionId: string): Promise<MergeSession | null> {\n return this.activeSessions.get(sessionId) || null;\n }\n\n /**\n * Create custom merge policy\n */\n async createMergePolicy(policy: MergePolicy): Promise<void> {\n // Validate input parameters\n const input = validateInput(CreateMergePolicySchema, policy);\n\n this.mergePolicies.set(input.name, input);\n logger.info(`Created merge policy: ${input.name}`, {\n rules: input.rules.length,\n autoApplyThreshold: input.autoApplyThreshold,\n });\n }\n\n /**\n * Initialize default merge policies\n */\n private initializeDefaultPolicies(): void {\n // Conservative policy - prefer manual resolution\n this.mergePolicies.set('conservative', {\n name: 'conservative',\n description: 'Prefer manual resolution for most conflicts',\n rules: [\n {\n condition: '$.conflictType == \"metadata\" && $.severity == \"low\"',\n action: 'target_wins',\n priority: 1,\n },\n {\n condition: '$.severity == \"critical\"',\n action: 'require_manual',\n priority: 10,\n },\n ],\n autoApplyThreshold: 'never',\n });\n\n // Aggressive policy - auto-resolve when possible\n this.mergePolicies.set('aggressive', {\n name: 'aggressive',\n description: 'Auto-resolve conflicts when safe',\n rules: [\n {\n condition: '$.conflictType == \"sequence\"',\n action: 'merge_both',\n priority: 5,\n },\n {\n condition: '$.severity == \"low\"',\n action: 'source_wins',\n priority: 2,\n },\n {\n condition: '$.severity == \"medium\" && $.autoResolvable',\n action: 'merge_both',\n priority: 4,\n },\n ],\n autoApplyThreshold: 'medium',\n });\n\n // Default policy - balanced approach\n this.mergePolicies.set('default', {\n name: 'default',\n description: 'Balanced conflict resolution',\n rules: [\n {\n condition: '$.conflictType == \"sequence\" && $.severity == \"low\"',\n action: 'merge_both',\n priority: 3,\n },\n {\n condition: '$.conflictType == \"metadata\" && $.autoResolvable',\n action: 'target_wins',\n priority: 2,\n },\n {\n condition: '$.severity == \"critical\"',\n action: 'require_manual',\n priority: 10,\n },\n ],\n autoApplyThreshold: 'low',\n });\n }\n\n // Helper methods\n private getStackManager(stackId: string): any {\n // Use DualStackManager's getStackManager method to get the right stack\n return this.dualStackManager.getStackManager(stackId);\n }\n\n private groupAnchorsByType(anchors: Anchor[]): Record<string, Anchor[]> {\n return anchors.reduce(\n (groups, anchor) => {\n if (!groups[anchor.type]) groups[anchor.type] = [];\n groups[anchor.type].push(anchor);\n return groups;\n },\n {} as Record<string, Anchor[]>\n );\n }\n\n private anchorsEqual(anchors1: Anchor[], anchors2: Anchor[]): boolean {\n if (anchors1.length !== anchors2.length) return false;\n\n // Sort by text for comparison\n const sorted1 = [...anchors1].sort((a, b) => a.text.localeCompare(b.text));\n const sorted2 = [...anchors2].sort((a, b) => a.text.localeCompare(b.text));\n\n return sorted1.every(\n (anchor, i) =>\n anchor.text === sorted2[i].text &&\n anchor.priority === sorted2[i].priority\n );\n }\n\n private evaluateRuleCondition(\n conflict: MergeConflict,\n condition: string\n ): boolean {\n // Simple condition evaluation - in real implementation would use JSONPath\n return (\n condition.includes(conflict.conflictType) ||\n condition.includes(conflict.severity)\n );\n }\n}\n"],
"mappings": ";;;;AAUA,SAAS,cAAc;AACvB,SAAS,iBAAiB,eAAe,iBAAiB;AAC1D;AAAA,EACE;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,OAIK;AA+DA,MAAM,mBAAmB;AAAA,EACtB;AAAA,EACA,iBAA4C,oBAAI,IAAI;AAAA,EACpD,gBAA0C,oBAAI,IAAI;AAAA,EAE1D,YAAY,kBAAoC;AAC9C,SAAK,mBAAmB;AACxB,SAAK,0BAA0B;AAC/B,WAAO,MAAM,kCAAkC;AAAA,MAC7C,UAAU,MAAM,KAAK,KAAK,cAAc,KAAK,CAAC;AAAA,IAChD,CAAC;AAAA,EACH;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,kBACJ,eACA,eACA,UACA,aAAqB,WACJ;AAEjB,UAAM,QAAQ,cAAc,yBAAyB;AAAA,MACnD;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF,CAAC;AACD,UAAM,YAAY,SAAS,KAAK,IAAI,CAAC,IAAI,KAAK,OAAO,EAAE,SAAS,EAAE,EAAE,OAAO,GAAG,CAAC,CAAC;AAEhF,WAAO,MAAM,4BAA4B;AAAA,MACvC,YAAY,MAAM;AAAA,MAClB,mBAAmB,MAAM,KAAK,KAAK,cAAc,KAAK,CAAC;AAAA,IACzD,CAAC;AACD,UAAM,SAAS,KAAK,cAAc,IAAI,MAAM,UAAU;AACtD,QAAI,CAAC,QAAQ;AACX,aAAO,MAAM,0BAA0B;AAAA,QACrC,WAAW,MAAM;AAAA,QACjB,WAAW,MAAM,KAAK,KAAK,cAAc,KAAK,CAAC;AAAA,MACjD,CAAC;AACD,YAAM,IAAI;AAAA,QACR,2BAA2B,MAAM,UAAU;AAAA,QAC3C,UAAU;AAAA,MACZ;AAAA,IACF;AAEA,QAAI;AAEF,YAAM,gBACJ,KAAK,iBAAiB,kBAAkB,EAAE,WAAW;AACvD,YAAM,KAAK,iBACR,qBAAqB,EACrB;AAAA,QACC,KAAK,iBACF,qBAAqB,EACrB,cAAc,eAAe,SAAS,SAAS,MAAM,aAAa;AAAA,MACvE;AAEF,YAAM,KAAK,iBACR,qBAAqB,EACrB;AAAA,QACC,KAAK,iBACF,qBAAqB,EACrB,cAAc,eAAe,SAAS,SAAS,MAAM,aAAa;AAAA,MACvE;AAGF,YAAM,UAAwB;AAAA,QAC5B;AAAA,QACA,eAAe,MAAM;AAAA,QACrB,eAAe,MAAM;AAAA,QACrB,WAAW,CAAC;AAAA,QACZ,aAAa,CAAC;AAAA,QACd;AAAA,QACA,QAAQ;AAAA,QACR,WAAW,oBAAI,KAAK;AAAA,QACpB,UAAU;AAAA,UACR,aAAa;AAAA,UACb,gBAAgB;AAAA,UAChB,uBAAuB;AAAA,UACvB,yBAAyB;AAAA,QAC3B;AAAA,MACF;AAEA,WAAK,eAAe,IAAI,WAAW,OAAO;AAG1C,YAAM,KAAK,iBAAiB,WAAW,QAAQ;AAG/C,YAAM,KAAK,qBAAqB,SAAS;AAEzC,aAAO,KAAK,0BAA0B,SAAS,IAAI;AAAA,QACjD,aAAa;AAAA,QACb,aAAa;AAAA,QACb,WAAW,QAAQ,UAAU;AAAA,QAC7B,QAAQ;AAAA,MACV,CAAC;AAED,aAAO;AAAA,IACT,SAAS,OAAgB;AACvB,aAAO,MAAM,iCAAiC;AAAA,QAC5C,OAAO,iBAAiB,QAAQ,MAAM,UAAU;AAAA,QAChD,eAAe,MAAM;AAAA,QACrB,eAAe,MAAM;AAAA,QACrB,YAAY,MAAM;AAAA,MACpB,CAAC;AACD,YAAM,IAAI;AAAA,QACR;AAAA,QACA,UAAU;AAAA,QACV,EAAE,eAAe,cAAc;AAAA,QAC/B,iBAAiB,QAAQ,QAAQ;AAAA,MACnC;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,iBACZ,WACA,UACe;AACf,UAAM,UAAU,KAAK,eAAe,IAAI,SAAS;AACjD,QAAI,CAAC,SAAS;AACZ,YAAM,IAAI;AAAA,QACR,4BAA4B,SAAS;AAAA,QACrC,UAAU;AAAA,MACZ;AAAA,IACF;AAEA,QAAI;AACF,YAAM,cAAc,KAAK,gBAAgB,QAAQ,aAAa;AAC9D,YAAM,cAAc,KAAK,gBAAgB,QAAQ,aAAa;AAG9D,YAAM,kBACJ,aACC,MAAM,YAAY,gBAAgB,GAAG,IAAI,CAAC,MAAM,EAAE,QAAQ;AAE7D,cAAQ,SAAS,cAAc,gBAAgB;AAE/C,iBAAW,WAAW,iBAAiB;AACrC,cAAM,cAAc,MAAM,YAAY,SAAS,OAAO;AACtD,YAAI,CAAC,YAAa;AAElB,cAAM,cAAc,MAAM,YAAY,SAAS,OAAO;AACtD,YAAI,CAAC,YAAa;AAGlB,cAAM,YAAY,MAAM,KAAK;AAAA,UAC3B;AAAA,UACA;AAAA,QACF;AACA,gBAAQ,UAAU,KAAK,GAAG,SAAS;AAAA,MACrC;AAEA,cAAQ,SAAS,iBAAiB,IAAI;AAAA,QACpC,QAAQ,UAAU,IAAI,CAAC,MAAM,EAAE,OAAO;AAAA,MACxC,EAAE;AACF,cAAQ,SAAS;AAEjB,WAAK,eAAe,IAAI,WAAW,OAAO;AAE1C,aAAO,KAAK,gCAAgC,SAAS,IAAI;AAAA,QACvD,gBAAgB,QAAQ,UAAU;AAAA,QAClC,gBAAgB,QAAQ,SAAS;AAAA,MACnC,CAAC;AAAA,IACH,SAAS,OAAgB;AACvB,cAAQ,SAAS;AACjB,WAAK,eAAe,IAAI,WAAW,OAAO;AAC1C,YAAM;AAAA,IACR;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,sBACZ,aACA,aAC0B;AAC1B,UAAM,YAA6B,CAAC;AAGpC,QAAI,YAAY,SAAS,YAAY,MAAM;AACzC,gBAAU,KAAK;AAAA,QACb,SAAS,YAAY;AAAA,QACrB,cAAc;AAAA,QACd;AAAA,QACA;AAAA,QACA,iBAAiB;AAAA,UACf;AAAA,YACE,OAAO;AAAA,YACP,aAAa,YAAY;AAAA,YACzB,aAAa,YAAY;AAAA,YACzB,cAAc;AAAA,cACZ,QAAQ,IAAI,KAAK,YAAY,aAAa,GAAI;AAAA,cAC9C,QAAQ,IAAI,KAAK,YAAY,aAAa,GAAI;AAAA,YAChD;AAAA,UACF;AAAA,QACF;AAAA,QACA,UAAU;AAAA,QACV,gBAAgB;AAAA,MAClB,CAAC;AAAA,IACH;AAGA,QAAI,YAAY,UAAU,YAAY,OAAO;AAC3C,gBAAU,KAAK;AAAA,QACb,SAAS,YAAY;AAAA,QACrB,cAAc;AAAA,QACd;AAAA,QACA;AAAA,QACA,iBAAiB;AAAA,UACf;AAAA,YACE,OAAO;AAAA,YACP,aAAa,YAAY;AAAA,YACzB,aAAa,YAAY;AAA
A,YACzB,cAAc;AAAA,cACZ,QAAQ,IAAI,KAAK,YAAY,aAAa,GAAI;AAAA,cAC9C,QAAQ,IAAI,KAAK,YAAY,aAAa,GAAI;AAAA,YAChD;AAAA,UACF;AAAA,QACF;AAAA,QACA,UAAU;AAAA,QACV,gBAAgB;AAAA;AAAA,MAClB,CAAC;AAAA,IACH;AAGA,QACE,KAAK,UAAU,YAAY,MAAM,MAAM,KAAK,UAAU,YAAY,MAAM,GACxE;AACA,gBAAU,KAAK;AAAA,QACb,SAAS,YAAY;AAAA,QACrB,cAAc;AAAA,QACd;AAAA,QACA;AAAA,QACA,iBAAiB;AAAA,UACf;AAAA,YACE,OAAO;AAAA,YACP,aAAa,YAAY;AAAA,YACzB,aAAa,YAAY;AAAA,YACzB,cAAc;AAAA,cACZ,QAAQ,IAAI,KAAK,YAAY,aAAa,GAAI;AAAA,cAC9C,QAAQ,IAAI,KAAK,YAAY,aAAa,GAAI;AAAA,YAChD;AAAA,UACF;AAAA,QACF;AAAA,QACA,UAAU;AAAA,QACV,gBAAgB;AAAA,MAClB,CAAC;AAAA,IACH;AAGA,UAAM,iBAAiB,MAAM,KAAK;AAAA,MAChC;AAAA,MACA;AAAA,IACF;AACA,cAAU,KAAK,GAAG,cAAc;AAGhC,UAAM,kBAAkB,MAAM,KAAK;AAAA,MACjC;AAAA,MACA;AAAA,IACF;AACA,cAAU,KAAK,GAAG,eAAe;AAEjC,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,sBACZ,aACA,aAC0B;AAC1B,UAAM,YAA6B,CAAC;AAEpC,QAAI;AACF,YAAM,cAAc,KAAK,gBAAgB,YAAY,UAAU;AAC/D,YAAM,cAAc,KAAK,gBAAgB,YAAY,UAAU;AAE/D,YAAM,eAAe,MAAM,YAAY;AAAA,QACrC,YAAY;AAAA,MACd;AACA,YAAM,eAAe,MAAM,YAAY;AAAA,QACrC,YAAY;AAAA,MACd;AAGA,UAAI,aAAa,WAAW,aAAa,QAAQ;AAC/C,kBAAU,KAAK;AAAA,UACb,SAAS,YAAY;AAAA,UACrB,cAAc;AAAA,UACd;AAAA,UACA;AAAA,UACA,iBAAiB;AAAA,YACf;AAAA,cACE,OAAO;AAAA,cACP,aAAa,aAAa;AAAA,cAC1B,aAAa,aAAa;AAAA,cAC1B,cAAc;AAAA,gBACZ,QAAQ,oBAAI,KAAK;AAAA,gBACjB,QAAQ,oBAAI,KAAK;AAAA,cACnB;AAAA,YACF;AAAA,UACF;AAAA,UACA,UAAU;AAAA,UACV,gBAAgB;AAAA;AAAA,QAClB,CAAC;AAAA,MACH;AAGA,YAAM,YAAY,KAAK,IAAI,aAAa,QAAQ,aAAa,MAAM;AACnE,eAAS,IAAI,GAAG,IAAI,WAAW,KAAK;AAClC,cAAM,cAAc,aAAa,CAAC;AAClC,cAAM,cAAc,aAAa,CAAC;AAElC,YACE,YAAY,SAAS,YAAY,QACjC,KAAK,UAAU,YAAY,QAAQ,MACjC,KAAK,UAAU,YAAY,QAAQ,GACrC;AACA,oBAAU,KAAK;AAAA,YACb,SAAS,YAAY;AAAA,YACrB,cAAc;AAAA,YACd;AAAA,YACA;AAAA,YACA,iBAAiB;AAAA,cACf;AAAA,gBACE,OAAO,SAAS,CAAC;AAAA,gBACjB,aAAa;AAAA,kBACX,MAAM,YAAY;AAAA,kBAClB,UAAU,YAAY;AAAA,gBACxB;AAAA,gBACA,aAAa;AAAA,kBACX,MAAM,YAAY;AAAA,kBAClB,UAAU,YAAY;AAAA,gBACxB;AAAA,gBACA,cAAc;AAAA,kBACZ,QAAQ,oBAAI,KAAK;AAAA,kBACjB,QAAQ,oBAAI,KAAK;AAAA,gBACnB;AAAA,cACF;AAAA,YACF;AAAA,YACA,UAAU;AAAA,YACV,gBAAgB;AAAA,UAClB,CAAC;AAAA,QACH;AAAA,MACF;AAAA,IACF,SAAS,OAAgB;AACvB,aAAO;AAAA,QACL,gDAAgD,YAAY,QAAQ;AAAA,QACpE;AAAA,MACF;AAAA,IACF;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,uBACZ,aACA,aAC0B;AAC1B,UAAM,YAA6B,CAAC;AAEpC,QAAI;AACF,YAAM,cAAc,KAAK,gBAAgB,YAAY,UAAU;AAC/D,YAAM,cAAc,KAAK,gBAAgB,YAAY,UAAU;AAE/D,YAAM,gBAAgB,MAAM,YAAY;AAAA,QACtC,YAAY;AAAA,MACd;AACA,YAAM,gBAAgB,MAAM,YAAY;AAAA,QACtC,YAAY;AAAA,MACd;AAGA,YAAM,sBAAsB,KAAK,mBAAmB,aAAa;AACjE,YAAM,sBAAsB,KAAK,mBAAmB,aAAa;AAEjE,YAAM,WAAW,oBAAI,IAAI;AAAA,QACvB,GAAG,OAAO,KAAK,mBAAmB;AAAA,QAClC,GAAG,OAAO,KAAK,mBAAmB;AAAA,MACpC,CAAC;AAED,iBAAW,QAAQ,UAAU;AAC3B,cAAM,oBAAoB,oBAAoB,IAAI,KAAK,CAAC;AACxD,cAAM,oBAAoB,oBAAoB,IAAI,KAAK,CAAC;AAExD,YACE,kBAAkB,WAAW,kBAAkB,UAC/C,CAAC,KAAK,aAAa,mBAAmB,iBAAiB,GACvD;AACA,oBAAU,KAAK;AAAA,YACb,SAAS,YAAY;AAAA,YACrB,cAAc;AAAA,YACd;AAAA,YACA;AAAA,YACA,iBAAiB;AAAA,cACf;AAAA,gBACE,OAAO,WAAW,IAAI;AAAA,gBACtB,aAAa;AAAA,gBACb,aAAa;AAAA,gBACb,cAAc;AAAA,kBACZ,QAAQ,oBAAI,KAAK;AAAA,kBACjB,QAAQ,oBAAI,KAAK;AAAA,gBACnB;AAAA,cACF;AAAA,YACF;AAAA,YACA,UAAU;AAAA,YACV,gBAAgB;AAAA;AAAA,UAClB,CAAC;AAAA,QACH;AAAA,MACF;AAAA,IACF,SAAS,OAAgB;AACvB,aAAO;AAAA,QACL,iDAAiD,YAAY,QAAQ;AAAA,QACrE;AAAA,MACF;AAAA,IACF;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,qBAAqB,WAAkC;AACnE,UAAM,UAAU,KAAK,eAAe,IAAI,SAAS;AACjD,QAAI,CAAC,QAAS;AAEd,UAAM,0BAA0B,QAAQ,UAAU;AAAA,MAChD,CAAC,MAAM,EAAE;AAAA,IACX;AAEA,eAAW,YAAY,yBAAyB;AAC9C,YAAM,aAAa,MAAM,KAAK,iBAAiB,UAAU,QAAQ,MAAM;AACvE,UAAI,YAAY;AACd,gBAAQ,YAAY,KAAK,UAAU;AACnC,gBAAQ,SAAS;AAEjB,eAAO,MAAM,2BAA2B,SAAS,OAAO,IAAI;AAAA,UAC1D,MAAM,
SAAS;AAAA,UACf,UAAU,WAAW;AAAA,QACvB,CAAC;AAAA,MACH;AAAA,IACF;AAGA,UAAM,qBAAqB,QAAQ,UAAU;AAAA,MAC3C,CAAC,MAAM,CAAC,QAAQ,YAAY,KAAK,CAAC,MAAM,EAAE,eAAe,EAAE,OAAO;AAAA,IACpE;AAEA,QAAI,mBAAmB,WAAW,GAAG;AACnC,cAAQ,SAAS;AACjB,cAAQ,cAAc,oBAAI,KAAK;AAAA,IACjC,WAAW,mBAAmB,MAAM,CAAC,MAAM,CAAC,EAAE,cAAc,GAAG;AAC7D,cAAQ,SAAS;AAAA,IACnB;AAEA,SAAK,eAAe,IAAI,WAAW,OAAO;AAAA,EAC5C;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,iBACZ,UACA,QACiC;AAEjC,UAAM,cAAc,OAAO,MAAM,KAAK,CAAC,GAAG,MAAM,EAAE,WAAW,EAAE,QAAQ;AAEvE,eAAW,QAAQ,aAAa;AAC9B,UAAI,KAAK,sBAAsB,UAAU,KAAK,SAAS,GAAG;AACxD,eAAO;AAAA,UACL,YAAY,SAAS;AAAA,UACrB,UACE,KAAK,WAAW,mBAAmB,WAAY,KAAK;AAAA,UACtD,YAAY;AAAA,UACZ,YAAY,oBAAI,KAAK;AAAA,UACrB,OAAO,4BAA4B,OAAO,IAAI;AAAA,QAChD;AAAA,MACF;AAAA,IACF;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,gBACJ,WACA,YACA,YACe;AAEf,UAAM,QAAQ,cAAc,0BAA0B;AAAA,MACpD,UAAU,WAAW;AAAA,MACrB,YAAY,WAAW;AAAA,MACvB,OAAO,WAAW;AAAA,IACpB,CAAC;AACD,UAAM,UAAU,KAAK,eAAe,IAAI,SAAS;AACjD,QAAI,CAAC,SAAS;AACZ,YAAM,IAAI;AAAA,QACR,4BAA4B,SAAS;AAAA,QACrC,UAAU;AAAA,MACZ;AAAA,IACF;AAEA,UAAM,WAAW,QAAQ,UAAU,KAAK,CAAC,MAAM,EAAE,YAAY,UAAU;AACvE,QAAI,CAAC,UAAU;AACb,YAAM,IAAI;AAAA,QACR,uBAAuB,UAAU;AAAA,QACjC,UAAU;AAAA,MACZ;AAAA,IACF;AAEA,UAAM,iBAAkC;AAAA,MACtC,GAAG;AAAA,MACH;AAAA,MACA,YAAY,oBAAI,KAAK;AAAA,IACvB;AAEA,YAAQ,YAAY,KAAK,cAAc;AACvC,YAAQ,SAAS;AAGjB,UAAM,sBAAsB,IAAI;AAAA,MAC9B,QAAQ,YAAY,IAAI,CAAC,MAAM,EAAE,UAAU;AAAA,IAC7C;AACA,UAAM,cAAc,QAAQ,UAAU;AAAA,MAAM,CAAC,MAC3C,oBAAoB,IAAI,EAAE,OAAO;AAAA,IACnC;AAEA,QAAI,aAAa;AACf,cAAQ,SAAS;AACjB,cAAQ,cAAc,oBAAI,KAAK;AAAA,IACjC;AAEA,SAAK,eAAe,IAAI,WAAW,OAAO;AAE1C,WAAO,KAAK,+BAA+B,UAAU,IAAI;AAAA,MACvD,UAAU,WAAW;AAAA,MACrB,YAAY,WAAW;AAAA,IACzB,CAAC;AAAA,EACH;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,aAAa,WAA6C;AAC9D,UAAM,UAAU,KAAK,eAAe,IAAI,SAAS;AACjD,QAAI,CAAC,SAAS;AACZ,YAAM,IAAI;AAAA,QACR,4BAA4B,SAAS;AAAA,QACrC,UAAU;AAAA,MACZ;AAAA,IACF;AAEA,QAAI,QAAQ,WAAW,aAAa;AAClC,YAAM,IAAI;AAAA,QACR,0CAA0C,QAAQ,MAAM;AAAA,QACxD,UAAU;AAAA,MACZ;AAAA,IACF;AAEA,QAAI;AAEF,YAAM,gBAAgB,IAAI;AAAA,QACxB,QAAQ,YAAY,IAAI,CAAC,MAAM,CAAC,EAAE,YAAY,CAAC,CAAC;AAAA,MAClD;AAGA,YAAM,SAAS,MAAM,KAAK,iBAAiB;AAAA,QACzC,QAAQ;AAAA,QACR,QAAQ;AAAA,QACR;AAAA,UACE,oBAAoB;AAAA;AAAA,UACpB,UAAU,QAAQ,UAAU,IAAI,CAAC,MAAM,EAAE,OAAO;AAAA,QAClD;AAAA,MACF;AAEA,aAAO,KAAK,mBAAmB,SAAS,IAAI;AAAA,QAC1C,cAAc,OAAO,aAAa;AAAA,QAClC,WAAW,OAAO,eAAe;AAAA,QACjC,QAAQ,OAAO,OAAO;AAAA,MACxB,CAAC;AAED,aAAO;AAAA,IACT,SAAS,OAAgB;AACvB,YAAM,IAAI;AAAA,QACR;AAAA,QACA,UAAU;AAAA,QACV,EAAE,UAAU;AAAA,QACZ,iBAAiB,QAAQ,QAAQ;AAAA,MACnC;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,gBAAgB,WAAiD;AACrE,WAAO,KAAK,eAAe,IAAI,SAAS,KAAK;AAAA,EAC/C;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,kBAAkB,QAAoC;AAE1D,UAAM,QAAQ,cAAc,yBAAyB,MAAM;AAE3D,SAAK,cAAc,IAAI,MAAM,MAAM,KAAK;AACxC,WAAO,KAAK,yBAAyB,MAAM,IAAI,IAAI;AAAA,MACjD,OAAO,MAAM,MAAM;AAAA,MACnB,oBAAoB,MAAM;AAAA,IAC5B,CAAC;AAAA,EACH;AAAA;AAAA;AAAA;AAAA,EAKQ,4BAAkC;AAExC,SAAK,cAAc,IAAI,gBAAgB;AAAA,MACrC,MAAM;AAAA,MACN,aAAa;AAAA,MACb,OAAO;AAAA,QACL;AAAA,UACE,WAAW;AAAA,UACX,QAAQ;AAAA,UACR,UAAU;AAAA,QACZ;AAAA,QACA;AAAA,UACE,WAAW;AAAA,UACX,QAAQ;AAAA,UACR,UAAU;AAAA,QACZ;AAAA,MACF;AAAA,MACA,oBAAoB;AAAA,IACtB,CAAC;AAGD,SAAK,cAAc,IAAI,cAAc;AAAA,MACnC,MAAM;AAAA,MACN,aAAa;AAAA,MACb,OAAO;AAAA,QACL;AAAA,UACE,WAAW;AAAA,UACX,QAAQ;AAAA,UACR,UAAU;AAAA,QACZ;AAAA,QACA;AAAA,UACE,WAAW;AAAA,UACX,QAAQ;AAAA,UACR,UAAU;AAAA,QACZ;AAAA,QACA;AAAA,UACE,WAAW;AAAA,UACX,QAAQ;AAAA,UACR,UAAU;AAAA,QACZ;AAAA,MACF;AAAA,MACA,oBAAoB;AAAA,IACtB,CAAC;AAGD,SAAK,cAAc,IAAI,WAAW;AAAA,MAChC,MAAM;AAAA,MACN,aAAa;AAAA,MACb,OAAO;AAAA,QACL;AAAA,UACE,WAAW;AAAA,UACX,QAAQ;AAAA,UACR,UAAU;AAAA,QACZ;AAAA,QACA;AAAA,UACE,WAAW;AAAA,UACX,QA
AQ;AAAA,UACR,UAAU;AAAA,QACZ;AAAA,QACA;AAAA,UACE,WAAW;AAAA,UACX,QAAQ;AAAA,UACR,UAAU;AAAA,QACZ;AAAA,MACF;AAAA,MACA,oBAAoB;AAAA,IACtB,CAAC;AAAA,EACH;AAAA;AAAA,EAGQ,gBAAgB,SAAsB;AAE5C,WAAO,KAAK,iBAAiB,gBAAgB,OAAO;AAAA,EACtD;AAAA,EAEQ,mBAAmB,SAA6C;AACtE,WAAO,QAAQ;AAAA,MACb,CAAC,QAAQ,WAAW;AAClB,YAAI,CAAC,OAAO,OAAO,IAAI,EAAG,QAAO,OAAO,IAAI,IAAI,CAAC;AACjD,eAAO,OAAO,IAAI,EAAE,KAAK,MAAM;AAC/B,eAAO;AAAA,MACT;AAAA,MACA,CAAC;AAAA,IACH;AAAA,EACF;AAAA,EAEQ,aAAa,UAAoB,UAA6B;AACpE,QAAI,SAAS,WAAW,SAAS,OAAQ,QAAO;AAGhD,UAAM,UAAU,CAAC,GAAG,QAAQ,EAAE,KAAK,CAAC,GAAG,MAAM,EAAE,KAAK,cAAc,EAAE,IAAI,CAAC;AACzE,UAAM,UAAU,CAAC,GAAG,QAAQ,EAAE,KAAK,CAAC,GAAG,MAAM,EAAE,KAAK,cAAc,EAAE,IAAI,CAAC;AAEzE,WAAO,QAAQ;AAAA,MACb,CAAC,QAAQ,MACP,OAAO,SAAS,QAAQ,CAAC,EAAE,QAC3B,OAAO,aAAa,QAAQ,CAAC,EAAE;AAAA,IACnC;AAAA,EACF;AAAA,EAEQ,sBACN,UACA,WACS;AAET,WACE,UAAU,SAAS,SAAS,YAAY,KACxC,UAAU,SAAS,SAAS,QAAQ;AAAA,EAExC;AACF;",
"names": []
}
@@ -1,7 +1,7 @@
{
"version": 3,
"sources": ["../../../src/core/database/database-adapter.ts"],
-
"sourcesContent": ["/**\n * Database Adapter Interface\n * Provides abstraction layer for different database implementations\n * Supports SQLite (current) and ParadeDB (new) with seamless migration\n */\n\nimport type { Frame, Event, Anchor } from '../context/
+
"sourcesContent": ["/**\n * Database Adapter Interface\n * Provides abstraction layer for different database implementations\n * Supports SQLite (current) and ParadeDB (new) with seamless migration\n */\n\nimport type { Frame, Event, Anchor } from '../context/index.js';\n\nexport interface QueryOptions {\n limit?: number;\n offset?: number;\n orderBy?: string;\n orderDirection?: 'ASC' | 'DESC';\n timeout?: number;\n}\n\nexport interface SearchOptions extends QueryOptions {\n query: string;\n searchType?: 'text' | 'vector' | 'hybrid';\n scoreThreshold?: number;\n fields?: string[];\n boost?: Record<string, number>;\n}\n\nexport interface AggregationOptions {\n groupBy: string[];\n metrics: Array<{\n field: string;\n operation: 'count' | 'sum' | 'avg' | 'min' | 'max';\n alias?: string;\n }>;\n having?: Record<string, any>;\n}\n\nexport interface BulkOperation {\n type: 'insert' | 'update' | 'delete';\n table: string;\n data?: any;\n where?: Record<string, any>;\n}\n\nexport interface DatabaseStats {\n totalFrames: number;\n activeFrames: number;\n totalEvents: number;\n totalAnchors: number;\n diskUsage: number;\n lastVacuum?: Date;\n}\n\n// Database result type interfaces\nexport interface CountResult {\n count: number;\n}\n\nexport interface VersionResult {\n version: number;\n}\n\nexport interface FrameRow {\n frame_id: string;\n parent_frame_id?: string;\n project_id: string;\n run_id: string;\n type: string;\n name: string;\n state: string;\n depth: number;\n inputs: string;\n outputs: string;\n digest_text: string;\n digest_json: string;\n created_at: number;\n closed_at?: number;\n score?: number;\n}\n\nexport interface EventRow {\n event_id: string;\n frame_id: string;\n seq: number;\n type: string;\n text: string;\n metadata: string;\n}\n\nexport interface AnchorRow {\n anchor_id: string;\n frame_id: string;\n type: string;\n text: string;\n priority: number;\n created_at: number;\n metadata: string;\n}\n\nexport abstract class DatabaseAdapter {\n protected readonly projectId: string;\n protected readonly config: any;\n\n constructor(projectId: string, config?: any) {\n this.projectId = projectId;\n this.config = config || {};\n }\n\n // Lifecycle methods\n abstract connect(): Promise<void>;\n abstract disconnect(): Promise<void>;\n abstract isConnected(): boolean;\n abstract ping(): Promise<boolean>;\n\n // Schema management\n abstract initializeSchema(): Promise<void>;\n abstract migrateSchema(targetVersion: number): Promise<void>;\n abstract getSchemaVersion(): Promise<number>;\n\n // Frame operations\n abstract createFrame(frame: Partial<Frame>): Promise<string>;\n abstract getFrame(frameId: string): Promise<Frame | null>;\n abstract updateFrame(frameId: string, updates: Partial<Frame>): Promise<void>;\n abstract deleteFrame(frameId: string): Promise<void>;\n abstract getActiveFrames(runId?: string): Promise<Frame[]>;\n abstract closeFrame(frameId: string, outputs?: any): Promise<void>;\n\n // Event operations\n abstract createEvent(event: Partial<Event>): Promise<string>;\n abstract getFrameEvents(\n frameId: string,\n options?: QueryOptions\n ): Promise<Event[]>;\n abstract deleteFrameEvents(frameId: string): Promise<void>;\n\n // Anchor operations\n abstract createAnchor(anchor: Partial<Anchor>): Promise<string>;\n abstract getFrameAnchors(frameId: string): Promise<Anchor[]>;\n abstract deleteFrameAnchors(frameId: string): Promise<void>;\n\n // Search operations (enhanced for ParadeDB)\n abstract search(\n options: SearchOptions\n ): Promise<Array<Frame & { score: number 
}>>;\n abstract searchByVector(\n embedding: number[],\n options?: QueryOptions\n ): Promise<Array<Frame & { similarity: number }>>;\n abstract searchHybrid(\n textQuery: string,\n embedding: number[],\n weights?: { text: number; vector: number }\n ): Promise<Array<Frame & { score: number }>>;\n\n // Aggregation operations\n abstract aggregate(\n table: string,\n options: AggregationOptions\n ): Promise<Record<string, any>[]>;\n\n // Pattern detection\n abstract detectPatterns(timeRange?: { start: Date; end: Date }): Promise<\n Array<{\n pattern: string;\n type: string;\n frequency: number;\n lastSeen: Date;\n }>\n >;\n\n // Bulk operations\n abstract executeBulk(operations: BulkOperation[]): Promise<void>;\n abstract vacuum(): Promise<void>;\n abstract analyze(): Promise<void>;\n\n // Statistics\n abstract getStats(): Promise<DatabaseStats>;\n abstract getQueryStats(): Promise<\n Array<{\n query: string;\n calls: number;\n meanTime: number;\n totalTime: number;\n }>\n >;\n\n // Transaction support\n abstract beginTransaction(): Promise<void>;\n abstract commitTransaction(): Promise<void>;\n abstract rollbackTransaction(): Promise<void>;\n abstract inTransaction(\n callback: (adapter: DatabaseAdapter) => Promise<void>\n ): Promise<void>;\n\n // Export/Import for migration\n abstract exportData(\n tables: string[],\n format: 'json' | 'parquet' | 'csv'\n ): Promise<Buffer>;\n abstract importData(\n data: Buffer,\n format: 'json' | 'parquet' | 'csv',\n options?: { truncate?: boolean; upsert?: boolean }\n ): Promise<void>;\n\n // Utility methods\n protected generateId(): string {\n return crypto.randomUUID();\n }\n\n protected sanitizeQuery(query: string): string {\n // DEPRECATED: Use parameterized queries instead\n // This method is kept for legacy compatibility but should not be used\n console.warn(\n 'sanitizeQuery() is deprecated and unsafe - use parameterized queries'\n );\n return query.replace(/[;'\"\\\\]/g, '');\n }\n\n protected buildWhereClause(conditions: Record<string, any>): string {\n const clauses = Object.entries(conditions).map(([key, value]) => {\n if (value === null) {\n return `${key} IS NULL`;\n } else if (Array.isArray(value)) {\n return `${key} IN (${value.map(() => '?').join(',')})`;\n } else {\n return `${key} = ?`;\n }\n });\n return clauses.length > 0 ? `WHERE ${clauses.join(' AND ')}` : '';\n }\n\n protected buildOrderByClause(\n orderBy?: string,\n direction?: 'ASC' | 'DESC'\n ): string {\n if (!orderBy) return '';\n // Whitelist validation: allow letters, numbers, underscore, dot (for table aliasing)\n const isSafe = /^[a-zA-Z0-9_.]+$/.test(orderBy);\n if (!isSafe) {\n // Drop ORDER BY if unsafe to prevent injection via column name\n return '';\n }\n const dir = direction === 'DESC' ? 
'DESC' : 'ASC';\n return ` ORDER BY ${orderBy} ${dir}`;\n }\n\n protected buildLimitClause(limit?: number, offset?: number): string {\n if (!limit) return '';\n let clause = ` LIMIT ${limit}`;\n if (offset) clause += ` OFFSET ${offset}`;\n return clause;\n }\n}\n\n// Feature flags for gradual migration\nexport interface DatabaseFeatures {\n supportsFullTextSearch: boolean;\n supportsVectorSearch: boolean;\n supportsPartitioning: boolean;\n supportsAnalytics: boolean;\n supportsCompression: boolean;\n supportsMaterializedViews: boolean;\n supportsParallelQueries: boolean;\n}\n\nexport abstract class FeatureAwareDatabaseAdapter extends DatabaseAdapter {\n abstract getFeatures(): DatabaseFeatures;\n\n async canUseFeature(feature: keyof DatabaseFeatures): Promise<boolean> {\n const features = this.getFeatures();\n return features[feature] || false;\n }\n}\n"],
"mappings": ";;;;AAgGO,MAAe,gBAAgB;AAAA,EACjB;AAAA,EACA;AAAA,EAEnB,YAAY,WAAmB,QAAc;AAC3C,SAAK,YAAY;AACjB,SAAK,SAAS,UAAU,CAAC;AAAA,EAC3B;AAAA;AAAA,EAoGU,aAAqB;AAC7B,WAAO,OAAO,WAAW;AAAA,EAC3B;AAAA,EAEU,cAAc,OAAuB;AAG7C,YAAQ;AAAA,MACN;AAAA,IACF;AACA,WAAO,MAAM,QAAQ,YAAY,EAAE;AAAA,EACrC;AAAA,EAEU,iBAAiB,YAAyC;AAClE,UAAM,UAAU,OAAO,QAAQ,UAAU,EAAE,IAAI,CAAC,CAAC,KAAK,KAAK,MAAM;AAC/D,UAAI,UAAU,MAAM;AAClB,eAAO,GAAG,GAAG;AAAA,MACf,WAAW,MAAM,QAAQ,KAAK,GAAG;AAC/B,eAAO,GAAG,GAAG,QAAQ,MAAM,IAAI,MAAM,GAAG,EAAE,KAAK,GAAG,CAAC;AAAA,MACrD,OAAO;AACL,eAAO,GAAG,GAAG;AAAA,MACf;AAAA,IACF,CAAC;AACD,WAAO,QAAQ,SAAS,IAAI,SAAS,QAAQ,KAAK,OAAO,CAAC,KAAK;AAAA,EACjE;AAAA,EAEU,mBACR,SACA,WACQ;AACR,QAAI,CAAC,QAAS,QAAO;AAErB,UAAM,SAAS,mBAAmB,KAAK,OAAO;AAC9C,QAAI,CAAC,QAAQ;AAEX,aAAO;AAAA,IACT;AACA,UAAM,MAAM,cAAc,SAAS,SAAS;AAC5C,WAAO,aAAa,OAAO,IAAI,GAAG;AAAA,EACpC;AAAA,EAEU,iBAAiB,OAAgB,QAAyB;AAClE,QAAI,CAAC,MAAO,QAAO;AACnB,QAAI,SAAS,UAAU,KAAK;AAC5B,QAAI,OAAQ,WAAU,WAAW,MAAM;AACvC,WAAO;AAAA,EACT;AACF;AAaO,MAAe,oCAAoC,gBAAgB;AAAA,EAGxE,MAAM,cAAc,SAAmD;AACrE,UAAM,WAAW,KAAK,YAAY;AAClC,WAAO,SAAS,OAAO,KAAK;AAAA,EAC9B;AACF;",
"names": []
}
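The embedded database-adapter.ts source above centers on two query-building helpers shared by every adapter: buildWhereClause, which binds values as `?` parameters, and buildOrderByClause, which whitelists column identifiers rather than escaping them (and deprecates the older sanitizeQuery string-stripping approach). A minimal standalone TypeScript sketch of that pattern follows; the free-function form and the example query are illustrative only, not the package's exported API, though the logic mirrors the embedded source.

type Conditions = Record<string, unknown>;

// Values never enter the SQL text: each condition becomes a `?` placeholder
// to be bound separately by the driver.
function buildWhereClause(conditions: Conditions): string {
  const clauses = Object.entries(conditions).map(([key, value]) => {
    if (value === null) return `${key} IS NULL`;
    if (Array.isArray(value)) return `${key} IN (${value.map(() => '?').join(',')})`;
    return `${key} = ?`;
  });
  return clauses.length > 0 ? `WHERE ${clauses.join(' AND ')}` : '';
}

// Identifiers cannot be parameterized, so they are whitelisted instead:
// an unsafe column name drops the ORDER BY entirely rather than being escaped.
function buildOrderByClause(orderBy?: string, direction?: 'ASC' | 'DESC'): string {
  if (!orderBy) return '';
  if (!/^[a-zA-Z0-9_.]+$/.test(orderBy)) return '';
  return ` ORDER BY ${orderBy} ${direction === 'DESC' ? 'DESC' : 'ASC'}`;
}

// Usage: only whitelisted identifiers reach the SQL string; values stay as params.
const sql =
  'SELECT * FROM frames ' +
  buildWhereClause({ state: 'active', parent_frame_id: null }) +
  buildOrderByClause('created_at', 'DESC');
console.log(sql);
// SELECT * FROM frames WHERE state = ? AND parent_frame_id IS NULL ORDER BY created_at DESC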
@@ -1,7 +1,7 @@
{
"version": 3,
"sources": ["../../../src/core/database/paradedb-adapter.ts"],
-
"sourcesContent": ["/**\n * ParadeDB Database Adapter\n * Advanced PostgreSQL with built-in search (BM25) and analytics capabilities\n */\n\nimport { Pool, PoolClient } from 'pg';\nimport {\n FeatureAwareDatabaseAdapter,\n DatabaseFeatures,\n SearchOptions,\n QueryOptions,\n AggregationOptions,\n BulkOperation,\n DatabaseStats,\n} from './database-adapter.js';\nimport type { Frame, Event, Anchor } from '../context/frame-manager.js';\nimport { logger } from '../monitoring/logger.js';\nimport { DatabaseError, ErrorCode, ValidationError } from '../errors/index.js';\n\nexport interface ParadeDBConfig {\n connectionString?: string;\n host?: string;\n port?: number;\n database?: string;\n user?: string;\n password?: string;\n ssl?: boolean | { rejectUnauthorized?: boolean };\n max?: number; // Max pool size\n idleTimeoutMillis?: number;\n connectionTimeoutMillis?: number;\n statementTimeout?: number;\n enableBM25?: boolean;\n enableVector?: boolean;\n enableAnalytics?: boolean;\n}\n\nexport class ParadeDBAdapter extends FeatureAwareDatabaseAdapter {\n private pool: Pool | null = null;\n private activeClient: PoolClient | null = null;\n\n constructor(projectId: string, config: ParadeDBConfig) {\n super(projectId, config);\n }\n\n getFeatures(): DatabaseFeatures {\n const config = this.config as ParadeDBConfig;\n return {\n supportsFullTextSearch: config.enableBM25 !== false,\n supportsVectorSearch: config.enableVector !== false,\n supportsPartitioning: true,\n supportsAnalytics: config.enableAnalytics !== false,\n supportsCompression: true,\n supportsMaterializedViews: true,\n supportsParallelQueries: true,\n };\n }\n\n async connect(): Promise<void> {\n if (this.pool) return;\n\n const config = this.config as ParadeDBConfig;\n\n this.pool = new Pool({\n connectionString: config.connectionString,\n host: config.host || 'localhost',\n port: config.port || 5432,\n database: config.database || 'stackmemory',\n user: config.user,\n password: config.password,\n ssl: config.ssl,\n max: config.max || 20,\n idleTimeoutMillis: config.idleTimeoutMillis || 30000,\n connectionTimeoutMillis: config.connectionTimeoutMillis || 2000,\n statement_timeout: config.statementTimeout || 30000,\n });\n\n // Test connection\n const client = await this.pool.connect();\n try {\n await client.query('SELECT 1');\n logger.info('ParadeDB connected successfully');\n } finally {\n client.release();\n }\n }\n\n async disconnect(): Promise<void> {\n if (!this.pool) return;\n\n await this.pool.end();\n this.pool = null;\n logger.info('ParadeDB disconnected');\n }\n\n isConnected(): boolean {\n return this.pool !== null && !this.pool.ended;\n }\n\n async ping(): Promise<boolean> {\n if (!this.pool) return false;\n\n try {\n const client = await this.pool.connect();\n try {\n await client.query('SELECT 1');\n return true;\n } finally {\n client.release();\n }\n } catch {\n return false;\n }\n }\n\n async initializeSchema(): Promise<void> {\n const client = await this.getClient();\n\n try {\n await client.query('BEGIN');\n\n // Enable required extensions\n await client.query(`\n CREATE EXTENSION IF NOT EXISTS \"uuid-ossp\";\n CREATE EXTENSION IF NOT EXISTS \"pg_trgm\";\n CREATE EXTENSION IF NOT EXISTS \"btree_gin\";\n `);\n\n // Enable ParadeDB extensions if configured\n const config = this.config as ParadeDBConfig;\n\n if (config.enableBM25 !== false) {\n await client.query('CREATE EXTENSION IF NOT EXISTS pg_search;');\n }\n\n if (config.enableVector !== false) {\n await client.query('CREATE EXTENSION IF NOT EXISTS vector;');\n 
}\n\n if (config.enableAnalytics !== false) {\n await client.query('CREATE EXTENSION IF NOT EXISTS pg_analytics;');\n }\n\n // Create main tables with partitioning support\n await client.query(`\n -- Main frames table\n CREATE TABLE IF NOT EXISTS frames (\n frame_id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),\n run_id UUID NOT NULL,\n project_id TEXT NOT NULL,\n parent_frame_id UUID REFERENCES frames(frame_id) ON DELETE CASCADE,\n depth INTEGER NOT NULL DEFAULT 0,\n type TEXT NOT NULL,\n name TEXT NOT NULL,\n state TEXT DEFAULT 'active',\n score FLOAT DEFAULT 0.5,\n inputs JSONB DEFAULT '{}',\n outputs JSONB DEFAULT '{}',\n metadata JSONB DEFAULT '{}',\n digest_text TEXT,\n digest_json JSONB DEFAULT '{}',\n content TEXT, -- For full-text search\n embedding vector(768), -- For vector search\n created_at TIMESTAMPTZ DEFAULT NOW(),\n closed_at TIMESTAMPTZ,\n CONSTRAINT check_state CHECK (state IN ('active', 'closed', 'suspended'))\n ) PARTITION BY RANGE (created_at);\n\n -- Create partitions for time-based data\n CREATE TABLE IF NOT EXISTS frames_recent PARTITION OF frames\n FOR VALUES FROM (NOW() - INTERVAL '30 days') TO (NOW() + INTERVAL '1 day');\n \n CREATE TABLE IF NOT EXISTS frames_archive PARTITION OF frames\n FOR VALUES FROM ('2020-01-01') TO (NOW() - INTERVAL '30 days');\n\n -- Events table\n CREATE TABLE IF NOT EXISTS events (\n event_id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),\n run_id UUID NOT NULL,\n frame_id UUID NOT NULL REFERENCES frames(frame_id) ON DELETE CASCADE,\n seq INTEGER NOT NULL,\n event_type TEXT NOT NULL,\n payload JSONB NOT NULL DEFAULT '{}',\n ts TIMESTAMPTZ DEFAULT NOW()\n );\n\n -- Anchors table\n CREATE TABLE IF NOT EXISTS anchors (\n anchor_id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),\n frame_id UUID NOT NULL REFERENCES frames(frame_id) ON DELETE CASCADE,\n project_id TEXT NOT NULL,\n type TEXT NOT NULL,\n text TEXT NOT NULL,\n priority INTEGER DEFAULT 0,\n metadata JSONB DEFAULT '{}',\n created_at TIMESTAMPTZ DEFAULT NOW()\n );\n\n -- Schema version tracking\n CREATE TABLE IF NOT EXISTS schema_version (\n version INTEGER PRIMARY KEY,\n applied_at TIMESTAMPTZ DEFAULT NOW(),\n description TEXT\n );\n `);\n\n // Create indexes for performance\n await client.query(`\n -- Standard B-tree indexes\n CREATE INDEX IF NOT EXISTS idx_frames_run_id ON frames USING btree(run_id);\n CREATE INDEX IF NOT EXISTS idx_frames_project_id ON frames USING btree(project_id);\n CREATE INDEX IF NOT EXISTS idx_frames_parent ON frames USING btree(parent_frame_id);\n CREATE INDEX IF NOT EXISTS idx_frames_state ON frames USING btree(state);\n CREATE INDEX IF NOT EXISTS idx_frames_type ON frames USING btree(type);\n CREATE INDEX IF NOT EXISTS idx_frames_created_at ON frames USING btree(created_at DESC);\n CREATE INDEX IF NOT EXISTS idx_frames_score ON frames USING btree(score DESC);\n\n -- GIN indexes for JSONB\n CREATE INDEX IF NOT EXISTS idx_frames_inputs ON frames USING gin(inputs);\n CREATE INDEX IF NOT EXISTS idx_frames_outputs ON frames USING gin(outputs);\n CREATE INDEX IF NOT EXISTS idx_frames_metadata ON frames USING gin(metadata);\n CREATE INDEX IF NOT EXISTS idx_frames_digest ON frames USING gin(digest_json);\n\n -- Trigram index for fuzzy text search\n CREATE INDEX IF NOT EXISTS idx_frames_name_trgm ON frames USING gin(name gin_trgm_ops);\n CREATE INDEX IF NOT EXISTS idx_frames_content_trgm ON frames USING gin(content gin_trgm_ops);\n\n -- Event indexes\n CREATE INDEX IF NOT EXISTS idx_events_frame ON events USING btree(frame_id);\n CREATE INDEX IF NOT EXISTS 
idx_events_seq ON events USING btree(frame_id, seq);\n CREATE INDEX IF NOT EXISTS idx_events_type ON events USING btree(event_type);\n CREATE INDEX IF NOT EXISTS idx_events_ts ON events USING btree(ts DESC);\n\n -- Anchor indexes\n CREATE INDEX IF NOT EXISTS idx_anchors_frame ON anchors USING btree(frame_id);\n CREATE INDEX IF NOT EXISTS idx_anchors_type ON anchors USING btree(type);\n CREATE INDEX IF NOT EXISTS idx_anchors_priority ON anchors USING btree(priority DESC);\n `);\n\n // Create BM25 search index if enabled\n if (config.enableBM25 !== false) {\n await client.query(`\n -- Create BM25 index for full-text search\n CALL paradedb.create_bm25_test_table(\n index_name => 'frames_search_idx',\n table_name => 'frames',\n schema_name => 'public',\n key_field => 'frame_id',\n text_fields => paradedb.field('name') || \n paradedb.field('content') || \n paradedb.field('digest_text'),\n numeric_fields => paradedb.field('score') || \n paradedb.field('depth'),\n json_fields => paradedb.field('metadata', flatten => true),\n datetime_fields => paradedb.field('created_at')\n );\n `);\n }\n\n // Create vector index if enabled\n if (config.enableVector !== false) {\n await client.query(`\n -- HNSW index for vector similarity search\n CREATE INDEX IF NOT EXISTS idx_frames_embedding \n ON frames USING hnsw (embedding vector_cosine_ops)\n WITH (m = 16, ef_construction = 64);\n `);\n }\n\n // Create materialized views for patterns\n await client.query(`\n CREATE MATERIALIZED VIEW IF NOT EXISTS pattern_summary AS\n WITH pattern_extraction AS (\n SELECT \n project_id,\n type as pattern_type,\n metadata->>'error' as error_pattern,\n COUNT(*) as frequency,\n MAX(score) as max_score,\n MAX(created_at) as last_seen,\n MIN(created_at) as first_seen\n FROM frames\n WHERE created_at > NOW() - INTERVAL '30 days'\n GROUP BY project_id, pattern_type, error_pattern\n )\n SELECT * FROM pattern_extraction\n WHERE frequency > 3;\n\n CREATE UNIQUE INDEX IF NOT EXISTS idx_pattern_summary_unique \n ON pattern_summary(project_id, pattern_type, error_pattern);\n `);\n\n // Set initial schema version\n await client.query(`\n INSERT INTO schema_version (version, description) \n VALUES (1, 'Initial ParadeDB schema with search and analytics')\n ON CONFLICT (version) DO NOTHING;\n `);\n\n await client.query('COMMIT');\n logger.info('ParadeDB schema initialized successfully');\n } catch (error: unknown) {\n await client.query('ROLLBACK');\n throw error;\n } finally {\n this.releaseClient(client);\n }\n }\n\n async migrateSchema(targetVersion: number): Promise<void> {\n const client = await this.getClient();\n\n try {\n const result = await client.query(\n 'SELECT MAX(version) as version FROM schema_version'\n );\n const currentVersion = result.rows[0]?.version || 0;\n\n if (currentVersion >= targetVersion) {\n logger.info('Schema already at target version', {\n currentVersion,\n targetVersion,\n });\n return;\n }\n\n // Apply migrations sequentially\n for (let v = currentVersion + 1; v <= targetVersion; v++) {\n logger.info(`Applying migration to version ${v}`);\n // Migration logic would go here based on version\n await client.query(\n 'INSERT INTO schema_version (version, description) VALUES ($1, $2)',\n [v, `Migration to version ${v}`]\n );\n }\n } finally {\n this.releaseClient(client);\n }\n }\n\n async getSchemaVersion(): Promise<number> {\n const client = await this.getClient();\n\n try {\n const result = await client.query(\n 'SELECT MAX(version) as version FROM schema_version'\n );\n return result.rows[0]?.version || 
0;\n } finally {\n this.releaseClient(client);\n }\n }\n\n // Frame operations\n async createFrame(frame: Partial<Frame>): Promise<string> {\n const client = await this.getClient();\n\n try {\n const result = await client.query(\n `\n INSERT INTO frames (\n frame_id, run_id, project_id, parent_frame_id, depth,\n type, name, state, score, inputs, outputs, metadata,\n digest_text, digest_json, content\n ) VALUES (\n COALESCE($1::uuid, uuid_generate_v4()), $2, $3, $4, $5,\n $6, $7, $8, $9, $10, $11, $12, $13, $14, $15\n ) RETURNING frame_id\n `,\n [\n frame.frame_id || null,\n frame.run_id,\n frame.project_id || this.projectId,\n frame.parent_frame_id || null,\n frame.depth || 0,\n frame.type,\n frame.name,\n frame.state || 'active',\n frame.score || 0.5,\n JSON.stringify(frame.inputs || {}),\n JSON.stringify(frame.outputs || {}),\n JSON.stringify(frame.metadata || {}),\n frame.digest_text || null,\n JSON.stringify(frame.digest_json || {}),\n frame.content || `${frame.name} ${frame.digest_text || ''}`,\n ]\n );\n\n return result.rows[0].frame_id;\n } finally {\n this.releaseClient(client);\n }\n }\n\n async getFrame(frameId: string): Promise<Frame | null> {\n const client = await this.getClient();\n\n try {\n const result = await client.query(\n 'SELECT * FROM frames WHERE frame_id = $1',\n [frameId]\n );\n\n if (result.rows.length === 0) return null;\n\n const row = result.rows[0];\n return {\n ...row,\n frame_id: row.frame_id,\n run_id: row.run_id,\n created_at: row.created_at.getTime(),\n closed_at: row.closed_at?.getTime(),\n };\n } finally {\n this.releaseClient(client);\n }\n }\n\n async updateFrame(frameId: string, updates: Partial<Frame>): Promise<void> {\n const client = await this.getClient();\n\n try {\n const fields = [];\n const values = [];\n let paramCount = 1;\n\n if (updates.state !== undefined) {\n fields.push(`state = $${paramCount++}`);\n values.push(updates.state);\n }\n\n if (updates.outputs !== undefined) {\n fields.push(`outputs = $${paramCount++}`);\n values.push(JSON.stringify(updates.outputs));\n }\n\n if (updates.score !== undefined) {\n fields.push(`score = $${paramCount++}`);\n values.push(updates.score);\n }\n\n if (updates.digest_text !== undefined) {\n fields.push(`digest_text = $${paramCount++}`);\n values.push(updates.digest_text);\n }\n\n if (updates.digest_json !== undefined) {\n fields.push(`digest_json = $${paramCount++}`);\n values.push(JSON.stringify(updates.digest_json));\n }\n\n if (updates.closed_at !== undefined) {\n fields.push(`closed_at = $${paramCount++}`);\n values.push(new Date(updates.closed_at));\n }\n\n if (fields.length === 0) return;\n\n values.push(frameId);\n\n await client.query(\n `\n UPDATE frames SET ${fields.join(', ')} WHERE frame_id = $${paramCount}\n `,\n values\n );\n } finally {\n this.releaseClient(client);\n }\n }\n\n async deleteFrame(frameId: string): Promise<void> {\n const client = await this.getClient();\n\n try {\n // CASCADE delete handles events and anchors\n await client.query('DELETE FROM frames WHERE frame_id = $1', [frameId]);\n } finally {\n this.releaseClient(client);\n }\n }\n\n async getActiveFrames(runId?: string): Promise<Frame[]> {\n const client = await this.getClient();\n\n try {\n let query = 'SELECT * FROM frames WHERE state = $1';\n const params: any[] = ['active'];\n\n if (runId) {\n query += ' AND run_id = $2';\n params.push(runId);\n }\n\n query += ' ORDER BY depth ASC, created_at ASC';\n\n const result = await client.query(query, params);\n\n return result.rows.map((row) => ({\n ...row,\n 
created_at: row.created_at.getTime(),\n closed_at: row.closed_at?.getTime(),\n }));\n } finally {\n this.releaseClient(client);\n }\n }\n\n async closeFrame(frameId: string, outputs?: any): Promise<void> {\n await this.updateFrame(frameId, {\n state: 'closed',\n outputs,\n closed_at: Date.now(),\n });\n }\n\n // Event operations\n async createEvent(event: Partial<Event>): Promise<string> {\n const client = await this.getClient();\n\n try {\n const result = await client.query(\n `\n INSERT INTO events (event_id, run_id, frame_id, seq, event_type, payload, ts)\n VALUES (COALESCE($1::uuid, uuid_generate_v4()), $2, $3, $4, $5, $6, $7)\n RETURNING event_id\n `,\n [\n event.event_id || null,\n event.run_id,\n event.frame_id,\n event.seq || 0,\n event.event_type,\n JSON.stringify(event.payload || {}),\n event.ts ? new Date(event.ts) : new Date(),\n ]\n );\n\n return result.rows[0].event_id;\n } finally {\n this.releaseClient(client);\n }\n }\n\n async getFrameEvents(\n frameId: string,\n options?: QueryOptions\n ): Promise<Event[]> {\n const client = await this.getClient();\n\n try {\n let query = 'SELECT * FROM events WHERE frame_id = $1';\n const params: any[] = [frameId];\n\n query += this.buildOrderByClause(\n options?.orderBy || 'seq',\n options?.orderDirection\n );\n query += this.buildLimitClause(options?.limit, options?.offset);\n\n const result = await client.query(query, params);\n\n return result.rows.map((row) => ({\n ...row,\n ts: row.ts.getTime(),\n }));\n } finally {\n this.releaseClient(client);\n }\n }\n\n async deleteFrameEvents(frameId: string): Promise<void> {\n const client = await this.getClient();\n\n try {\n await client.query('DELETE FROM events WHERE frame_id = $1', [frameId]);\n } finally {\n this.releaseClient(client);\n }\n }\n\n // Anchor operations\n async createAnchor(anchor: Partial<Anchor>): Promise<string> {\n const client = await this.getClient();\n\n try {\n const result = await client.query(\n `\n INSERT INTO anchors (anchor_id, frame_id, project_id, type, text, priority, metadata)\n VALUES (COALESCE($1::uuid, uuid_generate_v4()), $2, $3, $4, $5, $6, $7)\n RETURNING anchor_id\n `,\n [\n anchor.anchor_id || null,\n anchor.frame_id,\n anchor.project_id || this.projectId,\n anchor.type,\n anchor.text,\n anchor.priority || 0,\n JSON.stringify(anchor.metadata || {}),\n ]\n );\n\n return result.rows[0].anchor_id;\n } finally {\n this.releaseClient(client);\n }\n }\n\n async getFrameAnchors(frameId: string): Promise<Anchor[]> {\n const client = await this.getClient();\n\n try {\n const result = await client.query(\n `\n SELECT * FROM anchors WHERE frame_id = $1 \n ORDER BY priority DESC, created_at ASC\n `,\n [frameId]\n );\n\n return result.rows.map((row) => ({\n ...row,\n created_at: row.created_at.getTime(),\n }));\n } finally {\n this.releaseClient(client);\n }\n }\n\n async deleteFrameAnchors(frameId: string): Promise<void> {\n const client = await this.getClient();\n\n try {\n await client.query('DELETE FROM anchors WHERE frame_id = $1', [frameId]);\n } finally {\n this.releaseClient(client);\n }\n }\n\n // Advanced search with BM25\n async search(\n options: SearchOptions\n ): Promise<Array<Frame & { score: number }>> {\n const client = await this.getClient();\n\n try {\n const config = this.config as ParadeDBConfig;\n\n if (config.enableBM25 !== false) {\n // Use ParadeDB BM25 search\n const result = await client.query(\n `\n SELECT f.*, s.score_bm25 as score\n FROM frames_search_idx.search(\n query => $1,\n limit_rows => $2,\n offset_rows => $3\n ) s\n JOIN 
frames f ON f.frame_id = s.frame_id\n WHERE ($4::float IS NULL OR s.score_bm25 >= $4)\n ORDER BY s.score_bm25 DESC\n `,\n [\n options.query,\n options.limit || 100,\n options.offset || 0,\n options.scoreThreshold || null,\n ]\n );\n\n return result.rows.map((row) => ({\n ...row,\n created_at: row.created_at.getTime(),\n closed_at: row.closed_at?.getTime(),\n }));\n } else {\n // Fallback to PostgreSQL full-text search\n const result = await client.query(\n `\n SELECT *,\n ts_rank(\n to_tsvector('english', COALESCE(name, '') || ' ' || COALESCE(content, '')),\n plainto_tsquery('english', $1)\n ) as score\n FROM frames\n WHERE to_tsvector('english', COALESCE(name, '') || ' ' || COALESCE(content, ''))\n @@ plainto_tsquery('english', $1)\n ORDER BY score DESC\n LIMIT $2 OFFSET $3\n `,\n [options.query, options.limit || 100, options.offset || 0]\n );\n\n return result.rows.map((row) => ({\n ...row,\n created_at: row.created_at.getTime(),\n closed_at: row.closed_at?.getTime(),\n }));\n }\n } finally {\n this.releaseClient(client);\n }\n }\n\n // Vector similarity search\n async searchByVector(\n embedding: number[],\n options?: QueryOptions\n ): Promise<Array<Frame & { similarity: number }>> {\n const client = await this.getClient();\n\n try {\n const config = this.config as ParadeDBConfig;\n\n if (config.enableVector === false) {\n logger.warn('Vector search not enabled in ParadeDB configuration');\n return [];\n }\n\n const result = await client.query(\n `\n SELECT *,\n 1 - (embedding <=> $1::vector) as similarity\n FROM frames\n WHERE embedding IS NOT NULL\n ORDER BY embedding <=> $1::vector\n LIMIT $2 OFFSET $3\n `,\n [\n `[${embedding.join(',')}]`,\n options?.limit || 100,\n options?.offset || 0,\n ]\n );\n\n return result.rows.map((row) => ({\n ...row,\n created_at: row.created_at.getTime(),\n closed_at: row.closed_at?.getTime(),\n }));\n } finally {\n this.releaseClient(client);\n }\n }\n\n // Hybrid search combining BM25 and vector\n async searchHybrid(\n textQuery: string,\n embedding: number[],\n weights?: { text: number; vector: number }\n ): Promise<Array<Frame & { score: number }>> {\n const client = await this.getClient();\n\n try {\n const textWeight = weights?.text || 0.6;\n const vectorWeight = weights?.vector || 0.4;\n\n const result = await client.query(\n `\n WITH bm25_results AS (\n SELECT frame_id, score_bm25\n FROM frames_search_idx.search(\n query => $1,\n limit_rows => 200\n )\n ),\n vector_results AS (\n SELECT frame_id,\n 1 - (embedding <=> $2::vector) as score_vector\n FROM frames\n WHERE embedding IS NOT NULL\n ORDER BY embedding <=> $2::vector\n LIMIT 200\n )\n SELECT f.*,\n (COALESCE(b.score_bm25, 0) * $3 + \n COALESCE(v.score_vector, 0) * $4) as score\n FROM frames f\n LEFT JOIN bm25_results b ON f.frame_id = b.frame_id\n LEFT JOIN vector_results v ON f.frame_id = v.frame_id\n WHERE b.frame_id IS NOT NULL OR v.frame_id IS NOT NULL\n ORDER BY score DESC\n LIMIT $5\n `,\n [textQuery, `[${embedding.join(',')}]`, textWeight, vectorWeight, 100]\n );\n\n return result.rows.map((row) => ({\n ...row,\n created_at: row.created_at.getTime(),\n closed_at: row.closed_at?.getTime(),\n }));\n } finally {\n this.releaseClient(client);\n }\n }\n\n // Advanced aggregation\n async aggregate(\n table: string,\n options: AggregationOptions\n ): Promise<Record<string, any>[]> {\n const client = await this.getClient();\n\n try {\n const metrics = options.metrics\n .map((m) => {\n const alias = m.alias || `${m.operation}_${m.field}`;\n return `${m.operation}(${m.field}) AS 
\"${alias}\"`;\n })\n .join(', ');\n\n let query = `\n SELECT ${options.groupBy.map((g) => `\"${g}\"`).join(', ')}, ${metrics}\n FROM ${table}\n GROUP BY ${options.groupBy.map((g) => `\"${g}\"`).join(', ')}\n `;\n\n if (options.having) {\n const havingClauses = Object.entries(options.having).map(\n ([key, value], i) => {\n return `${key} ${typeof value === 'object' ? value.op : '='} $${i + 1}`;\n }\n );\n query += ` HAVING ${havingClauses.join(' AND ')}`;\n }\n\n const result = await client.query(\n query,\n Object.values(options.having || {})\n );\n return result.rows;\n } finally {\n this.releaseClient(client);\n }\n }\n\n // Pattern detection with analytics\n async detectPatterns(timeRange?: { start: Date; end: Date }): Promise<\n Array<{\n pattern: string;\n type: string;\n frequency: number;\n lastSeen: Date;\n }>\n > {\n const client = await this.getClient();\n\n try {\n // Use materialized view for better performance\n const result = await client.query(\n `\n SELECT \n COALESCE(error_pattern, pattern_type) as pattern,\n pattern_type as type,\n frequency,\n last_seen\n FROM pattern_summary\n WHERE project_id = $1\n AND ($2::timestamptz IS NULL OR last_seen >= $2)\n AND ($3::timestamptz IS NULL OR first_seen <= $3)\n ORDER BY frequency DESC, last_seen DESC\n LIMIT 100\n `,\n [this.projectId, timeRange?.start || null, timeRange?.end || null]\n );\n\n return result.rows.map((row) => ({\n pattern: row.pattern,\n type: row.type,\n frequency: row.frequency,\n lastSeen: row.last_seen,\n }));\n } finally {\n this.releaseClient(client);\n }\n }\n\n // Bulk operations\n async executeBulk(operations: BulkOperation[]): Promise<void> {\n await this.inTransaction(async () => {\n const client = this.activeClient!;\n\n for (const op of operations) {\n switch (op.type) {\n case 'insert': {\n const cols = Object.keys(op.data);\n const values = Object.values(op.data);\n const placeholders = values.map((_, i) => `$${i + 1}`).join(',');\n\n await client.query(\n `INSERT INTO ${op.table} (${cols.join(',')}) VALUES (${placeholders})`,\n values\n );\n break;\n }\n\n case 'update': {\n const sets = Object.keys(op.data)\n .map((k, i) => `${k} = $${i + 1}`)\n .join(',');\n const whereClause = this.buildWhereClausePostgres(\n op.where || {},\n Object.keys(op.data).length\n );\n const values = [\n ...Object.values(op.data),\n ...Object.values(op.where || {}),\n ];\n\n await client.query(\n `UPDATE ${op.table} SET ${sets} ${whereClause}`,\n values\n );\n break;\n }\n\n case 'delete': {\n const whereClause = this.buildWhereClausePostgres(\n op.where || {},\n 0\n );\n await client.query(\n `DELETE FROM ${op.table} ${whereClause}`,\n Object.values(op.where || {})\n );\n break;\n }\n }\n }\n });\n }\n\n async vacuum(): Promise<void> {\n const client = await this.getClient();\n\n try {\n await client.query('VACUUM ANALYZE frames');\n await client.query('VACUUM ANALYZE events');\n await client.query('VACUUM ANALYZE anchors');\n\n // Refresh materialized views\n await client.query(\n 'REFRESH MATERIALIZED VIEW CONCURRENTLY pattern_summary'\n );\n\n logger.info('ParadeDB vacuum and analyze completed');\n } finally {\n this.releaseClient(client);\n }\n }\n\n async analyze(): Promise<void> {\n const client = await this.getClient();\n\n try {\n await client.query('ANALYZE frames');\n await client.query('ANALYZE events');\n await client.query('ANALYZE anchors');\n logger.info('ParadeDB analyze completed');\n } finally {\n this.releaseClient(client);\n }\n }\n\n // Statistics\n async getStats(): Promise<DatabaseStats> {\n 
const client = await this.getClient();\n\n try {\n const result = await client.query(`\n SELECT\n (SELECT COUNT(*) FROM frames) as total_frames,\n (SELECT COUNT(*) FROM frames WHERE state = 'active') as active_frames,\n (SELECT COUNT(*) FROM events) as total_events,\n (SELECT COUNT(*) FROM anchors) as total_anchors,\n pg_database_size(current_database()) as disk_usage\n `);\n\n return {\n totalFrames: parseInt(result.rows[0].total_frames),\n activeFrames: parseInt(result.rows[0].active_frames),\n totalEvents: parseInt(result.rows[0].total_events),\n totalAnchors: parseInt(result.rows[0].total_anchors),\n diskUsage: parseInt(result.rows[0].disk_usage),\n };\n } finally {\n this.releaseClient(client);\n }\n }\n\n async getQueryStats(): Promise<\n Array<{\n query: string;\n calls: number;\n meanTime: number;\n totalTime: number;\n }>\n > {\n const client = await this.getClient();\n\n try {\n const result = await client.query(`\n SELECT \n query,\n calls,\n mean_exec_time as mean_time,\n total_exec_time as total_time\n FROM pg_stat_statements\n WHERE query NOT LIKE '%pg_stat_statements%'\n ORDER BY total_exec_time DESC\n LIMIT 100\n `);\n\n return result.rows.map((row) => ({\n query: row.query,\n calls: parseInt(row.calls),\n meanTime: parseFloat(row.mean_time),\n totalTime: parseFloat(row.total_time),\n }));\n } catch (error: unknown) {\n logger.warn('pg_stat_statements not available', error);\n return [];\n } finally {\n this.releaseClient(client);\n }\n }\n\n // Transaction support\n async beginTransaction(): Promise<void> {\n this.activeClient = await this.pool!.connect();\n await this.activeClient.query('BEGIN');\n }\n\n async commitTransaction(): Promise<void> {\n if (!this.activeClient)\n throw new DatabaseError(\n 'No active transaction',\n ErrorCode.DB_TRANSACTION_FAILED\n );\n\n await this.activeClient.query('COMMIT');\n this.activeClient.release();\n this.activeClient = null;\n }\n\n async rollbackTransaction(): Promise<void> {\n if (!this.activeClient)\n throw new DatabaseError(\n 'No active transaction',\n ErrorCode.DB_TRANSACTION_FAILED\n );\n\n await this.activeClient.query('ROLLBACK');\n this.activeClient.release();\n this.activeClient = null;\n }\n\n async inTransaction(\n callback: (adapter: DatabaseAdapter) => Promise<void>\n ): Promise<void> {\n await this.beginTransaction();\n\n try {\n await callback(this);\n await this.commitTransaction();\n } catch (error: unknown) {\n try {\n await this.rollbackTransaction();\n } catch (rollbackError: unknown) {\n // Log rollback failure but don't mask original error\n console.error('Transaction rollback failed:', rollbackError);\n // Connection might be in bad state - mark as unusable if connection pool exists\n if (this.connectionPool) {\n this.connectionPool.markConnectionAsBad(this.client);\n }\n }\n throw error;\n }\n }\n\n // Export/Import\n async exportData(\n tables: string[],\n format: 'json' | 'parquet' | 'csv'\n ): Promise<Buffer> {\n const client = await this.getClient();\n\n try {\n if (format === 'json') {\n const data: Record<string, any[]> = {};\n\n for (const table of tables) {\n const result = await client.query(`SELECT * FROM ${table}`);\n data[table] = result.rows;\n }\n\n return Buffer.from(JSON.stringify(data, null, 2));\n } else if (format === 'csv') {\n // Export as CSV using COPY\n const chunks: string[] = [];\n\n for (const table of tables) {\n const result = await client.query(`\n COPY (SELECT * FROM ${table}) TO STDOUT WITH CSV HEADER\n `);\n chunks.push(result.toString());\n }\n\n return 
Buffer.from(chunks.join('\\n\\n'));\n } else {\n throw new ValidationError(\n `Format ${format} not yet implemented for ParadeDB export`,\n ErrorCode.VALIDATION_FAILED,\n { format, supportedFormats: ['json'] }\n );\n }\n } finally {\n this.releaseClient(client);\n }\n }\n\n async importData(\n data: Buffer,\n format: 'json' | 'parquet' | 'csv',\n options?: { truncate?: boolean; upsert?: boolean }\n ): Promise<void> {\n const client = await this.getClient();\n\n try {\n if (format === 'json') {\n const parsed = JSON.parse(data.toString());\n\n await client.query('BEGIN');\n\n for (const [table, rows] of Object.entries(parsed)) {\n if (options?.truncate) {\n await client.query(`TRUNCATE TABLE ${table} CASCADE`);\n }\n\n for (const row of rows as any[]) {\n const cols = Object.keys(row);\n const values = Object.values(row);\n const placeholders = values.map((_, i) => `$${i + 1}`).join(',');\n\n if (options?.upsert) {\n const updates = cols.map((c) => `${c} = EXCLUDED.${c}`).join(',');\n await client.query(\n `INSERT INTO ${table} (${cols.join(',')}) VALUES (${placeholders})\n ON CONFLICT DO UPDATE SET ${updates}`,\n values\n );\n } else {\n await client.query(\n `INSERT INTO ${table} (${cols.join(',')}) VALUES (${placeholders})`,\n values\n );\n }\n }\n }\n\n await client.query('COMMIT');\n } else {\n throw new ValidationError(\n `Format ${format} not yet implemented for ParadeDB import`,\n ErrorCode.VALIDATION_FAILED,\n { format, supportedFormats: ['json'] }\n );\n }\n } catch (error: unknown) {\n await client.query('ROLLBACK');\n throw error;\n } finally {\n this.releaseClient(client);\n }\n }\n\n // Helper methods\n private async getClient(): Promise<PoolClient> {\n if (this.activeClient) {\n return this.activeClient;\n }\n\n if (!this.pool) {\n throw new DatabaseError(\n 'Database not connected',\n ErrorCode.DB_CONNECTION_FAILED\n );\n }\n\n return await this.pool.connect();\n }\n\n private releaseClient(client: PoolClient): void {\n if (client !== this.activeClient) {\n client.release();\n }\n }\n\n private buildWhereClausePostgres(\n conditions: Record<string, any>,\n startParam: number\n ): string {\n const clauses = Object.entries(conditions).map(([key, value], i) => {\n const paramNum = startParam + i + 1;\n\n if (value === null) {\n return `${key} IS NULL`;\n } else if (Array.isArray(value)) {\n const placeholders = value.map((_, j) => `$${paramNum + j}`).join(',');\n return `${key} IN (${placeholders})`;\n } else {\n return `${key} = $${paramNum}`;\n }\n });\n\n return clauses.length > 0 ? `WHERE ${clauses.join(' AND ')}` : '';\n }\n}\n"],
+
"sourcesContent": ["/**\n * ParadeDB Database Adapter\n * Advanced PostgreSQL with built-in search (BM25) and analytics capabilities\n */\n\nimport { Pool, PoolClient } from 'pg';\nimport {\n FeatureAwareDatabaseAdapter,\n DatabaseFeatures,\n SearchOptions,\n QueryOptions,\n AggregationOptions,\n BulkOperation,\n DatabaseStats,\n} from './database-adapter.js';\nimport type { Frame, Event, Anchor } from '../context/index.js';\nimport { logger } from '../monitoring/logger.js';\nimport { DatabaseError, ErrorCode, ValidationError } from '../errors/index.js';\n\nexport interface ParadeDBConfig {\n connectionString?: string;\n host?: string;\n port?: number;\n database?: string;\n user?: string;\n password?: string;\n ssl?: boolean | { rejectUnauthorized?: boolean };\n max?: number; // Max pool size\n idleTimeoutMillis?: number;\n connectionTimeoutMillis?: number;\n statementTimeout?: number;\n enableBM25?: boolean;\n enableVector?: boolean;\n enableAnalytics?: boolean;\n}\n\nexport class ParadeDBAdapter extends FeatureAwareDatabaseAdapter {\n private pool: Pool | null = null;\n private activeClient: PoolClient | null = null;\n\n constructor(projectId: string, config: ParadeDBConfig) {\n super(projectId, config);\n }\n\n getFeatures(): DatabaseFeatures {\n const config = this.config as ParadeDBConfig;\n return {\n supportsFullTextSearch: config.enableBM25 !== false,\n supportsVectorSearch: config.enableVector !== false,\n supportsPartitioning: true,\n supportsAnalytics: config.enableAnalytics !== false,\n supportsCompression: true,\n supportsMaterializedViews: true,\n supportsParallelQueries: true,\n };\n }\n\n async connect(): Promise<void> {\n if (this.pool) return;\n\n const config = this.config as ParadeDBConfig;\n\n this.pool = new Pool({\n connectionString: config.connectionString,\n host: config.host || 'localhost',\n port: config.port || 5432,\n database: config.database || 'stackmemory',\n user: config.user,\n password: config.password,\n ssl: config.ssl,\n max: config.max || 20,\n idleTimeoutMillis: config.idleTimeoutMillis || 30000,\n connectionTimeoutMillis: config.connectionTimeoutMillis || 2000,\n statement_timeout: config.statementTimeout || 30000,\n });\n\n // Test connection\n const client = await this.pool.connect();\n try {\n await client.query('SELECT 1');\n logger.info('ParadeDB connected successfully');\n } finally {\n client.release();\n }\n }\n\n async disconnect(): Promise<void> {\n if (!this.pool) return;\n\n await this.pool.end();\n this.pool = null;\n logger.info('ParadeDB disconnected');\n }\n\n isConnected(): boolean {\n return this.pool !== null && !this.pool.ended;\n }\n\n async ping(): Promise<boolean> {\n if (!this.pool) return false;\n\n try {\n const client = await this.pool.connect();\n try {\n await client.query('SELECT 1');\n return true;\n } finally {\n client.release();\n }\n } catch {\n return false;\n }\n }\n\n async initializeSchema(): Promise<void> {\n const client = await this.getClient();\n\n try {\n await client.query('BEGIN');\n\n // Enable required extensions\n await client.query(`\n CREATE EXTENSION IF NOT EXISTS \"uuid-ossp\";\n CREATE EXTENSION IF NOT EXISTS \"pg_trgm\";\n CREATE EXTENSION IF NOT EXISTS \"btree_gin\";\n `);\n\n // Enable ParadeDB extensions if configured\n const config = this.config as ParadeDBConfig;\n\n if (config.enableBM25 !== false) {\n await client.query('CREATE EXTENSION IF NOT EXISTS pg_search;');\n }\n\n if (config.enableVector !== false) {\n await client.query('CREATE EXTENSION IF NOT EXISTS vector;');\n }\n\n if 
(config.enableAnalytics !== false) {\n await client.query('CREATE EXTENSION IF NOT EXISTS pg_analytics;');\n }\n\n // Create main tables with partitioning support\n await client.query(`\n -- Main frames table\n CREATE TABLE IF NOT EXISTS frames (\n frame_id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),\n run_id UUID NOT NULL,\n project_id TEXT NOT NULL,\n parent_frame_id UUID REFERENCES frames(frame_id) ON DELETE CASCADE,\n depth INTEGER NOT NULL DEFAULT 0,\n type TEXT NOT NULL,\n name TEXT NOT NULL,\n state TEXT DEFAULT 'active',\n score FLOAT DEFAULT 0.5,\n inputs JSONB DEFAULT '{}',\n outputs JSONB DEFAULT '{}',\n metadata JSONB DEFAULT '{}',\n digest_text TEXT,\n digest_json JSONB DEFAULT '{}',\n content TEXT, -- For full-text search\n embedding vector(768), -- For vector search\n created_at TIMESTAMPTZ DEFAULT NOW(),\n closed_at TIMESTAMPTZ,\n CONSTRAINT check_state CHECK (state IN ('active', 'closed', 'suspended'))\n ) PARTITION BY RANGE (created_at);\n\n -- Create partitions for time-based data\n CREATE TABLE IF NOT EXISTS frames_recent PARTITION OF frames\n FOR VALUES FROM (NOW() - INTERVAL '30 days') TO (NOW() + INTERVAL '1 day');\n \n CREATE TABLE IF NOT EXISTS frames_archive PARTITION OF frames\n FOR VALUES FROM ('2020-01-01') TO (NOW() - INTERVAL '30 days');\n\n -- Events table\n CREATE TABLE IF NOT EXISTS events (\n event_id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),\n run_id UUID NOT NULL,\n frame_id UUID NOT NULL REFERENCES frames(frame_id) ON DELETE CASCADE,\n seq INTEGER NOT NULL,\n event_type TEXT NOT NULL,\n payload JSONB NOT NULL DEFAULT '{}',\n ts TIMESTAMPTZ DEFAULT NOW()\n );\n\n -- Anchors table\n CREATE TABLE IF NOT EXISTS anchors (\n anchor_id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),\n frame_id UUID NOT NULL REFERENCES frames(frame_id) ON DELETE CASCADE,\n project_id TEXT NOT NULL,\n type TEXT NOT NULL,\n text TEXT NOT NULL,\n priority INTEGER DEFAULT 0,\n metadata JSONB DEFAULT '{}',\n created_at TIMESTAMPTZ DEFAULT NOW()\n );\n\n -- Schema version tracking\n CREATE TABLE IF NOT EXISTS schema_version (\n version INTEGER PRIMARY KEY,\n applied_at TIMESTAMPTZ DEFAULT NOW(),\n description TEXT\n );\n `);\n\n // Create indexes for performance\n await client.query(`\n -- Standard B-tree indexes\n CREATE INDEX IF NOT EXISTS idx_frames_run_id ON frames USING btree(run_id);\n CREATE INDEX IF NOT EXISTS idx_frames_project_id ON frames USING btree(project_id);\n CREATE INDEX IF NOT EXISTS idx_frames_parent ON frames USING btree(parent_frame_id);\n CREATE INDEX IF NOT EXISTS idx_frames_state ON frames USING btree(state);\n CREATE INDEX IF NOT EXISTS idx_frames_type ON frames USING btree(type);\n CREATE INDEX IF NOT EXISTS idx_frames_created_at ON frames USING btree(created_at DESC);\n CREATE INDEX IF NOT EXISTS idx_frames_score ON frames USING btree(score DESC);\n\n -- GIN indexes for JSONB\n CREATE INDEX IF NOT EXISTS idx_frames_inputs ON frames USING gin(inputs);\n CREATE INDEX IF NOT EXISTS idx_frames_outputs ON frames USING gin(outputs);\n CREATE INDEX IF NOT EXISTS idx_frames_metadata ON frames USING gin(metadata);\n CREATE INDEX IF NOT EXISTS idx_frames_digest ON frames USING gin(digest_json);\n\n -- Trigram index for fuzzy text search\n CREATE INDEX IF NOT EXISTS idx_frames_name_trgm ON frames USING gin(name gin_trgm_ops);\n CREATE INDEX IF NOT EXISTS idx_frames_content_trgm ON frames USING gin(content gin_trgm_ops);\n\n -- Event indexes\n CREATE INDEX IF NOT EXISTS idx_events_frame ON events USING btree(frame_id);\n CREATE INDEX IF NOT EXISTS idx_events_seq ON 
events USING btree(frame_id, seq);\n CREATE INDEX IF NOT EXISTS idx_events_type ON events USING btree(event_type);\n CREATE INDEX IF NOT EXISTS idx_events_ts ON events USING btree(ts DESC);\n\n -- Anchor indexes\n CREATE INDEX IF NOT EXISTS idx_anchors_frame ON anchors USING btree(frame_id);\n CREATE INDEX IF NOT EXISTS idx_anchors_type ON anchors USING btree(type);\n CREATE INDEX IF NOT EXISTS idx_anchors_priority ON anchors USING btree(priority DESC);\n `);\n\n // Create BM25 search index if enabled\n if (config.enableBM25 !== false) {\n await client.query(`\n -- Create BM25 index for full-text search\n CALL paradedb.create_bm25_test_table(\n index_name => 'frames_search_idx',\n table_name => 'frames',\n schema_name => 'public',\n key_field => 'frame_id',\n text_fields => paradedb.field('name') || \n paradedb.field('content') || \n paradedb.field('digest_text'),\n numeric_fields => paradedb.field('score') || \n paradedb.field('depth'),\n json_fields => paradedb.field('metadata', flatten => true),\n datetime_fields => paradedb.field('created_at')\n );\n `);\n }\n\n // Create vector index if enabled\n if (config.enableVector !== false) {\n await client.query(`\n -- HNSW index for vector similarity search\n CREATE INDEX IF NOT EXISTS idx_frames_embedding \n ON frames USING hnsw (embedding vector_cosine_ops)\n WITH (m = 16, ef_construction = 64);\n `);\n }\n\n // Create materialized views for patterns\n await client.query(`\n CREATE MATERIALIZED VIEW IF NOT EXISTS pattern_summary AS\n WITH pattern_extraction AS (\n SELECT \n project_id,\n type as pattern_type,\n metadata->>'error' as error_pattern,\n COUNT(*) as frequency,\n MAX(score) as max_score,\n MAX(created_at) as last_seen,\n MIN(created_at) as first_seen\n FROM frames\n WHERE created_at > NOW() - INTERVAL '30 days'\n GROUP BY project_id, pattern_type, error_pattern\n )\n SELECT * FROM pattern_extraction\n WHERE frequency > 3;\n\n CREATE UNIQUE INDEX IF NOT EXISTS idx_pattern_summary_unique \n ON pattern_summary(project_id, pattern_type, error_pattern);\n `);\n\n // Set initial schema version\n await client.query(`\n INSERT INTO schema_version (version, description) \n VALUES (1, 'Initial ParadeDB schema with search and analytics')\n ON CONFLICT (version) DO NOTHING;\n `);\n\n await client.query('COMMIT');\n logger.info('ParadeDB schema initialized successfully');\n } catch (error: unknown) {\n await client.query('ROLLBACK');\n throw error;\n } finally {\n this.releaseClient(client);\n }\n }\n\n async migrateSchema(targetVersion: number): Promise<void> {\n const client = await this.getClient();\n\n try {\n const result = await client.query(\n 'SELECT MAX(version) as version FROM schema_version'\n );\n const currentVersion = result.rows[0]?.version || 0;\n\n if (currentVersion >= targetVersion) {\n logger.info('Schema already at target version', {\n currentVersion,\n targetVersion,\n });\n return;\n }\n\n // Apply migrations sequentially\n for (let v = currentVersion + 1; v <= targetVersion; v++) {\n logger.info(`Applying migration to version ${v}`);\n // Migration logic would go here based on version\n await client.query(\n 'INSERT INTO schema_version (version, description) VALUES ($1, $2)',\n [v, `Migration to version ${v}`]\n );\n }\n } finally {\n this.releaseClient(client);\n }\n }\n\n async getSchemaVersion(): Promise<number> {\n const client = await this.getClient();\n\n try {\n const result = await client.query(\n 'SELECT MAX(version) as version FROM schema_version'\n );\n return result.rows[0]?.version || 0;\n } finally {\n 
this.releaseClient(client);\n }\n }\n\n // Frame operations\n async createFrame(frame: Partial<Frame>): Promise<string> {\n const client = await this.getClient();\n\n try {\n const result = await client.query(\n `\n INSERT INTO frames (\n frame_id, run_id, project_id, parent_frame_id, depth,\n type, name, state, score, inputs, outputs, metadata,\n digest_text, digest_json, content\n ) VALUES (\n COALESCE($1::uuid, uuid_generate_v4()), $2, $3, $4, $5,\n $6, $7, $8, $9, $10, $11, $12, $13, $14, $15\n ) RETURNING frame_id\n `,\n [\n frame.frame_id || null,\n frame.run_id,\n frame.project_id || this.projectId,\n frame.parent_frame_id || null,\n frame.depth || 0,\n frame.type,\n frame.name,\n frame.state || 'active',\n frame.score || 0.5,\n JSON.stringify(frame.inputs || {}),\n JSON.stringify(frame.outputs || {}),\n JSON.stringify(frame.metadata || {}),\n frame.digest_text || null,\n JSON.stringify(frame.digest_json || {}),\n frame.content || `${frame.name} ${frame.digest_text || ''}`,\n ]\n );\n\n return result.rows[0].frame_id;\n } finally {\n this.releaseClient(client);\n }\n }\n\n async getFrame(frameId: string): Promise<Frame | null> {\n const client = await this.getClient();\n\n try {\n const result = await client.query(\n 'SELECT * FROM frames WHERE frame_id = $1',\n [frameId]\n );\n\n if (result.rows.length === 0) return null;\n\n const row = result.rows[0];\n return {\n ...row,\n frame_id: row.frame_id,\n run_id: row.run_id,\n created_at: row.created_at.getTime(),\n closed_at: row.closed_at?.getTime(),\n };\n } finally {\n this.releaseClient(client);\n }\n }\n\n async updateFrame(frameId: string, updates: Partial<Frame>): Promise<void> {\n const client = await this.getClient();\n\n try {\n const fields = [];\n const values = [];\n let paramCount = 1;\n\n if (updates.state !== undefined) {\n fields.push(`state = $${paramCount++}`);\n values.push(updates.state);\n }\n\n if (updates.outputs !== undefined) {\n fields.push(`outputs = $${paramCount++}`);\n values.push(JSON.stringify(updates.outputs));\n }\n\n if (updates.score !== undefined) {\n fields.push(`score = $${paramCount++}`);\n values.push(updates.score);\n }\n\n if (updates.digest_text !== undefined) {\n fields.push(`digest_text = $${paramCount++}`);\n values.push(updates.digest_text);\n }\n\n if (updates.digest_json !== undefined) {\n fields.push(`digest_json = $${paramCount++}`);\n values.push(JSON.stringify(updates.digest_json));\n }\n\n if (updates.closed_at !== undefined) {\n fields.push(`closed_at = $${paramCount++}`);\n values.push(new Date(updates.closed_at));\n }\n\n if (fields.length === 0) return;\n\n values.push(frameId);\n\n await client.query(\n `\n UPDATE frames SET ${fields.join(', ')} WHERE frame_id = $${paramCount}\n `,\n values\n );\n } finally {\n this.releaseClient(client);\n }\n }\n\n async deleteFrame(frameId: string): Promise<void> {\n const client = await this.getClient();\n\n try {\n // CASCADE delete handles events and anchors\n await client.query('DELETE FROM frames WHERE frame_id = $1', [frameId]);\n } finally {\n this.releaseClient(client);\n }\n }\n\n async getActiveFrames(runId?: string): Promise<Frame[]> {\n const client = await this.getClient();\n\n try {\n let query = 'SELECT * FROM frames WHERE state = $1';\n const params: any[] = ['active'];\n\n if (runId) {\n query += ' AND run_id = $2';\n params.push(runId);\n }\n\n query += ' ORDER BY depth ASC, created_at ASC';\n\n const result = await client.query(query, params);\n\n return result.rows.map((row) => ({\n ...row,\n created_at: 
row.created_at.getTime(),\n closed_at: row.closed_at?.getTime(),\n }));\n } finally {\n this.releaseClient(client);\n }\n }\n\n async closeFrame(frameId: string, outputs?: any): Promise<void> {\n await this.updateFrame(frameId, {\n state: 'closed',\n outputs,\n closed_at: Date.now(),\n });\n }\n\n // Event operations\n async createEvent(event: Partial<Event>): Promise<string> {\n const client = await this.getClient();\n\n try {\n const result = await client.query(\n `\n INSERT INTO events (event_id, run_id, frame_id, seq, event_type, payload, ts)\n VALUES (COALESCE($1::uuid, uuid_generate_v4()), $2, $3, $4, $5, $6, $7)\n RETURNING event_id\n `,\n [\n event.event_id || null,\n event.run_id,\n event.frame_id,\n event.seq || 0,\n event.event_type,\n JSON.stringify(event.payload || {}),\n event.ts ? new Date(event.ts) : new Date(),\n ]\n );\n\n return result.rows[0].event_id;\n } finally {\n this.releaseClient(client);\n }\n }\n\n async getFrameEvents(\n frameId: string,\n options?: QueryOptions\n ): Promise<Event[]> {\n const client = await this.getClient();\n\n try {\n let query = 'SELECT * FROM events WHERE frame_id = $1';\n const params: any[] = [frameId];\n\n query += this.buildOrderByClause(\n options?.orderBy || 'seq',\n options?.orderDirection\n );\n query += this.buildLimitClause(options?.limit, options?.offset);\n\n const result = await client.query(query, params);\n\n return result.rows.map((row) => ({\n ...row,\n ts: row.ts.getTime(),\n }));\n } finally {\n this.releaseClient(client);\n }\n }\n\n async deleteFrameEvents(frameId: string): Promise<void> {\n const client = await this.getClient();\n\n try {\n await client.query('DELETE FROM events WHERE frame_id = $1', [frameId]);\n } finally {\n this.releaseClient(client);\n }\n }\n\n // Anchor operations\n async createAnchor(anchor: Partial<Anchor>): Promise<string> {\n const client = await this.getClient();\n\n try {\n const result = await client.query(\n `\n INSERT INTO anchors (anchor_id, frame_id, project_id, type, text, priority, metadata)\n VALUES (COALESCE($1::uuid, uuid_generate_v4()), $2, $3, $4, $5, $6, $7)\n RETURNING anchor_id\n `,\n [\n anchor.anchor_id || null,\n anchor.frame_id,\n anchor.project_id || this.projectId,\n anchor.type,\n anchor.text,\n anchor.priority || 0,\n JSON.stringify(anchor.metadata || {}),\n ]\n );\n\n return result.rows[0].anchor_id;\n } finally {\n this.releaseClient(client);\n }\n }\n\n async getFrameAnchors(frameId: string): Promise<Anchor[]> {\n const client = await this.getClient();\n\n try {\n const result = await client.query(\n `\n SELECT * FROM anchors WHERE frame_id = $1 \n ORDER BY priority DESC, created_at ASC\n `,\n [frameId]\n );\n\n return result.rows.map((row) => ({\n ...row,\n created_at: row.created_at.getTime(),\n }));\n } finally {\n this.releaseClient(client);\n }\n }\n\n async deleteFrameAnchors(frameId: string): Promise<void> {\n const client = await this.getClient();\n\n try {\n await client.query('DELETE FROM anchors WHERE frame_id = $1', [frameId]);\n } finally {\n this.releaseClient(client);\n }\n }\n\n // Advanced search with BM25\n async search(\n options: SearchOptions\n ): Promise<Array<Frame & { score: number }>> {\n const client = await this.getClient();\n\n try {\n const config = this.config as ParadeDBConfig;\n\n if (config.enableBM25 !== false) {\n // Use ParadeDB BM25 search\n const result = await client.query(\n `\n SELECT f.*, s.score_bm25 as score\n FROM frames_search_idx.search(\n query => $1,\n limit_rows => $2,\n offset_rows => $3\n ) s\n JOIN frames f ON 
f.frame_id = s.frame_id\n WHERE ($4::float IS NULL OR s.score_bm25 >= $4)\n ORDER BY s.score_bm25 DESC\n `,\n [\n options.query,\n options.limit || 100,\n options.offset || 0,\n options.scoreThreshold || null,\n ]\n );\n\n return result.rows.map((row) => ({\n ...row,\n created_at: row.created_at.getTime(),\n closed_at: row.closed_at?.getTime(),\n }));\n } else {\n // Fallback to PostgreSQL full-text search\n const result = await client.query(\n `\n SELECT *,\n ts_rank(\n to_tsvector('english', COALESCE(name, '') || ' ' || COALESCE(content, '')),\n plainto_tsquery('english', $1)\n ) as score\n FROM frames\n WHERE to_tsvector('english', COALESCE(name, '') || ' ' || COALESCE(content, ''))\n @@ plainto_tsquery('english', $1)\n ORDER BY score DESC\n LIMIT $2 OFFSET $3\n `,\n [options.query, options.limit || 100, options.offset || 0]\n );\n\n return result.rows.map((row) => ({\n ...row,\n created_at: row.created_at.getTime(),\n closed_at: row.closed_at?.getTime(),\n }));\n }\n } finally {\n this.releaseClient(client);\n }\n }\n\n // Vector similarity search\n async searchByVector(\n embedding: number[],\n options?: QueryOptions\n ): Promise<Array<Frame & { similarity: number }>> {\n const client = await this.getClient();\n\n try {\n const config = this.config as ParadeDBConfig;\n\n if (config.enableVector === false) {\n logger.warn('Vector search not enabled in ParadeDB configuration');\n return [];\n }\n\n const result = await client.query(\n `\n SELECT *,\n 1 - (embedding <=> $1::vector) as similarity\n FROM frames\n WHERE embedding IS NOT NULL\n ORDER BY embedding <=> $1::vector\n LIMIT $2 OFFSET $3\n `,\n [\n `[${embedding.join(',')}]`,\n options?.limit || 100,\n options?.offset || 0,\n ]\n );\n\n return result.rows.map((row) => ({\n ...row,\n created_at: row.created_at.getTime(),\n closed_at: row.closed_at?.getTime(),\n }));\n } finally {\n this.releaseClient(client);\n }\n }\n\n // Hybrid search combining BM25 and vector\n async searchHybrid(\n textQuery: string,\n embedding: number[],\n weights?: { text: number; vector: number }\n ): Promise<Array<Frame & { score: number }>> {\n const client = await this.getClient();\n\n try {\n const textWeight = weights?.text || 0.6;\n const vectorWeight = weights?.vector || 0.4;\n\n const result = await client.query(\n `\n WITH bm25_results AS (\n SELECT frame_id, score_bm25\n FROM frames_search_idx.search(\n query => $1,\n limit_rows => 200\n )\n ),\n vector_results AS (\n SELECT frame_id,\n 1 - (embedding <=> $2::vector) as score_vector\n FROM frames\n WHERE embedding IS NOT NULL\n ORDER BY embedding <=> $2::vector\n LIMIT 200\n )\n SELECT f.*,\n (COALESCE(b.score_bm25, 0) * $3 + \n COALESCE(v.score_vector, 0) * $4) as score\n FROM frames f\n LEFT JOIN bm25_results b ON f.frame_id = b.frame_id\n LEFT JOIN vector_results v ON f.frame_id = v.frame_id\n WHERE b.frame_id IS NOT NULL OR v.frame_id IS NOT NULL\n ORDER BY score DESC\n LIMIT $5\n `,\n [textQuery, `[${embedding.join(',')}]`, textWeight, vectorWeight, 100]\n );\n\n return result.rows.map((row) => ({\n ...row,\n created_at: row.created_at.getTime(),\n closed_at: row.closed_at?.getTime(),\n }));\n } finally {\n this.releaseClient(client);\n }\n }\n\n // Advanced aggregation\n async aggregate(\n table: string,\n options: AggregationOptions\n ): Promise<Record<string, any>[]> {\n const client = await this.getClient();\n\n try {\n const metrics = options.metrics\n .map((m) => {\n const alias = m.alias || `${m.operation}_${m.field}`;\n return `${m.operation}(${m.field}) AS \"${alias}\"`;\n })\n 
.join(', ');\n\n let query = `\n SELECT ${options.groupBy.map((g) => `\"${g}\"`).join(', ')}, ${metrics}\n FROM ${table}\n GROUP BY ${options.groupBy.map((g) => `\"${g}\"`).join(', ')}\n `;\n\n if (options.having) {\n const havingClauses = Object.entries(options.having).map(\n ([key, value], i) => {\n return `${key} ${typeof value === 'object' ? value.op : '='} $${i + 1}`;\n }\n );\n query += ` HAVING ${havingClauses.join(' AND ')}`;\n }\n\n const result = await client.query(\n query,\n Object.values(options.having || {})\n );\n return result.rows;\n } finally {\n this.releaseClient(client);\n }\n }\n\n // Pattern detection with analytics\n async detectPatterns(timeRange?: { start: Date; end: Date }): Promise<\n Array<{\n pattern: string;\n type: string;\n frequency: number;\n lastSeen: Date;\n }>\n > {\n const client = await this.getClient();\n\n try {\n // Use materialized view for better performance\n const result = await client.query(\n `\n SELECT \n COALESCE(error_pattern, pattern_type) as pattern,\n pattern_type as type,\n frequency,\n last_seen\n FROM pattern_summary\n WHERE project_id = $1\n AND ($2::timestamptz IS NULL OR last_seen >= $2)\n AND ($3::timestamptz IS NULL OR first_seen <= $3)\n ORDER BY frequency DESC, last_seen DESC\n LIMIT 100\n `,\n [this.projectId, timeRange?.start || null, timeRange?.end || null]\n );\n\n return result.rows.map((row) => ({\n pattern: row.pattern,\n type: row.type,\n frequency: row.frequency,\n lastSeen: row.last_seen,\n }));\n } finally {\n this.releaseClient(client);\n }\n }\n\n // Bulk operations\n async executeBulk(operations: BulkOperation[]): Promise<void> {\n await this.inTransaction(async () => {\n const client = this.activeClient!;\n\n for (const op of operations) {\n switch (op.type) {\n case 'insert': {\n const cols = Object.keys(op.data);\n const values = Object.values(op.data);\n const placeholders = values.map((_, i) => `$${i + 1}`).join(',');\n\n await client.query(\n `INSERT INTO ${op.table} (${cols.join(',')}) VALUES (${placeholders})`,\n values\n );\n break;\n }\n\n case 'update': {\n const sets = Object.keys(op.data)\n .map((k, i) => `${k} = $${i + 1}`)\n .join(',');\n const whereClause = this.buildWhereClausePostgres(\n op.where || {},\n Object.keys(op.data).length\n );\n const values = [\n ...Object.values(op.data),\n ...Object.values(op.where || {}),\n ];\n\n await client.query(\n `UPDATE ${op.table} SET ${sets} ${whereClause}`,\n values\n );\n break;\n }\n\n case 'delete': {\n const whereClause = this.buildWhereClausePostgres(\n op.where || {},\n 0\n );\n await client.query(\n `DELETE FROM ${op.table} ${whereClause}`,\n Object.values(op.where || {})\n );\n break;\n }\n }\n }\n });\n }\n\n async vacuum(): Promise<void> {\n const client = await this.getClient();\n\n try {\n await client.query('VACUUM ANALYZE frames');\n await client.query('VACUUM ANALYZE events');\n await client.query('VACUUM ANALYZE anchors');\n\n // Refresh materialized views\n await client.query(\n 'REFRESH MATERIALIZED VIEW CONCURRENTLY pattern_summary'\n );\n\n logger.info('ParadeDB vacuum and analyze completed');\n } finally {\n this.releaseClient(client);\n }\n }\n\n async analyze(): Promise<void> {\n const client = await this.getClient();\n\n try {\n await client.query('ANALYZE frames');\n await client.query('ANALYZE events');\n await client.query('ANALYZE anchors');\n logger.info('ParadeDB analyze completed');\n } finally {\n this.releaseClient(client);\n }\n }\n\n // Statistics\n async getStats(): Promise<DatabaseStats> {\n const client = await 
this.getClient();\n\n try {\n const result = await client.query(`\n SELECT\n (SELECT COUNT(*) FROM frames) as total_frames,\n (SELECT COUNT(*) FROM frames WHERE state = 'active') as active_frames,\n (SELECT COUNT(*) FROM events) as total_events,\n (SELECT COUNT(*) FROM anchors) as total_anchors,\n pg_database_size(current_database()) as disk_usage\n `);\n\n return {\n totalFrames: parseInt(result.rows[0].total_frames),\n activeFrames: parseInt(result.rows[0].active_frames),\n totalEvents: parseInt(result.rows[0].total_events),\n totalAnchors: parseInt(result.rows[0].total_anchors),\n diskUsage: parseInt(result.rows[0].disk_usage),\n };\n } finally {\n this.releaseClient(client);\n }\n }\n\n async getQueryStats(): Promise<\n Array<{\n query: string;\n calls: number;\n meanTime: number;\n totalTime: number;\n }>\n > {\n const client = await this.getClient();\n\n try {\n const result = await client.query(`\n SELECT \n query,\n calls,\n mean_exec_time as mean_time,\n total_exec_time as total_time\n FROM pg_stat_statements\n WHERE query NOT LIKE '%pg_stat_statements%'\n ORDER BY total_exec_time DESC\n LIMIT 100\n `);\n\n return result.rows.map((row) => ({\n query: row.query,\n calls: parseInt(row.calls),\n meanTime: parseFloat(row.mean_time),\n totalTime: parseFloat(row.total_time),\n }));\n } catch (error: unknown) {\n logger.warn('pg_stat_statements not available', error);\n return [];\n } finally {\n this.releaseClient(client);\n }\n }\n\n // Transaction support\n async beginTransaction(): Promise<void> {\n this.activeClient = await this.pool!.connect();\n await this.activeClient.query('BEGIN');\n }\n\n async commitTransaction(): Promise<void> {\n if (!this.activeClient)\n throw new DatabaseError(\n 'No active transaction',\n ErrorCode.DB_TRANSACTION_FAILED\n );\n\n await this.activeClient.query('COMMIT');\n this.activeClient.release();\n this.activeClient = null;\n }\n\n async rollbackTransaction(): Promise<void> {\n if (!this.activeClient)\n throw new DatabaseError(\n 'No active transaction',\n ErrorCode.DB_TRANSACTION_FAILED\n );\n\n await this.activeClient.query('ROLLBACK');\n this.activeClient.release();\n this.activeClient = null;\n }\n\n async inTransaction(\n callback: (adapter: DatabaseAdapter) => Promise<void>\n ): Promise<void> {\n await this.beginTransaction();\n\n try {\n await callback(this);\n await this.commitTransaction();\n } catch (error: unknown) {\n try {\n await this.rollbackTransaction();\n } catch (rollbackError: unknown) {\n // Log rollback failure but don't mask original error\n console.error('Transaction rollback failed:', rollbackError);\n // Connection might be in bad state - mark as unusable if connection pool exists\n if (this.connectionPool) {\n this.connectionPool.markConnectionAsBad(this.client);\n }\n }\n throw error;\n }\n }\n\n // Export/Import\n async exportData(\n tables: string[],\n format: 'json' | 'parquet' | 'csv'\n ): Promise<Buffer> {\n const client = await this.getClient();\n\n try {\n if (format === 'json') {\n const data: Record<string, any[]> = {};\n\n for (const table of tables) {\n const result = await client.query(`SELECT * FROM ${table}`);\n data[table] = result.rows;\n }\n\n return Buffer.from(JSON.stringify(data, null, 2));\n } else if (format === 'csv') {\n // Export as CSV using COPY\n const chunks: string[] = [];\n\n for (const table of tables) {\n const result = await client.query(`\n COPY (SELECT * FROM ${table}) TO STDOUT WITH CSV HEADER\n `);\n chunks.push(result.toString());\n }\n\n return Buffer.from(chunks.join('\\n\\n'));\n } 
else {\n throw new ValidationError(\n `Format ${format} not yet implemented for ParadeDB export`,\n ErrorCode.VALIDATION_FAILED,\n { format, supportedFormats: ['json'] }\n );\n }\n } finally {\n this.releaseClient(client);\n }\n }\n\n async importData(\n data: Buffer,\n format: 'json' | 'parquet' | 'csv',\n options?: { truncate?: boolean; upsert?: boolean }\n ): Promise<void> {\n const client = await this.getClient();\n\n try {\n if (format === 'json') {\n const parsed = JSON.parse(data.toString());\n\n await client.query('BEGIN');\n\n for (const [table, rows] of Object.entries(parsed)) {\n if (options?.truncate) {\n await client.query(`TRUNCATE TABLE ${table} CASCADE`);\n }\n\n for (const row of rows as any[]) {\n const cols = Object.keys(row);\n const values = Object.values(row);\n const placeholders = values.map((_, i) => `$${i + 1}`).join(',');\n\n if (options?.upsert) {\n const updates = cols.map((c) => `${c} = EXCLUDED.${c}`).join(',');\n await client.query(\n `INSERT INTO ${table} (${cols.join(',')}) VALUES (${placeholders})\n ON CONFLICT DO UPDATE SET ${updates}`,\n values\n );\n } else {\n await client.query(\n `INSERT INTO ${table} (${cols.join(',')}) VALUES (${placeholders})`,\n values\n );\n }\n }\n }\n\n await client.query('COMMIT');\n } else {\n throw new ValidationError(\n `Format ${format} not yet implemented for ParadeDB import`,\n ErrorCode.VALIDATION_FAILED,\n { format, supportedFormats: ['json'] }\n );\n }\n } catch (error: unknown) {\n await client.query('ROLLBACK');\n throw error;\n } finally {\n this.releaseClient(client);\n }\n }\n\n // Helper methods\n private async getClient(): Promise<PoolClient> {\n if (this.activeClient) {\n return this.activeClient;\n }\n\n if (!this.pool) {\n throw new DatabaseError(\n 'Database not connected',\n ErrorCode.DB_CONNECTION_FAILED\n );\n }\n\n return await this.pool.connect();\n }\n\n private releaseClient(client: PoolClient): void {\n if (client !== this.activeClient) {\n client.release();\n }\n }\n\n private buildWhereClausePostgres(\n conditions: Record<string, any>,\n startParam: number\n ): string {\n const clauses = Object.entries(conditions).map(([key, value], i) => {\n const paramNum = startParam + i + 1;\n\n if (value === null) {\n return `${key} IS NULL`;\n } else if (Array.isArray(value)) {\n const placeholders = value.map((_, j) => `$${paramNum + j}`).join(',');\n return `${key} IN (${placeholders})`;\n } else {\n return `${key} = $${paramNum}`;\n }\n });\n\n return clauses.length > 0 ? `WHERE ${clauses.join(' AND ')}` : '';\n }\n}\n"],
"mappings": ";;;;AAKA,SAAS,YAAwB;AACjC;AAAA,EACE;AAAA,OAOK;AAEP,SAAS,cAAc;AACvB,SAAS,eAAe,WAAW,uBAAuB;AAmBnD,MAAM,wBAAwB,4BAA4B;AAAA,EACvD,OAAoB;AAAA,EACpB,eAAkC;AAAA,EAE1C,YAAY,WAAmB,QAAwB;AACrD,UAAM,WAAW,MAAM;AAAA,EACzB;AAAA,EAEA,cAAgC;AAC9B,UAAM,SAAS,KAAK;AACpB,WAAO;AAAA,MACL,wBAAwB,OAAO,eAAe;AAAA,MAC9C,sBAAsB,OAAO,iBAAiB;AAAA,MAC9C,sBAAsB;AAAA,MACtB,mBAAmB,OAAO,oBAAoB;AAAA,MAC9C,qBAAqB;AAAA,MACrB,2BAA2B;AAAA,MAC3B,yBAAyB;AAAA,IAC3B;AAAA,EACF;AAAA,EAEA,MAAM,UAAyB;AAC7B,QAAI,KAAK,KAAM;AAEf,UAAM,SAAS,KAAK;AAEpB,SAAK,OAAO,IAAI,KAAK;AAAA,MACnB,kBAAkB,OAAO;AAAA,MACzB,MAAM,OAAO,QAAQ;AAAA,MACrB,MAAM,OAAO,QAAQ;AAAA,MACrB,UAAU,OAAO,YAAY;AAAA,MAC7B,MAAM,OAAO;AAAA,MACb,UAAU,OAAO;AAAA,MACjB,KAAK,OAAO;AAAA,MACZ,KAAK,OAAO,OAAO;AAAA,MACnB,mBAAmB,OAAO,qBAAqB;AAAA,MAC/C,yBAAyB,OAAO,2BAA2B;AAAA,MAC3D,mBAAmB,OAAO,oBAAoB;AAAA,IAChD,CAAC;AAGD,UAAM,SAAS,MAAM,KAAK,KAAK,QAAQ;AACvC,QAAI;AACF,YAAM,OAAO,MAAM,UAAU;AAC7B,aAAO,KAAK,iCAAiC;AAAA,IAC/C,UAAE;AACA,aAAO,QAAQ;AAAA,IACjB;AAAA,EACF;AAAA,EAEA,MAAM,aAA4B;AAChC,QAAI,CAAC,KAAK,KAAM;AAEhB,UAAM,KAAK,KAAK,IAAI;AACpB,SAAK,OAAO;AACZ,WAAO,KAAK,uBAAuB;AAAA,EACrC;AAAA,EAEA,cAAuB;AACrB,WAAO,KAAK,SAAS,QAAQ,CAAC,KAAK,KAAK;AAAA,EAC1C;AAAA,EAEA,MAAM,OAAyB;AAC7B,QAAI,CAAC,KAAK,KAAM,QAAO;AAEvB,QAAI;AACF,YAAM,SAAS,MAAM,KAAK,KAAK,QAAQ;AACvC,UAAI;AACF,cAAM,OAAO,MAAM,UAAU;AAC7B,eAAO;AAAA,MACT,UAAE;AACA,eAAO,QAAQ;AAAA,MACjB;AAAA,IACF,QAAQ;AACN,aAAO;AAAA,IACT;AAAA,EACF;AAAA,EAEA,MAAM,mBAAkC;AACtC,UAAM,SAAS,MAAM,KAAK,UAAU;AAEpC,QAAI;AACF,YAAM,OAAO,MAAM,OAAO;AAG1B,YAAM,OAAO,MAAM;AAAA;AAAA;AAAA;AAAA,OAIlB;AAGD,YAAM,SAAS,KAAK;AAEpB,UAAI,OAAO,eAAe,OAAO;AAC/B,cAAM,OAAO,MAAM,2CAA2C;AAAA,MAChE;AAEA,UAAI,OAAO,iBAAiB,OAAO;AACjC,cAAM,OAAO,MAAM,wCAAwC;AAAA,MAC7D;AAEA,UAAI,OAAO,oBAAoB,OAAO;AACpC,cAAM,OAAO,MAAM,8CAA8C;AAAA,MACnE;AAGA,YAAM,OAAO,MAAM;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,OA4DlB;AAGD,YAAM,OAAO,MAAM;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,OA8BlB;AAGD,UAAI,OAAO,eAAe,OAAO;AAC/B,cAAM,OAAO,MAAM;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,SAelB;AAAA,MACH;AAGA,UAAI,OAAO,iBAAiB,OAAO;AACjC,cAAM,OAAO,MAAM;AAAA;AAAA;AAAA;AAAA;AAAA,SAKlB;AAAA,MACH;AAGA,YAAM,OAAO,MAAM;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,OAoBlB;AAGD,YAAM,OAAO,MAAM;AAAA;AAAA;AAAA;AAAA,OAIlB;AAED,YAAM,OAAO,MAAM,QAAQ;AAC3B,aAAO,KAAK,0CAA0C;AAAA,IACxD,SAAS,OAAgB;AACvB,YAAM,OAAO,MAAM,UAAU;AAC7B,YAAM;AAAA,IACR,UAAE;AACA,WAAK,cAAc,MAAM;AAAA,IAC3B;AAAA,EACF;AAAA,EAEA,MAAM,cAAc,eAAsC;AACxD,UAAM,SAAS,MAAM,KAAK,UAAU;AAEpC,QAAI;AACF,YAAM,SAAS,MAAM,OAAO;AAAA,QAC1B;AAAA,MACF;AACA,YAAM,iBAAiB,OAAO,KAAK,CAAC,GAAG,WAAW;AAElD,UAAI,kBAAkB,eAAe;AACnC,eAAO,KAAK,oCAAoC;AAAA,UAC9C;AAAA,UACA;AAAA,QACF,CAAC;AACD;AAAA,MACF;AAGA,eAAS,IAAI,iBAAiB,GAAG,KAAK,eAAe,KAAK;AACxD,eAAO,KAAK,iCAAiC,CAAC,EAAE;AAEhD,cAAM,OAAO;AAAA,UACX;AAAA,UACA,CAAC,GAAG,wBAAwB,CAAC,EAAE;AAAA,QACjC;AAAA,MACF;AAAA,IACF,UAAE;AACA,WAAK,cAAc,MAAM;AAAA,IAC3B;AAAA,EACF;AAAA,EAEA,MAAM,mBAAoC;AACxC,UAAM,SAAS,MAAM,KAAK,UAAU;AAEpC,QAAI;AACF,YAAM,SAAS,MAAM,OAAO;AAAA,QAC1B;AAAA,MACF;AACA,aAAO,OAAO,KAAK,CAAC,GAAG,WAAW;AAAA,IACpC,UAAE;AACA,WAAK,cAAc,MAAM;AAAA,IAC3B;AAAA,EACF;AAAA;AAAA,EAGA,MAAM,YAAY,OAAwC;AACxD,UAAM,SAAS,MAAM,KAAK,UAAU;AAEpC
,QAAI;AACF,YAAM,SAAS,MAAM,OAAO;AAAA,QAC1B;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,QAUA;AAAA,UACE,MAAM,YAAY;AAAA,UAClB,MAAM;AAAA,UACN,MAAM,cAAc,KAAK;AAAA,UACzB,MAAM,mBAAmB;AAAA,UACzB,MAAM,SAAS;AAAA,UACf,MAAM;AAAA,UACN,MAAM;AAAA,UACN,MAAM,SAAS;AAAA,UACf,MAAM,SAAS;AAAA,UACf,KAAK,UAAU,MAAM,UAAU,CAAC,CAAC;AAAA,UACjC,KAAK,UAAU,MAAM,WAAW,CAAC,CAAC;AAAA,UAClC,KAAK,UAAU,MAAM,YAAY,CAAC,CAAC;AAAA,UACnC,MAAM,eAAe;AAAA,UACrB,KAAK,UAAU,MAAM,eAAe,CAAC,CAAC;AAAA,UACtC,MAAM,WAAW,GAAG,MAAM,IAAI,IAAI,MAAM,eAAe,EAAE;AAAA,QAC3D;AAAA,MACF;AAEA,aAAO,OAAO,KAAK,CAAC,EAAE;AAAA,IACxB,UAAE;AACA,WAAK,cAAc,MAAM;AAAA,IAC3B;AAAA,EACF;AAAA,EAEA,MAAM,SAAS,SAAwC;AACrD,UAAM,SAAS,MAAM,KAAK,UAAU;AAEpC,QAAI;AACF,YAAM,SAAS,MAAM,OAAO;AAAA,QAC1B;AAAA,QACA,CAAC,OAAO;AAAA,MACV;AAEA,UAAI,OAAO,KAAK,WAAW,EAAG,QAAO;AAErC,YAAM,MAAM,OAAO,KAAK,CAAC;AACzB,aAAO;AAAA,QACL,GAAG;AAAA,QACH,UAAU,IAAI;AAAA,QACd,QAAQ,IAAI;AAAA,QACZ,YAAY,IAAI,WAAW,QAAQ;AAAA,QACnC,WAAW,IAAI,WAAW,QAAQ;AAAA,MACpC;AAAA,IACF,UAAE;AACA,WAAK,cAAc,MAAM;AAAA,IAC3B;AAAA,EACF;AAAA,EAEA,MAAM,YAAY,SAAiB,SAAwC;AACzE,UAAM,SAAS,MAAM,KAAK,UAAU;AAEpC,QAAI;AACF,YAAM,SAAS,CAAC;AAChB,YAAM,SAAS,CAAC;AAChB,UAAI,aAAa;AAEjB,UAAI,QAAQ,UAAU,QAAW;AAC/B,eAAO,KAAK,YAAY,YAAY,EAAE;AACtC,eAAO,KAAK,QAAQ,KAAK;AAAA,MAC3B;AAEA,UAAI,QAAQ,YAAY,QAAW;AACjC,eAAO,KAAK,cAAc,YAAY,EAAE;AACxC,eAAO,KAAK,KAAK,UAAU,QAAQ,OAAO,CAAC;AAAA,MAC7C;AAEA,UAAI,QAAQ,UAAU,QAAW;AAC/B,eAAO,KAAK,YAAY,YAAY,EAAE;AACtC,eAAO,KAAK,QAAQ,KAAK;AAAA,MAC3B;AAEA,UAAI,QAAQ,gBAAgB,QAAW;AACrC,eAAO,KAAK,kBAAkB,YAAY,EAAE;AAC5C,eAAO,KAAK,QAAQ,WAAW;AAAA,MACjC;AAEA,UAAI,QAAQ,gBAAgB,QAAW;AACrC,eAAO,KAAK,kBAAkB,YAAY,EAAE;AAC5C,eAAO,KAAK,KAAK,UAAU,QAAQ,WAAW,CAAC;AAAA,MACjD;AAEA,UAAI,QAAQ,cAAc,QAAW;AACnC,eAAO,KAAK,gBAAgB,YAAY,EAAE;AAC1C,eAAO,KAAK,IAAI,KAAK,QAAQ,SAAS,CAAC;AAAA,MACzC;AAEA,UAAI,OAAO,WAAW,EAAG;AAEzB,aAAO,KAAK,OAAO;AAEnB,YAAM,OAAO;AAAA,QACX;AAAA,4BACoB,OAAO,KAAK,IAAI,CAAC,sBAAsB,UAAU;AAAA;AAAA,QAErE;AAAA,MACF;AAAA,IACF,UAAE;AACA,WAAK,cAAc,MAAM;AAAA,IAC3B;AAAA,EACF;AAAA,EAEA,MAAM,YAAY,SAAgC;AAChD,UAAM,SAAS,MAAM,KAAK,UAAU;AAEpC,QAAI;AAEF,YAAM,OAAO,MAAM,0CAA0C,CAAC,OAAO,CAAC;AAAA,IACxE,UAAE;AACA,WAAK,cAAc,MAAM;AAAA,IAC3B;AAAA,EACF;AAAA,EAEA,MAAM,gBAAgB,OAAkC;AACtD,UAAM,SAAS,MAAM,KAAK,UAAU;AAEpC,QAAI;AACF,UAAI,QAAQ;AACZ,YAAM,SAAgB,CAAC,QAAQ;AAE/B,UAAI,OAAO;AACT,iBAAS;AACT,eAAO,KAAK,KAAK;AAAA,MACnB;AAEA,eAAS;AAET,YAAM,SAAS,MAAM,OAAO,MAAM,OAAO,MAAM;AAE/C,aAAO,OAAO,KAAK,IAAI,CAAC,SAAS;AAAA,QAC/B,GAAG;AAAA,QACH,YAAY,IAAI,WAAW,QAAQ;AAAA,QACnC,WAAW,IAAI,WAAW,QAAQ;AAAA,MACpC,EAAE;AAAA,IACJ,UAAE;AACA,WAAK,cAAc,MAAM;AAAA,IAC3B;AAAA,EACF;AAAA,EAEA,MAAM,WAAW,SAAiB,SAA8B;AAC9D,UAAM,KAAK,YAAY,SAAS;AAAA,MAC9B,OAAO;AAAA,MACP;AAAA,MACA,WAAW,KAAK,IAAI;AAAA,IACtB,CAAC;AAAA,EACH;AAAA;AAAA,EAGA,MAAM,YAAY,OAAwC;AACxD,UAAM,SAAS,MAAM,KAAK,UAAU;AAEpC,QAAI;AACF,YAAM,SAAS,MAAM,OAAO;AAAA,QAC1B;AAAA;AAAA;AAAA;AAAA;AAAA,QAKA;AAAA,UACE,MAAM,YAAY;AAAA,UAClB,MAAM;AAAA,UACN,MAAM;AAAA,UACN,MAAM,OAAO;AAAA,UACb,MAAM;AAAA,UACN,KAAK,UAAU,MAAM,WAAW,CAAC,CAAC;AAAA,UAClC,MAAM,KAAK,IAAI,KAAK,MAAM,EAAE,IAAI,oBAAI,KAAK;AAAA,QAC3C;AAAA,MACF;AAEA,aAAO,OAAO,KAAK,CAAC,EAAE;AAAA,IACxB,UAAE;AACA,WAAK,cAAc,MAAM;AAAA,IAC3B;AAAA,EACF;AAAA,EAEA,MAAM,eACJ,SACA,SACkB;AAClB,UAAM,SAAS,MAAM,KAAK,UAAU;AAEpC,QAAI;AACF,UAAI,QAAQ;AACZ,YAAM,SAAgB,CAAC,OAAO;AAE9B,eAAS,KAAK;AAAA,QACZ,SAAS,WAAW;AAAA,QACpB,SAAS;AAAA,MACX;AACA,eAAS,KAAK,iBAAiB,SAAS,OAAO,SAAS,MAAM;AAE9D,YAAM,SAAS,MAAM,OAAO,MAAM,OAAO,MAAM;AAE/C,aAAO,OAAO,KAAK,IAAI,CAAC,SAAS;AAAA,QAC/B,GAAG;AAAA,QACH,IAAI,IAAI,GAAG,QAAQ;AAAA,MACrB,EAAE;AAAA,IACJ,UAAE;AACA,WAAK,cAAc,MAAM;AAAA,IAC3B;AAAA,EACF;AAAA,EAEA,MAAM,kBAAkB,SAAgC;AACtD,UAAM,SAAS,MAAM,
KAAK,UAAU;AAEpC,QAAI;AACF,YAAM,OAAO,MAAM,0CAA0C,CAAC,OAAO,CAAC;AAAA,IACxE,UAAE;AACA,WAAK,cAAc,MAAM;AAAA,IAC3B;AAAA,EACF;AAAA;AAAA,EAGA,MAAM,aAAa,QAA0C;AAC3D,UAAM,SAAS,MAAM,KAAK,UAAU;AAEpC,QAAI;AACF,YAAM,SAAS,MAAM,OAAO;AAAA,QAC1B;AAAA;AAAA;AAAA;AAAA;AAAA,QAKA;AAAA,UACE,OAAO,aAAa;AAAA,UACpB,OAAO;AAAA,UACP,OAAO,cAAc,KAAK;AAAA,UAC1B,OAAO;AAAA,UACP,OAAO;AAAA,UACP,OAAO,YAAY;AAAA,UACnB,KAAK,UAAU,OAAO,YAAY,CAAC,CAAC;AAAA,QACtC;AAAA,MACF;AAEA,aAAO,OAAO,KAAK,CAAC,EAAE;AAAA,IACxB,UAAE;AACA,WAAK,cAAc,MAAM;AAAA,IAC3B;AAAA,EACF;AAAA,EAEA,MAAM,gBAAgB,SAAoC;AACxD,UAAM,SAAS,MAAM,KAAK,UAAU;AAEpC,QAAI;AACF,YAAM,SAAS,MAAM,OAAO;AAAA,QAC1B;AAAA;AAAA;AAAA;AAAA,QAIA,CAAC,OAAO;AAAA,MACV;AAEA,aAAO,OAAO,KAAK,IAAI,CAAC,SAAS;AAAA,QAC/B,GAAG;AAAA,QACH,YAAY,IAAI,WAAW,QAAQ;AAAA,MACrC,EAAE;AAAA,IACJ,UAAE;AACA,WAAK,cAAc,MAAM;AAAA,IAC3B;AAAA,EACF;AAAA,EAEA,MAAM,mBAAmB,SAAgC;AACvD,UAAM,SAAS,MAAM,KAAK,UAAU;AAEpC,QAAI;AACF,YAAM,OAAO,MAAM,2CAA2C,CAAC,OAAO,CAAC;AAAA,IACzE,UAAE;AACA,WAAK,cAAc,MAAM;AAAA,IAC3B;AAAA,EACF;AAAA;AAAA,EAGA,MAAM,OACJ,SAC2C;AAC3C,UAAM,SAAS,MAAM,KAAK,UAAU;AAEpC,QAAI;AACF,YAAM,SAAS,KAAK;AAEpB,UAAI,OAAO,eAAe,OAAO;AAE/B,cAAM,SAAS,MAAM,OAAO;AAAA,UAC1B;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,UAWA;AAAA,YACE,QAAQ;AAAA,YACR,QAAQ,SAAS;AAAA,YACjB,QAAQ,UAAU;AAAA,YAClB,QAAQ,kBAAkB;AAAA,UAC5B;AAAA,QACF;AAEA,eAAO,OAAO,KAAK,IAAI,CAAC,SAAS;AAAA,UAC/B,GAAG;AAAA,UACH,YAAY,IAAI,WAAW,QAAQ;AAAA,UACnC,WAAW,IAAI,WAAW,QAAQ;AAAA,QACpC,EAAE;AAAA,MACJ,OAAO;AAEL,cAAM,SAAS,MAAM,OAAO;AAAA,UAC1B;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,UAYA,CAAC,QAAQ,OAAO,QAAQ,SAAS,KAAK,QAAQ,UAAU,CAAC;AAAA,QAC3D;AAEA,eAAO,OAAO,KAAK,IAAI,CAAC,SAAS;AAAA,UAC/B,GAAG;AAAA,UACH,YAAY,IAAI,WAAW,QAAQ;AAAA,UACnC,WAAW,IAAI,WAAW,QAAQ;AAAA,QACpC,EAAE;AAAA,MACJ;AAAA,IACF,UAAE;AACA,WAAK,cAAc,MAAM;AAAA,IAC3B;AAAA,EACF;AAAA;AAAA,EAGA,MAAM,eACJ,WACA,SACgD;AAChD,UAAM,SAAS,MAAM,KAAK,UAAU;AAEpC,QAAI;AACF,YAAM,SAAS,KAAK;AAEpB,UAAI,OAAO,iBAAiB,OAAO;AACjC,eAAO,KAAK,qDAAqD;AACjE,eAAO,CAAC;AAAA,MACV;AAEA,YAAM,SAAS,MAAM,OAAO;AAAA,QAC1B;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,QAQA;AAAA,UACE,IAAI,UAAU,KAAK,GAAG,CAAC;AAAA,UACvB,SAAS,SAAS;AAAA,UAClB,SAAS,UAAU;AAAA,QACrB;AAAA,MACF;AAEA,aAAO,OAAO,KAAK,IAAI,CAAC,SAAS;AAAA,QAC/B,GAAG;AAAA,QACH,YAAY,IAAI,WAAW,QAAQ;AAAA,QACnC,WAAW,IAAI,WAAW,QAAQ;AAAA,MACpC,EAAE;AAAA,IACJ,UAAE;AACA,WAAK,cAAc,MAAM;AAAA,IAC3B;AAAA,EACF;AAAA;AAAA,EAGA,MAAM,aACJ,WACA,WACA,SAC2C;AAC3C,UAAM,SAAS,MAAM,KAAK,UAAU;AAEpC,QAAI;AACF,YAAM,aAAa,SAAS,QAAQ;AACpC,YAAM,eAAe,SAAS,UAAU;AAExC,YAAM,SAAS,MAAM,OAAO;AAAA,QAC1B;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,QA0BA,CAAC,WAAW,IAAI,UAAU,KAAK,GAAG,CAAC,KAAK,YAAY,cAAc,GAAG;AAAA,MACvE;AAEA,aAAO,OAAO,KAAK,IAAI,CAAC,SAAS;AAAA,QAC/B,GAAG;AAAA,QACH,YAAY,IAAI,WAAW,QAAQ;AAAA,QACnC,WAAW,IAAI,WAAW,QAAQ;AAAA,MACpC,EAAE;AAAA,IACJ,UAAE;AACA,WAAK,cAAc,MAAM;AAAA,IAC3B;AAAA,EACF;AAAA;AAAA,EAGA,MAAM,UACJ,OACA,SACgC;AAChC,UAAM,SAAS,MAAM,KAAK,UAAU;AAEpC,QAAI;AACF,YAAM,UAAU,QAAQ,QACrB,IAAI,CAAC,MAAM;AACV,cAAM,QAAQ,EAAE,SAAS,GAAG,EAAE,SAAS,IAAI,EAAE,KAAK;AAClD,eAAO,GAAG,EAAE,SAAS,IAAI,EAAE,KAAK,SAAS,KAAK;AAAA,MAChD,CAAC,EACA,KAAK,IAAI;AAEZ,UAAI,QAAQ;AAAA,iBACD,QAAQ,QAAQ,IAAI,CAAC,MAAM,IAAI,CAAC,GAAG,EAAE,KAAK,IAAI,CAAC,KAAK,OAAO;AAAA,eAC7D,KAAK;AAAA,mBACD,QAAQ,QAAQ,IAAI,CAAC,MAAM,IAAI,CAAC,GAAG,EAAE,KAAK,IAAI,CAAC;AAAA;AAG5D,UAAI,QAAQ,QAAQ;AAClB,cAAM,gBAAgB,OAAO,QAAQ,QAAQ,MAAM,EAAE;AAAA,UACnD,CAAC,CAAC,KAAK,KAAK,GAAG,MAAM;AACnB,mBAAO,GAAG,GAAG,IAAI,OAAO,UAAU,WAAW,MAAM,KAAK,GAAG,KAAK,IAAI,CAAC;AAAA,UACvE;AAAA,QACF;AACA,iBAAS,WAAW
,cAAc,KAAK,OAAO,CAAC;AAAA,MACjD;AAEA,YAAM,SAAS,MAAM,OAAO;AAAA,QAC1B;AAAA,QACA,OAAO,OAAO,QAAQ,UAAU,CAAC,CAAC;AAAA,MACpC;AACA,aAAO,OAAO;AAAA,IAChB,UAAE;AACA,WAAK,cAAc,MAAM;AAAA,IAC3B;AAAA,EACF;AAAA;AAAA,EAGA,MAAM,eAAe,WAOnB;AACA,UAAM,SAAS,MAAM,KAAK,UAAU;AAEpC,QAAI;AAEF,YAAM,SAAS,MAAM,OAAO;AAAA,QAC1B;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,QAaA,CAAC,KAAK,WAAW,WAAW,SAAS,MAAM,WAAW,OAAO,IAAI;AAAA,MACnE;AAEA,aAAO,OAAO,KAAK,IAAI,CAAC,SAAS;AAAA,QAC/B,SAAS,IAAI;AAAA,QACb,MAAM,IAAI;AAAA,QACV,WAAW,IAAI;AAAA,QACf,UAAU,IAAI;AAAA,MAChB,EAAE;AAAA,IACJ,UAAE;AACA,WAAK,cAAc,MAAM;AAAA,IAC3B;AAAA,EACF;AAAA;AAAA,EAGA,MAAM,YAAY,YAA4C;AAC5D,UAAM,KAAK,cAAc,YAAY;AACnC,YAAM,SAAS,KAAK;AAEpB,iBAAW,MAAM,YAAY;AAC3B,gBAAQ,GAAG,MAAM;AAAA,UACf,KAAK,UAAU;AACb,kBAAM,OAAO,OAAO,KAAK,GAAG,IAAI;AAChC,kBAAM,SAAS,OAAO,OAAO,GAAG,IAAI;AACpC,kBAAM,eAAe,OAAO,IAAI,CAAC,GAAG,MAAM,IAAI,IAAI,CAAC,EAAE,EAAE,KAAK,GAAG;AAE/D,kBAAM,OAAO;AAAA,cACX,eAAe,GAAG,KAAK,KAAK,KAAK,KAAK,GAAG,CAAC,aAAa,YAAY;AAAA,cACnE;AAAA,YACF;AACA;AAAA,UACF;AAAA,UAEA,KAAK,UAAU;AACb,kBAAM,OAAO,OAAO,KAAK,GAAG,IAAI,EAC7B,IAAI,CAAC,GAAG,MAAM,GAAG,CAAC,OAAO,IAAI,CAAC,EAAE,EAChC,KAAK,GAAG;AACX,kBAAM,cAAc,KAAK;AAAA,cACvB,GAAG,SAAS,CAAC;AAAA,cACb,OAAO,KAAK,GAAG,IAAI,EAAE;AAAA,YACvB;AACA,kBAAM,SAAS;AAAA,cACb,GAAG,OAAO,OAAO,GAAG,IAAI;AAAA,cACxB,GAAG,OAAO,OAAO,GAAG,SAAS,CAAC,CAAC;AAAA,YACjC;AAEA,kBAAM,OAAO;AAAA,cACX,UAAU,GAAG,KAAK,QAAQ,IAAI,IAAI,WAAW;AAAA,cAC7C;AAAA,YACF;AACA;AAAA,UACF;AAAA,UAEA,KAAK,UAAU;AACb,kBAAM,cAAc,KAAK;AAAA,cACvB,GAAG,SAAS,CAAC;AAAA,cACb;AAAA,YACF;AACA,kBAAM,OAAO;AAAA,cACX,eAAe,GAAG,KAAK,IAAI,WAAW;AAAA,cACtC,OAAO,OAAO,GAAG,SAAS,CAAC,CAAC;AAAA,YAC9B;AACA;AAAA,UACF;AAAA,QACF;AAAA,MACF;AAAA,IACF,CAAC;AAAA,EACH;AAAA,EAEA,MAAM,SAAwB;AAC5B,UAAM,SAAS,MAAM,KAAK,UAAU;AAEpC,QAAI;AACF,YAAM,OAAO,MAAM,uBAAuB;AAC1C,YAAM,OAAO,MAAM,uBAAuB;AAC1C,YAAM,OAAO,MAAM,wBAAwB;AAG3C,YAAM,OAAO;AAAA,QACX;AAAA,MACF;AAEA,aAAO,KAAK,uCAAuC;AAAA,IACrD,UAAE;AACA,WAAK,cAAc,MAAM;AAAA,IAC3B;AAAA,EACF;AAAA,EAEA,MAAM,UAAyB;AAC7B,UAAM,SAAS,MAAM,KAAK,UAAU;AAEpC,QAAI;AACF,YAAM,OAAO,MAAM,gBAAgB;AACnC,YAAM,OAAO,MAAM,gBAAgB;AACnC,YAAM,OAAO,MAAM,iBAAiB;AACpC,aAAO,KAAK,4BAA4B;AAAA,IAC1C,UAAE;AACA,WAAK,cAAc,MAAM;AAAA,IAC3B;AAAA,EACF;AAAA;AAAA,EAGA,MAAM,WAAmC;AACvC,UAAM,SAAS,MAAM,KAAK,UAAU;AAEpC,QAAI;AACF,YAAM,SAAS,MAAM,OAAO,MAAM;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,OAOjC;AAED,aAAO;AAAA,QACL,aAAa,SAAS,OAAO,KAAK,CAAC,EAAE,YAAY;AAAA,QACjD,cAAc,SAAS,OAAO,KAAK,CAAC,EAAE,aAAa;AAAA,QACnD,aAAa,SAAS,OAAO,KAAK,CAAC,EAAE,YAAY;AAAA,QACjD,cAAc,SAAS,OAAO,KAAK,CAAC,EAAE,aAAa;AAAA,QACnD,WAAW,SAAS,OAAO,KAAK,CAAC,EAAE,UAAU;AAAA,MAC/C;AAAA,IACF,UAAE;AACA,WAAK,cAAc,MAAM;AAAA,IAC3B;AAAA,EACF;AAAA,EAEA,MAAM,gBAOJ;AACA,UAAM,SAAS,MAAM,KAAK,UAAU;AAEpC,QAAI;AACF,YAAM,SAAS,MAAM,OAAO,MAAM;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,OAUjC;AAED,aAAO,OAAO,KAAK,IAAI,CAAC,SAAS;AAAA,QAC/B,OAAO,IAAI;AAAA,QACX,OAAO,SAAS,IAAI,KAAK;AAAA,QACzB,UAAU,WAAW,IAAI,SAAS;AAAA,QAClC,WAAW,WAAW,IAAI,UAAU;AAAA,MACtC,EAAE;AAAA,IACJ,SAAS,OAAgB;AACvB,aAAO,KAAK,oCAAoC,KAAK;AACrD,aAAO,CAAC;AAAA,IACV,UAAE;AACA,WAAK,cAAc,MAAM;AAAA,IAC3B;AAAA,EACF;AAAA;AAAA,EAGA,MAAM,mBAAkC;AACtC,SAAK,eAAe,MAAM,KAAK,KAAM,QAAQ;AAC7C,UAAM,KAAK,aAAa,MAAM,OAAO;AAAA,EACvC;AAAA,EAEA,MAAM,oBAAmC;AACvC,QAAI,CAAC,KAAK;AACR,YAAM,IAAI;AAAA,QACR;AAAA,QACA,UAAU;AAAA,MACZ;AAEF,UAAM,KAAK,aAAa,MAAM,QAAQ;AACtC,SAAK,aAAa,QAAQ;AAC1B,SAAK,eAAe;AAAA,EACtB;AAAA,EAEA,MAAM,sBAAqC;AACzC,QAAI,CAAC,KAAK;AACR,YAAM,IAAI;AAAA,QACR;AAAA,QACA,UAAU;AAAA,MACZ;AAEF,UAAM,KAAK,aAAa,MAAM,UAAU;AACxC,SAAK,aAAa,QAAQ;AAC1B,SAAK,eAAe;AAAA,EACtB;AAAA,EAEA,MAAM,cACJ,UACe;AACf,UAAM,KAAK,iBAAiB;AA
E5B,QAAI;AACF,YAAM,SAAS,IAAI;AACnB,YAAM,KAAK,kBAAkB;AAAA,IAC/B,SAAS,OAAgB;AACvB,UAAI;AACF,cAAM,KAAK,oBAAoB;AAAA,MACjC,SAAS,eAAwB;AAE/B,gBAAQ,MAAM,gCAAgC,aAAa;AAE3D,YAAI,KAAK,gBAAgB;AACvB,eAAK,eAAe,oBAAoB,KAAK,MAAM;AAAA,QACrD;AAAA,MACF;AACA,YAAM;AAAA,IACR;AAAA,EACF;AAAA;AAAA,EAGA,MAAM,WACJ,QACA,QACiB;AACjB,UAAM,SAAS,MAAM,KAAK,UAAU;AAEpC,QAAI;AACF,UAAI,WAAW,QAAQ;AACrB,cAAM,OAA8B,CAAC;AAErC,mBAAW,SAAS,QAAQ;AAC1B,gBAAM,SAAS,MAAM,OAAO,MAAM,iBAAiB,KAAK,EAAE;AAC1D,eAAK,KAAK,IAAI,OAAO;AAAA,QACvB;AAEA,eAAO,OAAO,KAAK,KAAK,UAAU,MAAM,MAAM,CAAC,CAAC;AAAA,MAClD,WAAW,WAAW,OAAO;AAE3B,cAAM,SAAmB,CAAC;AAE1B,mBAAW,SAAS,QAAQ;AAC1B,gBAAM,SAAS,MAAM,OAAO,MAAM;AAAA,kCACV,KAAK;AAAA,WAC5B;AACD,iBAAO,KAAK,OAAO,SAAS,CAAC;AAAA,QAC/B;AAEA,eAAO,OAAO,KAAK,OAAO,KAAK,MAAM,CAAC;AAAA,MACxC,OAAO;AACL,cAAM,IAAI;AAAA,UACR,UAAU,MAAM;AAAA,UAChB,UAAU;AAAA,UACV,EAAE,QAAQ,kBAAkB,CAAC,MAAM,EAAE;AAAA,QACvC;AAAA,MACF;AAAA,IACF,UAAE;AACA,WAAK,cAAc,MAAM;AAAA,IAC3B;AAAA,EACF;AAAA,EAEA,MAAM,WACJ,MACA,QACA,SACe;AACf,UAAM,SAAS,MAAM,KAAK,UAAU;AAEpC,QAAI;AACF,UAAI,WAAW,QAAQ;AACrB,cAAM,SAAS,KAAK,MAAM,KAAK,SAAS,CAAC;AAEzC,cAAM,OAAO,MAAM,OAAO;AAE1B,mBAAW,CAAC,OAAO,IAAI,KAAK,OAAO,QAAQ,MAAM,GAAG;AAClD,cAAI,SAAS,UAAU;AACrB,kBAAM,OAAO,MAAM,kBAAkB,KAAK,UAAU;AAAA,UACtD;AAEA,qBAAW,OAAO,MAAe;AAC/B,kBAAM,OAAO,OAAO,KAAK,GAAG;AAC5B,kBAAM,SAAS,OAAO,OAAO,GAAG;AAChC,kBAAM,eAAe,OAAO,IAAI,CAAC,GAAG,MAAM,IAAI,IAAI,CAAC,EAAE,EAAE,KAAK,GAAG;AAE/D,gBAAI,SAAS,QAAQ;AACnB,oBAAM,UAAU,KAAK,IAAI,CAAC,MAAM,GAAG,CAAC,eAAe,CAAC,EAAE,EAAE,KAAK,GAAG;AAChE,oBAAM,OAAO;AAAA,gBACX,eAAe,KAAK,KAAK,KAAK,KAAK,GAAG,CAAC,aAAa,YAAY;AAAA,6CACnC,OAAO;AAAA,gBACpC;AAAA,cACF;AAAA,YACF,OAAO;AACL,oBAAM,OAAO;AAAA,gBACX,eAAe,KAAK,KAAK,KAAK,KAAK,GAAG,CAAC,aAAa,YAAY;AAAA,gBAChE;AAAA,cACF;AAAA,YACF;AAAA,UACF;AAAA,QACF;AAEA,cAAM,OAAO,MAAM,QAAQ;AAAA,MAC7B,OAAO;AACL,cAAM,IAAI;AAAA,UACR,UAAU,MAAM;AAAA,UAChB,UAAU;AAAA,UACV,EAAE,QAAQ,kBAAkB,CAAC,MAAM,EAAE;AAAA,QACvC;AAAA,MACF;AAAA,IACF,SAAS,OAAgB;AACvB,YAAM,OAAO,MAAM,UAAU;AAC7B,YAAM;AAAA,IACR,UAAE;AACA,WAAK,cAAc,MAAM;AAAA,IAC3B;AAAA,EACF;AAAA;AAAA,EAGA,MAAc,YAAiC;AAC7C,QAAI,KAAK,cAAc;AACrB,aAAO,KAAK;AAAA,IACd;AAEA,QAAI,CAAC,KAAK,MAAM;AACd,YAAM,IAAI;AAAA,QACR;AAAA,QACA,UAAU;AAAA,MACZ;AAAA,IACF;AAEA,WAAO,MAAM,KAAK,KAAK,QAAQ;AAAA,EACjC;AAAA,EAEQ,cAAc,QAA0B;AAC9C,QAAI,WAAW,KAAK,cAAc;AAChC,aAAO,QAAQ;AAAA,IACjB;AAAA,EACF;AAAA,EAEQ,yBACN,YACA,YACQ;AACR,UAAM,UAAU,OAAO,QAAQ,UAAU,EAAE,IAAI,CAAC,CAAC,KAAK,KAAK,GAAG,MAAM;AAClE,YAAM,WAAW,aAAa,IAAI;AAElC,UAAI,UAAU,MAAM;AAClB,eAAO,GAAG,GAAG;AAAA,MACf,WAAW,MAAM,QAAQ,KAAK,GAAG;AAC/B,cAAM,eAAe,MAAM,IAAI,CAAC,GAAG,MAAM,IAAI,WAAW,CAAC,EAAE,EAAE,KAAK,GAAG;AACrE,eAAO,GAAG,GAAG,QAAQ,YAAY;AAAA,MACnC,OAAO;AACL,eAAO,GAAG,GAAG,OAAO,QAAQ;AAAA,MAC9B;AAAA,IACF,CAAC;AAED,WAAO,QAAQ,SAAS,IAAI,SAAS,QAAQ,KAAK,OAAO,CAAC,KAAK;AAAA,EACjE;AACF;",
"names": []
}
@@ -1,7 +1,7 @@
{
"version": 3,
"sources": ["../../../src/core/database/query-router.ts"],
-
"sourcesContent": ["/**\n * Query Router for Tiered Storage\n * Routes database queries to appropriate storage tier based on data age, query type, and performance requirements\n */\n\nimport { DatabaseAdapter } from './database-adapter.js';\nimport type { Frame } from '../context/frame-manager.js';\nimport { logger } from '../monitoring/logger.js';\nimport { DatabaseError, ErrorCode } from '../errors/index.js';\nimport { EventEmitter } from 'events';\n\nexport interface StorageTier {\n name: string;\n adapter: DatabaseAdapter;\n priority: number;\n config: TierConfig;\n}\n\nexport interface TierConfig {\n // Data age thresholds\n maxAge?: number; // Data older than this goes to next tier (ms)\n minAge?: number; // Data newer than this stays in this tier (ms)\n\n // Query type preferences\n preferredOperations: string[]; // ['read', 'write', 'search', 'analytics']\n supportedFeatures: string[]; // ['full_text', 'vector', 'aggregation']\n\n // Performance characteristics\n maxLatency?: number; // Max acceptable latency for this tier (ms)\n maxThroughput?: number; // Max queries per second this tier can handle\n\n // Capacity limits\n maxFrames?: number; // Max frames before promoting to next tier\n maxSizeMB?: number; // Max storage size in MB\n\n // Routing rules\n routingRules: RoutingRule[];\n}\n\nexport interface RoutingRule {\n condition: string; // 'age' | 'size' | 'query_type' | 'load' | 'feature'\n operator: string; // '>', '<', '=', '!=', 'in', 'not_in'\n value: any; // Comparison value\n weight: number; // Rule weight (0-1)\n}\n\nexport interface QueryContext {\n queryType: 'read' | 'write' | 'search' | 'analytics' | 'bulk';\n frames?: Frame[];\n frameIds?: string[];\n requiredFeatures?: string[];\n timeRange?: { start: Date; end: Date };\n priority?: 'low' | 'medium' | 'high' | 'critical';\n timeout?: number;\n cacheStrategy?: 'none' | 'read' | 'write' | 'read_write';\n}\n\nexport interface RoutingDecision {\n primaryTier: StorageTier;\n fallbackTiers: StorageTier[];\n rationale: string;\n confidence: number; // 0-1 confidence in decision\n estimatedLatency: number; // Estimated query latency (ms)\n cacheRecommendation?: string;\n}\n\nexport interface QueryMetrics {\n totalQueries: number;\n queriesByTier: Map<string, number>;\n queriesByType: Map<string, number>;\n averageLatency: number;\n latencyByTier: Map<string, number>;\n errorsByTier: Map<string, number>;\n cacheHitRate: number;\n routingDecisions: number;\n}\n\nexport class QueryRouter extends EventEmitter {\n private tiers: Map<string, StorageTier> = new Map();\n private metrics: QueryMetrics;\n private decisionCache: Map<string, RoutingDecision> = new Map();\n private readonly cacheExpiration = 60000; // 1 minute\n private readonly maxCacheSize = 1000;\n\n constructor() {\n super();\n this.metrics = {\n totalQueries: 0,\n queriesByTier: new Map(),\n queriesByType: new Map(),\n averageLatency: 0,\n latencyByTier: new Map(),\n errorsByTier: new Map(),\n cacheHitRate: 0,\n routingDecisions: 0,\n };\n }\n\n /**\n * Register a storage tier with the router\n */\n registerTier(tier: StorageTier): void {\n this.tiers.set(tier.name, tier);\n logger.info(\n `Registered storage tier: ${tier.name} (priority: ${tier.priority})`\n );\n this.emit('tierRegistered', tier);\n }\n\n /**\n * Remove a storage tier from the router\n */\n unregisterTier(tierName: string): void {\n const tier = this.tiers.get(tierName);\n if (tier) {\n this.tiers.delete(tierName);\n logger.info(`Unregistered storage tier: ${tierName}`);\n 
this.emit('tierUnregistered', tier);\n }\n }\n\n /**\n * Route a query to the most appropriate storage tier\n */\n async route<T>(\n operation: string,\n context: QueryContext,\n executor: (adapter: DatabaseAdapter) => Promise<T>\n ): Promise<T> {\n const startTime = Date.now();\n this.metrics.totalQueries++;\n this.metrics.queriesByType.set(\n context.queryType,\n (this.metrics.queriesByType.get(context.queryType) || 0) + 1\n );\n\n try {\n // Get routing decision\n const decision = await this.makeRoutingDecision(operation, context);\n\n // Try primary tier first\n try {\n const result = await this.executeOnTier(decision.primaryTier, executor);\n this.updateMetrics(decision.primaryTier.name, startTime, true);\n return result;\n } catch (error: unknown) {\n logger.warn(\n `Query failed on primary tier ${decision.primaryTier.name}:`,\n error\n );\n this.updateMetrics(decision.primaryTier.name, startTime, false);\n\n // Try fallback tiers\n for (const fallbackTier of decision.fallbackTiers) {\n try {\n logger.info(`Attempting fallback to tier: ${fallbackTier.name}`);\n const result = await this.executeOnTier(fallbackTier, executor);\n this.updateMetrics(fallbackTier.name, startTime, true);\n return result;\n } catch (fallbackError: unknown) {\n logger.warn(\n `Query failed on fallback tier ${fallbackTier.name}:`,\n fallbackError\n );\n this.updateMetrics(fallbackTier.name, startTime, false);\n }\n }\n\n // If all tiers failed, throw the original error\n throw error;\n }\n } catch (error: unknown) {\n logger.error('Query routing failed:', error);\n this.emit('routingError', { operation, context, error });\n throw error;\n }\n }\n\n /**\n * Make routing decision based on query context\n */\n private async makeRoutingDecision(\n operation: string,\n context: QueryContext\n ): Promise<RoutingDecision> {\n // Check cache first\n const cacheKey = this.generateCacheKey(operation, context);\n const cached = this.decisionCache.get(cacheKey);\n if (cached && Date.now() - cached.estimatedLatency < this.cacheExpiration) {\n this.metrics.cacheHitRate =\n (this.metrics.cacheHitRate * this.metrics.routingDecisions + 1) /\n (this.metrics.routingDecisions + 1);\n return cached;\n }\n\n this.metrics.routingDecisions++;\n\n // Evaluate each tier\n const evaluations: Array<{\n tier: StorageTier;\n score: number;\n rationale: string;\n }> = [];\n\n for (const tier of this.tiers.values()) {\n const score = await this.evaluateTier(tier, operation, context);\n const rationale = this.generateRationale(tier, operation, context, score);\n evaluations.push({ tier, score, rationale });\n }\n\n // Sort by score (highest first)\n evaluations.sort((a, b) => b.score - a.score);\n\n if (evaluations.length === 0) {\n throw new DatabaseError(\n 'No storage tiers available for routing',\n ErrorCode.DB_CONNECTION_FAILED,\n { query: context.query, tiersConfigured: this.tiers.length }\n );\n }\n\n const primaryEval = evaluations[0];\n const fallbackTiers = evaluations\n .slice(1)\n .map((evaluation) => evaluation.tier);\n\n const decision: RoutingDecision = {\n primaryTier: primaryEval.tier,\n fallbackTiers,\n rationale: primaryEval.rationale,\n confidence: primaryEval.score,\n estimatedLatency: this.estimateLatency(\n primaryEval.tier,\n operation,\n context\n ),\n cacheRecommendation: this.recommendCacheStrategy(\n primaryEval.tier,\n context\n ),\n };\n\n // Cache decision\n this.cacheDecision(cacheKey, decision);\n\n logger.debug(\n `Routing decision: ${decision.primaryTier.name} (confidence: 
${decision.confidence.toFixed(2)})`\n );\n this.emit('routingDecision', { operation, context, decision });\n\n return decision;\n }\n\n /**\n * Evaluate how well a tier fits the query requirements\n */\n private async evaluateTier(\n tier: StorageTier,\n operation: string,\n context: QueryContext\n ): Promise<number> {\n let score = 0;\n let maxScore = 0;\n\n // Evaluate each routing rule\n for (const rule of tier.config.routingRules) {\n maxScore += rule.weight;\n\n if (this.evaluateRule(rule, operation, context, tier)) {\n score += rule.weight;\n }\n }\n\n // Check operation preference\n if (tier.config.preferredOperations.includes(context.queryType)) {\n score += 0.2;\n maxScore += 0.2;\n }\n\n // Check feature support\n if (context.requiredFeatures) {\n const supportedFeatures = context.requiredFeatures.filter((feature) =>\n tier.config.supportedFeatures.includes(feature)\n );\n if (supportedFeatures.length === context.requiredFeatures.length) {\n score += 0.3;\n }\n maxScore += 0.3;\n }\n\n // Check current load\n const currentLoad = await this.getCurrentLoad(tier);\n if (\n tier.config.maxThroughput &&\n currentLoad < tier.config.maxThroughput * 0.8\n ) {\n score += 0.1;\n }\n maxScore += 0.1;\n\n // Check capacity\n if (await this.isWithinCapacity(tier)) {\n score += 0.1;\n }\n maxScore += 0.1;\n\n return maxScore > 0 ? score / maxScore : 0;\n }\n\n /**\n * Evaluate a single routing rule\n */\n private evaluateRule(\n rule: RoutingRule,\n _operation: string,\n context: QueryContext,\n _tier: StorageTier\n ): boolean {\n let actualValue: any;\n\n switch (rule.condition) {\n case 'age':\n // Check data age if frames are provided\n if (context.frames && context.frames.length > 0) {\n const avgAge =\n context.frames.reduce(\n (sum, frame) => sum + (Date.now() - frame.created_at),\n 0\n ) / context.frames.length;\n actualValue = avgAge;\n } else if (context.timeRange) {\n actualValue = Date.now() - context.timeRange.end.getTime();\n } else {\n return false;\n }\n break;\n\n case 'query_type':\n actualValue = context.queryType;\n break;\n\n case 'feature':\n actualValue = context.requiredFeatures || [];\n break;\n\n case 'priority':\n actualValue = context.priority || 'medium';\n break;\n\n case 'size':\n actualValue = context.frames ? 
context.frames.length : 0;\n break;\n\n default:\n return false;\n }\n\n return this.compareValues(actualValue, rule.operator, rule.value);\n }\n\n /**\n * Compare values based on operator\n */\n private compareValues(actual: any, operator: string, expected: any): boolean {\n switch (operator) {\n case '>':\n return actual > expected;\n case '<':\n return actual < expected;\n case '=':\n case '==':\n return actual === expected;\n case '!=':\n return actual !== expected;\n case 'in':\n return Array.isArray(expected) && expected.includes(actual);\n case 'not_in':\n return Array.isArray(expected) && !expected.includes(actual);\n case 'contains':\n return (\n Array.isArray(actual) &&\n actual.some((item) => expected.includes(item))\n );\n default:\n return false;\n }\n }\n\n /**\n * Execute query on specific tier\n */\n private async executeOnTier<T>(\n tier: StorageTier,\n executor: (adapter: DatabaseAdapter) => Promise<T>\n ): Promise<T> {\n const startTime = Date.now();\n\n try {\n const result = await executor(tier.adapter);\n const duration = Date.now() - startTime;\n\n logger.debug(`Query executed on tier ${tier.name} in ${duration}ms`);\n this.emit('queryExecuted', {\n tierName: tier.name,\n duration,\n success: true,\n });\n\n return result;\n } catch (error: unknown) {\n const duration = Date.now() - startTime;\n\n logger.error(\n `Query failed on tier ${tier.name} after ${duration}ms:`,\n error\n );\n this.emit('queryExecuted', {\n tierName: tier.name,\n duration,\n success: false,\n error,\n });\n\n throw error;\n }\n }\n\n /**\n * Generate cache key for routing decisions\n */\n private generateCacheKey(operation: string, context: QueryContext): string {\n const keyParts = [\n operation,\n context.queryType,\n context.priority || 'medium',\n (context.requiredFeatures || []).sort().join(','),\n context.timeRange\n ? 
`${context.timeRange.start.getTime()}-${context.timeRange.end.getTime()}`\n : '',\n ];\n\n return keyParts.join('|');\n }\n\n /**\n * Cache routing decision\n */\n private cacheDecision(key: string, decision: RoutingDecision): void {\n // Implement LRU eviction if cache is full\n if (this.decisionCache.size >= this.maxCacheSize) {\n const firstKey = this.decisionCache.keys().next().value;\n this.decisionCache.delete(firstKey);\n }\n\n this.decisionCache.set(key, decision);\n }\n\n /**\n * Estimate query latency for a tier\n */\n private estimateLatency(\n tier: StorageTier,\n operation: string,\n context: QueryContext\n ): number {\n const baseLatency =\n this.metrics.latencyByTier.get(tier.name) ||\n tier.config.maxLatency ||\n 100;\n\n // Adjust based on operation type\n let multiplier = 1;\n switch (context.queryType) {\n case 'search':\n multiplier = 1.5;\n break;\n case 'analytics':\n multiplier = 2.0;\n break;\n case 'bulk':\n multiplier = 3.0;\n break;\n default:\n multiplier = 1.0;\n }\n\n return baseLatency * multiplier;\n }\n\n /**\n * Recommend cache strategy for the context\n */\n private recommendCacheStrategy(\n tier: StorageTier,\n context: QueryContext\n ): string {\n if (context.cacheStrategy && context.cacheStrategy !== 'none') {\n return context.cacheStrategy;\n }\n\n // Default recommendations based on query type and tier\n if (tier.name === 'hot' || tier.name === 'memory') {\n return 'read_write';\n } else if (context.queryType === 'read') {\n return 'read';\n }\n\n return 'none';\n }\n\n /**\n * Generate human-readable rationale for routing decision\n */\n private generateRationale(\n tier: StorageTier,\n operation: string,\n context: QueryContext,\n score: number\n ): string {\n const reasons = [];\n\n if (tier.config.preferredOperations.includes(context.queryType)) {\n reasons.push(`optimized for ${context.queryType} operations`);\n }\n\n if (\n context.requiredFeatures?.every((feature) =>\n tier.config.supportedFeatures.includes(feature)\n )\n ) {\n reasons.push(\n `supports all required features (${context.requiredFeatures.join(', ')})`\n );\n }\n\n if (score > 0.8) {\n reasons.push('high confidence match');\n } else if (score > 0.6) {\n reasons.push('good match');\n } else if (score > 0.4) {\n reasons.push('acceptable match');\n }\n\n return reasons.length > 0 ? 
reasons.join(', ') : 'default tier selection';\n }\n\n /**\n * Get current load for a tier\n */\n private async getCurrentLoad(tier: StorageTier): Promise<number> {\n // This would integrate with actual monitoring\n // For now, return a placeholder based on recent queries\n return this.metrics.queriesByTier.get(tier.name) || 0;\n }\n\n /**\n * Check if tier is within capacity limits\n */\n private async isWithinCapacity(tier: StorageTier): Promise<boolean> {\n try {\n const stats = await tier.adapter.getStats();\n\n if (tier.config.maxFrames && stats.totalFrames >= tier.config.maxFrames) {\n return false;\n }\n\n if (\n tier.config.maxSizeMB &&\n stats.diskUsage >= tier.config.maxSizeMB * 1024 * 1024\n ) {\n return false;\n }\n\n return true;\n } catch (error: unknown) {\n logger.warn(`Failed to check capacity for tier ${tier.name}:`, error);\n return true; // Assume capacity is OK if we can't check\n }\n }\n\n /**\n * Update routing metrics\n */\n private updateMetrics(\n tierName: string,\n startTime: number,\n success: boolean\n ): void {\n const duration = Date.now() - startTime;\n\n // Update tier metrics\n this.metrics.queriesByTier.set(\n tierName,\n (this.metrics.queriesByTier.get(tierName) || 0) + 1\n );\n\n if (success) {\n // Update latency\n const currentAvg = this.metrics.latencyByTier.get(tierName) || 0;\n const count = this.metrics.queriesByTier.get(tierName) || 1;\n const newAvg = (currentAvg * (count - 1) + duration) / count;\n this.metrics.latencyByTier.set(tierName, newAvg);\n\n // Update overall average\n this.metrics.averageLatency =\n (this.metrics.averageLatency * (this.metrics.totalQueries - 1) +\n duration) /\n this.metrics.totalQueries;\n } else {\n // Update error count\n this.metrics.errorsByTier.set(\n tierName,\n (this.metrics.errorsByTier.get(tierName) || 0) + 1\n );\n }\n }\n\n /**\n * Get current routing metrics\n */\n getMetrics(): QueryMetrics {\n // Update cache hit rate\n const cacheRequests = this.metrics.routingDecisions;\n const cacheHits = cacheRequests - this.decisionCache.size; // Approximation\n this.metrics.cacheHitRate =\n cacheRequests > 0 ? cacheHits / cacheRequests : 0;\n\n return { ...this.metrics };\n }\n\n /**\n * Get registered tiers\n */\n getTiers(): StorageTier[] {\n return Array.from(this.tiers.values()).sort(\n (a, b) => b.priority - a.priority\n );\n }\n\n /**\n * Clear routing decision cache\n */\n clearCache(): void {\n this.decisionCache.clear();\n logger.info('Routing decision cache cleared');\n }\n\n /**\n * Get tier by name\n */\n getTier(name: string): StorageTier | undefined {\n return this.tiers.get(name);\n }\n}\n"],
+
"sourcesContent": ["/**\n * Query Router for Tiered Storage\n * Routes database queries to appropriate storage tier based on data age, query type, and performance requirements\n */\n\nimport { DatabaseAdapter } from './database-adapter.js';\nimport type { Frame } from '../context/index.js';\nimport { logger } from '../monitoring/logger.js';\nimport { DatabaseError, ErrorCode } from '../errors/index.js';\nimport { EventEmitter } from 'events';\n\nexport interface StorageTier {\n name: string;\n adapter: DatabaseAdapter;\n priority: number;\n config: TierConfig;\n}\n\nexport interface TierConfig {\n // Data age thresholds\n maxAge?: number; // Data older than this goes to next tier (ms)\n minAge?: number; // Data newer than this stays in this tier (ms)\n\n // Query type preferences\n preferredOperations: string[]; // ['read', 'write', 'search', 'analytics']\n supportedFeatures: string[]; // ['full_text', 'vector', 'aggregation']\n\n // Performance characteristics\n maxLatency?: number; // Max acceptable latency for this tier (ms)\n maxThroughput?: number; // Max queries per second this tier can handle\n\n // Capacity limits\n maxFrames?: number; // Max frames before promoting to next tier\n maxSizeMB?: number; // Max storage size in MB\n\n // Routing rules\n routingRules: RoutingRule[];\n}\n\nexport interface RoutingRule {\n condition: string; // 'age' | 'size' | 'query_type' | 'load' | 'feature'\n operator: string; // '>', '<', '=', '!=', 'in', 'not_in'\n value: any; // Comparison value\n weight: number; // Rule weight (0-1)\n}\n\nexport interface QueryContext {\n queryType: 'read' | 'write' | 'search' | 'analytics' | 'bulk';\n frames?: Frame[];\n frameIds?: string[];\n requiredFeatures?: string[];\n timeRange?: { start: Date; end: Date };\n priority?: 'low' | 'medium' | 'high' | 'critical';\n timeout?: number;\n cacheStrategy?: 'none' | 'read' | 'write' | 'read_write';\n}\n\nexport interface RoutingDecision {\n primaryTier: StorageTier;\n fallbackTiers: StorageTier[];\n rationale: string;\n confidence: number; // 0-1 confidence in decision\n estimatedLatency: number; // Estimated query latency (ms)\n cacheRecommendation?: string;\n}\n\nexport interface QueryMetrics {\n totalQueries: number;\n queriesByTier: Map<string, number>;\n queriesByType: Map<string, number>;\n averageLatency: number;\n latencyByTier: Map<string, number>;\n errorsByTier: Map<string, number>;\n cacheHitRate: number;\n routingDecisions: number;\n}\n\nexport class QueryRouter extends EventEmitter {\n private tiers: Map<string, StorageTier> = new Map();\n private metrics: QueryMetrics;\n private decisionCache: Map<string, RoutingDecision> = new Map();\n private readonly cacheExpiration = 60000; // 1 minute\n private readonly maxCacheSize = 1000;\n\n constructor() {\n super();\n this.metrics = {\n totalQueries: 0,\n queriesByTier: new Map(),\n queriesByType: new Map(),\n averageLatency: 0,\n latencyByTier: new Map(),\n errorsByTier: new Map(),\n cacheHitRate: 0,\n routingDecisions: 0,\n };\n }\n\n /**\n * Register a storage tier with the router\n */\n registerTier(tier: StorageTier): void {\n this.tiers.set(tier.name, tier);\n logger.info(\n `Registered storage tier: ${tier.name} (priority: ${tier.priority})`\n );\n this.emit('tierRegistered', tier);\n }\n\n /**\n * Remove a storage tier from the router\n */\n unregisterTier(tierName: string): void {\n const tier = this.tiers.get(tierName);\n if (tier) {\n this.tiers.delete(tierName);\n logger.info(`Unregistered storage tier: ${tierName}`);\n this.emit('tierUnregistered', 
tier);\n }\n }\n\n /**\n * Route a query to the most appropriate storage tier\n */\n async route<T>(\n operation: string,\n context: QueryContext,\n executor: (adapter: DatabaseAdapter) => Promise<T>\n ): Promise<T> {\n const startTime = Date.now();\n this.metrics.totalQueries++;\n this.metrics.queriesByType.set(\n context.queryType,\n (this.metrics.queriesByType.get(context.queryType) || 0) + 1\n );\n\n try {\n // Get routing decision\n const decision = await this.makeRoutingDecision(operation, context);\n\n // Try primary tier first\n try {\n const result = await this.executeOnTier(decision.primaryTier, executor);\n this.updateMetrics(decision.primaryTier.name, startTime, true);\n return result;\n } catch (error: unknown) {\n logger.warn(\n `Query failed on primary tier ${decision.primaryTier.name}:`,\n error\n );\n this.updateMetrics(decision.primaryTier.name, startTime, false);\n\n // Try fallback tiers\n for (const fallbackTier of decision.fallbackTiers) {\n try {\n logger.info(`Attempting fallback to tier: ${fallbackTier.name}`);\n const result = await this.executeOnTier(fallbackTier, executor);\n this.updateMetrics(fallbackTier.name, startTime, true);\n return result;\n } catch (fallbackError: unknown) {\n logger.warn(\n `Query failed on fallback tier ${fallbackTier.name}:`,\n fallbackError\n );\n this.updateMetrics(fallbackTier.name, startTime, false);\n }\n }\n\n // If all tiers failed, throw the original error\n throw error;\n }\n } catch (error: unknown) {\n logger.error('Query routing failed:', error);\n this.emit('routingError', { operation, context, error });\n throw error;\n }\n }\n\n /**\n * Make routing decision based on query context\n */\n private async makeRoutingDecision(\n operation: string,\n context: QueryContext\n ): Promise<RoutingDecision> {\n // Check cache first\n const cacheKey = this.generateCacheKey(operation, context);\n const cached = this.decisionCache.get(cacheKey);\n if (cached && Date.now() - cached.estimatedLatency < this.cacheExpiration) {\n this.metrics.cacheHitRate =\n (this.metrics.cacheHitRate * this.metrics.routingDecisions + 1) /\n (this.metrics.routingDecisions + 1);\n return cached;\n }\n\n this.metrics.routingDecisions++;\n\n // Evaluate each tier\n const evaluations: Array<{\n tier: StorageTier;\n score: number;\n rationale: string;\n }> = [];\n\n for (const tier of this.tiers.values()) {\n const score = await this.evaluateTier(tier, operation, context);\n const rationale = this.generateRationale(tier, operation, context, score);\n evaluations.push({ tier, score, rationale });\n }\n\n // Sort by score (highest first)\n evaluations.sort((a, b) => b.score - a.score);\n\n if (evaluations.length === 0) {\n throw new DatabaseError(\n 'No storage tiers available for routing',\n ErrorCode.DB_CONNECTION_FAILED,\n { query: context.query, tiersConfigured: this.tiers.length }\n );\n }\n\n const primaryEval = evaluations[0];\n const fallbackTiers = evaluations\n .slice(1)\n .map((evaluation) => evaluation.tier);\n\n const decision: RoutingDecision = {\n primaryTier: primaryEval.tier,\n fallbackTiers,\n rationale: primaryEval.rationale,\n confidence: primaryEval.score,\n estimatedLatency: this.estimateLatency(\n primaryEval.tier,\n operation,\n context\n ),\n cacheRecommendation: this.recommendCacheStrategy(\n primaryEval.tier,\n context\n ),\n };\n\n // Cache decision\n this.cacheDecision(cacheKey, decision);\n\n logger.debug(\n `Routing decision: ${decision.primaryTier.name} (confidence: ${decision.confidence.toFixed(2)})`\n );\n 
    this.emit('routingDecision', { operation, context, decision });

    return decision;
  }

  /**
   * Evaluate how well a tier fits the query requirements
   */
  private async evaluateTier(
    tier: StorageTier,
    operation: string,
    context: QueryContext
  ): Promise<number> {
    let score = 0;
    let maxScore = 0;

    // Evaluate each routing rule
    for (const rule of tier.config.routingRules) {
      maxScore += rule.weight;

      if (this.evaluateRule(rule, operation, context, tier)) {
        score += rule.weight;
      }
    }

    // Check operation preference
    if (tier.config.preferredOperations.includes(context.queryType)) {
      score += 0.2;
      maxScore += 0.2;
    }

    // Check feature support
    if (context.requiredFeatures) {
      const supportedFeatures = context.requiredFeatures.filter((feature) =>
        tier.config.supportedFeatures.includes(feature)
      );
      if (supportedFeatures.length === context.requiredFeatures.length) {
        score += 0.3;
      }
      maxScore += 0.3;
    }

    // Check current load
    const currentLoad = await this.getCurrentLoad(tier);
    if (
      tier.config.maxThroughput &&
      currentLoad < tier.config.maxThroughput * 0.8
    ) {
      score += 0.1;
    }
    maxScore += 0.1;

    // Check capacity
    if (await this.isWithinCapacity(tier)) {
      score += 0.1;
    }
    maxScore += 0.1;

    return maxScore > 0 ? score / maxScore : 0;
  }

  /**
   * Evaluate a single routing rule
   */
  private evaluateRule(
    rule: RoutingRule,
    _operation: string,
    context: QueryContext,
    _tier: StorageTier
  ): boolean {
    let actualValue: any;

    switch (rule.condition) {
      case 'age':
        // Check data age if frames are provided
        if (context.frames && context.frames.length > 0) {
          const avgAge =
            context.frames.reduce(
              (sum, frame) => sum + (Date.now() - frame.created_at),
              0
            ) / context.frames.length;
          actualValue = avgAge;
        } else if (context.timeRange) {
          actualValue = Date.now() - context.timeRange.end.getTime();
        } else {
          return false;
        }
        break;

      case 'query_type':
        actualValue = context.queryType;
        break;

      case 'feature':
        actualValue = context.requiredFeatures || [];
        break;

      case 'priority':
        actualValue = context.priority || 'medium';
        break;

      case 'size':
        actualValue = context.frames ? context.frames.length : 0;
        break;

      default:
        return false;
    }

    return this.compareValues(actualValue, rule.operator, rule.value);
  }

  /**
   * Compare values based on operator
   */
  private compareValues(actual: any, operator: string, expected: any): boolean {
    switch (operator) {
      case '>':
        return actual > expected;
      case '<':
        return actual < expected;
      case '=':
      case '==':
        return actual === expected;
      case '!=':
        return actual !== expected;
      case 'in':
        return Array.isArray(expected) && expected.includes(actual);
      case 'not_in':
        return Array.isArray(expected) && !expected.includes(actual);
      case 'contains':
        return (
          Array.isArray(actual) &&
          actual.some((item) => expected.includes(item))
        );
      default:
        return false;
    }
  }

  /**
   * Execute query on specific tier
   */
  private async executeOnTier<T>(
    tier: StorageTier,
    executor: (adapter: DatabaseAdapter) => Promise<T>
  ): Promise<T> {
    const startTime = Date.now();

    try {
      const result = await executor(tier.adapter);
      const duration = Date.now() - startTime;

      logger.debug(`Query executed on tier ${tier.name} in ${duration}ms`);
      this.emit('queryExecuted', {
        tierName: tier.name,
        duration,
        success: true,
      });

      return result;
    } catch (error: unknown) {
      const duration = Date.now() - startTime;

      logger.error(
        `Query failed on tier ${tier.name} after ${duration}ms:`,
        error
      );
      this.emit('queryExecuted', {
        tierName: tier.name,
        duration,
        success: false,
        error,
      });

      throw error;
    }
  }

  /**
   * Generate cache key for routing decisions
   */
  private generateCacheKey(operation: string, context: QueryContext): string {
    const keyParts = [
      operation,
      context.queryType,
      context.priority || 'medium',
      (context.requiredFeatures || []).sort().join(','),
      context.timeRange
        ? `${context.timeRange.start.getTime()}-${context.timeRange.end.getTime()}`
        : '',
    ];

    return keyParts.join('|');
  }

  /**
   * Cache routing decision
   */
  private cacheDecision(key: string, decision: RoutingDecision): void {
    // Implement LRU eviction if cache is full
    if (this.decisionCache.size >= this.maxCacheSize) {
      const firstKey = this.decisionCache.keys().next().value;
      this.decisionCache.delete(firstKey);
    }

    this.decisionCache.set(key, decision);
  }

  /**
   * Estimate query latency for a tier
   */
  private estimateLatency(
    tier: StorageTier,
    operation: string,
    context: QueryContext
  ): number {
    const baseLatency =
      this.metrics.latencyByTier.get(tier.name) ||
      tier.config.maxLatency ||
      100;

    // Adjust based on operation type
    let multiplier = 1;
    switch (context.queryType) {
      case 'search':
        multiplier = 1.5;
        break;
      case 'analytics':
        multiplier = 2.0;
        break;
      case 'bulk':
        multiplier = 3.0;
        break;
      default:
        multiplier = 1.0;
    }

    return baseLatency * multiplier;
  }

  /**
   * Recommend cache strategy for the context
   */
  private recommendCacheStrategy(
    tier: StorageTier,
    context: QueryContext
  ): string {
    if (context.cacheStrategy && context.cacheStrategy !== 'none') {
      return context.cacheStrategy;
    }

    // Default recommendations based on query type and tier
    if (tier.name === 'hot' || tier.name === 'memory') {
      return 'read_write';
    } else if (context.queryType === 'read') {
      return 'read';
    }

    return 'none';
  }

  /**
   * Generate human-readable rationale for routing decision
   */
  private generateRationale(
    tier: StorageTier,
    operation: string,
    context: QueryContext,
    score: number
  ): string {
    const reasons = [];

    if (tier.config.preferredOperations.includes(context.queryType)) {
      reasons.push(`optimized for ${context.queryType} operations`);
    }

    if (
      context.requiredFeatures?.every((feature) =>
        tier.config.supportedFeatures.includes(feature)
      )
    ) {
      reasons.push(
        `supports all required features (${context.requiredFeatures.join(', ')})`
      );
    }

    if (score > 0.8) {
      reasons.push('high confidence match');
    } else if (score > 0.6) {
      reasons.push('good match');
    } else if (score > 0.4) {
      reasons.push('acceptable match');
    }

    return reasons.length > 0 ? reasons.join(', ') : 'default tier selection';
  }

  /**
   * Get current load for a tier
   */
  private async getCurrentLoad(tier: StorageTier): Promise<number> {
    // This would integrate with actual monitoring
    // For now, return a placeholder based on recent queries
    return this.metrics.queriesByTier.get(tier.name) || 0;
  }

  /**
   * Check if tier is within capacity limits
   */
  private async isWithinCapacity(tier: StorageTier): Promise<boolean> {
    try {
      const stats = await tier.adapter.getStats();

      if (tier.config.maxFrames && stats.totalFrames >= tier.config.maxFrames) {
        return false;
      }

      if (
        tier.config.maxSizeMB &&
        stats.diskUsage >= tier.config.maxSizeMB * 1024 * 1024
      ) {
        return false;
      }

      return true;
    } catch (error: unknown) {
      logger.warn(`Failed to check capacity for tier ${tier.name}:`, error);
      return true; // Assume capacity is OK if we can't check
    }
  }

  /**
   * Update routing metrics
   */
  private updateMetrics(
    tierName: string,
    startTime: number,
    success: boolean
  ): void {
    const duration = Date.now() - startTime;

    // Update tier metrics
    this.metrics.queriesByTier.set(
      tierName,
      (this.metrics.queriesByTier.get(tierName) || 0) + 1
    );

    if (success) {
      // Update latency
      const currentAvg = this.metrics.latencyByTier.get(tierName) || 0;
      const count = this.metrics.queriesByTier.get(tierName) || 1;
      const newAvg = (currentAvg * (count - 1) + duration) / count;
      this.metrics.latencyByTier.set(tierName, newAvg);

      // Update overall average
      this.metrics.averageLatency =
        (this.metrics.averageLatency * (this.metrics.totalQueries - 1) +
          duration) /
        this.metrics.totalQueries;
    } else {
      // Update error count
      this.metrics.errorsByTier.set(
        tierName,
        (this.metrics.errorsByTier.get(tierName) || 0) + 1
      );
    }
  }

  /**
   * Get current routing metrics
   */
  getMetrics(): QueryMetrics {
    // Update cache hit rate
    const cacheRequests = this.metrics.routingDecisions;
    const cacheHits = cacheRequests - this.decisionCache.size; // Approximation
    this.metrics.cacheHitRate =
      cacheRequests > 0 ? cacheHits / cacheRequests : 0;

    return { ...this.metrics };
  }

  /**
   * Get registered tiers
   */
  getTiers(): StorageTier[] {
    return Array.from(this.tiers.values()).sort(
      (a, b) => b.priority - a.priority
    );
  }

  /**
   * Clear routing decision cache
   */
  clearCache(): void {
    this.decisionCache.clear();
    logger.info('Routing decision cache cleared');
  }

  /**
   * Get tier by name
   */
  getTier(name: string): StorageTier | undefined {
    return this.tiers.get(name);
  }
}
"],
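To make the rule evaluation above concrete, here is a minimal, self-contained sketch of the scoring path from `evaluateRule` through `compareValues`. The `Rule` and `Ctx` shapes are simplified stand-ins for the package's `RoutingRule` and `QueryContext` types (their full definitions are not part of this diff), and `compare` mirrors the operator dispatch for three of the operators:

```ts
// Sketch only: Rule and Ctx are simplified stand-ins for RoutingRule and
// QueryContext; the real types carry more fields.
type Rule = { condition: 'age'; operator: string; value: number; weight: number };
type Ctx = { queryType: string; frames?: { created_at: number }[] };

function compare(actual: number, operator: string, expected: number): boolean {
  switch (operator) {
    case '>':
      return actual > expected;
    case '<':
      return actual < expected;
    case '=':
      return actual === expected;
    default:
      return false;
  }
}

// An 'age' rule that matches when average frame age exceeds one week,
// mirroring how evaluateRule derives actualValue for the 'age' condition.
const rule: Rule = { condition: 'age', operator: '>', value: 7 * 24 * 60 * 60 * 1000, weight: 0.5 };
const ctx: Ctx = { queryType: 'read', frames: [{ created_at: Date.now() - 30 * 24 * 60 * 60 * 1000 }] };

const avgAge =
  ctx.frames!.reduce((sum, f) => sum + (Date.now() - f.created_at), 0) / ctx.frames!.length;
console.log(compare(avgAge, rule.operator, rule.value)); // true: month-old frames match the rule
```

When such a rule matches, `evaluateTier` adds its `weight` to the tier's score, so a tier whose rules favor old data wins the normalized `score / maxScore` comparison for archival reads.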
"mappings": ";;;;AAOA,SAAS,cAAc;AACvB,SAAS,eAAe,iBAAiB;AACzC,SAAS,oBAAoB;AAoEtB,MAAM,oBAAoB,aAAa;AAAA,EACpC,QAAkC,oBAAI,IAAI;AAAA,EAC1C;AAAA,EACA,gBAA8C,oBAAI,IAAI;AAAA,EAC7C,kBAAkB;AAAA;AAAA,EAClB,eAAe;AAAA,EAEhC,cAAc;AACZ,UAAM;AACN,SAAK,UAAU;AAAA,MACb,cAAc;AAAA,MACd,eAAe,oBAAI,IAAI;AAAA,MACvB,eAAe,oBAAI,IAAI;AAAA,MACvB,gBAAgB;AAAA,MAChB,eAAe,oBAAI,IAAI;AAAA,MACvB,cAAc,oBAAI,IAAI;AAAA,MACtB,cAAc;AAAA,MACd,kBAAkB;AAAA,IACpB;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,aAAa,MAAyB;AACpC,SAAK,MAAM,IAAI,KAAK,MAAM,IAAI;AAC9B,WAAO;AAAA,MACL,4BAA4B,KAAK,IAAI,eAAe,KAAK,QAAQ;AAAA,IACnE;AACA,SAAK,KAAK,kBAAkB,IAAI;AAAA,EAClC;AAAA;AAAA;AAAA;AAAA,EAKA,eAAe,UAAwB;AACrC,UAAM,OAAO,KAAK,MAAM,IAAI,QAAQ;AACpC,QAAI,MAAM;AACR,WAAK,MAAM,OAAO,QAAQ;AAC1B,aAAO,KAAK,8BAA8B,QAAQ,EAAE;AACpD,WAAK,KAAK,oBAAoB,IAAI;AAAA,IACpC;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,MACJ,WACA,SACA,UACY;AACZ,UAAM,YAAY,KAAK,IAAI;AAC3B,SAAK,QAAQ;AACb,SAAK,QAAQ,cAAc;AAAA,MACzB,QAAQ;AAAA,OACP,KAAK,QAAQ,cAAc,IAAI,QAAQ,SAAS,KAAK,KAAK;AAAA,IAC7D;AAEA,QAAI;AAEF,YAAM,WAAW,MAAM,KAAK,oBAAoB,WAAW,OAAO;AAGlE,UAAI;AACF,cAAM,SAAS,MAAM,KAAK,cAAc,SAAS,aAAa,QAAQ;AACtE,aAAK,cAAc,SAAS,YAAY,MAAM,WAAW,IAAI;AAC7D,eAAO;AAAA,MACT,SAAS,OAAgB;AACvB,eAAO;AAAA,UACL,gCAAgC,SAAS,YAAY,IAAI;AAAA,UACzD;AAAA,QACF;AACA,aAAK,cAAc,SAAS,YAAY,MAAM,WAAW,KAAK;AAG9D,mBAAW,gBAAgB,SAAS,eAAe;AACjD,cAAI;AACF,mBAAO,KAAK,gCAAgC,aAAa,IAAI,EAAE;AAC/D,kBAAM,SAAS,MAAM,KAAK,cAAc,cAAc,QAAQ;AAC9D,iBAAK,cAAc,aAAa,MAAM,WAAW,IAAI;AACrD,mBAAO;AAAA,UACT,SAAS,eAAwB;AAC/B,mBAAO;AAAA,cACL,iCAAiC,aAAa,IAAI;AAAA,cAClD;AAAA,YACF;AACA,iBAAK,cAAc,aAAa,MAAM,WAAW,KAAK;AAAA,UACxD;AAAA,QACF;AAGA,cAAM;AAAA,MACR;AAAA,IACF,SAAS,OAAgB;AACvB,aAAO,MAAM,yBAAyB,KAAK;AAC3C,WAAK,KAAK,gBAAgB,EAAE,WAAW,SAAS,MAAM,CAAC;AACvD,YAAM;AAAA,IACR;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,oBACZ,WACA,SAC0B;AAE1B,UAAM,WAAW,KAAK,iBAAiB,WAAW,OAAO;AACzD,UAAM,SAAS,KAAK,cAAc,IAAI,QAAQ;AAC9C,QAAI,UAAU,KAAK,IAAI,IAAI,OAAO,mBAAmB,KAAK,iBAAiB;AACzE,WAAK,QAAQ,gBACV,KAAK,QAAQ,eAAe,KAAK,QAAQ,mBAAmB,MAC5D,KAAK,QAAQ,mBAAmB;AACnC,aAAO;AAAA,IACT;AAEA,SAAK,QAAQ;AAGb,UAAM,cAID,CAAC;AAEN,eAAW,QAAQ,KAAK,MAAM,OAAO,GAAG;AACtC,YAAM,QAAQ,MAAM,KAAK,aAAa,MAAM,WAAW,OAAO;AAC9D,YAAM,YAAY,KAAK,kBAAkB,MAAM,WAAW,SAAS,KAAK;AACxE,kBAAY,KAAK,EAAE,MAAM,OAAO,UAAU,CAAC;AAAA,IAC7C;AAGA,gBAAY,KAAK,CAAC,GAAG,MAAM,EAAE,QAAQ,EAAE,KAAK;AAE5C,QAAI,YAAY,WAAW,GAAG;AAC5B,YAAM,IAAI;AAAA,QACR;AAAA,QACA,UAAU;AAAA,QACV,EAAE,OAAO,QAAQ,OAAO,iBAAiB,KAAK,MAAM,OAAO;AAAA,MAC7D;AAAA,IACF;AAEA,UAAM,cAAc,YAAY,CAAC;AACjC,UAAM,gBAAgB,YACnB,MAAM,CAAC,EACP,IAAI,CAAC,eAAe,WAAW,IAAI;AAEtC,UAAM,WAA4B;AAAA,MAChC,aAAa,YAAY;AAAA,MACzB;AAAA,MACA,WAAW,YAAY;AAAA,MACvB,YAAY,YAAY;AAAA,MACxB,kBAAkB,KAAK;AAAA,QACrB,YAAY;AAAA,QACZ;AAAA,QACA;AAAA,MACF;AAAA,MACA,qBAAqB,KAAK;AAAA,QACxB,YAAY;AAAA,QACZ;AAAA,MACF;AAAA,IACF;AAGA,SAAK,cAAc,UAAU,QAAQ;AAErC,WAAO;AAAA,MACL,qBAAqB,SAAS,YAAY,IAAI,iBAAiB,SAAS,WAAW,QAAQ,CAAC,CAAC;AAAA,IAC/F;AACA,SAAK,KAAK,mBAAmB,EAAE,WAAW,SAAS,SAAS,CAAC;AAE7D,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,aACZ,MACA,WACA,SACiB;AACjB,QAAI,QAAQ;AACZ,QAAI,WAAW;AAGf,eAAW,QAAQ,KAAK,OAAO,cAAc;AAC3C,kBAAY,KAAK;AAEjB,UAAI,KAAK,aAAa,MAAM,WAAW,SAAS,IAAI,GAAG;AACrD,iBAAS,KAAK;AAAA,MAChB;AAAA,IACF;AAGA,QAAI,KAAK,OAAO,oBAAoB,SAAS,QAAQ,SAAS,GAAG;AAC/D,eAAS;AACT,kBAAY;AAAA,IACd;AAGA,QAAI,QAAQ,kBAAkB;AAC5B,YAAM,oBAAoB,QAAQ,iBAAiB;AAAA,QAAO,CAAC,YACzD,KAAK,OAAO,kBAAkB,SAAS,OAAO;AAAA,MAChD;AACA,UAAI,kBAAkB,WAAW,QAAQ,iBAAiB,QAAQ;AAChE,iBAAS;AAAA,MACX;AACA,kBAAY;AAAA,IACd;AAGA,UAAM,cAAc,MAAM,KAAK,eAAe,IAAI;AAClD,QACE,KAAK,OAAO,iBACZ,cAAc,KAAK,OAAO,gBAAgB,KAC1C;AACA,eAAS;AAAA,IACX;AACA,gBAAY;AAGZ,QAAI,MAAM,KAAK,iBAAiB,IAAI,GAAG;
AACrC,eAAS;AAAA,IACX;AACA,gBAAY;AAEZ,WAAO,WAAW,IAAI,QAAQ,WAAW;AAAA,EAC3C;AAAA;AAAA;AAAA;AAAA,EAKQ,aACN,MACA,YACA,SACA,OACS;AACT,QAAI;AAEJ,YAAQ,KAAK,WAAW;AAAA,MACtB,KAAK;AAEH,YAAI,QAAQ,UAAU,QAAQ,OAAO,SAAS,GAAG;AAC/C,gBAAM,SACJ,QAAQ,OAAO;AAAA,YACb,CAAC,KAAK,UAAU,OAAO,KAAK,IAAI,IAAI,MAAM;AAAA,YAC1C;AAAA,UACF,IAAI,QAAQ,OAAO;AACrB,wBAAc;AAAA,QAChB,WAAW,QAAQ,WAAW;AAC5B,wBAAc,KAAK,IAAI,IAAI,QAAQ,UAAU,IAAI,QAAQ;AAAA,QAC3D,OAAO;AACL,iBAAO;AAAA,QACT;AACA;AAAA,MAEF,KAAK;AACH,sBAAc,QAAQ;AACtB;AAAA,MAEF,KAAK;AACH,sBAAc,QAAQ,oBAAoB,CAAC;AAC3C;AAAA,MAEF,KAAK;AACH,sBAAc,QAAQ,YAAY;AAClC;AAAA,MAEF,KAAK;AACH,sBAAc,QAAQ,SAAS,QAAQ,OAAO,SAAS;AACvD;AAAA,MAEF;AACE,eAAO;AAAA,IACX;AAEA,WAAO,KAAK,cAAc,aAAa,KAAK,UAAU,KAAK,KAAK;AAAA,EAClE;AAAA;AAAA;AAAA;AAAA,EAKQ,cAAc,QAAa,UAAkB,UAAwB;AAC3E,YAAQ,UAAU;AAAA,MAChB,KAAK;AACH,eAAO,SAAS;AAAA,MAClB,KAAK;AACH,eAAO,SAAS;AAAA,MAClB,KAAK;AAAA,MACL,KAAK;AACH,eAAO,WAAW;AAAA,MACpB,KAAK;AACH,eAAO,WAAW;AAAA,MACpB,KAAK;AACH,eAAO,MAAM,QAAQ,QAAQ,KAAK,SAAS,SAAS,MAAM;AAAA,MAC5D,KAAK;AACH,eAAO,MAAM,QAAQ,QAAQ,KAAK,CAAC,SAAS,SAAS,MAAM;AAAA,MAC7D,KAAK;AACH,eACE,MAAM,QAAQ,MAAM,KACpB,OAAO,KAAK,CAAC,SAAS,SAAS,SAAS,IAAI,CAAC;AAAA,MAEjD;AACE,eAAO;AAAA,IACX;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,cACZ,MACA,UACY;AACZ,UAAM,YAAY,KAAK,IAAI;AAE3B,QAAI;AACF,YAAM,SAAS,MAAM,SAAS,KAAK,OAAO;AAC1C,YAAM,WAAW,KAAK,IAAI,IAAI;AAE9B,aAAO,MAAM,0BAA0B,KAAK,IAAI,OAAO,QAAQ,IAAI;AACnE,WAAK,KAAK,iBAAiB;AAAA,QACzB,UAAU,KAAK;AAAA,QACf;AAAA,QACA,SAAS;AAAA,MACX,CAAC;AAED,aAAO;AAAA,IACT,SAAS,OAAgB;AACvB,YAAM,WAAW,KAAK,IAAI,IAAI;AAE9B,aAAO;AAAA,QACL,wBAAwB,KAAK,IAAI,UAAU,QAAQ;AAAA,QACnD;AAAA,MACF;AACA,WAAK,KAAK,iBAAiB;AAAA,QACzB,UAAU,KAAK;AAAA,QACf;AAAA,QACA,SAAS;AAAA,QACT;AAAA,MACF,CAAC;AAED,YAAM;AAAA,IACR;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,iBAAiB,WAAmB,SAA+B;AACzE,UAAM,WAAW;AAAA,MACf;AAAA,MACA,QAAQ;AAAA,MACR,QAAQ,YAAY;AAAA,OACnB,QAAQ,oBAAoB,CAAC,GAAG,KAAK,EAAE,KAAK,GAAG;AAAA,MAChD,QAAQ,YACJ,GAAG,QAAQ,UAAU,MAAM,QAAQ,CAAC,IAAI,QAAQ,UAAU,IAAI,QAAQ,CAAC,KACvE;AAAA,IACN;AAEA,WAAO,SAAS,KAAK,GAAG;AAAA,EAC1B;AAAA;AAAA;AAAA;AAAA,EAKQ,cAAc,KAAa,UAAiC;AAElE,QAAI,KAAK,cAAc,QAAQ,KAAK,cAAc;AAChD,YAAM,WAAW,KAAK,cAAc,KAAK,EAAE,KAAK,EAAE;AAClD,WAAK,cAAc,OAAO,QAAQ;AAAA,IACpC;AAEA,SAAK,cAAc,IAAI,KAAK,QAAQ;AAAA,EACtC;AAAA;AAAA;AAAA;AAAA,EAKQ,gBACN,MACA,WACA,SACQ;AACR,UAAM,cACJ,KAAK,QAAQ,cAAc,IAAI,KAAK,IAAI,KACxC,KAAK,OAAO,cACZ;AAGF,QAAI,aAAa;AACjB,YAAQ,QAAQ,WAAW;AAAA,MACzB,KAAK;AACH,qBAAa;AACb;AAAA,MACF,KAAK;AACH,qBAAa;AACb;AAAA,MACF,KAAK;AACH,qBAAa;AACb;AAAA,MACF;AACE,qBAAa;AAAA,IACjB;AAEA,WAAO,cAAc;AAAA,EACvB;AAAA;AAAA;AAAA;AAAA,EAKQ,uBACN,MACA,SACQ;AACR,QAAI,QAAQ,iBAAiB,QAAQ,kBAAkB,QAAQ;AAC7D,aAAO,QAAQ;AAAA,IACjB;AAGA,QAAI,KAAK,SAAS,SAAS,KAAK,SAAS,UAAU;AACjD,aAAO;AAAA,IACT,WAAW,QAAQ,cAAc,QAAQ;AACvC,aAAO;AAAA,IACT;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKQ,kBACN,MACA,WACA,SACA,OACQ;AACR,UAAM,UAAU,CAAC;AAEjB,QAAI,KAAK,OAAO,oBAAoB,SAAS,QAAQ,SAAS,GAAG;AAC/D,cAAQ,KAAK,iBAAiB,QAAQ,SAAS,aAAa;AAAA,IAC9D;AAEA,QACE,QAAQ,kBAAkB;AAAA,MAAM,CAAC,YAC/B,KAAK,OAAO,kBAAkB,SAAS,OAAO;AAAA,IAChD,GACA;AACA,cAAQ;AAAA,QACN,mCAAmC,QAAQ,iBAAiB,KAAK,IAAI,CAAC;AAAA,MACxE;AAAA,IACF;AAEA,QAAI,QAAQ,KAAK;AACf,cAAQ,KAAK,uBAAuB;AAAA,IACtC,WAAW,QAAQ,KAAK;AACtB,cAAQ,KAAK,YAAY;AAAA,IAC3B,WAAW,QAAQ,KAAK;AACtB,cAAQ,KAAK,kBAAkB;AAAA,IACjC;AAEA,WAAO,QAAQ,SAAS,IAAI,QAAQ,KAAK,IAAI,IAAI;AAAA,EACnD;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,eAAe,MAAoC;AAG/D,WAAO,KAAK,QAAQ,cAAc,IAAI,KAAK,IAAI,KAAK;AAAA,EACtD;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,iBAAiB,MAAqC;AAClE,QAAI;AACF,YAAM,QAAQ,MAAM,KAAK,QAAQ,SAAS;AAE1C,UAAI,KAAK,OAAO,aAAa,MAAM,eAAe,KAAK,OAAO,WAAW;AACvE,eAAO;AAAA,MACT;AAEA,UACE,KAAK,OAAO,aACZ,MAAM,a
AAa,KAAK,OAAO,YAAY,OAAO,MAClD;AACA,eAAO;AAAA,MACT;AAEA,aAAO;AAAA,IACT,SAAS,OAAgB;AACvB,aAAO,KAAK,qCAAqC,KAAK,IAAI,KAAK,KAAK;AACpE,aAAO;AAAA,IACT;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,cACN,UACA,WACA,SACM;AACN,UAAM,WAAW,KAAK,IAAI,IAAI;AAG9B,SAAK,QAAQ,cAAc;AAAA,MACzB;AAAA,OACC,KAAK,QAAQ,cAAc,IAAI,QAAQ,KAAK,KAAK;AAAA,IACpD;AAEA,QAAI,SAAS;AAEX,YAAM,aAAa,KAAK,QAAQ,cAAc,IAAI,QAAQ,KAAK;AAC/D,YAAM,QAAQ,KAAK,QAAQ,cAAc,IAAI,QAAQ,KAAK;AAC1D,YAAM,UAAU,cAAc,QAAQ,KAAK,YAAY;AACvD,WAAK,QAAQ,cAAc,IAAI,UAAU,MAAM;AAG/C,WAAK,QAAQ,kBACV,KAAK,QAAQ,kBAAkB,KAAK,QAAQ,eAAe,KAC1D,YACF,KAAK,QAAQ;AAAA,IACjB,OAAO;AAEL,WAAK,QAAQ,aAAa;AAAA,QACxB;AAAA,SACC,KAAK,QAAQ,aAAa,IAAI,QAAQ,KAAK,KAAK;AAAA,MACnD;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,aAA2B;AAEzB,UAAM,gBAAgB,KAAK,QAAQ;AACnC,UAAM,YAAY,gBAAgB,KAAK,cAAc;AACrD,SAAK,QAAQ,eACX,gBAAgB,IAAI,YAAY,gBAAgB;AAElD,WAAO,EAAE,GAAG,KAAK,QAAQ;AAAA,EAC3B;AAAA;AAAA;AAAA;AAAA,EAKA,WAA0B;AACxB,WAAO,MAAM,KAAK,KAAK,MAAM,OAAO,CAAC,EAAE;AAAA,MACrC,CAAC,GAAG,MAAM,EAAE,WAAW,EAAE;AAAA,IAC3B;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,aAAmB;AACjB,SAAK,cAAc,MAAM;AACzB,WAAO,KAAK,gCAAgC;AAAA,EAC9C;AAAA;AAAA;AAAA;AAAA,EAKA,QAAQ,MAAuC;AAC7C,WAAO,KAAK,MAAM,IAAI,IAAI;AAAA,EAC5B;AACF;",
"names": []
}
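A small behavioral note on `cacheDecision` in the source above: a JavaScript `Map` iterates keys in insertion order, and `get` does not move an entry to the back, so evicting `keys().next().value` removes the oldest-inserted entry. The eviction is therefore first-in-first-out rather than strict LRU. A minimal sketch of the same pattern, using a hypothetical two-entry cache:

```ts
// FIFO eviction via Map insertion order: reads do not refresh an entry's
// position, so the first-inserted key is always the one evicted.
const cache = new Map<string, number>();
const maxSize = 2;

function put(key: string, value: number): void {
  if (cache.size >= maxSize) {
    const oldest = cache.keys().next().value; // first-inserted key
    if (oldest !== undefined) cache.delete(oldest);
  }
  cache.set(key, value);
}

put('a', 1);
put('b', 2);
cache.get('a'); // reading 'a' does not protect it from eviction
put('c', 3); // evicts 'a', not 'b'
console.log([...cache.keys()]); // ['b', 'c']
```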