@stackmemoryai/stackmemory 0.5.33 → 0.5.35
This diff compares the published contents of two publicly available versions of this package as they appear in their public registry. It is provided for informational purposes only.
- package/dist/agents/core/agent-task-manager.js.map +1 -1
- package/dist/cli/commands/clear.js +1 -1
- package/dist/cli/commands/clear.js.map +1 -1
- package/dist/cli/commands/context.js +1 -1
- package/dist/cli/commands/context.js.map +1 -1
- package/dist/cli/commands/dashboard.js.map +1 -1
- package/dist/cli/commands/discovery.js +1 -1
- package/dist/cli/commands/discovery.js.map +1 -1
- package/dist/cli/commands/handoff.js +1 -1
- package/dist/cli/commands/handoff.js.map +1 -1
- package/dist/cli/commands/monitor.js +1 -1
- package/dist/cli/commands/monitor.js.map +1 -1
- package/dist/cli/commands/quality.js +1 -1
- package/dist/cli/commands/quality.js.map +1 -1
- package/dist/cli/commands/skills.js +1 -1
- package/dist/cli/commands/skills.js.map +1 -1
- package/dist/cli/commands/workflow.js +1 -1
- package/dist/cli/commands/workflow.js.map +1 -1
- package/dist/cli/commands/worktree.js +1 -1
- package/dist/cli/commands/worktree.js.map +1 -1
- package/dist/cli/index.js +1 -1
- package/dist/cli/index.js.map +1 -1
- package/dist/core/context/auto-context.js.map +1 -1
- package/dist/core/context/compaction-handler.js.map +2 -2
- package/dist/core/context/context-bridge.js.map +2 -2
- package/dist/core/context/dual-stack-manager.js +1 -1
- package/dist/core/context/dual-stack-manager.js.map +1 -1
- package/dist/core/context/enhanced-rehydration.js.map +1 -1
- package/dist/core/context/frame-database.js +43 -10
- package/dist/core/context/frame-database.js.map +2 -2
- package/dist/core/context/frame-handoff-manager.js.map +1 -1
- package/dist/core/context/frame-lifecycle-hooks.js +119 -0
- package/dist/core/context/frame-lifecycle-hooks.js.map +7 -0
- package/dist/core/context/frame-stack.js +36 -7
- package/dist/core/context/frame-stack.js.map +2 -2
- package/dist/core/context/incremental-gc.js.map +2 -2
- package/dist/core/context/index.js +4 -22
- package/dist/core/context/index.js.map +2 -2
- package/dist/core/context/refactored-frame-manager.js +170 -37
- package/dist/core/context/refactored-frame-manager.js.map +3 -3
- package/dist/core/context/shared-context-layer.js.map +1 -1
- package/dist/core/context/stack-merge-resolver.js.map +1 -1
- package/dist/core/database/database-adapter.js.map +1 -1
- package/dist/core/database/paradedb-adapter.js.map +1 -1
- package/dist/core/database/query-router.js.map +1 -1
- package/dist/core/database/sqlite-adapter.js.map +1 -1
- package/dist/core/digest/frame-digest-integration.js.map +1 -1
- package/dist/core/digest/hybrid-digest-generator.js.map +1 -1
- package/dist/core/digest/types.js.map +1 -1
- package/dist/core/errors/index.js +249 -0
- package/dist/core/errors/index.js.map +2 -2
- package/dist/core/frame/workflow-templates.js.map +2 -2
- package/dist/core/merge/conflict-detector.js.map +1 -1
- package/dist/core/merge/resolution-engine.js.map +1 -1
- package/dist/core/merge/stack-diff.js.map +1 -1
- package/dist/core/models/model-router.js +10 -1
- package/dist/core/models/model-router.js.map +2 -2
- package/dist/core/monitoring/error-handler.js +37 -270
- package/dist/core/monitoring/error-handler.js.map +3 -3
- package/dist/core/monitoring/session-monitor.js.map +1 -1
- package/dist/core/performance/lazy-context-loader.js.map +1 -1
- package/dist/core/performance/optimized-frame-context.js.map +1 -1
- package/dist/core/retrieval/context-retriever.js.map +1 -1
- package/dist/core/retrieval/graph-retrieval.js.map +1 -1
- package/dist/core/retrieval/hierarchical-retrieval.js.map +1 -1
- package/dist/core/retrieval/llm-context-retrieval.js.map +1 -1
- package/dist/core/retrieval/retrieval-benchmarks.js.map +1 -1
- package/dist/core/retrieval/summary-generator.js.map +1 -1
- package/dist/core/retrieval/types.js.map +1 -1
- package/dist/core/storage/chromadb-adapter.js.map +1 -1
- package/dist/core/storage/infinite-storage.js.map +1 -1
- package/dist/core/storage/two-tier-storage.js.map +1 -1
- package/dist/features/tasks/task-aware-context.js.map +1 -1
- package/dist/features/web/server/index.js +1 -1
- package/dist/features/web/server/index.js.map +1 -1
- package/dist/hooks/schemas.js +50 -0
- package/dist/hooks/schemas.js.map +2 -2
- package/dist/hooks/sms-action-runner.js +47 -1
- package/dist/hooks/sms-action-runner.js.map +2 -2
- package/dist/hooks/sms-notify.js +63 -1
- package/dist/hooks/sms-notify.js.map +2 -2
- package/dist/hooks/sms-webhook.js +10 -3
- package/dist/hooks/sms-webhook.js.map +2 -2
- package/dist/hooks/whatsapp-commands.js +172 -69
- package/dist/hooks/whatsapp-commands.js.map +2 -2
- package/dist/hooks/whatsapp-sync.js +34 -0
- package/dist/hooks/whatsapp-sync.js.map +2 -2
- package/dist/index.js +1 -1
- package/dist/index.js.map +1 -1
- package/dist/integrations/mcp/handlers/context-handlers.js.map +1 -1
- package/dist/integrations/mcp/handlers/discovery-handlers.js.map +1 -1
- package/dist/integrations/mcp/server.js +1 -1
- package/dist/integrations/mcp/server.js.map +1 -1
- package/dist/integrations/ralph/bridge/ralph-stackmemory-bridge.js +1 -1
- package/dist/integrations/ralph/bridge/ralph-stackmemory-bridge.js.map +1 -1
- package/dist/integrations/ralph/context/stackmemory-context-loader.js +1 -1
- package/dist/integrations/ralph/context/stackmemory-context-loader.js.map +1 -1
- package/dist/integrations/ralph/learning/pattern-learner.js +1 -1
- package/dist/integrations/ralph/learning/pattern-learner.js.map +1 -1
- package/dist/integrations/ralph/orchestration/multi-loop-orchestrator.js +1 -1
- package/dist/integrations/ralph/orchestration/multi-loop-orchestrator.js.map +1 -1
- package/dist/integrations/ralph/swarm/swarm-coordinator.js +1 -1
- package/dist/integrations/ralph/swarm/swarm-coordinator.js.map +1 -1
- package/dist/integrations/ralph/visualization/ralph-debugger.js +1 -1
- package/dist/integrations/ralph/visualization/ralph-debugger.js.map +1 -1
- package/dist/mcp/stackmemory-mcp-server.js +1 -1
- package/dist/mcp/stackmemory-mcp-server.js.map +1 -1
- package/dist/skills/claude-skills.js.map +1 -1
- package/dist/skills/recursive-agent-orchestrator.js.map +1 -1
- package/dist/skills/unified-rlm-orchestrator.js.map +1 -1
- package/package.json +1 -1
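The largest line-count swings above are core/errors/index.js (+249) and core/monitoring/error-handler.js (+37 -270), consistent with error classes being consolidated into a core/errors barrel; the adapter source embedded in the sourcemap hunk below imports DatabaseError, ErrorCode, and ValidationError from '../errors/index.js'. As a rough sketch of what such a barrel could look like: only the three names and their call-site signatures are attested in the source below, and the shapes here are assumptions, not the package's actual definitions.

// core/errors/index.ts (hypothetical sketch; names attested at call sites
// in the adapter source, shapes assumed)
export enum ErrorCode {
  DB_CONNECTION_FAILED = 'DB_CONNECTION_FAILED',
  DB_TRANSACTION_FAILED = 'DB_TRANSACTION_FAILED',
  VALIDATION_FAILED = 'VALIDATION_FAILED',
}

export class DatabaseError extends Error {
  constructor(
    message: string,
    public readonly code: ErrorCode
  ) {
    super(message);
    this.name = 'DatabaseError';
  }
}

export class ValidationError extends Error {
  constructor(
    message: string,
    public readonly code: ErrorCode,
    public readonly details?: Record<string, unknown>
  ) {
    super(message);
    this.name = 'ValidationError';
  }
}

// Matching the call sites visible in the embedded adapter source, e.g.:
// throw new DatabaseError('Database not connected', ErrorCode.DB_CONNECTION_FAILED);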
package/dist/core/database/paradedb-adapter.js.map

@@ -1,7 +1,7 @@
  {
    "version": 3,
    "sources": ["../../../src/core/database/paradedb-adapter.ts"],
-
"sourcesContent": ["/**\n * ParadeDB Database Adapter\n * Advanced PostgreSQL with built-in search (BM25) and analytics capabilities\n */\n\nimport { Pool, PoolClient } from 'pg';\nimport {\n FeatureAwareDatabaseAdapter,\n DatabaseFeatures,\n SearchOptions,\n QueryOptions,\n AggregationOptions,\n BulkOperation,\n DatabaseStats,\n} from './database-adapter.js';\nimport type { Frame, Event, Anchor } from '../context/frame-manager.js';\nimport { logger } from '../monitoring/logger.js';\nimport { DatabaseError, ErrorCode, ValidationError } from '../errors/index.js';\n\nexport interface ParadeDBConfig {\n connectionString?: string;\n host?: string;\n port?: number;\n database?: string;\n user?: string;\n password?: string;\n ssl?: boolean | { rejectUnauthorized?: boolean };\n max?: number; // Max pool size\n idleTimeoutMillis?: number;\n connectionTimeoutMillis?: number;\n statementTimeout?: number;\n enableBM25?: boolean;\n enableVector?: boolean;\n enableAnalytics?: boolean;\n}\n\nexport class ParadeDBAdapter extends FeatureAwareDatabaseAdapter {\n private pool: Pool | null = null;\n private activeClient: PoolClient | null = null;\n\n constructor(projectId: string, config: ParadeDBConfig) {\n super(projectId, config);\n }\n\n getFeatures(): DatabaseFeatures {\n const config = this.config as ParadeDBConfig;\n return {\n supportsFullTextSearch: config.enableBM25 !== false,\n supportsVectorSearch: config.enableVector !== false,\n supportsPartitioning: true,\n supportsAnalytics: config.enableAnalytics !== false,\n supportsCompression: true,\n supportsMaterializedViews: true,\n supportsParallelQueries: true,\n };\n }\n\n async connect(): Promise<void> {\n if (this.pool) return;\n\n const config = this.config as ParadeDBConfig;\n\n this.pool = new Pool({\n connectionString: config.connectionString,\n host: config.host || 'localhost',\n port: config.port || 5432,\n database: config.database || 'stackmemory',\n user: config.user,\n password: config.password,\n ssl: config.ssl,\n max: config.max || 20,\n idleTimeoutMillis: config.idleTimeoutMillis || 30000,\n connectionTimeoutMillis: config.connectionTimeoutMillis || 2000,\n statement_timeout: config.statementTimeout || 30000,\n });\n\n // Test connection\n const client = await this.pool.connect();\n try {\n await client.query('SELECT 1');\n logger.info('ParadeDB connected successfully');\n } finally {\n client.release();\n }\n }\n\n async disconnect(): Promise<void> {\n if (!this.pool) return;\n\n await this.pool.end();\n this.pool = null;\n logger.info('ParadeDB disconnected');\n }\n\n isConnected(): boolean {\n return this.pool !== null && !this.pool.ended;\n }\n\n async ping(): Promise<boolean> {\n if (!this.pool) return false;\n\n try {\n const client = await this.pool.connect();\n try {\n await client.query('SELECT 1');\n return true;\n } finally {\n client.release();\n }\n } catch {\n return false;\n }\n }\n\n async initializeSchema(): Promise<void> {\n const client = await this.getClient();\n\n try {\n await client.query('BEGIN');\n\n // Enable required extensions\n await client.query(`\n CREATE EXTENSION IF NOT EXISTS \"uuid-ossp\";\n CREATE EXTENSION IF NOT EXISTS \"pg_trgm\";\n CREATE EXTENSION IF NOT EXISTS \"btree_gin\";\n `);\n\n // Enable ParadeDB extensions if configured\n const config = this.config as ParadeDBConfig;\n\n if (config.enableBM25 !== false) {\n await client.query('CREATE EXTENSION IF NOT EXISTS pg_search;');\n }\n\n if (config.enableVector !== false) {\n await client.query('CREATE EXTENSION IF NOT EXISTS vector;');\n 
}\n\n if (config.enableAnalytics !== false) {\n await client.query('CREATE EXTENSION IF NOT EXISTS pg_analytics;');\n }\n\n // Create main tables with partitioning support\n await client.query(`\n -- Main frames table\n CREATE TABLE IF NOT EXISTS frames (\n frame_id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),\n run_id UUID NOT NULL,\n project_id TEXT NOT NULL,\n parent_frame_id UUID REFERENCES frames(frame_id) ON DELETE CASCADE,\n depth INTEGER NOT NULL DEFAULT 0,\n type TEXT NOT NULL,\n name TEXT NOT NULL,\n state TEXT DEFAULT 'active',\n score FLOAT DEFAULT 0.5,\n inputs JSONB DEFAULT '{}',\n outputs JSONB DEFAULT '{}',\n metadata JSONB DEFAULT '{}',\n digest_text TEXT,\n digest_json JSONB DEFAULT '{}',\n content TEXT, -- For full-text search\n embedding vector(768), -- For vector search\n created_at TIMESTAMPTZ DEFAULT NOW(),\n closed_at TIMESTAMPTZ,\n CONSTRAINT check_state CHECK (state IN ('active', 'closed', 'suspended'))\n ) PARTITION BY RANGE (created_at);\n\n -- Create partitions for time-based data\n CREATE TABLE IF NOT EXISTS frames_recent PARTITION OF frames\n FOR VALUES FROM (NOW() - INTERVAL '30 days') TO (NOW() + INTERVAL '1 day');\n \n CREATE TABLE IF NOT EXISTS frames_archive PARTITION OF frames\n FOR VALUES FROM ('2020-01-01') TO (NOW() - INTERVAL '30 days');\n\n -- Events table\n CREATE TABLE IF NOT EXISTS events (\n event_id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),\n run_id UUID NOT NULL,\n frame_id UUID NOT NULL REFERENCES frames(frame_id) ON DELETE CASCADE,\n seq INTEGER NOT NULL,\n event_type TEXT NOT NULL,\n payload JSONB NOT NULL DEFAULT '{}',\n ts TIMESTAMPTZ DEFAULT NOW()\n );\n\n -- Anchors table\n CREATE TABLE IF NOT EXISTS anchors (\n anchor_id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),\n frame_id UUID NOT NULL REFERENCES frames(frame_id) ON DELETE CASCADE,\n project_id TEXT NOT NULL,\n type TEXT NOT NULL,\n text TEXT NOT NULL,\n priority INTEGER DEFAULT 0,\n metadata JSONB DEFAULT '{}',\n created_at TIMESTAMPTZ DEFAULT NOW()\n );\n\n -- Schema version tracking\n CREATE TABLE IF NOT EXISTS schema_version (\n version INTEGER PRIMARY KEY,\n applied_at TIMESTAMPTZ DEFAULT NOW(),\n description TEXT\n );\n `);\n\n // Create indexes for performance\n await client.query(`\n -- Standard B-tree indexes\n CREATE INDEX IF NOT EXISTS idx_frames_run_id ON frames USING btree(run_id);\n CREATE INDEX IF NOT EXISTS idx_frames_project_id ON frames USING btree(project_id);\n CREATE INDEX IF NOT EXISTS idx_frames_parent ON frames USING btree(parent_frame_id);\n CREATE INDEX IF NOT EXISTS idx_frames_state ON frames USING btree(state);\n CREATE INDEX IF NOT EXISTS idx_frames_type ON frames USING btree(type);\n CREATE INDEX IF NOT EXISTS idx_frames_created_at ON frames USING btree(created_at DESC);\n CREATE INDEX IF NOT EXISTS idx_frames_score ON frames USING btree(score DESC);\n\n -- GIN indexes for JSONB\n CREATE INDEX IF NOT EXISTS idx_frames_inputs ON frames USING gin(inputs);\n CREATE INDEX IF NOT EXISTS idx_frames_outputs ON frames USING gin(outputs);\n CREATE INDEX IF NOT EXISTS idx_frames_metadata ON frames USING gin(metadata);\n CREATE INDEX IF NOT EXISTS idx_frames_digest ON frames USING gin(digest_json);\n\n -- Trigram index for fuzzy text search\n CREATE INDEX IF NOT EXISTS idx_frames_name_trgm ON frames USING gin(name gin_trgm_ops);\n CREATE INDEX IF NOT EXISTS idx_frames_content_trgm ON frames USING gin(content gin_trgm_ops);\n\n -- Event indexes\n CREATE INDEX IF NOT EXISTS idx_events_frame ON events USING btree(frame_id);\n CREATE INDEX IF NOT EXISTS 
idx_events_seq ON events USING btree(frame_id, seq);\n CREATE INDEX IF NOT EXISTS idx_events_type ON events USING btree(event_type);\n CREATE INDEX IF NOT EXISTS idx_events_ts ON events USING btree(ts DESC);\n\n -- Anchor indexes\n CREATE INDEX IF NOT EXISTS idx_anchors_frame ON anchors USING btree(frame_id);\n CREATE INDEX IF NOT EXISTS idx_anchors_type ON anchors USING btree(type);\n CREATE INDEX IF NOT EXISTS idx_anchors_priority ON anchors USING btree(priority DESC);\n `);\n\n // Create BM25 search index if enabled\n if (config.enableBM25 !== false) {\n await client.query(`\n -- Create BM25 index for full-text search\n CALL paradedb.create_bm25_test_table(\n index_name => 'frames_search_idx',\n table_name => 'frames',\n schema_name => 'public',\n key_field => 'frame_id',\n text_fields => paradedb.field('name') || \n paradedb.field('content') || \n paradedb.field('digest_text'),\n numeric_fields => paradedb.field('score') || \n paradedb.field('depth'),\n json_fields => paradedb.field('metadata', flatten => true),\n datetime_fields => paradedb.field('created_at')\n );\n `);\n }\n\n // Create vector index if enabled\n if (config.enableVector !== false) {\n await client.query(`\n -- HNSW index for vector similarity search\n CREATE INDEX IF NOT EXISTS idx_frames_embedding \n ON frames USING hnsw (embedding vector_cosine_ops)\n WITH (m = 16, ef_construction = 64);\n `);\n }\n\n // Create materialized views for patterns\n await client.query(`\n CREATE MATERIALIZED VIEW IF NOT EXISTS pattern_summary AS\n WITH pattern_extraction AS (\n SELECT \n project_id,\n type as pattern_type,\n metadata->>'error' as error_pattern,\n COUNT(*) as frequency,\n MAX(score) as max_score,\n MAX(created_at) as last_seen,\n MIN(created_at) as first_seen\n FROM frames\n WHERE created_at > NOW() - INTERVAL '30 days'\n GROUP BY project_id, pattern_type, error_pattern\n )\n SELECT * FROM pattern_extraction\n WHERE frequency > 3;\n\n CREATE UNIQUE INDEX IF NOT EXISTS idx_pattern_summary_unique \n ON pattern_summary(project_id, pattern_type, error_pattern);\n `);\n\n // Set initial schema version\n await client.query(`\n INSERT INTO schema_version (version, description) \n VALUES (1, 'Initial ParadeDB schema with search and analytics')\n ON CONFLICT (version) DO NOTHING;\n `);\n\n await client.query('COMMIT');\n logger.info('ParadeDB schema initialized successfully');\n } catch (error: unknown) {\n await client.query('ROLLBACK');\n throw error;\n } finally {\n this.releaseClient(client);\n }\n }\n\n async migrateSchema(targetVersion: number): Promise<void> {\n const client = await this.getClient();\n\n try {\n const result = await client.query(\n 'SELECT MAX(version) as version FROM schema_version'\n );\n const currentVersion = result.rows[0]?.version || 0;\n\n if (currentVersion >= targetVersion) {\n logger.info('Schema already at target version', {\n currentVersion,\n targetVersion,\n });\n return;\n }\n\n // Apply migrations sequentially\n for (let v = currentVersion + 1; v <= targetVersion; v++) {\n logger.info(`Applying migration to version ${v}`);\n // Migration logic would go here based on version\n await client.query(\n 'INSERT INTO schema_version (version, description) VALUES ($1, $2)',\n [v, `Migration to version ${v}`]\n );\n }\n } finally {\n this.releaseClient(client);\n }\n }\n\n async getSchemaVersion(): Promise<number> {\n const client = await this.getClient();\n\n try {\n const result = await client.query(\n 'SELECT MAX(version) as version FROM schema_version'\n );\n return result.rows[0]?.version || 
0;\n } finally {\n this.releaseClient(client);\n }\n }\n\n // Frame operations\n async createFrame(frame: Partial<Frame>): Promise<string> {\n const client = await this.getClient();\n\n try {\n const result = await client.query(\n `\n INSERT INTO frames (\n frame_id, run_id, project_id, parent_frame_id, depth,\n type, name, state, score, inputs, outputs, metadata,\n digest_text, digest_json, content\n ) VALUES (\n COALESCE($1::uuid, uuid_generate_v4()), $2, $3, $4, $5,\n $6, $7, $8, $9, $10, $11, $12, $13, $14, $15\n ) RETURNING frame_id\n `,\n [\n frame.frame_id || null,\n frame.run_id,\n frame.project_id || this.projectId,\n frame.parent_frame_id || null,\n frame.depth || 0,\n frame.type,\n frame.name,\n frame.state || 'active',\n frame.score || 0.5,\n JSON.stringify(frame.inputs || {}),\n JSON.stringify(frame.outputs || {}),\n JSON.stringify(frame.metadata || {}),\n frame.digest_text || null,\n JSON.stringify(frame.digest_json || {}),\n frame.content || `${frame.name} ${frame.digest_text || ''}`,\n ]\n );\n\n return result.rows[0].frame_id;\n } finally {\n this.releaseClient(client);\n }\n }\n\n async getFrame(frameId: string): Promise<Frame | null> {\n const client = await this.getClient();\n\n try {\n const result = await client.query(\n 'SELECT * FROM frames WHERE frame_id = $1',\n [frameId]\n );\n\n if (result.rows.length === 0) return null;\n\n const row = result.rows[0];\n return {\n ...row,\n frame_id: row.frame_id,\n run_id: row.run_id,\n created_at: row.created_at.getTime(),\n closed_at: row.closed_at?.getTime(),\n };\n } finally {\n this.releaseClient(client);\n }\n }\n\n async updateFrame(frameId: string, updates: Partial<Frame>): Promise<void> {\n const client = await this.getClient();\n\n try {\n const fields = [];\n const values = [];\n let paramCount = 1;\n\n if (updates.state !== undefined) {\n fields.push(`state = $${paramCount++}`);\n values.push(updates.state);\n }\n\n if (updates.outputs !== undefined) {\n fields.push(`outputs = $${paramCount++}`);\n values.push(JSON.stringify(updates.outputs));\n }\n\n if (updates.score !== undefined) {\n fields.push(`score = $${paramCount++}`);\n values.push(updates.score);\n }\n\n if (updates.digest_text !== undefined) {\n fields.push(`digest_text = $${paramCount++}`);\n values.push(updates.digest_text);\n }\n\n if (updates.digest_json !== undefined) {\n fields.push(`digest_json = $${paramCount++}`);\n values.push(JSON.stringify(updates.digest_json));\n }\n\n if (updates.closed_at !== undefined) {\n fields.push(`closed_at = $${paramCount++}`);\n values.push(new Date(updates.closed_at));\n }\n\n if (fields.length === 0) return;\n\n values.push(frameId);\n\n await client.query(\n `\n UPDATE frames SET ${fields.join(', ')} WHERE frame_id = $${paramCount}\n `,\n values\n );\n } finally {\n this.releaseClient(client);\n }\n }\n\n async deleteFrame(frameId: string): Promise<void> {\n const client = await this.getClient();\n\n try {\n // CASCADE delete handles events and anchors\n await client.query('DELETE FROM frames WHERE frame_id = $1', [frameId]);\n } finally {\n this.releaseClient(client);\n }\n }\n\n async getActiveFrames(runId?: string): Promise<Frame[]> {\n const client = await this.getClient();\n\n try {\n let query = 'SELECT * FROM frames WHERE state = $1';\n const params: any[] = ['active'];\n\n if (runId) {\n query += ' AND run_id = $2';\n params.push(runId);\n }\n\n query += ' ORDER BY depth ASC, created_at ASC';\n\n const result = await client.query(query, params);\n\n return result.rows.map((row) => ({\n ...row,\n 
created_at: row.created_at.getTime(),\n closed_at: row.closed_at?.getTime(),\n }));\n } finally {\n this.releaseClient(client);\n }\n }\n\n async closeFrame(frameId: string, outputs?: any): Promise<void> {\n await this.updateFrame(frameId, {\n state: 'closed',\n outputs,\n closed_at: Date.now(),\n });\n }\n\n // Event operations\n async createEvent(event: Partial<Event>): Promise<string> {\n const client = await this.getClient();\n\n try {\n const result = await client.query(\n `\n INSERT INTO events (event_id, run_id, frame_id, seq, event_type, payload, ts)\n VALUES (COALESCE($1::uuid, uuid_generate_v4()), $2, $3, $4, $5, $6, $7)\n RETURNING event_id\n `,\n [\n event.event_id || null,\n event.run_id,\n event.frame_id,\n event.seq || 0,\n event.event_type,\n JSON.stringify(event.payload || {}),\n event.ts ? new Date(event.ts) : new Date(),\n ]\n );\n\n return result.rows[0].event_id;\n } finally {\n this.releaseClient(client);\n }\n }\n\n async getFrameEvents(\n frameId: string,\n options?: QueryOptions\n ): Promise<Event[]> {\n const client = await this.getClient();\n\n try {\n let query = 'SELECT * FROM events WHERE frame_id = $1';\n const params: any[] = [frameId];\n\n query += this.buildOrderByClause(\n options?.orderBy || 'seq',\n options?.orderDirection\n );\n query += this.buildLimitClause(options?.limit, options?.offset);\n\n const result = await client.query(query, params);\n\n return result.rows.map((row) => ({\n ...row,\n ts: row.ts.getTime(),\n }));\n } finally {\n this.releaseClient(client);\n }\n }\n\n async deleteFrameEvents(frameId: string): Promise<void> {\n const client = await this.getClient();\n\n try {\n await client.query('DELETE FROM events WHERE frame_id = $1', [frameId]);\n } finally {\n this.releaseClient(client);\n }\n }\n\n // Anchor operations\n async createAnchor(anchor: Partial<Anchor>): Promise<string> {\n const client = await this.getClient();\n\n try {\n const result = await client.query(\n `\n INSERT INTO anchors (anchor_id, frame_id, project_id, type, text, priority, metadata)\n VALUES (COALESCE($1::uuid, uuid_generate_v4()), $2, $3, $4, $5, $6, $7)\n RETURNING anchor_id\n `,\n [\n anchor.anchor_id || null,\n anchor.frame_id,\n anchor.project_id || this.projectId,\n anchor.type,\n anchor.text,\n anchor.priority || 0,\n JSON.stringify(anchor.metadata || {}),\n ]\n );\n\n return result.rows[0].anchor_id;\n } finally {\n this.releaseClient(client);\n }\n }\n\n async getFrameAnchors(frameId: string): Promise<Anchor[]> {\n const client = await this.getClient();\n\n try {\n const result = await client.query(\n `\n SELECT * FROM anchors WHERE frame_id = $1 \n ORDER BY priority DESC, created_at ASC\n `,\n [frameId]\n );\n\n return result.rows.map((row) => ({\n ...row,\n created_at: row.created_at.getTime(),\n }));\n } finally {\n this.releaseClient(client);\n }\n }\n\n async deleteFrameAnchors(frameId: string): Promise<void> {\n const client = await this.getClient();\n\n try {\n await client.query('DELETE FROM anchors WHERE frame_id = $1', [frameId]);\n } finally {\n this.releaseClient(client);\n }\n }\n\n // Advanced search with BM25\n async search(\n options: SearchOptions\n ): Promise<Array<Frame & { score: number }>> {\n const client = await this.getClient();\n\n try {\n const config = this.config as ParadeDBConfig;\n\n if (config.enableBM25 !== false) {\n // Use ParadeDB BM25 search\n const result = await client.query(\n `\n SELECT f.*, s.score_bm25 as score\n FROM frames_search_idx.search(\n query => $1,\n limit_rows => $2,\n offset_rows => $3\n ) s\n JOIN 
frames f ON f.frame_id = s.frame_id\n WHERE ($4::float IS NULL OR s.score_bm25 >= $4)\n ORDER BY s.score_bm25 DESC\n `,\n [\n options.query,\n options.limit || 100,\n options.offset || 0,\n options.scoreThreshold || null,\n ]\n );\n\n return result.rows.map((row) => ({\n ...row,\n created_at: row.created_at.getTime(),\n closed_at: row.closed_at?.getTime(),\n }));\n } else {\n // Fallback to PostgreSQL full-text search\n const result = await client.query(\n `\n SELECT *,\n ts_rank(\n to_tsvector('english', COALESCE(name, '') || ' ' || COALESCE(content, '')),\n plainto_tsquery('english', $1)\n ) as score\n FROM frames\n WHERE to_tsvector('english', COALESCE(name, '') || ' ' || COALESCE(content, ''))\n @@ plainto_tsquery('english', $1)\n ORDER BY score DESC\n LIMIT $2 OFFSET $3\n `,\n [options.query, options.limit || 100, options.offset || 0]\n );\n\n return result.rows.map((row) => ({\n ...row,\n created_at: row.created_at.getTime(),\n closed_at: row.closed_at?.getTime(),\n }));\n }\n } finally {\n this.releaseClient(client);\n }\n }\n\n // Vector similarity search\n async searchByVector(\n embedding: number[],\n options?: QueryOptions\n ): Promise<Array<Frame & { similarity: number }>> {\n const client = await this.getClient();\n\n try {\n const config = this.config as ParadeDBConfig;\n\n if (config.enableVector === false) {\n logger.warn('Vector search not enabled in ParadeDB configuration');\n return [];\n }\n\n const result = await client.query(\n `\n SELECT *,\n 1 - (embedding <=> $1::vector) as similarity\n FROM frames\n WHERE embedding IS NOT NULL\n ORDER BY embedding <=> $1::vector\n LIMIT $2 OFFSET $3\n `,\n [\n `[${embedding.join(',')}]`,\n options?.limit || 100,\n options?.offset || 0,\n ]\n );\n\n return result.rows.map((row) => ({\n ...row,\n created_at: row.created_at.getTime(),\n closed_at: row.closed_at?.getTime(),\n }));\n } finally {\n this.releaseClient(client);\n }\n }\n\n // Hybrid search combining BM25 and vector\n async searchHybrid(\n textQuery: string,\n embedding: number[],\n weights?: { text: number; vector: number }\n ): Promise<Array<Frame & { score: number }>> {\n const client = await this.getClient();\n\n try {\n const textWeight = weights?.text || 0.6;\n const vectorWeight = weights?.vector || 0.4;\n\n const result = await client.query(\n `\n WITH bm25_results AS (\n SELECT frame_id, score_bm25\n FROM frames_search_idx.search(\n query => $1,\n limit_rows => 200\n )\n ),\n vector_results AS (\n SELECT frame_id,\n 1 - (embedding <=> $2::vector) as score_vector\n FROM frames\n WHERE embedding IS NOT NULL\n ORDER BY embedding <=> $2::vector\n LIMIT 200\n )\n SELECT f.*,\n (COALESCE(b.score_bm25, 0) * $3 + \n COALESCE(v.score_vector, 0) * $4) as score\n FROM frames f\n LEFT JOIN bm25_results b ON f.frame_id = b.frame_id\n LEFT JOIN vector_results v ON f.frame_id = v.frame_id\n WHERE b.frame_id IS NOT NULL OR v.frame_id IS NOT NULL\n ORDER BY score DESC\n LIMIT $5\n `,\n [textQuery, `[${embedding.join(',')}]`, textWeight, vectorWeight, 100]\n );\n\n return result.rows.map((row) => ({\n ...row,\n created_at: row.created_at.getTime(),\n closed_at: row.closed_at?.getTime(),\n }));\n } finally {\n this.releaseClient(client);\n }\n }\n\n // Advanced aggregation\n async aggregate(\n table: string,\n options: AggregationOptions\n ): Promise<Record<string, any>[]> {\n const client = await this.getClient();\n\n try {\n const metrics = options.metrics\n .map((m) => {\n const alias = m.alias || `${m.operation}_${m.field}`;\n return `${m.operation}(${m.field}) AS 
\"${alias}\"`;\n })\n .join(', ');\n\n let query = `\n SELECT ${options.groupBy.map((g) => `\"${g}\"`).join(', ')}, ${metrics}\n FROM ${table}\n GROUP BY ${options.groupBy.map((g) => `\"${g}\"`).join(', ')}\n `;\n\n if (options.having) {\n const havingClauses = Object.entries(options.having).map(\n ([key, value], i) => {\n return `${key} ${typeof value === 'object' ? value.op : '='} $${i + 1}`;\n }\n );\n query += ` HAVING ${havingClauses.join(' AND ')}`;\n }\n\n const result = await client.query(\n query,\n Object.values(options.having || {})\n );\n return result.rows;\n } finally {\n this.releaseClient(client);\n }\n }\n\n // Pattern detection with analytics\n async detectPatterns(timeRange?: { start: Date; end: Date }): Promise<\n Array<{\n pattern: string;\n type: string;\n frequency: number;\n lastSeen: Date;\n }>\n > {\n const client = await this.getClient();\n\n try {\n // Use materialized view for better performance\n const result = await client.query(\n `\n SELECT \n COALESCE(error_pattern, pattern_type) as pattern,\n pattern_type as type,\n frequency,\n last_seen\n FROM pattern_summary\n WHERE project_id = $1\n AND ($2::timestamptz IS NULL OR last_seen >= $2)\n AND ($3::timestamptz IS NULL OR first_seen <= $3)\n ORDER BY frequency DESC, last_seen DESC\n LIMIT 100\n `,\n [this.projectId, timeRange?.start || null, timeRange?.end || null]\n );\n\n return result.rows.map((row) => ({\n pattern: row.pattern,\n type: row.type,\n frequency: row.frequency,\n lastSeen: row.last_seen,\n }));\n } finally {\n this.releaseClient(client);\n }\n }\n\n // Bulk operations\n async executeBulk(operations: BulkOperation[]): Promise<void> {\n await this.inTransaction(async () => {\n const client = this.activeClient!;\n\n for (const op of operations) {\n switch (op.type) {\n case 'insert': {\n const cols = Object.keys(op.data);\n const values = Object.values(op.data);\n const placeholders = values.map((_, i) => `$${i + 1}`).join(',');\n\n await client.query(\n `INSERT INTO ${op.table} (${cols.join(',')}) VALUES (${placeholders})`,\n values\n );\n break;\n }\n\n case 'update': {\n const sets = Object.keys(op.data)\n .map((k, i) => `${k} = $${i + 1}`)\n .join(',');\n const whereClause = this.buildWhereClausePostgres(\n op.where || {},\n Object.keys(op.data).length\n );\n const values = [\n ...Object.values(op.data),\n ...Object.values(op.where || {}),\n ];\n\n await client.query(\n `UPDATE ${op.table} SET ${sets} ${whereClause}`,\n values\n );\n break;\n }\n\n case 'delete': {\n const whereClause = this.buildWhereClausePostgres(\n op.where || {},\n 0\n );\n await client.query(\n `DELETE FROM ${op.table} ${whereClause}`,\n Object.values(op.where || {})\n );\n break;\n }\n }\n }\n });\n }\n\n async vacuum(): Promise<void> {\n const client = await this.getClient();\n\n try {\n await client.query('VACUUM ANALYZE frames');\n await client.query('VACUUM ANALYZE events');\n await client.query('VACUUM ANALYZE anchors');\n\n // Refresh materialized views\n await client.query(\n 'REFRESH MATERIALIZED VIEW CONCURRENTLY pattern_summary'\n );\n\n logger.info('ParadeDB vacuum and analyze completed');\n } finally {\n this.releaseClient(client);\n }\n }\n\n async analyze(): Promise<void> {\n const client = await this.getClient();\n\n try {\n await client.query('ANALYZE frames');\n await client.query('ANALYZE events');\n await client.query('ANALYZE anchors');\n logger.info('ParadeDB analyze completed');\n } finally {\n this.releaseClient(client);\n }\n }\n\n // Statistics\n async getStats(): Promise<DatabaseStats> {\n 
const client = await this.getClient();\n\n try {\n const result = await client.query(`\n SELECT\n (SELECT COUNT(*) FROM frames) as total_frames,\n (SELECT COUNT(*) FROM frames WHERE state = 'active') as active_frames,\n (SELECT COUNT(*) FROM events) as total_events,\n (SELECT COUNT(*) FROM anchors) as total_anchors,\n pg_database_size(current_database()) as disk_usage\n `);\n\n return {\n totalFrames: parseInt(result.rows[0].total_frames),\n activeFrames: parseInt(result.rows[0].active_frames),\n totalEvents: parseInt(result.rows[0].total_events),\n totalAnchors: parseInt(result.rows[0].total_anchors),\n diskUsage: parseInt(result.rows[0].disk_usage),\n };\n } finally {\n this.releaseClient(client);\n }\n }\n\n async getQueryStats(): Promise<\n Array<{\n query: string;\n calls: number;\n meanTime: number;\n totalTime: number;\n }>\n > {\n const client = await this.getClient();\n\n try {\n const result = await client.query(`\n SELECT \n query,\n calls,\n mean_exec_time as mean_time,\n total_exec_time as total_time\n FROM pg_stat_statements\n WHERE query NOT LIKE '%pg_stat_statements%'\n ORDER BY total_exec_time DESC\n LIMIT 100\n `);\n\n return result.rows.map((row) => ({\n query: row.query,\n calls: parseInt(row.calls),\n meanTime: parseFloat(row.mean_time),\n totalTime: parseFloat(row.total_time),\n }));\n } catch (error: unknown) {\n logger.warn('pg_stat_statements not available', error);\n return [];\n } finally {\n this.releaseClient(client);\n }\n }\n\n // Transaction support\n async beginTransaction(): Promise<void> {\n this.activeClient = await this.pool!.connect();\n await this.activeClient.query('BEGIN');\n }\n\n async commitTransaction(): Promise<void> {\n if (!this.activeClient)\n throw new DatabaseError(\n 'No active transaction',\n ErrorCode.DB_TRANSACTION_FAILED\n );\n\n await this.activeClient.query('COMMIT');\n this.activeClient.release();\n this.activeClient = null;\n }\n\n async rollbackTransaction(): Promise<void> {\n if (!this.activeClient)\n throw new DatabaseError(\n 'No active transaction',\n ErrorCode.DB_TRANSACTION_FAILED\n );\n\n await this.activeClient.query('ROLLBACK');\n this.activeClient.release();\n this.activeClient = null;\n }\n\n async inTransaction(\n callback: (adapter: DatabaseAdapter) => Promise<void>\n ): Promise<void> {\n await this.beginTransaction();\n\n try {\n await callback(this);\n await this.commitTransaction();\n } catch (error: unknown) {\n try {\n await this.rollbackTransaction();\n } catch (rollbackError: unknown) {\n // Log rollback failure but don't mask original error\n console.error('Transaction rollback failed:', rollbackError);\n // Connection might be in bad state - mark as unusable if connection pool exists\n if (this.connectionPool) {\n this.connectionPool.markConnectionAsBad(this.client);\n }\n }\n throw error;\n }\n }\n\n // Export/Import\n async exportData(\n tables: string[],\n format: 'json' | 'parquet' | 'csv'\n ): Promise<Buffer> {\n const client = await this.getClient();\n\n try {\n if (format === 'json') {\n const data: Record<string, any[]> = {};\n\n for (const table of tables) {\n const result = await client.query(`SELECT * FROM ${table}`);\n data[table] = result.rows;\n }\n\n return Buffer.from(JSON.stringify(data, null, 2));\n } else if (format === 'csv') {\n // Export as CSV using COPY\n const chunks: string[] = [];\n\n for (const table of tables) {\n const result = await client.query(`\n COPY (SELECT * FROM ${table}) TO STDOUT WITH CSV HEADER\n `);\n chunks.push(result.toString());\n }\n\n return 
Buffer.from(chunks.join('\\n\\n'));\n } else {\n throw new ValidationError(\n `Format ${format} not yet implemented for ParadeDB export`,\n ErrorCode.VALIDATION_FAILED,\n { format, supportedFormats: ['json'] }\n );\n }\n } finally {\n this.releaseClient(client);\n }\n }\n\n async importData(\n data: Buffer,\n format: 'json' | 'parquet' | 'csv',\n options?: { truncate?: boolean; upsert?: boolean }\n ): Promise<void> {\n const client = await this.getClient();\n\n try {\n if (format === 'json') {\n const parsed = JSON.parse(data.toString());\n\n await client.query('BEGIN');\n\n for (const [table, rows] of Object.entries(parsed)) {\n if (options?.truncate) {\n await client.query(`TRUNCATE TABLE ${table} CASCADE`);\n }\n\n for (const row of rows as any[]) {\n const cols = Object.keys(row);\n const values = Object.values(row);\n const placeholders = values.map((_, i) => `$${i + 1}`).join(',');\n\n if (options?.upsert) {\n const updates = cols.map((c) => `${c} = EXCLUDED.${c}`).join(',');\n await client.query(\n `INSERT INTO ${table} (${cols.join(',')}) VALUES (${placeholders})\n ON CONFLICT DO UPDATE SET ${updates}`,\n values\n );\n } else {\n await client.query(\n `INSERT INTO ${table} (${cols.join(',')}) VALUES (${placeholders})`,\n values\n );\n }\n }\n }\n\n await client.query('COMMIT');\n } else {\n throw new ValidationError(\n `Format ${format} not yet implemented for ParadeDB import`,\n ErrorCode.VALIDATION_FAILED,\n { format, supportedFormats: ['json'] }\n );\n }\n } catch (error: unknown) {\n await client.query('ROLLBACK');\n throw error;\n } finally {\n this.releaseClient(client);\n }\n }\n\n // Helper methods\n private async getClient(): Promise<PoolClient> {\n if (this.activeClient) {\n return this.activeClient;\n }\n\n if (!this.pool) {\n throw new DatabaseError(\n 'Database not connected',\n ErrorCode.DB_CONNECTION_FAILED\n );\n }\n\n return await this.pool.connect();\n }\n\n private releaseClient(client: PoolClient): void {\n if (client !== this.activeClient) {\n client.release();\n }\n }\n\n private buildWhereClausePostgres(\n conditions: Record<string, any>,\n startParam: number\n ): string {\n const clauses = Object.entries(conditions).map(([key, value], i) => {\n const paramNum = startParam + i + 1;\n\n if (value === null) {\n return `${key} IS NULL`;\n } else if (Array.isArray(value)) {\n const placeholders = value.map((_, j) => `$${paramNum + j}`).join(',');\n return `${key} IN (${placeholders})`;\n } else {\n return `${key} = $${paramNum}`;\n }\n });\n\n return clauses.length > 0 ? `WHERE ${clauses.join(' AND ')}` : '';\n }\n}\n"],
+
"sourcesContent": ["/**\n * ParadeDB Database Adapter\n ... (in the portion shown, identical to the removed string above except that the type import now reads import type { Frame, Event, Anchor } from '../context/index.js'; rather than from '../context/frame-manager.js'; the remainder of the new string is truncated in this view) ..."],
else {\n throw new ValidationError(\n `Format ${format} not yet implemented for ParadeDB export`,\n ErrorCode.VALIDATION_FAILED,\n { format, supportedFormats: ['json'] }\n );\n }\n } finally {\n this.releaseClient(client);\n }\n }\n\n async importData(\n data: Buffer,\n format: 'json' | 'parquet' | 'csv',\n options?: { truncate?: boolean; upsert?: boolean }\n ): Promise<void> {\n const client = await this.getClient();\n\n try {\n if (format === 'json') {\n const parsed = JSON.parse(data.toString());\n\n await client.query('BEGIN');\n\n for (const [table, rows] of Object.entries(parsed)) {\n if (options?.truncate) {\n await client.query(`TRUNCATE TABLE ${table} CASCADE`);\n }\n\n for (const row of rows as any[]) {\n const cols = Object.keys(row);\n const values = Object.values(row);\n const placeholders = values.map((_, i) => `$${i + 1}`).join(',');\n\n if (options?.upsert) {\n const updates = cols.map((c) => `${c} = EXCLUDED.${c}`).join(',');\n await client.query(\n `INSERT INTO ${table} (${cols.join(',')}) VALUES (${placeholders})\n ON CONFLICT DO UPDATE SET ${updates}`,\n values\n );\n } else {\n await client.query(\n `INSERT INTO ${table} (${cols.join(',')}) VALUES (${placeholders})`,\n values\n );\n }\n }\n }\n\n await client.query('COMMIT');\n } else {\n throw new ValidationError(\n `Format ${format} not yet implemented for ParadeDB import`,\n ErrorCode.VALIDATION_FAILED,\n { format, supportedFormats: ['json'] }\n );\n }\n } catch (error: unknown) {\n await client.query('ROLLBACK');\n throw error;\n } finally {\n this.releaseClient(client);\n }\n }\n\n // Helper methods\n private async getClient(): Promise<PoolClient> {\n if (this.activeClient) {\n return this.activeClient;\n }\n\n if (!this.pool) {\n throw new DatabaseError(\n 'Database not connected',\n ErrorCode.DB_CONNECTION_FAILED\n );\n }\n\n return await this.pool.connect();\n }\n\n private releaseClient(client: PoolClient): void {\n if (client !== this.activeClient) {\n client.release();\n }\n }\n\n private buildWhereClausePostgres(\n conditions: Record<string, any>,\n startParam: number\n ): string {\n const clauses = Object.entries(conditions).map(([key, value], i) => {\n const paramNum = startParam + i + 1;\n\n if (value === null) {\n return `${key} IS NULL`;\n } else if (Array.isArray(value)) {\n const placeholders = value.map((_, j) => `$${paramNum + j}`).join(',');\n return `${key} IN (${placeholders})`;\n } else {\n return `${key} = $${paramNum}`;\n }\n });\n\n return clauses.length > 0 ? `WHERE ${clauses.join(' AND ')}` : '';\n }\n}\n"],
"mappings": ";;;;AAKA,SAAS,YAAwB;AACjC;AAAA,EACE;AAAA,OAOK;AAEP,SAAS,cAAc;AACvB,SAAS,eAAe,WAAW,uBAAuB;AAmBnD,MAAM,wBAAwB,4BAA4B;AAAA,EACvD,OAAoB;AAAA,EACpB,eAAkC;AAAA,EAE1C,YAAY,WAAmB,QAAwB;AACrD,UAAM,WAAW,MAAM;AAAA,EACzB;AAAA,EAEA,cAAgC;AAC9B,UAAM,SAAS,KAAK;AACpB,WAAO;AAAA,MACL,wBAAwB,OAAO,eAAe;AAAA,MAC9C,sBAAsB,OAAO,iBAAiB;AAAA,MAC9C,sBAAsB;AAAA,MACtB,mBAAmB,OAAO,oBAAoB;AAAA,MAC9C,qBAAqB;AAAA,MACrB,2BAA2B;AAAA,MAC3B,yBAAyB;AAAA,IAC3B;AAAA,EACF;AAAA,EAEA,MAAM,UAAyB;AAC7B,QAAI,KAAK,KAAM;AAEf,UAAM,SAAS,KAAK;AAEpB,SAAK,OAAO,IAAI,KAAK;AAAA,MACnB,kBAAkB,OAAO;AAAA,MACzB,MAAM,OAAO,QAAQ;AAAA,MACrB,MAAM,OAAO,QAAQ;AAAA,MACrB,UAAU,OAAO,YAAY;AAAA,MAC7B,MAAM,OAAO;AAAA,MACb,UAAU,OAAO;AAAA,MACjB,KAAK,OAAO;AAAA,MACZ,KAAK,OAAO,OAAO;AAAA,MACnB,mBAAmB,OAAO,qBAAqB;AAAA,MAC/C,yBAAyB,OAAO,2BAA2B;AAAA,MAC3D,mBAAmB,OAAO,oBAAoB;AAAA,IAChD,CAAC;AAGD,UAAM,SAAS,MAAM,KAAK,KAAK,QAAQ;AACvC,QAAI;AACF,YAAM,OAAO,MAAM,UAAU;AAC7B,aAAO,KAAK,iCAAiC;AAAA,IAC/C,UAAE;AACA,aAAO,QAAQ;AAAA,IACjB;AAAA,EACF;AAAA,EAEA,MAAM,aAA4B;AAChC,QAAI,CAAC,KAAK,KAAM;AAEhB,UAAM,KAAK,KAAK,IAAI;AACpB,SAAK,OAAO;AACZ,WAAO,KAAK,uBAAuB;AAAA,EACrC;AAAA,EAEA,cAAuB;AACrB,WAAO,KAAK,SAAS,QAAQ,CAAC,KAAK,KAAK;AAAA,EAC1C;AAAA,EAEA,MAAM,OAAyB;AAC7B,QAAI,CAAC,KAAK,KAAM,QAAO;AAEvB,QAAI;AACF,YAAM,SAAS,MAAM,KAAK,KAAK,QAAQ;AACvC,UAAI;AACF,cAAM,OAAO,MAAM,UAAU;AAC7B,eAAO;AAAA,MACT,UAAE;AACA,eAAO,QAAQ;AAAA,MACjB;AAAA,IACF,QAAQ;AACN,aAAO;AAAA,IACT;AAAA,EACF;AAAA,EAEA,MAAM,mBAAkC;AACtC,UAAM,SAAS,MAAM,KAAK,UAAU;AAEpC,QAAI;AACF,YAAM,OAAO,MAAM,OAAO;AAG1B,YAAM,OAAO,MAAM;AAAA;AAAA;AAAA;AAAA,OAIlB;AAGD,YAAM,SAAS,KAAK;AAEpB,UAAI,OAAO,eAAe,OAAO;AAC/B,cAAM,OAAO,MAAM,2CAA2C;AAAA,MAChE;AAEA,UAAI,OAAO,iBAAiB,OAAO;AACjC,cAAM,OAAO,MAAM,wCAAwC;AAAA,MAC7D;AAEA,UAAI,OAAO,oBAAoB,OAAO;AACpC,cAAM,OAAO,MAAM,8CAA8C;AAAA,MACnE;AAGA,YAAM,OAAO,MAAM;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,OA4DlB;AAGD,YAAM,OAAO,MAAM;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,OA8BlB;AAGD,UAAI,OAAO,eAAe,OAAO;AAC/B,cAAM,OAAO,MAAM;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,SAelB;AAAA,MACH;AAGA,UAAI,OAAO,iBAAiB,OAAO;AACjC,cAAM,OAAO,MAAM;AAAA;AAAA;AAAA;AAAA;AAAA,SAKlB;AAAA,MACH;AAGA,YAAM,OAAO,MAAM;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,OAoBlB;AAGD,YAAM,OAAO,MAAM;AAAA;AAAA;AAAA;AAAA,OAIlB;AAED,YAAM,OAAO,MAAM,QAAQ;AAC3B,aAAO,KAAK,0CAA0C;AAAA,IACxD,SAAS,OAAgB;AACvB,YAAM,OAAO,MAAM,UAAU;AAC7B,YAAM;AAAA,IACR,UAAE;AACA,WAAK,cAAc,MAAM;AAAA,IAC3B;AAAA,EACF;AAAA,EAEA,MAAM,cAAc,eAAsC;AACxD,UAAM,SAAS,MAAM,KAAK,UAAU;AAEpC,QAAI;AACF,YAAM,SAAS,MAAM,OAAO;AAAA,QAC1B;AAAA,MACF;AACA,YAAM,iBAAiB,OAAO,KAAK,CAAC,GAAG,WAAW;AAElD,UAAI,kBAAkB,eAAe;AACnC,eAAO,KAAK,oCAAoC;AAAA,UAC9C;AAAA,UACA;AAAA,QACF,CAAC;AACD;AAAA,MACF;AAGA,eAAS,IAAI,iBAAiB,GAAG,KAAK,eAAe,KAAK;AACxD,eAAO,KAAK,iCAAiC,CAAC,EAAE;AAEhD,cAAM,OAAO;AAAA,UACX;AAAA,UACA,CAAC,GAAG,wBAAwB,CAAC,EAAE;AAAA,QACjC;AAAA,MACF;AAAA,IACF,UAAE;AACA,WAAK,cAAc,MAAM;AAAA,IAC3B;AAAA,EACF;AAAA,EAEA,MAAM,mBAAoC;AACxC,UAAM,SAAS,MAAM,KAAK,UAAU;AAEpC,QAAI;AACF,YAAM,SAAS,MAAM,OAAO;AAAA,QAC1B;AAAA,MACF;AACA,aAAO,OAAO,KAAK,CAAC,GAAG,WAAW;AAAA,IACpC,UAAE;AACA,WAAK,cAAc,MAAM;AAAA,IAC3B;AAAA,EACF;AAAA;AAAA,EAGA,MAAM,YAAY,OAAwC;AACxD,UAAM,SAAS,MAAM,KAAK,UAAU;AAEpC
,QAAI;AACF,YAAM,SAAS,MAAM,OAAO;AAAA,QAC1B;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,QAUA;AAAA,UACE,MAAM,YAAY;AAAA,UAClB,MAAM;AAAA,UACN,MAAM,cAAc,KAAK;AAAA,UACzB,MAAM,mBAAmB;AAAA,UACzB,MAAM,SAAS;AAAA,UACf,MAAM;AAAA,UACN,MAAM;AAAA,UACN,MAAM,SAAS;AAAA,UACf,MAAM,SAAS;AAAA,UACf,KAAK,UAAU,MAAM,UAAU,CAAC,CAAC;AAAA,UACjC,KAAK,UAAU,MAAM,WAAW,CAAC,CAAC;AAAA,UAClC,KAAK,UAAU,MAAM,YAAY,CAAC,CAAC;AAAA,UACnC,MAAM,eAAe;AAAA,UACrB,KAAK,UAAU,MAAM,eAAe,CAAC,CAAC;AAAA,UACtC,MAAM,WAAW,GAAG,MAAM,IAAI,IAAI,MAAM,eAAe,EAAE;AAAA,QAC3D;AAAA,MACF;AAEA,aAAO,OAAO,KAAK,CAAC,EAAE;AAAA,IACxB,UAAE;AACA,WAAK,cAAc,MAAM;AAAA,IAC3B;AAAA,EACF;AAAA,EAEA,MAAM,SAAS,SAAwC;AACrD,UAAM,SAAS,MAAM,KAAK,UAAU;AAEpC,QAAI;AACF,YAAM,SAAS,MAAM,OAAO;AAAA,QAC1B;AAAA,QACA,CAAC,OAAO;AAAA,MACV;AAEA,UAAI,OAAO,KAAK,WAAW,EAAG,QAAO;AAErC,YAAM,MAAM,OAAO,KAAK,CAAC;AACzB,aAAO;AAAA,QACL,GAAG;AAAA,QACH,UAAU,IAAI;AAAA,QACd,QAAQ,IAAI;AAAA,QACZ,YAAY,IAAI,WAAW,QAAQ;AAAA,QACnC,WAAW,IAAI,WAAW,QAAQ;AAAA,MACpC;AAAA,IACF,UAAE;AACA,WAAK,cAAc,MAAM;AAAA,IAC3B;AAAA,EACF;AAAA,EAEA,MAAM,YAAY,SAAiB,SAAwC;AACzE,UAAM,SAAS,MAAM,KAAK,UAAU;AAEpC,QAAI;AACF,YAAM,SAAS,CAAC;AAChB,YAAM,SAAS,CAAC;AAChB,UAAI,aAAa;AAEjB,UAAI,QAAQ,UAAU,QAAW;AAC/B,eAAO,KAAK,YAAY,YAAY,EAAE;AACtC,eAAO,KAAK,QAAQ,KAAK;AAAA,MAC3B;AAEA,UAAI,QAAQ,YAAY,QAAW;AACjC,eAAO,KAAK,cAAc,YAAY,EAAE;AACxC,eAAO,KAAK,KAAK,UAAU,QAAQ,OAAO,CAAC;AAAA,MAC7C;AAEA,UAAI,QAAQ,UAAU,QAAW;AAC/B,eAAO,KAAK,YAAY,YAAY,EAAE;AACtC,eAAO,KAAK,QAAQ,KAAK;AAAA,MAC3B;AAEA,UAAI,QAAQ,gBAAgB,QAAW;AACrC,eAAO,KAAK,kBAAkB,YAAY,EAAE;AAC5C,eAAO,KAAK,QAAQ,WAAW;AAAA,MACjC;AAEA,UAAI,QAAQ,gBAAgB,QAAW;AACrC,eAAO,KAAK,kBAAkB,YAAY,EAAE;AAC5C,eAAO,KAAK,KAAK,UAAU,QAAQ,WAAW,CAAC;AAAA,MACjD;AAEA,UAAI,QAAQ,cAAc,QAAW;AACnC,eAAO,KAAK,gBAAgB,YAAY,EAAE;AAC1C,eAAO,KAAK,IAAI,KAAK,QAAQ,SAAS,CAAC;AAAA,MACzC;AAEA,UAAI,OAAO,WAAW,EAAG;AAEzB,aAAO,KAAK,OAAO;AAEnB,YAAM,OAAO;AAAA,QACX;AAAA,4BACoB,OAAO,KAAK,IAAI,CAAC,sBAAsB,UAAU;AAAA;AAAA,QAErE;AAAA,MACF;AAAA,IACF,UAAE;AACA,WAAK,cAAc,MAAM;AAAA,IAC3B;AAAA,EACF;AAAA,EAEA,MAAM,YAAY,SAAgC;AAChD,UAAM,SAAS,MAAM,KAAK,UAAU;AAEpC,QAAI;AAEF,YAAM,OAAO,MAAM,0CAA0C,CAAC,OAAO,CAAC;AAAA,IACxE,UAAE;AACA,WAAK,cAAc,MAAM;AAAA,IAC3B;AAAA,EACF;AAAA,EAEA,MAAM,gBAAgB,OAAkC;AACtD,UAAM,SAAS,MAAM,KAAK,UAAU;AAEpC,QAAI;AACF,UAAI,QAAQ;AACZ,YAAM,SAAgB,CAAC,QAAQ;AAE/B,UAAI,OAAO;AACT,iBAAS;AACT,eAAO,KAAK,KAAK;AAAA,MACnB;AAEA,eAAS;AAET,YAAM,SAAS,MAAM,OAAO,MAAM,OAAO,MAAM;AAE/C,aAAO,OAAO,KAAK,IAAI,CAAC,SAAS;AAAA,QAC/B,GAAG;AAAA,QACH,YAAY,IAAI,WAAW,QAAQ;AAAA,QACnC,WAAW,IAAI,WAAW,QAAQ;AAAA,MACpC,EAAE;AAAA,IACJ,UAAE;AACA,WAAK,cAAc,MAAM;AAAA,IAC3B;AAAA,EACF;AAAA,EAEA,MAAM,WAAW,SAAiB,SAA8B;AAC9D,UAAM,KAAK,YAAY,SAAS;AAAA,MAC9B,OAAO;AAAA,MACP;AAAA,MACA,WAAW,KAAK,IAAI;AAAA,IACtB,CAAC;AAAA,EACH;AAAA;AAAA,EAGA,MAAM,YAAY,OAAwC;AACxD,UAAM,SAAS,MAAM,KAAK,UAAU;AAEpC,QAAI;AACF,YAAM,SAAS,MAAM,OAAO;AAAA,QAC1B;AAAA;AAAA;AAAA;AAAA;AAAA,QAKA;AAAA,UACE,MAAM,YAAY;AAAA,UAClB,MAAM;AAAA,UACN,MAAM;AAAA,UACN,MAAM,OAAO;AAAA,UACb,MAAM;AAAA,UACN,KAAK,UAAU,MAAM,WAAW,CAAC,CAAC;AAAA,UAClC,MAAM,KAAK,IAAI,KAAK,MAAM,EAAE,IAAI,oBAAI,KAAK;AAAA,QAC3C;AAAA,MACF;AAEA,aAAO,OAAO,KAAK,CAAC,EAAE;AAAA,IACxB,UAAE;AACA,WAAK,cAAc,MAAM;AAAA,IAC3B;AAAA,EACF;AAAA,EAEA,MAAM,eACJ,SACA,SACkB;AAClB,UAAM,SAAS,MAAM,KAAK,UAAU;AAEpC,QAAI;AACF,UAAI,QAAQ;AACZ,YAAM,SAAgB,CAAC,OAAO;AAE9B,eAAS,KAAK;AAAA,QACZ,SAAS,WAAW;AAAA,QACpB,SAAS;AAAA,MACX;AACA,eAAS,KAAK,iBAAiB,SAAS,OAAO,SAAS,MAAM;AAE9D,YAAM,SAAS,MAAM,OAAO,MAAM,OAAO,MAAM;AAE/C,aAAO,OAAO,KAAK,IAAI,CAAC,SAAS;AAAA,QAC/B,GAAG;AAAA,QACH,IAAI,IAAI,GAAG,QAAQ;AAAA,MACrB,EAAE;AAAA,IACJ,UAAE;AACA,WAAK,cAAc,MAAM;AAAA,IAC3B;AAAA,EACF;AAAA,EAEA,MAAM,kBAAkB,SAAgC;AACtD,UAAM,SAAS,MAAM,
KAAK,UAAU;AAEpC,QAAI;AACF,YAAM,OAAO,MAAM,0CAA0C,CAAC,OAAO,CAAC;AAAA,IACxE,UAAE;AACA,WAAK,cAAc,MAAM;AAAA,IAC3B;AAAA,EACF;AAAA;AAAA,EAGA,MAAM,aAAa,QAA0C;AAC3D,UAAM,SAAS,MAAM,KAAK,UAAU;AAEpC,QAAI;AACF,YAAM,SAAS,MAAM,OAAO;AAAA,QAC1B;AAAA;AAAA;AAAA;AAAA;AAAA,QAKA;AAAA,UACE,OAAO,aAAa;AAAA,UACpB,OAAO;AAAA,UACP,OAAO,cAAc,KAAK;AAAA,UAC1B,OAAO;AAAA,UACP,OAAO;AAAA,UACP,OAAO,YAAY;AAAA,UACnB,KAAK,UAAU,OAAO,YAAY,CAAC,CAAC;AAAA,QACtC;AAAA,MACF;AAEA,aAAO,OAAO,KAAK,CAAC,EAAE;AAAA,IACxB,UAAE;AACA,WAAK,cAAc,MAAM;AAAA,IAC3B;AAAA,EACF;AAAA,EAEA,MAAM,gBAAgB,SAAoC;AACxD,UAAM,SAAS,MAAM,KAAK,UAAU;AAEpC,QAAI;AACF,YAAM,SAAS,MAAM,OAAO;AAAA,QAC1B;AAAA;AAAA;AAAA;AAAA,QAIA,CAAC,OAAO;AAAA,MACV;AAEA,aAAO,OAAO,KAAK,IAAI,CAAC,SAAS;AAAA,QAC/B,GAAG;AAAA,QACH,YAAY,IAAI,WAAW,QAAQ;AAAA,MACrC,EAAE;AAAA,IACJ,UAAE;AACA,WAAK,cAAc,MAAM;AAAA,IAC3B;AAAA,EACF;AAAA,EAEA,MAAM,mBAAmB,SAAgC;AACvD,UAAM,SAAS,MAAM,KAAK,UAAU;AAEpC,QAAI;AACF,YAAM,OAAO,MAAM,2CAA2C,CAAC,OAAO,CAAC;AAAA,IACzE,UAAE;AACA,WAAK,cAAc,MAAM;AAAA,IAC3B;AAAA,EACF;AAAA;AAAA,EAGA,MAAM,OACJ,SAC2C;AAC3C,UAAM,SAAS,MAAM,KAAK,UAAU;AAEpC,QAAI;AACF,YAAM,SAAS,KAAK;AAEpB,UAAI,OAAO,eAAe,OAAO;AAE/B,cAAM,SAAS,MAAM,OAAO;AAAA,UAC1B;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,UAWA;AAAA,YACE,QAAQ;AAAA,YACR,QAAQ,SAAS;AAAA,YACjB,QAAQ,UAAU;AAAA,YAClB,QAAQ,kBAAkB;AAAA,UAC5B;AAAA,QACF;AAEA,eAAO,OAAO,KAAK,IAAI,CAAC,SAAS;AAAA,UAC/B,GAAG;AAAA,UACH,YAAY,IAAI,WAAW,QAAQ;AAAA,UACnC,WAAW,IAAI,WAAW,QAAQ;AAAA,QACpC,EAAE;AAAA,MACJ,OAAO;AAEL,cAAM,SAAS,MAAM,OAAO;AAAA,UAC1B;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,UAYA,CAAC,QAAQ,OAAO,QAAQ,SAAS,KAAK,QAAQ,UAAU,CAAC;AAAA,QAC3D;AAEA,eAAO,OAAO,KAAK,IAAI,CAAC,SAAS;AAAA,UAC/B,GAAG;AAAA,UACH,YAAY,IAAI,WAAW,QAAQ;AAAA,UACnC,WAAW,IAAI,WAAW,QAAQ;AAAA,QACpC,EAAE;AAAA,MACJ;AAAA,IACF,UAAE;AACA,WAAK,cAAc,MAAM;AAAA,IAC3B;AAAA,EACF;AAAA;AAAA,EAGA,MAAM,eACJ,WACA,SACgD;AAChD,UAAM,SAAS,MAAM,KAAK,UAAU;AAEpC,QAAI;AACF,YAAM,SAAS,KAAK;AAEpB,UAAI,OAAO,iBAAiB,OAAO;AACjC,eAAO,KAAK,qDAAqD;AACjE,eAAO,CAAC;AAAA,MACV;AAEA,YAAM,SAAS,MAAM,OAAO;AAAA,QAC1B;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,QAQA;AAAA,UACE,IAAI,UAAU,KAAK,GAAG,CAAC;AAAA,UACvB,SAAS,SAAS;AAAA,UAClB,SAAS,UAAU;AAAA,QACrB;AAAA,MACF;AAEA,aAAO,OAAO,KAAK,IAAI,CAAC,SAAS;AAAA,QAC/B,GAAG;AAAA,QACH,YAAY,IAAI,WAAW,QAAQ;AAAA,QACnC,WAAW,IAAI,WAAW,QAAQ;AAAA,MACpC,EAAE;AAAA,IACJ,UAAE;AACA,WAAK,cAAc,MAAM;AAAA,IAC3B;AAAA,EACF;AAAA;AAAA,EAGA,MAAM,aACJ,WACA,WACA,SAC2C;AAC3C,UAAM,SAAS,MAAM,KAAK,UAAU;AAEpC,QAAI;AACF,YAAM,aAAa,SAAS,QAAQ;AACpC,YAAM,eAAe,SAAS,UAAU;AAExC,YAAM,SAAS,MAAM,OAAO;AAAA,QAC1B;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,QA0BA,CAAC,WAAW,IAAI,UAAU,KAAK,GAAG,CAAC,KAAK,YAAY,cAAc,GAAG;AAAA,MACvE;AAEA,aAAO,OAAO,KAAK,IAAI,CAAC,SAAS;AAAA,QAC/B,GAAG;AAAA,QACH,YAAY,IAAI,WAAW,QAAQ;AAAA,QACnC,WAAW,IAAI,WAAW,QAAQ;AAAA,MACpC,EAAE;AAAA,IACJ,UAAE;AACA,WAAK,cAAc,MAAM;AAAA,IAC3B;AAAA,EACF;AAAA;AAAA,EAGA,MAAM,UACJ,OACA,SACgC;AAChC,UAAM,SAAS,MAAM,KAAK,UAAU;AAEpC,QAAI;AACF,YAAM,UAAU,QAAQ,QACrB,IAAI,CAAC,MAAM;AACV,cAAM,QAAQ,EAAE,SAAS,GAAG,EAAE,SAAS,IAAI,EAAE,KAAK;AAClD,eAAO,GAAG,EAAE,SAAS,IAAI,EAAE,KAAK,SAAS,KAAK;AAAA,MAChD,CAAC,EACA,KAAK,IAAI;AAEZ,UAAI,QAAQ;AAAA,iBACD,QAAQ,QAAQ,IAAI,CAAC,MAAM,IAAI,CAAC,GAAG,EAAE,KAAK,IAAI,CAAC,KAAK,OAAO;AAAA,eAC7D,KAAK;AAAA,mBACD,QAAQ,QAAQ,IAAI,CAAC,MAAM,IAAI,CAAC,GAAG,EAAE,KAAK,IAAI,CAAC;AAAA;AAG5D,UAAI,QAAQ,QAAQ;AAClB,cAAM,gBAAgB,OAAO,QAAQ,QAAQ,MAAM,EAAE;AAAA,UACnD,CAAC,CAAC,KAAK,KAAK,GAAG,MAAM;AACnB,mBAAO,GAAG,GAAG,IAAI,OAAO,UAAU,WAAW,MAAM,KAAK,GAAG,KAAK,IAAI,CAAC;AAAA,UACvE;AAAA,QACF;AACA,iBAAS,WAAW
,cAAc,KAAK,OAAO,CAAC;AAAA,MACjD;AAEA,YAAM,SAAS,MAAM,OAAO;AAAA,QAC1B;AAAA,QACA,OAAO,OAAO,QAAQ,UAAU,CAAC,CAAC;AAAA,MACpC;AACA,aAAO,OAAO;AAAA,IAChB,UAAE;AACA,WAAK,cAAc,MAAM;AAAA,IAC3B;AAAA,EACF;AAAA;AAAA,EAGA,MAAM,eAAe,WAOnB;AACA,UAAM,SAAS,MAAM,KAAK,UAAU;AAEpC,QAAI;AAEF,YAAM,SAAS,MAAM,OAAO;AAAA,QAC1B;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,QAaA,CAAC,KAAK,WAAW,WAAW,SAAS,MAAM,WAAW,OAAO,IAAI;AAAA,MACnE;AAEA,aAAO,OAAO,KAAK,IAAI,CAAC,SAAS;AAAA,QAC/B,SAAS,IAAI;AAAA,QACb,MAAM,IAAI;AAAA,QACV,WAAW,IAAI;AAAA,QACf,UAAU,IAAI;AAAA,MAChB,EAAE;AAAA,IACJ,UAAE;AACA,WAAK,cAAc,MAAM;AAAA,IAC3B;AAAA,EACF;AAAA;AAAA,EAGA,MAAM,YAAY,YAA4C;AAC5D,UAAM,KAAK,cAAc,YAAY;AACnC,YAAM,SAAS,KAAK;AAEpB,iBAAW,MAAM,YAAY;AAC3B,gBAAQ,GAAG,MAAM;AAAA,UACf,KAAK,UAAU;AACb,kBAAM,OAAO,OAAO,KAAK,GAAG,IAAI;AAChC,kBAAM,SAAS,OAAO,OAAO,GAAG,IAAI;AACpC,kBAAM,eAAe,OAAO,IAAI,CAAC,GAAG,MAAM,IAAI,IAAI,CAAC,EAAE,EAAE,KAAK,GAAG;AAE/D,kBAAM,OAAO;AAAA,cACX,eAAe,GAAG,KAAK,KAAK,KAAK,KAAK,GAAG,CAAC,aAAa,YAAY;AAAA,cACnE;AAAA,YACF;AACA;AAAA,UACF;AAAA,UAEA,KAAK,UAAU;AACb,kBAAM,OAAO,OAAO,KAAK,GAAG,IAAI,EAC7B,IAAI,CAAC,GAAG,MAAM,GAAG,CAAC,OAAO,IAAI,CAAC,EAAE,EAChC,KAAK,GAAG;AACX,kBAAM,cAAc,KAAK;AAAA,cACvB,GAAG,SAAS,CAAC;AAAA,cACb,OAAO,KAAK,GAAG,IAAI,EAAE;AAAA,YACvB;AACA,kBAAM,SAAS;AAAA,cACb,GAAG,OAAO,OAAO,GAAG,IAAI;AAAA,cACxB,GAAG,OAAO,OAAO,GAAG,SAAS,CAAC,CAAC;AAAA,YACjC;AAEA,kBAAM,OAAO;AAAA,cACX,UAAU,GAAG,KAAK,QAAQ,IAAI,IAAI,WAAW;AAAA,cAC7C;AAAA,YACF;AACA;AAAA,UACF;AAAA,UAEA,KAAK,UAAU;AACb,kBAAM,cAAc,KAAK;AAAA,cACvB,GAAG,SAAS,CAAC;AAAA,cACb;AAAA,YACF;AACA,kBAAM,OAAO;AAAA,cACX,eAAe,GAAG,KAAK,IAAI,WAAW;AAAA,cACtC,OAAO,OAAO,GAAG,SAAS,CAAC,CAAC;AAAA,YAC9B;AACA;AAAA,UACF;AAAA,QACF;AAAA,MACF;AAAA,IACF,CAAC;AAAA,EACH;AAAA,EAEA,MAAM,SAAwB;AAC5B,UAAM,SAAS,MAAM,KAAK,UAAU;AAEpC,QAAI;AACF,YAAM,OAAO,MAAM,uBAAuB;AAC1C,YAAM,OAAO,MAAM,uBAAuB;AAC1C,YAAM,OAAO,MAAM,wBAAwB;AAG3C,YAAM,OAAO;AAAA,QACX;AAAA,MACF;AAEA,aAAO,KAAK,uCAAuC;AAAA,IACrD,UAAE;AACA,WAAK,cAAc,MAAM;AAAA,IAC3B;AAAA,EACF;AAAA,EAEA,MAAM,UAAyB;AAC7B,UAAM,SAAS,MAAM,KAAK,UAAU;AAEpC,QAAI;AACF,YAAM,OAAO,MAAM,gBAAgB;AACnC,YAAM,OAAO,MAAM,gBAAgB;AACnC,YAAM,OAAO,MAAM,iBAAiB;AACpC,aAAO,KAAK,4BAA4B;AAAA,IAC1C,UAAE;AACA,WAAK,cAAc,MAAM;AAAA,IAC3B;AAAA,EACF;AAAA;AAAA,EAGA,MAAM,WAAmC;AACvC,UAAM,SAAS,MAAM,KAAK,UAAU;AAEpC,QAAI;AACF,YAAM,SAAS,MAAM,OAAO,MAAM;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,OAOjC;AAED,aAAO;AAAA,QACL,aAAa,SAAS,OAAO,KAAK,CAAC,EAAE,YAAY;AAAA,QACjD,cAAc,SAAS,OAAO,KAAK,CAAC,EAAE,aAAa;AAAA,QACnD,aAAa,SAAS,OAAO,KAAK,CAAC,EAAE,YAAY;AAAA,QACjD,cAAc,SAAS,OAAO,KAAK,CAAC,EAAE,aAAa;AAAA,QACnD,WAAW,SAAS,OAAO,KAAK,CAAC,EAAE,UAAU;AAAA,MAC/C;AAAA,IACF,UAAE;AACA,WAAK,cAAc,MAAM;AAAA,IAC3B;AAAA,EACF;AAAA,EAEA,MAAM,gBAOJ;AACA,UAAM,SAAS,MAAM,KAAK,UAAU;AAEpC,QAAI;AACF,YAAM,SAAS,MAAM,OAAO,MAAM;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,OAUjC;AAED,aAAO,OAAO,KAAK,IAAI,CAAC,SAAS;AAAA,QAC/B,OAAO,IAAI;AAAA,QACX,OAAO,SAAS,IAAI,KAAK;AAAA,QACzB,UAAU,WAAW,IAAI,SAAS;AAAA,QAClC,WAAW,WAAW,IAAI,UAAU;AAAA,MACtC,EAAE;AAAA,IACJ,SAAS,OAAgB;AACvB,aAAO,KAAK,oCAAoC,KAAK;AACrD,aAAO,CAAC;AAAA,IACV,UAAE;AACA,WAAK,cAAc,MAAM;AAAA,IAC3B;AAAA,EACF;AAAA;AAAA,EAGA,MAAM,mBAAkC;AACtC,SAAK,eAAe,MAAM,KAAK,KAAM,QAAQ;AAC7C,UAAM,KAAK,aAAa,MAAM,OAAO;AAAA,EACvC;AAAA,EAEA,MAAM,oBAAmC;AACvC,QAAI,CAAC,KAAK;AACR,YAAM,IAAI;AAAA,QACR;AAAA,QACA,UAAU;AAAA,MACZ;AAEF,UAAM,KAAK,aAAa,MAAM,QAAQ;AACtC,SAAK,aAAa,QAAQ;AAC1B,SAAK,eAAe;AAAA,EACtB;AAAA,EAEA,MAAM,sBAAqC;AACzC,QAAI,CAAC,KAAK;AACR,YAAM,IAAI;AAAA,QACR;AAAA,QACA,UAAU;AAAA,MACZ;AAEF,UAAM,KAAK,aAAa,MAAM,UAAU;AACxC,SAAK,aAAa,QAAQ;AAC1B,SAAK,eAAe;AAAA,EACtB;AAAA,EAEA,MAAM,cACJ,UACe;AACf,UAAM,KAAK,iBAAiB;AA
E5B,QAAI;AACF,YAAM,SAAS,IAAI;AACnB,YAAM,KAAK,kBAAkB;AAAA,IAC/B,SAAS,OAAgB;AACvB,UAAI;AACF,cAAM,KAAK,oBAAoB;AAAA,MACjC,SAAS,eAAwB;AAE/B,gBAAQ,MAAM,gCAAgC,aAAa;AAE3D,YAAI,KAAK,gBAAgB;AACvB,eAAK,eAAe,oBAAoB,KAAK,MAAM;AAAA,QACrD;AAAA,MACF;AACA,YAAM;AAAA,IACR;AAAA,EACF;AAAA;AAAA,EAGA,MAAM,WACJ,QACA,QACiB;AACjB,UAAM,SAAS,MAAM,KAAK,UAAU;AAEpC,QAAI;AACF,UAAI,WAAW,QAAQ;AACrB,cAAM,OAA8B,CAAC;AAErC,mBAAW,SAAS,QAAQ;AAC1B,gBAAM,SAAS,MAAM,OAAO,MAAM,iBAAiB,KAAK,EAAE;AAC1D,eAAK,KAAK,IAAI,OAAO;AAAA,QACvB;AAEA,eAAO,OAAO,KAAK,KAAK,UAAU,MAAM,MAAM,CAAC,CAAC;AAAA,MAClD,WAAW,WAAW,OAAO;AAE3B,cAAM,SAAmB,CAAC;AAE1B,mBAAW,SAAS,QAAQ;AAC1B,gBAAM,SAAS,MAAM,OAAO,MAAM;AAAA,kCACV,KAAK;AAAA,WAC5B;AACD,iBAAO,KAAK,OAAO,SAAS,CAAC;AAAA,QAC/B;AAEA,eAAO,OAAO,KAAK,OAAO,KAAK,MAAM,CAAC;AAAA,MACxC,OAAO;AACL,cAAM,IAAI;AAAA,UACR,UAAU,MAAM;AAAA,UAChB,UAAU;AAAA,UACV,EAAE,QAAQ,kBAAkB,CAAC,MAAM,EAAE;AAAA,QACvC;AAAA,MACF;AAAA,IACF,UAAE;AACA,WAAK,cAAc,MAAM;AAAA,IAC3B;AAAA,EACF;AAAA,EAEA,MAAM,WACJ,MACA,QACA,SACe;AACf,UAAM,SAAS,MAAM,KAAK,UAAU;AAEpC,QAAI;AACF,UAAI,WAAW,QAAQ;AACrB,cAAM,SAAS,KAAK,MAAM,KAAK,SAAS,CAAC;AAEzC,cAAM,OAAO,MAAM,OAAO;AAE1B,mBAAW,CAAC,OAAO,IAAI,KAAK,OAAO,QAAQ,MAAM,GAAG;AAClD,cAAI,SAAS,UAAU;AACrB,kBAAM,OAAO,MAAM,kBAAkB,KAAK,UAAU;AAAA,UACtD;AAEA,qBAAW,OAAO,MAAe;AAC/B,kBAAM,OAAO,OAAO,KAAK,GAAG;AAC5B,kBAAM,SAAS,OAAO,OAAO,GAAG;AAChC,kBAAM,eAAe,OAAO,IAAI,CAAC,GAAG,MAAM,IAAI,IAAI,CAAC,EAAE,EAAE,KAAK,GAAG;AAE/D,gBAAI,SAAS,QAAQ;AACnB,oBAAM,UAAU,KAAK,IAAI,CAAC,MAAM,GAAG,CAAC,eAAe,CAAC,EAAE,EAAE,KAAK,GAAG;AAChE,oBAAM,OAAO;AAAA,gBACX,eAAe,KAAK,KAAK,KAAK,KAAK,GAAG,CAAC,aAAa,YAAY;AAAA,6CACnC,OAAO;AAAA,gBACpC;AAAA,cACF;AAAA,YACF,OAAO;AACL,oBAAM,OAAO;AAAA,gBACX,eAAe,KAAK,KAAK,KAAK,KAAK,GAAG,CAAC,aAAa,YAAY;AAAA,gBAChE;AAAA,cACF;AAAA,YACF;AAAA,UACF;AAAA,QACF;AAEA,cAAM,OAAO,MAAM,QAAQ;AAAA,MAC7B,OAAO;AACL,cAAM,IAAI;AAAA,UACR,UAAU,MAAM;AAAA,UAChB,UAAU;AAAA,UACV,EAAE,QAAQ,kBAAkB,CAAC,MAAM,EAAE;AAAA,QACvC;AAAA,MACF;AAAA,IACF,SAAS,OAAgB;AACvB,YAAM,OAAO,MAAM,UAAU;AAC7B,YAAM;AAAA,IACR,UAAE;AACA,WAAK,cAAc,MAAM;AAAA,IAC3B;AAAA,EACF;AAAA;AAAA,EAGA,MAAc,YAAiC;AAC7C,QAAI,KAAK,cAAc;AACrB,aAAO,KAAK;AAAA,IACd;AAEA,QAAI,CAAC,KAAK,MAAM;AACd,YAAM,IAAI;AAAA,QACR;AAAA,QACA,UAAU;AAAA,MACZ;AAAA,IACF;AAEA,WAAO,MAAM,KAAK,KAAK,QAAQ;AAAA,EACjC;AAAA,EAEQ,cAAc,QAA0B;AAC9C,QAAI,WAAW,KAAK,cAAc;AAChC,aAAO,QAAQ;AAAA,IACjB;AAAA,EACF;AAAA,EAEQ,yBACN,YACA,YACQ;AACR,UAAM,UAAU,OAAO,QAAQ,UAAU,EAAE,IAAI,CAAC,CAAC,KAAK,KAAK,GAAG,MAAM;AAClE,YAAM,WAAW,aAAa,IAAI;AAElC,UAAI,UAAU,MAAM;AAClB,eAAO,GAAG,GAAG;AAAA,MACf,WAAW,MAAM,QAAQ,KAAK,GAAG;AAC/B,cAAM,eAAe,MAAM,IAAI,CAAC,GAAG,MAAM,IAAI,WAAW,CAAC,EAAE,EAAE,KAAK,GAAG;AACrE,eAAO,GAAG,GAAG,QAAQ,YAAY;AAAA,MACnC,OAAO;AACL,eAAO,GAAG,GAAG,OAAO,QAAQ;AAAA,MAC9B;AAAA,IACF,CAAC;AAED,WAAO,QAAQ,SAAS,IAAI,SAAS,QAAQ,KAAK,OAAO,CAAC,KAAK;AAAA,EACjE;AACF;",
"names": []
}
@@ -1,7 +1,7 @@
{
"version": 3,
"sources": ["../../../src/core/database/query-router.ts"],
-
"sourcesContent": ["/**\n * Query Router for Tiered Storage\n * Routes database queries to appropriate storage tier based on data age, query type, and performance requirements\n */\n\nimport { DatabaseAdapter } from './database-adapter.js';\nimport type { Frame } from '../context/frame-manager.js';\nimport { logger } from '../monitoring/logger.js';\nimport { DatabaseError, ErrorCode } from '../errors/index.js';\nimport { EventEmitter } from 'events';\n\nexport interface StorageTier {\n name: string;\n adapter: DatabaseAdapter;\n priority: number;\n config: TierConfig;\n}\n\nexport interface TierConfig {\n // Data age thresholds\n maxAge?: number; // Data older than this goes to next tier (ms)\n minAge?: number; // Data newer than this stays in this tier (ms)\n\n // Query type preferences\n preferredOperations: string[]; // ['read', 'write', 'search', 'analytics']\n supportedFeatures: string[]; // ['full_text', 'vector', 'aggregation']\n\n // Performance characteristics\n maxLatency?: number; // Max acceptable latency for this tier (ms)\n maxThroughput?: number; // Max queries per second this tier can handle\n\n // Capacity limits\n maxFrames?: number; // Max frames before promoting to next tier\n maxSizeMB?: number; // Max storage size in MB\n\n // Routing rules\n routingRules: RoutingRule[];\n}\n\nexport interface RoutingRule {\n condition: string; // 'age' | 'size' | 'query_type' | 'load' | 'feature'\n operator: string; // '>', '<', '=', '!=', 'in', 'not_in'\n value: any; // Comparison value\n weight: number; // Rule weight (0-1)\n}\n\nexport interface QueryContext {\n queryType: 'read' | 'write' | 'search' | 'analytics' | 'bulk';\n frames?: Frame[];\n frameIds?: string[];\n requiredFeatures?: string[];\n timeRange?: { start: Date; end: Date };\n priority?: 'low' | 'medium' | 'high' | 'critical';\n timeout?: number;\n cacheStrategy?: 'none' | 'read' | 'write' | 'read_write';\n}\n\nexport interface RoutingDecision {\n primaryTier: StorageTier;\n fallbackTiers: StorageTier[];\n rationale: string;\n confidence: number; // 0-1 confidence in decision\n estimatedLatency: number; // Estimated query latency (ms)\n cacheRecommendation?: string;\n}\n\nexport interface QueryMetrics {\n totalQueries: number;\n queriesByTier: Map<string, number>;\n queriesByType: Map<string, number>;\n averageLatency: number;\n latencyByTier: Map<string, number>;\n errorsByTier: Map<string, number>;\n cacheHitRate: number;\n routingDecisions: number;\n}\n\nexport class QueryRouter extends EventEmitter {\n private tiers: Map<string, StorageTier> = new Map();\n private metrics: QueryMetrics;\n private decisionCache: Map<string, RoutingDecision> = new Map();\n private readonly cacheExpiration = 60000; // 1 minute\n private readonly maxCacheSize = 1000;\n\n constructor() {\n super();\n this.metrics = {\n totalQueries: 0,\n queriesByTier: new Map(),\n queriesByType: new Map(),\n averageLatency: 0,\n latencyByTier: new Map(),\n errorsByTier: new Map(),\n cacheHitRate: 0,\n routingDecisions: 0,\n };\n }\n\n /**\n * Register a storage tier with the router\n */\n registerTier(tier: StorageTier): void {\n this.tiers.set(tier.name, tier);\n logger.info(\n `Registered storage tier: ${tier.name} (priority: ${tier.priority})`\n );\n this.emit('tierRegistered', tier);\n }\n\n /**\n * Remove a storage tier from the router\n */\n unregisterTier(tierName: string): void {\n const tier = this.tiers.get(tierName);\n if (tier) {\n this.tiers.delete(tierName);\n logger.info(`Unregistered storage tier: ${tierName}`);\n 
this.emit('tierUnregistered', tier);\n }\n }\n\n /**\n * Route a query to the most appropriate storage tier\n */\n async route<T>(\n operation: string,\n context: QueryContext,\n executor: (adapter: DatabaseAdapter) => Promise<T>\n ): Promise<T> {\n const startTime = Date.now();\n this.metrics.totalQueries++;\n this.metrics.queriesByType.set(\n context.queryType,\n (this.metrics.queriesByType.get(context.queryType) || 0) + 1\n );\n\n try {\n // Get routing decision\n const decision = await this.makeRoutingDecision(operation, context);\n\n // Try primary tier first\n try {\n const result = await this.executeOnTier(decision.primaryTier, executor);\n this.updateMetrics(decision.primaryTier.name, startTime, true);\n return result;\n } catch (error: unknown) {\n logger.warn(\n `Query failed on primary tier ${decision.primaryTier.name}:`,\n error\n );\n this.updateMetrics(decision.primaryTier.name, startTime, false);\n\n // Try fallback tiers\n for (const fallbackTier of decision.fallbackTiers) {\n try {\n logger.info(`Attempting fallback to tier: ${fallbackTier.name}`);\n const result = await this.executeOnTier(fallbackTier, executor);\n this.updateMetrics(fallbackTier.name, startTime, true);\n return result;\n } catch (fallbackError: unknown) {\n logger.warn(\n `Query failed on fallback tier ${fallbackTier.name}:`,\n fallbackError\n );\n this.updateMetrics(fallbackTier.name, startTime, false);\n }\n }\n\n // If all tiers failed, throw the original error\n throw error;\n }\n } catch (error: unknown) {\n logger.error('Query routing failed:', error);\n this.emit('routingError', { operation, context, error });\n throw error;\n }\n }\n\n /**\n * Make routing decision based on query context\n */\n private async makeRoutingDecision(\n operation: string,\n context: QueryContext\n ): Promise<RoutingDecision> {\n // Check cache first\n const cacheKey = this.generateCacheKey(operation, context);\n const cached = this.decisionCache.get(cacheKey);\n if (cached && Date.now() - cached.estimatedLatency < this.cacheExpiration) {\n this.metrics.cacheHitRate =\n (this.metrics.cacheHitRate * this.metrics.routingDecisions + 1) /\n (this.metrics.routingDecisions + 1);\n return cached;\n }\n\n this.metrics.routingDecisions++;\n\n // Evaluate each tier\n const evaluations: Array<{\n tier: StorageTier;\n score: number;\n rationale: string;\n }> = [];\n\n for (const tier of this.tiers.values()) {\n const score = await this.evaluateTier(tier, operation, context);\n const rationale = this.generateRationale(tier, operation, context, score);\n evaluations.push({ tier, score, rationale });\n }\n\n // Sort by score (highest first)\n evaluations.sort((a, b) => b.score - a.score);\n\n if (evaluations.length === 0) {\n throw new DatabaseError(\n 'No storage tiers available for routing',\n ErrorCode.DB_CONNECTION_FAILED,\n { query: context.query, tiersConfigured: this.tiers.length }\n );\n }\n\n const primaryEval = evaluations[0];\n const fallbackTiers = evaluations\n .slice(1)\n .map((evaluation) => evaluation.tier);\n\n const decision: RoutingDecision = {\n primaryTier: primaryEval.tier,\n fallbackTiers,\n rationale: primaryEval.rationale,\n confidence: primaryEval.score,\n estimatedLatency: this.estimateLatency(\n primaryEval.tier,\n operation,\n context\n ),\n cacheRecommendation: this.recommendCacheStrategy(\n primaryEval.tier,\n context\n ),\n };\n\n // Cache decision\n this.cacheDecision(cacheKey, decision);\n\n logger.debug(\n `Routing decision: ${decision.primaryTier.name} (confidence: 
${decision.confidence.toFixed(2)})`\n );\n this.emit('routingDecision', { operation, context, decision });\n\n return decision;\n }\n\n /**\n * Evaluate how well a tier fits the query requirements\n */\n private async evaluateTier(\n tier: StorageTier,\n operation: string,\n context: QueryContext\n ): Promise<number> {\n let score = 0;\n let maxScore = 0;\n\n // Evaluate each routing rule\n for (const rule of tier.config.routingRules) {\n maxScore += rule.weight;\n\n if (this.evaluateRule(rule, operation, context, tier)) {\n score += rule.weight;\n }\n }\n\n // Check operation preference\n if (tier.config.preferredOperations.includes(context.queryType)) {\n score += 0.2;\n maxScore += 0.2;\n }\n\n // Check feature support\n if (context.requiredFeatures) {\n const supportedFeatures = context.requiredFeatures.filter((feature) =>\n tier.config.supportedFeatures.includes(feature)\n );\n if (supportedFeatures.length === context.requiredFeatures.length) {\n score += 0.3;\n }\n maxScore += 0.3;\n }\n\n // Check current load\n const currentLoad = await this.getCurrentLoad(tier);\n if (\n tier.config.maxThroughput &&\n currentLoad < tier.config.maxThroughput * 0.8\n ) {\n score += 0.1;\n }\n maxScore += 0.1;\n\n // Check capacity\n if (await this.isWithinCapacity(tier)) {\n score += 0.1;\n }\n maxScore += 0.1;\n\n return maxScore > 0 ? score / maxScore : 0;\n }\n\n /**\n * Evaluate a single routing rule\n */\n private evaluateRule(\n rule: RoutingRule,\n _operation: string,\n context: QueryContext,\n _tier: StorageTier\n ): boolean {\n let actualValue: any;\n\n switch (rule.condition) {\n case 'age':\n // Check data age if frames are provided\n if (context.frames && context.frames.length > 0) {\n const avgAge =\n context.frames.reduce(\n (sum, frame) => sum + (Date.now() - frame.created_at),\n 0\n ) / context.frames.length;\n actualValue = avgAge;\n } else if (context.timeRange) {\n actualValue = Date.now() - context.timeRange.end.getTime();\n } else {\n return false;\n }\n break;\n\n case 'query_type':\n actualValue = context.queryType;\n break;\n\n case 'feature':\n actualValue = context.requiredFeatures || [];\n break;\n\n case 'priority':\n actualValue = context.priority || 'medium';\n break;\n\n case 'size':\n actualValue = context.frames ? 
context.frames.length : 0;\n break;\n\n default:\n return false;\n }\n\n return this.compareValues(actualValue, rule.operator, rule.value);\n }\n\n /**\n * Compare values based on operator\n */\n private compareValues(actual: any, operator: string, expected: any): boolean {\n switch (operator) {\n case '>':\n return actual > expected;\n case '<':\n return actual < expected;\n case '=':\n case '==':\n return actual === expected;\n case '!=':\n return actual !== expected;\n case 'in':\n return Array.isArray(expected) && expected.includes(actual);\n case 'not_in':\n return Array.isArray(expected) && !expected.includes(actual);\n case 'contains':\n return (\n Array.isArray(actual) &&\n actual.some((item) => expected.includes(item))\n );\n default:\n return false;\n }\n }\n\n /**\n * Execute query on specific tier\n */\n private async executeOnTier<T>(\n tier: StorageTier,\n executor: (adapter: DatabaseAdapter) => Promise<T>\n ): Promise<T> {\n const startTime = Date.now();\n\n try {\n const result = await executor(tier.adapter);\n const duration = Date.now() - startTime;\n\n logger.debug(`Query executed on tier ${tier.name} in ${duration}ms`);\n this.emit('queryExecuted', {\n tierName: tier.name,\n duration,\n success: true,\n });\n\n return result;\n } catch (error: unknown) {\n const duration = Date.now() - startTime;\n\n logger.error(\n `Query failed on tier ${tier.name} after ${duration}ms:`,\n error\n );\n this.emit('queryExecuted', {\n tierName: tier.name,\n duration,\n success: false,\n error,\n });\n\n throw error;\n }\n }\n\n /**\n * Generate cache key for routing decisions\n */\n private generateCacheKey(operation: string, context: QueryContext): string {\n const keyParts = [\n operation,\n context.queryType,\n context.priority || 'medium',\n (context.requiredFeatures || []).sort().join(','),\n context.timeRange\n ? 
`${context.timeRange.start.getTime()}-${context.timeRange.end.getTime()}`\n : '',\n ];\n\n return keyParts.join('|');\n }\n\n /**\n * Cache routing decision\n */\n private cacheDecision(key: string, decision: RoutingDecision): void {\n // Implement LRU eviction if cache is full\n if (this.decisionCache.size >= this.maxCacheSize) {\n const firstKey = this.decisionCache.keys().next().value;\n this.decisionCache.delete(firstKey);\n }\n\n this.decisionCache.set(key, decision);\n }\n\n /**\n * Estimate query latency for a tier\n */\n private estimateLatency(\n tier: StorageTier,\n operation: string,\n context: QueryContext\n ): number {\n const baseLatency =\n this.metrics.latencyByTier.get(tier.name) ||\n tier.config.maxLatency ||\n 100;\n\n // Adjust based on operation type\n let multiplier = 1;\n switch (context.queryType) {\n case 'search':\n multiplier = 1.5;\n break;\n case 'analytics':\n multiplier = 2.0;\n break;\n case 'bulk':\n multiplier = 3.0;\n break;\n default:\n multiplier = 1.0;\n }\n\n return baseLatency * multiplier;\n }\n\n /**\n * Recommend cache strategy for the context\n */\n private recommendCacheStrategy(\n tier: StorageTier,\n context: QueryContext\n ): string {\n if (context.cacheStrategy && context.cacheStrategy !== 'none') {\n return context.cacheStrategy;\n }\n\n // Default recommendations based on query type and tier\n if (tier.name === 'hot' || tier.name === 'memory') {\n return 'read_write';\n } else if (context.queryType === 'read') {\n return 'read';\n }\n\n return 'none';\n }\n\n /**\n * Generate human-readable rationale for routing decision\n */\n private generateRationale(\n tier: StorageTier,\n operation: string,\n context: QueryContext,\n score: number\n ): string {\n const reasons = [];\n\n if (tier.config.preferredOperations.includes(context.queryType)) {\n reasons.push(`optimized for ${context.queryType} operations`);\n }\n\n if (\n context.requiredFeatures?.every((feature) =>\n tier.config.supportedFeatures.includes(feature)\n )\n ) {\n reasons.push(\n `supports all required features (${context.requiredFeatures.join(', ')})`\n );\n }\n\n if (score > 0.8) {\n reasons.push('high confidence match');\n } else if (score > 0.6) {\n reasons.push('good match');\n } else if (score > 0.4) {\n reasons.push('acceptable match');\n }\n\n return reasons.length > 0 ? 
reasons.join(', ') : 'default tier selection';\n }\n\n /**\n * Get current load for a tier\n */\n private async getCurrentLoad(tier: StorageTier): Promise<number> {\n // This would integrate with actual monitoring\n // For now, return a placeholder based on recent queries\n return this.metrics.queriesByTier.get(tier.name) || 0;\n }\n\n /**\n * Check if tier is within capacity limits\n */\n private async isWithinCapacity(tier: StorageTier): Promise<boolean> {\n try {\n const stats = await tier.adapter.getStats();\n\n if (tier.config.maxFrames && stats.totalFrames >= tier.config.maxFrames) {\n return false;\n }\n\n if (\n tier.config.maxSizeMB &&\n stats.diskUsage >= tier.config.maxSizeMB * 1024 * 1024\n ) {\n return false;\n }\n\n return true;\n } catch (error: unknown) {\n logger.warn(`Failed to check capacity for tier ${tier.name}:`, error);\n return true; // Assume capacity is OK if we can't check\n }\n }\n\n /**\n * Update routing metrics\n */\n private updateMetrics(\n tierName: string,\n startTime: number,\n success: boolean\n ): void {\n const duration = Date.now() - startTime;\n\n // Update tier metrics\n this.metrics.queriesByTier.set(\n tierName,\n (this.metrics.queriesByTier.get(tierName) || 0) + 1\n );\n\n if (success) {\n // Update latency\n const currentAvg = this.metrics.latencyByTier.get(tierName) || 0;\n const count = this.metrics.queriesByTier.get(tierName) || 1;\n const newAvg = (currentAvg * (count - 1) + duration) / count;\n this.metrics.latencyByTier.set(tierName, newAvg);\n\n // Update overall average\n this.metrics.averageLatency =\n (this.metrics.averageLatency * (this.metrics.totalQueries - 1) +\n duration) /\n this.metrics.totalQueries;\n } else {\n // Update error count\n this.metrics.errorsByTier.set(\n tierName,\n (this.metrics.errorsByTier.get(tierName) || 0) + 1\n );\n }\n }\n\n /**\n * Get current routing metrics\n */\n getMetrics(): QueryMetrics {\n // Update cache hit rate\n const cacheRequests = this.metrics.routingDecisions;\n const cacheHits = cacheRequests - this.decisionCache.size; // Approximation\n this.metrics.cacheHitRate =\n cacheRequests > 0 ? cacheHits / cacheRequests : 0;\n\n return { ...this.metrics };\n }\n\n /**\n * Get registered tiers\n */\n getTiers(): StorageTier[] {\n return Array.from(this.tiers.values()).sort(\n (a, b) => b.priority - a.priority\n );\n }\n\n /**\n * Clear routing decision cache\n */\n clearCache(): void {\n this.decisionCache.clear();\n logger.info('Routing decision cache cleared');\n }\n\n /**\n * Get tier by name\n */\n getTier(name: string): StorageTier | undefined {\n return this.tiers.get(name);\n }\n}\n"],
+
"sourcesContent": ["/**\n * Query Router for Tiered Storage\n * Routes database queries to appropriate storage tier based on data age, query type, and performance requirements\n */\n\nimport { DatabaseAdapter } from './database-adapter.js';\nimport type { Frame } from '../context/index.js';\nimport { logger } from '../monitoring/logger.js';\nimport { DatabaseError, ErrorCode } from '../errors/index.js';\nimport { EventEmitter } from 'events';\n\nexport interface StorageTier {\n name: string;\n adapter: DatabaseAdapter;\n priority: number;\n config: TierConfig;\n}\n\nexport interface TierConfig {\n // Data age thresholds\n maxAge?: number; // Data older than this goes to next tier (ms)\n minAge?: number; // Data newer than this stays in this tier (ms)\n\n // Query type preferences\n preferredOperations: string[]; // ['read', 'write', 'search', 'analytics']\n supportedFeatures: string[]; // ['full_text', 'vector', 'aggregation']\n\n // Performance characteristics\n maxLatency?: number; // Max acceptable latency for this tier (ms)\n maxThroughput?: number; // Max queries per second this tier can handle\n\n // Capacity limits\n maxFrames?: number; // Max frames before promoting to next tier\n maxSizeMB?: number; // Max storage size in MB\n\n // Routing rules\n routingRules: RoutingRule[];\n}\n\nexport interface RoutingRule {\n condition: string; // 'age' | 'size' | 'query_type' | 'load' | 'feature'\n operator: string; // '>', '<', '=', '!=', 'in', 'not_in'\n value: any; // Comparison value\n weight: number; // Rule weight (0-1)\n}\n\nexport interface QueryContext {\n queryType: 'read' | 'write' | 'search' | 'analytics' | 'bulk';\n frames?: Frame[];\n frameIds?: string[];\n requiredFeatures?: string[];\n timeRange?: { start: Date; end: Date };\n priority?: 'low' | 'medium' | 'high' | 'critical';\n timeout?: number;\n cacheStrategy?: 'none' | 'read' | 'write' | 'read_write';\n}\n\nexport interface RoutingDecision {\n primaryTier: StorageTier;\n fallbackTiers: StorageTier[];\n rationale: string;\n confidence: number; // 0-1 confidence in decision\n estimatedLatency: number; // Estimated query latency (ms)\n cacheRecommendation?: string;\n}\n\nexport interface QueryMetrics {\n totalQueries: number;\n queriesByTier: Map<string, number>;\n queriesByType: Map<string, number>;\n averageLatency: number;\n latencyByTier: Map<string, number>;\n errorsByTier: Map<string, number>;\n cacheHitRate: number;\n routingDecisions: number;\n}\n\nexport class QueryRouter extends EventEmitter {\n private tiers: Map<string, StorageTier> = new Map();\n private metrics: QueryMetrics;\n private decisionCache: Map<string, RoutingDecision> = new Map();\n private readonly cacheExpiration = 60000; // 1 minute\n private readonly maxCacheSize = 1000;\n\n constructor() {\n super();\n this.metrics = {\n totalQueries: 0,\n queriesByTier: new Map(),\n queriesByType: new Map(),\n averageLatency: 0,\n latencyByTier: new Map(),\n errorsByTier: new Map(),\n cacheHitRate: 0,\n routingDecisions: 0,\n };\n }\n\n /**\n * Register a storage tier with the router\n */\n registerTier(tier: StorageTier): void {\n this.tiers.set(tier.name, tier);\n logger.info(\n `Registered storage tier: ${tier.name} (priority: ${tier.priority})`\n );\n this.emit('tierRegistered', tier);\n }\n\n /**\n * Remove a storage tier from the router\n */\n unregisterTier(tierName: string): void {\n const tier = this.tiers.get(tierName);\n if (tier) {\n this.tiers.delete(tierName);\n logger.info(`Unregistered storage tier: ${tierName}`);\n this.emit('tierUnregistered', 
tier);\n }\n }\n\n /**\n * Route a query to the most appropriate storage tier\n */\n async route<T>(\n operation: string,\n context: QueryContext,\n executor: (adapter: DatabaseAdapter) => Promise<T>\n ): Promise<T> {\n const startTime = Date.now();\n this.metrics.totalQueries++;\n this.metrics.queriesByType.set(\n context.queryType,\n (this.metrics.queriesByType.get(context.queryType) || 0) + 1\n );\n\n try {\n // Get routing decision\n const decision = await this.makeRoutingDecision(operation, context);\n\n // Try primary tier first\n try {\n const result = await this.executeOnTier(decision.primaryTier, executor);\n this.updateMetrics(decision.primaryTier.name, startTime, true);\n return result;\n } catch (error: unknown) {\n logger.warn(\n `Query failed on primary tier ${decision.primaryTier.name}:`,\n error\n );\n this.updateMetrics(decision.primaryTier.name, startTime, false);\n\n // Try fallback tiers\n for (const fallbackTier of decision.fallbackTiers) {\n try {\n logger.info(`Attempting fallback to tier: ${fallbackTier.name}`);\n const result = await this.executeOnTier(fallbackTier, executor);\n this.updateMetrics(fallbackTier.name, startTime, true);\n return result;\n } catch (fallbackError: unknown) {\n logger.warn(\n `Query failed on fallback tier ${fallbackTier.name}:`,\n fallbackError\n );\n this.updateMetrics(fallbackTier.name, startTime, false);\n }\n }\n\n // If all tiers failed, throw the original error\n throw error;\n }\n } catch (error: unknown) {\n logger.error('Query routing failed:', error);\n this.emit('routingError', { operation, context, error });\n throw error;\n }\n }\n\n /**\n * Make routing decision based on query context\n */\n private async makeRoutingDecision(\n operation: string,\n context: QueryContext\n ): Promise<RoutingDecision> {\n // Check cache first\n const cacheKey = this.generateCacheKey(operation, context);\n const cached = this.decisionCache.get(cacheKey);\n if (cached && Date.now() - cached.estimatedLatency < this.cacheExpiration) {\n this.metrics.cacheHitRate =\n (this.metrics.cacheHitRate * this.metrics.routingDecisions + 1) /\n (this.metrics.routingDecisions + 1);\n return cached;\n }\n\n this.metrics.routingDecisions++;\n\n // Evaluate each tier\n const evaluations: Array<{\n tier: StorageTier;\n score: number;\n rationale: string;\n }> = [];\n\n for (const tier of this.tiers.values()) {\n const score = await this.evaluateTier(tier, operation, context);\n const rationale = this.generateRationale(tier, operation, context, score);\n evaluations.push({ tier, score, rationale });\n }\n\n // Sort by score (highest first)\n evaluations.sort((a, b) => b.score - a.score);\n\n if (evaluations.length === 0) {\n throw new DatabaseError(\n 'No storage tiers available for routing',\n ErrorCode.DB_CONNECTION_FAILED,\n { query: context.query, tiersConfigured: this.tiers.length }\n );\n }\n\n const primaryEval = evaluations[0];\n const fallbackTiers = evaluations\n .slice(1)\n .map((evaluation) => evaluation.tier);\n\n const decision: RoutingDecision = {\n primaryTier: primaryEval.tier,\n fallbackTiers,\n rationale: primaryEval.rationale,\n confidence: primaryEval.score,\n estimatedLatency: this.estimateLatency(\n primaryEval.tier,\n operation,\n context\n ),\n cacheRecommendation: this.recommendCacheStrategy(\n primaryEval.tier,\n context\n ),\n };\n\n // Cache decision\n this.cacheDecision(cacheKey, decision);\n\n logger.debug(\n `Routing decision: ${decision.primaryTier.name} (confidence: ${decision.confidence.toFixed(2)})`\n );\n 
this.emit('routingDecision', { operation, context, decision });\n\n return decision;\n }\n\n /**\n * Evaluate how well a tier fits the query requirements\n */\n private async evaluateTier(\n tier: StorageTier,\n operation: string,\n context: QueryContext\n ): Promise<number> {\n let score = 0;\n let maxScore = 0;\n\n // Evaluate each routing rule\n for (const rule of tier.config.routingRules) {\n maxScore += rule.weight;\n\n if (this.evaluateRule(rule, operation, context, tier)) {\n score += rule.weight;\n }\n }\n\n // Check operation preference\n if (tier.config.preferredOperations.includes(context.queryType)) {\n score += 0.2;\n maxScore += 0.2;\n }\n\n // Check feature support\n if (context.requiredFeatures) {\n const supportedFeatures = context.requiredFeatures.filter((feature) =>\n tier.config.supportedFeatures.includes(feature)\n );\n if (supportedFeatures.length === context.requiredFeatures.length) {\n score += 0.3;\n }\n maxScore += 0.3;\n }\n\n // Check current load\n const currentLoad = await this.getCurrentLoad(tier);\n if (\n tier.config.maxThroughput &&\n currentLoad < tier.config.maxThroughput * 0.8\n ) {\n score += 0.1;\n }\n maxScore += 0.1;\n\n // Check capacity\n if (await this.isWithinCapacity(tier)) {\n score += 0.1;\n }\n maxScore += 0.1;\n\n return maxScore > 0 ? score / maxScore : 0;\n }\n\n /**\n * Evaluate a single routing rule\n */\n private evaluateRule(\n rule: RoutingRule,\n _operation: string,\n context: QueryContext,\n _tier: StorageTier\n ): boolean {\n let actualValue: any;\n\n switch (rule.condition) {\n case 'age':\n // Check data age if frames are provided\n if (context.frames && context.frames.length > 0) {\n const avgAge =\n context.frames.reduce(\n (sum, frame) => sum + (Date.now() - frame.created_at),\n 0\n ) / context.frames.length;\n actualValue = avgAge;\n } else if (context.timeRange) {\n actualValue = Date.now() - context.timeRange.end.getTime();\n } else {\n return false;\n }\n break;\n\n case 'query_type':\n actualValue = context.queryType;\n break;\n\n case 'feature':\n actualValue = context.requiredFeatures || [];\n break;\n\n case 'priority':\n actualValue = context.priority || 'medium';\n break;\n\n case 'size':\n actualValue = context.frames ? 
context.frames.length : 0;\n break;\n\n default:\n return false;\n }\n\n return this.compareValues(actualValue, rule.operator, rule.value);\n }\n\n /**\n * Compare values based on operator\n */\n private compareValues(actual: any, operator: string, expected: any): boolean {\n switch (operator) {\n case '>':\n return actual > expected;\n case '<':\n return actual < expected;\n case '=':\n case '==':\n return actual === expected;\n case '!=':\n return actual !== expected;\n case 'in':\n return Array.isArray(expected) && expected.includes(actual);\n case 'not_in':\n return Array.isArray(expected) && !expected.includes(actual);\n case 'contains':\n return (\n Array.isArray(actual) &&\n actual.some((item) => expected.includes(item))\n );\n default:\n return false;\n }\n }\n\n /**\n * Execute query on specific tier\n */\n private async executeOnTier<T>(\n tier: StorageTier,\n executor: (adapter: DatabaseAdapter) => Promise<T>\n ): Promise<T> {\n const startTime = Date.now();\n\n try {\n const result = await executor(tier.adapter);\n const duration = Date.now() - startTime;\n\n logger.debug(`Query executed on tier ${tier.name} in ${duration}ms`);\n this.emit('queryExecuted', {\n tierName: tier.name,\n duration,\n success: true,\n });\n\n return result;\n } catch (error: unknown) {\n const duration = Date.now() - startTime;\n\n logger.error(\n `Query failed on tier ${tier.name} after ${duration}ms:`,\n error\n );\n this.emit('queryExecuted', {\n tierName: tier.name,\n duration,\n success: false,\n error,\n });\n\n throw error;\n }\n }\n\n /**\n * Generate cache key for routing decisions\n */\n private generateCacheKey(operation: string, context: QueryContext): string {\n const keyParts = [\n operation,\n context.queryType,\n context.priority || 'medium',\n (context.requiredFeatures || []).sort().join(','),\n context.timeRange\n ? 
`${context.timeRange.start.getTime()}-${context.timeRange.end.getTime()}`\n : '',\n ];\n\n return keyParts.join('|');\n }\n\n /**\n * Cache routing decision\n */\n private cacheDecision(key: string, decision: RoutingDecision): void {\n // Implement LRU eviction if cache is full\n if (this.decisionCache.size >= this.maxCacheSize) {\n const firstKey = this.decisionCache.keys().next().value;\n this.decisionCache.delete(firstKey);\n }\n\n this.decisionCache.set(key, decision);\n }\n\n /**\n * Estimate query latency for a tier\n */\n private estimateLatency(\n tier: StorageTier,\n operation: string,\n context: QueryContext\n ): number {\n const baseLatency =\n this.metrics.latencyByTier.get(tier.name) ||\n tier.config.maxLatency ||\n 100;\n\n // Adjust based on operation type\n let multiplier = 1;\n switch (context.queryType) {\n case 'search':\n multiplier = 1.5;\n break;\n case 'analytics':\n multiplier = 2.0;\n break;\n case 'bulk':\n multiplier = 3.0;\n break;\n default:\n multiplier = 1.0;\n }\n\n return baseLatency * multiplier;\n }\n\n /**\n * Recommend cache strategy for the context\n */\n private recommendCacheStrategy(\n tier: StorageTier,\n context: QueryContext\n ): string {\n if (context.cacheStrategy && context.cacheStrategy !== 'none') {\n return context.cacheStrategy;\n }\n\n // Default recommendations based on query type and tier\n if (tier.name === 'hot' || tier.name === 'memory') {\n return 'read_write';\n } else if (context.queryType === 'read') {\n return 'read';\n }\n\n return 'none';\n }\n\n /**\n * Generate human-readable rationale for routing decision\n */\n private generateRationale(\n tier: StorageTier,\n operation: string,\n context: QueryContext,\n score: number\n ): string {\n const reasons = [];\n\n if (tier.config.preferredOperations.includes(context.queryType)) {\n reasons.push(`optimized for ${context.queryType} operations`);\n }\n\n if (\n context.requiredFeatures?.every((feature) =>\n tier.config.supportedFeatures.includes(feature)\n )\n ) {\n reasons.push(\n `supports all required features (${context.requiredFeatures.join(', ')})`\n );\n }\n\n if (score > 0.8) {\n reasons.push('high confidence match');\n } else if (score > 0.6) {\n reasons.push('good match');\n } else if (score > 0.4) {\n reasons.push('acceptable match');\n }\n\n return reasons.length > 0 ? 
reasons.join(', ') : 'default tier selection';\n }\n\n /**\n * Get current load for a tier\n */\n private async getCurrentLoad(tier: StorageTier): Promise<number> {\n // This would integrate with actual monitoring\n // For now, return a placeholder based on recent queries\n return this.metrics.queriesByTier.get(tier.name) || 0;\n }\n\n /**\n * Check if tier is within capacity limits\n */\n private async isWithinCapacity(tier: StorageTier): Promise<boolean> {\n try {\n const stats = await tier.adapter.getStats();\n\n if (tier.config.maxFrames && stats.totalFrames >= tier.config.maxFrames) {\n return false;\n }\n\n if (\n tier.config.maxSizeMB &&\n stats.diskUsage >= tier.config.maxSizeMB * 1024 * 1024\n ) {\n return false;\n }\n\n return true;\n } catch (error: unknown) {\n logger.warn(`Failed to check capacity for tier ${tier.name}:`, error);\n return true; // Assume capacity is OK if we can't check\n }\n }\n\n /**\n * Update routing metrics\n */\n private updateMetrics(\n tierName: string,\n startTime: number,\n success: boolean\n ): void {\n const duration = Date.now() - startTime;\n\n // Update tier metrics\n this.metrics.queriesByTier.set(\n tierName,\n (this.metrics.queriesByTier.get(tierName) || 0) + 1\n );\n\n if (success) {\n // Update latency\n const currentAvg = this.metrics.latencyByTier.get(tierName) || 0;\n const count = this.metrics.queriesByTier.get(tierName) || 1;\n const newAvg = (currentAvg * (count - 1) + duration) / count;\n this.metrics.latencyByTier.set(tierName, newAvg);\n\n // Update overall average\n this.metrics.averageLatency =\n (this.metrics.averageLatency * (this.metrics.totalQueries - 1) +\n duration) /\n this.metrics.totalQueries;\n } else {\n // Update error count\n this.metrics.errorsByTier.set(\n tierName,\n (this.metrics.errorsByTier.get(tierName) || 0) + 1\n );\n }\n }\n\n /**\n * Get current routing metrics\n */\n getMetrics(): QueryMetrics {\n // Update cache hit rate\n const cacheRequests = this.metrics.routingDecisions;\n const cacheHits = cacheRequests - this.decisionCache.size; // Approximation\n this.metrics.cacheHitRate =\n cacheRequests > 0 ? cacheHits / cacheRequests : 0;\n\n return { ...this.metrics };\n }\n\n /**\n * Get registered tiers\n */\n getTiers(): StorageTier[] {\n return Array.from(this.tiers.values()).sort(\n (a, b) => b.priority - a.priority\n );\n }\n\n /**\n * Clear routing decision cache\n */\n clearCache(): void {\n this.decisionCache.clear();\n logger.info('Routing decision cache cleared');\n }\n\n /**\n * Get tier by name\n */\n getTier(name: string): StorageTier | undefined {\n return this.tiers.get(name);\n }\n}\n"],
"mappings": ";;;;AAOA,SAAS,cAAc;AACvB,SAAS,eAAe,iBAAiB;AACzC,SAAS,oBAAoB;AAoEtB,MAAM,oBAAoB,aAAa;AAAA,EACpC,QAAkC,oBAAI,IAAI;AAAA,EAC1C;AAAA,EACA,gBAA8C,oBAAI,IAAI;AAAA,EAC7C,kBAAkB;AAAA;AAAA,EAClB,eAAe;AAAA,EAEhC,cAAc;AACZ,UAAM;AACN,SAAK,UAAU;AAAA,MACb,cAAc;AAAA,MACd,eAAe,oBAAI,IAAI;AAAA,MACvB,eAAe,oBAAI,IAAI;AAAA,MACvB,gBAAgB;AAAA,MAChB,eAAe,oBAAI,IAAI;AAAA,MACvB,cAAc,oBAAI,IAAI;AAAA,MACtB,cAAc;AAAA,MACd,kBAAkB;AAAA,IACpB;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,aAAa,MAAyB;AACpC,SAAK,MAAM,IAAI,KAAK,MAAM,IAAI;AAC9B,WAAO;AAAA,MACL,4BAA4B,KAAK,IAAI,eAAe,KAAK,QAAQ;AAAA,IACnE;AACA,SAAK,KAAK,kBAAkB,IAAI;AAAA,EAClC;AAAA;AAAA;AAAA;AAAA,EAKA,eAAe,UAAwB;AACrC,UAAM,OAAO,KAAK,MAAM,IAAI,QAAQ;AACpC,QAAI,MAAM;AACR,WAAK,MAAM,OAAO,QAAQ;AAC1B,aAAO,KAAK,8BAA8B,QAAQ,EAAE;AACpD,WAAK,KAAK,oBAAoB,IAAI;AAAA,IACpC;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,MACJ,WACA,SACA,UACY;AACZ,UAAM,YAAY,KAAK,IAAI;AAC3B,SAAK,QAAQ;AACb,SAAK,QAAQ,cAAc;AAAA,MACzB,QAAQ;AAAA,OACP,KAAK,QAAQ,cAAc,IAAI,QAAQ,SAAS,KAAK,KAAK;AAAA,IAC7D;AAEA,QAAI;AAEF,YAAM,WAAW,MAAM,KAAK,oBAAoB,WAAW,OAAO;AAGlE,UAAI;AACF,cAAM,SAAS,MAAM,KAAK,cAAc,SAAS,aAAa,QAAQ;AACtE,aAAK,cAAc,SAAS,YAAY,MAAM,WAAW,IAAI;AAC7D,eAAO;AAAA,MACT,SAAS,OAAgB;AACvB,eAAO;AAAA,UACL,gCAAgC,SAAS,YAAY,IAAI;AAAA,UACzD;AAAA,QACF;AACA,aAAK,cAAc,SAAS,YAAY,MAAM,WAAW,KAAK;AAG9D,mBAAW,gBAAgB,SAAS,eAAe;AACjD,cAAI;AACF,mBAAO,KAAK,gCAAgC,aAAa,IAAI,EAAE;AAC/D,kBAAM,SAAS,MAAM,KAAK,cAAc,cAAc,QAAQ;AAC9D,iBAAK,cAAc,aAAa,MAAM,WAAW,IAAI;AACrD,mBAAO;AAAA,UACT,SAAS,eAAwB;AAC/B,mBAAO;AAAA,cACL,iCAAiC,aAAa,IAAI;AAAA,cAClD;AAAA,YACF;AACA,iBAAK,cAAc,aAAa,MAAM,WAAW,KAAK;AAAA,UACxD;AAAA,QACF;AAGA,cAAM;AAAA,MACR;AAAA,IACF,SAAS,OAAgB;AACvB,aAAO,MAAM,yBAAyB,KAAK;AAC3C,WAAK,KAAK,gBAAgB,EAAE,WAAW,SAAS,MAAM,CAAC;AACvD,YAAM;AAAA,IACR;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,oBACZ,WACA,SAC0B;AAE1B,UAAM,WAAW,KAAK,iBAAiB,WAAW,OAAO;AACzD,UAAM,SAAS,KAAK,cAAc,IAAI,QAAQ;AAC9C,QAAI,UAAU,KAAK,IAAI,IAAI,OAAO,mBAAmB,KAAK,iBAAiB;AACzE,WAAK,QAAQ,gBACV,KAAK,QAAQ,eAAe,KAAK,QAAQ,mBAAmB,MAC5D,KAAK,QAAQ,mBAAmB;AACnC,aAAO;AAAA,IACT;AAEA,SAAK,QAAQ;AAGb,UAAM,cAID,CAAC;AAEN,eAAW,QAAQ,KAAK,MAAM,OAAO,GAAG;AACtC,YAAM,QAAQ,MAAM,KAAK,aAAa,MAAM,WAAW,OAAO;AAC9D,YAAM,YAAY,KAAK,kBAAkB,MAAM,WAAW,SAAS,KAAK;AACxE,kBAAY,KAAK,EAAE,MAAM,OAAO,UAAU,CAAC;AAAA,IAC7C;AAGA,gBAAY,KAAK,CAAC,GAAG,MAAM,EAAE,QAAQ,EAAE,KAAK;AAE5C,QAAI,YAAY,WAAW,GAAG;AAC5B,YAAM,IAAI;AAAA,QACR;AAAA,QACA,UAAU;AAAA,QACV,EAAE,OAAO,QAAQ,OAAO,iBAAiB,KAAK,MAAM,OAAO;AAAA,MAC7D;AAAA,IACF;AAEA,UAAM,cAAc,YAAY,CAAC;AACjC,UAAM,gBAAgB,YACnB,MAAM,CAAC,EACP,IAAI,CAAC,eAAe,WAAW,IAAI;AAEtC,UAAM,WAA4B;AAAA,MAChC,aAAa,YAAY;AAAA,MACzB;AAAA,MACA,WAAW,YAAY;AAAA,MACvB,YAAY,YAAY;AAAA,MACxB,kBAAkB,KAAK;AAAA,QACrB,YAAY;AAAA,QACZ;AAAA,QACA;AAAA,MACF;AAAA,MACA,qBAAqB,KAAK;AAAA,QACxB,YAAY;AAAA,QACZ;AAAA,MACF;AAAA,IACF;AAGA,SAAK,cAAc,UAAU,QAAQ;AAErC,WAAO;AAAA,MACL,qBAAqB,SAAS,YAAY,IAAI,iBAAiB,SAAS,WAAW,QAAQ,CAAC,CAAC;AAAA,IAC/F;AACA,SAAK,KAAK,mBAAmB,EAAE,WAAW,SAAS,SAAS,CAAC;AAE7D,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,aACZ,MACA,WACA,SACiB;AACjB,QAAI,QAAQ;AACZ,QAAI,WAAW;AAGf,eAAW,QAAQ,KAAK,OAAO,cAAc;AAC3C,kBAAY,KAAK;AAEjB,UAAI,KAAK,aAAa,MAAM,WAAW,SAAS,IAAI,GAAG;AACrD,iBAAS,KAAK;AAAA,MAChB;AAAA,IACF;AAGA,QAAI,KAAK,OAAO,oBAAoB,SAAS,QAAQ,SAAS,GAAG;AAC/D,eAAS;AACT,kBAAY;AAAA,IACd;AAGA,QAAI,QAAQ,kBAAkB;AAC5B,YAAM,oBAAoB,QAAQ,iBAAiB;AAAA,QAAO,CAAC,YACzD,KAAK,OAAO,kBAAkB,SAAS,OAAO;AAAA,MAChD;AACA,UAAI,kBAAkB,WAAW,QAAQ,iBAAiB,QAAQ;AAChE,iBAAS;AAAA,MACX;AACA,kBAAY;AAAA,IACd;AAGA,UAAM,cAAc,MAAM,KAAK,eAAe,IAAI;AAClD,QACE,KAAK,OAAO,iBACZ,cAAc,KAAK,OAAO,gBAAgB,KAC1C;AACA,eAAS;AAAA,IACX;AACA,gBAAY;AAGZ,QAAI,MAAM,KAAK,iBAAiB,IAAI,GAAG;
AACrC,eAAS;AAAA,IACX;AACA,gBAAY;AAEZ,WAAO,WAAW,IAAI,QAAQ,WAAW;AAAA,EAC3C;AAAA;AAAA;AAAA;AAAA,EAKQ,aACN,MACA,YACA,SACA,OACS;AACT,QAAI;AAEJ,YAAQ,KAAK,WAAW;AAAA,MACtB,KAAK;AAEH,YAAI,QAAQ,UAAU,QAAQ,OAAO,SAAS,GAAG;AAC/C,gBAAM,SACJ,QAAQ,OAAO;AAAA,YACb,CAAC,KAAK,UAAU,OAAO,KAAK,IAAI,IAAI,MAAM;AAAA,YAC1C;AAAA,UACF,IAAI,QAAQ,OAAO;AACrB,wBAAc;AAAA,QAChB,WAAW,QAAQ,WAAW;AAC5B,wBAAc,KAAK,IAAI,IAAI,QAAQ,UAAU,IAAI,QAAQ;AAAA,QAC3D,OAAO;AACL,iBAAO;AAAA,QACT;AACA;AAAA,MAEF,KAAK;AACH,sBAAc,QAAQ;AACtB;AAAA,MAEF,KAAK;AACH,sBAAc,QAAQ,oBAAoB,CAAC;AAC3C;AAAA,MAEF,KAAK;AACH,sBAAc,QAAQ,YAAY;AAClC;AAAA,MAEF,KAAK;AACH,sBAAc,QAAQ,SAAS,QAAQ,OAAO,SAAS;AACvD;AAAA,MAEF;AACE,eAAO;AAAA,IACX;AAEA,WAAO,KAAK,cAAc,aAAa,KAAK,UAAU,KAAK,KAAK;AAAA,EAClE;AAAA;AAAA;AAAA;AAAA,EAKQ,cAAc,QAAa,UAAkB,UAAwB;AAC3E,YAAQ,UAAU;AAAA,MAChB,KAAK;AACH,eAAO,SAAS;AAAA,MAClB,KAAK;AACH,eAAO,SAAS;AAAA,MAClB,KAAK;AAAA,MACL,KAAK;AACH,eAAO,WAAW;AAAA,MACpB,KAAK;AACH,eAAO,WAAW;AAAA,MACpB,KAAK;AACH,eAAO,MAAM,QAAQ,QAAQ,KAAK,SAAS,SAAS,MAAM;AAAA,MAC5D,KAAK;AACH,eAAO,MAAM,QAAQ,QAAQ,KAAK,CAAC,SAAS,SAAS,MAAM;AAAA,MAC7D,KAAK;AACH,eACE,MAAM,QAAQ,MAAM,KACpB,OAAO,KAAK,CAAC,SAAS,SAAS,SAAS,IAAI,CAAC;AAAA,MAEjD;AACE,eAAO;AAAA,IACX;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,cACZ,MACA,UACY;AACZ,UAAM,YAAY,KAAK,IAAI;AAE3B,QAAI;AACF,YAAM,SAAS,MAAM,SAAS,KAAK,OAAO;AAC1C,YAAM,WAAW,KAAK,IAAI,IAAI;AAE9B,aAAO,MAAM,0BAA0B,KAAK,IAAI,OAAO,QAAQ,IAAI;AACnE,WAAK,KAAK,iBAAiB;AAAA,QACzB,UAAU,KAAK;AAAA,QACf;AAAA,QACA,SAAS;AAAA,MACX,CAAC;AAED,aAAO;AAAA,IACT,SAAS,OAAgB;AACvB,YAAM,WAAW,KAAK,IAAI,IAAI;AAE9B,aAAO;AAAA,QACL,wBAAwB,KAAK,IAAI,UAAU,QAAQ;AAAA,QACnD;AAAA,MACF;AACA,WAAK,KAAK,iBAAiB;AAAA,QACzB,UAAU,KAAK;AAAA,QACf;AAAA,QACA,SAAS;AAAA,QACT;AAAA,MACF,CAAC;AAED,YAAM;AAAA,IACR;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,iBAAiB,WAAmB,SAA+B;AACzE,UAAM,WAAW;AAAA,MACf;AAAA,MACA,QAAQ;AAAA,MACR,QAAQ,YAAY;AAAA,OACnB,QAAQ,oBAAoB,CAAC,GAAG,KAAK,EAAE,KAAK,GAAG;AAAA,MAChD,QAAQ,YACJ,GAAG,QAAQ,UAAU,MAAM,QAAQ,CAAC,IAAI,QAAQ,UAAU,IAAI,QAAQ,CAAC,KACvE;AAAA,IACN;AAEA,WAAO,SAAS,KAAK,GAAG;AAAA,EAC1B;AAAA;AAAA;AAAA;AAAA,EAKQ,cAAc,KAAa,UAAiC;AAElE,QAAI,KAAK,cAAc,QAAQ,KAAK,cAAc;AAChD,YAAM,WAAW,KAAK,cAAc,KAAK,EAAE,KAAK,EAAE;AAClD,WAAK,cAAc,OAAO,QAAQ;AAAA,IACpC;AAEA,SAAK,cAAc,IAAI,KAAK,QAAQ;AAAA,EACtC;AAAA;AAAA;AAAA;AAAA,EAKQ,gBACN,MACA,WACA,SACQ;AACR,UAAM,cACJ,KAAK,QAAQ,cAAc,IAAI,KAAK,IAAI,KACxC,KAAK,OAAO,cACZ;AAGF,QAAI,aAAa;AACjB,YAAQ,QAAQ,WAAW;AAAA,MACzB,KAAK;AACH,qBAAa;AACb;AAAA,MACF,KAAK;AACH,qBAAa;AACb;AAAA,MACF,KAAK;AACH,qBAAa;AACb;AAAA,MACF;AACE,qBAAa;AAAA,IACjB;AAEA,WAAO,cAAc;AAAA,EACvB;AAAA;AAAA;AAAA;AAAA,EAKQ,uBACN,MACA,SACQ;AACR,QAAI,QAAQ,iBAAiB,QAAQ,kBAAkB,QAAQ;AAC7D,aAAO,QAAQ;AAAA,IACjB;AAGA,QAAI,KAAK,SAAS,SAAS,KAAK,SAAS,UAAU;AACjD,aAAO;AAAA,IACT,WAAW,QAAQ,cAAc,QAAQ;AACvC,aAAO;AAAA,IACT;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKQ,kBACN,MACA,WACA,SACA,OACQ;AACR,UAAM,UAAU,CAAC;AAEjB,QAAI,KAAK,OAAO,oBAAoB,SAAS,QAAQ,SAAS,GAAG;AAC/D,cAAQ,KAAK,iBAAiB,QAAQ,SAAS,aAAa;AAAA,IAC9D;AAEA,QACE,QAAQ,kBAAkB;AAAA,MAAM,CAAC,YAC/B,KAAK,OAAO,kBAAkB,SAAS,OAAO;AAAA,IAChD,GACA;AACA,cAAQ;AAAA,QACN,mCAAmC,QAAQ,iBAAiB,KAAK,IAAI,CAAC;AAAA,MACxE;AAAA,IACF;AAEA,QAAI,QAAQ,KAAK;AACf,cAAQ,KAAK,uBAAuB;AAAA,IACtC,WAAW,QAAQ,KAAK;AACtB,cAAQ,KAAK,YAAY;AAAA,IAC3B,WAAW,QAAQ,KAAK;AACtB,cAAQ,KAAK,kBAAkB;AAAA,IACjC;AAEA,WAAO,QAAQ,SAAS,IAAI,QAAQ,KAAK,IAAI,IAAI;AAAA,EACnD;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,eAAe,MAAoC;AAG/D,WAAO,KAAK,QAAQ,cAAc,IAAI,KAAK,IAAI,KAAK;AAAA,EACtD;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,iBAAiB,MAAqC;AAClE,QAAI;AACF,YAAM,QAAQ,MAAM,KAAK,QAAQ,SAAS;AAE1C,UAAI,KAAK,OAAO,aAAa,MAAM,eAAe,KAAK,OAAO,WAAW;AACvE,eAAO;AAAA,MACT;AAEA,UACE,KAAK,OAAO,aACZ,MAAM,a
AAa,KAAK,OAAO,YAAY,OAAO,MAClD;AACA,eAAO;AAAA,MACT;AAEA,aAAO;AAAA,IACT,SAAS,OAAgB;AACvB,aAAO,KAAK,qCAAqC,KAAK,IAAI,KAAK,KAAK;AACpE,aAAO;AAAA,IACT;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,cACN,UACA,WACA,SACM;AACN,UAAM,WAAW,KAAK,IAAI,IAAI;AAG9B,SAAK,QAAQ,cAAc;AAAA,MACzB;AAAA,OACC,KAAK,QAAQ,cAAc,IAAI,QAAQ,KAAK,KAAK;AAAA,IACpD;AAEA,QAAI,SAAS;AAEX,YAAM,aAAa,KAAK,QAAQ,cAAc,IAAI,QAAQ,KAAK;AAC/D,YAAM,QAAQ,KAAK,QAAQ,cAAc,IAAI,QAAQ,KAAK;AAC1D,YAAM,UAAU,cAAc,QAAQ,KAAK,YAAY;AACvD,WAAK,QAAQ,cAAc,IAAI,UAAU,MAAM;AAG/C,WAAK,QAAQ,kBACV,KAAK,QAAQ,kBAAkB,KAAK,QAAQ,eAAe,KAC1D,YACF,KAAK,QAAQ;AAAA,IACjB,OAAO;AAEL,WAAK,QAAQ,aAAa;AAAA,QACxB;AAAA,SACC,KAAK,QAAQ,aAAa,IAAI,QAAQ,KAAK,KAAK;AAAA,MACnD;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,aAA2B;AAEzB,UAAM,gBAAgB,KAAK,QAAQ;AACnC,UAAM,YAAY,gBAAgB,KAAK,cAAc;AACrD,SAAK,QAAQ,eACX,gBAAgB,IAAI,YAAY,gBAAgB;AAElD,WAAO,EAAE,GAAG,KAAK,QAAQ;AAAA,EAC3B;AAAA;AAAA;AAAA;AAAA,EAKA,WAA0B;AACxB,WAAO,MAAM,KAAK,KAAK,MAAM,OAAO,CAAC,EAAE;AAAA,MACrC,CAAC,GAAG,MAAM,EAAE,WAAW,EAAE;AAAA,IAC3B;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,aAAmB;AACjB,SAAK,cAAc,MAAM;AACzB,WAAO,KAAK,gCAAgC;AAAA,EAC9C;AAAA;AAAA;AAAA;AAAA,EAKA,QAAQ,MAAuC;AAC7C,WAAO,KAAK,MAAM,IAAI,IAAI;AAAA,EAC5B;AACF;",
"names": []
}
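A note on the query-router source embedded in the map above: `cacheDecision()` bounds its decision cache and, when full, deletes `keys().next().value`. Because a JavaScript `Map` iterates in insertion order, this evicts the oldest *inserted* entry (FIFO), not the least recently *used* one as the "LRU eviction" comment suggests; a true LRU would re-insert entries on each hit. A minimal sketch of the pattern, not the package's exported API (the class name and `maxSize` default are assumptions for illustration):

```ts
// Sketch only: a bounded Map cache mirroring cacheDecision()'s eviction.
// Map preserves insertion order, so keys().next().value is the oldest
// inserted key; this is FIFO eviction, not strict LRU.
class DecisionCache<V> {
  private readonly cache = new Map<string, V>();

  // The default size is an assumption for illustration.
  constructor(private readonly maxSize: number = 1000) {}

  get(key: string): V | undefined {
    return this.cache.get(key);
  }

  set(key: string, value: V): void {
    if (this.cache.size >= this.maxSize) {
      const oldest = this.cache.keys().next().value;
      if (oldest !== undefined) this.cache.delete(oldest);
    }
    this.cache.set(key, value);
  }
}
```

The same source maintains per-tier latency as an incremental mean, `newAvg = (currentAvg * (count - 1) + duration) / count`, which updates the average in O(1) without retaining individual samples.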
@@ -1,7 +1,7 @@
{
"version": 3,
"sources": ["../../../src/core/database/sqlite-adapter.ts"],
-
"sourcesContent": ["/**\n * SQLite Database Adapter\n * Maintains backward compatibility with existing SQLite implementation\n */\n\nimport Database from 'better-sqlite3';\nimport {\n FeatureAwareDatabaseAdapter,\n DatabaseFeatures,\n SearchOptions,\n QueryOptions,\n AggregationOptions,\n BulkOperation,\n DatabaseStats,\n CountResult,\n VersionResult,\n FrameRow,\n} from './database-adapter.js';\nimport type { Frame, Event, Anchor } from '../context/frame-manager.js';\nimport { logger } from '../monitoring/logger.js';\nimport { DatabaseError, ErrorCode, ValidationError } from '../errors/index.js';\nimport * as fs from 'fs/promises';\nimport * as path from 'path';\n\nexport interface SQLiteConfig {\n dbPath: string;\n walMode?: boolean;\n busyTimeout?: number;\n cacheSize?: number;\n synchronous?: 'OFF' | 'NORMAL' | 'FULL' | 'EXTRA';\n}\n\nexport class SQLiteAdapter extends FeatureAwareDatabaseAdapter {\n private db: Database.Database | null = null;\n private readonly dbPath: string;\n private inTransactionFlag = false;\n\n constructor(projectId: string, config: SQLiteConfig) {\n super(projectId, config);\n this.dbPath = config.dbPath;\n }\n\n getFeatures(): DatabaseFeatures {\n return {\n supportsFullTextSearch: false, // Could enable with FTS5\n supportsVectorSearch: false,\n supportsPartitioning: false,\n supportsAnalytics: false,\n supportsCompression: false,\n supportsMaterializedViews: false,\n supportsParallelQueries: false,\n };\n }\n\n async connect(): Promise<void> {\n if (this.db) return;\n\n const config = this.config as SQLiteConfig;\n\n // Ensure directory exists\n const dir = path.dirname(this.dbPath);\n await fs.mkdir(dir, { recursive: true });\n\n this.db = new Database(this.dbPath);\n\n // Enforce referential integrity\n this.db.pragma('foreign_keys = ON');\n\n // Configure SQLite for better performance\n if (config.walMode !== false) {\n this.db.pragma('journal_mode = WAL');\n }\n\n if (config.busyTimeout) {\n this.db.pragma(`busy_timeout = ${config.busyTimeout}`);\n }\n\n if (config.cacheSize) {\n this.db.pragma(`cache_size = ${config.cacheSize}`);\n }\n\n if (config.synchronous) {\n this.db.pragma(`synchronous = ${config.synchronous}`);\n }\n\n logger.info('SQLite database connected', { dbPath: this.dbPath });\n }\n\n async disconnect(): Promise<void> {\n if (!this.db) return;\n\n this.db.close();\n this.db = null;\n logger.info('SQLite database disconnected');\n }\n\n /**\n * Get raw database handle for testing purposes\n * @internal\n */\n getRawDatabase(): Database.Database | null {\n return this.db;\n }\n\n isConnected(): boolean {\n return this.db !== null && this.db.open;\n }\n\n async ping(): Promise<boolean> {\n if (!this.db) return false;\n\n try {\n this.db.prepare('SELECT 1').get();\n return true;\n } catch {\n return false;\n }\n }\n\n async initializeSchema(): Promise<void> {\n if (!this.db)\n throw new DatabaseError(\n 'Database not connected',\n ErrorCode.DB_CONNECTION_FAILED\n );\n\n this.db.exec(`\n CREATE TABLE IF NOT EXISTS frames (\n frame_id TEXT PRIMARY KEY,\n run_id TEXT NOT NULL,\n project_id TEXT NOT NULL,\n parent_frame_id TEXT REFERENCES frames(frame_id),\n depth INTEGER NOT NULL DEFAULT 0,\n type TEXT NOT NULL,\n name TEXT NOT NULL,\n state TEXT DEFAULT 'active',\n inputs TEXT DEFAULT '{}',\n outputs TEXT DEFAULT '{}',\n digest_text TEXT,\n digest_json TEXT DEFAULT '{}',\n created_at INTEGER DEFAULT (unixepoch()),\n closed_at INTEGER\n );\n\n CREATE TABLE IF NOT EXISTS events (\n event_id TEXT PRIMARY KEY,\n run_id TEXT NOT NULL,\n frame_id 
TEXT NOT NULL,\n seq INTEGER NOT NULL,\n event_type TEXT NOT NULL,\n payload TEXT NOT NULL,\n ts INTEGER DEFAULT (unixepoch()),\n FOREIGN KEY(frame_id) REFERENCES frames(frame_id) ON DELETE CASCADE\n );\n\n CREATE TABLE IF NOT EXISTS anchors (\n anchor_id TEXT PRIMARY KEY,\n frame_id TEXT NOT NULL,\n project_id TEXT NOT NULL,\n type TEXT NOT NULL,\n text TEXT NOT NULL,\n priority INTEGER DEFAULT 0,\n created_at INTEGER DEFAULT (unixepoch()),\n metadata TEXT DEFAULT '{}',\n FOREIGN KEY(frame_id) REFERENCES frames(frame_id) ON DELETE CASCADE\n );\n\n CREATE TABLE IF NOT EXISTS schema_version (\n version INTEGER PRIMARY KEY,\n applied_at INTEGER DEFAULT (unixepoch())\n );\n\n -- Indexes for performance\n CREATE INDEX IF NOT EXISTS idx_frames_run ON frames(run_id);\n CREATE INDEX IF NOT EXISTS idx_frames_project ON frames(project_id);\n CREATE INDEX IF NOT EXISTS idx_frames_parent ON frames(parent_frame_id);\n CREATE INDEX IF NOT EXISTS idx_frames_state ON frames(state);\n CREATE INDEX IF NOT EXISTS idx_frames_created ON frames(created_at DESC);\n CREATE INDEX IF NOT EXISTS idx_events_frame ON events(frame_id);\n CREATE INDEX IF NOT EXISTS idx_events_seq ON events(frame_id, seq);\n CREATE INDEX IF NOT EXISTS idx_anchors_frame ON anchors(frame_id);\n\n -- Set initial schema version if not exists\n INSERT OR IGNORE INTO schema_version (version) VALUES (1);\n `);\n\n // Ensure cascade constraints exist on dependent tables for existing DBs\n try {\n this.ensureCascadeConstraints();\n } catch (e) {\n logger.warn('Failed to ensure cascade constraints', e as Error);\n }\n }\n\n /**\n * Ensure ON DELETE CASCADE exists for events/anchors referencing frames\n * Migrates existing tables in-place if needed without data loss.\n */\n private ensureCascadeConstraints(): void {\n if (!this.db) return;\n\n const needsCascade = (table: string): boolean => {\n const rows = this.db!.prepare(\n `PRAGMA foreign_key_list(${table})`\n ).all() as any[];\n // If any FK points to frames without cascade, we need migration\n return rows.some(\n (r) =>\n r.table === 'frames' &&\n String(r.on_delete).toUpperCase() !== 'CASCADE'\n );\n };\n\n const migrateTable = (table: 'events' | 'anchors') => {\n const createSql =\n table === 'events'\n ? `CREATE TABLE events_new (\n event_id TEXT PRIMARY KEY,\n run_id TEXT NOT NULL,\n frame_id TEXT NOT NULL,\n seq INTEGER NOT NULL,\n event_type TEXT NOT NULL,\n payload TEXT NOT NULL,\n ts INTEGER DEFAULT (unixepoch()),\n FOREIGN KEY(frame_id) REFERENCES frames(frame_id) ON DELETE CASCADE\n );`\n : `CREATE TABLE anchors_new (\n anchor_id TEXT PRIMARY KEY,\n frame_id TEXT NOT NULL,\n project_id TEXT NOT NULL,\n type TEXT NOT NULL,\n text TEXT NOT NULL,\n priority INTEGER DEFAULT 0,\n created_at INTEGER DEFAULT (unixepoch()),\n metadata TEXT DEFAULT '{}',\n FOREIGN KEY(frame_id) REFERENCES frames(frame_id) ON DELETE CASCADE\n );`;\n\n const cols =\n table === 'events'\n ? 'event_id, run_id, frame_id, seq, event_type, payload, ts'\n : 'anchor_id, frame_id, project_id, type, text, priority, created_at, metadata';\n\n const idxSql =\n table === 'events'\n ? [\n 'CREATE INDEX IF NOT EXISTS idx_events_frame ON events(frame_id);',\n 'CREATE INDEX IF NOT EXISTS idx_events_seq ON events(frame_id, seq);',\n ]\n : [\n 'CREATE INDEX IF NOT EXISTS idx_anchors_frame ON anchors(frame_id);',\n ];\n\n this.db!.exec('PRAGMA foreign_keys = OFF;');\n this.db!.exec('BEGIN;');\n this.db!.exec(createSql);\n this.db!.prepare(\n `INSERT INTO ${table === 'events' ? 
'events_new' : 'anchors_new'} (${cols}) SELECT ${cols} FROM ${table}`\n ).run();\n this.db!.exec(`DROP TABLE ${table};`);\n this.db!.exec(`ALTER TABLE ${table}_new RENAME TO ${table};`);\n for (const stmt of idxSql) this.db!.exec(stmt);\n this.db!.exec('COMMIT;');\n this.db!.exec('PRAGMA foreign_keys = ON;');\n logger.info(`Migrated ${table} to include ON DELETE CASCADE`);\n };\n\n if (needsCascade('events')) migrateTable('events');\n if (needsCascade('anchors')) migrateTable('anchors');\n }\n\n async migrateSchema(targetVersion: number): Promise<void> {\n if (!this.db)\n throw new DatabaseError(\n 'Database not connected',\n ErrorCode.DB_CONNECTION_FAILED\n );\n\n const currentVersion = await this.getSchemaVersion();\n\n if (currentVersion >= targetVersion) {\n logger.info('Schema already at target version', {\n currentVersion,\n targetVersion,\n });\n return;\n }\n\n // Apply migrations sequentially\n for (let v = currentVersion + 1; v <= targetVersion; v++) {\n logger.info(`Applying migration to version ${v}`);\n // Migration logic would go here\n this.db.prepare('UPDATE schema_version SET version = ?').run(v);\n }\n }\n\n async getSchemaVersion(): Promise<number> {\n if (!this.db)\n throw new DatabaseError(\n 'Database not connected',\n ErrorCode.DB_CONNECTION_FAILED\n );\n\n try {\n const result = this.db\n .prepare('SELECT MAX(version) as version FROM schema_version')\n .get() as VersionResult;\n return result?.version || 0;\n } catch {\n return 0;\n }\n }\n\n // Frame operations\n async createFrame(frame: Partial<Frame>): Promise<string> {\n if (!this.db)\n throw new DatabaseError(\n 'Database not connected',\n ErrorCode.DB_CONNECTION_FAILED\n );\n\n const frameId = frame.frame_id || this.generateId();\n\n this.db\n .prepare(\n `\n INSERT INTO frames (\n frame_id, run_id, project_id, parent_frame_id, depth,\n type, name, state, inputs, outputs, digest_text, digest_json\n ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)\n `\n )\n .run(\n frameId,\n frame.run_id,\n frame.project_id || this.projectId,\n frame.parent_frame_id || null,\n frame.depth || 0,\n frame.type,\n frame.name,\n frame.state || 'active',\n JSON.stringify(frame.inputs || {}),\n JSON.stringify(frame.outputs || {}),\n frame.digest_text || null,\n JSON.stringify(frame.digest_json || {})\n );\n\n return frameId;\n }\n\n async getFrame(frameId: string): Promise<Frame | null> {\n if (!this.db)\n throw new DatabaseError(\n 'Database not connected',\n ErrorCode.DB_CONNECTION_FAILED\n );\n\n const row = this.db\n .prepare('SELECT * FROM frames WHERE frame_id = ?')\n .get(frameId) as FrameRow | undefined;\n\n if (!row) return null;\n\n return {\n ...row,\n inputs: JSON.parse(row.inputs || '{}'),\n outputs: JSON.parse(row.outputs || '{}'),\n digest_json: JSON.parse(row.digest_json || '{}'),\n };\n }\n\n async updateFrame(frameId: string, updates: Partial<Frame>): Promise<void> {\n if (!this.db)\n throw new DatabaseError(\n 'Database not connected',\n ErrorCode.DB_CONNECTION_FAILED\n );\n\n const fields = [];\n const values = [];\n\n if (updates.state !== undefined) {\n fields.push('state = ?');\n values.push(updates.state);\n }\n\n if (updates.outputs !== undefined) {\n fields.push('outputs = ?');\n values.push(JSON.stringify(updates.outputs));\n }\n\n if (updates.digest_text !== undefined) {\n fields.push('digest_text = ?');\n values.push(updates.digest_text);\n }\n\n if (updates.digest_json !== undefined) {\n fields.push('digest_json = ?');\n values.push(JSON.stringify(updates.digest_json));\n }\n\n if (updates.closed_at !== 
undefined) {\n fields.push('closed_at = ?');\n values.push(updates.closed_at);\n }\n\n if (fields.length === 0) return;\n\n values.push(frameId);\n\n this.db\n .prepare(\n `\n UPDATE frames SET ${fields.join(', ')} WHERE frame_id = ?\n `\n )\n .run(...values);\n }\n\n async deleteFrame(frameId: string): Promise<void> {\n if (!this.db)\n throw new DatabaseError(\n 'Database not connected',\n ErrorCode.DB_CONNECTION_FAILED\n );\n\n // Delete in order due to foreign keys\n await this.deleteFrameAnchors(frameId);\n await this.deleteFrameEvents(frameId);\n\n this.db.prepare('DELETE FROM frames WHERE frame_id = ?').run(frameId);\n }\n\n async getActiveFrames(runId?: string): Promise<Frame[]> {\n if (!this.db)\n throw new DatabaseError(\n 'Database not connected',\n ErrorCode.DB_CONNECTION_FAILED\n );\n\n let query = \"SELECT * FROM frames WHERE state = 'active'\";\n const params = [];\n\n if (runId) {\n query += ' AND run_id = ?';\n params.push(runId);\n }\n\n query += ' ORDER BY depth ASC, created_at ASC';\n\n const rows = this.db.prepare(query).all(...params) as any[];\n\n return rows.map((row) => ({\n ...row,\n inputs: JSON.parse(row.inputs || '{}'),\n outputs: JSON.parse(row.outputs || '{}'),\n digest_json: JSON.parse(row.digest_json || '{}'),\n }));\n }\n\n async closeFrame(frameId: string, outputs?: any): Promise<void> {\n await this.updateFrame(frameId, {\n state: 'closed',\n outputs,\n closed_at: Date.now(),\n });\n }\n\n // Event operations\n async createEvent(event: Partial<Event>): Promise<string> {\n if (!this.db)\n throw new DatabaseError(\n 'Database not connected',\n ErrorCode.DB_CONNECTION_FAILED\n );\n\n const eventId = event.event_id || this.generateId();\n\n this.db\n .prepare(\n `\n INSERT INTO events (event_id, run_id, frame_id, seq, event_type, payload, ts)\n VALUES (?, ?, ?, ?, ?, ?, ?)\n `\n )\n .run(\n eventId,\n event.run_id,\n event.frame_id,\n event.seq || 0,\n event.event_type,\n JSON.stringify(event.payload || {}),\n event.ts || Date.now()\n );\n\n return eventId;\n }\n\n async getFrameEvents(\n frameId: string,\n options?: QueryOptions\n ): Promise<Event[]> {\n if (!this.db)\n throw new DatabaseError(\n 'Database not connected',\n ErrorCode.DB_CONNECTION_FAILED\n );\n\n let query = 'SELECT * FROM events WHERE frame_id = ?';\n query += this.buildOrderByClause(\n options?.orderBy || 'seq',\n options?.orderDirection\n );\n query += this.buildLimitClause(options?.limit, options?.offset);\n\n const rows = this.db.prepare(query).all(frameId) as any[];\n\n return rows.map((row) => ({\n ...row,\n payload: JSON.parse(row.payload || '{}'),\n }));\n }\n\n async deleteFrameEvents(frameId: string): Promise<void> {\n if (!this.db)\n throw new DatabaseError(\n 'Database not connected',\n ErrorCode.DB_CONNECTION_FAILED\n );\n\n this.db.prepare('DELETE FROM events WHERE frame_id = ?').run(frameId);\n }\n\n // Anchor operations\n async createAnchor(anchor: Partial<Anchor>): Promise<string> {\n if (!this.db)\n throw new DatabaseError(\n 'Database not connected',\n ErrorCode.DB_CONNECTION_FAILED\n );\n\n const anchorId = anchor.anchor_id || this.generateId();\n\n this.db\n .prepare(\n `\n INSERT INTO anchors (anchor_id, frame_id, project_id, type, text, priority, metadata)\n VALUES (?, ?, ?, ?, ?, ?, ?)\n `\n )\n .run(\n anchorId,\n anchor.frame_id,\n anchor.project_id || this.projectId,\n anchor.type,\n anchor.text,\n anchor.priority || 0,\n JSON.stringify(anchor.metadata || {})\n );\n\n return anchorId;\n }\n\n async getFrameAnchors(frameId: string): Promise<Anchor[]> {\n if 
(!this.db)\n throw new DatabaseError(\n 'Database not connected',\n ErrorCode.DB_CONNECTION_FAILED\n );\n\n const rows = this.db\n .prepare(\n `\n SELECT * FROM anchors WHERE frame_id = ? \n ORDER BY priority DESC, created_at ASC\n `\n )\n .all(frameId) as any[];\n\n return rows.map((row) => ({\n ...row,\n metadata: JSON.parse(row.metadata || '{}'),\n }));\n }\n\n async deleteFrameAnchors(frameId: string): Promise<void> {\n if (!this.db)\n throw new DatabaseError(\n 'Database not connected',\n ErrorCode.DB_CONNECTION_FAILED\n );\n\n this.db.prepare('DELETE FROM anchors WHERE frame_id = ?').run(frameId);\n }\n\n // Limited search (basic LIKE queries)\n async search(\n options: SearchOptions\n ): Promise<Array<Frame & { score: number }>> {\n if (!this.db)\n throw new DatabaseError(\n 'Database not connected',\n ErrorCode.DB_CONNECTION_FAILED\n );\n\n // SQLite doesn't support HAVING on non-aggregate queries, so we filter in application\n const sql = `\n SELECT *, \n CASE \n WHEN name LIKE ? THEN 1.0\n WHEN digest_text LIKE ? THEN 0.8\n WHEN inputs LIKE ? THEN 0.6\n ELSE 0.5\n END as score\n FROM frames\n WHERE name LIKE ? OR digest_text LIKE ? OR inputs LIKE ?\n ORDER BY score DESC\n `;\n\n const params = Array(6).fill(`%${options.query}%`);\n\n let rows = this.db.prepare(sql).all(...params) as any[];\n\n // Apply score threshold in application layer\n if (options.scoreThreshold) {\n rows = rows.filter((row) => row.score >= options.scoreThreshold);\n }\n\n // Apply limit and offset in application layer if threshold is used\n if (options.limit || options.offset) {\n const start = options.offset || 0;\n const end = options.limit ? start + options.limit : rows.length;\n rows = rows.slice(start, end);\n }\n\n return rows.map((row) => ({\n ...row,\n inputs: JSON.parse(row.inputs || '{}'),\n outputs: JSON.parse(row.outputs || '{}'),\n digest_json: JSON.parse(row.digest_json || '{}'),\n }));\n }\n\n async searchByVector(\n _embedding: number[],\n _options?: QueryOptions\n ): Promise<Array<Frame & { similarity: number }>> {\n // Not supported in SQLite\n logger.warn('Vector search not supported in SQLite adapter');\n return [];\n }\n\n async searchHybrid(\n textQuery: string,\n _embedding: number[],\n weights?: { text: number; vector: number }\n ): Promise<Array<Frame & { score: number }>> {\n // Fall back to text search only\n return this.search({ query: textQuery, ...weights });\n }\n\n // Basic aggregation\n async aggregate(\n table: string,\n options: AggregationOptions\n ): Promise<Record<string, any>[]> {\n if (!this.db)\n throw new DatabaseError(\n 'Database not connected',\n ErrorCode.DB_CONNECTION_FAILED\n );\n\n const metrics = options.metrics\n .map(\n (m) =>\n `${m.operation}(${m.field}) AS ${m.alias || `${m.operation}_${m.field}`}`\n )\n .join(', ');\n\n let sql = `SELECT ${options.groupBy.join(', ')}, ${metrics} FROM ${table}`;\n sql += ` GROUP BY ${options.groupBy.join(', ')}`;\n\n if (options.having) {\n const havingClauses = Object.entries(options.having).map(\n ([key, value]) =>\n `${key} ${typeof value === 'object' ? 
value.op : '='} ?`\n );\n sql += ` HAVING ${havingClauses.join(' AND ')}`;\n }\n\n return this.db\n .prepare(sql)\n .all(...Object.values(options.having || {})) as any[];\n }\n\n // Pattern detection (basic)\n async detectPatterns(timeRange?: { start: Date; end: Date }): Promise<\n Array<{\n pattern: string;\n type: string;\n frequency: number;\n lastSeen: Date;\n }>\n > {\n if (!this.db)\n throw new DatabaseError(\n 'Database not connected',\n ErrorCode.DB_CONNECTION_FAILED\n );\n\n let sql = `\n SELECT type as pattern, type, COUNT(*) as frequency, MAX(created_at) as last_seen\n FROM frames\n `;\n\n const params = [];\n if (timeRange) {\n sql += ' WHERE created_at >= ? AND created_at <= ?';\n params.push(\n Math.floor(timeRange.start.getTime() / 1000),\n Math.floor(timeRange.end.getTime() / 1000)\n );\n }\n\n sql += ' GROUP BY type HAVING COUNT(*) > 1 ORDER BY frequency DESC';\n\n const rows = this.db.prepare(sql).all(...params) as any[];\n\n return rows.map((row) => ({\n pattern: row.pattern,\n type: row.type,\n frequency: row.frequency,\n lastSeen: new Date(row.last_seen * 1000),\n }));\n }\n\n // Bulk operations\n async executeBulk(operations: BulkOperation[]): Promise<void> {\n if (!this.db)\n throw new DatabaseError(\n 'Database not connected',\n ErrorCode.DB_CONNECTION_FAILED\n );\n\n await this.inTransaction(async () => {\n for (const op of operations) {\n switch (op.type) {\n case 'insert':\n // Build insert dynamically based on data\n const insertCols = Object.keys(op.data);\n const insertPlaceholders = insertCols.map(() => '?').join(',');\n this.db!.prepare(\n `INSERT INTO ${op.table} (${insertCols.join(',')}) VALUES (${insertPlaceholders})`\n ).run(...Object.values(op.data));\n break;\n\n case 'update':\n const updateSets = Object.keys(op.data)\n .map((k) => `${k} = ?`)\n .join(',');\n const whereClause = this.buildWhereClause(op.where || {});\n this.db!.prepare(\n `UPDATE ${op.table} SET ${updateSets} ${whereClause}`\n ).run(...Object.values(op.data), ...Object.values(op.where || {}));\n break;\n\n case 'delete':\n const deleteWhere = this.buildWhereClause(op.where || {});\n this.db!.prepare(`DELETE FROM ${op.table} ${deleteWhere}`).run(\n ...Object.values(op.where || {})\n );\n break;\n }\n }\n });\n }\n\n async vacuum(): Promise<void> {\n if (!this.db)\n throw new DatabaseError(\n 'Database not connected',\n ErrorCode.DB_CONNECTION_FAILED\n );\n\n this.db.pragma('vacuum');\n logger.info('SQLite database vacuumed');\n }\n\n async analyze(): Promise<void> {\n if (!this.db)\n throw new DatabaseError(\n 'Database not connected',\n ErrorCode.DB_CONNECTION_FAILED\n );\n\n this.db.pragma('analyze');\n logger.info('SQLite database analyzed');\n }\n\n // Statistics\n async getStats(): Promise<DatabaseStats> {\n if (!this.db)\n throw new DatabaseError(\n 'Database not connected',\n ErrorCode.DB_CONNECTION_FAILED\n );\n\n const stats = {\n totalFrames: (\n this.db\n .prepare('SELECT COUNT(*) as count FROM frames')\n .get() as CountResult\n ).count,\n activeFrames: (\n this.db\n .prepare(\n \"SELECT COUNT(*) as count FROM frames WHERE state = 'active'\"\n )\n .get() as CountResult\n ).count,\n totalEvents: (\n this.db\n .prepare('SELECT COUNT(*) as count FROM events')\n .get() as CountResult\n ).count,\n totalAnchors: (\n this.db\n .prepare('SELECT COUNT(*) as count FROM anchors')\n .get() as CountResult\n ).count,\n diskUsage: 0,\n };\n\n // Get file size\n try {\n const fileStats = await fs.stat(this.dbPath);\n stats.diskUsage = fileStats.size;\n } catch {}\n\n return stats;\n }\n\n 
async getQueryStats(): Promise<\n Array<{\n query: string;\n calls: number;\n meanTime: number;\n totalTime: number;\n }>\n > {\n // SQLite doesn't have built-in query stats\n logger.warn('Query stats not available for SQLite');\n return [];\n }\n\n // Transaction support\n async beginTransaction(): Promise<void> {\n if (!this.db)\n throw new DatabaseError(\n 'Database not connected',\n ErrorCode.DB_CONNECTION_FAILED\n );\n\n this.db.prepare('BEGIN').run();\n this.inTransactionFlag = true;\n }\n\n async commitTransaction(): Promise<void> {\n if (!this.db)\n throw new DatabaseError(\n 'Database not connected',\n ErrorCode.DB_CONNECTION_FAILED\n );\n\n this.db.prepare('COMMIT').run();\n this.inTransactionFlag = false;\n }\n\n async rollbackTransaction(): Promise<void> {\n if (!this.db)\n throw new DatabaseError(\n 'Database not connected',\n ErrorCode.DB_CONNECTION_FAILED\n );\n\n this.db.prepare('ROLLBACK').run();\n this.inTransactionFlag = false;\n }\n\n async inTransaction(\n callback: (adapter: DatabaseAdapter) => Promise<void>\n ): Promise<void> {\n await this.beginTransaction();\n\n try {\n await callback(this);\n await this.commitTransaction();\n } catch (error: unknown) {\n await this.rollbackTransaction();\n throw error;\n }\n }\n\n // Export/Import\n async exportData(\n tables: string[],\n format: 'json' | 'parquet' | 'csv'\n ): Promise<Buffer> {\n if (!this.db)\n throw new DatabaseError(\n 'Database not connected',\n ErrorCode.DB_CONNECTION_FAILED\n );\n\n if (format !== 'json') {\n throw new ValidationError(\n `Format ${format} not supported for SQLite export`,\n ErrorCode.VALIDATION_FAILED,\n { format, supportedFormats: ['json'] }\n );\n }\n\n const data: Record<string, any[]> = {};\n\n for (const table of tables) {\n data[table] = this.db.prepare(`SELECT * FROM ${table}`).all();\n }\n\n return Buffer.from(JSON.stringify(data, null, 2));\n }\n\n async importData(\n data: Buffer,\n format: 'json' | 'parquet' | 'csv',\n options?: { truncate?: boolean; upsert?: boolean }\n ): Promise<void> {\n if (!this.db)\n throw new DatabaseError(\n 'Database not connected',\n ErrorCode.DB_CONNECTION_FAILED\n );\n\n if (format !== 'json') {\n throw new ValidationError(\n `Format ${format} not supported for SQLite import`,\n ErrorCode.VALIDATION_FAILED,\n { format, supportedFormats: ['json'] }\n );\n }\n\n const parsed = JSON.parse(data.toString());\n\n await this.inTransaction(async () => {\n for (const [table, rows] of Object.entries(parsed)) {\n if (options?.truncate) {\n this.db!.prepare(`DELETE FROM ${table}`).run();\n }\n\n for (const row of rows as any[]) {\n const cols = Object.keys(row);\n const placeholders = cols.map(() => '?').join(',');\n\n if (options?.upsert) {\n const updates = cols.map((c) => `${c} = excluded.${c}`).join(',');\n this.db!.prepare(\n `INSERT INTO ${table} (${cols.join(',')}) VALUES (${placeholders})\n ON CONFLICT DO UPDATE SET ${updates}`\n ).run(...Object.values(row));\n } else {\n this.db!.prepare(\n `INSERT INTO ${table} (${cols.join(',')}) VALUES (${placeholders})`\n ).run(...Object.values(row));\n }\n }\n }\n });\n }\n}\n"],
+
"sourcesContent": ["/**\n * SQLite Database Adapter\n * Maintains backward compatibility with existing SQLite implementation\n */\n\nimport Database from 'better-sqlite3';\nimport {\n FeatureAwareDatabaseAdapter,\n DatabaseFeatures,\n SearchOptions,\n QueryOptions,\n AggregationOptions,\n BulkOperation,\n DatabaseStats,\n CountResult,\n VersionResult,\n FrameRow,\n} from './database-adapter.js';\nimport type { Frame, Event, Anchor } from '../context/index.js';\nimport { logger } from '../monitoring/logger.js';\nimport { DatabaseError, ErrorCode, ValidationError } from '../errors/index.js';\nimport * as fs from 'fs/promises';\nimport * as path from 'path';\n\nexport interface SQLiteConfig {\n dbPath: string;\n walMode?: boolean;\n busyTimeout?: number;\n cacheSize?: number;\n synchronous?: 'OFF' | 'NORMAL' | 'FULL' | 'EXTRA';\n}\n\nexport class SQLiteAdapter extends FeatureAwareDatabaseAdapter {\n private db: Database.Database | null = null;\n private readonly dbPath: string;\n private inTransactionFlag = false;\n\n constructor(projectId: string, config: SQLiteConfig) {\n super(projectId, config);\n this.dbPath = config.dbPath;\n }\n\n getFeatures(): DatabaseFeatures {\n return {\n supportsFullTextSearch: false, // Could enable with FTS5\n supportsVectorSearch: false,\n supportsPartitioning: false,\n supportsAnalytics: false,\n supportsCompression: false,\n supportsMaterializedViews: false,\n supportsParallelQueries: false,\n };\n }\n\n async connect(): Promise<void> {\n if (this.db) return;\n\n const config = this.config as SQLiteConfig;\n\n // Ensure directory exists\n const dir = path.dirname(this.dbPath);\n await fs.mkdir(dir, { recursive: true });\n\n this.db = new Database(this.dbPath);\n\n // Enforce referential integrity\n this.db.pragma('foreign_keys = ON');\n\n // Configure SQLite for better performance\n if (config.walMode !== false) {\n this.db.pragma('journal_mode = WAL');\n }\n\n if (config.busyTimeout) {\n this.db.pragma(`busy_timeout = ${config.busyTimeout}`);\n }\n\n if (config.cacheSize) {\n this.db.pragma(`cache_size = ${config.cacheSize}`);\n }\n\n if (config.synchronous) {\n this.db.pragma(`synchronous = ${config.synchronous}`);\n }\n\n logger.info('SQLite database connected', { dbPath: this.dbPath });\n }\n\n async disconnect(): Promise<void> {\n if (!this.db) return;\n\n this.db.close();\n this.db = null;\n logger.info('SQLite database disconnected');\n }\n\n /**\n * Get raw database handle for testing purposes\n * @internal\n */\n getRawDatabase(): Database.Database | null {\n return this.db;\n }\n\n isConnected(): boolean {\n return this.db !== null && this.db.open;\n }\n\n async ping(): Promise<boolean> {\n if (!this.db) return false;\n\n try {\n this.db.prepare('SELECT 1').get();\n return true;\n } catch {\n return false;\n }\n }\n\n async initializeSchema(): Promise<void> {\n if (!this.db)\n throw new DatabaseError(\n 'Database not connected',\n ErrorCode.DB_CONNECTION_FAILED\n );\n\n this.db.exec(`\n CREATE TABLE IF NOT EXISTS frames (\n frame_id TEXT PRIMARY KEY,\n run_id TEXT NOT NULL,\n project_id TEXT NOT NULL,\n parent_frame_id TEXT REFERENCES frames(frame_id),\n depth INTEGER NOT NULL DEFAULT 0,\n type TEXT NOT NULL,\n name TEXT NOT NULL,\n state TEXT DEFAULT 'active',\n inputs TEXT DEFAULT '{}',\n outputs TEXT DEFAULT '{}',\n digest_text TEXT,\n digest_json TEXT DEFAULT '{}',\n created_at INTEGER DEFAULT (unixepoch()),\n closed_at INTEGER\n );\n\n CREATE TABLE IF NOT EXISTS events (\n event_id TEXT PRIMARY KEY,\n run_id TEXT NOT NULL,\n frame_id TEXT NOT 
NULL,\n seq INTEGER NOT NULL,\n event_type TEXT NOT NULL,\n payload TEXT NOT NULL,\n ts INTEGER DEFAULT (unixepoch()),\n FOREIGN KEY(frame_id) REFERENCES frames(frame_id) ON DELETE CASCADE\n );\n\n CREATE TABLE IF NOT EXISTS anchors (\n anchor_id TEXT PRIMARY KEY,\n frame_id TEXT NOT NULL,\n project_id TEXT NOT NULL,\n type TEXT NOT NULL,\n text TEXT NOT NULL,\n priority INTEGER DEFAULT 0,\n created_at INTEGER DEFAULT (unixepoch()),\n metadata TEXT DEFAULT '{}',\n FOREIGN KEY(frame_id) REFERENCES frames(frame_id) ON DELETE CASCADE\n );\n\n CREATE TABLE IF NOT EXISTS schema_version (\n version INTEGER PRIMARY KEY,\n applied_at INTEGER DEFAULT (unixepoch())\n );\n\n -- Indexes for performance\n CREATE INDEX IF NOT EXISTS idx_frames_run ON frames(run_id);\n CREATE INDEX IF NOT EXISTS idx_frames_project ON frames(project_id);\n CREATE INDEX IF NOT EXISTS idx_frames_parent ON frames(parent_frame_id);\n CREATE INDEX IF NOT EXISTS idx_frames_state ON frames(state);\n CREATE INDEX IF NOT EXISTS idx_frames_created ON frames(created_at DESC);\n CREATE INDEX IF NOT EXISTS idx_events_frame ON events(frame_id);\n CREATE INDEX IF NOT EXISTS idx_events_seq ON events(frame_id, seq);\n CREATE INDEX IF NOT EXISTS idx_anchors_frame ON anchors(frame_id);\n\n -- Set initial schema version if not exists\n INSERT OR IGNORE INTO schema_version (version) VALUES (1);\n `);\n\n // Ensure cascade constraints exist on dependent tables for existing DBs\n try {\n this.ensureCascadeConstraints();\n } catch (e) {\n logger.warn('Failed to ensure cascade constraints', e as Error);\n }\n }\n\n /**\n * Ensure ON DELETE CASCADE exists for events/anchors referencing frames\n * Migrates existing tables in-place if needed without data loss.\n */\n private ensureCascadeConstraints(): void {\n if (!this.db) return;\n\n const needsCascade = (table: string): boolean => {\n const rows = this.db!.prepare(\n `PRAGMA foreign_key_list(${table})`\n ).all() as any[];\n // If any FK points to frames without cascade, we need migration\n return rows.some(\n (r) =>\n r.table === 'frames' &&\n String(r.on_delete).toUpperCase() !== 'CASCADE'\n );\n };\n\n const migrateTable = (table: 'events' | 'anchors') => {\n const createSql =\n table === 'events'\n ? `CREATE TABLE events_new (\n event_id TEXT PRIMARY KEY,\n run_id TEXT NOT NULL,\n frame_id TEXT NOT NULL,\n seq INTEGER NOT NULL,\n event_type TEXT NOT NULL,\n payload TEXT NOT NULL,\n ts INTEGER DEFAULT (unixepoch()),\n FOREIGN KEY(frame_id) REFERENCES frames(frame_id) ON DELETE CASCADE\n );`\n : `CREATE TABLE anchors_new (\n anchor_id TEXT PRIMARY KEY,\n frame_id TEXT NOT NULL,\n project_id TEXT NOT NULL,\n type TEXT NOT NULL,\n text TEXT NOT NULL,\n priority INTEGER DEFAULT 0,\n created_at INTEGER DEFAULT (unixepoch()),\n metadata TEXT DEFAULT '{}',\n FOREIGN KEY(frame_id) REFERENCES frames(frame_id) ON DELETE CASCADE\n );`;\n\n const cols =\n table === 'events'\n ? 'event_id, run_id, frame_id, seq, event_type, payload, ts'\n : 'anchor_id, frame_id, project_id, type, text, priority, created_at, metadata';\n\n const idxSql =\n table === 'events'\n ? [\n 'CREATE INDEX IF NOT EXISTS idx_events_frame ON events(frame_id);',\n 'CREATE INDEX IF NOT EXISTS idx_events_seq ON events(frame_id, seq);',\n ]\n : [\n 'CREATE INDEX IF NOT EXISTS idx_anchors_frame ON anchors(frame_id);',\n ];\n\n this.db!.exec('PRAGMA foreign_keys = OFF;');\n this.db!.exec('BEGIN;');\n this.db!.exec(createSql);\n this.db!.prepare(\n `INSERT INTO ${table === 'events' ? 
'events_new' : 'anchors_new'} (${cols}) SELECT ${cols} FROM ${table}`\n ).run();\n this.db!.exec(`DROP TABLE ${table};`);\n this.db!.exec(`ALTER TABLE ${table}_new RENAME TO ${table};`);\n for (const stmt of idxSql) this.db!.exec(stmt);\n this.db!.exec('COMMIT;');\n this.db!.exec('PRAGMA foreign_keys = ON;');\n logger.info(`Migrated ${table} to include ON DELETE CASCADE`);\n };\n\n if (needsCascade('events')) migrateTable('events');\n if (needsCascade('anchors')) migrateTable('anchors');\n }\n\n async migrateSchema(targetVersion: number): Promise<void> {\n if (!this.db)\n throw new DatabaseError(\n 'Database not connected',\n ErrorCode.DB_CONNECTION_FAILED\n );\n\n const currentVersion = await this.getSchemaVersion();\n\n if (currentVersion >= targetVersion) {\n logger.info('Schema already at target version', {\n currentVersion,\n targetVersion,\n });\n return;\n }\n\n // Apply migrations sequentially\n for (let v = currentVersion + 1; v <= targetVersion; v++) {\n logger.info(`Applying migration to version ${v}`);\n // Migration logic would go here\n this.db.prepare('UPDATE schema_version SET version = ?').run(v);\n }\n }\n\n async getSchemaVersion(): Promise<number> {\n if (!this.db)\n throw new DatabaseError(\n 'Database not connected',\n ErrorCode.DB_CONNECTION_FAILED\n );\n\n try {\n const result = this.db\n .prepare('SELECT MAX(version) as version FROM schema_version')\n .get() as VersionResult;\n return result?.version || 0;\n } catch {\n return 0;\n }\n }\n\n // Frame operations\n async createFrame(frame: Partial<Frame>): Promise<string> {\n if (!this.db)\n throw new DatabaseError(\n 'Database not connected',\n ErrorCode.DB_CONNECTION_FAILED\n );\n\n const frameId = frame.frame_id || this.generateId();\n\n this.db\n .prepare(\n `\n INSERT INTO frames (\n frame_id, run_id, project_id, parent_frame_id, depth,\n type, name, state, inputs, outputs, digest_text, digest_json\n ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)\n `\n )\n .run(\n frameId,\n frame.run_id,\n frame.project_id || this.projectId,\n frame.parent_frame_id || null,\n frame.depth || 0,\n frame.type,\n frame.name,\n frame.state || 'active',\n JSON.stringify(frame.inputs || {}),\n JSON.stringify(frame.outputs || {}),\n frame.digest_text || null,\n JSON.stringify(frame.digest_json || {})\n );\n\n return frameId;\n }\n\n async getFrame(frameId: string): Promise<Frame | null> {\n if (!this.db)\n throw new DatabaseError(\n 'Database not connected',\n ErrorCode.DB_CONNECTION_FAILED\n );\n\n const row = this.db\n .prepare('SELECT * FROM frames WHERE frame_id = ?')\n .get(frameId) as FrameRow | undefined;\n\n if (!row) return null;\n\n return {\n ...row,\n inputs: JSON.parse(row.inputs || '{}'),\n outputs: JSON.parse(row.outputs || '{}'),\n digest_json: JSON.parse(row.digest_json || '{}'),\n };\n }\n\n async updateFrame(frameId: string, updates: Partial<Frame>): Promise<void> {\n if (!this.db)\n throw new DatabaseError(\n 'Database not connected',\n ErrorCode.DB_CONNECTION_FAILED\n );\n\n const fields = [];\n const values = [];\n\n if (updates.state !== undefined) {\n fields.push('state = ?');\n values.push(updates.state);\n }\n\n if (updates.outputs !== undefined) {\n fields.push('outputs = ?');\n values.push(JSON.stringify(updates.outputs));\n }\n\n if (updates.digest_text !== undefined) {\n fields.push('digest_text = ?');\n values.push(updates.digest_text);\n }\n\n if (updates.digest_json !== undefined) {\n fields.push('digest_json = ?');\n values.push(JSON.stringify(updates.digest_json));\n }\n\n if (updates.closed_at !== 
undefined) {\n fields.push('closed_at = ?');\n values.push(updates.closed_at);\n }\n\n if (fields.length === 0) return;\n\n values.push(frameId);\n\n this.db\n .prepare(\n `\n UPDATE frames SET ${fields.join(', ')} WHERE frame_id = ?\n `\n )\n .run(...values);\n }\n\n async deleteFrame(frameId: string): Promise<void> {\n if (!this.db)\n throw new DatabaseError(\n 'Database not connected',\n ErrorCode.DB_CONNECTION_FAILED\n );\n\n // Delete in order due to foreign keys\n await this.deleteFrameAnchors(frameId);\n await this.deleteFrameEvents(frameId);\n\n this.db.prepare('DELETE FROM frames WHERE frame_id = ?').run(frameId);\n }\n\n async getActiveFrames(runId?: string): Promise<Frame[]> {\n if (!this.db)\n throw new DatabaseError(\n 'Database not connected',\n ErrorCode.DB_CONNECTION_FAILED\n );\n\n let query = \"SELECT * FROM frames WHERE state = 'active'\";\n const params = [];\n\n if (runId) {\n query += ' AND run_id = ?';\n params.push(runId);\n }\n\n query += ' ORDER BY depth ASC, created_at ASC';\n\n const rows = this.db.prepare(query).all(...params) as any[];\n\n return rows.map((row) => ({\n ...row,\n inputs: JSON.parse(row.inputs || '{}'),\n outputs: JSON.parse(row.outputs || '{}'),\n digest_json: JSON.parse(row.digest_json || '{}'),\n }));\n }\n\n async closeFrame(frameId: string, outputs?: any): Promise<void> {\n await this.updateFrame(frameId, {\n state: 'closed',\n outputs,\n closed_at: Date.now(),\n });\n }\n\n // Event operations\n async createEvent(event: Partial<Event>): Promise<string> {\n if (!this.db)\n throw new DatabaseError(\n 'Database not connected',\n ErrorCode.DB_CONNECTION_FAILED\n );\n\n const eventId = event.event_id || this.generateId();\n\n this.db\n .prepare(\n `\n INSERT INTO events (event_id, run_id, frame_id, seq, event_type, payload, ts)\n VALUES (?, ?, ?, ?, ?, ?, ?)\n `\n )\n .run(\n eventId,\n event.run_id,\n event.frame_id,\n event.seq || 0,\n event.event_type,\n JSON.stringify(event.payload || {}),\n event.ts || Date.now()\n );\n\n return eventId;\n }\n\n async getFrameEvents(\n frameId: string,\n options?: QueryOptions\n ): Promise<Event[]> {\n if (!this.db)\n throw new DatabaseError(\n 'Database not connected',\n ErrorCode.DB_CONNECTION_FAILED\n );\n\n let query = 'SELECT * FROM events WHERE frame_id = ?';\n query += this.buildOrderByClause(\n options?.orderBy || 'seq',\n options?.orderDirection\n );\n query += this.buildLimitClause(options?.limit, options?.offset);\n\n const rows = this.db.prepare(query).all(frameId) as any[];\n\n return rows.map((row) => ({\n ...row,\n payload: JSON.parse(row.payload || '{}'),\n }));\n }\n\n async deleteFrameEvents(frameId: string): Promise<void> {\n if (!this.db)\n throw new DatabaseError(\n 'Database not connected',\n ErrorCode.DB_CONNECTION_FAILED\n );\n\n this.db.prepare('DELETE FROM events WHERE frame_id = ?').run(frameId);\n }\n\n // Anchor operations\n async createAnchor(anchor: Partial<Anchor>): Promise<string> {\n if (!this.db)\n throw new DatabaseError(\n 'Database not connected',\n ErrorCode.DB_CONNECTION_FAILED\n );\n\n const anchorId = anchor.anchor_id || this.generateId();\n\n this.db\n .prepare(\n `\n INSERT INTO anchors (anchor_id, frame_id, project_id, type, text, priority, metadata)\n VALUES (?, ?, ?, ?, ?, ?, ?)\n `\n )\n .run(\n anchorId,\n anchor.frame_id,\n anchor.project_id || this.projectId,\n anchor.type,\n anchor.text,\n anchor.priority || 0,\n JSON.stringify(anchor.metadata || {})\n );\n\n return anchorId;\n }\n\n async getFrameAnchors(frameId: string): Promise<Anchor[]> {\n if 
(!this.db)\n throw new DatabaseError(\n 'Database not connected',\n ErrorCode.DB_CONNECTION_FAILED\n );\n\n const rows = this.db\n .prepare(\n `\n SELECT * FROM anchors WHERE frame_id = ? \n ORDER BY priority DESC, created_at ASC\n `\n )\n .all(frameId) as any[];\n\n return rows.map((row) => ({\n ...row,\n metadata: JSON.parse(row.metadata || '{}'),\n }));\n }\n\n async deleteFrameAnchors(frameId: string): Promise<void> {\n if (!this.db)\n throw new DatabaseError(\n 'Database not connected',\n ErrorCode.DB_CONNECTION_FAILED\n );\n\n this.db.prepare('DELETE FROM anchors WHERE frame_id = ?').run(frameId);\n }\n\n // Limited search (basic LIKE queries)\n async search(\n options: SearchOptions\n ): Promise<Array<Frame & { score: number }>> {\n if (!this.db)\n throw new DatabaseError(\n 'Database not connected',\n ErrorCode.DB_CONNECTION_FAILED\n );\n\n // SQLite doesn't support HAVING on non-aggregate queries, so we filter in application\n const sql = `\n SELECT *, \n CASE \n WHEN name LIKE ? THEN 1.0\n WHEN digest_text LIKE ? THEN 0.8\n WHEN inputs LIKE ? THEN 0.6\n ELSE 0.5\n END as score\n FROM frames\n WHERE name LIKE ? OR digest_text LIKE ? OR inputs LIKE ?\n ORDER BY score DESC\n `;\n\n const params = Array(6).fill(`%${options.query}%`);\n\n let rows = this.db.prepare(sql).all(...params) as any[];\n\n // Apply score threshold in application layer\n if (options.scoreThreshold) {\n rows = rows.filter((row) => row.score >= options.scoreThreshold);\n }\n\n // Apply limit and offset in application layer if threshold is used\n if (options.limit || options.offset) {\n const start = options.offset || 0;\n const end = options.limit ? start + options.limit : rows.length;\n rows = rows.slice(start, end);\n }\n\n return rows.map((row) => ({\n ...row,\n inputs: JSON.parse(row.inputs || '{}'),\n outputs: JSON.parse(row.outputs || '{}'),\n digest_json: JSON.parse(row.digest_json || '{}'),\n }));\n }\n\n async searchByVector(\n _embedding: number[],\n _options?: QueryOptions\n ): Promise<Array<Frame & { similarity: number }>> {\n // Not supported in SQLite\n logger.warn('Vector search not supported in SQLite adapter');\n return [];\n }\n\n async searchHybrid(\n textQuery: string,\n _embedding: number[],\n weights?: { text: number; vector: number }\n ): Promise<Array<Frame & { score: number }>> {\n // Fall back to text search only\n return this.search({ query: textQuery, ...weights });\n }\n\n // Basic aggregation\n async aggregate(\n table: string,\n options: AggregationOptions\n ): Promise<Record<string, any>[]> {\n if (!this.db)\n throw new DatabaseError(\n 'Database not connected',\n ErrorCode.DB_CONNECTION_FAILED\n );\n\n const metrics = options.metrics\n .map(\n (m) =>\n `${m.operation}(${m.field}) AS ${m.alias || `${m.operation}_${m.field}`}`\n )\n .join(', ');\n\n let sql = `SELECT ${options.groupBy.join(', ')}, ${metrics} FROM ${table}`;\n sql += ` GROUP BY ${options.groupBy.join(', ')}`;\n\n if (options.having) {\n const havingClauses = Object.entries(options.having).map(\n ([key, value]) =>\n `${key} ${typeof value === 'object' ? 
value.op : '='} ?`\n );\n sql += ` HAVING ${havingClauses.join(' AND ')}`;\n }\n\n return this.db\n .prepare(sql)\n .all(...Object.values(options.having || {})) as any[];\n }\n\n // Pattern detection (basic)\n async detectPatterns(timeRange?: { start: Date; end: Date }): Promise<\n Array<{\n pattern: string;\n type: string;\n frequency: number;\n lastSeen: Date;\n }>\n > {\n if (!this.db)\n throw new DatabaseError(\n 'Database not connected',\n ErrorCode.DB_CONNECTION_FAILED\n );\n\n let sql = `\n SELECT type as pattern, type, COUNT(*) as frequency, MAX(created_at) as last_seen\n FROM frames\n `;\n\n const params = [];\n if (timeRange) {\n sql += ' WHERE created_at >= ? AND created_at <= ?';\n params.push(\n Math.floor(timeRange.start.getTime() / 1000),\n Math.floor(timeRange.end.getTime() / 1000)\n );\n }\n\n sql += ' GROUP BY type HAVING COUNT(*) > 1 ORDER BY frequency DESC';\n\n const rows = this.db.prepare(sql).all(...params) as any[];\n\n return rows.map((row) => ({\n pattern: row.pattern,\n type: row.type,\n frequency: row.frequency,\n lastSeen: new Date(row.last_seen * 1000),\n }));\n }\n\n // Bulk operations\n async executeBulk(operations: BulkOperation[]): Promise<void> {\n if (!this.db)\n throw new DatabaseError(\n 'Database not connected',\n ErrorCode.DB_CONNECTION_FAILED\n );\n\n await this.inTransaction(async () => {\n for (const op of operations) {\n switch (op.type) {\n case 'insert':\n // Build insert dynamically based on data\n const insertCols = Object.keys(op.data);\n const insertPlaceholders = insertCols.map(() => '?').join(',');\n this.db!.prepare(\n `INSERT INTO ${op.table} (${insertCols.join(',')}) VALUES (${insertPlaceholders})`\n ).run(...Object.values(op.data));\n break;\n\n case 'update':\n const updateSets = Object.keys(op.data)\n .map((k) => `${k} = ?`)\n .join(',');\n const whereClause = this.buildWhereClause(op.where || {});\n this.db!.prepare(\n `UPDATE ${op.table} SET ${updateSets} ${whereClause}`\n ).run(...Object.values(op.data), ...Object.values(op.where || {}));\n break;\n\n case 'delete':\n const deleteWhere = this.buildWhereClause(op.where || {});\n this.db!.prepare(`DELETE FROM ${op.table} ${deleteWhere}`).run(\n ...Object.values(op.where || {})\n );\n break;\n }\n }\n });\n }\n\n async vacuum(): Promise<void> {\n if (!this.db)\n throw new DatabaseError(\n 'Database not connected',\n ErrorCode.DB_CONNECTION_FAILED\n );\n\n this.db.pragma('vacuum');\n logger.info('SQLite database vacuumed');\n }\n\n async analyze(): Promise<void> {\n if (!this.db)\n throw new DatabaseError(\n 'Database not connected',\n ErrorCode.DB_CONNECTION_FAILED\n );\n\n this.db.pragma('analyze');\n logger.info('SQLite database analyzed');\n }\n\n // Statistics\n async getStats(): Promise<DatabaseStats> {\n if (!this.db)\n throw new DatabaseError(\n 'Database not connected',\n ErrorCode.DB_CONNECTION_FAILED\n );\n\n const stats = {\n totalFrames: (\n this.db\n .prepare('SELECT COUNT(*) as count FROM frames')\n .get() as CountResult\n ).count,\n activeFrames: (\n this.db\n .prepare(\n \"SELECT COUNT(*) as count FROM frames WHERE state = 'active'\"\n )\n .get() as CountResult\n ).count,\n totalEvents: (\n this.db\n .prepare('SELECT COUNT(*) as count FROM events')\n .get() as CountResult\n ).count,\n totalAnchors: (\n this.db\n .prepare('SELECT COUNT(*) as count FROM anchors')\n .get() as CountResult\n ).count,\n diskUsage: 0,\n };\n\n // Get file size\n try {\n const fileStats = await fs.stat(this.dbPath);\n stats.diskUsage = fileStats.size;\n } catch {}\n\n return stats;\n }\n\n 
async getQueryStats(): Promise<\n Array<{\n query: string;\n calls: number;\n meanTime: number;\n totalTime: number;\n }>\n > {\n // SQLite doesn't have built-in query stats\n logger.warn('Query stats not available for SQLite');\n return [];\n }\n\n // Transaction support\n async beginTransaction(): Promise<void> {\n if (!this.db)\n throw new DatabaseError(\n 'Database not connected',\n ErrorCode.DB_CONNECTION_FAILED\n );\n\n this.db.prepare('BEGIN').run();\n this.inTransactionFlag = true;\n }\n\n async commitTransaction(): Promise<void> {\n if (!this.db)\n throw new DatabaseError(\n 'Database not connected',\n ErrorCode.DB_CONNECTION_FAILED\n );\n\n this.db.prepare('COMMIT').run();\n this.inTransactionFlag = false;\n }\n\n async rollbackTransaction(): Promise<void> {\n if (!this.db)\n throw new DatabaseError(\n 'Database not connected',\n ErrorCode.DB_CONNECTION_FAILED\n );\n\n this.db.prepare('ROLLBACK').run();\n this.inTransactionFlag = false;\n }\n\n async inTransaction(\n callback: (adapter: DatabaseAdapter) => Promise<void>\n ): Promise<void> {\n await this.beginTransaction();\n\n try {\n await callback(this);\n await this.commitTransaction();\n } catch (error: unknown) {\n await this.rollbackTransaction();\n throw error;\n }\n }\n\n // Export/Import\n async exportData(\n tables: string[],\n format: 'json' | 'parquet' | 'csv'\n ): Promise<Buffer> {\n if (!this.db)\n throw new DatabaseError(\n 'Database not connected',\n ErrorCode.DB_CONNECTION_FAILED\n );\n\n if (format !== 'json') {\n throw new ValidationError(\n `Format ${format} not supported for SQLite export`,\n ErrorCode.VALIDATION_FAILED,\n { format, supportedFormats: ['json'] }\n );\n }\n\n const data: Record<string, any[]> = {};\n\n for (const table of tables) {\n data[table] = this.db.prepare(`SELECT * FROM ${table}`).all();\n }\n\n return Buffer.from(JSON.stringify(data, null, 2));\n }\n\n async importData(\n data: Buffer,\n format: 'json' | 'parquet' | 'csv',\n options?: { truncate?: boolean; upsert?: boolean }\n ): Promise<void> {\n if (!this.db)\n throw new DatabaseError(\n 'Database not connected',\n ErrorCode.DB_CONNECTION_FAILED\n );\n\n if (format !== 'json') {\n throw new ValidationError(\n `Format ${format} not supported for SQLite import`,\n ErrorCode.VALIDATION_FAILED,\n { format, supportedFormats: ['json'] }\n );\n }\n\n const parsed = JSON.parse(data.toString());\n\n await this.inTransaction(async () => {\n for (const [table, rows] of Object.entries(parsed)) {\n if (options?.truncate) {\n this.db!.prepare(`DELETE FROM ${table}`).run();\n }\n\n for (const row of rows as any[]) {\n const cols = Object.keys(row);\n const placeholders = cols.map(() => '?').join(',');\n\n if (options?.upsert) {\n const updates = cols.map((c) => `${c} = excluded.${c}`).join(',');\n this.db!.prepare(\n `INSERT INTO ${table} (${cols.join(',')}) VALUES (${placeholders})\n ON CONFLICT DO UPDATE SET ${updates}`\n ).run(...Object.values(row));\n } else {\n this.db!.prepare(\n `INSERT INTO ${table} (${cols.join(',')}) VALUES (${placeholders})`\n ).run(...Object.values(row));\n }\n }\n }\n });\n }\n}\n"],
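The only substantive change in this `sqlite-adapter.js.map` hunk is the type-import path inside the embedded source: `Frame`, `Event`, and `Anchor` are now imported from the context barrel module rather than from `frame-manager` directly, and the remainder of the embedded adapter source appears unchanged. For reference, the changed line:

```ts
// 0.5.33
import type { Frame, Event, Anchor } from '../context/frame-manager.js';
// 0.5.35
import type { Frame, Event, Anchor } from '../context/index.js';
```

Routing type imports through `../context/index.js` decouples the adapter from the context module's internal file layout.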
"mappings": ";;;;AAKA,OAAO,cAAc;AACrB;AAAA,EACE;AAAA,OAUK;AAEP,SAAS,cAAc;AACvB,SAAS,eAAe,WAAW,uBAAuB;AAC1D,YAAY,QAAQ;AACpB,YAAY,UAAU;AAUf,MAAM,sBAAsB,4BAA4B;AAAA,EACrD,KAA+B;AAAA,EACtB;AAAA,EACT,oBAAoB;AAAA,EAE5B,YAAY,WAAmB,QAAsB;AACnD,UAAM,WAAW,MAAM;AACvB,SAAK,SAAS,OAAO;AAAA,EACvB;AAAA,EAEA,cAAgC;AAC9B,WAAO;AAAA,MACL,wBAAwB;AAAA;AAAA,MACxB,sBAAsB;AAAA,MACtB,sBAAsB;AAAA,MACtB,mBAAmB;AAAA,MACnB,qBAAqB;AAAA,MACrB,2BAA2B;AAAA,MAC3B,yBAAyB;AAAA,IAC3B;AAAA,EACF;AAAA,EAEA,MAAM,UAAyB;AAC7B,QAAI,KAAK,GAAI;AAEb,UAAM,SAAS,KAAK;AAGpB,UAAM,MAAM,KAAK,QAAQ,KAAK,MAAM;AACpC,UAAM,GAAG,MAAM,KAAK,EAAE,WAAW,KAAK,CAAC;AAEvC,SAAK,KAAK,IAAI,SAAS,KAAK,MAAM;AAGlC,SAAK,GAAG,OAAO,mBAAmB;AAGlC,QAAI,OAAO,YAAY,OAAO;AAC5B,WAAK,GAAG,OAAO,oBAAoB;AAAA,IACrC;AAEA,QAAI,OAAO,aAAa;AACtB,WAAK,GAAG,OAAO,kBAAkB,OAAO,WAAW,EAAE;AAAA,IACvD;AAEA,QAAI,OAAO,WAAW;AACpB,WAAK,GAAG,OAAO,gBAAgB,OAAO,SAAS,EAAE;AAAA,IACnD;AAEA,QAAI,OAAO,aAAa;AACtB,WAAK,GAAG,OAAO,iBAAiB,OAAO,WAAW,EAAE;AAAA,IACtD;AAEA,WAAO,KAAK,6BAA6B,EAAE,QAAQ,KAAK,OAAO,CAAC;AAAA,EAClE;AAAA,EAEA,MAAM,aAA4B;AAChC,QAAI,CAAC,KAAK,GAAI;AAEd,SAAK,GAAG,MAAM;AACd,SAAK,KAAK;AACV,WAAO,KAAK,8BAA8B;AAAA,EAC5C;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,iBAA2C;AACzC,WAAO,KAAK;AAAA,EACd;AAAA,EAEA,cAAuB;AACrB,WAAO,KAAK,OAAO,QAAQ,KAAK,GAAG;AAAA,EACrC;AAAA,EAEA,MAAM,OAAyB;AAC7B,QAAI,CAAC,KAAK,GAAI,QAAO;AAErB,QAAI;AACF,WAAK,GAAG,QAAQ,UAAU,EAAE,IAAI;AAChC,aAAO;AAAA,IACT,QAAQ;AACN,aAAO;AAAA,IACT;AAAA,EACF;AAAA,EAEA,MAAM,mBAAkC;AACtC,QAAI,CAAC,KAAK;AACR,YAAM,IAAI;AAAA,QACR;AAAA,QACA,UAAU;AAAA,MACZ;AAEF,SAAK,GAAG,KAAK;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,KA0DZ;AAGD,QAAI;AACF,WAAK,yBAAyB;AAAA,IAChC,SAAS,GAAG;AACV,aAAO,KAAK,wCAAwC,CAAU;AAAA,IAChE;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA,EAMQ,2BAAiC;AACvC,QAAI,CAAC,KAAK,GAAI;AAEd,UAAM,eAAe,CAAC,UAA2B;AAC/C,YAAM,OAAO,KAAK,GAAI;AAAA,QACpB,2BAA2B,KAAK;AAAA,MAClC,EAAE,IAAI;AAEN,aAAO,KAAK;AAAA,QACV,CAAC,MACC,EAAE,UAAU,YACZ,OAAO,EAAE,SAAS,EAAE,YAAY,MAAM;AAAA,MAC1C;AAAA,IACF;AAEA,UAAM,eAAe,CAAC,UAAgC;AACpD,YAAM,YACJ,UAAU,WACN;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAUA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAYN,YAAM,OACJ,UAAU,WACN,6DACA;AAEN,YAAM,SACJ,UAAU,WACN;AAAA,QACE;AAAA,QACA;AAAA,MACF,IACA;AAAA,QACE;AAAA,MACF;AAEN,WAAK,GAAI,KAAK,4BAA4B;AAC1C,WAAK,GAAI,KAAK,QAAQ;AACtB,WAAK,GAAI,KAAK,SAAS;AACvB,WAAK,GAAI;AAAA,QACP,eAAe,UAAU,WAAW,eAAe,aAAa,KAAK,IAAI,YAAY,IAAI,SAAS,KAAK;AAAA,MACzG,EAAE,IAAI;AACN,WAAK,GAAI,KAAK,cAAc,KAAK,GAAG;AACpC,WAAK,GAAI,KAAK,eAAe,KAAK,kBAAkB,KAAK,GAAG;AAC5D,iBAAW,QAAQ,OAAQ,MAAK,GAAI,KAAK,IAAI;AAC7C,WAAK,GAAI,KAAK,SAAS;AACvB,WAAK,GAAI,KAAK,2BAA2B;AACzC,aAAO,KAAK,YAAY,KAAK,+BAA+B;AAAA,IAC9D;AAEA,QAAI,aAAa,QAAQ,EAAG,cAAa,QAAQ;AACjD,QAAI,aAAa,SAAS,EAAG,cAAa,SAAS;AAAA,EACrD;AAAA,EAEA,MAAM,cAAc,eAAsC;AACxD,QAAI,CAAC,KAAK;AACR,YAAM,IAAI;AAAA,QACR;AAAA,QACA,UAAU;AAAA,MACZ;AAEF,UAAM,iBAAiB,MAAM,KAAK,iBAAiB;AAEnD,QAAI,kBAAkB,eAAe;AACnC,aAAO,KAAK,oCAAoC;AAAA,QAC9C;AAAA,QACA;AAAA,MACF,CAAC;AACD;AAAA,IACF;AAGA,aAAS,IAAI,iBAAiB,GAAG,KAAK,eAAe,KAAK;AACxD,aAAO,KAAK,iCAAiC,CAAC,EAAE;AAEhD,WAAK,GAAG,QAAQ,uCAAuC,EAAE,IAAI,CAAC;AAAA,IAChE;AAAA,EACF;AAAA,EAEA,MAAM,mBAAoC;AACxC,QAAI,CAAC,KAAK;AACR,YAAM,IAAI;AAAA,QACR;AAAA,QACA,UAAU;AAAA,MACZ;AAEF,QAAI;AACF,YAAM,SAAS,KAAK,GACjB,QAAQ,oDAAoD,EAC5D,IAAI;AACP,aAAO,QAAQ,WAAW;AAAA,IAC5B,QAAQ;AACN,aAAO;AAAA,IACT;AAAA,EACF;AAAA;AAAA,EAGA,MAAM,YAAY,OAAwC;AACxD,QAAI,CAAC
,KAAK;AACR,YAAM,IAAI;AAAA,QACR;AAAA,QACA,UAAU;AAAA,MACZ;AAEF,UAAM,UAAU,MAAM,YAAY,KAAK,WAAW;AAElD,SAAK,GACF;AAAA,MACC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,IAMF,EACC;AAAA,MACC;AAAA,MACA,MAAM;AAAA,MACN,MAAM,cAAc,KAAK;AAAA,MACzB,MAAM,mBAAmB;AAAA,MACzB,MAAM,SAAS;AAAA,MACf,MAAM;AAAA,MACN,MAAM;AAAA,MACN,MAAM,SAAS;AAAA,MACf,KAAK,UAAU,MAAM,UAAU,CAAC,CAAC;AAAA,MACjC,KAAK,UAAU,MAAM,WAAW,CAAC,CAAC;AAAA,MAClC,MAAM,eAAe;AAAA,MACrB,KAAK,UAAU,MAAM,eAAe,CAAC,CAAC;AAAA,IACxC;AAEF,WAAO;AAAA,EACT;AAAA,EAEA,MAAM,SAAS,SAAwC;AACrD,QAAI,CAAC,KAAK;AACR,YAAM,IAAI;AAAA,QACR;AAAA,QACA,UAAU;AAAA,MACZ;AAEF,UAAM,MAAM,KAAK,GACd,QAAQ,yCAAyC,EACjD,IAAI,OAAO;AAEd,QAAI,CAAC,IAAK,QAAO;AAEjB,WAAO;AAAA,MACL,GAAG;AAAA,MACH,QAAQ,KAAK,MAAM,IAAI,UAAU,IAAI;AAAA,MACrC,SAAS,KAAK,MAAM,IAAI,WAAW,IAAI;AAAA,MACvC,aAAa,KAAK,MAAM,IAAI,eAAe,IAAI;AAAA,IACjD;AAAA,EACF;AAAA,EAEA,MAAM,YAAY,SAAiB,SAAwC;AACzE,QAAI,CAAC,KAAK;AACR,YAAM,IAAI;AAAA,QACR;AAAA,QACA,UAAU;AAAA,MACZ;AAEF,UAAM,SAAS,CAAC;AAChB,UAAM,SAAS,CAAC;AAEhB,QAAI,QAAQ,UAAU,QAAW;AAC/B,aAAO,KAAK,WAAW;AACvB,aAAO,KAAK,QAAQ,KAAK;AAAA,IAC3B;AAEA,QAAI,QAAQ,YAAY,QAAW;AACjC,aAAO,KAAK,aAAa;AACzB,aAAO,KAAK,KAAK,UAAU,QAAQ,OAAO,CAAC;AAAA,IAC7C;AAEA,QAAI,QAAQ,gBAAgB,QAAW;AACrC,aAAO,KAAK,iBAAiB;AAC7B,aAAO,KAAK,QAAQ,WAAW;AAAA,IACjC;AAEA,QAAI,QAAQ,gBAAgB,QAAW;AACrC,aAAO,KAAK,iBAAiB;AAC7B,aAAO,KAAK,KAAK,UAAU,QAAQ,WAAW,CAAC;AAAA,IACjD;AAEA,QAAI,QAAQ,cAAc,QAAW;AACnC,aAAO,KAAK,eAAe;AAC3B,aAAO,KAAK,QAAQ,SAAS;AAAA,IAC/B;AAEA,QAAI,OAAO,WAAW,EAAG;AAEzB,WAAO,KAAK,OAAO;AAEnB,SAAK,GACF;AAAA,MACC;AAAA,0BACkB,OAAO,KAAK,IAAI,CAAC;AAAA;AAAA,IAErC,EACC,IAAI,GAAG,MAAM;AAAA,EAClB;AAAA,EAEA,MAAM,YAAY,SAAgC;AAChD,QAAI,CAAC,KAAK;AACR,YAAM,IAAI;AAAA,QACR;AAAA,QACA,UAAU;AAAA,MACZ;AAGF,UAAM,KAAK,mBAAmB,OAAO;AACrC,UAAM,KAAK,kBAAkB,OAAO;AAEpC,SAAK,GAAG,QAAQ,uCAAuC,EAAE,IAAI,OAAO;AAAA,EACtE;AAAA,EAEA,MAAM,gBAAgB,OAAkC;AACtD,QAAI,CAAC,KAAK;AACR,YAAM,IAAI;AAAA,QACR;AAAA,QACA,UAAU;AAAA,MACZ;AAEF,QAAI,QAAQ;AACZ,UAAM,SAAS,CAAC;AAEhB,QAAI,OAAO;AACT,eAAS;AACT,aAAO,KAAK,KAAK;AAAA,IACnB;AAEA,aAAS;AAET,UAAM,OAAO,KAAK,GAAG,QAAQ,KAAK,EAAE,IAAI,GAAG,MAAM;AAEjD,WAAO,KAAK,IAAI,CAAC,SAAS;AAAA,MACxB,GAAG;AAAA,MACH,QAAQ,KAAK,MAAM,IAAI,UAAU,IAAI;AAAA,MACrC,SAAS,KAAK,MAAM,IAAI,WAAW,IAAI;AAAA,MACvC,aAAa,KAAK,MAAM,IAAI,eAAe,IAAI;AAAA,IACjD,EAAE;AAAA,EACJ;AAAA,EAEA,MAAM,WAAW,SAAiB,SAA8B;AAC9D,UAAM,KAAK,YAAY,SAAS;AAAA,MAC9B,OAAO;AAAA,MACP;AAAA,MACA,WAAW,KAAK,IAAI;AAAA,IACtB,CAAC;AAAA,EACH;AAAA;AAAA,EAGA,MAAM,YAAY,OAAwC;AACxD,QAAI,CAAC,KAAK;AACR,YAAM,IAAI;AAAA,QACR;AAAA,QACA,UAAU;AAAA,MACZ;AAEF,UAAM,UAAU,MAAM,YAAY,KAAK,WAAW;AAElD,SAAK,GACF;AAAA,MACC;AAAA;AAAA;AAAA;AAAA,IAIF,EACC;AAAA,MACC;AAAA,MACA,MAAM;AAAA,MACN,MAAM;AAAA,MACN,MAAM,OAAO;AAAA,MACb,MAAM;AAAA,MACN,KAAK,UAAU,MAAM,WAAW,CAAC,CAAC;AAAA,MAClC,MAAM,MAAM,KAAK,IAAI;AAAA,IACvB;AAEF,WAAO;AAAA,EACT;AAAA,EAEA,MAAM,eACJ,SACA,SACkB;AAClB,QAAI,CAAC,KAAK;AACR,YAAM,IAAI;AAAA,QACR;AAAA,QACA,UAAU;AAAA,MACZ;AAEF,QAAI,QAAQ;AACZ,aAAS,KAAK;AAAA,MACZ,SAAS,WAAW;AAAA,MACpB,SAAS;AAAA,IACX;AACA,aAAS,KAAK,iBAAiB,SAAS,OAAO,SAAS,MAAM;AAE9D,UAAM,OAAO,KAAK,GAAG,QAAQ,KAAK,EAAE,IAAI,OAAO;AAE/C,WAAO,KAAK,IAAI,CAAC,SAAS;AAAA,MACxB,GAAG;AAAA,MACH,SAAS,KAAK,MAAM,IAAI,WAAW,IAAI;AAAA,IACzC,EAAE;AAAA,EACJ;AAAA,EAEA,MAAM,kBAAkB,SAAgC;AACtD,QAAI,CAAC,KAAK;AACR,YAAM,IAAI;AAAA,QACR;AAAA,QACA,UAAU;AAAA,MACZ;AAEF,SAAK,GAAG,QAAQ,uCAAuC,EAAE,IAAI,OAAO;AAAA,EACtE;AAAA;AAAA,EAGA,MAAM,aAAa,QAA0C;AAC3D,QAAI,CAAC,KAAK;AACR,YAAM,IAAI;AAAA,QACR;AAAA,QACA,UAAU;AAAA,MACZ;AAEF,UAAM,WAAW,OAAO,aAAa,KAAK,WAAW;AAErD,SAAK,GACF;AAAA,MACC;AAAA;AAAA;AAAA;AAAA,IAIF,EACC;AAAA,MACC;AAAA,MACA,OAAO;AAAA,MACP,OAAO,cAAc,KAAK;AAAA,MAC1B,OAAO;AAAA,MACP,OAAO;AAAA,MACP,OAAO,
YAAY;AAAA,MACnB,KAAK,UAAU,OAAO,YAAY,CAAC,CAAC;AAAA,IACtC;AAEF,WAAO;AAAA,EACT;AAAA,EAEA,MAAM,gBAAgB,SAAoC;AACxD,QAAI,CAAC,KAAK;AACR,YAAM,IAAI;AAAA,QACR;AAAA,QACA,UAAU;AAAA,MACZ;AAEF,UAAM,OAAO,KAAK,GACf;AAAA,MACC;AAAA;AAAA;AAAA;AAAA,IAIF,EACC,IAAI,OAAO;AAEd,WAAO,KAAK,IAAI,CAAC,SAAS;AAAA,MACxB,GAAG;AAAA,MACH,UAAU,KAAK,MAAM,IAAI,YAAY,IAAI;AAAA,IAC3C,EAAE;AAAA,EACJ;AAAA,EAEA,MAAM,mBAAmB,SAAgC;AACvD,QAAI,CAAC,KAAK;AACR,YAAM,IAAI;AAAA,QACR;AAAA,QACA,UAAU;AAAA,MACZ;AAEF,SAAK,GAAG,QAAQ,wCAAwC,EAAE,IAAI,OAAO;AAAA,EACvE;AAAA;AAAA,EAGA,MAAM,OACJ,SAC2C;AAC3C,QAAI,CAAC,KAAK;AACR,YAAM,IAAI;AAAA,QACR;AAAA,QACA,UAAU;AAAA,MACZ;AAGF,UAAM,MAAM;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAaZ,UAAM,SAAS,MAAM,CAAC,EAAE,KAAK,IAAI,QAAQ,KAAK,GAAG;AAEjD,QAAI,OAAO,KAAK,GAAG,QAAQ,GAAG,EAAE,IAAI,GAAG,MAAM;AAG7C,QAAI,QAAQ,gBAAgB;AAC1B,aAAO,KAAK,OAAO,CAAC,QAAQ,IAAI,SAAS,QAAQ,cAAc;AAAA,IACjE;AAGA,QAAI,QAAQ,SAAS,QAAQ,QAAQ;AACnC,YAAM,QAAQ,QAAQ,UAAU;AAChC,YAAM,MAAM,QAAQ,QAAQ,QAAQ,QAAQ,QAAQ,KAAK;AACzD,aAAO,KAAK,MAAM,OAAO,GAAG;AAAA,IAC9B;AAEA,WAAO,KAAK,IAAI,CAAC,SAAS;AAAA,MACxB,GAAG;AAAA,MACH,QAAQ,KAAK,MAAM,IAAI,UAAU,IAAI;AAAA,MACrC,SAAS,KAAK,MAAM,IAAI,WAAW,IAAI;AAAA,MACvC,aAAa,KAAK,MAAM,IAAI,eAAe,IAAI;AAAA,IACjD,EAAE;AAAA,EACJ;AAAA,EAEA,MAAM,eACJ,YACA,UACgD;AAEhD,WAAO,KAAK,+CAA+C;AAC3D,WAAO,CAAC;AAAA,EACV;AAAA,EAEA,MAAM,aACJ,WACA,YACA,SAC2C;AAE3C,WAAO,KAAK,OAAO,EAAE,OAAO,WAAW,GAAG,QAAQ,CAAC;AAAA,EACrD;AAAA;AAAA,EAGA,MAAM,UACJ,OACA,SACgC;AAChC,QAAI,CAAC,KAAK;AACR,YAAM,IAAI;AAAA,QACR;AAAA,QACA,UAAU;AAAA,MACZ;AAEF,UAAM,UAAU,QAAQ,QACrB;AAAA,MACC,CAAC,MACC,GAAG,EAAE,SAAS,IAAI,EAAE,KAAK,QAAQ,EAAE,SAAS,GAAG,EAAE,SAAS,IAAI,EAAE,KAAK,EAAE;AAAA,IAC3E,EACC,KAAK,IAAI;AAEZ,QAAI,MAAM,UAAU,QAAQ,QAAQ,KAAK,IAAI,CAAC,KAAK,OAAO,SAAS,KAAK;AACxE,WAAO,aAAa,QAAQ,QAAQ,KAAK,IAAI,CAAC;AAE9C,QAAI,QAAQ,QAAQ;AAClB,YAAM,gBAAgB,OAAO,QAAQ,QAAQ,MAAM,EAAE;AAAA,QACnD,CAAC,CAAC,KAAK,KAAK,MACV,GAAG,GAAG,IAAI,OAAO,UAAU,WAAW,MAAM,KAAK,GAAG;AAAA,MACxD;AACA,aAAO,WAAW,cAAc,KAAK,OAAO,CAAC;AAAA,IAC/C;AAEA,WAAO,KAAK,GACT,QAAQ,GAAG,EACX,IAAI,GAAG,OAAO,OAAO,QAAQ,UAAU,CAAC,CAAC,CAAC;AAAA,EAC/C;AAAA;AAAA,EAGA,MAAM,eAAe,WAOnB;AACA,QAAI,CAAC,KAAK;AACR,YAAM,IAAI;AAAA,QACR;AAAA,QACA,UAAU;AAAA,MACZ;AAEF,QAAI,MAAM;AAAA;AAAA;AAAA;AAKV,UAAM,SAAS,CAAC;AAChB,QAAI,WAAW;AACb,aAAO;AACP,aAAO;AAAA,QACL,KAAK,MAAM,UAAU,MAAM,QAAQ,IAAI,GAAI;AAAA,QAC3C,KAAK,MAAM,UAAU,IAAI,QAAQ,IAAI,GAAI;AAAA,MAC3C;AAAA,IACF;AAEA,WAAO;AAEP,UAAM,OAAO,KAAK,GAAG,QAAQ,GAAG,EAAE,IAAI,GAAG,MAAM;AAE/C,WAAO,KAAK,IAAI,CAAC,SAAS;AAAA,MACxB,SAAS,IAAI;AAAA,MACb,MAAM,IAAI;AAAA,MACV,WAAW,IAAI;AAAA,MACf,UAAU,IAAI,KAAK,IAAI,YAAY,GAAI;AAAA,IACzC,EAAE;AAAA,EACJ;AAAA;AAAA,EAGA,MAAM,YAAY,YAA4C;AAC5D,QAAI,CAAC,KAAK;AACR,YAAM,IAAI;AAAA,QACR;AAAA,QACA,UAAU;AAAA,MACZ;AAEF,UAAM,KAAK,cAAc,YAAY;AACnC,iBAAW,MAAM,YAAY;AAC3B,gBAAQ,GAAG,MAAM;AAAA,UACf,KAAK;AAEH,kBAAM,aAAa,OAAO,KAAK,GAAG,IAAI;AACtC,kBAAM,qBAAqB,WAAW,IAAI,MAAM,GAAG,EAAE,KAAK,GAAG;AAC7D,iBAAK,GAAI;AAAA,cACP,eAAe,GAAG,KAAK,KAAK,WAAW,KAAK,GAAG,CAAC,aAAa,kBAAkB;AAAA,YACjF,EAAE,IAAI,GAAG,OAAO,OAAO,GAAG,IAAI,CAAC;AAC/B;AAAA,UAEF,KAAK;AACH,kBAAM,aAAa,OAAO,KAAK,GAAG,IAAI,EACnC,IAAI,CAAC,MAAM,GAAG,CAAC,MAAM,EACrB,KAAK,GAAG;AACX,kBAAM,cAAc,KAAK,iBAAiB,GAAG,SAAS,CAAC,CAAC;AACxD,iBAAK,GAAI;AAAA,cACP,UAAU,GAAG,KAAK,QAAQ,UAAU,IAAI,WAAW;AAAA,YACrD,EAAE,IAAI,GAAG,OAAO,OAAO,GAAG,IAAI,GAAG,GAAG,OAAO,OAAO,GAAG,SAAS,CAAC,CAAC,CAAC;AACjE;AAAA,UAEF,KAAK;AACH,kBAAM,cAAc,KAAK,iBAAiB,GAAG,SAAS,CAAC,CAAC;AACxD,iBAAK,GAAI,QAAQ,eAAe,GAAG,KAAK,IAAI,WAAW,EAAE,EAAE;AAAA,cACzD,GAAG,OAAO,OAAO,GAAG,SAAS,CAAC,CAAC;AAAA,YACjC;AACA;AAAA,QACJ;AAAA,MACF;AAAA,IACF,CAAC;AAAA,EACH;AAAA,EAEA,MAAM,SAAwB;AAC5B,
QAAI,CAAC,KAAK;AACR,YAAM,IAAI;AAAA,QACR;AAAA,QACA,UAAU;AAAA,MACZ;AAEF,SAAK,GAAG,OAAO,QAAQ;AACvB,WAAO,KAAK,0BAA0B;AAAA,EACxC;AAAA,EAEA,MAAM,UAAyB;AAC7B,QAAI,CAAC,KAAK;AACR,YAAM,IAAI;AAAA,QACR;AAAA,QACA,UAAU;AAAA,MACZ;AAEF,SAAK,GAAG,OAAO,SAAS;AACxB,WAAO,KAAK,0BAA0B;AAAA,EACxC;AAAA;AAAA,EAGA,MAAM,WAAmC;AACvC,QAAI,CAAC,KAAK;AACR,YAAM,IAAI;AAAA,QACR;AAAA,QACA,UAAU;AAAA,MACZ;AAEF,UAAM,QAAQ;AAAA,MACZ,aACE,KAAK,GACF,QAAQ,sCAAsC,EAC9C,IAAI,EACP;AAAA,MACF,cACE,KAAK,GACF;AAAA,QACC;AAAA,MACF,EACC,IAAI,EACP;AAAA,MACF,aACE,KAAK,GACF,QAAQ,sCAAsC,EAC9C,IAAI,EACP;AAAA,MACF,cACE,KAAK,GACF,QAAQ,uCAAuC,EAC/C,IAAI,EACP;AAAA,MACF,WAAW;AAAA,IACb;AAGA,QAAI;AACF,YAAM,YAAY,MAAM,GAAG,KAAK,KAAK,MAAM;AAC3C,YAAM,YAAY,UAAU;AAAA,IAC9B,QAAQ;AAAA,IAAC;AAET,WAAO;AAAA,EACT;AAAA,EAEA,MAAM,gBAOJ;AAEA,WAAO,KAAK,sCAAsC;AAClD,WAAO,CAAC;AAAA,EACV;AAAA;AAAA,EAGA,MAAM,mBAAkC;AACtC,QAAI,CAAC,KAAK;AACR,YAAM,IAAI;AAAA,QACR;AAAA,QACA,UAAU;AAAA,MACZ;AAEF,SAAK,GAAG,QAAQ,OAAO,EAAE,IAAI;AAC7B,SAAK,oBAAoB;AAAA,EAC3B;AAAA,EAEA,MAAM,oBAAmC;AACvC,QAAI,CAAC,KAAK;AACR,YAAM,IAAI;AAAA,QACR;AAAA,QACA,UAAU;AAAA,MACZ;AAEF,SAAK,GAAG,QAAQ,QAAQ,EAAE,IAAI;AAC9B,SAAK,oBAAoB;AAAA,EAC3B;AAAA,EAEA,MAAM,sBAAqC;AACzC,QAAI,CAAC,KAAK;AACR,YAAM,IAAI;AAAA,QACR;AAAA,QACA,UAAU;AAAA,MACZ;AAEF,SAAK,GAAG,QAAQ,UAAU,EAAE,IAAI;AAChC,SAAK,oBAAoB;AAAA,EAC3B;AAAA,EAEA,MAAM,cACJ,UACe;AACf,UAAM,KAAK,iBAAiB;AAE5B,QAAI;AACF,YAAM,SAAS,IAAI;AACnB,YAAM,KAAK,kBAAkB;AAAA,IAC/B,SAAS,OAAgB;AACvB,YAAM,KAAK,oBAAoB;AAC/B,YAAM;AAAA,IACR;AAAA,EACF;AAAA;AAAA,EAGA,MAAM,WACJ,QACA,QACiB;AACjB,QAAI,CAAC,KAAK;AACR,YAAM,IAAI;AAAA,QACR;AAAA,QACA,UAAU;AAAA,MACZ;AAEF,QAAI,WAAW,QAAQ;AACrB,YAAM,IAAI;AAAA,QACR,UAAU,MAAM;AAAA,QAChB,UAAU;AAAA,QACV,EAAE,QAAQ,kBAAkB,CAAC,MAAM,EAAE;AAAA,MACvC;AAAA,IACF;AAEA,UAAM,OAA8B,CAAC;AAErC,eAAW,SAAS,QAAQ;AAC1B,WAAK,KAAK,IAAI,KAAK,GAAG,QAAQ,iBAAiB,KAAK,EAAE,EAAE,IAAI;AAAA,IAC9D;AAEA,WAAO,OAAO,KAAK,KAAK,UAAU,MAAM,MAAM,CAAC,CAAC;AAAA,EAClD;AAAA,EAEA,MAAM,WACJ,MACA,QACA,SACe;AACf,QAAI,CAAC,KAAK;AACR,YAAM,IAAI;AAAA,QACR;AAAA,QACA,UAAU;AAAA,MACZ;AAEF,QAAI,WAAW,QAAQ;AACrB,YAAM,IAAI;AAAA,QACR,UAAU,MAAM;AAAA,QAChB,UAAU;AAAA,QACV,EAAE,QAAQ,kBAAkB,CAAC,MAAM,EAAE;AAAA,MACvC;AAAA,IACF;AAEA,UAAM,SAAS,KAAK,MAAM,KAAK,SAAS,CAAC;AAEzC,UAAM,KAAK,cAAc,YAAY;AACnC,iBAAW,CAAC,OAAO,IAAI,KAAK,OAAO,QAAQ,MAAM,GAAG;AAClD,YAAI,SAAS,UAAU;AACrB,eAAK,GAAI,QAAQ,eAAe,KAAK,EAAE,EAAE,IAAI;AAAA,QAC/C;AAEA,mBAAW,OAAO,MAAe;AAC/B,gBAAM,OAAO,OAAO,KAAK,GAAG;AAC5B,gBAAM,eAAe,KAAK,IAAI,MAAM,GAAG,EAAE,KAAK,GAAG;AAEjD,cAAI,SAAS,QAAQ;AACnB,kBAAM,UAAU,KAAK,IAAI,CAAC,MAAM,GAAG,CAAC,eAAe,CAAC,EAAE,EAAE,KAAK,GAAG;AAChE,iBAAK,GAAI;AAAA,cACP,eAAe,KAAK,KAAK,KAAK,KAAK,GAAG,CAAC,aAAa,YAAY;AAAA,2CACnC,OAAO;AAAA,YACtC,EAAE,IAAI,GAAG,OAAO,OAAO,GAAG,CAAC;AAAA,UAC7B,OAAO;AACL,iBAAK,GAAI;AAAA,cACP,eAAe,KAAK,KAAK,KAAK,KAAK,GAAG,CAAC,aAAa,YAAY;AAAA,YAClE,EAAE,IAAI,GAAG,OAAO,OAAO,GAAG,CAAC;AAAA,UAC7B;AAAA,QACF;AAAA,MACF;AAAA,IACF,CAAC;AAAA,EACH;AACF;",
"names": []
}
@@ -1,7 +1,7 @@
{
"version": 3,
"sources": ["../../../src/core/digest/frame-digest-integration.ts"],
- "sourcesContent": ["/**\n * Integration module for Enhanced Hybrid Digest with Frame Manager\n * Connects the 60/40 digest generator with frame lifecycle events\n */\n\nimport Database from 'better-sqlite3';\nimport {\n FrameManager,\n Frame,\n Event,\n Anchor,\n} from '../context/
+ "sourcesContent": ["/**\n * Integration module for Enhanced Hybrid Digest with Frame Manager\n * Connects the 60/40 digest generator with frame lifecycle events\n */\n\nimport Database from 'better-sqlite3';\nimport {\n FrameManager,\n Frame,\n Event,\n Anchor,\n} from '../context/index.js';\nimport { EnhancedHybridDigestGenerator } from './enhanced-hybrid-digest.js';\nimport { DigestInput, DigestLLMProvider } from './types.js';\nimport { logger } from '../monitoring/logger.js';\n\n/**\n * Frame Digest Integration\n * Enhances FrameManager with hybrid digest capabilities\n */\nexport class FrameDigestIntegration {\n private frameManager: FrameManager;\n private digestGenerator: EnhancedHybridDigestGenerator;\n private frameActivityMap = new Map<string, number>();\n\n constructor(\n frameManager: FrameManager,\n db: Database.Database,\n llmProvider?: DigestLLMProvider\n ) {\n this.frameManager = frameManager;\n this.digestGenerator = new EnhancedHybridDigestGenerator(\n db,\n {\n enableAIGeneration: true,\n maxTokens: 200,\n },\n llmProvider\n );\n\n this.setupHooks();\n }\n\n /**\n * Setup frame lifecycle hooks\n */\n private setupHooks(): void {\n // Track frame activity\n const originalAddEvent = this.frameManager.addEvent.bind(this.frameManager);\n this.frameManager.addEvent = (\n eventType: Event['event_type'],\n payload: Record<string, any>,\n frameId?: string\n ): string => {\n const result = originalAddEvent(eventType, payload, frameId);\n\n // Track tool calls for idle detection\n if (eventType === 'tool_call') {\n this.digestGenerator.recordToolCall();\n }\n\n // Track user messages\n if (eventType === 'user_message') {\n this.digestGenerator.recordUserInput();\n }\n\n // Track frame activity\n const targetFrameId = frameId || this.frameManager.getCurrentFrameId();\n if (targetFrameId) {\n this.frameActivityMap.set(targetFrameId, Date.now());\n }\n\n return result;\n };\n\n // Hook into frame creation\n const originalCreateFrame = this.frameManager.createFrame.bind(\n this.frameManager\n );\n this.frameManager.createFrame = (options: any): string => {\n const frameId = originalCreateFrame(options);\n this.digestGenerator.onFrameOpened(frameId);\n this.frameActivityMap.set(frameId, Date.now());\n return frameId;\n };\n\n // Hook into frame closure\n const originalCloseFrame = this.frameManager.closeFrame.bind(\n this.frameManager\n );\n this.frameManager.closeFrame = (\n frameId?: string,\n outputs?: Record<string, any>\n ): void => {\n const targetFrameId = frameId || this.frameManager.getCurrentFrameId();\n\n if (targetFrameId) {\n // Generate enhanced digest\n const digest = this.generateEnhancedDigest(targetFrameId);\n\n // Merge digest outputs with provided outputs\n const enhancedOutputs = {\n ...outputs,\n digest: digest.json,\n digestText: digest.text,\n };\n\n // Notify digest generator of frame closure\n this.digestGenerator.onFrameClosed(targetFrameId);\n this.frameActivityMap.delete(targetFrameId);\n\n // Call original with enhanced outputs\n originalCloseFrame(frameId, enhancedOutputs);\n } else {\n originalCloseFrame(frameId, outputs);\n }\n };\n }\n\n /**\n * Generate enhanced digest for a frame\n */\n private generateEnhancedDigest(frameId: string): {\n text: string;\n json: Record<string, any>;\n } {\n // Get frame data\n const frame = this.frameManager.getFrame(frameId);\n if (!frame) {\n logger.warn('Frame not found for digest generation', { frameId });\n return { text: '', json: {} };\n }\n\n // Get events and anchors\n const events = this.frameManager.getFrameEvents(frameId);\n const anchors = this.getFrameAnchors(frameId);\n\n // Convert to digest input format\n const digestInput: DigestInput = {\n frame: this.convertFrame(frame),\n events: events.map(this.convertEvent),\n anchors: anchors.map(this.convertAnchor),\n };\n\n // Generate hybrid digest\n const hybridDigest = this.digestGenerator.generateDigest(digestInput);\n\n // Format for frame manager\n return {\n text: hybridDigest.text,\n json: {\n deterministic: hybridDigest.deterministic,\n aiGenerated: hybridDigest.aiGenerated,\n status: hybridDigest.status,\n generatedAt: Date.now(),\n },\n };\n }\n\n /**\n * Convert FrameManager frame to DigestInput frame\n */\n private convertFrame(frame: Frame): Frame {\n // Frame types are the same - just pass through with type assertion\n return frame;\n }\n\n /**\n * Convert FrameManager event to DigestInput event\n */\n private convertEvent(event: Event): Event {\n // Events are the same type - just pass through\n return event;\n }\n\n /**\n * Convert FrameManager anchor to DigestInput anchor\n */\n private convertAnchor(anchor: Anchor): Anchor {\n // Anchors are the same type - just pass through\n return anchor;\n }\n\n /**\n * Calculate importance score based on frame characteristics\n */\n private calculateImportanceScore(frame: Frame): number {\n let score = 0.5; // Base score\n\n // Adjust based on frame type\n const typeScores: Record<string, number> = {\n task: 0.6,\n debug: 0.8,\n review: 0.7,\n write: 0.5,\n tool_scope: 0.3,\n subtask: 0.4,\n };\n score = typeScores[frame.type] || score;\n\n // Adjust based on depth (deeper frames are usually less important)\n score -= frame.depth * 0.05;\n\n // Adjust based on duration (longer frames might be more important)\n if (frame.closed_at) {\n const durationMinutes = (frame.closed_at - frame.created_at) / 60;\n if (durationMinutes > 10) score += 0.1;\n if (durationMinutes > 30) score += 0.1;\n }\n\n // Clamp between 0 and 1\n return Math.max(0, Math.min(1, score));\n }\n\n /**\n * Get frame anchors (wrapper for proper typing)\n */\n private getFrameAnchors(frameId: string): Anchor[] {\n // This would typically use frameManager's method, but it's private\n // For now, return empty array - in production would need to expose this\n return [];\n }\n\n /**\n * Handle user interruption\n */\n public handleUserInterruption(): void {\n this.digestGenerator.handleInterruption();\n }\n\n /**\n * Get idle status\n */\n public getIdleStatus(): ReturnType<\n EnhancedHybridDigestGenerator['getIdleStatus']\n > {\n return this.digestGenerator.getIdleStatus();\n }\n\n /**\n * Force process digest queue\n */\n public async forceProcessQueue(): Promise<void> {\n await this.digestGenerator.forceProcessQueue();\n }\n\n /**\n * Cleanup\n */\n public shutdown(): void {\n this.digestGenerator.shutdown();\n }\n}\n\n/**\n * Factory function to enhance existing FrameManager\n */\nexport function enhanceFrameManagerWithDigest(\n frameManager: FrameManager,\n db: Database.Database,\n llmProvider?: DigestLLMProvider\n): FrameDigestIntegration {\n return new FrameDigestIntegration(frameManager, db, llmProvider);\n}\n"],
"mappings": ";;;;AAYA,SAAS,qCAAqC;AAE9C,SAAS,cAAc;AAMhB,MAAM,uBAAuB;AAAA,EAC1B;AAAA,EACA;AAAA,EACA,mBAAmB,oBAAI,IAAoB;AAAA,EAEnD,YACE,cACA,IACA,aACA;AACA,SAAK,eAAe;AACpB,SAAK,kBAAkB,IAAI;AAAA,MACzB;AAAA,MACA;AAAA,QACE,oBAAoB;AAAA,QACpB,WAAW;AAAA,MACb;AAAA,MACA;AAAA,IACF;AAEA,SAAK,WAAW;AAAA,EAClB;AAAA;AAAA;AAAA;AAAA,EAKQ,aAAmB;AAEzB,UAAM,mBAAmB,KAAK,aAAa,SAAS,KAAK,KAAK,YAAY;AAC1E,SAAK,aAAa,WAAW,CAC3B,WACA,SACA,YACW;AACX,YAAM,SAAS,iBAAiB,WAAW,SAAS,OAAO;AAG3D,UAAI,cAAc,aAAa;AAC7B,aAAK,gBAAgB,eAAe;AAAA,MACtC;AAGA,UAAI,cAAc,gBAAgB;AAChC,aAAK,gBAAgB,gBAAgB;AAAA,MACvC;AAGA,YAAM,gBAAgB,WAAW,KAAK,aAAa,kBAAkB;AACrE,UAAI,eAAe;AACjB,aAAK,iBAAiB,IAAI,eAAe,KAAK,IAAI,CAAC;AAAA,MACrD;AAEA,aAAO;AAAA,IACT;AAGA,UAAM,sBAAsB,KAAK,aAAa,YAAY;AAAA,MACxD,KAAK;AAAA,IACP;AACA,SAAK,aAAa,cAAc,CAAC,YAAyB;AACxD,YAAM,UAAU,oBAAoB,OAAO;AAC3C,WAAK,gBAAgB,cAAc,OAAO;AAC1C,WAAK,iBAAiB,IAAI,SAAS,KAAK,IAAI,CAAC;AAC7C,aAAO;AAAA,IACT;AAGA,UAAM,qBAAqB,KAAK,aAAa,WAAW;AAAA,MACtD,KAAK;AAAA,IACP;AACA,SAAK,aAAa,aAAa,CAC7B,SACA,YACS;AACT,YAAM,gBAAgB,WAAW,KAAK,aAAa,kBAAkB;AAErE,UAAI,eAAe;AAEjB,cAAM,SAAS,KAAK,uBAAuB,aAAa;AAGxD,cAAM,kBAAkB;AAAA,UACtB,GAAG;AAAA,UACH,QAAQ,OAAO;AAAA,UACf,YAAY,OAAO;AAAA,QACrB;AAGA,aAAK,gBAAgB,cAAc,aAAa;AAChD,aAAK,iBAAiB,OAAO,aAAa;AAG1C,2BAAmB,SAAS,eAAe;AAAA,MAC7C,OAAO;AACL,2BAAmB,SAAS,OAAO;AAAA,MACrC;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,uBAAuB,SAG7B;AAEA,UAAM,QAAQ,KAAK,aAAa,SAAS,OAAO;AAChD,QAAI,CAAC,OAAO;AACV,aAAO,KAAK,yCAAyC,EAAE,QAAQ,CAAC;AAChE,aAAO,EAAE,MAAM,IAAI,MAAM,CAAC,EAAE;AAAA,IAC9B;AAGA,UAAM,SAAS,KAAK,aAAa,eAAe,OAAO;AACvD,UAAM,UAAU,KAAK,gBAAgB,OAAO;AAG5C,UAAM,cAA2B;AAAA,MAC/B,OAAO,KAAK,aAAa,KAAK;AAAA,MAC9B,QAAQ,OAAO,IAAI,KAAK,YAAY;AAAA,MACpC,SAAS,QAAQ,IAAI,KAAK,aAAa;AAAA,IACzC;AAGA,UAAM,eAAe,KAAK,gBAAgB,eAAe,WAAW;AAGpE,WAAO;AAAA,MACL,MAAM,aAAa;AAAA,MACnB,MAAM;AAAA,QACJ,eAAe,aAAa;AAAA,QAC5B,aAAa,aAAa;AAAA,QAC1B,QAAQ,aAAa;AAAA,QACrB,aAAa,KAAK,IAAI;AAAA,MACxB;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,aAAa,OAAqB;AAExC,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKQ,aAAa,OAAqB;AAExC,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKQ,cAAc,QAAwB;AAE5C,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKQ,yBAAyB,OAAsB;AACrD,QAAI,QAAQ;AAGZ,UAAM,aAAqC;AAAA,MACzC,MAAM;AAAA,MACN,OAAO;AAAA,MACP,QAAQ;AAAA,MACR,OAAO;AAAA,MACP,YAAY;AAAA,MACZ,SAAS;AAAA,IACX;AACA,YAAQ,WAAW,MAAM,IAAI,KAAK;AAGlC,aAAS,MAAM,QAAQ;AAGvB,QAAI,MAAM,WAAW;AACnB,YAAM,mBAAmB,MAAM,YAAY,MAAM,cAAc;AAC/D,UAAI,kBAAkB,GAAI,UAAS;AACnC,UAAI,kBAAkB,GAAI,UAAS;AAAA,IACrC;AAGA,WAAO,KAAK,IAAI,GAAG,KAAK,IAAI,GAAG,KAAK,CAAC;AAAA,EACvC;AAAA;AAAA;AAAA;AAAA,EAKQ,gBAAgB,SAA2B;AAGjD,WAAO,CAAC;AAAA,EACV;AAAA;AAAA;AAAA;AAAA,EAKO,yBAA+B;AACpC,SAAK,gBAAgB,mBAAmB;AAAA,EAC1C;AAAA;AAAA;AAAA;AAAA,EAKO,gBAEL;AACA,WAAO,KAAK,gBAAgB,cAAc;AAAA,EAC5C;AAAA;AAAA;AAAA;AAAA,EAKA,MAAa,oBAAmC;AAC9C,UAAM,KAAK,gBAAgB,kBAAkB;AAAA,EAC/C;AAAA;AAAA;AAAA;AAAA,EAKO,WAAiB;AACtB,SAAK,gBAAgB,SAAS;AAAA,EAChC;AACF;AAKO,SAAS,8BACd,cACA,IACA,aACwB;AACxB,SAAO,IAAI,uBAAuB,cAAc,IAAI,WAAW;AACjE;",
"names": []
}