@vinaes/succ 1.3.19
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +87 -0
- package/README.md +588 -0
- package/agents/succ-checkpoint-manager.md +51 -0
- package/agents/succ-code-reviewer.md +181 -0
- package/agents/succ-context-optimizer.md +83 -0
- package/agents/succ-debug.md +224 -0
- package/agents/succ-decision-auditor.md +74 -0
- package/agents/succ-deep-search.md +41 -0
- package/agents/succ-diff-reviewer.md +123 -0
- package/agents/succ-explore.md +83 -0
- package/agents/succ-general.md +109 -0
- package/agents/succ-knowledge-indexer.md +45 -0
- package/agents/succ-knowledge-mapper.md +61 -0
- package/agents/succ-memory-curator.md +43 -0
- package/agents/succ-memory-health-monitor.md +55 -0
- package/agents/succ-pattern-detective.md +62 -0
- package/agents/succ-plan.md +172 -0
- package/agents/succ-quality-improvement-coach.md +63 -0
- package/agents/succ-readiness-improver.md +62 -0
- package/agents/succ-session-handoff-orchestrator.md +54 -0
- package/agents/succ-session-reviewer.md +46 -0
- package/agents/succ-style-tracker.md +73 -0
- package/dist/cli.d.ts +3 -0
- package/dist/cli.d.ts.map +1 -0
- package/dist/cli.js +749 -0
- package/dist/cli.js.map +1 -0
- package/dist/commands/agents-md.d.ts +16 -0
- package/dist/commands/agents-md.d.ts.map +1 -0
- package/dist/commands/agents-md.js +33 -0
- package/dist/commands/agents-md.js.map +1 -0
- package/dist/commands/analyze-agents.d.ts +20 -0
- package/dist/commands/analyze-agents.d.ts.map +1 -0
- package/dist/commands/analyze-agents.js +305 -0
- package/dist/commands/analyze-agents.js.map +1 -0
- package/dist/commands/analyze-helpers.d.ts +22 -0
- package/dist/commands/analyze-helpers.d.ts.map +1 -0
- package/dist/commands/analyze-helpers.js +38 -0
- package/dist/commands/analyze-helpers.js.map +1 -0
- package/dist/commands/analyze-profile.d.ts +53 -0
- package/dist/commands/analyze-profile.d.ts.map +1 -0
- package/dist/commands/analyze-profile.js +638 -0
- package/dist/commands/analyze-profile.js.map +1 -0
- package/dist/commands/analyze-recursive.d.ts +20 -0
- package/dist/commands/analyze-recursive.d.ts.map +1 -0
- package/dist/commands/analyze-recursive.js +326 -0
- package/dist/commands/analyze-recursive.js.map +1 -0
- package/dist/commands/analyze-utils.d.ts +83 -0
- package/dist/commands/analyze-utils.d.ts.map +1 -0
- package/dist/commands/analyze-utils.js +541 -0
- package/dist/commands/analyze-utils.js.map +1 -0
- package/dist/commands/analyze.d.ts +15 -0
- package/dist/commands/analyze.d.ts.map +1 -0
- package/dist/commands/analyze.js +265 -0
- package/dist/commands/analyze.js.map +1 -0
- package/dist/commands/backfill.d.ts +22 -0
- package/dist/commands/backfill.d.ts.map +1 -0
- package/dist/commands/backfill.js +62 -0
- package/dist/commands/backfill.js.map +1 -0
- package/dist/commands/benchmark-quality.d.ts +18 -0
- package/dist/commands/benchmark-quality.d.ts.map +1 -0
- package/dist/commands/benchmark-quality.js +316 -0
- package/dist/commands/benchmark-quality.js.map +1 -0
- package/dist/commands/benchmark-sqlite-vec.d.ts +12 -0
- package/dist/commands/benchmark-sqlite-vec.d.ts.map +1 -0
- package/dist/commands/benchmark-sqlite-vec.js +281 -0
- package/dist/commands/benchmark-sqlite-vec.js.map +1 -0
- package/dist/commands/benchmark.d.ts +57 -0
- package/dist/commands/benchmark.d.ts.map +1 -0
- package/dist/commands/benchmark.js +682 -0
- package/dist/commands/benchmark.js.map +1 -0
- package/dist/commands/chat.d.ts +14 -0
- package/dist/commands/chat.d.ts.map +1 -0
- package/dist/commands/chat.js +84 -0
- package/dist/commands/chat.js.map +1 -0
- package/dist/commands/checkpoint.d.ts +27 -0
- package/dist/commands/checkpoint.d.ts.map +1 -0
- package/dist/commands/checkpoint.js +181 -0
- package/dist/commands/checkpoint.js.map +1 -0
- package/dist/commands/clear.d.ts +9 -0
- package/dist/commands/clear.d.ts.map +1 -0
- package/dist/commands/clear.js +31 -0
- package/dist/commands/clear.js.map +1 -0
- package/dist/commands/config.d.ts +26 -0
- package/dist/commands/config.d.ts.map +1 -0
- package/dist/commands/config.js +247 -0
- package/dist/commands/config.js.map +1 -0
- package/dist/commands/consolidate.d.ts +21 -0
- package/dist/commands/consolidate.d.ts.map +1 -0
- package/dist/commands/consolidate.js +117 -0
- package/dist/commands/consolidate.js.map +1 -0
- package/dist/commands/daemon.d.ts +48 -0
- package/dist/commands/daemon.d.ts.map +1 -0
- package/dist/commands/daemon.js +218 -0
- package/dist/commands/daemon.js.map +1 -0
- package/dist/commands/embedding.d.ts +26 -0
- package/dist/commands/embedding.d.ts.map +1 -0
- package/dist/commands/embedding.js +168 -0
- package/dist/commands/embedding.js.map +1 -0
- package/dist/commands/graph.d.ts +20 -0
- package/dist/commands/graph.d.ts.map +1 -0
- package/dist/commands/graph.js +128 -0
- package/dist/commands/graph.js.map +1 -0
- package/dist/commands/index-code.d.ts +23 -0
- package/dist/commands/index-code.d.ts.map +1 -0
- package/dist/commands/index-code.js +218 -0
- package/dist/commands/index-code.js.map +1 -0
- package/dist/commands/index.d.ts +23 -0
- package/dist/commands/index.d.ts.map +1 -0
- package/dist/commands/index.js +217 -0
- package/dist/commands/index.js.map +1 -0
- package/dist/commands/init-templates.d.ts +21 -0
- package/dist/commands/init-templates.d.ts.map +1 -0
- package/dist/commands/init-templates.js +487 -0
- package/dist/commands/init-templates.js.map +1 -0
- package/dist/commands/init.d.ts +10 -0
- package/dist/commands/init.d.ts.map +1 -0
- package/dist/commands/init.js +865 -0
- package/dist/commands/init.js.map +1 -0
- package/dist/commands/memories.d.ts +47 -0
- package/dist/commands/memories.d.ts.map +1 -0
- package/dist/commands/memories.js +597 -0
- package/dist/commands/memories.js.map +1 -0
- package/dist/commands/migrate.d.ts +19 -0
- package/dist/commands/migrate.d.ts.map +1 -0
- package/dist/commands/migrate.js +154 -0
- package/dist/commands/migrate.js.map +1 -0
- package/dist/commands/prd.d.ts +46 -0
- package/dist/commands/prd.d.ts.map +1 -0
- package/dist/commands/prd.js +378 -0
- package/dist/commands/prd.js.map +1 -0
- package/dist/commands/precompute-context.d.ts +11 -0
- package/dist/commands/precompute-context.d.ts.map +1 -0
- package/dist/commands/precompute-context.js +12 -0
- package/dist/commands/precompute-context.js.map +1 -0
- package/dist/commands/progress.d.ts +16 -0
- package/dist/commands/progress.d.ts.map +1 -0
- package/dist/commands/progress.js +38 -0
- package/dist/commands/progress.js.map +1 -0
- package/dist/commands/reindex.d.ts +17 -0
- package/dist/commands/reindex.d.ts.map +1 -0
- package/dist/commands/reindex.js +83 -0
- package/dist/commands/reindex.js.map +1 -0
- package/dist/commands/retention.d.ts +19 -0
- package/dist/commands/retention.d.ts.map +1 -0
- package/dist/commands/retention.js +162 -0
- package/dist/commands/retention.js.map +1 -0
- package/dist/commands/score.d.ts +10 -0
- package/dist/commands/score.d.ts.map +1 -0
- package/dist/commands/score.js +28 -0
- package/dist/commands/score.js.map +1 -0
- package/dist/commands/search.d.ts +7 -0
- package/dist/commands/search.d.ts.map +1 -0
- package/dist/commands/search.js +89 -0
- package/dist/commands/search.js.map +1 -0
- package/dist/commands/session-summary.d.ts +15 -0
- package/dist/commands/session-summary.d.ts.map +1 -0
- package/dist/commands/session-summary.js +16 -0
- package/dist/commands/session-summary.js.map +1 -0
- package/dist/commands/setup.d.ts +6 -0
- package/dist/commands/setup.d.ts.map +1 -0
- package/dist/commands/setup.js +222 -0
- package/dist/commands/setup.js.map +1 -0
- package/dist/commands/soul.d.ts +9 -0
- package/dist/commands/soul.d.ts.map +1 -0
- package/dist/commands/soul.js +256 -0
- package/dist/commands/soul.js.map +1 -0
- package/dist/commands/stats.d.ts +18 -0
- package/dist/commands/stats.d.ts.map +1 -0
- package/dist/commands/stats.js +138 -0
- package/dist/commands/stats.js.map +1 -0
- package/dist/commands/status.d.ts +2 -0
- package/dist/commands/status.d.ts.map +1 -0
- package/dist/commands/status.js +145 -0
- package/dist/commands/status.js.map +1 -0
- package/dist/commands/train-bpe.d.ts +8 -0
- package/dist/commands/train-bpe.d.ts.map +1 -0
- package/dist/commands/train-bpe.js +35 -0
- package/dist/commands/train-bpe.js.map +1 -0
- package/dist/commands/watch.d.ts +22 -0
- package/dist/commands/watch.d.ts.map +1 -0
- package/dist/commands/watch.js +171 -0
- package/dist/commands/watch.js.map +1 -0
- package/dist/daemon/analyzer.d.ts +54 -0
- package/dist/daemon/analyzer.d.ts.map +1 -0
- package/dist/daemon/analyzer.js +362 -0
- package/dist/daemon/analyzer.js.map +1 -0
- package/dist/daemon/client.d.ts +87 -0
- package/dist/daemon/client.d.ts.map +1 -0
- package/dist/daemon/client.js +356 -0
- package/dist/daemon/client.js.map +1 -0
- package/dist/daemon/index.d.ts +12 -0
- package/dist/daemon/index.d.ts.map +1 -0
- package/dist/daemon/index.js +12 -0
- package/dist/daemon/index.js.map +1 -0
- package/dist/daemon/service.d.ts +51 -0
- package/dist/daemon/service.d.ts.map +1 -0
- package/dist/daemon/service.js +1203 -0
- package/dist/daemon/service.js.map +1 -0
- package/dist/daemon/session-processor.d.ts +85 -0
- package/dist/daemon/session-processor.d.ts.map +1 -0
- package/dist/daemon/session-processor.js +571 -0
- package/dist/daemon/session-processor.js.map +1 -0
- package/dist/daemon/sessions.d.ts +62 -0
- package/dist/daemon/sessions.d.ts.map +1 -0
- package/dist/daemon/sessions.js +192 -0
- package/dist/daemon/sessions.js.map +1 -0
- package/dist/daemon/watcher.d.ts +52 -0
- package/dist/daemon/watcher.d.ts.map +1 -0
- package/dist/daemon/watcher.js +363 -0
- package/dist/daemon/watcher.js.map +1 -0
- package/dist/lib/agents-md-generator.d.ts +33 -0
- package/dist/lib/agents-md-generator.d.ts.map +1 -0
- package/dist/lib/agents-md-generator.js +156 -0
- package/dist/lib/agents-md-generator.js.map +1 -0
- package/dist/lib/ai-readiness.d.ts +132 -0
- package/dist/lib/ai-readiness.d.ts.map +1 -0
- package/dist/lib/ai-readiness.js +702 -0
- package/dist/lib/ai-readiness.js.map +1 -0
- package/dist/lib/analyze-state.d.ts +34 -0
- package/dist/lib/analyze-state.d.ts.map +1 -0
- package/dist/lib/analyze-state.js +106 -0
- package/dist/lib/analyze-state.js.map +1 -0
- package/dist/lib/benchmark.d.ts +250 -0
- package/dist/lib/benchmark.d.ts.map +1 -0
- package/dist/lib/benchmark.js +778 -0
- package/dist/lib/benchmark.js.map +1 -0
- package/dist/lib/bm25.d.ts +114 -0
- package/dist/lib/bm25.d.ts.map +1 -0
- package/dist/lib/bm25.js +727 -0
- package/dist/lib/bm25.js.map +1 -0
- package/dist/lib/bpe.d.ts +70 -0
- package/dist/lib/bpe.d.ts.map +1 -0
- package/dist/lib/bpe.js +270 -0
- package/dist/lib/bpe.js.map +1 -0
- package/dist/lib/checkpoint.d.ts +124 -0
- package/dist/lib/checkpoint.d.ts.map +1 -0
- package/dist/lib/checkpoint.js +321 -0
- package/dist/lib/checkpoint.js.map +1 -0
- package/dist/lib/chunker.d.ts +47 -0
- package/dist/lib/chunker.d.ts.map +1 -0
- package/dist/lib/chunker.js +358 -0
- package/dist/lib/chunker.js.map +1 -0
- package/dist/lib/claude-ws-transport.d.ts +76 -0
- package/dist/lib/claude-ws-transport.d.ts.map +1 -0
- package/dist/lib/claude-ws-transport.js +487 -0
- package/dist/lib/claude-ws-transport.js.map +1 -0
- package/dist/lib/compact-briefing.d.ts +22 -0
- package/dist/lib/compact-briefing.d.ts.map +1 -0
- package/dist/lib/compact-briefing.js +180 -0
- package/dist/lib/compact-briefing.js.map +1 -0
- package/dist/lib/config-defaults.d.ts +41 -0
- package/dist/lib/config-defaults.d.ts.map +1 -0
- package/dist/lib/config-defaults.js +99 -0
- package/dist/lib/config-defaults.js.map +1 -0
- package/dist/lib/config-display.d.ts +16 -0
- package/dist/lib/config-display.d.ts.map +1 -0
- package/dist/lib/config-display.js +408 -0
- package/dist/lib/config-display.js.map +1 -0
- package/dist/lib/config-types.d.ts +601 -0
- package/dist/lib/config-types.d.ts.map +1 -0
- package/dist/lib/config-types.js +7 -0
- package/dist/lib/config-types.js.map +1 -0
- package/dist/lib/config-validation.d.ts +19 -0
- package/dist/lib/config-validation.d.ts.map +1 -0
- package/dist/lib/config-validation.js +136 -0
- package/dist/lib/config-validation.js.map +1 -0
- package/dist/lib/config.d.ts +143 -0
- package/dist/lib/config.d.ts.map +1 -0
- package/dist/lib/config.js +689 -0
- package/dist/lib/config.js.map +1 -0
- package/dist/lib/consolidate.d.ts +115 -0
- package/dist/lib/consolidate.d.ts.map +1 -0
- package/dist/lib/consolidate.js +600 -0
- package/dist/lib/consolidate.js.map +1 -0
- package/dist/lib/db/bm25-indexes.d.ts +58 -0
- package/dist/lib/db/bm25-indexes.d.ts.map +1 -0
- package/dist/lib/db/bm25-indexes.js +333 -0
- package/dist/lib/db/bm25-indexes.js.map +1 -0
- package/dist/lib/db/bpe.d.ts +18 -0
- package/dist/lib/db/bpe.d.ts.map +1 -0
- package/dist/lib/db/bpe.js +44 -0
- package/dist/lib/db/bpe.js.map +1 -0
- package/dist/lib/db/connection.d.ts +47 -0
- package/dist/lib/db/connection.d.ts.map +1 -0
- package/dist/lib/db/connection.js +114 -0
- package/dist/lib/db/connection.js.map +1 -0
- package/dist/lib/db/documents.d.ts +58 -0
- package/dist/lib/db/documents.d.ts.map +1 -0
- package/dist/lib/db/documents.js +374 -0
- package/dist/lib/db/documents.js.map +1 -0
- package/dist/lib/db/file-hash.d.ts +26 -0
- package/dist/lib/db/file-hash.d.ts.map +1 -0
- package/dist/lib/db/file-hash.js +56 -0
- package/dist/lib/db/file-hash.js.map +1 -0
- package/dist/lib/db/global-memories.d.ts +62 -0
- package/dist/lib/db/global-memories.d.ts.map +1 -0
- package/dist/lib/db/global-memories.js +173 -0
- package/dist/lib/db/global-memories.js.map +1 -0
- package/dist/lib/db/graph.d.ts +163 -0
- package/dist/lib/db/graph.d.ts.map +1 -0
- package/dist/lib/db/graph.js +488 -0
- package/dist/lib/db/graph.js.map +1 -0
- package/dist/lib/db/helpers.d.ts +13 -0
- package/dist/lib/db/helpers.d.ts.map +1 -0
- package/dist/lib/db/helpers.js +21 -0
- package/dist/lib/db/helpers.js.map +1 -0
- package/dist/lib/db/hybrid-search.d.ts +64 -0
- package/dist/lib/db/hybrid-search.d.ts.map +1 -0
- package/dist/lib/db/hybrid-search.js +549 -0
- package/dist/lib/db/hybrid-search.js.map +1 -0
- package/dist/lib/db/index.d.ts +23 -0
- package/dist/lib/db/index.d.ts.map +1 -0
- package/dist/lib/db/index.js +23 -0
- package/dist/lib/db/index.js.map +1 -0
- package/dist/lib/db/memories.d.ts +174 -0
- package/dist/lib/db/memories.d.ts.map +1 -0
- package/dist/lib/db/memories.js +866 -0
- package/dist/lib/db/memories.js.map +1 -0
- package/dist/lib/db/retention.d.ts +64 -0
- package/dist/lib/db/retention.d.ts.map +1 -0
- package/dist/lib/db/retention.js +115 -0
- package/dist/lib/db/retention.js.map +1 -0
- package/dist/lib/db/schema.d.ts +29 -0
- package/dist/lib/db/schema.d.ts.map +1 -0
- package/dist/lib/db/schema.js +832 -0
- package/dist/lib/db/schema.js.map +1 -0
- package/dist/lib/db/skills.d.ts +65 -0
- package/dist/lib/db/skills.d.ts.map +1 -0
- package/dist/lib/db/skills.js +119 -0
- package/dist/lib/db/skills.js.map +1 -0
- package/dist/lib/db/token-frequency.d.ts +34 -0
- package/dist/lib/db/token-frequency.d.ts.map +1 -0
- package/dist/lib/db/token-frequency.js +92 -0
- package/dist/lib/db/token-frequency.js.map +1 -0
- package/dist/lib/db/token-stats.d.ts +43 -0
- package/dist/lib/db/token-stats.d.ts.map +1 -0
- package/dist/lib/db/token-stats.js +59 -0
- package/dist/lib/db/token-stats.js.map +1 -0
- package/dist/lib/db/types.d.ts +57 -0
- package/dist/lib/db/types.d.ts.map +1 -0
- package/dist/lib/db/types.js +5 -0
- package/dist/lib/db/types.js.map +1 -0
- package/dist/lib/db/web-search-history.d.ts +22 -0
- package/dist/lib/db/web-search-history.d.ts.map +1 -0
- package/dist/lib/db/web-search-history.js +107 -0
- package/dist/lib/db/web-search-history.js.map +1 -0
- package/dist/lib/debug/state.d.ts +17 -0
- package/dist/lib/debug/state.d.ts.map +1 -0
- package/dist/lib/debug/state.js +131 -0
- package/dist/lib/debug/state.js.map +1 -0
- package/dist/lib/debug/types.d.ts +74 -0
- package/dist/lib/debug/types.d.ts.map +1 -0
- package/dist/lib/debug/types.js +85 -0
- package/dist/lib/debug/types.js.map +1 -0
- package/dist/lib/embedding-pool.d.ts +37 -0
- package/dist/lib/embedding-pool.d.ts.map +1 -0
- package/dist/lib/embedding-pool.js +159 -0
- package/dist/lib/embedding-pool.js.map +1 -0
- package/dist/lib/embedding-worker.d.ts +7 -0
- package/dist/lib/embedding-worker.d.ts.map +1 -0
- package/dist/lib/embedding-worker.js +49 -0
- package/dist/lib/embedding-worker.js.map +1 -0
- package/dist/lib/embeddings.d.ts +54 -0
- package/dist/lib/embeddings.d.ts.map +1 -0
- package/dist/lib/embeddings.js +437 -0
- package/dist/lib/embeddings.js.map +1 -0
- package/dist/lib/errors.d.ts +40 -0
- package/dist/lib/errors.d.ts.map +1 -0
- package/dist/lib/errors.js +66 -0
- package/dist/lib/errors.js.map +1 -0
- package/dist/lib/fault-logger.d.ts +35 -0
- package/dist/lib/fault-logger.d.ts.map +1 -0
- package/dist/lib/fault-logger.js +182 -0
- package/dist/lib/fault-logger.js.map +1 -0
- package/dist/lib/graph/centrality.d.ts +34 -0
- package/dist/lib/graph/centrality.d.ts.map +1 -0
- package/dist/lib/graph/centrality.js +76 -0
- package/dist/lib/graph/centrality.js.map +1 -0
- package/dist/lib/graph/cleanup.d.ts +36 -0
- package/dist/lib/graph/cleanup.d.ts.map +1 -0
- package/dist/lib/graph/cleanup.js +96 -0
- package/dist/lib/graph/cleanup.js.map +1 -0
- package/dist/lib/graph/community-detection.d.ts +61 -0
- package/dist/lib/graph/community-detection.d.ts.map +1 -0
- package/dist/lib/graph/community-detection.js +218 -0
- package/dist/lib/graph/community-detection.js.map +1 -0
- package/dist/lib/graph/contextual-proximity.d.ts +41 -0
- package/dist/lib/graph/contextual-proximity.d.ts.map +1 -0
- package/dist/lib/graph/contextual-proximity.js +125 -0
- package/dist/lib/graph/contextual-proximity.js.map +1 -0
- package/dist/lib/graph/llm-relations.d.ts +54 -0
- package/dist/lib/graph/llm-relations.d.ts.map +1 -0
- package/dist/lib/graph/llm-relations.js +265 -0
- package/dist/lib/graph/llm-relations.js.map +1 -0
- package/dist/lib/graph-export.d.ts +13 -0
- package/dist/lib/graph-export.d.ts.map +1 -0
- package/dist/lib/graph-export.js +637 -0
- package/dist/lib/graph-export.js.map +1 -0
- package/dist/lib/graph-scheduler.d.ts +7 -0
- package/dist/lib/graph-scheduler.d.ts.map +1 -0
- package/dist/lib/graph-scheduler.js +21 -0
- package/dist/lib/graph-scheduler.js.map +1 -0
- package/dist/lib/indexer.d.ts +36 -0
- package/dist/lib/indexer.d.ts.map +1 -0
- package/dist/lib/indexer.js +226 -0
- package/dist/lib/indexer.js.map +1 -0
- package/dist/lib/learning-delta.d.ts +35 -0
- package/dist/lib/learning-delta.d.ts.map +1 -0
- package/dist/lib/learning-delta.js +53 -0
- package/dist/lib/learning-delta.js.map +1 -0
- package/dist/lib/llm.d.ts +118 -0
- package/dist/lib/llm.d.ts.map +1 -0
- package/dist/lib/llm.js +439 -0
- package/dist/lib/llm.js.map +1 -0
- package/dist/lib/lock.d.ts +27 -0
- package/dist/lib/lock.d.ts.map +1 -0
- package/dist/lib/lock.js +143 -0
- package/dist/lib/lock.js.map +1 -0
- package/dist/lib/md-fetch.d.ts +61 -0
- package/dist/lib/md-fetch.d.ts.map +1 -0
- package/dist/lib/md-fetch.js +141 -0
- package/dist/lib/md-fetch.js.map +1 -0
- package/dist/lib/mmr.d.ts +29 -0
- package/dist/lib/mmr.d.ts.map +1 -0
- package/dist/lib/mmr.js +89 -0
- package/dist/lib/mmr.js.map +1 -0
- package/dist/lib/onboarding/ai-chat.d.ts +11 -0
- package/dist/lib/onboarding/ai-chat.d.ts.map +1 -0
- package/dist/lib/onboarding/ai-chat.js +85 -0
- package/dist/lib/onboarding/ai-chat.js.map +1 -0
- package/dist/lib/onboarding/index.d.ts +7 -0
- package/dist/lib/onboarding/index.d.ts.map +1 -0
- package/dist/lib/onboarding/index.js +7 -0
- package/dist/lib/onboarding/index.js.map +1 -0
- package/dist/lib/onboarding/wizard.d.ts +11 -0
- package/dist/lib/onboarding/wizard.d.ts.map +1 -0
- package/dist/lib/onboarding/wizard.js +104 -0
- package/dist/lib/onboarding/wizard.js.map +1 -0
- package/dist/lib/ort-provider.d.ts +27 -0
- package/dist/lib/ort-provider.d.ts.map +1 -0
- package/dist/lib/ort-provider.js +83 -0
- package/dist/lib/ort-provider.js.map +1 -0
- package/dist/lib/ort-session.d.ts +32 -0
- package/dist/lib/ort-session.d.ts.map +1 -0
- package/dist/lib/ort-session.js +227 -0
- package/dist/lib/ort-session.js.map +1 -0
- package/dist/lib/patterns.d.ts +43 -0
- package/dist/lib/patterns.d.ts.map +1 -0
- package/dist/lib/patterns.js +395 -0
- package/dist/lib/patterns.js.map +1 -0
- package/dist/lib/prd/codebase-context.d.ts +27 -0
- package/dist/lib/prd/codebase-context.d.ts.map +1 -0
- package/dist/lib/prd/codebase-context.js +420 -0
- package/dist/lib/prd/codebase-context.js.map +1 -0
- package/dist/lib/prd/context.d.ts +24 -0
- package/dist/lib/prd/context.d.ts.map +1 -0
- package/dist/lib/prd/context.js +68 -0
- package/dist/lib/prd/context.js.map +1 -0
- package/dist/lib/prd/executor.d.ts +52 -0
- package/dist/lib/prd/executor.d.ts.map +1 -0
- package/dist/lib/prd/executor.js +154 -0
- package/dist/lib/prd/executor.js.map +1 -0
- package/dist/lib/prd/export.d.ts +40 -0
- package/dist/lib/prd/export.d.ts.map +1 -0
- package/dist/lib/prd/export.js +511 -0
- package/dist/lib/prd/export.js.map +1 -0
- package/dist/lib/prd/gates.d.ts +30 -0
- package/dist/lib/prd/gates.d.ts.map +1 -0
- package/dist/lib/prd/gates.js +100 -0
- package/dist/lib/prd/gates.js.map +1 -0
- package/dist/lib/prd/generate.d.ts +56 -0
- package/dist/lib/prd/generate.d.ts.map +1 -0
- package/dist/lib/prd/generate.js +357 -0
- package/dist/lib/prd/generate.js.map +1 -0
- package/dist/lib/prd/parse.d.ts +21 -0
- package/dist/lib/prd/parse.d.ts.map +1 -0
- package/dist/lib/prd/parse.js +270 -0
- package/dist/lib/prd/parse.js.map +1 -0
- package/dist/lib/prd/prompt-builder.d.ts +18 -0
- package/dist/lib/prd/prompt-builder.d.ts.map +1 -0
- package/dist/lib/prd/prompt-builder.js +61 -0
- package/dist/lib/prd/prompt-builder.js.map +1 -0
- package/dist/lib/prd/runner.d.ts +54 -0
- package/dist/lib/prd/runner.d.ts.map +1 -0
- package/dist/lib/prd/runner.js +563 -0
- package/dist/lib/prd/runner.js.map +1 -0
- package/dist/lib/prd/scheduler.d.ts +27 -0
- package/dist/lib/prd/scheduler.d.ts.map +1 -0
- package/dist/lib/prd/scheduler.js +113 -0
- package/dist/lib/prd/scheduler.js.map +1 -0
- package/dist/lib/prd/state.d.ts +77 -0
- package/dist/lib/prd/state.d.ts.map +1 -0
- package/dist/lib/prd/state.js +253 -0
- package/dist/lib/prd/state.js.map +1 -0
- package/dist/lib/prd/team-runner.d.ts +48 -0
- package/dist/lib/prd/team-runner.d.ts.map +1 -0
- package/dist/lib/prd/team-runner.js +261 -0
- package/dist/lib/prd/team-runner.js.map +1 -0
- package/dist/lib/prd/types.d.ts +169 -0
- package/dist/lib/prd/types.d.ts.map +1 -0
- package/dist/lib/prd/types.js +143 -0
- package/dist/lib/prd/types.js.map +1 -0
- package/dist/lib/prd/worktree.d.ts +54 -0
- package/dist/lib/prd/worktree.d.ts.map +1 -0
- package/dist/lib/prd/worktree.js +255 -0
- package/dist/lib/prd/worktree.js.map +1 -0
- package/dist/lib/precompute-context.d.ts +48 -0
- package/dist/lib/precompute-context.d.ts.map +1 -0
- package/dist/lib/precompute-context.js +265 -0
- package/dist/lib/precompute-context.js.map +1 -0
- package/dist/lib/pricing.d.ts +60 -0
- package/dist/lib/pricing.d.ts.map +1 -0
- package/dist/lib/pricing.js +258 -0
- package/dist/lib/pricing.js.map +1 -0
- package/dist/lib/process-registry.d.ts +30 -0
- package/dist/lib/process-registry.d.ts.map +1 -0
- package/dist/lib/process-registry.js +92 -0
- package/dist/lib/process-registry.js.map +1 -0
- package/dist/lib/progress-log.d.ts +44 -0
- package/dist/lib/progress-log.d.ts.map +1 -0
- package/dist/lib/progress-log.js +58 -0
- package/dist/lib/progress-log.js.map +1 -0
- package/dist/lib/public-api.d.ts +56 -0
- package/dist/lib/public-api.d.ts.map +1 -0
- package/dist/lib/public-api.js +63 -0
- package/dist/lib/public-api.js.map +1 -0
- package/dist/lib/quality.d.ts +66 -0
- package/dist/lib/quality.d.ts.map +1 -0
- package/dist/lib/quality.js +486 -0
- package/dist/lib/quality.js.map +1 -0
- package/dist/lib/query-expansion.d.ts +16 -0
- package/dist/lib/query-expansion.d.ts.map +1 -0
- package/dist/lib/query-expansion.js +53 -0
- package/dist/lib/query-expansion.js.map +1 -0
- package/dist/lib/readiness.d.ts +35 -0
- package/dist/lib/readiness.d.ts.map +1 -0
- package/dist/lib/readiness.js +142 -0
- package/dist/lib/readiness.js.map +1 -0
- package/dist/lib/reference-embeddings.d.ts +39 -0
- package/dist/lib/reference-embeddings.d.ts.map +1 -0
- package/dist/lib/reference-embeddings.js +95 -0
- package/dist/lib/reference-embeddings.js.map +1 -0
- package/dist/lib/reflection-synthesizer.d.ts +27 -0
- package/dist/lib/reflection-synthesizer.d.ts.map +1 -0
- package/dist/lib/reflection-synthesizer.js +149 -0
- package/dist/lib/reflection-synthesizer.js.map +1 -0
- package/dist/lib/retention.d.ts +105 -0
- package/dist/lib/retention.d.ts.map +1 -0
- package/dist/lib/retention.js +246 -0
- package/dist/lib/retention.js.map +1 -0
- package/dist/lib/sensitive-filter.d.ts +34 -0
- package/dist/lib/sensitive-filter.d.ts.map +1 -0
- package/dist/lib/sensitive-filter.js +344 -0
- package/dist/lib/sensitive-filter.js.map +1 -0
- package/dist/lib/session-observations.d.ts +59 -0
- package/dist/lib/session-observations.d.ts.map +1 -0
- package/dist/lib/session-observations.js +128 -0
- package/dist/lib/session-observations.js.map +1 -0
- package/dist/lib/session-summary.d.ts +65 -0
- package/dist/lib/session-summary.d.ts.map +1 -0
- package/dist/lib/session-summary.js +344 -0
- package/dist/lib/session-summary.js.map +1 -0
- package/dist/lib/similarity-worker.d.ts +7 -0
- package/dist/lib/similarity-worker.d.ts.map +1 -0
- package/dist/lib/similarity-worker.js +36 -0
- package/dist/lib/similarity-worker.js.map +1 -0
- package/dist/lib/skills.d.ts +78 -0
- package/dist/lib/skills.d.ts.map +1 -0
- package/dist/lib/skills.js +439 -0
- package/dist/lib/skills.js.map +1 -0
- package/dist/lib/skyll-client.d.ts +59 -0
- package/dist/lib/skyll-client.d.ts.map +1 -0
- package/dist/lib/skyll-client.js +257 -0
- package/dist/lib/skyll-client.js.map +1 -0
- package/dist/lib/storage/backends/interface.d.ts +383 -0
- package/dist/lib/storage/backends/interface.d.ts.map +1 -0
- package/dist/lib/storage/backends/interface.js +12 -0
- package/dist/lib/storage/backends/interface.js.map +1 -0
- package/dist/lib/storage/backends/postgresql.d.ts +454 -0
- package/dist/lib/storage/backends/postgresql.d.ts.map +1 -0
- package/dist/lib/storage/backends/postgresql.js +2528 -0
- package/dist/lib/storage/backends/postgresql.js.map +1 -0
- package/dist/lib/storage/benchmark.d.ts +16 -0
- package/dist/lib/storage/benchmark.d.ts.map +1 -0
- package/dist/lib/storage/benchmark.js +219 -0
- package/dist/lib/storage/benchmark.js.map +1 -0
- package/dist/lib/storage/dispatcher-export.d.ts +108 -0
- package/dist/lib/storage/dispatcher-export.d.ts.map +1 -0
- package/dist/lib/storage/dispatcher-export.js +593 -0
- package/dist/lib/storage/dispatcher-export.js.map +1 -0
- package/dist/lib/storage/dispatcher.d.ts +468 -0
- package/dist/lib/storage/dispatcher.d.ts.map +1 -0
- package/dist/lib/storage/dispatcher.js +1926 -0
- package/dist/lib/storage/dispatcher.js.map +1 -0
- package/dist/lib/storage/index.d.ts +481 -0
- package/dist/lib/storage/index.d.ts.map +1 -0
- package/dist/lib/storage/index.js +727 -0
- package/dist/lib/storage/index.js.map +1 -0
- package/dist/lib/storage/migration/export-import.d.ts +133 -0
- package/dist/lib/storage/migration/export-import.d.ts.map +1 -0
- package/dist/lib/storage/migration/export-import.js +264 -0
- package/dist/lib/storage/migration/export-import.js.map +1 -0
- package/dist/lib/storage/types.d.ts +313 -0
- package/dist/lib/storage/types.d.ts.map +1 -0
- package/dist/lib/storage/types.js +30 -0
- package/dist/lib/storage/types.js.map +1 -0
- package/dist/lib/storage/vector/interface.d.ts +89 -0
- package/dist/lib/storage/vector/interface.d.ts.map +1 -0
- package/dist/lib/storage/vector/interface.js +10 -0
- package/dist/lib/storage/vector/interface.js.map +1 -0
- package/dist/lib/storage/vector/qdrant.d.ts +221 -0
- package/dist/lib/storage/vector/qdrant.d.ts.map +1 -0
- package/dist/lib/storage/vector/qdrant.js +880 -0
- package/dist/lib/storage/vector/qdrant.js.map +1 -0
- package/dist/lib/supersession.d.ts +26 -0
- package/dist/lib/supersession.d.ts.map +1 -0
- package/dist/lib/supersession.js +97 -0
- package/dist/lib/supersession.js.map +1 -0
- package/dist/lib/temporal.d.ts +140 -0
- package/dist/lib/temporal.d.ts.map +1 -0
- package/dist/lib/temporal.js +303 -0
- package/dist/lib/temporal.js.map +1 -0
- package/dist/lib/token-budget.d.ts +79 -0
- package/dist/lib/token-budget.d.ts.map +1 -0
- package/dist/lib/token-budget.js +146 -0
- package/dist/lib/token-budget.js.map +1 -0
- package/dist/lib/token-counter.d.ts +27 -0
- package/dist/lib/token-counter.d.ts.map +1 -0
- package/dist/lib/token-counter.js +52 -0
- package/dist/lib/token-counter.js.map +1 -0
- package/dist/lib/tree-sitter/chunker-ts.d.ts +24 -0
- package/dist/lib/tree-sitter/chunker-ts.d.ts.map +1 -0
- package/dist/lib/tree-sitter/chunker-ts.js +206 -0
- package/dist/lib/tree-sitter/chunker-ts.js.map +1 -0
- package/dist/lib/tree-sitter/extractor.d.ts +43 -0
- package/dist/lib/tree-sitter/extractor.d.ts.map +1 -0
- package/dist/lib/tree-sitter/extractor.js +297 -0
- package/dist/lib/tree-sitter/extractor.js.map +1 -0
- package/dist/lib/tree-sitter/index.d.ts +13 -0
- package/dist/lib/tree-sitter/index.d.ts.map +1 -0
- package/dist/lib/tree-sitter/index.js +14 -0
- package/dist/lib/tree-sitter/index.js.map +1 -0
- package/dist/lib/tree-sitter/parser.d.ts +70 -0
- package/dist/lib/tree-sitter/parser.d.ts.map +1 -0
- package/dist/lib/tree-sitter/parser.js +354 -0
- package/dist/lib/tree-sitter/parser.js.map +1 -0
- package/dist/lib/tree-sitter/public.d.ts +28 -0
- package/dist/lib/tree-sitter/public.d.ts.map +1 -0
- package/dist/lib/tree-sitter/public.js +50 -0
- package/dist/lib/tree-sitter/public.js.map +1 -0
- package/dist/lib/tree-sitter/queries.d.ts +28 -0
- package/dist/lib/tree-sitter/queries.d.ts.map +1 -0
- package/dist/lib/tree-sitter/queries.js +383 -0
- package/dist/lib/tree-sitter/queries.js.map +1 -0
- package/dist/lib/tree-sitter/types.d.ts +69 -0
- package/dist/lib/tree-sitter/types.d.ts.map +1 -0
- package/dist/lib/tree-sitter/types.js +131 -0
- package/dist/lib/tree-sitter/types.js.map +1 -0
- package/dist/lib/working-memory-pipeline.d.ts +118 -0
- package/dist/lib/working-memory-pipeline.d.ts.map +1 -0
- package/dist/lib/working-memory-pipeline.js +344 -0
- package/dist/lib/working-memory-pipeline.js.map +1 -0
- package/dist/mcp/helpers.d.ts +44 -0
- package/dist/mcp/helpers.d.ts.map +1 -0
- package/dist/mcp/helpers.js +244 -0
- package/dist/mcp/helpers.js.map +1 -0
- package/dist/mcp/index.d.ts +10 -0
- package/dist/mcp/index.d.ts.map +1 -0
- package/dist/mcp/index.js +15 -0
- package/dist/mcp/index.js.map +1 -0
- package/dist/mcp/resources.d.ts +12 -0
- package/dist/mcp/resources.d.ts.map +1 -0
- package/dist/mcp/resources.js +136 -0
- package/dist/mcp/resources.js.map +1 -0
- package/dist/mcp/server.d.ts +22 -0
- package/dist/mcp/server.d.ts.map +1 -0
- package/dist/mcp/server.js +131 -0
- package/dist/mcp/server.js.map +1 -0
- package/dist/mcp/tools/config.d.ts +10 -0
- package/dist/mcp/tools/config.d.ts.map +1 -0
- package/dist/mcp/tools/config.js +236 -0
- package/dist/mcp/tools/config.js.map +1 -0
- package/dist/mcp/tools/dead-end.d.ts +9 -0
- package/dist/mcp/tools/dead-end.d.ts.map +1 -0
- package/dist/mcp/tools/dead-end.js +137 -0
- package/dist/mcp/tools/dead-end.js.map +1 -0
- package/dist/mcp/tools/debug.d.ts +9 -0
- package/dist/mcp/tools/debug.d.ts.map +1 -0
- package/dist/mcp/tools/debug.js +387 -0
- package/dist/mcp/tools/debug.js.map +1 -0
- package/dist/mcp/tools/graph.d.ts +9 -0
- package/dist/mcp/tools/graph.d.ts.map +1 -0
- package/dist/mcp/tools/graph.js +337 -0
- package/dist/mcp/tools/graph.js.map +1 -0
- package/dist/mcp/tools/indexing.d.ts +12 -0
- package/dist/mcp/tools/indexing.d.ts.map +1 -0
- package/dist/mcp/tools/indexing.js +289 -0
- package/dist/mcp/tools/indexing.js.map +1 -0
- package/dist/mcp/tools/memory.d.ts +14 -0
- package/dist/mcp/tools/memory.d.ts.map +1 -0
- package/dist/mcp/tools/memory.js +1170 -0
- package/dist/mcp/tools/memory.js.map +1 -0
- package/dist/mcp/tools/prd.d.ts +12 -0
- package/dist/mcp/tools/prd.d.ts.map +1 -0
- package/dist/mcp/tools/prd.js +276 -0
- package/dist/mcp/tools/prd.js.map +1 -0
- package/dist/mcp/tools/search.d.ts +9 -0
- package/dist/mcp/tools/search.d.ts.map +1 -0
- package/dist/mcp/tools/search.js +279 -0
- package/dist/mcp/tools/search.js.map +1 -0
- package/dist/mcp/tools/status.d.ts +10 -0
- package/dist/mcp/tools/status.d.ts.map +1 -0
- package/dist/mcp/tools/status.js +296 -0
- package/dist/mcp/tools/status.js.map +1 -0
- package/dist/mcp/tools/web-fetch.d.ts +10 -0
- package/dist/mcp/tools/web-fetch.d.ts.map +1 -0
- package/dist/mcp/tools/web-fetch.js +107 -0
- package/dist/mcp/tools/web-fetch.js.map +1 -0
- package/dist/mcp/tools/web-search.d.ts +14 -0
- package/dist/mcp/tools/web-search.d.ts.map +1 -0
- package/dist/mcp/tools/web-search.js +427 -0
- package/dist/mcp/tools/web-search.js.map +1 -0
- package/dist/mcp/types.d.ts +16 -0
- package/dist/mcp/types.d.ts.map +1 -0
- package/dist/mcp/types.js +5 -0
- package/dist/mcp/types.js.map +1 -0
- package/dist/mcp-server.d.ts +9 -0
- package/dist/mcp-server.d.ts.map +1 -0
- package/dist/mcp-server.js +9 -0
- package/dist/mcp-server.js.map +1 -0
- package/dist/prompts/analyze.d.ts +21 -0
- package/dist/prompts/analyze.d.ts.map +1 -0
- package/dist/prompts/analyze.js +27 -0
- package/dist/prompts/analyze.js.map +1 -0
- package/dist/prompts/briefing.d.ts +27 -0
- package/dist/prompts/briefing.d.ts.map +1 -0
- package/dist/prompts/briefing.js +123 -0
- package/dist/prompts/briefing.js.map +1 -0
- package/dist/prompts/chat.d.ts +8 -0
- package/dist/prompts/chat.d.ts.map +1 -0
- package/dist/prompts/chat.js +39 -0
- package/dist/prompts/chat.js.map +1 -0
- package/dist/prompts/daemon.d.ts +16 -0
- package/dist/prompts/daemon.d.ts.map +1 -0
- package/dist/prompts/daemon.js +46 -0
- package/dist/prompts/daemon.js.map +1 -0
- package/dist/prompts/extraction.d.ts +13 -0
- package/dist/prompts/extraction.d.ts.map +1 -0
- package/dist/prompts/extraction.js +93 -0
- package/dist/prompts/extraction.js.map +1 -0
- package/dist/prompts/index.d.ts +17 -0
- package/dist/prompts/index.d.ts.map +1 -0
- package/dist/prompts/index.js +27 -0
- package/dist/prompts/index.js.map +1 -0
- package/dist/prompts/memory.d.ts +11 -0
- package/dist/prompts/memory.d.ts.map +1 -0
- package/dist/prompts/memory.js +23 -0
- package/dist/prompts/memory.js.map +1 -0
- package/dist/prompts/onboarding.d.ts +16 -0
- package/dist/prompts/onboarding.d.ts.map +1 -0
- package/dist/prompts/onboarding.js +183 -0
- package/dist/prompts/onboarding.js.map +1 -0
- package/dist/prompts/prd.d.ts +12 -0
- package/dist/prompts/prd.d.ts.map +1 -0
- package/dist/prompts/prd.js +211 -0
- package/dist/prompts/prd.js.map +1 -0
- package/dist/prompts/quality.d.ts +11 -0
- package/dist/prompts/quality.d.ts.map +1 -0
- package/dist/prompts/quality.js +11 -0
- package/dist/prompts/quality.js.map +1 -0
- package/dist/prompts/skills.d.ts +16 -0
- package/dist/prompts/skills.d.ts.map +1 -0
- package/dist/prompts/skills.js +30 -0
- package/dist/prompts/skills.js.map +1 -0
- package/hooks/succ-post-tool.cjs +227 -0
- package/hooks/succ-pre-tool.cjs +312 -0
- package/hooks/succ-session-end.cjs +85 -0
- package/hooks/succ-session-start.cjs +618 -0
- package/hooks/succ-stop-reflection.cjs +87 -0
- package/hooks/succ-user-prompt.cjs +220 -0
- package/package.json +128 -0
|
@@ -0,0 +1,2528 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* PostgreSQL storage backend implementation.
|
|
3
|
+
*
|
|
4
|
+
* Uses pg (node-postgres) for connections and pgvector extension for vector similarity search.
|
|
5
|
+
*
|
|
6
|
+
* Schema mirrors SQLite structure with PostgreSQL-specific adaptations:
|
|
7
|
+
* - SERIAL instead of AUTOINCREMENT
|
|
8
|
+
* - BYTEA instead of BLOB
|
|
9
|
+
* - vector(N) type from pgvector extension
|
|
10
|
+
* - ON CONFLICT instead of INSERT OR REPLACE
|
|
11
|
+
*/
|
|
12
|
+
import { StorageError, ConfigError } from '../../errors.js';
|
|
13
|
+
// Lazy-load pg to make it optional
|
|
14
|
+
let pg = null;
|
|
15
|
+
async function loadPg() {
|
|
16
|
+
if (pg)
|
|
17
|
+
return pg;
|
|
18
|
+
try {
|
|
19
|
+
pg = await import('pg');
|
|
20
|
+
return pg;
|
|
21
|
+
}
|
|
22
|
+
catch {
|
|
23
|
+
throw new ConfigError('PostgreSQL support requires the "pg" package. ' + 'Install it with: npm install pg');
|
|
24
|
+
}
|
|
25
|
+
}
|
|
26
|
+
/**
|
|
27
|
+
* Convert number[] embedding to pgvector format string: '[1.0, 2.0, 3.0]'
|
|
28
|
+
*/
|
|
29
|
+
function toPgVector(embedding) {
|
|
30
|
+
return '[' + embedding.join(',') + ']';
|
|
31
|
+
}
|
|
32
|
+
/**
|
|
33
|
+
* Parse pgvector string back to number[]
|
|
34
|
+
*/
|
|
35
|
+
function fromPgVector(str) {
|
|
36
|
+
// pgvector returns string like '[1.0, 2.0, 3.0]'
|
|
37
|
+
const inner = str.slice(1, -1);
|
|
38
|
+
if (!inner)
|
|
39
|
+
return [];
|
|
40
|
+
return inner.split(',').map((s) => parseFloat(s.trim()));
|
|
41
|
+
}
|
|
42
|
+
export class PostgresBackend {
|
|
43
|
+
pool = null;
|
|
44
|
+
config;
|
|
45
|
+
initialized = false;
|
|
46
|
+
projectId = null;
|
|
47
|
+
constructor(config, projectId) {
|
|
48
|
+
this.config = config;
|
|
49
|
+
this.projectId = projectId?.toLowerCase() ?? null;
|
|
50
|
+
}
|
|
51
|
+
/**
|
|
52
|
+
* Set the current project ID for scoping memories.
|
|
53
|
+
* NULL = global memories (shared across all projects)
|
|
54
|
+
* Always normalized to lowercase for case-insensitive path matching (Windows).
|
|
55
|
+
*/
|
|
56
|
+
setProjectId(projectId) {
|
|
57
|
+
this.projectId = projectId?.toLowerCase() ?? null;
|
|
58
|
+
}
|
|
59
|
+
getProjectId() {
|
|
60
|
+
return this.projectId;
|
|
61
|
+
}
|
|
62
|
+
/**
|
|
63
|
+
* Get or create the connection pool.
|
|
64
|
+
*/
|
|
65
|
+
async getPool() {
|
|
66
|
+
if (this.pool)
|
|
67
|
+
return this.pool;
|
|
68
|
+
const { Pool } = await loadPg();
|
|
69
|
+
const poolConfig = {
|
|
70
|
+
max: this.config.poolSize ?? 10,
|
|
71
|
+
};
|
|
72
|
+
if (this.config.connectionString) {
|
|
73
|
+
poolConfig.connectionString = this.config.connectionString;
|
|
74
|
+
}
|
|
75
|
+
else {
|
|
76
|
+
poolConfig.host = this.config.host ?? 'localhost';
|
|
77
|
+
poolConfig.port = this.config.port ?? 5432;
|
|
78
|
+
poolConfig.database = this.config.database ?? 'succ';
|
|
79
|
+
poolConfig.user = this.config.user;
|
|
80
|
+
poolConfig.password = this.config.password;
|
|
81
|
+
}
|
|
82
|
+
if (this.config.ssl) {
|
|
83
|
+
if (typeof this.config.ssl === 'object') {
|
|
84
|
+
poolConfig.ssl = {
|
|
85
|
+
rejectUnauthorized: this.config.ssl.rejectUnauthorized ?? true,
|
|
86
|
+
ca: this.config.ssl.ca,
|
|
87
|
+
};
|
|
88
|
+
}
|
|
89
|
+
else {
|
|
90
|
+
poolConfig.ssl = { rejectUnauthorized: true };
|
|
91
|
+
}
|
|
92
|
+
}
|
|
93
|
+
this.pool = new Pool(poolConfig);
|
|
94
|
+
if (!this.initialized) {
|
|
95
|
+
await this.initSchema();
|
|
96
|
+
this.initialized = true;
|
|
97
|
+
}
|
|
98
|
+
return this.pool;
|
|
99
|
+
}
|
|
100
|
+
/**
|
|
101
|
+
* Get the current embedding dimensions from config.
|
|
102
|
+
*/
|
|
103
|
+
async getVectorDims() {
|
|
104
|
+
try {
|
|
105
|
+
const { getEmbeddingInfo } = await import('../../embeddings.js');
|
|
106
|
+
return getEmbeddingInfo().dimensions ?? 384;
|
|
107
|
+
}
|
|
108
|
+
catch {
|
|
109
|
+
return 384;
|
|
110
|
+
}
|
|
111
|
+
}
|
|
112
|
+
/**
|
|
113
|
+
* Initialize database schema.
|
|
114
|
+
*/
|
|
115
|
+
async initSchema() {
|
|
116
|
+
const pool = await this.getPool();
|
|
117
|
+
const dims = await this.getVectorDims();
|
|
118
|
+
// Enable pgvector extension
|
|
119
|
+
await pool.query('CREATE EXTENSION IF NOT EXISTS vector');
|
|
120
|
+
// Migrate embedding column dimensions if they changed
|
|
121
|
+
for (const table of ['documents', 'memories', 'skills']) {
|
|
122
|
+
await pool.query(`
|
|
123
|
+
DO $$
|
|
124
|
+
DECLARE
|
|
125
|
+
current_dims INTEGER;
|
|
126
|
+
BEGIN
|
|
127
|
+
SELECT atttypmod INTO current_dims
|
|
128
|
+
FROM pg_attribute
|
|
129
|
+
WHERE attrelid = '${table}'::regclass
|
|
130
|
+
AND attname = 'embedding';
|
|
131
|
+
IF current_dims IS NOT NULL AND current_dims != ${dims} THEN
|
|
132
|
+
-- Drop IVFFlat index (incompatible across dimensions)
|
|
133
|
+
EXECUTE format('DROP INDEX IF EXISTS idx_%s_embedding', '${table}');
|
|
134
|
+
-- Clear old embeddings (incompatible dimensions)
|
|
135
|
+
EXECUTE format('UPDATE %I SET embedding = NULL', '${table}');
|
|
136
|
+
-- Alter column to new dimension
|
|
137
|
+
EXECUTE format('ALTER TABLE %I ALTER COLUMN embedding TYPE vector(%s)', '${table}', ${dims});
|
|
138
|
+
END IF;
|
|
139
|
+
EXCEPTION WHEN undefined_table THEN
|
|
140
|
+
NULL; -- Table doesn't exist yet, will be created below
|
|
141
|
+
END $$;
|
|
142
|
+
`);
|
|
143
|
+
}
|
|
144
|
+
// Documents table
|
|
145
|
+
// project_id: scopes documents to a specific project
|
|
146
|
+
await pool.query(`
|
|
147
|
+
CREATE TABLE IF NOT EXISTS documents (
|
|
148
|
+
id SERIAL PRIMARY KEY,
|
|
149
|
+
project_id TEXT NOT NULL,
|
|
150
|
+
file_path TEXT NOT NULL,
|
|
151
|
+
chunk_index INTEGER NOT NULL,
|
|
152
|
+
content TEXT NOT NULL,
|
|
153
|
+
start_line INTEGER NOT NULL,
|
|
154
|
+
end_line INTEGER NOT NULL,
|
|
155
|
+
embedding vector(${dims}),
|
|
156
|
+
symbol_name TEXT,
|
|
157
|
+
symbol_type TEXT,
|
|
158
|
+
signature TEXT,
|
|
159
|
+
created_at TIMESTAMPTZ DEFAULT NOW(),
|
|
160
|
+
updated_at TIMESTAMPTZ DEFAULT NOW(),
|
|
161
|
+
UNIQUE(project_id, file_path, chunk_index)
|
|
162
|
+
)
|
|
163
|
+
`);
|
|
164
|
+
// Migration: add project_id column if missing
|
|
165
|
+
await pool.query(`
|
|
166
|
+
DO $$
|
|
167
|
+
BEGIN
|
|
168
|
+
IF NOT EXISTS (
|
|
169
|
+
SELECT 1 FROM information_schema.columns
|
|
170
|
+
WHERE table_name = 'documents' AND column_name = 'project_id'
|
|
171
|
+
) THEN
|
|
172
|
+
ALTER TABLE documents ADD COLUMN project_id TEXT;
|
|
173
|
+
-- Drop old unique constraint and add new one
|
|
174
|
+
ALTER TABLE documents DROP CONSTRAINT IF EXISTS documents_file_path_chunk_index_key;
|
|
175
|
+
ALTER TABLE documents ADD CONSTRAINT documents_project_file_chunk_key UNIQUE(project_id, file_path, chunk_index);
|
|
176
|
+
END IF;
|
|
177
|
+
END $$;
|
|
178
|
+
`);
|
|
179
|
+
// Migration: add symbol columns if missing
|
|
180
|
+
await pool.query(`
|
|
181
|
+
DO $$
|
|
182
|
+
BEGIN
|
|
183
|
+
IF NOT EXISTS (
|
|
184
|
+
SELECT 1 FROM information_schema.columns
|
|
185
|
+
WHERE table_name = 'documents' AND column_name = 'symbol_name'
|
|
186
|
+
) THEN
|
|
187
|
+
ALTER TABLE documents ADD COLUMN symbol_name TEXT;
|
|
188
|
+
ALTER TABLE documents ADD COLUMN symbol_type TEXT;
|
|
189
|
+
ALTER TABLE documents ADD COLUMN signature TEXT;
|
|
190
|
+
END IF;
|
|
191
|
+
END $$;
|
|
192
|
+
`);
|
|
193
|
+
await pool.query('CREATE INDEX IF NOT EXISTS idx_documents_project_id ON documents(project_id)');
|
|
194
|
+
await pool.query('CREATE INDEX IF NOT EXISTS idx_documents_file_path ON documents(file_path)');
|
|
195
|
+
await pool.query('CREATE INDEX IF NOT EXISTS idx_documents_embedding ON documents USING ivfflat (embedding vector_cosine_ops) WITH (lists = 100)');
|
|
196
|
+
// Metadata table
|
|
197
|
+
await pool.query(`
|
|
198
|
+
CREATE TABLE IF NOT EXISTS metadata (
|
|
199
|
+
key TEXT PRIMARY KEY,
|
|
200
|
+
value TEXT NOT NULL
|
|
201
|
+
)
|
|
202
|
+
`);
|
|
203
|
+
// File hashes table
|
|
204
|
+
// project_id: scopes file hashes to a specific project
|
|
205
|
+
await pool.query(`
|
|
206
|
+
CREATE TABLE IF NOT EXISTS file_hashes (
|
|
207
|
+
project_id TEXT NOT NULL,
|
|
208
|
+
file_path TEXT NOT NULL,
|
|
209
|
+
content_hash TEXT NOT NULL,
|
|
210
|
+
indexed_at TIMESTAMPTZ DEFAULT NOW(),
|
|
211
|
+
PRIMARY KEY (project_id, file_path)
|
|
212
|
+
)
|
|
213
|
+
`);
|
|
214
|
+
// Migration: add project_id column if missing
|
|
215
|
+
await pool.query(`
|
|
216
|
+
DO $$
|
|
217
|
+
BEGIN
|
|
218
|
+
IF NOT EXISTS (
|
|
219
|
+
SELECT 1 FROM information_schema.columns
|
|
220
|
+
WHERE table_name = 'file_hashes' AND column_name = 'project_id'
|
|
221
|
+
) THEN
|
|
222
|
+
-- Need to recreate table with new primary key
|
|
223
|
+
ALTER TABLE file_hashes ADD COLUMN project_id TEXT;
|
|
224
|
+
ALTER TABLE file_hashes DROP CONSTRAINT IF EXISTS file_hashes_pkey;
|
|
225
|
+
ALTER TABLE file_hashes ADD PRIMARY KEY (project_id, file_path);
|
|
226
|
+
END IF;
|
|
227
|
+
END $$;
|
|
228
|
+
`);
|
|
229
|
+
// Memories table
|
|
230
|
+
// project_id: NULL = global memory (shared across all projects)
|
|
231
|
+
// non-NULL = project-specific memory
|
|
232
|
+
await pool.query(`
|
|
233
|
+
CREATE TABLE IF NOT EXISTS memories (
|
|
234
|
+
id SERIAL PRIMARY KEY,
|
|
235
|
+
project_id TEXT,
|
|
236
|
+
content TEXT NOT NULL,
|
|
237
|
+
tags JSONB,
|
|
238
|
+
source TEXT,
|
|
239
|
+
type TEXT DEFAULT 'observation',
|
|
240
|
+
quality_score REAL,
|
|
241
|
+
quality_factors JSONB,
|
|
242
|
+
embedding vector(${dims}),
|
|
243
|
+
access_count REAL DEFAULT 0,
|
|
244
|
+
last_accessed TIMESTAMPTZ,
|
|
245
|
+
valid_from TIMESTAMPTZ,
|
|
246
|
+
valid_until TIMESTAMPTZ,
|
|
247
|
+
created_at TIMESTAMPTZ DEFAULT NOW()
|
|
248
|
+
)
|
|
249
|
+
`);
|
|
250
|
+
// Add project_id column if it doesn't exist (migration for existing databases)
|
|
251
|
+
await pool.query(`
|
|
252
|
+
DO $$
|
|
253
|
+
BEGIN
|
|
254
|
+
IF NOT EXISTS (
|
|
255
|
+
SELECT 1 FROM information_schema.columns
|
|
256
|
+
WHERE table_name = 'memories' AND column_name = 'project_id'
|
|
257
|
+
) THEN
|
|
258
|
+
ALTER TABLE memories ADD COLUMN project_id TEXT;
|
|
259
|
+
END IF;
|
|
260
|
+
END $$;
|
|
261
|
+
`);
|
|
262
|
+
await pool.query('CREATE INDEX IF NOT EXISTS idx_memories_created_at ON memories(created_at)');
|
|
263
|
+
await pool.query('CREATE INDEX IF NOT EXISTS idx_memories_quality ON memories(quality_score)');
|
|
264
|
+
await pool.query('CREATE INDEX IF NOT EXISTS idx_memories_type ON memories(type)');
|
|
265
|
+
await pool.query('CREATE INDEX IF NOT EXISTS idx_memories_project_id ON memories(project_id)');
|
|
266
|
+
await pool.query('CREATE INDEX IF NOT EXISTS idx_memories_embedding ON memories USING ivfflat (embedding vector_cosine_ops) WITH (lists = 100)');
|
|
267
|
+
// Memory links table
|
|
268
|
+
await pool.query(`
|
|
269
|
+
CREATE TABLE IF NOT EXISTS memory_links (
|
|
270
|
+
id SERIAL PRIMARY KEY,
|
|
271
|
+
source_id INTEGER NOT NULL REFERENCES memories(id) ON DELETE CASCADE,
|
|
272
|
+
target_id INTEGER NOT NULL REFERENCES memories(id) ON DELETE CASCADE,
|
|
273
|
+
relation TEXT NOT NULL DEFAULT 'related',
|
|
274
|
+
weight REAL DEFAULT 1.0,
|
|
275
|
+
valid_from TIMESTAMPTZ,
|
|
276
|
+
valid_until TIMESTAMPTZ,
|
|
277
|
+
created_at TIMESTAMPTZ DEFAULT NOW(),
|
|
278
|
+
UNIQUE(source_id, target_id, relation)
|
|
279
|
+
)
|
|
280
|
+
`);
|
|
281
|
+
await pool.query('CREATE INDEX IF NOT EXISTS idx_memory_links_source ON memory_links(source_id)');
|
|
282
|
+
await pool.query('CREATE INDEX IF NOT EXISTS idx_memory_links_target ON memory_links(target_id)');
|
|
283
|
+
// Migration: add llm_enriched column to memory_links
|
|
284
|
+
await pool.query('ALTER TABLE memory_links ADD COLUMN IF NOT EXISTS llm_enriched INTEGER DEFAULT 0');
|
|
285
|
+
// Migration: add project_id column to memory_links (for multi-project PG)
|
|
286
|
+
await pool.query(`
|
|
287
|
+
DO $$
|
|
288
|
+
BEGIN
|
|
289
|
+
IF NOT EXISTS (
|
|
290
|
+
SELECT 1 FROM information_schema.columns
|
|
291
|
+
WHERE table_name = 'memory_links' AND column_name = 'project_id'
|
|
292
|
+
) THEN
|
|
293
|
+
ALTER TABLE memory_links ADD COLUMN project_id TEXT;
|
|
294
|
+
END IF;
|
|
295
|
+
END $$;
|
|
296
|
+
`);
|
|
297
|
+
await pool.query('CREATE INDEX IF NOT EXISTS idx_memory_links_project_id ON memory_links(project_id)');
|
|
298
|
+
// Memory centrality cache table
|
|
299
|
+
await pool.query(`
|
|
300
|
+
CREATE TABLE IF NOT EXISTS memory_centrality (
|
|
301
|
+
memory_id INTEGER PRIMARY KEY REFERENCES memories(id) ON DELETE CASCADE,
|
|
302
|
+
degree REAL DEFAULT 0,
|
|
303
|
+
normalized_degree REAL DEFAULT 0,
|
|
304
|
+
updated_at TIMESTAMPTZ DEFAULT NOW()
|
|
305
|
+
)
|
|
306
|
+
`);
|
|
307
|
+
// Token frequencies table
|
|
308
|
+
await pool.query(`
|
|
309
|
+
CREATE TABLE IF NOT EXISTS token_frequencies (
|
|
310
|
+
token TEXT PRIMARY KEY,
|
|
311
|
+
frequency INTEGER NOT NULL DEFAULT 1,
|
|
312
|
+
updated_at TIMESTAMPTZ DEFAULT NOW()
|
|
313
|
+
)
|
|
314
|
+
`);
|
|
315
|
+
await pool.query('CREATE INDEX IF NOT EXISTS idx_token_freq ON token_frequencies(frequency DESC)');
|
|
316
|
+
// Token stats table
|
|
317
|
+
// project_id: scopes stats to a specific project
|
|
318
|
+
await pool.query(`
|
|
319
|
+
CREATE TABLE IF NOT EXISTS token_stats (
|
|
320
|
+
id SERIAL PRIMARY KEY,
|
|
321
|
+
project_id TEXT,
|
|
322
|
+
event_type TEXT NOT NULL,
|
|
323
|
+
query TEXT,
|
|
324
|
+
returned_tokens INTEGER NOT NULL DEFAULT 0,
|
|
325
|
+
full_source_tokens INTEGER NOT NULL DEFAULT 0,
|
|
326
|
+
savings_tokens INTEGER NOT NULL DEFAULT 0,
|
|
327
|
+
files_count INTEGER,
|
|
328
|
+
chunks_count INTEGER,
|
|
329
|
+
model TEXT,
|
|
330
|
+
estimated_cost REAL DEFAULT 0,
|
|
331
|
+
created_at TIMESTAMPTZ DEFAULT NOW()
|
|
332
|
+
)
|
|
333
|
+
`);
|
|
334
|
+
// Migration: add project_id column if missing
|
|
335
|
+
await pool.query(`
|
|
336
|
+
DO $$
|
|
337
|
+
BEGIN
|
|
338
|
+
IF NOT EXISTS (
|
|
339
|
+
SELECT 1 FROM information_schema.columns
|
|
340
|
+
WHERE table_name = 'token_stats' AND column_name = 'project_id'
|
|
341
|
+
) THEN
|
|
342
|
+
ALTER TABLE token_stats ADD COLUMN project_id TEXT;
|
|
343
|
+
END IF;
|
|
344
|
+
END $$;
|
|
345
|
+
`);
|
|
346
|
+
await pool.query('CREATE INDEX IF NOT EXISTS idx_token_stats_project_id ON token_stats(project_id)');
|
|
347
|
+
await pool.query('CREATE INDEX IF NOT EXISTS idx_token_stats_type ON token_stats(event_type)');
|
|
348
|
+
await pool.query('CREATE INDEX IF NOT EXISTS idx_token_stats_created ON token_stats(created_at)');
|
|
349
|
+
// Skills table
|
|
350
|
+
// project_id: scopes skills to a specific project (NULL for Skyll cached skills which are global)
|
|
351
|
+
await pool.query(`
|
|
352
|
+
CREATE TABLE IF NOT EXISTS skills (
|
|
353
|
+
id SERIAL PRIMARY KEY,
|
|
354
|
+
project_id TEXT,
|
|
355
|
+
name TEXT NOT NULL,
|
|
356
|
+
description TEXT NOT NULL,
|
|
357
|
+
source TEXT NOT NULL,
|
|
358
|
+
path TEXT,
|
|
359
|
+
content TEXT,
|
|
360
|
+
embedding vector(${dims}),
|
|
361
|
+
skyll_id TEXT,
|
|
362
|
+
usage_count INTEGER DEFAULT 0,
|
|
363
|
+
last_used TIMESTAMPTZ,
|
|
364
|
+
cached_at TIMESTAMPTZ,
|
|
365
|
+
cache_expires TIMESTAMPTZ,
|
|
366
|
+
created_at TIMESTAMPTZ DEFAULT NOW(),
|
|
367
|
+
updated_at TIMESTAMPTZ DEFAULT NOW(),
|
|
368
|
+
UNIQUE(project_id, name)
|
|
369
|
+
)
|
|
370
|
+
`);
|
|
371
|
+
// Migration: add project_id column if missing (for existing databases)
|
|
372
|
+
await pool.query(`
|
|
373
|
+
DO $$
|
|
374
|
+
BEGIN
|
|
375
|
+
IF NOT EXISTS (
|
|
376
|
+
SELECT 1 FROM information_schema.columns
|
|
377
|
+
WHERE table_name = 'skills' AND column_name = 'project_id'
|
|
378
|
+
) THEN
|
|
379
|
+
-- Add project_id column
|
|
380
|
+
ALTER TABLE skills ADD COLUMN project_id TEXT;
|
|
381
|
+
|
|
382
|
+
-- Drop old unique constraint on name only
|
|
383
|
+
ALTER TABLE skills DROP CONSTRAINT IF EXISTS skills_name_key;
|
|
384
|
+
|
|
385
|
+
-- Add new unique constraint on (project_id, name)
|
|
386
|
+
ALTER TABLE skills ADD CONSTRAINT skills_project_name_unique UNIQUE(project_id, name);
|
|
387
|
+
END IF;
|
|
388
|
+
END $$;
|
|
389
|
+
`);
|
|
390
|
+
await pool.query('CREATE INDEX IF NOT EXISTS idx_skills_project_id ON skills(project_id)');
|
|
391
|
+
await pool.query('CREATE INDEX IF NOT EXISTS idx_skills_name ON skills(name)');
|
|
392
|
+
await pool.query('CREATE INDEX IF NOT EXISTS idx_skills_source ON skills(source)');
|
|
393
|
+
// Migration: add invalidated_by column for soft-delete during consolidation
|
|
394
|
+
await pool.query(`
|
|
395
|
+
DO $$
|
|
396
|
+
BEGIN
|
|
397
|
+
IF NOT EXISTS (
|
|
398
|
+
SELECT 1 FROM information_schema.columns
|
|
399
|
+
WHERE table_name = 'memories' AND column_name = 'invalidated_by'
|
|
400
|
+
) THEN
|
|
401
|
+
ALTER TABLE memories ADD COLUMN invalidated_by INTEGER;
|
|
402
|
+
END IF;
|
|
403
|
+
END $$;
|
|
404
|
+
`);
|
|
405
|
+
await pool.query('CREATE INDEX IF NOT EXISTS idx_memories_invalidated_by ON memories(invalidated_by)');
|
|
406
|
+
// Migration: add correction_count and is_invariant for working memory pins
|
|
407
|
+
    await pool.query(`
      DO $$ BEGIN
        IF NOT EXISTS (
          SELECT 1 FROM information_schema.columns
          WHERE table_name = 'memories' AND column_name = 'correction_count'
        ) THEN
          ALTER TABLE memories ADD COLUMN correction_count INTEGER DEFAULT 0;
        END IF;
        IF NOT EXISTS (
          SELECT 1 FROM information_schema.columns
          WHERE table_name = 'memories' AND column_name = 'is_invariant'
        ) THEN
          ALTER TABLE memories ADD COLUMN is_invariant INTEGER DEFAULT 0;
        END IF;
      END $$;
    `);
    await pool.query('CREATE INDEX IF NOT EXISTS idx_memories_pinned ON memories(correction_count, is_invariant)');
    // Migration: add priority_score column for working memory ranking
    await pool.query(`
      DO $$ BEGIN
        IF NOT EXISTS (
          SELECT 1 FROM information_schema.columns
          WHERE table_name = 'memories' AND column_name = 'priority_score'
        ) THEN
          ALTER TABLE memories ADD COLUMN priority_score REAL DEFAULT NULL;
        END IF;
      END $$;
    `);
    await pool.query('CREATE INDEX IF NOT EXISTS idx_memories_priority ON memories(priority_score DESC)');
    // Learning deltas table for session progress tracking
    await pool.query(`
      CREATE TABLE IF NOT EXISTS learning_deltas (
        id SERIAL PRIMARY KEY,
        project_id TEXT,
        timestamp TIMESTAMPTZ NOT NULL,
        source TEXT NOT NULL,
        memories_before INTEGER NOT NULL DEFAULT 0,
        memories_after INTEGER NOT NULL DEFAULT 0,
        new_memories INTEGER NOT NULL DEFAULT 0,
        types_added JSONB,
        avg_quality REAL,
        created_at TIMESTAMPTZ DEFAULT NOW()
      )
    `);
    await pool.query('CREATE INDEX IF NOT EXISTS idx_learning_deltas_timestamp ON learning_deltas(timestamp)');
    await pool.query('CREATE INDEX IF NOT EXISTS idx_learning_deltas_source ON learning_deltas(source)');
    await pool.query('CREATE INDEX IF NOT EXISTS idx_learning_deltas_project_id ON learning_deltas(project_id)');
    // Web search history table
    await pool.query(`
      CREATE TABLE IF NOT EXISTS web_search_history (
        id SERIAL PRIMARY KEY,
        project_id TEXT,
        tool_name TEXT NOT NULL,
        model TEXT NOT NULL,
        query TEXT NOT NULL,
        prompt_tokens INTEGER NOT NULL DEFAULT 0,
        completion_tokens INTEGER NOT NULL DEFAULT 0,
        estimated_cost_usd REAL NOT NULL DEFAULT 0,
        citations_count INTEGER NOT NULL DEFAULT 0,
        has_reasoning BOOLEAN NOT NULL DEFAULT FALSE,
        response_length_chars INTEGER NOT NULL DEFAULT 0,
        created_at TIMESTAMPTZ DEFAULT NOW()
      )
    `);
    await pool.query('CREATE INDEX IF NOT EXISTS idx_wsh_project_id ON web_search_history(project_id)');
    await pool.query('CREATE INDEX IF NOT EXISTS idx_wsh_created ON web_search_history(created_at)');
    await pool.query('CREATE INDEX IF NOT EXISTS idx_wsh_tool ON web_search_history(tool_name)');
  }
  /**
   * Close all connections.
   */
  async close() {
    if (this.pool) {
      await this.pool.end();
      this.pool = null;
      this.initialized = false;
    }
  }
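  // --------------------------------------------------------------------------
  // Editor's illustrative sketch (not part of the published package): typical
  // lifecycle of this storage class. `storage` and the `embed()` helper are
  // hypothetical names; the method calls match the implementations in this file.
  //
  //   storage.projectId = 'my-project';            // scope all reads/writes
  //   const chunkEmbedding = await embed(chunkText);
  //   await storage.upsertDocument('src/app.ts', 0, chunkText, 1, 40,
  //     chunkEmbedding, 'App', 'class', 'class App { ... }');
  //   // ... searches, memory writes, etc. ...
  //   await storage.close();                       // drain the pg Pool
  // --------------------------------------------------------------------------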
|
|
485
|
+
  // ============================================================================
  // Document Operations
  // ============================================================================
  async upsertDocument(filePath, chunkIndex, content, startLine, endLine, embedding, symbolName, symbolType, signature) {
    if (!this.projectId) {
      throw new StorageError('Project ID must be set before upserting documents');
    }
    const pool = await this.getPool();
    const result = await pool.query(`INSERT INTO documents (project_id, file_path, chunk_index, content, start_line, end_line, embedding, symbol_name, symbol_type, signature, updated_at)
      VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, NOW())
      ON CONFLICT(project_id, file_path, chunk_index) DO UPDATE SET
        content = EXCLUDED.content,
        start_line = EXCLUDED.start_line,
        end_line = EXCLUDED.end_line,
        embedding = EXCLUDED.embedding,
        symbol_name = EXCLUDED.symbol_name,
        symbol_type = EXCLUDED.symbol_type,
        signature = EXCLUDED.signature,
        updated_at = NOW()
      RETURNING id`, [
      this.projectId,
      filePath,
      chunkIndex,
      content,
      startLine,
      endLine,
      toPgVector(embedding),
      symbolName ?? null,
      symbolType ?? null,
      signature ?? null,
    ]);
    return result.rows[0].id;
  }
  async upsertDocumentsBatch(documents) {
    if (documents.length === 0)
      return [];
    if (!this.projectId) {
      throw new StorageError('Project ID must be set before upserting documents');
    }
    const pool = await this.getPool();
    const client = await pool.connect();
    const ids = [];
    try {
      await client.query('BEGIN');
      for (const doc of documents) {
        const result = await client.query(`INSERT INTO documents (project_id, file_path, chunk_index, content, start_line, end_line, embedding, symbol_name, symbol_type, signature, updated_at)
          VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, NOW())
          ON CONFLICT(project_id, file_path, chunk_index) DO UPDATE SET
            content = EXCLUDED.content,
            start_line = EXCLUDED.start_line,
            end_line = EXCLUDED.end_line,
            embedding = EXCLUDED.embedding,
            symbol_name = EXCLUDED.symbol_name,
            symbol_type = EXCLUDED.symbol_type,
            signature = EXCLUDED.signature,
            updated_at = NOW()
          RETURNING id`, [
          this.projectId,
          doc.filePath,
          doc.chunkIndex,
          doc.content,
          doc.startLine,
          doc.endLine,
          toPgVector(doc.embedding),
          doc.symbolName ?? null,
          doc.symbolType ?? null,
          doc.signature ?? null,
        ]);
        ids.push(result.rows[0].id);
      }
      await client.query('COMMIT');
    }
    catch (e) {
      await client.query('ROLLBACK');
      throw e;
    }
    finally {
      client.release();
    }
    return ids;
  }
  async upsertDocumentsBatchWithHashes(documents) {
    if (documents.length === 0)
      return [];
    if (!this.projectId) {
      throw new StorageError('Project ID must be set before upserting documents');
    }
    const pool = await this.getPool();
    const client = await pool.connect();
    const ids = [];
    try {
      await client.query('BEGIN');
      const processedFiles = new Set();
      for (const doc of documents) {
        const result = await client.query(`INSERT INTO documents (project_id, file_path, chunk_index, content, start_line, end_line, embedding, symbol_name, symbol_type, signature, updated_at)
          VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, NOW())
          ON CONFLICT(project_id, file_path, chunk_index) DO UPDATE SET
            content = EXCLUDED.content,
            start_line = EXCLUDED.start_line,
            end_line = EXCLUDED.end_line,
            embedding = EXCLUDED.embedding,
            symbol_name = EXCLUDED.symbol_name,
            symbol_type = EXCLUDED.symbol_type,
            signature = EXCLUDED.signature,
            updated_at = NOW()
          RETURNING id`, [
          this.projectId,
          doc.filePath,
          doc.chunkIndex,
          doc.content,
          doc.startLine,
          doc.endLine,
          toPgVector(doc.embedding),
          doc.symbolName ?? null,
          doc.symbolType ?? null,
          doc.signature ?? null,
        ]);
        ids.push(result.rows[0].id);
        if (!processedFiles.has(doc.filePath)) {
          await client.query(`INSERT INTO file_hashes (project_id, file_path, content_hash, indexed_at)
            VALUES ($1, $2, $3, NOW())
            ON CONFLICT(project_id, file_path) DO UPDATE SET
              content_hash = EXCLUDED.content_hash,
              indexed_at = NOW()`, [this.projectId, doc.filePath, doc.hash]);
          processedFiles.add(doc.filePath);
        }
      }
      await client.query('COMMIT');
    }
    catch (e) {
      await client.query('ROLLBACK');
      throw e;
    }
    finally {
      client.release();
    }
    return ids;
  }
  async deleteDocumentsByPath(filePath) {
    if (!this.projectId) {
      throw new StorageError('Project ID must be set before deleting documents');
    }
    const pool = await this.getPool();
    const result = await pool.query('DELETE FROM documents WHERE LOWER(project_id) = $1 AND file_path = $2 RETURNING id', [this.projectId, filePath]);
    return result.rows.map((r) => r.id);
  }
  async searchDocuments(queryEmbedding, limit = 5, threshold = 0.5, options) {
    if (!this.projectId) {
      throw new StorageError('Project ID must be set before searching documents');
    }
    const pool = await this.getPool();
    // pgvector cosine distance: <=> returns distance (0 = identical, 2 = opposite)
    // similarity = 1 - distance/2 for normalized vectors
    // For our use: similarity = 1 - distance (since cosine distance from pgvector is 1-cosine_similarity)
    let whereExtra = '';
    const params = [toPgVector(queryEmbedding), this.projectId, threshold, limit];
    if (options?.codeOnly)
      whereExtra = " AND file_path LIKE 'code:%'";
    else if (options?.docsOnly)
      whereExtra = " AND file_path NOT LIKE 'code:%'";
    if (options?.symbolType) {
      params.push(options.symbolType);
      whereExtra += ` AND symbol_type = $${params.length}`;
    }
    const result = await pool.query(`SELECT file_path, content, start_line, end_line, symbol_name, symbol_type, signature,
        1 - (embedding <=> $1) as similarity
      FROM documents
      WHERE LOWER(project_id) = $2 AND 1 - (embedding <=> $1) >= $3${whereExtra}
      ORDER BY embedding <=> $1
      LIMIT $4`, params);
    return result.rows;
  }
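  // --------------------------------------------------------------------------
  // Editor's illustrative sketch (not part of the published package): semantic
  // code search through searchDocuments(). `embed()` is a hypothetical helper
  // returning the query embedding as number[]; the similarity column is
  // 1 - (pgvector cosine distance), so values near 1.0 mean a close match.
  //
  //   const hits = await storage.searchDocuments(await embed('parse CLI args'),
  //     5, 0.5, { codeOnly: true, symbolType: 'function' });
  //   for (const h of hits) {
  //     console.log(`${h.file_path}:${h.start_line}-${h.end_line}`,
  //       Number(h.similarity).toFixed(3));
  //   }
  // --------------------------------------------------------------------------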
|
|
657
|
+
  /**
   * Batch fetch documents by IDs (for Qdrant search → PG metadata pattern).
   * Optionally filter by code/doc type via file_path prefix.
   */
  async getDocumentsByIds(ids, options) {
    if (ids.length === 0)
      return [];
    const pool = await this.getPool();
    let query = `SELECT id, file_path, content, start_line, end_line FROM documents WHERE id = ANY($1)`;
    const params = [ids];
    if (options?.codeOnly) {
      query += ` AND file_path LIKE 'code:%'`;
    }
    else if (options?.docsOnly) {
      query += ` AND file_path NOT LIKE 'code:%'`;
    }
    const result = await pool.query(query, params);
    return result.rows;
  }
  /**
   * Batch fetch memories by IDs with optional SQL-level filters
   * (for Qdrant search → PG metadata pattern).
   */
  async getMemoriesByIds(ids, filters) {
    if (ids.length === 0)
      return [];
    const pool = await this.getPool();
    let query = `
      SELECT id, content, tags, source, type, quality_score, quality_factors,
             access_count, last_accessed, valid_from, valid_until, created_at
      FROM memories WHERE id = ANY($1)`;
    const params = [ids];
    let idx = 2;
    // Soft-delete filter (default: exclude invalidated)
    if (filters?.excludeInvalidated !== false) {
      query += ` AND invalidated_by IS NULL`;
    }
    // Temporal validity
    if (!filters?.includeExpired && filters?.temporalAsOf) {
      const asOf = filters.temporalAsOf.toISOString();
      query += ` AND (valid_from IS NULL OR valid_from <= $${idx})`;
      params.push(asOf);
      idx++;
      query += ` AND (valid_until IS NULL OR valid_until > $${idx})`;
      params.push(asOf);
      idx++;
    }
    // Since filter
    if (filters?.since) {
      query += ` AND created_at >= $${idx}`;
      params.push(filters.since.toISOString());
      idx++;
    }
    // Created-before filter (for point-in-time queries)
    if (filters?.createdBefore) {
      query += ` AND created_at <= $${idx}`;
      params.push(filters.createdBefore.toISOString());
      idx++;
    }
    const result = await pool.query(query, params);
    return result.rows.map((row) => ({
      id: row.id,
      content: row.content,
      tags: row.tags ? (typeof row.tags === 'string' ? JSON.parse(row.tags) : row.tags) : [],
      source: row.source,
      type: row.type,
      quality_score: row.quality_score,
      quality_factors: row.quality_factors
        ? typeof row.quality_factors === 'string'
          ? JSON.parse(row.quality_factors)
          : row.quality_factors
        : null,
      access_count: row.access_count ?? 0,
      last_accessed: row.last_accessed,
      correction_count: row.correction_count ?? 0,
      is_invariant: !!row.is_invariant,
      priority_score: row.priority_score ?? null,
      valid_from: row.valid_from,
      valid_until: row.valid_until,
      created_at: row.created_at,
    }));
  }
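  // --------------------------------------------------------------------------
  // Editor's illustrative sketch (not part of the published package): the
  // "Qdrant search → PG metadata" pattern the JSDoc above refers to. The
  // `qdrant` client and its search call are hypothetical; only the ID-based
  // hydration through getMemoriesByIds() comes from this file.
  //
  //   const points = await qdrant.search('memories', { vector, limit: 20 });   // hypothetical
  //   const ids = points.map((p) => p.id);
  //   const rows = await storage.getMemoriesByIds(ids, {
  //     temporalAsOf: new Date(),      // drop rows outside their validity window
  //     excludeInvalidated: true,      // default: hide soft-deleted memories
  //   });
  // --------------------------------------------------------------------------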
|
|
739
|
+
  async getDocumentStats() {
    const pool = await this.getPool();
    // If project_id is set, get stats for that project; otherwise get global stats
    if (this.projectId) {
      const totalDocs = await pool.query('SELECT COUNT(*) as count FROM documents WHERE LOWER(project_id) = $1', [this.projectId]);
      const totalFiles = await pool.query('SELECT COUNT(DISTINCT file_path) as count FROM documents WHERE LOWER(project_id) = $1', [this.projectId]);
      const lastIndexed = await pool.query('SELECT MAX(updated_at) as last FROM documents WHERE LOWER(project_id) = $1', [this.projectId]);
      return {
        total_documents: parseInt(totalDocs.rows[0].count),
        total_files: parseInt(totalFiles.rows[0].count),
        last_indexed: lastIndexed.rows[0].last,
      };
    }
    // No project set = aggregate stats across all projects
    const totalDocs = await pool.query('SELECT COUNT(*) as count FROM documents');
    const totalFiles = await pool.query('SELECT COUNT(DISTINCT file_path) as count FROM documents');
    const lastIndexed = await pool.query('SELECT MAX(updated_at) as last FROM documents');
    return {
      total_documents: parseInt(totalDocs.rows[0].count),
      total_files: parseInt(totalFiles.rows[0].count),
      last_indexed: lastIndexed.rows[0].last,
    };
  }
  async clearDocuments() {
    const pool = await this.getPool();
    if (this.projectId) {
      // Clear only current project's documents
      await pool.query('DELETE FROM documents WHERE LOWER(project_id) = $1', [this.projectId]);
      await pool.query('DELETE FROM file_hashes WHERE LOWER(project_id) = $1', [this.projectId]);
    }
    else {
      // No project set = clear ALL documents (dangerous!)
      await pool.query('DELETE FROM documents');
      await pool.query('DELETE FROM file_hashes');
    }
    await pool.query("DELETE FROM metadata WHERE key = 'embedding_model'");
  }
  // ============================================================================
  // File Hashes
  // ============================================================================
  async getFileHash(filePath) {
    if (!this.projectId) {
      throw new StorageError('Project ID must be set before getting file hash');
    }
    const pool = await this.getPool();
    const result = await pool.query('SELECT content_hash FROM file_hashes WHERE LOWER(project_id) = $1 AND file_path = $2', [this.projectId, filePath]);
    return result.rows[0]?.content_hash ?? null;
  }
  async setFileHash(filePath, hash) {
    if (!this.projectId) {
      throw new StorageError('Project ID must be set before setting file hash');
    }
    const pool = await this.getPool();
    await pool.query(`INSERT INTO file_hashes (project_id, file_path, content_hash, indexed_at)
      VALUES ($1, $2, $3, NOW())
      ON CONFLICT(project_id, file_path) DO UPDATE SET
        content_hash = EXCLUDED.content_hash,
        indexed_at = NOW()`, [this.projectId, filePath, hash]);
  }
  async deleteFileHash(filePath) {
    if (!this.projectId) {
      throw new StorageError('Project ID must be set before deleting file hash');
    }
    const pool = await this.getPool();
    await pool.query('DELETE FROM file_hashes WHERE LOWER(project_id) = $1 AND file_path = $2', [
      this.projectId,
      filePath,
    ]);
  }
  async getAllFileHashes() {
    if (!this.projectId) {
      throw new StorageError('Project ID must be set before getting all file hashes');
    }
    const pool = await this.getPool();
    const result = await pool.query('SELECT file_path, content_hash FROM file_hashes WHERE LOWER(project_id) = $1', [this.projectId]);
    return new Map(result.rows.map((r) => [r.file_path, r.content_hash]));
  }
  async getAllFileHashesWithTimestamps() {
    if (!this.projectId) {
      throw new StorageError('Project ID must be set before getting file hashes');
    }
    const pool = await this.getPool();
    const result = await pool.query('SELECT file_path, content_hash, indexed_at::text FROM file_hashes WHERE LOWER(project_id) = $1', [this.projectId]);
    return result.rows;
  }
  // ============================================================================
  // Memory Operations
  // ============================================================================
  async saveMemory(content, embedding, tags = [], source, type = 'observation', qualityScore, qualityFactors, validFrom, validUntil, isGlobal = false) {
    const pool = await this.getPool();
    const projectId = isGlobal ? null : this.projectId;
    const result = await pool.query(`INSERT INTO memories (project_id, content, tags, source, type, quality_score, quality_factors, embedding, valid_from, valid_until)
      VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10)
      RETURNING id`, [
      projectId,
      content,
      tags.length > 0 ? JSON.stringify(tags) : null,
      source ?? null,
      type,
      qualityScore ?? null,
      qualityFactors ? JSON.stringify(qualityFactors) : null,
      toPgVector(embedding),
      validFrom ?? null,
      validUntil ?? null,
    ]);
    return result.rows[0].id;
  }
  async searchMemories(queryEmbedding, limit = 5, threshold = 0.3, tags, since, options) {
    const pool = await this.getPool();
    const now = options?.asOfDate ?? new Date();
    const includeExpired = options?.includeExpired ?? false;
    const includeGlobal = options?.includeGlobal ?? true;
    let query = `
      SELECT id, project_id, content, tags, source, type, quality_score, quality_factors,
             access_count, last_accessed, valid_from, valid_until, created_at,
             1 - (embedding <=> $1) as similarity
      FROM memories
      WHERE 1 - (embedding <=> $1) >= $2
        AND invalidated_by IS NULL
    `;
    const params = [toPgVector(queryEmbedding), threshold];
    let paramIndex = 3;
    // Filter by project_id: include current project AND optionally global (NULL)
    if (this.projectId) {
      if (includeGlobal) {
        query += ` AND (LOWER(project_id) = $${paramIndex} OR project_id IS NULL)`;
      }
      else {
        query += ` AND LOWER(project_id) = $${paramIndex}`;
      }
      params.push(this.projectId);
      paramIndex++;
    }
    else {
      // No project set = only return global memories
      query += ` AND project_id IS NULL`;
    }
    if (since) {
      query += ` AND created_at >= $${paramIndex}`;
      params.push(since.toISOString());
      paramIndex++;
    }
    if (!includeExpired) {
      query += ` AND (valid_from IS NULL OR valid_from <= $${paramIndex})`;
      params.push(now.toISOString());
      paramIndex++;
      query += ` AND (valid_until IS NULL OR valid_until > $${paramIndex})`;
      params.push(now.toISOString());
      paramIndex++;
    }
    query += ` ORDER BY embedding <=> $1 LIMIT $${paramIndex}`;
    params.push(limit);
    const result = await pool.query(query, params);
    let memories = result.rows.map((row) => ({
      id: row.id,
      content: row.content,
      tags: row.tags ? (typeof row.tags === 'string' ? JSON.parse(row.tags) : row.tags) : [],
      source: row.source,
      type: row.type,
      quality_score: row.quality_score,
      quality_factors: row.quality_factors
        ? typeof row.quality_factors === 'string'
          ? JSON.parse(row.quality_factors)
          : row.quality_factors
        : null,
      access_count: row.access_count ?? 0,
      last_accessed: row.last_accessed,
      correction_count: row.correction_count ?? 0,
      is_invariant: !!row.is_invariant,
      priority_score: row.priority_score ?? null,
      valid_from: row.valid_from,
      valid_until: row.valid_until,
      created_at: row.created_at,
      similarity: parseFloat(row.similarity),
    }));
    // Filter by tags if specified
    if (tags && tags.length > 0) {
      memories = memories.filter((m) => tags.some((t) => m.tags.some((rt) => rt.toLowerCase().includes(t.toLowerCase()))));
    }
    return memories;
  }
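  // --------------------------------------------------------------------------
  // Editor's illustrative sketch (not part of the published package): project +
  // global memory recall with a recency cutoff. `embed()` is a hypothetical
  // embedding helper; the arguments mirror the searchMemories() parameters above.
  //
  //   const since = new Date(Date.now() - 7 * 24 * 3600 * 1000);   // last 7 days
  //   const memories = await storage.searchMemories(await embed('deployment steps'),
  //     10, 0.3, ['deploy'], since, { includeGlobal: true, includeExpired: false });
  // --------------------------------------------------------------------------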
|
|
920
|
+
  async getMemoryById(id) {
    const pool = await this.getPool();
    const result = await pool.query(`SELECT id, content, tags, source, type, quality_score, quality_factors,
        access_count, last_accessed, valid_from, valid_until,
        correction_count, is_invariant, priority_score, created_at
      FROM memories WHERE id = $1`, [id]);
    if (result.rows.length === 0)
      return null;
    const row = result.rows[0];
    return {
      id: row.id,
      content: row.content,
      tags: row.tags ? (typeof row.tags === 'string' ? JSON.parse(row.tags) : row.tags) : [],
      source: row.source,
      type: row.type,
      quality_score: row.quality_score,
      quality_factors: row.quality_factors
        ? typeof row.quality_factors === 'string'
          ? JSON.parse(row.quality_factors)
          : row.quality_factors
        : null,
      access_count: row.access_count ?? 0,
      last_accessed: row.last_accessed,
      correction_count: row.correction_count ?? 0,
      is_invariant: !!row.is_invariant,
      priority_score: row.priority_score ?? null,
      valid_from: row.valid_from,
      valid_until: row.valid_until,
      created_at: row.created_at,
    };
  }
  async deleteMemory(id) {
    const pool = await this.getPool();
    // Only delete memories belonging to current project or global memories
    const result = await pool.query('DELETE FROM memories WHERE id = $1 AND (LOWER(project_id) = $2 OR project_id IS NULL)', [id, this.projectId]);
    return (result.rowCount ?? 0) > 0;
  }
  /**
   * Soft-invalidate a memory (mark as superseded by another memory).
   */
  async invalidateMemory(memoryId, supersededById) {
    const pool = await this.getPool();
    const result = await pool.query(`UPDATE memories
      SET valid_until = NOW(), invalidated_by = $1
      WHERE id = $2 AND invalidated_by IS NULL`, [supersededById, memoryId]);
    return (result.rowCount ?? 0) > 0;
  }
  /**
   * Restore a soft-invalidated memory.
   */
  async restoreInvalidatedMemory(memoryId) {
    const pool = await this.getPool();
    const result = await pool.query(`UPDATE memories
      SET valid_until = NULL, invalidated_by = NULL
      WHERE id = $1 AND invalidated_by IS NOT NULL`, [memoryId]);
    return (result.rowCount ?? 0) > 0;
  }
  async getRecentMemories(limit = 10, includeGlobal = true) {
    const pool = await this.getPool();
    let query = `
      SELECT id, project_id, content, tags, source, type, quality_score, quality_factors,
             access_count, last_accessed, valid_from, valid_until,
             correction_count, is_invariant, priority_score, created_at
      FROM memories
    `;
    const params = [];
    let paramIndex = 1;
    // Filter by project_id and exclude soft-deleted
    if (this.projectId) {
      if (includeGlobal) {
        query += ` WHERE (LOWER(project_id) = $${paramIndex} OR project_id IS NULL) AND invalidated_by IS NULL`;
      }
      else {
        query += ` WHERE LOWER(project_id) = $${paramIndex} AND invalidated_by IS NULL`;
      }
      params.push(this.projectId);
      paramIndex++;
    }
    else {
      query += ` WHERE project_id IS NULL AND invalidated_by IS NULL`;
    }
    query += ` ORDER BY created_at DESC LIMIT $${paramIndex}`;
    params.push(limit);
    const result = await pool.query(query, params);
    return result.rows.map((row) => ({
      id: row.id,
      content: row.content,
      tags: row.tags ? (typeof row.tags === 'string' ? JSON.parse(row.tags) : row.tags) : [],
      source: row.source,
      type: row.type,
      quality_score: row.quality_score,
      quality_factors: row.quality_factors
        ? typeof row.quality_factors === 'string'
          ? JSON.parse(row.quality_factors)
          : row.quality_factors
        : null,
      access_count: row.access_count ?? 0,
      last_accessed: row.last_accessed,
      valid_from: row.valid_from,
      valid_until: row.valid_until,
      correction_count: row.correction_count ?? 0,
      is_invariant: !!row.is_invariant,
      priority_score: row.priority_score ?? null,
      created_at: row.created_at,
    }));
  }
  async incrementCorrectionCount(memoryId) {
    const pool = await this.getPool();
    await pool.query(`UPDATE memories
      SET correction_count = COALESCE(correction_count, 0) + 1
      WHERE id = $1`, [memoryId]);
  }
  async setMemoryInvariant(memoryId, isInvariant) {
    const pool = await this.getPool();
    await pool.query(`UPDATE memories SET is_invariant = $1 WHERE id = $2`, [
      isInvariant ? 1 : 0,
      memoryId,
    ]);
  }
  async updatePriorityScore(memoryId, score) {
    const pool = await this.getPool();
    await pool.query(`UPDATE memories SET priority_score = $1 WHERE id = $2`, [score, memoryId]);
  }
  async getPinnedMemories(threshold = 2) {
    const pool = await this.getPool();
    let query = `
      SELECT id, content, tags, source, type, quality_score, quality_factors,
             access_count, last_accessed, valid_from, valid_until,
             correction_count, is_invariant, priority_score, created_at
      FROM memories
    `;
    const params = [];
    let paramIndex = 1;
    if (this.projectId) {
      query += ` WHERE (LOWER(project_id) = $${paramIndex} OR project_id IS NULL) AND invalidated_by IS NULL`;
      params.push(this.projectId);
      paramIndex++;
    }
    else {
      query += ` WHERE project_id IS NULL AND invalidated_by IS NULL`;
    }
    query += ` AND (correction_count >= $${paramIndex} OR is_invariant = 1)`;
    params.push(threshold);
    paramIndex++;
    query += ` ORDER BY is_invariant DESC, correction_count DESC, quality_score DESC`;
    const result = await pool.query(query, params);
    return result.rows.map((row) => ({
      id: row.id,
      content: row.content,
      tags: row.tags ? (typeof row.tags === 'string' ? JSON.parse(row.tags) : row.tags) : [],
      source: row.source,
      type: row.type,
      quality_score: row.quality_score,
      quality_factors: row.quality_factors
        ? typeof row.quality_factors === 'string'
          ? JSON.parse(row.quality_factors)
          : row.quality_factors
        : null,
      access_count: row.access_count ?? 0,
      last_accessed: row.last_accessed,
      valid_from: row.valid_from,
      valid_until: row.valid_until,
      correction_count: row.correction_count ?? 0,
      is_invariant: !!row.is_invariant,
      priority_score: row.priority_score ?? null,
      created_at: row.created_at,
    }));
  }
  async incrementMemoryAccess(memoryId, weight = 1.0) {
    const pool = await this.getPool();
    await pool.query(`UPDATE memories
      SET access_count = COALESCE(access_count, 0) + $1,
          last_accessed = NOW()
      WHERE id = $2`, [weight, memoryId]);
  }
  // ============================================================================
  // Memory Links
  // ============================================================================
  async createMemoryLink(sourceId, targetId, relation = 'related', weight = 1.0, validFrom, validUntil) {
    const pool = await this.getPool();
    // Validate that both memories belong to current project (case-insensitive for Windows paths)
    const validation = await pool.query(`SELECT COUNT(*) as count FROM memories
      WHERE id IN ($1, $2) AND (LOWER(project_id) = $3 OR project_id IS NULL)`, [sourceId, targetId, this.projectId]);
    if (parseInt(validation.rows[0].count) !== 2) {
      throw new StorageError('Cannot link memories from different projects');
    }
    const result = await pool.query(`INSERT INTO memory_links (source_id, target_id, relation, weight, valid_from, valid_until)
      VALUES ($1, $2, $3, $4, $5, $6)
      ON CONFLICT (source_id, target_id, relation) DO NOTHING
      RETURNING id`, [sourceId, targetId, relation, weight, validFrom ?? null, validUntil ?? null]);
    if (result.rows.length > 0) {
      return { id: result.rows[0].id, created: true };
    }
    // Link already existed — fetch its id
    const existing = await pool.query('SELECT id FROM memory_links WHERE source_id = $1 AND target_id = $2 AND relation = $3', [sourceId, targetId, relation]);
    return { id: existing.rows[0].id, created: false };
  }
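  // --------------------------------------------------------------------------
  // Editor's illustrative sketch (not part of the published package): linking
  // two memories and reading the link back. The IDs are hypothetical; note that
  // createMemoryLink() is idempotent per (source, target, relation) because of
  // the ON CONFLICT ... DO NOTHING above, and it reports whether a row was created.
  //
  //   const { id, created } = await storage.createMemoryLink(12, 34, 'supersedes', 0.9);
  //   const { outgoing, incoming } = await storage.getMemoryLinks(12);
  // --------------------------------------------------------------------------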
|
|
1117
|
+
  async deleteMemoryLink(sourceId, targetId, relation) {
    const pool = await this.getPool();
    // Only delete links where both memories belong to current project
    if (relation) {
      const result = await pool.query(`DELETE FROM memory_links ml
        USING memories m1, memories m2
        WHERE ml.source_id = m1.id AND ml.target_id = m2.id
          AND ml.source_id = $1 AND ml.target_id = $2 AND ml.relation = $3
          AND (LOWER(m1.project_id) = $4 OR m1.project_id IS NULL)
          AND (LOWER(m2.project_id) = $4 OR m2.project_id IS NULL)`, [sourceId, targetId, relation, this.projectId]);
      return (result.rowCount ?? 0) > 0;
    }
    else {
      const result = await pool.query(`DELETE FROM memory_links ml
        USING memories m1, memories m2
        WHERE ml.source_id = m1.id AND ml.target_id = m2.id
          AND ml.source_id = $1 AND ml.target_id = $2
          AND (LOWER(m1.project_id) = $3 OR m1.project_id IS NULL)
          AND (LOWER(m2.project_id) = $3 OR m2.project_id IS NULL)`, [sourceId, targetId, this.projectId]);
      return (result.rowCount ?? 0) > 0;
    }
  }
  async getMemoryLinks(memoryId) {
    const pool = await this.getPool();
    // Only return links where both source and target memories belong to current project
    const outgoing = await pool.query(`SELECT ml.* FROM memory_links ml
      JOIN memories m ON ml.target_id = m.id
      WHERE ml.source_id = $1 AND (LOWER(m.project_id) = $2 OR m.project_id IS NULL)`, [memoryId, this.projectId]);
    const incoming = await pool.query(`SELECT ml.* FROM memory_links ml
      JOIN memories m ON ml.source_id = m.id
      WHERE ml.target_id = $1 AND (LOWER(m.project_id) = $2 OR m.project_id IS NULL)`, [memoryId, this.projectId]);
    return {
      outgoing: outgoing.rows,
      incoming: incoming.rows,
    };
  }
  async getGraphStats() {
    const pool = await this.getPool();
    // Only count memories and links for current project
    const totalMemories = await pool.query('SELECT COUNT(*) as count FROM memories WHERE LOWER(project_id) = $1 OR project_id IS NULL', [this.projectId]);
    // Count links only between memories of current project
    const totalLinks = await pool.query(`SELECT COUNT(*) as count FROM memory_links ml
      JOIN memories m1 ON ml.source_id = m1.id
      JOIN memories m2 ON ml.target_id = m2.id
      WHERE (LOWER(m1.project_id) = $1 OR m1.project_id IS NULL)
        AND (LOWER(m2.project_id) = $1 OR m2.project_id IS NULL)`, [this.projectId]);
    const isolated = await pool.query(`SELECT COUNT(*) as count FROM memories m
      WHERE (LOWER(m.project_id) = $1 OR m.project_id IS NULL)
        AND NOT EXISTS (
          SELECT 1 FROM memory_links ml
          JOIN memories m2 ON (ml.source_id = m2.id OR ml.target_id = m2.id)
          WHERE (ml.source_id = m.id OR ml.target_id = m.id)
            AND (LOWER(m2.project_id) = $1 OR m2.project_id IS NULL)
        )`, [this.projectId]);
    const relations = await pool.query(`SELECT ml.relation, COUNT(*) as count FROM memory_links ml
      JOIN memories m1 ON ml.source_id = m1.id
      JOIN memories m2 ON ml.target_id = m2.id
      WHERE (LOWER(m1.project_id) = $1 OR m1.project_id IS NULL)
        AND (LOWER(m2.project_id) = $1 OR m2.project_id IS NULL)
      GROUP BY ml.relation`, [this.projectId]);
    const relationsMap = {};
    for (const row of relations.rows) {
      relationsMap[row.relation] = parseInt(row.count);
    }
    const total = parseInt(totalMemories.rows[0].count);
    const links = parseInt(totalLinks.rows[0].count);
    return {
      total_memories: total,
      total_links: links,
      avg_links_per_memory: total > 0 ? links / total : 0,
      isolated_memories: parseInt(isolated.rows[0].count),
      relations: relationsMap,
    };
  }
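  // --------------------------------------------------------------------------
  // Editor's illustrative sketch (not part of the published package): reading
  // the graph health numbers computed above. Field names match the return value
  // of getGraphStats(); the reporting format itself is hypothetical.
  //
  //   const g = await storage.getGraphStats();
  //   console.log(`${g.total_memories} memories, ${g.total_links} links`,
  //     `(${g.avg_links_per_memory.toFixed(2)} avg), ${g.isolated_memories} isolated`);
  // --------------------------------------------------------------------------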
|
|
1191
|
+
  // ============================================================================
  // Token Stats
  // ============================================================================
  async recordTokenStat(record) {
    const pool = await this.getPool();
    await pool.query(`INSERT INTO token_stats (project_id, event_type, query, returned_tokens, full_source_tokens, savings_tokens, files_count, chunks_count, model, estimated_cost)
      VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10)`, [
      this.projectId, // Can be NULL for global stats
      record.event_type,
      record.query ?? null,
      record.returned_tokens,
      record.full_source_tokens,
      record.savings_tokens,
      record.files_count ?? null,
      record.chunks_count ?? null,
      record.model ?? null,
      record.estimated_cost ?? 0,
    ]);
  }
  async getTokenStatsSummary() {
    const pool = await this.getPool();
    // If project_id is set, get stats for that project; otherwise get all stats
    const whereClause = this.projectId ? 'WHERE LOWER(project_id) = $1' : '';
    const params = this.projectId ? [this.projectId] : [];
    const result = await pool.query(`
      SELECT
        COUNT(*) as total_queries,
        COALESCE(SUM(returned_tokens), 0) as total_returned_tokens,
        COALESCE(SUM(full_source_tokens), 0) as total_full_source_tokens,
        COALESCE(SUM(savings_tokens), 0) as total_savings_tokens,
        COALESCE(SUM(estimated_cost), 0) as total_estimated_cost
      FROM token_stats
      ${whereClause}
    `, params);
    const row = result.rows[0];
    return {
      total_queries: parseInt(row.total_queries),
      total_returned_tokens: parseInt(row.total_returned_tokens),
      total_full_source_tokens: parseInt(row.total_full_source_tokens),
      total_savings_tokens: parseInt(row.total_savings_tokens),
      total_estimated_cost: parseFloat(row.total_estimated_cost),
    };
  }
  // ============================================================================
  // Metadata
  // ============================================================================
  async getMetadata(key) {
    const pool = await this.getPool();
    const result = await pool.query('SELECT value FROM metadata WHERE key = $1', [key]);
    return result.rows[0]?.value ?? null;
  }
  async setMetadata(key, value) {
    const pool = await this.getPool();
    await pool.query(`INSERT INTO metadata (key, value) VALUES ($1, $2)
      ON CONFLICT(key) DO UPDATE SET value = EXCLUDED.value`, [key, value]);
  }
  // ============================================================================
  // Global Memory Operations (project_id = NULL)
  // ============================================================================
  /**
   * Save a global memory (shared across all projects).
   */
  async saveGlobalMemory(content, embedding, tags = [], source, type = 'observation', qualityScore, qualityFactors) {
    return this.saveMemory(content, embedding, tags, source, type, qualityScore, qualityFactors, undefined, // validFrom
    undefined, // validUntil
    true // isGlobal
    );
  }
|
|
1259
|
+
  /**
   * Search global memories only.
   */
  async searchGlobalMemories(queryEmbedding, limit = 5, threshold = 0.3, tags) {
    const pool = await this.getPool();
    const query = `
      SELECT id, project_id, content, tags, source, type, quality_score, quality_factors,
             access_count, last_accessed, valid_from, valid_until, created_at,
             1 - (embedding <=> $1) as similarity
      FROM memories
      WHERE 1 - (embedding <=> $1) >= $2
        AND project_id IS NULL
        AND invalidated_by IS NULL
      ORDER BY embedding <=> $1
      LIMIT $3
    `;
    const params = [toPgVector(queryEmbedding), threshold, limit];
    const result = await pool.query(query, params);
    let memories = result.rows.map((row) => ({
      id: row.id,
      content: row.content,
      tags: row.tags ? (typeof row.tags === 'string' ? JSON.parse(row.tags) : row.tags) : [],
      source: row.source,
      type: row.type,
      quality_score: row.quality_score,
      quality_factors: row.quality_factors
        ? typeof row.quality_factors === 'string'
          ? JSON.parse(row.quality_factors)
          : row.quality_factors
        : null,
      access_count: row.access_count ?? 0,
      last_accessed: row.last_accessed,
      correction_count: row.correction_count ?? 0,
      is_invariant: !!row.is_invariant,
      priority_score: row.priority_score ?? null,
      valid_from: row.valid_from,
      valid_until: row.valid_until,
      created_at: row.created_at,
      similarity: parseFloat(row.similarity),
    }));
    // Filter by tags if specified
    if (tags && tags.length > 0) {
      memories = memories.filter((m) => tags.some((t) => m.tags.some((rt) => rt.toLowerCase().includes(t.toLowerCase()))));
    }
    return memories;
  }
  /**
   * Get recent global memories.
   */
  async getRecentGlobalMemories(limit = 10) {
    const pool = await this.getPool();
    const result = await pool.query(`SELECT id, project_id, content, tags, source, type, quality_score, quality_factors,
        access_count, last_accessed, valid_from, valid_until, created_at
      FROM memories
      WHERE project_id IS NULL
        AND invalidated_by IS NULL
      ORDER BY created_at DESC
      LIMIT $1`, [limit]);
    return result.rows.map((row) => ({
      id: row.id,
      content: row.content,
      tags: row.tags ? (typeof row.tags === 'string' ? JSON.parse(row.tags) : row.tags) : [],
      source: row.source,
      type: row.type,
      quality_score: row.quality_score,
      quality_factors: row.quality_factors
        ? typeof row.quality_factors === 'string'
          ? JSON.parse(row.quality_factors)
          : row.quality_factors
        : null,
      access_count: row.access_count ?? 0,
      last_accessed: row.last_accessed,
      correction_count: row.correction_count ?? 0,
      is_invariant: !!row.is_invariant,
      priority_score: row.priority_score ?? null,
      valid_from: row.valid_from,
      valid_until: row.valid_until,
      created_at: row.created_at,
    }));
  }
  /**
   * Delete a global memory.
   */
  async deleteGlobalMemory(id) {
    const pool = await this.getPool();
    const result = await pool.query('DELETE FROM memories WHERE id = $1 AND project_id IS NULL', [
      id,
    ]);
    return (result.rowCount ?? 0) > 0;
  }
  /**
   * Get global memory statistics.
   */
  async getGlobalMemoryStats() {
    const pool = await this.getPool();
    const total = await pool.query('SELECT COUNT(*) as count FROM memories WHERE project_id IS NULL');
    const byType = await pool.query('SELECT type, COUNT(*) as count FROM memories WHERE project_id IS NULL GROUP BY type');
    const byQuality = await pool.query(`
      SELECT
        CASE
          WHEN quality_score >= 0.7 THEN 'high'
          WHEN quality_score >= 0.4 THEN 'medium'
          WHEN quality_score IS NOT NULL THEN 'low'
          ELSE 'unscored'
        END as bucket,
        COUNT(*) as count
      FROM memories
      WHERE project_id IS NULL
      GROUP BY bucket
    `);
    const typeMap = {};
    for (const row of byType.rows) {
      typeMap[row.type || 'observation'] = parseInt(row.count);
    }
    const qualityMap = { high: 0, medium: 0, low: 0, unscored: 0 };
    for (const row of byQuality.rows) {
      qualityMap[row.bucket] = parseInt(row.count);
    }
    return {
      total: parseInt(total.rows[0].count),
      by_type: typeMap,
      by_quality: qualityMap,
    };
  }
  // ============================================================================
  // Skills Operations
  // ============================================================================
  /**
   * Upsert a skill (local or Skyll-cached).
   * Local skills use project_id, Skyll skills use project_id = NULL (global cache).
   */
  async upsertSkill(skill) {
    const pool = await this.getPool();
    // Local skills are project-scoped, Skyll skills are global (project_id = NULL)
    const projectId = skill.source === 'local' ? this.projectId : null;
    const result = await pool.query(`INSERT INTO skills (project_id, name, description, source, path, content, embedding, skyll_id, cached_at, cache_expires, created_at, updated_at)
      VALUES ($1, $2, $3, $4, $5, $6, $7, $8, NOW(), $9, NOW(), NOW())
      ON CONFLICT(project_id, name) DO UPDATE SET
        description = EXCLUDED.description,
        path = EXCLUDED.path,
        content = EXCLUDED.content,
        embedding = EXCLUDED.embedding,
        skyll_id = EXCLUDED.skyll_id,
        cached_at = CASE WHEN EXCLUDED.source = 'skyll' THEN NOW() ELSE skills.cached_at END,
        cache_expires = EXCLUDED.cache_expires,
        updated_at = NOW()
      RETURNING id`, [
      projectId,
      skill.name,
      skill.description,
      skill.source,
      skill.path ?? null,
      skill.content ?? null,
      skill.embedding ? toPgVector(skill.embedding) : null,
      skill.skyllId ?? null,
      skill.cacheExpires ?? null,
    ]);
    return result.rows[0].id;
  }
  /**
   * Get all skills for the current project (includes local + global Skyll cache).
   */
  async getAllSkills() {
    const pool = await this.getPool();
    // Include both project-specific local skills AND global Skyll cached skills
    const result = await pool.query(`SELECT id, name, description, source, path, content, skyll_id, usage_count, last_used
      FROM skills
      WHERE LOWER(project_id) = $1 OR project_id IS NULL
      ORDER BY usage_count DESC, updated_at DESC`, [this.projectId]);
    return result.rows.map((row) => ({
      id: row.id,
      name: row.name,
      description: row.description,
      source: row.source,
      path: row.path,
      content: row.content,
      skyllId: row.skyll_id,
      usageCount: row.usage_count ?? 0,
      lastUsed: row.last_used,
    }));
  }
  /**
   * Search skills by name or description.
   */
  async searchSkills(query, limit = 10) {
    const pool = await this.getPool();
    const result = await pool.query(`SELECT id, name, description, source, path, usage_count
      FROM skills
      WHERE (LOWER(project_id) = $1 OR project_id IS NULL)
        AND (name ILIKE $2 OR description ILIKE $2)
      ORDER BY usage_count DESC, updated_at DESC
      LIMIT $3`, [this.projectId, `%${query}%`, limit]);
    return result.rows.map((row) => ({
      id: row.id,
      name: row.name,
      description: row.description,
      source: row.source,
      path: row.path,
      usageCount: row.usage_count ?? 0,
    }));
  }
  /**
   * Get a skill by name.
   */
  async getSkillByName(name) {
    const pool = await this.getPool();
    const result = await pool.query(`SELECT id, name, description, source, path, content, skyll_id
      FROM skills
      WHERE name = $1 AND (LOWER(project_id) = $2 OR project_id IS NULL)
      LIMIT 1`, [name, this.projectId]);
    if (result.rows.length === 0)
      return null;
    const row = result.rows[0];
    return {
      id: row.id,
      name: row.name,
      description: row.description,
      source: row.source,
      path: row.path,
      content: row.content,
      skyllId: row.skyll_id,
    };
  }
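  // --------------------------------------------------------------------------
  // Editor's illustrative sketch (not part of the published package): resolving
  // a skill by name, falling back to a fuzzy search, and recording the use. The
  // skill name is hypothetical; the calls match the methods above and below.
  //
  //   let skill = await storage.getSkillByName('release-checklist');
  //   if (!skill) {
  //     [skill] = await storage.searchSkills('release', 1);
  //   }
  //   if (skill) {
  //     await storage.trackSkillUsage(skill.name);
  //   }
  // --------------------------------------------------------------------------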
|
|
1482
|
+
  /**
   * Track skill usage (increment usage count).
   */
  async trackSkillUsage(name) {
    const pool = await this.getPool();
    await pool.query(`UPDATE skills SET usage_count = usage_count + 1, last_used = NOW()
      WHERE name = $1 AND (LOWER(project_id) = $2 OR project_id IS NULL)`, [name, this.projectId]);
  }
  /**
   * Delete a skill by name.
   */
  async deleteSkill(name) {
    const pool = await this.getPool();
    const result = await pool.query('DELETE FROM skills WHERE name = $1 AND LOWER(project_id) = $2', [name, this.projectId]);
    return (result.rowCount ?? 0) > 0;
  }
  /**
   * Clear expired Skyll cache entries.
   */
  async clearExpiredSkyllCache() {
    const pool = await this.getPool();
    const result = await pool.query(`DELETE FROM skills WHERE source = 'skyll' AND cache_expires < NOW()`);
    return result.rowCount ?? 0;
  }
  /**
   * Get cached Skyll skill by ID.
   */
  async getCachedSkyllSkill(skyllId) {
    const pool = await this.getPool();
    const result = await pool.query(`SELECT id, name, description, content
      FROM skills
      WHERE skyll_id = $1 AND cache_expires > NOW()`, [skyllId]);
    if (result.rows.length === 0)
      return null;
    const row = result.rows[0];
    return {
      id: row.id,
      name: row.name,
      description: row.description,
      content: row.content,
    };
  }
  /**
   * Get Skyll cache status.
   *
   * Note: Skyll cached skills are INTENTIONALLY global (not project-scoped).
   * This is because Skyll marketplace skills are shared across all projects.
   * Only local skills use project_id scoping.
   */
  async getSkyllCacheStats() {
    const pool = await this.getPool();
    // Global count - Skyll skills are shared across projects (project_id IS NULL)
    const result = await pool.query(`SELECT COUNT(*) as count FROM skills WHERE source = 'skyll' AND project_id IS NULL`);
    return { cachedSkills: parseInt(result.rows[0].count) };
  }
  // ============================================================================
  // Graph Enrichment Operations
  // ============================================================================
  async updateMemoryEmbedding(memoryId, embedding) {
    const pool = await this.getPool();
    await pool.query('UPDATE memories SET embedding = $1 WHERE id = $2', [
      toPgVector(embedding),
      memoryId,
    ]);
  }
  async updateMemoryEmbeddingsBatch(updates) {
    const pool = await this.getPool();
    const client = await pool.connect();
    try {
      await client.query('BEGIN');
      for (const { id, embedding } of updates) {
        await client.query('UPDATE memories SET embedding = $1 WHERE id = $2', [
          toPgVector(embedding),
          id,
        ]);
      }
      await client.query('COMMIT');
    }
    catch (err) {
      await client.query('ROLLBACK');
      throw err;
    }
    finally {
      client.release();
    }
  }
  async getMemoriesWithoutEmbeddings(limit = 100) {
    const pool = await this.getPool();
    const result = await pool.query('SELECT id, content FROM memories WHERE embedding IS NULL ORDER BY id LIMIT $1', [limit]);
    return result.rows;
  }
  async getMemoriesNeedingReembedding(limit = 100, afterId = 0) {
    const pool = await this.getPool();
    const result = await pool.query('SELECT id, content FROM memories WHERE id > $1 ORDER BY id LIMIT $2', [afterId, limit]);
    return result.rows;
  }
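  // --------------------------------------------------------------------------
  // Editor's illustrative sketch (not part of the published package): a simple
  // backfill loop for memories that still have no embedding. `embed()` is a
  // hypothetical embedding helper; the storage calls come from the methods above.
  //
  //   let batch = await storage.getMemoriesWithoutEmbeddings(100);
  //   while (batch.length > 0) {
  //     const updates = [];
  //     for (const m of batch) {
  //       updates.push({ id: m.id, embedding: await embed(m.content) });
  //     }
  //     await storage.updateMemoryEmbeddingsBatch(updates);   // single transaction
  //     batch = await storage.getMemoriesWithoutEmbeddings(100);
  //   }
  // --------------------------------------------------------------------------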
|
|
1578
|
+
async getMemoryCount() {
|
|
1579
|
+
const pool = await this.getPool();
|
|
1580
|
+
const result = await pool.query('SELECT count(*) FROM memories');
|
|
1581
|
+
return parseInt(result.rows[0].count, 10);
|
|
1582
|
+
}
|
|
1583
|
+
async getMemoryEmbeddingCount() {
|
|
1584
|
+
const pool = await this.getPool();
|
|
1585
|
+
const result = await pool.query('SELECT count(*) FROM memories WHERE embedding IS NOT NULL');
|
|
1586
|
+
return parseInt(result.rows[0].count, 10);
|
|
1587
|
+
}
|
|
1588
|
+
async updateMemoryTags(memoryId, tags) {
|
|
1589
|
+
const pool = await this.getPool();
|
|
1590
|
+
await pool.query('UPDATE memories SET tags = $1 WHERE id = $2 AND LOWER(project_id) = $3', [
|
|
1591
|
+
JSON.stringify(tags),
|
|
1592
|
+
memoryId,
|
|
1593
|
+
this.projectId,
|
|
1594
|
+
]);
|
|
1595
|
+
}
|
|
1596
|
+
async updateMemoryLink(linkId, updates) {
|
|
1597
|
+
const pool = await this.getPool();
|
|
1598
|
+
const sets = [];
|
|
1599
|
+
const params = [];
|
|
1600
|
+
let idx = 1;
|
|
1601
|
+
if (updates.relation !== undefined) {
|
|
1602
|
+
sets.push(`relation = $${idx++}`);
|
|
1603
|
+
params.push(updates.relation);
|
|
1604
|
+
}
|
|
1605
|
+
if (updates.weight !== undefined) {
|
|
1606
|
+
sets.push(`weight = $${idx++}`);
|
|
1607
|
+
params.push(updates.weight);
|
|
1608
|
+
}
|
|
1609
|
+
if (updates.llmEnriched !== undefined) {
|
|
1610
|
+
sets.push(`llm_enriched = $${idx++}`);
|
|
1611
|
+
params.push(updates.llmEnriched ? 1 : 0);
|
|
1612
|
+
}
|
|
1613
|
+
if (sets.length > 0) {
|
|
1614
|
+
params.push(linkId);
|
|
1615
|
+
try {
|
|
1616
|
+
await pool.query(`UPDATE memory_links SET ${sets.join(', ')} WHERE id = $${idx}`, params);
|
|
1617
|
+
}
|
|
1618
|
+
catch (err) {
|
|
1619
|
+
// Duplicate key: a link with the target (source_id, target_id, relation) already exists.
|
|
1620
|
+
// Delete this link and mark the existing one as enriched instead.
|
|
1621
|
+
if (err.code === '23505' && updates.relation) {
|
|
1622
|
+
const row = await pool.query('SELECT source_id, target_id FROM memory_links WHERE id = $1', [linkId]);
|
|
1623
|
+
if (row.rows.length > 0) {
|
|
1624
|
+
const { source_id, target_id } = row.rows[0];
|
|
1625
|
+
const client = await pool.connect();
|
|
1626
|
+
try {
|
|
1627
|
+
await client.query('BEGIN');
|
|
1628
|
+
await client.query('DELETE FROM memory_links WHERE id = $1', [linkId]);
|
|
1629
|
+
await client.query('UPDATE memory_links SET llm_enriched = 1, weight = GREATEST(weight, $1) WHERE source_id = $2 AND target_id = $3 AND relation = $4', [updates.weight ?? 0.5, source_id, target_id, updates.relation]);
|
|
1630
|
+
await client.query('COMMIT');
|
|
1631
|
+
}
|
|
1632
|
+
catch (txErr) {
|
|
1633
|
+
await client.query('ROLLBACK');
|
|
1634
|
+
throw txErr;
|
|
1635
|
+
}
|
|
1636
|
+
finally {
|
|
1637
|
+
client.release();
|
|
1638
|
+
}
|
|
1639
|
+
}
|
|
1640
|
+
}
|
|
1641
|
+
else {
|
|
1642
|
+
throw err;
|
|
1643
|
+
}
|
|
1644
|
+
}
|
|
1645
|
+
}
|
|
1646
|
+
}
|
|
1647
|
+
    async upsertCentralityScore(memoryId, degree, normalizedDegree) {
        const pool = await this.getPool();
        await pool.query(`INSERT INTO memory_centrality (memory_id, degree, normalized_degree, updated_at)
            VALUES ($1, $2, $3, NOW())
            ON CONFLICT (memory_id) DO UPDATE SET degree = EXCLUDED.degree, normalized_degree = EXCLUDED.normalized_degree, updated_at = NOW()`, [memoryId, degree, normalizedDegree]);
    }
    async getCentralityScores(memoryIds) {
        const map = new Map();
        if (memoryIds.length === 0)
            return map;
        const pool = await this.getPool();
        const placeholders = memoryIds.map((_, i) => `$${i + 1}`).join(',');
        const { rows } = await pool.query(`SELECT memory_id, normalized_degree FROM memory_centrality WHERE memory_id IN (${placeholders})`, memoryIds);
        for (const row of rows)
            map.set(row.memory_id, row.normalized_degree);
        return map;
    }
    // ============================================================================
    // Token Frequency Operations
    // ============================================================================
    async updateTokenFrequencies(tokens) {
        if (tokens.length === 0)
            return;
        const pool = await this.getPool();
        const client = await pool.connect();
        try {
            await client.query('BEGIN');
            for (const token of tokens) {
                if (token.length >= 2) {
                    await client.query(`INSERT INTO token_frequencies (token, frequency, updated_at)
                        VALUES ($1, 1, NOW())
                        ON CONFLICT(token) DO UPDATE SET
                          frequency = token_frequencies.frequency + 1,
                          updated_at = NOW()`, [token]);
                }
            }
            await client.query('COMMIT');
        }
        catch (e) {
            await client.query('ROLLBACK');
            throw e;
        }
        finally {
            client.release();
        }
    }
    async getTokenFrequency(token) {
        const pool = await this.getPool();
        const result = await pool.query('SELECT frequency FROM token_frequencies WHERE token = $1', [token]);
        return result.rows[0]?.frequency ?? 0;
    }
    async getTokenFrequencies(tokens) {
        const result = new Map();
        if (tokens.length === 0)
            return result;
        const pool = await this.getPool();
        const placeholders = tokens.map((_, i) => `$${i + 1}`).join(',');
        const rows = await pool.query(`SELECT token, frequency FROM token_frequencies WHERE token IN (${placeholders})`, tokens);
        for (const row of rows.rows) {
            result.set(row.token, row.frequency);
        }
        return result;
    }
    async getTotalTokenCount() {
        const pool = await this.getPool();
        const result = await pool.query('SELECT COALESCE(SUM(frequency), 0) as total FROM token_frequencies');
        return parseInt(result.rows[0].total);
    }
    async getTopTokens(limit = 100) {
        const pool = await this.getPool();
        const result = await pool.query('SELECT token, frequency FROM token_frequencies ORDER BY frequency DESC LIMIT $1', [limit]);
        return result.rows;
    }
    async clearTokenFrequencies() {
        const pool = await this.getPool();
        await pool.query('DELETE FROM token_frequencies');
    }
    async getTokenFrequencyStats() {
        const pool = await this.getPool();
        const result = await pool.query(`
            SELECT
              COUNT(*) as unique_tokens,
              COALESCE(SUM(frequency), 0) as total_occurrences
            FROM token_frequencies
        `);
        const row = result.rows[0];
        const unique = parseInt(row.unique_tokens);
        const total = parseInt(row.total_occurrences);
        return {
            unique_tokens: unique,
            total_occurrences: total,
            avg_frequency: unique > 0 ? total / unique : 0,
        };
    }
    // ============================================================================
    // Token Stats - Additional Operations
    // ============================================================================
    async getTokenStatsAggregated() {
        const pool = await this.getPool();
        const whereClause = this.projectId ? 'WHERE LOWER(project_id) = $1' : '';
        const params = this.projectId ? [this.projectId] : [];
        const result = await pool.query(`
            SELECT
              event_type,
              COUNT(*) as query_count,
              SUM(returned_tokens) as total_returned_tokens,
              SUM(full_source_tokens) as total_full_source_tokens,
              SUM(savings_tokens) as total_savings_tokens,
              COALESCE(SUM(estimated_cost), 0) as total_estimated_cost
            FROM token_stats
            ${whereClause}
            GROUP BY event_type
            ORDER BY event_type
        `, params);
        return result.rows.map((row) => ({
            event_type: row.event_type,
            query_count: parseInt(row.query_count),
            total_returned_tokens: parseInt(row.total_returned_tokens),
            total_full_source_tokens: parseInt(row.total_full_source_tokens),
            total_savings_tokens: parseInt(row.total_savings_tokens),
            total_estimated_cost: parseFloat(row.total_estimated_cost),
        }));
    }
    async clearTokenStats() {
        const pool = await this.getPool();
        if (this.projectId) {
            await pool.query('DELETE FROM token_stats WHERE LOWER(project_id) = $1', [this.projectId]);
        }
        else {
            await pool.query('DELETE FROM token_stats');
        }
    }
    // ============================================================================
    // Web Search History
    // ============================================================================
    async recordWebSearch(record) {
        const pool = await this.getPool();
        const result = await pool.query(`INSERT INTO web_search_history (project_id, tool_name, model, query, prompt_tokens, completion_tokens, estimated_cost_usd, citations_count, has_reasoning, response_length_chars)
            VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10) RETURNING id`, [
            this.projectId,
            record.tool_name,
            record.model,
            record.query,
            record.prompt_tokens,
            record.completion_tokens,
            record.estimated_cost_usd,
            record.citations_count,
            record.has_reasoning,
            record.response_length_chars,
        ]);
        return result.rows[0].id;
    }
    async getWebSearchHistory(filter) {
        const pool = await this.getPool();
        const conditions = [];
        const params = [];
        let paramIdx = 1;
        if (this.projectId) {
            conditions.push(`LOWER(project_id) = $${paramIdx++}`);
            params.push(this.projectId);
        }
        if (filter.tool_name) {
            conditions.push(`tool_name = $${paramIdx++}`);
            params.push(filter.tool_name);
        }
        if (filter.model) {
            conditions.push(`model = $${paramIdx++}`);
            params.push(filter.model);
        }
        if (filter.query_text) {
            conditions.push(`query ILIKE $${paramIdx++}`);
            params.push(`%${filter.query_text}%`);
        }
        if (filter.date_from) {
            conditions.push(`created_at >= $${paramIdx++}`);
            params.push(filter.date_from);
        }
        if (filter.date_to) {
            conditions.push(`created_at <= $${paramIdx++}::date + interval '1 day'`);
            params.push(filter.date_to);
        }
        const where = conditions.length > 0 ? `WHERE ${conditions.join(' AND ')}` : '';
        const limit = filter.limit ?? 20;
        params.push(limit);
        const result = await pool.query(`SELECT id, tool_name, model, query, prompt_tokens, completion_tokens, estimated_cost_usd, citations_count, has_reasoning, response_length_chars, created_at
            FROM web_search_history ${where} ORDER BY created_at DESC LIMIT $${paramIdx}`, params);
        return result.rows.map((row) => ({
            id: parseInt(row.id),
            tool_name: row.tool_name,
            model: row.model,
            query: row.query,
            prompt_tokens: parseInt(row.prompt_tokens),
            completion_tokens: parseInt(row.completion_tokens),
            estimated_cost_usd: parseFloat(row.estimated_cost_usd),
            citations_count: parseInt(row.citations_count),
            has_reasoning: !!row.has_reasoning,
            response_length_chars: parseInt(row.response_length_chars),
            created_at: row.created_at instanceof Date ? row.created_at.toISOString() : row.created_at,
        }));
    }
    async getWebSearchSummary() {
        const pool = await this.getPool();
        const whereClause = this.projectId ? 'WHERE LOWER(project_id) = $1' : '';
        const params = this.projectId ? [this.projectId] : [];
        const totals = await pool.query(`SELECT COUNT(*) as total_searches, COALESCE(SUM(estimated_cost_usd), 0) as total_cost_usd
            FROM web_search_history ${whereClause}`, params);
        const byTool = await pool.query(`SELECT tool_name, COUNT(*) as count, COALESCE(SUM(estimated_cost_usd), 0) as cost
            FROM web_search_history ${whereClause} GROUP BY tool_name`, params);
        const todayWhere = this.projectId
            ? "WHERE LOWER(project_id) = $1 AND DATE_TRUNC('day', created_at) = DATE_TRUNC('day', NOW())"
            : "WHERE DATE_TRUNC('day', created_at) = DATE_TRUNC('day', NOW())";
        const today = await pool.query(`SELECT COUNT(*) as today_searches, COALESCE(SUM(estimated_cost_usd), 0) as today_cost_usd
            FROM web_search_history ${todayWhere}`, params);
        const by_tool = {};
        for (const row of byTool.rows) {
            by_tool[row.tool_name] = { count: parseInt(row.count), cost: parseFloat(row.cost) };
        }
        return {
            total_searches: parseInt(totals.rows[0].total_searches),
            total_cost_usd: parseFloat(totals.rows[0].total_cost_usd),
            by_tool,
            today_searches: parseInt(today.rows[0].today_searches),
            today_cost_usd: parseFloat(today.rows[0].today_cost_usd),
        };
    }
    async getTodayWebSearchSpend() {
        const pool = await this.getPool();
        const whereClause = this.projectId
            ? "WHERE LOWER(project_id) = $1 AND DATE_TRUNC('day', created_at) = DATE_TRUNC('day', NOW())"
            : "WHERE DATE_TRUNC('day', created_at) = DATE_TRUNC('day', NOW())";
        const params = this.projectId ? [this.projectId] : [];
        const result = await pool.query(`SELECT COALESCE(SUM(estimated_cost_usd), 0) as total FROM web_search_history ${whereClause}`, params);
        return parseFloat(result.rows[0].total);
    }
    async clearWebSearchHistory() {
        const pool = await this.getPool();
        if (this.projectId) {
            await pool.query('DELETE FROM web_search_history WHERE LOWER(project_id) = $1', [
                this.projectId,
            ]);
        }
        else {
            await pool.query('DELETE FROM web_search_history');
        }
    }
    // ============================================================================
    // Document Operations - Additional
    // ============================================================================
    async getRecentDocuments(limit = 10) {
        if (!this.projectId) {
            throw new StorageError('Project ID must be set before getting recent documents');
        }
        const pool = await this.getPool();
        const result = await pool.query(`SELECT file_path, content, start_line, end_line
            FROM documents
            WHERE LOWER(project_id) = $1 AND file_path NOT LIKE 'code:%'
            ORDER BY id DESC
            LIMIT $2`, [this.projectId, limit]);
        return result.rows;
    }
    async clearCodeDocuments() {
        const pool = await this.getPool();
        if (this.projectId) {
            await pool.query("DELETE FROM documents WHERE LOWER(project_id) = $1 AND file_path LIKE 'code:%'", [this.projectId]);
            await pool.query("DELETE FROM file_hashes WHERE LOWER(project_id) = $1 AND file_path LIKE 'code:%'", [this.projectId]);
        }
        else {
            await pool.query("DELETE FROM documents WHERE file_path LIKE 'code:%'");
            await pool.query("DELETE FROM file_hashes WHERE file_path LIKE 'code:%'");
        }
    }
    async getStoredEmbeddingDimension() {
        const pool = await this.getPool();
        const result = await pool.query('SELECT embedding FROM documents LIMIT 1');
        if (result.rows.length === 0)
            return null;
        const embedding = fromPgVector(result.rows[0].embedding);
        return embedding.length;
    }
    // ============================================================================
    // Memory Operations - Additional
    // ============================================================================
    async findSimilarMemory(embedding, threshold = 0.92) {
        const pool = await this.getPool();
        let query;
        let params;
        if (this.projectId) {
            query = `SELECT id, content, 1 - (embedding <=> $1) as similarity
                FROM memories
                WHERE (LOWER(project_id) = $2 OR project_id IS NULL)
                  AND 1 - (embedding <=> $1) >= $3
                ORDER BY embedding <=> $1
                LIMIT 1`;
            params = [toPgVector(embedding), this.projectId, threshold];
        }
        else {
            query = `SELECT id, content, 1 - (embedding <=> $1) as similarity
                FROM memories
                WHERE project_id IS NULL
                  AND 1 - (embedding <=> $1) >= $2
                ORDER BY embedding <=> $1
                LIMIT 1`;
            params = [toPgVector(embedding), threshold];
        }
        const result = await pool.query(query, params);
        if (result.rows.length === 0)
            return null;
        return {
            id: result.rows[0].id,
            content: result.rows[0].content,
            similarity: parseFloat(String(result.rows[0].similarity)),
        };
    }
    async findSimilarGlobalMemory(embedding, threshold = 0.92) {
        const pool = await this.getPool();
        const result = await pool.query(`SELECT id, content, 1 - (embedding <=> $1) as similarity
            FROM memories
            WHERE project_id IS NULL
              AND 1 - (embedding <=> $1) >= $2
            ORDER BY embedding <=> $1
            LIMIT 1`, [toPgVector(embedding), threshold]);
        if (result.rows.length === 0)
            return null;
        return {
            id: result.rows[0].id,
            content: result.rows[0].content,
            similarity: parseFloat(String(result.rows[0].similarity)),
        };
    }
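    // Batch save: runs in one transaction; each memory is first checked against existing rows by
    // pgvector similarity and skipped as a duplicate when it matches at or above the threshold.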
    async saveMemoriesBatch(memories, deduplicateThreshold = 0.92, options) {
        if (memories.length === 0) {
            return { saved: 0, skipped: 0, results: [] };
        }
        const pool = await this.getPool();
        const client = await pool.connect();
        const results = [];
        let saved = 0;
        let skipped = 0;
        const shouldDedup = options?.deduplicate !== false;
        try {
            await client.query('BEGIN');
            for (let i = 0; i < memories.length; i++) {
                const memory = memories[i];
                // Check for duplicates via pgvector
                if (shouldDedup) {
                    let dupQuery;
                    let dupParams;
                    if (this.projectId) {
                        dupQuery = `SELECT id, 1 - (embedding <=> $1) as similarity
                            FROM memories
                            WHERE (LOWER(project_id) = $2 OR project_id IS NULL)
                              AND 1 - (embedding <=> $1) >= $3
                            ORDER BY embedding <=> $1
                            LIMIT 1`;
                        dupParams = [toPgVector(memory.embedding), this.projectId, deduplicateThreshold];
                    }
                    else {
                        dupQuery = `SELECT id, 1 - (embedding <=> $1) as similarity
                            FROM memories
                            WHERE project_id IS NULL
                              AND 1 - (embedding <=> $1) >= $2
                            ORDER BY embedding <=> $1
                            LIMIT 1`;
                        dupParams = [toPgVector(memory.embedding), deduplicateThreshold];
                    }
                    const dupResult = await client.query(dupQuery, dupParams);
                    if (dupResult.rows.length > 0) {
                        results.push({
                            index: i,
                            isDuplicate: true,
                            id: dupResult.rows[0].id,
                            reason: 'duplicate',
                            similarity: parseFloat(String(dupResult.rows[0].similarity)),
                        });
                        skipped++;
                        continue;
                    }
                }
                // Insert memory
                const validFromStr = memory.validFrom
                    ? memory.validFrom instanceof Date
                        ? memory.validFrom.toISOString()
                        : memory.validFrom
                    : null;
                const validUntilStr = memory.validUntil
                    ? memory.validUntil instanceof Date
                        ? memory.validUntil.toISOString()
                        : memory.validUntil
                    : null;
                const insertResult = await client.query(`INSERT INTO memories (project_id, content, tags, source, type, quality_score, quality_factors, embedding, valid_from, valid_until)
                    VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10)
                    RETURNING id`, [
                    this.projectId,
                    memory.content,
                    memory.tags.length > 0 ? JSON.stringify(memory.tags) : null,
                    memory.source ?? null,
                    memory.type,
                    memory.qualityScore?.score ?? null,
                    memory.qualityScore?.factors ? JSON.stringify(memory.qualityScore.factors) : null,
                    toPgVector(memory.embedding),
                    validFromStr,
                    validUntilStr,
                ]);
                results.push({
                    index: i,
                    isDuplicate: false,
                    id: insertResult.rows[0].id,
                    reason: 'saved',
                });
                saved++;
            }
            await client.query('COMMIT');
        }
        catch (e) {
            await client.query('ROLLBACK');
            throw e;
        }
        finally {
            client.release();
        }
        results.sort((a, b) => a.index - b.index);
        return { saved, skipped, results };
    }
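    // Aggregate stats for the current scope: totals, active vs. invalidated, per-type counts, and memories older than 30 days.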
    async getMemoryStats() {
        const pool = await this.getPool();
        const scopeCond = this.projectId
            ? '(LOWER(project_id) = $1 OR project_id IS NULL)'
            : 'project_id IS NULL';
        const scopeParams = this.projectId ? [this.projectId] : [];
        const total = await pool.query(`SELECT COUNT(*) as count FROM memories WHERE ${scopeCond}`, scopeParams);
        const active = await pool.query(`SELECT COUNT(*) as count FROM memories WHERE ${scopeCond} AND invalidated_by IS NULL`, scopeParams);
        const oldest = await pool.query(`SELECT MIN(created_at)::text as oldest FROM memories WHERE ${scopeCond} AND invalidated_by IS NULL`, scopeParams);
        const newest = await pool.query(`SELECT MAX(created_at)::text as newest FROM memories WHERE ${scopeCond} AND invalidated_by IS NULL`, scopeParams);
        const typeCounts = await pool.query(`SELECT COALESCE(type, 'observation') as type, COUNT(*) as count
            FROM memories WHERE ${scopeCond} AND invalidated_by IS NULL
            GROUP BY type`, scopeParams);
        const by_type = {};
        for (const row of typeCounts.rows) {
            by_type[row.type] = parseInt(row.count);
        }
        // Stale memories: older than 30 days, active only
        const thirtyDaysAgo = new Date(Date.now() - 30 * 24 * 60 * 60 * 1000).toISOString();
        const staleParamIdx = scopeParams.length + 1;
        const stale = await pool.query(`SELECT COUNT(*) as count FROM memories
            WHERE ${scopeCond} AND created_at < $${staleParamIdx} AND invalidated_by IS NULL`, [...scopeParams, thirtyDaysAgo]);
        const totalCount = parseInt(total.rows[0].count);
        const activeCount = parseInt(active.rows[0].count);
        return {
            total_memories: totalCount,
            active_memories: activeCount,
            invalidated_memories: totalCount - activeCount,
            oldest_memory: oldest.rows[0].oldest,
            newest_memory: newest.rows[0].newest,
            by_type,
            stale_count: parseInt(stale.rows[0].count),
        };
    }
    async deleteMemoriesOlderThan(date) {
        const pool = await this.getPool();
        const result = await pool.query('DELETE FROM memories WHERE created_at < $1', [
            date.toISOString(),
        ]);
        return result.rowCount ?? 0;
    }
    async deleteMemoriesByTag(tag) {
        const pool = await this.getPool();
        // JSONB array containment: tags is a JSONB array, check if any element matches (case-insensitive)
        const result = await pool.query(`DELETE FROM memories
            WHERE tags IS NOT NULL
              AND EXISTS (
                SELECT 1 FROM jsonb_array_elements_text(tags) elem
                WHERE LOWER(elem) = LOWER($1)
              )`, [tag]);
        return result.rowCount ?? 0;
    }
    async deleteMemoriesByIds(ids) {
        if (ids.length === 0)
            return 0;
        const pool = await this.getPool();
        const placeholders = ids.map((_, i) => `$${i + 1}`).join(',');
        const result = await pool.query(`DELETE FROM memories WHERE id IN (${placeholders})`, ids);
        return result.rowCount ?? 0;
    }
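    // Point-in-time search: only memories created before, and temporally valid at, the given date are returned.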
    async searchMemoriesAsOf(queryEmbedding, asOfDate, limit = 5, threshold = 0.3) {
        const pool = await this.getPool();
        const asOfStr = asOfDate.toISOString();
        // Get memories that existed at that time, with temporal validity check
        const scopeCond = this.projectId
            ? '(LOWER(project_id) = $4 OR project_id IS NULL)'
            : 'project_id IS NULL';
        const scopeParams = this.projectId ? [this.projectId] : [];
        const result = await pool.query(`SELECT id, project_id, content, tags, source, type, quality_score, quality_factors,
              access_count, last_accessed, valid_from, valid_until, created_at,
              1 - (embedding <=> $1) as similarity
            FROM memories
            WHERE created_at <= $2
              AND 1 - (embedding <=> $1) >= $3
              AND ${scopeCond}
              AND (valid_from IS NULL OR valid_from <= $2)
              AND (valid_until IS NULL OR valid_until > $2)
            ORDER BY embedding <=> $1
            LIMIT $${scopeParams.length + 4}`, [toPgVector(queryEmbedding), asOfStr, threshold, ...scopeParams, limit]);
        return result.rows.map((row) => ({
            id: row.id,
            content: row.content,
            tags: row.tags ? (typeof row.tags === 'string' ? JSON.parse(row.tags) : row.tags) : [],
            source: row.source,
            type: row.type,
            quality_score: row.quality_score,
            quality_factors: row.quality_factors
                ? typeof row.quality_factors === 'string'
                    ? JSON.parse(row.quality_factors)
                    : row.quality_factors
                : null,
            access_count: row.access_count ?? 0,
            last_accessed: row.last_accessed,
            correction_count: row.correction_count ?? 0,
            is_invariant: !!row.is_invariant,
            priority_score: row.priority_score ?? null,
            valid_from: row.valid_from,
            valid_until: row.valid_until,
            created_at: row.created_at,
            similarity: parseFloat(row.similarity),
        }));
    }
    async getConsolidationHistory(limit = 20) {
        const pool = await this.getPool();
        // Find memories that have 'supersedes' links (merge results)
        const rows = await pool.query(`SELECT DISTINCT ml.source_id as merged_id, ml.created_at::text as merged_at
            FROM memory_links ml
            WHERE ml.relation = 'supersedes'
            ORDER BY ml.created_at DESC
            LIMIT $1`, [limit]);
        const results = [];
        for (const row of rows.rows) {
            const memory = await this.getMemoryById(row.merged_id);
            const originals = await pool.query(`SELECT target_id FROM memory_links
                WHERE source_id = $1 AND relation = 'supersedes'`, [row.merged_id]);
            results.push({
                mergedMemoryId: row.merged_id,
                mergedContent: memory?.content ?? '(deleted)',
                originalIds: originals.rows.map((o) => o.target_id),
                mergedAt: row.merged_at,
            });
        }
        return results;
    }
    // ============================================================================
    // Retention Operations
    // ============================================================================
    async incrementMemoryAccessBatch(accesses) {
        if (accesses.length === 0)
            return;
        const pool = await this.getPool();
        const client = await pool.connect();
        try {
            await client.query('BEGIN');
            for (const { memoryId, weight } of accesses) {
                await client.query(`UPDATE memories
                    SET access_count = COALESCE(access_count, 0) + $1,
                        last_accessed = NOW()
                    WHERE id = $2`, [weight, memoryId]);
            }
            await client.query('COMMIT');
        }
        catch (e) {
            await client.query('ROLLBACK');
            throw e;
        }
        finally {
            client.release();
        }
    }
    async getAllMemoriesForRetention() {
        const pool = await this.getPool();
        const scopeCond = this.projectId
            ? 'WHERE (LOWER(project_id) = $1 OR project_id IS NULL)'
            : 'WHERE project_id IS NULL';
        const params = this.projectId ? [this.projectId] : [];
        const result = await pool.query(`SELECT id, content, quality_score, access_count, created_at::text as created_at, last_accessed::text as last_accessed
            FROM memories
            ${scopeCond}
            ORDER BY created_at ASC`, params);
        return result.rows.map((row) => ({
            id: row.id,
            content: row.content,
            quality_score: row.quality_score,
            access_count: row.access_count ?? 0,
            created_at: row.created_at,
            last_accessed: row.last_accessed,
        }));
    }
    // ============================================================================
    // Backfill — fetch all records with embeddings for Qdrant sync
    // ============================================================================
    async getAllMemoriesWithEmbeddings() {
        const pool = await this.getPool();
        const scopeCond = this.projectId
            ? 'WHERE LOWER(project_id) = $1 AND invalidated_by IS NULL'
            : 'WHERE project_id IS NULL AND invalidated_by IS NULL';
        const params = this.projectId ? [this.projectId] : [];
        const result = await pool.query(`SELECT id, content, tags, source, type, project_id,
              created_at::text as created_at, valid_from::text as valid_from,
              valid_until::text as valid_until, invalidated_by, access_count,
              last_accessed::text as last_accessed, quality_score,
              embedding::text as embedding
            FROM memories ${scopeCond}
            ORDER BY id ASC`, params);
        return result.rows.map((row) => ({
            id: row.id,
            content: row.content,
            tags: row.tags ?? [],
            source: row.source,
            type: row.type,
            projectId: row.project_id,
            createdAt: row.created_at,
            validFrom: row.valid_from,
            validUntil: row.valid_until,
            invalidatedBy: row.invalidated_by,
            accessCount: row.access_count ?? 0,
            lastAccessed: row.last_accessed,
            qualityScore: row.quality_score,
            embedding: fromPgVector(row.embedding),
        }));
    }
    async getAllDocumentsWithEmbeddings() {
        const pool = await this.getPool();
        const scopeCond = this.projectId ? 'WHERE LOWER(project_id) = $1' : '';
        const params = this.projectId ? [this.projectId] : [];
        const result = await pool.query(`SELECT id, file_path, content, start_line, end_line, project_id,
              embedding::text as embedding
            FROM documents ${scopeCond}
            ORDER BY id ASC`, params);
        return result.rows.map((row) => ({
            id: row.id,
            filePath: row.file_path,
            content: row.content,
            startLine: row.start_line,
            endLine: row.end_line,
            projectId: row.project_id,
            embedding: fromPgVector(row.embedding),
        }));
    }
    // ============================================================================
    // Graph Operations - Additional
    // ============================================================================
    async getMemoryWithLinks(memoryId, options) {
        const memory = await this.getMemoryById(memoryId);
        if (!memory)
            return null;
        const links = await this.getMemoryLinks(memoryId);
        const now = options?.asOfDate?.getTime() ?? Date.now();
        const includeExpired = options?.includeExpired ?? false;
        const filterLink = (link) => {
            if (includeExpired)
                return true;
            if (link.valid_from) {
                const validFrom = new Date(link.valid_from).getTime();
                if (now < validFrom)
                    return false;
            }
            if (link.valid_until) {
                const validUntil = new Date(link.valid_until).getTime();
                if (now > validUntil)
                    return false;
            }
            return true;
        };
        // Fetch content for linked memories
        const pool = await this.getPool();
        const outgoing = [];
        for (const l of links.outgoing.filter(filterLink)) {
            const targetMem = await pool.query('SELECT content FROM memories WHERE id = $1', [l.target_id]);
            outgoing.push({
                target_id: l.target_id,
                relation: l.relation,
                weight: l.weight,
                target_content: targetMem.rows[0]?.content ?? '',
            });
        }
        const incoming = [];
        for (const l of links.incoming.filter(filterLink)) {
            const sourceMem = await pool.query('SELECT content FROM memories WHERE id = $1', [l.source_id]);
            incoming.push({
                source_id: l.source_id,
                relation: l.relation,
                weight: l.weight,
                source_content: sourceMem.rows[0]?.content ?? '',
            });
        }
        return {
            ...memory,
            outgoing_links: outgoing,
            incoming_links: incoming,
        };
    }
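    // Breadth-first traversal of memory_links (both directions) up to maxDepth, visiting each memory once.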
    async findConnectedMemories(memoryId, maxDepth = 2) {
        const pool = await this.getPool();
        const visited = new Set([memoryId]);
        const results = [];
        // BFS traversal
        let currentLevel = [{ id: memoryId, path: [memoryId] }];
        for (let depth = 1; depth <= maxDepth; depth++) {
            const nextLevel = [];
            for (const { id, path } of currentLevel) {
                const outgoing = await pool.query('SELECT target_id FROM memory_links WHERE source_id = $1', [id]);
                const incoming = await pool.query('SELECT source_id FROM memory_links WHERE target_id = $1', [id]);
                const neighbors = [
                    ...outgoing.rows.map((r) => r.target_id),
                    ...incoming.rows.map((r) => r.source_id),
                ];
                for (const neighborId of neighbors) {
                    if (!visited.has(neighborId)) {
                        visited.add(neighborId);
                        const memory = await this.getMemoryById(neighborId);
                        if (memory) {
                            const newPath = [...path, neighborId];
                            results.push({ memory, depth, path: newPath });
                            nextLevel.push({ id: neighborId, path: newPath });
                        }
                    }
                }
            }
            currentLevel = nextLevel;
        }
        return results;
    }
    async findRelatedMemoriesForLinking(memoryId, threshold = 0.75, maxLinks = 3) {
        const pool = await this.getPool();
        // Use pgvector to find similar memories efficiently (scoped to current project)
        const source = await pool.query('SELECT embedding FROM memories WHERE id = $1', [memoryId]);
        if (source.rows.length === 0)
            return [];
        const result = await pool.query(`SELECT id, 1 - (embedding <=> (SELECT embedding FROM memories WHERE id = $1)) as similarity
            FROM memories
            WHERE id != $1
              AND (LOWER(project_id) = $4 OR project_id IS NULL)
              AND 1 - (embedding <=> (SELECT embedding FROM memories WHERE id = $1)) >= $2
            ORDER BY embedding <=> (SELECT embedding FROM memories WHERE id = $1)
            LIMIT $3`, [memoryId, threshold, maxLinks, this.projectId]);
        return result.rows.map((r) => ({
            id: r.id,
            similarity: parseFloat(String(r.similarity)),
        }));
    }
    async createAutoLinks(memoryId, threshold = 0.75, maxLinks = 3) {
        const candidates = await this.findRelatedMemoriesForLinking(memoryId, threshold, maxLinks);
        let linksCreated = 0;
        for (const { id: targetId, similarity } of candidates) {
            const result = await this.createMemoryLink(memoryId, targetId, 'similar_to', similarity);
            if (result.created) {
                linksCreated++;
            }
        }
        return linksCreated;
    }
    async autoLinkSimilarMemories(threshold = 0.75, maxLinks = 3) {
        const pool = await this.getPool();
        const scopeCond = this.projectId
            ? 'WHERE (LOWER(project_id) = $1 OR project_id IS NULL)'
            : 'WHERE project_id IS NULL';
        const params = this.projectId ? [this.projectId] : [];
        // Get all memory IDs
        const memories = await pool.query(`SELECT id FROM memories ${scopeCond}`, params);
        let linksCreated = 0;
        for (const { id } of memories.rows) {
            const created = await this.createAutoLinks(id, threshold, maxLinks);
            linksCreated += created;
        }
        return linksCreated;
    }
    async invalidateMemoryLink(sourceId, targetId, relation) {
        const pool = await this.getPool();
        if (relation) {
            const result = await pool.query(`UPDATE memory_links SET valid_until = NOW()
                WHERE source_id = $1 AND target_id = $2 AND relation = $3 AND valid_until IS NULL`, [sourceId, targetId, relation]);
            return (result.rowCount ?? 0) > 0;
        }
        else {
            const result = await pool.query(`UPDATE memory_links SET valid_until = NOW()
                WHERE source_id = $1 AND target_id = $2 AND valid_until IS NULL`, [sourceId, targetId]);
            return (result.rowCount ?? 0) > 0;
        }
    }
    async getMemoryLinksAsOf(memoryId, asOfDate) {
        const pool = await this.getPool();
        const asOfStr = asOfDate.toISOString();
        const outgoing = await pool.query(`SELECT * FROM memory_links
            WHERE source_id = $1
              AND created_at <= $2
              AND (valid_from IS NULL OR valid_from <= $2)
              AND (valid_until IS NULL OR valid_until > $2)`, [memoryId, asOfStr]);
        const incoming = await pool.query(`SELECT * FROM memory_links
            WHERE target_id = $1
              AND created_at <= $2
              AND (valid_from IS NULL OR valid_from <= $2)
              AND (valid_until IS NULL OR valid_until > $2)`, [memoryId, asOfStr]);
        return { outgoing: outgoing.rows, incoming: incoming.rows };
    }
    async findConnectedMemoriesAsOf(memoryId, asOfDate, maxDepth = 2) {
        const pool = await this.getPool();
        const asOfStr = asOfDate.toISOString();
        const visited = new Set([memoryId]);
        const results = [];
        let currentLevel = [{ id: memoryId, path: [memoryId] }];
        for (let depth = 1; depth <= maxDepth; depth++) {
            const nextLevel = [];
            for (const { id, path } of currentLevel) {
                const outgoing = await pool.query(`SELECT target_id FROM memory_links
                    WHERE source_id = $1
                      AND created_at <= $2
                      AND (valid_from IS NULL OR valid_from <= $2)
                      AND (valid_until IS NULL OR valid_until > $2)`, [id, asOfStr]);
                const incoming = await pool.query(`SELECT source_id FROM memory_links
                    WHERE target_id = $1
                      AND created_at <= $2
                      AND (valid_from IS NULL OR valid_from <= $2)
                      AND (valid_until IS NULL OR valid_until > $2)`, [id, asOfStr]);
                const neighbors = [
                    ...outgoing.rows.map((r) => r.target_id),
                    ...incoming.rows.map((r) => r.source_id),
                ];
                for (const neighborId of neighbors) {
                    if (!visited.has(neighborId)) {
                        visited.add(neighborId);
                        const memory = await this.getMemoryById(neighborId);
                        if (memory) {
                            const newPath = [...path, neighborId];
                            results.push({ memory, depth, path: newPath });
                            nextLevel.push({ id: neighborId, path: newPath });
                        }
                    }
                }
            }
            currentLevel = nextLevel;
        }
        return results;
    }
    async getGraphStatsAsOf(asOfDate) {
        const pool = await this.getPool();
        const asOfStr = asOfDate.toISOString();
        const totalMemories = await pool.query('SELECT COUNT(*) as count FROM memories WHERE created_at <= $1', [asOfStr]);
        const totalLinks = await pool.query(`SELECT COUNT(*) as count FROM memory_links
            WHERE created_at <= $1
              AND (valid_from IS NULL OR valid_from <= $1)
              AND (valid_until IS NULL OR valid_until > $1)`, [asOfStr]);
        const relationCounts = await pool.query(`SELECT relation, COUNT(*) as count FROM memory_links
            WHERE created_at <= $1
              AND (valid_from IS NULL OR valid_from <= $1)
              AND (valid_until IS NULL OR valid_until > $1)
            GROUP BY relation`, [asOfStr]);
        const relations = {};
        for (const row of relationCounts.rows) {
            relations[row.relation] = parseInt(row.count);
        }
        const memCount = parseInt(totalMemories.rows[0].count);
        const linkCount = parseInt(totalLinks.rows[0].count);
        return {
            total_memories: memCount,
            total_links: linkCount,
            avg_links_per_memory: memCount > 0 ? linkCount / memCount : 0,
            relations,
        };
    }
}
/**
 * Create PostgreSQL backend from storage config.
 */
export function createPostgresBackend(config) {
    const pgConfig = config.postgresql ?? {};
    return new PostgresBackend({
        connectionString: pgConfig.connection_string,
        host: pgConfig.host,
        port: pgConfig.port,
        database: pgConfig.database,
        user: pgConfig.user,
        password: pgConfig.password,
        ssl: pgConfig.ssl,
        poolSize: pgConfig.pool_size,
    });
}
//# sourceMappingURL=postgresql.js.map