superlocalmemory 2.8.6 → 3.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (431)
  1. package/LICENSE +9 -1
  2. package/NOTICE +63 -0
  3. package/README.md +165 -480
  4. package/bin/slm +17 -449
  5. package/bin/slm-npm +1 -1
  6. package/conftest.py +5 -0
  7. package/docs/api-reference.md +284 -0
  8. package/docs/architecture.md +149 -0
  9. package/docs/auto-memory.md +150 -0
  10. package/docs/cli-reference.md +276 -0
  11. package/docs/compliance.md +191 -0
  12. package/docs/configuration.md +182 -0
  13. package/docs/getting-started.md +102 -0
  14. package/docs/ide-setup.md +261 -0
  15. package/docs/mcp-tools.md +220 -0
  16. package/docs/migration-from-v2.md +170 -0
  17. package/docs/profiles.md +173 -0
  18. package/docs/troubleshooting.md +310 -0
  19. package/{configs → ide/configs}/antigravity-mcp.json +3 -3
  20. package/ide/configs/chatgpt-desktop-mcp.json +16 -0
  21. package/{configs → ide/configs}/claude-desktop-mcp.json +3 -3
  22. package/{configs → ide/configs}/codex-mcp.toml +4 -4
  23. package/{configs → ide/configs}/continue-mcp.yaml +4 -3
  24. package/{configs → ide/configs}/continue-skills.yaml +6 -6
  25. package/ide/configs/cursor-mcp.json +15 -0
  26. package/{configs → ide/configs}/gemini-cli-mcp.json +2 -2
  27. package/{configs → ide/configs}/jetbrains-mcp.json +2 -2
  28. package/{configs → ide/configs}/opencode-mcp.json +2 -2
  29. package/{configs → ide/configs}/perplexity-mcp.json +2 -2
  30. package/{configs → ide/configs}/vscode-copilot-mcp.json +2 -2
  31. package/{configs → ide/configs}/windsurf-mcp.json +3 -3
  32. package/{configs → ide/configs}/zed-mcp.json +2 -2
  33. package/{hooks → ide/hooks}/context-hook.js +9 -20
  34. package/ide/hooks/memory-list-skill.js +70 -0
  35. package/ide/hooks/memory-profile-skill.js +101 -0
  36. package/ide/hooks/memory-recall-skill.js +62 -0
  37. package/ide/hooks/memory-remember-skill.js +68 -0
  38. package/ide/hooks/memory-reset-skill.js +160 -0
  39. package/{hooks → ide/hooks}/post-recall-hook.js +2 -2
  40. package/ide/integrations/langchain/README.md +106 -0
  41. package/ide/integrations/langchain/langchain_superlocalmemory/__init__.py +9 -0
  42. package/ide/integrations/langchain/langchain_superlocalmemory/chat_message_history.py +201 -0
  43. package/ide/integrations/langchain/pyproject.toml +38 -0
  44. package/{src/learning → ide/integrations/langchain}/tests/__init__.py +1 -0
  45. package/ide/integrations/langchain/tests/test_chat_message_history.py +215 -0
  46. package/ide/integrations/langchain/tests/test_security.py +117 -0
  47. package/ide/integrations/llamaindex/README.md +81 -0
  48. package/ide/integrations/llamaindex/llama_index/storage/chat_store/superlocalmemory/__init__.py +9 -0
  49. package/ide/integrations/llamaindex/llama_index/storage/chat_store/superlocalmemory/base.py +316 -0
  50. package/ide/integrations/llamaindex/pyproject.toml +43 -0
  51. package/{src/lifecycle → ide/integrations/llamaindex}/tests/__init__.py +1 -2
  52. package/ide/integrations/llamaindex/tests/test_chat_store.py +294 -0
  53. package/ide/integrations/llamaindex/tests/test_security.py +241 -0
  54. package/{skills → ide/skills}/slm-build-graph/SKILL.md +6 -6
  55. package/{skills → ide/skills}/slm-list-recent/SKILL.md +5 -5
  56. package/{skills → ide/skills}/slm-recall/SKILL.md +5 -5
  57. package/{skills → ide/skills}/slm-remember/SKILL.md +6 -6
  58. package/{skills → ide/skills}/slm-show-patterns/SKILL.md +7 -7
  59. package/{skills → ide/skills}/slm-status/SKILL.md +9 -9
  60. package/{skills → ide/skills}/slm-switch-profile/SKILL.md +9 -9
  61. package/package.json +13 -22
  62. package/pyproject.toml +85 -0
  63. package/scripts/build-dmg.sh +417 -0
  64. package/scripts/install-skills.ps1 +334 -0
  65. package/scripts/postinstall.js +2 -2
  66. package/scripts/start-dashboard.ps1 +52 -0
  67. package/scripts/start-dashboard.sh +41 -0
  68. package/scripts/sync-wiki.ps1 +127 -0
  69. package/scripts/sync-wiki.sh +82 -0
  70. package/scripts/test-dmg.sh +161 -0
  71. package/scripts/test-npm-package.ps1 +252 -0
  72. package/scripts/test-npm-package.sh +207 -0
  73. package/scripts/verify-install.ps1 +294 -0
  74. package/scripts/verify-install.sh +266 -0
  75. package/src/superlocalmemory/__init__.py +0 -0
  76. package/src/superlocalmemory/attribution/__init__.py +9 -0
  77. package/src/superlocalmemory/attribution/mathematical_dna.py +235 -0
  78. package/src/superlocalmemory/attribution/signer.py +153 -0
  79. package/src/superlocalmemory/attribution/watermark.py +189 -0
  80. package/src/superlocalmemory/cli/__init__.py +5 -0
  81. package/src/superlocalmemory/cli/commands.py +245 -0
  82. package/src/superlocalmemory/cli/main.py +89 -0
  83. package/src/superlocalmemory/cli/migrate_cmd.py +55 -0
  84. package/src/superlocalmemory/cli/post_install.py +99 -0
  85. package/src/superlocalmemory/cli/setup_wizard.py +129 -0
  86. package/src/superlocalmemory/compliance/__init__.py +0 -0
  87. package/src/superlocalmemory/compliance/abac.py +204 -0
  88. package/src/superlocalmemory/compliance/audit.py +314 -0
  89. package/src/superlocalmemory/compliance/eu_ai_act.py +131 -0
  90. package/src/superlocalmemory/compliance/gdpr.py +294 -0
  91. package/src/superlocalmemory/compliance/lifecycle.py +158 -0
  92. package/src/superlocalmemory/compliance/retention.py +232 -0
  93. package/src/superlocalmemory/compliance/scheduler.py +148 -0
  94. package/src/superlocalmemory/core/__init__.py +0 -0
  95. package/src/superlocalmemory/core/config.py +391 -0
  96. package/src/superlocalmemory/core/embeddings.py +293 -0
  97. package/src/superlocalmemory/core/engine.py +701 -0
  98. package/src/superlocalmemory/core/hooks.py +65 -0
  99. package/src/superlocalmemory/core/maintenance.py +172 -0
  100. package/src/superlocalmemory/core/modes.py +140 -0
  101. package/src/superlocalmemory/core/profiles.py +234 -0
  102. package/src/superlocalmemory/core/registry.py +117 -0
  103. package/src/superlocalmemory/dynamics/__init__.py +0 -0
  104. package/src/superlocalmemory/dynamics/fisher_langevin_coupling.py +223 -0
  105. package/src/superlocalmemory/encoding/__init__.py +0 -0
  106. package/src/superlocalmemory/encoding/consolidator.py +485 -0
  107. package/src/superlocalmemory/encoding/emotional.py +125 -0
  108. package/src/superlocalmemory/encoding/entity_resolver.py +525 -0
  109. package/src/superlocalmemory/encoding/entropy_gate.py +104 -0
  110. package/src/superlocalmemory/encoding/fact_extractor.py +775 -0
  111. package/src/superlocalmemory/encoding/foresight.py +91 -0
  112. package/src/superlocalmemory/encoding/graph_builder.py +302 -0
  113. package/src/superlocalmemory/encoding/observation_builder.py +160 -0
  114. package/src/superlocalmemory/encoding/scene_builder.py +183 -0
  115. package/src/superlocalmemory/encoding/signal_inference.py +90 -0
  116. package/src/superlocalmemory/encoding/temporal_parser.py +426 -0
  117. package/src/superlocalmemory/encoding/type_router.py +235 -0
  118. package/src/superlocalmemory/hooks/__init__.py +3 -0
  119. package/src/superlocalmemory/hooks/auto_capture.py +111 -0
  120. package/src/superlocalmemory/hooks/auto_recall.py +93 -0
  121. package/src/superlocalmemory/hooks/ide_connector.py +204 -0
  122. package/src/superlocalmemory/hooks/rules_engine.py +99 -0
  123. package/src/superlocalmemory/infra/__init__.py +3 -0
  124. package/src/superlocalmemory/infra/auth_middleware.py +82 -0
  125. package/src/superlocalmemory/infra/backup.py +317 -0
  126. package/src/superlocalmemory/infra/cache_manager.py +267 -0
  127. package/src/superlocalmemory/infra/event_bus.py +381 -0
  128. package/src/superlocalmemory/infra/rate_limiter.py +135 -0
  129. package/src/{webhook_dispatcher.py → superlocalmemory/infra/webhook_dispatcher.py} +104 -101
  130. package/src/superlocalmemory/learning/__init__.py +0 -0
  131. package/src/superlocalmemory/learning/adaptive.py +172 -0
  132. package/src/superlocalmemory/learning/behavioral.py +490 -0
  133. package/src/superlocalmemory/learning/behavioral_listener.py +94 -0
  134. package/src/superlocalmemory/learning/bootstrap.py +298 -0
  135. package/src/superlocalmemory/learning/cross_project.py +399 -0
  136. package/src/superlocalmemory/learning/database.py +376 -0
  137. package/src/superlocalmemory/learning/engagement.py +323 -0
  138. package/src/superlocalmemory/learning/features.py +138 -0
  139. package/src/superlocalmemory/learning/feedback.py +316 -0
  140. package/src/superlocalmemory/learning/outcomes.py +255 -0
  141. package/src/superlocalmemory/learning/project_context.py +366 -0
  142. package/src/superlocalmemory/learning/ranker.py +155 -0
  143. package/src/superlocalmemory/learning/source_quality.py +303 -0
  144. package/src/superlocalmemory/learning/workflows.py +309 -0
  145. package/src/superlocalmemory/llm/__init__.py +0 -0
  146. package/src/superlocalmemory/llm/backbone.py +316 -0
  147. package/src/superlocalmemory/math/__init__.py +0 -0
  148. package/src/superlocalmemory/math/fisher.py +356 -0
  149. package/src/superlocalmemory/math/langevin.py +398 -0
  150. package/src/superlocalmemory/math/sheaf.py +257 -0
  151. package/src/superlocalmemory/mcp/__init__.py +0 -0
  152. package/src/superlocalmemory/mcp/resources.py +245 -0
  153. package/src/superlocalmemory/mcp/server.py +61 -0
  154. package/src/superlocalmemory/mcp/tools.py +18 -0
  155. package/src/superlocalmemory/mcp/tools_core.py +305 -0
  156. package/src/superlocalmemory/mcp/tools_v28.py +223 -0
  157. package/src/superlocalmemory/mcp/tools_v3.py +286 -0
  158. package/src/superlocalmemory/retrieval/__init__.py +0 -0
  159. package/src/superlocalmemory/retrieval/agentic.py +295 -0
  160. package/src/superlocalmemory/retrieval/ann_index.py +223 -0
  161. package/src/superlocalmemory/retrieval/bm25_channel.py +185 -0
  162. package/src/superlocalmemory/retrieval/bridge_discovery.py +170 -0
  163. package/src/superlocalmemory/retrieval/engine.py +390 -0
  164. package/src/superlocalmemory/retrieval/entity_channel.py +179 -0
  165. package/src/superlocalmemory/retrieval/fusion.py +78 -0
  166. package/src/superlocalmemory/retrieval/profile_channel.py +105 -0
  167. package/src/superlocalmemory/retrieval/reranker.py +154 -0
  168. package/src/superlocalmemory/retrieval/semantic_channel.py +232 -0
  169. package/src/superlocalmemory/retrieval/strategy.py +96 -0
  170. package/src/superlocalmemory/retrieval/temporal_channel.py +175 -0
  171. package/src/superlocalmemory/server/__init__.py +1 -0
  172. package/src/superlocalmemory/server/api.py +248 -0
  173. package/src/superlocalmemory/server/routes/__init__.py +4 -0
  174. package/src/superlocalmemory/server/routes/agents.py +107 -0
  175. package/src/superlocalmemory/server/routes/backup.py +91 -0
  176. package/src/superlocalmemory/server/routes/behavioral.py +127 -0
  177. package/src/superlocalmemory/server/routes/compliance.py +160 -0
  178. package/src/superlocalmemory/server/routes/data_io.py +188 -0
  179. package/src/superlocalmemory/server/routes/events.py +183 -0
  180. package/src/superlocalmemory/server/routes/helpers.py +85 -0
  181. package/src/superlocalmemory/server/routes/learning.py +273 -0
  182. package/src/superlocalmemory/server/routes/lifecycle.py +116 -0
  183. package/src/superlocalmemory/server/routes/memories.py +399 -0
  184. package/src/superlocalmemory/server/routes/profiles.py +219 -0
  185. package/src/superlocalmemory/server/routes/stats.py +346 -0
  186. package/src/superlocalmemory/server/routes/v3_api.py +365 -0
  187. package/src/superlocalmemory/server/routes/ws.py +82 -0
  188. package/src/superlocalmemory/server/security_middleware.py +57 -0
  189. package/src/superlocalmemory/server/ui.py +245 -0
  190. package/src/superlocalmemory/storage/__init__.py +0 -0
  191. package/src/superlocalmemory/storage/access_control.py +182 -0
  192. package/src/superlocalmemory/storage/database.py +594 -0
  193. package/src/superlocalmemory/storage/migrations.py +303 -0
  194. package/src/superlocalmemory/storage/models.py +406 -0
  195. package/src/superlocalmemory/storage/schema.py +726 -0
  196. package/src/superlocalmemory/storage/v2_migrator.py +317 -0
  197. package/src/superlocalmemory/trust/__init__.py +0 -0
  198. package/src/superlocalmemory/trust/gate.py +130 -0
  199. package/src/superlocalmemory/trust/provenance.py +124 -0
  200. package/src/superlocalmemory/trust/scorer.py +347 -0
  201. package/src/superlocalmemory/trust/signals.py +153 -0
  202. package/ui/index.html +278 -5
  203. package/ui/js/auto-settings.js +70 -0
  204. package/ui/js/dashboard.js +90 -0
  205. package/ui/js/fact-detail.js +92 -0
  206. package/ui/js/feedback.js +2 -2
  207. package/ui/js/ide-status.js +102 -0
  208. package/ui/js/math-health.js +98 -0
  209. package/ui/js/recall-lab.js +127 -0
  210. package/ui/js/settings.js +2 -2
  211. package/ui/js/trust-dashboard.js +73 -0
  212. package/api_server.py +0 -724
  213. package/bin/aider-smart +0 -72
  214. package/bin/superlocalmemoryv2-learning +0 -4
  215. package/bin/superlocalmemoryv2-list +0 -3
  216. package/bin/superlocalmemoryv2-patterns +0 -4
  217. package/bin/superlocalmemoryv2-profile +0 -3
  218. package/bin/superlocalmemoryv2-recall +0 -3
  219. package/bin/superlocalmemoryv2-remember +0 -3
  220. package/bin/superlocalmemoryv2-reset +0 -3
  221. package/bin/superlocalmemoryv2-status +0 -3
  222. package/configs/chatgpt-desktop-mcp.json +0 -16
  223. package/configs/cursor-mcp.json +0 -15
  224. package/hooks/memory-list-skill.js +0 -139
  225. package/hooks/memory-profile-skill.js +0 -273
  226. package/hooks/memory-recall-skill.js +0 -114
  227. package/hooks/memory-remember-skill.js +0 -127
  228. package/hooks/memory-reset-skill.js +0 -274
  229. package/mcp_server.py +0 -1808
  230. package/requirements-core.txt +0 -22
  231. package/requirements-learning.txt +0 -12
  232. package/requirements.txt +0 -12
  233. package/src/agent_registry.py +0 -411
  234. package/src/auth_middleware.py +0 -61
  235. package/src/auto_backup.py +0 -459
  236. package/src/behavioral/__init__.py +0 -49
  237. package/src/behavioral/behavioral_listener.py +0 -203
  238. package/src/behavioral/behavioral_patterns.py +0 -275
  239. package/src/behavioral/cross_project_transfer.py +0 -206
  240. package/src/behavioral/outcome_inference.py +0 -194
  241. package/src/behavioral/outcome_tracker.py +0 -193
  242. package/src/behavioral/tests/__init__.py +0 -4
  243. package/src/behavioral/tests/test_behavioral_integration.py +0 -108
  244. package/src/behavioral/tests/test_behavioral_patterns.py +0 -150
  245. package/src/behavioral/tests/test_cross_project_transfer.py +0 -142
  246. package/src/behavioral/tests/test_mcp_behavioral.py +0 -139
  247. package/src/behavioral/tests/test_mcp_report_outcome.py +0 -117
  248. package/src/behavioral/tests/test_outcome_inference.py +0 -107
  249. package/src/behavioral/tests/test_outcome_tracker.py +0 -96
  250. package/src/cache_manager.py +0 -518
  251. package/src/compliance/__init__.py +0 -48
  252. package/src/compliance/abac_engine.py +0 -149
  253. package/src/compliance/abac_middleware.py +0 -116
  254. package/src/compliance/audit_db.py +0 -215
  255. package/src/compliance/audit_logger.py +0 -148
  256. package/src/compliance/retention_manager.py +0 -289
  257. package/src/compliance/retention_scheduler.py +0 -186
  258. package/src/compliance/tests/__init__.py +0 -4
  259. package/src/compliance/tests/test_abac_enforcement.py +0 -95
  260. package/src/compliance/tests/test_abac_engine.py +0 -124
  261. package/src/compliance/tests/test_abac_mcp_integration.py +0 -118
  262. package/src/compliance/tests/test_audit_db.py +0 -123
  263. package/src/compliance/tests/test_audit_logger.py +0 -98
  264. package/src/compliance/tests/test_mcp_audit.py +0 -128
  265. package/src/compliance/tests/test_mcp_retention_policy.py +0 -125
  266. package/src/compliance/tests/test_retention_manager.py +0 -131
  267. package/src/compliance/tests/test_retention_scheduler.py +0 -99
  268. package/src/compression/__init__.py +0 -25
  269. package/src/compression/cli.py +0 -150
  270. package/src/compression/cold_storage.py +0 -217
  271. package/src/compression/config.py +0 -72
  272. package/src/compression/orchestrator.py +0 -133
  273. package/src/compression/tier2_compressor.py +0 -228
  274. package/src/compression/tier3_compressor.py +0 -153
  275. package/src/compression/tier_classifier.py +0 -148
  276. package/src/db_connection_manager.py +0 -536
  277. package/src/embedding_engine.py +0 -63
  278. package/src/embeddings/__init__.py +0 -47
  279. package/src/embeddings/cache.py +0 -70
  280. package/src/embeddings/cli.py +0 -113
  281. package/src/embeddings/constants.py +0 -47
  282. package/src/embeddings/database.py +0 -91
  283. package/src/embeddings/engine.py +0 -247
  284. package/src/embeddings/model_loader.py +0 -145
  285. package/src/event_bus.py +0 -562
  286. package/src/graph/__init__.py +0 -36
  287. package/src/graph/build_helpers.py +0 -74
  288. package/src/graph/cli.py +0 -87
  289. package/src/graph/cluster_builder.py +0 -188
  290. package/src/graph/cluster_summary.py +0 -148
  291. package/src/graph/constants.py +0 -47
  292. package/src/graph/edge_builder.py +0 -162
  293. package/src/graph/entity_extractor.py +0 -95
  294. package/src/graph/graph_core.py +0 -226
  295. package/src/graph/graph_search.py +0 -231
  296. package/src/graph/hierarchical.py +0 -207
  297. package/src/graph/schema.py +0 -99
  298. package/src/graph_engine.py +0 -52
  299. package/src/hnsw_index.py +0 -628
  300. package/src/hybrid_search.py +0 -46
  301. package/src/learning/__init__.py +0 -217
  302. package/src/learning/adaptive_ranker.py +0 -682
  303. package/src/learning/bootstrap/__init__.py +0 -69
  304. package/src/learning/bootstrap/constants.py +0 -93
  305. package/src/learning/bootstrap/db_queries.py +0 -316
  306. package/src/learning/bootstrap/sampling.py +0 -82
  307. package/src/learning/bootstrap/text_utils.py +0 -71
  308. package/src/learning/cross_project_aggregator.py +0 -857
  309. package/src/learning/db/__init__.py +0 -40
  310. package/src/learning/db/constants.py +0 -44
  311. package/src/learning/db/schema.py +0 -279
  312. package/src/learning/engagement_tracker.py +0 -628
  313. package/src/learning/feature_extractor.py +0 -708
  314. package/src/learning/feedback_collector.py +0 -806
  315. package/src/learning/learning_db.py +0 -915
  316. package/src/learning/project_context_manager.py +0 -572
  317. package/src/learning/ranking/__init__.py +0 -33
  318. package/src/learning/ranking/constants.py +0 -84
  319. package/src/learning/ranking/helpers.py +0 -278
  320. package/src/learning/source_quality_scorer.py +0 -676
  321. package/src/learning/synthetic_bootstrap.py +0 -755
  322. package/src/learning/tests/test_adaptive_ranker.py +0 -325
  323. package/src/learning/tests/test_adaptive_ranker_v28.py +0 -60
  324. package/src/learning/tests/test_aggregator.py +0 -306
  325. package/src/learning/tests/test_auto_retrain_v28.py +0 -35
  326. package/src/learning/tests/test_e2e_ranking_v28.py +0 -82
  327. package/src/learning/tests/test_feature_extractor_v28.py +0 -93
  328. package/src/learning/tests/test_feedback_collector.py +0 -294
  329. package/src/learning/tests/test_learning_db.py +0 -602
  330. package/src/learning/tests/test_learning_db_v28.py +0 -110
  331. package/src/learning/tests/test_learning_init_v28.py +0 -48
  332. package/src/learning/tests/test_outcome_signals.py +0 -48
  333. package/src/learning/tests/test_project_context.py +0 -292
  334. package/src/learning/tests/test_schema_migration.py +0 -319
  335. package/src/learning/tests/test_signal_inference.py +0 -397
  336. package/src/learning/tests/test_source_quality.py +0 -351
  337. package/src/learning/tests/test_synthetic_bootstrap.py +0 -429
  338. package/src/learning/tests/test_workflow_miner.py +0 -318
  339. package/src/learning/workflow_pattern_miner.py +0 -655
  340. package/src/lifecycle/__init__.py +0 -54
  341. package/src/lifecycle/bounded_growth.py +0 -239
  342. package/src/lifecycle/compaction_engine.py +0 -226
  343. package/src/lifecycle/lifecycle_engine.py +0 -355
  344. package/src/lifecycle/lifecycle_evaluator.py +0 -257
  345. package/src/lifecycle/lifecycle_scheduler.py +0 -130
  346. package/src/lifecycle/retention_policy.py +0 -285
  347. package/src/lifecycle/tests/test_bounded_growth.py +0 -193
  348. package/src/lifecycle/tests/test_compaction.py +0 -179
  349. package/src/lifecycle/tests/test_lifecycle_engine.py +0 -137
  350. package/src/lifecycle/tests/test_lifecycle_evaluation.py +0 -177
  351. package/src/lifecycle/tests/test_lifecycle_scheduler.py +0 -127
  352. package/src/lifecycle/tests/test_lifecycle_search.py +0 -109
  353. package/src/lifecycle/tests/test_mcp_compact.py +0 -149
  354. package/src/lifecycle/tests/test_mcp_lifecycle_status.py +0 -114
  355. package/src/lifecycle/tests/test_retention_policy.py +0 -162
  356. package/src/mcp_tools_v28.py +0 -281
  357. package/src/memory/__init__.py +0 -36
  358. package/src/memory/cli.py +0 -205
  359. package/src/memory/constants.py +0 -39
  360. package/src/memory/helpers.py +0 -28
  361. package/src/memory/schema.py +0 -166
  362. package/src/memory-profiles.py +0 -595
  363. package/src/memory-reset.py +0 -491
  364. package/src/memory_compression.py +0 -989
  365. package/src/memory_store_v2.py +0 -1155
  366. package/src/migrate_v1_to_v2.py +0 -629
  367. package/src/pattern_learner.py +0 -34
  368. package/src/patterns/__init__.py +0 -24
  369. package/src/patterns/analyzers.py +0 -251
  370. package/src/patterns/learner.py +0 -271
  371. package/src/patterns/scoring.py +0 -171
  372. package/src/patterns/store.py +0 -225
  373. package/src/patterns/terminology.py +0 -140
  374. package/src/provenance_tracker.py +0 -312
  375. package/src/qualixar_attribution.py +0 -139
  376. package/src/qualixar_watermark.py +0 -78
  377. package/src/query_optimizer.py +0 -511
  378. package/src/rate_limiter.py +0 -83
  379. package/src/search/__init__.py +0 -20
  380. package/src/search/cli.py +0 -77
  381. package/src/search/constants.py +0 -26
  382. package/src/search/engine.py +0 -241
  383. package/src/search/fusion.py +0 -122
  384. package/src/search/index_loader.py +0 -114
  385. package/src/search/methods.py +0 -162
  386. package/src/search_engine_v2.py +0 -401
  387. package/src/setup_validator.py +0 -482
  388. package/src/subscription_manager.py +0 -391
  389. package/src/tree/__init__.py +0 -59
  390. package/src/tree/builder.py +0 -185
  391. package/src/tree/nodes.py +0 -202
  392. package/src/tree/queries.py +0 -257
  393. package/src/tree/schema.py +0 -80
  394. package/src/tree_manager.py +0 -19
  395. package/src/trust/__init__.py +0 -45
  396. package/src/trust/constants.py +0 -66
  397. package/src/trust/queries.py +0 -157
  398. package/src/trust/schema.py +0 -95
  399. package/src/trust/scorer.py +0 -299
  400. package/src/trust/signals.py +0 -95
  401. package/src/trust_scorer.py +0 -44
  402. package/ui/app.js +0 -1588
  403. package/ui/js/graph-cytoscape-monolithic-backup.js +0 -1168
  404. package/ui/js/graph-cytoscape.js +0 -1168
  405. package/ui/js/graph-d3-backup.js +0 -32
  406. package/ui/js/graph.js +0 -32
  407. package/ui_server.py +0 -286
  408. /package/docs/{ACCESSIBILITY.md → v2-archive/ACCESSIBILITY.md} +0 -0
  409. /package/docs/{ARCHITECTURE.md → v2-archive/ARCHITECTURE.md} +0 -0
  410. /package/docs/{CLI-COMMANDS-REFERENCE.md → v2-archive/CLI-COMMANDS-REFERENCE.md} +0 -0
  411. /package/docs/{COMPRESSION-README.md → v2-archive/COMPRESSION-README.md} +0 -0
  412. /package/docs/{FRAMEWORK-INTEGRATIONS.md → v2-archive/FRAMEWORK-INTEGRATIONS.md} +0 -0
  413. /package/docs/{MCP-MANUAL-SETUP.md → v2-archive/MCP-MANUAL-SETUP.md} +0 -0
  414. /package/docs/{MCP-TROUBLESHOOTING.md → v2-archive/MCP-TROUBLESHOOTING.md} +0 -0
  415. /package/docs/{PATTERN-LEARNING.md → v2-archive/PATTERN-LEARNING.md} +0 -0
  416. /package/docs/{PROFILES-GUIDE.md → v2-archive/PROFILES-GUIDE.md} +0 -0
  417. /package/docs/{RESET-GUIDE.md → v2-archive/RESET-GUIDE.md} +0 -0
  418. /package/docs/{SEARCH-ENGINE-V2.2.0.md → v2-archive/SEARCH-ENGINE-V2.2.0.md} +0 -0
  419. /package/docs/{SEARCH-INTEGRATION-GUIDE.md → v2-archive/SEARCH-INTEGRATION-GUIDE.md} +0 -0
  420. /package/docs/{UI-SERVER.md → v2-archive/UI-SERVER.md} +0 -0
  421. /package/docs/{UNIVERSAL-INTEGRATION.md → v2-archive/UNIVERSAL-INTEGRATION.md} +0 -0
  422. /package/docs/{V2.2.0-OPTIONAL-SEARCH.md → v2-archive/V2.2.0-OPTIONAL-SEARCH.md} +0 -0
  423. /package/docs/{WINDOWS-INSTALL-README.txt → v2-archive/WINDOWS-INSTALL-README.txt} +0 -0
  424. /package/docs/{WINDOWS-POST-INSTALL.txt → v2-archive/WINDOWS-POST-INSTALL.txt} +0 -0
  425. /package/docs/{example_graph_usage.py → v2-archive/example_graph_usage.py} +0 -0
  426. /package/{completions → ide/completions}/slm.bash +0 -0
  427. /package/{completions → ide/completions}/slm.zsh +0 -0
  428. /package/{configs → ide/configs}/cody-commands.json +0 -0
  429. /package/{install-skills.sh → scripts/install-skills.sh} +0 -0
  430. /package/{install.ps1 → scripts/install.ps1} +0 -0
  431. /package/{install.sh → scripts/install.sh} +0 -0
@@ -1,1155 +0,0 @@
1
- #!/usr/bin/env python3
2
- # SPDX-License-Identifier: MIT
3
- # Copyright (c) 2026 SuperLocalMemory (superlocalmemory.com)
4
- """
5
- MemoryStore V2 - Extended Memory System with Tree and Graph Support
6
- Maintains backward compatibility with V1 API while adding:
7
- - Tree hierarchy (parent_id, tree_path, depth)
8
- - Categories and clusters
9
- - Tier-based progressive summarization
10
- - Enhanced search with tier filtering
11
- """
12
-
13
- import sqlite3
14
- import json
15
- import hashlib
16
- from datetime import datetime
17
- from pathlib import Path
18
- from typing import Optional, List, Dict, Any, Tuple
19
- from contextlib import contextmanager
20
-
21
- # Connection Manager (v2.5+) — fixes "database is locked" with multiple agents
22
- try:
23
- from db_connection_manager import DbConnectionManager
24
- USE_CONNECTION_MANAGER = True
25
- except ImportError:
26
- USE_CONNECTION_MANAGER = False
27
-
28
- # Event Bus (v2.5+) — real-time event broadcasting
29
- try:
30
- from event_bus import EventBus
31
- USE_EVENT_BUS = True
32
- except ImportError:
33
- USE_EVENT_BUS = False
34
-
35
- # Agent Registry + Provenance (v2.5+) — tracks who writes what
36
- try:
37
- from agent_registry import AgentRegistry
38
- from provenance_tracker import ProvenanceTracker
39
- USE_PROVENANCE = True
40
- except ImportError:
41
- USE_PROVENANCE = False
42
-
43
- # Trust Scorer (v2.5+) — silent signal collection, no enforcement
44
- try:
45
- from trust_scorer import TrustScorer
46
- USE_TRUST = True
47
- except ImportError:
48
- USE_TRUST = False
49
-
50
- # TF-IDF for local semantic search (no external APIs)
51
- try:
52
- from sklearn.feature_extraction.text import TfidfVectorizer
53
- from sklearn.metrics.pairwise import cosine_similarity
54
- import numpy as np
55
- SKLEARN_AVAILABLE = True
56
- except ImportError:
57
- SKLEARN_AVAILABLE = False
58
-
59
- import logging
60
- logger = logging.getLogger(__name__)
61
-
62
- # Import constants and utilities from memory package
63
- from memory.constants import (
64
- MEMORY_DIR, DB_PATH, VECTORS_PATH,
65
- MAX_CONTENT_SIZE, MAX_SUMMARY_SIZE, MAX_TAG_LENGTH, MAX_TAGS,
66
- CREATOR_METADATA
67
- )
68
- from memory.schema import (
69
- V2_COLUMNS, V28_MIGRATIONS, V2_INDEXES,
70
- get_memories_table_sql, get_sessions_table_sql, get_fts_table_sql,
71
- get_fts_trigger_insert_sql, get_fts_trigger_delete_sql, get_fts_trigger_update_sql,
72
- get_creator_metadata_table_sql
73
- )
74
- from memory.helpers import format_content
75
-
76
-
77
- class MemoryStoreV2:
78
- """
79
- Extended memory store with hierarchical tree and graph integration.
80
-
81
- Key Features:
82
- - Tree hierarchy via parent_id and materialized paths
83
- - Category-based organization
84
- - GraphRAG cluster integration
85
- - Tier-based access tracking
86
- - Backward compatible with V1 API
87
- """
88
-
89
- def __init__(self, db_path: Optional[Path] = None, profile: Optional[str] = None):
90
- """
91
- Initialize MemoryStore V2.
92
-
93
- Args:
94
- db_path: Optional custom database path (defaults to ~/.claude-memory/memory.db)
95
- profile: Optional profile override. If None, reads from profiles.json config.
96
- """
97
- self.db_path = db_path or DB_PATH
98
- self.vectors_path = VECTORS_PATH
99
- self._profile_override = profile
100
-
101
- # Connection Manager (v2.5+) — thread-safe WAL + write queue
102
- # Falls back to direct sqlite3.connect() if unavailable
103
- self._db_mgr = None
104
- if USE_CONNECTION_MANAGER:
105
- try:
106
- self._db_mgr = DbConnectionManager.get_instance(self.db_path)
107
- except Exception:
108
- pass # Fall back to direct connections
109
-
110
- # Event Bus (v2.5+) — real-time event broadcasting
111
- # If unavailable, events simply don't fire (core ops unaffected)
112
- self._event_bus = None
113
- if USE_EVENT_BUS:
114
- try:
115
- self._event_bus = EventBus.get_instance(self.db_path)
116
- except Exception:
117
- pass
118
-
119
- self._init_db()
120
-
121
- # Agent Registry + Provenance (v2.5+)
122
- # MUST run AFTER _init_db() — ProvenanceTracker ALTER TABLEs the memories table
123
- self._agent_registry = None
124
- self._provenance_tracker = None
125
- if USE_PROVENANCE:
126
- try:
127
- self._agent_registry = AgentRegistry.get_instance(self.db_path)
128
- self._provenance_tracker = ProvenanceTracker.get_instance(self.db_path)
129
- except Exception:
130
- pass
131
-
132
- # Trust Scorer (v2.5+) — silent signal collection
133
- self._trust_scorer = None
134
- if USE_TRUST:
135
- try:
136
- self._trust_scorer = TrustScorer.get_instance(self.db_path)
137
- except Exception:
138
- pass
139
-
140
- self.vectorizer = None
141
- self.vectors = None
142
- self.memory_ids = []
143
- self._last_vector_count = 0
144
- self._load_vectors()
145
-
146
- # HNSW index for O(log n) search (v2.6, optional)
147
- self._hnsw_index = None
148
- try:
149
- from hnsw_index import HNSWIndex
150
- if self.vectors is not None and len(self.memory_ids) > 0:
151
- dim = self.vectors.shape[1]
152
- self._hnsw_index = HNSWIndex(dimension=dim, max_elements=max(len(self.memory_ids) * 2, 1000))
153
- self._hnsw_index.build(self.vectors.toarray() if hasattr(self.vectors, 'toarray') else self.vectors, self.memory_ids)
154
- logger.info("HNSW index built with %d vectors", len(self.memory_ids))
155
- except (ImportError, Exception) as e:
156
- logger.debug("HNSW index not available: %s", e)
157
- self._hnsw_index = None
158
-
159
- # =========================================================================
160
- # Connection helpers — abstract ConnectionManager vs direct sqlite3
161
- # =========================================================================
162
-
163
- @contextmanager
164
- def _read_connection(self):
165
- """
166
- Context manager for read operations.
167
- Uses ConnectionManager pool if available, else direct sqlite3.connect().
168
- """
169
- if self._db_mgr:
170
- with self._db_mgr.read_connection() as conn:
171
- yield conn
172
- else:
173
- conn = sqlite3.connect(self.db_path)
174
- try:
175
- yield conn
176
- finally:
177
- conn.close()
178
-
179
- def _execute_write(self, callback):
180
- """
181
- Execute a write operation (INSERT/UPDATE/DELETE).
182
- Uses ConnectionManager write queue if available, else direct sqlite3.connect().
183
-
184
- Args:
185
- callback: Function(conn) that performs writes and calls conn.commit()
186
-
187
- Returns:
188
- Whatever the callback returns
189
- """
190
- if self._db_mgr:
191
- return self._db_mgr.execute_write(callback)
192
- else:
193
- conn = sqlite3.connect(self.db_path)
194
- try:
195
- result = callback(conn)
196
- return result
197
- finally:
198
- conn.close()
199
-
200
- def _emit_event(self, event_type: str, memory_id: Optional[int] = None, **kwargs):
201
- """
202
- Emit an event to the Event Bus (v2.5+).
203
-
204
- Progressive enhancement: if Event Bus is unavailable, this is a no-op.
205
- Event emission failure must NEVER break core memory operations.
206
-
207
- Args:
208
- event_type: Event type (e.g., "memory.created")
209
- memory_id: Associated memory ID (if applicable)
210
- **kwargs: Additional payload fields
211
- """
212
- if not self._event_bus:
213
- return
214
- try:
215
- self._event_bus.emit(
216
- event_type=event_type,
217
- memory_id=memory_id,
218
- payload=kwargs,
219
- importance=kwargs.get("importance", 5),
220
- )
221
- except Exception:
222
- pass # Event bus failure must never break core operations
223
-
224
- def _get_active_profile(self) -> str:
225
- """
226
- Get the currently active profile name.
227
- Reads from profiles.json config file. Falls back to 'default'.
228
- """
229
- if self._profile_override:
230
- return self._profile_override
231
-
232
- config_file = MEMORY_DIR / "profiles.json"
233
- if config_file.exists():
234
- try:
235
- with open(config_file, 'r') as f:
236
- config = json.load(f)
237
- return config.get('active_profile', 'default')
238
- except (json.JSONDecodeError, IOError):
239
- pass
240
- return 'default'
241
-
242
def _init_db(self):
    """Initialize SQLite database with V2 schema extensions.

    Safe to call on both fresh and existing databases: table/index DDL
    comes from the *_sql() helpers, column migrations tolerate
    "duplicate column" OperationalErrors, and attribution rows use
    INSERT OR IGNORE. All DDL runs in one serialized write callback.
    """
    def _do_init(conn):
        # Performs the full schema setup/migration on a single connection.
        cursor = conn.cursor()

        # Database integrity check (v2.6: detect corruption early).
        # quick_check returns the single row ('ok',) on a healthy file.
        try:
            result = cursor.execute('PRAGMA quick_check').fetchone()
            if result[0] != 'ok':
                logger.warning("Database integrity issue detected: %s", result[0])
        except Exception:
            logger.warning("Could not run database integrity check")

        # Check if we need to add V2 columns to existing table.
        # Snapshot is taken BEFORE the CREATE TABLE below, so a brand-new
        # database yields an empty set and every V2 column is attempted.
        cursor.execute("PRAGMA table_info(memories)")
        existing_columns = {row[1] for row in cursor.fetchall()}

        # Main memories table (V1 compatible + V2 extensions)
        cursor.execute(get_memories_table_sql())

        # Add missing V2 columns to existing table (migration support).
        # This handles upgrades from very old databases that might be missing columns.
        for col_name, col_type in V2_COLUMNS.items():
            if col_name not in existing_columns:
                try:
                    cursor.execute(f'ALTER TABLE memories ADD COLUMN {col_name} {col_type}')
                except sqlite3.OperationalError:
                    # Column might already exist from concurrent migration
                    pass

        # v2.8.0 schema migration — lifecycle + access control columns
        for col_name, col_type in V28_MIGRATIONS:
            try:
                cursor.execute(f"ALTER TABLE memories ADD COLUMN {col_name} {col_type}")
            except sqlite3.OperationalError:
                pass  # Column already exists

        # Sessions table (V1 compatible)
        cursor.execute(get_sessions_table_sql())

        # Full-text search index (V1 compatible)
        cursor.execute(get_fts_table_sql())

        # FTS Triggers (V1 compatible) — keep memories_fts in sync with memories
        cursor.execute(get_fts_trigger_insert_sql())
        cursor.execute(get_fts_trigger_delete_sql())
        cursor.execute(get_fts_trigger_update_sql())

        # Create indexes for V2 fields (safe for old databases without V2 columns)
        for idx_name, col_name in V2_INDEXES:
            try:
                cursor.execute(f'CREATE INDEX IF NOT EXISTS {idx_name} ON memories({col_name})')
            except sqlite3.OperationalError:
                # Column doesn't exist yet (old database) - skip index creation
                # Index will be created automatically on next schema upgrade
                pass

        # v2.8.0 indexes for lifecycle + access control
        try:
            cursor.execute("CREATE INDEX IF NOT EXISTS idx_lifecycle_state ON memories(lifecycle_state)")
            cursor.execute("CREATE INDEX IF NOT EXISTS idx_access_level ON memories(access_level)")
        except sqlite3.OperationalError:
            pass

        # Creator Attribution Metadata Table (REQUIRED by MIT License)
        # This table embeds creator information directly in the database
        cursor.execute(get_creator_metadata_table_sql())

        # Insert creator attribution (embedded in database body).
        # OR IGNORE preserves any values already present on re-init.
        for key, value in CREATOR_METADATA.items():
            cursor.execute('''
                INSERT OR IGNORE INTO creator_metadata (key, value)
                VALUES (?, ?)
            ''', (key, value))

        conn.commit()

    self._execute_write(_do_init)
320
-
321
- def _content_hash(self, content: str) -> str:
322
- """Generate hash for deduplication."""
323
- return hashlib.sha256(content.encode()).hexdigest()[:32]
324
-
325
def add_memory(
    self,
    content: str,
    summary: Optional[str] = None,
    project_path: Optional[str] = None,
    project_name: Optional[str] = None,
    tags: Optional[List[str]] = None,
    category: Optional[str] = None,
    parent_id: Optional[int] = None,
    memory_type: str = "session",
    importance: int = 5
) -> int:
    """
    Add a new memory with V2 enhancements.

    Args:
        content: Memory content (required, max 1MB)
        summary: Optional summary (max 10KB)
        project_path: Project absolute path
        project_name: Human-readable project name
        tags: List of tags (max 20 tags, 50 chars each)
        category: High-level category (e.g., "frontend", "backend")
        parent_id: Parent memory ID for hierarchical nesting
        memory_type: Type of memory ('session', 'long-term', 'reference')
        importance: Priority 1-10; out-of-range values are clamped, not rejected

    Raises:
        TypeError: If content is not a string
        ValueError: If content is empty or exceeds size limits

    Returns:
        Memory ID (int), or existing ID if duplicate detected
    """
    # SECURITY: Input validation
    if not isinstance(content, str):
        raise TypeError("Content must be a string")

    content = content.strip()
    if not content:
        raise ValueError("Content cannot be empty")

    if len(content) > MAX_CONTENT_SIZE:
        raise ValueError(f"Content exceeds maximum size of {MAX_CONTENT_SIZE} bytes")

    if summary and len(summary) > MAX_SUMMARY_SIZE:
        raise ValueError(f"Summary exceeds maximum size of {MAX_SUMMARY_SIZE} bytes")

    if tags:
        if len(tags) > MAX_TAGS:
            raise ValueError(f"Too many tags (max {MAX_TAGS})")
        for tag in tags:
            if len(tag) > MAX_TAG_LENGTH:
                raise ValueError(f"Tag '{tag[:20]}...' exceeds max length of {MAX_TAG_LENGTH}")

    if importance < 1 or importance > 10:
        importance = max(1, min(10, importance))  # Clamp to valid range

    content_hash = self._content_hash(content)
    active_profile = self._get_active_profile()

    def _do_add(conn):
        # Single write transaction: insert the row, then patch tree_path
        # with the freshly assigned rowid.
        cursor = conn.cursor()

        try:
            # Calculate tree_path and depth
            tree_path, depth = self._calculate_tree_position(cursor, parent_id)

            cursor.execute('''
                INSERT INTO memories (
                    content, summary, project_path, project_name, tags, category,
                    parent_id, tree_path, depth,
                    memory_type, importance, content_hash,
                    last_accessed, access_count, profile
                )
                VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
            ''', (
                content,
                summary,
                project_path,
                project_name,
                json.dumps(tags) if tags else None,
                category,
                parent_id,
                tree_path,
                depth,
                memory_type,
                importance,
                content_hash,
                datetime.now().isoformat(),
                0,
                active_profile
            ))
            memory_id = cursor.lastrowid

            # Update tree_path with actual memory_id
            # (the parent path could not include this row's ID before INSERT)
            if tree_path:
                tree_path = f"{tree_path}.{memory_id}"
            else:
                tree_path = str(memory_id)

            cursor.execute('UPDATE memories SET tree_path = ? WHERE id = ?', (tree_path, memory_id))

            conn.commit()
            return memory_id

        except sqlite3.IntegrityError:
            # Duplicate content: look up the existing row by content hash
            # and return its ID instead (-1 when no such row is found).
            cursor.execute('SELECT id FROM memories WHERE content_hash = ?', (content_hash,))
            result = cursor.fetchone()
            return result[0] if result else -1

    memory_id = self._execute_write(_do_add)

    # Rebuild vectors after adding (reads only — outside write callback)
    self._rebuild_vectors()

    # Emit event (v2.5 — Event Bus)
    # NOTE(review): this also fires on the duplicate path, where memory_id
    # refers to a pre-existing row — confirm that is intended.
    self._emit_event("memory.created", memory_id=memory_id,
                     content_preview="[redacted]", tags=tags,
                     project=project_name, importance=importance)

    # Record provenance (v2.5 — who created this memory)
    if self._provenance_tracker:
        try:
            self._provenance_tracker.record_provenance(memory_id)
        except Exception:
            pass  # Provenance failure must never break core

    # Trust signal (v2.5 — silent collection)
    if self._trust_scorer:
        try:
            self._trust_scorer.on_memory_created("user", memory_id, importance)
        except Exception:
            pass  # Trust failure must never break core

    # Auto-backup check (non-blocking)
    try:
        from auto_backup import AutoBackup
        backup = AutoBackup()
        backup.check_and_backup()
    except Exception:
        pass  # Backup failure must never break memory operations

    return memory_id
468
-
469
- def _calculate_tree_position(self, cursor: sqlite3.Cursor, parent_id: Optional[int]) -> Tuple[str, int]:
470
- """
471
- Calculate tree_path and depth for a new memory.
472
-
473
- Args:
474
- cursor: Database cursor
475
- parent_id: Parent memory ID (None for root level)
476
-
477
- Returns:
478
- Tuple of (tree_path, depth)
479
- """
480
- if parent_id is None:
481
- return ("", 0)
482
-
483
- cursor.execute('SELECT tree_path, depth FROM memories WHERE id = ?', (parent_id,))
484
- result = cursor.fetchone()
485
-
486
- if result:
487
- parent_path, parent_depth = result
488
- return (parent_path, parent_depth + 1)
489
- else:
490
- # Parent not found, treat as root
491
- return ("", 0)
492
-
493
def search(
    self,
    query: str,
    limit: int = 5,
    project_path: Optional[str] = None,
    memory_type: Optional[str] = None,
    category: Optional[str] = None,
    cluster_id: Optional[int] = None,
    min_importance: Optional[int] = None,
    lifecycle_states: Optional[tuple] = None,
    agent_context: Optional[Dict[str, Any]] = None,
) -> List[Dict[str, Any]]:
    """
    Search memories with enhanced V2 filtering.

    Three retrieval strategies are layered:
      0. HNSW approximate nearest-neighbour (O(log n), v2.6) when an
         index was built at startup;
      1. TF-IDF cosine similarity when scikit-learn is available and
         HNSW produced no hits;
      2. SQLite FTS keyword match, always run as a supplement.
    Hits are profile-scoped, pass the given filters, get their access
    stats bumped, and warm memories are reactivated on recall.

    Args:
        query: Search query string
        limit: Maximum results to return
        project_path: Filter by project path
        memory_type: Filter by memory type
        category: Filter by category
        cluster_id: Filter by graph cluster
        min_importance: Minimum importance score
        lifecycle_states: Tuple of lifecycle states to include (default: active, warm)
        agent_context: Reserved for agent-aware search; currently unused.

    Returns:
        List of memory dictionaries with scores, best first.
    """
    if lifecycle_states is None:
        lifecycle_states = ("active", "warm")

    results = []
    active_profile = self._get_active_profile()

    with self._read_connection() as conn:
        # Method 0: HNSW accelerated search (O(log n), v2.6)
        _hnsw_used = False
        if SKLEARN_AVAILABLE and self.vectorizer is not None and self.vectors is not None:
            try:
                from hnsw_index import HNSWIndex  # availability probe; raises ImportError when absent
                if hasattr(self, '_hnsw_index') and self._hnsw_index is not None:
                    query_vec = self.vectorizer.transform([query]).toarray().flatten()
                    hnsw_results = self._hnsw_index.search(query_vec, k=limit * 2)
                    cursor = conn.cursor()
                    for memory_id, score in hnsw_results:
                        if score > 0.05:
                            cursor.execute('''
                                SELECT id, content, summary, project_path, project_name, tags,
                                       category, parent_id, tree_path, depth,
                                       memory_type, importance, created_at, cluster_id,
                                       last_accessed, access_count, lifecycle_state
                                FROM memories WHERE id = ? AND profile = ?
                            ''', (memory_id, active_profile))
                            row = cursor.fetchone()
                            if row and self._apply_filters(row, project_path, memory_type,
                                                           category, cluster_id, min_importance, lifecycle_states):
                                results.append(self._row_to_dict(row, score, 'hnsw'))
                    _hnsw_used = len(results) > 0
            except Exception:
                # FIX: was `except (ImportError, Exception)` — Exception already
                # subsumes ImportError. HNSW unavailable: fall through to TF-IDF.
                pass

        # Method 1: TF-IDF semantic search (fallback if HNSW unavailable or returned no results)
        if not _hnsw_used and SKLEARN_AVAILABLE and self.vectorizer is not None and self.vectors is not None:
            try:
                query_vec = self.vectorizer.transform([query])
                similarities = cosine_similarity(query_vec, self.vectors).flatten()
                top_indices = np.argsort(similarities)[::-1][:limit * 2]

                cursor = conn.cursor()

                for idx in top_indices:
                    if idx < len(self.memory_ids):
                        memory_id = self.memory_ids[idx]
                        score = float(similarities[idx])

                        if score > 0.05:  # Minimum relevance threshold
                            cursor.execute('''
                                SELECT id, content, summary, project_path, project_name, tags,
                                       category, parent_id, tree_path, depth,
                                       memory_type, importance, created_at, cluster_id,
                                       last_accessed, access_count, lifecycle_state
                                FROM memories WHERE id = ? AND profile = ?
                            ''', (memory_id, active_profile))
                            row = cursor.fetchone()

                            if row and self._apply_filters(row, project_path, memory_type,
                                                           category, cluster_id, min_importance, lifecycle_states):
                                results.append(self._row_to_dict(row, score, 'semantic'))

            except Exception as e:
                # FIX: was print(); route through the module logger instead of stdout.
                logger.warning("Semantic search error: %s", e)

        # Method 2: FTS fallback/supplement
        cursor = conn.cursor()

        # Clean query for FTS: keep word characters only, OR-joined
        import re
        fts_query = ' OR '.join(re.findall(r'\w+', query))

        if fts_query:
            cursor.execute('''
                SELECT m.id, m.content, m.summary, m.project_path, m.project_name,
                       m.tags, m.category, m.parent_id, m.tree_path, m.depth,
                       m.memory_type, m.importance, m.created_at, m.cluster_id,
                       m.last_accessed, m.access_count, m.lifecycle_state
                FROM memories m
                JOIN memories_fts fts ON m.id = fts.rowid
                WHERE memories_fts MATCH ? AND m.profile = ?
                ORDER BY rank
                LIMIT ?
            ''', (fts_query, active_profile, limit))

            existing_ids = {r['id'] for r in results}

            for row in cursor.fetchall():
                if row[0] not in existing_ids:
                    if self._apply_filters(row, project_path, memory_type,
                                           category, cluster_id, min_importance, lifecycle_states):
                        results.append(self._row_to_dict(row, 0.5, 'keyword'))

    # Update access tracking for returned results (separate write connection)
    self._update_access_tracking([r['id'] for r in results])

    # Reactivate warm memories that were recalled (lifecycle v2.8)
    warm_ids = [r['id'] for r in results if r.get('lifecycle_state') == 'warm']
    if warm_ids:
        try:
            from lifecycle.lifecycle_engine import LifecycleEngine
            engine = LifecycleEngine(self.db_path)
            for mem_id in warm_ids:
                engine.reactivate_memory(mem_id, trigger="recall")
        except Exception:
            # FIX: was `except (ImportError, Exception)` — redundant tuple.
            pass  # Lifecycle engine not available

    # Sort by score and limit
    results.sort(key=lambda x: x['score'], reverse=True)
    return results[:limit]
630
-
631
- def _apply_filters(
632
- self,
633
- row: tuple,
634
- project_path: Optional[str],
635
- memory_type: Optional[str],
636
- category: Optional[str],
637
- cluster_id: Optional[int],
638
- min_importance: Optional[int],
639
- lifecycle_states: Optional[tuple] = None,
640
- ) -> bool:
641
- """Apply filter criteria to a database row."""
642
- # Row indices: project_path=3, category=6, memory_type=10, importance=11, cluster_id=13
643
- if project_path and row[3] != project_path:
644
- return False
645
- if memory_type and row[10] != memory_type:
646
- return False
647
- if category and row[6] != category:
648
- return False
649
- if cluster_id is not None and row[13] != cluster_id:
650
- return False
651
- if min_importance is not None and (row[11] or 0) < min_importance:
652
- return False
653
- # Lifecycle state filter (v2.8) — index 16 if present
654
- if lifecycle_states and len(row) > 16:
655
- state = row[16] or "active"
656
- if state not in lifecycle_states:
657
- return False
658
- return True
659
-
660
- def _check_abac(
661
- self,
662
- subject: Dict[str, Any],
663
- resource: Dict[str, Any],
664
- action: str,
665
- policy_path: Optional[str] = None,
666
- ) -> Dict[str, Any]:
667
- """Check ABAC policy for an access request.
668
-
669
- Returns {"allowed": True/False, "reason": str}.
670
- When ABAC engine is unavailable (import error, missing file),
671
- defaults to allow for backward compatibility with v2.7.
672
- """
673
- try:
674
- from compliance.abac_engine import ABACEngine
675
- if policy_path is None:
676
- policy_path = str(Path(self.db_path).parent / "abac_policies.json")
677
- engine = ABACEngine(config_path=policy_path)
678
- return engine.evaluate(subject=subject, resource=resource, action=action)
679
- except (ImportError, Exception):
680
- return {"allowed": True, "reason": "ABAC unavailable — default allow"}
681
-
682
- def _row_to_dict(self, row: tuple, score: float, match_type: str) -> Dict[str, Any]:
683
- """Convert database row to memory dictionary."""
684
- # Backward compatibility: Handle both JSON array and comma-separated string tags
685
- tags_raw = row[5]
686
- if tags_raw:
687
- try:
688
- # Try parsing as JSON (v2.1.0+ format)
689
- tags = json.loads(tags_raw)
690
- except (json.JSONDecodeError, TypeError):
691
- # Fall back to comma-separated string (v2.0.0 format)
692
- tags = [t.strip() for t in str(tags_raw).split(',') if t.strip()]
693
- else:
694
- tags = []
695
-
696
- return {
697
- 'id': row[0],
698
- 'content': row[1],
699
- 'summary': row[2],
700
- 'project_path': row[3],
701
- 'project_name': row[4],
702
- 'tags': tags,
703
- 'category': row[6],
704
- 'parent_id': row[7],
705
- 'tree_path': row[8],
706
- 'depth': row[9],
707
- 'memory_type': row[10],
708
- 'importance': row[11],
709
- 'created_at': row[12],
710
- 'cluster_id': row[13],
711
- 'last_accessed': row[14],
712
- 'access_count': row[15],
713
- 'lifecycle_state': row[16] if len(row) > 16 else 'active',
714
- 'score': score,
715
- 'match_type': match_type
716
- }
717
-
718
- def _update_access_tracking(self, memory_ids: List[int]):
719
- """Update last_accessed and access_count for retrieved memories."""
720
- if not memory_ids:
721
- return
722
-
723
- def _do_update(conn):
724
- cursor = conn.cursor()
725
- now = datetime.now().isoformat()
726
- for mem_id in memory_ids:
727
- cursor.execute('''
728
- UPDATE memories
729
- SET last_accessed = ?, access_count = access_count + 1
730
- WHERE id = ?
731
- ''', (now, mem_id))
732
- conn.commit()
733
-
734
- self._execute_write(_do_update)
735
-
736
def get_tree(self, parent_id: Optional[int] = None, max_depth: int = 3) -> List[Dict[str, Any]]:
    """
    Get hierarchical tree structure of memories.

    Args:
        parent_id: Root parent ID (None for top-level)
        max_depth: Maximum depth to retrieve

    Returns:
        List of memories ordered by materialized tree_path, so parents
        precede their descendants; empty list when parent_id is unknown.
    """
    active_profile = self._get_active_profile()

    with self._read_connection() as conn:
        cursor = conn.cursor()

        if parent_id is None:
            # Get root level memories (profile-scoped)
            cursor.execute('''
                SELECT id, content, summary, project_path, project_name, tags,
                       category, parent_id, tree_path, depth, memory_type, importance,
                       created_at, cluster_id, last_accessed, access_count
                FROM memories
                WHERE parent_id IS NULL AND depth <= ? AND profile = ?
                ORDER BY tree_path
            ''', (max_depth, active_profile))
        else:
            # Get subtree under specific parent: resolve its path first
            cursor.execute('''
                SELECT tree_path FROM memories WHERE id = ?
            ''', (parent_id,))
            result = cursor.fetchone()

            if not result:
                return []

            parent_path = result[0]
            # NOTE(review): unlike the root branch, this subtree query does
            # not filter on profile — descendants stored under another
            # profile could be returned. Confirm whether that is intended.
            cursor.execute('''
                SELECT id, content, summary, project_path, project_name, tags,
                       category, parent_id, tree_path, depth, memory_type, importance,
                       created_at, cluster_id, last_accessed, access_count
                FROM memories
                WHERE tree_path LIKE ? AND depth <= ?
                ORDER BY tree_path
            ''', (f"{parent_path}.%", max_depth))

        results = []
        for row in cursor.fetchall():
            results.append(self._row_to_dict(row, 1.0, 'tree'))

        return results
787
-
788
def update_tier(self, memory_id: int, new_tier: str, compressed_summary: Optional[str] = None):
    """
    Move a memory to a new tier for progressive summarization.

    Args:
        memory_id: Memory ID to update.
        new_tier: Target tier ('hot', 'warm', 'cold', 'archived').
        compressed_summary: Replacement summary for higher tiers; when
            omitted the existing summary is left untouched.
    """
    def _do_update(conn):
        stamp = datetime.now().isoformat()
        cursor = conn.cursor()
        if compressed_summary:
            cursor.execute('''
                UPDATE memories
                SET memory_type = ?, summary = ?, updated_at = ?
                WHERE id = ?
            ''', (new_tier, compressed_summary, stamp, memory_id))
        else:
            cursor.execute('''
                UPDATE memories
                SET memory_type = ?, updated_at = ?
                WHERE id = ?
            ''', (new_tier, stamp, memory_id))
        conn.commit()

    self._execute_write(_do_update)

    # Notify Event Bus listeners (v2.5); best-effort by design.
    self._emit_event("memory.updated", memory_id=memory_id, new_tier=new_tier)
817
-
818
def get_by_cluster(self, cluster_id: int) -> List[Dict[str, Any]]:
    """
    Fetch every memory assigned to one graph cluster, scoped to the
    active profile, most important and newest first.

    Args:
        cluster_id: Graph cluster ID.

    Returns:
        List of memory dicts (match_type 'cluster', score 1.0).
    """
    active_profile = self._get_active_profile()

    with self._read_connection() as conn:
        cursor = conn.cursor()
        cursor.execute('''
            SELECT id, content, summary, project_path, project_name, tags,
                   category, parent_id, tree_path, depth, memory_type, importance,
                   created_at, cluster_id, last_accessed, access_count
            FROM memories
            WHERE cluster_id = ? AND profile = ?
            ORDER BY importance DESC, created_at DESC
        ''', (cluster_id, active_profile))

        return [self._row_to_dict(row, 1.0, 'cluster') for row in cursor.fetchall()]
847
-
848
- # ========== V1 Backward Compatible Methods ==========
849
-
850
- def _load_vectors(self):
851
- """Load vectors by rebuilding from database (V1 compatible)."""
852
- self._rebuild_vectors()
853
-
854
def _rebuild_vectors(self):
    """Rebuild TF-IDF vectors from active profile memories (V1 compatible, backward compatible).

    No-op when scikit-learn is missing. As a v2.6 optimization, the
    rebuild is skipped once more than 10 vectors exist and the stored
    memory count changed by less than 5% — slightly stale vectors are
    accepted in exchange for avoiding a full refit on every write.
    """
    if not SKLEARN_AVAILABLE:
        return

    # Incremental optimization: skip rebuild if memory count hasn't changed much (v2.6)
    if hasattr(self, '_last_vector_count') and self._last_vector_count > 0:
        with self._read_connection() as conn:
            cursor = conn.cursor()
            active_profile = self._get_active_profile()
            # Old databases may lack the profile column; count accordingly.
            cursor.execute("PRAGMA table_info(memories)")
            columns = {row[1] for row in cursor.fetchall()}
            if 'profile' in columns:
                cursor.execute('SELECT COUNT(*) FROM memories WHERE profile = ?', (active_profile,))
            else:
                cursor.execute('SELECT COUNT(*) FROM memories')
            current_count = cursor.fetchone()[0]

        # Only rebuild if count changed by more than 5% or is the first few memories
        if self._last_vector_count > 10:
            change_ratio = abs(current_count - self._last_vector_count) / self._last_vector_count
            if change_ratio < 0.05:
                return  # Skip rebuild — vectors are still accurate enough

    active_profile = self._get_active_profile()

    with self._read_connection() as conn:
        cursor = conn.cursor()

        # Check which columns exist (backward compatibility for old databases)
        cursor.execute("PRAGMA table_info(memories)")
        columns = {row[1] for row in cursor.fetchall()}

        # Build SELECT query based on available columns, filtered by profile
        has_profile = 'profile' in columns
        if 'summary' in columns:
            if has_profile:
                cursor.execute('SELECT id, content, summary FROM memories WHERE profile = ?', (active_profile,))
            else:
                cursor.execute('SELECT id, content, summary FROM memories')
            rows = cursor.fetchall()
            # Vectorize content and summary together for richer matching.
            texts = [f"{row[1]} {row[2] or ''}" for row in rows]
        else:
            # Old database without summary column
            # NOTE(review): this branch ignores has_profile — presumably a
            # schema this old never has a profile column either; confirm.
            cursor.execute('SELECT id, content FROM memories')
            rows = cursor.fetchall()
            texts = [row[1] for row in rows]

        if not rows:
            # Empty store: clear all vector state so search falls back to FTS.
            self.vectorizer = None
            self.vectors = None
            self.memory_ids = []
            return

        # memory_ids[i] corresponds to vectors row i.
        self.memory_ids = [row[0] for row in rows]

        self.vectorizer = TfidfVectorizer(
            max_features=5000,
            stop_words='english',
            ngram_range=(1, 2)
        )
        self.vectors = self.vectorizer.fit_transform(texts)
        self._last_vector_count = len(self.memory_ids)

        # Save memory IDs as JSON (safe serialization)
        self.vectors_path.mkdir(exist_ok=True)
        with open(self.vectors_path / "memory_ids.json", 'w') as f:
            json.dump(self.memory_ids, f)
922
-
923
def get_recent(self, limit: int = 10, project_path: Optional[str] = None) -> List[Dict[str, Any]]:
    """Return the newest memories for the active profile (V1 compatible).

    Args:
        limit: Maximum number of rows to return.
        project_path: Optional exact project-path filter.

    Returns:
        Memory dicts ordered newest-first (match_type 'recent').
    """
    active_profile = self._get_active_profile()

    # Compose the WHERE clause; the optional project filter goes first
    # so parameter order matches the placeholders.
    where = 'profile = ?'
    params = [active_profile]
    if project_path:
        where = 'project_path = ? AND ' + where
        params.insert(0, project_path)
    params.append(limit)

    with self._read_connection() as conn:
        cursor = conn.cursor()
        cursor.execute(f'''
            SELECT id, content, summary, project_path, project_name, tags,
                   category, parent_id, tree_path, depth, memory_type, importance,
                   created_at, cluster_id, last_accessed, access_count
            FROM memories
            WHERE {where}
            ORDER BY created_at DESC
            LIMIT ?
        ''', params)

        return [self._row_to_dict(row, 1.0, 'recent') for row in cursor.fetchall()]
956
-
957
def get_by_id(self, memory_id: int) -> Optional[Dict[str, Any]]:
    """Fetch one memory by ID within the active profile (V1 compatible).

    A successful lookup also bumps the memory's access statistics.

    Returns:
        The memory dict (match_type 'direct'), or None when no row
        matches the ID in the current profile.
    """
    active_profile = self._get_active_profile()
    with self._read_connection() as conn:
        cursor = conn.cursor()
        cursor.execute('''
            SELECT id, content, summary, project_path, project_name, tags,
                   category, parent_id, tree_path, depth, memory_type, importance,
                   created_at, cluster_id, last_accessed, access_count
            FROM memories WHERE id = ? AND profile = ?
        ''', (memory_id, active_profile))
        hit = cursor.fetchone()

        if not hit:
            return None

        # Record the direct access (uses a separate write connection).
        self._update_access_tracking([memory_id])
        return self._row_to_dict(hit, 1.0, 'direct')
979
-
980
def delete_memory(self, memory_id: int) -> bool:
    """Remove a memory belonging to the active profile (V1 compatible).

    Returns:
        True when a row was actually deleted; False when no matching
        memory existed in this profile.
    """
    active_profile = self._get_active_profile()

    def _do_delete(conn):
        cursor = conn.cursor()
        cursor.execute('DELETE FROM memories WHERE id = ? AND profile = ?',
                       (memory_id, active_profile))
        removed = cursor.rowcount > 0
        conn.commit()
        return removed

    if not self._execute_write(_do_delete):
        return False

    # Keep the in-memory TF-IDF index in sync with the table.
    self._rebuild_vectors()
    self._emit_event("memory.deleted", memory_id=memory_id)
    # Silent trust signal (v2.5); never allowed to break deletion.
    if self._trust_scorer:
        try:
            self._trust_scorer.on_memory_deleted("user", memory_id)
        except Exception:
            pass
    return True
1004
-
1005
def list_all(self, limit: int = 50) -> List[Dict[str, Any]]:
    """List the newest memories in the active profile with preview titles.

    Each dict additionally carries a 'title' key — the first line of the
    content capped at 60 chars, with an ellipsis whenever the full
    content is longer than 60 chars (V1 compatibility).
    """
    active_profile = self._get_active_profile()

    with self._read_connection() as conn:
        cursor = conn.cursor()
        cursor.execute('''
            SELECT id, content, summary, project_path, project_name, tags,
                   category, parent_id, tree_path, depth, memory_type, importance,
                   created_at, cluster_id, last_accessed, access_count
            FROM memories
            WHERE profile = ?
            ORDER BY created_at DESC
            LIMIT ?
        ''', (active_profile, limit))

        listing = []
        for row in cursor.fetchall():
            entry = self._row_to_dict(row, 1.0, 'list')
            body = row[1]
            entry['title'] = body.split('\n')[0][:60] + ('...' if len(body) > 60 else '')
            listing.append(entry)

        return listing
1034
-
1035
def get_stats(self) -> Dict[str, Any]:
    """Summarize the memory store for the active profile (V1 compatible).

    Returns counts by type/category/cluster, project and date coverage,
    maximum tree depth, a cross-profile grand total, and whether
    scikit-learn (semantic search) is available.
    """
    active_profile = self._get_active_profile()

    with self._read_connection() as conn:
        cursor = conn.cursor()

        def scalar(sql, params=()):
            # Single-value aggregate helper.
            cursor.execute(sql, params)
            return cursor.fetchone()[0]

        profile_params = (active_profile,)

        total_memories = scalar('SELECT COUNT(*) FROM memories WHERE profile = ?', profile_params)
        total_projects = scalar(
            'SELECT COUNT(DISTINCT project_path) FROM memories WHERE project_path IS NOT NULL AND profile = ?',
            profile_params)

        cursor.execute('SELECT memory_type, COUNT(*) FROM memories WHERE profile = ? GROUP BY memory_type', profile_params)
        by_type = dict(cursor.fetchall())

        cursor.execute('SELECT category, COUNT(*) FROM memories WHERE category IS NOT NULL AND profile = ? GROUP BY category', profile_params)
        by_category = dict(cursor.fetchall())

        cursor.execute('SELECT MIN(created_at), MAX(created_at) FROM memories WHERE profile = ?', profile_params)
        earliest, latest = cursor.fetchone()

        total_clusters = scalar(
            'SELECT COUNT(DISTINCT cluster_id) FROM memories WHERE cluster_id IS NOT NULL AND profile = ?',
            profile_params)
        max_depth = scalar('SELECT MAX(depth) FROM memories WHERE profile = ?', profile_params) or 0

        # Grand total across all profiles, not just the active one.
        total_all_profiles = scalar('SELECT COUNT(*) FROM memories')

    return {
        'total_memories': total_memories,
        'total_all_profiles': total_all_profiles,
        'active_profile': active_profile,
        'total_projects': total_projects,
        'total_clusters': total_clusters,
        'max_tree_depth': max_depth,
        'by_type': by_type,
        'by_category': by_category,
        'date_range': {'earliest': earliest, 'latest': latest},
        'sklearn_available': SKLEARN_AVAILABLE
    }
1079
-
1080
def get_attribution(self) -> Dict[str, str]:
    """
    Get creator attribution information embedded in the database.

    This information is REQUIRED by MIT License and must be preserved.
    Removing or obscuring this attribution violates the license terms.

    Returns:
        Dictionary with creator information and attribution requirements,
        including Qualixar platform provenance.
    """
    attribution = {}
    with self._read_connection() as conn:
        cursor = conn.cursor()
        try:
            cursor.execute('SELECT key, value FROM creator_metadata')
            attribution = dict(cursor.fetchall())
        except sqlite3.OperationalError:
            # FIX: old databases predate the creator_metadata table. The
            # previous code claimed to handle that case but only checked
            # for an empty result and would have crashed on the missing
            # table; catch the error and use the defaults below.
            attribution = {}

    # Fallback if table doesn't exist yet or holds no rows (old databases)
    if not attribution:
        attribution = {
            'creator_name': 'Varun Pratap Bhardwaj',
            'creator_role': 'Solution Architect & Original Creator',
            'project_name': 'SuperLocalMemory V2',
            'license': 'MIT',
            'attribution_required': 'yes'
        }

    # Qualixar platform provenance (non-breaking additions)
    attribution['platform'] = 'Qualixar'
    attribution['website'] = 'https://superlocalmemory.com'
    attribution['author_website'] = 'https://varunpratap.com'

    return attribution
1112
-
1113
def export_for_context(self, query: str, max_tokens: int = 4000) -> str:
    """Export relevant memories formatted for Claude context injection (V1 compatible)."""
    matches = self.search(query, limit=10)

    if not matches:
        return "No relevant memories found."

    pieces = ["## Relevant Memory Context\n"]
    used = 0
    budget = max_tokens * 4  # Rough token to char conversion

    for record in matches:
        parts = [f"\n### Memory (Score: {record['score']:.2f})\n"]
        # Optional metadata lines, emitted only when present and truthy.
        for key, label in (('project_name', 'Project'),
                           ('category', 'Category'),
                           ('summary', 'Summary')):
            if record.get(key):
                parts.append(f"**{label}:** {record[key]}\n")
        content = record['content']
        if len(content) > 1000:
            parts.append(f"**Content:**\n{content[:1000]}...\n")
        else:
            parts.append(f"**Content:**\n{content}\n")
        entry = ''.join(parts)

        # Stop at the first entry that would blow the character budget.
        if used + len(entry) > budget:
            break

        pieces.append(entry)
        used += len(entry)

    text = ''.join(pieces)

    # Layer 3: Steganographic watermark on text exports
    try:
        from qualixar_watermark import encode_watermark
        text = encode_watermark(text, "slm")
    except ImportError:
        pass

    return text
-
1151
-
1152
# CLI interface (V1 compatible + V2 extensions)
if __name__ == "__main__":
    # Delegate all command-line handling to the packaged CLI module so this
    # file stays importable as a library with no side effects on import.
    from memory.cli import run_cli
    run_cli()