@memtensor/memos-local-openclaw-plugin 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (162)
  1. package/.env.example +11 -0
  2. package/README.md +251 -0
  3. package/SKILL.md +43 -0
  4. package/dist/capture/index.d.ts +16 -0
  5. package/dist/capture/index.d.ts.map +1 -0
  6. package/dist/capture/index.js +80 -0
  7. package/dist/capture/index.js.map +1 -0
  8. package/dist/config.d.ts +4 -0
  9. package/dist/config.d.ts.map +1 -0
  10. package/dist/config.js +96 -0
  11. package/dist/config.js.map +1 -0
  12. package/dist/embedding/index.d.ts +12 -0
  13. package/dist/embedding/index.d.ts.map +1 -0
  14. package/dist/embedding/index.js +75 -0
  15. package/dist/embedding/index.js.map +1 -0
  16. package/dist/embedding/local.d.ts +3 -0
  17. package/dist/embedding/local.d.ts.map +1 -0
  18. package/dist/embedding/local.js +65 -0
  19. package/dist/embedding/local.js.map +1 -0
  20. package/dist/embedding/providers/cohere.d.ts +4 -0
  21. package/dist/embedding/providers/cohere.d.ts.map +1 -0
  22. package/dist/embedding/providers/cohere.js +57 -0
  23. package/dist/embedding/providers/cohere.js.map +1 -0
  24. package/dist/embedding/providers/gemini.d.ts +3 -0
  25. package/dist/embedding/providers/gemini.d.ts.map +1 -0
  26. package/dist/embedding/providers/gemini.js +31 -0
  27. package/dist/embedding/providers/gemini.js.map +1 -0
  28. package/dist/embedding/providers/mistral.d.ts +3 -0
  29. package/dist/embedding/providers/mistral.d.ts.map +1 -0
  30. package/dist/embedding/providers/mistral.js +25 -0
  31. package/dist/embedding/providers/mistral.js.map +1 -0
  32. package/dist/embedding/providers/openai.d.ts +3 -0
  33. package/dist/embedding/providers/openai.d.ts.map +1 -0
  34. package/dist/embedding/providers/openai.js +35 -0
  35. package/dist/embedding/providers/openai.js.map +1 -0
  36. package/dist/embedding/providers/voyage.d.ts +3 -0
  37. package/dist/embedding/providers/voyage.d.ts.map +1 -0
  38. package/dist/embedding/providers/voyage.js +25 -0
  39. package/dist/embedding/providers/voyage.js.map +1 -0
  40. package/dist/index.d.ts +44 -0
  41. package/dist/index.d.ts.map +1 -0
  42. package/dist/index.js +75 -0
  43. package/dist/index.js.map +1 -0
  44. package/dist/ingest/chunker.d.ts +15 -0
  45. package/dist/ingest/chunker.d.ts.map +1 -0
  46. package/dist/ingest/chunker.js +193 -0
  47. package/dist/ingest/chunker.js.map +1 -0
  48. package/dist/ingest/dedup.d.ts +11 -0
  49. package/dist/ingest/dedup.d.ts.map +1 -0
  50. package/dist/ingest/dedup.js +29 -0
  51. package/dist/ingest/dedup.js.map +1 -0
  52. package/dist/ingest/providers/anthropic.d.ts +3 -0
  53. package/dist/ingest/providers/anthropic.d.ts.map +1 -0
  54. package/dist/ingest/providers/anthropic.js +33 -0
  55. package/dist/ingest/providers/anthropic.js.map +1 -0
  56. package/dist/ingest/providers/bedrock.d.ts +8 -0
  57. package/dist/ingest/providers/bedrock.d.ts.map +1 -0
  58. package/dist/ingest/providers/bedrock.js +41 -0
  59. package/dist/ingest/providers/bedrock.js.map +1 -0
  60. package/dist/ingest/providers/gemini.d.ts +3 -0
  61. package/dist/ingest/providers/gemini.d.ts.map +1 -0
  62. package/dist/ingest/providers/gemini.js +31 -0
  63. package/dist/ingest/providers/gemini.js.map +1 -0
  64. package/dist/ingest/providers/index.d.ts +9 -0
  65. package/dist/ingest/providers/index.d.ts.map +1 -0
  66. package/dist/ingest/providers/index.js +68 -0
  67. package/dist/ingest/providers/index.js.map +1 -0
  68. package/dist/ingest/providers/openai.d.ts +3 -0
  69. package/dist/ingest/providers/openai.d.ts.map +1 -0
  70. package/dist/ingest/providers/openai.js +41 -0
  71. package/dist/ingest/providers/openai.js.map +1 -0
  72. package/dist/ingest/worker.d.ts +21 -0
  73. package/dist/ingest/worker.d.ts.map +1 -0
  74. package/dist/ingest/worker.js +111 -0
  75. package/dist/ingest/worker.js.map +1 -0
  76. package/dist/recall/engine.d.ts +23 -0
  77. package/dist/recall/engine.d.ts.map +1 -0
  78. package/dist/recall/engine.js +153 -0
  79. package/dist/recall/engine.js.map +1 -0
  80. package/dist/recall/mmr.d.ts +17 -0
  81. package/dist/recall/mmr.d.ts.map +1 -0
  82. package/dist/recall/mmr.js +51 -0
  83. package/dist/recall/mmr.js.map +1 -0
  84. package/dist/recall/recency.d.ts +20 -0
  85. package/dist/recall/recency.d.ts.map +1 -0
  86. package/dist/recall/recency.js +26 -0
  87. package/dist/recall/recency.js.map +1 -0
  88. package/dist/recall/rrf.d.ts +16 -0
  89. package/dist/recall/rrf.d.ts.map +1 -0
  90. package/dist/recall/rrf.js +15 -0
  91. package/dist/recall/rrf.js.map +1 -0
  92. package/dist/storage/sqlite.d.ts +34 -0
  93. package/dist/storage/sqlite.d.ts.map +1 -0
  94. package/dist/storage/sqlite.js +274 -0
  95. package/dist/storage/sqlite.js.map +1 -0
  96. package/dist/storage/vector.d.ts +13 -0
  97. package/dist/storage/vector.d.ts.map +1 -0
  98. package/dist/storage/vector.js +33 -0
  99. package/dist/storage/vector.js.map +1 -0
  100. package/dist/tools/index.d.ts +4 -0
  101. package/dist/tools/index.d.ts.map +1 -0
  102. package/dist/tools/index.js +10 -0
  103. package/dist/tools/index.js.map +1 -0
  104. package/dist/tools/memory-get.d.ts +4 -0
  105. package/dist/tools/memory-get.d.ts.map +1 -0
  106. package/dist/tools/memory-get.js +59 -0
  107. package/dist/tools/memory-get.js.map +1 -0
  108. package/dist/tools/memory-search.d.ts +4 -0
  109. package/dist/tools/memory-search.d.ts.map +1 -0
  110. package/dist/tools/memory-search.js +36 -0
  111. package/dist/tools/memory-search.js.map +1 -0
  112. package/dist/tools/memory-timeline.d.ts +4 -0
  113. package/dist/tools/memory-timeline.d.ts.map +1 -0
  114. package/dist/tools/memory-timeline.js +64 -0
  115. package/dist/tools/memory-timeline.js.map +1 -0
  116. package/dist/types.d.ts +158 -0
  117. package/dist/types.d.ts.map +1 -0
  118. package/dist/types.js +25 -0
  119. package/dist/types.js.map +1 -0
  120. package/dist/viewer/html.d.ts +2 -0
  121. package/dist/viewer/html.d.ts.map +1 -0
  122. package/dist/viewer/html.js +686 -0
  123. package/dist/viewer/html.js.map +1 -0
  124. package/dist/viewer/server.d.ts +48 -0
  125. package/dist/viewer/server.d.ts.map +1 -0
  126. package/dist/viewer/server.js +470 -0
  127. package/dist/viewer/server.js.map +1 -0
  128. package/index.ts +357 -0
  129. package/openclaw.plugin.json +57 -0
  130. package/package.json +57 -0
  131. package/src/capture/index.ts +92 -0
  132. package/src/config.ts +67 -0
  133. package/src/embedding/index.ts +76 -0
  134. package/src/embedding/local.ts +35 -0
  135. package/src/embedding/providers/cohere.ts +69 -0
  136. package/src/embedding/providers/gemini.ts +41 -0
  137. package/src/embedding/providers/mistral.ts +32 -0
  138. package/src/embedding/providers/openai.ts +42 -0
  139. package/src/embedding/providers/voyage.ts +32 -0
  140. package/src/index.ts +106 -0
  141. package/src/ingest/chunker.ts +217 -0
  142. package/src/ingest/dedup.ts +37 -0
  143. package/src/ingest/providers/anthropic.ts +41 -0
  144. package/src/ingest/providers/bedrock.ts +50 -0
  145. package/src/ingest/providers/gemini.ts +41 -0
  146. package/src/ingest/providers/index.ts +67 -0
  147. package/src/ingest/providers/openai.ts +48 -0
  148. package/src/ingest/worker.ts +130 -0
  149. package/src/recall/engine.ts +182 -0
  150. package/src/recall/mmr.ts +60 -0
  151. package/src/recall/recency.ts +27 -0
  152. package/src/recall/rrf.ts +31 -0
  153. package/src/storage/sqlite.ts +305 -0
  154. package/src/storage/vector.ts +39 -0
  155. package/src/tools/index.ts +3 -0
  156. package/src/tools/memory-get.ts +68 -0
  157. package/src/tools/memory-search.ts +36 -0
  158. package/src/tools/memory-timeline.ts +73 -0
  159. package/src/types.ts +214 -0
  160. package/src/viewer/html.ts +682 -0
  161. package/src/viewer/server.ts +464 -0
  162. package/www/index.html +606 -0
@@ -0,0 +1,41 @@
1
+ import type { SummarizerConfig, Logger } from "../../types";
2
+
3
+ const SYSTEM_PROMPT = `Summarize the text in ONE concise sentence (max 60 tokens). Preserve exact names, commands, error codes. No bullet points, no preamble — output only the sentence.`;
4
+
5
+ export async function summarizeAnthropic(
6
+ text: string,
7
+ cfg: SummarizerConfig,
8
+ log: Logger,
9
+ ): Promise<string> {
10
+ const endpoint = cfg.endpoint ?? "https://api.anthropic.com/v1/messages";
11
+ const model = cfg.model ?? "claude-3-haiku-20240307";
12
+ const headers: Record<string, string> = {
13
+ "Content-Type": "application/json",
14
+ "x-api-key": cfg.apiKey ?? "",
15
+ "anthropic-version": "2023-06-01",
16
+ ...cfg.headers,
17
+ };
18
+
19
+ const resp = await fetch(endpoint, {
20
+ method: "POST",
21
+ headers,
22
+ body: JSON.stringify({
23
+ model,
24
+ max_tokens: 100,
25
+ temperature: cfg.temperature ?? 0,
26
+ system: SYSTEM_PROMPT,
27
+ messages: [{ role: "user", content: text }],
28
+ }),
29
+ signal: AbortSignal.timeout(cfg.timeoutMs ?? 30_000),
30
+ });
31
+
32
+ if (!resp.ok) {
33
+ const body = await resp.text();
34
+ throw new Error(`Anthropic summarize failed (${resp.status}): ${body}`);
35
+ }
36
+
37
+ const json = (await resp.json()) as {
38
+ content: Array<{ type: string; text: string }>;
39
+ };
40
+ return json.content.find((c) => c.type === "text")?.text?.trim() ?? "";
41
+ }
@@ -0,0 +1,50 @@
1
+ import type { SummarizerConfig, Logger } from "../../types";
2
+
3
const SYSTEM_PROMPT = `Summarize the text in ONE concise sentence (max 60 tokens). Preserve exact names, commands, error codes. No bullet points, no preamble — output only the sentence.`;

/**
 * AWS Bedrock Converse API adapter.
 *
 * Expects `cfg.endpoint` to be the Bedrock runtime base URL
 * (e.g. https://bedrock-runtime.us-east-1.amazonaws.com); the model id
 * is appended to build the `/model/{id}/converse` route.
 *
 * NOTE(review): this adapter issues a plain `fetch` and attaches only
 * `cfg.headers` — there is no SigV4 signing here, and Bedrock normally
 * rejects unsigned requests. Authentication must therefore come from
 * `cfg.headers` or a signing proxy in front of the endpoint — confirm
 * against the deployment setup.
 *
 * `log` is accepted for adapter-signature uniformity but unused here.
 *
 * @throws Error when `cfg.endpoint` is missing or the response is not 2xx.
 */
export async function summarizeBedrock(
  text: string,
  cfg: SummarizerConfig,
  log: Logger,
): Promise<string> {
  const model = cfg.model ?? "anthropic.claude-3-haiku-20240307-v1:0";
  const endpoint = cfg.endpoint;
  if (!endpoint) {
    throw new Error("Bedrock summarizer requires 'endpoint' to be set (e.g. https://bedrock-runtime.us-east-1.amazonaws.com)");
  }

  const url = `${endpoint}/model/${model}/converse`;
  const headers: Record<string, string> = {
    "Content-Type": "application/json",
    ...cfg.headers,
  };

  const resp = await fetch(url, {
    method: "POST",
    headers,
    body: JSON.stringify({
      system: [{ text: SYSTEM_PROMPT }],
      messages: [{ role: "user", content: [{ text }] }],
      inferenceConfig: {
        temperature: cfg.temperature ?? 0,
        // Cap matches the other provider adapters (prompt asks for ≤60 tokens).
        maxTokens: 100,
      },
    }),
    signal: AbortSignal.timeout(cfg.timeoutMs ?? 30_000),
  });

  if (!resp.ok) {
    const body = await resp.text();
    throw new Error(`Bedrock summarize failed (${resp.status}): ${body}`);
  }

  // Converse response shape: output.message.content[] — take the first text part.
  const json = (await resp.json()) as {
    output: { message: { content: Array<{ text: string }> } };
  };
  return json.output?.message?.content?.[0]?.text?.trim() ?? "";
}
@@ -0,0 +1,41 @@
1
+ import type { SummarizerConfig, Logger } from "../../types";
2
+
3
+ const SYSTEM_PROMPT = `Summarize the text in ONE concise sentence (max 60 tokens). Preserve exact names, commands, error codes. No bullet points, no preamble — output only the sentence.`;
4
+
5
+ export async function summarizeGemini(
6
+ text: string,
7
+ cfg: SummarizerConfig,
8
+ log: Logger,
9
+ ): Promise<string> {
10
+ const model = cfg.model ?? "gemini-1.5-flash";
11
+ const endpoint =
12
+ cfg.endpoint ??
13
+ `https://generativelanguage.googleapis.com/v1beta/models/${model}:generateContent`;
14
+
15
+ const url = `${endpoint}?key=${cfg.apiKey}`;
16
+ const headers: Record<string, string> = {
17
+ "Content-Type": "application/json",
18
+ ...cfg.headers,
19
+ };
20
+
21
+ const resp = await fetch(url, {
22
+ method: "POST",
23
+ headers,
24
+ body: JSON.stringify({
25
+ systemInstruction: { parts: [{ text: SYSTEM_PROMPT }] },
26
+ contents: [{ parts: [{ text }] }],
27
+ generationConfig: { temperature: cfg.temperature ?? 0, maxOutputTokens: 100 },
28
+ }),
29
+ signal: AbortSignal.timeout(cfg.timeoutMs ?? 30_000),
30
+ });
31
+
32
+ if (!resp.ok) {
33
+ const body = await resp.text();
34
+ throw new Error(`Gemini summarize failed (${resp.status}): ${body}`);
35
+ }
36
+
37
+ const json = (await resp.json()) as {
38
+ candidates: Array<{ content: { parts: Array<{ text: string }> } }>;
39
+ };
40
+ return json.candidates?.[0]?.content?.parts?.[0]?.text?.trim() ?? "";
41
+ }
@@ -0,0 +1,67 @@
1
+ import type { SummarizerConfig, Logger } from "../../types";
2
+ import { summarizeOpenAI } from "./openai";
3
+ import { summarizeAnthropic } from "./anthropic";
4
+ import { summarizeGemini } from "./gemini";
5
+ import { summarizeBedrock } from "./bedrock";
6
+
7
+ export class Summarizer {
8
+ constructor(
9
+ private cfg: SummarizerConfig | undefined,
10
+ private log: Logger,
11
+ ) {}
12
+
13
+ async summarize(text: string): Promise<string> {
14
+ if (!this.cfg) {
15
+ return ruleFallback(text);
16
+ }
17
+
18
+ try {
19
+ return await this.callProvider(text);
20
+ } catch (err) {
21
+ this.log.warn(`Summarizer provider failed, using rule fallback: ${err}`);
22
+ return ruleFallback(text);
23
+ }
24
+ }
25
+
26
+ private async callProvider(text: string): Promise<string> {
27
+ const cfg = this.cfg!;
28
+ switch (cfg.provider) {
29
+ case "openai":
30
+ case "openai_compatible":
31
+ return summarizeOpenAI(text, cfg, this.log);
32
+ case "anthropic":
33
+ return summarizeAnthropic(text, cfg, this.log);
34
+ case "gemini":
35
+ return summarizeGemini(text, cfg, this.log);
36
+ case "azure_openai":
37
+ return summarizeOpenAI(text, cfg, this.log);
38
+ case "bedrock":
39
+ return summarizeBedrock(text, cfg, this.log);
40
+ default:
41
+ throw new Error(`Unknown summarizer provider: ${cfg.provider}`);
42
+ }
43
+ }
44
+ }
45
+
46
+ /**
47
+ * Rule-based fallback: produce a single short sentence from the first
48
+ * meaningful line, appending any key entities found in the text.
49
+ */
50
+ function ruleFallback(text: string): string {
51
+ const lines = text.split("\n").filter((l) => l.trim().length > 10);
52
+ const first = (lines[0] ?? text).trim();
53
+
54
+ const entityRe = [/`[^`]+`/g, /\b(?:error|Error|ERROR)\s*[::]\s*.{5,60}/g];
55
+ const entities: string[] = [];
56
+ for (const re of entityRe) {
57
+ for (const m of text.matchAll(re)) {
58
+ if (entities.length < 3) entities.push(m[0].slice(0, 50));
59
+ }
60
+ }
61
+
62
+ let summary = first.length > 120 ? first.slice(0, 117) + "..." : first;
63
+ if (entities.length > 0) {
64
+ summary += ` (${entities.join(", ")})`;
65
+ }
66
+ return summary.slice(0, 200);
67
+ }
@@ -0,0 +1,48 @@
1
+ import type { SummarizerConfig, Logger } from "../../types";
2
+
3
+ const SYSTEM_PROMPT = `Summarize the text in ONE concise sentence (max 60 tokens). Preserve exact names, commands, error codes. No bullet points, no preamble — output only the sentence.`;
4
+
5
+ export async function summarizeOpenAI(
6
+ text: string,
7
+ cfg: SummarizerConfig,
8
+ log: Logger,
9
+ ): Promise<string> {
10
+ const endpoint = normalizeChatEndpoint(cfg.endpoint ?? "https://api.openai.com/v1/chat/completions");
11
+ const model = cfg.model ?? "gpt-4o-mini";
12
+ const headers: Record<string, string> = {
13
+ "Content-Type": "application/json",
14
+ Authorization: `Bearer ${cfg.apiKey}`,
15
+ ...cfg.headers,
16
+ };
17
+
18
+ const resp = await fetch(endpoint, {
19
+ method: "POST",
20
+ headers,
21
+ body: JSON.stringify({
22
+ model,
23
+ temperature: cfg.temperature ?? 0,
24
+ messages: [
25
+ { role: "system", content: SYSTEM_PROMPT },
26
+ { role: "user", content: text },
27
+ ],
28
+ }),
29
+ signal: AbortSignal.timeout(cfg.timeoutMs ?? 30_000),
30
+ });
31
+
32
+ if (!resp.ok) {
33
+ const body = await resp.text();
34
+ throw new Error(`OpenAI summarize failed (${resp.status}): ${body}`);
35
+ }
36
+
37
+ const json = (await resp.json()) as {
38
+ choices: Array<{ message: { content: string } }>;
39
+ };
40
+ return json.choices[0]?.message?.content?.trim() ?? "";
41
+ }
42
+
43
+ function normalizeChatEndpoint(url: string): string {
44
+ const stripped = url.replace(/\/+$/, "");
45
+ if (stripped.endsWith("/chat/completions")) return stripped;
46
+ if (stripped.endsWith("/completions")) return stripped;
47
+ return `${stripped}/chat/completions`;
48
+ }
@@ -0,0 +1,130 @@
1
+ import { v4 as uuid } from "uuid";
2
+ import type { ConversationMessage, Chunk, PluginContext } from "../types";
3
+ import type { SqliteStore } from "../storage/sqlite";
4
+ import type { Embedder } from "../embedding";
5
+ import { Summarizer } from "./providers";
6
+ import { chunkText } from "./chunker";
7
+ import { findDuplicate } from "./dedup";
8
+
9
+ export class IngestWorker {
10
+ private summarizer: Summarizer;
11
+ private queue: ConversationMessage[] = [];
12
+ private processing = false;
13
+ private flushResolvers: Array<() => void> = [];
14
+
15
+ constructor(
16
+ private store: SqliteStore,
17
+ private embedder: Embedder,
18
+ private ctx: PluginContext,
19
+ ) {
20
+ this.summarizer = new Summarizer(ctx.config.summarizer, ctx.log);
21
+ }
22
+
23
+ enqueue(messages: ConversationMessage[]): void {
24
+ this.queue.push(...messages);
25
+ if (!this.processing) {
26
+ this.processQueue().catch((err) => {
27
+ this.ctx.log.error(`Ingest worker error: ${err}`);
28
+ this.processing = false;
29
+ });
30
+ }
31
+ }
32
+
33
+ /** Wait until all queued messages have been processed. */
34
+ async flush(): Promise<void> {
35
+ if (this.queue.length === 0 && !this.processing) return;
36
+ return new Promise((resolve) => {
37
+ this.flushResolvers.push(resolve);
38
+ });
39
+ }
40
+
41
+ private async processQueue(): Promise<void> {
42
+ this.processing = true;
43
+
44
+ while (this.queue.length > 0) {
45
+ const msg = this.queue.shift()!;
46
+ try {
47
+ await this.ingestMessage(msg);
48
+ } catch (err) {
49
+ this.ctx.log.error(`Failed to ingest message turn=${msg.turnId}: ${err}`);
50
+ }
51
+ }
52
+
53
+ this.processing = false;
54
+ for (const resolve of this.flushResolvers) resolve();
55
+ this.flushResolvers = [];
56
+ }
57
+
58
+ private async ingestMessage(msg: ConversationMessage): Promise<void> {
59
+ if (msg.role === "tool") {
60
+ await this.ingestToolResult(msg);
61
+ return;
62
+ }
63
+
64
+ const rawChunks = chunkText(msg.content);
65
+ this.ctx.log.debug(`Chunked turn=${msg.turnId} into ${rawChunks.length} chunks`);
66
+
67
+ for (let seq = 0; seq < rawChunks.length; seq++) {
68
+ const raw = rawChunks[seq];
69
+ await this.storeChunk(msg, raw.content, raw.kind, seq);
70
+ }
71
+ }
72
+
73
+ private async ingestToolResult(msg: ConversationMessage): Promise<void> {
74
+ this.ctx.log.debug(`Ingesting tool result turn=${msg.turnId} tool=${msg.toolName ?? "unknown"} len=${msg.content.length}`);
75
+ await this.storeChunk(msg, msg.content, "tool_result", 0);
76
+ }
77
+
78
+ private async storeChunk(
79
+ msg: ConversationMessage,
80
+ content: string,
81
+ kind: Chunk["kind"],
82
+ seq: number,
83
+ ): Promise<void> {
84
+ const chunkId = uuid();
85
+ const summary = await this.summarizer.summarize(content);
86
+
87
+ let embedding: number[] | null = null;
88
+ try {
89
+ [embedding] = await this.embedder.embed([summary]);
90
+ } catch (err) {
91
+ this.ctx.log.warn(`Embedding failed for chunk=${chunkId}, storing without vector: ${err}`);
92
+ }
93
+
94
+ if (embedding) {
95
+ const dupId = findDuplicate(
96
+ this.store,
97
+ embedding,
98
+ this.ctx.config.dedup?.similarityThreshold ?? 0.93,
99
+ this.ctx.log,
100
+ );
101
+
102
+ if (dupId) {
103
+ this.store.updateSummary(dupId, summary);
104
+ this.store.upsertEmbedding(dupId, embedding);
105
+ this.ctx.log.debug(`Dedup-merged into existing chunk=${dupId}`);
106
+ return;
107
+ }
108
+ }
109
+
110
+ const chunk: Chunk = {
111
+ id: chunkId,
112
+ sessionKey: msg.sessionKey,
113
+ turnId: msg.turnId,
114
+ seq,
115
+ role: msg.role,
116
+ content,
117
+ kind,
118
+ summary,
119
+ embedding: null,
120
+ createdAt: msg.timestamp,
121
+ updatedAt: msg.timestamp,
122
+ };
123
+
124
+ this.store.insertChunk(chunk);
125
+ if (embedding) {
126
+ this.store.upsertEmbedding(chunkId, embedding);
127
+ }
128
+ this.ctx.log.debug(`Stored chunk=${chunkId} kind=${kind} role=${msg.role} len=${content.length} hasVec=${!!embedding}`);
129
+ }
130
+ }
@@ -0,0 +1,182 @@
1
+ import type { SqliteStore } from "../storage/sqlite";
2
+ import type { Embedder } from "../embedding";
3
+ import type { PluginContext, SearchHit, SearchResult } from "../types";
4
+ import { vectorSearch } from "../storage/vector";
5
+ import { rrfFuse } from "./rrf";
6
+ import { mmrRerank } from "./mmr";
7
+ import { applyRecencyDecay } from "./recency";
8
+
9
/** Caller-tunable knobs for a single recall search. */
export interface RecallOptions {
  query?: string;
  maxResults?: number;
  minScore?: number;
}

// Ring size for the repeated-query detector below.
const MAX_RECENT_QUERIES = 20;

/**
 * Hybrid recall pipeline: FTS + vector candidates → RRF fusion →
 * MMR diversity re-ranking → recency decay → normalization → filtering.
 * Also tracks recent queries to warn the model about exact repeats.
 */
export class RecallEngine {
  private recentQueries: Array<{ query: string; maxResults: number; minScore: number; hitCount: number }> = [];

  constructor(
    private store: SqliteStore,
    private embedder: Embedder,
    private ctx: PluginContext,
  ) {}

  /**
   * Run the full recall pipeline for `opts.query`.
   *
   * Vector-search failures degrade gracefully to FTS-only. Scores in the
   * returned hits are normalized to [0,1] relative to the best candidate
   * of THIS search, so they are not comparable across searches.
   */
  async search(opts: RecallOptions): Promise<SearchResult> {
    // Config is assumed validated upstream (hence the non-null assertions)
    // — NOTE(review): confirm defaults are always populated at load time.
    const recallCfg = this.ctx.config.recall!;
    const maxResults = Math.min(
      opts.maxResults ?? recallCfg.maxResultsDefault!,
      recallCfg.maxResultsMax!,
    );
    const minScore = opts.minScore ?? recallCfg.minScoreDefault!;
    const query = opts.query ?? "";

    const repeatNote = this.checkRepeat(query, maxResults, minScore);
    // Oversample 5x so fusion/re-ranking have enough material to work with.
    const candidatePool = maxResults * 5;

    // Step 1: Gather candidates from both FTS and vector search
    const ftsCandidates = query
      ? this.store.ftsSearch(query, candidatePool)
      : [];

    let vecCandidates: Array<{ chunkId: string; score: number }> = [];
    if (query) {
      try {
        const queryVec = await this.embedder.embedQuery(query);
        vecCandidates = vectorSearch(this.store, queryVec, candidatePool);
      } catch (err) {
        // Degrade to FTS-only rather than failing the search.
        this.ctx.log.warn(`Vector search failed, using FTS only: ${err}`);
      }
    }

    // Step 2: RRF fusion
    const ftsRanked = ftsCandidates.map((c) => ({ id: c.chunkId, score: c.score }));
    const vecRanked = vecCandidates.map((c) => ({ id: c.chunkId, score: c.score }));
    const rrfScores = rrfFuse([ftsRanked, vecRanked], recallCfg.rrfK);

    if (rrfScores.size === 0) {
      this.recordQuery(query, maxResults, minScore, 0);
      return {
        hits: [],
        meta: {
          usedMinScore: minScore,
          usedMaxResults: maxResults,
          totalCandidates: 0,
          note: repeatNote ?? "No candidates found for the given query.",
        },
      };
    }

    // Step 3: MMR re-ranking (keep 2x maxResults so later filters still
    // have slack)
    const rrfList = [...rrfScores.entries()]
      .map(([id, score]) => ({ id, score }))
      .sort((a, b) => b.score - a.score);

    const mmrResults = mmrRerank(rrfList, this.store, recallCfg.mmrLambda, maxResults * 2);

    // Step 4: Time decay (chunks missing from the store get createdAt=0,
    // i.e. maximum age)
    const withTs = mmrResults.map((r) => {
      const chunk = this.store.getChunk(r.id);
      return { ...r, createdAt: chunk?.createdAt ?? 0 };
    });
    const decayed = applyRecencyDecay(withTs, recallCfg.recencyHalfLifeDays);

    // Step 5: Normalize scores to [0,1]
    // The 1e-10 floor guards the division when every score is zero.
    const maxScore = Math.max(...decayed.map((d) => d.score), 1e-10);
    const normalized = decayed.map((d) => ({
      ...d,
      score: d.score / maxScore,
    }));

    // Step 6: Filter by minScore and limit
    const filtered = normalized
      .filter((d) => d.score >= minScore)
      .sort((a, b) => b.score - a.score)
      .slice(0, maxResults);

    // Step 7: Build hits (skip ids whose chunk row has disappeared)
    const hits: SearchHit[] = [];
    for (const candidate of filtered) {
      const chunk = this.store.getChunk(candidate.id);
      if (!chunk) continue;

      hits.push({
        summary: chunk.summary,
        original_excerpt: makeExcerpt(chunk.content),
        ref: {
          sessionKey: chunk.sessionKey,
          chunkId: chunk.id,
          turnId: chunk.turnId,
          seq: chunk.seq,
        },
        // Round to 3 decimals for presentation.
        score: Math.round(candidate.score * 1000) / 1000,
        source: {
          ts: chunk.createdAt,
          role: chunk.role,
          sessionKey: chunk.sessionKey,
        },
      });
    }

    this.recordQuery(query, maxResults, minScore, hits.length);

    return {
      hits,
      meta: {
        usedMinScore: minScore,
        usedMaxResults: maxResults,
        totalCandidates: rrfScores.size,
        ...(repeatNote ? { note: repeatNote } : {}),
      },
    };
  }

  /**
   * PRD §6.1: Detect repeated identical/similar queries and produce a
   * warning note so the model knows to vary its approach.
   *
   * A "repeat" is an exact match on normalized query text AND both
   * parameters; returns undefined for empty queries or first-time queries.
   */
  private checkRepeat(query: string, maxResults: number, minScore: number): string | undefined {
    const normalized = query.toLowerCase().trim();
    if (!normalized) return undefined;

    const dup = this.recentQueries.find(
      (q) => q.query === normalized && q.maxResults === maxResults && q.minScore === minScore,
    );

    if (dup) {
      if (dup.hitCount === 0) {
        return "This exact query with the same parameters was already tried and returned 0 results. Try rephrasing with different keywords, or adjust maxResults/minScore.";
      }
      return "This exact query with the same parameters was already executed. Consider varying the query or expanding parameters to get different results.";
    }

    return undefined;
  }

  /**
   * Record a completed query in the bounded recent-query ring, replacing
   * any previous entry with the same key so hitCount stays current.
   */
  private recordQuery(query: string, maxResults: number, minScore: number, hitCount: number): void {
    const normalized = query.toLowerCase().trim();
    if (!normalized) return;

    this.recentQueries = this.recentQueries.filter(
      (q) => !(q.query === normalized && q.maxResults === maxResults && q.minScore === minScore),
    );
    this.recentQueries.push({ query: normalized, maxResults, minScore, hitCount });

    if (this.recentQueries.length > MAX_RECENT_QUERIES) {
      this.recentQueries.shift();
    }
  }
}
171
+
172
+ function makeExcerpt(content: string): string {
173
+ const min = 200;
174
+ const max = 500;
175
+ if (content.length <= max) return content;
176
+
177
+ let cut = content.lastIndexOf(".", max);
178
+ if (cut < min) cut = content.lastIndexOf(" ", max);
179
+ if (cut < min) cut = max;
180
+
181
+ return content.slice(0, cut) + "…";
182
+ }
@@ -0,0 +1,60 @@
1
+ import { cosineSimilarity } from "../storage/vector";
2
+ import type { SqliteStore } from "../storage/sqlite";
3
+
4
+ /**
5
+ * Maximal Marginal Relevance (PRD §5.3)
6
+ *
7
+ * Re-ranks candidates to balance relevance with diversity,
8
+ * preventing top-K results from being too similar.
9
+ *
10
+ * MMR = λ · sim(q, d) - (1-λ) · max(sim(d, d_selected))
11
+ */
12
+ export function mmrRerank(
13
+ candidates: Array<{ id: string; score: number }>,
14
+ store: SqliteStore,
15
+ lambda: number = 0.7,
16
+ topK: number = 20,
17
+ ): Array<{ id: string; score: number }> {
18
+ if (candidates.length <= 1) return candidates;
19
+
20
+ const embeddings = new Map<string, number[]>();
21
+ for (const c of candidates) {
22
+ const vec = store.getEmbedding(c.id);
23
+ if (vec) embeddings.set(c.id, vec);
24
+ }
25
+
26
+ const selected: Array<{ id: string; score: number }> = [];
27
+ const remaining = [...candidates];
28
+
29
+ while (selected.length < topK && remaining.length > 0) {
30
+ let bestIdx = 0;
31
+ let bestMmr = -Infinity;
32
+
33
+ for (let i = 0; i < remaining.length; i++) {
34
+ const cand = remaining[i];
35
+ const candVec = embeddings.get(cand.id);
36
+
37
+ let maxSimToSelected = 0;
38
+ if (candVec && selected.length > 0) {
39
+ for (const s of selected) {
40
+ const sVec = embeddings.get(s.id);
41
+ if (sVec) {
42
+ const sim = cosineSimilarity(candVec, sVec);
43
+ maxSimToSelected = Math.max(maxSimToSelected, sim);
44
+ }
45
+ }
46
+ }
47
+
48
+ const mmrScore = lambda * cand.score - (1 - lambda) * maxSimToSelected;
49
+ if (mmrScore > bestMmr) {
50
+ bestMmr = mmrScore;
51
+ bestIdx = i;
52
+ }
53
+ }
54
+
55
+ const chosen = remaining.splice(bestIdx, 1)[0];
56
+ selected.push({ id: chosen.id, score: bestMmr });
57
+ }
58
+
59
+ return selected;
60
+ }
@@ -0,0 +1,27 @@
1
+ /**
2
+ * Time decay scoring (PRD §5.3)
3
+ *
4
+ * Applies exponential decay based on document age, biasing towards
5
+ * more recent memories. Uses configurable half-life (default 14 days).
6
+ *
7
+ * decay(t) = 0.5 ^ (age_days / half_life)
8
+ * final = base_score * (alpha + (1-alpha) * decay)
9
+ *
10
+ * alpha=0.3 ensures old but highly relevant results are not zeroed out.
11
+ */
12
+ export function applyRecencyDecay(
13
+ candidates: Array<{ id: string; score: number; createdAt: number }>,
14
+ halfLifeDays: number = 14,
15
+ now?: number,
16
+ ): Array<{ id: string; score: number }> {
17
+ const currentTime = now ?? Date.now();
18
+ const halfLifeMs = halfLifeDays * 24 * 60 * 60 * 1000;
19
+ const alpha = 0.3;
20
+
21
+ return candidates.map((c) => {
22
+ const ageMs = Math.max(0, currentTime - c.createdAt);
23
+ const decay = Math.pow(0.5, ageMs / halfLifeMs);
24
+ const adjustedScore = c.score * (alpha + (1 - alpha) * decay);
25
+ return { id: c.id, score: adjustedScore };
26
+ });
27
+ }
@@ -0,0 +1,31 @@
1
+ /**
2
+ * Reciprocal Rank Fusion (PRD §5.2)
3
+ *
4
+ * Merges ranked lists from different retrieval sources (FTS, vector)
5
+ * into a single ranking. Handles score scale mismatch between BM25
6
+ * and cosine similarity.
7
+ *
8
+ * RRF(d) = Σ 1 / (k + rank_i(d))
9
+ * where k is a constant (default 60) and rank_i is the rank in list i.
10
+ */
11
+ export interface RankedItem {
12
+ id: string;
13
+ score: number;
14
+ }
15
+
16
+ export function rrfFuse(
17
+ lists: RankedItem[][],
18
+ k: number = 60,
19
+ ): Map<string, number> {
20
+ const scores = new Map<string, number>();
21
+
22
+ for (const list of lists) {
23
+ for (let rank = 0; rank < list.length; rank++) {
24
+ const item = list[rank];
25
+ const prev = scores.get(item.id) ?? 0;
26
+ scores.set(item.id, prev + 1 / (k + rank + 1));
27
+ }
28
+ }
29
+
30
+ return scores;
31
+ }