@mnemoai/core 1.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/index.ts +3395 -0
- package/openclaw.plugin.json +815 -0
- package/package.json +59 -0
- package/src/access-tracker.ts +341 -0
- package/src/adapters/README.md +78 -0
- package/src/adapters/chroma.ts +206 -0
- package/src/adapters/lancedb.ts +237 -0
- package/src/adapters/pgvector.ts +218 -0
- package/src/adapters/qdrant.ts +191 -0
- package/src/adaptive-retrieval.ts +90 -0
- package/src/audit-log.ts +238 -0
- package/src/chunker.ts +254 -0
- package/src/config.ts +271 -0
- package/src/decay-engine.ts +238 -0
- package/src/embedder.ts +735 -0
- package/src/extraction-prompts.ts +339 -0
- package/src/license.ts +258 -0
- package/src/llm-client.ts +125 -0
- package/src/mcp-server.ts +415 -0
- package/src/memory-categories.ts +71 -0
- package/src/memory-upgrader.ts +388 -0
- package/src/migrate.ts +364 -0
- package/src/mnemo.ts +142 -0
- package/src/noise-filter.ts +97 -0
- package/src/noise-prototypes.ts +164 -0
- package/src/observability.ts +81 -0
- package/src/query-tracker.ts +57 -0
- package/src/reflection-event-store.ts +98 -0
- package/src/reflection-item-store.ts +112 -0
- package/src/reflection-mapped-metadata.ts +84 -0
- package/src/reflection-metadata.ts +23 -0
- package/src/reflection-ranking.ts +33 -0
- package/src/reflection-retry.ts +181 -0
- package/src/reflection-slices.ts +265 -0
- package/src/reflection-store.ts +602 -0
- package/src/resonance-state.ts +85 -0
- package/src/retriever.ts +1510 -0
- package/src/scopes.ts +375 -0
- package/src/self-improvement-files.ts +143 -0
- package/src/semantic-gate.ts +121 -0
- package/src/session-recovery.ts +138 -0
- package/src/smart-extractor.ts +923 -0
- package/src/smart-metadata.ts +561 -0
- package/src/storage-adapter.ts +153 -0
- package/src/store.ts +1330 -0
- package/src/tier-manager.ts +189 -0
- package/src/tools.ts +1292 -0
- package/src/wal-recovery.ts +172 -0
- package/test/core.test.mjs +301 -0
|
@@ -0,0 +1,125 @@
|
|
|
1
|
+
// SPDX-License-Identifier: MIT
|
|
2
|
+
/**
|
|
3
|
+
* LLM Client for memory extraction and dedup decisions.
|
|
4
|
+
* Uses OpenAI-compatible API (reuses the embedding provider config).
|
|
5
|
+
*/
|
|
6
|
+
|
|
7
|
+
import OpenAI from "openai";
|
|
8
|
+
|
|
9
|
+
/** Configuration for the JSON-completion LLM client (reuses the embedding provider's credentials). */
export interface LlmClientConfig {
  apiKey: string;
  // Chat-completion model identifier passed verbatim to the API.
  model: string;
  // Optional endpoint override for OpenAI-compatible providers.
  baseURL?: string;
  // Request timeout in milliseconds; createLlmClient defaults this to 30000.
  timeoutMs?: number;
  // Diagnostic sink for failure messages; a no-op is used when omitted.
  log?: (msg: string) => void;
}

/** Minimal client surface produced by createLlmClient. */
export interface LlmClient {
  /** Send a prompt and parse the JSON response. Returns null on failure. */
  completeJson<T>(prompt: string, label?: string): Promise<T | null>;
}
|
|
21
|
+
|
|
22
|
+
/**
|
|
23
|
+
* Extract JSON from an LLM response that may be wrapped in markdown fences
|
|
24
|
+
* or contain surrounding text.
|
|
25
|
+
*/
|
|
26
|
+
function extractJsonFromResponse(text: string): string | null {
|
|
27
|
+
// Try markdown code fence first (```json ... ``` or ``` ... ```)
|
|
28
|
+
const fenceMatch = text.match(/```(?:json)?\s*\n?([\s\S]*?)```/);
|
|
29
|
+
if (fenceMatch) {
|
|
30
|
+
return fenceMatch[1].trim();
|
|
31
|
+
}
|
|
32
|
+
|
|
33
|
+
// Try balanced brace extraction
|
|
34
|
+
const firstBrace = text.indexOf("{");
|
|
35
|
+
if (firstBrace === -1) return null;
|
|
36
|
+
|
|
37
|
+
let depth = 0;
|
|
38
|
+
let lastBrace = -1;
|
|
39
|
+
for (let i = firstBrace; i < text.length; i++) {
|
|
40
|
+
if (text[i] === "{") depth++;
|
|
41
|
+
else if (text[i] === "}") {
|
|
42
|
+
depth--;
|
|
43
|
+
if (depth === 0) {
|
|
44
|
+
lastBrace = i;
|
|
45
|
+
break;
|
|
46
|
+
}
|
|
47
|
+
}
|
|
48
|
+
}
|
|
49
|
+
|
|
50
|
+
if (lastBrace === -1) return null;
|
|
51
|
+
return text.substring(firstBrace, lastBrace + 1);
|
|
52
|
+
}
|
|
53
|
+
|
|
54
|
+
function previewText(value: string, maxLen = 200): string {
|
|
55
|
+
const normalized = value.replace(/\s+/g, " ").trim();
|
|
56
|
+
if (normalized.length <= maxLen) return normalized;
|
|
57
|
+
return `${normalized.slice(0, maxLen - 3)}...`;
|
|
58
|
+
}
|
|
59
|
+
|
|
60
|
+
export function createLlmClient(config: LlmClientConfig): LlmClient {
|
|
61
|
+
const client = new OpenAI({
|
|
62
|
+
apiKey: config.apiKey,
|
|
63
|
+
baseURL: config.baseURL,
|
|
64
|
+
timeout: config.timeoutMs ?? 30000,
|
|
65
|
+
});
|
|
66
|
+
const log = config.log ?? (() => {});
|
|
67
|
+
|
|
68
|
+
return {
|
|
69
|
+
async completeJson<T>(prompt: string, label = "generic"): Promise<T | null> {
|
|
70
|
+
try {
|
|
71
|
+
const response = await client.chat.completions.create({
|
|
72
|
+
model: config.model,
|
|
73
|
+
messages: [
|
|
74
|
+
{
|
|
75
|
+
role: "system",
|
|
76
|
+
content:
|
|
77
|
+
"You are a memory extraction assistant. Always respond with valid JSON only.",
|
|
78
|
+
},
|
|
79
|
+
{ role: "user", content: prompt },
|
|
80
|
+
],
|
|
81
|
+
temperature: 0.1,
|
|
82
|
+
});
|
|
83
|
+
|
|
84
|
+
const raw = response.choices?.[0]?.message?.content;
|
|
85
|
+
if (!raw) {
|
|
86
|
+
log(
|
|
87
|
+
`mnemo: llm-client [${label}] empty response content from model ${config.model}`,
|
|
88
|
+
);
|
|
89
|
+
return null;
|
|
90
|
+
}
|
|
91
|
+
if (typeof raw !== "string") {
|
|
92
|
+
log(
|
|
93
|
+
`mnemo: llm-client [${label}] non-string response content type=${Array.isArray(raw) ? "array" : typeof raw} from model ${config.model}`,
|
|
94
|
+
);
|
|
95
|
+
return null;
|
|
96
|
+
}
|
|
97
|
+
|
|
98
|
+
const jsonStr = extractJsonFromResponse(raw);
|
|
99
|
+
if (!jsonStr) {
|
|
100
|
+
log(
|
|
101
|
+
`mnemo: llm-client [${label}] no JSON object found (chars=${raw.length}, preview=${JSON.stringify(previewText(raw))})`,
|
|
102
|
+
);
|
|
103
|
+
return null;
|
|
104
|
+
}
|
|
105
|
+
|
|
106
|
+
try {
|
|
107
|
+
return JSON.parse(jsonStr) as T;
|
|
108
|
+
} catch (err) {
|
|
109
|
+
log(
|
|
110
|
+
`mnemo: llm-client [${label}] JSON.parse failed: ${err instanceof Error ? err.message : String(err)} (jsonChars=${jsonStr.length}, jsonPreview=${JSON.stringify(previewText(jsonStr))})`,
|
|
111
|
+
);
|
|
112
|
+
return null;
|
|
113
|
+
}
|
|
114
|
+
} catch (err) {
|
|
115
|
+
// Graceful degradation — return null so caller can fall back
|
|
116
|
+
log(
|
|
117
|
+
`mnemo: llm-client [${label}] request failed for model ${config.model}: ${err instanceof Error ? err.message : String(err)}`,
|
|
118
|
+
);
|
|
119
|
+
return null;
|
|
120
|
+
}
|
|
121
|
+
},
|
|
122
|
+
};
|
|
123
|
+
}
|
|
124
|
+
|
|
125
|
+
export { extractJsonFromResponse };
|
|
@@ -0,0 +1,415 @@
|
|
|
1
|
+
// SPDX-License-Identifier: LicenseRef-Mnemo-Pro
|
|
2
|
+
/**
|
|
3
|
+
* MCP Server for Mnemo
|
|
4
|
+
*
|
|
5
|
+
* Exposes memory tools (search, store, delete, update, list, stats) over
|
|
6
|
+
* stdio JSON-RPC so Claude Code can call them directly without going through
|
|
7
|
+
* the OpenClaw gateway.
|
|
8
|
+
*
|
|
9
|
+
* Usage:
|
|
10
|
+
* node --import jiti/register src/mcp-server.ts
|
|
11
|
+
*
|
|
12
|
+
* Register with Claude Code:
|
|
13
|
+
* claude mcp add memory -s user -- node --import jiti/register \
|
|
14
|
+
* /path/to/mnemo/src/mcp-server.ts
|
|
15
|
+
*/
|
|
16
|
+
|
|
17
|
+
// Redirect console.log to stderr — stdout is reserved for JSON-RPC
|
|
18
|
+
const _origLog = console.log;
|
|
19
|
+
console.log = (...args: any[]) => console.error(...args);
|
|
20
|
+
|
|
21
|
+
import { McpServer } from "@modelcontextprotocol/sdk/server/mcp.js";
|
|
22
|
+
import { StdioServerTransport } from "@modelcontextprotocol/sdk/server/stdio.js";
|
|
23
|
+
import { z } from "zod";
|
|
24
|
+
import { join } from "node:path";
|
|
25
|
+
import { mkdir, appendFile } from "node:fs/promises";
|
|
26
|
+
|
|
27
|
+
import { loadConfigFromOpenClaw, getDefaultDbPath } from "./config.js";
|
|
28
|
+
import { MemoryStore, validateStoragePath } from "./store.js";
|
|
29
|
+
import { createEmbedder, getVectorDimensions } from "./embedder.js";
|
|
30
|
+
import { createRetriever, DEFAULT_RETRIEVAL_CONFIG } from "./retriever.js";
|
|
31
|
+
import { createScopeManager } from "./scopes.js";
|
|
32
|
+
import { isNoise } from "./noise-filter.js";
|
|
33
|
+
import { SemanticGate } from "./semantic-gate.js";
|
|
34
|
+
import { recoverPendingWrites } from "./wal-recovery.js";
|
|
35
|
+
|
|
36
|
+
// ============================================================================
|
|
37
|
+
// Initialization
|
|
38
|
+
// ============================================================================
|
|
39
|
+
|
|
40
|
+
const config = loadConfigFromOpenClaw();
|
|
41
|
+
|
|
42
|
+
const dbPath = config.dbPath || getDefaultDbPath();
|
|
43
|
+
try {
|
|
44
|
+
validateStoragePath(dbPath);
|
|
45
|
+
} catch (err) {
|
|
46
|
+
console.error(`mnemo mcp: storage path issue — ${String(err)}`);
|
|
47
|
+
}
|
|
48
|
+
|
|
49
|
+
const vectorDim = getVectorDimensions(
|
|
50
|
+
config.embedding.model || "text-embedding-3-small",
|
|
51
|
+
config.embedding.dimensions,
|
|
52
|
+
);
|
|
53
|
+
|
|
54
|
+
const store = new MemoryStore({ dbPath, vectorDim });
|
|
55
|
+
const embedder = createEmbedder({
|
|
56
|
+
provider: "openai-compatible",
|
|
57
|
+
apiKey: config.embedding.apiKey,
|
|
58
|
+
model: config.embedding.model || "text-embedding-3-small",
|
|
59
|
+
baseURL: config.embedding.baseURL,
|
|
60
|
+
dimensions: config.embedding.dimensions,
|
|
61
|
+
taskQuery: config.embedding.taskQuery,
|
|
62
|
+
taskPassage: config.embedding.taskPassage,
|
|
63
|
+
normalized: config.embedding.normalized,
|
|
64
|
+
});
|
|
65
|
+
console.warn(`[config-debug] config.retrieval keys: ${JSON.stringify(Object.keys(config.retrieval || {}))}`);
|
|
66
|
+
console.warn(`[config-debug] config.retrieval.rerankApiKey: ${config.retrieval?.rerankApiKey ? 'SET(' + String(config.retrieval.rerankApiKey).substring(0, 8) + ')' : 'EMPTY'}`);
|
|
67
|
+
const retriever = createRetriever(store, embedder, {
|
|
68
|
+
...DEFAULT_RETRIEVAL_CONFIG,
|
|
69
|
+
...config.retrieval,
|
|
70
|
+
});
|
|
71
|
+
const scopeManager = createScopeManager(config.scopes);
|
|
72
|
+
|
|
73
|
+
// Inject semantic gate into store
|
|
74
|
+
const semanticGate = new SemanticGate(embedder);
|
|
75
|
+
store.setSemanticGate(semanticGate);
|
|
76
|
+
|
|
77
|
+
// WAL recovery: fire-and-forget on startup
|
|
78
|
+
recoverPendingWrites().catch((err) => {
|
|
79
|
+
console.error(`mnemo mcp: WAL recovery failed — ${String(err)}`);
|
|
80
|
+
});
|
|
81
|
+
|
|
82
|
+
// ============================================================================
|
|
83
|
+
// Markdown Mirror (simplified — no OpenClaw API dependency)
|
|
84
|
+
// ============================================================================
|
|
85
|
+
|
|
86
|
+
const mirrorDir = config.mdMirror?.enabled
|
|
87
|
+
? (config.mdMirror.dir || join(getDefaultDbPath(), "..", "lancedb-pro-mirror"))
|
|
88
|
+
: null;
|
|
89
|
+
|
|
90
|
+
async function mirrorWrite(
|
|
91
|
+
text: string,
|
|
92
|
+
category: string,
|
|
93
|
+
scope: string,
|
|
94
|
+
timestamp?: number,
|
|
95
|
+
): Promise<void> {
|
|
96
|
+
if (!mirrorDir) return;
|
|
97
|
+
try {
|
|
98
|
+
const ts = new Date(timestamp || Date.now());
|
|
99
|
+
const dateStr = ts.toISOString().split("T")[0];
|
|
100
|
+
const filePath = join(mirrorDir, `${dateStr}.md`);
|
|
101
|
+
const safeText = text.replace(/\n/g, " ").slice(0, 500);
|
|
102
|
+
const line = `- ${ts.toISOString()} [${category}:${scope}] source=mcp ${safeText}\n`;
|
|
103
|
+
await mkdir(mirrorDir, { recursive: true });
|
|
104
|
+
await appendFile(filePath, line, "utf8");
|
|
105
|
+
} catch {
|
|
106
|
+
// Fail-open: mirror errors never block tool responses
|
|
107
|
+
}
|
|
108
|
+
}
|
|
109
|
+
|
|
110
|
+
// ============================================================================
|
|
111
|
+
// Helpers
|
|
112
|
+
// ============================================================================
|
|
113
|
+
|
|
114
|
+
function clampInt(value: number, min: number, max: number): number {
|
|
115
|
+
if (!Number.isFinite(value)) return min;
|
|
116
|
+
return Math.min(max, Math.max(min, Math.floor(value)));
|
|
117
|
+
}
|
|
118
|
+
|
|
119
|
+
function clamp01(value: number, fallback = 0.7): number {
|
|
120
|
+
if (!Number.isFinite(value)) return fallback;
|
|
121
|
+
return Math.min(1, Math.max(0, value));
|
|
122
|
+
}
|
|
123
|
+
|
|
124
|
+
// ============================================================================
|
|
125
|
+
// MCP Server
|
|
126
|
+
// ============================================================================
|
|
127
|
+
|
|
128
|
+
// Single MCP server instance. Tool handlers are registered on it below and
// it is connected to a stdio transport in main().
const server = new McpServer({
  name: "mnemo",
  version: "1.0.0",
});
|
|
132
|
+
|
|
133
|
+
// --- memory_search ---
|
|
134
|
+
server.tool(
  "memory_search",
  "Search through long-term memories using hybrid retrieval (vector + keyword search). Use when you need context about user preferences, past decisions, or previously discussed topics.",
  {
    query: z.string().describe("Search query for finding relevant memories"),
    limit: z.number().optional().describe("Max results to return (default: 5, max: 20)"),
    scope: z.string().optional().describe("Specific memory scope to search in"),
    category: z.enum(["preference", "fact", "decision", "entity", "other"]).optional(),
  },
  async ({ query, limit = 5, scope, category }) => {
    try {
      // Keep the result count in [1, 20] regardless of caller input.
      const safeLimit = clampInt(limit, 1, 20);

      // Default to every scope this server may read; an explicit scope
      // narrows the filter but must pass the access check first.
      let scopeFilter = scopeManager.getAccessibleScopes();
      if (scope) {
        if (scopeManager.isAccessible(scope)) {
          scopeFilter = [scope];
        } else {
          return { content: [{ type: "text" as const, text: `Access denied to scope: ${scope}` }] };
        }
      }

      const results = await retriever.retrieve({
        query,
        limit: safeLimit,
        scopeFilter,
        category,
        source: "manual",
      });

      if (results.length === 0) {
        return { content: [{ type: "text" as const, text: "No relevant memories found." }] };
      }

      // One line per hit: index, id, category:scope, text, score as a
      // percentage, and which retrieval stages (vector/BM25/rerank) matched.
      const text = results
        .map((r, i) => {
          const sources: string[] = [];
          if (r.sources.vector) sources.push("vector");
          if (r.sources.bm25) sources.push("BM25");
          if (r.sources.reranked) sources.push("reranked");
          return `${i + 1}. [${r.entry.id}] [${r.entry.category}:${r.entry.scope}] ${r.entry.text} (${(r.score * 100).toFixed(0)}%${sources.length > 0 ? `, ${sources.join("+")}` : ""})`;
        })
        .join("\n");

      return { content: [{ type: "text" as const, text: `Found ${results.length} memories:\n\n${text}` }] };
    } catch (error) {
      // MCP tools report failures as text content rather than throwing.
      return { content: [{ type: "text" as const, text: `Memory search failed: ${error instanceof Error ? error.message : String(error)}` }] };
    }
  },
);
|
|
184
|
+
|
|
185
|
+
// --- memory_store ---
|
|
186
|
+
server.tool(
  "memory_store",
  "Save important information in long-term memory. Use for preferences, facts, decisions, and other notable information.",
  {
    text: z.string().describe("Information to remember"),
    importance: z.number().optional().describe("Importance score 0-1 (default: 0.7)"),
    category: z.enum(["preference", "fact", "decision", "entity", "other"]).optional(),
    scope: z.string().optional().describe("Memory scope (optional, defaults to global)"),
  },
  async ({ text, importance = 0.7, category = "other", scope }) => {
    try {
      let targetScope = scope || scopeManager.getDefaultScope();

      if (!scopeManager.isAccessible(targetScope)) {
        return { content: [{ type: "text" as const, text: `Access denied to scope: ${targetScope}` }] };
      }

      // Reject obvious non-memories before paying for an embedding call.
      if (isNoise(text)) {
        return { content: [{ type: "text" as const, text: "Skipped: text detected as noise (greeting, boilerplate, or meta-question)" }] };
      }

      const safeImportance = clamp01(importance, 0.7);
      const vector = await embedder.embedPassage(text);

      // Dedup check (fail-open)
      let existing: Awaited<ReturnType<typeof store.vectorSearch>> = [];
      try {
        existing = await store.vectorSearch(vector, 1, 0.1, [targetScope]);
      } catch {
        // Dedup check failed — continue store
      }

      // Near-identical top match (score > 0.98) — report the duplicate
      // instead of storing a second copy.
      if (existing.length > 0 && existing[0].score > 0.98) {
        return {
          content: [{ type: "text" as const, text: `Similar memory already exists: "${existing[0].entry.text}"` }],
        };
      }

      // NOTE(review): `as any` bridges the zod enum to the store's category
      // type — confirm the two enums stay in sync.
      const entry = await store.store({
        text,
        vector,
        importance: safeImportance,
        category: category as any,
        scope: targetScope,
      });

      // Best-effort markdown mirror; mirrorWrite swallows its own errors.
      await mirrorWrite(text, category, targetScope, entry.timestamp);

      return {
        content: [{ type: "text" as const, text: `Stored: "${text.slice(0, 100)}${text.length > 100 ? "..." : ""}" [id=${entry.id}] in scope '${targetScope}'` }],
      };
    } catch (error) {
      return { content: [{ type: "text" as const, text: `Memory storage failed: ${error instanceof Error ? error.message : String(error)}` }] };
    }
  },
);
|
|
242
|
+
|
|
243
|
+
// --- memory_delete ---
|
|
244
|
+
server.tool(
|
|
245
|
+
"memory_delete",
|
|
246
|
+
"Delete a specific memory by ID.",
|
|
247
|
+
{
|
|
248
|
+
memoryId: z.string().describe("Memory ID to delete (full UUID or 8+ char prefix)"),
|
|
249
|
+
},
|
|
250
|
+
async ({ memoryId }) => {
|
|
251
|
+
try {
|
|
252
|
+
const scopeFilter = scopeManager.getAccessibleScopes();
|
|
253
|
+
const deleted = await store.delete(memoryId, scopeFilter);
|
|
254
|
+
if (deleted) {
|
|
255
|
+
return { content: [{ type: "text" as const, text: `Memory ${memoryId} deleted.` }] };
|
|
256
|
+
}
|
|
257
|
+
return { content: [{ type: "text" as const, text: `Memory ${memoryId} not found or access denied.` }] };
|
|
258
|
+
} catch (error) {
|
|
259
|
+
return { content: [{ type: "text" as const, text: `Memory deletion failed: ${error instanceof Error ? error.message : String(error)}` }] };
|
|
260
|
+
}
|
|
261
|
+
},
|
|
262
|
+
);
|
|
263
|
+
|
|
264
|
+
// --- memory_update ---
|
|
265
|
+
server.tool(
  "memory_update",
  "Update an existing memory in-place. Preserves original timestamp.",
  {
    memoryId: z.string().describe("ID of the memory to update (full UUID or 8+ char prefix)"),
    text: z.string().optional().describe("New text content (triggers re-embedding)"),
    importance: z.number().optional().describe("New importance score 0-1"),
    category: z.enum(["preference", "fact", "decision", "entity", "other"]).optional(),
  },
  async ({ memoryId, text, importance, category }) => {
    try {
      // Bail early on an empty patch.
      if (!text && importance === undefined && !category) {
        return { content: [{ type: "text" as const, text: "Nothing to update. Provide at least one of: text, importance, category." }] };
      }

      const scopeFilter = scopeManager.getAccessibleScopes();

      // New text is noise-checked and re-embedded so the stored vector stays
      // in sync with the content.
      let newVector: number[] | undefined;
      if (text) {
        if (isNoise(text)) {
          return { content: [{ type: "text" as const, text: "Skipped: updated text detected as noise" }] };
        }
        newVector = await embedder.embedPassage(text);
      }

      // Build a sparse patch — only fields the caller supplied are touched.
      const updates: Record<string, any> = {};
      if (text) updates.text = text;
      if (newVector) updates.vector = newVector;
      if (importance !== undefined) updates.importance = clamp01(importance, 0.7);
      if (category) updates.category = category;

      const updated = await store.update(memoryId, updates, scopeFilter);

      if (!updated) {
        return { content: [{ type: "text" as const, text: `Memory ${memoryId} not found or access denied.` }] };
      }

      return {
        content: [{ type: "text" as const, text: `Updated memory ${updated.id.slice(0, 8)}...: "${updated.text.slice(0, 80)}${updated.text.length > 80 ? "..." : ""}"` }],
      };
    } catch (error) {
      return { content: [{ type: "text" as const, text: `Memory update failed: ${error instanceof Error ? error.message : String(error)}` }] };
    }
  },
);
|
|
310
|
+
|
|
311
|
+
// --- memory_list ---
|
|
312
|
+
server.tool(
  "memory_list",
  "List recent memories with optional filtering by scope and category.",
  {
    limit: z.number().optional().describe("Max memories to list (default: 10, max: 50)"),
    offset: z.number().optional().describe("Number of memories to skip (default: 0)"),
    scope: z.string().optional().describe("Filter by specific scope"),
    category: z.enum(["preference", "fact", "decision", "entity", "other"]).optional(),
  },
  async ({ limit = 10, offset = 0, scope, category }) => {
    try {
      // Bound pagination inputs: limit in [1, 50], offset in [0, 1000].
      const safeLimit = clampInt(limit, 1, 50);
      const safeOffset = clampInt(offset, 0, 1000);

      // Same scope-narrowing pattern as memory_search: explicit scope must
      // pass the access check, otherwise all accessible scopes are listed.
      let scopeFilter = scopeManager.getAccessibleScopes();
      if (scope) {
        if (scopeManager.isAccessible(scope)) {
          scopeFilter = [scope];
        } else {
          return { content: [{ type: "text" as const, text: `Access denied to scope: ${scope}` }] };
        }
      }

      const entries = await store.list(scopeFilter, category, safeLimit, safeOffset);

      if (entries.length === 0) {
        return { content: [{ type: "text" as const, text: "No memories found." }] };
      }

      // Numbering continues from the offset so pages read contiguously.
      const text = entries
        .map((entry, i) => {
          const date = new Date(entry.timestamp).toISOString().split("T")[0];
          return `${safeOffset + i + 1}. [${entry.id}] [${entry.category}:${entry.scope}] ${entry.text.slice(0, 100)}${entry.text.length > 100 ? "..." : ""} (${date})`;
        })
        .join("\n");

      return { content: [{ type: "text" as const, text: `Recent memories (showing ${entries.length}):\n\n${text}` }] };
    } catch (error) {
      return { content: [{ type: "text" as const, text: `Failed to list memories: ${error instanceof Error ? error.message : String(error)}` }] };
    }
  },
);
|
|
354
|
+
|
|
355
|
+
// --- memory_stats ---
|
|
356
|
+
server.tool(
  "memory_stats",
  "Get statistics about memory usage, scopes, and categories.",
  {
    scope: z.string().optional().describe("Specific scope to get stats for"),
  },
  async ({ scope }) => {
    try {
      // Optional scope narrowing with the usual access check.
      let scopeFilter = scopeManager.getAccessibleScopes();
      if (scope) {
        if (scopeManager.isAccessible(scope)) {
          scopeFilter = [scope];
        } else {
          return { content: [{ type: "text" as const, text: `Access denied to scope: ${scope}` }] };
        }
      }

      const stats = await store.stats(scopeFilter);
      const scopeManagerStats = scopeManager.getStats();
      const retrievalConfig = retriever.getConfig();

      // Plain-text report: totals, per-scope counts, per-category counts.
      const text = [
        `Memory Statistics:`,
        ` Total memories: ${stats.totalCount}`,
        ` Available scopes: ${scopeManagerStats.totalScopes}`,
        ` Retrieval mode: ${retrievalConfig.mode}`,
        ` FTS support: ${store.hasFtsSupport ? "Yes" : "No"}`,
        ``,
        `Memories by scope:`,
        ...Object.entries(stats.scopeCounts).map(
          ([s, count]) => ` ${s}: ${count}`,
        ),
        ``,
        `Memories by category:`,
        ...Object.entries(stats.categoryCounts).map(
          ([c, count]) => ` ${c}: ${count}`,
        ),
      ].join("\n");

      return { content: [{ type: "text" as const, text }] };
    } catch (error) {
      return { content: [{ type: "text" as const, text: `Failed to get memory stats: ${error instanceof Error ? error.message : String(error)}` }] };
    }
  },
);
|
|
401
|
+
|
|
402
|
+
// ============================================================================
|
|
403
|
+
// Start
|
|
404
|
+
// ============================================================================
|
|
405
|
+
|
|
406
|
+
async function main() {
|
|
407
|
+
const transport = new StdioServerTransport();
|
|
408
|
+
await server.connect(transport);
|
|
409
|
+
console.error("mnemo MCP server started (stdio)");
|
|
410
|
+
}
|
|
411
|
+
|
|
412
|
+
main().catch((err) => {
|
|
413
|
+
console.error("Fatal:", err);
|
|
414
|
+
process.exit(1);
|
|
415
|
+
});
|
|
@@ -0,0 +1,71 @@
|
|
|
1
|
+
// SPDX-License-Identifier: MIT
|
|
2
|
+
/**
|
|
3
|
+
* Memory Categories — 6-category classification system
|
|
4
|
+
*
|
|
5
|
+
* UserMemory: profile, preferences, entities, events
|
|
6
|
+
* AgentMemory: cases, patterns
|
|
7
|
+
*/
|
|
8
|
+
|
|
9
|
+
export const MEMORY_CATEGORIES = [
|
|
10
|
+
"profile",
|
|
11
|
+
"preferences",
|
|
12
|
+
"entities",
|
|
13
|
+
"events",
|
|
14
|
+
"cases",
|
|
15
|
+
"patterns",
|
|
16
|
+
] as const;
|
|
17
|
+
|
|
18
|
+
export type MemoryCategory = (typeof MEMORY_CATEGORIES)[number];
|
|
19
|
+
|
|
20
|
+
/** Categories that always merge (skip dedup entirely). */
|
|
21
|
+
export const ALWAYS_MERGE_CATEGORIES = new Set<MemoryCategory>(["profile"]);
|
|
22
|
+
|
|
23
|
+
/** Categories that support MERGE decision from LLM dedup. */
|
|
24
|
+
export const MERGE_SUPPORTED_CATEGORIES = new Set<MemoryCategory>([
|
|
25
|
+
"preferences",
|
|
26
|
+
"entities",
|
|
27
|
+
"patterns",
|
|
28
|
+
]);
|
|
29
|
+
|
|
30
|
+
/** Categories that are append-only (CREATE or SKIP only, no MERGE). */
|
|
31
|
+
export const APPEND_ONLY_CATEGORIES = new Set<MemoryCategory>([
|
|
32
|
+
"events",
|
|
33
|
+
"cases",
|
|
34
|
+
]);
|
|
35
|
+
|
|
36
|
+
/** Memory tier levels for lifecycle management. */
|
|
37
|
+
export type MemoryTier = "core" | "working" | "peripheral";
|
|
38
|
+
|
|
39
|
+
/** A candidate memory extracted from conversation by LLM. */
|
|
40
|
+
export type CandidateMemory = {
|
|
41
|
+
category: MemoryCategory;
|
|
42
|
+
abstract: string; // L0: one-sentence index
|
|
43
|
+
overview: string; // L1: structured markdown summary
|
|
44
|
+
content: string; // L2: full narrative
|
|
45
|
+
};
|
|
46
|
+
|
|
47
|
+
/** Dedup decision from LLM. */
|
|
48
|
+
export type DedupDecision = "create" | "merge" | "skip" | "support" | "contextualize" | "contradict";
|
|
49
|
+
|
|
50
|
+
export type DedupResult = {
|
|
51
|
+
decision: DedupDecision;
|
|
52
|
+
reason: string;
|
|
53
|
+
matchId?: string; // ID of existing memory to merge with
|
|
54
|
+
contextLabel?: string; // Optional context label for support/contextualize/contradict
|
|
55
|
+
};
|
|
56
|
+
|
|
57
|
+
export type ExtractionStats = {
|
|
58
|
+
created: number;
|
|
59
|
+
merged: number;
|
|
60
|
+
skipped: number;
|
|
61
|
+
supported?: number; // context-aware support count
|
|
62
|
+
};
|
|
63
|
+
|
|
64
|
+
/** Validate and normalize a category string. */
|
|
65
|
+
export function normalizeCategory(raw: string): MemoryCategory | null {
|
|
66
|
+
const lower = raw.toLowerCase().trim();
|
|
67
|
+
if ((MEMORY_CATEGORIES as readonly string[]).includes(lower)) {
|
|
68
|
+
return lower as MemoryCategory;
|
|
69
|
+
}
|
|
70
|
+
return null;
|
|
71
|
+
}
|