neuralmemory 1.14.0 → 1.16.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +1 -1
- package/dist/index.d.ts +26 -1
- package/dist/index.js +174 -18
- package/dist/index.js.map +1 -1
- package/dist/types.d.ts +22 -0
- package/openclaw.plugin.json +111 -93
- package/package.json +53 -53
- package/src/index.ts +572 -342
- package/src/types.ts +28 -0
package/src/index.ts
CHANGED
|
@@ -1,342 +1,572 @@
|
|
|
1
|
-
/**
|
|
2
|
-
* NeuralMemory — OpenClaw Memory Plugin
|
|
3
|
-
*
|
|
4
|
-
* Brain-inspired persistent memory for AI agents.
|
|
5
|
-
* Occupies the exclusive "memory" plugin slot.
|
|
6
|
-
*
|
|
7
|
-
* Architecture:
|
|
8
|
-
* OpenClaw ←→ Plugin (TypeScript) ←→ MCP stdio ←→ NeuralMemory (Python)
|
|
9
|
-
*
|
|
10
|
-
* v1.7.0: Dynamic tool proxy — fetches all tools from MCP `tools/list`
|
|
11
|
-
* instead of hardcoding 6 tools. Automatically exposes every tool the
|
|
12
|
-
* MCP server provides (39+ tools in NM v2.28.0).
|
|
13
|
-
*
|
|
14
|
-
* v1.8.0: Compatible with NM v2.29.0 — RRF score fusion, graph-based
|
|
15
|
-
* query expansion, and Personalized PageRank activation.
|
|
16
|
-
*
|
|
17
|
-
* v1.8.1: Fix async register() — OpenClaw requires synchronous registration.
|
|
18
|
-
* Fallback tools registered sync; MCP connection deferred to service.start().
|
|
19
|
-
*
|
|
20
|
-
* v1.9.0: Backward-compat shim tools (memory_search, memory_get) to prevent
|
|
21
|
-
* "allowList contains unknown entries" warnings when NM replaces memory-core.
|
|
22
|
-
*
|
|
23
|
-
* v1.10.0: Singleton MCP client — multiple workspaces (multi-agent) share
|
|
24
|
-
* the same connected client instance, keyed by (pythonPath, brain). Fixes
|
|
25
|
-
* "NeuralMemory service not running" when OpenClaw registers the plugin
|
|
26
|
-
* for a second workspace after gateway startup.
|
|
27
|
-
*
|
|
28
|
-
* Registers:
|
|
29
|
-
* N tools — dynamically from MCP server (fallback: 5 core + 2 compat)
|
|
30
|
-
* 1 service — MCP process lifecycle (start/stop)
|
|
31
|
-
*
|
|
32
|
-
|
|
33
|
-
|
|
34
|
-
|
|
35
|
-
|
|
36
|
-
|
|
37
|
-
|
|
38
|
-
|
|
39
|
-
|
|
40
|
-
|
|
41
|
-
|
|
42
|
-
|
|
43
|
-
|
|
44
|
-
|
|
45
|
-
|
|
46
|
-
|
|
47
|
-
|
|
48
|
-
|
|
49
|
-
|
|
50
|
-
|
|
51
|
-
|
|
52
|
-
|
|
53
|
-
|
|
54
|
-
|
|
55
|
-
|
|
56
|
-
|
|
57
|
-
|
|
58
|
-
|
|
59
|
-
|
|
60
|
-
|
|
61
|
-
|
|
62
|
-
|
|
63
|
-
|
|
64
|
-
|
|
65
|
-
|
|
66
|
-
|
|
67
|
-
|
|
68
|
-
|
|
69
|
-
|
|
70
|
-
|
|
71
|
-
|
|
72
|
-
|
|
73
|
-
|
|
74
|
-
|
|
75
|
-
|
|
76
|
-
|
|
77
|
-
|
|
78
|
-
|
|
79
|
-
|
|
80
|
-
|
|
81
|
-
|
|
82
|
-
|
|
83
|
-
|
|
84
|
-
|
|
85
|
-
|
|
86
|
-
|
|
87
|
-
|
|
88
|
-
|
|
89
|
-
|
|
90
|
-
|
|
91
|
-
|
|
92
|
-
|
|
93
|
-
|
|
94
|
-
|
|
95
|
-
|
|
96
|
-
|
|
97
|
-
|
|
98
|
-
|
|
99
|
-
|
|
100
|
-
|
|
101
|
-
|
|
102
|
-
|
|
103
|
-
|
|
104
|
-
|
|
105
|
-
|
|
106
|
-
|
|
107
|
-
|
|
108
|
-
|
|
109
|
-
|
|
110
|
-
|
|
111
|
-
|
|
112
|
-
|
|
113
|
-
|
|
114
|
-
|
|
115
|
-
|
|
116
|
-
|
|
117
|
-
|
|
118
|
-
|
|
119
|
-
|
|
120
|
-
|
|
121
|
-
|
|
122
|
-
|
|
123
|
-
|
|
124
|
-
|
|
125
|
-
|
|
126
|
-
|
|
127
|
-
|
|
128
|
-
|
|
129
|
-
|
|
130
|
-
|
|
131
|
-
|
|
132
|
-
|
|
133
|
-
|
|
134
|
-
|
|
135
|
-
|
|
136
|
-
|
|
137
|
-
|
|
138
|
-
|
|
139
|
-
|
|
140
|
-
|
|
141
|
-
|
|
142
|
-
|
|
143
|
-
|
|
144
|
-
|
|
145
|
-
|
|
146
|
-
|
|
147
|
-
|
|
148
|
-
|
|
149
|
-
|
|
150
|
-
|
|
151
|
-
|
|
152
|
-
|
|
153
|
-
|
|
154
|
-
|
|
155
|
-
|
|
156
|
-
|
|
157
|
-
|
|
158
|
-
|
|
159
|
-
|
|
160
|
-
|
|
161
|
-
|
|
162
|
-
|
|
163
|
-
|
|
164
|
-
|
|
165
|
-
|
|
166
|
-
|
|
167
|
-
|
|
168
|
-
|
|
169
|
-
|
|
170
|
-
|
|
171
|
-
|
|
172
|
-
|
|
173
|
-
|
|
174
|
-
|
|
175
|
-
|
|
176
|
-
|
|
177
|
-
|
|
178
|
-
|
|
179
|
-
|
|
180
|
-
|
|
181
|
-
|
|
182
|
-
|
|
183
|
-
|
|
184
|
-
|
|
185
|
-
|
|
186
|
-
|
|
187
|
-
|
|
188
|
-
|
|
189
|
-
|
|
190
|
-
|
|
191
|
-
|
|
192
|
-
|
|
193
|
-
|
|
194
|
-
|
|
195
|
-
|
|
196
|
-
|
|
197
|
-
|
|
198
|
-
|
|
199
|
-
|
|
200
|
-
|
|
201
|
-
|
|
202
|
-
|
|
203
|
-
|
|
204
|
-
|
|
205
|
-
|
|
206
|
-
|
|
207
|
-
|
|
208
|
-
|
|
209
|
-
|
|
210
|
-
|
|
211
|
-
|
|
212
|
-
|
|
213
|
-
|
|
214
|
-
|
|
215
|
-
|
|
216
|
-
|
|
217
|
-
|
|
218
|
-
|
|
219
|
-
|
|
220
|
-
|
|
221
|
-
|
|
222
|
-
|
|
223
|
-
|
|
224
|
-
|
|
225
|
-
|
|
226
|
-
|
|
227
|
-
|
|
228
|
-
|
|
229
|
-
|
|
230
|
-
|
|
231
|
-
|
|
232
|
-
|
|
233
|
-
|
|
234
|
-
|
|
235
|
-
|
|
236
|
-
|
|
237
|
-
|
|
238
|
-
|
|
239
|
-
|
|
240
|
-
|
|
241
|
-
|
|
242
|
-
|
|
243
|
-
|
|
244
|
-
|
|
245
|
-
|
|
246
|
-
|
|
247
|
-
|
|
248
|
-
|
|
249
|
-
|
|
250
|
-
|
|
251
|
-
|
|
252
|
-
|
|
253
|
-
|
|
254
|
-
|
|
255
|
-
|
|
256
|
-
|
|
257
|
-
|
|
258
|
-
|
|
259
|
-
|
|
260
|
-
|
|
261
|
-
|
|
262
|
-
|
|
263
|
-
|
|
264
|
-
|
|
265
|
-
|
|
266
|
-
|
|
267
|
-
|
|
268
|
-
|
|
269
|
-
|
|
270
|
-
|
|
271
|
-
|
|
272
|
-
|
|
273
|
-
|
|
274
|
-
|
|
275
|
-
|
|
276
|
-
|
|
277
|
-
|
|
278
|
-
|
|
279
|
-
|
|
280
|
-
|
|
281
|
-
|
|
282
|
-
|
|
283
|
-
|
|
284
|
-
|
|
285
|
-
|
|
286
|
-
|
|
287
|
-
|
|
288
|
-
|
|
289
|
-
|
|
290
|
-
|
|
291
|
-
|
|
292
|
-
|
|
293
|
-
|
|
294
|
-
|
|
295
|
-
|
|
296
|
-
|
|
297
|
-
|
|
298
|
-
|
|
299
|
-
|
|
300
|
-
|
|
301
|
-
|
|
302
|
-
|
|
303
|
-
|
|
304
|
-
|
|
305
|
-
|
|
306
|
-
|
|
307
|
-
|
|
308
|
-
|
|
309
|
-
|
|
310
|
-
|
|
311
|
-
|
|
312
|
-
|
|
313
|
-
|
|
314
|
-
|
|
315
|
-
|
|
316
|
-
|
|
317
|
-
|
|
318
|
-
|
|
319
|
-
|
|
320
|
-
|
|
321
|
-
|
|
322
|
-
|
|
323
|
-
|
|
324
|
-
|
|
325
|
-
|
|
326
|
-
|
|
327
|
-
|
|
328
|
-
|
|
329
|
-
|
|
330
|
-
|
|
331
|
-
|
|
332
|
-
|
|
333
|
-
|
|
334
|
-
|
|
335
|
-
|
|
336
|
-
|
|
337
|
-
|
|
338
|
-
|
|
339
|
-
|
|
340
|
-
|
|
341
|
-
|
|
342
|
-
|
|
1
|
+
/**
|
|
2
|
+
* NeuralMemory — OpenClaw Memory Plugin
|
|
3
|
+
*
|
|
4
|
+
* Brain-inspired persistent memory for AI agents.
|
|
5
|
+
* Occupies the exclusive "memory" plugin slot.
|
|
6
|
+
*
|
|
7
|
+
* Architecture:
|
|
8
|
+
* OpenClaw ←→ Plugin (TypeScript) ←→ MCP stdio ←→ NeuralMemory (Python)
|
|
9
|
+
*
|
|
10
|
+
* v1.7.0: Dynamic tool proxy — fetches all tools from MCP `tools/list`
|
|
11
|
+
* instead of hardcoding 6 tools. Automatically exposes every tool the
|
|
12
|
+
* MCP server provides (39+ tools in NM v2.28.0).
|
|
13
|
+
*
|
|
14
|
+
* v1.8.0: Compatible with NM v2.29.0 — RRF score fusion, graph-based
|
|
15
|
+
* query expansion, and Personalized PageRank activation.
|
|
16
|
+
*
|
|
17
|
+
* v1.8.1: Fix async register() — OpenClaw requires synchronous registration.
|
|
18
|
+
* Fallback tools registered sync; MCP connection deferred to service.start().
|
|
19
|
+
*
|
|
20
|
+
* v1.9.0: Backward-compat shim tools (memory_search, memory_get) to prevent
|
|
21
|
+
* "allowList contains unknown entries" warnings when NM replaces memory-core.
|
|
22
|
+
*
|
|
23
|
+
* v1.10.0: Singleton MCP client — multiple workspaces (multi-agent) share
|
|
24
|
+
* the same connected client instance, keyed by (pythonPath, brain). Fixes
|
|
25
|
+
* "NeuralMemory service not running" when OpenClaw registers the plugin
|
|
26
|
+
* for a second workspace after gateway startup.
|
|
27
|
+
*
|
|
28
|
+
* Registers:
|
|
29
|
+
* N tools — dynamically from MCP server (fallback: 5 core + 2 compat)
|
|
30
|
+
* 1 service — MCP process lifecycle (start/stop)
|
|
31
|
+
* 5 hooks — before_prompt_build (auto-context), agent_end (auto-capture),
|
|
32
|
+
* before_compaction (flush), before_reset (flush),
|
|
33
|
+
* gateway_start (consolidation)
|
|
34
|
+
*/
|
|
35
|
+
|
|
36
|
+
import type {
|
|
37
|
+
OpenClawPluginDefinition,
|
|
38
|
+
OpenClawPluginApi,
|
|
39
|
+
BeforePromptBuildEvent,
|
|
40
|
+
BeforePromptBuildResult,
|
|
41
|
+
AgentEndEvent,
|
|
42
|
+
SessionCompactEvent,
|
|
43
|
+
CommandEvent,
|
|
44
|
+
GatewayStartupEvent,
|
|
45
|
+
} from "./types.js";
|
|
46
|
+
import { NeuralMemoryMcpClient } from "./mcp-client.js";
|
|
47
|
+
import type { PluginLogger } from "./types.js";
|
|
48
|
+
import { createToolsFromMcp, createFallbackTools, createCompatibilityTools } from "./tools.js";
|
|
49
|
+
import type { ToolDefinition } from "./tools.js";
|
|
50
|
+
|
|
51
|
+
// ── Prompt metadata stripping ─────────────────────────────
|
|
52
|
+
|
|
53
|
+
/**
|
|
54
|
+
* Strip metadata preamble from raw prompts before recall.
|
|
55
|
+
*
|
|
56
|
+
* OpenClaw + Telegram injects JSON metadata, NeuralMemory context blocks,
|
|
57
|
+
* env vars, and system boilerplate into ev.prompt. Passing these raw to
|
|
58
|
+
* nmem_recall creates junk neurons like "[concept] json message id".
|
|
59
|
+
*
|
|
60
|
+
* Stripping order matters — later passes clean up residue from earlier ones.
|
|
61
|
+
*/
|
|
62
|
+
export function stripPromptMetadata(raw: string): string {
|
|
63
|
+
let cleaned = raw;
|
|
64
|
+
|
|
65
|
+
// 1. Remove JSON blocks (Telegram metadata, conversation info)
|
|
66
|
+
cleaned = cleaned.replace(
|
|
67
|
+
/^\{[\s\S]*?"(?:conversation|message_id|sender_id|sender|chat_id|update_id)"[\s\S]*?\}$/gm,
|
|
68
|
+
"",
|
|
69
|
+
);
|
|
70
|
+
|
|
71
|
+
// 2. Remove NeuralMemory context sections (## Relevant Memories, etc.)
|
|
72
|
+
// The |$ ensures sections at end-of-string are also stripped.
|
|
73
|
+
cleaned = cleaned.replace(
|
|
74
|
+
/^#{1,3}\s*(?:Relevant Memories|Related Information|Relevant Context|Neural Memory)[\s\S]*?(?=\n#{1,3}\s|\n\n(?![-•*\s])|$)/gim,
|
|
75
|
+
"",
|
|
76
|
+
);
|
|
77
|
+
|
|
78
|
+
// 3. Remove neuron-type bullet lines injected by NM context
|
|
79
|
+
cleaned = cleaned.replace(
|
|
80
|
+
/^-\s*\[(?:concept|entity|decision|error|preference|insight|memory|fact|workflow|instruction|pattern)\].*$/gim,
|
|
81
|
+
"",
|
|
82
|
+
);
|
|
83
|
+
|
|
84
|
+
// 4. Remove [NeuralMemory — ...] wrapper lines
|
|
85
|
+
cleaned = cleaned.replace(/^\[NeuralMemory\s*[—–-].*\]$/gm, "");
|
|
86
|
+
|
|
87
|
+
// 5. Remove metadata labels (untrusted metadata lines)
|
|
88
|
+
cleaned = cleaned.replace(
|
|
89
|
+
/^(?:Conversation info|Sender|Context|System)\s*\(.*?\)\s*:?\s*$/gim,
|
|
90
|
+
"",
|
|
91
|
+
);
|
|
92
|
+
|
|
93
|
+
// 6. Remove env/export lines
|
|
94
|
+
cleaned = cleaned.replace(/^export\s+\w+=.*$/gm, "");
|
|
95
|
+
|
|
96
|
+
// 7. Collapse whitespace runs
|
|
97
|
+
cleaned = cleaned.replace(/\n{3,}/g, "\n\n").trim();
|
|
98
|
+
|
|
99
|
+
// Fallback: if everything was stripped, use last non-empty line of raw
|
|
100
|
+
if (!cleaned) {
|
|
101
|
+
const lines = raw.split("\n").filter((l) => l.trim());
|
|
102
|
+
cleaned = lines[lines.length - 1]?.trim() ?? raw.trim();
|
|
103
|
+
}
|
|
104
|
+
|
|
105
|
+
return cleaned;
|
|
106
|
+
}
|
|
107
|
+
|
|
108
|
+
// ── Auto-capture sanitization ─────────────────────────────
|
|
109
|
+
|
|
110
|
+
/**
|
|
111
|
+
* Strip NeuralMemory context noise and metadata from auto-capture text.
|
|
112
|
+
*
|
|
113
|
+
* When agent_end forwards assistant messages to nmem_auto, those messages
|
|
114
|
+
* may contain NM context wrappers that were injected by before_prompt_build.
|
|
115
|
+
* Re-ingesting these creates junk neurons like "[concept] json message id".
|
|
116
|
+
*
|
|
117
|
+
* This is defense-in-depth — the Python input_firewall also strips these,
|
|
118
|
+
* but catching them here avoids wasting network round-trips.
|
|
119
|
+
*/
|
|
120
|
+
export function sanitizeAutoCapture(raw: string): string {
|
|
121
|
+
let cleaned = raw;
|
|
122
|
+
|
|
123
|
+
// Strip NM context section headers
|
|
124
|
+
cleaned = cleaned.replace(
|
|
125
|
+
/^#{1,3}\s*(?:Relevant Memories|Related Information|Relevant Context|Neural Memory)\b.*$/gim,
|
|
126
|
+
"",
|
|
127
|
+
);
|
|
128
|
+
|
|
129
|
+
// Strip [NeuralMemory — ...] wrapper lines
|
|
130
|
+
cleaned = cleaned.replace(/^\[NeuralMemory\s*[—–-].*\]$/gm, "");
|
|
131
|
+
|
|
132
|
+
// Strip neuron-type bullet lines (- [concept] ..., - [error] ...)
|
|
133
|
+
cleaned = cleaned.replace(
|
|
134
|
+
/^-\s*\[(?:concept|entity|decision|error|preference|insight|memory|fact|workflow|instruction|pattern)\]\s.*$/gim,
|
|
135
|
+
"",
|
|
136
|
+
);
|
|
137
|
+
|
|
138
|
+
// Strip metadata labels
|
|
139
|
+
cleaned = cleaned.replace(
|
|
140
|
+
/^(?:Conversation info|Sender|Context)\s*\(.*?\)\s*:?\s*$/gim,
|
|
141
|
+
"",
|
|
142
|
+
);
|
|
143
|
+
|
|
144
|
+
// Strip short acknowledgement lines (< 20 chars, common filler)
|
|
145
|
+
cleaned = cleaned.replace(
|
|
146
|
+
/^(?:OK|Sure|Done|Got it|Understood|Noted|Alright|I see|Thanks|Thank you|Okay)\.?\s*$/gim,
|
|
147
|
+
"",
|
|
148
|
+
);
|
|
149
|
+
|
|
150
|
+
// Collapse whitespace
|
|
151
|
+
cleaned = cleaned.replace(/\n{3,}/g, "\n\n").trim();
|
|
152
|
+
|
|
153
|
+
return cleaned;
|
|
154
|
+
}
|
|
155
|
+
|
|
156
|
+
// ── System prompt for tool awareness ──────────────────────
|
|
157
|
+
|
|
158
|
+
/**
|
|
159
|
+
* Build a system prompt listing all registered tool names.
|
|
160
|
+
* This makes the agent aware of which nmem_* tools are available.
|
|
161
|
+
*/
|
|
162
|
+
function buildToolInstructions(tools: ToolDefinition[]): string {
|
|
163
|
+
const toolList = tools
|
|
164
|
+
.map((t) => `- ${t.name}: ${t.description.slice(0, 100)}`)
|
|
165
|
+
.join("\n");
|
|
166
|
+
|
|
167
|
+
return `Neural Memory gives you persistent memory across sessions. Use it proactively — each session starts fresh, so without explicit saves ALL discoveries are lost.
|
|
168
|
+
|
|
169
|
+
These are TOOL CALLS, not CLI commands. Do NOT run "nmem remember" in terminal.
|
|
170
|
+
|
|
171
|
+
## Available Tools
|
|
172
|
+
${toolList}
|
|
173
|
+
|
|
174
|
+
nmem_* is your primary memory system. memory_search/memory_get are legacy aliases for nmem_recall.
|
|
175
|
+
|
|
176
|
+
## WHEN TO RECALL
|
|
177
|
+
- New session starts → nmem_recall("current project context")
|
|
178
|
+
- User references past event → nmem_recall("<that topic>")
|
|
179
|
+
- Prefix queries with project name for precision
|
|
180
|
+
|
|
181
|
+
## WHEN TO SAVE
|
|
182
|
+
After each task: did you make a decision (type="decision", priority=7), fix a bug (type="error", priority=7), learn a preference (type="preference", priority=8), or discover an insight (type="insight", priority=6)?
|
|
183
|
+
|
|
184
|
+
Save with: nmem_remember(content="Chose X over Y because Z", type="decision", priority=7, tags=["project", "topic"])
|
|
185
|
+
|
|
186
|
+
## CONTENT QUALITY
|
|
187
|
+
- Max 1-3 sentences. Use causal language: "Chose X because Y", "Root cause was X, fixed by Y".
|
|
188
|
+
- Always include project name + topic in tags (lowercase).
|
|
189
|
+
- For temporary scratch notes: nmem_remember(content="...", ephemeral=true) — auto-expires, never synced.
|
|
190
|
+
|
|
191
|
+
## SESSION END
|
|
192
|
+
nmem_auto(action="process", text="<brief session summary>")
|
|
193
|
+
|
|
194
|
+
## COMPACT MODE
|
|
195
|
+
All tools support compact=true (saves 60-80% tokens) and token_budget=N.`;
|
|
196
|
+
}
|
|
197
|
+
|
|
198
|
+
// ── Config ─────────────────────────────────────────────────
|
|
199
|
+
|
|
200
|
+
type PluginConfig = {
|
|
201
|
+
pythonPath: string;
|
|
202
|
+
brain: string;
|
|
203
|
+
autoContext: boolean;
|
|
204
|
+
autoCapture: boolean;
|
|
205
|
+
autoFlush: boolean;
|
|
206
|
+
autoConsolidate: boolean;
|
|
207
|
+
contextDepth: number;
|
|
208
|
+
maxContextTokens: number;
|
|
209
|
+
timeout: number;
|
|
210
|
+
initTimeout: number;
|
|
211
|
+
};
|
|
212
|
+
|
|
213
|
+
const DEFAULT_CONFIG: Readonly<PluginConfig> = {
|
|
214
|
+
pythonPath: "python",
|
|
215
|
+
brain: "default",
|
|
216
|
+
autoContext: true,
|
|
217
|
+
autoCapture: true,
|
|
218
|
+
autoFlush: true,
|
|
219
|
+
autoConsolidate: true,
|
|
220
|
+
contextDepth: 1,
|
|
221
|
+
maxContextTokens: 500,
|
|
222
|
+
timeout: 30_000,
|
|
223
|
+
initTimeout: 90_000,
|
|
224
|
+
};
|
|
225
|
+
|
|
226
|
+
export const BRAIN_NAME_RE = /^[a-zA-Z0-9_\-.]{1,64}$/;
|
|
227
|
+
export const MAX_AUTO_CAPTURE_CHARS = 50_000;
|
|
228
|
+
|
|
229
|
+
export function resolveConfig(raw?: Record<string, unknown>): PluginConfig {
|
|
230
|
+
const merged = { ...DEFAULT_CONFIG, ...(raw ?? {}) };
|
|
231
|
+
|
|
232
|
+
return {
|
|
233
|
+
pythonPath:
|
|
234
|
+
typeof merged.pythonPath === "string" && merged.pythonPath.length > 0
|
|
235
|
+
? merged.pythonPath
|
|
236
|
+
: DEFAULT_CONFIG.pythonPath,
|
|
237
|
+
brain:
|
|
238
|
+
typeof merged.brain === "string" && BRAIN_NAME_RE.test(merged.brain)
|
|
239
|
+
? merged.brain
|
|
240
|
+
: DEFAULT_CONFIG.brain,
|
|
241
|
+
autoContext:
|
|
242
|
+
typeof merged.autoContext === "boolean"
|
|
243
|
+
? merged.autoContext
|
|
244
|
+
: DEFAULT_CONFIG.autoContext,
|
|
245
|
+
autoCapture:
|
|
246
|
+
typeof merged.autoCapture === "boolean"
|
|
247
|
+
? merged.autoCapture
|
|
248
|
+
: DEFAULT_CONFIG.autoCapture,
|
|
249
|
+
autoFlush:
|
|
250
|
+
typeof merged.autoFlush === "boolean"
|
|
251
|
+
? merged.autoFlush
|
|
252
|
+
: DEFAULT_CONFIG.autoFlush,
|
|
253
|
+
autoConsolidate:
|
|
254
|
+
typeof merged.autoConsolidate === "boolean"
|
|
255
|
+
? merged.autoConsolidate
|
|
256
|
+
: DEFAULT_CONFIG.autoConsolidate,
|
|
257
|
+
contextDepth:
|
|
258
|
+
typeof merged.contextDepth === "number" &&
|
|
259
|
+
Number.isInteger(merged.contextDepth) &&
|
|
260
|
+
merged.contextDepth >= 0 &&
|
|
261
|
+
merged.contextDepth <= 3
|
|
262
|
+
? merged.contextDepth
|
|
263
|
+
: DEFAULT_CONFIG.contextDepth,
|
|
264
|
+
maxContextTokens:
|
|
265
|
+
typeof merged.maxContextTokens === "number" &&
|
|
266
|
+
Number.isInteger(merged.maxContextTokens) &&
|
|
267
|
+
merged.maxContextTokens >= 100 &&
|
|
268
|
+
merged.maxContextTokens <= 10_000
|
|
269
|
+
? merged.maxContextTokens
|
|
270
|
+
: DEFAULT_CONFIG.maxContextTokens,
|
|
271
|
+
timeout:
|
|
272
|
+
typeof merged.timeout === "number" &&
|
|
273
|
+
Number.isFinite(merged.timeout) &&
|
|
274
|
+
merged.timeout >= 5_000 &&
|
|
275
|
+
merged.timeout <= 120_000
|
|
276
|
+
? merged.timeout
|
|
277
|
+
: DEFAULT_CONFIG.timeout,
|
|
278
|
+
initTimeout:
|
|
279
|
+
typeof merged.initTimeout === "number" &&
|
|
280
|
+
Number.isFinite(merged.initTimeout) &&
|
|
281
|
+
merged.initTimeout >= 10_000 &&
|
|
282
|
+
merged.initTimeout <= 300_000
|
|
283
|
+
? merged.initTimeout
|
|
284
|
+
: DEFAULT_CONFIG.initTimeout,
|
|
285
|
+
};
|
|
286
|
+
}
|
|
287
|
+
|
|
288
|
+
// ── Singleton MCP client pool ────────────────────────────────
|
|
289
|
+
// Multiple workspaces may call register() independently, but all
|
|
290
|
+
// should share the same MCP process per (pythonPath, brain) combo.
|
|
291
|
+
|
|
292
|
+
const mcpClients = new Map<string, NeuralMemoryMcpClient>();
|
|
293
|
+
|
|
294
|
+
function getOrCreateMcpClient(
|
|
295
|
+
cfg: PluginConfig,
|
|
296
|
+
logger: PluginLogger,
|
|
297
|
+
): NeuralMemoryMcpClient {
|
|
298
|
+
const key = `${cfg.pythonPath}::${cfg.brain}`;
|
|
299
|
+
|
|
300
|
+
const existing = mcpClients.get(key);
|
|
301
|
+
if (existing) {
|
|
302
|
+
logger.debug?.(`Reusing existing MCP client for brain "${cfg.brain}"`);
|
|
303
|
+
return existing;
|
|
304
|
+
}
|
|
305
|
+
|
|
306
|
+
const mcp = new NeuralMemoryMcpClient({
|
|
307
|
+
pythonPath: cfg.pythonPath,
|
|
308
|
+
brain: cfg.brain,
|
|
309
|
+
logger,
|
|
310
|
+
timeout: cfg.timeout,
|
|
311
|
+
initTimeout: cfg.initTimeout,
|
|
312
|
+
});
|
|
313
|
+
|
|
314
|
+
mcpClients.set(key, mcp);
|
|
315
|
+
return mcp;
|
|
316
|
+
}
|
|
317
|
+
|
|
318
|
+
// ── Plugin definition ──────────────────────────────────────

const plugin: OpenClawPluginDefinition = {
  id: "neuralmemory",
  name: "Neural Memory",
  description:
    "Brain-inspired persistent memory for AI agents — neurons, synapses, and fibers",
  // NOTE(review): diff header says package 1.16.1 — confirm release tooling
  // bumps this literal alongside package.json.
  version: "1.16.0",
  // Exclusive "memory" plugin slot.
  kind: "memory",

  // register() MUST stay synchronous (OpenClaw requirement, see v1.8.1
  // note in the file header); all async work is deferred to the service
  // and hook callbacks below.
  register(api: OpenClawPluginApi): void {
    const cfg = resolveConfig(api.pluginConfig);

    // Shared (possibly pre-existing) client from the singleton pool.
    const mcp = getOrCreateMcpClient(cfg, api.logger);

    // ── Register fallback tools synchronously ────────────
    // OpenClaw requires register() to be synchronous.
    // Register stable fallback tools immediately; MCP connection
    // and dynamic tool discovery happen in service.start().
    // Fallback tools auto-reconnect MCP on first call.

    const registeredTools = createFallbackTools(mcp);
    const compatTools = createCompatibilityTools(mcp);

    for (const t of [...registeredTools, ...compatTools]) {
      api.registerTool(t, { name: t.name });
    }

    api.logger.info(
      `Registered ${registeredTools.length} NeuralMemory tools + ${compatTools.length} compat shims (sync)`,
    );

    // ── Service: MCP process lifecycle ───────────────────

    api.registerService({
      id: "neuralmemory-mcp",

      async start(): Promise<void> {
        if (!mcp.connected) {
          try {
            await mcp.connect();
            api.logger.info("NeuralMemory MCP connected in service.start()");

            // Log discovered tools for diagnostics (cannot re-register
            // after register() — OpenClaw freezes the tool list).
            try {
              const dynamicTools = await createToolsFromMcp(mcp);
              api.logger.info(
                `NeuralMemory MCP discovered ${dynamicTools.length} tools`,
              );
            } catch (err) {
              // Discovery is informational only — do not fail startup.
              api.logger.warn(
                `Tool discovery failed: ${(err as Error).message}`,
              );
            }
          } catch (err) {
            api.logger.error(
              `Failed to start NeuralMemory MCP: ${(err as Error).message}`,
            );
            throw err;
          }
        }
      },

      async stop(): Promise<void> {
        // Remove from singleton pool so next register() creates fresh client
        const key = `${cfg.pythonPath}::${cfg.brain}`;
        mcpClients.delete(key);
        await mcp.close();
        api.logger.info("NeuralMemory MCP service stopped");
      },
    });

    // ── Hook: tool awareness + auto-context before prompt build ──
    // Migrated from legacy before_agent_start to before_prompt_build
    // per OpenClaw compatibility guidance (issue #116).

    api.on(
      "before_prompt_build",
      async (
        event: unknown,
        _ctx: unknown,
      ): Promise<BeforePromptBuildResult | void> => {
        // Always advertise the tool list; context injection is optional.
        const result: BeforePromptBuildResult = {
          systemPrompt: buildToolInstructions(registeredTools),
        };

        if (cfg.autoContext && mcp.connected) {
          const ev = event as BeforePromptBuildEvent;

          try {
            // Strip injected metadata so recall sees only user text.
            const query = stripPromptMetadata(ev.prompt);
            const raw = await mcp.callTool("nmem_recall", {
              query,
              depth: cfg.contextDepth,
              max_tokens: cfg.maxContextTokens,
              clean_for_prompt: true,
            });

            const data = JSON.parse(raw) as {
              answer?: string;
              confidence?: number;
            };

            // Only inject when recall is reasonably confident (> 0.1).
            if (data.answer && (data.confidence ?? 0) > 0.1) {
              result.prependContext = `[NeuralMemory — relevant context]\n${data.answer}`;
            }
          } catch (err) {
            // Recall failure must never block prompt building.
            api.logger.warn(
              `Auto-context failed: ${(err as Error).message}`,
            );
          }
        }

        return result;
      },
      { priority: 10 },
    );

    // ── Hook: auto-capture after agent completes ────────

    if (cfg.autoCapture) {
      api.on(
        "agent_end",
        async (event: unknown, _ctx: unknown): Promise<void> => {
          if (!mcp.connected) return;

          const ev = event as AgentEndEvent;
          // Only capture successful runs.
          if (!ev.success) return;

          try {
            // Last 5 messages, assistant turns only, size-capped.
            const messages = ev.messages?.slice(-5) ?? [];
            const rawText = messages
              .filter(
                (m: unknown): m is { role: string; content: string } =>
                  typeof m === "object" &&
                  m !== null &&
                  (m as { role?: string }).role === "assistant" &&
                  typeof (m as { content?: unknown }).content === "string",
              )
              .map((m) => m.content)
              .join("\n")
              .slice(0, MAX_AUTO_CAPTURE_CHARS);

            // Strip NM context noise and short acknowledgements before re-ingest
            const text = sanitizeAutoCapture(rawText);

            // Skip trivially short captures (< 50 chars of real content).
            if (text.length > 50) {
              await mcp.callTool("nmem_auto", {
                action: "process",
                text,
              });
            }
          } catch (err) {
            api.logger.warn(
              `Auto-capture failed: ${(err as Error).message}`,
            );
          }
        },
        { priority: 90 },
      );
    }

    // ── Hook: flush memories before context compaction ──
    // Migrated from legacy session:compact:before to before_compaction

    if (cfg.autoFlush) {
      api.on(
        "before_compaction",
        async (_event: unknown, _ctx: unknown): Promise<void> => {
          if (!mcp.connected) return;

          try {
            await mcp.callTool("nmem_auto", {
              action: "process",
              text: "[pre-compact emergency flush]",
            });
            api.logger.info("Pre-compact flush completed");
          } catch (err) {
            api.logger.warn(
              `Pre-compact flush failed: ${(err as Error).message}`,
            );
          }
        },
        { priority: 5 },
      );

      // Flush on session boundary (/new and /reset)
      // Migrated from legacy command:new + command:reset to before_reset
      api.on(
        "before_reset",
        async (_event: unknown, _ctx: unknown): Promise<void> => {
          if (!mcp.connected) return;

          try {
            await mcp.callTool("nmem_auto", {
              action: "process",
              text: "[session boundary — reset]",
            });
          } catch (err) {
            api.logger.warn(
              `Session boundary flush failed: ${(err as Error).message}`,
            );
          }
        },
        { priority: 10 },
      );
    }

    // ── Hook: consolidation on gateway start ─────────────
    // Migrated from legacy gateway:startup to gateway_start

    if (cfg.autoConsolidate) {
      api.on(
        "gateway_start",
        async (_event: unknown, _ctx: unknown): Promise<void> => {
          // Gateway start may fire before service.start() connected —
          // try to connect on demand; bail quietly on failure.
          if (!mcp.connected) {
            try {
              await mcp.ensureConnected();
            } catch (err) {
              api.logger.warn(
                `MCP connect on startup failed: ${(err as Error).message}`,
              );
              return;
            }
          }

          try {
            await mcp.callTool("nmem_consolidate", {
              strategy: "enrich",
              compact: true,
            });
            api.logger.info("Startup consolidation completed");
          } catch (err) {
            api.logger.warn(
              `Startup consolidation failed: ${(err as Error).message}`,
            );
          }
        },
        { priority: 50 },
      );
    }

    // ── Done ────────────────────────────────────────────

    api.logger.info(
      `NeuralMemory registered (brain: ${cfg.brain}, ` +
        `autoContext: ${cfg.autoContext}, autoCapture: ${cfg.autoCapture}, ` +
        `autoFlush: ${cfg.autoFlush}, autoConsolidate: ${cfg.autoConsolidate}) — ` +
        `tools will be loaded dynamically from MCP on service start`,
    );
  },
};

export default plugin;
|