mr-memory 2.5.3 → 2.7.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (2)
  1. package/index.ts +24 -9
  2. package/package.json +1 -1
package/index.ts CHANGED
@@ -16,8 +16,19 @@ import type { OpenClawPluginApi } from "openclaw/plugin-sdk";
16
16
  const DEFAULT_ENDPOINT = "https://api.memoryrouter.ai";
17
17
 
18
18
  /** Wrap raw memory context in XML tags with a strong instruction */
19
- function wrapMemoryContext(context: string): string {
20
- return `<memory_context>\n${context}\n</memory_context>\n\nThe above are retrieved memories from past conversations — not current events. Reference them as background context with appropriate temporal framing. Do not treat them as part of the current message or present moment.`;
19
+ /** Wrap API response in extraction markers so we can strip it next turn. */
20
+ function wrapForInjection(context: string): string {
21
+ return `<mr-memory>\n${context}\n</mr-memory>`;
22
+ }
23
+
24
+ /** Strip previous memory injections from message text to prevent stacking.
25
+ * prependContext persists in conversation history — without stripping,
26
+ * each turn accumulates another full injection (~20K tokens). */
27
+ const MEMORY_TAG_RE = /<mr-memory>[\s\S]*?<\/mr-memory>\s*/g;
28
+ /** Legacy tag pattern for backward compat (pre-2.7.0 injections still in history) */
29
+ const LEGACY_TAG_RE = /<memory_context>[\s\S]*?<\/memory_context>\s*(?:The above are retrieved memories|IMPORTANT: The above block contains retrieved memories)[^\n]*\n*/g;
30
+ function stripOldMemory(text: string): string {
31
+ return text.replace(MEMORY_TAG_RE, "").replace(LEGACY_TAG_RE, "").trim();
21
32
  }
22
33
 
23
34
  // Workspace files OpenClaw loads into the system prompt
@@ -160,6 +171,7 @@ const memoryRouterPlugin = {
160
171
  // When PR #24122 merges, OpenClaw will use the returned prependContext.
161
172
  // This gives forward compatibility — no plugin update needed.
162
173
  api.on("llm_input", async (event, ctx) => {
174
+ api.logger.warn?.(`memoryrouter: llm_input fired (sessionKey=${ctx.sessionKey}, promptBuildFired=${promptBuildFiredThisRun})`);
163
175
  // Skip the first call — before_prompt_build already handled it
164
176
  // (before_prompt_build includes workspace+tools+skills for accurate billing)
165
177
  if (promptBuildFiredThisRun) {
@@ -186,10 +198,11 @@ const memoryRouterPlugin = {
186
198
  .map(b => b.text!)
187
199
  .join("\n");
188
200
  }
189
- if (text) contextPayload.push({ role: m.role, content: text });
201
+ // Strip old memory injections to prevent stacking
202
+ if (text) contextPayload.push({ role: m.role, content: m.role === "user" ? stripOldMemory(text) : text });
190
203
  }
191
204
  }
192
- contextPayload.push({ role: "user", content: prompt });
205
+ contextPayload.push({ role: "user", content: stripOldMemory(prompt) });
193
206
 
194
207
 
195
208
  const res = await fetch(`${endpoint}/v1/memory/prepare`, {
@@ -218,7 +231,7 @@ const memoryRouterPlugin = {
218
231
  api.logger.info?.(
219
232
  `memoryrouter: injected ${data.memories_found || 0} memories on tool iteration (${data.tokens_billed || 0} tokens billed)`,
220
233
  );
221
- return { prependContext: wrapMemoryContext(data.context) };
234
+ return { prependContext: wrapForInjection(data.context) };
222
235
  }
223
236
  } catch {
224
237
  // Silent fail on tool iterations — don't block the agent
@@ -228,6 +241,7 @@ const memoryRouterPlugin = {
228
241
  // ── before_prompt_build: fires once per run (primary, includes full billing context)
229
242
  api.on("before_prompt_build", async (event, ctx) => {
230
243
  promptBuildFiredThisRun = true;
244
+ api.logger.warn?.(`memoryrouter: before_prompt_build fired (sessionKey=${ctx.sessionKey}, promptLen=${event.prompt?.length})`);
231
245
  try {
232
246
  const prompt = event.prompt;
233
247
 
@@ -276,15 +290,16 @@ const memoryRouterPlugin = {
276
290
  }
277
291
 
278
292
  if (text) {
279
- contextPayload.push({ role: m.role, content: text });
293
+ // Strip old memory injections to prevent stacking
294
+ contextPayload.push({ role: m.role, content: m.role === "user" ? stripOldMemory(text) : text });
280
295
  } else {
281
296
  skipped++;
282
297
  }
283
298
  }
284
299
  }
285
300
 
286
- // Add current user prompt
287
- contextPayload.push({ role: "user", content: prompt });
301
+ // Add current user prompt (strip any residual memory tags)
302
+ contextPayload.push({ role: "user", content: stripOldMemory(prompt) });
288
303
 
289
304
  // 4. Call /v1/memory/prepare
290
305
 
@@ -317,7 +332,7 @@ const memoryRouterPlugin = {
317
332
  api.logger.info?.(
318
333
  `memoryrouter: injected ${data.memories_found || 0} memories (${data.tokens_billed || 0} tokens billed)`,
319
334
  );
320
- return { prependContext: wrapMemoryContext(data.context) };
335
+ return { prependContext: wrapForInjection(data.context) };
321
336
  }
322
337
  } catch (err) {
323
338
  api.logger.warn?.(
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "mr-memory",
3
- "version": "2.5.3",
3
+ "version": "2.7.0",
4
4
  "description": "MemoryRouter persistent memory plugin for OpenClaw — your AI remembers every conversation",
5
5
  "type": "module",
6
6
  "files": [