engrm 0.4.35 → 0.4.37

This diff shows the changes between package versions publicly released to one of the supported registries. It is provided for informational purposes only and reflects the package contents as they appear in the registry.
package/README.md CHANGED
@@ -288,6 +288,25 @@ How to use it:
288
288
  - `load_recall_item` opens an exact handoff, thread, chat, or memory key returned by the index
289
289
  - `repair_recall` is the repair step when continuity is still thin, hook-only, or under-captured
290
290
 
291
+ ### Explicit Save Protocol
292
+
293
+ When something should be remembered on purpose, do not wait for an end-of-session
294
+ digest if an explicit write is more appropriate.
295
+
296
+ Use:
297
+
298
+ - `save_observation`
299
+ - direct durable memory write for a bugfix, decision, discovery, pattern,
300
+ feature, or change
301
+ - `create_handoff` / `refresh_handoff`
302
+ - preserve the active thread for another device or a later session
303
+ - `capture_openclaw_content`
304
+ - save OpenClaw-style research, posting, outcomes, and next actions as
305
+ reusable memory
306
+
307
+ Automatic session digests are a safety net.
308
+ They are not the only path for preserving important work.
309
+
291
310
  ### Thin Tools, Thick Memory
292
311
 
293
312
  Engrm now has a real thin-tool layer, not just a plugin spec.
package/dist/cli.js CHANGED
@@ -1592,7 +1592,12 @@ class MemDatabase {
1592
1592
  vecChatInsert(chatMessageId, embedding) {
1593
1593
  if (!this.vecAvailable)
1594
1594
  return;
1595
- this.db.query("INSERT OR REPLACE INTO vec_chat_messages (chat_message_id, embedding) VALUES (?, ?)").run(chatMessageId, new Uint8Array(embedding.buffer));
1595
+ const normalizedId = Number(chatMessageId);
1596
+ if (!Number.isInteger(normalizedId) || normalizedId <= 0)
1597
+ return;
1598
+ try {
1599
+ this.db.query("INSERT OR REPLACE INTO vec_chat_messages (chat_message_id, embedding) VALUES (?, ?)").run(normalizedId, new Uint8Array(embedding.buffer));
1600
+ } catch {}
1596
1601
  }
1597
1602
  searchChatVec(queryEmbedding, projectId, limit = 20, userId) {
1598
1603
  if (!this.vecAvailable)
@@ -2428,7 +2428,12 @@ class MemDatabase {
2428
2428
  vecChatInsert(chatMessageId, embedding) {
2429
2429
  if (!this.vecAvailable)
2430
2430
  return;
2431
- this.db.query("INSERT OR REPLACE INTO vec_chat_messages (chat_message_id, embedding) VALUES (?, ?)").run(chatMessageId, new Uint8Array(embedding.buffer));
2431
+ const normalizedId = Number(chatMessageId);
2432
+ if (!Number.isInteger(normalizedId) || normalizedId <= 0)
2433
+ return;
2434
+ try {
2435
+ this.db.query("INSERT OR REPLACE INTO vec_chat_messages (chat_message_id, embedding) VALUES (?, ?)").run(normalizedId, new Uint8Array(embedding.buffer));
2436
+ } catch {}
2432
2437
  }
2433
2438
  searchChatVec(queryEmbedding, projectId, limit = 20, userId) {
2434
2439
  if (!this.vecAvailable)
@@ -1770,7 +1770,12 @@ class MemDatabase {
1770
1770
  vecChatInsert(chatMessageId, embedding) {
1771
1771
  if (!this.vecAvailable)
1772
1772
  return;
1773
- this.db.query("INSERT OR REPLACE INTO vec_chat_messages (chat_message_id, embedding) VALUES (?, ?)").run(chatMessageId, new Uint8Array(embedding.buffer));
1773
+ const normalizedId = Number(chatMessageId);
1774
+ if (!Number.isInteger(normalizedId) || normalizedId <= 0)
1775
+ return;
1776
+ try {
1777
+ this.db.query("INSERT OR REPLACE INTO vec_chat_messages (chat_message_id, embedding) VALUES (?, ?)").run(normalizedId, new Uint8Array(embedding.buffer));
1778
+ } catch {}
1774
1779
  }
1775
1780
  searchChatVec(queryEmbedding, projectId, limit = 20, userId) {
1776
1781
  if (!this.vecAvailable)
@@ -1564,7 +1564,12 @@ class MemDatabase {
1564
1564
  vecChatInsert(chatMessageId, embedding) {
1565
1565
  if (!this.vecAvailable)
1566
1566
  return;
1567
- this.db.query("INSERT OR REPLACE INTO vec_chat_messages (chat_message_id, embedding) VALUES (?, ?)").run(chatMessageId, new Uint8Array(embedding.buffer));
1567
+ const normalizedId = Number(chatMessageId);
1568
+ if (!Number.isInteger(normalizedId) || normalizedId <= 0)
1569
+ return;
1570
+ try {
1571
+ this.db.query("INSERT OR REPLACE INTO vec_chat_messages (chat_message_id, embedding) VALUES (?, ?)").run(normalizedId, new Uint8Array(embedding.buffer));
1572
+ } catch {}
1568
1573
  }
1569
1574
  searchChatVec(queryEmbedding, projectId, limit = 20, userId) {
1570
1575
  if (!this.vecAvailable)
@@ -1640,7 +1640,12 @@ class MemDatabase {
1640
1640
  vecChatInsert(chatMessageId, embedding) {
1641
1641
  if (!this.vecAvailable)
1642
1642
  return;
1643
- this.db.query("INSERT OR REPLACE INTO vec_chat_messages (chat_message_id, embedding) VALUES (?, ?)").run(chatMessageId, new Uint8Array(embedding.buffer));
1643
+ const normalizedId = Number(chatMessageId);
1644
+ if (!Number.isInteger(normalizedId) || normalizedId <= 0)
1645
+ return;
1646
+ try {
1647
+ this.db.query("INSERT OR REPLACE INTO vec_chat_messages (chat_message_id, embedding) VALUES (?, ?)").run(normalizedId, new Uint8Array(embedding.buffer));
1648
+ } catch {}
1644
1649
  }
1645
1650
  searchChatVec(queryEmbedding, projectId, limit = 20, userId) {
1646
1651
  if (!this.vecAvailable)
@@ -3225,7 +3225,7 @@ import { existsSync as existsSync3, readFileSync as readFileSync2, writeFileSync
3225
3225
  import { join as join3 } from "node:path";
3226
3226
  import { homedir } from "node:os";
3227
3227
  var STATE_PATH = join3(homedir(), ".engrm", "config-fingerprint.json");
3228
- var CLIENT_VERSION = "0.4.35";
3228
+ var CLIENT_VERSION = "0.4.37";
3229
3229
  function hashFile(filePath) {
3230
3230
  try {
3231
3231
  if (!existsSync3(filePath))
@@ -5224,7 +5224,12 @@ class MemDatabase {
5224
5224
  vecChatInsert(chatMessageId, embedding) {
5225
5225
  if (!this.vecAvailable)
5226
5226
  return;
5227
- this.db.query("INSERT OR REPLACE INTO vec_chat_messages (chat_message_id, embedding) VALUES (?, ?)").run(chatMessageId, new Uint8Array(embedding.buffer));
5227
+ const normalizedId = Number(chatMessageId);
5228
+ if (!Number.isInteger(normalizedId) || normalizedId <= 0)
5229
+ return;
5230
+ try {
5231
+ this.db.query("INSERT OR REPLACE INTO vec_chat_messages (chat_message_id, embedding) VALUES (?, ?)").run(normalizedId, new Uint8Array(embedding.buffer));
5232
+ } catch {}
5228
5233
  }
5229
5234
  searchChatVec(queryEmbedding, projectId, limit = 20, userId) {
5230
5235
  if (!this.vecAvailable)
@@ -1797,7 +1797,12 @@ class MemDatabase {
1797
1797
  vecChatInsert(chatMessageId, embedding) {
1798
1798
  if (!this.vecAvailable)
1799
1799
  return;
1800
- this.db.query("INSERT OR REPLACE INTO vec_chat_messages (chat_message_id, embedding) VALUES (?, ?)").run(chatMessageId, new Uint8Array(embedding.buffer));
1800
+ const normalizedId = Number(chatMessageId);
1801
+ if (!Number.isInteger(normalizedId) || normalizedId <= 0)
1802
+ return;
1803
+ try {
1804
+ this.db.query("INSERT OR REPLACE INTO vec_chat_messages (chat_message_id, embedding) VALUES (?, ?)").run(normalizedId, new Uint8Array(embedding.buffer));
1805
+ } catch {}
1801
1806
  }
1802
1807
  searchChatVec(queryEmbedding, projectId, limit = 20, userId) {
1803
1808
  if (!this.vecAvailable)
@@ -3082,7 +3087,7 @@ function buildBeacon(db, config, sessionId, metrics) {
3082
3087
  sentinel_used: valueSignals.security_findings_count > 0,
3083
3088
  risk_score: riskScore,
3084
3089
  stacks_detected: stacks,
3085
- client_version: "0.4.35",
3090
+ client_version: "0.4.37",
3086
3091
  context_observations_injected: metrics?.contextObsInjected ?? 0,
3087
3092
  context_total_available: metrics?.contextTotalAvailable ?? 0,
3088
3093
  recall_attempts: metrics?.recallAttempts ?? 0,
@@ -1708,7 +1708,12 @@ class MemDatabase {
1708
1708
  vecChatInsert(chatMessageId, embedding) {
1709
1709
  if (!this.vecAvailable)
1710
1710
  return;
1711
- this.db.query("INSERT OR REPLACE INTO vec_chat_messages (chat_message_id, embedding) VALUES (?, ?)").run(chatMessageId, new Uint8Array(embedding.buffer));
1711
+ const normalizedId = Number(chatMessageId);
1712
+ if (!Number.isInteger(normalizedId) || normalizedId <= 0)
1713
+ return;
1714
+ try {
1715
+ this.db.query("INSERT OR REPLACE INTO vec_chat_messages (chat_message_id, embedding) VALUES (?, ?)").run(normalizedId, new Uint8Array(embedding.buffer));
1716
+ } catch {}
1712
1717
  }
1713
1718
  searchChatVec(queryEmbedding, projectId, limit = 20, userId) {
1714
1719
  if (!this.vecAvailable)
package/dist/server.js CHANGED
@@ -15114,7 +15114,12 @@ class MemDatabase {
15114
15114
  vecChatInsert(chatMessageId, embedding) {
15115
15115
  if (!this.vecAvailable)
15116
15116
  return;
15117
- this.db.query("INSERT OR REPLACE INTO vec_chat_messages (chat_message_id, embedding) VALUES (?, ?)").run(chatMessageId, new Uint8Array(embedding.buffer));
15117
+ const normalizedId = Number(chatMessageId);
15118
+ if (!Number.isInteger(normalizedId) || normalizedId <= 0)
15119
+ return;
15120
+ try {
15121
+ this.db.query("INSERT OR REPLACE INTO vec_chat_messages (chat_message_id, embedding) VALUES (?, ?)").run(normalizedId, new Uint8Array(embedding.buffer));
15122
+ } catch {}
15118
15123
  }
15119
15124
  searchChatVec(queryEmbedding, projectId, limit = 20, userId) {
15120
15125
  if (!this.vecAvailable)
@@ -16387,11 +16392,13 @@ function mergeResults(ftsResults, vecResults, limit) {
16387
16392
  return Array.from(scores.entries()).map(([id, score]) => ({ id, score })).sort((a, b) => b.score - a.score).slice(0, limit);
16388
16393
  }
16389
16394
  function sanitizeFtsQuery(query) {
16390
- let safe = query.replace(/[{}()[\]^~*:]/g, " ");
16391
- safe = safe.replace(/\s+/g, " ").trim();
16392
- if (!safe)
16395
+ const normalized = String(query ?? "").normalize("NFKC").replace(/[^\p{L}\p{N}_\s]+/gu, " ").replace(/\s+/g, " ").trim();
16396
+ if (!normalized)
16393
16397
  return "";
16394
- return safe;
16398
+ const terms = normalized.split(" ").map((term) => term.trim()).filter(Boolean).slice(0, 16);
16399
+ if (terms.length === 0)
16400
+ return "";
16401
+ return terms.map((term) => `"${term}"`).join(" ");
16395
16402
  }
16396
16403
 
16397
16404
  // src/tools/recent-chat.ts
@@ -18947,6 +18954,7 @@ function getMemoryConsole(db, input) {
18947
18954
  recent_outcomes: projectIndex?.recent_outcomes ?? [],
18948
18955
  hot_files: projectIndex?.hot_files ?? [],
18949
18956
  provenance_summary: projectIndex?.provenance_summary ?? [],
18957
+ provenance_type_mix: collectProvenanceTypeMix(observations),
18950
18958
  assistant_checkpoint_count: projectIndex?.assistant_checkpoint_count,
18951
18959
  assistant_checkpoint_types: projectIndex?.assistant_checkpoint_types ?? [],
18952
18960
  top_types: projectIndex?.top_types ?? [],
@@ -18954,6 +18962,24 @@ function getMemoryConsole(db, input) {
18954
18962
  suggested_tools: projectIndex?.suggested_tools ?? buildFallbackSuggestedTools(sessions.length, requests.length, tools.length, observations.length, recentHandoffs.length, recentChat.messages.length, recentChat.coverage_state, activeAgents.length)
18955
18963
  };
18956
18964
  }
18965
+ function collectProvenanceTypeMix(observations) {
18966
+ const grouped = new Map;
18967
+ for (const observation of observations) {
18968
+ if (!observation.source_tool)
18969
+ continue;
18970
+ const typeCounts = grouped.get(observation.source_tool) ?? new Map;
18971
+ typeCounts.set(observation.type, (typeCounts.get(observation.type) ?? 0) + 1);
18972
+ grouped.set(observation.source_tool, typeCounts);
18973
+ }
18974
+ return Array.from(grouped.entries()).map(([tool, typeCounts]) => {
18975
+ const topTypes = Array.from(typeCounts.entries()).map(([type, count]) => ({ type, count })).sort((a, b) => b.count - a.count || a.type.localeCompare(b.type)).slice(0, 4);
18976
+ return {
18977
+ tool,
18978
+ count: topTypes.reduce((sum, item) => sum + item.count, 0),
18979
+ top_types: topTypes
18980
+ };
18981
+ }).sort((a, b) => b.count - a.count || a.tool.localeCompare(b.tool)).slice(0, 6);
18982
+ }
18957
18983
  function buildFallbackSuggestedTools(sessionCount, requestCount, toolCount, observationCount, handoffCount, chatCount, chatCoverageState, activeAgentCount) {
18958
18984
  const suggested = [];
18959
18985
  if (sessionCount > 0)
@@ -20686,7 +20712,12 @@ async function repairRecall(db, config2, input = {}) {
20686
20712
  let sessionsWithImports = 0;
20687
20713
  for (const session of targetSessions) {
20688
20714
  const sessionCwd = session.project_id !== null ? db.getProjectById(session.project_id)?.local_path ?? cwd : cwd;
20689
- const syncResult = await syncTranscriptChat(db, config2, session.session_id, sessionCwd, input.transcript_path);
20715
+ let syncResult = { imported: 0, total: 0 };
20716
+ try {
20717
+ syncResult = await syncTranscriptChat(db, config2, session.session_id, sessionCwd, input.transcript_path);
20718
+ } catch {
20719
+ syncResult = { imported: 0, total: 0 };
20720
+ }
20690
20721
  const chatMessages = db.getSessionChatMessages(session.session_id, 200);
20691
20722
  const prompts = db.getSessionUserPrompts(session.session_id, 200);
20692
20723
  const sourceSummary = summarizeChatSources(chatMessages);
@@ -22842,9 +22873,9 @@ process.on("SIGTERM", () => {
22842
22873
  });
22843
22874
  var server = new McpServer({
22844
22875
  name: "engrm",
22845
- version: "0.4.35"
22876
+ version: "0.4.37"
22846
22877
  });
22847
- server.tool("save_observation", "Save an observation to memory", {
22878
+ server.tool("save_observation", "Directly save a durable memory item now. Use this when something should be remembered on purpose instead of waiting for an end-of-session digest.", {
22848
22879
  type: exports_external.enum([
22849
22880
  "bugfix",
22850
22881
  "discovery",
@@ -23141,7 +23172,7 @@ Findings: ${findingSummary}` : ""}`
23141
23172
  ]
23142
23173
  };
23143
23174
  });
23144
- server.tool("capture_openclaw_content", "Capture OpenClaw content, research, and follow-up work as durable memory. Best for preserving posted outcomes, discoveries, and next actions.", {
23175
+ server.tool("capture_openclaw_content", "Directly save OpenClaw content, research, and follow-up work as durable memory. Best for preserving posted outcomes, discoveries, and next actions during or right after the run.", {
23145
23176
  title: exports_external.string().optional().describe("Short content, campaign, or research title."),
23146
23177
  posted: exports_external.array(exports_external.string()).optional().describe("Concrete posted items or shipped content outcomes."),
23147
23178
  researched: exports_external.array(exports_external.string()).optional().describe("Research or discovery items worth retaining."),
@@ -23815,7 +23846,7 @@ server.tool("memory_console", "Show a high-signal local overview of what Engrm c
23815
23846
  `) : "- (none)";
23816
23847
  const provenanceLines = result.provenance_summary.length > 0 ? result.provenance_summary.map((item) => `- ${item.tool}: ${item.count}`).join(`
23817
23848
  `) : "- (none)";
23818
- const provenanceMixLines2 = result.provenance_type_mix.length > 0 ? result.provenance_type_mix.map((item) => `- ${item.tool}: ${item.top_types.map((entry) => `${entry.type} ${entry.count}`).join(", ")}`).join(`
23849
+ const provenanceMixLines = result.provenance_type_mix.length > 0 ? result.provenance_type_mix.map((item) => `- ${item.tool}: ${item.top_types.map((entry) => `${entry.type} ${entry.count}`).join(", ")}`).join(`
23819
23850
  `) : "- (none)";
23820
23851
  const checkpointTypeLines = result.assistant_checkpoint_types.length > 0 ? result.assistant_checkpoint_types.map((item) => `- ${item.type}: ${item.count}`).join(`
23821
23852
  `) : "- (none)";
@@ -23935,6 +23966,8 @@ server.tool("capture_quality", "Show how healthy Engrm capture is across the wor
23935
23966
  const provenanceLines = result.provenance_summary.length > 0 ? result.provenance_summary.map((item) => `- ${item.tool}: ${item.count}`).join(`
23936
23967
  `) : "- (none)";
23937
23968
  const checkpointTypeLines = result.assistant_checkpoint_types.length > 0 ? result.assistant_checkpoint_types.map((item) => `- ${item.type}: ${item.count}`).join(`
23969
+ `) : "- (none)";
23970
+ const provenanceMixLines = result.provenance_type_mix.length > 0 ? result.provenance_type_mix.map((item) => `- ${item.tool}: ${item.top_types.map((entry) => `${entry.type} ${entry.count}`).join(", ")}`).join(`
23938
23971
  `) : "- (none)";
23939
23972
  const projectLines = result.top_projects.length > 0 ? result.top_projects.map((project) => `- ${project.name} [${project.raw_capture_state}] obs=${project.observation_count} sessions=${project.session_count} prompts=${project.prompt_count} tools=${project.tool_event_count} checkpoints=${project.assistant_checkpoint_count} chat=${project.chat_message_count} (${project.chat_coverage_state})`).join(`
23940
23973
  `) : "- (none)";
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "engrm",
3
- "version": "0.4.35",
3
+ "version": "0.4.37",
4
4
  "description": "Shared memory across devices, sessions, and agents, with thin MCP tools for durable capture and live continuity",
5
5
  "mcpName": "io.github.dr12hes/engrm",
6
6
  "type": "module",