@j0hanz/memdb 1.2.8 → 1.3.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (56) hide show
  1. package/README.md +110 -287
  2. package/dist/assets/logo.svg +12 -0
  3. package/dist/config.d.ts +0 -1
  4. package/dist/config.js +0 -1
  5. package/dist/core/db.d.ts +6 -3
  6. package/dist/core/db.js +102 -47
  7. package/dist/core/memory-read.d.ts +1 -2
  8. package/dist/core/memory-read.js +32 -18
  9. package/dist/core/memory-write.d.ts +1 -2
  10. package/dist/core/memory-write.js +59 -83
  11. package/dist/core/relationships.d.ts +0 -1
  12. package/dist/core/relationships.js +15 -36
  13. package/dist/core/search.d.ts +2 -3
  14. package/dist/core/search.js +96 -89
  15. package/dist/index.d.ts +0 -1
  16. package/dist/index.js +42 -15
  17. package/dist/instructions.md +41 -24
  18. package/dist/logger.d.ts +0 -1
  19. package/dist/logger.js +0 -1
  20. package/dist/protocol-version-guard.d.ts +0 -1
  21. package/dist/protocol-version-guard.js +0 -1
  22. package/dist/schemas.d.ts +0 -1
  23. package/dist/schemas.js +14 -11
  24. package/dist/stdio-transport.d.ts +0 -1
  25. package/dist/stdio-transport.js +0 -1
  26. package/dist/tools.d.ts +1 -2
  27. package/dist/tools.js +195 -211
  28. package/dist/types.d.ts +0 -1
  29. package/dist/types.js +0 -1
  30. package/package.json +23 -20
  31. package/dist/config.d.ts.map +0 -1
  32. package/dist/config.js.map +0 -1
  33. package/dist/core/db.d.ts.map +0 -1
  34. package/dist/core/db.js.map +0 -1
  35. package/dist/core/memory-read.d.ts.map +0 -1
  36. package/dist/core/memory-read.js.map +0 -1
  37. package/dist/core/memory-write.d.ts.map +0 -1
  38. package/dist/core/memory-write.js.map +0 -1
  39. package/dist/core/relationships.d.ts.map +0 -1
  40. package/dist/core/relationships.js.map +0 -1
  41. package/dist/core/search.d.ts.map +0 -1
  42. package/dist/core/search.js.map +0 -1
  43. package/dist/index.d.ts.map +0 -1
  44. package/dist/index.js.map +0 -1
  45. package/dist/logger.d.ts.map +0 -1
  46. package/dist/logger.js.map +0 -1
  47. package/dist/protocol-version-guard.d.ts.map +0 -1
  48. package/dist/protocol-version-guard.js.map +0 -1
  49. package/dist/schemas.d.ts.map +0 -1
  50. package/dist/schemas.js.map +0 -1
  51. package/dist/stdio-transport.d.ts.map +0 -1
  52. package/dist/stdio-transport.js.map +0 -1
  53. package/dist/tools.d.ts.map +0 -1
  54. package/dist/tools.js.map +0 -1
  55. package/dist/types.d.ts.map +0 -1
  56. package/dist/types.js.map +0 -1
@@ -1,8 +1,15 @@
1
- import { db, executeAll, loadTagsForMemoryIds, mapRowToRelationship, mapRowToSearchResult, prepareCached, toSafeInteger, } from './db.js';
1
+ import { executeAll, loadTagsForMemoryIds, mapRowToRelationship, mapRowToSearchResult, prepareCached, toSafeInteger, } from './db.js';
2
2
  const MAX_QUERY_TOKENS = 50;
3
3
  const DEFAULT_LIMIT = 100;
4
+ const MAX_RECALL_DEPTH = 3;
5
+ const MAX_RECALL_MEMORIES = 50;
4
6
  const RECENCY_DECAY_DAYS = 7;
5
7
  const RECENCY_WEIGHT = 0.15;
8
+ const throwIfAborted = (signal) => {
9
+ if (signal && typeof signal.throwIfAborted === 'function') {
10
+ signal.throwIfAborted();
11
+ }
12
+ };
6
13
  const tokenizeQuery = (query) => {
7
14
  const parts = query
8
15
  .trim()
@@ -15,21 +22,12 @@ const tokenizeQuery = (query) => {
15
22
  }
16
23
  return parts;
17
24
  };
18
- const buildFtsQuery = (tokens) => {
19
- if (tokens.length === 0)
20
- return '""';
21
- const escaped = tokens.map((t) => `"${t.replace(/"/g, '""')}"`);
22
- return escaped.join(' OR ');
23
- };
24
- const buildTagPlaceholders = (count) => {
25
- return Array.from({ length: count }, () => '?').join(', ');
26
- };
25
+ const escapeFtsToken = (token) => `"${token.replace(/"/g, '""')}"`;
26
+ const buildFtsQuery = (tokens) => tokens.length === 0 ? '""' : tokens.map(escapeFtsToken).join(' OR ');
27
27
  const buildSearchQuery = (tokens) => {
28
28
  const ftsQuery = buildFtsQuery(tokens);
29
- const tagPlaceholders = buildTagPlaceholders(tokens.length);
30
29
  const relevanceExpr = '1.0 / (1.0 + abs(bm25(memories_fts)))';
31
30
  const recencyBoost = `MAX(0.0, (${RECENCY_DECAY_DAYS}.0 - julianday('now') + julianday(created_at)) / ${RECENCY_DECAY_DAYS}.0) * ${RECENCY_WEIGHT}`;
32
- // Union of FTS content matches and tag matches, deduplicated
33
31
  const sql = `
34
32
  WITH content_matches AS (
35
33
  SELECT m.*, ${relevanceExpr} as base_relevance, ${recencyBoost} as recency_bonus
@@ -41,7 +39,7 @@ const buildSearchQuery = (tokens) => {
41
39
  SELECT DISTINCT m.*, 0.5 as base_relevance, ${recencyBoost} as recency_bonus
42
40
  FROM memories m
43
41
  JOIN tags t ON m.id = t.memory_id
44
- WHERE t.tag IN (${tagPlaceholders})
42
+ WHERE t.tag IN (SELECT value FROM json_each(?))
45
43
  ),
46
44
  combined AS (
47
45
  SELECT *, (base_relevance + recency_bonus) as relevance FROM content_matches
@@ -55,7 +53,7 @@ const buildSearchQuery = (tokens) => {
55
53
  ORDER BY relevance DESC
56
54
  LIMIT ?
57
55
  `;
58
- return { sql, params: [ftsQuery, ...tokens, DEFAULT_LIMIT] };
56
+ return { sql, params: [ftsQuery, JSON.stringify(tokens), DEFAULT_LIMIT] };
59
57
  };
60
58
  const INDEX_MISSING_TOKENS = [
61
59
  'no such module: fts5',
@@ -68,12 +66,10 @@ const getErrorMessage = (err) => err instanceof Error ? err.message : String(err
68
66
  const toSearchError = (err) => {
69
67
  const message = getErrorMessage(err);
70
68
  if (isSearchIndexMissing(message)) {
71
- return new Error('Search index unavailable. Ensure FTS5 is enabled and the index is ' +
72
- 'initialized.');
69
+ return new Error('Search index unavailable. Ensure FTS5 is enabled and the index is initialized.');
73
70
  }
74
71
  if (isSearchQueryInvalid(message)) {
75
- return new Error('Invalid search query syntax. Check for unbalanced quotes or special ' +
76
- 'characters. ' +
72
+ return new Error('Invalid search query syntax. Check for unbalanced quotes or special characters. ' +
77
73
  `Details: ${message}`);
78
74
  }
79
75
  return undefined;
@@ -87,7 +83,8 @@ const executeSearch = (sql, params) => {
87
83
  throw toSearchError(err) ?? err;
88
84
  }
89
85
  };
90
- const mapRowsToSearchResultsWithTags = (rows) => {
86
+ const enrichSearchResultsWithTags = (rows, signal) => {
87
+ throwIfAborted(signal);
91
88
  const ids = rows.map((row) => toSafeInteger(row.id, 'id'));
92
89
  const tagsById = loadTagsForMemoryIds(ids);
93
90
  return rows.map((row) => {
@@ -95,78 +92,87 @@ const mapRowsToSearchResultsWithTags = (rows) => {
95
92
  return mapRowToSearchResult(row, tagsById.get(id) ?? []);
96
93
  });
97
94
  };
98
- export const searchMemories = (input) => {
95
+ export const searchMemories = (input, signal) => {
96
+ throwIfAborted(signal);
99
97
  const tokens = tokenizeQuery(input.query);
100
- if (tokens.length === 0) {
98
+ if (tokens.length === 0)
101
99
  throw new Error('Query cannot be empty');
102
- }
103
100
  const { sql, params } = buildSearchQuery(tokens);
104
101
  const rows = executeSearch(sql, params);
105
- return mapRowsToSearchResultsWithTags(rows);
106
- };
107
- const MAX_RECALL_DEPTH = 3;
108
- const MAX_RECALL_MEMORIES = 50;
109
- const buildRecallQuery = (seedCount, depth) => {
110
- const seedPlaceholders = Array.from({ length: seedCount }, () => '?').join(', ');
111
- const safeDepth = Math.min(Math.max(0, depth), MAX_RECALL_DEPTH);
112
- const sql = `
113
- WITH RECURSIVE connected(memory_id, depth) AS (
114
- -- Seed memories from search results
115
- SELECT id, 0 FROM memories WHERE id IN (${seedPlaceholders})
116
- UNION
117
- -- Follow relationships (both directions) up to max depth
118
- SELECT
119
- CASE
120
- WHEN r.from_memory_id = c.memory_id THEN r.to_memory_id
121
- ELSE r.from_memory_id
122
- END,
123
- c.depth + 1
124
- FROM relationships r
125
- JOIN connected c ON (r.from_memory_id = c.memory_id OR r.to_memory_id = c.memory_id)
126
- WHERE c.depth < ${safeDepth}
127
- ),
128
- unique_memories AS (
129
- SELECT DISTINCT memory_id, MIN(depth) as min_depth
130
- FROM connected
131
- GROUP BY memory_id
132
- ORDER BY min_depth
133
- LIMIT ${MAX_RECALL_MEMORIES}
134
- )
135
- SELECT m.*, 1.0 / (1.0 + um.min_depth) as relevance
102
+ throwIfAborted(signal);
103
+ return enrichSearchResultsWithTags(rows, signal);
104
+ };
105
+ const normalizeRecallDepth = (depth) => {
106
+ const raw = depth ?? 1;
107
+ if (!Number.isFinite(raw))
108
+ return 1;
109
+ const asInt = Math.trunc(raw);
110
+ return Math.min(Math.max(0, asInt), MAX_RECALL_DEPTH);
111
+ };
112
+ const buildRecallQuery = () => `
113
+ WITH RECURSIVE connected(memory_id, depth) AS (
114
+ -- Seed memories from search results
115
+ SELECT m.id, 0
136
116
  FROM memories m
137
- JOIN unique_memories um ON m.id = um.memory_id
138
- ORDER BY um.min_depth, m.created_at DESC
139
- `;
140
- return { sql };
141
- };
142
- const buildRelationshipsQuery = (memoryCount) => {
143
- const placeholders = Array.from({ length: memoryCount }, () => '?').join(', ');
144
- const sql = `
145
- SELECT r.id, r.relation_type, r.created_at,
146
- mf.hash as from_hash, mt.hash as to_hash
117
+ WHERE m.id IN (SELECT value FROM json_each(?))
118
+
119
+ UNION
120
+
121
+ -- Follow relationships (both directions) up to max depth
122
+ SELECT
123
+ CASE
124
+ WHEN r.from_memory_id = c.memory_id THEN r.to_memory_id
125
+ ELSE r.from_memory_id
126
+ END,
127
+ c.depth + 1
147
128
  FROM relationships r
148
- JOIN memories mf ON r.from_memory_id = mf.id
149
- JOIN memories mt ON r.to_memory_id = mt.id
150
- WHERE r.from_memory_id IN (${placeholders})
151
- AND r.to_memory_id IN (${placeholders})
152
- ORDER BY r.relation_type, mf.hash, mt.hash, r.created_at, r.id
153
- `;
154
- return { sql };
155
- };
129
+ JOIN connected c ON (r.from_memory_id = c.memory_id OR r.to_memory_id = c.memory_id)
130
+ WHERE c.depth < ?
131
+ ),
132
+ unique_memories AS (
133
+ SELECT DISTINCT memory_id, MIN(depth) as min_depth
134
+ FROM connected
135
+ GROUP BY memory_id
136
+ ORDER BY min_depth
137
+ LIMIT ?
138
+ )
139
+ SELECT m.*, 1.0 / (1.0 + um.min_depth) as relevance
140
+ FROM memories m
141
+ JOIN unique_memories um ON m.id = um.memory_id
142
+ ORDER BY um.min_depth, m.created_at DESC
143
+ `;
144
+ const buildRelationshipsQuery = () => `
145
+ WITH ids(id) AS (SELECT value FROM json_each(?))
146
+ SELECT r.id, r.relation_type, r.created_at,
147
+ mf.hash as from_hash, mt.hash as to_hash
148
+ FROM relationships r
149
+ JOIN ids a ON r.from_memory_id = a.id
150
+ JOIN ids b ON r.to_memory_id = b.id
151
+ JOIN memories mf ON r.from_memory_id = mf.id
152
+ JOIN memories mt ON r.to_memory_id = mt.id
153
+ ORDER BY r.relation_type, mf.hash, mt.hash, r.created_at, r.id
154
+ `;
156
155
  const executeWithSql = (sql, params) => {
157
- const stmt = db.prepare(sql);
156
+ const stmt = prepareCached(sql);
158
157
  return executeAll(stmt, ...params);
159
158
  };
160
159
  const executeRecall = (seedIds, depth) => {
161
- const { sql } = buildRecallQuery(seedIds.length, depth);
162
- return executeWithSql(sql, seedIds);
160
+ if (seedIds.length === 0)
161
+ return [];
162
+ const sql = buildRecallQuery();
163
+ return executeWithSql(sql, [
164
+ JSON.stringify(seedIds),
165
+ depth,
166
+ MAX_RECALL_MEMORIES,
167
+ ]);
163
168
  };
164
169
  const loadRelationshipsForMemoryIds = (memoryIds) => {
165
- const { sql } = buildRelationshipsQuery(memoryIds.length);
166
- const rows = executeWithSql(sql, [...memoryIds, ...memoryIds]);
170
+ if (memoryIds.length === 0)
171
+ return [];
172
+ const sql = buildRelationshipsQuery();
173
+ const rows = executeWithSql(sql, [JSON.stringify(memoryIds)]);
167
174
  return rows.map(mapRowToRelationship);
168
175
  };
169
- const getRecallDepth = (depth) => depth ?? 1;
170
176
  const emptyRecallResult = (depth) => ({
171
177
  memories: [],
172
178
  relationships: [],
@@ -177,26 +183,27 @@ const recallAtDepthZero = (searchResults, depth) => {
177
183
  return undefined;
178
184
  return { memories: searchResults, relationships: [], depth };
179
185
  };
180
- const recallAtPositiveDepth = (searchResults, depth) => {
186
+ const recallAtPositiveDepth = (searchResults, depth, signal) => {
187
+ throwIfAborted(signal);
181
188
  const seedIds = searchResults.map((m) => m.id);
182
189
  const recallRows = executeRecall(seedIds, depth);
183
- const memories = mapRowsToSearchResultsWithTags(recallRows);
184
- if (memories.length === 0) {
190
+ throwIfAborted(signal);
191
+ const memories = enrichSearchResultsWithTags(recallRows, signal);
192
+ if (memories.length === 0)
185
193
  return emptyRecallResult(depth);
186
- }
194
+ throwIfAborted(signal);
187
195
  const relationships = loadRelationshipsForMemoryIds(memories.map((m) => m.id));
188
196
  return { memories, relationships, depth };
189
197
  };
190
- export const recallMemories = (input) => {
191
- const searchResults = searchMemories({ query: input.query });
198
+ export const recallMemories = (input, signal) => {
199
+ throwIfAborted(signal);
200
+ const depth = normalizeRecallDepth(input.depth);
201
+ const searchResults = searchMemories({ query: input.query }, signal);
192
202
  if (searchResults.length === 0) {
193
- return emptyRecallResult(getRecallDepth(input.depth));
203
+ return emptyRecallResult(depth);
194
204
  }
195
- const depth = getRecallDepth(input.depth);
196
205
  const depthZeroResult = recallAtDepthZero(searchResults, depth);
197
- if (depthZeroResult) {
206
+ if (depthZeroResult)
198
207
  return depthZeroResult;
199
- }
200
- return recallAtPositiveDepth(searchResults, depth);
208
+ return recallAtPositiveDepth(searchResults, depth, signal);
201
209
  };
202
- //# sourceMappingURL=search.js.map
package/dist/index.d.ts CHANGED
@@ -1,3 +1,2 @@
1
1
  #!/usr/bin/env node
2
2
  export {};
3
- //# sourceMappingURL=index.d.ts.map
package/dist/index.js CHANGED
@@ -1,23 +1,18 @@
1
1
  #!/usr/bin/env node
2
+ import { readFileSync } from 'node:fs';
2
3
  import { readFile } from 'node:fs/promises';
3
4
  import process from 'node:process';
4
5
  import { McpServer, ResourceTemplate, } from '@modelcontextprotocol/sdk/server/mcp.js';
5
6
  import { SUPPORTED_PROTOCOL_VERSIONS } from '@modelcontextprotocol/sdk/types.js';
6
- import { closeDb } from './core/db.js';
7
+ import pkg from '../package.json' with { type: 'json' };
8
+ import { closeDb, initDb } from './core/db.js';
7
9
  import { attachProtocolLogger, logger } from './logger.js';
8
10
  import { ProtocolVersionGuardTransport } from './protocol-version-guard.js';
9
11
  import { BatchRejectingStdioServerTransport } from './stdio-transport.js';
10
12
  import { registerAllTools } from './tools.js';
11
- const readPackageVersion = async () => {
12
- const packageJsonText = await readFile(new URL('../package.json', import.meta.url), {
13
- encoding: 'utf-8',
14
- signal: AbortSignal.timeout(5000),
15
- });
16
- const parsed = JSON.parse(packageJsonText);
17
- if (typeof parsed !== 'object' || parsed === null)
18
- return undefined;
19
- const { version } = parsed;
20
- return typeof version === 'string' ? version : undefined;
13
+ const readPackageVersion = () => {
14
+ const { version } = pkg;
15
+ return Promise.resolve(typeof version === 'string' ? version : undefined);
21
16
  };
22
17
  const toNonEmptyTrimmedOrUndefined = (text) => {
23
18
  const trimmed = text.trim();
@@ -51,14 +46,46 @@ const loadServerMetadata = async () => {
51
46
  };
52
47
  };
53
48
  const { packageVersion, instructions: serverInstructions } = await loadServerMetadata();
49
+ const getLocalIconData = () => {
50
+ const candidates = ['../assets/logo.svg', './assets/logo.svg'];
51
+ for (const path of candidates) {
52
+ try {
53
+ const url = new URL(path, import.meta.url);
54
+ const content = readFileSync(url, { encoding: 'base64' });
55
+ return `data:image/svg+xml;base64,${content}`;
56
+ }
57
+ catch {
58
+ continue;
59
+ }
60
+ }
61
+ return undefined;
62
+ };
63
+ const localIcon = getLocalIconData();
54
64
  const server = new McpServer({ name: 'memdb', version: packageVersion ?? '0.0.0' }, {
55
65
  instructions: serverInstructions,
56
- capabilities: { tools: {}, logging: {}, resources: {} },
66
+ capabilities: { tools: { listChanged: true }, logging: {}, resources: {} },
67
+ ...(localIcon
68
+ ? {
69
+ icons: [
70
+ { src: localIcon, mimeType: 'image/svg+xml', sizes: ['any'] },
71
+ ],
72
+ }
73
+ : {}),
57
74
  });
58
75
  const instructionsResource = new ResourceTemplate('internal://instructions', {
59
76
  list: undefined,
60
77
  });
61
- server.registerResource('internal://instructions', instructionsResource, { title: 'Instructions', mimeType: 'text/markdown' }, async (uri) => {
78
+ server.registerResource('internal://instructions', instructionsResource, {
79
+ title: 'Instructions',
80
+ mimeType: 'text/markdown',
81
+ ...(localIcon
82
+ ? {
83
+ icons: [
84
+ { src: localIcon, mimeType: 'image/svg+xml', sizes: ['any'] },
85
+ ],
86
+ }
87
+ : {}),
88
+ }, async (uri) => {
62
89
  const text = (await readTextFileOrUndefined(new URL('./instructions.md', import.meta.url))) ?? serverInstructions;
63
90
  return {
64
91
  contents: [
@@ -70,7 +97,7 @@ server.registerResource('internal://instructions', instructionsResource, { title
70
97
  ],
71
98
  };
72
99
  });
73
- registerAllTools(server);
100
+ registerAllTools(server, localIcon);
74
101
  let transport;
75
102
  let shuttingDown = false;
76
103
  const SHUTDOWN_TIMEOUT = 5000;
@@ -137,6 +164,7 @@ const connectServer = async (transportToUse) => {
137
164
  };
138
165
  const main = async () => {
139
166
  try {
167
+ await initDb();
140
168
  const guardedTransport = createTransport();
141
169
  await connectServer(guardedTransport);
142
170
  }
@@ -166,4 +194,3 @@ const registerProcessHandlers = () => {
166
194
  void main();
167
195
  registerSignalHandlers();
168
196
  registerProcessHandlers();
169
- //# sourceMappingURL=index.js.map
@@ -1,42 +1,59 @@
1
1
  # memdb Instructions
2
2
 
3
- > **Guidance for the Agent:** These instructions are available as a resource (`internal://instructions`). Load them when you are confused about tool usage.
3
+ > Guidance for the Agent: These instructions are available as a resource (`internal://instructions`) or prompt (`get-help`). Load them when you are unsure about tool usage.
4
4
 
5
5
  ## 1. Core Capability
6
6
 
7
- - **Domain:** Local, SQLite-backed memory store for text notes with tags and relationships.
7
+ - **Domain:** Local SQLite-backed memory store with vector-like text search and graph relationships.
8
8
  - **Primary Resources:** `Memory`, `Relationship`, `Stats`.
9
9
 
10
10
  ## 2. The "Golden Path" Workflows (Critical)
11
11
 
12
- _Describe the standard order of operations. Do not assume the agent knows this._
12
+ _Describe the standard order of operations using ONLY tools that exist._
13
13
 
14
- ### Workflow A: Find relevant context
14
+ ### Workflow A: Recall & Exploration
15
15
 
16
- 1. Call `search_memories` with a focused query.
17
- 2. Call `recall` with `depth: 1`–`2` if you need connected graph context.
18
- 3. Call `get_memory` using the `hash` from results.
19
- > **Constraint:** Never guess hashes. Always search or recall first.
16
+ 1. Call `search_memories` to find entry points by content/tags.
17
+ 2. Call `recall` (depth 1–2) to traverse the knowledge graph from relevant hits.
18
+ 3. Call `get_memory` using the `hash` (SHA-256) for exact retrieval.
19
+ > Constraint: Never guess hashes. Always search or recall first.
20
20
 
21
- ### Workflow B: Store or revise knowledge
21
+ ### Workflow B: Knowledge Management
22
22
 
23
- 1. Call `store_memory` (single) or `store_memories` (batch).
24
- 2. Use `update_memory` to revise; the response returns `{ newHash }`.
25
- 3. Call `create_relationship` to link memories when needed.
26
- 4. Call `delete_memory` / `delete_memories` / `delete_relationship` only with explicit user intent.
23
+ 1. Call `store_memory` (single) or `store_memories` (batch) to add context.
24
+ 2. Call `create_relationship` to link related memories (directed).
25
+ 3. Call `update_memory` to revise; this changes the hash.
26
+ 4. Call `delete_memory` only with user confirmation.
27
27
 
28
- ## 3. Tool Nuances & "Gotchas"
28
+ ## 3. Tool Nuances & Gotchas
29
29
 
30
- - **`search_memories`**: Query is 1–1000 chars and max 50 terms; whitespace-only is invalid.
31
- - **`recall`**: `depth` is 0–3; depth 0 returns no relationships.
32
- - **`update_memory`**: Content changes produce a new hash; the tool returns `{ newHash: string }`.
33
- - **`delete_*` tools**: Destructive—confirm user intent before calling.
34
- - **Tags**: 1–100 tags, no whitespace, max 50 chars; prefer `kebab-case`.
35
- - **Hashes**: 32 hex chars; case-insensitive but normalized to lowercase.
30
+ _Do NOT repeat JSON schema. Focus on behavior and pitfalls._
31
+
32
+ - **`search_memories`**
33
+ - **Purpose:** Full-text search over content and tags.
34
+ - **Inputs:** `query` string (required).
35
+ - **Common failure modes:** Empty results for too-specific queries; try broader terms.
36
+
37
+ - **`recall`**
38
+ - **Purpose:** Graph traversal starting from a defined query.
39
+ - **Inputs:** `query` string; `depth` (default 1, max 3 recommended).
40
+ - **Latency:** Higher depth increases time/token usage significantly.
41
+
42
+ - **`store_memory` / `store_memories`**
43
+ - **Purpose:** Persist new information.
44
+ - **Inputs:** `content` (text), `tags` (array of strings).
45
+ - **Side effects:** Writes to DB. Idempotent if content/tags identical (same hash).
46
+
47
+ - **`update_memory`**
48
+ - **Inputs:** `hash` (must exist), new `content`/`tags`.
49
+ - **Side effects:** Creating a new memory hash; effectively a "move" + "create".
50
+
51
+ - **`create_relationship`**
52
+ - **Inputs:** `from_hash`, `to_hash`, `relation_type` (e.g., "related_to").
53
+ - **Constraint:** Both hashes must exist.
36
54
 
37
55
  ## 4. Error Handling Strategy
38
56
 
39
- - If you receive `E_NOT_FOUND`, re-run `search_memories` or `get_relationships` to confirm the `hash`.
40
- - If `E_INVALID_ARG`, check tag formatting, term limits, or batch size (max 50).
41
- - If `E_TOOL_ERROR` (e.g., FTS issues), call `memory_stats` and retry later.
42
- - If `E_TIMEOUT`, reduce batch size or split the request.
57
+ - **`E_NOT_FOUND`**: The hash doesn't exist. Re-run search/recall.
58
+ - **`E_TIMEOUT`**: Operation took too long (>5s default). Reduce batch size or depth.
59
+ - **`E_INVALID_ARG`**: Check inputs against schema (e.g. valid hashes).
package/dist/logger.d.ts CHANGED
@@ -7,4 +7,3 @@ export declare const logger: {
7
7
  error: (msg: string, ...args: unknown[]) => void;
8
8
  };
9
9
  export {};
10
- //# sourceMappingURL=logger.d.ts.map
package/dist/logger.js CHANGED
@@ -33,4 +33,3 @@ export const logger = {
33
33
  warn: createWriter('warn'),
34
34
  error: createWriter('error'),
35
35
  };
36
- //# sourceMappingURL=logger.js.map
@@ -18,4 +18,3 @@ export declare class ProtocolVersionGuardTransport implements Transport {
18
18
  private handleInitializedNotification;
19
19
  private handleBeforeReady;
20
20
  }
21
- //# sourceMappingURL=protocol-version-guard.d.ts.map
@@ -134,4 +134,3 @@ export class ProtocolVersionGuardTransport {
134
134
  }
135
135
  }
136
136
  }
137
- //# sourceMappingURL=protocol-version-guard.js.map
package/dist/schemas.d.ts CHANGED
@@ -79,4 +79,3 @@ export declare const DefaultOutputSchema: z.ZodObject<{
79
79
  message: z.ZodString;
80
80
  }, z.core.$strict>>;
81
81
  }, z.core.$strict>;
82
- //# sourceMappingURL=schemas.d.ts.map
package/dist/schemas.js CHANGED
@@ -1,6 +1,6 @@
1
1
  import { z } from 'zod';
2
2
  import { MEMORY_TYPES } from './types.js';
3
- const hashSchema = z.string().regex(/^[a-f0-9]{32}$/i);
3
+ const hashSchema = z.string().regex(/^[a-f0-9]{64}$/i);
4
4
  const tagSchema = z
5
5
  .string()
6
6
  .min(1)
@@ -51,18 +51,22 @@ export const SearchMemoriesInputSchema = z.strictObject({
51
51
  }),
52
52
  });
53
53
  export const GetMemoryInputSchema = z.strictObject({
54
- hash: hashSchema.meta({ description: 'MD5 hash of the memory' }),
54
+ hash: hashSchema.meta({
55
+ description: 'Hash of the memory (SHA-256, 64 hex chars)',
56
+ }),
55
57
  });
56
58
  export const DeleteMemoryInputSchema = z.strictObject({
57
- hash: hashSchema.meta({ description: 'MD5 hash of the memory' }),
59
+ hash: hashSchema.meta({
60
+ description: 'Hash of the memory (SHA-256, 64 hex chars)',
61
+ }),
58
62
  });
59
63
  export const DeleteMemoriesInputSchema = z.strictObject({
60
64
  hashes: z.array(hashSchema).min(1).max(50).meta({
61
- description: 'MD5 hashes of memories to delete (1-50 hashes)',
65
+ description: 'Hashes of memories to delete (1-50 hashes)',
62
66
  }),
63
67
  });
64
68
  export const UpdateMemoryInputSchema = z.strictObject({
65
- hash: hashSchema.meta({ description: 'MD5 hash of the memory to update' }),
69
+ hash: hashSchema.meta({ description: 'Hash of the memory to update' }),
66
70
  content: contentSchema.meta({ description: 'New content for the memory' }),
67
71
  tags: tagsSchema
68
72
  .max(100)
@@ -74,10 +78,10 @@ export const MemoryStatsInputSchema = z
74
78
  .meta({ description: 'No parameters required' });
75
79
  export const CreateRelationshipInputSchema = z.strictObject({
76
80
  from_hash: hashSchema.meta({
77
- description: 'MD5 hash of the source memory',
81
+ description: 'SHA-256 hash of the source memory',
78
82
  }),
79
83
  to_hash: hashSchema.meta({
80
- description: 'MD5 hash of the target memory',
84
+ description: 'SHA-256 hash of the target memory',
81
85
  }),
82
86
  relation_type: relationTypeSchema.meta({
83
87
  description: 'Type of relationship (e.g., "related_to", "causes", "depends_on", "part_of", "follows")',
@@ -85,7 +89,7 @@ export const CreateRelationshipInputSchema = z.strictObject({
85
89
  });
86
90
  export const GetRelationshipsInputSchema = z.strictObject({
87
91
  hash: hashSchema.meta({
88
- description: 'MD5 hash of the memory to get relationships for',
92
+ description: 'SHA-256 hash of the memory to get relationships for',
89
93
  }),
90
94
  direction: z.enum(['outgoing', 'incoming', 'both']).optional().meta({
91
95
  description: 'Direction: outgoing (from this memory), incoming (to this memory), both (default)',
@@ -93,10 +97,10 @@ export const GetRelationshipsInputSchema = z.strictObject({
93
97
  });
94
98
  export const DeleteRelationshipInputSchema = z.strictObject({
95
99
  from_hash: hashSchema.meta({
96
- description: 'MD5 hash of the source memory',
100
+ description: 'SHA-256 hash of the source memory',
97
101
  }),
98
102
  to_hash: hashSchema.meta({
99
- description: 'MD5 hash of the target memory',
103
+ description: 'SHA-256 hash of the target memory',
100
104
  }),
101
105
  relation_type: relationTypeSchema.meta({
102
106
  description: 'Type of relationship to delete',
@@ -137,4 +141,3 @@ export const DefaultOutputSchema = DefaultOutputSchemaBase.superRefine((value, c
137
141
  addIssue(ctx, ['error'], 'error is required when ok is false');
138
142
  }
139
143
  });
140
- //# sourceMappingURL=schemas.js.map
@@ -30,4 +30,3 @@ export declare class BatchRejectingStdioServerTransport implements Transport {
30
30
  private handleReadBufferError;
31
31
  private processReadBuffer;
32
32
  }
33
- //# sourceMappingURL=stdio-transport.d.ts.map
@@ -258,4 +258,3 @@ export class BatchRejectingStdioServerTransport {
258
258
  }
259
259
  }
260
260
  }
261
- //# sourceMappingURL=stdio-transport.js.map
package/dist/tools.d.ts CHANGED
@@ -19,6 +19,5 @@ export interface ToolDependencies {
19
19
  deleteRelationship: Dependency<typeof deleteRelationship>;
20
20
  recallMemories: Dependency<typeof recallMemories>;
21
21
  }
22
- export declare function registerAllTools(server: McpServer, deps?: ToolDependencies): void;
22
+ export declare function registerAllTools(server: McpServer, localIcon?: string, deps?: ToolDependencies): void;
23
23
  export {};
24
- //# sourceMappingURL=tools.d.ts.map