@j0hanz/memory-mcp 1.5.0 → 1.7.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (45)
  1. package/dist/db/index.js +16 -13
  2. package/dist/lib/errors.d.ts +4 -0
  3. package/dist/lib/errors.js +11 -0
  4. package/dist/lib/graph-traversal.d.ts +12 -0
  5. package/dist/lib/graph-traversal.js +145 -0
  6. package/dist/lib/json-schema.d.ts +5 -0
  7. package/dist/lib/json-schema.js +19 -1
  8. package/dist/lib/pagination.d.ts +0 -2
  9. package/dist/lib/pagination.js +0 -43
  10. package/dist/lib/search.js +44 -23
  11. package/dist/lib/tool-contracts.js +50 -73
  12. package/dist/lib/tool-execution.d.ts +13 -0
  13. package/dist/lib/tool-execution.js +51 -0
  14. package/dist/prompts/index.js +12 -8
  15. package/dist/resources/index.js +67 -43
  16. package/dist/resources/instructions.js +44 -37
  17. package/dist/resources/server-config.js +33 -22
  18. package/dist/resources/tool-catalog.js +2 -6
  19. package/dist/resources/tool-info.js +9 -9
  20. package/dist/resources/workflows.js +69 -40
  21. package/dist/schemas/inputs.d.ts +8 -5
  22. package/dist/schemas/inputs.js +57 -40
  23. package/dist/schemas/outputs.d.ts +6 -6
  24. package/dist/schemas/outputs.js +7 -6
  25. package/dist/server.js +11 -4
  26. package/dist/tools/create-relationship.js +17 -22
  27. package/dist/tools/delete-memories.js +30 -39
  28. package/dist/tools/delete-memory.js +14 -18
  29. package/dist/tools/delete-relationship.js +9 -24
  30. package/dist/tools/get-memory.js +12 -17
  31. package/dist/tools/get-relationships.js +11 -12
  32. package/dist/tools/memory-stats.js +22 -30
  33. package/dist/tools/progress.d.ts +6 -0
  34. package/dist/tools/progress.js +68 -25
  35. package/dist/tools/recall.js +94 -203
  36. package/dist/tools/register-contract.d.ts +1 -2
  37. package/dist/tools/register-contract.js +4 -2
  38. package/dist/tools/result.d.ts +4 -0
  39. package/dist/tools/result.js +27 -0
  40. package/dist/tools/retrieve-context.js +80 -98
  41. package/dist/tools/search-memories.js +31 -34
  42. package/dist/tools/store-memories.js +33 -44
  43. package/dist/tools/store-memory.js +13 -20
  44. package/dist/tools/update-memory.js +45 -49
  45. package/package.json +1 -1
package/dist/db/index.js CHANGED
@@ -7,6 +7,10 @@ const TARGET_SCHEMA_VERSION = 2;
7
7
  const FTS5_CHECK_SQL = 'CREATE VIRTUAL TABLE IF NOT EXISTS __fts5_check USING fts5(x); DROP TABLE __fts5_check;';
8
8
  const FTS5_REQUIRED_MESSAGE = 'SQLite FTS5 extension is not available. memory-mcp requires a SQLite build with FTS5 support.';
9
9
  const DEFENSIVE_PRAGMA_SQL = 'PRAGMA defensive = ON';
10
+ const RELATIONSHIP_INDEX_SQL = [
11
+ 'CREATE INDEX IF NOT EXISTS idx_relationships_from ON relationships(from_hash)',
12
+ 'CREATE INDEX IF NOT EXISTS idx_relationships_to ON relationships(to_hash)',
13
+ ];
10
14
  const RELATIONSHIPS_TABLE_SQL = `CREATE TABLE IF NOT EXISTS relationships (
11
15
  from_hash TEXT NOT NULL REFERENCES memories(hash) ON DELETE CASCADE ON UPDATE CASCADE,
12
16
  to_hash TEXT NOT NULL REFERENCES memories(hash) ON DELETE CASCADE ON UPDATE CASCADE,
@@ -57,12 +61,17 @@ const SCHEMA_SQL = `
57
61
  CREATE INDEX IF NOT EXISTS idx_memories_created
58
62
  ON memories(created_at DESC);
59
63
 
60
- CREATE INDEX IF NOT EXISTS idx_relationships_from
61
- ON relationships(from_hash);
62
-
63
- CREATE INDEX IF NOT EXISTS idx_relationships_to
64
- ON relationships(to_hash);
64
+ ${RELATIONSHIP_INDEX_SQL.join(';\n\n ')};
65
65
  `;
66
+ const RELATIONSHIP_EDGE_COLUMNS = new Set(['from_hash', 'to_hash']);
67
+ function isMemoryRelationshipForeignKey(row) {
68
+ return row.table === 'memories' && RELATIONSHIP_EDGE_COLUMNS.has(row.from);
69
+ }
70
+ function ensureRelationshipIndexes(db) {
71
+ for (const statement of RELATIONSHIP_INDEX_SQL) {
72
+ db.exec(statement);
73
+ }
74
+ }
66
75
  function runImmediateTransaction(db, action) {
67
76
  db.exec('BEGIN IMMEDIATE');
68
77
  try {
@@ -101,13 +110,8 @@ function needsRelationshipsCascadeUpdateMigration(db) {
101
110
  const rows = db
102
111
  .prepare("PRAGMA foreign_key_list('relationships')")
103
112
  .all();
104
- if (rows.length === 0) {
105
- return false;
106
- }
107
113
  for (const row of rows) {
108
- const isMemoryEdge = row.table === 'memories' &&
109
- (row.from === 'from_hash' || row.from === 'to_hash');
110
- if (!isMemoryEdge) {
114
+ if (!isMemoryRelationshipForeignKey(row)) {
111
115
  continue;
112
116
  }
113
117
  if (row.on_update.toUpperCase() !== 'CASCADE') {
@@ -126,8 +130,7 @@ function migrateRelationshipsCascadeUpdate(db) {
126
130
  FROM relationships_old
127
131
  `);
128
132
  db.exec('DROP TABLE relationships_old');
129
- db.exec('CREATE INDEX IF NOT EXISTS idx_relationships_from ON relationships(from_hash)');
130
- db.exec('CREATE INDEX IF NOT EXISTS idx_relationships_to ON relationships(to_hash)');
133
+ ensureRelationshipIndexes(db);
131
134
  });
132
135
  }
133
136
  function applyMigrations(db) {
@@ -8,3 +8,7 @@ export declare const E_UNKNOWN = "E_UNKNOWN";
8
8
  export declare function getErrorMessage(err: unknown): string;
9
9
  export declare function isMcpError(err: unknown): err is McpError;
10
10
  export declare function rethrowMcpError(err: unknown): void;
11
+ export declare class CancelledError extends Error {
12
+ constructor();
13
+ }
14
+ export declare function throwIfAborted(signal?: AbortSignal): void;
@@ -20,3 +20,14 @@ export function rethrowMcpError(err) {
20
20
  if (isMcpError(err))
21
21
  throw err;
22
22
  }
23
+ export class CancelledError extends Error {
24
+ constructor() {
25
+ super(E_CANCELLED);
26
+ this.name = 'CancelledError';
27
+ }
28
+ }
29
+ export function throwIfAborted(signal) {
30
+ if (signal?.aborted) {
31
+ throw new CancelledError();
32
+ }
33
+ }
@@ -0,0 +1,12 @@
1
+ import type { TypedDb } from '../db/typed.js';
2
+ import type { MemoryRow, RelationshipEdge } from './types.js';
3
+ export type ProgressNotifier = (hop: number, total: number) => void;
4
+ export declare const MAX_EDGE_ROWS: number;
5
+ export declare const MAX_VISITED_NODES: number;
6
+ export interface TraverseGraphResult {
7
+ edges: RelationshipEdge[];
8
+ visited: Set<string>;
9
+ depthReached: number;
10
+ aborted: boolean;
11
+ }
12
+ export declare function traverseGraph(db: TypedDb, seeds: MemoryRow[], depth: number, signal?: AbortSignal, onHop?: ProgressNotifier): Promise<TraverseGraphResult>;
@@ -0,0 +1,145 @@
1
+ import process from 'node:process';
2
+ import { throwIfAborted } from './errors.js';
3
+ function yieldToEventLoop() {
4
+ return new Promise((resolve) => setImmediate(resolve));
5
+ }
6
+ function parseEnvInt(name, fallback, min, max) {
7
+ const raw = process.env[name];
8
+ if (raw == null)
9
+ return fallback;
10
+ const parsed = parseInt(raw, 10);
11
+ if (Number.isNaN(parsed))
12
+ return fallback;
13
+ return Math.max(min, Math.min(max, parsed));
14
+ }
15
+ const MAX_FRONTIER_SIZE = parseEnvInt('RECALL_MAX_FRONTIER_SIZE', 1000, 100, 50000);
16
+ export const MAX_EDGE_ROWS = parseEnvInt('RECALL_MAX_EDGE_ROWS', 5000, 100, 50000);
17
+ export const MAX_VISITED_NODES = parseEnvInt('RECALL_MAX_VISITED_NODES', 5000, 100, 50000);
18
+ const EDGE_QUERY_SQL = `SELECT from_hash, to_hash, relation_type FROM relationships
19
+ WHERE from_hash IN (SELECT value FROM json_each(?))
20
+ OR to_hash IN (SELECT value FROM json_each(?))
21
+ LIMIT ?`;
22
+ function initializeTraversalState(seeds) {
23
+ const visited = new Set();
24
+ const frontier = [];
25
+ for (const seed of seeds) {
26
+ visited.add(seed.hash);
27
+ frontier.push(seed.hash);
28
+ }
29
+ return {
30
+ visited,
31
+ frontier,
32
+ edges: [],
33
+ seenEdges: new Set(),
34
+ depthReached: 0,
35
+ aborted: false,
36
+ };
37
+ }
38
+ function capFrontier(state) {
39
+ if (state.frontier.length <= MAX_FRONTIER_SIZE) {
40
+ return;
41
+ }
42
+ state.frontier.length = MAX_FRONTIER_SIZE;
43
+ state.aborted = true;
44
+ }
45
+ function getRemainingBudget(state) {
46
+ return {
47
+ edges: MAX_EDGE_ROWS - state.edges.length,
48
+ nodes: MAX_VISITED_NODES - state.visited.size,
49
+ };
50
+ }
51
+ function hasExhaustedBudget(budget) {
52
+ return budget.edges <= 0 || budget.nodes <= 0;
53
+ }
54
+ function loadEdgeRows(edgeStmt, frontier, edgeLimit) {
55
+ const frontierJson = JSON.stringify(frontier);
56
+ return edgeStmt.all(frontierJson, frontierJson, edgeLimit + 1);
57
+ }
58
+ function toRowsToProcessCount(edgeRowsLength, remainingEdgeBudget) {
59
+ return edgeRowsLength > remainingEdgeBudget
60
+ ? remainingEdgeBudget
61
+ : edgeRowsLength;
62
+ }
63
+ function toEdgeKey(edge) {
64
+ return `${edge.from_hash}|${edge.to_hash}|${edge.relation_type}`;
65
+ }
66
+ function appendEdgeIfNew(state, edge) {
67
+ const edgeKey = toEdgeKey(edge);
68
+ if (state.seenEdges.has(edgeKey)) {
69
+ return;
70
+ }
71
+ state.seenEdges.add(edgeKey);
72
+ state.edges.push({
73
+ from_hash: edge.from_hash,
74
+ to_hash: edge.to_hash,
75
+ relation_type: edge.relation_type,
76
+ });
77
+ }
78
+ function createVisitedQueue(state, nextHashes) {
79
+ return (hash) => {
80
+ if (state.visited.has(hash)) {
81
+ return;
82
+ }
83
+ if (state.visited.size >= MAX_VISITED_NODES) {
84
+ state.aborted = true;
85
+ return;
86
+ }
87
+ state.visited.add(hash);
88
+ if (nextHashes.length < MAX_FRONTIER_SIZE) {
89
+ nextHashes.push(hash);
90
+ return;
91
+ }
92
+ state.aborted = true;
93
+ };
94
+ }
95
+ function shouldStopEdgeProcessing(state) {
96
+ return (state.aborted &&
97
+ (state.edges.length >= MAX_EDGE_ROWS ||
98
+ state.visited.size >= MAX_VISITED_NODES));
99
+ }
100
+ function processEdgeRows(state, edgeRows, rowsToProcess) {
101
+ const nextHashes = [];
102
+ const queueVisitedHash = createVisitedQueue(state, nextHashes);
103
+ for (let i = 0; i < rowsToProcess; i += 1) {
104
+ const edge = edgeRows[i];
105
+ if (!edge) {
106
+ break;
107
+ }
108
+ appendEdgeIfNew(state, edge);
109
+ queueVisitedHash(edge.from_hash);
110
+ queueVisitedHash(edge.to_hash);
111
+ if (shouldStopEdgeProcessing(state)) {
112
+ break;
113
+ }
114
+ }
115
+ state.frontier.length = 0;
116
+ state.frontier.push(...nextHashes);
117
+ }
118
+ export async function traverseGraph(db, seeds, depth, signal, onHop) {
119
+ const state = initializeTraversalState(seeds);
120
+ const edgeStmt = db.prepareOnce(EDGE_QUERY_SQL);
121
+ for (let hop = 0; hop < depth && state.frontier.length > 0; hop += 1) {
122
+ await yieldToEventLoop();
123
+ throwIfAborted(signal);
124
+ state.depthReached = hop + 1;
125
+ onHop?.(hop, depth);
126
+ capFrontier(state);
127
+ const budget = getRemainingBudget(state);
128
+ if (hasExhaustedBudget(budget)) {
129
+ state.aborted = true;
130
+ break;
131
+ }
132
+ const edgeRows = loadEdgeRows(edgeStmt, state.frontier, budget.edges);
133
+ const rowsToProcess = toRowsToProcessCount(edgeRows.length, budget.edges);
134
+ if (edgeRows.length > budget.edges) {
135
+ state.aborted = true;
136
+ }
137
+ processEdgeRows(state, edgeRows, rowsToProcess);
138
+ }
139
+ return {
140
+ edges: state.edges,
141
+ visited: state.visited,
142
+ depthReached: state.depthReached,
143
+ aborted: state.aborted,
144
+ };
145
+ }
@@ -1,3 +1,8 @@
1
1
  import { z } from 'zod/v4';
2
2
  export type JsonSchemaObject = Record<string, unknown>;
3
3
  export declare function extractJsonSchema(schema: z.ZodType): JsonSchemaObject;
4
+ export interface SchemaMeta {
5
+ properties: Record<string, JsonSchemaObject>;
6
+ requiredFields: Set<string>;
7
+ }
8
+ export declare function getSchemaMeta(schema: z.ZodType): SchemaMeta;
@@ -1,9 +1,27 @@
1
1
  import { z } from 'zod/v4';
2
+ const JsonSchemaPayloadSchema = z
3
+ .object({
4
+ properties: z
5
+ .record(z.string(), z.record(z.string(), z.unknown()))
6
+ .optional(),
7
+ required: z.array(z.string()).optional(),
8
+ })
9
+ .catchall(z.unknown());
2
10
  export function extractJsonSchema(schema) {
3
11
  try {
4
- return z.toJSONSchema(schema);
12
+ const raw = z.toJSONSchema(schema);
13
+ return raw;
5
14
  }
6
15
  catch {
7
16
  return {};
8
17
  }
9
18
  }
19
+ export function getSchemaMeta(schema) {
20
+ const jsonSchema = extractJsonSchema(schema);
21
+ const parsed = JsonSchemaPayloadSchema.safeParse(jsonSchema);
22
+ const data = parsed.success ? parsed.data : {};
23
+ return {
24
+ properties: data.properties ?? {},
25
+ requiredFields: new Set(data.required ?? []),
26
+ };
27
+ }
@@ -2,6 +2,4 @@ export interface PageSlice<T> {
2
2
  page: T[];
3
3
  hasMore: boolean;
4
4
  }
5
- export declare function encodeCursor(offset: number): string;
6
- export declare function decodeCursor(cursor: string): number;
7
5
  export declare function splitPage<T>(rows: readonly T[], limit: number): PageSlice<T>;
@@ -1,46 +1,3 @@
1
- import { ErrorCode, McpError } from '@modelcontextprotocol/sdk/types.js';
2
- import { E_INVALID_CURSOR } from './errors.js';
3
- const CURSOR_ENCODING = 'base64url';
4
- const INVALID_CURSOR_STRUCTURE_MESSAGE = 'Invalid cursor structure';
5
- function isRecord(value) {
6
- return typeof value === 'object' && value !== null;
7
- }
8
- function isNonNegativeInteger(value) {
9
- return (typeof value === 'number' &&
10
- Number.isInteger(value) &&
11
- Number.isFinite(value) &&
12
- value >= 0);
13
- }
14
- function isCursorPayload(value) {
15
- if (!isRecord(value)) {
16
- return false;
17
- }
18
- const { offset } = value;
19
- return isNonNegativeInteger(offset);
20
- }
21
- function parseCursorPayload(cursor) {
22
- const json = Buffer.from(cursor, CURSOR_ENCODING).toString();
23
- const parsed = JSON.parse(json);
24
- if (!isCursorPayload(parsed)) {
25
- throw new Error(INVALID_CURSOR_STRUCTURE_MESSAGE);
26
- }
27
- return parsed;
28
- }
29
- function invalidCursor() {
30
- return new McpError(ErrorCode.InvalidParams, `${E_INVALID_CURSOR}: malformed cursor`);
31
- }
32
- export function encodeCursor(offset) {
33
- const payload = { offset };
34
- return Buffer.from(JSON.stringify(payload)).toString(CURSOR_ENCODING);
35
- }
36
- export function decodeCursor(cursor) {
37
- try {
38
- return parseCursorPayload(cursor).offset;
39
- }
40
- catch {
41
- throw invalidCursor();
42
- }
43
- }
44
1
  export function splitPage(rows, limit) {
45
2
  if (rows.length > limit) {
46
3
  return { page: rows.slice(0, limit), hasMore: true };
@@ -12,6 +12,9 @@ export function sanitizeFtsQuery(query) {
12
12
  }
13
13
  return tokens.map((token) => `"${token}"`).join(' ');
14
14
  }
15
+ function isOffsetCursor(cursor) {
16
+ return cursor == null || cursor.mode === 'offset';
17
+ }
15
18
  const FILTER_RULES = [
16
19
  { key: 'min_importance', clause: 'm.importance >= ?' },
17
20
  { key: 'max_importance', clause: 'm.importance <= ?' },
@@ -35,15 +38,18 @@ export function buildAndWhereClause(clauses) {
35
38
  }
36
39
  return ` AND ${clauses.join(' AND ')}`;
37
40
  }
41
+ function buildBaseSearchWhere(whereExtra) {
42
+ return `${BASE_RANKED_SEARCH_SQL}
43
+ WHERE memories_fts MATCH ?${whereExtra}`;
44
+ }
38
45
  function buildRankedSearchSql(whereExtra, cursor) {
39
- if (!cursor || cursor.mode === 'offset') {
40
- return `${BASE_RANKED_SEARCH_SQL}
41
- WHERE memories_fts MATCH ?${whereExtra}
46
+ const whereSql = buildBaseSearchWhere(whereExtra);
47
+ if (isOffsetCursor(cursor)) {
48
+ return `${whereSql}
42
49
  ORDER BY memories_fts.rank, m.hash
43
50
  LIMIT ? OFFSET ?`;
44
51
  }
45
- return `${BASE_RANKED_SEARCH_SQL}
46
- WHERE memories_fts MATCH ?${whereExtra}
52
+ return `${whereSql}
47
53
  AND (
48
54
  memories_fts.rank > ?
49
55
  OR (memories_fts.rank = ? AND m.hash > ?)
@@ -51,28 +57,43 @@ function buildRankedSearchSql(whereExtra, cursor) {
51
57
  ORDER BY memories_fts.rank, m.hash
52
58
  LIMIT ?`;
53
59
  }
54
- export function loadRankedSearchRows(db, query, limit, cursor, filters) {
55
- const ftsQuery = sanitizeFtsQuery(query);
56
- const filter = buildFilterClauses(filters);
57
- const whereExtra = buildAndWhereClause(filter.clauses);
58
- const sql = buildRankedSearchSql(whereExtra, cursor);
59
- const stmt = db.prepareOnce(sql);
60
- if (!cursor || cursor.mode === 'offset') {
60
+ function buildBaseSearchParams(ftsQuery, filterParams) {
61
+ return [ftsQuery, ...filterParams];
62
+ }
63
+ function buildOffsetParams(baseParams, limit, offset) {
64
+ return [...baseParams, limit + 1, offset];
65
+ }
66
+ function buildKeysetParams(baseParams, limit, cursor) {
67
+ return [...baseParams, cursor.rank, cursor.rank, cursor.hash, limit + 1];
68
+ }
69
+ function buildRankedSearchParams(ftsQuery, filterParams, limit, cursor) {
70
+ const baseParams = buildBaseSearchParams(ftsQuery, filterParams);
71
+ if (isOffsetCursor(cursor)) {
61
72
  const offset = cursor?.offset ?? 0;
62
- return stmt.all(ftsQuery, ...filter.params, limit + 1, offset);
73
+ return buildOffsetParams(baseParams, limit, offset);
63
74
  }
64
- return stmt.all(ftsQuery, ...filter.params, cursor.rank, cursor.rank, cursor.hash, limit + 1);
75
+ return buildKeysetParams(baseParams, limit, cursor);
76
+ }
77
+ function buildSearchPlan(query, limit, cursor, filters) {
78
+ const filter = buildFilterClauses(filters);
79
+ const sql = buildRankedSearchSql(buildAndWhereClause(filter.clauses), cursor);
80
+ const params = buildRankedSearchParams(query, filter.params, limit, cursor);
81
+ return { sql, params };
82
+ }
83
+ export function loadRankedSearchRows(db, query, limit, cursor, filters) {
84
+ const ftsQuery = sanitizeFtsQuery(query);
85
+ const plan = buildSearchPlan(ftsQuery, limit, cursor, filters);
86
+ return db.prepareOnce(plan.sql).all(...plan.params);
65
87
  }
66
88
  export function toMemoryFilters(params) {
67
89
  const filters = {};
68
- if (params.min_importance != null) {
69
- filters.min_importance = params.min_importance;
70
- }
71
- if (params.max_importance != null) {
72
- filters.max_importance = params.max_importance;
73
- }
74
- if (params.memory_type != null) {
75
- filters.memory_type = params.memory_type;
76
- }
90
+ const addFilter = (key, value) => {
91
+ if (value != null) {
92
+ filters[key] = value;
93
+ }
94
+ };
95
+ addFilter('min_importance', params.min_importance);
96
+ addFilter('max_importance', params.max_importance);
97
+ addFilter('memory_type', params.memory_type);
77
98
  return filters;
78
99
  }
@@ -1,19 +1,46 @@
1
1
  import {} from 'zod/v4';
2
2
  import { CreateRelationshipInputSchema, DeleteMemoriesInputSchema, DeleteMemoryInputSchema, DeleteRelationshipInputSchema, GetMemoryInputSchema, GetRelationshipsInputSchema, MemoryStatsInputSchema, RecallInputSchema, RetrieveContextInputSchema, SearchMemoriesInputSchema, StoreMemoriesInputSchema, StoreMemoryInputSchema, UpdateMemoryInputSchema, } from '../schemas/inputs.js';
3
3
  import { BatchResultSchema, CreateRelationshipResultSchema, DeleteRelationshipResultSchema, DeleteResultSchema, MemoryResultSchema, RecallResultSchema, RelationshipResultSchema, RetrieveContextResultSchema, SearchResultSchema, StatsResultSchema, StoreResultSchema, UpdateResultSchema, } from '../schemas/outputs.js';
4
- export const TOOL_CONTRACTS = [
4
+ const DEFAULT_ANNOTATIONS = {
5
+ readOnlyHint: false,
6
+ idempotentHint: false,
7
+ destructiveHint: false,
8
+ openWorldHint: false,
9
+ };
10
+ const READ_ONLY_ANNOTATIONS = {
11
+ readOnlyHint: true,
12
+ };
13
+ const IDEMPOTENT_ANNOTATIONS = {
14
+ idempotentHint: true,
15
+ };
16
+ const DESTRUCTIVE_ANNOTATIONS = {
17
+ destructiveHint: true,
18
+ };
19
+ function createToolContract(definition) {
20
+ const { annotations, ...rest } = definition;
21
+ return {
22
+ ...rest,
23
+ annotations: {
24
+ ...DEFAULT_ANNOTATIONS,
25
+ ...annotations,
26
+ },
27
+ };
28
+ }
29
+ function combineAnnotations(...annotations) {
30
+ const merged = {};
31
+ for (const annotation of annotations) {
32
+ Object.assign(merged, annotation);
33
+ }
34
+ return merged;
35
+ }
36
+ const TOOL_DEFINITIONS = [
5
37
  {
6
38
  name: 'store_memory',
7
39
  title: 'Store Memory',
8
40
  description: 'Store single memory. Returns hash. Idempotent (created: false if exists). Prefer store_memories.',
9
41
  inputSchema: StoreMemoryInputSchema,
10
42
  outputSchema: StoreResultSchema,
11
- annotations: {
12
- readOnlyHint: false,
13
- idempotentHint: true,
14
- destructiveHint: false,
15
- openWorldHint: false,
16
- },
43
+ annotations: IDEMPOTENT_ANNOTATIONS,
17
44
  },
18
45
  {
19
46
  name: 'store_memories',
@@ -21,12 +48,7 @@ export const TOOL_CONTRACTS = [
21
48
  description: 'Store 1-50 memories atomically. Idempotent. Rolls back on error.',
22
49
  inputSchema: StoreMemoriesInputSchema,
23
50
  outputSchema: BatchResultSchema,
24
- annotations: {
25
- readOnlyHint: false,
26
- idempotentHint: true,
27
- destructiveHint: false,
28
- openWorldHint: false,
29
- },
51
+ annotations: IDEMPOTENT_ANNOTATIONS,
30
52
  },
31
53
  {
32
54
  name: 'get_memory',
@@ -34,11 +56,7 @@ export const TOOL_CONTRACTS = [
34
56
  description: 'Retrieve memory by SHA-256 hash. Returns E_NOT_FOUND if missing.',
35
57
  inputSchema: GetMemoryInputSchema,
36
58
  outputSchema: MemoryResultSchema,
37
- annotations: {
38
- readOnlyHint: true,
39
- destructiveHint: false,
40
- openWorldHint: false,
41
- },
59
+ annotations: READ_ONLY_ANNOTATIONS,
42
60
  },
43
61
  {
44
62
  name: 'search_memories',
@@ -46,23 +64,15 @@ export const TOOL_CONTRACTS = [
46
64
  description: 'Full-text search (content+tags). Ranked, paginated. Alphanumeric/underscore only. Implicit AND.',
47
65
  inputSchema: SearchMemoriesInputSchema,
48
66
  outputSchema: SearchResultSchema,
49
- annotations: {
50
- readOnlyHint: true,
51
- destructiveHint: false,
52
- openWorldHint: false,
53
- },
67
+ annotations: READ_ONLY_ANNOTATIONS,
54
68
  },
55
69
  {
56
70
  name: 'retrieve_context',
57
71
  title: 'Retrieve Context',
58
- description: 'FTS search within token budget. Sorts by relevance/importance/recency. Returns truncated: true if limit hit.',
72
+ description: 'FTS search within token budget. Sorts by relevance/importance/recency. Supports importance and type filters. Returns truncated: true if limit hit.',
59
73
  inputSchema: RetrieveContextInputSchema,
60
74
  outputSchema: RetrieveContextResultSchema,
61
- annotations: {
62
- readOnlyHint: true,
63
- destructiveHint: false,
64
- openWorldHint: false,
65
- },
75
+ annotations: READ_ONLY_ANNOTATIONS,
66
76
  },
67
77
  {
68
78
  name: 'recall',
@@ -70,23 +80,15 @@ export const TOOL_CONTRACTS = [
70
80
  description: 'FTS search + BFS traversal (depth hops). Returns memories+edges. Emits progress. Aborts on limit.',
71
81
  inputSchema: RecallInputSchema,
72
82
  outputSchema: RecallResultSchema,
73
- annotations: {
74
- readOnlyHint: true,
75
- destructiveHint: false,
76
- openWorldHint: false,
77
- },
83
+ annotations: READ_ONLY_ANNOTATIONS,
78
84
  },
79
85
  {
80
86
  name: 'update_memory',
81
87
  title: 'Update Memory',
82
- description: 'Update content/tags. Returns old+new hash. Cascade updates relationships.',
88
+ description: 'Update content and/or tags (at least one required). Returns old+new hash. Cascade updates relationships.',
83
89
  inputSchema: UpdateMemoryInputSchema,
84
90
  outputSchema: UpdateResultSchema,
85
- annotations: {
86
- readOnlyHint: false,
87
- destructiveHint: true,
88
- openWorldHint: false,
89
- },
91
+ annotations: DESTRUCTIVE_ANNOTATIONS,
90
92
  },
91
93
  {
92
94
  name: 'delete_memory',
@@ -94,12 +96,7 @@ export const TOOL_CONTRACTS = [
94
96
  description: 'Delete memory by hash. Cascade deletes relationships. Idempotent.',
95
97
  inputSchema: DeleteMemoryInputSchema,
96
98
  outputSchema: DeleteResultSchema,
97
- annotations: {
98
- readOnlyHint: false,
99
- idempotentHint: true,
100
- destructiveHint: true,
101
- openWorldHint: false,
102
- },
99
+ annotations: combineAnnotations(DESTRUCTIVE_ANNOTATIONS, IDEMPOTENT_ANNOTATIONS),
103
100
  },
104
101
  {
105
102
  name: 'delete_memories',
@@ -107,11 +104,7 @@ export const TOOL_CONTRACTS = [
107
104
  description: 'Delete 1-50 memories atomically. Cascade deletes. Rolls back on error.',
108
105
  inputSchema: DeleteMemoriesInputSchema,
109
106
  outputSchema: BatchResultSchema,
110
- annotations: {
111
- readOnlyHint: false,
112
- destructiveHint: true,
113
- openWorldHint: false,
114
- },
107
+ annotations: DESTRUCTIVE_ANNOTATIONS,
115
108
  },
116
109
  {
117
110
  name: 'create_relationship',
@@ -119,24 +112,15 @@ export const TOOL_CONTRACTS = [
119
112
  description: 'Create directed edge. Idempotent. Errors if endpoints missing.',
120
113
  inputSchema: CreateRelationshipInputSchema,
121
114
  outputSchema: CreateRelationshipResultSchema,
122
- annotations: {
123
- readOnlyHint: false,
124
- idempotentHint: true,
125
- destructiveHint: false,
126
- openWorldHint: false,
127
- },
115
+ annotations: IDEMPOTENT_ANNOTATIONS,
128
116
  },
129
117
  {
130
118
  name: 'delete_relationship',
131
119
  title: 'Delete Relationship',
132
- description: 'Delete edge. Exact match required. Errors if missing.',
120
+ description: 'Delete edge. Exact match required. Idempotent (deleted: false if missing).',
133
121
  inputSchema: DeleteRelationshipInputSchema,
134
122
  outputSchema: DeleteRelationshipResultSchema,
135
- annotations: {
136
- readOnlyHint: false,
137
- destructiveHint: true,
138
- openWorldHint: false,
139
- },
123
+ annotations: combineAnnotations(DESTRUCTIVE_ANNOTATIONS, IDEMPOTENT_ANNOTATIONS),
140
124
  },
141
125
  {
142
126
  name: 'get_relationships',
@@ -144,11 +128,7 @@ export const TOOL_CONTRACTS = [
144
128
  description: 'Get relationships for memory. Filter direction. Inlines related memory.',
145
129
  inputSchema: GetRelationshipsInputSchema,
146
130
  outputSchema: RelationshipResultSchema,
147
- annotations: {
148
- readOnlyHint: true,
149
- destructiveHint: false,
150
- openWorldHint: false,
151
- },
131
+ annotations: READ_ONLY_ANNOTATIONS,
152
132
  },
153
133
  {
154
134
  name: 'memory_stats',
@@ -156,13 +136,10 @@ export const TOOL_CONTRACTS = [
156
136
  description: 'Get global stats: counts, timestamps, importance.',
157
137
  inputSchema: MemoryStatsInputSchema,
158
138
  outputSchema: StatsResultSchema,
159
- annotations: {
160
- readOnlyHint: true,
161
- destructiveHint: false,
162
- openWorldHint: false,
163
- },
139
+ annotations: READ_ONLY_ANNOTATIONS,
164
140
  },
165
141
  ];
142
+ export const TOOL_CONTRACTS = TOOL_DEFINITIONS.map(createToolContract);
166
143
  const TOOL_CONTRACTS_BY_NAME = new Map(TOOL_CONTRACTS.map((contract) => [contract.name, contract]));
167
144
  export function getToolContracts() {
168
145
  return TOOL_CONTRACTS;
@@ -0,0 +1,13 @@
1
+ import type { CallToolResult } from '@modelcontextprotocol/sdk/types.js';
2
+ interface BatchSummary {
3
+ succeeded: number;
4
+ failed: number;
5
+ matched: number;
6
+ }
7
+ interface BatchItemLike {
8
+ ok: boolean;
9
+ }
10
+ export declare function executeToolSafely(work: () => Promise<CallToolResult> | CallToolResult): Promise<CallToolResult>;
11
+ export declare function summarizeBatch<T extends BatchItemLike>(items: readonly T[], isMatched: (item: T) => boolean): BatchSummary;
12
+ export declare function executeLongRunningToolSafely(work: () => Promise<CallToolResult> | CallToolResult, onFinally?: () => Promise<void>): Promise<CallToolResult>;
13
+ export {};