cortex-engine 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (139) hide show
  1. package/LICENSE +21 -0
  2. package/README.md +154 -0
  3. package/dist/bin/cli.d.ts +13 -0
  4. package/dist/bin/cli.d.ts.map +1 -0
  5. package/dist/bin/cli.js +92 -0
  6. package/dist/bin/cli.js.map +1 -0
  7. package/dist/bin/config-cmd.d.ts +12 -0
  8. package/dist/bin/config-cmd.d.ts.map +1 -0
  9. package/dist/bin/config-cmd.js +184 -0
  10. package/dist/bin/config-cmd.js.map +1 -0
  11. package/dist/bin/config-loader.d.ts +12 -0
  12. package/dist/bin/config-loader.d.ts.map +1 -0
  13. package/dist/bin/config-loader.js +42 -0
  14. package/dist/bin/config-loader.js.map +1 -0
  15. package/dist/bin/digest-cmd.d.ts +13 -0
  16. package/dist/bin/digest-cmd.d.ts.map +1 -0
  17. package/dist/bin/digest-cmd.js +286 -0
  18. package/dist/bin/digest-cmd.js.map +1 -0
  19. package/dist/bin/init.d.ts +33 -0
  20. package/dist/bin/init.d.ts.map +1 -0
  21. package/dist/bin/init.js +355 -0
  22. package/dist/bin/init.js.map +1 -0
  23. package/dist/bin/serve.d.ts +15 -0
  24. package/dist/bin/serve.d.ts.map +1 -0
  25. package/dist/bin/serve.js +21 -0
  26. package/dist/bin/serve.js.map +1 -0
  27. package/dist/bridges/bridge.d.ts +46 -0
  28. package/dist/bridges/bridge.d.ts.map +1 -0
  29. package/dist/bridges/bridge.js +128 -0
  30. package/dist/bridges/bridge.js.map +1 -0
  31. package/dist/bridges/registry.d.ts +19 -0
  32. package/dist/bridges/registry.d.ts.map +1 -0
  33. package/dist/bridges/registry.js +36 -0
  34. package/dist/bridges/registry.js.map +1 -0
  35. package/dist/core/config.d.ts +108 -0
  36. package/dist/core/config.d.ts.map +1 -0
  37. package/dist/core/config.js +29 -0
  38. package/dist/core/config.js.map +1 -0
  39. package/dist/core/embed.d.ts +16 -0
  40. package/dist/core/embed.d.ts.map +1 -0
  41. package/dist/core/embed.js +7 -0
  42. package/dist/core/embed.js.map +1 -0
  43. package/dist/core/llm.d.ts +31 -0
  44. package/dist/core/llm.d.ts.map +1 -0
  45. package/dist/core/llm.js +13 -0
  46. package/dist/core/llm.js.map +1 -0
  47. package/dist/core/nli.d.ts +26 -0
  48. package/dist/core/nli.d.ts.map +1 -0
  49. package/dist/core/nli.js +11 -0
  50. package/dist/core/nli.js.map +1 -0
  51. package/dist/core/session.d.ts +26 -0
  52. package/dist/core/session.d.ts.map +1 -0
  53. package/dist/core/session.js +71 -0
  54. package/dist/core/session.js.map +1 -0
  55. package/dist/core/store.d.ts +58 -0
  56. package/dist/core/store.d.ts.map +1 -0
  57. package/dist/core/store.js +8 -0
  58. package/dist/core/store.js.map +1 -0
  59. package/dist/core/types.d.ts +148 -0
  60. package/dist/core/types.d.ts.map +1 -0
  61. package/dist/core/types.js +8 -0
  62. package/dist/core/types.js.map +1 -0
  63. package/dist/engines/cognition.d.ts +76 -0
  64. package/dist/engines/cognition.d.ts.map +1 -0
  65. package/dist/engines/cognition.js +441 -0
  66. package/dist/engines/cognition.js.map +1 -0
  67. package/dist/engines/digest.d.ts +52 -0
  68. package/dist/engines/digest.d.ts.map +1 -0
  69. package/dist/engines/digest.js +351 -0
  70. package/dist/engines/digest.js.map +1 -0
  71. package/dist/engines/fsrs.d.ts +44 -0
  72. package/dist/engines/fsrs.d.ts.map +1 -0
  73. package/dist/engines/fsrs.js +122 -0
  74. package/dist/engines/fsrs.js.map +1 -0
  75. package/dist/engines/keywords.d.ts +12 -0
  76. package/dist/engines/keywords.d.ts.map +1 -0
  77. package/dist/engines/keywords.js +45 -0
  78. package/dist/engines/keywords.js.map +1 -0
  79. package/dist/engines/memory.d.ts +55 -0
  80. package/dist/engines/memory.d.ts.map +1 -0
  81. package/dist/engines/memory.js +147 -0
  82. package/dist/engines/memory.js.map +1 -0
  83. package/dist/index.d.ts +42 -0
  84. package/dist/index.d.ts.map +1 -0
  85. package/dist/index.js +38 -0
  86. package/dist/index.js.map +1 -0
  87. package/dist/mcp/server.d.ts +12 -0
  88. package/dist/mcp/server.d.ts.map +1 -0
  89. package/dist/mcp/server.js +173 -0
  90. package/dist/mcp/server.js.map +1 -0
  91. package/dist/mcp/tools.d.ts +45 -0
  92. package/dist/mcp/tools.d.ts.map +1 -0
  93. package/dist/mcp/tools.js +814 -0
  94. package/dist/mcp/tools.js.map +1 -0
  95. package/dist/namespace/manager.d.ts +28 -0
  96. package/dist/namespace/manager.d.ts.map +1 -0
  97. package/dist/namespace/manager.js +73 -0
  98. package/dist/namespace/manager.js.map +1 -0
  99. package/dist/namespace/scoped-store.d.ts +43 -0
  100. package/dist/namespace/scoped-store.d.ts.map +1 -0
  101. package/dist/namespace/scoped-store.js +91 -0
  102. package/dist/namespace/scoped-store.js.map +1 -0
  103. package/dist/plugins/loader.d.ts +18 -0
  104. package/dist/plugins/loader.d.ts.map +1 -0
  105. package/dist/plugins/loader.js +71 -0
  106. package/dist/plugins/loader.js.map +1 -0
  107. package/dist/providers/nli-http.d.ts +34 -0
  108. package/dist/providers/nli-http.d.ts.map +1 -0
  109. package/dist/providers/nli-http.js +83 -0
  110. package/dist/providers/nli-http.js.map +1 -0
  111. package/dist/providers/ollama.d.ts +37 -0
  112. package/dist/providers/ollama.d.ts.map +1 -0
  113. package/dist/providers/ollama.js +103 -0
  114. package/dist/providers/ollama.js.map +1 -0
  115. package/dist/providers/vertex-embed.d.ts +38 -0
  116. package/dist/providers/vertex-embed.d.ts.map +1 -0
  117. package/dist/providers/vertex-embed.js +69 -0
  118. package/dist/providers/vertex-embed.js.map +1 -0
  119. package/dist/providers/vertex-llm.d.ts +25 -0
  120. package/dist/providers/vertex-llm.d.ts.map +1 -0
  121. package/dist/providers/vertex-llm.js +75 -0
  122. package/dist/providers/vertex-llm.js.map +1 -0
  123. package/dist/stores/firestore.d.ts +59 -0
  124. package/dist/stores/firestore.d.ts.map +1 -0
  125. package/dist/stores/firestore.js +530 -0
  126. package/dist/stores/firestore.js.map +1 -0
  127. package/dist/stores/sqlite.d.ts +43 -0
  128. package/dist/stores/sqlite.d.ts.map +1 -0
  129. package/dist/stores/sqlite.js +593 -0
  130. package/dist/stores/sqlite.js.map +1 -0
  131. package/dist/triggers/pipeline.d.ts +21 -0
  132. package/dist/triggers/pipeline.d.ts.map +1 -0
  133. package/dist/triggers/pipeline.js +30 -0
  134. package/dist/triggers/pipeline.js.map +1 -0
  135. package/dist/triggers/registry.d.ts +17 -0
  136. package/dist/triggers/registry.d.ts.map +1 -0
  137. package/dist/triggers/registry.js +32 -0
  138. package/dist/triggers/registry.js.map +1 -0
  139. package/package.json +94 -0
@@ -0,0 +1,814 @@
1
+ /**
2
+ * MCP tool definitions for cortex-engine.
3
+ *
4
+ * Each ToolDefinition contains a JSON schema for MCP and a working handler.
5
+ * Handlers resolve the namespace, operate on the store, call engine functions,
6
+ * inject provenance from the session, and fire triggers/bridges after writes.
7
+ */
8
+ import { executeIngestionPipeline } from '../triggers/pipeline.js';
9
+ import { checkBridges } from '../bridges/bridge.js';
10
+ import { predictionErrorGate, hydeExpand, spreadActivation, memoryToSummary, } from '../engines/memory.js';
11
+ import { extractKeywords } from '../engines/keywords.js';
12
+ import { retrievability, scheduleNext, elapsedDaysSince } from '../engines/fsrs.js';
13
+ import { dreamConsolidate } from '../engines/cognition.js';
14
+ import { digestDocument } from '../engines/digest.js';
15
+ // ─── Helpers ─────────────────────────────────────────────────────────────────
16
/**
 * Read a required string argument from `args`.
 * @throws {Error} when the key is absent or the value is not a string.
 */
function str(args, key) {
    const value = args[key];
    if (typeof value === 'string') {
        return value;
    }
    throw new Error(`Missing required string argument: ${key}`);
}
22
/** Read an optional string argument; yields undefined when absent or non-string. */
function optStr(args, key) {
    const value = args[key];
    if (typeof value === 'string') {
        return value;
    }
    return undefined;
}
26
/** Read an optional numeric argument, falling back to `def` when absent or non-numeric. */
function optNum(args, key, def) {
    const candidate = args[key];
    if (typeof candidate === 'number') {
        return candidate;
    }
    return def;
}
30
/** Read an optional boolean argument, falling back to `def` when absent or non-boolean. */
function optBool(args, key, def) {
    const candidate = args[key];
    if (typeof candidate === 'boolean') {
        return candidate;
    }
    return def;
}
34
/**
 * Build a tool lookup function for ingestion pipeline execution.
 * The returned function resolves a tool by name and binds `ctx` into its handler.
 */
function makeToolLookup(activeTools, ctx) {
    return (name) => {
        for (const tool of activeTools) {
            if (tool.name === name) {
                return {
                    name: tool.name,
                    handler: (args) => tool.handler(args, ctx),
                };
            }
        }
        return undefined;
    };
}
43
/**
 * Check bridges for a given event in a source namespace.
 * When bridge rules match, the bridged payload is fed through every ingestion
 * trigger registered for the same event in the target namespace.
 */
async function fireBridges(ctx, sourceNamespace, event, eventData, allTools) {
    const rules = ctx.bridges.getRulesForEvent(sourceNamespace, event);
    if (!rules.length) {
        return;
    }
    const toolLookup = makeToolLookup(allTools, ctx);
    const ingest = async (targetNamespace, text, metadata) => {
        const store = ctx.namespaces.getStore(targetNamespace);
        const triggers = ctx.triggers.getTriggersForEventInNamespace(event, targetNamespace);
        for (const trigger of triggers) {
            await executeIngestionPipeline(trigger, text, metadata, toolLookup);
        }
        void store; // store available for future direct pipeline use
    };
    await checkBridges(rules, eventData, ingest, { depth: 0, sourceNamespace, bridgeName: '' });
}
58
/**
 * Fire ingestion triggers for a given event in a namespace.
 * Triggers run sequentially so earlier pipeline steps complete before later ones.
 */
async function fireTriggers(ctx, namespace, event, content, metadata, allTools) {
    const matching = ctx.triggers.getTriggersForEventInNamespace(event, namespace);
    const toolLookup = makeToolLookup(allTools, ctx);
    for (const trigger of matching) {
        await executeIngestionPipeline(trigger, content, metadata, toolLookup);
    }
}
66
+ // ─── Core Tools ───────────────────────────────────────────────────────────────
67
/**
 * `query` tool: semantic search over memories.
 * Pipeline: embed (optionally HyDE-expanded) → nearest-neighbor search →
 * spreading activation → FSRS retrievability scoring → touch accessed
 * memories → fire 'query' triggers/bridges.
 */
const queryTool = {
    name: 'query',
    description: 'Semantic search through memories. Optionally uses HyDE expansion for better concept-level recall and spreading activation for graph-enriched results.',
    inputSchema: {
        type: 'object',
        properties: {
            text: { type: 'string', description: 'The query text to search for' },
            namespace: { type: 'string', description: 'Namespace to search in (defaults to default namespace)' },
            limit: { type: 'number', description: 'Max results to return (default: 5)' },
            hyde: { type: 'boolean', description: 'Use HyDE query expansion (default: true)' },
        },
        required: ['text'],
    },
    async handler(args, ctx) {
        const text = str(args, 'text');
        const namespace = optStr(args, 'namespace');
        const limit = optNum(args, 'limit', 5);
        const useHyde = optBool(args, 'hyde', true);
        const store = ctx.namespaces.getStore(namespace);
        // Embed query — with HyDE expansion if enabled
        let queryEmbedding;
        if (useHyde) {
            queryEmbedding = await hydeExpand(text, ctx.llm, ctx.embed);
        }
        else {
            queryEmbedding = await ctx.embed.embed(text);
        }
        // Find nearest memories
        const nearest = await store.findNearest(queryEmbedding, limit);
        // Spread activation for richer results (may return more than `limit`,
        // hence the slice below)
        const activated = await spreadActivation(store, nearest);
        // Score retrievability and touch accessed memories
        const now = new Date();
        const results = await Promise.all(activated.slice(0, limit).map(async (r) => {
            // Re-fetch to get current FSRS state; fall back to the raw
            // similarity score when the memory cannot be loaded.
            const memory = await store.getMemory(r.memory.id);
            const daysSince = memory?.fsrs.last_review
                ? elapsedDaysSince(memory.fsrs.last_review)
                : 0;
            const ret = memory
                ? retrievability(memory.fsrs.stability, daysSince)
                : r.score;
            // Touch the memory (update access count + last_accessed)
            await store.touchMemory(r.memory.id, {});
            return {
                id: r.memory.id,
                name: r.memory.name,
                definition: r.memory.definition,
                category: r.memory.category,
                salience: r.memory.salience,
                confidence: r.memory.confidence,
                score: r.score,
                hop_count: r.hop_count,
                retrievability: ret,
                // Reported as the time of this query, since the memory was just touched.
                last_accessed: now.toISOString(),
                provenance: r.memory.provenance,
            };
        }));
        // Fire triggers and bridges after query
        const resolvedNs = namespace ?? ctx.namespaces.getDefaultNamespace();
        await fireTriggers(ctx, resolvedNs, 'query', text, { query: text, result_count: results.length }, ctx.allTools);
        await fireBridges(ctx, resolvedNs, 'query', { query: text, result_count: results.length }, ctx.allTools);
        return {
            query: text,
            hyde_used: useHyde,
            namespace: resolvedNs,
            count: results.length,
            results,
        };
    },
};
137
/**
 * `observe` tool: record an observation with prediction-error gating.
 * The embedding is compared against existing memories using the namespace's
 * merge/link similarity thresholds; the observation is always stored, with the
 * gate's decision returned to the caller.
 */
const observeTool = {
    name: 'observe',
    description: 'Record an observation with prediction error gating. Observations are compared to existing memories — too-similar content is merged, moderately similar is linked, novel content is stored as new.',
    inputSchema: {
        type: 'object',
        properties: {
            text: { type: 'string', description: 'The observation to record' },
            namespace: { type: 'string', description: 'Target namespace (defaults to default namespace)' },
            salience: { type: 'number', description: 'Importance score 1-10 (default: 5)' },
            source_file: { type: 'string', description: 'Source file path for provenance' },
            source_section: { type: 'string', description: 'Source section or heading for provenance' },
        },
        required: ['text'],
    },
    async handler(args, ctx) {
        const text = str(args, 'text');
        const namespace = optStr(args, 'namespace');
        const salience = optNum(args, 'salience', 5);
        const sourceFile = optStr(args, 'source_file') ?? '';
        const sourceSection = optStr(args, 'source_section') ?? '';
        const store = ctx.namespaces.getStore(namespace);
        const provenance = ctx.session.getProvenance();
        // Embed the observation
        const embedding = await ctx.embed.embed(text);
        // Run prediction error gate with namespace-specific thresholds
        const nsConfig = ctx.namespaces.getConfig(namespace);
        const gate = await predictionErrorGate(store, embedding, {
            merge: nsConfig.similarity_merge,
            link: nsConfig.similarity_link,
        });
        // Extract keywords
        const keywords = extractKeywords(text);
        // Store the observation. prediction_error is 1 - max_similarity;
        // null when no comparable memory was found (max_similarity <= 0).
        const id = await store.putObservation({
            content: text,
            source_file: sourceFile,
            source_section: sourceSection,
            salience,
            processed: false,
            prediction_error: gate.max_similarity > 0 ? 1 - gate.max_similarity : null,
            created_at: new Date(),
            updated_at: new Date(),
            embedding,
            keywords,
            provenance,
        });
        const result = {
            id,
            decision: gate.decision,
            nearest_id: gate.nearest_id,
            max_similarity: gate.max_similarity,
            namespace: namespace ?? ctx.namespaces.getDefaultNamespace(),
            keywords,
            salience,
        };
        // Fire triggers and bridges after observe
        const resolvedNs = namespace ?? ctx.namespaces.getDefaultNamespace();
        await fireTriggers(ctx, resolvedNs, 'observe', text, { observation_id: id, decision: gate.decision }, ctx.allTools);
        await fireBridges(ctx, resolvedNs, 'observe', result, ctx.allTools);
        return result;
    },
};
199
/**
 * `recall` tool: chronological review of recent observations.
 * Queries the observations collection newest-first within a day window.
 */
const recallTool = {
    name: 'recall',
    description: 'Retrieve recent observations chronologically. Useful for reviewing what has been observed recently.',
    inputSchema: {
        type: 'object',
        properties: {
            namespace: { type: 'string', description: 'Namespace to query (defaults to default namespace)' },
            limit: { type: 'number', description: 'Max entries to return (default: 10)' },
            days: { type: 'number', description: 'How many days back to look (default: 7)' },
        },
    },
    async handler(args, ctx) {
        const ns = optStr(args, 'namespace');
        const maxEntries = optNum(args, 'limit', 10);
        const windowDays = optNum(args, 'days', 7);
        const store = ctx.namespaces.getStore(ns);
        // Only entries created within the last `windowDays` days, newest first.
        const msPerDay = 24 * 60 * 60 * 1000;
        const cutoff = new Date(Date.now() - windowDays * msPerDay);
        const observations = await store.query(
            'observations',
            [{ field: 'created_at', op: '>=', value: cutoff }],
            { limit: maxEntries, orderBy: 'created_at', orderDir: 'desc' },
        );
        const rows = observations.map(o => ({
            id: o['id'],
            content: o['content'],
            salience: o['salience'],
            keywords: o['keywords'],
            source_file: o['source_file'],
            created_at: o['created_at'],
            processed: o['processed'],
            provenance: o['provenance'],
        }));
        return {
            namespace: ns ?? ctx.namespaces.getDefaultNamespace(),
            days: windowDays,
            count: rows.length,
            observations: rows,
        };
    },
};
235
/**
 * `neighbors` tool: breadth-first exploration of the memory graph from a seed
 * memory. Returns one entry per discovered node with its layer depth and the
 * edges that touch it.
 *
 * Fixes over the previous version:
 *  - frontier membership is an O(1) Set lookup instead of the convoluted
 *    `frontier.find(id => id === edge.source_id)` comparison;
 *  - each neighbor's `edges` list is filtered by that neighbor's own id.
 *    Previously it filtered on the seed id for every layer, so nodes at
 *    depth > 1 were reported with the seed's edges instead of their own.
 */
const neighborsTool = {
    name: 'neighbors',
    description: 'Explore the memory graph starting from a specific memory node. Returns the neighborhood of connected memories.',
    inputSchema: {
        type: 'object',
        properties: {
            memory_id: { type: 'string', description: 'ID of the memory to start from' },
            namespace: { type: 'string', description: 'Namespace to search in (defaults to default namespace)' },
            depth: { type: 'number', description: 'Graph traversal depth (default: 1)' },
        },
        required: ['memory_id'],
    },
    async handler(args, ctx) {
        const memoryId = str(args, 'memory_id');
        const namespace = optStr(args, 'namespace');
        const depth = optNum(args, 'depth', 1);
        const store = ctx.namespaces.getStore(namespace);
        // Get the seed memory
        const seed = await store.getMemory(memoryId);
        if (!seed) {
            return { error: `Memory not found: ${memoryId}`, memory_id: memoryId };
        }
        // Traverse edges layer by layer up to depth
        const visited = new Set([memoryId]);
        const layers = [
            { depth: 0, memory: memoryToSummary(seed), edges: [] },
        ];
        let frontier = [memoryId];
        for (let d = 0; d < depth; d++) {
            const frontierSet = new Set(frontier);
            const edges = await store.getEdgesForMemories(frontier);
            const nextFrontier = [];
            for (const edge of edges) {
                // Edges are undirected for traversal purposes: follow the edge
                // away from whichever endpoint sits in the current frontier.
                const targetId = frontierSet.has(edge.source_id)
                    ? edge.target_id
                    : edge.source_id;
                if (visited.has(targetId))
                    continue;
                visited.add(targetId);
                const neighbor = await store.getMemory(targetId);
                if (!neighbor)
                    continue;
                layers.push({
                    depth: d + 1,
                    memory: memoryToSummary(neighbor),
                    // Report the edges incident to THIS neighbor (bug fix: the
                    // old code filtered on the seed id at every depth).
                    edges: edges
                        .filter(e => e.source_id === targetId || e.target_id === targetId)
                        .map(e => ({ relation: e.relation, weight: e.weight, evidence: e.evidence })),
                });
                nextFrontier.push(targetId);
            }
            frontier = nextFrontier;
            if (frontier.length === 0)
                break;
        }
        return {
            seed_id: memoryId,
            namespace: namespace ?? ctx.namespaces.getDefaultNamespace(),
            depth,
            node_count: layers.length,
            nodes: layers,
        };
    },
};
298
/**
 * `stats` tool: operational snapshot of a namespace — memory and pending
 * observation counts plus the namespace's configuration flags.
 */
const statsTool = {
    name: 'stats',
    description: 'Get cortex statistics — memory counts, namespace info, and operational health.',
    inputSchema: {
        type: 'object',
        properties: {
            namespace: { type: 'string', description: 'Namespace to inspect (defaults to default namespace)' },
        },
    },
    async handler(args, ctx) {
        const ns = optStr(args, 'namespace');
        const resolved = ns ?? ctx.namespaces.getDefaultNamespace();
        const store = ctx.namespaces.getStore(ns);
        // Count memories and observations still awaiting processing.
        const memories = await store.getAllMemories();
        const pending = await store.getUnprocessedObservations(9999);
        // Namespace configuration (cognitive tool toggles, collection prefix).
        const nsConfig = ctx.namespaces.getConfig(ns);
        return {
            namespace: resolved,
            namespaces: ctx.namespaces.getNamespaceNames(),
            default_namespace: ctx.namespaces.getDefaultNamespace(),
            memory_count: memories.length,
            unprocessed_observations: pending.length,
            cognitive_tools: nsConfig.cognitive_tools,
            collections_prefix: nsConfig.collections_prefix,
        };
    },
};
327
/**
 * `ops_append` tool: append an entry to the operational log.
 * Entries carry provenance from the session and expire after 30 days.
 */
const opsAppendTool = {
    name: 'ops_append',
    description: 'Log an operational entry. Used for session breadcrumbs, project milestones, decisions, and handoffs.',
    inputSchema: {
        type: 'object',
        properties: {
            content: { type: 'string', description: 'The operational log entry content' },
            type: { type: 'string', enum: ['log', 'instruction', 'handoff', 'milestone', 'decision'], description: 'Entry type (default: log)' },
            project: { type: 'string', description: 'Project name for per-project sub-logs' },
            namespace: { type: 'string', description: 'Namespace (defaults to default namespace)' },
        },
        required: ['content'],
    },
    async handler(args, ctx) {
        const content = str(args, 'content');
        const entryType = optStr(args, 'type') ?? 'log';
        const project = optStr(args, 'project') ?? null;
        const ns = optStr(args, 'namespace');
        const store = ctx.namespaces.getStore(ns);
        const provenance = ctx.session.getProvenance();
        const keywords = extractKeywords(content);
        const createdAt = new Date();
        // Ops entries auto-expire 30 days after creation.
        const thirtyDaysMs = 30 * 24 * 60 * 60 * 1000;
        const entryId = await store.appendOps({
            content,
            type: entryType,
            status: 'active',
            project,
            session_ref: provenance.model_id,
            keywords,
            created_at: createdAt,
            updated_at: createdAt,
            expires_at: new Date(createdAt.getTime() + thirtyDaysMs),
            provenance,
        });
        return {
            id: entryId,
            type: entryType,
            project,
            namespace: ns ?? ctx.namespaces.getDefaultNamespace(),
            keywords,
        };
    },
};
371
/**
 * `ops_query` tool: filtered read over the operational log.
 * Absent filters are passed through as undefined so the store ignores them.
 */
const opsQueryTool = {
    name: 'ops_query',
    description: 'Query the operational log with composable filters. Useful for reviewing session history and project progress.',
    inputSchema: {
        type: 'object',
        properties: {
            project: { type: 'string', description: 'Filter by project name' },
            type: { type: 'string', enum: ['log', 'instruction', 'handoff', 'milestone', 'decision'], description: 'Filter by entry type' },
            status: { type: 'string', enum: ['active', 'done', 'stale'], description: 'Filter by status' },
            days: { type: 'number', description: 'Only show entries from last N days' },
            limit: { type: 'number', description: 'Max entries to return' },
            namespace: { type: 'string', description: 'Namespace to query' },
        },
    },
    async handler(args, ctx) {
        const ns = optStr(args, 'namespace');
        const store = ctx.namespaces.getStore(ns);
        // Build the composable filter object for the store.
        const filters = {
            project: optStr(args, 'project'),
            type: optStr(args, 'type'),
            status: optStr(args, 'status'),
            days: args['days'] === undefined ? undefined : optNum(args, 'days', 7),
            limit: args['limit'] === undefined ? undefined : optNum(args, 'limit', 20),
        };
        const entries = await store.queryOps(filters);
        const rows = entries.map(e => ({
            id: e.id,
            content: e.content,
            type: e.type,
            status: e.status,
            project: e.project,
            keywords: e.keywords,
            created_at: e.created_at,
        }));
        return {
            namespace: ns ?? ctx.namespaces.getDefaultNamespace(),
            count: rows.length,
            entries: rows,
        };
    },
};
410
/**
 * `ops_update` tool: patch an operational log entry.
 * Only provided fields are changed; updated_at is always refreshed.
 */
const opsUpdateTool = {
    name: 'ops_update',
    description: 'Update an operational log entry (e.g., mark as done, amend content).',
    inputSchema: {
        type: 'object',
        properties: {
            id: { type: 'string', description: 'ID of the ops entry to update' },
            status: { type: 'string', enum: ['active', 'done', 'stale'], description: 'New status' },
            content: { type: 'string', description: 'Updated content' },
            namespace: { type: 'string', description: 'Namespace (defaults to default namespace)' },
        },
        required: ['id'],
    },
    async handler(args, ctx) {
        const entryId = str(args, 'id');
        const ns = optStr(args, 'namespace');
        const store = ctx.namespaces.getStore(ns);
        const patch = { updated_at: new Date() };
        const status = optStr(args, 'status');
        const content = optStr(args, 'content');
        if (status) {
            patch['status'] = status;
        }
        if (content) {
            patch['content'] = content;
            // Content changed, so rebuild the keyword index for the entry too.
            patch['keywords'] = extractKeywords(content);
        }
        await store.updateOps(entryId, patch);
        return {
            id: entryId,
            updated: true,
            namespace: ns ?? ctx.namespaces.getDefaultNamespace(),
        };
    },
};
444
+ // ─── Extended Tools ───────────────────────────────────────────────────────────
445
/**
 * `predict` tool: proactive retrieval. HyDE-expands the given context, finds
 * the nearest memories, and scores each as similarity × FSRS retrievability.
 */
const predictTool = {
    name: 'predict',
    description: 'Proactive retrieval based on current context. Uses HyDE to expand the context and returns predictions with confidence scores.',
    inputSchema: {
        type: 'object',
        properties: {
            context: { type: 'string', description: 'Current context to predict from' },
            namespace: { type: 'string', description: 'Namespace to predict in (defaults to default namespace)' },
        },
        required: ['context'],
    },
    async handler(args, ctx) {
        const context = str(args, 'context');
        const ns = optStr(args, 'namespace');
        const store = ctx.namespaces.getStore(ns);
        // Expand the context via HyDE, then rank the nearest memories.
        const expandedEmbedding = await hydeExpand(context, ctx.llm, ctx.embed);
        const nearest = await store.findNearest(expandedEmbedding, 5);
        const predictions = [];
        for (const hit of nearest) {
            const elapsed = elapsedDaysSince(hit.memory.fsrs.last_review);
            const recall = retrievability(hit.memory.fsrs.stability, elapsed);
            predictions.push({
                id: hit.memory.id,
                name: hit.memory.name,
                definition: hit.memory.definition,
                category: hit.memory.category,
                // Confidence blends vector similarity with FSRS retrievability.
                confidence: hit.score * recall,
                similarity: hit.score,
                retrievability: recall,
            });
        }
        return {
            context,
            namespace: ns ?? ctx.namespaces.getDefaultNamespace(),
            count: predictions.length,
            predictions,
        };
    },
};
485
/**
 * `validate` tool: check a prediction against an actual outcome and reschedule
 * the referenced memory via FSRS (correct → Good/3, incorrect → Again/1).
 *
 * Fix: `outcome` is declared required in the schema but was previously read
 * raw with no runtime validation, unlike every other required argument (which
 * goes through `str`). A non-boolean value now fails fast with a clear error
 * instead of being silently coerced by truthiness.
 */
const validateTool = {
    name: 'validate',
    description: 'Check a prediction against an actual outcome. Updates FSRS scheduling for the referenced memory.',
    inputSchema: {
        type: 'object',
        properties: {
            prediction_id: { type: 'string', description: 'ID of the memory/prediction to validate' },
            outcome: { type: 'boolean', description: 'Whether the prediction was correct' },
            notes: { type: 'string', description: 'Optional notes on the validation outcome' },
            namespace: { type: 'string', description: 'Namespace (defaults to default namespace)' },
        },
        required: ['prediction_id', 'outcome'],
    },
    async handler(args, ctx) {
        const predictionId = str(args, 'prediction_id');
        const outcome = args['outcome'];
        if (typeof outcome !== 'boolean')
            throw new Error('Missing required boolean argument: outcome');
        const notes = optStr(args, 'notes') ?? '';
        const namespace = optStr(args, 'namespace');
        const store = ctx.namespaces.getStore(namespace);
        const memory = await store.getMemory(predictionId);
        if (!memory) {
            return { error: `Memory not found: ${predictionId}`, prediction_id: predictionId };
        }
        // FSRS rating: correct=3 (Good), incorrect=1 (Again)
        const rating = outcome ? 3 : 1;
        const elapsed = elapsedDaysSince(memory.fsrs.last_review);
        const scheduled = scheduleNext(memory.fsrs, rating, elapsed);
        // Update memory FSRS state; an incorrect outcome counts as a lapse.
        await store.touchMemory(predictionId, {
            stability: scheduled.stability,
            difficulty: scheduled.difficulty,
            state: scheduled.state,
            last_review: new Date(),
            reps: memory.fsrs.reps + 1,
            lapses: outcome ? memory.fsrs.lapses : memory.fsrs.lapses + 1,
        });
        const result = {
            prediction_id: predictionId,
            outcome,
            rating,
            notes,
            previous_stability: memory.fsrs.stability,
            new_stability: scheduled.stability,
            interval_days: scheduled.interval_days,
            state: scheduled.state,
            namespace: namespace ?? ctx.namespaces.getDefaultNamespace(),
        };
        const resolvedNs = namespace ?? ctx.namespaces.getDefaultNamespace();
        await fireTriggers(ctx, resolvedNs, 'validate', notes, { prediction_id: predictionId, outcome }, ctx.allTools);
        await fireBridges(ctx, resolvedNs, 'validate', result, ctx.allTools);
        return result;
    },
};
538
/**
 * `believe` tool: record a belief revision on a memory concept.
 * Order matters: the old definition is logged to belief history BEFORE the
 * memory itself is re-embedded and updated, so history is preserved even if
 * the update fails midway.
 */
const believeTool = {
    name: 'believe',
    description: 'Record a belief change on a memory concept. Logs the old definition, updates the memory, and stores the change in belief history.',
    inputSchema: {
        type: 'object',
        properties: {
            concept_id: { type: 'string', description: 'ID of the memory/concept being revised' },
            new_definition: { type: 'string', description: 'The updated definition or belief' },
            reason: { type: 'string', description: 'Why this belief is changing' },
            namespace: { type: 'string', description: 'Namespace (defaults to default namespace)' },
        },
        required: ['concept_id', 'new_definition', 'reason'],
    },
    async handler(args, ctx) {
        const conceptId = str(args, 'concept_id');
        const newDefinition = str(args, 'new_definition');
        const reason = str(args, 'reason');
        const namespace = optStr(args, 'namespace');
        const store = ctx.namespaces.getStore(namespace);
        const memory = await store.getMemory(conceptId);
        if (!memory) {
            return { error: `Memory not found: ${conceptId}`, concept_id: conceptId };
        }
        const oldDefinition = memory.definition;
        // Log belief change (history entry carries both old and new definitions)
        const beliefId = await store.putBelief({
            concept_id: conceptId,
            old_definition: oldDefinition,
            new_definition: newDefinition,
            reason,
            changed_at: new Date(),
        });
        // Re-embed with new definition
        const newEmbedding = await ctx.embed.embed(newDefinition);
        // Update the memory
        await store.updateMemory(conceptId, {
            definition: newDefinition,
            embedding: newEmbedding,
            updated_at: new Date(),
        });
        const result = {
            belief_id: beliefId,
            concept_id: conceptId,
            concept_name: memory.name,
            old_definition: oldDefinition,
            new_definition: newDefinition,
            reason,
            namespace: namespace ?? ctx.namespaces.getDefaultNamespace(),
        };
        // Fire triggers with the revision reason as content; bridges get the full result
        const resolvedNs = namespace ?? ctx.namespaces.getDefaultNamespace();
        await fireTriggers(ctx, resolvedNs, 'believe', reason, { concept_id: conceptId, belief_id: beliefId }, ctx.allTools);
        await fireBridges(ctx, resolvedNs, 'believe', result, ctx.allTools);
        return result;
    },
};
593
/**
 * `reflect` tool: synthesize a short reflection on a topic.
 * Pipeline: embed topic → fetch 5 nearest memories → LLM synthesizes a
 * reflection grounded in them → reflection is stored back as a new
 * observation (salience 6, unprocessed) so later consolidation can pick it up.
 */
const reflectTool = {
    name: 'reflect',
    description: 'Generate a reflective passage about a topic by querying related memories and synthesizing them with the LLM. Stores the reflection as an observation.',
    inputSchema: {
        type: 'object',
        properties: {
            topic: { type: 'string', description: 'Topic to reflect on' },
            namespace: { type: 'string', description: 'Namespace (defaults to default namespace)' },
        },
        required: ['topic'],
    },
    async handler(args, ctx) {
        const topic = str(args, 'topic');
        const namespace = optStr(args, 'namespace');
        const store = ctx.namespaces.getStore(namespace);
        // Query related memories
        const topicEmbedding = await ctx.embed.embed(topic);
        const related = await store.findNearest(topicEmbedding, 5);
        // Build context from related memories
        const memoryContext = related
            .map(r => `- ${r.memory.name}: ${r.memory.definition}`)
            .join('\n');
        // LLM generates reflection
        const reflection = await ctx.llm.generate(`You are reflecting on the topic: "${topic}"\n\nRelated concepts from memory:\n${memoryContext || '(no related memories found)'}\n\nWrite a 2-4 sentence reflection that synthesizes these concepts and your understanding of the topic. Be honest about uncertainty.`, {
            temperature: 0.7,
            maxTokens: 300,
            systemPrompt: 'You are a reflective cognitive agent. Generate thoughtful, grounded reflections based on the provided memory context.',
        });
        // Store reflection as observation (keywords drawn from topic + reflection)
        const embedding = await ctx.embed.embed(reflection);
        const keywords = extractKeywords(`${topic} ${reflection}`);
        const provenance = ctx.session.getProvenance();
        const obsId = await store.putObservation({
            content: reflection,
            source_file: '',
            source_section: `reflection:${topic}`,
            salience: 6,
            processed: false,
            prediction_error: null,
            created_at: new Date(),
            updated_at: new Date(),
            embedding,
            keywords,
            provenance,
        });
        return {
            topic,
            reflection,
            observation_id: obsId,
            related_memories: related.map(r => ({ id: r.memory.id, name: r.memory.name, score: r.score })),
            namespace: namespace ?? ctx.namespaces.getDefaultNamespace(),
        };
    },
};
647
const wanderTool = {
    name: 'wander',
    description: 'Random walk through the memory graph. Picks a random memory and follows random edges, returning the traversal path. Useful for serendipitous discovery.',
    inputSchema: {
        type: 'object',
        properties: {
            namespace: { type: 'string', description: 'Namespace to wander in (defaults to default namespace)' },
            steps: { type: 'number', description: 'Number of hops to take (default: 3)' },
        },
    },
    /**
     * Perform a weighted random walk over the memory graph.
     *
     * Picks a uniformly random seed memory, then takes up to `steps` hops,
     * choosing each outgoing edge with probability proportional to its weight
     * (roulette-wheel selection). The walk ends early at a node with no
     * outgoing edges or when an edge targets a missing memory.
     *
     * @param args - Tool arguments: optional `namespace` and `steps`.
     * @param ctx  - Tool context providing namespace stores.
     * @returns Namespace, seed memory id, number of hops taken, and the path.
     */
    async handler(args, ctx) {
        const namespace = optStr(args, 'namespace');
        const steps = optNum(args, 'steps', 3);
        const store = ctx.namespaces.getStore(namespace);
        // Get all memories and pick a random seed
        const allMemories = await store.getAllMemories();
        if (allMemories.length === 0) {
            return { namespace: namespace ?? ctx.namespaces.getDefaultNamespace(), path: [], message: 'No memories to wander through' };
        }
        const seedMemory = allMemories[Math.floor(Math.random() * allMemories.length)];
        const path = [
            { step: 0, memory: memoryToSummary(seedMemory) },
        ];
        let currentId = seedMemory.id;
        for (let step = 1; step <= steps; step++) {
            const edges = await store.getEdgesFrom(currentId);
            if (edges.length === 0)
                break;
            // Roulette-wheel selection: each edge is chosen with probability
            // proportional to its weight.
            const totalWeight = edges.reduce((sum, e) => sum + e.weight, 0);
            let rand = Math.random() * totalWeight;
            // BUGFIX: the fallback must be the LAST edge, not the first.
            // Math.random() returns [0, 1), but floating-point rounding in the
            // cumulative subtraction can leave `rand` marginally above zero
            // after visiting every edge; the correct wheel segment for that
            // residue is the final one. Defaulting to edges[0] biased such
            // ties toward the first edge.
            let chosenEdge = edges[edges.length - 1];
            for (const edge of edges) {
                rand -= edge.weight;
                if (rand <= 0) {
                    chosenEdge = edge;
                    break;
                }
            }
            const nextMemory = await store.getMemory(chosenEdge.target_id);
            if (!nextMemory)
                break; // dangling edge — end the walk rather than fail
            path.push({
                step,
                memory: memoryToSummary(nextMemory),
                relation: chosenEdge.relation,
            });
            currentId = chosenEdge.target_id;
        }
        return {
            namespace: namespace ?? ctx.namespaces.getDefaultNamespace(),
            seed_id: seedMemory.id,
            steps_taken: path.length - 1,
            path,
        };
    },
};
704
const dreamTool = {
    name: 'dream',
    description: 'Run the full 7-phase dream consolidation cycle: cluster observations into existing memories, refine definitions, create new memories from unclustered observations, discover edges between recently active concepts, passive FSRS review, cross-domain abstraction (REM), and a narrative report.',
    inputSchema: {
        type: 'object',
        properties: {
            namespace: { type: 'string', description: 'Namespace to consolidate (defaults to default namespace)' },
            limit: { type: 'number', description: 'Max observations to process in the cluster phase (default: 20)' },
        },
    },
    /**
     * Run the dream consolidation cycle for one namespace.
     *
     * Thin wrapper around `dreamConsolidate`: resolves the target store and
     * its similarity thresholds from the namespace config, then flattens the
     * per-phase results into the tool response.
     *
     * @param args - Tool arguments: optional `namespace` and `limit`.
     * @param ctx  - Tool context (embedder, LLM, namespace stores/config).
     * @returns Per-phase results plus totals, duration, and integration rate.
     */
    async handler(args, ctx) {
        const ns = optStr(args, 'namespace');
        const observationLimit = optNum(args, 'limit', 20);
        const store = ctx.namespaces.getStore(ns);
        // Similarity thresholds come from the namespace configuration.
        const { similarity_merge, similarity_link } = ctx.namespaces.getConfig(ns);
        const outcome = await dreamConsolidate(store, ctx.embed, ctx.llm, {
            observation_limit: observationLimit,
            similarity_merge,
            similarity_link,
        });
        return {
            namespace: ns ?? ctx.namespaces.getDefaultNamespace(),
            ...outcome.phases,
            total_processed: outcome.total_processed,
            duration_ms: outcome.duration_ms,
            integration_rate: outcome.integration_rate,
        };
    },
};
733
const digestTool = {
    name: 'digest',
    description: 'Process a document through cortex — extract knowledge via observe, generate insights via reflect. Use for batch ingestion of files, plans, creative writing, or any content cortex should learn from.',
    inputSchema: {
        type: 'object',
        properties: {
            content: { type: 'string', description: 'The document content to digest (markdown, with or without frontmatter)' },
            source_file: { type: 'string', description: 'Source file path for provenance tracking' },
            pipeline: {
                type: 'array',
                items: { type: 'string' },
                description: 'Pipeline steps to run (default: ["observe", "reflect"])',
            },
            namespace: { type: 'string', description: 'Target namespace (defaults to default)' },
            salience: { type: 'number', description: 'Salience override 0.0-1.0 (default: auto-detect)' },
        },
        required: ['content'],
    },
    /**
     * Digest a document: delegate to `digestDocument`, then fire 'observe'
     * triggers and bridges with the resulting observation ids.
     *
     * @param args - Tool arguments: required `content`; optional `source_file`,
     *               `pipeline`, `namespace`, `salience`.
     * @param ctx  - Tool context (embedder, LLM, namespace stores, tool list).
     * @returns Digest summary: observation ids, linked memories, insights,
     *          pipeline executed, timestamps.
     */
    async handler(args, ctx) {
        const content = str(args, 'content');
        const sourceFile = optStr(args, 'source_file');
        const ns = optStr(args, 'namespace');
        // Forward salience only when the caller supplied one; otherwise let
        // digestDocument auto-detect. (NOTE(review): the schema advertises a
        // 0.0-1.0 range but optNum's fallback is 5 — the fallback is
        // unreachable behind this guard; confirm the intended scale upstream.)
        let salience;
        if (args['salience'] !== undefined) {
            salience = optNum(args, 'salience', 5);
        }
        // Accept only an array of strings for the pipeline; anything else
        // falls back to digestDocument's default.
        let pipeline;
        const pipelineArg = args['pipeline'];
        if (Array.isArray(pipelineArg)) {
            pipeline = pipelineArg.filter((step) => typeof step === 'string');
        }
        const store = ctx.namespaces.getStore(ns);
        const resolvedNs = ns ?? ctx.namespaces.getDefaultNamespace();
        const result = await digestDocument(content, store, ctx.embed, ctx.llm, {
            pipeline,
            namespace: resolvedNs,
            source_file: sourceFile,
            salience,
        });
        // Notify downstream automations about the new observations.
        await fireTriggers(ctx, resolvedNs, 'observe', content, { observation_ids: result.observation_ids }, ctx.allTools);
        await fireBridges(ctx, resolvedNs, 'observe', { observation_ids: result.observation_ids, source_file: sourceFile }, ctx.allTools);
        return {
            namespace: resolvedNs,
            source_file: sourceFile ?? '',
            observation_ids: result.observation_ids,
            memories_linked: result.memories_linked,
            insights: result.insights,
            pipeline_executed: result.pipeline_executed,
            processed_at: result.processed_at.toISOString(),
            duration_ms: result.duration_ms,
        };
    },
};
782
// ─── Registry ─────────────────────────────────────────────────────────────────
/**
 * All 15 cognitive tool definitions, in registration order.
 *
 * @returns The complete tool list: core memory tools, ops-log tools, then the
 *          higher-order cognitive tools.
 */
export function createTools() {
    // Grouping mirrors CORE_TOOLS below; overall order is unchanged.
    const memoryTools = [queryTool, observeTool, recallTool, neighborsTool, statsTool];
    const opsTools = [opsAppendTool, opsQueryTool, opsUpdateTool];
    const cognitiveTools = [
        predictTool,
        validateTool,
        believeTool,
        reflectTool,
        wanderTool,
        dreamTool,
        digestTool,
    ];
    return [...memoryTools, ...opsTools, ...cognitiveTools];
}
803
// Memory primitives that exist in every namespace.
const MEMORY_TOOL_NAMES = ['query', 'observe', 'recall', 'neighbors', 'stats'];
// Operational-log primitives that exist in every namespace.
const OPS_TOOL_NAMES = ['ops_append', 'ops_query', 'ops_update'];
/** Core tools that are always active regardless of namespace config. */
export const CORE_TOOLS = [...MEMORY_TOOL_NAMES, ...OPS_TOOL_NAMES];
814
+ //# sourceMappingURL=tools.js.map