@hasna/mementos 0.11.1 → 0.14.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (90) hide show
  1. package/dist/cli/brains.d.ts +3 -0
  2. package/dist/cli/brains.d.ts.map +1 -0
  3. package/dist/cli/index.js +2064 -504
  4. package/dist/db/database.d.ts.map +1 -1
  5. package/dist/db/memories.d.ts.map +1 -1
  6. package/dist/db/tool-events.d.ts +27 -0
  7. package/dist/db/tool-events.d.ts.map +1 -0
  8. package/dist/index.d.ts +2 -0
  9. package/dist/index.d.ts.map +1 -1
  10. package/dist/index.js +172 -4
  11. package/dist/lib/activation-matcher.d.ts +16 -0
  12. package/dist/lib/activation-matcher.d.ts.map +1 -0
  13. package/dist/lib/asmr/categorizer.d.ts +31 -0
  14. package/dist/lib/asmr/categorizer.d.ts.map +1 -0
  15. package/dist/lib/asmr/context-agent.d.ts +4 -0
  16. package/dist/lib/asmr/context-agent.d.ts.map +1 -0
  17. package/dist/lib/asmr/ensemble.d.ts +23 -0
  18. package/dist/lib/asmr/ensemble.d.ts.map +1 -0
  19. package/dist/lib/asmr/fact-agent.d.ts +4 -0
  20. package/dist/lib/asmr/fact-agent.d.ts.map +1 -0
  21. package/dist/lib/asmr/index.d.ts +7 -0
  22. package/dist/lib/asmr/index.d.ts.map +1 -0
  23. package/dist/lib/asmr/orchestrator.d.ts +4 -0
  24. package/dist/lib/asmr/orchestrator.d.ts.map +1 -0
  25. package/dist/lib/asmr/temporal-agent.d.ts +4 -0
  26. package/dist/lib/asmr/temporal-agent.d.ts.map +1 -0
  27. package/dist/lib/asmr/types.d.ts +27 -0
  28. package/dist/lib/asmr/types.d.ts.map +1 -0
  29. package/dist/lib/auto-inject-orchestrator.d.ts +57 -0
  30. package/dist/lib/auto-inject-orchestrator.d.ts.map +1 -0
  31. package/dist/lib/built-in-hooks.d.ts.map +1 -1
  32. package/dist/lib/channel-pusher.d.ts +39 -0
  33. package/dist/lib/channel-pusher.d.ts.map +1 -0
  34. package/dist/lib/connectors/files.d.ts +8 -0
  35. package/dist/lib/connectors/files.d.ts.map +1 -0
  36. package/dist/lib/connectors/github.d.ts +7 -0
  37. package/dist/lib/connectors/github.d.ts.map +1 -0
  38. package/dist/lib/connectors/index.d.ts +12 -0
  39. package/dist/lib/connectors/index.d.ts.map +1 -0
  40. package/dist/lib/connectors/notion.d.ts +7 -0
  41. package/dist/lib/connectors/notion.d.ts.map +1 -0
  42. package/dist/lib/connectors/types.d.ts +27 -0
  43. package/dist/lib/connectors/types.d.ts.map +1 -0
  44. package/dist/lib/context-extractor.d.ts +14 -0
  45. package/dist/lib/context-extractor.d.ts.map +1 -0
  46. package/dist/lib/extractors/audio.d.ts +8 -0
  47. package/dist/lib/extractors/audio.d.ts.map +1 -0
  48. package/dist/lib/extractors/index.d.ts +12 -0
  49. package/dist/lib/extractors/index.d.ts.map +1 -0
  50. package/dist/lib/extractors/ocr.d.ts +7 -0
  51. package/dist/lib/extractors/ocr.d.ts.map +1 -0
  52. package/dist/lib/extractors/pdf.d.ts +7 -0
  53. package/dist/lib/extractors/pdf.d.ts.map +1 -0
  54. package/dist/lib/extractors/types.d.ts +12 -0
  55. package/dist/lib/extractors/types.d.ts.map +1 -0
  56. package/dist/lib/gatherer.d.ts +16 -0
  57. package/dist/lib/gatherer.d.ts.map +1 -0
  58. package/dist/lib/injector.d.ts +48 -1
  59. package/dist/lib/injector.d.ts.map +1 -1
  60. package/dist/lib/matryoshka.d.ts +50 -0
  61. package/dist/lib/matryoshka.d.ts.map +1 -0
  62. package/dist/lib/model-config.d.ts +14 -0
  63. package/dist/lib/model-config.d.ts.map +1 -0
  64. package/dist/lib/procedural-extractor.d.ts +21 -0
  65. package/dist/lib/procedural-extractor.d.ts.map +1 -0
  66. package/dist/lib/profile-synthesizer.d.ts +20 -0
  67. package/dist/lib/profile-synthesizer.d.ts.map +1 -0
  68. package/dist/lib/session-processor.d.ts.map +1 -1
  69. package/dist/lib/session-registry.d.ts +47 -0
  70. package/dist/lib/session-registry.d.ts.map +1 -0
  71. package/dist/lib/session-start-briefing.d.ts +10 -0
  72. package/dist/lib/session-start-briefing.d.ts.map +1 -0
  73. package/dist/lib/session-watcher.d.ts +30 -0
  74. package/dist/lib/session-watcher.d.ts.map +1 -0
  75. package/dist/lib/tool-lesson-extractor.d.ts +24 -0
  76. package/dist/lib/tool-lesson-extractor.d.ts.map +1 -0
  77. package/dist/lib/tool-memory-synthesizer.d.ts +28 -0
  78. package/dist/lib/tool-memory-synthesizer.d.ts.map +1 -0
  79. package/dist/lib/topic-clusterer.d.ts +21 -0
  80. package/dist/lib/topic-clusterer.d.ts.map +1 -0
  81. package/dist/lib/when-to-use-generator.d.ts +22 -0
  82. package/dist/lib/when-to-use-generator.d.ts.map +1 -0
  83. package/dist/mcp/index.d.ts +3 -1
  84. package/dist/mcp/index.d.ts.map +1 -1
  85. package/dist/mcp/index.js +3816 -383
  86. package/dist/server/index.d.ts.map +1 -1
  87. package/dist/server/index.js +873 -5
  88. package/dist/types/index.d.ts +57 -0
  89. package/dist/types/index.d.ts.map +1 -1
  90. package/package.json +3 -1
@@ -4,6 +4,7 @@ var __create = Object.create;
4
4
  var __getProtoOf = Object.getPrototypeOf;
5
5
  var __defProp = Object.defineProperty;
6
6
  var __getOwnPropNames = Object.getOwnPropertyNames;
7
+ var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
7
8
  var __hasOwnProp = Object.prototype.hasOwnProperty;
8
9
  var __toESM = (mod, isNodeMode, target) => {
9
10
  target = mod != null ? __create(__getProtoOf(mod)) : {};
@@ -16,6 +17,20 @@ var __toESM = (mod, isNodeMode, target) => {
16
17
  });
17
18
  return to;
18
19
  };
20
+ var __moduleCache = /* @__PURE__ */ new WeakMap;
21
+ var __toCommonJS = (from) => {
22
+ var entry = __moduleCache.get(from), desc;
23
+ if (entry)
24
+ return entry;
25
+ entry = __defProp({}, "__esModule", { value: true });
26
+ if (from && typeof from === "object" || typeof from === "function")
27
+ __getOwnPropNames(from).map((key) => !__hasOwnProp.call(entry, key) && __defProp(entry, key, {
28
+ get: () => from[key],
29
+ enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable
30
+ }));
31
+ __moduleCache.set(from, entry);
32
+ return entry;
33
+ };
19
34
  var __export = (target, all) => {
20
35
  for (var name in all)
21
36
  __defProp(target, name, {
@@ -92,6 +107,17 @@ var init_types = __esm(() => {
92
107
  });
93
108
 
94
109
  // src/db/database.ts
110
+ var exports_database = {};
111
+ __export(exports_database, {
112
+ uuid: () => uuid,
113
+ shortUuid: () => shortUuid,
114
+ resolvePartialId: () => resolvePartialId,
115
+ resetDatabase: () => resetDatabase,
116
+ now: () => now,
117
+ getDbPath: () => getDbPath,
118
+ getDatabase: () => getDatabase,
119
+ closeDatabase: () => closeDatabase
120
+ });
95
121
  import { Database } from "bun:sqlite";
96
122
  import { existsSync, mkdirSync } from "fs";
97
123
  import { dirname, join, resolve } from "path";
@@ -177,6 +203,15 @@ function runMigrations(db) {
177
203
  }
178
204
  }
179
205
  }
206
+ function closeDatabase() {
207
+ if (_db) {
208
+ _db.close();
209
+ _db = null;
210
+ }
211
+ }
212
+ function resetDatabase() {
213
+ _db = null;
214
+ }
180
215
  function now() {
181
216
  return new Date().toISOString();
182
217
  }
@@ -902,6 +937,44 @@ CREATE INDEX IF NOT EXISTS idx_memory_embeddings_model ON memory_embeddings(mode
902
937
 
903
938
  PRAGMA foreign_keys = ON;
904
939
  INSERT OR IGNORE INTO _migrations (id) VALUES (29);
940
+ `,
941
+ `
942
+ ALTER TABLE memories ADD COLUMN when_to_use TEXT DEFAULT NULL;
943
+ CREATE INDEX IF NOT EXISTS idx_memories_when_to_use ON memories(when_to_use) WHERE when_to_use IS NOT NULL;
944
+ ALTER TABLE memory_versions ADD COLUMN when_to_use TEXT;
945
+ INSERT OR IGNORE INTO _migrations (id) VALUES (30);
946
+ `,
947
+ `
948
+ CREATE TABLE IF NOT EXISTS tool_events (
949
+ id TEXT PRIMARY KEY,
950
+ tool_name TEXT NOT NULL,
951
+ action TEXT,
952
+ success INTEGER NOT NULL DEFAULT 1,
953
+ error_type TEXT CHECK(error_type IS NULL OR error_type IN ('timeout', 'permission', 'not_found', 'syntax', 'rate_limit', 'other')),
954
+ error_message TEXT,
955
+ tokens_used INTEGER,
956
+ latency_ms INTEGER,
957
+ context TEXT,
958
+ lesson TEXT,
959
+ when_to_use TEXT,
960
+ agent_id TEXT REFERENCES agents(id) ON DELETE SET NULL,
961
+ project_id TEXT REFERENCES projects(id) ON DELETE SET NULL,
962
+ session_id TEXT,
963
+ metadata TEXT DEFAULT '{}',
964
+ created_at TEXT NOT NULL DEFAULT (datetime('now'))
965
+ );
966
+ CREATE INDEX IF NOT EXISTS idx_tool_events_tool_name ON tool_events(tool_name);
967
+ CREATE INDEX IF NOT EXISTS idx_tool_events_agent ON tool_events(agent_id);
968
+ CREATE INDEX IF NOT EXISTS idx_tool_events_project ON tool_events(project_id);
969
+ CREATE INDEX IF NOT EXISTS idx_tool_events_success ON tool_events(success);
970
+ CREATE INDEX IF NOT EXISTS idx_tool_events_created ON tool_events(created_at);
971
+ INSERT OR IGNORE INTO _migrations (id) VALUES (31);
972
+ `,
973
+ `
974
+ ALTER TABLE memories ADD COLUMN sequence_group TEXT DEFAULT NULL;
975
+ ALTER TABLE memories ADD COLUMN sequence_order INTEGER DEFAULT NULL;
976
+ CREATE INDEX IF NOT EXISTS idx_memories_sequence_group ON memories(sequence_group) WHERE sequence_group IS NOT NULL;
977
+ INSERT OR IGNORE INTO _migrations (id) VALUES (32);
905
978
  `
906
979
  ];
907
980
  });
@@ -1249,6 +1322,9 @@ function parseMemoryRow(row) {
1249
1322
  session_id: row["session_id"] || null,
1250
1323
  machine_id: row["machine_id"] || null,
1251
1324
  flag: row["flag"] || null,
1325
+ when_to_use: row["when_to_use"] || null,
1326
+ sequence_group: row["sequence_group"] || null,
1327
+ sequence_order: row["sequence_order"] ?? null,
1252
1328
  content_type: row["content_type"] || "text",
1253
1329
  namespace: row["namespace"] || null,
1254
1330
  created_by_agent: row["created_by_agent"] || null,
@@ -1307,6 +1383,7 @@ function createMemory(input, dedupeMode = "merge", db) {
1307
1383
  d.run(`UPDATE memories SET
1308
1384
  value = ?, category = ?, summary = ?, tags = ?,
1309
1385
  importance = ?, metadata = ?, expires_at = ?,
1386
+ when_to_use = ?,
1310
1387
  pinned = COALESCE(pinned, 0),
1311
1388
  version = version + 1, updated_at = ?
1312
1389
  WHERE id = ?`, [
@@ -1317,6 +1394,7 @@ function createMemory(input, dedupeMode = "merge", db) {
1317
1394
  input.importance ?? 5,
1318
1395
  metadataJson,
1319
1396
  expiresAt,
1397
+ input.when_to_use || null,
1320
1398
  timestamp,
1321
1399
  existing.id
1322
1400
  ]);
@@ -1341,8 +1419,8 @@ function createMemory(input, dedupeMode = "merge", db) {
1341
1419
  return merged;
1342
1420
  }
1343
1421
  }
1344
- d.run(`INSERT INTO memories (id, key, value, category, scope, summary, tags, importance, source, status, pinned, agent_id, project_id, session_id, machine_id, namespace, created_by_agent, metadata, access_count, version, expires_at, valid_from, valid_until, ingested_at, created_at, updated_at)
1345
- VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, 'active', 0, ?, ?, ?, ?, ?, ?, ?, 0, 1, ?, ?, ?, ?, ?, ?)`, [
1422
+ d.run(`INSERT INTO memories (id, key, value, category, scope, summary, tags, importance, source, status, pinned, agent_id, project_id, session_id, machine_id, namespace, created_by_agent, when_to_use, sequence_group, sequence_order, metadata, access_count, version, expires_at, valid_from, valid_until, ingested_at, created_at, updated_at)
1423
+ VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, 'active', 0, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, 0, 1, ?, ?, ?, ?, ?, ?)`, [
1346
1424
  id,
1347
1425
  input.key,
1348
1426
  input.value,
@@ -1358,6 +1436,9 @@ function createMemory(input, dedupeMode = "merge", db) {
1358
1436
  input.machine_id || null,
1359
1437
  input.namespace || null,
1360
1438
  input.agent_id || null,
1439
+ input.when_to_use || null,
1440
+ input.sequence_group || null,
1441
+ input.sequence_order ?? null,
1361
1442
  metadataJson,
1362
1443
  expiresAt,
1363
1444
  input.metadata?.valid_from ?? timestamp,
@@ -1568,8 +1649,8 @@ function updateMemory(id, input, db) {
1568
1649
  throw new VersionConflictError(id, input.version, existing.version);
1569
1650
  }
1570
1651
  try {
1571
- d.run(`INSERT OR IGNORE INTO memory_versions (id, memory_id, version, value, importance, scope, category, tags, summary, pinned, status, created_at)
1572
- VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`, [
1652
+ d.run(`INSERT OR IGNORE INTO memory_versions (id, memory_id, version, value, importance, scope, category, tags, summary, pinned, status, when_to_use, created_at)
1653
+ VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`, [
1573
1654
  uuid(),
1574
1655
  existing.id,
1575
1656
  existing.version,
@@ -1581,6 +1662,7 @@ function updateMemory(id, input, db) {
1581
1662
  existing.summary,
1582
1663
  existing.pinned ? 1 : 0,
1583
1664
  existing.status,
1665
+ existing.when_to_use || null,
1584
1666
  existing.updated_at
1585
1667
  ]);
1586
1668
  } catch {}
@@ -1626,6 +1708,10 @@ function updateMemory(id, input, db) {
1626
1708
  sets.push("flag = ?");
1627
1709
  params.push(input.flag ?? null);
1628
1710
  }
1711
+ if (input.when_to_use !== undefined) {
1712
+ sets.push("when_to_use = ?");
1713
+ params.push(input.when_to_use ?? null);
1714
+ }
1629
1715
  if (input.tags !== undefined) {
1630
1716
  sets.push("tags = ?");
1631
1717
  params.push(JSON.stringify(input.tags));
@@ -3763,6 +3849,311 @@ var init_synthesis = __esm(() => {
3763
3849
  init_database();
3764
3850
  });
3765
3851
 
3852
+ // src/lib/when-to-use-generator.ts
3853
+ var exports_when_to_use_generator = {};
3854
+ __export(exports_when_to_use_generator, {
3855
+ generateWhenToUse: () => generateWhenToUse,
3856
+ autoGenerateWhenToUse: () => autoGenerateWhenToUse
3857
+ });
3858
+ async function generateWhenToUse(key, value, category, tags) {
3859
+ if (process.env["MEMENTOS_AUTO_WHEN_TO_USE"] !== "true")
3860
+ return null;
3861
+ const apiKey = process.env["ANTHROPIC_API_KEY"];
3862
+ if (!apiKey)
3863
+ return null;
3864
+ try {
3865
+ const userMessage = `Key: "${key}"
3866
+ Value: "${value}"
3867
+ Category: ${category}
3868
+ Tags: ${tags.join(", ") || "none"}
3869
+
3870
+ Generate the when_to_use activation context (1-2 sentences):`;
3871
+ const response = await fetch("https://api.anthropic.com/v1/messages", {
3872
+ method: "POST",
3873
+ headers: {
3874
+ "x-api-key": apiKey,
3875
+ "anthropic-version": "2023-06-01",
3876
+ "content-type": "application/json"
3877
+ },
3878
+ body: JSON.stringify({
3879
+ model: "claude-haiku-4-5-20251001",
3880
+ max_tokens: 150,
3881
+ system: SYSTEM_PROMPT,
3882
+ messages: [{ role: "user", content: userMessage }]
3883
+ })
3884
+ });
3885
+ if (!response.ok)
3886
+ return null;
3887
+ const data = await response.json();
3888
+ const text = data.content?.[0]?.text?.trim();
3889
+ return text || null;
3890
+ } catch {
3891
+ return null;
3892
+ }
3893
+ }
3894
+ async function autoGenerateWhenToUse(ctx) {
3895
+ if (ctx.memory.when_to_use)
3896
+ return;
3897
+ if (process.env["MEMENTOS_AUTO_WHEN_TO_USE"] !== "true")
3898
+ return;
3899
+ try {
3900
+ const whenToUse = await generateWhenToUse(ctx.memory.key, ctx.memory.value, ctx.memory.category, ctx.memory.tags);
3901
+ if (whenToUse) {
3902
+ const db = getDatabase();
3903
+ db.run("UPDATE memories SET when_to_use = ? WHERE id = ? AND when_to_use IS NULL", [whenToUse, ctx.memory.id]);
3904
+ }
3905
+ } catch {}
3906
+ }
3907
+ var SYSTEM_PROMPT = `You generate activation contexts for memory records. Given a memory's key, value, category, and tags, output a 1-2 sentence "when to use" description that describes the SITUATION or CONDITION under which an AI agent should retrieve this memory.
3908
+
3909
+ Rules:
3910
+ - Start with "When" or "If"
3911
+ - Describe the situation, not the content
3912
+ - Be specific enough to avoid false matches but general enough to catch relevant scenarios
3913
+ - Focus on the task/action the agent would be doing, not what the memory contains
3914
+
3915
+ Examples:
3916
+ - Key: "preferred-language", Value: "Always use TypeScript, never JavaScript" \u2192 "When choosing a programming language for a new file or project"
3917
+ - Key: "db-migration-order", Value: "Always run migrations before deploying" \u2192 "When deploying or updating database schema"
3918
+ - Key: "bash-chain-bug", Value: "Bash tool mangles && chains" \u2192 "When chaining commands with && in the Bash tool"`;
3919
+ var init_when_to_use_generator = __esm(() => {
3920
+ init_database();
3921
+ });
3922
+
3923
+ // src/lib/profile-synthesizer.ts
3924
+ var exports_profile_synthesizer = {};
3925
+ __export(exports_profile_synthesizer, {
3926
+ synthesizeProfile: () => synthesizeProfile,
3927
+ markProfileStale: () => markProfileStale,
3928
+ getProfileKey: () => getProfileKey
3929
+ });
3930
+ function getProfileKey(scope, id) {
3931
+ return `_profile_${scope}_${id}`;
3932
+ }
3933
+ async function synthesizeProfile(options) {
3934
+ const scope = options.scope || (options.project_id ? "project" : options.agent_id ? "agent" : "global");
3935
+ const id = options.project_id || options.agent_id || "global";
3936
+ const profileKey = getProfileKey(scope, id);
3937
+ if (!options.force_refresh) {
3938
+ const cached = getMemoryByKey(profileKey, "shared", undefined, options.project_id);
3939
+ if (cached) {
3940
+ const age = Date.now() - new Date(cached.updated_at).getTime();
3941
+ const maxAge = 24 * 60 * 60 * 1000;
3942
+ const isStale = cached.metadata?.stale === true;
3943
+ if (age < maxAge && !isStale) {
3944
+ return { profile: cached.value, memory_count: 0, from_cache: true };
3945
+ }
3946
+ }
3947
+ }
3948
+ const prefMemories = listMemories({
3949
+ category: "preference",
3950
+ project_id: options.project_id,
3951
+ status: "active",
3952
+ limit: 30
3953
+ });
3954
+ const factMemories = listMemories({
3955
+ category: "fact",
3956
+ project_id: options.project_id,
3957
+ status: "active",
3958
+ limit: 30
3959
+ });
3960
+ const allMemories = [...prefMemories, ...factMemories];
3961
+ if (allMemories.length === 0)
3962
+ return null;
3963
+ const apiKey = process.env["ANTHROPIC_API_KEY"];
3964
+ if (!apiKey) {
3965
+ const lines = allMemories.map((m) => `- ${m.key}: ${m.value}`).join(`
3966
+ `);
3967
+ const fallbackProfile = `## Profile
3968
+ ${lines}`;
3969
+ saveProfile(profileKey, fallbackProfile, allMemories.length, options);
3970
+ return { profile: fallbackProfile, memory_count: allMemories.length, from_cache: false };
3971
+ }
3972
+ try {
3973
+ const memoryList = allMemories.sort((a, b) => b.importance - a.importance).map((m) => `[${m.category}] ${m.key}: ${m.value}`).join(`
3974
+ `);
3975
+ const response = await fetch("https://api.anthropic.com/v1/messages", {
3976
+ method: "POST",
3977
+ headers: {
3978
+ "x-api-key": apiKey,
3979
+ "anthropic-version": "2023-06-01",
3980
+ "content-type": "application/json"
3981
+ },
3982
+ body: JSON.stringify({
3983
+ model: "claude-haiku-4-5-20251001",
3984
+ max_tokens: 500,
3985
+ system: PROFILE_PROMPT,
3986
+ messages: [{ role: "user", content: `Synthesize a profile from these ${allMemories.length} memories:
3987
+
3988
+ ${memoryList}` }]
3989
+ })
3990
+ });
3991
+ if (!response.ok)
3992
+ return null;
3993
+ const data = await response.json();
3994
+ const profile = data.content?.[0]?.text?.trim();
3995
+ if (!profile)
3996
+ return null;
3997
+ saveProfile(profileKey, profile, allMemories.length, options);
3998
+ return { profile, memory_count: allMemories.length, from_cache: false };
3999
+ } catch {
4000
+ return null;
4001
+ }
4002
+ }
4003
+ function saveProfile(key, value, memoryCount, options) {
4004
+ try {
4005
+ createMemory({
4006
+ key,
4007
+ value,
4008
+ category: "fact",
4009
+ scope: "shared",
4010
+ importance: 10,
4011
+ source: "auto",
4012
+ tags: ["profile", "synthesized"],
4013
+ when_to_use: "When needing to understand this agent's or project's preferences, style, and conventions",
4014
+ metadata: { memory_count: memoryCount, synthesized_at: new Date().toISOString(), stale: false },
4015
+ agent_id: options.agent_id,
4016
+ project_id: options.project_id
4017
+ });
4018
+ } catch {}
4019
+ }
4020
+ function markProfileStale(projectId, _agentId) {
4021
+ try {
4022
+ const { getDatabase: getDatabase2 } = (init_database(), __toCommonJS(exports_database));
4023
+ const db = getDatabase2();
4024
+ db.run(`UPDATE memories SET metadata = json_set(COALESCE(metadata, '{}'), '$.stale', json('true'))
4025
+ WHERE key LIKE '_profile_%' AND COALESCE(project_id, '') = ?`, [projectId || ""]);
4026
+ } catch {}
4027
+ }
4028
+ var PROFILE_PROMPT = `You synthesize a coherent agent/project profile from individual preference and fact memories.
4029
+
4030
+ Output a concise profile (200-300 words max) organized by:
4031
+ - **Stack & Tools**: Languages, frameworks, package managers, etc.
4032
+ - **Code Style**: Formatting, patterns, naming conventions
4033
+ - **Workflow**: Testing, deployment, git practices
4034
+ - **Communication**: Response style, verbosity, formatting preferences
4035
+ - **Key Facts**: Architecture decisions, constraints, team conventions
4036
+
4037
+ Only include sections that have relevant data. Be specific and actionable.
4038
+ Output in markdown format.`;
4039
+ var init_profile_synthesizer = __esm(() => {
4040
+ init_memories();
4041
+ });
4042
+
4043
+ // src/lib/contradiction.ts
4044
+ var exports_contradiction = {};
4045
+ __export(exports_contradiction, {
4046
+ invalidateFact: () => invalidateFact,
4047
+ detectContradiction: () => detectContradiction
4048
+ });
4049
+ function invalidateFact(oldMemoryId, newMemoryId, db) {
4050
+ const d = db || getDatabase();
4051
+ const timestamp = now();
4052
+ d.run("UPDATE memories SET valid_until = ?, updated_at = ? WHERE id = ?", [timestamp, timestamp, oldMemoryId]);
4053
+ if (newMemoryId) {
4054
+ const row = d.query("SELECT metadata FROM memories WHERE id = ?").get(newMemoryId);
4055
+ if (row) {
4056
+ const metadata = JSON.parse(row.metadata || "{}");
4057
+ metadata.supersedes_id = oldMemoryId;
4058
+ d.run("UPDATE memories SET metadata = ?, updated_at = ? WHERE id = ?", [JSON.stringify(metadata), timestamp, newMemoryId]);
4059
+ }
4060
+ }
4061
+ return {
4062
+ invalidated_memory_id: oldMemoryId,
4063
+ new_memory_id: newMemoryId || null,
4064
+ valid_until: timestamp,
4065
+ supersedes_id: oldMemoryId
4066
+ };
4067
+ }
4068
+ function heuristicContradictionScore(newValue, existingValue, newKey, existingKey) {
4069
+ if (newKey !== existingKey)
4070
+ return 0;
4071
+ const newLower = newValue.toLowerCase().trim();
4072
+ const existingLower = existingValue.toLowerCase().trim();
4073
+ if (newLower === existingLower)
4074
+ return 0;
4075
+ const newWords = new Set(newLower.split(/\s+/));
4076
+ const existingWords = new Set(existingLower.split(/\s+/));
4077
+ let overlap = 0;
4078
+ for (const w of newWords) {
4079
+ if (existingWords.has(w))
4080
+ overlap++;
4081
+ }
4082
+ const totalUnique = new Set([...newWords, ...existingWords]).size;
4083
+ const overlapRatio = totalUnique > 0 ? overlap / totalUnique : 0;
4084
+ if (overlapRatio < 0.3)
4085
+ return 0.7;
4086
+ if (overlapRatio < 0.5)
4087
+ return 0.4;
4088
+ return 0.1;
4089
+ }
4090
+ async function llmContradictionCheck(_newValue, _existingValue, _key) {
4091
+ const provider = providerRegistry.getAvailable();
4092
+ if (!provider) {
4093
+ return { contradicts: false, confidence: 0, reasoning: "No LLM provider available" };
4094
+ }
4095
+ try {
4096
+ return { contradicts: false, confidence: 0, reasoning: "LLM check skipped \u2014 using heuristic only" };
4097
+ } catch {
4098
+ return { contradicts: false, confidence: 0, reasoning: "LLM check failed" };
4099
+ }
4100
+ }
4101
+ async function detectContradiction(newKey, newValue, options = {}, db) {
4102
+ const d = db || getDatabase();
4103
+ const { scope, project_id, min_importance = 7, use_llm = false } = options;
4104
+ const conditions = ["key = ?", "status = 'active'", "importance >= ?"];
4105
+ const params = [newKey, min_importance];
4106
+ if (scope) {
4107
+ conditions.push("scope = ?");
4108
+ params.push(scope);
4109
+ }
4110
+ if (project_id) {
4111
+ conditions.push("project_id = ?");
4112
+ params.push(project_id);
4113
+ }
4114
+ conditions.push("(valid_until IS NULL OR valid_until > datetime('now'))");
4115
+ const sql = `SELECT * FROM memories WHERE ${conditions.join(" AND ")} ORDER BY importance DESC LIMIT 10`;
4116
+ const rows = d.query(sql).all(...params);
4117
+ if (rows.length === 0) {
4118
+ return { contradicts: false, conflicting_memory: null, confidence: 0, reasoning: "No existing memories with this key" };
4119
+ }
4120
+ let bestContradiction = {
4121
+ contradicts: false,
4122
+ conflicting_memory: null,
4123
+ confidence: 0,
4124
+ reasoning: "No contradiction detected"
4125
+ };
4126
+ for (const row of rows) {
4127
+ const existing = parseMemoryRow(row);
4128
+ const heuristicScore = heuristicContradictionScore(newValue, existing.value, newKey, existing.key);
4129
+ if (heuristicScore > bestContradiction.confidence) {
4130
+ bestContradiction = {
4131
+ contradicts: heuristicScore >= 0.5,
4132
+ conflicting_memory: existing,
4133
+ confidence: heuristicScore,
4134
+ reasoning: heuristicScore >= 0.7 ? `New value for "${newKey}" significantly differs from existing high-importance memory (importance ${existing.importance})` : heuristicScore >= 0.5 ? `New value for "${newKey}" partially conflicts with existing memory (importance ${existing.importance})` : `Minor difference detected for "${newKey}"`
4135
+ };
4136
+ }
4137
+ }
4138
+ if (use_llm && bestContradiction.confidence >= 0.3 && bestContradiction.confidence < 0.7 && bestContradiction.conflicting_memory) {
4139
+ const llmResult = await llmContradictionCheck(newValue, bestContradiction.conflicting_memory.value, newKey);
4140
+ if (llmResult.confidence > bestContradiction.confidence) {
4141
+ bestContradiction = {
4142
+ ...bestContradiction,
4143
+ contradicts: llmResult.contradicts,
4144
+ confidence: llmResult.confidence,
4145
+ reasoning: llmResult.reasoning
4146
+ };
4147
+ }
4148
+ }
4149
+ return bestContradiction;
4150
+ }
4151
+ var init_contradiction = __esm(() => {
4152
+ init_database();
4153
+ init_memories();
4154
+ init_registry();
4155
+ });
4156
+
3766
4157
  // src/server/index.ts
3767
4158
  init_memories();
3768
4159
  import { existsSync as existsSync3 } from "fs";
@@ -4348,10 +4739,76 @@ hookRegistry.register({
4348
4739
  description: "Generate and store vector embedding for semantic memory search",
4349
4740
  handler: async (ctx) => {
4350
4741
  const { indexMemoryEmbedding: indexMemoryEmbedding2 } = await Promise.resolve().then(() => (init_memories(), exports_memories));
4351
- const text = [ctx.memory.value, ctx.memory.summary].filter(Boolean).join(" ");
4742
+ const text = ctx.memory.when_to_use || [ctx.memory.value, ctx.memory.summary].filter(Boolean).join(" ");
4352
4743
  indexMemoryEmbedding2(ctx.memory.id, text);
4353
4744
  }
4354
4745
  });
4746
+ hookRegistry.register({
4747
+ type: "PostMemorySave",
4748
+ blocking: false,
4749
+ builtin: true,
4750
+ priority: 60,
4751
+ description: "Auto-generate when_to_use activation context via LLM if missing",
4752
+ handler: async (ctx) => {
4753
+ const { autoGenerateWhenToUse: autoGenerateWhenToUse2 } = await Promise.resolve().then(() => (init_when_to_use_generator(), exports_when_to_use_generator));
4754
+ await autoGenerateWhenToUse2(ctx);
4755
+ }
4756
+ });
4757
+ hookRegistry.register({
4758
+ type: "PostMemorySave",
4759
+ blocking: false,
4760
+ builtin: true,
4761
+ priority: 65,
4762
+ description: "Mark synthesized profile as stale when a preference or fact memory is saved",
4763
+ handler: async (ctx) => {
4764
+ const category = ctx.memory?.category;
4765
+ if (category !== "preference" && category !== "fact")
4766
+ return;
4767
+ try {
4768
+ const { markProfileStale: markProfileStale2 } = await Promise.resolve().then(() => (init_profile_synthesizer(), exports_profile_synthesizer));
4769
+ markProfileStale2(ctx.projectId, ctx.agentId);
4770
+ } catch {}
4771
+ }
4772
+ });
4773
+ hookRegistry.register({
4774
+ type: "PostMemorySave",
4775
+ blocking: false,
4776
+ builtin: true,
4777
+ priority: 70,
4778
+ description: "Auto-decay importance of existing memories contradicted by the newly saved memory",
4779
+ handler: async (ctx) => {
4780
+ if (ctx.wasUpdated)
4781
+ return;
4782
+ const memory = ctx.memory;
4783
+ if (memory.category !== "fact" && memory.category !== "knowledge")
4784
+ return;
4785
+ try {
4786
+ const { detectContradiction: detectContradiction2 } = await Promise.resolve().then(() => (init_contradiction(), exports_contradiction));
4787
+ const { updateMemory: updateMemory2, getMemory: getMemory2 } = await Promise.resolve().then(() => (init_memories(), exports_memories));
4788
+ const result = await detectContradiction2(memory.key, memory.value, {
4789
+ scope: memory.scope,
4790
+ project_id: ctx.projectId,
4791
+ min_importance: 1
4792
+ });
4793
+ if (!result.contradicts || !result.conflicting_memory)
4794
+ return;
4795
+ const conflicting = result.conflicting_memory;
4796
+ if (conflicting.id === memory.id)
4797
+ return;
4798
+ const fresh = getMemory2(conflicting.id);
4799
+ if (!fresh || fresh.status !== "active")
4800
+ return;
4801
+ const halvedImportance = Math.max(1, Math.floor(fresh.importance / 2));
4802
+ const metadata = { ...fresh.metadata || {}, contradicted_by: memory.id };
4803
+ updateMemory2(fresh.id, {
4804
+ importance: halvedImportance,
4805
+ flag: "contradicted",
4806
+ metadata,
4807
+ version: fresh.version
4808
+ });
4809
+ } catch {}
4810
+ }
4811
+ });
4355
4812
  hookRegistry.register({
4356
4813
  type: "PostMemoryInject",
4357
4814
  blocking: false,
@@ -5434,12 +5891,349 @@ function getNextPendingJob(db) {
5434
5891
  return parseJobRow(row);
5435
5892
  }
5436
5893
 
5894
+ // src/db/tool-events.ts
5895
+ init_database();
5896
// Map a raw tool_events SQLite row to a ToolEvent object: optional text
// columns fall back to null, `success` (stored as 0/1) becomes a boolean,
// and the metadata JSON blob is decoded.
function parseToolEventRow(row) {
  // Tolerate a corrupt metadata blob: a bad JSON value in one row must not
  // make the row (or any listing that includes it) unreadable.
  let metadata;
  try {
    metadata = JSON.parse(row["metadata"] || "{}");
  } catch {
    metadata = {};
  }
  return {
    id: row["id"],
    tool_name: row["tool_name"],
    action: row["action"] || null,
    success: !!row["success"],
    error_type: row["error_type"] || null,
    error_message: row["error_message"] || null,
    // ?? (not ||) so a stored 0 for tokens/latency is preserved.
    tokens_used: row["tokens_used"] ?? null,
    latency_ms: row["latency_ms"] ?? null,
    context: row["context"] || null,
    lesson: row["lesson"] || null,
    when_to_use: row["when_to_use"] || null,
    agent_id: row["agent_id"] || null,
    project_id: row["project_id"] || null,
    session_id: row["session_id"] || null,
    metadata,
    created_at: row["created_at"]
  };
}
5916
// Insert a tool usage event and return the persisted row, re-read from the
// database so the caller sees exactly what was stored.
function saveToolEvent(input, db) {
  const conn = db || getDatabase();
  const eventId = uuid();
  const createdAt = now();
  // Optional text fields collapse to NULL; tokens/latency use ?? so 0 survives.
  const values = [
    eventId,
    input.tool_name,
    input.action || null,
    input.success ? 1 : 0,
    input.error_type || null,
    input.error_message || null,
    input.tokens_used ?? null,
    input.latency_ms ?? null,
    input.context || null,
    input.lesson || null,
    input.when_to_use || null,
    input.agent_id || null,
    input.project_id || null,
    input.session_id || null,
    JSON.stringify(input.metadata || {}),
    createdAt
  ];
  conn.run(`INSERT INTO tool_events (id, tool_name, action, success, error_type, error_message, tokens_used, latency_ms, context, lesson, when_to_use, agent_id, project_id, session_id, metadata, created_at)
  VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`, values);
  return getToolEvent(eventId, conn);
}
5942
// Fetch a single tool event by id; null when no such row exists.
function getToolEvent(id, db) {
  const conn = db || getDatabase();
  const row = conn.query("SELECT * FROM tool_events WHERE id = ?").get(id);
  return row ? parseToolEventRow(row) : null;
}
5949
+ function getToolEvents(filters, db) {
5950
+ const d = db || getDatabase();
5951
+ const conditions = [];
5952
+ const params = [];
5953
+ if (filters.tool_name) {
5954
+ conditions.push("tool_name = ?");
5955
+ params.push(filters.tool_name);
5956
+ }
5957
+ if (filters.agent_id) {
5958
+ conditions.push("agent_id = ?");
5959
+ params.push(filters.agent_id);
5960
+ }
5961
+ if (filters.project_id) {
5962
+ conditions.push("project_id = ?");
5963
+ params.push(filters.project_id);
5964
+ }
5965
+ if (filters.success !== undefined) {
5966
+ conditions.push("success = ?");
5967
+ params.push(filters.success ? 1 : 0);
5968
+ }
5969
+ if (filters.from_date) {
5970
+ conditions.push("created_at >= ?");
5971
+ params.push(filters.from_date);
5972
+ }
5973
+ if (filters.to_date) {
5974
+ conditions.push("created_at <= ?");
5975
+ params.push(filters.to_date);
5976
+ }
5977
+ const where = conditions.length > 0 ? `WHERE ${conditions.join(" AND ")}` : "";
5978
+ const limit = filters.limit || 50;
5979
+ const offset = filters.offset || 0;
5980
+ const rows = d.query(`SELECT * FROM tool_events ${where} ORDER BY created_at DESC LIMIT ? OFFSET ?`).all(...params, limit, offset);
5981
+ return rows.map(parseToolEventRow);
5982
+ }
5983
// Aggregate usage statistics for one tool (optionally scoped to a project):
// call counts, success rate, average tokens/latency over non-null samples,
// the five most frequent error types, and the most recent use timestamp.
function getToolStats(tool_name, project_id, db) {
  const conn = db || getDatabase();
  const args = [tool_name];
  let where = "WHERE tool_name = ?";
  if (project_id) {
    where += " AND project_id = ?";
    args.push(project_id);
  }
  const aggregate = conn.query(`SELECT
    COUNT(*) as total_calls,
    SUM(CASE WHEN success = 1 THEN 1 ELSE 0 END) as success_count,
    SUM(CASE WHEN success = 0 THEN 1 ELSE 0 END) as failure_count,
    AVG(CASE WHEN tokens_used IS NOT NULL THEN tokens_used END) as avg_tokens,
    AVG(CASE WHEN latency_ms IS NOT NULL THEN latency_ms END) as avg_latency_ms,
    MAX(created_at) as last_used
  FROM tool_events ${where}`).get(...args);
  const topErrors = conn.query(`SELECT error_type, COUNT(*) as count
  FROM tool_events ${where} AND error_type IS NOT NULL
  GROUP BY error_type ORDER BY count DESC LIMIT 5`).all(...args);
  // SUM/AVG/MAX yield NULL over an empty set; normalize to 0 / null / "".
  const totalCalls = aggregate["total_calls"] || 0;
  const okCalls = aggregate["success_count"] || 0;
  return {
    tool_name,
    total_calls: totalCalls,
    success_count: okCalls,
    failure_count: aggregate["failure_count"] || 0,
    success_rate: totalCalls > 0 ? okCalls / totalCalls : 0,
    avg_tokens: aggregate["avg_tokens"] ?? null,
    avg_latency_ms: aggregate["avg_latency_ms"] ?? null,
    common_errors: topErrors,
    last_used: aggregate["last_used"] || ""
  };
}
6016
// Return the most recent recorded lessons for a tool (rows where a lesson was
// extracted), optionally scoped to a project. Default limit is 20.
function getToolLessons(tool_name, project_id, limit, db) {
  const conn = db || getDatabase();
  const clauses = ["tool_name = ?", "lesson IS NOT NULL"];
  const args = [tool_name];
  if (project_id) {
    clauses.push("project_id = ?");
    args.push(project_id);
  }
  const sql = `SELECT lesson, when_to_use, created_at FROM tool_events WHERE ${clauses.join(" AND ")} ORDER BY created_at DESC LIMIT ?`;
  return conn.query(sql).all(...args, limit || 20);
}
6027
+
6028
+ // src/server/index.ts
6029
+ init_profile_synthesizer();
6030
+
5437
6031
  // src/lib/session-queue.ts
5438
6032
  init_database();
5439
6033
 
5440
6034
  // src/lib/session-processor.ts
5441
6035
  init_memories();
5442
6036
  init_registry();
6037
+
6038
+ // src/lib/tool-lesson-extractor.ts
6039
+ init_memories();
6040
// System prompt for extractToolLessons: instructs the model to emit a raw JSON
// array of { tool_name, lesson, when_to_use, success, error_type } objects.
// NOTE: the string bytes are part of the API request — do not edit casually.
var SYSTEM_PROMPT2 = `You are a tool usage analyst. Given a session transcript containing tool calls and their results, extract actionable lessons about tool usage.

For each tool lesson, output a JSON array of objects with these fields:
- tool_name: name of the tool
- lesson: the insight (1-2 sentences)
- when_to_use: activation context \u2014 when should an agent recall this lesson (start with "When" or "If")
- success: boolean \u2014 was the tool call that taught this lesson successful?
- error_type: if failed, one of: timeout, permission, not_found, syntax, rate_limit, other (or null if success)

Focus on:
1. Successful patterns: what worked and why
2. Failure lessons: what went wrong and how to avoid it
3. Parameter insights: optimal settings discovered
4. Alternative tools: when one tool is better than another
5. Error recovery: what to do when a specific error occurs

Only extract genuinely useful, non-obvious lessons. Skip trivial observations.
Output ONLY the JSON array, no markdown or explanation.`;
6058
// Extract tool-usage lessons from a session transcript via the Anthropic API.
// Best-effort by design: returns [] when ANTHROPIC_API_KEY is unset, on HTTP
// or parse failure, or on any unexpected error. Each valid lesson is persisted
// both as a tool_events row and as a "knowledge" memory tagged for tool recall.
async function extractToolLessons(transcript, options) {
  const apiKey = process.env["ANTHROPIC_API_KEY"];
  if (!apiKey)
    return [];
  try {
    // Cap prompt size; the marker tells the model the transcript was cut.
    const truncated = transcript.length > 8000 ? transcript.slice(0, 8000) + `
[...truncated]` : transcript;
    const response = await fetch("https://api.anthropic.com/v1/messages", {
      method: "POST",
      headers: {
        "x-api-key": apiKey,
        "anthropic-version": "2023-06-01",
        "content-type": "application/json"
      },
      body: JSON.stringify({
        model: "claude-haiku-4-5-20251001",
        max_tokens: 1500,
        system: SYSTEM_PROMPT2,
        messages: [{ role: "user", content: `Extract tool lessons from this session transcript:

${truncated}` }]
      })
    });
    if (!response.ok)
      return [];
    const data = await response.json();
    const text = data.content?.[0]?.text?.trim();
    if (!text)
      return [];
    // Models sometimes wrap the array in ```json fences despite the prompt;
    // strip them so a formatting slip doesn't silently discard every lesson.
    const lessons = JSON.parse(text.replace(/^```(?:json)?\s*/i, "").replace(/\s*```$/, ""));
    if (!Array.isArray(lessons))
      return [];
    for (const lesson of lessons) {
      if (!lesson.tool_name || !lesson.lesson)
        continue;
      // Persist twice: structured event for stats, memory for recall.
      // Each write is independently best-effort.
      try {
        saveToolEvent({
          tool_name: lesson.tool_name,
          success: lesson.success,
          error_type: lesson.error_type || undefined,
          lesson: lesson.lesson,
          when_to_use: lesson.when_to_use,
          context: "extracted from session transcript",
          agent_id: options?.agent_id,
          project_id: options?.project_id,
          session_id: options?.session_id
        });
      } catch {}
      try {
        createMemory({
          key: `tool-lesson-${lesson.tool_name}-${Date.now()}`,
          value: lesson.lesson,
          category: "knowledge",
          scope: "shared",
          importance: 7,
          source: "auto",
          tags: ["tool-memory", lesson.tool_name, "auto-extracted"],
          when_to_use: lesson.when_to_use,
          agent_id: options?.agent_id,
          project_id: options?.project_id,
          session_id: options?.session_id
        });
      } catch {}
    }
    return lessons;
  } catch {
    return [];
  }
}
6127
+
6128
+ // src/lib/procedural-extractor.ts
6129
+ init_memories();
6130
+ init_database();
6131
// System prompt for extractProcedures: instructs the model to emit a raw JSON
// array of { title, steps[], failure_patterns[], when_to_use } procedures.
// NOTE: the string bytes are part of the API request — do not edit casually.
var SYSTEM_PROMPT3 = `You extract procedural knowledge from session transcripts \u2014 workflows, step sequences, and problem-solution patterns.

For each procedure found, output a JSON array of objects:
{
  "title": "short name for the workflow",
  "steps": [
    {"action": "what to do", "when_to_use": "activation context for this step"},
    {"action": "next step", "when_to_use": "activation context"}
  ],
  "failure_patterns": ["what to avoid and why"],
  "when_to_use": "overall activation context for the whole procedure"
}

Focus on:
1. Multi-step workflows that were completed successfully
2. Step sequences where order matters
3. Failure \u2192 recovery patterns (what went wrong, how it was fixed)
4. Problem-solution pairs (when X happens, do Y)

Only extract non-trivial procedures (3+ steps or genuinely useful patterns).
Output ONLY the JSON array.`;
6152
// Extract procedural workflows (ordered step chains plus failure warnings)
// from a session transcript via the Anthropic API and persist each as a set of
// "procedural" memories sharing one sequence_group. Best-effort by design:
// returns [] when ANTHROPIC_API_KEY is unset or on any failure.
async function extractProcedures(transcript, options) {
  const apiKey = process.env["ANTHROPIC_API_KEY"];
  if (!apiKey)
    return [];
  try {
    // Cap prompt size; the marker tells the model the transcript was cut.
    const truncated = transcript.length > 8000 ? transcript.slice(0, 8000) + `
[...truncated]` : transcript;
    const response = await fetch("https://api.anthropic.com/v1/messages", {
      method: "POST",
      headers: {
        "x-api-key": apiKey,
        "anthropic-version": "2023-06-01",
        "content-type": "application/json"
      },
      body: JSON.stringify({
        model: "claude-haiku-4-5-20251001",
        max_tokens: 2000,
        system: SYSTEM_PROMPT3,
        messages: [{ role: "user", content: `Extract procedures from this session:

${truncated}` }]
      })
    });
    if (!response.ok)
      return [];
    const data = await response.json();
    const text = data.content?.[0]?.text?.trim();
    if (!text)
      return [];
    // Models sometimes wrap the array in ```json fences despite the prompt;
    // strip them so a formatting slip doesn't silently discard every procedure.
    const procedures = JSON.parse(text.replace(/^```(?:json)?\s*/i, "").replace(/\s*```$/, ""));
    if (!Array.isArray(procedures))
      return [];
    for (const proc of procedures) {
      if (!proc.title || !proc.steps?.length)
        continue;
      // All memories of one procedure share a sequence_group so the whole
      // chain can be recalled in order.
      const sequenceGroup = `proc-${shortUuid()}`;
      for (let i = 0; i < proc.steps.length; i++) {
        const step = proc.steps[i];
        if (!step)
          continue;
        try {
          createMemory({
            key: `${sequenceGroup}-step-${i + 1}`,
            value: step.action,
            category: "procedural",
            scope: "shared",
            importance: 7,
            source: "auto",
            tags: ["procedure", "auto-extracted", proc.title.toLowerCase().replace(/\s+/g, "-")],
            when_to_use: step.when_to_use || proc.when_to_use,
            sequence_group: sequenceGroup,
            sequence_order: i + 1,
            agent_id: options?.agent_id,
            project_id: options?.project_id,
            session_id: options?.session_id
          });
        } catch {}
      }
      // Failure warnings go at the end of the chain (order 999) with higher
      // importance so they surface alongside the steps.
      for (const pattern of proc.failure_patterns || []) {
        try {
          createMemory({
            key: `${sequenceGroup}-warning-${shortUuid()}`,
            value: `WARNING: ${pattern}`,
            category: "procedural",
            scope: "shared",
            importance: 8,
            source: "auto",
            tags: ["procedure", "failure-pattern", "auto-extracted"],
            when_to_use: proc.when_to_use,
            sequence_group: sequenceGroup,
            sequence_order: 999,
            agent_id: options?.agent_id,
            project_id: options?.project_id,
            session_id: options?.session_id
          });
        } catch {}
      }
    }
    return procedures;
  } catch {
    return [];
  }
}
6235
+
6236
+ // src/lib/session-processor.ts
5443
6237
  var SESSION_EXTRACTION_USER_TEMPLATE = (chunk, sessionId) => `Extract memories from this session chunk (session: ${sessionId}):
5444
6238
 
5445
6239
  ${chunk}
@@ -5601,6 +6395,20 @@ async function processSessionJob(jobId, db) {
5601
6395
  }
5602
6396
  }
5603
6397
  result.memoriesExtracted = totalMemories;
6398
+ try {
6399
+ await extractToolLessons(job.transcript, {
6400
+ agent_id: job.agent_id ?? undefined,
6401
+ project_id: job.project_id ?? undefined,
6402
+ session_id: job.session_id
6403
+ });
6404
+ } catch {}
6405
+ try {
6406
+ await extractProcedures(job.transcript, {
6407
+ agent_id: job.agent_id ?? undefined,
6408
+ project_id: job.project_id ?? undefined,
6409
+ session_id: job.session_id
6410
+ });
6411
+ } catch {}
5604
6412
  try {
5605
6413
  if (result.errors.length > 0 && result.chunksProcessed === 0) {
5606
6414
  updateSessionJob(jobId, {
@@ -6981,6 +7789,66 @@ addRoute("GET", "/api/sessions/jobs/:id", (_req, _url, params) => {
6981
7789
  return json(job);
6982
7790
  });
6983
7791
  addRoute("GET", "/api/sessions/queue/stats", () => json(getSessionQueueStats()));
7792
// POST /api/tool-events — record a tool usage event. Requires tool_name;
// responds 201 with the persisted row.
addRoute("POST", "/api/tool-events", async (req) => {
  const payload = await readJson(req);
  if (!payload?.["tool_name"]) {
    return errorResponse("Missing required field: tool_name", 400);
  }
  return json(saveToolEvent(payload), 201);
});
7800
// GET /api/tool-events — list tool events filtered by query parameters.
addRoute("GET", "/api/tool-events", (_req, url) => {
  const q = getSearchParams(url);
  const filters = {};
  // Pass-through string filters: present and non-empty only.
  for (const key of ["tool_name", "agent_id", "project_id", "from_date", "to_date"]) {
    if (q[key])
      filters[key] = q[key];
  }
  // success is tri-state: absent/empty = no filter, "true"/"false" = boolean.
  if (q["success"] !== undefined && q["success"] !== "")
    filters.success = q["success"] === "true";
  if (q["limit"])
    filters.limit = parseInt(q["limit"], 10);
  if (q["offset"])
    filters.offset = parseInt(q["offset"], 10);
  const events = getToolEvents(filters);
  return json({ events, count: events.length });
});
7822
// GET /api/tool-insights/:tool_name — aggregate stats plus recent lessons
// for one tool, optionally scoped by ?project_id and capped by ?limit.
addRoute("GET", "/api/tool-insights/:tool_name", (_req, url, params) => {
  const query = getSearchParams(url);
  const toolName = decodeURIComponent(params["tool_name"]);
  const projectId = query["project_id"] || undefined;
  const lessonsLimit = query["limit"] ? parseInt(query["limit"], 10) : 20;
  return json({
    stats: getToolStats(toolName, projectId),
    lessons: getToolLessons(toolName, projectId, lessonsLimit)
  });
});
7831
// GET /api/profile/synthesize — build (or refresh with ?force_refresh=true)
// the synthesized user profile for an optional project/agent scope.
addRoute("GET", "/api/profile/synthesize", async (_req, url) => {
  const query = getSearchParams(url);
  const profile = await synthesizeProfile({
    project_id: query["project_id"] || undefined,
    agent_id: query["agent_id"] || undefined,
    force_refresh: query["force_refresh"] === "true"
  });
  if (profile) {
    return json(profile);
  }
  return json({ profile: null, message: "No preference/fact memories found to synthesize" });
});
7843
// GET /api/chains/:sequence_group — return the active memories of a
// procedural chain in sequence order.
addRoute("GET", "/api/chains/:sequence_group", (_req, _url, params) => {
  const sequenceGroup = decodeURIComponent(params["sequence_group"]);
  const db = getDatabase();
  const rows = db.query(`SELECT * FROM memories WHERE sequence_group = ? AND status = 'active' ORDER BY sequence_order ASC`).all(sequenceGroup);
  // One return covers both cases: an empty chain yields { chain: [], count: 0 }.
  return json({ chain: rows, count: rows.length, sequence_group: sequenceGroup });
});
6984
7852
  function startServer(port) {
6985
7853
  loadWebhooksFromDb();
6986
7854
  startSessionQueueWorker();