@illuma-ai/agents 1.1.28 → 1.3.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (272)
  1. package/dist/cjs/agents/AgentContext.cjs.map +1 -1
  2. package/dist/cjs/common/spawnPath.cjs +104 -0
  3. package/dist/cjs/common/spawnPath.cjs.map +1 -0
  4. package/dist/cjs/graphs/Graph.cjs +89 -45
  5. package/dist/cjs/graphs/Graph.cjs.map +1 -1
  6. package/dist/cjs/graphs/HandoffRegistry.cjs +47 -8
  7. package/dist/cjs/graphs/HandoffRegistry.cjs.map +1 -1
  8. package/dist/cjs/graphs/MultiAgentGraph.cjs +493 -267
  9. package/dist/cjs/graphs/MultiAgentGraph.cjs.map +1 -1
  10. package/dist/cjs/graphs/phases/flushLoop.cjs +214 -0
  11. package/dist/cjs/graphs/phases/flushLoop.cjs.map +1 -0
  12. package/dist/cjs/graphs/phases/memoryFlushPhase.cjs +102 -0
  13. package/dist/cjs/graphs/phases/memoryFlushPhase.cjs.map +1 -0
  14. package/dist/cjs/llm/bedrock/index.cjs +4 -3
  15. package/dist/cjs/llm/bedrock/index.cjs.map +1 -1
  16. package/dist/cjs/main.cjs +117 -0
  17. package/dist/cjs/main.cjs.map +1 -1
  18. package/dist/cjs/memory/citations.cjs +69 -0
  19. package/dist/cjs/memory/citations.cjs.map +1 -0
  20. package/dist/cjs/memory/compositeBackend.cjs +60 -0
  21. package/dist/cjs/memory/compositeBackend.cjs.map +1 -0
  22. package/dist/cjs/memory/constants.cjs +232 -0
  23. package/dist/cjs/memory/constants.cjs.map +1 -0
  24. package/dist/cjs/memory/embeddings.cjs +151 -0
  25. package/dist/cjs/memory/embeddings.cjs.map +1 -0
  26. package/dist/cjs/memory/factory.cjs +95 -0
  27. package/dist/cjs/memory/factory.cjs.map +1 -0
  28. package/dist/cjs/memory/migrate.cjs +81 -0
  29. package/dist/cjs/memory/migrate.cjs.map +1 -0
  30. package/dist/cjs/memory/mmr.cjs +138 -0
  31. package/dist/cjs/memory/mmr.cjs.map +1 -0
  32. package/dist/cjs/memory/paths.cjs +217 -0
  33. package/dist/cjs/memory/paths.cjs.map +1 -0
  34. package/dist/cjs/memory/pgvectorStore.cjs +225 -0
  35. package/dist/cjs/memory/pgvectorStore.cjs.map +1 -0
  36. package/dist/cjs/memory/recallTracking.cjs +98 -0
  37. package/dist/cjs/memory/recallTracking.cjs.map +1 -0
  38. package/dist/cjs/memory/schema.sql +51 -0
  39. package/dist/cjs/memory/temporalDecay.cjs +118 -0
  40. package/dist/cjs/memory/temporalDecay.cjs.map +1 -0
  41. package/dist/cjs/nodes/ApprovalGateNode.cjs +1 -1
  42. package/dist/cjs/nodes/ApprovalGateNode.cjs.map +1 -1
  43. package/dist/cjs/prompts/memoryFlushPrompt.cjs +49 -0
  44. package/dist/cjs/prompts/memoryFlushPrompt.cjs.map +1 -0
  45. package/dist/cjs/run.cjs +16 -3
  46. package/dist/cjs/run.cjs.map +1 -1
  47. package/dist/cjs/tools/AskUser.cjs +6 -1
  48. package/dist/cjs/tools/AskUser.cjs.map +1 -1
  49. package/dist/cjs/tools/BrowserTools.cjs +1 -1
  50. package/dist/cjs/tools/BrowserTools.cjs.map +1 -1
  51. package/dist/cjs/tools/ToolNode.cjs +127 -10
  52. package/dist/cjs/tools/ToolNode.cjs.map +1 -1
  53. package/dist/cjs/tools/approval/constants.cjs +2 -2
  54. package/dist/cjs/tools/approval/constants.cjs.map +1 -1
  55. package/dist/cjs/tools/memory/index.cjs +58 -0
  56. package/dist/cjs/tools/memory/index.cjs.map +1 -0
  57. package/dist/cjs/tools/memory/memoryAppendTool.cjs +69 -0
  58. package/dist/cjs/tools/memory/memoryAppendTool.cjs.map +1 -0
  59. package/dist/cjs/tools/memory/memoryGetTool.cjs +49 -0
  60. package/dist/cjs/tools/memory/memoryGetTool.cjs.map +1 -0
  61. package/dist/cjs/tools/memory/memorySearchTool.cjs +65 -0
  62. package/dist/cjs/tools/memory/memorySearchTool.cjs.map +1 -0
  63. package/dist/cjs/tools/memory/shared.cjs +106 -0
  64. package/dist/cjs/tools/memory/shared.cjs.map +1 -0
  65. package/dist/cjs/types/graph.cjs.map +1 -1
  66. package/dist/cjs/utils/childAgentContext.cjs +242 -0
  67. package/dist/cjs/utils/childAgentContext.cjs.map +1 -0
  68. package/dist/cjs/utils/errors.cjs +113 -0
  69. package/dist/cjs/utils/errors.cjs.map +1 -0
  70. package/dist/cjs/utils/events.cjs +36 -7
  71. package/dist/cjs/utils/events.cjs.map +1 -1
  72. package/dist/cjs/utils/finishReasons.cjs +44 -0
  73. package/dist/cjs/utils/finishReasons.cjs.map +1 -0
  74. package/dist/cjs/utils/llm.cjs.map +1 -1
  75. package/dist/cjs/utils/logging.cjs +34 -0
  76. package/dist/cjs/utils/logging.cjs.map +1 -0
  77. package/dist/cjs/utils/toolCallNormalization.cjs +250 -0
  78. package/dist/cjs/utils/toolCallNormalization.cjs.map +1 -0
  79. package/dist/esm/agents/AgentContext.mjs.map +1 -1
  80. package/dist/esm/common/spawnPath.mjs +95 -0
  81. package/dist/esm/common/spawnPath.mjs.map +1 -0
  82. package/dist/esm/graphs/Graph.mjs +89 -45
  83. package/dist/esm/graphs/Graph.mjs.map +1 -1
  84. package/dist/esm/graphs/HandoffRegistry.mjs +47 -8
  85. package/dist/esm/graphs/HandoffRegistry.mjs.map +1 -1
  86. package/dist/esm/graphs/MultiAgentGraph.mjs +493 -267
  87. package/dist/esm/graphs/MultiAgentGraph.mjs.map +1 -1
  88. package/dist/esm/graphs/phases/flushLoop.mjs +209 -0
  89. package/dist/esm/graphs/phases/flushLoop.mjs.map +1 -0
  90. package/dist/esm/graphs/phases/memoryFlushPhase.mjs +99 -0
  91. package/dist/esm/graphs/phases/memoryFlushPhase.mjs.map +1 -0
  92. package/dist/esm/llm/bedrock/index.mjs +4 -3
  93. package/dist/esm/llm/bedrock/index.mjs.map +1 -1
  94. package/dist/esm/main.mjs +21 -0
  95. package/dist/esm/main.mjs.map +1 -1
  96. package/dist/esm/memory/citations.mjs +64 -0
  97. package/dist/esm/memory/citations.mjs.map +1 -0
  98. package/dist/esm/memory/compositeBackend.mjs +58 -0
  99. package/dist/esm/memory/compositeBackend.mjs.map +1 -0
  100. package/dist/esm/memory/constants.mjs +198 -0
  101. package/dist/esm/memory/constants.mjs.map +1 -0
  102. package/dist/esm/memory/embeddings.mjs +148 -0
  103. package/dist/esm/memory/embeddings.mjs.map +1 -0
  104. package/dist/esm/memory/factory.mjs +93 -0
  105. package/dist/esm/memory/factory.mjs.map +1 -0
  106. package/dist/esm/memory/migrate.mjs +78 -0
  107. package/dist/esm/memory/migrate.mjs.map +1 -0
  108. package/dist/esm/memory/mmr.mjs +130 -0
  109. package/dist/esm/memory/mmr.mjs.map +1 -0
  110. package/dist/esm/memory/paths.mjs +207 -0
  111. package/dist/esm/memory/paths.mjs.map +1 -0
  112. package/dist/esm/memory/pgvectorStore.mjs +223 -0
  113. package/dist/esm/memory/pgvectorStore.mjs.map +1 -0
  114. package/dist/esm/memory/recallTracking.mjs +94 -0
  115. package/dist/esm/memory/recallTracking.mjs.map +1 -0
  116. package/dist/esm/memory/schema.sql +51 -0
  117. package/dist/esm/memory/temporalDecay.mjs +110 -0
  118. package/dist/esm/memory/temporalDecay.mjs.map +1 -0
  119. package/dist/esm/nodes/ApprovalGateNode.mjs +1 -1
  120. package/dist/esm/nodes/ApprovalGateNode.mjs.map +1 -1
  121. package/dist/esm/prompts/memoryFlushPrompt.mjs +44 -0
  122. package/dist/esm/prompts/memoryFlushPrompt.mjs.map +1 -0
  123. package/dist/esm/run.mjs +16 -3
  124. package/dist/esm/run.mjs.map +1 -1
  125. package/dist/esm/tools/AskUser.mjs +6 -1
  126. package/dist/esm/tools/AskUser.mjs.map +1 -1
  127. package/dist/esm/tools/BrowserTools.mjs +1 -1
  128. package/dist/esm/tools/BrowserTools.mjs.map +1 -1
  129. package/dist/esm/tools/ToolNode.mjs +128 -11
  130. package/dist/esm/tools/ToolNode.mjs.map +1 -1
  131. package/dist/esm/tools/approval/constants.mjs +2 -2
  132. package/dist/esm/tools/approval/constants.mjs.map +1 -1
  133. package/dist/esm/tools/memory/index.mjs +46 -0
  134. package/dist/esm/tools/memory/index.mjs.map +1 -0
  135. package/dist/esm/tools/memory/memoryAppendTool.mjs +67 -0
  136. package/dist/esm/tools/memory/memoryAppendTool.mjs.map +1 -0
  137. package/dist/esm/tools/memory/memoryGetTool.mjs +47 -0
  138. package/dist/esm/tools/memory/memoryGetTool.mjs.map +1 -0
  139. package/dist/esm/tools/memory/memorySearchTool.mjs +63 -0
  140. package/dist/esm/tools/memory/memorySearchTool.mjs.map +1 -0
  141. package/dist/esm/tools/memory/shared.mjs +98 -0
  142. package/dist/esm/tools/memory/shared.mjs.map +1 -0
  143. package/dist/esm/types/graph.mjs.map +1 -1
  144. package/dist/esm/utils/childAgentContext.mjs +237 -0
  145. package/dist/esm/utils/childAgentContext.mjs.map +1 -0
  146. package/dist/esm/utils/errors.mjs +109 -0
  147. package/dist/esm/utils/errors.mjs.map +1 -0
  148. package/dist/esm/utils/events.mjs +36 -8
  149. package/dist/esm/utils/events.mjs.map +1 -1
  150. package/dist/esm/utils/finishReasons.mjs +41 -0
  151. package/dist/esm/utils/finishReasons.mjs.map +1 -0
  152. package/dist/esm/utils/llm.mjs.map +1 -1
  153. package/dist/esm/utils/logging.mjs +31 -0
  154. package/dist/esm/utils/logging.mjs.map +1 -0
  155. package/dist/esm/utils/toolCallNormalization.mjs +247 -0
  156. package/dist/esm/utils/toolCallNormalization.mjs.map +1 -0
  157. package/dist/types/common/index.d.ts +1 -0
  158. package/dist/types/common/spawnPath.d.ts +59 -0
  159. package/dist/types/graphs/HandoffRegistry.d.ts +24 -7
  160. package/dist/types/graphs/MultiAgentGraph.d.ts +43 -23
  161. package/dist/types/graphs/phases/flushLoop.d.ts +106 -0
  162. package/dist/types/graphs/phases/memoryFlushPhase.d.ts +100 -0
  163. package/dist/types/index.d.ts +7 -0
  164. package/dist/types/memory/__tests__/mockBackend.d.ts +40 -0
  165. package/dist/types/memory/citations.d.ts +39 -0
  166. package/dist/types/memory/compositeBackend.d.ts +30 -0
  167. package/dist/types/memory/constants.d.ts +121 -0
  168. package/dist/types/memory/embeddings.d.ts +15 -0
  169. package/dist/types/memory/factory.d.ts +23 -0
  170. package/dist/types/memory/index.d.ts +21 -0
  171. package/dist/types/memory/migrate.d.ts +14 -0
  172. package/dist/types/memory/mmr.d.ts +50 -0
  173. package/dist/types/memory/paths.d.ts +107 -0
  174. package/dist/types/memory/pgvectorStore.d.ts +56 -0
  175. package/dist/types/memory/recallTracking.d.ts +30 -0
  176. package/dist/types/memory/temporalDecay.d.ts +53 -0
  177. package/dist/types/memory/types.d.ts +182 -0
  178. package/dist/types/prompts/memoryFlushPrompt.d.ts +54 -0
  179. package/dist/types/run.d.ts +1 -0
  180. package/dist/types/tools/AskUser.d.ts +1 -1
  181. package/dist/types/tools/BrowserTools.d.ts +2 -2
  182. package/dist/types/tools/approval/constants.d.ts +2 -2
  183. package/dist/types/tools/memory/index.d.ts +39 -0
  184. package/dist/types/tools/memory/memoryAppendTool.d.ts +27 -0
  185. package/dist/types/tools/memory/memoryGetTool.d.ts +22 -0
  186. package/dist/types/tools/memory/memorySearchTool.d.ts +22 -0
  187. package/dist/types/tools/memory/shared.d.ts +106 -0
  188. package/dist/types/types/graph.d.ts +10 -3
  189. package/dist/types/utils/childAgentContext.d.ts +99 -0
  190. package/dist/types/utils/errors.d.ts +37 -0
  191. package/dist/types/utils/events.d.ts +21 -0
  192. package/dist/types/utils/finishReasons.d.ts +32 -0
  193. package/dist/types/utils/index.d.ts +1 -0
  194. package/dist/types/utils/logging.d.ts +2 -0
  195. package/dist/types/utils/toolCallNormalization.d.ts +44 -0
  196. package/package.json +6 -4
  197. package/src/agents/AgentContext.ts +12 -4
  198. package/src/common/__tests__/enum.test.ts +4 -2
  199. package/src/common/__tests__/spawnPath.test.ts +110 -0
  200. package/src/common/index.ts +1 -0
  201. package/src/common/spawnPath.ts +101 -0
  202. package/src/graphs/Graph.ts +95 -61
  203. package/src/graphs/HandoffRegistry.ts +48 -17
  204. package/src/graphs/MultiAgentGraph.ts +588 -327
  205. package/src/graphs/__tests__/HandoffRegistry.test.ts +4 -1
  206. package/src/graphs/__tests__/multi-agent-delegate.test.ts +61 -16
  207. package/src/graphs/__tests__/multi-agent-edges.test.ts +4 -2
  208. package/src/graphs/__tests__/multi-agent-nested-subgraph.test.ts +221 -0
  209. package/src/graphs/__tests__/structured-output.integration.test.ts +212 -118
  210. package/src/graphs/contextManagement.e2e.test.ts +1 -1
  211. package/src/graphs/phases/__tests__/flushLoop.test.ts +264 -0
  212. package/src/graphs/phases/__tests__/memoryFlushPhase.test.ts +37 -0
  213. package/src/graphs/phases/__tests__/runMemoryFlush.test.ts +150 -0
  214. package/src/graphs/phases/flushLoop.ts +303 -0
  215. package/src/graphs/phases/memoryFlushPhase.ts +209 -0
  216. package/src/index.ts +30 -1
  217. package/src/llm/bedrock/index.ts +4 -5
  218. package/src/memory/__tests__/citations.test.ts +61 -0
  219. package/src/memory/__tests__/compositeBackend.test.ts +79 -0
  220. package/src/memory/__tests__/isolation.test.ts +206 -0
  221. package/src/memory/__tests__/mmr.test.ts +148 -0
  222. package/src/memory/__tests__/mockBackend.ts +161 -0
  223. package/src/memory/__tests__/paths.test.ts +168 -0
  224. package/src/memory/__tests__/recallTracking.test.ts +96 -0
  225. package/src/memory/__tests__/temporalDecay.test.ts +151 -0
  226. package/src/memory/citations.ts +80 -0
  227. package/src/memory/compositeBackend.ts +99 -0
  228. package/src/memory/constants.ts +229 -0
  229. package/src/memory/embeddings.ts +188 -0
  230. package/src/memory/factory.ts +111 -0
  231. package/src/memory/index.ts +46 -0
  232. package/src/memory/migrate.ts +116 -0
  233. package/src/memory/mmr.ts +161 -0
  234. package/src/memory/paths.ts +258 -0
  235. package/src/memory/pgvectorStore.ts +324 -0
  236. package/src/memory/recallTracking.ts +127 -0
  237. package/src/memory/schema.sql +51 -0
  238. package/src/memory/temporalDecay.ts +134 -0
  239. package/src/memory/types.ts +185 -0
  240. package/src/nodes/ApprovalGateNode.ts +4 -10
  241. package/src/nodes/__tests__/ApprovalGateNode.test.ts +11 -20
  242. package/src/prompts/memoryFlushPrompt.ts +78 -0
  243. package/src/run.ts +17 -6
  244. package/src/scripts/test-bedrock-handoff-autonomous.ts +56 -20
  245. package/src/specs/agent-handoffs-bedrock.integration.test.ts +8 -5
  246. package/src/specs/agent-handoffs.test.ts +8 -2
  247. package/src/tools/AskUser.ts +7 -2
  248. package/src/tools/BrowserTools.ts +3 -5
  249. package/src/tools/ToolNode.ts +150 -13
  250. package/src/tools/__tests__/ToolApproval.test.ts +22 -9
  251. package/src/tools/approval/__tests__/constants.test.ts +1 -1
  252. package/src/tools/approval/constants.ts +2 -2
  253. package/src/tools/memory/__tests__/memoryTools.test.ts +205 -0
  254. package/src/tools/memory/index.ts +96 -0
  255. package/src/tools/memory/memoryAppendTool.ts +101 -0
  256. package/src/tools/memory/memoryGetTool.ts +53 -0
  257. package/src/tools/memory/memorySearchTool.ts +80 -0
  258. package/src/tools/memory/shared.ts +169 -0
  259. package/src/tools/search/search.test.ts +6 -1
  260. package/src/types/graph.ts +10 -3
  261. package/src/utils/__tests__/childAgentContext.test.ts +217 -0
  262. package/src/utils/__tests__/errors.test.ts +136 -0
  263. package/src/utils/__tests__/finishReasons.test.ts +55 -0
  264. package/src/utils/__tests__/toolCallNormalization.test.ts +181 -0
  265. package/src/utils/childAgentContext.ts +259 -0
  266. package/src/utils/errors.ts +115 -0
  267. package/src/utils/events.ts +37 -7
  268. package/src/utils/finishReasons.ts +40 -0
  269. package/src/utils/index.ts +1 -0
  270. package/src/utils/llm.ts +0 -1
  271. package/src/utils/logging.ts +45 -8
  272. package/src/utils/toolCallNormalization.ts +271 -0
@@ -0,0 +1,223 @@
1
+ import { DEFAULT_MEMORY_TABLE, DEFAULT_MAX_SEARCH_RESULTS, DEFAULT_MIN_SCORE, HYBRID_VECTOR_WEIGHT, HYBRID_TEXT_WEIGHT } from './constants.mjs';
2
+ import { getMemoryEmbedder } from './embeddings.mjs';
3
+ import { applyMMRToMemoryHits } from './mmr.mjs';
4
+ import { applyTemporalDecayToHits } from './temporalDecay.mjs';
5
+ import { shouldIncludeCitations, decorateCitations } from './citations.mjs';
6
+ import { getTierForPath, assertWritablePath } from './paths.mjs';
7
+
8
/**
 * Guard used at the top of every backend entry point: a usable scope
 * must exist and carry a non-empty agentId. Throws (rather than
 * returning a flag) so callers cannot forget to check.
 */
function assertScope(scope) {
    if (scope && scope.agentId) {
        return; // scope is usable
    }
    throw new Error('MemoryScope { agentId } is required — agentId must be non-empty');
}
/** pgvector literal format: "[0.1,0.2,...]". */
function toVectorLiteral(vec) {
    const joined = vec.join(',');
    return '[' + joined + ']';
}
17
/**
 * Normalize the caller's userId for the layered read filter.
 *
 * The SQL filter is `(user_id IS NULL OR user_id = $2)`, so an empty
 * string from the caller must not match rows whose user_id was set.
 * Empty string, null, and undefined all collapse to `null`; pg evaluates
 * `$2 = null` as false, which is exactly right for isolated callers —
 * they see only agent-tier rows and nothing in the user tier.
 */
function normalizeCallerId(scope) {
    const { userId } = scope;
    if (userId == null || userId === '') {
        return null;
    }
    return String(userId);
}
32
/**
 * Postgres + pgvector implementation of the memory backend contract
 * (compiled ESM output; source is src/memory/pgvectorStore.ts).
 *
 * Scoping model (established by the WHERE clauses below): every read
 * applies `agent_id = $x AND (user_id IS NULL OR user_id = $y)`, so a
 * caller sees shared agent-tier rows (user_id IS NULL) plus only their
 * own user-tier rows. Other users' rows are filtered out in SQL.
 *
 * NOTE(review): `this.table` is interpolated directly into every SQL
 * string, not parameterized. Safe only if the table name comes from
 * trusted configuration — confirm callers never pass untrusted input.
 */
class PgvectorMemoryStore {
    // Discriminator consumed by composite/factory code to identify this backend.
    kind = 'vector';
    pool;     // pg Pool — connection source for every query below
    table;    // resolved table name (DEFAULT_MEMORY_TABLE unless overridden)
    embedder; // provider used for both query and content embeddings
    /**
     * @param opts.pool     required pg Pool
     * @param opts.table    optional table name override
     * @param opts.embedder optional embedding provider; defaults to getMemoryEmbedder()
     */
    constructor(opts) {
        this.pool = opts.pool;
        this.table = opts.table ?? DEFAULT_MEMORY_TABLE;
        this.embedder = opts.embedder ?? getMemoryEmbedder();
    }
    /**
     * Hybrid (vector + full-text) search over the caller's visible rows.
     *
     * Pipeline: embed query → score rows with a weighted blend of cosine
     * similarity and ts_rank → threshold by minScore → optional temporal
     * decay → optional MMR rerank → optional citation decoration.
     *
     * @param scope { agentId, userId? } — agentId required (assertScope throws)
     * @param query free-text query; blank/whitespace-only returns []
     * @param opts  { maxResults?, minScore?, temporalDecay?, mmr?, citations? }
     * @returns array of hit objects { id, path, content, createdAt, score, source, tier }
     */
    async search(scope, query, opts = {}) {
        assertScope(scope);
        const trimmed = query.trim();
        if (!trimmed)
            return [];
        // Clamp to at least 1 so a caller passing 0 still gets a usable LIMIT.
        const maxResults = Math.max(1, opts.maxResults ?? DEFAULT_MAX_SEARCH_RESULTS);
        const minScore = opts.minScore ?? DEFAULT_MIN_SCORE;
        const vector = await this.embedder.embed(trimmed);
        const vectorLiteral = toVectorLiteral(vector);
        const callerId = normalizeCallerId(scope);
        // [memory-layered-search] debug: layered scope filter
        // agent-tier rows (user_id IS NULL) + this caller's user-tier rows.
        // Another user's rows are invisible at the SQL layer.
        // NOTE(review): whitespace inside this SQL literal is reconstructed
        // from a diff rendering — semantically equivalent, byte layout may
        // differ from the published artifact.
        const sql = `
      WITH scored AS (
        SELECT
          id,
          path,
          content,
          created_at,
          user_id,
          (1 - (embedding <=> $1::vector)) AS vector_score,
          ts_rank(tsv, plainto_tsquery('english', $2)) AS text_score
        FROM ${this.table}
        WHERE agent_id = $3
          AND (user_id IS NULL OR user_id = $4)
      )
      SELECT
        id,
        path,
        content,
        created_at,
        user_id,
        (${HYBRID_VECTOR_WEIGHT} * vector_score + ${HYBRID_TEXT_WEIGHT} * text_score) AS score
      FROM scored
      WHERE (${HYBRID_VECTOR_WEIGHT} * vector_score + ${HYBRID_TEXT_WEIGHT} * text_score) >= $5
      ORDER BY score DESC
      LIMIT $6
    `;
        const { rows } = await this.pool.query(sql, [
            vectorLiteral,
            trimmed,
            scope.agentId,
            callerId,
            minScore,
            maxResults,
        ]);
        // Map DB rows to the backend's hit shape; tier is re-derived from the
        // path rather than trusted from storage.
        let hits = rows.map((row) => ({
            id: String(row.id),
            path: row.path,
            content: row.content,
            createdAt: new Date(row.created_at),
            score: Number(row.score),
            source: 'vector',
            tier: getTierForPath(row.path),
        }));
        // Phase 2: temporal decay (before MMR so diversity sees post-decay ranks)
        if (opts.temporalDecay?.enabled) {
            hits = applyTemporalDecayToHits(hits, opts.temporalDecay);
            // Re-sort because decay rescores hits out of SQL's score order.
            hits.sort((a, b) => b.score - a.score);
        }
        // Phase 2: MMR reranking
        if (opts.mmr?.enabled) {
            hits = applyMMRToMemoryHits(hits, opts.mmr);
        }
        // Phase 2: citations (decorate last — mutates content with Source: trailer)
        const citationsMode = opts.citations ?? 'auto';
        if (shouldIncludeCitations(citationsMode)) {
            hits = decorateCitations(hits, true);
        }
        return hits;
    }
    /**
     * Fetch a single memory document by exact path, newest row first.
     *
     * @param scope { agentId, userId? }
     * @param opts  { path, from?, lines? } — from is 1-based; lines caps the slice
     * @returns { path, text } (possibly a line slice) or null when absent
     */
    async get(scope, opts) {
        assertScope(scope);
        if (!opts.path)
            return null;
        const callerId = normalizeCallerId(scope);
        const tier = getTierForPath(opts.path);
        // Agent-tier rows always live under user_id=NULL. User-tier rows
        // always carry the caller's id. Querying with a precise predicate
        // is faster than leaving it open AND guarantees a user cannot
        // read another user's row even if they know the path by heart.
        const userClause = tier === 'agent' ? 'user_id IS NULL' : 'user_id = $3';
        const params = [scope.agentId, opts.path];
        if (tier === 'user')
            params.push(callerId);
        const sql = `
      SELECT content
      FROM ${this.table}
      WHERE agent_id = $1 AND path = $2 AND ${userClause}
      ORDER BY updated_at DESC
      LIMIT 1
    `;
        const { rows } = await this.pool.query(sql, params);
        if (rows.length === 0)
            return null;
        const text = String(rows[0].content ?? '');
        // Fast path: no slicing requested — return the whole document.
        if (opts.from == null && opts.lines == null) {
            return { path: opts.path, text };
        }
        // Line-slice view: convert 1-based `from` to a 0-based index, clamp
        // both bounds to non-negative, default `lines` to "rest of file".
        const allLines = text.split('\n');
        const fromIdx = Math.max(0, (opts.from ?? 1) - 1);
        const count = Math.max(0, opts.lines ?? allLines.length - fromIdx);
        const slice = allLines.slice(fromIdx, fromIdx + count).join('\n');
        return { path: opts.path, text: slice };
    }
    /**
     * Append content to a memory document, creating or merging the row.
     *
     * Routing: agent-tier paths write to the shared user_id=NULL row;
     * user-tier paths write to the caller's private row. Content
     * accumulates via blank-line concatenation and the embedding is
     * regenerated over the merged text so search stays consistent.
     *
     * @param scope { agentId, userId? }
     * @param input { path, content } — content must be non-empty after trim
     * @throws when the path fails assertWritablePath or content is empty
     */
    async append(scope, input) {
        assertScope(scope);
        // Whitelist + tier + scope-compatibility check in one call. Throws
        // with an actionable message for each failure mode.
        const descriptor = assertWritablePath(input.path, scope);
        const content = input.content.trim();
        if (!content) {
            throw new Error('memory_append content must be non-empty');
        }
        // Tier determines the row's user_id:
        //   agent tier → NULL (shared across all users)
        //   user tier  → the caller's id (assertWritablePath guarantees non-empty)
        const rowUserId = descriptor.tier === 'agent' ? null : String(scope.userId);
        // Provenance is recorded independently of the row key (see upsert note).
        const provenance = scope.userId != null ? String(scope.userId) : null;
        // Read the existing row (if any) for THIS tier so we can embed the
        // merged content. Agent-tier merges regardless of caller; user-tier
        // merges only within the caller's own row.
        const lookupSql = `
      SELECT content FROM ${this.table}
      WHERE agent_id = $1 AND path = $2 AND ${rowUserId === null ? 'user_id IS NULL' : 'user_id = $3'}
      LIMIT 1
    `;
        const lookupParams = rowUserId === null
            ? [scope.agentId, input.path]
            : [scope.agentId, input.path, rowUserId];
        const existing = await this.pool.query(lookupSql, lookupParams);
        const priorContent = existing.rows.length > 0 ? String(existing.rows[0].content ?? '') : '';
        // Strip trailing whitespace from the prior text, then join with a
        // blank line so appended sections stay visually separated.
        const mergedContent = priorContent
            ? `${priorContent.replace(/\s+$/, '')}\n\n${content}`
            : content;
        const vector = await this.embedder.embed(mergedContent);
        const vectorLiteral = toVectorLiteral(vector);
        // UPSERT on (agent_id, user_id, path) with NULLS NOT DISTINCT so
        // two NULL user_ids collide on the same agent+path (exactly one
        // agent-tier row) while per-user rows on the same path coexist.
        //
        // Provenance note: `last_user_id` always records WHO wrote the
        // latest append, even for agent-tier rows where the row's own
        // `user_id` stays NULL. That gives the admin UI an audit trail
        // ("agent-tier row last updated by Alice") without changing the
        // scoping semantics.
        const upsertSql = `
      INSERT INTO ${this.table} (agent_id, user_id, path, content, embedding, last_user_id, updated_at)
      VALUES ($1, $2, $3, $4, $5::vector, $6, NOW())
      ON CONFLICT (agent_id, user_id, path) DO UPDATE
        SET content = EXCLUDED.content,
            embedding = EXCLUDED.embedding,
            last_user_id = EXCLUDED.last_user_id,
            updated_at = NOW()
    `;
        await this.pool.query(upsertSql, [
            scope.agentId,
            rowUserId,
            input.path,
            mergedContent,
            vectorLiteral,
            provenance,
        ]);
    }
    /**
     * Liveness probe: issues `SELECT 1` and reports success or the
     * failure message. Never throws — errors are folded into the result.
     *
     * @returns { ok, backend: 'vector', error? }
     */
    async health() {
        try {
            await this.pool.query('SELECT 1');
            return { ok: true, backend: 'vector' };
        }
        catch (err) {
            return {
                ok: false,
                backend: 'vector',
                error: err instanceof Error ? err.message : String(err),
            };
        }
    }
}
221
+
222
+ export { PgvectorMemoryStore };
223
+ //# sourceMappingURL=pgvectorStore.mjs.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"pgvectorStore.mjs","sources":["../../../src/memory/pgvectorStore.ts"],"sourcesContent":["/**\n * Postgres + pgvector implementation of {@link MemoryBackend}.\n *\n * ## Scoping model (two-tier, layered)\n *\n * Every read query applies the layered filter\n *\n * WHERE agent_id = $1 AND (user_id IS NULL OR user_id = $2)\n *\n * so the caller sees:\n * - agent-tier rows (`user_id IS NULL`) — shared operational knowledge,\n * visible to every user of the agent\n * - their own user-tier rows (`user_id = <caller>`) — private per-user\n * personalization\n *\n * Another user's user-tier rows are invisible — the privacy boundary is\n * enforced in SQL, not just in the UI or route layer.\n *\n * ## Writes\n *\n * `append()` routes to a row based on the path's tier, resolved via\n * {@link assertWritablePath}:\n *\n * - `memory/agent/*` → stored with `user_id = NULL` regardless of\n * what scope the caller passed. Agent-tier content is inherently\n * shared; scoping it per-user would defeat the point.\n * - `memory/user/*` → stored with `user_id = scope.userId`. 
A missing\n * `scope.userId` throws — user-tier paths cannot be written from\n * isolated/autonomous contexts.\n *\n * UPSERT key is `(agent_id, user_id, path)` with `NULLS NOT DISTINCT`,\n * so each user gets their own `user/preferences.md` row and there is\n * exactly one `agent/playbook.md` row shared across the whole user base.\n * Content accumulates via `\\n\\n` concatenation on conflict, with the\n * embedding regenerated over the merged content so search stays\n * consistent.\n */\nimport type { Pool } from 'pg';\nimport {\n DEFAULT_MAX_SEARCH_RESULTS,\n DEFAULT_MEMORY_TABLE,\n DEFAULT_MIN_SCORE,\n HYBRID_TEXT_WEIGHT,\n HYBRID_VECTOR_WEIGHT,\n} from './constants';\nimport { getMemoryEmbedder, type EmbeddingProvider } from './embeddings';\nimport { applyMMRToMemoryHits } from './mmr';\nimport { applyTemporalDecayToHits } from './temporalDecay';\nimport { decorateCitations, shouldIncludeCitations } from './citations';\nimport { assertWritablePath, getTierForPath } from './paths';\nimport type {\n MemoryAppendInput,\n MemoryBackend,\n MemoryEntry,\n MemoryGetOptions,\n MemoryHealth,\n MemoryReadResult,\n MemoryScope,\n MemorySearchOptions,\n} from './types';\n\nexport interface PgvectorStoreOptions {\n pool: Pool;\n table?: string;\n embedder?: EmbeddingProvider;\n}\n\nfunction assertScope(scope: MemoryScope): void {\n if (!scope || !scope.agentId) {\n throw new Error(\n 'MemoryScope { agentId } is required — agentId must be non-empty'\n );\n }\n}\n\n/** pgvector literal format: \"[0.1,0.2,...]\". 
*/\nfunction toVectorLiteral(vec: number[]): string {\n return `[${vec.join(',')}]`;\n}\n\n/**\n * Normalize caller userId for the layered read filter.\n *\n * The SQL filter is `(user_id IS NULL OR user_id = $2)`, so an empty\n * string from the caller must not match rows whose user_id was set.\n * We coerce empty/null/undefined to `null`, and pg treats `$2 = null`\n * as `false` — which is exactly what we want for isolated callers:\n * they see only agent-tier rows and nothing in the user tier.\n */\nfunction normalizeCallerId(scope: MemoryScope): string | null {\n const raw = scope.userId;\n if (raw == null || raw === '') return null;\n return String(raw);\n}\n\nexport class PgvectorMemoryStore implements MemoryBackend {\n readonly kind = 'vector' as const;\n private pool: Pool;\n private table: string;\n private embedder: EmbeddingProvider;\n\n constructor(opts: PgvectorStoreOptions) {\n this.pool = opts.pool;\n this.table = opts.table ?? DEFAULT_MEMORY_TABLE;\n this.embedder = opts.embedder ?? getMemoryEmbedder();\n }\n\n async search(\n scope: MemoryScope,\n query: string,\n opts: MemorySearchOptions = {}\n ): Promise<MemoryEntry[]> {\n assertScope(scope);\n const trimmed = query.trim();\n if (!trimmed) return [];\n\n const maxResults = Math.max(\n 1,\n opts.maxResults ?? DEFAULT_MAX_SEARCH_RESULTS\n );\n const minScore = opts.minScore ?? 
DEFAULT_MIN_SCORE;\n\n const vector = await this.embedder.embed(trimmed);\n const vectorLiteral = toVectorLiteral(vector);\n const callerId = normalizeCallerId(scope);\n\n // [memory-layered-search] debug: layered scope filter\n // agent-tier rows (user_id IS NULL) + this caller's user-tier rows.\n // Another user's rows are invisible at the SQL layer.\n const sql = `\n WITH scored AS (\n SELECT\n id,\n path,\n content,\n created_at,\n user_id,\n (1 - (embedding <=> $1::vector)) AS vector_score,\n ts_rank(tsv, plainto_tsquery('english', $2)) AS text_score\n FROM ${this.table}\n WHERE agent_id = $3\n AND (user_id IS NULL OR user_id = $4)\n )\n SELECT\n id,\n path,\n content,\n created_at,\n user_id,\n (${HYBRID_VECTOR_WEIGHT} * vector_score + ${HYBRID_TEXT_WEIGHT} * text_score) AS score\n FROM scored\n WHERE (${HYBRID_VECTOR_WEIGHT} * vector_score + ${HYBRID_TEXT_WEIGHT} * text_score) >= $5\n ORDER BY score DESC\n LIMIT $6\n `;\n\n const { rows } = await this.pool.query(sql, [\n vectorLiteral,\n trimmed,\n scope.agentId,\n callerId,\n minScore,\n maxResults,\n ]);\n\n let hits: MemoryEntry[] = rows.map(\n (row: {\n id: string | number;\n path: string;\n content: string;\n created_at: Date;\n user_id: string | null;\n score: string | number;\n }): MemoryEntry => ({\n id: String(row.id),\n path: row.path,\n content: row.content,\n createdAt: new Date(row.created_at),\n score: Number(row.score),\n source: 'vector',\n tier: getTierForPath(row.path),\n })\n );\n\n // Phase 2: temporal decay (before MMR so diversity sees post-decay ranks)\n if (opts.temporalDecay?.enabled) {\n hits = applyTemporalDecayToHits(hits, opts.temporalDecay);\n hits.sort((a, b) => b.score - a.score);\n }\n\n // Phase 2: MMR reranking\n if (opts.mmr?.enabled) {\n hits = applyMMRToMemoryHits(hits, opts.mmr);\n }\n\n // Phase 2: citations (decorate last — mutates content with Source: trailer)\n const citationsMode = opts.citations ?? 
'auto';\n if (shouldIncludeCitations(citationsMode)) {\n hits = decorateCitations(hits, true);\n }\n\n return hits;\n }\n\n async get(\n scope: MemoryScope,\n opts: MemoryGetOptions\n ): Promise<MemoryReadResult | null> {\n assertScope(scope);\n if (!opts.path) return null;\n\n const callerId = normalizeCallerId(scope);\n const tier = getTierForPath(opts.path);\n\n // Agent-tier rows always live under user_id=NULL. User-tier rows\n // always carry the caller's id. Querying with a precise predicate\n // is faster than leaving it open AND guarantees a user cannot\n // read another user's row even if they know the path by heart.\n const userClause = tier === 'agent' ? 'user_id IS NULL' : 'user_id = $3';\n\n const params: unknown[] = [scope.agentId, opts.path];\n if (tier === 'user') params.push(callerId);\n\n const sql = `\n SELECT content\n FROM ${this.table}\n WHERE agent_id = $1 AND path = $2 AND ${userClause}\n ORDER BY updated_at DESC\n LIMIT 1\n `;\n const { rows } = await this.pool.query(sql, params);\n if (rows.length === 0) return null;\n\n const text = String(rows[0].content ?? '');\n if (opts.from == null && opts.lines == null) {\n return { path: opts.path, text };\n }\n\n const allLines = text.split('\\n');\n const fromIdx = Math.max(0, (opts.from ?? 1) - 1);\n const count = Math.max(0, opts.lines ?? allLines.length - fromIdx);\n const slice = allLines.slice(fromIdx, fromIdx + count).join('\\n');\n return { path: opts.path, text: slice };\n }\n\n async append(scope: MemoryScope, input: MemoryAppendInput): Promise<void> {\n assertScope(scope);\n // Whitelist + tier + scope-compatibility check in one call. 
Throws\n // with an actionable message for each failure mode.\n const descriptor = assertWritablePath(input.path, scope);\n const content = input.content.trim();\n if (!content) {\n throw new Error('memory_append content must be non-empty');\n }\n\n // Tier determines the row's user_id:\n // agent tier → NULL (shared across all users)\n // user tier → the caller's id (assertWritablePath guarantees non-empty)\n const rowUserId = descriptor.tier === 'agent' ? null : String(scope.userId);\n const provenance = scope.userId != null ? String(scope.userId) : null;\n\n // Read the existing row (if any) for THIS tier so we can embed the\n // merged content. Agent-tier merges regardless of caller; user-tier\n // merges only within the caller's own row.\n const lookupSql = `\n SELECT content FROM ${this.table}\n WHERE agent_id = $1 AND path = $2 AND ${rowUserId === null ? 'user_id IS NULL' : 'user_id = $3'}\n LIMIT 1\n `;\n const lookupParams: unknown[] =\n rowUserId === null\n ? [scope.agentId, input.path]\n : [scope.agentId, input.path, rowUserId];\n const existing = await this.pool.query(lookupSql, lookupParams);\n const priorContent: string =\n existing.rows.length > 0 ? String(existing.rows[0].content ?? '') : '';\n const mergedContent = priorContent\n ? `${priorContent.replace(/\\s+$/, '')}\\n\\n${content}`\n : content;\n\n const vector = await this.embedder.embed(mergedContent);\n const vectorLiteral = toVectorLiteral(vector);\n\n // UPSERT on (agent_id, user_id, path) with NULLS NOT DISTINCT so\n // two NULL user_ids collide on the same agent+path (exactly one\n // agent-tier row) while per-user rows on the same path coexist.\n //\n // Provenance note: `last_user_id` always records WHO wrote the\n // latest append, even for agent-tier rows where the row's own\n // `user_id` stays NULL. 
That gives the admin UI an audit trail\n // (\"agent-tier row last updated by Alice\") without changing the\n // scoping semantics.\n const upsertSql = `\n INSERT INTO ${this.table} (agent_id, user_id, path, content, embedding, last_user_id, updated_at)\n VALUES ($1, $2, $3, $4, $5::vector, $6, NOW())\n ON CONFLICT (agent_id, user_id, path) DO UPDATE\n SET content = EXCLUDED.content,\n embedding = EXCLUDED.embedding,\n last_user_id = EXCLUDED.last_user_id,\n updated_at = NOW()\n `;\n await this.pool.query(upsertSql, [\n scope.agentId,\n rowUserId,\n input.path,\n mergedContent,\n vectorLiteral,\n provenance,\n ]);\n }\n\n async health(): Promise<MemoryHealth> {\n try {\n await this.pool.query('SELECT 1');\n return { ok: true, backend: 'vector' };\n } catch (err) {\n return {\n ok: false,\n backend: 'vector',\n error: err instanceof Error ? err.message : String(err),\n };\n }\n }\n}\n"],"names":[],"mappings":";;;;;;;AAmEA,SAAS,WAAW,CAAC,KAAkB,EAAA;IACrC,IAAI,CAAC,KAAK,IAAI,CAAC,KAAK,CAAC,OAAO,EAAE;AAC5B,QAAA,MAAM,IAAI,KAAK,CACb,iEAAiE,CAClE;IACH;AACF;AAEA;AACA,SAAS,eAAe,CAAC,GAAa,EAAA;IACpC,OAAO,CAAA,CAAA,EAAI,GAAG,CAAC,IAAI,CAAC,GAAG,CAAC,GAAG;AAC7B;AAEA;;;;;;;;AAQG;AACH,SAAS,iBAAiB,CAAC,KAAkB,EAAA;AAC3C,IAAA,MAAM,GAAG,GAAG,KAAK,CAAC,MAAM;AACxB,IAAA,IAAI,GAAG,IAAI,IAAI,IAAI,GAAG,KAAK,EAAE;AAAE,QAAA,OAAO,IAAI;AAC1C,IAAA,OAAO,MAAM,CAAC,GAAG,CAAC;AACpB;MAEa,mBAAmB,CAAA;IACrB,IAAI,GAAG,QAAiB;AACzB,IAAA,IAAI;AACJ,IAAA,KAAK;AACL,IAAA,QAAQ;AAEhB,IAAA,WAAA,CAAY,IAA0B,EAAA;AACpC,QAAA,IAAI,CAAC,IAAI,GAAG,IAAI,CAAC,IAAI;QACrB,IAAI,CAAC,KAAK,GAAG,IAAI,CAAC,KAAK,IAAI,oBAAoB;QAC/C,IAAI,CAAC,QAAQ,GAAG,IAAI,CAAC,QAAQ,IAAI,iBAAiB,EAAE;IACtD;IAEA,MAAM,MAAM,CACV,KAAkB,EAClB,KAAa,EACb,OAA4B,EAAE,EAAA;QAE9B,WAAW,CAAC,KAAK,CAAC;AAClB,QAAA,MAAM,OAAO,GAAG,KAAK,CAAC,IAAI,EAAE;AAC5B,QAAA,IAAI,CAAC,OAAO;AAAE,YAAA,OAAO,EAAE;AAEvB,QAAA,MAAM,UAAU,GAAG,IAAI,CAAC,GAAG,CACzB,CAAC,EACD,IAAI,CAAC,UAAU,IAAI,0BAA0B,CAC9C;AACD,QAAA,MAAM,QAAQ,GAAG,IAAI,CAAC,QAAQ,IAAI,iBAAiB;QAEnD,MAAM,MAAM,GAAG,MAAM,IAAI
,CAAC,QAAQ,CAAC,KAAK,CAAC,OAAO,CAAC;AACjD,QAAA,MAAM,aAAa,GAAG,eAAe,CAAC,MAAM,CAAC;AAC7C,QAAA,MAAM,QAAQ,GAAG,iBAAiB,CAAC,KAAK,CAAC;;;;AAKzC,QAAA,MAAM,GAAG,GAAG;;;;;;;;;;AAUD,aAAA,EAAA,IAAI,CAAC,KAAK;;;;;;;;;;AAUd,SAAA,EAAA,oBAAoB,qBAAqB,kBAAkB,CAAA;;AAEvD,aAAA,EAAA,oBAAoB,qBAAqB,kBAAkB,CAAA;;;KAGrE;AAED,QAAA,MAAM,EAAE,IAAI,EAAE,GAAG,MAAM,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC,GAAG,EAAE;YAC1C,aAAa;YACb,OAAO;AACP,YAAA,KAAK,CAAC,OAAO;YACb,QAAQ;YACR,QAAQ;YACR,UAAU;AACX,SAAA,CAAC;QAEF,IAAI,IAAI,GAAkB,IAAI,CAAC,GAAG,CAChC,CAAC,GAOA,MAAmB;AAClB,YAAA,EAAE,EAAE,MAAM,CAAC,GAAG,CAAC,EAAE,CAAC;YAClB,IAAI,EAAE,GAAG,CAAC,IAAI;YACd,OAAO,EAAE,GAAG,CAAC,OAAO;AACpB,YAAA,SAAS,EAAE,IAAI,IAAI,CAAC,GAAG,CAAC,UAAU,CAAC;AACnC,YAAA,KAAK,EAAE,MAAM,CAAC,GAAG,CAAC,KAAK,CAAC;AACxB,YAAA,MAAM,EAAE,QAAQ;AAChB,YAAA,IAAI,EAAE,cAAc,CAAC,GAAG,CAAC,IAAI,CAAC;AAC/B,SAAA,CAAC,CACH;;AAGD,QAAA,IAAI,IAAI,CAAC,aAAa,EAAE,OAAO,EAAE;YAC/B,IAAI,GAAG,wBAAwB,CAAC,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC;AACzD,YAAA,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,KAAK,GAAG,CAAC,CAAC,KAAK,CAAC;QACxC;;AAGA,QAAA,IAAI,IAAI,CAAC,GAAG,EAAE,OAAO,EAAE;YACrB,IAAI,GAAG,oBAAoB,CAAC,IAAI,EAAE,IAAI,CAAC,GAAG,CAAC;QAC7C;;AAGA,QAAA,MAAM,aAAa,GAAG,IAAI,CAAC,SAAS,IAAI,MAAM;AAC9C,QAAA,IAAI,sBAAsB,CAAC,aAAa,CAAC,EAAE;AACzC,YAAA,IAAI,GAAG,iBAAiB,CAAC,IAAI,EAAE,IAAI,CAAC;QACtC;AAEA,QAAA,OAAO,IAAI;IACb;AAEA,IAAA,MAAM,GAAG,CACP,KAAkB,EAClB,IAAsB,EAAA;QAEtB,WAAW,CAAC,KAAK,CAAC;QAClB,IAAI,CAAC,IAAI,CAAC,IAAI;AAAE,YAAA,OAAO,IAAI;AAE3B,QAAA,MAAM,QAAQ,GAAG,iBAAiB,CAAC,KAAK,CAAC;QACzC,MAAM,IAAI,GAAG,cAAc,CAAC,IAAI,CAAC,IAAI,CAAC;;;;;AAMtC,QAAA,MAAM,UAAU,GAAG,IAAI,KAAK,OAAO,GAAG,iBAAiB,GAAG,cAAc;QAExE,MAAM,MAAM,GAAc,CAAC,KAAK,CAAC,OAAO,EAAE,IAAI,CAAC,IAAI,CAAC;QACpD,IAAI,IAAI,KAAK,MAAM;AAAE,YAAA,MAAM,CAAC,IAAI,CAAC,QAAQ,CAAC;AAE1C,QAAA,MAAM,GAAG,GAAG;;AAEH,WAAA,EAAA,IAAI,CAAC,KAAK;8CACuB,UAAU;;;KAGnD;AACD,QAAA,MAAM,EAAE,IAAI,EAAE,GAAG,MAAM,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC,GAAG,EAAE,MAAM,CAAC;AACnD,QAAA,IAAI,IAAI,CAAC,MAAM,KAAK,CAAC;AAAE,YAAA,OAAO,IAAI;AAElC,QAAA
,MAAM,IAAI,GAAG,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,OAAO,IAAI,EAAE,CAAC;AAC1C,QAAA,IAAI,IAAI,CAAC,IAAI,IAAI,IAAI,IAAI,IAAI,CAAC,KAAK,IAAI,IAAI,EAAE;YAC3C,OAAO,EAAE,IAAI,EAAE,IAAI,CAAC,IAAI,EAAE,IAAI,EAAE;QAClC;QAEA,MAAM,QAAQ,GAAG,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC;AACjC,QAAA,MAAM,OAAO,GAAG,IAAI,CAAC,GAAG,CAAC,CAAC,EAAE,CAAC,IAAI,CAAC,IAAI,IAAI,CAAC,IAAI,CAAC,CAAC;AACjD,QAAA,MAAM,KAAK,GAAG,IAAI,CAAC,GAAG,CAAC,CAAC,EAAE,IAAI,CAAC,KAAK,IAAI,QAAQ,CAAC,MAAM,GAAG,OAAO,CAAC;AAClE,QAAA,MAAM,KAAK,GAAG,QAAQ,CAAC,KAAK,CAAC,OAAO,EAAE,OAAO,GAAG,KAAK,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC;QACjE,OAAO,EAAE,IAAI,EAAE,IAAI,CAAC,IAAI,EAAE,IAAI,EAAE,KAAK,EAAE;IACzC;AAEA,IAAA,MAAM,MAAM,CAAC,KAAkB,EAAE,KAAwB,EAAA;QACvD,WAAW,CAAC,KAAK,CAAC;;;QAGlB,MAAM,UAAU,GAAG,kBAAkB,CAAC,KAAK,CAAC,IAAI,EAAE,KAAK,CAAC;QACxD,MAAM,OAAO,GAAG,KAAK,CAAC,OAAO,CAAC,IAAI,EAAE;QACpC,IAAI,CAAC,OAAO,EAAE;AACZ,YAAA,MAAM,IAAI,KAAK,CAAC,yCAAyC,CAAC;QAC5D;;;;QAKA,MAAM,SAAS,GAAG,UAAU,CAAC,IAAI,KAAK,OAAO,GAAG,IAAI,GAAG,MAAM,CAAC,KAAK,CAAC,MAAM,CAAC;QAC3E,MAAM,UAAU,GAAG,KAAK,CAAC,MAAM,IAAI,IAAI,GAAG,MAAM,CAAC,KAAK,CAAC,MAAM,CAAC,GAAG,IAAI;;;;AAKrE,QAAA,MAAM,SAAS,GAAG;AACM,0BAAA,EAAA,IAAI,CAAC,KAAK;8CACQ,SAAS,KAAK,IAAI,GAAG,iBAAiB,GAAG,cAAc;;KAEhG;AACD,QAAA,MAAM,YAAY,GAChB,SAAS,KAAK;cACV,CAAC,KAAK,CAAC,OAAO,EAAE,KAAK,CAAC,IAAI;AAC5B,cAAE,CAAC,KAAK,CAAC,OAAO,EAAE,KAAK,CAAC,IAAI,EAAE,SAAS,CAAC;AAC5C,QAAA,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC,SAAS,EAAE,YAAY,CAAC;AAC/D,QAAA,MAAM,YAAY,GAChB,QAAQ,CAAC,IAAI,CAAC,MAAM,GAAG,CAAC,GAAG,MAAM,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,OAAO,IAAI,EAAE,CAAC,GAAG,EAAE;QACxE,MAAM,aAAa,GAAG;AACpB,cAAE,CAAA,EAAG,YAAY,CAAC,OAAO,CAAC,MAAM,EAAE,EAAE,CAAC,CAAA,IAAA,EAAO,OAAO,CAAA;cACjD,OAAO;QAEX,MAAM,MAAM,GAAG,MAAM,IAAI,CAAC,QAAQ,CAAC,KAAK,CAAC,aAAa,CAAC;AACvD,QAAA,MAAM,aAAa,GAAG,eAAe,CAAC,MAAM,CAAC;;;;;;;;;;AAW7C,QAAA,MAAM,SAAS,GAAG;AACF,kBAAA,EAAA,IAAI,CAAC,KAAK,CAAA;;;;;;;KAOzB;AACD,QAAA,MAAM,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC,SAAS,EAAE;AAC/B,YAAA,KAAK,CAAC,OAAO;YACb,SAAS;AACT,YAAA,KAAK,CAAC,IAAI;YACV,a
AAa;YACb,aAAa;YACb,UAAU;AACX,SAAA,CAAC;IACJ;AAEA,IAAA,MAAM,MAAM,GAAA;AACV,QAAA,IAAI;YACF,MAAM,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC,UAAU,CAAC;YACjC,OAAO,EAAE,EAAE,EAAE,IAAI,EAAE,OAAO,EAAE,QAAQ,EAAE;QACxC;QAAE,OAAO,GAAG,EAAE;YACZ,OAAO;AACL,gBAAA,EAAE,EAAE,KAAK;AACT,gBAAA,OAAO,EAAE,QAAQ;AACjB,gBAAA,KAAK,EAAE,GAAG,YAAY,KAAK,GAAG,GAAG,CAAC,OAAO,GAAG,MAAM,CAAC,GAAG,CAAC;aACxD;QACH;IACF;AACD;;;;"}
@@ -0,0 +1,94 @@
1
+ import { createHash } from 'crypto';
2
+
3
+ /**
4
+ * Recall tracking — Phase 2.
5
+ *
6
+ * Lightweight adaptation of upstream
7
+ * `extensions/memory-core/src/short-term-promotion.ts::recordShortTermRecalls`.
8
+ * Upstream stores recalls in a JSON file under `memory/.dreams/`; we store
9
+ * them in a Postgres table `agent_memory_recalls`. Schema captures what the
10
+ * future Phase 3 dreaming/promotion algorithm will need:
11
+ * - which memory row was surfaced (`memory_id`)
12
+ * - the query that surfaced it (raw + SHA-256 hash for dedupe)
13
+ * - hybrid score at the time of recall
14
+ * - the day bucket (for per-day dedupe / frequency counting)
15
+ * - the recorded timestamp
16
+ *
17
+ * Best-effort: failures never block memory_search. The caller fires
18
+ * {@link RecallTracker.record} without awaiting the result and ignores errors.
19
+ */
20
+ const RECALL_TABLE = 'agent_memory_recalls';
21
+ function hashQuery(query) {
22
+ return createHash('sha256')
23
+ .update(query.trim().toLowerCase())
24
+ .digest('hex')
25
+ .slice(0, 32);
26
+ }
27
+ function dayBucket(nowMs) {
28
+ const d = new Date(nowMs);
29
+ const y = d.getUTCFullYear();
30
+ const m = String(d.getUTCMonth() + 1).padStart(2, '0');
31
+ const day = String(d.getUTCDate()).padStart(2, '0');
32
+ return `${y}-${m}-${day}`;
33
+ }
34
+ class PgvectorRecallTracker {
35
+ pool;
36
+ table;
37
+ constructor(pool, table = RECALL_TABLE) {
38
+ this.pool = pool;
39
+ this.table = table;
40
+ }
41
+ async migrate() {
42
+ // [recall-tracking] debug: create table + indexes if missing
43
+ await this.pool.query(`
44
+ CREATE TABLE IF NOT EXISTS ${this.table} (
45
+ id BIGSERIAL PRIMARY KEY,
46
+ agent_id TEXT NOT NULL,
47
+ memory_id TEXT NOT NULL,
48
+ memory_path TEXT NOT NULL,
49
+ query TEXT NOT NULL,
50
+ query_hash TEXT NOT NULL,
51
+ score DOUBLE PRECISION NOT NULL,
52
+ day_bucket TEXT NOT NULL,
53
+ recorded_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
54
+ )
55
+ `);
56
+ await this.pool.query(`CREATE INDEX IF NOT EXISTS ${this.table}_agent_day_idx ON ${this.table} (agent_id, day_bucket)`);
57
+ await this.pool.query(`CREATE INDEX IF NOT EXISTS ${this.table}_memory_idx ON ${this.table} (agent_id, memory_id)`);
58
+ await this.pool.query(`CREATE UNIQUE INDEX IF NOT EXISTS ${this.table}_dedupe_idx
59
+ ON ${this.table} (agent_id, memory_id, query_hash, day_bucket)`);
60
+ }
61
+ async record(params) {
62
+ if (!params.agentId || !params.query.trim() || params.hits.length === 0)
63
+ return;
64
+ const nowMs = params.nowMs ?? Date.now();
65
+ const qhash = hashQuery(params.query);
66
+ const bucket = dayBucket(nowMs);
67
+ // [recall-tracking] debug: upsert one row per (agent, memory, query, day)
68
+ // Upstream dedupes per-day per-query so repeated searches don't inflate counts.
69
+ const values = [];
70
+ const args = [];
71
+ let i = 1;
72
+ for (const hit of params.hits) {
73
+ values.push(`($${i++}, $${i++}, $${i++}, $${i++}, $${i++}, $${i++}, $${i++}, NOW())`);
74
+ args.push(params.agentId, hit.id, hit.path, params.query, qhash, hit.score, bucket);
75
+ }
76
+ const sql = `
77
+ INSERT INTO ${this.table}
78
+ (agent_id, memory_id, memory_path, query, query_hash, score, day_bucket, recorded_at)
79
+ VALUES ${values.join(', ')}
80
+ ON CONFLICT (agent_id, memory_id, query_hash, day_bucket) DO UPDATE
81
+ SET score = GREATEST(${this.table}.score, EXCLUDED.score),
82
+ recorded_at = NOW()
83
+ `;
84
+ await this.pool.query(sql, args);
85
+ }
86
+ }
87
+ /** No-op tracker — used when recall tracking is disabled or the backend isn't pgvector. */
88
+ class NullRecallTracker {
89
+ async record() { }
90
+ async migrate() { }
91
+ }
92
+
93
+ export { NullRecallTracker, PgvectorRecallTracker, RECALL_TABLE };
94
+ //# sourceMappingURL=recallTracking.mjs.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"recallTracking.mjs","sources":["../../../src/memory/recallTracking.ts"],"sourcesContent":["/**\n * Recall tracking — Phase 2.\n *\n * Lightweight adaptation of upstream\n * `extensions/memory-core/src/short-term-promotion.ts::recordShortTermRecalls`.\n * Upstream stores recalls in a JSON file under `memory/.dreams/`; we store\n * them in a Postgres table `agent_memory_recalls`. Schema captures what the\n * future Phase 3 dreaming/promotion algorithm will need:\n * - which memory row was surfaced (`memory_id`)\n * - the query that surfaced it (raw + SHA-256 hash for dedupe)\n * - hybrid score at the time of recall\n * - the day bucket (for per-day dedupe / frequency counting)\n * - the recorded timestamp\n *\n * Best-effort: failures never block memory_search. The caller fires\n * {@link RecallTracker.record} without awaiting the result and ignores errors.\n */\nimport { createHash } from 'crypto';\nimport type { Pool } from 'pg';\n\nexport interface RecallTracker {\n /** Record that the given memory ids were surfaced to the model for a query. */\n record(params: RecallRecordParams): Promise<void>;\n /** Backend-specific schema migration. Idempotent. 
*/\n migrate(): Promise<void>;\n}\n\nexport interface RecallRecordParams {\n agentId: string;\n query: string;\n hits: Array<{ id: string; path: string; score: number }>;\n nowMs?: number;\n}\n\nexport const RECALL_TABLE = 'agent_memory_recalls';\n\nfunction hashQuery(query: string): string {\n return createHash('sha256')\n .update(query.trim().toLowerCase())\n .digest('hex')\n .slice(0, 32);\n}\n\nfunction dayBucket(nowMs: number): string {\n const d = new Date(nowMs);\n const y = d.getUTCFullYear();\n const m = String(d.getUTCMonth() + 1).padStart(2, '0');\n const day = String(d.getUTCDate()).padStart(2, '0');\n return `${y}-${m}-${day}`;\n}\n\nexport class PgvectorRecallTracker implements RecallTracker {\n constructor(\n private readonly pool: Pool,\n private readonly table: string = RECALL_TABLE\n ) {}\n\n async migrate(): Promise<void> {\n // [recall-tracking] debug: create table + indexes if missing\n await this.pool.query(`\n CREATE TABLE IF NOT EXISTS ${this.table} (\n id BIGSERIAL PRIMARY KEY,\n agent_id TEXT NOT NULL,\n memory_id TEXT NOT NULL,\n memory_path TEXT NOT NULL,\n query TEXT NOT NULL,\n query_hash TEXT NOT NULL,\n score DOUBLE PRECISION NOT NULL,\n day_bucket TEXT NOT NULL,\n recorded_at TIMESTAMPTZ NOT NULL DEFAULT NOW()\n )\n `);\n await this.pool.query(\n `CREATE INDEX IF NOT EXISTS ${this.table}_agent_day_idx ON ${this.table} (agent_id, day_bucket)`\n );\n await this.pool.query(\n `CREATE INDEX IF NOT EXISTS ${this.table}_memory_idx ON ${this.table} (agent_id, memory_id)`\n );\n await this.pool.query(\n `CREATE UNIQUE INDEX IF NOT EXISTS ${this.table}_dedupe_idx\n ON ${this.table} (agent_id, memory_id, query_hash, day_bucket)`\n );\n }\n\n async record(params: RecallRecordParams): Promise<void> {\n if (!params.agentId || !params.query.trim() || params.hits.length === 0)\n return;\n const nowMs = params.nowMs ?? 
Date.now();\n const qhash = hashQuery(params.query);\n const bucket = dayBucket(nowMs);\n\n // [recall-tracking] debug: upsert one row per (agent, memory, query, day)\n // Upstream dedupes per-day per-query so repeated searches don't inflate counts.\n const values: string[] = [];\n const args: unknown[] = [];\n let i = 1;\n for (const hit of params.hits) {\n values.push(\n `($${i++}, $${i++}, $${i++}, $${i++}, $${i++}, $${i++}, $${i++}, NOW())`\n );\n args.push(\n params.agentId,\n hit.id,\n hit.path,\n params.query,\n qhash,\n hit.score,\n bucket\n );\n }\n const sql = `\n INSERT INTO ${this.table}\n (agent_id, memory_id, memory_path, query, query_hash, score, day_bucket, recorded_at)\n VALUES ${values.join(', ')}\n ON CONFLICT (agent_id, memory_id, query_hash, day_bucket) DO UPDATE\n SET score = GREATEST(${this.table}.score, EXCLUDED.score),\n recorded_at = NOW()\n `;\n await this.pool.query(sql, args);\n }\n}\n\n/** No-op tracker — used when recall tracking is disabled or the backend isn't pgvector. 
*/\nexport class NullRecallTracker implements RecallTracker {\n async record(): Promise<void> {}\n async migrate(): Promise<void> {}\n}\n"],"names":[],"mappings":";;AAAA;;;;;;;;;;;;;;;;AAgBG;AAkBI,MAAM,YAAY,GAAG;AAE5B,SAAS,SAAS,CAAC,KAAa,EAAA;IAC9B,OAAO,UAAU,CAAC,QAAQ;SACvB,MAAM,CAAC,KAAK,CAAC,IAAI,EAAE,CAAC,WAAW,EAAE;SACjC,MAAM,CAAC,KAAK;AACZ,SAAA,KAAK,CAAC,CAAC,EAAE,EAAE,CAAC;AACjB;AAEA,SAAS,SAAS,CAAC,KAAa,EAAA;AAC9B,IAAA,MAAM,CAAC,GAAG,IAAI,IAAI,CAAC,KAAK,CAAC;AACzB,IAAA,MAAM,CAAC,GAAG,CAAC,CAAC,cAAc,EAAE;AAC5B,IAAA,MAAM,CAAC,GAAG,MAAM,CAAC,CAAC,CAAC,WAAW,EAAE,GAAG,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,GAAG,CAAC;AACtD,IAAA,MAAM,GAAG,GAAG,MAAM,CAAC,CAAC,CAAC,UAAU,EAAE,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,GAAG,CAAC;AACnD,IAAA,OAAO,GAAG,CAAC,CAAA,CAAA,EAAI,CAAC,CAAA,CAAA,EAAI,GAAG,EAAE;AAC3B;MAEa,qBAAqB,CAAA;AAEb,IAAA,IAAA;AACA,IAAA,KAAA;IAFnB,WAAA,CACmB,IAAU,EACV,KAAA,GAAgB,YAAY,EAAA;QAD5B,IAAA,CAAA,IAAI,GAAJ,IAAI;QACJ,IAAA,CAAA,KAAK,GAAL,KAAK;IACrB;AAEH,IAAA,MAAM,OAAO,GAAA;;AAEX,QAAA,MAAM,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC;AACS,iCAAA,EAAA,IAAI,CAAC,KAAK,CAAA;;;;;;;;;;;AAWxC,IAAA,CAAA,CAAC;AACF,QAAA,MAAM,IAAI,CAAC,IAAI,CAAC,KAAK,CACnB,CAAA,2BAAA,EAA8B,IAAI,CAAC,KAAK,qBAAqB,IAAI,CAAC,KAAK,CAAA,uBAAA,CAAyB,CACjG;AACD,QAAA,MAAM,IAAI,CAAC,IAAI,CAAC,KAAK,CACnB,CAAA,2BAAA,EAA8B,IAAI,CAAC,KAAK,kBAAkB,IAAI,CAAC,KAAK,CAAA,sBAAA,CAAwB,CAC7F;QACD,MAAM,IAAI,CAAC,IAAI,CAAC,KAAK,CACnB,CAAA,kCAAA,EAAqC,IAAI,CAAC,KAAK,CAAA;AACzC,UAAA,EAAA,IAAI,CAAC,KAAK,CAAA,8CAAA,CAAgD,CACjE;IACH;IAEA,MAAM,MAAM,CAAC,MAA0B,EAAA;AACrC,QAAA,IAAI,CAAC,MAAM,CAAC,OAAO,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,EAAE,IAAI,MAAM,CAAC,IAAI,CAAC,MAAM,KAAK,CAAC;YACrE;QACF,MAAM,KAAK,GAAG,MAAM,CAAC,KAAK,IAAI,IAAI,CAAC,GAAG,EAAE;QACxC,MAAM,KAAK,GAAG,SAAS,CAAC,MAAM,CAAC,KAAK,CAAC;AACrC,QAAA,MAAM,MAAM,GAAG,SAAS,CAAC,KAAK,CAAC;;;QAI/B,MAAM,MAAM,GAAa,EAAE;QAC3B,MAAM,IAAI,GAAc,EAAE;QAC1B,IAAI,CAAC,GAAG,CAAC;AACT,QAAA,KAAK,MAAM,GAAG,IAAI,MAAM,CAAC,IAAI,EAAE;YAC7B,MAAM,CAAC,IAAI,CACT,CAAA,EAAA,EAAK,CAAC,EAAE,CAAA,GAAA,EAAM,CAAC,EAAE,CAAA,GAA
A,EAAM,CAAC,EAAE,CAAA,GAAA,EAAM,CAAC,EAAE,CAAA,GAAA,EAAM,CAAC,EAAE,CAAA,GAAA,EAAM,CAAC,EAAE,CAAA,GAAA,EAAM,CAAC,EAAE,CAAA,QAAA,CAAU,CACzE;YACD,IAAI,CAAC,IAAI,CACP,MAAM,CAAC,OAAO,EACd,GAAG,CAAC,EAAE,EACN,GAAG,CAAC,IAAI,EACR,MAAM,CAAC,KAAK,EACZ,KAAK,EACL,GAAG,CAAC,KAAK,EACT,MAAM,CACP;QACH;AACA,QAAA,MAAM,GAAG,GAAG;AACI,kBAAA,EAAA,IAAI,CAAC,KAAK;;AAEf,aAAA,EAAA,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC;;AAEG,iCAAA,EAAA,IAAI,CAAC,KAAK,CAAA;;KAExC;QACD,MAAM,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC,GAAG,EAAE,IAAI,CAAC;IAClC;AACD;AAED;MACa,iBAAiB,CAAA;IAC5B,MAAM,MAAM,GAAA,EAAmB;IAC/B,MAAM,OAAO,GAAA,EAAmB;AACjC;;;;"}
@@ -0,0 +1,51 @@
1
+ -- Autonomous memory — Postgres schema (v2: layered tier scoping).
2
+ --
3
+ -- Two-tier canonical-document model:
4
+ --
5
+ -- memory/agent/* → shared across every user of the agent (user_id = NULL)
6
+ -- memory/user/* → private to a specific caller (user_id = <callerId>)
7
+ --
8
+ -- Uniqueness key is `(agent_id, user_id, path)` with NULLS NOT DISTINCT
9
+ -- so two NULL user_id rows on the same path collide (exactly-one
10
+ -- agent-tier row per path) while per-user rows on the same path coexist
11
+ -- (one row per user for user-tier paths).
12
+ --
13
+ -- The read path filters with `(user_id IS NULL OR user_id = $caller)`,
14
+ -- so callers see agent-tier rows + only their own user-tier rows —
15
+ -- never another user's private memory. Enforcement is in SQL, not just
16
+ -- the UI.
17
+ --
18
+ -- NULLS NOT DISTINCT requires PostgreSQL 15+. Host's production
19
+ -- pgvector image is PG16, so this is safe. The migration in
20
+ -- `migrate.ts` will drop the legacy `(agent_id, path)` constraint
21
+ -- before creating the new one if it exists on an older database.
22
+
23
+ CREATE EXTENSION IF NOT EXISTS vector;
24
+
25
+ CREATE TABLE IF NOT EXISTS agent_memories (
26
+ id BIGSERIAL PRIMARY KEY,
27
+ agent_id TEXT NOT NULL,
28
+ user_id TEXT, -- NULL = agent-tier, shared
29
+ path TEXT NOT NULL,
30
+ content TEXT NOT NULL,
31
+ embedding VECTOR(1024),
32
+ tsv TSVECTOR GENERATED ALWAYS AS (to_tsvector('english', content)) STORED,
33
+ created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
34
+ updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
35
+ last_user_id TEXT, -- latest session to append
36
+ CONSTRAINT agent_memories_agent_user_path_uq
37
+ UNIQUE NULLS NOT DISTINCT (agent_id, user_id, path)
38
+ );
39
+
40
+ -- Primary lookup index for list/search/generate-prompt — matches the
41
+ -- exact filter shape of every read query.
42
+ CREATE INDEX IF NOT EXISTS agent_memories_scope_idx
43
+ ON agent_memories (agent_id, user_id, path);
44
+
45
+ -- Vector ANN index — untouched by the scoping change.
46
+ CREATE INDEX IF NOT EXISTS agent_memories_vector_idx
47
+ ON agent_memories USING ivfflat (embedding vector_cosine_ops) WITH (lists = 100);
48
+
49
+ -- Full-text GIN index — untouched.
50
+ CREATE INDEX IF NOT EXISTS agent_memories_tsv_idx
51
+ ON agent_memories USING GIN (tsv);
@@ -0,0 +1,110 @@
1
+ /**
2
+ * Temporal decay — Phase 2.
3
+ *
4
+ * Ported from upstream `extensions/memory-core/src/memory/temporal-decay.ts`.
5
+ * Ages dated memory files (`memory/YYYY-MM-DD.md`) using exponential decay
6
+ * `multiplier = exp(-ln(2) / halfLifeDays * ageInDays)`. At half-life, the
7
+ * score is exactly halved.
8
+ *
9
+ * Evergreen files (MEMORY.md, memory/topics.md, any non-dated file inside
10
+ * memory/) do NOT decay — they represent durable knowledge and should stay
11
+ * hot regardless of age. This mirrors upstream's `isEvergreenMemoryPath`.
12
+ *
13
+ * Since our pgvector rows carry `createdAt`, we don't need filesystem stat
14
+ * fallback — the row timestamp is authoritative for any file without a
15
+ * date in the path.
16
+ */
17
+ const DEFAULT_TEMPORAL_DECAY_CONFIG = {
18
+ enabled: false,
19
+ halfLifeDays: 30,
20
+ };
21
+ const DAY_MS = 24 * 60 * 60 * 1000;
22
+ const DATED_MEMORY_PATH_RE = /(?:^|\/)memory\/(\d{4})-(\d{2})-(\d{2})\.md$/;
23
+ function toDecayLambda(halfLifeDays) {
24
+ if (!Number.isFinite(halfLifeDays) || halfLifeDays <= 0)
25
+ return 0;
26
+ return Math.LN2 / halfLifeDays;
27
+ }
28
+ function calculateTemporalDecayMultiplier(params) {
29
+ const lambda = toDecayLambda(params.halfLifeDays);
30
+ const age = Math.max(0, params.ageInDays);
31
+ if (lambda <= 0 || !Number.isFinite(age))
32
+ return 1;
33
+ return Math.exp(-lambda * age);
34
+ }
35
+ function applyTemporalDecayToScore(params) {
36
+ return params.score * calculateTemporalDecayMultiplier(params);
37
+ }
38
+ function normalizePath(p) {
39
+ return (p ?? '').replace(/\\/g, '/').replace(/^\.\//, '');
40
+ }
41
+ /** Parse a date out of `memory/YYYY-MM-DD.md` — returns null on non-match or invalid date. */
42
+ function parseMemoryDateFromPath(filePath) {
43
+ const m = DATED_MEMORY_PATH_RE.exec(normalizePath(filePath));
44
+ if (!m)
45
+ return null;
46
+ const y = Number(m[1]);
47
+ const mo = Number(m[2]);
48
+ const d = Number(m[3]);
49
+ if (!Number.isInteger(y) || !Number.isInteger(mo) || !Number.isInteger(d))
50
+ return null;
51
+ const ts = Date.UTC(y, mo - 1, d);
52
+ const parsed = new Date(ts);
53
+ if (parsed.getUTCFullYear() !== y ||
54
+ parsed.getUTCMonth() !== mo - 1 ||
55
+ parsed.getUTCDate() !== d) {
56
+ return null;
57
+ }
58
+ return parsed;
59
+ }
60
+ /**
61
+ * Evergreen = durable knowledge file that should not decay.
62
+ * - `MEMORY.md` / `memory.md` at root
63
+ * - anything inside `memory/` that is NOT a dated `YYYY-MM-DD.md` file
64
+ */
65
+ function isEvergreenMemoryPath(filePath) {
66
+ const n = normalizePath(filePath);
67
+ if (n === 'MEMORY.md' || n === 'memory.md')
68
+ return true;
69
+ if (!n.startsWith('memory/'))
70
+ return false;
71
+ return !DATED_MEMORY_PATH_RE.test(n);
72
+ }
73
+ function ageInDays(timestamp, nowMs) {
74
+ return Math.max(0, nowMs - timestamp.getTime()) / DAY_MS;
75
+ }
76
+ /**
77
+ * Apply temporal decay to a list of memory hits.
78
+ *
79
+ * Priority for the effective timestamp:
80
+ * 1. Dated path (`memory/YYYY-MM-DD.md`) — use the date in the path
81
+ * 2. Otherwise, if the path is evergreen — NO decay
82
+ * 3. Otherwise, use the row's `createdAt`
83
+ */
84
+ function applyTemporalDecayToHits(hits, config = {}, nowMs = Date.now()) {
85
+ const merged = { ...DEFAULT_TEMPORAL_DECAY_CONFIG, ...config };
86
+ if (!merged.enabled)
87
+ return [...hits];
88
+ return hits.map((h) => {
89
+ const datedTs = parseMemoryDateFromPath(h.path);
90
+ let ts = datedTs;
91
+ if (!ts) {
92
+ if (isEvergreenMemoryPath(h.path))
93
+ return h;
94
+ ts = h.createdAt ?? null;
95
+ }
96
+ if (!ts)
97
+ return h;
98
+ return {
99
+ ...h,
100
+ score: applyTemporalDecayToScore({
101
+ score: h.score,
102
+ ageInDays: ageInDays(ts, nowMs),
103
+ halfLifeDays: merged.halfLifeDays,
104
+ }),
105
+ };
106
+ });
107
+ }
108
+
109
+ export { DEFAULT_TEMPORAL_DECAY_CONFIG, applyTemporalDecayToHits, applyTemporalDecayToScore, calculateTemporalDecayMultiplier, isEvergreenMemoryPath, parseMemoryDateFromPath, toDecayLambda };
110
+ //# sourceMappingURL=temporalDecay.mjs.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"temporalDecay.mjs","sources":["../../../src/memory/temporalDecay.ts"],"sourcesContent":["/**\n * Temporal decay — Phase 2.\n *\n * Ported from upstream `extensions/memory-core/src/memory/temporal-decay.ts`.\n * Ages dated memory files (`memory/YYYY-MM-DD.md`) using exponential decay\n * `multiplier = exp(-ln(2) / halfLifeDays * ageInDays)`. At half-life, the\n * score is exactly halved.\n *\n * Evergreen files (MEMORY.md, memory/topics.md, any non-dated file inside\n * memory/) do NOT decay — they represent durable knowledge and should stay\n * hot regardless of age. This mirrors upstream's `isEvergreenMemoryPath`.\n *\n * Since our pgvector rows carry `createdAt`, we don't need filesystem stat\n * fallback — the row timestamp is authoritative for any file without a\n * date in the path.\n */\n\nexport interface TemporalDecayConfig {\n enabled: boolean;\n halfLifeDays: number;\n}\n\nexport const DEFAULT_TEMPORAL_DECAY_CONFIG: TemporalDecayConfig = {\n enabled: false,\n halfLifeDays: 30,\n};\n\nconst DAY_MS = 24 * 60 * 60 * 1000;\nconst DATED_MEMORY_PATH_RE = /(?:^|\\/)memory\\/(\\d{4})-(\\d{2})-(\\d{2})\\.md$/;\n\nexport function toDecayLambda(halfLifeDays: number): number {\n if (!Number.isFinite(halfLifeDays) || halfLifeDays <= 0) return 0;\n return Math.LN2 / halfLifeDays;\n}\n\nexport function calculateTemporalDecayMultiplier(params: {\n ageInDays: number;\n halfLifeDays: number;\n}): number {\n const lambda = toDecayLambda(params.halfLifeDays);\n const age = Math.max(0, params.ageInDays);\n if (lambda <= 0 || !Number.isFinite(age)) return 1;\n return Math.exp(-lambda * age);\n}\n\nexport function applyTemporalDecayToScore(params: {\n score: number;\n ageInDays: number;\n halfLifeDays: number;\n}): number {\n return params.score * calculateTemporalDecayMultiplier(params);\n}\n\nfunction normalizePath(p: string): string {\n return (p ?? 
'').replace(/\\\\/g, '/').replace(/^\\.\\//, '');\n}\n\n/** Parse a date out of `memory/YYYY-MM-DD.md` — returns null on non-match or invalid date. */\nexport function parseMemoryDateFromPath(filePath: string): Date | null {\n const m = DATED_MEMORY_PATH_RE.exec(normalizePath(filePath));\n if (!m) return null;\n const y = Number(m[1]);\n const mo = Number(m[2]);\n const d = Number(m[3]);\n if (!Number.isInteger(y) || !Number.isInteger(mo) || !Number.isInteger(d))\n return null;\n const ts = Date.UTC(y, mo - 1, d);\n const parsed = new Date(ts);\n if (\n parsed.getUTCFullYear() !== y ||\n parsed.getUTCMonth() !== mo - 1 ||\n parsed.getUTCDate() !== d\n ) {\n return null;\n }\n return parsed;\n}\n\n/**\n * Evergreen = durable knowledge file that should not decay.\n * - `MEMORY.md` / `memory.md` at root\n * - anything inside `memory/` that is NOT a dated `YYYY-MM-DD.md` file\n */\nexport function isEvergreenMemoryPath(filePath: string): boolean {\n const n = normalizePath(filePath);\n if (n === 'MEMORY.md' || n === 'memory.md') return true;\n if (!n.startsWith('memory/')) return false;\n return !DATED_MEMORY_PATH_RE.test(n);\n}\n\nfunction ageInDays(timestamp: Date, nowMs: number): number {\n return Math.max(0, nowMs - timestamp.getTime()) / DAY_MS;\n}\n\nexport interface DecayCandidate {\n path: string;\n score: number;\n createdAt?: Date;\n}\n\n/**\n * Apply temporal decay to a list of memory hits.\n *\n * Priority for the effective timestamp:\n * 1. Dated path (`memory/YYYY-MM-DD.md`) — use the date in the path\n * 2. Otherwise, if the path is evergreen — NO decay\n * 3. 
Otherwise, use the row's `createdAt`\n */\nexport function applyTemporalDecayToHits<T extends DecayCandidate>(\n hits: T[],\n config: Partial<TemporalDecayConfig> = {},\n nowMs: number = Date.now()\n): T[] {\n const merged = { ...DEFAULT_TEMPORAL_DECAY_CONFIG, ...config };\n if (!merged.enabled) return [...hits];\n\n return hits.map((h) => {\n const datedTs = parseMemoryDateFromPath(h.path);\n let ts: Date | null = datedTs;\n if (!ts) {\n if (isEvergreenMemoryPath(h.path)) return h;\n ts = h.createdAt ?? null;\n }\n if (!ts) return h;\n return {\n ...h,\n score: applyTemporalDecayToScore({\n score: h.score,\n ageInDays: ageInDays(ts, nowMs),\n halfLifeDays: merged.halfLifeDays,\n }),\n };\n });\n}\n"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;;AAeG;AAOI,MAAM,6BAA6B,GAAwB;AAChE,IAAA,OAAO,EAAE,KAAK;AACd,IAAA,YAAY,EAAE,EAAE;;AAGlB,MAAM,MAAM,GAAG,EAAE,GAAG,EAAE,GAAG,EAAE,GAAG,IAAI;AAClC,MAAM,oBAAoB,GAAG,8CAA8C;AAErE,SAAU,aAAa,CAAC,YAAoB,EAAA;IAChD,IAAI,CAAC,MAAM,CAAC,QAAQ,CAAC,YAAY,CAAC,IAAI,YAAY,IAAI,CAAC;AAAE,QAAA,OAAO,CAAC;AACjE,IAAA,OAAO,IAAI,CAAC,GAAG,GAAG,YAAY;AAChC;AAEM,SAAU,gCAAgC,CAAC,MAGhD,EAAA;IACC,MAAM,MAAM,GAAG,aAAa,CAAC,MAAM,CAAC,YAAY,CAAC;AACjD,IAAA,MAAM,GAAG,GAAG,IAAI,CAAC,GAAG,CAAC,CAAC,EAAE,MAAM,CAAC,SAAS,CAAC;IACzC,IAAI,MAAM,IAAI,CAAC,IAAI,CAAC,MAAM,CAAC,QAAQ,CAAC,GAAG,CAAC;AAAE,QAAA,OAAO,CAAC;IAClD,OAAO,IAAI,CAAC,GAAG,CAAC,CAAC,MAAM,GAAG,GAAG,CAAC;AAChC;AAEM,SAAU,yBAAyB,CAAC,MAIzC,EAAA;IACC,OAAO,MAAM,CAAC,KAAK,GAAG,gCAAgC,CAAC,MAAM,CAAC;AAChE;AAEA,SAAS,aAAa,CAAC,CAAS,EAAA;AAC9B,IAAA,OAAO,CAAC,CAAC,IAAI,EAAE,EAAE,OAAO,CAAC,KAAK,EAAE,GAAG,CAAC,CAAC,OAAO,CAAC,OAAO,EAAE,EAAE,CAAC;AAC3D;AAEA;AACM,SAAU,uBAAuB,CAAC,QAAgB,EAAA;IACtD,MAAM,CAAC,GAAG,oBAAoB,CAAC,IAAI,CAAC,aAAa,CAAC,QAAQ,CAAC,CAAC;AAC5D,IAAA,IAAI,CAAC,CAAC;AAAE,QAAA,OAAO,IAAI;IACnB,MAAM,CAAC,GAAG,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;IACtB,MAAM,EAAE,GAAG,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;IACvB,MAAM,CAAC,GAAG,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;IACtB,IAAI,CAAC,MAAM,CAAC,SAAS,CAAC,CAAC,CAAC,IAAI,CAAC,
MAAM,CAAC,SAAS,CAAC,EAAE,CAAC,IAAI,CAAC,MAAM,CAAC,SAAS,CAAC,CAAC,CAAC;AACvE,QAAA,OAAO,IAAI;AACb,IAAA,MAAM,EAAE,GAAG,IAAI,CAAC,GAAG,CAAC,CAAC,EAAE,EAAE,GAAG,CAAC,EAAE,CAAC,CAAC;AACjC,IAAA,MAAM,MAAM,GAAG,IAAI,IAAI,CAAC,EAAE,CAAC;AAC3B,IAAA,IACE,MAAM,CAAC,cAAc,EAAE,KAAK,CAAC;AAC7B,QAAA,MAAM,CAAC,WAAW,EAAE,KAAK,EAAE,GAAG,CAAC;AAC/B,QAAA,MAAM,CAAC,UAAU,EAAE,KAAK,CAAC,EACzB;AACA,QAAA,OAAO,IAAI;IACb;AACA,IAAA,OAAO,MAAM;AACf;AAEA;;;;AAIG;AACG,SAAU,qBAAqB,CAAC,QAAgB,EAAA;AACpD,IAAA,MAAM,CAAC,GAAG,aAAa,CAAC,QAAQ,CAAC;AACjC,IAAA,IAAI,CAAC,KAAK,WAAW,IAAI,CAAC,KAAK,WAAW;AAAE,QAAA,OAAO,IAAI;AACvD,IAAA,IAAI,CAAC,CAAC,CAAC,UAAU,CAAC,SAAS,CAAC;AAAE,QAAA,OAAO,KAAK;AAC1C,IAAA,OAAO,CAAC,oBAAoB,CAAC,IAAI,CAAC,CAAC,CAAC;AACtC;AAEA,SAAS,SAAS,CAAC,SAAe,EAAE,KAAa,EAAA;AAC/C,IAAA,OAAO,IAAI,CAAC,GAAG,CAAC,CAAC,EAAE,KAAK,GAAG,SAAS,CAAC,OAAO,EAAE,CAAC,GAAG,MAAM;AAC1D;AAQA;;;;;;;AAOG;AACG,SAAU,wBAAwB,CACtC,IAAS,EACT,MAAA,GAAuC,EAAE,EACzC,KAAA,GAAgB,IAAI,CAAC,GAAG,EAAE,EAAA;IAE1B,MAAM,MAAM,GAAG,EAAE,GAAG,6BAA6B,EAAE,GAAG,MAAM,EAAE;IAC9D,IAAI,CAAC,MAAM,CAAC,OAAO;AAAE,QAAA,OAAO,CAAC,GAAG,IAAI,CAAC;AAErC,IAAA,OAAO,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC,KAAI;QACpB,MAAM,OAAO,GAAG,uBAAuB,CAAC,CAAC,CAAC,IAAI,CAAC;QAC/C,IAAI,EAAE,GAAgB,OAAO;QAC7B,IAAI,CAAC,EAAE,EAAE;AACP,YAAA,IAAI,qBAAqB,CAAC,CAAC,CAAC,IAAI,CAAC;AAAE,gBAAA,OAAO,CAAC;AAC3C,YAAA,EAAE,GAAG,CAAC,CAAC,SAAS,IAAI,IAAI;QAC1B;AACA,QAAA,IAAI,CAAC,EAAE;AAAE,YAAA,OAAO,CAAC;QACjB,OAAO;AACL,YAAA,GAAG,CAAC;YACJ,KAAK,EAAE,yBAAyB,CAAC;gBAC/B,KAAK,EAAE,CAAC,CAAC,KAAK;AACd,gBAAA,SAAS,EAAE,SAAS,CAAC,EAAE,EAAE,KAAK,CAAC;gBAC/B,YAAY,EAAE,MAAM,CAAC,YAAY;aAClC,CAAC;SACH;AACH,IAAA,CAAC,CAAC;AACJ;;;;"}
@@ -23,7 +23,7 @@ import { safeDispatchCustomEvent } from '../utils/events.mjs';
23
23
  * @param destinationAgentId - The agent that follows this gate
24
24
  */
25
25
  function createApprovalGateNode(config, sourceAgentId, destinationAgentId) {
26
- const { gateId, channel = 'chat', prompt, approver, timeoutMs, } = config;
26
+ const { gateId, channel = 'chat', prompt, approver, timeoutMs } = config;
27
27
  /**
28
28
  * The gate node function. Receives the current graph state,
29
29
  * dispatches a notification, calls interrupt(), and returns
@@ -1 +1 @@
1
- {"version":3,"file":"ApprovalGateNode.mjs","sources":["../../../src/nodes/ApprovalGateNode.ts"],"sourcesContent":["import { interrupt } from '@langchain/langgraph';\nimport type { RunnableConfig } from '@langchain/core/runnables';\nimport type { ApprovalGateConfig, BaseGraphState } from '@/types/graph';\nimport type { ToolApprovalResponse } from '@/types/tools';\nimport { GraphEvents } from '@/common';\nimport { safeDispatchCustomEvent } from '@/utils/events';\n\n/**\n * Interrupt payload for approval gate nodes.\n * Passed to interrupt() and persisted in the checkpoint.\n */\nexport interface ApprovalGateInterrupt {\n /** Discriminator to distinguish from tool approval interrupts */\n type: 'approval_gate';\n /** Unique gate identifier */\n gateId: string;\n /** Approval channel (chat, outlook, telegram) */\n channel: string;\n /** Human-readable prompt for the approver */\n prompt?: string;\n /** Approver identifier */\n approver?: string;\n /** Timeout in ms */\n timeoutMs?: number;\n /** Source agent ID (who just finished) */\n sourceAgentId?: string;\n /** Destination agent ID (who will run next if approved) */\n destinationAgentId?: string;\n}\n\n/**\n * Creates a graph node function that acts as an approval gate.\n *\n * Unlike tool approval (which respects ExecutionContext and can be auto-approved\n * in scheduled/handoff modes), approval gates ALWAYS fire. They are placed by\n * the builder between agents in a sequence and represent explicit human\n * checkpoints that cannot be bypassed.\n *\n * Flow:\n * 1. Dispatch ON_APPROVAL_GATE notification (for SSE/persistence)\n * 2. Call interrupt() — graph pauses, state is checkpointed\n * 3. On resume, interrupt() returns the ToolApprovalResponse\n * 4. If approved, pass state through (next agent runs)\n * 5. 
If denied, return state as-is (routing handled by conditional edge)\n *\n * @param config - The approval gate configuration from the edge definition\n * @param sourceAgentId - The agent that precedes this gate\n * @param destinationAgentId - The agent that follows this gate\n */\nexport function createApprovalGateNode(\n config: ApprovalGateConfig,\n sourceAgentId: string,\n destinationAgentId: string,\n) {\n const {\n gateId,\n channel = 'chat',\n prompt,\n approver,\n timeoutMs,\n } = config;\n\n /**\n * The gate node function. Receives the current graph state,\n * dispatches a notification, calls interrupt(), and returns\n * the state with an approval result annotation.\n */\n return async function approvalGateNode(\n state: BaseGraphState,\n runnableConfig?: RunnableConfig,\n ): Promise<Partial<BaseGraphState>> {\n const interruptPayload: ApprovalGateInterrupt = {\n type: 'approval_gate',\n gateId,\n channel,\n prompt,\n approver,\n timeoutMs,\n sourceAgentId,\n destinationAgentId,\n };\n\n // Dispatch notification event so the host can:\n // 1. Persist the approval request to MongoDB\n // 2. Route to the appropriate channel adapter\n // 3. Emit SSE event for chat UI\n safeDispatchCustomEvent(\n GraphEvents.ON_APPROVAL_GATE,\n interruptPayload,\n runnableConfig,\n );\n\n // Pause the graph — state is checkpointed by the MongoDBSaver.\n // On resume via Command({ resume: ToolApprovalResponse }), interrupt()\n // returns the response value.\n const response = interrupt(interruptPayload) as ToolApprovalResponse;\n\n // Return empty state update — the graph structure (conditional edges)\n // handles routing based on the approval result. 
We store the response\n // in a message so downstream nodes can access it if needed.\n if (response.approved) {\n return {};\n }\n\n // On denial, we could add a system message noting the denial.\n // The conditional edge after this node will route to END or skip.\n return {};\n };\n}\n\n/**\n * Node ID for an approval gate, derived from the gate configuration.\n * Used by MultiAgentGraph when inserting gate nodes into the graph.\n */\nexport function getApprovalGateNodeId(gateId: string): string {\n return `approval_gate_${gateId}`;\n}\n"],"names":[],"mappings":";;;;;AA8BA;;;;;;;;;;;;;;;;;;AAkBG;SACa,sBAAsB,CACpC,MAA0B,EAC1B,aAAqB,EACrB,kBAA0B,EAAA;AAE1B,IAAA,MAAM,EACJ,MAAM,EACN,OAAO,GAAG,MAAM,EAChB,MAAM,EACN,QAAQ,EACR,SAAS,GACV,GAAG,MAAM;AAEV;;;;AAIG;AACH,IAAA,OAAO,eAAe,gBAAgB,CACpC,KAAqB,EACrB,cAA+B,EAAA;AAE/B,QAAA,MAAM,gBAAgB,GAA0B;AAC9C,YAAA,IAAI,EAAE,eAAe;YACrB,MAAM;YACN,OAAO;YACP,MAAM;YACN,QAAQ;YACR,SAAS;YACT,aAAa;YACb,kBAAkB;SACnB;;;;;QAMD,uBAAuB,CACrB,WAAW,CAAC,gBAAgB,EAC5B,gBAAgB,EAChB,cAAc,CACf;;;;AAKD,QAAA,MAAM,QAAQ,GAAG,SAAS,CAAC,gBAAgB,CAAyB;;;;AAKpE,QAAA,IAAI,QAAQ,CAAC,QAAQ,EAAE;AACrB,YAAA,OAAO,EAAE;QACX;;;AAIA,QAAA,OAAO,EAAE;AACX,IAAA,CAAC;AACH;AAEA;;;AAGG;AACG,SAAU,qBAAqB,CAAC,MAAc,EAAA;IAClD,OAAO,CAAA,cAAA,EAAiB,MAAM,CAAA,CAAE;AAClC;;;;"}
1
+ {"version":3,"file":"ApprovalGateNode.mjs","sources":["../../../src/nodes/ApprovalGateNode.ts"],"sourcesContent":["import { interrupt } from '@langchain/langgraph';\nimport type { RunnableConfig } from '@langchain/core/runnables';\nimport type { ApprovalGateConfig, BaseGraphState } from '@/types/graph';\nimport type { ToolApprovalResponse } from '@/types/tools';\nimport { GraphEvents } from '@/common';\nimport { safeDispatchCustomEvent } from '@/utils/events';\n\n/**\n * Interrupt payload for approval gate nodes.\n * Passed to interrupt() and persisted in the checkpoint.\n */\nexport interface ApprovalGateInterrupt {\n /** Discriminator to distinguish from tool approval interrupts */\n type: 'approval_gate';\n /** Unique gate identifier */\n gateId: string;\n /** Approval channel (chat, outlook, telegram) */\n channel: string;\n /** Human-readable prompt for the approver */\n prompt?: string;\n /** Approver identifier */\n approver?: string;\n /** Timeout in ms */\n timeoutMs?: number;\n /** Source agent ID (who just finished) */\n sourceAgentId?: string;\n /** Destination agent ID (who will run next if approved) */\n destinationAgentId?: string;\n}\n\n/**\n * Creates a graph node function that acts as an approval gate.\n *\n * Unlike tool approval (which respects ExecutionContext and can be auto-approved\n * in scheduled/handoff modes), approval gates ALWAYS fire. They are placed by\n * the builder between agents in a sequence and represent explicit human\n * checkpoints that cannot be bypassed.\n *\n * Flow:\n * 1. Dispatch ON_APPROVAL_GATE notification (for SSE/persistence)\n * 2. Call interrupt() — graph pauses, state is checkpointed\n * 3. On resume, interrupt() returns the ToolApprovalResponse\n * 4. If approved, pass state through (next agent runs)\n * 5. 
If denied, return state as-is (routing handled by conditional edge)\n *\n * @param config - The approval gate configuration from the edge definition\n * @param sourceAgentId - The agent that precedes this gate\n * @param destinationAgentId - The agent that follows this gate\n */\nexport function createApprovalGateNode(\n config: ApprovalGateConfig,\n sourceAgentId: string,\n destinationAgentId: string\n) {\n const { gateId, channel = 'chat', prompt, approver, timeoutMs } = config;\n\n /**\n * The gate node function. Receives the current graph state,\n * dispatches a notification, calls interrupt(), and returns\n * the state with an approval result annotation.\n */\n return async function approvalGateNode(\n state: BaseGraphState,\n runnableConfig?: RunnableConfig\n ): Promise<Partial<BaseGraphState>> {\n const interruptPayload: ApprovalGateInterrupt = {\n type: 'approval_gate',\n gateId,\n channel,\n prompt,\n approver,\n timeoutMs,\n sourceAgentId,\n destinationAgentId,\n };\n\n // Dispatch notification event so the host can:\n // 1. Persist the approval request to MongoDB\n // 2. Route to the appropriate channel adapter\n // 3. Emit SSE event for chat UI\n safeDispatchCustomEvent(\n GraphEvents.ON_APPROVAL_GATE,\n interruptPayload,\n runnableConfig\n );\n\n // Pause the graph — state is checkpointed by the MongoDBSaver.\n // On resume via Command({ resume: ToolApprovalResponse }), interrupt()\n // returns the response value.\n const response = interrupt(interruptPayload) as ToolApprovalResponse;\n\n // Return empty state update — the graph structure (conditional edges)\n // handles routing based on the approval result. 
We store the response\n // in a message so downstream nodes can access it if needed.\n if (response.approved) {\n return {};\n }\n\n // On denial, we could add a system message noting the denial.\n // The conditional edge after this node will route to END or skip.\n return {};\n };\n}\n\n/**\n * Node ID for an approval gate, derived from the gate configuration.\n * Used by MultiAgentGraph when inserting gate nodes into the graph.\n */\nexport function getApprovalGateNodeId(gateId: string): string {\n return `approval_gate_${gateId}`;\n}\n"],"names":[],"mappings":";;;;;AA8BA;;;;;;;;;;;;;;;;;;AAkBG;SACa,sBAAsB,CACpC,MAA0B,EAC1B,aAAqB,EACrB,kBAA0B,EAAA;AAE1B,IAAA,MAAM,EAAE,MAAM,EAAE,OAAO,GAAG,MAAM,EAAE,MAAM,EAAE,QAAQ,EAAE,SAAS,EAAE,GAAG,MAAM;AAExE;;;;AAIG;AACH,IAAA,OAAO,eAAe,gBAAgB,CACpC,KAAqB,EACrB,cAA+B,EAAA;AAE/B,QAAA,MAAM,gBAAgB,GAA0B;AAC9C,YAAA,IAAI,EAAE,eAAe;YACrB,MAAM;YACN,OAAO;YACP,MAAM;YACN,QAAQ;YACR,SAAS;YACT,aAAa;YACb,kBAAkB;SACnB;;;;;QAMD,uBAAuB,CACrB,WAAW,CAAC,gBAAgB,EAC5B,gBAAgB,EAChB,cAAc,CACf;;;;AAKD,QAAA,MAAM,QAAQ,GAAG,SAAS,CAAC,gBAAgB,CAAyB;;;;AAKpE,QAAA,IAAI,QAAQ,CAAC,QAAQ,EAAE;AACrB,YAAA,OAAO,EAAE;QACX;;;AAIA,QAAA,OAAO,EAAE;AACX,IAAA,CAAC;AACH;AAEA;;;AAGG;AACG,SAAU,qBAAqB,CAAC,MAAc,EAAA;IAClD,OAAO,CAAA,cAAA,EAAiB,MAAM,CAAA,CAAE;AAClC;;;;"}
@@ -0,0 +1,44 @@
1
+ import { DEFAULT_MEMORY_FLUSH_SYSTEM_PROMPT, FLUSH_PROMPT_RUBRIC_PLACEHOLDER, DEFAULT_MEMORY_FLUSH_PROMPT } from '../memory/constants.mjs';
2
+ import { renderPathsRubric } from '../memory/paths.mjs';
3
+
4
+ /**
5
+ * Memory-flush prompt resolver.
6
+ *
7
+ * The raw prompt strings live in `src/memory/constants.ts` and contain a
8
+ * single `{{MEMORY_PATHS_RUBRIC}}` placeholder. This module substitutes
9
+ * the caller-scoped rubric (produced by `renderPathsRubric()` in
10
+ * `src/memory/paths.ts`) into that placeholder at flush time, so:
11
+ *
12
+ * - a collaborative agent with a real caller sees all 8 paths
13
+ * - an isolated/autonomous agent sees only the 4 agent-tier paths
14
+ * (user-tier rows are physically omitted from the rubric the LLM
15
+ * sees, so it cannot route writes there even by mistake)
16
+ *
17
+ * ## Why this lives in prompts/ and not memory/constants.ts
18
+ *
19
+ * The constants file is pure strings — no imports, no runtime work.
20
+ * Injecting the rubric needs access to `renderPathsRubric()` which
21
+ * needs `MemoryScope`, so the substitution has to happen one layer up.
22
+ * Keeping the placeholder in constants and the substitution here means
23
+ * tests of the raw prompt (no scope) and tests of the rendered prompt
24
+ * (scope-aware) stay independent.
25
+ */
26
+ /** Back-compat alias so existing callers keep working. */
27
+ const MEMORY_FLUSH_SYSTEM_PROMPT = DEFAULT_MEMORY_FLUSH_SYSTEM_PROMPT;
28
+ /**
29
+ * Substitutes the paths rubric into the raw prompts for a given scope.
30
+ *
31
+ * Idempotent (calling it twice on already-resolved strings is a no-op
32
+ * because the placeholder will have been replaced already). Safe to
33
+ * call per-flush; no caching needed — the string concat is microseconds.
34
+ */
35
+ function resolveFlushPrompts(options) {
36
+ const rubric = renderPathsRubric(options.scope);
37
+ return {
38
+ prompt: DEFAULT_MEMORY_FLUSH_PROMPT.replaceAll(FLUSH_PROMPT_RUBRIC_PLACEHOLDER, rubric),
39
+ systemPrompt: DEFAULT_MEMORY_FLUSH_SYSTEM_PROMPT.replaceAll(FLUSH_PROMPT_RUBRIC_PLACEHOLDER, rubric),
40
+ };
41
+ }
42
+
43
+ export { DEFAULT_MEMORY_FLUSH_PROMPT, DEFAULT_MEMORY_FLUSH_SYSTEM_PROMPT, MEMORY_FLUSH_SYSTEM_PROMPT, resolveFlushPrompts };
44
+ //# sourceMappingURL=memoryFlushPrompt.mjs.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"memoryFlushPrompt.mjs","sources":["../../../src/prompts/memoryFlushPrompt.ts"],"sourcesContent":["/**\n * Memory-flush prompt resolver.\n *\n * The raw prompt strings live in `src/memory/constants.ts` and contain a\n * single `{{MEMORY_PATHS_RUBRIC}}` placeholder. This module substitutes\n * the caller-scoped rubric (produced by `renderPathsRubric()` in\n * `src/memory/paths.ts`) into that placeholder at flush time, so:\n *\n * - a collaborative agent with a real caller sees all 8 paths\n * - an isolated/autonomous agent sees only the 4 agent-tier paths\n * (user-tier rows are physically omitted from the rubric the LLM\n * sees, so it cannot route writes there even by mistake)\n *\n * ## Why this lives in prompts/ and not memory/constants.ts\n *\n * The constants file is pure strings — no imports, no runtime work.\n * Injecting the rubric needs access to `renderPathsRubric()` which\n * needs `MemoryScope`, so the substitution has to happen one layer up.\n * Keeping the placeholder in constants and the substitution here means\n * tests of the raw prompt (no scope) and tests of the rendered prompt\n * (scope-aware) stay independent.\n */\nimport {\n DEFAULT_MEMORY_FLUSH_PROMPT,\n DEFAULT_MEMORY_FLUSH_SYSTEM_PROMPT,\n FLUSH_PROMPT_RUBRIC_PLACEHOLDER,\n} from '@/memory/constants';\nimport { renderPathsRubric } from '@/memory/paths';\nimport type { MemoryScope } from '@/memory/types';\n\nexport { DEFAULT_MEMORY_FLUSH_PROMPT, DEFAULT_MEMORY_FLUSH_SYSTEM_PROMPT };\n\n/** Back-compat alias so existing callers keep working. */\nexport const MEMORY_FLUSH_SYSTEM_PROMPT = DEFAULT_MEMORY_FLUSH_SYSTEM_PROMPT;\n\n/** Minimum scope shape needed to render the rubric. */\nexport interface ResolveFlushPromptsOptions {\n /**\n * The scope the memory tools were built with. 
Only `userId` matters\n * for rubric rendering — if absent, the user-tier paths are filtered\n * out of the rubric the LLM sees.\n */\n scope: Pick<MemoryScope, 'userId'>;\n}\n\n/**\n * Result shape returned to `runMemoryFlush`. `prompt` goes in the\n * HumanMessage, `systemPrompt` goes in the SystemMessage. Both already\n * have the rubric substituted — the caller should pass them through\n * verbatim.\n */\nexport interface ResolvedFlushPrompts {\n prompt: string;\n systemPrompt: string;\n}\n\n/**\n * Substitutes the paths rubric into the raw prompts for a given scope.\n *\n * Idempotent (calling it twice on already-resolved strings is a no-op\n * because the placeholder will have been replaced already). Safe to\n * call per-flush; no caching needed — the string concat is microseconds.\n */\nexport function resolveFlushPrompts(\n options: ResolveFlushPromptsOptions\n): ResolvedFlushPrompts {\n const rubric = renderPathsRubric(options.scope);\n return {\n prompt: DEFAULT_MEMORY_FLUSH_PROMPT.replaceAll(\n FLUSH_PROMPT_RUBRIC_PLACEHOLDER,\n rubric\n ),\n systemPrompt: DEFAULT_MEMORY_FLUSH_SYSTEM_PROMPT.replaceAll(\n FLUSH_PROMPT_RUBRIC_PLACEHOLDER,\n rubric\n ),\n };\n}\n"],"names":[],"mappings":";;;AAAA;;;;;;;;;;;;;;;;;;;;;AAqBG;AAWH;AACO,MAAM,0BAA0B,GAAG;AAuB1C;;;;;;AAMG;AACG,SAAU,mBAAmB,CACjC,OAAmC,EAAA;IAEnC,MAAM,MAAM,GAAG,iBAAiB,CAAC,OAAO,CAAC,KAAK,CAAC;IAC/C,OAAO;QACL,MAAM,EAAE,2BAA2B,CAAC,UAAU,CAC5C,+BAA+B,EAC/B,MAAM,CACP;QACD,YAAY,EAAE,kCAAkC,CAAC,UAAU,CACzD,+BAA+B,EAC/B,MAAM,CACP;KACF;AACH;;;;"}