@jonathangu/openclawbrain 0.3.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (113) hide show
  1. package/LICENSE +21 -0
  2. package/README.md +412 -0
  3. package/bin/openclawbrain.js +15 -0
  4. package/docs/END_STATE.md +244 -0
  5. package/docs/EVIDENCE.md +128 -0
  6. package/docs/RELEASE_CONTRACT.md +91 -0
  7. package/docs/agent-tools.md +106 -0
  8. package/docs/architecture.md +224 -0
  9. package/docs/configuration.md +178 -0
  10. package/docs/evidence/2026-03-16/3188b50c4ed30f07dea111e35ce52aabefaced63/brain-teach-session-bound/status.json +87 -0
  11. package/docs/evidence/2026-03-16/3188b50c4ed30f07dea111e35ce52aabefaced63/brain-teach-session-bound/summary.md +16 -0
  12. package/docs/evidence/2026-03-16/3188b50c4ed30f07dea111e35ce52aabefaced63/brain-teach-session-bound/trace.json +273 -0
  13. package/docs/evidence/2026-03-16/3188b50c4ed30f07dea111e35ce52aabefaced63/brain-teach-session-bound/validation-report.json +652 -0
  14. package/docs/evidence/2026-03-16/4941429588810da5d6f7ef1509f229f83fa08031/channels-status.txt +31 -0
  15. package/docs/evidence/2026-03-16/4941429588810da5d6f7ef1509f229f83fa08031/config-snapshot.json +66 -0
  16. package/docs/evidence/2026-03-16/4941429588810da5d6f7ef1509f229f83fa08031/doctor.json +14 -0
  17. package/docs/evidence/2026-03-16/4941429588810da5d6f7ef1509f229f83fa08031/gateway-probe.txt +34 -0
  18. package/docs/evidence/2026-03-16/4941429588810da5d6f7ef1509f229f83fa08031/gateway-status.txt +41 -0
  19. package/docs/evidence/2026-03-16/4941429588810da5d6f7ef1509f229f83fa08031/logs.txt +428 -0
  20. package/docs/evidence/2026-03-16/4941429588810da5d6f7ef1509f229f83fa08031/status-all.txt +60 -0
  21. package/docs/evidence/2026-03-16/4941429588810da5d6f7ef1509f229f83fa08031/status.json +223 -0
  22. package/docs/evidence/2026-03-16/4941429588810da5d6f7ef1509f229f83fa08031/summary.md +13 -0
  23. package/docs/evidence/2026-03-16/4941429588810da5d6f7ef1509f229f83fa08031/trace.json +4 -0
  24. package/docs/evidence/2026-03-16/4941429588810da5d6f7ef1509f229f83fa08031/validation-report.json +334 -0
  25. package/docs/evidence/2026-03-16/7f8dbcb27e741abdeefd5656c210639d0acdd440/channels-status.txt +25 -0
  26. package/docs/evidence/2026-03-16/7f8dbcb27e741abdeefd5656c210639d0acdd440/config-snapshot.json +91 -0
  27. package/docs/evidence/2026-03-16/7f8dbcb27e741abdeefd5656c210639d0acdd440/doctor.json +14 -0
  28. package/docs/evidence/2026-03-16/7f8dbcb27e741abdeefd5656c210639d0acdd440/gateway-probe.txt +36 -0
  29. package/docs/evidence/2026-03-16/7f8dbcb27e741abdeefd5656c210639d0acdd440/gateway-status.txt +44 -0
  30. package/docs/evidence/2026-03-16/7f8dbcb27e741abdeefd5656c210639d0acdd440/logs.txt +428 -0
  31. package/docs/evidence/2026-03-16/7f8dbcb27e741abdeefd5656c210639d0acdd440/short-static-classification/preflight-doctor.json +10 -0
  32. package/docs/evidence/2026-03-16/7f8dbcb27e741abdeefd5656c210639d0acdd440/short-static-classification/preflight-sdk-probe.json +11 -0
  33. package/docs/evidence/2026-03-16/7f8dbcb27e741abdeefd5656c210639d0acdd440/short-static-classification/preflight-setup-only.json +12 -0
  34. package/docs/evidence/2026-03-16/7f8dbcb27e741abdeefd5656c210639d0acdd440/short-static-classification/summary.md +30 -0
  35. package/docs/evidence/2026-03-16/7f8dbcb27e741abdeefd5656c210639d0acdd440/short-static-classification/validation-report.json +72 -0
  36. package/docs/evidence/2026-03-16/7f8dbcb27e741abdeefd5656c210639d0acdd440/status-all.txt +63 -0
  37. package/docs/evidence/2026-03-16/7f8dbcb27e741abdeefd5656c210639d0acdd440/status.json +200 -0
  38. package/docs/evidence/2026-03-16/7f8dbcb27e741abdeefd5656c210639d0acdd440/summary.md +13 -0
  39. package/docs/evidence/2026-03-16/7f8dbcb27e741abdeefd5656c210639d0acdd440/trace.json +4 -0
  40. package/docs/evidence/2026-03-16/7f8dbcb27e741abdeefd5656c210639d0acdd440/validation-report.json +311 -0
  41. package/docs/evidence/README.md +16 -0
  42. package/docs/fts5.md +161 -0
  43. package/docs/tui.md +506 -0
  44. package/index.ts +1372 -0
  45. package/openclaw.plugin.json +136 -0
  46. package/package.json +66 -0
  47. package/src/assembler.ts +804 -0
  48. package/src/brain-cli.ts +316 -0
  49. package/src/brain-core/decay.ts +35 -0
  50. package/src/brain-core/episode.ts +82 -0
  51. package/src/brain-core/graph.ts +321 -0
  52. package/src/brain-core/health.ts +116 -0
  53. package/src/brain-core/mutator.ts +281 -0
  54. package/src/brain-core/pack.ts +117 -0
  55. package/src/brain-core/policy.ts +153 -0
  56. package/src/brain-core/replay.ts +1 -0
  57. package/src/brain-core/teacher.ts +105 -0
  58. package/src/brain-core/trace.ts +40 -0
  59. package/src/brain-core/traverse.ts +230 -0
  60. package/src/brain-core/types.ts +405 -0
  61. package/src/brain-core/update.ts +123 -0
  62. package/src/brain-harvest/human.ts +46 -0
  63. package/src/brain-harvest/scanner.ts +98 -0
  64. package/src/brain-harvest/self.ts +147 -0
  65. package/src/brain-runtime/assembler-extension.ts +230 -0
  66. package/src/brain-runtime/evidence-detectors.ts +68 -0
  67. package/src/brain-runtime/graph-io.ts +72 -0
  68. package/src/brain-runtime/harvester-extension.ts +98 -0
  69. package/src/brain-runtime/service.ts +659 -0
  70. package/src/brain-runtime/tools.ts +109 -0
  71. package/src/brain-runtime/worker-state.ts +106 -0
  72. package/src/brain-runtime/worker-supervisor.ts +169 -0
  73. package/src/brain-store/embedding.ts +179 -0
  74. package/src/brain-store/init.ts +347 -0
  75. package/src/brain-store/migrations.ts +188 -0
  76. package/src/brain-store/store.ts +816 -0
  77. package/src/brain-worker/child-runner.ts +321 -0
  78. package/src/brain-worker/jobs.ts +12 -0
  79. package/src/brain-worker/mutation-job.ts +5 -0
  80. package/src/brain-worker/promotion-job.ts +5 -0
  81. package/src/brain-worker/protocol.ts +79 -0
  82. package/src/brain-worker/teacher-job.ts +5 -0
  83. package/src/brain-worker/update-job.ts +5 -0
  84. package/src/brain-worker/worker.ts +422 -0
  85. package/src/compaction.ts +1332 -0
  86. package/src/db/config.ts +265 -0
  87. package/src/db/connection.ts +72 -0
  88. package/src/db/features.ts +42 -0
  89. package/src/db/migration.ts +561 -0
  90. package/src/engine.ts +1995 -0
  91. package/src/expansion-auth.ts +351 -0
  92. package/src/expansion-policy.ts +303 -0
  93. package/src/expansion.ts +383 -0
  94. package/src/integrity.ts +600 -0
  95. package/src/large-files.ts +527 -0
  96. package/src/openclaw-bridge.ts +22 -0
  97. package/src/retrieval.ts +357 -0
  98. package/src/store/conversation-store.ts +748 -0
  99. package/src/store/fts5-sanitize.ts +29 -0
  100. package/src/store/full-text-fallback.ts +74 -0
  101. package/src/store/index.ts +29 -0
  102. package/src/store/summary-store.ts +918 -0
  103. package/src/summarize.ts +847 -0
  104. package/src/tools/common.ts +53 -0
  105. package/src/tools/lcm-conversation-scope.ts +76 -0
  106. package/src/tools/lcm-describe-tool.ts +234 -0
  107. package/src/tools/lcm-expand-query-tool.ts +594 -0
  108. package/src/tools/lcm-expand-tool.delegation.ts +556 -0
  109. package/src/tools/lcm-expand-tool.ts +448 -0
  110. package/src/tools/lcm-expansion-recursion-guard.ts +286 -0
  111. package/src/tools/lcm-grep-tool.ts +200 -0
  112. package/src/transcript-repair.ts +301 -0
  113. package/src/types.ts +149 -0
package/src/engine.ts ADDED
@@ -0,0 +1,1995 @@
1
+ import { randomUUID } from "node:crypto";
2
+ import { appendFileSync, mkdirSync, readFileSync } from "node:fs";
3
+ import { mkdir, writeFile } from "node:fs/promises";
4
+ import { homedir } from "node:os";
5
+ import { dirname, join } from "node:path";
6
+ import type {
7
+ ContextEngine,
8
+ ContextEngineInfo,
9
+ AssembleResult,
10
+ BootstrapResult,
11
+ CompactResult,
12
+ IngestBatchResult,
13
+ IngestResult,
14
+ SubagentEndReason,
15
+ SubagentSpawnPreparation,
16
+ } from "openclaw/plugin-sdk";
17
+ import { ContextAssembler, type AssembleContextResult } from "./assembler.js";
18
+ import { CompactionEngine, type CompactionConfig } from "./compaction.js";
19
+ import type { LcmConfig } from "./db/config.js";
20
+ import { getLcmConnection, closeLcmConnection } from "./db/connection.js";
21
+ import { getLcmDbFeatures } from "./db/features.js";
22
+ import { runLcmMigrations } from "./db/migration.js";
23
+ import {
24
+ createDelegatedExpansionGrant,
25
+ removeDelegatedExpansionGrantForSession,
26
+ revokeDelegatedExpansionGrantForSession,
27
+ } from "./expansion-auth.js";
28
+ import {
29
+ extensionFromNameOrMime,
30
+ formatFileReference,
31
+ generateExplorationSummary,
32
+ parseFileBlocks,
33
+ } from "./large-files.js";
34
+ import { RetrievalEngine } from "./retrieval.js";
35
+ import {
36
+ BrainAssemblerExtension,
37
+ type BrainAssembledContextResult,
38
+ } from "./brain-runtime/assembler-extension.js";
39
+ import { BrainService } from "./brain-runtime/service.js";
40
+
41
+ function extractLatestUserText(messages: AgentMessage[]): string | null {
42
+ for (let index = messages.length - 1; index >= 0; index -= 1) {
43
+ const message = messages[index];
44
+ if (message?.role !== "user") {
45
+ continue;
46
+ }
47
+ const content = message.content;
48
+ if (typeof content === "string") {
49
+ return content;
50
+ }
51
+ if (Array.isArray(content)) {
52
+ const text = content
53
+ .map((part) => {
54
+ if (typeof part === "string") {
55
+ return part;
56
+ }
57
+ if (
58
+ part
59
+ && typeof part === "object"
60
+ && "type" in part
61
+ && (part as { type?: unknown }).type === "text"
62
+ && "text" in part
63
+ && typeof (part as { text?: unknown }).text === "string"
64
+ ) {
65
+ return (part as { text: string }).text;
66
+ }
67
+ return "";
68
+ })
69
+ .join("")
70
+ .trim();
71
+ if (text) {
72
+ return text;
73
+ }
74
+ }
75
+ }
76
+ return null;
77
+ }
78
+
79
+ function appendValidationAssemblyRecord(record: Record<string, unknown>) {
80
+ const path = process.env.OPENCLAWBRAIN_VALIDATION_RECORD_FILE?.trim();
81
+ if (!path) {
82
+ return;
83
+ }
84
+ try {
85
+ mkdirSync(dirname(path), { recursive: true });
86
+ appendFileSync(path, `${JSON.stringify({ at: Date.now(), ...record })}\n`);
87
+ } catch {
88
+ // Validation logging is best-effort and must never affect runtime behavior.
89
+ }
90
+ }
91
+ import {
92
+ ConversationStore,
93
+ type CreateMessagePartInput,
94
+ type MessagePartType,
95
+ } from "./store/conversation-store.js";
96
+ import { SummaryStore } from "./store/summary-store.js";
97
+ import { createLcmSummarizeFromLegacyParams } from "./summarize.js";
98
+ import type { LcmDependencies } from "./types.js";
99
+
100
+ type AgentMessage = Parameters<ContextEngine["ingest"]>[0]["message"];
101
+ type AssembleResultWithSystemPrompt = AssembleResult & { systemPromptAddition?: string };
102
+
103
+ // ── Helpers ──────────────────────────────────────────────────────────────────
104
+
105
+ /** Rough token estimate: ~4 chars per token. */
106
+ function estimateTokens(text: string): number {
107
+ return Math.ceil(text.length / 4);
108
+ }
109
+
110
+ function toJson(value: unknown): string {
111
+ const encoded = JSON.stringify(value);
112
+ return typeof encoded === "string" ? encoded : "";
113
+ }
114
+
115
+ function safeString(value: unknown): string | undefined {
116
+ return typeof value === "string" ? value : undefined;
117
+ }
118
+
119
+ function safeBoolean(value: unknown): boolean | undefined {
120
+ return typeof value === "boolean" ? value : undefined;
121
+ }
122
+
123
+ function appendTextValue(value: unknown, out: string[]): void {
124
+ if (typeof value === "string") {
125
+ out.push(value);
126
+ return;
127
+ }
128
+ if (Array.isArray(value)) {
129
+ for (const entry of value) {
130
+ appendTextValue(entry, out);
131
+ }
132
+ return;
133
+ }
134
+ if (!value || typeof value !== "object") {
135
+ return;
136
+ }
137
+
138
+ const record = value as Record<string, unknown>;
139
+ appendTextValue(record.text, out);
140
+ appendTextValue(record.value, out);
141
+ }
142
+
143
+ function extractReasoningText(record: Record<string, unknown>): string | undefined {
144
+ const chunks: string[] = [];
145
+ appendTextValue(record.summary, chunks);
146
+ if (chunks.length === 0) {
147
+ return undefined;
148
+ }
149
+
150
+ const normalized = chunks
151
+ .map((chunk) => chunk.trim())
152
+ .filter((chunk, idx, arr) => chunk.length > 0 && arr.indexOf(chunk) === idx);
153
+ return normalized.length > 0 ? normalized.join("\n") : undefined;
154
+ }
155
+
156
+ function normalizeUnknownBlock(value: unknown): {
157
+ type: string;
158
+ text?: string;
159
+ metadata: Record<string, unknown>;
160
+ } {
161
+ if (!value || typeof value !== "object" || Array.isArray(value)) {
162
+ return {
163
+ type: "agent",
164
+ metadata: { raw: value },
165
+ };
166
+ }
167
+
168
+ const record = value as Record<string, unknown>;
169
+ const rawType = safeString(record.type);
170
+ return {
171
+ type: rawType ?? "agent",
172
+ text:
173
+ safeString(record.text) ??
174
+ safeString(record.thinking) ??
175
+ ((rawType === "reasoning" || rawType === "thinking")
176
+ ? extractReasoningText(record)
177
+ : undefined),
178
+ metadata: { raw: record },
179
+ };
180
+ }
181
+
182
+ function toPartType(type: string): MessagePartType {
183
+ switch (type) {
184
+ case "text":
185
+ return "text";
186
+ case "thinking":
187
+ case "reasoning":
188
+ return "reasoning";
189
+ case "tool_use":
190
+ case "toolUse":
191
+ case "tool-use":
192
+ case "toolCall":
193
+ case "functionCall":
194
+ case "function_call":
195
+ case "function_call_output":
196
+ case "tool_result":
197
+ case "toolResult":
198
+ case "tool":
199
+ return "tool";
200
+ case "patch":
201
+ return "patch";
202
+ case "file":
203
+ case "image":
204
+ return "file";
205
+ case "subtask":
206
+ return "subtask";
207
+ case "compaction":
208
+ return "compaction";
209
+ case "step_start":
210
+ case "step-start":
211
+ return "step_start";
212
+ case "step_finish":
213
+ case "step-finish":
214
+ return "step_finish";
215
+ case "snapshot":
216
+ return "snapshot";
217
+ case "retry":
218
+ return "retry";
219
+ case "agent":
220
+ return "agent";
221
+ default:
222
+ return "agent";
223
+ }
224
+ }
225
+
226
+ /**
227
+ * Convert AgentMessage content into plain text for DB storage.
228
+ *
229
+ * For content block arrays we keep only text blocks to avoid persisting raw
230
+ * JSON syntax that can later pollute assembled model context.
231
+ */
232
+ function extractMessageContent(content: unknown): string {
233
+ if (typeof content === "string") {
234
+ return content;
235
+ }
236
+
237
+ if (Array.isArray(content)) {
238
+ return content
239
+ .filter((block): block is { type?: unknown; text?: unknown } => {
240
+ return !!block && typeof block === "object";
241
+ })
242
+ .filter((block) => block.type === "text" && typeof block.text === "string")
243
+ .map((block) => block.text as string)
244
+ .join("\n");
245
+ }
246
+
247
+ const serialized = JSON.stringify(content);
248
+ return typeof serialized === "string" ? serialized : "";
249
+ }
250
+
251
+ function toRuntimeRoleForTokenEstimate(role: string): "user" | "assistant" | "toolResult" {
252
+ if (role === "tool" || role === "toolResult") {
253
+ return "toolResult";
254
+ }
255
+ if (role === "user" || role === "system") {
256
+ return "user";
257
+ }
258
+ return "assistant";
259
+ }
260
+
261
+ function isTextBlock(value: unknown): value is { type: "text"; text: string } {
262
+ if (!value || typeof value !== "object" || Array.isArray(value)) {
263
+ return false;
264
+ }
265
+ const record = value as Record<string, unknown>;
266
+ return record.type === "text" && typeof record.text === "string";
267
+ }
268
+
269
+ /**
270
+ * Estimate token usage for the content shape that the assembler will emit.
271
+ *
272
+ * LCM stores a plain-text fallback copy in messages.content, but message_parts
273
+ * can rehydrate larger structured/raw blocks. This estimator mirrors the
274
+ * rehydrated shape so compaction decisions use realistic token totals.
275
+ */
276
+ function estimateContentTokensForRole(params: {
277
+ role: "user" | "assistant" | "toolResult";
278
+ content: unknown;
279
+ fallbackContent: string;
280
+ }): number {
281
+ const { role, content, fallbackContent } = params;
282
+
283
+ if (typeof content === "string") {
284
+ return estimateTokens(content);
285
+ }
286
+
287
+ if (Array.isArray(content)) {
288
+ if (content.length === 0) {
289
+ return estimateTokens(fallbackContent);
290
+ }
291
+
292
+ if (role === "user" && content.length === 1 && isTextBlock(content[0])) {
293
+ return estimateTokens(content[0].text);
294
+ }
295
+
296
+ const serialized = JSON.stringify(content);
297
+ return estimateTokens(typeof serialized === "string" ? serialized : "");
298
+ }
299
+
300
+ if (content && typeof content === "object") {
301
+ if (role === "user" && isTextBlock(content)) {
302
+ return estimateTokens(content.text);
303
+ }
304
+
305
+ const serialized = JSON.stringify([content]);
306
+ return estimateTokens(typeof serialized === "string" ? serialized : "");
307
+ }
308
+
309
+ return estimateTokens(fallbackContent);
310
+ }
311
+
312
/**
 * Explode an AgentMessage into CreateMessagePartInput rows for message_parts.
 *
 * Handles four message shapes, in precedence order:
 *  1. bash exec messages (command/output, no content) -> one synthetic text part;
 *  2. messages with no `content` at all -> one "agent" part with the raw message;
 *  3. string content -> one text part;
 *  4. non-array content -> one "agent" part; array content -> one part per block.
 *
 * Tool identifiers are resolved from several provider spellings (camelCase and
 * snake_case); block-level values take precedence over top-level ones.
 */
function buildMessageParts(params: {
  sessionId: string;
  message: AgentMessage;
  fallbackContent: string;
}): import("./store/conversation-store.js").CreateMessagePartInput[] {
  const { sessionId, message, fallbackContent } = params;
  const role = typeof message.role === "string" ? message.role : "unknown";
  const topLevel = message as unknown as Record<string, unknown>;
  // Tool-call id may live at the message top level under any provider spelling;
  // bare `id` is accepted last as the weakest signal.
  const topLevelToolCallId =
    safeString(topLevel.toolCallId) ??
    safeString(topLevel.tool_call_id) ??
    safeString(topLevel.toolUseId) ??
    safeString(topLevel.tool_use_id) ??
    safeString(topLevel.call_id) ??
    safeString(topLevel.id);
  const topLevelToolName =
    safeString(topLevel.toolName) ??
    safeString(topLevel.tool_name);
  const topLevelIsError =
    safeBoolean(topLevel.isError) ??
    safeBoolean(topLevel.is_error);

  // BashExecutionMessage: preserve a synthetic text part so output is round-trippable.
  if (!("content" in message) && "command" in message && "output" in message) {
    return [
      {
        sessionId,
        partType: "text",
        ordinal: 0,
        textContent: fallbackContent,
        metadata: toJson({
          originalRole: role,
          source: "bash-exec",
          command: safeString((message as { command?: unknown }).command),
        }),
      },
    ];
  }

  // No content field at all: keep the whole raw message in metadata so
  // nothing is lost, and store it as a generic "agent" part.
  if (!("content" in message)) {
    return [
      {
        sessionId,
        partType: "agent",
        ordinal: 0,
        textContent: fallbackContent || null,
        metadata: toJson({
          originalRole: role,
          source: "unknown-message-shape",
          raw: message,
        }),
      },
    ];
  }

  // Plain string content: single text part carrying the top-level tool info.
  if (typeof message.content === "string") {
    return [
      {
        sessionId,
        partType: "text",
        ordinal: 0,
        textContent: message.content,
        metadata: toJson({
          originalRole: role,
          toolCallId: topLevelToolCallId,
          toolName: topLevelToolName,
          isError: topLevelIsError,
        }),
      },
    ];
  }

  // Non-array, non-string content (e.g. a single object): preserved raw.
  if (!Array.isArray(message.content)) {
    return [
      {
        sessionId,
        partType: "agent",
        ordinal: 0,
        textContent: fallbackContent || null,
        metadata: toJson({
          originalRole: role,
          source: "non-array-content",
          raw: message.content,
        }),
      },
    ];
  }

  // Array content: one part per block, ordinal preserving block order.
  const parts: CreateMessagePartInput[] = [];
  for (let ordinal = 0; ordinal < message.content.length; ordinal++) {
    const block = normalizeUnknownBlock(message.content[ordinal]);
    const metadataRecord = block.metadata.raw as Record<string, unknown> | undefined;
    const partType = toPartType(block.type);
    // Block-level tool-call id wins over the top-level one; a bare block `id`
    // is only trusted when the block actually is a tool part.
    const toolCallId =
      safeString(metadataRecord?.toolCallId) ??
      safeString(metadataRecord?.tool_call_id) ??
      safeString(metadataRecord?.toolUseId) ??
      safeString(metadataRecord?.tool_use_id) ??
      safeString(metadataRecord?.call_id) ??
      (partType === "tool" ? safeString(metadataRecord?.id) : undefined) ??
      topLevelToolCallId;

    parts.push({
      sessionId,
      partType,
      ordinal,
      textContent: block.text ?? null,
      toolCallId,
      toolName:
        safeString(metadataRecord?.name) ??
        safeString(metadataRecord?.toolName) ??
        safeString(metadataRecord?.tool_name) ??
        topLevelToolName,
      // input/arguments/toolInput are serialized when present (even if falsy);
      // tool_input is assumed to already be a string.
      toolInput:
        metadataRecord?.input !== undefined
          ? toJson(metadataRecord.input)
          : metadataRecord?.arguments !== undefined
            ? toJson(metadataRecord.arguments)
            : metadataRecord?.toolInput !== undefined
              ? toJson(metadataRecord.toolInput)
              : (safeString(metadataRecord?.tool_input) ?? null),
      toolOutput:
        metadataRecord?.output !== undefined
          ? toJson(metadataRecord.output)
          : metadataRecord?.toolOutput !== undefined
            ? toJson(metadataRecord.toolOutput)
            : (safeString(metadataRecord?.tool_output) ?? null),
      metadata: toJson({
        originalRole: role,
        toolCallId: topLevelToolCallId,
        toolName: topLevelToolName,
        isError: topLevelIsError,
        rawType: block.type,
        raw: metadataRecord ?? message.content[ordinal],
      }),
    });
  }

  return parts;
}
452
+
453
+ /**
454
+ * Map AgentMessage role to the DB enum.
455
+ *
456
+ * "user" -> "user"
457
+ * "assistant" -> "assistant"
458
+ *
459
+ * AgentMessage only has user/assistant roles, but we keep the mapping
460
+ * explicit for clarity and future-proofing.
461
+ */
462
+ function toDbRole(role: string): "user" | "assistant" | "system" | "tool" {
463
+ if (role === "tool" || role === "toolResult") {
464
+ return "tool";
465
+ }
466
+ if (role === "system") {
467
+ return "system";
468
+ }
469
+ if (role === "user") {
470
+ return "user";
471
+ }
472
+ if (role === "assistant") {
473
+ return "assistant";
474
+ }
475
+ // Unknown roles are preserved via message_parts metadata and treated as assistant.
476
+ return "assistant";
477
+ }
478
+
479
+ type StoredMessage = {
480
+ role: "user" | "assistant" | "system" | "tool";
481
+ content: string;
482
+ tokenCount: number;
483
+ };
484
+
485
+ /**
486
+ * Normalize AgentMessage variants into the storage shape used by LCM.
487
+ */
488
+ function toStoredMessage(message: AgentMessage): StoredMessage {
489
+ const content =
490
+ "content" in message
491
+ ? extractMessageContent(message.content)
492
+ : "output" in message
493
+ ? `$ ${(message as { command: string; output: string }).command}\n${(message as { command: string; output: string }).output}`
494
+ : "";
495
+ const runtimeRole = toRuntimeRoleForTokenEstimate(message.role);
496
+ const tokenCount =
497
+ "content" in message
498
+ ? estimateContentTokensForRole({
499
+ role: runtimeRole,
500
+ content: message.content,
501
+ fallbackContent: content,
502
+ })
503
+ : estimateTokens(content);
504
+
505
+ return {
506
+ role: toDbRole(message.role),
507
+ content,
508
+ tokenCount,
509
+ };
510
+ }
511
+
512
+ function estimateMessageContentTokensForAfterTurn(content: unknown): number {
513
+ if (typeof content === "string") {
514
+ return estimateTokens(content);
515
+ }
516
+ if (Array.isArray(content)) {
517
+ let total = 0;
518
+ for (const part of content) {
519
+ if (!part || typeof part !== "object") {
520
+ continue;
521
+ }
522
+ const record = part as Record<string, unknown>;
523
+ const text =
524
+ typeof record.text === "string"
525
+ ? record.text
526
+ : typeof record.thinking === "string"
527
+ ? record.thinking
528
+ : "";
529
+ if (text) {
530
+ total += estimateTokens(text);
531
+ }
532
+ }
533
+ return total;
534
+ }
535
+ if (content == null) {
536
+ return 0;
537
+ }
538
+ const serialized = JSON.stringify(content);
539
+ return estimateTokens(typeof serialized === "string" ? serialized : "");
540
+ }
541
+
542
+ function estimateSessionTokenCountForAfterTurn(messages: AgentMessage[]): number {
543
+ let total = 0;
544
+ for (const message of messages) {
545
+ if ("content" in message) {
546
+ total += estimateMessageContentTokensForAfterTurn(message.content);
547
+ continue;
548
+ }
549
+ if ("command" in message || "output" in message) {
550
+ const commandText =
551
+ typeof (message as { command?: unknown }).command === "string"
552
+ ? (message as { command?: string }).command
553
+ : "";
554
+ const outputText =
555
+ typeof (message as { output?: unknown }).output === "string"
556
+ ? (message as { output?: string }).output
557
+ : "";
558
+ total += estimateTokens(`${commandText}\n${outputText}`);
559
+ }
560
+ }
561
+ return total;
562
+ }
563
+
564
+ function isBootstrapMessage(value: unknown): value is AgentMessage {
565
+ if (!value || typeof value !== "object") {
566
+ return false;
567
+ }
568
+ const msg = value as { role?: unknown; content?: unknown; command?: unknown; output?: unknown };
569
+ if (typeof msg.role !== "string") {
570
+ return false;
571
+ }
572
+ return "content" in msg || ("command" in msg && "output" in msg);
573
+ }
574
+
575
+ /** Load recoverable messages from a JSON/JSONL session file. */
576
+ function readLeafPathMessages(sessionFile: string): AgentMessage[] {
577
+ let raw = "";
578
+ try {
579
+ raw = readFileSync(sessionFile, "utf8");
580
+ } catch {
581
+ return [];
582
+ }
583
+
584
+ const trimmed = raw.trim();
585
+ if (!trimmed) {
586
+ return [];
587
+ }
588
+
589
+ if (trimmed.startsWith("[")) {
590
+ try {
591
+ const parsed = JSON.parse(trimmed);
592
+ if (!Array.isArray(parsed)) {
593
+ return [];
594
+ }
595
+ return parsed.filter(isBootstrapMessage);
596
+ } catch {
597
+ return [];
598
+ }
599
+ }
600
+
601
+ const messages: AgentMessage[] = [];
602
+ const lines = raw.split(/\r?\n/);
603
+ for (const line of lines) {
604
+ const item = line.trim();
605
+ if (!item) {
606
+ continue;
607
+ }
608
+ try {
609
+ const parsed = JSON.parse(item);
610
+ const candidate =
611
+ parsed && typeof parsed === "object" && "message" in parsed
612
+ ? (parsed as { message?: unknown }).message
613
+ : parsed;
614
+ if (isBootstrapMessage(candidate)) {
615
+ messages.push(candidate);
616
+ }
617
+ } catch {
618
+ // Skip malformed lines.
619
+ }
620
+ }
621
+ return messages;
622
+ }
623
+
624
+ function messageIdentity(role: string, content: string): string {
625
+ return `${role}\u0000${content}`;
626
+ }
627
+
628
+ // ── LcmContextEngine ────────────────────────────────────────────────────────
629
+
630
+ export class LcmContextEngine implements ContextEngine {
631
+ readonly info: ContextEngineInfo = {
632
+ id: "openclawbrain",
633
+ name: "OpenClawBrain Engine",
634
+ version: "0.1.0",
635
+ ownsCompaction: true,
636
+ };
637
+
638
+ private config: LcmConfig;
639
+
640
+ /** Get the configured timezone, falling back to system timezone. */
641
+ get timezone(): string {
642
+ return this.config.timezone ?? Intl.DateTimeFormat().resolvedOptions().timeZone;
643
+ }
644
+
645
+ private conversationStore: ConversationStore;
646
+ private summaryStore: SummaryStore;
647
+ private assembler: ContextAssembler;
648
+ private compaction: CompactionEngine;
649
+ private retrieval: RetrievalEngine;
650
+ private migrated = false;
651
+ private readonly fts5Available: boolean;
652
+ private sessionOperationQueues = new Map<string, Promise<void>>();
653
+ private largeFileTextSummarizerResolved = false;
654
+ private largeFileTextSummarizer?: (prompt: string) => Promise<string | null>;
655
+ private deps: LcmDependencies;
656
+ private brainService: BrainService | null = null;
657
+ private brainAssembler: BrainAssemblerExtension | null = null;
658
+ private pendingBrainEpisodeBySession = new Map<string, string>();
659
+
660
+ constructor(deps: LcmDependencies) {
661
+ this.deps = deps;
662
+ this.config = deps.config;
663
+
664
+ const db = getLcmConnection(this.config.databasePath);
665
+ this.fts5Available = getLcmDbFeatures(db).fts5Available;
666
+
667
+ this.conversationStore = new ConversationStore(db, { fts5Available: this.fts5Available });
668
+ this.summaryStore = new SummaryStore(db, { fts5Available: this.fts5Available });
669
+
670
+ if (!this.fts5Available) {
671
+ this.deps.log.warn(
672
+ "[lcm] FTS5 unavailable in the current Node runtime; full_text search will fall back to LIKE and indexing is disabled",
673
+ );
674
+ }
675
+
676
+ this.assembler = new ContextAssembler(
677
+ this.conversationStore,
678
+ this.summaryStore,
679
+ this.config.timezone,
680
+ );
681
+
682
+ const compactionConfig: CompactionConfig = {
683
+ contextThreshold: this.config.contextThreshold,
684
+ freshTailCount: this.config.freshTailCount,
685
+ leafMinFanout: this.config.leafMinFanout,
686
+ condensedMinFanout: this.config.condensedMinFanout,
687
+ condensedMinFanoutHard: this.config.condensedMinFanoutHard,
688
+ incrementalMaxDepth: this.config.incrementalMaxDepth,
689
+ leafChunkTokens: this.config.leafChunkTokens,
690
+ leafTargetTokens: this.config.leafTargetTokens,
691
+ condensedTargetTokens: this.config.condensedTargetTokens,
692
+ maxRounds: 10,
693
+ timezone: this.config.timezone,
694
+ };
695
+ this.compaction = new CompactionEngine(
696
+ this.conversationStore,
697
+ this.summaryStore,
698
+ compactionConfig,
699
+ );
700
+
701
+ this.retrieval = new RetrievalEngine(this.conversationStore, this.summaryStore);
702
+
703
+ if (this.config.brain?.enabled) {
704
+ try {
705
+ this.brainService = new BrainService({ deps });
706
+ this.brainAssembler = new BrainAssemblerExtension(this.brainService);
707
+ } catch (error) {
708
+ this.deps.log.warn(`[brain] Failed to initialize runtime: ${(error as Error).message}`);
709
+ }
710
+ }
711
+ }
712
+
713
+ getBrainService(): BrainService | null {
714
+ return this.brainService;
715
+ }
716
+
717
+ async getConversationIdForSessionKey(sessionKey: string): Promise<number | undefined> {
718
+ return this.resolveConversationIdForSessionKey(sessionKey);
719
+ }
720
+
721
+ /** Ensure DB schema is up-to-date. Called lazily on first bootstrap/ingest/assemble/compact. */
722
+ private ensureMigrated(): void {
723
+ if (this.migrated) {
724
+ return;
725
+ }
726
+ const db = getLcmConnection(this.config.databasePath);
727
+ runLcmMigrations(db, { fts5Available: this.fts5Available });
728
+ this.migrated = true;
729
+ }
730
+
731
+ /**
732
+ * Serialize mutating operations per session to prevent ingest/compaction races.
733
+ */
734
+ private async withSessionQueue<T>(sessionId: string, operation: () => Promise<T>): Promise<T> {
735
+ const previous = this.sessionOperationQueues.get(sessionId) ?? Promise.resolve();
736
+ let releaseQueue: () => void = () => {};
737
+ const current = new Promise<void>((resolve) => {
738
+ releaseQueue = resolve;
739
+ });
740
+ const next = previous.catch(() => {}).then(() => current);
741
+ this.sessionOperationQueues.set(sessionId, next);
742
+
743
+ await previous.catch(() => {});
744
+ try {
745
+ return await operation();
746
+ } finally {
747
+ releaseQueue();
748
+ void next.finally(() => {
749
+ if (this.sessionOperationQueues.get(sessionId) === next) {
750
+ this.sessionOperationQueues.delete(sessionId);
751
+ }
752
+ });
753
+ }
754
+ }
755
+
756
+ /** Normalize optional live token estimates supplied by runtime callers. */
757
+ private normalizeObservedTokenCount(value: unknown): number | undefined {
758
+ if (typeof value !== "number" || !Number.isFinite(value) || value <= 0) {
759
+ return undefined;
760
+ }
761
+ return Math.floor(value);
762
+ }
763
+
764
+ /** Resolve token budget from direct params or legacy fallback input. */
765
+ private resolveTokenBudget(params: {
766
+ tokenBudget?: number;
767
+ legacyParams?: Record<string, unknown>;
768
+ }): number | undefined {
769
+ const lp = params.legacyParams ?? {};
770
+ if (
771
+ typeof params.tokenBudget === "number" &&
772
+ Number.isFinite(params.tokenBudget) &&
773
+ params.tokenBudget > 0
774
+ ) {
775
+ return Math.floor(params.tokenBudget);
776
+ }
777
+ if (
778
+ typeof lp.tokenBudget === "number" &&
779
+ Number.isFinite(lp.tokenBudget) &&
780
+ lp.tokenBudget > 0
781
+ ) {
782
+ return Math.floor(lp.tokenBudget);
783
+ }
784
+ return undefined;
785
+ }
786
+
787
+ /** Resolve an LCM conversation id from a session key via the session store. */
788
+ private async resolveConversationIdForSessionKey(
789
+ sessionKey: string,
790
+ ): Promise<number | undefined> {
791
+ const trimmedKey = sessionKey.trim();
792
+ if (!trimmedKey) {
793
+ return undefined;
794
+ }
795
+ try {
796
+ const runtimeSessionId = await this.deps.resolveSessionIdFromSessionKey(trimmedKey);
797
+ if (!runtimeSessionId) {
798
+ return undefined;
799
+ }
800
+ const conversation =
801
+ await this.conversationStore.getConversationBySessionId(runtimeSessionId);
802
+ return conversation?.conversationId;
803
+ } catch {
804
+ return undefined;
805
+ }
806
+ }
807
+
808
+ /** Build a summarize callback with runtime provider fallback handling. */
809
+ private async resolveSummarize(params: {
810
+ legacyParams?: Record<string, unknown>;
811
+ customInstructions?: string;
812
+ }): Promise<(text: string, aggressive?: boolean) => Promise<string>> {
813
+ const lp = params.legacyParams ?? {};
814
+ if (typeof lp.summarize === "function") {
815
+ return lp.summarize as (text: string, aggressive?: boolean) => Promise<string>;
816
+ }
817
+ try {
818
+ const runtimeSummarizer = await createLcmSummarizeFromLegacyParams({
819
+ deps: this.deps,
820
+ legacyParams: lp,
821
+ customInstructions: params.customInstructions,
822
+ });
823
+ if (runtimeSummarizer) {
824
+ return runtimeSummarizer;
825
+ }
826
+ console.error(`[lcm] resolveSummarize: createLcmSummarizeFromLegacyParams returned undefined`);
827
+ } catch (err) {
828
+ console.error(`[lcm] resolveSummarize failed, using emergency fallback:`, err instanceof Error ? err.message : err);
829
+ }
830
+ console.error(`[lcm] resolveSummarize: FALLING BACK TO EMERGENCY TRUNCATION`);
831
+ return createEmergencyFallbackSummarize();
832
+ }
833
+
834
+ /**
835
+ * Resolve an optional model-backed summarizer for large text file exploration.
836
+ *
837
+ * This is opt-in via env so ingest remains deterministic and lightweight when
838
+ * no summarization model is configured.
839
+ */
840
+ private async resolveLargeFileTextSummarizer(): Promise<
841
+ ((prompt: string) => Promise<string | null>) | undefined
842
+ > {
843
+ if (this.largeFileTextSummarizerResolved) {
844
+ return this.largeFileTextSummarizer;
845
+ }
846
+ this.largeFileTextSummarizerResolved = true;
847
+
848
+ const provider = this.deps.config.largeFileSummaryProvider;
849
+ const model = this.deps.config.largeFileSummaryModel;
850
+ if (!provider || !model) {
851
+ return undefined;
852
+ }
853
+
854
+ try {
855
+ const summarize = await createLcmSummarizeFromLegacyParams({
856
+ deps: this.deps,
857
+ legacyParams: { provider, model },
858
+ });
859
+ if (!summarize) {
860
+ return undefined;
861
+ }
862
+
863
+ this.largeFileTextSummarizer = async (prompt: string): Promise<string | null> => {
864
+ const summary = await summarize(prompt, false);
865
+ if (typeof summary !== "string") {
866
+ return null;
867
+ }
868
+ const trimmed = summary.trim();
869
+ return trimmed.length > 0 ? trimmed : null;
870
+ };
871
+ return this.largeFileTextSummarizer;
872
+ } catch {
873
+ return undefined;
874
+ }
875
+ }
876
+
877
+ /** Persist intercepted large-file text payloads to ~/.openclaw/lcm-files. */
878
+ private async storeLargeFileContent(params: {
879
+ conversationId: number;
880
+ fileId: string;
881
+ extension: string;
882
+ content: string;
883
+ }): Promise<string> {
884
+ const dir = join(homedir(), ".openclaw", "lcm-files", String(params.conversationId));
885
+ await mkdir(dir, { recursive: true });
886
+
887
+ const normalizedExtension = params.extension.replace(/[^a-z0-9]/gi, "").toLowerCase() || "txt";
888
+ const filePath = join(dir, `${params.fileId}.${normalizedExtension}`);
889
+ await writeFile(filePath, params.content, "utf8");
890
+ return filePath;
891
+ }
892
+
893
  /**
   * Intercept oversized <file> blocks before persistence and replace them with
   * compact file references backed by large_files records.
   *
   * Returns null when the content has no file blocks, or none exceed the
   * configured token threshold; otherwise returns the rewritten content plus
   * the ids of the files stored to disk/DB.
   */
  private async interceptLargeFiles(params: {
    conversationId: number;
    content: string;
  }): Promise<{ rewrittenContent: string; fileIds: string[] } | null> {
    const blocks = parseFileBlocks(params.content);
    if (blocks.length === 0) {
      return null;
    }

    const threshold = Math.max(1, this.config.largeFileTokenThreshold);
    const summarizeText = await this.resolveLargeFileTextSummarizer();
    const fileIds: string[] = [];
    const rewrittenSegments: string[] = [];
    // Cursor tracks how far into the original content has been copied out.
    let cursor = 0;
    let interceptedAny = false;

    for (const block of blocks) {
      const blockTokens = estimateTokens(block.text);
      if (blockTokens < threshold) {
        // Small blocks stay inline untouched.
        continue;
      }

      interceptedAny = true;
      // Id shape: "file_" + first 16 hex chars of a dash-stripped UUID.
      const fileId = `file_${randomUUID().replace(/-/g, "").slice(0, 16)}`;
      const extension = extensionFromNameOrMime(block.fileName, block.mimeType);
      const storageUri = await this.storeLargeFileContent({
        conversationId: params.conversationId,
        fileId,
        extension,
        content: block.text,
      });
      const byteSize = Buffer.byteLength(block.text, "utf8");
      const explorationSummary = await generateExplorationSummary({
        content: block.text,
        fileName: block.fileName,
        mimeType: block.mimeType,
        summarizeText,
      });

      await this.summaryStore.insertLargeFile({
        fileId,
        conversationId: params.conversationId,
        fileName: block.fileName,
        mimeType: block.mimeType,
        byteSize,
        storageUri,
        explorationSummary,
      });

      // Copy untouched text up to this block, then emit the compact reference.
      rewrittenSegments.push(params.content.slice(cursor, block.start));
      rewrittenSegments.push(
        formatFileReference({
          fileId,
          fileName: block.fileName,
          mimeType: block.mimeType,
          byteSize,
          summary: explorationSummary,
        }),
      );
      cursor = block.end;
      fileIds.push(fileId);
    }

    if (!interceptedAny) {
      return null;
    }

    // Append the remainder after the last intercepted block.
    rewrittenSegments.push(params.content.slice(cursor));
    return {
      rewrittenContent: rewrittenSegments.join(""),
      fileIds,
    };
  }
970
+
971
+ // ── ContextEngine interface ─────────────────────────────────────────────
972
+
973
  /**
   * Reconcile session-file history with persisted messages and append only the
   * tail that is present in JSONL but missing from LCM.
   *
   * @returns importedMessages — how many tail messages were ingested;
   *          hasOverlap — whether any JSONL message was found in the DB.
   */
  private async reconcileSessionTail(params: {
    sessionId: string;
    conversationId: number;
    historicalMessages: AgentMessage[];
  }): Promise<{
    importedMessages: number;
    hasOverlap: boolean;
  }> {
    const { sessionId, conversationId, historicalMessages } = params;
    if (historicalMessages.length === 0) {
      return { importedMessages: 0, hasOverlap: false };
    }

    const latestDbMessage = await this.conversationStore.getLastMessage(conversationId);
    if (!latestDbMessage) {
      // Empty conversation: nothing to anchor against.
      return { importedMessages: 0, hasOverlap: false };
    }

    const storedHistoricalMessages = historicalMessages.map((message) => toStoredMessage(message));

    // Fast path: one tail comparison for the common in-sync case.
    const latestHistorical = storedHistoricalMessages[storedHistoricalMessages.length - 1];
    const latestIdentity = messageIdentity(latestDbMessage.role, latestDbMessage.content);
    if (latestIdentity === messageIdentity(latestHistorical.role, latestHistorical.content)) {
      // Same tail content — also require matching occurrence counts so
      // duplicated messages cannot mask a gap earlier in the stream.
      const dbOccurrences = await this.conversationStore.countMessagesByIdentity(
        conversationId,
        latestDbMessage.role,
        latestDbMessage.content,
      );
      let historicalOccurrences = 0;
      for (const stored of storedHistoricalMessages) {
        if (messageIdentity(stored.role, stored.content) === latestIdentity) {
          historicalOccurrences += 1;
        }
      }
      if (dbOccurrences === historicalOccurrences) {
        return { importedMessages: 0, hasOverlap: true };
      }
    }

    // Slow path: walk backward through JSONL to find the most recent anchor
    // message that already exists in LCM, then append everything after it.
    let anchorIndex = -1;
    const historicalIdentityTotals = new Map<string, number>();
    for (const stored of storedHistoricalMessages) {
      const identity = messageIdentity(stored.role, stored.content);
      historicalIdentityTotals.set(identity, (historicalIdentityTotals.get(identity) ?? 0) + 1);
    }

    const historicalIdentityCountsAfterIndex = new Map<string, number>();
    // Cache of DB occurrence counts per identity to avoid repeat queries.
    const dbIdentityCounts = new Map<string, number>();
    for (let index = storedHistoricalMessages.length - 1; index >= 0; index--) {
      const stored = storedHistoricalMessages[index];
      const identity = messageIdentity(stored.role, stored.content);
      const seenAfter = historicalIdentityCountsAfterIndex.get(identity) ?? 0;
      const total = historicalIdentityTotals.get(identity) ?? 0;
      // Number of occurrences of this identity in JSONL up to (and incl.) index.
      const occurrencesThroughIndex = total - seenAfter;
      const exists = await this.conversationStore.hasMessage(
        conversationId,
        stored.role,
        stored.content,
      );
      historicalIdentityCountsAfterIndex.set(identity, seenAfter + 1);
      if (!exists) {
        continue;
      }

      let dbCountForIdentity = dbIdentityCounts.get(identity);
      if (dbCountForIdentity === undefined) {
        dbCountForIdentity = await this.conversationStore.countMessagesByIdentity(
          conversationId,
          stored.role,
          stored.content,
        );
        dbIdentityCounts.set(identity, dbCountForIdentity);
      }

      // Match the same occurrence index as the DB tail so repeated empty
      // tool messages do not anchor against a later, still-missing entry.
      if (dbCountForIdentity !== occurrencesThroughIndex) {
        continue;
      }

      anchorIndex = index;
      break;
    }

    if (anchorIndex < 0) {
      // No shared message at all: do not guess — import nothing.
      return { importedMessages: 0, hasOverlap: false };
    }
    if (anchorIndex >= historicalMessages.length - 1) {
      // Anchor is already the last JSONL message: nothing missing.
      return { importedMessages: 0, hasOverlap: true };
    }

    // Append the JSONL tail after the anchor, one message at a time.
    const missingTail = historicalMessages.slice(anchorIndex + 1);
    let importedMessages = 0;
    for (const message of missingTail) {
      const result = await this.ingestSingle({ sessionId, message });
      if (result.ingested) {
        importedMessages += 1;
      }
    }

    return { importedMessages, hasOverlap: true };
  }
1082
+
1083
  /**
   * Bootstrap a session's LCM conversation from its JSONL session file.
   *
   * Outcomes:
   *  - empty conversation + leaf-path messages → bulk import (bootstrapped);
   *  - existing conversation → reconcile any JSONL tail missing from LCM;
   *  - otherwise → no-op with a reason string.
   *
   * Runs inside the per-session queue and a single store transaction.
   * When configured, HEARTBEAT_OK turns are pruned after import.
   */
  async bootstrap(params: { sessionId: string; sessionFile: string }): Promise<BootstrapResult> {
    this.ensureMigrated();

    const result = await this.withSessionQueue(params.sessionId, async () =>
      this.conversationStore.withTransaction(async () => {
        const conversation = await this.conversationStore.getOrCreateConversation(params.sessionId);
        const conversationId = conversation.conversationId;
        const historicalMessages = readLeafPathMessages(params.sessionFile);

        // First-time import path: no LCM rows yet, so seed directly from the
        // active leaf context snapshot.
        const existingCount = await this.conversationStore.getMessageCount(conversationId);
        if (existingCount === 0) {
          if (historicalMessages.length === 0) {
            // Mark bootstrapped even when empty so future calls short-circuit.
            await this.conversationStore.markConversationBootstrapped(conversationId);
            return {
              bootstrapped: false,
              importedMessages: 0,
              reason: "no leaf-path messages in session",
            };
          }

          const nextSeq = (await this.conversationStore.getMaxSeq(conversationId)) + 1;
          const bulkInput = historicalMessages.map((message, index) => {
            const stored = toStoredMessage(message);
            return {
              conversationId,
              seq: nextSeq + index,
              role: stored.role,
              content: stored.content,
              tokenCount: stored.tokenCount,
            };
          });

          const inserted = await this.conversationStore.createMessagesBulk(bulkInput);
          // Register imported messages as context items for the assembler.
          await this.summaryStore.appendContextMessages(
            conversationId,
            inserted.map((record) => record.messageId),
          );
          await this.conversationStore.markConversationBootstrapped(conversationId);

          // Prune HEARTBEAT_OK turns from the freshly imported data
          if (this.config.pruneHeartbeatOk) {
            const pruned = await this.pruneHeartbeatOkTurns(conversationId);
            if (pruned > 0) {
              console.error(
                `[lcm] bootstrap: pruned ${pruned} HEARTBEAT_OK messages from conversation ${conversationId}`,
              );
            }
          }

          return {
            bootstrapped: true,
            importedMessages: inserted.length,
          };
        }

        // Existing conversation path: reconcile crash gaps by appending JSONL
        // messages that were never persisted to LCM.
        const reconcile = await this.reconcileSessionTail({
          sessionId: params.sessionId,
          conversationId,
          historicalMessages,
        });

        if (!conversation.bootstrappedAt) {
          await this.conversationStore.markConversationBootstrapped(conversationId);
        }

        if (reconcile.importedMessages > 0) {
          return {
            bootstrapped: true,
            importedMessages: reconcile.importedMessages,
            reason: "reconciled missing session messages",
          };
        }

        if (conversation.bootstrappedAt) {
          return {
            bootstrapped: false,
            importedMessages: 0,
            reason: "already bootstrapped",
          };
        }

        return {
          bootstrapped: false,
          importedMessages: 0,
          reason: reconcile.hasOverlap
            ? "conversation already up to date"
            : "conversation already has messages",
        };
      }),
    );

    // Post-bootstrap pruning: clean HEARTBEAT_OK turns that were already
    // in the DB from prior bootstrap cycles (before pruning was enabled).
    if (this.config.pruneHeartbeatOk && result.bootstrapped === false) {
      try {
        const conversation = await this.conversationStore.getConversationBySessionId(
          params.sessionId,
        );
        if (conversation) {
          const pruned = await this.pruneHeartbeatOkTurns(conversation.conversationId);
          if (pruned > 0) {
            console.error(
              `[lcm] bootstrap: retroactively pruned ${pruned} HEARTBEAT_OK messages from conversation ${conversation.conversationId}`,
            );
          }
        }
      } catch (err) {
        // Pruning is best-effort; never fail bootstrap over it.
        console.error(
          `[lcm] bootstrap: heartbeat pruning failed:`,
          err instanceof Error ? err.message : err,
        );
      }
    }

    return result;
  }
1203
+
1204
  /**
   * Persist a single message: get/create the conversation, rewrite oversized
   * user <file> blocks, insert the message at the next sequence number with
   * its parts, register it as a context item, and feed the brain harvester
   * when available.
   *
   * Heartbeat messages are skipped entirely ({ ingested: false }).
   * Caller is responsible for per-session serialization (withSessionQueue).
   */
  private async ingestSingle(params: {
    sessionId: string;
    message: AgentMessage;
    isHeartbeat?: boolean;
    brainEpisodeId?: string;
  }): Promise<IngestResult> {
    const { sessionId, message, isHeartbeat } = params;
    if (isHeartbeat) {
      return { ingested: false };
    }
    const stored = toStoredMessage(message);

    // Get or create conversation for this session
    const conversation = await this.conversationStore.getOrCreateConversation(sessionId);
    const conversationId = conversation.conversationId;

    // Large-file interception applies only to user messages; when content is
    // rewritten, the message used for part extraction must match the new text.
    let messageForParts = message;
    if (stored.role === "user") {
      const intercepted = await this.interceptLargeFiles({
        conversationId,
        content: stored.content,
      });
      if (intercepted) {
        stored.content = intercepted.rewrittenContent;
        // Token count must be re-estimated after the rewrite.
        stored.tokenCount = estimateTokens(stored.content);
        if ("content" in message) {
          messageForParts = {
            ...message,
            content: stored.content,
          } as AgentMessage;
        }
      }
    }

    // Determine next sequence number
    const maxSeq = await this.conversationStore.getMaxSeq(conversationId);
    const seq = maxSeq + 1;

    // Persist the message
    const msgRecord = await this.conversationStore.createMessage({
      conversationId,
      seq,
      role: stored.role,
      content: stored.content,
      tokenCount: stored.tokenCount,
    });
    const messageParts = buildMessageParts({
      sessionId,
      message: messageForParts,
      fallbackContent: stored.content,
    });
    await this.conversationStore.createMessageParts(
      msgRecord.messageId,
      messageParts,
    );

    // Append to context items so assembler can see it
    await this.summaryStore.appendContextMessage(conversationId, msgRecord.messageId);

    if (this.brainService) {
      // Episode id: explicit param wins; else the session's pending episode.
      await this.brainService.harvestFromMessage({
        conversationId,
        episodeId: params.brainEpisodeId ?? this.pendingBrainEpisodeBySession.get(sessionId),
        role: stored.role,
        content: stored.content,
        messageParts,
      });
    }

    return { ingested: true };
  }
1275
+
1276
+ async ingest(params: {
1277
+ sessionId: string;
1278
+ message: AgentMessage;
1279
+ isHeartbeat?: boolean;
1280
+ brainEpisodeId?: string;
1281
+ }): Promise<IngestResult> {
1282
+ this.ensureMigrated();
1283
+ return this.withSessionQueue(params.sessionId, () => this.ingestSingle(params));
1284
+ }
1285
+
1286
+ async ingestBatch(params: {
1287
+ sessionId: string;
1288
+ messages: AgentMessage[];
1289
+ isHeartbeat?: boolean;
1290
+ brainEpisodeId?: string;
1291
+ }): Promise<IngestBatchResult> {
1292
+ this.ensureMigrated();
1293
+ if (params.messages.length === 0) {
1294
+ return { ingestedCount: 0 };
1295
+ }
1296
+ return this.withSessionQueue(params.sessionId, async () => {
1297
+ let ingestedCount = 0;
1298
+ for (const message of params.messages) {
1299
+ const result = await this.ingestSingle({
1300
+ sessionId: params.sessionId,
1301
+ message,
1302
+ isHeartbeat: params.isHeartbeat,
1303
+ brainEpisodeId: params.brainEpisodeId,
1304
+ });
1305
+ if (result.ingested) {
1306
+ ingestedCount += 1;
1307
+ }
1308
+ }
1309
+ return { ingestedCount };
1310
+ });
1311
+ }
1312
+
1313
+ async afterTurn(params: {
1314
+ sessionId: string;
1315
+ sessionFile: string;
1316
+ messages: AgentMessage[];
1317
+ prePromptMessageCount: number;
1318
+ autoCompactionSummary?: string;
1319
+ isHeartbeat?: boolean;
1320
+ tokenBudget?: number;
1321
+ legacyCompactionParams?: Record<string, unknown>;
1322
+ }): Promise<void> {
1323
+ this.ensureMigrated();
1324
+
1325
+ const ingestBatch: AgentMessage[] = [];
1326
+ if (params.autoCompactionSummary) {
1327
+ ingestBatch.push({
1328
+ role: "user",
1329
+ content: params.autoCompactionSummary,
1330
+ } as AgentMessage);
1331
+ }
1332
+
1333
+ const newMessages = params.messages.slice(params.prePromptMessageCount);
1334
+ ingestBatch.push(...newMessages);
1335
+ if (ingestBatch.length === 0) {
1336
+ return;
1337
+ }
1338
+
1339
+ const completedTurnBrainEpisodeId = this.pendingBrainEpisodeBySession.get(params.sessionId);
1340
+ try {
1341
+ await this.ingestBatch({
1342
+ sessionId: params.sessionId,
1343
+ messages: ingestBatch,
1344
+ isHeartbeat: params.isHeartbeat === true,
1345
+ brainEpisodeId: completedTurnBrainEpisodeId,
1346
+ });
1347
+ } catch {
1348
+ // Continue with proactive compaction even if ingest fails.
1349
+ } finally {
1350
+ this.pendingBrainEpisodeBySession.delete(params.sessionId);
1351
+ }
1352
+
1353
+ const tokenBudget =
1354
+ typeof params.tokenBudget === "number" &&
1355
+ Number.isFinite(params.tokenBudget) &&
1356
+ params.tokenBudget > 0
1357
+ ? Math.floor(params.tokenBudget)
1358
+ : undefined;
1359
+ if (!tokenBudget) {
1360
+ return;
1361
+ }
1362
+
1363
+ const liveContextTokens = estimateSessionTokenCountForAfterTurn(params.messages);
1364
+
1365
+ try {
1366
+ const leafTrigger = await this.evaluateLeafTrigger(params.sessionId);
1367
+ if (leafTrigger.shouldCompact) {
1368
+ this.compactLeafAsync({
1369
+ sessionId: params.sessionId,
1370
+ sessionFile: params.sessionFile,
1371
+ tokenBudget,
1372
+ currentTokenCount: liveContextTokens,
1373
+ legacyParams: params.legacyCompactionParams,
1374
+ }).catch(() => {
1375
+ // Leaf compaction is best-effort and should not fail the caller.
1376
+ });
1377
+ }
1378
+ } catch {
1379
+ // Leaf trigger checks are best-effort.
1380
+ }
1381
+
1382
+ try {
1383
+ await this.compact({
1384
+ sessionId: params.sessionId,
1385
+ sessionFile: params.sessionFile,
1386
+ tokenBudget,
1387
+ currentTokenCount: liveContextTokens,
1388
+ compactionTarget: "threshold",
1389
+ legacyParams: params.legacyCompactionParams,
1390
+ });
1391
+ } catch {
1392
+ // Proactive compaction is best-effort in the post-turn lifecycle.
1393
+ }
1394
+ }
1395
+
1396
+ async assemble(params: {
1397
+ sessionId: string;
1398
+ messages: AgentMessage[];
1399
+ tokenBudget?: number;
1400
+ }): Promise<AssembleResult> {
1401
+ try {
1402
+ this.ensureMigrated();
1403
+
1404
+ const tokenBudget =
1405
+ typeof params.tokenBudget === "number" &&
1406
+ Number.isFinite(params.tokenBudget) &&
1407
+ params.tokenBudget > 0
1408
+ ? Math.floor(params.tokenBudget)
1409
+ : 128_000;
1410
+
1411
+ const brainDecision = this.brainAssembler?.decide({
1412
+ tokenBudget,
1413
+ liveMessages: params.messages,
1414
+ });
1415
+ const shouldRouteThroughBrain =
1416
+ brainDecision?.mode === "use_brain" || brainDecision?.mode === "shadow";
1417
+ let conversation = await this.conversationStore.getConversationBySessionId(
1418
+ params.sessionId,
1419
+ );
1420
+ if (!conversation && shouldRouteThroughBrain) {
1421
+ conversation = await this.conversationStore.getOrCreateConversation(params.sessionId);
1422
+ }
1423
+ if (!conversation) {
1424
+ if (brainDecision && this.brainService && !shouldRouteThroughBrain) {
1425
+ this.brainService.noteAssemblyDecision({
1426
+ mode: brainDecision.mode,
1427
+ footer: `[brain] bypassed: ${brainDecision.mode}.`,
1428
+ });
1429
+ }
1430
+ appendValidationAssemblyRecord({
1431
+ sessionId: params.sessionId,
1432
+ conversationId: null,
1433
+ queryText: extractLatestUserText(params.messages),
1434
+ mode: brainDecision?.mode ?? null,
1435
+ footer: brainDecision ? `[brain] bypassed: ${brainDecision.mode}.` : null,
1436
+ traceId: null,
1437
+ episodeId: null,
1438
+ tokenBudget,
1439
+ });
1440
+ this.pendingBrainEpisodeBySession.delete(params.sessionId);
1441
+ return {
1442
+ messages: params.messages,
1443
+ estimatedTokens: 0,
1444
+ };
1445
+ }
1446
+
1447
+ const contextItems = await this.summaryStore.getContextItems(conversation.conversationId);
1448
+ const hasSummaryItems = contextItems.some((item) => item.itemType === "summary");
1449
+ const canUseAssembledContext =
1450
+ contextItems.length > 0
1451
+ && (hasSummaryItems || contextItems.length >= params.messages.length);
1452
+
1453
+ let assembled: AssembleContextResult;
1454
+ if (canUseAssembledContext) {
1455
+ assembled = await this.assembler.assemble({
1456
+ conversationId: conversation.conversationId,
1457
+ tokenBudget,
1458
+ freshTailCount: this.config.freshTailCount,
1459
+ });
1460
+ } else if (shouldRouteThroughBrain) {
1461
+ assembled = {
1462
+ messages: params.messages,
1463
+ estimatedTokens: estimateSessionTokenCountForAfterTurn(params.messages),
1464
+ stats: {
1465
+ rawMessageCount: params.messages.length,
1466
+ summaryCount: hasSummaryItems ? 1 : 0,
1467
+ totalContextItems: contextItems.length,
1468
+ },
1469
+ };
1470
+ } else {
1471
+ if (brainDecision && this.brainService) {
1472
+ this.brainService.noteAssemblyDecision({
1473
+ mode: brainDecision.mode,
1474
+ conversationId: conversation.conversationId,
1475
+ footer: `[brain] bypassed: ${brainDecision.mode}.`,
1476
+ });
1477
+ }
1478
+ appendValidationAssemblyRecord({
1479
+ sessionId: params.sessionId,
1480
+ conversationId: conversation.conversationId,
1481
+ queryText: extractLatestUserText(params.messages),
1482
+ mode: brainDecision?.mode ?? null,
1483
+ footer: brainDecision ? `[brain] bypassed: ${brainDecision.mode}.` : null,
1484
+ traceId: null,
1485
+ episodeId: null,
1486
+ tokenBudget,
1487
+ });
1488
+ this.pendingBrainEpisodeBySession.delete(params.sessionId);
1489
+ return {
1490
+ messages: params.messages,
1491
+ estimatedTokens: 0,
1492
+ };
1493
+ }
1494
+
1495
+ const hybrid: AssembleContextResult | BrainAssembledContextResult = this.brainAssembler
1496
+ ? await this.brainAssembler.augmentAssembly({
1497
+ conversationId: conversation.conversationId,
1498
+ tokenBudget,
1499
+ assembled,
1500
+ liveMessages: params.messages,
1501
+ })
1502
+ : assembled;
1503
+ if (hybrid.brainDecision?.episodeId) {
1504
+ this.pendingBrainEpisodeBySession.set(params.sessionId, hybrid.brainDecision.episodeId);
1505
+ } else {
1506
+ this.pendingBrainEpisodeBySession.delete(params.sessionId);
1507
+ }
1508
+ appendValidationAssemblyRecord({
1509
+ sessionId: params.sessionId,
1510
+ conversationId: conversation.conversationId,
1511
+ queryText: extractLatestUserText(params.messages),
1512
+ mode: hybrid.brainDecision?.mode ?? null,
1513
+ footer: hybrid.brainDecision?.footer ?? null,
1514
+ traceId: hybrid.brainDecision?.traceId ?? null,
1515
+ episodeId: hybrid.brainDecision?.episodeId ?? null,
1516
+ tokenBudget,
1517
+ });
1518
+
1519
+ // If assembly produced no messages for a non-empty live session,
1520
+ // fail safe to the live context.
1521
+ if (hybrid.messages.length === 0 && params.messages.length > 0) {
1522
+ this.pendingBrainEpisodeBySession.delete(params.sessionId);
1523
+ return {
1524
+ messages: params.messages,
1525
+ estimatedTokens: 0,
1526
+ };
1527
+ }
1528
+
1529
+ const result: AssembleResultWithSystemPrompt = {
1530
+ messages: hybrid.messages,
1531
+ estimatedTokens: hybrid.estimatedTokens,
1532
+ ...(hybrid.systemPromptAddition
1533
+ ? { systemPromptAddition: hybrid.systemPromptAddition }
1534
+ : {}),
1535
+ };
1536
+ return result;
1537
+ } catch {
1538
+ this.pendingBrainEpisodeBySession.delete(params.sessionId);
1539
+ return {
1540
+ messages: params.messages,
1541
+ estimatedTokens: 0,
1542
+ };
1543
+ }
1544
+ }
1545
+
1546
+ /** Evaluate whether incremental leaf compaction should run for a session. */
1547
+ async evaluateLeafTrigger(sessionId: string): Promise<{
1548
+ shouldCompact: boolean;
1549
+ rawTokensOutsideTail: number;
1550
+ threshold: number;
1551
+ }> {
1552
+ this.ensureMigrated();
1553
+ const conversation = await this.conversationStore.getConversationBySessionId(sessionId);
1554
+ if (!conversation) {
1555
+ const fallbackThreshold =
1556
+ typeof this.config.leafChunkTokens === "number" &&
1557
+ Number.isFinite(this.config.leafChunkTokens) &&
1558
+ this.config.leafChunkTokens > 0
1559
+ ? Math.floor(this.config.leafChunkTokens)
1560
+ : 20_000;
1561
+ return {
1562
+ shouldCompact: false,
1563
+ rawTokensOutsideTail: 0,
1564
+ threshold: fallbackThreshold,
1565
+ };
1566
+ }
1567
+ return this.compaction.evaluateLeafTrigger(conversation.conversationId);
1568
+ }
1569
+
1570
+ /** Run one incremental leaf compaction pass in the per-session queue. */
1571
+ async compactLeafAsync(params: {
1572
+ sessionId: string;
1573
+ sessionFile: string;
1574
+ tokenBudget?: number;
1575
+ currentTokenCount?: number;
1576
+ customInstructions?: string;
1577
+ legacyParams?: Record<string, unknown>;
1578
+ force?: boolean;
1579
+ previousSummaryContent?: string;
1580
+ }): Promise<CompactResult> {
1581
+ this.ensureMigrated();
1582
+ return this.withSessionQueue(params.sessionId, async () => {
1583
+ const conversation = await this.conversationStore.getConversationBySessionId(
1584
+ params.sessionId,
1585
+ );
1586
+ if (!conversation) {
1587
+ return {
1588
+ ok: true,
1589
+ compacted: false,
1590
+ reason: "no conversation found for session",
1591
+ };
1592
+ }
1593
+
1594
+ const tokenBudget = this.resolveTokenBudget(params);
1595
+ if (!tokenBudget) {
1596
+ return {
1597
+ ok: false,
1598
+ compacted: false,
1599
+ reason: "missing token budget in compact params",
1600
+ };
1601
+ }
1602
+
1603
+ const lp = params.legacyParams ?? {};
1604
+ const observedTokens = this.normalizeObservedTokenCount(
1605
+ params.currentTokenCount ??
1606
+ (
1607
+ lp as {
1608
+ currentTokenCount?: unknown;
1609
+ }
1610
+ ).currentTokenCount,
1611
+ );
1612
+ const summarize = await this.resolveSummarize({
1613
+ legacyParams: params.legacyParams,
1614
+ customInstructions: params.customInstructions,
1615
+ });
1616
+
1617
+ const leafResult = await this.compaction.compactLeaf({
1618
+ conversationId: conversation.conversationId,
1619
+ tokenBudget,
1620
+ summarize,
1621
+ force: params.force,
1622
+ previousSummaryContent: params.previousSummaryContent,
1623
+ });
1624
+ const tokensBefore = observedTokens ?? leafResult.tokensBefore;
1625
+
1626
+ return {
1627
+ ok: true,
1628
+ compacted: leafResult.actionTaken,
1629
+ reason: leafResult.actionTaken ? "compacted" : "below threshold",
1630
+ result: {
1631
+ tokensBefore,
1632
+ tokensAfter: leafResult.tokensAfter,
1633
+ details: {
1634
+ rounds: leafResult.actionTaken ? 1 : 0,
1635
+ targetTokens: tokenBudget,
1636
+ mode: "leaf",
1637
+ },
1638
+ },
1639
+ };
1640
+ });
1641
+ }
1642
+
1643
+ async compact(params: {
1644
+ sessionId: string;
1645
+ sessionFile: string;
1646
+ tokenBudget?: number;
1647
+ currentTokenCount?: number;
1648
+ compactionTarget?: "budget" | "threshold";
1649
+ customInstructions?: string;
1650
+ legacyParams?: Record<string, unknown>;
1651
+ /** Force compaction even if below threshold */
1652
+ force?: boolean;
1653
+ }): Promise<CompactResult> {
1654
+ this.ensureMigrated();
1655
+ return this.withSessionQueue(params.sessionId, async () => {
1656
+ const { sessionId, force = false } = params;
1657
+
1658
+ // Look up conversation
1659
+ const conversation = await this.conversationStore.getConversationBySessionId(sessionId);
1660
+ if (!conversation) {
1661
+ return {
1662
+ ok: true,
1663
+ compacted: false,
1664
+ reason: "no conversation found for session",
1665
+ };
1666
+ }
1667
+
1668
+ const conversationId = conversation.conversationId;
1669
+
1670
+ const lp = params.legacyParams ?? {};
1671
+ const manualCompactionRequested =
1672
+ (
1673
+ lp as {
1674
+ manualCompaction?: unknown;
1675
+ }
1676
+ ).manualCompaction === true;
1677
+ const forceCompaction = force || manualCompactionRequested;
1678
+ const tokenBudget = this.resolveTokenBudget(params);
1679
+ if (!tokenBudget) {
1680
+ return {
1681
+ ok: false,
1682
+ compacted: false,
1683
+ reason: "missing token budget in compact params",
1684
+ };
1685
+ }
1686
+
1687
+ const summarize = await this.resolveSummarize({
1688
+ legacyParams: params.legacyParams,
1689
+ customInstructions: params.customInstructions,
1690
+ });
1691
+
1692
+ // Evaluate whether compaction is needed (unless forced)
1693
+ const observedTokens = this.normalizeObservedTokenCount(
1694
+ params.currentTokenCount ??
1695
+ (
1696
+ lp as {
1697
+ currentTokenCount?: unknown;
1698
+ }
1699
+ ).currentTokenCount,
1700
+ );
1701
+ const decision =
1702
+ observedTokens !== undefined
1703
+ ? await this.compaction.evaluate(conversationId, tokenBudget, observedTokens)
1704
+ : await this.compaction.evaluate(conversationId, tokenBudget);
1705
+ const targetTokens =
1706
+ params.compactionTarget === "threshold" ? decision.threshold : tokenBudget;
1707
+ const liveContextStillExceedsTarget =
1708
+ observedTokens !== undefined && observedTokens >= targetTokens;
1709
+
1710
+ if (!forceCompaction && !decision.shouldCompact) {
1711
+ return {
1712
+ ok: true,
1713
+ compacted: false,
1714
+ reason: "below threshold",
1715
+ result: {
1716
+ tokensBefore: decision.currentTokens,
1717
+ },
1718
+ };
1719
+ }
1720
+
1721
+ const useSweep =
1722
+ manualCompactionRequested || forceCompaction || params.compactionTarget === "threshold";
1723
+ if (useSweep) {
1724
+ const sweepResult = await this.compaction.compactFullSweep({
1725
+ conversationId,
1726
+ tokenBudget,
1727
+ summarize,
1728
+ force: forceCompaction,
1729
+ hardTrigger: false,
1730
+ });
1731
+
1732
+ return {
1733
+ ok: sweepResult.actionTaken || !liveContextStillExceedsTarget,
1734
+ compacted: sweepResult.actionTaken,
1735
+ reason: sweepResult.actionTaken
1736
+ ? "compacted"
1737
+ : manualCompactionRequested
1738
+ ? "nothing to compact"
1739
+ : liveContextStillExceedsTarget
1740
+ ? "live context still exceeds target"
1741
+ : "already under target",
1742
+ result: {
1743
+ tokensBefore: decision.currentTokens,
1744
+ tokensAfter: sweepResult.tokensAfter,
1745
+ details: {
1746
+ rounds: sweepResult.actionTaken ? 1 : 0,
1747
+ targetTokens,
1748
+ },
1749
+ },
1750
+ };
1751
+ }
1752
+
1753
+ // When forced, use the token budget as target
1754
+ const convergenceTargetTokens = forceCompaction
1755
+ ? tokenBudget
1756
+ : params.compactionTarget === "threshold"
1757
+ ? decision.threshold
1758
+ : tokenBudget;
1759
+
1760
+ const compactResult = await this.compaction.compactUntilUnder({
1761
+ conversationId,
1762
+ tokenBudget,
1763
+ targetTokens: convergenceTargetTokens,
1764
+ ...(observedTokens !== undefined ? { currentTokens: observedTokens } : {}),
1765
+ summarize,
1766
+ });
1767
+ const didCompact = compactResult.rounds > 0;
1768
+
1769
+ return {
1770
+ ok: compactResult.success,
1771
+ compacted: didCompact,
1772
+ reason: compactResult.success
1773
+ ? didCompact
1774
+ ? "compacted"
1775
+ : "already under target"
1776
+ : "could not reach target",
1777
+ result: {
1778
+ tokensBefore: decision.currentTokens,
1779
+ tokensAfter: compactResult.finalTokens,
1780
+ details: {
1781
+ rounds: compactResult.rounds,
1782
+ targetTokens: convergenceTargetTokens,
1783
+ },
1784
+ },
1785
+ };
1786
+ });
1787
+ }
1788
+
1789
+ async prepareSubagentSpawn(params: {
1790
+ parentSessionKey: string;
1791
+ childSessionKey: string;
1792
+ ttlMs?: number;
1793
+ }): Promise<SubagentSpawnPreparation | undefined> {
1794
+ this.ensureMigrated();
1795
+
1796
+ const childSessionKey = params.childSessionKey.trim();
1797
+ const parentSessionKey = params.parentSessionKey.trim();
1798
+ if (!childSessionKey || !parentSessionKey) {
1799
+ return undefined;
1800
+ }
1801
+
1802
+ const conversationId = await this.resolveConversationIdForSessionKey(parentSessionKey);
1803
+ if (typeof conversationId !== "number") {
1804
+ return undefined;
1805
+ }
1806
+
1807
+ const ttlMs =
1808
+ typeof params.ttlMs === "number" && Number.isFinite(params.ttlMs) && params.ttlMs > 0
1809
+ ? Math.floor(params.ttlMs)
1810
+ : undefined;
1811
+
1812
+ createDelegatedExpansionGrant({
1813
+ delegatedSessionKey: childSessionKey,
1814
+ issuerSessionId: parentSessionKey,
1815
+ allowedConversationIds: [conversationId],
1816
+ tokenCap: this.config.maxExpandTokens,
1817
+ ttlMs,
1818
+ });
1819
+
1820
+ return {
1821
+ rollback: () => {
1822
+ revokeDelegatedExpansionGrantForSession(childSessionKey, { removeBinding: true });
1823
+ },
1824
+ };
1825
+ }
1826
+
1827
+ async onSubagentEnded(params: {
1828
+ childSessionKey: string;
1829
+ reason: SubagentEndReason;
1830
+ }): Promise<void> {
1831
+ const childSessionKey = params.childSessionKey.trim();
1832
+ if (!childSessionKey) {
1833
+ return;
1834
+ }
1835
+
1836
+ switch (params.reason) {
1837
+ case "deleted":
1838
+ revokeDelegatedExpansionGrantForSession(childSessionKey, { removeBinding: true });
1839
+ break;
1840
+ case "completed":
1841
+ revokeDelegatedExpansionGrantForSession(childSessionKey);
1842
+ break;
1843
+ case "released":
1844
+ case "swept":
1845
+ removeDelegatedExpansionGrantForSession(childSessionKey);
1846
+ break;
1847
+ }
1848
+ }
1849
+
1850
+ async dispose(): Promise<void> {
1851
+ // No-op for plugin singleton — the connection is shared across runs.
1852
+ // OpenClaw's runner calls dispose() after every run, but the plugin
1853
+ // registers a single engine instance reused by the factory. Closing
1854
+ // the DB here would break subsequent runs with "database is not open".
1855
+ // The connection is cleaned up on process exit via closeLcmConnection().
1856
+ }
1857
+
1858
+ // ── Public accessors for retrieval (used by subagent expansion) ─────────
1859
+
1860
  /** Expose the retrieval engine (used by subagent expansion callers). */
  getRetrieval(): RetrievalEngine {
    return this.retrieval;
  }
1863
+
1864
  /** Expose the conversation store (used by subagent expansion callers). */
  getConversationStore(): ConversationStore {
    return this.conversationStore;
  }
1867
+
1868
  /** Expose the summary store (used by subagent expansion callers). */
  getSummaryStore(): SummaryStore {
    return this.summaryStore;
  }
1871
+
1872
+ // ── Heartbeat pruning ──────────────────────────────────────────────────
1873
+
1874
+ /**
1875
+ * Detect HEARTBEAT_OK turn cycles in a conversation and delete them.
1876
+ *
1877
+ * A HEARTBEAT_OK turn is: a user message (the heartbeat prompt), followed by
1878
+ * any tool call/result messages, ending with an assistant message that is a
1879
+ * heartbeat ack. The entire sequence has no durable information value for LCM.
1880
+ *
1881
+ * Detection: assistant content (trimmed, lowercased) starts with "heartbeat_ok"
1882
+ * and any text after is not alphanumeric (matches OpenClaw core's ack detection).
1883
+ * This catches both exact "HEARTBEAT_OK" and chatty variants like
1884
+ * "HEARTBEAT_OK — weekend, no market".
1885
+ *
1886
+ * Returns the number of messages deleted.
1887
+ */
1888
+ private async pruneHeartbeatOkTurns(conversationId: number): Promise<number> {
1889
+ const allMessages = await this.conversationStore.getMessages(conversationId);
1890
+ if (allMessages.length === 0) {
1891
+ return 0;
1892
+ }
1893
+
1894
+ const toDelete: number[] = [];
1895
+
1896
+ // Walk through messages finding HEARTBEAT_OK assistant replies, then
1897
+ // collect the entire turn (back to the preceding user message).
1898
+ for (let i = 0; i < allMessages.length; i++) {
1899
+ const msg = allMessages[i];
1900
+ if (msg.role !== "assistant") {
1901
+ continue;
1902
+ }
1903
+ if (!isHeartbeatOkContent(msg.content)) {
1904
+ continue;
1905
+ }
1906
+
1907
+ // Found a HEARTBEAT_OK reply. Walk backward to find the turn start
1908
+ // (the preceding user message).
1909
+ const turnMessageIds: number[] = [msg.messageId];
1910
+ for (let j = i - 1; j >= 0; j--) {
1911
+ const prev = allMessages[j];
1912
+ turnMessageIds.push(prev.messageId);
1913
+ if (prev.role === "user") {
1914
+ break; // Found turn start
1915
+ }
1916
+ }
1917
+
1918
+ toDelete.push(...turnMessageIds);
1919
+ }
1920
+
1921
+ if (toDelete.length === 0) {
1922
+ return 0;
1923
+ }
1924
+
1925
+ // Deduplicate (a message could theoretically appear in multiple turns)
1926
+ const uniqueIds = [...new Set(toDelete)];
1927
+ return this.conversationStore.deleteMessages(uniqueIds);
1928
+ }
1929
+ }
1930
+
1931
+ // ── Heartbeat detection ─────────────────────────────────────────────────────
1932
+
1933
+ const HEARTBEAT_OK_TOKEN = "heartbeat_ok";
1934
+
1935
+ /**
1936
+ * Detect whether an assistant message is a heartbeat ack.
1937
+ *
1938
+ * Matches the same pattern as OpenClaw core's heartbeat-events-filter:
1939
+ * content starts with "heartbeat_ok" (case-insensitive) and any character
1940
+ * immediately after is not alphanumeric or underscore.
1941
+ *
1942
+ * This catches:
1943
+ * - "HEARTBEAT_OK"
1944
+ * - " HEARTBEAT_OK "
1945
+ * - "HEARTBEAT_OK — weekend, no market."
1946
+ * - "Saturday 10:48 AM PT — weekend, no market. HEARTBEAT_OK"
1947
+ *
1948
+ * But not:
1949
+ * - "HEARTBEAT_OK_EXTENDED" (alphanumeric continuation)
1950
+ */
1951
+ function isHeartbeatOkContent(content: string): boolean {
1952
+ const trimmed = content.trim().toLowerCase();
1953
+ if (!trimmed) {
1954
+ return false;
1955
+ }
1956
+
1957
+ // Check if it starts with the token
1958
+ if (trimmed.startsWith(HEARTBEAT_OK_TOKEN)) {
1959
+ const suffix = trimmed.slice(HEARTBEAT_OK_TOKEN.length);
1960
+ if (suffix.length === 0) {
1961
+ return true;
1962
+ }
1963
+ return !/[a-z0-9_]/.test(suffix[0]);
1964
+ }
1965
+
1966
+ // Also check if it ends with the token (chatty prefix + HEARTBEAT_OK)
1967
+ if (trimmed.endsWith(HEARTBEAT_OK_TOKEN)) {
1968
+ return true;
1969
+ }
1970
+
1971
+ return false;
1972
+ }
1973
+
1974
+ // ── Emergency fallback summarization ────────────────────────────────────────
1975
+
1976
+ /**
1977
+ * Creates a deterministic truncation summarizer used only as an emergency
1978
+ * fallback when the model-backed summarizer cannot be created.
1979
+ *
1980
+ * CompactionEngine already escalates normal -> aggressive -> fallback for
1981
+ * convergence. This function simply provides a stable baseline summarize
1982
+ * callback to keep compaction operable when runtime setup is unavailable.
1983
+ */
1984
+ function createEmergencyFallbackSummarize(): (
1985
+ text: string,
1986
+ aggressive?: boolean,
1987
+ ) => Promise<string> {
1988
+ return async (text: string, aggressive?: boolean): Promise<string> => {
1989
+ const maxChars = aggressive ? 600 * 4 : 900 * 4;
1990
+ if (text.length <= maxChars) {
1991
+ return text;
1992
+ }
1993
+ return text.slice(0, maxChars) + "\n[Truncated for context management]";
1994
+ };
1995
+ }