@martian-engineering/lossless-claw 0.6.1 → 0.6.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -115,7 +115,8 @@ Add a `lossless-claw` entry under `plugins.entries` in your OpenClaw config:
115
115
  ],
116
116
  "summaryModel": "anthropic/claude-haiku-4-5",
117
117
  "expansionModel": "anthropic/claude-haiku-4-5",
118
- "delegationTimeoutMs": 300000
118
+ "delegationTimeoutMs": 300000,
119
+ "summaryTimeoutMs": 60000
119
120
  }
120
121
  }
121
122
  }
@@ -123,7 +124,7 @@ Add a `lossless-claw` entry under `plugins.entries` in your OpenClaw config:
123
124
  }
124
125
  ```
125
126
 
126
- `leafChunkTokens` controls how many source tokens can accumulate in a leaf compaction chunk before summarization is triggered. The default is `20000`, but quota-limited summary providers may benefit from a larger value to reduce compaction frequency. `summaryModel` and `summaryProvider` let you pin compaction summarization to a cheaper or faster model than your main OpenClaw session model. `expansionModel` does the same for `lcm_expand_query` sub-agent calls (drilling into summaries to recover detail). `delegationTimeoutMs` controls how long `lcm_expand_query` waits for that delegated sub-agent to finish before returning a timeout error; it defaults to `120000` (120s). When unset, the model settings still fall back to OpenClaw's configured default model/provider. See [Expansion model override requirements](#expansion-model-override-requirements) for the required `subagent` trust policy when using `expansionModel`.
127
+ `leafChunkTokens` controls how many source tokens can accumulate in a leaf compaction chunk before summarization is triggered. The default is `20000`, but quota-limited summary providers may benefit from a larger value to reduce compaction frequency. `summaryModel` and `summaryProvider` let you pin compaction summarization to a cheaper or faster model than your main OpenClaw session model. `expansionModel` does the same for `lcm_expand_query` sub-agent calls (drilling into summaries to recover detail). `delegationTimeoutMs` controls how long `lcm_expand_query` waits for that delegated sub-agent to finish before returning a timeout error; it defaults to `120000` (120s). `summaryTimeoutMs` controls the per-call timeout for model-backed LCM summarization; it defaults to `60000` (60s). When unset, the model settings still fall back to OpenClaw's configured default model/provider. See [Expansion model override requirements](#expansion-model-override-requirements) for the required `subagent` trust policy when using `expansionModel`.
127
128
 
128
129
  ### Environment variables
129
130
 
@@ -154,6 +155,7 @@ Add a `lossless-claw` entry under `plugins.entries` in your OpenClaw config:
154
155
  | `LCM_EXPANSION_MODEL` | *(from OpenClaw)* | Model override for `lcm_expand_query` sub-agent (e.g. `anthropic/claude-haiku-4-5`) |
155
156
  | `LCM_EXPANSION_PROVIDER` | *(from OpenClaw)* | Provider override for `lcm_expand_query` sub-agent |
156
157
  | `LCM_DELEGATION_TIMEOUT_MS` | `120000` | Max time to wait for delegated `lcm_expand_query` sub-agent completion |
158
+ | `LCM_SUMMARY_TIMEOUT_MS` | `60000` | Max time to wait for a single model-backed LCM summarizer call |
157
159
  | `LCM_PRUNE_HEARTBEAT_OK` | `false` | Retroactively delete `HEARTBEAT_OK` turn cycles from LCM storage |
158
160
 
159
161
  ### Expansion model override requirements
@@ -198,6 +200,7 @@ Plugin config equivalents:
198
200
  - `summaryModel`
199
201
  - `summaryProvider`
200
202
  - `delegationTimeoutMs`
203
+ - `summaryTimeoutMs`
201
204
 
202
205
  Environment variables still win over plugin config when both are set.
203
206
 
@@ -243,7 +246,7 @@ Lossless-claw distinguishes OpenClaw's two session-reset commands:
243
246
  - `2`: keep d2+ summaries; recommended default
244
247
  - `3+`: keep only deeper, more abstract summaries
245
248
 
246
- Lossless-claw currently applies these storage semantics through the `before_reset` hook only. User-facing confirmation text after `/new` or `/reset` must be emitted by OpenClaw's command handlers.
249
+ Lossless-claw applies `/new` pruning through `before_reset` and uses `session_end` to catch transcript rollovers such as `/reset`, idle or daily session rotation, compaction session replacement, and deletions. User-facing confirmation text after `/new` or `/reset` must still be emitted by OpenClaw's command handlers.
247
250
 
248
251
  Use `ignoreSessionPatterns` or `LCM_IGNORE_SESSION_PATTERNS` to keep low-value sessions completely out of LCM. Matching sessions do not create conversations, do not store messages, and do not participate in compaction or delegated expansion grants.
249
252
 
@@ -159,7 +159,7 @@ Lossless-claw treats the two OpenClaw reset commands differently:
159
159
  - `/reset` archives the active conversation row and creates a fresh active row for the same stable `sessionKey`.
160
160
 
161
161
  This preserves lossless history while still giving users a real clean-slate command.
162
- OpenClaw's command handlers still own the user-facing post-command disclosure text; lossless-claw applies only the underlying storage transition through `before_reset`.
162
+ Lossless-claw applies `/new` through `before_reset`, then uses `session_end` to catch the broader rollover cases OpenClaw can emit: `/reset`, idle or daily session rotation, compaction-driven session replacement, and deletions. OpenClaw's command handlers still own the user-facing post-command disclosure text.
163
163
 
164
164
  Use `ignoreSessionPatterns` or `LCM_IGNORE_SESSION_PATTERNS` to keep low-value sessions completely out of LCM. Matching sessions do not create conversations, do not store messages, and do not participate in compaction or delegated expansion grants.
165
165
 
@@ -84,6 +84,10 @@
84
84
  "label": "Delegation Timeout (ms)",
85
85
  "help": "Maximum time to wait for delegated lcm_expand_query sub-agent completion before timing out"
86
86
  },
87
+ "summaryTimeoutMs": {
88
+ "label": "Summary Timeout (ms)",
89
+ "help": "Maximum time to wait for a single model-backed LCM summarizer call before timing out"
90
+ },
87
91
  "maxAssemblyTokenBudget": {
88
92
  "label": "Max Assembly Token Budget",
89
93
  "help": "Hard ceiling for assembly token budget — caps runtime-provided and fallback budgets. Set for smaller context-window models (e.g., 30000 for 32k models)"
@@ -205,6 +209,10 @@
205
209
  "type": "integer",
206
210
  "minimum": 1
207
211
  },
212
+ "summaryTimeoutMs": {
213
+ "type": "integer",
214
+ "minimum": 1
215
+ },
208
216
  "maxAssemblyTokenBudget": {
209
217
  "type": "integer",
210
218
  "minimum": 1000
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@martian-engineering/lossless-claw",
3
- "version": "0.6.1",
3
+ "version": "0.6.3",
4
4
  "description": "Lossless Context Management plugin for OpenClaw — DAG-based conversation summarization with incremental compaction",
5
5
  "type": "module",
6
6
  "main": "index.ts",
package/src/compaction.ts CHANGED
@@ -1379,45 +1379,47 @@ export class CompactionEngine {
1379
1379
  const summaryId = generateSummaryId(summary.content);
1380
1380
  const tokenCount = estimateTokens(summary.content);
1381
1381
 
1382
- await this.summaryStore.insertSummary({
1383
- summaryId,
1384
- conversationId,
1385
- kind: "leaf",
1386
- depth: 0,
1387
- content: summary.content,
1388
- tokenCount,
1389
- fileIds,
1390
- earliestAt:
1391
- messageContents.length > 0
1392
- ? new Date(Math.min(...messageContents.map((message) => message.createdAt.getTime())))
1393
- : undefined,
1394
- latestAt:
1395
- messageContents.length > 0
1396
- ? new Date(Math.max(...messageContents.map((message) => message.createdAt.getTime())))
1397
- : undefined,
1398
- descendantCount: 0,
1399
- descendantTokenCount: 0,
1400
- sourceMessageTokenCount: messageContents.reduce(
1401
- (sum, message) => sum + Math.max(0, Math.floor(message.tokenCount)),
1402
- 0,
1403
- ),
1404
- model: summaryModel,
1405
- });
1382
+ await this.summaryStore.withTransaction(async () => {
1383
+ await this.summaryStore.insertSummary({
1384
+ summaryId,
1385
+ conversationId,
1386
+ kind: "leaf",
1387
+ depth: 0,
1388
+ content: summary.content,
1389
+ tokenCount,
1390
+ fileIds,
1391
+ earliestAt:
1392
+ messageContents.length > 0
1393
+ ? new Date(Math.min(...messageContents.map((message) => message.createdAt.getTime())))
1394
+ : undefined,
1395
+ latestAt:
1396
+ messageContents.length > 0
1397
+ ? new Date(Math.max(...messageContents.map((message) => message.createdAt.getTime())))
1398
+ : undefined,
1399
+ descendantCount: 0,
1400
+ descendantTokenCount: 0,
1401
+ sourceMessageTokenCount: messageContents.reduce(
1402
+ (sum, message) => sum + Math.max(0, Math.floor(message.tokenCount)),
1403
+ 0,
1404
+ ),
1405
+ model: summaryModel,
1406
+ });
1406
1407
 
1407
- // Link to source messages
1408
- const messageIds = messageContents.map((m) => m.messageId);
1409
- await this.summaryStore.linkSummaryToMessages(summaryId, messageIds);
1408
+ // Link to source messages before the context swap becomes visible.
1409
+ const messageIds = messageContents.map((m) => m.messageId);
1410
+ await this.summaryStore.linkSummaryToMessages(summaryId, messageIds);
1410
1411
 
1411
- // Replace the message range in context with the new summary
1412
- const ordinals = messageItems.map((ci) => ci.ordinal);
1413
- const startOrdinal = Math.min(...ordinals);
1414
- const endOrdinal = Math.max(...ordinals);
1412
+ // Replace the message range in context with the new summary.
1413
+ const ordinals = messageItems.map((ci) => ci.ordinal);
1414
+ const startOrdinal = Math.min(...ordinals);
1415
+ const endOrdinal = Math.max(...ordinals);
1415
1416
 
1416
- await this.summaryStore.replaceContextRangeWithSummary({
1417
- conversationId,
1418
- startOrdinal,
1419
- endOrdinal,
1420
- summaryId,
1417
+ await this.summaryStore.replaceContextRangeWithSummary({
1418
+ conversationId,
1419
+ startOrdinal,
1420
+ endOrdinal,
1421
+ summaryId,
1422
+ });
1421
1423
  });
1422
1424
 
1423
1425
  return { summaryId, level: summary.level, content: summary.content };
@@ -1487,72 +1489,76 @@ export class CompactionEngine {
1487
1489
  const summaryId = generateSummaryId(condensed.content);
1488
1490
  const tokenCount = estimateTokens(condensed.content);
1489
1491
 
1490
- await this.summaryStore.insertSummary({
1491
- summaryId,
1492
- conversationId,
1493
- kind: "condensed",
1494
- depth: targetDepth + 1,
1495
- content: condensed.content,
1496
- tokenCount,
1497
- fileIds,
1498
- earliestAt:
1499
- summaryRecords.length > 0
1500
- ? new Date(
1501
- Math.min(
1502
- ...summaryRecords.map((summary) =>
1503
- (summary.earliestAt ?? summary.createdAt).getTime(),
1492
+ await this.summaryStore.withTransaction(async () => {
1493
+ await this.summaryStore.insertSummary({
1494
+ summaryId,
1495
+ conversationId,
1496
+ kind: "condensed",
1497
+ depth: targetDepth + 1,
1498
+ content: condensed.content,
1499
+ tokenCount,
1500
+ fileIds,
1501
+ earliestAt:
1502
+ summaryRecords.length > 0
1503
+ ? new Date(
1504
+ Math.min(
1505
+ ...summaryRecords.map((summary) =>
1506
+ (summary.earliestAt ?? summary.createdAt).getTime(),
1507
+ ),
1504
1508
  ),
1505
- ),
1506
- )
1507
- : undefined,
1508
- latestAt:
1509
- summaryRecords.length > 0
1510
- ? new Date(
1511
- Math.max(
1512
- ...summaryRecords.map((summary) => (summary.latestAt ?? summary.createdAt).getTime()),
1513
- ),
1514
- )
1515
- : undefined,
1516
- descendantCount: summaryRecords.reduce((count, summary) => {
1517
- const childDescendants =
1518
- typeof summary.descendantCount === "number" && Number.isFinite(summary.descendantCount)
1519
- ? Math.max(0, Math.floor(summary.descendantCount))
1520
- : 0;
1521
- return count + childDescendants + 1;
1522
- }, 0),
1523
- descendantTokenCount: summaryRecords.reduce((count, summary) => {
1524
- const childDescendantTokens =
1525
- typeof summary.descendantTokenCount === "number" &&
1526
- Number.isFinite(summary.descendantTokenCount)
1527
- ? Math.max(0, Math.floor(summary.descendantTokenCount))
1528
- : 0;
1529
- return count + Math.max(0, Math.floor(summary.tokenCount)) + childDescendantTokens;
1530
- }, 0),
1531
- sourceMessageTokenCount: summaryRecords.reduce((count, summary) => {
1532
- const sourceTokens =
1533
- typeof summary.sourceMessageTokenCount === "number" &&
1534
- Number.isFinite(summary.sourceMessageTokenCount)
1535
- ? Math.max(0, Math.floor(summary.sourceMessageTokenCount))
1536
- : 0;
1537
- return count + sourceTokens;
1538
- }, 0),
1539
- model: summaryModel,
1540
- });
1509
+ )
1510
+ : undefined,
1511
+ latestAt:
1512
+ summaryRecords.length > 0
1513
+ ? new Date(
1514
+ Math.max(
1515
+ ...summaryRecords.map(
1516
+ (summary) => (summary.latestAt ?? summary.createdAt).getTime(),
1517
+ ),
1518
+ ),
1519
+ )
1520
+ : undefined,
1521
+ descendantCount: summaryRecords.reduce((count, summary) => {
1522
+ const childDescendants =
1523
+ typeof summary.descendantCount === "number" && Number.isFinite(summary.descendantCount)
1524
+ ? Math.max(0, Math.floor(summary.descendantCount))
1525
+ : 0;
1526
+ return count + childDescendants + 1;
1527
+ }, 0),
1528
+ descendantTokenCount: summaryRecords.reduce((count, summary) => {
1529
+ const childDescendantTokens =
1530
+ typeof summary.descendantTokenCount === "number" &&
1531
+ Number.isFinite(summary.descendantTokenCount)
1532
+ ? Math.max(0, Math.floor(summary.descendantTokenCount))
1533
+ : 0;
1534
+ return count + Math.max(0, Math.floor(summary.tokenCount)) + childDescendantTokens;
1535
+ }, 0),
1536
+ sourceMessageTokenCount: summaryRecords.reduce((count, summary) => {
1537
+ const sourceTokens =
1538
+ typeof summary.sourceMessageTokenCount === "number" &&
1539
+ Number.isFinite(summary.sourceMessageTokenCount)
1540
+ ? Math.max(0, Math.floor(summary.sourceMessageTokenCount))
1541
+ : 0;
1542
+ return count + sourceTokens;
1543
+ }, 0),
1544
+ model: summaryModel,
1545
+ });
1541
1546
 
1542
- // Link to parent summaries
1543
- const parentSummaryIds = summaryRecords.map((s) => s.summaryId);
1544
- await this.summaryStore.linkSummaryToParents(summaryId, parentSummaryIds);
1547
+ // Link to parent summaries before the context swap becomes visible.
1548
+ const parentSummaryIds = summaryRecords.map((s) => s.summaryId);
1549
+ await this.summaryStore.linkSummaryToParents(summaryId, parentSummaryIds);
1545
1550
 
1546
- // Replace all summary items in context with the condensed summary
1547
- const ordinals = summaryItems.map((ci) => ci.ordinal);
1548
- const startOrdinal = Math.min(...ordinals);
1549
- const endOrdinal = Math.max(...ordinals);
1551
+ // Replace all summary items in context with the condensed summary.
1552
+ const ordinals = summaryItems.map((ci) => ci.ordinal);
1553
+ const startOrdinal = Math.min(...ordinals);
1554
+ const endOrdinal = Math.max(...ordinals);
1550
1555
 
1551
- await this.summaryStore.replaceContextRangeWithSummary({
1552
- conversationId,
1553
- startOrdinal,
1554
- endOrdinal,
1555
- summaryId,
1556
+ await this.summaryStore.replaceContextRangeWithSummary({
1557
+ conversationId,
1558
+ startOrdinal,
1559
+ endOrdinal,
1560
+ summaryId,
1561
+ });
1556
1562
  });
1557
1563
 
1558
1564
  return { summaryId, level: condensed.level };
package/src/db/config.ts CHANGED
@@ -42,6 +42,8 @@ export type LcmConfig = {
42
42
  expansionModel: string;
43
43
  /** Max time to wait for delegated lcm_expand_query sub-agent completion. */
44
44
  delegationTimeoutMs: number;
45
+ /** Max time to wait for a single model-backed LCM summarizer call. */
46
+ summaryTimeoutMs: number;
45
47
  /** IANA timezone for timestamps in summaries (from TZ env or system default) */
46
48
  timezone: string;
47
49
  /** When true, retroactively delete HEARTBEAT_OK turn cycles from LCM storage. */
@@ -219,6 +221,9 @@ export function resolveLcmConfig(
219
221
  expansionModel:
220
222
  env.LCM_EXPANSION_MODEL?.trim() ?? toStr(pc.expansionModel) ?? "",
221
223
  delegationTimeoutMs: envDelegationTimeoutMs ?? toNumber(pc.delegationTimeoutMs) ?? 120000,
224
+ summaryTimeoutMs:
225
+ parseFiniteInt(env.LCM_SUMMARY_TIMEOUT_MS)
226
+ ?? toNumber(pc.summaryTimeoutMs) ?? 60000,
222
227
  timezone: env.TZ ?? toStr(pc.timezone) ?? Intl.DateTimeFormat().resolvedOptions().timeZone,
223
228
  pruneHeartbeatOk:
224
229
  env.LCM_PRUNE_HEARTBEAT_OK !== undefined
package/src/engine.ts CHANGED
@@ -48,6 +48,7 @@ import { compileSessionPatterns, matchesSessionPattern } from "./session-pattern
48
48
  import { logStartupBannerOnce } from "./startup-banner-log.js";
49
49
  import {
50
50
  ConversationStore,
51
+ type ConversationRecord,
51
52
  type CreateMessagePartInput,
52
53
  type MessagePartRecord,
53
54
  type MessagePartType,
@@ -1017,7 +1018,7 @@ function readFileSegment(sessionFile: string, offset: number): string | null {
1017
1018
  }
1018
1019
  }
1019
1020
 
1020
- function readLastJsonlEntryBeforeOffset(sessionFile: string, offset: number): string | null {
1021
+ function readLastJsonlEntryBeforeOffset(sessionFile: string, offset: number, messageOnly = false): string | null {
1021
1022
  const chunkSize = 16_384;
1022
1023
  let fd: number | null = null;
1023
1024
  try {
@@ -1029,16 +1030,23 @@ function readLastJsonlEntryBeforeOffset(sessionFile: string, offset: number): st
1029
1030
  fd = openSync(sessionFile, "r");
1030
1031
  let cursor = safeOffset;
1031
1032
  let carry = "";
1032
- while (cursor > 0) {
1033
- const start = Math.max(0, cursor - chunkSize);
1034
- const length = cursor - start;
1035
- const buffer = Buffer.alloc(length);
1036
- readSync(fd, buffer, 0, length, start);
1037
- carry = buffer.toString("utf8") + carry;
1033
+ let reachedStart = false;
1034
+ while (cursor > 0 || (reachedStart && carry.length > 0)) {
1035
+ if (!reachedStart) {
1036
+ const start = Math.max(0, cursor - chunkSize);
1037
+ const length = cursor - start;
1038
+ const buffer = Buffer.alloc(length);
1039
+ readSync(fd, buffer, 0, length, start);
1040
+ carry = buffer.toString("utf8") + carry;
1041
+ cursor = start;
1042
+ if (start === 0) {
1043
+ reachedStart = true;
1044
+ }
1045
+ }
1038
1046
 
1039
1047
  const trimmedEnd = carry.replace(/\s+$/u, "");
1040
1048
  if (!trimmedEnd) {
1041
- cursor = start;
1049
+ if (reachedStart) break;
1042
1050
  carry = "";
1043
1051
  continue;
1044
1052
  }
@@ -1047,17 +1055,36 @@ function readLastJsonlEntryBeforeOffset(sessionFile: string, offset: number): st
1047
1055
  if (newlineIndex >= 0) {
1048
1056
  const candidate = trimmedEnd.slice(newlineIndex + 1).trim();
1049
1057
  if (candidate) {
1058
+ if (messageOnly) {
1059
+ let isMessage = false;
1060
+ try {
1061
+ isMessage = extractBootstrapMessageCandidate(JSON.parse(candidate)) != null;
1062
+ } catch { /* not valid JSON, skip */ }
1063
+ if (!isMessage) {
1064
+ carry = trimmedEnd.slice(0, newlineIndex);
1065
+ continue;
1066
+ }
1067
+ }
1050
1068
  return candidate;
1051
1069
  }
1052
1070
  carry = trimmedEnd.slice(0, newlineIndex);
1053
- cursor = start;
1054
1071
  continue;
1055
1072
  }
1056
1073
 
1057
- if (start === 0) {
1058
- return trimmedEnd.trim() || null;
1074
+ // No newline found — entire trimmedEnd is one line
1075
+ if (reachedStart) {
1076
+ const firstLine = trimmedEnd.trim() || null;
1077
+ if (firstLine && messageOnly) {
1078
+ let isMessage = false;
1079
+ try {
1080
+ isMessage = extractBootstrapMessageCandidate(JSON.parse(firstLine)) != null;
1081
+ } catch { /* not valid JSON */ }
1082
+ if (!isMessage) return null;
1083
+ }
1084
+ return firstLine;
1059
1085
  }
1060
- cursor = start;
1086
+ // Need more data from earlier in the file
1087
+ continue;
1061
1088
  }
1062
1089
  return null;
1063
1090
  } catch {
@@ -1826,17 +1853,18 @@ export class LcmContextEngine implements ContextEngine {
1826
1853
  conversationId: number;
1827
1854
  historicalMessages: AgentMessage[];
1828
1855
  }): Promise<{
1856
+ blockedByImportCap: boolean;
1829
1857
  importedMessages: number;
1830
1858
  hasOverlap: boolean;
1831
1859
  }> {
1832
1860
  const { sessionId, conversationId, historicalMessages } = params;
1833
1861
  if (historicalMessages.length === 0) {
1834
- return { importedMessages: 0, hasOverlap: false };
1862
+ return { blockedByImportCap: false, importedMessages: 0, hasOverlap: false };
1835
1863
  }
1836
1864
 
1837
1865
  const latestDbMessage = await this.conversationStore.getLastMessage(conversationId);
1838
1866
  if (!latestDbMessage) {
1839
- return { importedMessages: 0, hasOverlap: false };
1867
+ return { blockedByImportCap: false, importedMessages: 0, hasOverlap: false };
1840
1868
  }
1841
1869
 
1842
1870
  const storedHistoricalMessages = historicalMessages.map((message) => toStoredMessage(message));
@@ -1857,7 +1885,7 @@ export class LcmContextEngine implements ContextEngine {
1857
1885
  }
1858
1886
  }
1859
1887
  if (dbOccurrences === historicalOccurrences) {
1860
- return { importedMessages: 0, hasOverlap: true };
1888
+ return { blockedByImportCap: false, importedMessages: 0, hasOverlap: true };
1861
1889
  }
1862
1890
  }
1863
1891
 
@@ -1909,13 +1937,20 @@ export class LcmContextEngine implements ContextEngine {
1909
1937
  }
1910
1938
 
1911
1939
  if (anchorIndex < 0) {
1912
- return { importedMessages: 0, hasOverlap: false };
1940
+ return { blockedByImportCap: false, importedMessages: 0, hasOverlap: false };
1913
1941
  }
1914
1942
  if (anchorIndex >= historicalMessages.length - 1) {
1915
- return { importedMessages: 0, hasOverlap: true };
1943
+ return { blockedByImportCap: false, importedMessages: 0, hasOverlap: true };
1916
1944
  }
1917
1945
 
1918
1946
  const missingTail = historicalMessages.slice(anchorIndex + 1);
1947
+
1948
+ const existingDbCount = await this.conversationStore.getMessageCount(conversationId);
1949
+ if (existingDbCount > 0 && missingTail.length > Math.max(existingDbCount * 0.2, 50)) {
1950
+ console.error(`[lcm] reconcileSessionTail: import cap exceeded — would import ${missingTail.length} messages (existing: ${existingDbCount}). Aborting to prevent flood.`);
1951
+ return { blockedByImportCap: true, importedMessages: 0, hasOverlap: true };
1952
+ }
1953
+
1919
1954
  let importedMessages = 0;
1920
1955
  for (const message of missingTail) {
1921
1956
  const result = await this.ingestSingle({ sessionId, sessionKey: params.sessionKey, message });
@@ -1924,7 +1959,7 @@ export class LcmContextEngine implements ContextEngine {
1924
1959
  }
1925
1960
  }
1926
1961
 
1927
- return { importedMessages, hasOverlap: true };
1962
+ return { blockedByImportCap: false, importedMessages, hasOverlap: true };
1928
1963
  }
1929
1964
 
1930
1965
  async bootstrap(params: {
@@ -2019,6 +2054,7 @@ export class LcmContextEngine implements ContextEngine {
2019
2054
  const tailEntryRaw = readLastJsonlEntryBeforeOffset(
2020
2055
  params.sessionFile,
2021
2056
  bootstrapState.lastProcessedOffset,
2057
+ true,
2022
2058
  );
2023
2059
  const tailEntryMessage = readBootstrapMessageFromJsonLine(tailEntryRaw);
2024
2060
  const tailEntryHash = tailEntryMessage
@@ -2143,6 +2179,14 @@ export class LcmContextEngine implements ContextEngine {
2143
2179
  historicalMessages,
2144
2180
  });
2145
2181
 
2182
+ if (reconcile.blockedByImportCap) {
2183
+ return {
2184
+ bootstrapped: false,
2185
+ importedMessages: 0,
2186
+ reason: "reconcile import capped",
2187
+ };
2188
+ }
2189
+
2146
2190
  if (!conversation.bootstrappedAt) {
2147
2191
  await this.conversationStore.markConversationBootstrapped(conversationId);
2148
2192
  }
@@ -2405,9 +2449,34 @@ export class LcmContextEngine implements ContextEngine {
2405
2449
  };
2406
2450
  }
2407
2451
 
2408
- return params.runtimeContext.rewriteTranscriptEntries({
2452
+ const result = await params.runtimeContext.rewriteTranscriptEntries({
2409
2453
  replacements,
2410
2454
  });
2455
+
2456
+ if (result.changed) {
2457
+ try {
2458
+ const fileStat = statSync(params.sessionFile);
2459
+ const newSize = fileStat.size;
2460
+ const newMtimeMs = Math.trunc(fileStat.mtimeMs);
2461
+ const lastEntryRaw = readLastJsonlEntryBeforeOffset(params.sessionFile, newSize, true);
2462
+ const lastEntryMsg = readBootstrapMessageFromJsonLine(lastEntryRaw);
2463
+ const lastEntryHash = lastEntryMsg ? createBootstrapEntryHash(toStoredMessage(lastEntryMsg)) : null;
2464
+ if (lastEntryHash) {
2465
+ await this.summaryStore.upsertConversationBootstrapState({
2466
+ conversationId: conversation.conversationId,
2467
+ sessionFilePath: params.sessionFile,
2468
+ lastSeenSize: newSize,
2469
+ lastSeenMtimeMs: newMtimeMs,
2470
+ lastProcessedOffset: newSize,
2471
+ lastProcessedEntryHash: lastEntryHash,
2472
+ });
2473
+ }
2474
+ } catch (e) {
2475
+ console.error("[lcm] Failed to update bootstrap checkpoint after maintain:", e);
2476
+ }
2477
+ }
2478
+
2479
+ return result;
2411
2480
  },
2412
2481
  );
2413
2482
  }
@@ -3020,8 +3089,9 @@ export class LcmContextEngine implements ContextEngine {
3020
3089
  };
3021
3090
  }
3022
3091
 
3023
- const useSweep =
3024
- manualCompactionRequested || forceCompaction || params.compactionTarget === "threshold";
3092
+ // Forced budget recovery should use the capped convergence loop so live
3093
+ // overflow counts can drive recovery even when persisted context is already small.
3094
+ const useSweep = manualCompactionRequested || params.compactionTarget === "threshold";
3025
3095
  if (useSweep) {
3026
3096
  const sweepResult = await this.compaction.compactFullSweep({
3027
3097
  conversationId,
@@ -3218,6 +3288,69 @@ export class LcmContextEngine implements ContextEngine {
3218
3288
  // The shared connection is managed for the lifetime of the plugin process.
3219
3289
  }
3220
3290
 
3291
+ /** Detect the empty replacement row created during a prior lifecycle rollover. */
3292
+ private async isFreshLifecycleConversation(conversation: ConversationRecord): Promise<boolean> {
3293
+ const currentMessageCount = await this.conversationStore.getMessageCount(conversation.conversationId);
3294
+ if (currentMessageCount !== 0) {
3295
+ return false;
3296
+ }
3297
+ const currentContextItems = await this.summaryStore.getContextItems(conversation.conversationId);
3298
+ return currentContextItems.length === 0 && !conversation.bootstrappedAt;
3299
+ }
3300
+
3301
+ /**
3302
+ * Archive the current active conversation and optionally create the replacement
3303
+ * row that bootstrap should attach to for the next session transcript.
3304
+ */
3305
+ private async applySessionReplacement(params: {
3306
+ reason: string;
3307
+ sessionId?: string;
3308
+ sessionKey?: string;
3309
+ nextSessionId?: string;
3310
+ nextSessionKey?: string;
3311
+ createReplacement: boolean;
3312
+ createReplacementWhenMissing?: boolean;
3313
+ }): Promise<void> {
3314
+ const current = await this.conversationStore.getConversationForSession({
3315
+ sessionId: params.sessionId,
3316
+ sessionKey: params.sessionKey,
3317
+ });
3318
+ if (!current && !params.createReplacementWhenMissing) {
3319
+ return;
3320
+ }
3321
+
3322
+ if (current?.active) {
3323
+ if (params.createReplacement && await this.isFreshLifecycleConversation(current)) {
3324
+ this.deps.log.info(
3325
+ `[lcm] ${params.reason} lifecycle no-op for already fresh conversation ${current.conversationId}`,
3326
+ );
3327
+ return;
3328
+ }
3329
+ await this.conversationStore.archiveConversation(current.conversationId);
3330
+ }
3331
+
3332
+ if (!params.createReplacement) {
3333
+ this.deps.log.info(
3334
+ `[lcm] ${params.reason} lifecycle archived conversation ${current?.conversationId ?? "(none)"}`,
3335
+ );
3336
+ return;
3337
+ }
3338
+
3339
+ const nextSessionId = params.nextSessionId?.trim() || params.sessionId?.trim() || current?.sessionId;
3340
+ if (!nextSessionId) {
3341
+ this.deps.log.warn(`[lcm] ${params.reason} lifecycle skipped: no session identity available`);
3342
+ return;
3343
+ }
3344
+ const nextSessionKey = params.nextSessionKey?.trim() || params.sessionKey?.trim() || current?.sessionKey;
3345
+ const freshConversation = await this.conversationStore.createConversation({
3346
+ sessionId: nextSessionId,
3347
+ sessionKey: nextSessionKey,
3348
+ });
3349
+ this.deps.log.info(
3350
+ `[lcm] ${params.reason} lifecycle archived prior conversation and created ${freshConversation.conversationId}`,
3351
+ );
3352
+ }
3353
+
3221
3354
  /** Apply LCM lifecycle semantics for OpenClaw's /new and /reset commands. */
3222
3355
  async handleBeforeReset(params: {
3223
3356
  reason?: string;
@@ -3260,44 +3393,50 @@ export class LcmContextEngine implements ContextEngine {
3260
3393
  );
3261
3394
  return;
3262
3395
  }
3263
-
3264
- const current = await this.conversationStore.getConversationForSession({
3396
+ await this.applySessionReplacement({
3397
+ reason: "/reset",
3265
3398
  sessionId: params.sessionId,
3266
3399
  sessionKey: params.sessionKey,
3400
+ createReplacement: true,
3401
+ createReplacementWhenMissing: true,
3267
3402
  });
3268
- if (current?.active) {
3269
- const currentMessageCount = await this.conversationStore.getMessageCount(
3270
- current.conversationId,
3271
- );
3272
- const currentContextItems = await this.summaryStore.getContextItems(
3273
- current.conversationId,
3274
- );
3275
- if (
3276
- currentMessageCount === 0
3277
- && currentContextItems.length === 0
3278
- && !current.bootstrappedAt
3279
- ) {
3280
- this.deps.log.info(
3281
- `[lcm] /reset no-op for already fresh conversation ${current.conversationId}`,
3282
- );
3283
- return;
3284
- }
3285
- await this.conversationStore.archiveConversation(current.conversationId);
3286
- }
3403
+ }),
3404
+ );
3405
+ }
3287
3406
 
3288
- const nextSessionId = params.sessionId?.trim() || current?.sessionId;
3289
- if (!nextSessionId) {
3290
- this.deps.log.warn("[lcm] /reset skipped: no session identity available");
3291
- return;
3292
- }
3407
+ /** Apply generic lifecycle semantics for session rollover and deletion hooks. */
3408
+ async handleSessionEnd(params: {
3409
+ reason?: string;
3410
+ sessionId?: string;
3411
+ sessionKey?: string;
3412
+ nextSessionId?: string;
3413
+ nextSessionKey?: string;
3414
+ }): Promise<void> {
3415
+ const reason = params.reason?.trim();
3416
+ if (!reason || reason === "new" || reason === "unknown") {
3417
+ return;
3418
+ }
3419
+ if (this.shouldIgnoreSession({ sessionId: params.sessionId, sessionKey: params.sessionKey })) {
3420
+ return;
3421
+ }
3422
+ if (this.isStatelessSession(params.sessionKey ?? params.nextSessionKey)) {
3423
+ return;
3424
+ }
3293
3425
 
3294
- const freshConversation = await this.conversationStore.createConversation({
3295
- sessionId: nextSessionId,
3296
- sessionKey: params.sessionKey?.trim(),
3426
+ const createReplacement = reason !== "deleted";
3427
+ this.ensureMigrated();
3428
+ await this.withSessionQueue(
3429
+ this.resolveSessionQueueKey(params.nextSessionId ?? params.sessionId, params.sessionKey ?? params.nextSessionKey),
3430
+ async () =>
3431
+ this.conversationStore.withTransaction(async () => {
3432
+ await this.applySessionReplacement({
3433
+ reason: `session_end:${reason}`,
3434
+ sessionId: params.sessionId,
3435
+ sessionKey: params.sessionKey ?? params.nextSessionKey,
3436
+ nextSessionId: params.nextSessionId,
3437
+ nextSessionKey: params.nextSessionKey,
3438
+ createReplacement,
3297
3439
  });
3298
- this.deps.log.info(
3299
- `[lcm] /reset archived prior conversation and created ${freshConversation.conversationId}`,
3300
- );
3301
3440
  }),
3302
3441
  );
3303
3442
  }
@@ -3445,3 +3584,6 @@ function createEmergencyFallbackSummarize(): (
3445
3584
  return text.slice(0, maxChars) + "\n[Truncated for context management]";
3446
3585
  };
3447
3586
  }
3587
+
3588
+ /** @internal Exposed for unit tests only. */
3589
+ export const __testing = { readLastJsonlEntryBeforeOffset };
@@ -66,6 +66,14 @@ type RuntimeModelAuthResult = {
66
66
  apiKey?: string;
67
67
  };
68
68
 
69
+ type SessionEndLifecycleEvent = {
70
+ sessionId?: string;
71
+ sessionKey?: string;
72
+ reason?: string;
73
+ nextSessionId?: string;
74
+ nextSessionKey?: string;
75
+ };
76
+
69
77
  type RuntimeModelAuthModel = {
70
78
  id: string;
71
79
  provider: string;
@@ -1171,6 +1179,13 @@ function createLcmDependencies(api: OpenClawPluginApi): LcmDependencies {
1171
1179
 
1172
1180
  return {
1173
1181
  config,
1182
isRuntimeManagedAuthProvider: (provider: string, providerApi?: string) => {
  // Providers whose credentials are managed by the runtime (OAuth profiles),
  // not by a directly supplied API key.
  const id = normalizeProviderId(provider);
  if (id === "openai-codex" || id === "github-copilot") {
    return true;
  }
  // NOTE(review): this reuses the temperature-omission API heuristic as a proxy
  // for runtime-managed auth — confirm the two provider-API sets really coincide.
  return shouldOmitTemperatureForApi(providerApi);
},
1174
1189
  complete: async ({
1175
1190
  provider,
1176
1191
  model,
@@ -1522,9 +1537,9 @@ function createLcmDependencies(api: OpenClawPluginApi): LcmDependencies {
1522
1537
  },
1523
1538
  agentLaneSubagent: "subagent",
1524
1539
  log: {
1525
- info: (msg) => api.logger.info(msg),
1526
- warn: (msg) => api.logger.warn(msg),
1527
- error: (msg) => api.logger.error(msg),
1540
+ info: (msg) => console.error(msg),
1541
+ warn: (msg) => console.error(msg),
1542
+ error: (msg) => console.error(msg),
1528
1543
  debug: (msg) => api.logger.debug?.(msg),
1529
1544
  },
1530
1545
  };
@@ -1561,6 +1576,16 @@ const lcmPlugin = {
1561
1576
  api.on("before_prompt_build", () => ({
1562
1577
  prependSystemContext: LOSSLESS_RECALL_POLICY_PROMPT,
1563
1578
  }));
1579
+ api.on("session_end", async (event) => {
1580
+ const lifecycleEvent = event as SessionEndLifecycleEvent;
1581
+ await lcm.handleSessionEnd({
1582
+ reason: lifecycleEvent.reason,
1583
+ sessionId: lifecycleEvent.sessionId,
1584
+ sessionKey: lifecycleEvent.sessionKey,
1585
+ nextSessionId: lifecycleEvent.nextSessionId,
1586
+ nextSessionKey: lifecycleEvent.nextSessionKey,
1587
+ });
1588
+ });
1564
1589
  api.registerContextEngine("lossless-claw", () => lcm);
1565
1590
  api.registerContextEngine("default", () => lcm);
1566
1591
  api.registerTool((ctx) =>
@@ -1602,12 +1627,12 @@ const lcmPlugin = {
1602
1627
 
1603
1628
  logStartupBannerOnce({
1604
1629
  key: "plugin-loaded",
1605
- log: (message) => api.logger.info(message),
1630
+ log: (message) => console.error(message),
1606
1631
  message: `[lcm] Plugin loaded (enabled=${deps.config.enabled}, db=${deps.config.databasePath}, threshold=${deps.config.contextThreshold})`,
1607
1632
  });
1608
1633
  logStartupBannerOnce({
1609
1634
  key: "compaction-model",
1610
- log: (message) => api.logger.info(message),
1635
+ log: (message) => console.error(message),
1611
1636
  message: buildCompactionModelLog({
1612
1637
  config: deps.config,
1613
1638
  openClawConfig: api.config,
@@ -1,4 +1,5 @@
1
1
  import type { DatabaseSync } from "node:sqlite";
2
+ import { withDatabaseTransaction } from "../transaction-mutex.js";
2
3
  import { formatTimestamp } from "../compaction.js";
3
4
  import type { LcmConfig } from "../db/config.js";
4
5
  import type { LcmSummarizeFn } from "../summarize.js";
@@ -139,27 +140,22 @@ export async function applyScopedDoctorRepair(params: {
139
140
  }
140
141
 
141
142
  if (repairedSummaryIds.length > 0) {
142
- params.db.exec("BEGIN IMMEDIATE");
143
- try {
144
- for (const summaryId of repairedSummaryIds) {
145
- const override = overrides.get(summaryId);
146
- if (!override) {
147
- continue;
143
+ await withDatabaseTransaction(params.db, "BEGIN IMMEDIATE", async () => {
144
+ for (const summaryId of repairedSummaryIds) {
145
+ const override = overrides.get(summaryId);
146
+ if (!override) {
147
+ continue;
148
+ }
149
+ params.db
150
+ .prepare(
151
+ `UPDATE summaries
152
+ SET content = ?, token_count = ?
153
+ WHERE summary_id = ?`,
154
+ )
155
+ .run(override.content, override.tokenCount, summaryId);
156
+ updateSummaryFts(params.db, summaryId, override.content);
148
157
  }
149
- params.db
150
- .prepare(
151
- `UPDATE summaries
152
- SET content = ?, token_count = ?
153
- WHERE summary_id = ?`,
154
- )
155
- .run(override.content, override.tokenCount, summaryId);
156
- updateSummaryFts(params.db, summaryId, override.content);
157
- }
158
- params.db.exec("COMMIT");
159
- } catch (error) {
160
- params.db.exec("ROLLBACK");
161
- throw error;
162
- }
158
+ });
163
159
  }
164
160
 
165
161
  return {
@@ -1,5 +1,6 @@
1
1
  import type { DatabaseSync } from "node:sqlite";
2
2
  import { randomUUID } from "node:crypto";
3
+ import { withDatabaseTransaction } from "../transaction-mutex.js";
3
4
  import { sanitizeFts5Query } from "./fts5-sanitize.js";
4
5
  import { buildLikeSearchPlan, containsCjk, createFallbackSnippet } from "./full-text-fallback.js";
5
6
  import { parseUtcTimestamp, parseUtcTimestampOrNull } from "./parse-utc-timestamp.js";
@@ -270,15 +271,7 @@ export class ConversationStore {
270
271
  // ── Transaction helpers ──────────────────────────────────────────────────
271
272
 
272
273
/**
 * Run `operation` inside a serialized IMMEDIATE write transaction.
 *
 * Delegates to the per-database mutex so concurrent async callers cannot
 * interleave BEGIN statements on the shared DatabaseSync handle.
 */
async withTransaction<T>(operation: () => Promise<T> | T): Promise<T> {
  return withDatabaseTransaction(this.db, "BEGIN IMMEDIATE", operation);
}
283
276
 
284
277
  // ── Conversation operations ───────────────────────────────────────────────
@@ -1,4 +1,5 @@
1
1
  import type { DatabaseSync } from "node:sqlite";
2
+ import { withDatabaseTransaction } from "../transaction-mutex.js";
2
3
  import { sanitizeFts5Query } from "./fts5-sanitize.js";
3
4
  import { buildLikeSearchPlan, containsCjk, createFallbackSnippet } from "./full-text-fallback.js";
4
5
  import { parseUtcTimestamp, parseUtcTimestampOrNull } from "./parse-utc-timestamp.js";
@@ -820,6 +821,11 @@ export class SummaryStore {
820
821
  return rows.map((row) => row.depth);
821
822
  }
822
823
 
824
+ /** Serialize a multi-step summary write sequence on the shared database. */
825
+ async withTransaction<T>(operation: () => Promise<T> | T): Promise<T> {
826
+ return withDatabaseTransaction(this.db, "BEGIN", operation);
827
+ }
828
+
823
829
  async pruneForNewSession(conversationId: number, retainDepth: number): Promise<void> {
824
830
  if (Number.isFinite(retainDepth) && retainDepth < 0) {
825
831
  return;
@@ -919,56 +925,60 @@ export class SummaryStore {
919
925
  endOrdinal: number;
920
926
  summaryId: string;
921
927
  }): Promise<void> {
928
+ await this.withTransaction(() => {
929
+ this.replaceContextRangeWithSummaryInTransaction(input);
930
+ });
931
+ }
932
+
933
+ // Update the context slice in-place while the caller already owns the txn.
934
+ private replaceContextRangeWithSummaryInTransaction(input: {
935
+ conversationId: number;
936
+ startOrdinal: number;
937
+ endOrdinal: number;
938
+ summaryId: string;
939
+ }): void {
922
940
  const { conversationId, startOrdinal, endOrdinal, summaryId } = input;
923
941
 
924
- this.db.exec("BEGIN");
925
- try {
926
- // 1. Delete context items in the range [startOrdinal, endOrdinal]
927
- this.db
928
- .prepare(
929
- `DELETE FROM context_items
942
+ // 1. Delete context items in the range [startOrdinal, endOrdinal]
943
+ this.db
944
+ .prepare(
945
+ `DELETE FROM context_items
930
946
  WHERE conversation_id = ?
931
947
  AND ordinal >= ?
932
948
  AND ordinal <= ?`,
933
- )
934
- .run(conversationId, startOrdinal, endOrdinal);
949
+ )
950
+ .run(conversationId, startOrdinal, endOrdinal);
935
951
 
936
- // 2. Insert the replacement summary item at startOrdinal
937
- this.db
938
- .prepare(
939
- `INSERT INTO context_items (conversation_id, ordinal, item_type, summary_id)
952
+ // 2. Insert the replacement summary item at startOrdinal
953
+ this.db
954
+ .prepare(
955
+ `INSERT INTO context_items (conversation_id, ordinal, item_type, summary_id)
940
956
  VALUES (?, ?, 'summary', ?)`,
941
- )
942
- .run(conversationId, startOrdinal, summaryId);
957
+ )
958
+ .run(conversationId, startOrdinal, summaryId);
943
959
 
944
- // 3. Resequence all ordinals to maintain contiguity (no gaps).
945
- // Fetch current items, then update ordinals in order.
946
- const items = this.db
947
- .prepare(
948
- `SELECT ordinal FROM context_items
960
+ // 3. Resequence all ordinals to maintain contiguity (no gaps).
961
+ // Fetch current items, then update ordinals in order.
962
+ const items = this.db
963
+ .prepare(
964
+ `SELECT ordinal FROM context_items
949
965
  WHERE conversation_id = ?
950
966
  ORDER BY ordinal`,
951
- )
952
- .all(conversationId) as unknown as { ordinal: number }[];
953
-
954
- const updateStmt = this.db.prepare(
955
- `UPDATE context_items
956
- SET ordinal = ?
957
- WHERE conversation_id = ? AND ordinal = ?`,
958
- );
967
+ )
968
+ .all(conversationId) as unknown as { ordinal: number }[];
959
969
 
960
- // Use negative temp ordinals first to avoid unique constraint conflicts
961
- for (let i = 0; i < items.length; i++) {
962
- updateStmt.run(-(i + 1), conversationId, items[i].ordinal);
963
- }
964
- for (let i = 0; i < items.length; i++) {
965
- updateStmt.run(i, conversationId, -(i + 1));
966
- }
970
+ const updateStmt = this.db.prepare(
971
+ `UPDATE context_items
972
+ SET ordinal = ?
973
+ WHERE conversation_id = ? AND ordinal = ?`,
974
+ );
967
975
 
968
- this.db.exec("COMMIT");
969
- } catch (err) {
970
- this.db.exec("ROLLBACK");
971
- throw err;
976
+ // Use negative temp ordinals first to avoid unique constraint conflicts.
977
+ for (let i = 0; i < items.length; i++) {
978
+ updateStmt.run(-(i + 1), conversationId, items[i].ordinal);
979
+ }
980
+ for (let i = 0; i < items.length; i++) {
981
+ updateStmt.run(i, conversationId, -(i + 1));
972
982
  }
973
983
  }
974
984
 
package/src/summarize.ts CHANGED
@@ -105,7 +105,7 @@ export class LcmProviderAuthError extends Error {
105
105
  * context windows on slower providers, short enough to prevent the gateway
106
106
  * event loop from starving when a provider hangs.
107
107
  */
108
- const SUMMARIZER_TIMEOUT_MS = 60_000;
108
+ const DEFAULT_SUMMARIZER_TIMEOUT_MS = 60_000;
109
109
 
110
110
  /** Error used to distinguish summarizer timeouts from provider failures. */
111
111
  class SummarizerTimeoutError extends Error {
@@ -1136,6 +1136,11 @@ export async function createLcmSummarizeFromLegacyParams(params: {
1136
1136
  ? params.deps.config.leafTargetTokens
1137
1137
  : DEFAULT_LEAF_TARGET_TOKENS;
1138
1138
 
1139
+ const summarizerTimeoutMs =
1140
+ Number.isFinite(params.deps.config.summaryTimeoutMs) && params.deps.config.summaryTimeoutMs > 0
1141
+ ? params.deps.config.summaryTimeoutMs
1142
+ : DEFAULT_SUMMARIZER_TIMEOUT_MS;
1143
+
1139
1144
  const fn: LcmSummarizeFn = async (
1140
1145
  text: string,
1141
1146
  aggressive?: boolean,
@@ -1210,13 +1215,17 @@ export async function createLcmSummarizeFromLegacyParams(params: {
1210
1215
  ],
1211
1216
  maxTokens: targetTokens,
1212
1217
  ...(reasoning ? { reasoning } : {}),
1213
- }), SUMMARIZER_TIMEOUT_MS, label);
1218
+ }), summarizerTimeoutMs, label);
1214
1219
 
1215
1220
  const retryWithoutModelAuth = async (
1216
1221
  failure: ProviderAuthFailure,
1217
1222
  reasoning?: string,
1218
1223
  ): Promise<Awaited<ReturnType<typeof params.deps.complete>>> => {
1219
1224
  const initialAuthError = new LcmProviderAuthError({ provider, model, failure });
1225
+ const runtimeManagedAuth = params.deps.isRuntimeManagedAuthProvider?.(provider, providerApi) === true;
1226
+ if (runtimeManagedAuth) {
1227
+ throw initialAuthError;
1228
+ }
1220
1229
  console.warn(initialAuthError.message);
1221
1230
  console.warn(
1222
1231
  `[lcm] summarizer auth retry: retrying ${provider}/${model} without runtime.modelAuth credentials.`,
@@ -1318,7 +1327,7 @@ export async function createLcmSummarizeFromLegacyParams(params: {
1318
1327
  const errMsg = err instanceof Error ? err.message : String(err);
1319
1328
  const isTimeout = errMsg.includes("summarizer timeout");
1320
1329
  console.warn(
1321
- `[lcm] summarizer ${isTimeout ? "timed out" : "failed"}; provider=${provider}; model=${model}; timeout=${SUMMARIZER_TIMEOUT_MS}ms; error=${errMsg}`,
1330
+ `[lcm] summarizer ${isTimeout ? "timed out" : "failed"}; provider=${provider}; model=${model}; timeout=${summarizerTimeoutMs}ms; error=${errMsg}`,
1322
1331
  );
1323
1332
  if (nextCandidate) {
1324
1333
  console.warn(
@@ -1433,12 +1442,12 @@ export async function createLcmSummarizeFromLegacyParams(params: {
1433
1442
  const isRetryTimeout = retryErrMsg.includes("summarizer timeout");
1434
1443
  if (nextCandidate) {
1435
1444
  console.warn(
1436
- `[lcm] retry ${isRetryTimeout ? "timed out" : "failed"}; provider=${provider}; model=${model}; timeout=${SUMMARIZER_TIMEOUT_MS}ms; error=${retryErrMsg}; retrying with ${nextCandidate.provider}/${nextCandidate.model}`,
1445
+ `[lcm] retry ${isRetryTimeout ? "timed out" : "failed"}; provider=${provider}; model=${model}; timeout=${summarizerTimeoutMs}ms; error=${retryErrMsg}; retrying with ${nextCandidate.provider}/${nextCandidate.model}`,
1437
1446
  );
1438
1447
  continue;
1439
1448
  }
1440
1449
  console.warn(
1441
- `[lcm] retry ${isRetryTimeout ? "timed out" : "failed"}; provider=${provider}; model=${model}; timeout=${SUMMARIZER_TIMEOUT_MS}ms; error=${retryErrMsg}; falling back to truncation`,
1450
+ `[lcm] retry ${isRetryTimeout ? "timed out" : "failed"}; provider=${provider}; model=${model}; timeout=${summarizerTimeoutMs}ms; error=${retryErrMsg}; falling back to truncation`,
1442
1451
  );
1443
1452
  summary = initialSummary;
1444
1453
  }
@@ -0,0 +1,136 @@
1
+ /**
2
+ * Per-database async transaction mutex.
3
+ *
4
+ * Hotfix for https://github.com/Martian-Engineering/lossless-claw/issues/260
5
+ *
6
+ * Problem: Multiple async operations (from different sessions) share one
7
+ * synchronous DatabaseSync handle. SQLite does not support nested transactions.
8
+ * When two async code paths both try to BEGIN while an earlier BEGIN is still
9
+ * in-flight (awaiting async work inside the transaction), the second BEGIN
10
+ * fails with "cannot start a transaction within a transaction".
11
+ *
12
+ * Solution: A per-database async mutex that serializes all explicit transaction
13
+ * entry points. Uses a WeakMap keyed on the DatabaseSync instance so each
14
+ * database gets its own queue, and databases are garbage-collected normally.
15
+ */
16
+
17
+ import { AsyncLocalStorage } from "node:async_hooks";
18
+ import type { DatabaseSync } from "node:sqlite";
19
+
20
+ interface MutexState {
21
+ /** Tail of the promise chain — each acquirer appends to this. */
22
+ tail: Promise<void>;
23
+ }
24
+
25
+ const mutexMap = new WeakMap<DatabaseSync, MutexState>();
26
+ const heldLockContext = new AsyncLocalStorage<Map<DatabaseSync, number>>();
27
+
28
+ let nextSavepointId = 0;
29
+
30
+ function getOrCreateMutex(db: DatabaseSync): MutexState {
31
+ let state = mutexMap.get(db);
32
+ if (!state) {
33
+ state = { tail: Promise.resolve() };
34
+ mutexMap.set(db, state);
35
+ }
36
+ return state;
37
+ }
38
+
39
+ function getHeldLockDepth(db: DatabaseSync): number {
40
+ return heldLockContext.getStore()?.get(db) ?? 0;
41
+ }
42
+
43
+ function nextSavepointName(): string {
44
+ nextSavepointId += 1;
45
+ return `lcm_txn_savepoint_${nextSavepointId}`;
46
+ }
47
+
48
+ /**
49
+ * Acquire exclusive async access to the database for a transaction.
50
+ *
51
+ * Direct lock acquisition is intentionally low-level and non-reentrant.
52
+ * Callers that need nested transaction scopes should use
53
+ * `withDatabaseTransaction()`, which reuses the held lock and isolates nested
54
+ * work with SQLite savepoints.
55
+ *
56
+ * Usage:
57
+ * const release = await acquireTransactionLock(this.db);
58
+ * try {
59
+ * this.db.exec("BEGIN IMMEDIATE");
60
+ * // ... do work ...
61
+ * this.db.exec("COMMIT");
62
+ * } catch (err) {
63
+ * this.db.exec("ROLLBACK");
64
+ * throw err;
65
+ * } finally {
66
+ * release();
67
+ * }
68
+ *
69
+ * Returns a release function that MUST be called in a finally block.
70
+ */
71
+ export function acquireTransactionLock(db: DatabaseSync): Promise<() => void> {
72
+ const mutex = getOrCreateMutex(db);
73
+
74
+ let releaseResolve!: () => void;
75
+ const releasePromise = new Promise<void>((resolve) => {
76
+ releaseResolve = resolve;
77
+ });
78
+
79
+ // Capture the current tail — we wait on it
80
+ const waitOn = mutex.tail;
81
+
82
+ // Advance the tail — next acquirer will wait on our release
83
+ mutex.tail = releasePromise;
84
+
85
+ // Wait for the previous holder to release, then return our release fn
86
+ return waitOn.then(() => releaseResolve);
87
+ }
88
+
89
+ export type BeginTransactionStatement = "BEGIN" | "BEGIN IMMEDIATE";
90
+
91
+ /**
92
+ * Run an operation inside a serialized database transaction.
93
+ *
94
+ * The first scope on an async path acquires the per-database mutex and opens
95
+ * the requested transaction mode. Nested scopes on the same database reuse the
96
+ * held lock and isolate their work with a savepoint instead of hanging.
97
+ */
98
+ export async function withDatabaseTransaction<T>(
99
+ db: DatabaseSync,
100
+ beginStatement: BeginTransactionStatement,
101
+ operation: () => Promise<T> | T,
102
+ ): Promise<T> {
103
+ if (getHeldLockDepth(db) > 0) {
104
+ const savepointName = nextSavepointName();
105
+ db.exec(`SAVEPOINT ${savepointName}`);
106
+ try {
107
+ const result = await operation();
108
+ db.exec(`RELEASE SAVEPOINT ${savepointName}`);
109
+ return result;
110
+ } catch (error) {
111
+ db.exec(`ROLLBACK TO SAVEPOINT ${savepointName}`);
112
+ db.exec(`RELEASE SAVEPOINT ${savepointName}`);
113
+ throw error;
114
+ }
115
+ }
116
+
117
+ const release = await acquireTransactionLock(db);
118
+ try {
119
+ const heldLocks = new Map(heldLockContext.getStore() ?? []);
120
+ heldLocks.set(db, (heldLocks.get(db) ?? 0) + 1);
121
+
122
+ return await heldLockContext.run(heldLocks, async () => {
123
+ db.exec(beginStatement);
124
+ try {
125
+ const result = await operation();
126
+ db.exec("COMMIT");
127
+ return result;
128
+ } catch (error) {
129
+ db.exec("ROLLBACK");
130
+ throw error;
131
+ }
132
+ });
133
+ } finally {
134
+ release();
135
+ }
136
+ }
package/src/types.ts CHANGED
@@ -108,6 +108,9 @@ export interface LcmDependencies {
108
108
  /** LLM completion function for summarization */
109
109
  complete: CompleteFn;
110
110
 
111
+ /** Whether a provider uses runtime-managed OAuth / auth profiles instead of direct API keys. */
112
+ isRuntimeManagedAuthProvider?: (provider: string, providerApi?: string) => boolean;
113
+
111
114
  /** Gateway RPC call function (for subagent spawning, session ops) */
112
115
  callGateway: CallGatewayFn;
113
116