@loreai/core 0.17.1 → 0.19.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (248) hide show
  1. package/dist/bun/agents-file.d.ts +4 -0
  2. package/dist/bun/agents-file.d.ts.map +1 -1
  3. package/dist/bun/config.d.ts +2 -0
  4. package/dist/bun/config.d.ts.map +1 -1
  5. package/dist/bun/curator.d.ts +45 -0
  6. package/dist/bun/curator.d.ts.map +1 -1
  7. package/dist/bun/data-dir.d.ts +18 -0
  8. package/dist/bun/data-dir.d.ts.map +1 -0
  9. package/dist/bun/db.d.ts +85 -0
  10. package/dist/bun/db.d.ts.map +1 -1
  11. package/dist/bun/distillation.d.ts +2 -13
  12. package/dist/bun/distillation.d.ts.map +1 -1
  13. package/dist/bun/embedding-vendor.d.ts +22 -38
  14. package/dist/bun/embedding-vendor.d.ts.map +1 -1
  15. package/dist/bun/embedding-worker-types.d.ts +17 -12
  16. package/dist/bun/embedding-worker-types.d.ts.map +1 -1
  17. package/dist/bun/embedding-worker.d.ts +9 -2
  18. package/dist/bun/embedding-worker.d.ts.map +1 -1
  19. package/dist/bun/embedding-worker.js +38864 -33
  20. package/dist/bun/embedding-worker.js.map +4 -4
  21. package/dist/bun/embedding.d.ts +35 -23
  22. package/dist/bun/embedding.d.ts.map +1 -1
  23. package/dist/bun/gradient.d.ts +17 -1
  24. package/dist/bun/gradient.d.ts.map +1 -1
  25. package/dist/bun/import/detect.d.ts +14 -0
  26. package/dist/bun/import/detect.d.ts.map +1 -0
  27. package/dist/bun/import/extract.d.ts +43 -0
  28. package/dist/bun/import/extract.d.ts.map +1 -0
  29. package/dist/bun/import/history.d.ts +40 -0
  30. package/dist/bun/import/history.d.ts.map +1 -0
  31. package/dist/bun/import/index.d.ts +17 -0
  32. package/dist/bun/import/index.d.ts.map +1 -0
  33. package/dist/bun/import/providers/aider.d.ts +2 -0
  34. package/dist/bun/import/providers/aider.d.ts.map +1 -0
  35. package/dist/bun/import/providers/claude-code.d.ts +2 -0
  36. package/dist/bun/import/providers/claude-code.d.ts.map +1 -0
  37. package/dist/bun/import/providers/cline.d.ts +2 -0
  38. package/dist/bun/import/providers/cline.d.ts.map +1 -0
  39. package/dist/bun/import/providers/codex.d.ts +2 -0
  40. package/dist/bun/import/providers/codex.d.ts.map +1 -0
  41. package/dist/bun/import/providers/continue.d.ts +2 -0
  42. package/dist/bun/import/providers/continue.d.ts.map +1 -0
  43. package/dist/bun/import/providers/index.d.ts +19 -0
  44. package/dist/bun/import/providers/index.d.ts.map +1 -0
  45. package/dist/bun/import/providers/opencode.d.ts +2 -0
  46. package/dist/bun/import/providers/opencode.d.ts.map +1 -0
  47. package/dist/bun/import/providers/pi.d.ts +2 -0
  48. package/dist/bun/import/providers/pi.d.ts.map +1 -0
  49. package/dist/bun/import/types.d.ts +82 -0
  50. package/dist/bun/import/types.d.ts.map +1 -0
  51. package/dist/bun/index.d.ts +5 -2
  52. package/dist/bun/index.d.ts.map +1 -1
  53. package/dist/bun/index.js +3150 -439
  54. package/dist/bun/index.js.map +4 -4
  55. package/dist/bun/instruction-detect.d.ts +66 -0
  56. package/dist/bun/instruction-detect.d.ts.map +1 -0
  57. package/dist/bun/log.d.ts +9 -0
  58. package/dist/bun/log.d.ts.map +1 -1
  59. package/dist/bun/ltm.d.ts +139 -5
  60. package/dist/bun/ltm.d.ts.map +1 -1
  61. package/dist/bun/pattern-extract.d.ts +7 -0
  62. package/dist/bun/pattern-extract.d.ts.map +1 -1
  63. package/dist/bun/prompt.d.ts +1 -1
  64. package/dist/bun/prompt.d.ts.map +1 -1
  65. package/dist/bun/recall.d.ts.map +1 -1
  66. package/dist/bun/search.d.ts +5 -3
  67. package/dist/bun/search.d.ts.map +1 -1
  68. package/dist/bun/session-limiter.d.ts +26 -0
  69. package/dist/bun/session-limiter.d.ts.map +1 -0
  70. package/dist/bun/temporal.d.ts +2 -0
  71. package/dist/bun/temporal.d.ts.map +1 -1
  72. package/dist/bun/types.d.ts +1 -1
  73. package/dist/node/agents-file.d.ts +4 -0
  74. package/dist/node/agents-file.d.ts.map +1 -1
  75. package/dist/node/config.d.ts +2 -0
  76. package/dist/node/config.d.ts.map +1 -1
  77. package/dist/node/curator.d.ts +45 -0
  78. package/dist/node/curator.d.ts.map +1 -1
  79. package/dist/node/data-dir.d.ts +18 -0
  80. package/dist/node/data-dir.d.ts.map +1 -0
  81. package/dist/node/db.d.ts +85 -0
  82. package/dist/node/db.d.ts.map +1 -1
  83. package/dist/node/distillation.d.ts +2 -13
  84. package/dist/node/distillation.d.ts.map +1 -1
  85. package/dist/node/embedding-vendor.d.ts +22 -38
  86. package/dist/node/embedding-vendor.d.ts.map +1 -1
  87. package/dist/node/embedding-worker-types.d.ts +17 -12
  88. package/dist/node/embedding-worker-types.d.ts.map +1 -1
  89. package/dist/node/embedding-worker.d.ts +9 -2
  90. package/dist/node/embedding-worker.d.ts.map +1 -1
  91. package/dist/node/embedding-worker.js +38864 -33
  92. package/dist/node/embedding-worker.js.map +4 -4
  93. package/dist/node/embedding.d.ts +35 -23
  94. package/dist/node/embedding.d.ts.map +1 -1
  95. package/dist/node/gradient.d.ts +17 -1
  96. package/dist/node/gradient.d.ts.map +1 -1
  97. package/dist/node/import/detect.d.ts +14 -0
  98. package/dist/node/import/detect.d.ts.map +1 -0
  99. package/dist/node/import/extract.d.ts +43 -0
  100. package/dist/node/import/extract.d.ts.map +1 -0
  101. package/dist/node/import/history.d.ts +40 -0
  102. package/dist/node/import/history.d.ts.map +1 -0
  103. package/dist/node/import/index.d.ts +17 -0
  104. package/dist/node/import/index.d.ts.map +1 -0
  105. package/dist/node/import/providers/aider.d.ts +2 -0
  106. package/dist/node/import/providers/aider.d.ts.map +1 -0
  107. package/dist/node/import/providers/claude-code.d.ts +2 -0
  108. package/dist/node/import/providers/claude-code.d.ts.map +1 -0
  109. package/dist/node/import/providers/cline.d.ts +2 -0
  110. package/dist/node/import/providers/cline.d.ts.map +1 -0
  111. package/dist/node/import/providers/codex.d.ts +2 -0
  112. package/dist/node/import/providers/codex.d.ts.map +1 -0
  113. package/dist/node/import/providers/continue.d.ts +2 -0
  114. package/dist/node/import/providers/continue.d.ts.map +1 -0
  115. package/dist/node/import/providers/index.d.ts +19 -0
  116. package/dist/node/import/providers/index.d.ts.map +1 -0
  117. package/dist/node/import/providers/opencode.d.ts +2 -0
  118. package/dist/node/import/providers/opencode.d.ts.map +1 -0
  119. package/dist/node/import/providers/pi.d.ts +2 -0
  120. package/dist/node/import/providers/pi.d.ts.map +1 -0
  121. package/dist/node/import/types.d.ts +82 -0
  122. package/dist/node/import/types.d.ts.map +1 -0
  123. package/dist/node/index.d.ts +5 -2
  124. package/dist/node/index.d.ts.map +1 -1
  125. package/dist/node/index.js +3150 -439
  126. package/dist/node/index.js.map +4 -4
  127. package/dist/node/instruction-detect.d.ts +66 -0
  128. package/dist/node/instruction-detect.d.ts.map +1 -0
  129. package/dist/node/log.d.ts +9 -0
  130. package/dist/node/log.d.ts.map +1 -1
  131. package/dist/node/ltm.d.ts +139 -5
  132. package/dist/node/ltm.d.ts.map +1 -1
  133. package/dist/node/pattern-extract.d.ts +7 -0
  134. package/dist/node/pattern-extract.d.ts.map +1 -1
  135. package/dist/node/prompt.d.ts +1 -1
  136. package/dist/node/prompt.d.ts.map +1 -1
  137. package/dist/node/recall.d.ts.map +1 -1
  138. package/dist/node/search.d.ts +5 -3
  139. package/dist/node/search.d.ts.map +1 -1
  140. package/dist/node/session-limiter.d.ts +26 -0
  141. package/dist/node/session-limiter.d.ts.map +1 -0
  142. package/dist/node/temporal.d.ts +2 -0
  143. package/dist/node/temporal.d.ts.map +1 -1
  144. package/dist/node/types.d.ts +1 -1
  145. package/dist/types/agents-file.d.ts +4 -0
  146. package/dist/types/agents-file.d.ts.map +1 -1
  147. package/dist/types/config.d.ts +2 -0
  148. package/dist/types/config.d.ts.map +1 -1
  149. package/dist/types/curator.d.ts +45 -0
  150. package/dist/types/curator.d.ts.map +1 -1
  151. package/dist/types/data-dir.d.ts +18 -0
  152. package/dist/types/data-dir.d.ts.map +1 -0
  153. package/dist/types/db.d.ts +85 -0
  154. package/dist/types/db.d.ts.map +1 -1
  155. package/dist/types/distillation.d.ts +2 -13
  156. package/dist/types/distillation.d.ts.map +1 -1
  157. package/dist/types/embedding-vendor.d.ts +22 -38
  158. package/dist/types/embedding-vendor.d.ts.map +1 -1
  159. package/dist/types/embedding-worker-types.d.ts +17 -12
  160. package/dist/types/embedding-worker-types.d.ts.map +1 -1
  161. package/dist/types/embedding-worker.d.ts +9 -2
  162. package/dist/types/embedding-worker.d.ts.map +1 -1
  163. package/dist/types/embedding.d.ts +35 -23
  164. package/dist/types/embedding.d.ts.map +1 -1
  165. package/dist/types/gradient.d.ts +17 -1
  166. package/dist/types/gradient.d.ts.map +1 -1
  167. package/dist/types/import/detect.d.ts +14 -0
  168. package/dist/types/import/detect.d.ts.map +1 -0
  169. package/dist/types/import/extract.d.ts +43 -0
  170. package/dist/types/import/extract.d.ts.map +1 -0
  171. package/dist/types/import/history.d.ts +40 -0
  172. package/dist/types/import/history.d.ts.map +1 -0
  173. package/dist/types/import/index.d.ts +17 -0
  174. package/dist/types/import/index.d.ts.map +1 -0
  175. package/dist/types/import/providers/aider.d.ts +2 -0
  176. package/dist/types/import/providers/aider.d.ts.map +1 -0
  177. package/dist/types/import/providers/claude-code.d.ts +2 -0
  178. package/dist/types/import/providers/claude-code.d.ts.map +1 -0
  179. package/dist/types/import/providers/cline.d.ts +2 -0
  180. package/dist/types/import/providers/cline.d.ts.map +1 -0
  181. package/dist/types/import/providers/codex.d.ts +2 -0
  182. package/dist/types/import/providers/codex.d.ts.map +1 -0
  183. package/dist/types/import/providers/continue.d.ts +2 -0
  184. package/dist/types/import/providers/continue.d.ts.map +1 -0
  185. package/dist/types/import/providers/index.d.ts +19 -0
  186. package/dist/types/import/providers/index.d.ts.map +1 -0
  187. package/dist/types/import/providers/opencode.d.ts +2 -0
  188. package/dist/types/import/providers/opencode.d.ts.map +1 -0
  189. package/dist/types/import/providers/pi.d.ts +2 -0
  190. package/dist/types/import/providers/pi.d.ts.map +1 -0
  191. package/dist/types/import/types.d.ts +82 -0
  192. package/dist/types/import/types.d.ts.map +1 -0
  193. package/dist/types/index.d.ts +5 -2
  194. package/dist/types/index.d.ts.map +1 -1
  195. package/dist/types/instruction-detect.d.ts +66 -0
  196. package/dist/types/instruction-detect.d.ts.map +1 -0
  197. package/dist/types/log.d.ts +9 -0
  198. package/dist/types/log.d.ts.map +1 -1
  199. package/dist/types/ltm.d.ts +139 -5
  200. package/dist/types/ltm.d.ts.map +1 -1
  201. package/dist/types/pattern-extract.d.ts +7 -0
  202. package/dist/types/pattern-extract.d.ts.map +1 -1
  203. package/dist/types/prompt.d.ts +1 -1
  204. package/dist/types/prompt.d.ts.map +1 -1
  205. package/dist/types/recall.d.ts.map +1 -1
  206. package/dist/types/search.d.ts +5 -3
  207. package/dist/types/search.d.ts.map +1 -1
  208. package/dist/types/session-limiter.d.ts +26 -0
  209. package/dist/types/session-limiter.d.ts.map +1 -0
  210. package/dist/types/temporal.d.ts +2 -0
  211. package/dist/types/temporal.d.ts.map +1 -1
  212. package/dist/types/types.d.ts +1 -1
  213. package/package.json +3 -4
  214. package/src/agents-file.ts +41 -13
  215. package/src/config.ts +31 -18
  216. package/src/curator.ts +163 -75
  217. package/src/data-dir.ts +76 -0
  218. package/src/db.ts +457 -11
  219. package/src/distillation.ts +65 -16
  220. package/src/embedding-vendor.ts +23 -40
  221. package/src/embedding-worker-types.ts +19 -11
  222. package/src/embedding-worker.ts +111 -47
  223. package/src/embedding.ts +224 -174
  224. package/src/gradient.ts +192 -75
  225. package/src/import/detect.ts +37 -0
  226. package/src/import/extract.ts +137 -0
  227. package/src/import/history.ts +99 -0
  228. package/src/import/index.ts +45 -0
  229. package/src/import/providers/aider.ts +207 -0
  230. package/src/import/providers/claude-code.ts +339 -0
  231. package/src/import/providers/cline.ts +324 -0
  232. package/src/import/providers/codex.ts +369 -0
  233. package/src/import/providers/continue.ts +304 -0
  234. package/src/import/providers/index.ts +32 -0
  235. package/src/import/providers/opencode.ts +272 -0
  236. package/src/import/providers/pi.ts +332 -0
  237. package/src/import/types.ts +91 -0
  238. package/src/index.ts +13 -0
  239. package/src/instruction-detect.ts +275 -0
  240. package/src/log.ts +91 -3
  241. package/src/ltm.ts +789 -41
  242. package/src/pattern-extract.ts +41 -0
  243. package/src/prompt.ts +7 -1
  244. package/src/recall.ts +43 -5
  245. package/src/search.ts +7 -5
  246. package/src/session-limiter.ts +47 -0
  247. package/src/temporal.ts +18 -6
  248. package/src/types.ts +1 -1
package/src/gradient.ts CHANGED
@@ -1,6 +1,6 @@
1
1
  import type { LoreMessage, LorePart, LoreMessageWithParts, LoreToolPart, LoreTextPart, LoreToolState, LoreToolStateCompleted } from "./types";
2
2
  import { isTextPart, isReasoningPart, isToolPart } from "./types";
3
- import { db, ensureProject, loadForceMinLayer, saveForceMinLayer } from "./db";
3
+ import { db, ensureProject, loadForceMinLayer, saveForceMinLayer, saveSessionTracking, loadSessionTracking } from "./db";
4
4
  import { config } from "./config";
5
5
  import { formatDistillations } from "./prompt";
6
6
  import { normalize } from "./markdown";
@@ -319,6 +319,27 @@ function getSessionState(sessionID: string): SessionState {
319
319
  // forceMinLayer=2, but if OpenCode restarts before the next turn,
320
320
  // the in-memory escalation would be lost without this.
321
321
  state.forceMinLayer = loadForceMinLayer(sessionID) as SafetyLayer;
322
+
323
+ // Restore gradient calibration state from DB (v24) — avoids uncalibrated
324
+ // first turns after restart. Without this, dynamicContextCap reverts to
325
+ // the static ceiling, bustRateEMA is uninitialized, and lastTurnAt=0
326
+ // prevents onIdleResume() from detecting idle gaps.
327
+ //
328
+ // Atomic restore: lastTurnAt > 0 is the proxy for "gradient state was
329
+ // ever flushed to DB". Restore all fields together or none — avoids
330
+ // per-field sentinel fragility where a valid value (e.g. lastLayer=0)
331
+ // could be mistaken for "never persisted".
332
+ const persisted = loadSessionTracking(sessionID);
333
+ if (persisted && persisted.lastTurnAt > 0) {
334
+ state.dynamicContextCap = persisted.dynamicContextCap;
335
+ state.bustRateEMA = persisted.bustRateEMA;
336
+ state.interBustIntervalEMA = persisted.interBustIntervalEMA;
337
+ state.lastLayer = persisted.lastLayer as SafetyLayer;
338
+ state.lastKnownInput = persisted.lastKnownInput;
339
+ state.lastTurnAt = persisted.lastTurnAt;
340
+ state.lastBustAt = persisted.lastBustAt;
341
+ }
342
+
322
343
  sessionStates.set(sessionID, state);
323
344
  }
324
345
  return state;
@@ -354,11 +375,19 @@ function getSessionState(sessionID: string): SessionState {
354
375
  *
355
376
  * Set `thresholdMs <= 0` to disable. Returns true if a reset fired so the
356
377
  * caller can log/observe.
378
+ *
379
+ * @param skipCompact When true, perform all idle-resume housekeeping
380
+ * (clear caches, set cameOutOfIdle) but do NOT set postIdleCompact.
381
+ * Used when the caller knows the upstream prompt cache is still warm
382
+ * (e.g. cache warmer recently refreshed it) — compacting would produce
383
+ * a different prompt body that doesn't match the warmed prefix, causing
384
+ * a cache bust and wasting the warming cost.
357
385
  */
358
386
  export function onIdleResume(
359
387
  sessionID: string,
360
388
  thresholdMs: number,
361
389
  now: number = Date.now(),
390
+ skipCompact: boolean = false,
362
391
  ): { triggered: false } | { triggered: true; idleMs: number } {
363
392
  if (thresholdMs <= 0) return { triggered: false };
364
393
  const state = getSessionState(sessionID);
@@ -369,7 +398,7 @@ export function onIdleResume(
369
398
  state.rawWindowCache = null;
370
399
  state.distillationSnapshot = null;
371
400
  state.cameOutOfIdle = true;
372
- state.postIdleCompact = true;
401
+ state.postIdleCompact = !skipCompact;
373
402
  return { triggered: true, idleMs };
374
403
  }
375
404
 
@@ -598,6 +627,28 @@ export function setLastTurnAtForTest(sessionID: string, ms: number): void {
598
627
  getSessionState(sessionID).lastTurnAt = ms;
599
628
  }
600
629
 
630
+ /**
631
+ * Persist gradient calibration state to the session_state table.
632
+ *
633
+ * Designed to be called periodically (e.g. every 30s from the idle scheduler
634
+ * tick) rather than on every mutation, to avoid write amplification on the
635
+ * hot path. Max data loss on crash is one tick interval (~30s).
636
+ */
637
+ export function saveGradientState(sessionID: string): void {
638
+ const state = sessionStates.get(sessionID);
639
+ if (!state) return;
640
+
641
+ saveSessionTracking(sessionID, {
642
+ dynamicContextCap: state.dynamicContextCap,
643
+ bustRateEMA: state.bustRateEMA,
644
+ interBustIntervalEMA: state.interBustIntervalEMA,
645
+ lastLayer: state.lastLayer,
646
+ lastKnownInput: state.lastKnownInput,
647
+ lastTurnAt: state.lastTurnAt,
648
+ lastBustAt: state.lastBustAt,
649
+ });
650
+ }
651
+
601
652
  type Distillation = {
602
653
  id: string;
603
654
  observations: string;
@@ -1124,8 +1175,54 @@ function buildPrefixMessages(formatted: string): MessageWithParts[] {
1124
1175
  ];
1125
1176
  }
1126
1177
 
1178
+ // --- Importance-aware distillation selection ---
1179
+ //
1180
+ // When a compression stage limits distillation count (distLimit < Infinity),
1181
+ // selects the most valuable distillations rather than blindly taking the last N.
1182
+ // Scoring: 70% recency (position in chronological order) + 30% content signal.
1183
+ // Results are re-sorted chronologically after selection so the prefix cache
1184
+ // (Approach C) remains byte-stable when the same distillations are selected.
1185
+ //
1186
+ // Content signals (lightweight keyword detection, no LLM call):
1187
+ // - Decisions: "decision"/"decided"/"chose" → +0.3
1188
+ // - Gotchas/bugs: "gotcha"/"bug"/"fix"/"error" → +0.2
1189
+ // - Architecture: "architecture"/"pattern" → +0.1
1190
+ // - Meta-distilled (gen >= 1): +0.2 (consolidation = higher value density)
1191
+
1192
+ const DECISION_RE = /\b(?:decision|decided|chose|chosen|agreed)\b/i;
1193
+ const GOTCHA_RE = /\b(?:gotcha|(?:critical|known|subtle)\s+bug|broken|crash(?:ed|es)?|regression)\b/i;
1194
+ const ARCH_RE = /\b(?:architecture|design.(?:decision|pattern)|system.design)\b/i;
1195
+
1196
+ function importanceBonus(d: Distillation): number {
1197
+ let bonus = 0;
1198
+ if (DECISION_RE.test(d.observations)) bonus += 0.3;
1199
+ if (GOTCHA_RE.test(d.observations)) bonus += 0.2;
1200
+ if (ARCH_RE.test(d.observations)) bonus += 0.1;
1201
+ if (d.generation >= 1) bonus += 0.2;
1202
+ return Math.min(bonus, 1.0);
1203
+ }
1204
+
1205
+ function selectDistillations(all: Distillation[], limit: number): Distillation[] {
1206
+ if (all.length <= limit) return all;
1207
+
1208
+ // Recency: normalize to [0, 0.7] where oldest = 0.0, newest = 0.7.
1209
+ // Use (length - 1) as divisor so the last entry gets full recency weight.
1210
+ const maxIdx = all.length - 1;
1211
+ const scored = all.map((d, i) => ({
1212
+ d,
1213
+ score: (maxIdx > 0 ? (i / maxIdx) : 1) * 0.7 + importanceBonus(d) * 0.3,
1214
+ }));
1215
+
1216
+ // Keep top N by score, then re-sort chronologically (cache-safe).
1217
+ return scored
1218
+ .sort((a, b) => b.score - a.score)
1219
+ .slice(0, limit)
1220
+ .map((s) => s.d)
1221
+ .sort((a, b) => a.created_at - b.created_at);
1222
+ }
1223
+
1127
1224
  // Build a synthetic message pair containing the distilled history.
1128
- // Non-cached path — used by layers 2-4 which already cause full cache invalidation.
1225
+ // Non-cached path — used by layers 2+ which already cause full cache invalidation.
1129
1226
  function distilledPrefix(distillations: Distillation[]): MessageWithParts[] {
1130
1227
  if (!distillations.length) return [];
1131
1228
  const formatted = formatDistillations(distillations);
@@ -1316,7 +1413,7 @@ function tryFitStable(input: {
1316
1413
  rawBudget: number;
1317
1414
  sessionID: string;
1318
1415
  sessState: SessionState;
1319
- }): Omit<TransformResult, "layer" | "usable" | "distilledBudget" | "rawBudget"> | null {
1416
+ }): Omit<TransformResult, "layer" | "usable" | "distilledBudget" | "rawBudget" | "refreshLtm"> | null {
1320
1417
  // If the prefix already overflows its budget there's no point trying.
1321
1418
  if (input.prefixTokens > input.distilledBudget && input.prefix.length > 0)
1322
1419
  return null;
@@ -1417,6 +1514,25 @@ function tryFitStable(input: {
1417
1514
 
1418
1515
  export type SafetyLayer = 0 | 1 | 2 | 3 | 4;
1419
1516
 
1517
+ // --- Compression stage table ---
1518
+ // Defines the escalation path for layers 1-3. Each stage tries increasingly
1519
+ // aggressive compression: tool stripping, tighter budgets, distillation trimming.
1520
+ // Adding a new intermediate stage = one table entry.
1521
+ type CompressionStage = {
1522
+ strip: "none" | "old-tools" | "all-tools";
1523
+ rawFrac: number | null; // fraction of usable; null = use default rawBudget
1524
+ distFrac: number | null; // fraction of usable; null = use default distilledBudget
1525
+ distLimit: number; // Infinity = all, 5 = last 5, etc.
1526
+ protectedTurns: number; // turns exempt from tool stripping
1527
+ useStableWindow: boolean; // use tryFitStable (Approach B pin cache)
1528
+ };
1529
+
1530
+ const COMPRESSION_STAGES: CompressionStage[] = [
1531
+ { strip: "none", rawFrac: null, distFrac: null, distLimit: Infinity, protectedTurns: 0, useStableWindow: true },
1532
+ { strip: "old-tools", rawFrac: 0.50, distFrac: null, distLimit: Infinity, protectedTurns: 2, useStableWindow: false },
1533
+ { strip: "all-tools", rawFrac: 0.55, distFrac: 0.15, distLimit: 5, protectedTurns: 0, useStableWindow: false },
1534
+ ];
1535
+
1420
1536
  export type TransformResult = {
1421
1537
  messages: MessageWithParts[];
1422
1538
  layer: SafetyLayer;
@@ -1427,6 +1543,10 @@ export type TransformResult = {
1427
1543
  usable: number;
1428
1544
  distilledBudget: number;
1429
1545
  rawBudget: number;
1546
+ // Signals that the pipeline should re-run forSession() to refresh LTM
1547
+ // relevance scoring. Set on Layer 4 (emergency) where the context is
1548
+ // fully reset and mid-session knowledge may have changed relevance.
1549
+ refreshLtm: boolean;
1430
1550
  };
1431
1551
 
1432
1552
  // Per-session urgent distillation tracking.
@@ -1522,7 +1642,10 @@ function transformInner(input: {
1522
1642
  // Pinning to the *actual* last layer prevents all downward oscillation.
1523
1643
  // Only applied when calibrated (same session, per-session state) to avoid
1524
1644
  // affecting other sessions including worker sessions.
1525
- if (calibrated && sessState.lastLayer >= 1 && input.messages.length >= sessState.lastKnownMessageCount) {
1645
+ // Layer 4 (emergency) already blows the cache — stickiness there just traps
1646
+ // the session at emergency permanently. Only apply stickiness for layers 1-3
1647
+ // where dropping back would bust a warm cache.
1648
+ if (calibrated && sessState.lastLayer >= 1 && sessState.lastLayer <= 3 && input.messages.length >= sessState.lastKnownMessageCount) {
1526
1649
  effectiveMinLayer = Math.max(effectiveMinLayer, sessState.lastLayer) as SafetyLayer;
1527
1650
  }
1528
1651
 
@@ -1600,6 +1723,7 @@ function transformInner(input: {
1600
1723
  usable,
1601
1724
  distilledBudget,
1602
1725
  rawBudget,
1726
+ refreshLtm: false,
1603
1727
  };
1604
1728
  }
1605
1729
 
@@ -1619,7 +1743,7 @@ function transformInner(input: {
1619
1743
 
1620
1744
  // Layer 1 uses the append-only cached prefix (Approach C) to keep the
1621
1745
  // distilled content byte-identical between distillation runs, preserving
1622
- // the prompt cache. Layers 2-4 already cause full cache invalidation via
1746
+ // the prompt cache. Layers 2+ already cause full cache invalidation via
1623
1747
  // tool stripping / message restructuring, so they use the non-cached path.
1624
1748
  const cached = sid
1625
1749
  ? distilledPrefixCached(distillations, sid, sessState)
@@ -1628,79 +1752,71 @@ function transformInner(input: {
1628
1752
  return { messages: msgs, tokens: msgs.reduce((sum, m) => sum + estimateMessage(m), 0) };
1629
1753
  })();
1630
1754
 
1631
- // Layer 1: Normal budget allocation with lazy raw window eviction (Approach B).
1632
- // tryFitStable reuses the previous cutoff when it still fits, keeping the raw
1633
- // window byte-identical across turns for prompt caching. Only advances the
1634
- // cutoff when a genuinely oversized message forces eviction.
1635
- // Skipped when force-escalated to layer 2+ (previous attempt already failed at this level).
1636
- if (effectiveMinLayer <= 1) {
1637
- const layer1 = sid
1638
- ? tryFitStable({
1639
- messages: dedupMessages,
1640
- prefix: cached.messages,
1641
- prefixTokens: cached.tokens,
1642
- distilledBudget,
1643
- rawBudget,
1644
- sessionID: sid,
1645
- sessState,
1646
- })
1647
- : tryFit({
1648
- messages: dedupMessages,
1649
- prefix: cached.messages,
1650
- prefixTokens: cached.tokens,
1651
- distilledBudget,
1652
- rawBudget,
1653
- strip: "none",
1654
- });
1655
- if (fitsWithSafetyMargin(layer1)) {
1656
- if (cached.tokens === 0 && sid) {
1657
- urgentDistillationMap.set(sid, true);
1658
- }
1659
- return { ...layer1!, layer: 1, usable, distilledBudget, rawBudget };
1755
+ // --- Compression stages (layers 1-3) ---
1756
+ // Data-driven table replaces three hardcoded layer blocks. Each stage
1757
+ // escalates tool stripping and/or tightens distillation budgets.
1758
+ // Stage 0 (layer 1): stable window (Approach B), no stripping
1759
+ // Stage 1 (layer 2): strip old tool outputs, protect last 2 turns
1760
+ // Stage 2 (layer 3): strip ALL tool outputs, keep only 5 distillations
1761
+ for (let s = 0; s < COMPRESSION_STAGES.length; s++) {
1762
+ const stageLayer = (s + 1) as SafetyLayer;
1763
+ if (effectiveMinLayer > stageLayer) continue;
1764
+
1765
+ const stage = COMPRESSION_STAGES[s];
1766
+ const stageRawBudget = stage.rawFrac !== null ? Math.floor(usable * stage.rawFrac) : rawBudget;
1767
+ const stageDistBudget = stage.distFrac !== null ? Math.floor(usable * stage.distFrac) : distilledBudget;
1768
+
1769
+ // Determine prefix: if distLimit is finite, re-render with trimmed distillations.
1770
+ // Otherwise use the cached prefix (Approach C, byte-identical for cache).
1771
+ let stagePrefix = cached.messages;
1772
+ let stagePrefixTokens = cached.tokens;
1773
+ if (stage.distLimit !== Infinity && distillations.length > stage.distLimit) {
1774
+ const trimmed = selectDistillations(distillations, stage.distLimit);
1775
+ stagePrefix = distilledPrefix(trimmed);
1776
+ stagePrefixTokens = stagePrefix.reduce((sum, m) => sum + estimateMessage(m), 0);
1660
1777
  }
1661
- }
1662
1778
 
1663
- // Layer 1 didn't fit (or was force-skipped) reset the raw window cache.
1664
- // Layers 2-4 use full scans and already break the prompt cache.
1665
- sessState.rawWindowCache = null;
1779
+ // Stage 0 (layer 1) uses tryFitStable for Approach B pin cache.
1780
+ // Higher stages reset the raw window cache and use plain tryFit.
1781
+ let result: Omit<TransformResult, "layer" | "usable" | "distilledBudget" | "rawBudget" | "refreshLtm"> | null;
1782
+ if (stage.useStableWindow && sid) {
1783
+ result = tryFitStable({
1784
+ messages: dedupMessages,
1785
+ prefix: stagePrefix,
1786
+ prefixTokens: stagePrefixTokens,
1787
+ distilledBudget: stageDistBudget,
1788
+ rawBudget: stageRawBudget,
1789
+ sessionID: sid,
1790
+ sessState,
1791
+ });
1792
+ } else {
1793
+ // Reset raw window cache when leaving stage 0 — higher stages use full
1794
+ // scans and already break the prompt cache. Must fire even when stage 1
1795
+ // is skipped via effectiveMinLayer (e.g. forceMinLayer = 3).
1796
+ sessState.rawWindowCache = null;
1797
+ result = tryFit({
1798
+ messages: dedupMessages,
1799
+ prefix: stagePrefix,
1800
+ prefixTokens: stagePrefixTokens,
1801
+ distilledBudget: stageDistBudget,
1802
+ rawBudget: stageRawBudget,
1803
+ strip: stage.strip,
1804
+ protectedTurns: stage.protectedTurns,
1805
+ });
1806
+ }
1666
1807
 
1667
- // Layer 2: Strip tool outputs from older messages, keep last 2 turns
1668
- // Skipped when force-escalated to layer 3+.
1669
- if (effectiveMinLayer <= 2) {
1670
- const layer2 = tryFit({
1671
- messages: dedupMessages,
1672
- prefix: cached.messages,
1673
- prefixTokens: cached.tokens,
1674
- distilledBudget,
1675
- rawBudget: Math.floor(usable * 0.5), // give raw more room
1676
- strip: "old-tools",
1677
- protectedTurns: 2,
1678
- });
1679
- if (fitsWithSafetyMargin(layer2)) {
1680
- if (sid) urgentDistillationMap.set(sid, true);
1681
- return { ...layer2!, layer: 2, usable, distilledBudget, rawBudget };
1808
+ if (fitsWithSafetyMargin(result)) {
1809
+ // Trigger urgent distillation when: (a) higher stages always need it, or
1810
+ // (b) stage 0 with no distillations = first time in gradient mode.
1811
+ if (sid && (s > 0 || cached.tokens === 0)) {
1812
+ urgentDistillationMap.set(sid, true);
1813
+ }
1814
+ return { ...result!, layer: stageLayer, usable, distilledBudget, rawBudget, refreshLtm: false };
1682
1815
  }
1683
1816
  }
1684
1817
 
1685
- // Layer 3: Strip ALL tool outputs, drop oldest distillations
1686
- const trimmedDistillations = distillations.slice(-5);
1687
- const trimmedPrefix = distilledPrefix(trimmedDistillations);
1688
- const trimmedPrefixTokens = trimmedPrefix.reduce(
1689
- (sum, m) => sum + estimateMessage(m),
1690
- 0,
1691
- );
1692
- const layer3 = tryFit({
1693
- messages: dedupMessages,
1694
- prefix: trimmedPrefix,
1695
- prefixTokens: trimmedPrefixTokens,
1696
- distilledBudget: Math.floor(usable * 0.15),
1697
- rawBudget: Math.floor(usable * 0.55),
1698
- strip: "all-tools",
1699
- });
1700
- if (fitsWithSafetyMargin(layer3)) {
1701
- if (sid) urgentDistillationMap.set(sid, true);
1702
- return { ...layer3!, layer: 3, usable, distilledBudget, rawBudget };
1703
- }
1818
+ // All compression stages exhausted — reset raw window cache before emergency.
1819
+ sessState.rawWindowCache = null;
1704
1820
 
1705
1821
  // Layer 4: Emergency — last 2 distillations + token-budget raw tail.
1706
1822
  // We do NOT strip tool parts here: doing so would cause an infinite tool-call loop because
@@ -1716,7 +1832,7 @@ function transformInner(input: {
1716
1832
  // and must always return. Remaining budget is filled backward with older
1717
1833
  // messages.
1718
1834
  if (sid) urgentDistillationMap.set(sid, true);
1719
- const nuclearDistillations = distillations.slice(-2);
1835
+ const nuclearDistillations = selectDistillations(distillations, 2);
1720
1836
  const nuclearPrefix = distilledPrefix(nuclearDistillations);
1721
1837
  const nuclearPrefixTokens = nuclearPrefix.reduce(
1722
1838
  (sum, m) => sum + estimateMessage(m),
@@ -1765,6 +1881,7 @@ function transformInner(input: {
1765
1881
  usable,
1766
1882
  distilledBudget,
1767
1883
  rawBudget,
1884
+ refreshLtm: true,
1768
1885
  };
1769
1886
  }
1770
1887
 
@@ -1885,7 +2002,7 @@ function tryFit(input: {
1885
2002
  rawBudget: number;
1886
2003
  strip: "none" | "old-tools" | "all-tools";
1887
2004
  protectedTurns?: number;
1888
- }): Omit<TransformResult, "layer" | "usable" | "distilledBudget" | "rawBudget"> | null {
2005
+ }): Omit<TransformResult, "layer" | "usable" | "distilledBudget" | "rawBudget" | "refreshLtm"> | null {
1889
2006
  // If distilled prefix exceeds its budget, fail this layer
1890
2007
  if (input.prefixTokens > input.distilledBudget && input.prefix.length > 0)
1891
2008
  return null;
@@ -0,0 +1,37 @@
1
+ /**
2
+ * Detection orchestrator — scans all registered providers for conversation
3
+ * history matching a given project path.
4
+ */
5
+ import type { DetectionResult } from "./types";
6
+ import { getProviders } from "./providers";
7
+
8
+ /**
9
+ * Scan all registered providers for conversation history matching the
10
+ * given project path.
11
+ *
12
+ * @returns Results from all providers that found data, sorted by
13
+ * total messages descending (richest source first).
14
+ */
15
+ export function detectAll(projectPath: string): DetectionResult[] {
16
+ const results: DetectionResult[] = [];
17
+
18
+ for (const provider of getProviders()) {
19
+ try {
20
+ const sessions = provider.detect(projectPath);
21
+ if (sessions.length > 0) {
22
+ results.push({
23
+ agentName: provider.name,
24
+ agentDisplayName: provider.displayName,
25
+ sessions,
26
+ totalTokens: sessions.reduce((s, sess) => s + sess.estimatedTokens, 0),
27
+ totalMessages: sessions.reduce((s, sess) => s + sess.messageCount, 0),
28
+ });
29
+ }
30
+ } catch (err) {
31
+ // Provider failed (e.g. corrupt DB, missing directory) — skip silently.
32
+ // Avoid log.warn to not alarm users about agents they don't use.
33
+ }
34
+ }
35
+
36
+ return results.sort((a, b) => b.totalMessages - a.totalMessages);
37
+ }
@@ -0,0 +1,137 @@
1
+ /**
2
+ * Knowledge extraction from imported conversations.
3
+ *
4
+ * Takes conversation chunks and feeds them to the curator LLM to extract
5
+ * knowledge entries directly, without going through the temporal → distill
6
+ * pipeline. This is cheaper and faster than full-pipeline import.
7
+ */
8
+ import * as ltm from "../ltm";
9
+ import { parseOps, applyOps } from "../curator";
10
+ import { CURATOR_SYSTEM, curatorUser } from "../prompt";
11
+ import type { LLMClient } from "../types";
12
+ import type { ConversationChunk } from "./types";
13
+
14
/**
 * System prompt for import extraction.
 *
 * Extends the standard curator prompt (CURATOR_SYSTEM) with extra guidance
 * for mining HISTORICAL conversations held with a different coding agent:
 * keep durable insights (architecture, gotchas, preferences, rationale),
 * drop agent-specific or transient material. Everything inside the template
 * literal below is runtime prompt text sent to the LLM, not documentation —
 * do not edit it casually.
 */
const IMPORT_CURATOR_SYSTEM = `${CURATOR_SYSTEM}

ADDITIONAL CONTEXT: You are extracting knowledge from HISTORICAL conversations with a different AI coding agent. Focus on durable insights that are still relevant:
- Architecture decisions, design patterns, and project conventions
- Gotchas, non-obvious bugs, and their fixes
- Developer preferences and workflow patterns
- Key technical choices and their rationale

Ignore:
- References to the other agent's specific capabilities or limitations
- Task-specific state that is no longer current (e.g. "currently debugging X")
- Debugging steps for issues that were already resolved
- Transient conversation artifacts (greetings, acknowledgments, status updates)`;
31
+
32
/** Progress snapshot handed to extractKnowledge's onProgress callback after every chunk (including failed ones). */
export type ExtractionProgress = {
  /** Current chunk being processed (1-based) */
  current: number;
  /** Total chunks to process */
  total: number;
  /** Knowledge entries created so far */
  created: number;
  /** Knowledge entries updated (dedup hit) so far */
  updated: number;
};

/** Aggregate outcome returned by extractKnowledge once every chunk has been attempted. */
export type ExtractionResult = {
  /** Total knowledge entries created */
  created: number;
  /** Total entries that hit dedup (updated existing) */
  updated: number;
  /** Total entries deleted */
  deleted: number;
  /** Chunks processed successfully */
  chunksProcessed: number;
  /** Chunks that failed (LLM call or op application threw) */
  chunksFailed: number;
};
55
+
56
+ /**
57
+ * Extract knowledge entries from conversation chunks via the curator LLM.
58
+ *
59
+ * Processes chunks sequentially (not parallel) to avoid rate limits
60
+ * and to let later chunks see entries created by earlier chunks
61
+ * (better dedup via the existing entries list in the prompt).
62
+ */
63
+ export async function extractKnowledge(input: {
64
+ llm: LLMClient;
65
+ projectPath: string;
66
+ chunks: ConversationChunk[];
67
+ sessionID?: string;
68
+ model?: { providerID: string; modelID: string };
69
+ onProgress?: (progress: ExtractionProgress) => void;
70
+ }): Promise<ExtractionResult> {
71
+ const result: ExtractionResult = {
72
+ created: 0,
73
+ updated: 0,
74
+ deleted: 0,
75
+ chunksProcessed: 0,
76
+ chunksFailed: 0,
77
+ };
78
+
79
+ // Sort chunks chronologically so knowledge builds up naturally
80
+ const sorted = [...input.chunks].sort((a, b) => a.timestamp - b.timestamp);
81
+
82
+ for (let i = 0; i < sorted.length; i++) {
83
+ const chunk = sorted[i];
84
+
85
+ // Get existing entries (refreshed each iteration for dedup)
86
+ const existing = ltm.forProject(input.projectPath, false);
87
+ const existingForPrompt = existing.map((e) => ({
88
+ id: e.id,
89
+ category: e.category,
90
+ title: e.title,
91
+ content: e.content,
92
+ }));
93
+
94
+ const userContent = curatorUser({
95
+ messages: chunk.text,
96
+ existing: existingForPrompt,
97
+ });
98
+
99
+ try {
100
+ const response = await input.llm.prompt(
101
+ IMPORT_CURATOR_SYSTEM,
102
+ userContent,
103
+ {
104
+ model: input.model,
105
+ workerID: "lore-import",
106
+ thinking: false,
107
+ maxTokens: 4096,
108
+ sessionID: input.sessionID,
109
+ },
110
+ );
111
+
112
+ if (response) {
113
+ const ops = parseOps(response);
114
+ const applied = applyOps(ops, {
115
+ projectPath: input.projectPath,
116
+ sessionID: input.sessionID,
117
+ });
118
+ result.created += applied.created;
119
+ result.updated += applied.updated;
120
+ result.deleted += applied.deleted;
121
+ }
122
+
123
+ result.chunksProcessed++;
124
+ } catch {
125
+ result.chunksFailed++;
126
+ }
127
+
128
+ input.onProgress?.({
129
+ current: i + 1,
130
+ total: sorted.length,
131
+ created: result.created,
132
+ updated: result.updated,
133
+ });
134
+ }
135
+
136
+ return result;
137
+ }
@@ -0,0 +1,99 @@
1
+ /**
2
+ * Import history — tracks which external agent sessions have been imported
3
+ * to prevent re-importing unchanged sources.
4
+ */
5
+ import { db, ensureProject } from "../db";
6
+
7
/** One row of the import_history table (see recordImport for the writer). */
export type ImportRecord = {
  /** Row id — a UUID generated via crypto.randomUUID() in recordImport. */
  id: string;
  /** Project row id, as returned by ensureProject. */
  project_id: string;
  /** Name of the external agent the source came from — presumably the provider's `name`; verify against detection. */
  agent_name: string;
  /** Provider-specific identifier of the imported session/source. */
  source_id: string;
  /** Idempotency fingerprint, "size:messageCount:lastTimestamp" (see computeHash). */
  source_hash: string;
  /** Knowledge entries created by that import. */
  entries_created: number;
  /** Knowledge entries updated (dedup hits) by that import. */
  entries_updated: number;
  /** Import completion time, UNIX epoch milliseconds (Date.now()). */
  imported_at: number;
};
17
+
18
+ /**
19
+ * Check if a specific source has already been imported with the same hash.
20
+ *
21
+ * @returns The existing record if found with the same hash, or null if
22
+ * the source hasn't been imported or the hash has changed.
23
+ */
24
+ export function isImported(
25
+ projectPath: string,
26
+ agentName: string,
27
+ sourceId: string,
28
+ sourceHash: string,
29
+ ): ImportRecord | null {
30
+ const projectId = ensureProject(projectPath);
31
+ const row = db()
32
+ .query(
33
+ `SELECT * FROM import_history
34
+ WHERE project_id = ? AND agent_name = ? AND source_id = ?`,
35
+ )
36
+ .get(projectId, agentName, sourceId) as ImportRecord | null;
37
+
38
+ if (!row) return null;
39
+ // Hash changed — source has new content since last import
40
+ if (row.source_hash !== sourceHash) return null;
41
+ return row;
42
+ }
43
+
44
+ /**
45
+ * Record a successful import of a source.
46
+ * Uses INSERT OR REPLACE to handle re-imports of changed sources.
47
+ */
48
+ export function recordImport(
49
+ projectPath: string,
50
+ agentName: string,
51
+ sourceId: string,
52
+ sourceHash: string,
53
+ stats: { created: number; updated: number },
54
+ ): void {
55
+ const projectId = ensureProject(projectPath);
56
+ db()
57
+ .query(
58
+ `INSERT OR REPLACE INTO import_history
59
+ (id, project_id, agent_name, source_id, source_hash, entries_created, entries_updated, imported_at)
60
+ VALUES (?, ?, ?, ?, ?, ?, ?, ?)`,
61
+ )
62
+ .run(
63
+ crypto.randomUUID(),
64
+ projectId,
65
+ agentName,
66
+ sourceId,
67
+ sourceHash,
68
+ stats.created,
69
+ stats.updated,
70
+ Date.now(),
71
+ );
72
+ }
73
+
74
+ /**
75
+ * Get all import records for a project.
76
+ * Excludes legacy "__declined__" sentinel rows from pre-v22 databases.
77
+ */
78
+ export function listImports(projectPath: string): ImportRecord[] {
79
+ const projectId = ensureProject(projectPath);
80
+ return db()
81
+ .query(
82
+ `SELECT * FROM import_history
83
+ WHERE project_id = ? AND source_id != '__declined__'
84
+ ORDER BY imported_at DESC`,
85
+ )
86
+ .all(projectId) as ImportRecord[];
87
+ }
88
+
89
+ /**
90
+ * Compute a simple hash string for idempotency checks.
91
+ * Uses a fast non-cryptographic approach: file size + message count + last timestamp.
92
+ */
93
+ export function computeHash(parts: {
94
+ size?: number;
95
+ messageCount?: number;
96
+ lastTimestamp?: number;
97
+ }): string {
98
+ return `${parts.size ?? 0}:${parts.messageCount ?? 0}:${parts.lastTimestamp ?? 0}`;
99
+ }