kongbrain 0.2.1 → 0.3.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "kongbrain",
3
- "version": "0.2.1",
3
+ "version": "0.3.2",
4
4
  "description": "Graph-backed persistent memory engine for OpenClaw. Replaces the default context window with SurrealDB + vector embeddings that learn across sessions.",
5
5
  "type": "module",
6
6
  "license": "MIT",
package/src/acan.ts CHANGED
@@ -94,7 +94,12 @@ function loadWeights(path: string): ACANWeights | null {
94
94
  if (!Array.isArray(raw.W_k) || raw.W_k.length !== EMBED_DIM) return null;
95
95
  if (!Array.isArray(raw.W_final) || raw.W_final.length !== FEATURE_COUNT) return null;
96
96
  if (typeof raw.bias !== "number") return null;
97
- if (raw.W_q[0].length !== ATTN_DIM || raw.W_k[0].length !== ATTN_DIM) return null;
97
+ // Validate inner dimensions: check first, middle, and last rows to catch crafted files
98
+ const checkIndices = [0, Math.floor(EMBED_DIM / 2), EMBED_DIM - 1];
99
+ for (const i of checkIndices) {
100
+ if (!Array.isArray(raw.W_q[i]) || raw.W_q[i].length !== ATTN_DIM) return null;
101
+ if (!Array.isArray(raw.W_k[i]) || raw.W_k[i].length !== ATTN_DIM) return null;
102
+ }
98
103
  return raw as ACANWeights;
99
104
  } catch (e) {
100
105
  swallow("acan:loadWeights", e);
@@ -282,8 +287,9 @@ function trainInBackground(
282
287
  const STALENESS_GROWTH_FACTOR = 0.5;
283
288
  const STALENESS_MAX_AGE_MS = 7 * 24 * 60 * 60 * 1000;
284
289
 
285
- export async function checkACANReadiness(store?: SurrealStore): Promise<void> {
290
+ export async function checkACANReadiness(store?: SurrealStore, trainingThreshold?: number): Promise<void> {
286
291
  if (!store) return;
292
+ const threshold = trainingThreshold ?? TRAINING_THRESHOLD;
287
293
  const weightsPath = join(getKongDir(), WEIGHTS_FILENAME);
288
294
  const hasWeights = initACAN();
289
295
  const count = await getTrainingDataCount(store);
@@ -295,13 +301,13 @@ export async function checkACANReadiness(store?: SurrealStore): Promise<void> {
295
301
  const ageMs = Date.now() - trainedAt;
296
302
  const isStale = growthRatio >= STALENESS_GROWTH_FACTOR || ageMs >= STALENESS_MAX_AGE_MS;
297
303
  if (!isStale) return;
298
- } else if (count < TRAINING_THRESHOLD) {
304
+ } else if (count < threshold) {
299
305
  return;
300
306
  }
301
307
 
302
308
  try {
303
309
  const samples = await fetchTrainingData(store);
304
- if (samples.length < TRAINING_THRESHOLD) return;
310
+ if (samples.length < threshold) return;
305
311
  trainInBackground(samples, weightsPath, hasWeights ? _weights ?? undefined : undefined);
306
312
  } catch {
307
313
  // training is best-effort
package/src/causal.ts CHANGED
@@ -138,8 +138,8 @@ export async function queryCausalContext(
138
138
  store.queryFirst<any>(
139
139
  `SELECT id, text, importance, access_count AS accessCount,
140
140
  created_at AS timestamp, category, meta::tb(id) AS table${scoreExpr}
141
- FROM ${id}->${edge}->? LIMIT 3`,
142
- bindings,
141
+ FROM type::record($nid)->${edge}->? LIMIT 3`,
142
+ { ...bindings, nid: id },
143
143
  ).catch(e => { swallow.warn("causal:edge-query", e); return [] as any[]; }),
144
144
  ),
145
145
  );
@@ -149,8 +149,8 @@ export async function queryCausalContext(
149
149
  store.queryFirst<any>(
150
150
  `SELECT id, text, importance, access_count AS accessCount,
151
151
  created_at AS timestamp, category, meta::tb(id) AS table${scoreExpr}
152
- FROM ${id}<-${edge}<-? LIMIT 3`,
153
- bindings,
152
+ FROM type::record($nid)<-${edge}<-? LIMIT 3`,
153
+ { ...bindings, nid: id },
154
154
  ).catch(e => { swallow.warn("causal:edge-query", e); return [] as any[]; }),
155
155
  ),
156
156
  );
@@ -183,12 +183,14 @@ Return ONLY valid JSON.`,
183
183
  if (g.learned) {
184
184
  // Agent followed the correction unprompted — decay toward background (floor 3)
185
185
  await store.queryExec(
186
- `UPDATE ${g.id} SET importance = math::max([3, importance - 2])`,
186
+ `UPDATE type::record($gid) SET importance = math::max([3, importance - 2])`,
187
+ { gid: g.id },
187
188
  ).catch(e => swallow.warn("cognitive-check:correctionDecay", e));
188
189
  } else {
189
190
  // Correction was relevant but agent ignored it — reinforce (cap 9)
190
191
  await store.queryExec(
191
- `UPDATE ${g.id} SET importance = math::min([9, importance + 1])`,
192
+ `UPDATE type::record($gid) SET importance = math::min([9, importance + 1])`,
193
+ { gid: g.id },
192
194
  ).catch(e => swallow.warn("cognitive-check:correctionReinforce", e));
193
195
  }
194
196
  }
@@ -218,8 +220,8 @@ Return ONLY valid JSON.`,
218
220
  const resolvedGrades = result.grades.filter(g => g.resolved && g.id.startsWith("memory:"));
219
221
  for (const g of resolvedGrades) {
220
222
  await store.queryExec(
221
- `UPDATE ${g.id} SET status = 'resolved', resolved_at = time::now(), resolved_by = $sid`,
222
- { sid: params.sessionId },
223
+ `UPDATE type::record($gid) SET status = 'resolved', resolved_at = time::now(), resolved_by = $sid`,
224
+ { gid: g.id, sid: params.sessionId },
223
225
  ).catch(e => swallow.warn("cognitive-check:resolve", e));
224
226
  }
225
227
  } catch (e) {
@@ -234,7 +236,7 @@ Return ONLY valid JSON.`,
234
236
  export function parseCheckResponse(text: string): CognitiveCheckResult | null {
235
237
  // Strip markdown fences if present
236
238
  const stripped = text.replace(/```(?:json)?\s*/g, "").replace(/```\s*$/g, "");
237
- const jsonMatch = stripped.match(/\{[\s\S]*\}/);
239
+ const jsonMatch = stripped.match(/\{[\s\S]*?\}/);
238
240
  if (!jsonMatch) return null;
239
241
 
240
242
  let raw: any;
@@ -316,8 +318,8 @@ async function applyRetrievalGrades(
316
318
  );
317
319
  if (row?.[0]?.id) {
318
320
  await store.queryExec(
319
- `UPDATE ${row[0].id} SET llm_relevance = $score, llm_relevant = $relevant, llm_reason = $reason`,
320
- { score: grade.score, relevant: grade.relevant, reason: grade.reason },
321
+ `UPDATE type::record($rid) SET llm_relevance = $score, llm_relevant = $relevant, llm_reason = $reason`,
322
+ { rid: String(row[0].id), score: grade.score, relevant: grade.relevant, reason: grade.reason },
321
323
  );
322
324
  }
323
325
  // Feed relevance score into the utility cache — drives WMR provenUtility scoring
package/src/config.ts CHANGED
@@ -15,9 +15,23 @@ export interface EmbeddingConfig {
15
15
  dimensions: number;
16
16
  }
17
17
 
18
+ export interface ThresholdConfig {
19
+ /** Tokens accumulated before daemon flushes extraction (default: 4000) */
20
+ daemonTokenThreshold: number;
21
+ /** Cumulative tokens before mid-session cleanup fires (default: 100000) */
22
+ midSessionCleanupThreshold: number;
23
+ /** Per-extraction timeout in ms (default: 60000) */
24
+ extractionTimeoutMs: number;
25
+ /** Max pending thinking blocks kept in memory (default: 20) */
26
+ maxPendingThinking: number;
27
+ /** Retrieval outcome samples needed before ACAN training (default: 5000) */
28
+ acanTrainingThreshold: number;
29
+ }
30
+
18
31
  export interface KongBrainConfig {
19
32
  surreal: SurrealConfig;
20
33
  embedding: EmbeddingConfig;
34
+ thresholds: ThresholdConfig;
21
35
  }
22
36
 
23
37
  /**
@@ -27,6 +41,7 @@ export interface KongBrainConfig {
27
41
  export function parsePluginConfig(raw?: Record<string, unknown>): KongBrainConfig {
28
42
  const surreal = (raw?.surreal ?? {}) as Record<string, unknown>;
29
43
  const embedding = (raw?.embedding ?? {}) as Record<string, unknown>;
44
+ const thresholds = (raw?.thresholds ?? {}) as Record<string, unknown>;
30
45
 
31
46
  // Priority: plugin config > env vars > defaults
32
47
  const url =
@@ -60,5 +75,17 @@ export function parsePluginConfig(raw?: Record<string, unknown>): KongBrainConfi
60
75
  dimensions:
61
76
  typeof embedding.dimensions === "number" ? embedding.dimensions : 1024,
62
77
  },
78
+ thresholds: {
79
+ daemonTokenThreshold:
80
+ typeof thresholds.daemonTokenThreshold === "number" ? thresholds.daemonTokenThreshold : 4000,
81
+ midSessionCleanupThreshold:
82
+ typeof thresholds.midSessionCleanupThreshold === "number" ? thresholds.midSessionCleanupThreshold : 25_000,
83
+ extractionTimeoutMs:
84
+ typeof thresholds.extractionTimeoutMs === "number" ? thresholds.extractionTimeoutMs : 60_000,
85
+ maxPendingThinking:
86
+ typeof thresholds.maxPendingThinking === "number" ? thresholds.maxPendingThinking : 20,
87
+ acanTrainingThreshold:
88
+ typeof thresholds.acanTrainingThreshold === "number" ? thresholds.acanTrainingThreshold : 5000,
89
+ },
63
90
  };
64
91
  }
@@ -73,19 +73,22 @@ export class KongBrainContextEngine implements ContextEngine {
73
73
  }): Promise<BootstrapResult> {
74
74
  const { store, embeddings } = this.state;
75
75
 
76
- // Run schema if first bootstrap
77
- try {
78
- const schemaPath = join(__dirname, "..", "src", "schema.surql");
79
- let schemaSql: string;
76
+ // Run schema once per process (idempotent but expensive on every bootstrap)
77
+ if (!this.state.schemaApplied) {
80
78
  try {
81
- schemaSql = readFileSync(schemaPath, "utf-8");
82
- } catch {
83
- // Fallback: try relative to compiled output
84
- schemaSql = readFileSync(join(__dirname, "schema.surql"), "utf-8");
79
+ const schemaPath = join(__dirname, "..", "src", "schema.surql");
80
+ let schemaSql: string;
81
+ try {
82
+ schemaSql = readFileSync(schemaPath, "utf-8");
83
+ } catch {
84
+ // Fallback: try relative to compiled output
85
+ schemaSql = readFileSync(join(__dirname, "schema.surql"), "utf-8");
86
+ }
87
+ await store.queryExec(schemaSql);
88
+ this.state.schemaApplied = true;
89
+ } catch (e) {
90
+ swallow.warn("context-engine:schema", e);
85
91
  }
86
- await store.queryExec(schemaSql);
87
- } catch (e) {
88
- swallow.warn("context-engine:schema", e);
89
92
  }
90
93
 
91
94
  // 5-pillar graph init
@@ -122,6 +125,7 @@ export class KongBrainContextEngine implements ContextEngine {
122
125
  if (!session.daemon) {
123
126
  session.daemon = startMemoryDaemon(
124
127
  store, embeddings, session.sessionId, this.state.complete,
128
+ this.state.config.thresholds.extractionTimeoutMs,
125
129
  );
126
130
  }
127
131
  } catch (e) {
@@ -135,7 +139,7 @@ export class KongBrainContextEngine implements ContextEngine {
135
139
  store.archiveOldTurns(),
136
140
  store.consolidateMemories((text) => embeddings.embed(text)),
137
141
  store.garbageCollectMemories(),
138
- checkACANReadiness(store),
142
+ checkACANReadiness(store, this.state.config.thresholds.acanTrainingThreshold),
139
143
  // Deferred cleanup is triggered on first afterTurn() when complete() is available
140
144
  ]).catch(e => swallow.warn("bootstrap:maintenance", e));
141
145
 
@@ -404,7 +408,7 @@ export class KongBrainContextEngine implements ContextEngine {
404
408
  }
405
409
 
406
410
  // Flush to daemon when token threshold OR turn count threshold is reached
407
- const tokenReady = session.newContentTokens >= session.DAEMON_TOKEN_THRESHOLD;
411
+ const tokenReady = session.newContentTokens >= session.daemonTokenThreshold;
408
412
  const turnReady = session.userTurnCount >= session.lastDaemonFlushTurnCount + 3;
409
413
  if (session.daemon && (tokenReady || turnReady)) {
410
414
  try {
@@ -439,7 +443,7 @@ export class KongBrainContextEngine implements ContextEngine {
439
443
  // OpenClaw exits via Ctrl+C×2 (no async window), so session_end never fires.
440
444
  // Run reflection, skill extraction, and causal graduation periodically.
441
445
  const tokensSinceCleanup = session.cumulativeTokens - session.lastCleanupTokens;
442
- if (tokensSinceCleanup >= session.MID_SESSION_CLEANUP_THRESHOLD && typeof this.state.complete === "function") {
446
+ if (tokensSinceCleanup >= session.midSessionCleanupThreshold && typeof this.state.complete === "function") {
443
447
  session.lastCleanupTokens = session.cumulativeTokens;
444
448
 
445
449
  // Fire-and-forget: these are non-critical background operations
@@ -478,6 +482,12 @@ export class KongBrainContextEngine implements ContextEngine {
478
482
  .catch(e => swallow.warn("midCleanup:graduateCausal", e)),
479
483
  );
480
484
 
485
+ // ACAN: check if new retrieval outcomes warrant retraining
486
+ cleanupOps.push(
487
+ checkACANReadiness(store, this.state.config.thresholds.acanTrainingThreshold)
488
+ .catch(e => swallow("midCleanup:acan", e)),
489
+ );
490
+
481
491
  // Handoff note — snapshot for wakeup even if session continues
482
492
  cleanupOps.push(
483
493
  (async () => {
@@ -36,6 +36,7 @@ export function startMemoryDaemon(
36
36
  sharedEmbeddings: EmbeddingService,
37
37
  sessionId: string,
38
38
  complete: CompleteFn,
39
+ extractionTimeoutMs = 60_000,
39
40
  ): MemoryDaemon {
40
41
  // Use shared store/embeddings from global state (no duplicate connections)
41
42
  const store = sharedStore;
@@ -96,7 +97,7 @@ export function startMemoryDaemon(
96
97
 
97
98
  const responseText = response.text;
98
99
 
99
- const jsonMatch = responseText.match(/\{[\s\S]*\}/);
100
+ const jsonMatch = responseText.match(/\{[\s\S]*?\}/);
100
101
  if (!jsonMatch) return;
101
102
 
102
103
  let result: Record<string, any>;
@@ -137,7 +138,12 @@ export function startMemoryDaemon(
137
138
  const batch = pendingBatch;
138
139
  pendingBatch = null;
139
140
  try {
140
- await runExtraction(batch.turns, batch.thinking, batch.retrievedMemories, batch.priorExtractions);
141
+ await Promise.race([
142
+ runExtraction(batch.turns, batch.thinking, batch.retrievedMemories, batch.priorExtractions),
143
+ new Promise<void>((_, reject) =>
144
+ setTimeout(() => reject(new Error(`Extraction timed out after ${extractionTimeoutMs}ms`)), extractionTimeoutMs),
145
+ ),
146
+ ]);
141
147
  } catch (e) {
142
148
  errorCount++;
143
149
  swallow.warn("daemon:extraction", e);
@@ -106,10 +106,16 @@ async function processOrphanedSession(
106
106
 
107
107
  try {
108
108
  console.warn(`[deferred] extracting session ${surrealSessionId} (${turns.length} turns, transcript ${transcript.length} chars)`);
109
- const response = await complete({
110
- system: systemPrompt,
111
- messages: [{ role: "user", content: `[TRANSCRIPT]\n${transcript.slice(0, 60000)}` }],
112
- });
109
+ const LLM_CALL_TIMEOUT_MS = 30_000;
110
+ const response = await Promise.race([
111
+ complete({
112
+ system: systemPrompt,
113
+ messages: [{ role: "user", content: `[TRANSCRIPT]\n${transcript.slice(0, 60000)}` }],
114
+ }),
115
+ new Promise<never>((_, reject) =>
116
+ setTimeout(() => reject(new Error("LLM extraction call timed out")), LLM_CALL_TIMEOUT_MS),
117
+ ),
118
+ ]);
113
119
 
114
120
  const responseText = response.text;
115
121
  console.warn(`[deferred] extraction response: ${responseText.length} chars`);
@@ -144,10 +150,15 @@ async function processOrphanedSession(
144
150
  .map(t => `[${t.role}] ${t.text.slice(0, 200)}`)
145
151
  .join("\n");
146
152
 
147
- const handoffResponse = await complete({
148
- system: "Summarize this session for handoff to your next self. What was worked on, what's unfinished, what to remember. 2-3 sentences. Write in first person.",
149
- messages: [{ role: "user", content: turnSummary }],
150
- });
153
+ const handoffResponse = await Promise.race([
154
+ complete({
155
+ system: "Summarize this session for handoff to your next self. What was worked on, what's unfinished, what to remember. 2-3 sentences. Write in first person.",
156
+ messages: [{ role: "user", content: turnSummary }],
157
+ }),
158
+ new Promise<never>((_, reject) =>
159
+ setTimeout(() => reject(new Error("LLM handoff call timed out")), 30_000),
160
+ ),
161
+ ]);
151
162
 
152
163
  const handoffText = handoffResponse.text.trim();
153
164
  console.warn(`[deferred] handoff response: ${handoffText.length} chars`);
package/src/embeddings.ts CHANGED
@@ -11,7 +11,6 @@ export class EmbeddingService {
11
11
  private model: LlamaModel | null = null;
12
12
  private ctx: LlamaEmbeddingContext | null = null;
13
13
  private ready = false;
14
- private embedCallCount = 0;
15
14
 
16
15
  constructor(private readonly config: EmbeddingConfig) {}
17
16
 
@@ -40,7 +39,6 @@ export class EmbeddingService {
40
39
 
41
40
  async embed(text: string): Promise<number[]> {
42
41
  if (!this.ready || !this.ctx) throw new Error("Embeddings not initialized");
43
- this.embedCallCount++;
44
42
  const result = await this.ctx.getEmbeddingFor(text);
45
43
  return Array.from(result.vector);
46
44
  }
@@ -58,16 +56,6 @@ export class EmbeddingService {
58
56
  return this.ready;
59
57
  }
60
58
 
61
- drainEmbedCallCount(): number {
62
- const count = this.embedCallCount;
63
- this.embedCallCount = 0;
64
- return count;
65
- }
66
-
67
- getEmbedCallCount(): number {
68
- return this.embedCallCount;
69
- }
70
-
71
59
  async dispose(): Promise<void> {
72
60
  try {
73
61
  await this.ctx?.dispose();
@@ -634,9 +634,9 @@ async function formatContextMessage(
634
634
 
635
635
  function truncateToolResult(msg: AgentMessage, maxChars: number): AgentMessage {
636
636
  if (!isToolResult(msg)) return msg;
637
- const totalLen = msg.content.reduce((s, c) => s + ((c as TextContent).text?.length ?? 0), 0);
637
+ const totalLen = msg.content.reduce((s: number, c: any) => s + ((c as TextContent).text?.length ?? 0), 0);
638
638
  if (totalLen <= maxChars) return msg;
639
- const content = msg.content.map((c) => {
639
+ const content = msg.content.map((c: any) => {
640
640
  if (c.type !== "text") return c;
641
641
  const tc = c as TextContent;
642
642
  const allowed = Math.max(200, Math.floor((tc.text.length / totalLen) * maxChars));
@@ -654,8 +654,8 @@ function getRecentTurns(messages: AgentMessage[], maxTokens: number, contextWind
654
654
  const clean = messages.map((m) => {
655
655
  if (isAssistant(m) && m.stopReason === "error") {
656
656
  const errorText = m.content
657
- .filter((c): c is TextContent => c.type === "text")
658
- .map((c) => c.text)
657
+ .filter((c: any): c is TextContent => c.type === "text")
658
+ .map((c: any) => c.text)
659
659
  .join("")
660
660
  .slice(0, 150);
661
661
  return {
@@ -672,7 +672,7 @@ function getRecentTurns(messages: AgentMessage[], maxTokens: number, contextWind
672
672
  let i = 0;
673
673
  while (i < clean.length) {
674
674
  const msg = clean[i];
675
- if (isAssistant(msg) && msg.content.some((c) => c.type === "toolCall")) {
675
+ if (isAssistant(msg) && msg.content.some((c: any) => c.type === "toolCall")) {
676
676
  const group: AgentMessage[] = [clean[i]];
677
677
  let j = i + 1;
678
678
  while (j < clean.length && isToolResult(clean[j])) {
@@ -837,9 +837,19 @@ async function graphTransformInner(
837
837
  const config = session.currentConfig;
838
838
  const skipRetrieval = config?.skipRetrieval ?? false;
839
839
  const currentIntent = config?.intent ?? "unknown";
840
- const vectorSearchLimits = config?.vectorSearchLimits ?? {
840
+ const baseLimits = config?.vectorSearchLimits ?? {
841
841
  turn: 25, identity: 10, concept: 20, memory: 20, artifact: 10,
842
842
  };
843
+ // Scale search limits with context window — larger windows can use more results
844
+ const cwScale = Math.max(0.5, Math.min(2.0, contextWindow / 200_000));
845
+ const vectorSearchLimits = {
846
+ turn: Math.round((baseLimits.turn ?? 25) * cwScale),
847
+ identity: baseLimits.identity, // always load full identity
848
+ concept: Math.round((baseLimits.concept ?? 20) * cwScale),
849
+ memory: Math.round((baseLimits.memory ?? 20) * cwScale),
850
+ artifact: Math.round((baseLimits.artifact ?? 10) * cwScale),
851
+ monologue: Math.round(8 * cwScale),
852
+ };
843
853
  let tokenBudget = Math.min(config?.tokenBudget ?? 6000, budgets.retrieval);
844
854
 
845
855
  // Pressure-based adaptive scaling
@@ -30,9 +30,16 @@ export function createLlmOutputHandler(state: GlobalPluginState) {
30
30
  const session = state.getSession(sessionKey);
31
31
  if (!session) return;
32
32
 
33
- // Extract token counts (0 if provider didn't report usage)
34
- const inputTokens = event.usage?.input ?? 0;
35
- const outputTokens = event.usage?.output ?? 0;
33
+ // Measure assistant text output (used for token estimation and planning gate)
34
+ const textLen = event.assistantTexts.reduce((s, t) => s + t.length, 0);
35
+
36
+ // Extract token counts — fall back to text-length estimate when provider
37
+ // doesn't report usage (OpenClaw often passes 0 or undefined)
38
+ let inputTokens = event.usage?.input ?? 0;
39
+ let outputTokens = event.usage?.output ?? 0;
40
+ if (inputTokens + outputTokens === 0 && textLen > 0) {
41
+ outputTokens = Math.ceil(textLen / 4); // ~4 chars per token
42
+ }
36
43
 
37
44
  // Always update session stats — turn_count must increment even without usage data
38
45
  if (session.surrealSessionId) {
@@ -47,14 +54,11 @@ export function createLlmOutputHandler(state: GlobalPluginState) {
47
54
  }
48
55
  }
49
56
 
50
- // Accumulate for daemon batching (only when real tokens present)
51
- if (inputTokens + outputTokens > 0) {
52
- session.newContentTokens += inputTokens + outputTokens;
53
- session.cumulativeTokens += inputTokens + outputTokens;
54
- }
57
+ // Accumulate for daemon batching and mid-session cleanup
58
+ session.newContentTokens += inputTokens + outputTokens;
59
+ session.cumulativeTokens += inputTokens + outputTokens;
55
60
 
56
61
  // Track accumulated text output for planning gate
57
- const textLen = event.assistantTexts.reduce((s, t) => s + t.length, 0);
58
62
  session.turnTextLength += textLen;
59
63
 
60
64
  if (textLen > 50) {
@@ -78,6 +82,11 @@ export function createLlmOutputHandler(state: GlobalPluginState) {
78
82
  const thinking = block.thinking ?? block.text ?? "";
79
83
  if (thinking.length > 50) {
80
84
  session.pendingThinking.push(thinking);
85
+ // Cap to prevent unbounded growth in long sessions
86
+ const max = state.config.thresholds.maxPendingThinking;
87
+ if (session.pendingThinking.length > max) {
88
+ session.pendingThinking.splice(0, session.pendingThinking.length - max);
89
+ }
81
90
  }
82
91
  }
83
92
  }
package/src/index.ts CHANGED
@@ -65,6 +65,36 @@ async function runSessionCleanup(
65
65
  state: GlobalPluginState,
66
66
  ): Promise<void> {
67
67
  const { store: s, embeddings: emb } = state;
68
+ const { complete } = state;
69
+
70
+ // 1. Handoff FIRST — highest value, must survive even if cleanup races out
71
+ try {
72
+ const recentTurns = await s.getSessionTurns(session.sessionId, 15)
73
+ .catch(() => [] as { role: string; text: string }[]);
74
+ if (recentTurns.length >= 2) {
75
+ const turnSummary = recentTurns
76
+ .map(t => `[${t.role}] ${t.text.slice(0, 200)}`)
77
+ .join("\n");
78
+
79
+ const handoffResponse = await complete({
80
+ system: "Summarize this session for handoff to your next self. What was worked on, what's unfinished, what to remember. 2-3 sentences. Write in first person.",
81
+ messages: [{ role: "user", content: turnSummary }],
82
+ });
83
+
84
+ const handoffText = handoffResponse.text.trim();
85
+ if (handoffText.length > 20) {
86
+ let embedding: number[] | null = null;
87
+ if (emb.isAvailable()) {
88
+ try { embedding = await emb.embed(handoffText); } catch { /* ok */ }
89
+ }
90
+ await s.createMemory(handoffText, embedding, 8, "handoff", session.sessionId);
91
+ }
92
+ }
93
+ } catch (e) {
94
+ swallow.warn("cleanup:handoff", e);
95
+ }
96
+
97
+ // 2. Everything else in parallel — lower priority, OK if timeout kills it
68
98
  const endOps: Promise<unknown>[] = [];
69
99
 
70
100
  // Final daemon flush — send full session for extraction
@@ -80,14 +110,12 @@ async function runSessionCleanup(
80
110
  }));
81
111
  session.daemon!.sendTurnBatch(turnData, [...session.pendingThinking], []);
82
112
  } catch (e) { swallow.warn("cleanup:finalDaemonFlush", e); }
83
- await session.daemon!.shutdown(45_000).catch(e => swallow.warn("cleanup:daemonShutdown", e));
113
+ await session.daemon!.shutdown(10_000).catch(e => swallow.warn("cleanup:daemonShutdown", e));
84
114
  session.daemon = null;
85
115
  })(),
86
116
  );
87
117
  }
88
118
 
89
- const { complete } = state;
90
-
91
119
  // Skill extraction
92
120
  if (session.taskId) {
93
121
  endOps.push(
@@ -113,10 +141,9 @@ async function runSessionCleanup(
113
141
  .catch(e => { swallow.warn("cleanup:soulGraduation", e); return null; });
114
142
  endOps.push(graduationPromise);
115
143
 
116
- // The session-end LLM call is critical and needs the full 45s.
117
144
  await Promise.race([
118
145
  Promise.allSettled(endOps),
119
- new Promise(resolve => setTimeout(resolve, 45_000)),
146
+ new Promise(resolve => setTimeout(resolve, 150_000)),
120
147
  ]);
121
148
 
122
149
  // If soul graduation just happened, persist a graduation event so the next
@@ -192,33 +219,6 @@ async function runSessionCleanup(
192
219
  } catch (e) {
193
220
  swallow.warn("cleanup:stageTransition", e);
194
221
  }
195
-
196
- // Generate handoff note for next session wakeup
197
- try {
198
- const recentTurns = await s.getSessionTurns(session.sessionId, 15)
199
- .catch(() => [] as { role: string; text: string }[]);
200
- if (recentTurns.length >= 2) {
201
- const turnSummary = recentTurns
202
- .map(t => `[${t.role}] ${t.text.slice(0, 200)}`)
203
- .join("\n");
204
-
205
- const handoffResponse = await complete({
206
- system: "Summarize this session for handoff to your next self. What was worked on, what's unfinished, what to remember. 2-3 sentences. Write in first person.",
207
- messages: [{ role: "user", content: turnSummary }],
208
- });
209
-
210
- const handoffText = handoffResponse.text.trim();
211
- if (handoffText.length > 20) {
212
- let embedding: number[] | null = null;
213
- if (emb.isAvailable()) {
214
- try { embedding = await emb.embed(handoffText); } catch { /* ok */ }
215
- }
216
- await s.createMemory(handoffText, embedding, 8, "handoff", session.sessionId);
217
- }
218
- }
219
- } catch (e) {
220
- swallow.warn("cleanup:handoff", e);
221
- }
222
222
  }
223
223
 
224
224
  /**
@@ -463,6 +463,7 @@ export default definePluginEntry({
463
463
  globalState!.embeddings,
464
464
  session.sessionId,
465
465
  globalState!.complete,
466
+ globalState!.config.thresholds.extractionTimeoutMs,
466
467
  );
467
468
  } catch (e) {
468
469
  swallow.warn("index:startDaemon", e);
@@ -476,7 +477,7 @@ export default definePluginEntry({
476
477
  (session as any)._hasMigratableFiles = true;
477
478
  }
478
479
  })
479
- .catch(e => swallow("index:migrationCheck", e));
480
+ .catch(e => swallow.warn("index:migrationCheck", e));
480
481
  }
481
482
 
482
483
  // Set reflection context window from config
@@ -174,8 +174,8 @@ export async function writeExtractionResults(
174
174
  if (typeof memId !== "string" || !RECORD_ID_RE.test(memId)) continue;
175
175
  counts.resolved++;
176
176
  await store.queryExec(
177
- `UPDATE ${memId} SET status = 'resolved', resolved_at = time::now(), resolved_by = $sid`,
178
- { sid: sessionId },
177
+ `UPDATE type::record($mid) SET status = 'resolved', resolved_at = time::now(), resolved_by = $sid`,
178
+ { mid: memId, sid: sessionId },
179
179
  ).catch(e => swallow.warn("daemon:resolveMemory", e));
180
180
  }
181
181
  })());
package/src/prefetch.ts CHANGED
@@ -46,9 +46,11 @@ export function getPrefetchHitRate(): { hits: number; misses: number; attempts:
46
46
 
47
47
  function evictStale(): void {
48
48
  const now = Date.now();
49
+ const staleKeys: string[] = [];
49
50
  for (const [key, entry] of warmCache) {
50
- if (now - entry.timestamp > CACHE_TTL_MS) warmCache.delete(key);
51
+ if (now - entry.timestamp > CACHE_TTL_MS) staleKeys.push(key);
51
52
  }
53
+ for (const key of staleKeys) warmCache.delete(key);
52
54
  while (warmCache.size > MAX_CACHE_SIZE) {
53
55
  const oldest = warmCache.keys().next().value;
54
56
  if (oldest) warmCache.delete(oldest);
package/src/schema.surql CHANGED
@@ -76,8 +76,6 @@ DEFINE FIELD IF NOT EXISTS model ON turn TYPE option<string>;
76
76
  DEFINE FIELD IF NOT EXISTS usage ON turn TYPE option<object>;
77
77
  DEFINE INDEX IF NOT EXISTS turn_vec_idx ON turn FIELDS embedding HNSW DIMENSION 1024 DIST COSINE;
78
78
  DEFINE INDEX IF NOT EXISTS turn_session_idx ON turn FIELDS session_id;
79
- -- Migration: backfill created_at from timestamp for existing turns
80
- UPDATE turn SET created_at = timestamp WHERE created_at IS NONE AND timestamp IS NOT NONE;
81
79
 
82
80
  -- Identity chunks (agent persona / identity)
83
81
  DEFINE TABLE IF NOT EXISTS identity_chunk SCHEMALESS;
@@ -112,7 +110,6 @@ DEFINE FIELD IF NOT EXISTS source ON memory TYPE option<string>;
112
110
  DEFINE FIELD IF NOT EXISTS created_at ON memory TYPE datetime DEFAULT time::now();
113
111
  DEFINE FIELD IF NOT EXISTS last_accessed ON memory TYPE option<datetime>;
114
112
  DEFINE FIELD IF NOT EXISTS status ON memory TYPE option<string> DEFAULT "active";
115
- UPDATE memory SET status = "active" WHERE status IS NONE;
116
113
  DEFINE FIELD IF NOT EXISTS resolved_at ON memory TYPE option<datetime>;
117
114
  DEFINE FIELD IF NOT EXISTS resolved_by ON memory TYPE option<string>;
118
115
  DEFINE INDEX IF NOT EXISTS memory_vec_idx ON memory FIELDS embedding HNSW DIMENSION 1024 DIST COSINE;
package/src/skills.ts CHANGED
@@ -80,7 +80,7 @@ export async function extractSkill(
80
80
 
81
81
  if (text.trim() === "null" || text.trim() === "None") return null;
82
82
 
83
- const jsonMatch = text.match(/\{[\s\S]*\}/);
83
+ const jsonMatch = text.match(/\{[\s\S]*?\}/);
84
84
  if (!jsonMatch) return null;
85
85
 
86
86
  const parsed = JSON.parse(jsonMatch[0]) as ExtractedSkill;
@@ -154,7 +154,7 @@ export async function supersedeOldSkills(
154
154
  );
155
155
  }
156
156
  }
157
- } catch (e) { swallow("skills:supersedeOld", e); }
157
+ } catch (e) { swallow.warn("skills:supersedeOld", e); }
158
158
  }
159
159
 
160
160
  // --- Skill Retrieval ---
@@ -238,11 +238,11 @@ export async function recordSkillOutcome(
238
238
  try {
239
239
  const field = success ? "success_count" : "failure_count";
240
240
  await store.queryExec(
241
- `UPDATE ${skillId} SET
241
+ `UPDATE type::record($sid) SET
242
242
  ${field} += 1,
243
243
  avg_duration_ms = (avg_duration_ms * (success_count + failure_count - 1) + $dur) / (success_count + failure_count),
244
244
  last_used = time::now()`,
245
- { dur: durationMs },
245
+ { sid: skillId, dur: durationMs },
246
246
  );
247
247
  } catch (e) { swallow("skills:non-critical", e); }
248
248
  }
@@ -289,7 +289,7 @@ export async function graduateCausalToSkills(
289
289
  });
290
290
 
291
291
  const text = resp.text;
292
- const jsonMatch = text.match(/\{[\s\S]*\}/);
292
+ const jsonMatch = text.match(/\{[\s\S]*?\}/);
293
293
  if (!jsonMatch) continue;
294
294
 
295
295
  let parsed: ExtractedSkill;
package/src/soul.ts CHANGED
@@ -457,6 +457,8 @@ export async function reviseSoul(
457
457
  store: SurrealStore,
458
458
  ): Promise<boolean> {
459
459
  if (!store.isAvailable()) return false;
460
+ const ALLOWED_SECTIONS = new Set(["working_style", "emotional_dimensions", "self_observations", "earned_values"]);
461
+ if (!ALLOWED_SECTIONS.has(section)) return false;
460
462
  try {
461
463
  const now = new Date().toISOString();
462
464
  await store.queryExec(
@@ -567,7 +569,7 @@ Be honest, not aspirational. Only claim what the data supports.`;
567
569
  });
568
570
 
569
571
  const text = response.text.trim();
570
- const jsonMatch = text.match(/\{[\s\S]*\}/);
572
+ const jsonMatch = text.match(/\{[\s\S]*?\}/);
571
573
  if (!jsonMatch) return null;
572
574
 
573
575
  const parsed = JSON.parse(jsonMatch[0]);
@@ -842,7 +844,7 @@ CURRENT QUALITY:
842
844
  });
843
845
 
844
846
  const text = response.text.trim();
845
- const jsonMatch = text.match(/\{[\s\S]*\}/);
847
+ const jsonMatch = text.match(/\{[\s\S]*?\}/);
846
848
  if (!jsonMatch) return false;
847
849
 
848
850
  const revisions = JSON.parse(jsonMatch[0]);
package/src/state.ts CHANGED
@@ -54,13 +54,13 @@ export class SessionState {
54
54
  // Memory daemon
55
55
  daemon: MemoryDaemon | null = null;
56
56
  newContentTokens = 0;
57
- readonly DAEMON_TOKEN_THRESHOLD = 4000;
57
+ daemonTokenThreshold = 4000;
58
58
  lastDaemonFlushTurnCount = 0;
59
59
 
60
60
  // Cumulative session token tracking (for mid-session cleanup trigger)
61
61
  cumulativeTokens = 0;
62
62
  lastCleanupTokens = 0;
63
- readonly MID_SESSION_CLEANUP_THRESHOLD = 100_000;
63
+ midSessionCleanupThreshold = 25_000;
64
64
 
65
65
  // Cleanup tracking
66
66
  cleanedUp = false;
@@ -106,6 +106,7 @@ export class GlobalPluginState {
106
106
  complete: CompleteFn;
107
107
  workspaceDir?: string;
108
108
  enqueueSystemEvent?: EnqueueSystemEventFn;
109
+ schemaApplied = false;
109
110
  private sessions = new Map<string, SessionState>();
110
111
 
111
112
  constructor(
@@ -125,6 +126,8 @@ export class GlobalPluginState {
125
126
  let session = this.sessions.get(sessionKey);
126
127
  if (!session) {
127
128
  session = new SessionState(sessionId, sessionKey);
129
+ session.daemonTokenThreshold = this.config.thresholds.daemonTokenThreshold;
130
+ session.midSessionCleanupThreshold = this.config.thresholds.midSessionCleanupThreshold;
128
131
  this.sessions.set(sessionKey, session);
129
132
  }
130
133
  return session;
package/src/surreal.ts CHANGED
@@ -168,7 +168,7 @@ export class SurrealStore {
168
168
  await new Promise((r) => setTimeout(r, BACKOFF_MS[attempt - 1]));
169
169
  } else {
170
170
  console.error(`[ERROR] SurrealDB reconnection failed after ${MAX_ATTEMPTS} attempts.`);
171
- throw e;
171
+ throw new Error("SurrealDB reconnection failed");
172
172
  }
173
173
  }
174
174
  }
@@ -447,7 +447,10 @@ export class SurrealStore {
447
447
  assertRecordId(fromId);
448
448
  assertRecordId(toId);
449
449
  const safeName = edge.replace(/[^a-zA-Z0-9_]/g, "");
450
- await this.queryExec(`RELATE ${fromId}->${safeName}->${toId}`);
450
+ await this.queryExec(
451
+ `RELATE type::record($from)->${safeName}->type::record($to)`,
452
+ { from: fromId, to: toId },
453
+ );
451
454
  }
452
455
 
453
456
  // ── 5-Pillar entity operations ─────────────────────────────────────────
@@ -501,12 +504,12 @@ export class SurrealStore {
501
504
  ): Promise<void> {
502
505
  assertRecordId(sessionId);
503
506
  await this.queryExec(
504
- `UPDATE ${sessionId} SET
507
+ `UPDATE type::record($sid) SET
505
508
  turn_count += 1,
506
509
  total_input_tokens += $input,
507
510
  total_output_tokens += $output,
508
511
  last_active = time::now()`,
509
- { input: inputTokens, output: outputTokens },
512
+ { sid: sessionId, input: inputTokens, output: outputTokens },
510
513
  );
511
514
  }
512
515
 
@@ -514,25 +517,27 @@ export class SurrealStore {
514
517
  assertRecordId(sessionId);
515
518
  if (summary) {
516
519
  await this.queryExec(
517
- `UPDATE ${sessionId} SET ended_at = time::now(), summary = $summary`,
518
- { summary },
520
+ `UPDATE type::record($sid) SET ended_at = time::now(), summary = $summary`,
521
+ { sid: sessionId, summary },
519
522
  );
520
523
  } else {
521
- await this.queryExec(`UPDATE ${sessionId} SET ended_at = time::now()`);
524
+ await this.queryExec(`UPDATE type::record($sid) SET ended_at = time::now()`, { sid: sessionId });
522
525
  }
523
526
  }
524
527
 
525
528
  async markSessionActive(sessionId: string): Promise<void> {
526
529
  assertRecordId(sessionId);
527
530
  await this.queryExec(
528
- `UPDATE ${sessionId} SET cleanup_completed = false, last_active = time::now()`,
531
+ `UPDATE type::record($sid) SET cleanup_completed = false, last_active = time::now()`,
532
+ { sid: sessionId },
529
533
  );
530
534
  }
531
535
 
532
536
  async markSessionEnded(sessionId: string): Promise<void> {
533
537
  assertRecordId(sessionId);
534
538
  await this.queryExec(
535
- `UPDATE ${sessionId} SET ended_at = time::now(), cleanup_completed = true`,
539
+ `UPDATE type::record($sid) SET ended_at = time::now(), cleanup_completed = true`,
540
+ { sid: sessionId },
536
541
  );
537
542
  }
538
543
 
@@ -547,19 +552,39 @@ export class SurrealStore {
547
552
  }
548
553
 
549
554
  async linkSessionToTask(sessionId: string, taskId: string): Promise<void> {
550
- await this.queryExec(`RELATE ${sessionId}->session_task->${taskId}`);
555
+ assertRecordId(sessionId);
556
+ assertRecordId(taskId);
557
+ await this.queryExec(
558
+ `RELATE type::record($from)->session_task->type::record($to)`,
559
+ { from: sessionId, to: taskId },
560
+ );
551
561
  }
552
562
 
553
563
  async linkTaskToProject(taskId: string, projectId: string): Promise<void> {
554
- await this.queryExec(`RELATE ${taskId}->task_part_of->${projectId}`);
564
+ assertRecordId(taskId);
565
+ assertRecordId(projectId);
566
+ await this.queryExec(
567
+ `RELATE type::record($from)->task_part_of->type::record($to)`,
568
+ { from: taskId, to: projectId },
569
+ );
555
570
  }
556
571
 
557
572
  async linkAgentToTask(agentId: string, taskId: string): Promise<void> {
558
- await this.queryExec(`RELATE ${agentId}->performed->${taskId}`);
573
+ assertRecordId(agentId);
574
+ assertRecordId(taskId);
575
+ await this.queryExec(
576
+ `RELATE type::record($from)->performed->type::record($to)`,
577
+ { from: agentId, to: taskId },
578
+ );
559
579
  }
560
580
 
561
581
  async linkAgentToProject(agentId: string, projectId: string): Promise<void> {
562
- await this.queryExec(`RELATE ${agentId}->owns->${projectId}`);
582
+ assertRecordId(agentId);
583
+ assertRecordId(projectId);
584
+ await this.queryExec(
585
+ `RELATE type::record($from)->owns->type::record($to)`,
586
+ { from: agentId, to: projectId },
587
+ );
563
588
  }
564
589
 
565
590
  // ── Graph traversal ────────────────────────────────────────────────────
@@ -600,7 +625,7 @@ export class SurrealStore {
600
625
  for (let hop = 0; hop < hops && frontier.length > 0; hop++) {
601
626
  const forwardQueries = frontier.flatMap((id) =>
602
627
  forwardEdges.map((edge) =>
603
- this.queryFirst<any>(`${selectFields} FROM ${id}->${edge}->? LIMIT 3`, bindings).catch(
628
+ this.queryFirst<any>(`${selectFields} FROM type::record($nid)->${edge}->? LIMIT 3`, { ...bindings, nid: id }).catch(
604
629
  (e) => {
605
630
  swallow.warn("surreal:graphExpand", e);
606
631
  return [] as Record<string, unknown>[];
@@ -611,7 +636,7 @@ export class SurrealStore {
611
636
 
612
637
  const reverseQueries = frontier.flatMap((id) =>
613
638
  reverseEdges.map((edge) =>
614
- this.queryFirst<any>(`${selectFields} FROM ${id}<-${edge}<-? LIMIT 3`, bindings).catch(
639
+ this.queryFirst<any>(`${selectFields} FROM type::record($nid)<-${edge}<-? LIMIT 3`, { ...bindings, nid: id }).catch(
615
640
  (e) => {
616
641
  swallow.warn("surreal:graphExpand", e);
617
642
  return [] as Record<string, unknown>[];
@@ -662,7 +687,8 @@ export class SurrealStore {
662
687
  try {
663
688
  assertRecordId(id);
664
689
  await this.queryExec(
665
- `UPDATE ${id} SET access_count += 1, last_accessed = time::now()`,
690
+ `UPDATE type::record($rid) SET access_count += 1, last_accessed = time::now()`,
691
+ { rid: id },
666
692
  );
667
693
  } catch (e) {
668
694
  swallow.warn("surreal:bumpAccessCounts", e);
@@ -683,7 +709,10 @@ export class SurrealStore {
683
709
  );
684
710
  if (rows.length > 0) {
685
711
  const id = String(rows[0].id);
686
- await this.queryExec(`UPDATE ${id} SET access_count += 1, last_accessed = time::now()`);
712
+ await this.queryExec(
713
+ `UPDATE type::record($cid) SET access_count += 1, last_accessed = time::now()`,
714
+ { cid: id },
715
+ );
687
716
  return id;
688
717
  }
689
718
  const emb = embedding?.length ? embedding : undefined;
@@ -739,8 +768,8 @@ export class SurrealStore {
739
768
  const existing = dupes[0];
740
769
  const newImp = Math.max(existing.importance ?? 0, importance);
741
770
  await this.queryExec(
742
- `UPDATE ${existing.id} SET access_count += 1, importance = $imp, last_accessed = time::now()`,
743
- { imp: newImp },
771
+ `UPDATE type::record($eid) SET access_count += 1, importance = $imp, last_accessed = time::now()`,
772
+ { eid: String(existing.id), imp: newImp },
744
773
  );
745
774
  return String(existing.id);
746
775
  }
@@ -813,10 +842,11 @@ export class SurrealStore {
813
842
  fields: Partial<Pick<CoreMemoryEntry, "text" | "category" | "priority" | "tier" | "active">>,
814
843
  ): Promise<boolean> {
815
844
  assertRecordId(id);
845
+ const ALLOWED_FIELDS = new Set(["text", "category", "priority", "tier", "active"]);
816
846
  const sets: string[] = [];
817
- const bindings: Record<string, unknown> = {};
847
+ const bindings: Record<string, unknown> = { _rid: id };
818
848
  for (const [key, val] of Object.entries(fields)) {
819
- if (val !== undefined) {
849
+ if (val !== undefined && ALLOWED_FIELDS.has(key)) {
820
850
  sets.push(`${key} = $${key}`);
821
851
  bindings[key] = val;
822
852
  }
@@ -824,7 +854,7 @@ export class SurrealStore {
824
854
  if (sets.length === 0) return false;
825
855
  sets.push("updated_at = time::now()");
826
856
  const rows = await this.queryFirst<{ id: string }>(
827
- `UPDATE ${id} SET ${sets.join(", ")} RETURN id`,
857
+ `UPDATE type::record($_rid) SET ${sets.join(", ")} RETURN id`,
828
858
  bindings,
829
859
  );
830
860
  return rows.length > 0;
@@ -832,7 +862,10 @@ export class SurrealStore {
832
862
 
833
863
  async deleteCoreMemory(id: string): Promise<void> {
834
864
  assertRecordId(id);
835
- await this.queryExec(`UPDATE ${id} SET active = false, updated_at = time::now()`);
865
+ await this.queryExec(
866
+ `UPDATE type::record($rid) SET active = false, updated_at = time::now()`,
867
+ { rid: id },
868
+ );
836
869
  }
837
870
 
838
871
  async deactivateSessionMemories(sessionId: string): Promise<void> {
@@ -1062,16 +1095,16 @@ export class SurrealStore {
1062
1095
 
1063
1096
  async runMemoryMaintenance(): Promise<void> {
1064
1097
  try {
1065
- await this.queryExec(
1066
- `UPDATE memory SET importance = math::max([importance * 0.95, 2.0]) WHERE importance > 2.0`,
1067
- );
1068
- await this.queryExec(
1069
- `UPDATE memory SET importance = math::max([importance, 3 + ((
1098
+ // Single round-trip to reduce transaction conflict window
1099
+ await this.queryExec(`
1100
+ UPDATE memory SET importance = math::max([importance * 0.95, 2.0]) WHERE importance > 2.0;
1101
+ UPDATE memory SET importance = math::max([importance, 3 + ((
1070
1102
  SELECT VALUE avg_utilization FROM memory_utility_cache WHERE memory_id = string::concat(meta::tb(id), ":", meta::id(id)) LIMIT 1
1071
- )[0] ?? 0) * 4]) WHERE importance < 7`,
1072
- );
1103
+ )[0] ?? 0) * 4]) WHERE importance < 7;
1104
+ `);
1073
1105
  } catch (e) {
1074
- swallow.warn("surreal:runMemoryMaintenance", e);
1106
+ // Transaction conflicts expected when daemon writes concurrently — silent
1107
+ swallow("surreal:runMemoryMaintenance", e);
1075
1108
  }
1076
1109
  }
1077
1110
 
@@ -1190,10 +1223,10 @@ export class SurrealStore {
1190
1223
  assertRecordId(String(keep));
1191
1224
  assertRecordId(String(drop));
1192
1225
  await this.queryExec(
1193
- `UPDATE ${keep} SET access_count += 1, importance = math::max([importance, $imp])`,
1194
- { imp: dupe.importance },
1226
+ `UPDATE type::record($kid) SET access_count += 1, importance = math::max([importance, $imp])`,
1227
+ { kid: String(keep), imp: dupe.importance },
1195
1228
  );
1196
- await this.queryExec(`DELETE ${drop}`);
1229
+ await this.queryExec(`DELETE type::record($did)`, { did: String(drop) });
1197
1230
  seen.add(String(drop));
1198
1231
  merged++;
1199
1232
  }
@@ -1218,7 +1251,10 @@ export class SurrealStore {
1218
1251
  try {
1219
1252
  const emb = await embedFn(mem.text);
1220
1253
  if (!emb) continue;
1221
- await this.queryExec(`UPDATE ${mem.id} SET embedding = $emb`, { emb });
1254
+ await this.queryExec(
1255
+ `UPDATE type::record($mid) SET embedding = $emb`,
1256
+ { mid: String(mem.id), emb },
1257
+ );
1222
1258
 
1223
1259
  const dupes = await this.queryFirst<{
1224
1260
  id: string;
@@ -1247,10 +1283,10 @@ export class SurrealStore {
1247
1283
  assertRecordId(String(keep));
1248
1284
  assertRecordId(String(drop));
1249
1285
  await this.queryExec(
1250
- `UPDATE ${keep} SET access_count += 1, importance = math::max([importance, $imp])`,
1251
- { imp: dupe.importance },
1286
+ `UPDATE type::record($kid) SET access_count += 1, importance = math::max([importance, $imp])`,
1287
+ { kid: String(keep), imp: dupe.importance },
1252
1288
  );
1253
- await this.queryExec(`DELETE ${drop}`);
1289
+ await this.queryExec(`DELETE type::record($did)`, { did: String(drop) });
1254
1290
  seen.add(String(drop));
1255
1291
  merged++;
1256
1292
  }
@@ -1350,9 +1386,10 @@ export class SurrealStore {
1350
1386
  memoryId: string,
1351
1387
  ): Promise<void> {
1352
1388
  assertRecordId(checkpointId);
1353
- await this.queryExec(`UPDATE ${checkpointId} SET status = "complete", memory_id = $mid`, {
1354
- mid: memoryId,
1355
- });
1389
+ await this.queryExec(
1390
+ `UPDATE type::record($cpid) SET status = "complete", memory_id = $mid`,
1391
+ { cpid: checkpointId, mid: memoryId },
1392
+ );
1356
1393
  }
1357
1394
 
1358
1395
  async getPendingCheckpoints(
@@ -207,7 +207,7 @@ async function verifyAction(store: any, recordId?: string) {
207
207
  return { content: [{ type: "text" as const, text: `Error: invalid record ID "${recordId}".` }], details: null };
208
208
  }
209
209
 
210
- const rows = await store.queryFirst(`SELECT * FROM ${recordId}`);
210
+ const rows = await store.queryFirst(`SELECT * FROM type::record($rid)`, { rid: recordId });
211
211
  if (rows.length === 0) {
212
212
  return { content: [{ type: "text" as const, text: `Record not found: ${recordId}` }], details: { exists: false } };
213
213
  }
package/src/wakeup.ts CHANGED
@@ -297,7 +297,7 @@ Return ONLY valid JSON.`,
297
297
 
298
298
  const text = response.text;
299
299
 
300
- const jsonMatch = text.match(/\{[\s\S]*\}/);
300
+ const jsonMatch = text.match(/\{[\s\S]*?\}/);
301
301
  if (!jsonMatch) return null;
302
302
 
303
303
  let raw: any;
@@ -22,7 +22,7 @@
22
22
  * copyFile+unlink instead of rename (cross-filesystem safe).
23
23
  */
24
24
 
25
- import { readFile, readdir, stat, copyFile, unlink, mkdir, writeFile, rmdir } from "node:fs/promises";
25
+ import { readFile, readdir, stat, lstat, copyFile, unlink, mkdir, writeFile, rmdir } from "node:fs/promises";
26
26
  import { join, basename, extname, relative, dirname, sep } from "node:path";
27
27
  import type { SurrealStore } from "./surreal.js";
28
28
  import type { EmbeddingService } from "./embeddings.js";
@@ -376,10 +376,10 @@ async function tryReadFile(absPath: string, rootDir: string): Promise<WorkspaceF
376
376
  if (SKIP_FILES.has(name)) return null;
377
377
 
378
378
  let s;
379
- try { s = await stat(absPath); }
379
+ try { s = await lstat(absPath); }
380
380
  catch { return null; }
381
381
 
382
- if (!s.isFile()) return null;
382
+ if (s.isSymbolicLink() || !s.isFile()) return null;
383
383
  if (s.size === 0 || s.size > MAX_FILE_SIZE) return null;
384
384
 
385
385
  try {
@@ -524,7 +524,7 @@ function parseFrontmatter(content: string): { frontmatter: Record<string, unknow
524
524
  }
525
525
 
526
526
  // Try JSON metadata block if present
527
- const jsonMatch = fmBlock.match(/metadata:\s*\n\s*(\{[\s\S]*\})/);
527
+ const jsonMatch = fmBlock.match(/metadata:\s*\n\s*(\{[\s\S]*?\})/);
528
528
  if (jsonMatch) {
529
529
  try {
530
530
  result.metadata = JSON.parse(jsonMatch[1]);