formative-memory 0.1.0 → 0.2.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.js CHANGED
@@ -1,4 +1,4 @@
1
- import { n as MemorySourceGuard, r as TemporalStateGuard, t as MemoryDatabase } from "./db-D2pzT6fw.js";
1
+ import { n as MemorySourceGuard, r as TemporalStateGuard, t as MemoryDatabase } from "./db-D1Sc76VE.js";
2
2
  import { basename, dirname, isAbsolute, join, relative, sep } from "node:path";
3
3
  import { appendFileSync, copyFileSync, existsSync, mkdirSync, readFileSync, readdirSync, realpathSync, statSync, writeFileSync } from "node:fs";
4
4
  import { createGeminiEmbeddingProvider, createOpenAiEmbeddingProvider, getMemoryEmbeddingProvider, listMemoryEmbeddingProviders } from "openclaw/plugin-sdk/memory-core-host-engine-embeddings";
@@ -2795,7 +2795,8 @@ const memoryConfigSchema = { parse(value) {
2795
2795
  "autoCapture",
2796
2796
  "autoRecall",
2797
2797
  "verbose",
2798
- "logQueries"
2798
+ "logQueries",
2799
+ "requireEmbedding"
2799
2800
  ], "memory config");
2800
2801
  let provider = "auto";
2801
2802
  let model;
@@ -2812,13 +2813,75 @@ const memoryConfigSchema = { parse(value) {
2812
2813
  model
2813
2814
  },
2814
2815
  dbPath: typeof cfg.dbPath === "string" ? cfg.dbPath : "~/.openclaw/memory/associative",
2815
- autoCapture: cfg.autoCapture === true,
2816
+ autoCapture: cfg.autoCapture !== false,
2816
2817
  autoRecall: cfg.autoRecall !== false,
2817
2818
  verbose: cfg.verbose === true,
2818
- logQueries: cfg.logQueries === true
2819
+ logQueries: cfg.logQueries === true,
2820
+ requireEmbedding: cfg.requireEmbedding !== false
2819
2821
  };
2820
2822
  } };
2821
2823
  //#endregion
2824
+ //#region src/logger.ts
2825
+ const LEVEL_ORDER = {
2826
+ debug: 0,
2827
+ info: 1,
2828
+ warn: 2,
2829
+ error: 3
2830
+ };
2831
+ /**
2832
+ * Create a logger that forwards to the host logger at or above the
2833
+ * configured minimum level.
2834
+ */
2835
+ function createLogger(opts) {
2836
+ const envDebug = process.env.FORMATIVE_MEMORY_DEBUG === "1";
2837
+ const minOrder = LEVEL_ORDER[opts.verbose || envDebug ? "debug" : "info"];
2838
+ const host = opts.host;
2839
+ function stringifyArg(a) {
2840
+ if (a instanceof Error) return a.stack || a.message;
2841
+ if (typeof a === "object" && a !== null) try {
2842
+ return JSON.stringify(a);
2843
+ } catch {
2844
+ return "[Unserializable]";
2845
+ }
2846
+ return String(a);
2847
+ }
2848
+ function emit(level, msg, args) {
2849
+ if (LEVEL_ORDER[level] < minOrder) return;
2850
+ const prefix = `[formative-memory] [${level}]`;
2851
+ if (host) {
2852
+ const line = `${prefix} ${msg}${args.length > 0 ? " " + args.map(stringifyArg).join(" ") : ""}`;
2853
+ if (level === "debug") (host.debug ?? host.info)(line);
2854
+ else host[level](line);
2855
+ } else {
2856
+ const line = `${prefix} ${msg}`;
2857
+ if (level === "error") console.error(line, ...args);
2858
+ else if (level === "warn") console.warn(line, ...args);
2859
+ else console.log(line, ...args);
2860
+ }
2861
+ }
2862
+ return {
2863
+ debug: (msg, ...args) => emit("debug", msg, args),
2864
+ info: (msg, ...args) => emit("info", msg, args),
2865
+ warn: (msg, ...args) => emit("warn", msg, args),
2866
+ error: (msg, ...args) => emit("error", msg, args),
2867
+ isDebugEnabled: () => minOrder === LEVEL_ORDER.debug
2868
+ };
2869
+ }
2870
+ /** Sanitize content for log output: collapse whitespace/control chars, truncate. */
2871
+ function preview(text, max = 60) {
2872
+ const sanitized = text.replace(/[\x00-\x1F\x7F\s]+/g, " ").trim();
2873
+ if (!sanitized) return "<empty>";
2874
+ return sanitized.length > max ? sanitized.slice(0, max - 1) + "…" : sanitized;
2875
+ }
2876
+ /** A silent logger that discards all output. Useful for tests. */
2877
+ const nullLogger = {
2878
+ debug: () => {},
2879
+ info: () => {},
2880
+ warn: () => {},
2881
+ error: () => {},
2882
+ isDebugEnabled: () => false
2883
+ };
2884
+ //#endregion
2822
2885
  //#region src/consolidation-steps.ts
2823
2886
  /** Retrieval reinforcement learning rate (η). */
2824
2887
  const ETA = .7;
@@ -2843,9 +2906,10 @@ const MODE_WEIGHT_BM25_ONLY = .5;
2843
2906
  * @param lastConsolidationMs Timestamp (ms) of last consolidation run, or null if never run.
2844
2907
  * @param nowMs Current time in ms (injectable for testing).
2845
2908
  */
2846
- function applyCatchUpDecay(db, lastConsolidationMs, nowMs = Date.now()) {
2909
+ function applyCatchUpDecay(db, lastConsolidationMs, nowMs = Date.now(), log = nullLogger) {
2847
2910
  if (lastConsolidationMs == null || !Number.isFinite(lastConsolidationMs)) return 0;
2848
2911
  const allMemories = db.getAllMemories();
2912
+ const debug = log.isDebugEnabled();
2849
2913
  let count = 0;
2850
2914
  const dayMs = 1e3 * 60 * 60 * 24;
2851
2915
  for (const mem of allMemories) {
@@ -2856,6 +2920,7 @@ function applyCatchUpDecay(db, lastConsolidationMs, nowMs = Date.now()) {
2856
2920
  const effectiveCycles = Math.min(cycles, 30);
2857
2921
  const factor = mem.consolidated ? DECAY_CONSOLIDATED : DECAY_WORKING;
2858
2922
  const newStrength = mem.strength * Math.pow(factor, effectiveCycles);
2923
+ if (debug) log.debug(`catch-up decay: "${preview(mem.content)}" ${mem.strength.toFixed(3)} → ${newStrength.toFixed(3)} (${effectiveCycles} cycles, ${mem.consolidated ? "consolidated" : "working"})`);
2859
2924
  db.updateStrength(mem.id, newStrength);
2860
2925
  count++;
2861
2926
  }
@@ -2863,8 +2928,11 @@ function applyCatchUpDecay(db, lastConsolidationMs, nowMs = Date.now()) {
2863
2928
  const globalCycles = Math.max(0, Math.floor(globalDaysSince) - 1);
2864
2929
  if (globalCycles > 0) {
2865
2930
  const effectiveGlobal = Math.min(globalCycles, 30);
2866
- db.decayAllAssociationWeights(Math.pow(DECAY_ASSOCIATION, effectiveGlobal));
2931
+ const associationFactor = Math.pow(DECAY_ASSOCIATION, effectiveGlobal);
2932
+ if (debug) log.debug(`catch-up decay: associations ×${associationFactor.toFixed(4)} (${effectiveGlobal} cycles)`);
2933
+ db.decayAllAssociationWeights(associationFactor);
2867
2934
  }
2935
+ if (count > 0) log.info(`catch-up decay: ${count} memories adjusted for missed cycles`);
2868
2936
  return count;
2869
2937
  }
2870
2938
  /**
@@ -2879,7 +2947,7 @@ function applyCatchUpDecay(db, lastConsolidationMs, nowMs = Date.now()) {
2879
2947
  *
2880
2948
  * Returns count of memories reinforced.
2881
2949
  */
2882
- function applyReinforcement(db) {
2950
+ function applyReinforcement(db, log = nullLogger) {
2883
2951
  const pendingAttrs = db.getUnreinforcedAttributions();
2884
2952
  if (pendingAttrs.length === 0) return 0;
2885
2953
  const reinforcements = /* @__PURE__ */ new Map();
@@ -2890,6 +2958,7 @@ function applyReinforcement(db) {
2890
2958
  reinforcements.set(attr.memory_id, current + reinforcement);
2891
2959
  }
2892
2960
  return db.transaction(() => {
2961
+ const debug = log.isDebugEnabled();
2893
2962
  let count = 0;
2894
2963
  for (const [memoryId, totalReinforcement] of reinforcements) {
2895
2964
  if (totalReinforcement === 0) continue;
@@ -2898,10 +2967,12 @@ function applyReinforcement(db) {
2898
2967
  const newStrength = Math.max(0, Math.min(mem.strength + totalReinforcement, 1));
2899
2968
  if (newStrength !== mem.strength) {
2900
2969
  db.updateStrength(memoryId, newStrength);
2970
+ if (debug) log.debug(`reinforce: "${preview(mem.content)}" ${mem.strength.toFixed(3)} → ${newStrength.toFixed(3)} (+${totalReinforcement.toFixed(3)})`);
2901
2971
  count++;
2902
2972
  }
2903
2973
  }
2904
2974
  for (const attr of pendingAttrs) db.markAttributionsReinforced(attr.message_id, attr.memory_id);
2975
+ if (count > 0) log.info(`reinforce: ${count} memories strengthened from ${pendingAttrs.length} attributions`);
2905
2976
  return count;
2906
2977
  });
2907
2978
  }
@@ -2913,16 +2984,20 @@ function applyReinforcement(db) {
2913
2984
  *
2914
2985
  * Returns count of memories decayed.
2915
2986
  */
2916
- function applyDecay(db) {
2987
+ function applyDecay(db, log = nullLogger) {
2917
2988
  const allMemories = db.getAllMemories();
2989
+ const debug = log.isDebugEnabled();
2918
2990
  let count = 0;
2919
2991
  for (const mem of allMemories) {
2920
2992
  const factor = mem.consolidated ? DECAY_CONSOLIDATED : DECAY_WORKING;
2921
2993
  const newStrength = mem.strength * factor;
2994
+ if (debug) log.debug(`decay: "${preview(mem.content)}" ${mem.strength.toFixed(3)} → ${newStrength.toFixed(3)} (×${factor})`);
2922
2995
  db.updateStrength(mem.id, newStrength);
2923
2996
  count++;
2924
2997
  }
2998
+ if (debug) log.debug(`decay: associations ×${DECAY_ASSOCIATION}`);
2925
2999
  applyAssociationDecay(db);
3000
+ if (count > 0) log.info(`decay: ${count} memories decayed`);
2926
3001
  return count;
2927
3002
  }
2928
3003
  /**
@@ -2946,19 +3021,22 @@ const CO_RETRIEVAL_BASE_WEIGHT = .1;
2946
3021
  *
2947
3022
  * Returns count of associations updated.
2948
3023
  */
2949
- function updateCoRetrievalAssociations(db) {
3024
+ function updateCoRetrievalAssociations(db, log = nullLogger) {
2950
3025
  const groups = db.getCoRetrievalGroups();
2951
3026
  const now = (/* @__PURE__ */ new Date()).toISOString();
2952
3027
  const validIds = new Set(db.getAllMemories().map((m) => m.id));
3028
+ const debug = log.isDebugEnabled();
2953
3029
  let count = 0;
2954
3030
  for (const group of groups) {
2955
3031
  const ids = group.memory_ids.filter((id) => validIds.has(id));
2956
3032
  if (ids.length < 2) continue;
2957
3033
  for (let i = 0; i < ids.length; i++) for (let j = i + 1; j < ids.length; j++) {
2958
3034
  db.upsertAssociationProbOr(ids[i], ids[j], CO_RETRIEVAL_BASE_WEIGHT, now);
3035
+ if (debug) log.debug(`associate: co-retrieval ${ids[i].slice(0, 8)}…↔${ids[j].slice(0, 8)}… (+${CO_RETRIEVAL_BASE_WEIGHT})`);
2959
3036
  count++;
2960
3037
  }
2961
3038
  }
3039
+ if (count > 0) log.info(`associate: ${count} co-retrieval associations updated from ${groups.length} turn groups`);
2962
3040
  return count;
2963
3041
  }
2964
3042
  /**
@@ -2971,9 +3049,10 @@ function updateCoRetrievalAssociations(db) {
2971
3049
  *
2972
3050
  * Returns count of associations created/updated.
2973
3051
  */
2974
- function updateTransitiveAssociations(db, maxUpdates = 100) {
3052
+ function updateTransitiveAssociations(db, maxUpdates = 100, log = nullLogger) {
2975
3053
  const allMemories = db.getAllMemories();
2976
3054
  const now = (/* @__PURE__ */ new Date()).toISOString();
3055
+ const debug = log.isDebugEnabled();
2977
3056
  let count = 0;
2978
3057
  for (const mem of allMemories) {
2979
3058
  if (count >= maxUpdates) break;
@@ -2990,9 +3069,11 @@ function updateTransitiveAssociations(db, maxUpdates = 100) {
2990
3069
  const newWeight = existing + transitiveWeight - existing * transitiveWeight;
2991
3070
  if (newWeight - existing < 1e-9) continue;
2992
3071
  db.upsertAssociation(otherId1, otherId2, newWeight, now);
3072
+ if (debug) log.debug(`associate: transitive ${otherId1.slice(0, 8)}…↔${otherId2.slice(0, 8)}… via ${mem.id.slice(0, 8)}… weight=${newWeight.toFixed(3)}`);
2993
3073
  count++;
2994
3074
  }
2995
3075
  }
3076
+ if (count > 0) log.info(`associate: ${count} transitive associations created/updated`);
2996
3077
  return count;
2997
3078
  }
2998
3079
  /** Association weight threshold below which associations are pruned. */
@@ -3005,14 +3086,17 @@ const PRUNE_ASSOCIATION_THRESHOLD = .01;
3005
3086
  *
3006
3087
  * Returns count of memories pruned.
3007
3088
  */
3008
- function applyPruning(db) {
3089
+ function applyPruning(db, log = nullLogger) {
3009
3090
  const allMemories = db.getAllMemories();
3091
+ const debug = log.isDebugEnabled();
3010
3092
  let memoriesPruned = 0;
3011
3093
  for (const mem of allMemories) if (mem.strength <= .05) {
3094
+ if (debug) log.debug(`prune: removing "${preview(mem.content, 80)}" (strength=${mem.strength.toFixed(3)}, type=${mem.type})`);
3012
3095
  db.deleteMemory(mem.id);
3013
3096
  memoriesPruned++;
3014
3097
  }
3015
3098
  const associationsPruned = db.pruneWeakAssociations(PRUNE_ASSOCIATION_THRESHOLD);
3099
+ if (memoriesPruned > 0 || associationsPruned > 0) log.info(`prune: ${memoriesPruned} memories, ${associationsPruned} associations removed`);
3016
3100
  return {
3017
3101
  memoriesPruned,
3018
3102
  associationsPruned
@@ -3025,9 +3109,10 @@ function applyPruning(db) {
3025
3109
  *
3026
3110
  * Returns count of memories transitioned.
3027
3111
  */
3028
- function applyTemporalTransitions(db) {
3112
+ function applyTemporalTransitions(db, log = nullLogger) {
3029
3113
  const now = /* @__PURE__ */ new Date();
3030
3114
  const allMemories = db.getAllMemories();
3115
+ const debug = log.isDebugEnabled();
3031
3116
  let count = 0;
3032
3117
  for (const mem of allMemories) {
3033
3118
  if (!mem.temporal_anchor) continue;
@@ -3039,10 +3124,12 @@ function applyTemporalTransitions(db) {
3039
3124
  if ((now.getTime() - anchor.getTime()) / (1e3 * 60 * 60) >= 24) newState = "past";
3040
3125
  }
3041
3126
  if (newState) {
3127
+ if (debug) log.debug(`temporal: "${preview(mem.content)}" ${mem.temporal_state} → ${newState}`);
3042
3128
  db.updateTemporalState(mem.id, newState);
3043
3129
  count++;
3044
3130
  }
3045
3131
  }
3132
+ if (count > 0) log.info(`temporal: ${count} memories transitioned`);
3046
3133
  return count;
3047
3134
  }
3048
3135
  /**
@@ -3053,9 +3140,11 @@ function applyTemporalTransitions(db) {
3053
3140
  *
3054
3141
  * Returns count of exposure rows deleted.
3055
3142
  */
3056
- function provenanceGC(db, cutoffDays = 30) {
3143
+ function provenanceGC(db, cutoffDays = 30, log = nullLogger) {
3057
3144
  const cutoffDate = (/* @__PURE__ */ new Date(Date.now() - cutoffDays * 24 * 60 * 60 * 1e3)).toISOString();
3058
- return db.deleteExposuresOlderThan(cutoffDate);
3145
+ const count = db.deleteExposuresOlderThan(cutoffDate);
3146
+ if (count > 0) log.debug(`gc: ${count} exposure rows older than ${cutoffDays}d removed`);
3147
+ return count;
3059
3148
  }
3060
3149
  //#endregion
3061
3150
  //#region src/merge-candidates.ts
@@ -3183,11 +3272,12 @@ function contentHash(content) {
3183
3272
  * refresh strength to 1.0, handle sources normally.
3184
3273
  * 3. newId is novel: create new memory with source="consolidation".
3185
3274
  */
3186
- async function executeMerge(db, pair, contentProducer, embedder) {
3275
+ async function executeMerge(db, pair, contentProducer, embedder, log = nullLogger) {
3187
3276
  if (pair.a === pair.b) throw new Error(`Merge failed: cannot merge memory with itself (${pair.a})`);
3188
3277
  const memA = db.getMemory(pair.a);
3189
3278
  const memB = db.getMemory(pair.b);
3190
3279
  if (!memA || !memB) throw new Error(`Merge failed: memory not found (${pair.a}, ${pair.b})`);
3280
+ if (log.isDebugEnabled()) log.debug(`merge: combining: A: "${preview(memA.content, 100)}" B: "${preview(memB.content, 100)}"`);
3191
3281
  const merged = await contentProducer({
3192
3282
  id: memA.id,
3193
3283
  content: memA.content,
@@ -3208,15 +3298,19 @@ async function executeMerge(db, pair, contentProducer, embedder) {
3208
3298
  } catch {}
3209
3299
  return db.transaction(() => {
3210
3300
  let canonicalId;
3301
+ let outcome;
3211
3302
  if (isAbsorption) {
3303
+ outcome = "absorption";
3212
3304
  canonicalId = newId;
3213
3305
  db.updateStrength(canonicalId, 1);
3214
3306
  } else {
3215
3307
  const existing = db.getMemory(newId);
3216
3308
  if (existing) {
3309
+ outcome = "reuse";
3217
3310
  if (existing.content !== merged.content) throw new Error(`Merge failed: hash collision for ${newId} with different content`);
3218
3311
  db.updateStrength(newId, 1);
3219
3312
  } else {
3313
+ outcome = "new";
3220
3314
  db.insertMemory({
3221
3315
  id: newId,
3222
3316
  type: merged.type,
@@ -3250,6 +3344,11 @@ async function executeMerge(db, pair, contentProducer, embedder) {
3250
3344
  }
3251
3345
  }
3252
3346
  inheritAssociations(db, [memA.id, memB.id], canonicalId, now);
3347
+ log.info(`merge: ${outcome} (${pair.a.slice(0, 8)}… + ${pair.b.slice(0, 8)}…) → "${preview(merged.content, 80)}" (${canonicalId.slice(0, 8)}…)`);
3348
+ if (log.isDebugEnabled()) {
3349
+ if (originalsWeakened.length > 0) log.debug(`merge: weakened originals: ${originalsWeakened.map((id) => id.slice(0, 8) + "…").join(", ")}`);
3350
+ if (intermediatesDeleted.length > 0) log.debug(`merge: deleted intermediates: ${intermediatesDeleted.map((id) => id.slice(0, 8) + "…").join(", ")}`);
3351
+ }
3253
3352
  return {
3254
3353
  newMemoryId: canonicalId,
3255
3354
  mergedFrom: [pair.a, pair.b],
@@ -3265,18 +3364,19 @@ async function executeMerge(db, pair, contentProducer, embedder) {
3265
3364
  * Pairs are processed in order (highest score first).
3266
3365
  * Skips pairs where either memory was already consumed.
3267
3366
  */
3268
- async function executeMerges(db, pairs, contentProducer, embedder) {
3367
+ async function executeMerges(db, pairs, contentProducer, embedder, log = nullLogger) {
3269
3368
  const consumed = /* @__PURE__ */ new Set();
3270
3369
  const results = [];
3271
3370
  for (const pair of pairs) {
3272
3371
  if (consumed.has(pair.a) || consumed.has(pair.b)) continue;
3273
3372
  if (!db.getMemory(pair.a) || !db.getMemory(pair.b)) continue;
3274
- const result = await executeMerge(db, pair, contentProducer, embedder);
3373
+ const result = await executeMerge(db, pair, contentProducer, embedder, log);
3275
3374
  results.push(result);
3276
3375
  consumed.add(pair.a);
3277
3376
  consumed.add(pair.b);
3278
3377
  consumed.add(result.newMemoryId);
3279
3378
  }
3379
+ if (results.length > 0) log.info(`merge: ${results.length} merges completed`);
3280
3380
  return results;
3281
3381
  }
3282
3382
  function isIntermediate(mem) {
@@ -3339,6 +3439,7 @@ function inheritAssociations(db, sourceIds, canonicalId, now) {
3339
3439
  */
3340
3440
  async function runConsolidation(params) {
3341
3441
  const start = Date.now();
3442
+ const log = params.logger ?? nullLogger;
3342
3443
  const consolidationCutoff = (/* @__PURE__ */ new Date()).toISOString();
3343
3444
  const summary = {
3344
3445
  catchUpDecayed: 0,
@@ -3350,6 +3451,7 @@ async function runConsolidation(params) {
3350
3451
  transitioned: 0,
3351
3452
  exposuresGc: 0
3352
3453
  };
3454
+ log.info("consolidation: starting");
3353
3455
  params.db.transaction(() => {
3354
3456
  const lastAt = params.db.getState("last_consolidation_at");
3355
3457
  let lastConsolidationMs = null;
@@ -3357,13 +3459,13 @@ async function runConsolidation(params) {
3357
3459
  const ms = new Date(lastAt).getTime();
3358
3460
  if (Number.isFinite(ms)) lastConsolidationMs = ms;
3359
3461
  }
3360
- summary.catchUpDecayed = applyCatchUpDecay(params.db, lastConsolidationMs);
3361
- summary.reinforced = applyReinforcement(params.db);
3362
- summary.decayed = applyDecay(params.db);
3363
- updateCoRetrievalAssociations(params.db);
3364
- updateTransitiveAssociations(params.db);
3365
- summary.transitioned = applyTemporalTransitions(params.db);
3366
- const pruneResult = applyPruning(params.db);
3462
+ summary.catchUpDecayed = applyCatchUpDecay(params.db, lastConsolidationMs, Date.now(), log);
3463
+ summary.reinforced = applyReinforcement(params.db, log);
3464
+ summary.decayed = applyDecay(params.db, log);
3465
+ updateCoRetrievalAssociations(params.db, log);
3466
+ updateTransitiveAssociations(params.db, 100, log);
3467
+ summary.transitioned = applyTemporalTransitions(params.db, log);
3468
+ const pruneResult = applyPruning(params.db, log);
3367
3469
  summary.pruned = pruneResult.memoriesPruned;
3368
3470
  summary.prunedAssociations = pruneResult.associationsPruned;
3369
3471
  });
@@ -3372,6 +3474,7 @@ async function runConsolidation(params) {
3372
3474
  const lastAt = params.db.getState("last_consolidation_at");
3373
3475
  const sourceMems = params.db.getMergeSources(MERGE_SOURCE_MIN_STRENGTH, lastAt);
3374
3476
  const targetMems = params.db.getMergeTargets(MERGE_TARGET_MIN_STRENGTH);
3477
+ log.debug(`merge: ${sourceMems.length} sources, ${targetMems.length} targets`);
3375
3478
  const uniqueIds = [...new Set([...sourceMems, ...targetMems].map((m) => m.id))];
3376
3479
  const embeddingMap = params.db.getEmbeddingsByIds(uniqueIds);
3377
3480
  const toCandidate = (m) => ({
@@ -3381,14 +3484,19 @@ async function runConsolidation(params) {
3381
3484
  embedding: embeddingMap.get(m.id) ?? null
3382
3485
  });
3383
3486
  const pairs = findMergeCandidatesDelta(sourceMems.map(toCandidate), targetMems.map(toCandidate));
3384
- if (pairs.length > 0) summary.merged = (await executeMerges(params.db, pairs, params.mergeContentProducer, params.embedder)).length;
3487
+ if (pairs.length > 0) {
3488
+ log.info(`merge: ${pairs.length} candidate pairs found`);
3489
+ summary.merged = (await executeMerges(params.db, pairs, params.mergeContentProducer, params.embedder, log)).length;
3490
+ }
3385
3491
  }
3386
3492
  } finally {
3387
3493
  params.db.transaction(() => {
3388
- summary.exposuresGc = provenanceGC(params.db);
3494
+ summary.exposuresGc = provenanceGC(params.db, 30, log);
3389
3495
  params.db.setState("last_consolidation_at", consolidationCutoff);
3390
3496
  });
3391
3497
  }
3498
+ const s = summary;
3499
+ log.info(`consolidation: done in ${Date.now() - start}ms — reinforced=${s.reinforced} decayed=${s.decayed} pruned=${s.pruned}+${s.prunedAssociations} merged=${s.merged} transitioned=${s.transitioned}`);
3392
3500
  return {
3393
3501
  ok: true,
3394
3502
  summary,
@@ -3581,6 +3689,111 @@ function feedbackEvidenceForRating(rating) {
3581
3689
  };
3582
3690
  }
3583
3691
  //#endregion
3692
+ //#region src/llm-caller.ts
3693
+ const DEFAULTS = {
3694
+ anthropic: {
3695
+ model: "claude-haiku-4-5-20251001",
3696
+ maxTokens: 2048
3697
+ },
3698
+ openai: {
3699
+ model: "gpt-4o-mini",
3700
+ maxTokens: 2048
3701
+ }
3702
+ };
3703
+ /**
3704
+ * Call an LLM with a simple prompt → text response.
3705
+ * Throws on failure (network, auth, rate limit, timeout).
3706
+ */
3707
+ async function callLlm(prompt, config) {
3708
+ const { provider, apiKey } = config;
3709
+ const model = config.model ?? DEFAULTS[provider].model;
3710
+ const maxTokens = config.maxTokens ?? DEFAULTS[provider].maxTokens;
3711
+ const timeoutMs = config.timeoutMs ?? 3e4;
3712
+ if (provider === "anthropic") return callAnthropic(prompt, apiKey, model, maxTokens, timeoutMs);
3713
+ return callOpenAi(prompt, apiKey, model, maxTokens, timeoutMs);
3714
+ }
3715
+ async function callAnthropic(prompt, apiKey, model, maxTokens, timeoutMs) {
3716
+ const response = await fetchWithTimeout("https://api.anthropic.com/v1/messages", {
3717
+ method: "POST",
3718
+ headers: {
3719
+ "Content-Type": "application/json",
3720
+ "x-api-key": apiKey,
3721
+ "anthropic-version": "2023-06-01"
3722
+ },
3723
+ body: JSON.stringify({
3724
+ model,
3725
+ max_tokens: maxTokens,
3726
+ messages: [{
3727
+ role: "user",
3728
+ content: prompt
3729
+ }]
3730
+ })
3731
+ }, timeoutMs);
3732
+ if (!response.ok) {
3733
+ const body = await response.text().catch(() => "");
3734
+ throw new Error(`Anthropic API error ${response.status}: ${body.slice(0, 200)}`);
3735
+ }
3736
+ const text = (await response.json()).content?.filter((b) => b.type === "text")?.map((b) => b.text)?.join("") ?? "";
3737
+ if (!text) throw new Error("Anthropic returned empty response");
3738
+ return text;
3739
+ }
3740
+ async function callOpenAi(prompt, apiKey, model, maxTokens, timeoutMs) {
3741
+ const response = await fetchWithTimeout("https://api.openai.com/v1/chat/completions", {
3742
+ method: "POST",
3743
+ headers: {
3744
+ "Content-Type": "application/json",
3745
+ Authorization: `Bearer ${apiKey}`
3746
+ },
3747
+ body: JSON.stringify({
3748
+ model,
3749
+ max_tokens: maxTokens,
3750
+ messages: [{
3751
+ role: "user",
3752
+ content: prompt
3753
+ }]
3754
+ })
3755
+ }, timeoutMs);
3756
+ if (!response.ok) {
3757
+ const body = await response.text().catch(() => "");
3758
+ throw new Error(`OpenAI API error ${response.status}: ${body.slice(0, 200)}`);
3759
+ }
3760
+ const text = (await response.json()).choices?.[0]?.message?.content ?? "";
3761
+ if (!text) throw new Error("OpenAI returned empty response");
3762
+ return text;
3763
+ }
3764
+ async function fetchWithTimeout(url, init, timeoutMs) {
3765
+ const controller = new AbortController();
3766
+ const timer = setTimeout(() => controller.abort(), timeoutMs);
3767
+ try {
3768
+ return await fetch(url, {
3769
+ ...init,
3770
+ signal: controller.signal
3771
+ });
3772
+ } catch (error) {
3773
+ if (error instanceof DOMException && error.name === "AbortError") throw new Error(`LLM API call timed out after ${timeoutMs}ms`);
3774
+ throw error;
3775
+ } finally {
3776
+ clearTimeout(timer);
3777
+ }
3778
+ }
3779
+ /**
3780
+ * Extract an API key from OpenClaw auth profiles.
3781
+ * Tries the specified provider, falls back to the other.
3782
+ */
3783
+ function resolveApiKey(authProfiles, preferredProvider) {
3784
+ if (!authProfiles) return null;
3785
+ for (const profile of Object.values(authProfiles)) if (profile.provider === preferredProvider && profile.key) return {
3786
+ provider: preferredProvider,
3787
+ apiKey: profile.key
3788
+ };
3789
+ const fallback = preferredProvider === "anthropic" ? "openai" : "anthropic";
3790
+ for (const profile of Object.values(authProfiles)) if (profile.provider === fallback && profile.key) return {
3791
+ provider: fallback,
3792
+ apiKey: profile.key
3793
+ };
3794
+ return null;
3795
+ }
3796
+ //#endregion
3584
3797
  //#region src/context-engine.ts
3585
3798
  /**
3586
3799
  * Associative Memory Context Engine
@@ -3841,26 +4054,31 @@ function createAssociativeMemoryContextEngine(options) {
3841
4054
  };
3842
4055
  },
3843
4056
  async afterTurn(params) {
3844
- if (!options.getDb || !options.ledger) {
3845
- options.logger?.warn("afterTurn() disabled: missing getDb or ledger");
3846
- return;
4057
+ if (options.getDb && options.ledger) {
4058
+ const turnFingerprint = userTurnKey(params.messages) ?? "empty";
4059
+ const turnId = `${params.sessionId}:${params.prePromptMessageCount}:${turnFingerprint}`;
4060
+ try {
4061
+ processAfterTurn({
4062
+ sessionId: params.sessionId,
4063
+ turnId,
4064
+ messages: params.messages,
4065
+ prePromptMessageCount: params.prePromptMessageCount,
4066
+ ledger: options.ledger,
4067
+ db: options.getDb(),
4068
+ logPath: options.getLogPath?.(),
4069
+ isBm25Only: options.isBm25Only?.() ?? false
4070
+ });
4071
+ options.logger?.debug?.(`afterTurn: autoInjected=${options.ledger.autoInjected.size} searchResults=${options.ledger.searchResults.size} explicitlyOpened=${options.ledger.explicitlyOpened.size} storedThisTurn=${options.ledger.storedThisTurn.size}`);
4072
+ } catch (error) {
4073
+ options.logger?.warn("afterTurn() provenance write failed", error);
4074
+ }
3847
4075
  }
3848
- const turnFingerprint = userTurnKey(params.messages) ?? "empty";
3849
- const turnId = `${params.sessionId}:${params.prePromptMessageCount}:${turnFingerprint}`;
3850
- try {
3851
- processAfterTurn({
3852
- sessionId: params.sessionId,
3853
- turnId,
3854
- messages: params.messages,
3855
- prePromptMessageCount: params.prePromptMessageCount,
3856
- ledger: options.ledger,
3857
- db: options.getDb(),
3858
- logPath: options.getLogPath?.(),
3859
- isBm25Only: options.isBm25Only?.() ?? false
3860
- });
3861
- options.logger?.debug?.(`afterTurn: autoInjected=${options.ledger.autoInjected.size} searchResults=${options.ledger.searchResults.size} explicitlyOpened=${options.ledger.explicitlyOpened.size} storedThisTurn=${options.ledger.storedThisTurn.size}`);
3862
- } catch (error) {
3863
- options.logger?.warn("afterTurn() provenance write failed", error);
4076
+ if (options.autoCapture) {
4077
+ const turnContent = extractTurnContent(params.messages, params.prePromptMessageCount);
4078
+ if (turnContent) {
4079
+ const llmConfig = options.getLlmConfig?.();
4080
+ if (llmConfig) extractAndStoreMemories(turnContent, llmConfig, getManager(), options.logger);
4081
+ }
3864
4082
  }
3865
4083
  },
3866
4084
  async ingest(_params) {
@@ -3927,6 +4145,158 @@ function checkSleepDebt(getDb) {
3927
4145
  return "";
3928
4146
  }
3929
4147
  }
4148
/** Maximum character length for the turn transcript sent to the extraction LLM. */
const AUTO_CAPTURE_MAX_CHARS = 4e3;
/**
 * Build a concise "User/Assistant" transcript from the current turn's messages.
 *
 * Only messages after `prePromptMessageCount` belong to the current turn; all
 * user and assistant messages in that window are aggregated (not just the last
 * one). Returns null when either side is missing or the exchange is trivially
 * short, and truncates long content so the extraction prompt stays manageable.
 *
 * @param {unknown[]} messages - Full session message list.
 * @param {number} prePromptMessageCount - Index where the current turn starts.
 * @returns {string | null} Formatted transcript, or null if nothing to capture.
 */
function extractTurnContent(messages, prePromptMessageCount) {
	const currentTurn = messages.slice(prePromptMessageCount);
	const rawUser = extractRoleText(currentTurn, "user");
	const rawAssistant = extractRoleText(currentTurn, "assistant");
	if (!rawUser || !rawAssistant) return null;
	// Skip trivial exchanges (e.g. "ok" / "done") — nothing worth extracting.
	if (rawUser.trim().length < 10 && rawAssistant.trim().length < 20) return null;
	const { user, assistant } = truncatePair(rawUser, rawAssistant, AUTO_CAPTURE_MAX_CHARS);
	return `User: ${user}\n\nAssistant: ${assistant}`;
}
4167
/**
 * Aggregate all text content from messages with the given role, in
 * chronological order. String contents are taken verbatim; array contents
 * contribute only their text blocks (joined with newlines).
 *
 * @param {unknown[]} messages - Messages to scan.
 * @param {string} role - Role to match ("user" or "assistant").
 * @returns {string | null} Joined text, or null when no matching text exists.
 */
function extractRoleText(messages, role) {
	const collected = [];
	for (const candidate of messages) {
		if (candidate == null || typeof candidate !== "object") continue;
		const { role: candidateRole, content } = candidate;
		if (candidateRole !== role || !content) continue;
		if (typeof content === "string") {
			collected.push(content);
		} else if (Array.isArray(content)) {
			const textParts = content.filter(isTextBlock).map((block) => block.text);
			if (textParts.length > 0) collected.push(textParts.join("\n"));
		}
	}
	return collected.length === 0 ? null : collected.join("\n\n");
}
4185
/**
 * Fit a user/assistant text pair into a total character budget.
 * When one side is short, the other side gets the remaining budget;
 * when both exceed half the budget, each side gets an even half.
 *
 * @param {string} userText
 * @param {string} assistantText
 * @param {number} maxTotal - Combined character budget.
 * @returns {{user: string, assistant: string}}
 */
function truncatePair(userText, assistantText, maxTotal) {
	const combined = userText.length + assistantText.length;
	if (combined <= maxTotal) {
		return { user: userText, assistant: assistantText };
	}
	const half = Math.floor(maxTotal / 2);
	if (userText.length <= half) {
		// Short user side: hand the assistant all remaining budget.
		return { user: userText, assistant: truncate(assistantText, maxTotal - userText.length) };
	}
	if (assistantText.length <= half) {
		// Short assistant side: symmetric case.
		return { user: truncate(userText, maxTotal - assistantText.length), assistant: assistantText };
	}
	// Both sides are long: split the budget evenly.
	return { user: truncate(userText, half), assistant: truncate(assistantText, half) };
}
4204
/**
 * Truncate `text` to at most `maxLen` characters, appending "..." when cut.
 *
 * Fix: the original computed `text.slice(0, maxLen - 3) + "..."` for all
 * truncations, so for `maxLen <= 3` the slice end went to 0 or negative and
 * the result could be LONGER than `maxLen` (e.g. maxLen=2 on "abcdef" gave
 * "abcde..."). Tiny budgets now hard-cut without the ellipsis.
 *
 * @param {string} text
 * @param {number} maxLen - Maximum allowed result length (>= 0).
 * @returns {string} A string of length <= maxLen.
 */
function truncate(text, maxLen) {
	if (text.length <= maxLen) return text;
	// Not enough room for an ellipsis: hard cut (guard against negative budgets).
	if (maxLen <= 3) return text.slice(0, Math.max(0, maxLen));
	return text.slice(0, maxLen - 3) + "...";
}
4208
/**
 * Extraction prompt sent to the LLM to distill durable facts from a
 * conversation turn. The instructions ask for a bare JSON array of
 * {type, content} objects; an empty array is the expected common case.
 *
 * @param {string} turnContent - "User: ... / Assistant: ..." transcript.
 * @returns {string} The full prompt text.
 */
function buildExtractionPrompt(turnContent) {
	const instructions = `You are a memory extraction system. Read the following conversation exchange and extract facts worth remembering long-term.

Rules:
- Extract durable information: preferences, personal background, goals, plans, work/project context, relationships, recurring patterns, commitments, events, or corrections to prior knowledge.
- Facts can come from any source: user statements, tool outputs, environment details, project configuration, or confirmed assistant observations.
- Do NOT extract: the current task request itself, ephemeral implementation details, assistant reasoning, pleasantries, or transient operational context.
- Each fact should be a single, self-contained statement.
- Most conversation turns contain nothing worth remembering long-term — returning an empty array is expected and correct. Only extract when there is clearly durable information.
- Return a JSON array of objects, each with "type" and "content" fields.
- Valid types: "preference", "about", "person", "event", "goal", "work", "fact"
- preference: tastes, values, styles, dislikes
- about: background, identity, skills, life situation
- person: people and relationships
- event: events, schedules, deadlines
- goal: objectives, plans, aspirations
- work: durable work/project context, constraints, architecture
- fact: other durable information (fallback)
- Return ONLY the JSON array, nothing else.

Example output:
[{"type": "preference", "content": "User prefers TypeScript over JavaScript for backend work"}, {"type": "event", "content": "User is moving to Berlin in May 2026"}, {"type": "work", "content": "Project must support SQLite only, no Postgres"}]

Conversation:
${turnContent}`;
	return instructions;
}
4235
/** Fact types accepted from the extraction LLM; anything else falls back to "fact". */
const VALID_FACT_TYPES = new Set([
	"preference",
	"about",
	"person",
	"event",
	"goal",
	"work",
	"fact"
]);
/**
 * Parse the LLM's extraction response into validated {type, content} facts.
 * Tolerant of minor formatting issues: markdown code fences are stripped and
 * any text surrounding the outermost JSON array is ignored. Returns an empty
 * array on any parse failure or when no valid facts are present.
 *
 * @param {string} response - Raw LLM output.
 * @returns {{type: string, content: string}[]}
 */
function parseExtractionResponse(response) {
	const unfenced = response.trim().replace(/^```(?:json)?\s*/i, "").replace(/\s*```\s*$/, "");
	const arrayStart = unfenced.indexOf("[");
	const arrayEnd = unfenced.lastIndexOf("]");
	if (arrayStart === -1 || arrayEnd === -1 || arrayEnd <= arrayStart) return [];
	let candidates;
	try {
		candidates = JSON.parse(unfenced.slice(arrayStart, arrayEnd + 1));
	} catch {
		return [];
	}
	if (!Array.isArray(candidates)) return [];
	const validated = [];
	for (const candidate of candidates) {
		if (candidate == null || typeof candidate !== "object") continue;
		const { type, content } = candidate;
		if (typeof type !== "string" || typeof content !== "string") continue;
		const trimmedContent = content.trim();
		if (!trimmedContent) continue;
		validated.push({
			type: VALID_FACT_TYPES.has(type) ? type : "fact",
			content: trimmedContent
		});
	}
	return validated;
}
4275
/**
 * Extract facts from a conversation turn via LLM and store each as a memory.
 * Fire-and-forget: every error (extraction or per-fact storage) is caught and
 * logged, never rethrown, so auto-capture can never break the host turn.
 *
 * @param {string} turnContent - Transcript from extractTurnContent().
 * @param {object} llmConfig - Provider/key config for callLlm().
 * @param {object} manager - Memory manager with an async store() method.
 * @param {object} [logger] - Optional logger (debug/warn).
 */
async function extractAndStoreMemories(turnContent, llmConfig, manager, logger) {
	try {
		const response = await callLlm(buildExtractionPrompt(turnContent), llmConfig);
		const facts = parseExtractionResponse(response);
		if (facts.length === 0) {
			logger?.debug?.("auto-capture: LLM extracted 0 facts");
			return;
		}
		for (const fact of facts) {
			try {
				await manager.store({
					content: fact.content,
					type: fact.type,
					source: "auto_capture"
				});
			} catch (error) {
				// One bad fact must not block the rest of the batch.
				logger?.warn(`auto-capture: failed to store fact: ${fact.content.slice(0, 50)}`, error);
			}
		}
		logger?.debug?.(`auto-capture: stored ${facts.length} facts`);
	} catch (error) {
		logger?.warn("auto-capture: LLM extraction failed", error);
	}
}
3930
4300
  /** Type guard for text content blocks in multimodal messages. */
3931
4301
  function isTextBlock(v) {
3932
4302
  if (v == null || typeof v !== "object") return false;
@@ -4057,111 +4427,6 @@ var EmbeddingTimeoutError = class extends Error {
4057
4427
  }
4058
4428
  };
4059
4429
  //#endregion
4060
- //#region src/llm-caller.ts
4061
/** Per-provider fallback model and token limit, used when config omits them. */
const DEFAULTS = {
	anthropic: {
		model: "claude-haiku-4-5-20251001",
		maxTokens: 2048
	},
	openai: {
		model: "gpt-4o-mini",
		maxTokens: 2048
	}
};
/**
 * Call an LLM with a simple prompt → text response.
 * Throws on failure (network, auth, rate limit, timeout).
 *
 * @param {string} prompt - User prompt to send as a single message.
 * @param {{provider: "anthropic"|"openai", apiKey: string, model?: string,
 *          maxTokens?: number, timeoutMs?: number}} config
 * @returns {Promise<string>} The model's text reply.
 */
async function callLlm(prompt, config) {
	const { provider, apiKey } = config;
	const model = config.model ?? DEFAULTS[provider].model;
	const maxTokens = config.maxTokens ?? DEFAULTS[provider].maxTokens;
	// Default request timeout: 30 seconds.
	const timeoutMs = config.timeoutMs ?? 3e4;
	return provider === "anthropic"
		? callAnthropic(prompt, apiKey, model, maxTokens, timeoutMs)
		: callOpenAi(prompt, apiKey, model, maxTokens, timeoutMs);
}
4083
/**
 * POST a single-message completion request to the Anthropic Messages API.
 * Concatenates all text blocks from the reply.
 *
 * @throws {Error} On non-2xx responses, timeout, or an empty completion.
 * @returns {Promise<string>} The reply text.
 */
async function callAnthropic(prompt, apiKey, model, maxTokens, timeoutMs) {
	const payload = {
		model,
		max_tokens: maxTokens,
		messages: [{
			role: "user",
			content: prompt
		}]
	};
	const response = await fetchWithTimeout("https://api.anthropic.com/v1/messages", {
		method: "POST",
		headers: {
			"Content-Type": "application/json",
			"x-api-key": apiKey,
			"anthropic-version": "2023-06-01"
		},
		body: JSON.stringify(payload)
	}, timeoutMs);
	if (!response.ok) {
		// Include a bounded slice of the error body for diagnostics.
		const body = await response.text().catch(() => "");
		throw new Error(`Anthropic API error ${response.status}: ${body.slice(0, 200)}`);
	}
	const result = await response.json();
	const text = result.content?.filter((b) => b.type === "text")?.map((b) => b.text)?.join("") ?? "";
	if (!text) throw new Error("Anthropic returned empty response");
	return text;
}
4108
/**
 * POST a single-message chat completion request to the OpenAI API.
 *
 * @throws {Error} On non-2xx responses, timeout, or an empty completion.
 * @returns {Promise<string>} The reply text from the first choice.
 */
async function callOpenAi(prompt, apiKey, model, maxTokens, timeoutMs) {
	const payload = {
		model,
		max_tokens: maxTokens,
		messages: [{
			role: "user",
			content: prompt
		}]
	};
	const response = await fetchWithTimeout("https://api.openai.com/v1/chat/completions", {
		method: "POST",
		headers: {
			"Content-Type": "application/json",
			Authorization: `Bearer ${apiKey}`
		},
		body: JSON.stringify(payload)
	}, timeoutMs);
	if (!response.ok) {
		// Include a bounded slice of the error body for diagnostics.
		const body = await response.text().catch(() => "");
		throw new Error(`OpenAI API error ${response.status}: ${body.slice(0, 200)}`);
	}
	const result = await response.json();
	const text = result.choices?.[0]?.message?.content ?? "";
	if (!text) throw new Error("OpenAI returned empty response");
	return text;
}
4132
/**
 * fetch() with a hard timeout enforced via AbortController.
 *
 * Fix: the original tested `error instanceof DOMException`, which throws a
 * ReferenceError on runtimes where `DOMException` is not a global, and misses
 * abort errors surfaced as plain `Error` objects. Detecting the abort by
 * `error.name === "AbortError"` covers both cases.
 *
 * @param {string} url - Request URL.
 * @param {RequestInit} init - fetch options; `signal` is overridden.
 * @param {number} timeoutMs - Abort the request after this many milliseconds.
 * @returns {Promise<Response>}
 * @throws {Error} `LLM API call timed out after <ms>ms` on timeout; the
 *   original error for any other failure.
 */
async function fetchWithTimeout(url, init, timeoutMs) {
	const controller = new AbortController();
	const timer = setTimeout(() => controller.abort(), timeoutMs);
	try {
		return await fetch(url, {
			...init,
			signal: controller.signal
		});
	} catch (error) {
		// Translate our own abort into a descriptive timeout error.
		if (error != null && typeof error === "object" && error.name === "AbortError") {
			throw new Error(`LLM API call timed out after ${timeoutMs}ms`);
		}
		throw error;
	} finally {
		// Always release the timer so the event loop is not kept alive.
		clearTimeout(timer);
	}
}
4147
/**
 * Extract an API key from OpenClaw auth profiles.
 * Tries the preferred provider first, then falls back to the other one
 * (anthropic ↔ openai). Profiles with an empty/missing key are skipped.
 *
 * @param {Record<string, {provider?: string, key?: string}> | null | undefined} authProfiles
 * @param {"anthropic"|"openai"} preferredProvider
 * @returns {{provider: string, apiKey: string} | null}
 */
function resolveApiKey(authProfiles, preferredProvider) {
	if (!authProfiles) return null;
	const findKeyFor = (provider) => {
		for (const profile of Object.values(authProfiles)) {
			if (profile.provider === provider && profile.key) {
				return { provider, apiKey: profile.key };
			}
		}
		return null;
	};
	const preferred = findKeyFor(preferredProvider);
	if (preferred) return preferred;
	const fallbackProvider = preferredProvider === "anthropic" ? "openai" : "anthropic";
	return findKeyFor(fallbackProvider);
}
4164
- //#endregion
4165
4430
  //#region src/memory-manager.ts
4166
4431
  var MemoryManager = class {
4167
4432
  db;
@@ -9786,6 +10051,7 @@ MarkdownIt.prototype.renderInline = function(src, env) {
9786
10051
  * Uses markdown-it for reliable parsing (handles code blocks, frontmatter, CRLF).
9787
10052
  */
9788
10053
  const DATE_FILENAME_RE = /^(\d{4}-\d{2}-\d{2})\.md$/;
10054
+ const DATE_ISO_RE = /^\d{4}-\d{2}-\d{2}/;
9789
10055
  const FRONTMATTER_RE = /^\uFEFF?---[ \t]*\r?\n[\s\S]*?\r?\n---[ \t]*\r?\n/;
9790
10056
  const MAX_SEGMENT_CHARS = 2e3;
9791
10057
  const MIN_SEGMENT_CHARS = 200;
@@ -9882,6 +10148,7 @@ function segmentMarkdown(content, filePath, workspaceDir) {
9882
10148
  heading_level: seg.headingLevel,
9883
10149
  date: fileDate,
9884
10150
  evergreen: isEvergreen,
10151
+ session: false,
9885
10152
  content: seg.content,
9886
10153
  char_count: seg.content.length
9887
10154
  }));
@@ -10019,6 +10286,126 @@ function mergeSmallSegments(segments) {
10019
10286
  }
10020
10287
  return result;
10021
10288
  }
10289
/**
 * Discover JSONL session files in the agent's sessions directory.
 * Only imports canonical live session files (*.jsonl).
 * Excludes archive variants (.reset.*, .deleted.*) and backups (.bak.*)
 * to avoid duplicate extraction from the same session.
 *
 * Fix: the original only checked `endsWith(".jsonl")`, so archive variants
 * such as `foo.reset.1.jsonl` WERE included despite the documented exclusion —
 * exactly the duplicate-extraction case this function is meant to prevent.
 * (The `sessions.json` comparison was dead code: that name never ends with
 * ".jsonl".)
 *
 * @param {string} sessionsDir - Directory to scan (may not exist).
 * @returns {string[]} Absolute paths, sorted lexicographically.
 */
function discoverSessionFiles(sessionsDir) {
	// Matches the archive/backup naming variants: *.reset.*, *.deleted.*, *.bak.*
	const archiveVariantRe = /\.(?:reset|deleted|bak)\./;
	let entries;
	try {
		if (!existsSync(sessionsDir) || !statSync(sessionsDir).isDirectory()) return [];
		entries = readdirSync(sessionsDir, { withFileTypes: true });
	} catch {
		// Directory vanished or is unreadable: treat as "no sessions".
		return [];
	}
	const found = [];
	for (const entry of entries) {
		if (!entry.isFile()) continue;
		const name = entry.name;
		if (!name.endsWith(".jsonl")) continue;
		if (archiveVariantRe.test(name)) continue;
		found.push(join(sessionsDir, name));
	}
	found.sort((a, b) => a.localeCompare(b));
	return found;
}
10312
/**
 * Parse a JSONL session file and extract conversation turns as ImportSegments.
 *
 * Strategy:
 * - Extract user and assistant text messages (skip tool calls, thinking, system)
 * - Group consecutive messages into conversation exchanges
 * - Use the first parseable timestamp to date the whole session
 * Malformed JSONL lines are silently skipped.
 *
 * @param {string} content - Raw JSONL file content.
 * @param {string} filePath - Absolute path of the session file.
 * @param {string} sessionsDir - Sessions directory (anchor for relative paths).
 * @returns {object[]} ImportSegments (one per user/assistant exchange).
 */
function parseSessionJsonl(content, filePath, sessionsDir) {
	const nonEmptyLines = content.split("\n").filter((line) => line.trim());
	if (nonEmptyLines.length === 0) return [];
	const relPath = toPosixRelative(dirname(sessionsDir), filePath);
	const turns = [];
	let sessionDate = null;
	for (const line of nonEmptyLines) {
		let entry;
		try {
			entry = JSON.parse(line);
		} catch {
			continue; // tolerate malformed lines
		}
		// First timestamp seen dates the session as a whole.
		if (!sessionDate && entry.timestamp) {
			const dateMatch = DATE_ISO_RE.exec(entry.timestamp);
			if (dateMatch) sessionDate = dateMatch[0];
		}
		if (entry.type !== "message" || !entry.message) continue;
		const { role, content: msgContent } = entry.message;
		if (role !== "user" && role !== "assistant") continue;
		const text = extractTextFromContent(msgContent).trim();
		if (!text) continue;
		turns.push({
			role,
			text,
			timestamp: entry.timestamp ?? null
		});
	}
	return turns.length === 0 ? [] : groupTurnsIntoExchanges(turns, relPath, sessionDate);
}
10352
/**
 * Flatten message content into plain text.
 * Strings pass through; arrays contribute their non-empty text blocks joined
 * with blank lines; anything else (null, numbers, objects) yields "".
 *
 * @param {unknown} content - Message content (string, block array, or other).
 * @returns {string}
 */
function extractTextFromContent(content) {
	if (!content) return "";
	if (typeof content === "string") return content;
	if (!Array.isArray(content)) return "";
	const texts = [];
	for (const block of content) {
		if (block.type === "text" && block.text) texts.push(block.text);
	}
	return texts.join("\n\n");
}
10358
/**
 * Group parsed turns into exchanges and produce one ImportSegment per exchange.
 *
 * An exchange opens at a user turn and collects every following assistant turn
 * until the next user turn; consecutive user turns with no assistant reply in
 * between are merged into a single exchange. Exchanges missing either side are
 * dropped. Each exchange is rendered as "User: ...\n\nAssistant: ..." —
 * preserving the natural conversational boundary for LLM fact extraction.
 *
 * @param {{role: string, text: string, timestamp: string|null}[]} turns
 * @param {string} relPath - Session file path relative to the agent dir.
 * @param {string|null} sessionDate - Fallback date for undated exchanges.
 * @returns {object[]} ImportSegments, ids numbered 0..n-1.
 */
function groupTurnsIntoExchanges(turns, relPath, sessionDate) {
	const exchanges = [];
	let userParts = [];
	let assistantParts = [];
	let firstTimestamp = null;
	const flush = () => {
		// Only a complete user+assistant pair becomes an exchange.
		if (userParts.length > 0 && assistantParts.length > 0) {
			exchanges.push({
				userText: userParts.join("\n\n"),
				assistantText: assistantParts.join("\n\n"),
				timestamp: firstTimestamp
			});
		}
		userParts = [];
		assistantParts = [];
		firstTimestamp = null;
	};
	for (const turn of turns) {
		if (turn.role === "user") {
			// A user turn arriving after assistant replies starts a new exchange.
			if (assistantParts.length > 0) flush();
			userParts.push(turn.text);
			if (!firstTimestamp) firstTimestamp = turn.timestamp;
		} else {
			assistantParts.push(turn.text);
		}
	}
	flush();
	return exchanges.map((exchange, index) => {
		const content = `User: ${exchange.userText}\n\nAssistant: ${exchange.assistantText}`;
		return {
			id: index,
			source_file: relPath,
			heading: null,
			heading_level: null,
			date: extractDateFromTimestamp(exchange.timestamp) ?? sessionDate,
			evergreen: false,
			session: true,
			content,
			char_count: content.length
		};
	});
}
10404
/**
 * Pull the leading YYYY-MM-DD date out of an ISO timestamp.
 *
 * @param {string | null | undefined} timestamp
 * @returns {string | null} The date prefix, or null when absent/unmatched.
 */
function extractDateFromTimestamp(timestamp) {
	if (!timestamp) return null;
	return DATE_ISO_RE.exec(timestamp)?.[0] ?? null;
}
10022
10409
  function isEvergreenFile(filePath, workspaceDir) {
10023
10410
  const rel = relative(workspaceDir, filePath);
10024
10411
  return basename(filePath).toLowerCase() === "memory.md" && dirname(rel) === ".";
@@ -10031,8 +10418,11 @@ function extractDateFromFilename(filename) {
10031
10418
  * Preprocess memory-core files: discover, segment, extract metadata.
10032
10419
  * Returns segments ready for LLM enrichment and storage.
10033
10420
  * Continues processing on per-file errors; collects errors in result.
10421
+ *
10422
+ * @param sessionsDir - Optional path to OpenClaw sessions directory
10423
+ * (e.g. ~/.openclaw/agents/<agentId>/sessions/) for JSONL import.
10034
10424
  */
10035
- function prepareImport(workspaceDir, extraPaths) {
10425
+ function prepareImport(workspaceDir, extraPaths, sessionsDir) {
10036
10426
  const files = discoverMemoryFiles(workspaceDir, extraPaths);
10037
10427
  const allSegments = [];
10038
10428
  const fileInfos = [];
@@ -10056,6 +10446,27 @@ function prepareImport(workspaceDir, extraPaths) {
10056
10446
  error: err instanceof Error ? err.message : String(err)
10057
10447
  });
10058
10448
  }
10449
+ if (sessionsDir) {
10450
+ const sessionFiles = discoverSessionFiles(sessionsDir);
10451
+ for (const filePath of sessionFiles) try {
10452
+ const renumbered = parseSessionJsonl(readFileSync(filePath, "utf8"), filePath, sessionsDir).map((seg) => ({
10453
+ ...seg,
10454
+ id: globalId++
10455
+ }));
10456
+ allSegments.push(...renumbered);
10457
+ fileInfos.push({
10458
+ path: toPosixRelative(dirname(sessionsDir), filePath),
10459
+ segmentCount: renumbered.length,
10460
+ evergreen: false,
10461
+ date: renumbered[0]?.date ?? null
10462
+ });
10463
+ } catch (err) {
10464
+ errors.push({
10465
+ path: toPosixRelative(dirname(sessionsDir), filePath),
10466
+ error: err instanceof Error ? err.message : String(err)
10467
+ });
10468
+ }
10469
+ }
10059
10470
  return {
10060
10471
  segments: allSegments,
10061
10472
  files: fileInfos,
@@ -10099,14 +10510,14 @@ function calculateImportStrength(segmentDate) {
10099
10510
  * Run the memory-core migration. Idempotent — checks db state before proceeding.
10100
10511
  */
10101
10512
  async function runMigration(deps) {
10102
- const { workspaceDir, store, dbState, enrich, logger, extraPaths } = deps;
10513
+ const { workspaceDir, store, dbState, enrich, logger, extraPaths, sessionsDir } = deps;
10103
10514
  const completedAt = dbState.get(STATE_KEY_COMPLETED);
10104
10515
  if (completedAt) {
10105
10516
  logger.info(`Memory migration already completed at ${completedAt}`);
10106
10517
  return { status: "skipped" };
10107
10518
  }
10108
10519
  logger.info("Scanning for memory-core files...");
10109
- const result = prepareImport(workspaceDir, extraPaths);
10520
+ const result = prepareImport(workspaceDir, extraPaths, sessionsDir);
10110
10521
  if (result.errors.length > 0) for (const err of result.errors) logger.warn(`Could not read ${err.path}: ${err.error}`);
10111
10522
  if (result.totalSegments === 0) {
10112
10523
  logger.info("No memory-core files found. Will re-check on next startup.");
@@ -10117,10 +10528,12 @@ async function runMigration(deps) {
10117
10528
  };
10118
10529
  }
10119
10530
  logger.info(`Found ${result.files.length} files, ${result.totalSegments} segments. Starting migration...`);
10531
+ const mdSegments = result.segments.filter((s) => !s.session);
10532
+ const sessionSegments = result.segments.filter((s) => s.session);
10120
10533
  let importedCount = 0;
10121
10534
  const importErrors = [];
10122
- for (let i = 0; i < result.segments.length; i += BATCH_SIZE) {
10123
- const batch = result.segments.slice(i, i + BATCH_SIZE);
10535
+ for (let i = 0; i < mdSegments.length; i += BATCH_SIZE) {
10536
+ const batch = mdSegments.slice(i, i + BATCH_SIZE);
10124
10537
  try {
10125
10538
  const stored = await storeBatch(batch, await enrichBatch(batch, enrich, logger), store, logger, deps.updateStrength);
10126
10539
  importedCount += stored;
@@ -10138,6 +10551,11 @@ async function runMigration(deps) {
10138
10551
  }
10139
10552
  }
10140
10553
  }
10554
+ if (sessionSegments.length > 0) {
10555
+ const extracted = await extractSessionSegments(sessionSegments, store, deps.llmCall, logger, deps.updateStrength);
10556
+ importedCount += extracted.stored;
10557
+ if (extracted.errors.length > 0) importErrors.push(...extracted.errors);
10558
+ }
10141
10559
  if (importErrors.length === 0) dbState.set(STATE_KEY_COMPLETED, (/* @__PURE__ */ new Date()).toISOString());
10142
10560
  dbState.set(STATE_KEY_SOURCE_COUNT, String(result.files.length));
10143
10561
  dbState.set(STATE_KEY_SEGMENT_COUNT, String(importedCount));
@@ -10209,6 +10627,47 @@ function applyImportDecay(id, date, updateStrength) {
10209
10627
  const strength = calculateImportStrength(date);
10210
10628
  if (strength < 1) updateStrength(id, strength);
10211
10629
  }
10630
/**
 * Process JSONL session segments by extracting facts via LLM.
 * Uses the same extraction pipeline as autoCapture
 * (buildExtractionPrompt/parseExtractionResponse): each segment's exchange is
 * sent to the LLM, which distills durable facts; each fact is stored with
 * import provenance and age-based strength decay. Requires llmCall — session
 * import is skipped (with a warning) without it.
 *
 * @returns {Promise<{stored: number, errors: string[]}>} Count of stored facts
 *   and per-segment extraction error messages. Per-fact store failures are
 *   logged but not reported as errors.
 */
async function extractSessionSegments(segments, store, llmCall, logger, updateStrength) {
	if (!llmCall) {
		logger.warn("Session import skipped: LLM extraction required but llmCall not available");
		return {
			stored: 0,
			errors: []
		};
	}
	const toMessage = (err) => err instanceof Error ? err.message : String(err);
	const errors = [];
	let stored = 0;
	for (const seg of segments) {
		try {
			const response = await llmCall(buildExtractionPrompt(seg.content));
			const facts = parseExtractionResponse(response);
			if (facts.length === 0) continue;
			for (const fact of facts) {
				try {
					const memory = await store({
						content: fact.content,
						type: fact.type,
						source: "import",
						temporal_state: seg.date ? "past" : "none",
						temporal_anchor: seg.date
					});
					applyImportDecay(memory.id, seg.date, updateStrength);
					stored++;
				} catch (err) {
					// A single bad fact must not abort the segment.
					logger.warn(`Failed to store extracted fact from segment ${seg.id}: ${toMessage(err)}`);
				}
			}
		} catch (err) {
			const msg = `Session segment ${seg.id} extraction failed: ${toMessage(err)}`;
			logger.error(msg);
			errors.push(msg);
		}
	}
	return {
		stored,
		errors
	};
}
10212
10671
  function inferType(seg) {
10213
10672
  if (seg.evergreen) return "fact";
10214
10673
  if (seg.date) return "observation";
@@ -10353,11 +10812,11 @@ async function cleanupWorkspaceFiles(deps) {
10353
10812
  hadFailure = true;
10354
10813
  continue;
10355
10814
  }
10356
- const backupPath = `${file.path}.pre-associative-memory`;
10815
+ const backupPath = `${file.path}.pre-formative-memory`;
10357
10816
  if (!existsSync(backupPath)) copyFileSync(file.path, backupPath);
10358
10817
  writeFileSync(file.path, cleaned, "utf-8");
10359
10818
  modified.push(file.name);
10360
- logger.info(`Cleaned ${file.name} (backup: ${file.name}.pre-associative-memory)`);
10819
+ logger.info(`Cleaned ${file.name} (backup: ${file.name}.pre-formative-memory)`);
10361
10820
  } catch (err) {
10362
10821
  logger.warn(`Failed to clean ${file.name}: ${err instanceof Error ? err.message : String(err)}`);
10363
10822
  hadFailure = true;
@@ -10396,53 +10855,6 @@ Here is the current content of ${fileName}:
10396
10855
  ${content}`;
10397
10856
  }
10398
10857
  //#endregion
10399
- //#region src/logger.ts
10400
/** Numeric rank per log level; higher is more severe. */
const LEVEL_ORDER = {
	debug: 0,
	info: 1,
	warn: 2,
	error: 3
};
/**
 * Create a logger that forwards to the host logger at or above the configured
 * minimum level. Debug output is enabled by `opts.verbose` or the
 * FORMATIVE_MEMORY_DEBUG=1 environment variable; without a host logger,
 * messages fall back to the console.
 *
 * @param {{verbose?: boolean, host?: object}} opts
 * @returns {{debug: Function, info: Function, warn: Function, error: Function,
 *           isDebugEnabled: () => boolean}}
 */
function createLogger(opts) {
	const debugEnabled = opts.verbose || process.env.FORMATIVE_MEMORY_DEBUG === "1";
	const threshold = LEVEL_ORDER[debugEnabled ? "debug" : "info"];
	const host = opts.host;
	// Render one extra argument as text for the host-logger line.
	const render = (value) => {
		if (value instanceof Error) return value.stack || value.message;
		if (typeof value === "object" && value !== null) {
			try {
				return JSON.stringify(value);
			} catch {
				return "[Unserializable]";
			}
		}
		return String(value);
	};
	const emit = (level, msg, args) => {
		if (LEVEL_ORDER[level] < threshold) return;
		const prefix = `[formative-memory] [${level}]`;
		if (host) {
			const suffix = args.length > 0 ? " " + args.map(render).join(" ") : "";
			const line = `${prefix} ${msg}${suffix}`;
			// Hosts without a debug channel get debug lines on info.
			if (level === "debug") {
				(host.debug ?? host.info)(line);
			} else {
				host[level](line);
			}
			return;
		}
		const line = `${prefix} ${msg}`;
		if (level === "error") console.error(line, ...args);
		else if (level === "warn") console.warn(line, ...args);
		else console.log(line, ...args);
	};
	return {
		debug: (msg, ...args) => emit("debug", msg, args),
		info: (msg, ...args) => emit("info", msg, args),
		warn: (msg, ...args) => emit("warn", msg, args),
		error: (msg, ...args) => emit("error", msg, args),
		isDebugEnabled: () => threshold === LEVEL_ORDER.debug
	};
}
10445
- //#endregion
10446
10858
  //#region src/turn-memory-ledger.ts
10447
10859
  /**
10448
10860
  * Turn Memory Ledger (Phase 3.4)
@@ -10590,6 +11002,7 @@ function resolveMemoryDir(config, workspaceDir, pathResolver) {
10590
11002
  if (isAbsolute(dbPath)) return dbPath;
10591
11003
  return join(workspaceDir, dbPath);
10592
11004
  }
11005
+ const EMBEDDING_REQUIRED_HINT = "Embedding provider required but not available.\nSet one of: OPENAI_API_KEY, GEMINI_API_KEY, VOYAGE_API_KEY, or MISTRAL_API_KEY.\nTo run without embeddings (BM25-only), set \"requireEmbedding\": false in plugin config.";
10593
11006
  /**
10594
11007
  * Try to create an embedding provider directly via SDK factory functions.
10595
11008
  * Used when the memory-core plugin is disabled and the global registry is empty.
@@ -10676,11 +11089,13 @@ function createWorkspace(config, workspaceDir, openclawConfig, agentDir, logger,
10676
11089
  let providerPromise = null;
10677
11090
  const getProvider = () => {
10678
11091
  if (!providerPromise) providerPromise = resolveEmbeddingProvider(config.embedding.provider, openclawConfig, agentDir, config.embedding.model).catch((err) => {
11092
+ if (config.requireEmbedding) throw new Error(`${EMBEDDING_REQUIRED_HINT}\nDetails: ${err instanceof Error ? err.message : String(err)}`);
10679
11093
  providerPromise = null;
10680
11094
  throw err;
10681
11095
  });
10682
11096
  return providerPromise;
10683
11097
  };
11098
+ if (config.requireEmbedding) getProvider().catch(() => {});
10684
11099
  const ws = {
10685
11100
  manager: new MemoryManager(memoryDir, { async embed(text) {
10686
11101
  const provider = await getProvider();
@@ -10709,6 +11124,12 @@ function createWorkspace(config, workspaceDir, openclawConfig, agentDir, logger,
10709
11124
  } catch (err) {
10710
11125
  logger?.warn(`Workspace cleanup failed: ${err instanceof Error ? err.message : String(err)}`);
10711
11126
  }
11127
+ if (config.requireEmbedding) try {
11128
+ await getProvider();
11129
+ } catch (err) {
11130
+ logger?.error(`Migration aborted: embedding required but unavailable. ${err instanceof Error ? err.message : String(err)}`);
11131
+ return;
11132
+ }
10712
11133
  try {
10713
11134
  const userLanguage = detectUserLanguage(workspaceDir);
10714
11135
  const enrichFn = initDeps.llmConfig ? createDirectLlmEnrichFn(initDeps.llmConfig, userLanguage) : async (segments) => segments.map((seg) => ({
@@ -10717,6 +11138,7 @@ function createWorkspace(config, workspaceDir, openclawConfig, agentDir, logger,
10717
11138
  temporal_state: seg.date ? "past" : "none",
10718
11139
  temporal_anchor: seg.date
10719
11140
  }));
11141
+ const sessionsDir = agentDir ? join(agentDir, "sessions") : void 0;
10720
11142
  const migrationResult = await runMigration({
10721
11143
  workspaceDir,
10722
11144
  stateDir: initDeps.stateDir ?? workspaceDir,
@@ -10728,7 +11150,9 @@ function createWorkspace(config, workspaceDir, openclawConfig, agentDir, logger,
10728
11150
  info: () => {},
10729
11151
  warn: () => {},
10730
11152
  error: () => {}
10731
- }
11153
+ },
11154
+ sessionsDir,
11155
+ llmCall: initDeps.llmConfig ? (prompt) => callLlm(prompt, initDeps.llmConfig) : void 0
10732
11156
  });
10733
11157
  if (migrationResult.status === "completed") logger?.info(`Migration: imported ${migrationResult.segmentsImported} memories from ${migrationResult.filesFound} files`);
10734
11158
  } catch (err) {
@@ -10872,8 +11296,8 @@ function createMemoryTools(getManager, getLogPath, ledger) {
10872
11296
  ];
10873
11297
  }
10874
11298
  const associativeMemoryPlugin = {
10875
- id: "memory-associative",
10876
- name: "Memory (Associative)",
11299
+ id: "formative-memory",
11300
+ name: "Formative Memory",
10877
11301
  description: "Biologically-inspired associative memory with consolidation and temporal awareness",
10878
11302
  kind: "memory",
10879
11303
  configSchema: memoryConfigSchema,
@@ -10946,7 +11370,9 @@ const associativeMemoryPlugin = {
10946
11370
  logger: log,
10947
11371
  ledger,
10948
11372
  getDb: () => getWorkspace(".").manager.getDatabase(),
10949
- getLogPath: () => join(getWorkspace(".").memoryDir, "retrieval.log")
11373
+ getLogPath: () => join(getWorkspace(".").memoryDir, "retrieval.log"),
11374
+ autoCapture: config.autoCapture,
11375
+ getLlmConfig: () => resolveLlmConfig(runtimePaths.stateDir, runtimePaths.agentDir, log)
10950
11376
  }));
10951
11377
  api.registerCommand({
10952
11378
  name: "memory-sleep",
@@ -10965,9 +11391,9 @@ const associativeMemoryPlugin = {
10965
11391
  log.debug("consolidation: starting trigger=command");
10966
11392
  const result = await runConsolidation({
10967
11393
  db: ws.manager.getDatabase(),
10968
- mergeContentProducer
11394
+ mergeContentProducer,
11395
+ logger: log
10969
11396
  });
10970
- log.debug(`consolidation: completed trigger=command durationMs=${result.durationMs}`);
10971
11397
  const s = result.summary;
10972
11398
  const catchUpInfo = s.catchUpDecayed > 0 ? `Catch-up decayed: ${s.catchUpDecayed}, ` : "";
10973
11399
  return { text: `Memory consolidation complete (${result.durationMs}ms).\n` + catchUpInfo + `Reinforced: ${s.reinforced}, Decayed: ${s.decayed}, Pruned: ${s.pruned} memories + ${s.prunedAssociations} associations, Merged: ${s.merged}, Transitioned: ${s.transitioned}, Exposure GC: ${s.exposuresGc}` };
@@ -10992,6 +11418,7 @@ const associativeMemoryPlugin = {
10992
11418
  temporal_anchor: seg.date
10993
11419
  }));
10994
11420
  db.setState("migration_completed_at", "");
11421
+ const sessionsDir = runtimePaths.agentDir ? join(runtimePaths.agentDir, "sessions") : void 0;
10995
11422
  const result = await runMigration({
10996
11423
  workspaceDir: ".",
10997
11424
  stateDir: runtimePaths.stateDir ?? ".",
@@ -11002,7 +11429,9 @@ const associativeMemoryPlugin = {
11002
11429
  set: (key, value) => db.setState(key, value)
11003
11430
  },
11004
11431
  enrich: enrichFn,
11005
- logger: log
11432
+ logger: log,
11433
+ sessionsDir,
11434
+ llmCall: llmConfig ? (prompt) => callLlm(prompt, llmConfig) : void 0
11006
11435
  });
11007
11436
  if (result.status === "completed") return { text: `Migration complete: imported ${result.segmentsImported} memories from ${result.filesFound} files` + (result.errors?.length ? ` (${result.errors.length} errors)` : "") };
11008
11437
  return { text: `Migration: ${result.status}` };
@@ -11034,7 +11463,7 @@ const associativeMemoryPlugin = {
11034
11463
  /** Cron job name (used to find/update managed jobs). */
11035
11464
  const CONSOLIDATION_CRON_NAME = "Associative Memory Consolidation";
11036
11465
  /** Tag in description to identify managed jobs. */
11037
- const CONSOLIDATION_CRON_TAG = "[managed-by=memory-associative.consolidation]";
11466
+ const CONSOLIDATION_CRON_TAG = "[managed-by=formative-memory.consolidation]";
11038
11467
  /** Default cron expression: daily at 03:00. */
11039
11468
  const DEFAULT_CONSOLIDATION_CRON = "0 3 * * *";
11040
11469
  /** System event token for cron-triggered temporal transitions. */
@@ -11042,7 +11471,7 @@ const associativeMemoryPlugin = {
11042
11471
  /** Cron job name for temporal transitions. */
11043
11472
  const TEMPORAL_CRON_NAME = "Associative Memory Temporal Transitions";
11044
11473
  /** Tag to identify managed temporal jobs. */
11045
- const TEMPORAL_CRON_TAG = "[managed-by=memory-associative.temporal]";
11474
+ const TEMPORAL_CRON_TAG = "[managed-by=formative-memory.temporal]";
11046
11475
  /** Cron expression: daily at 15:00 (03:00 is covered by full consolidation). */
11047
11476
  const DEFAULT_TEMPORAL_CRON = "0 15 * * *";
11048
11477
  api.registerHook("gateway:startup", async (event) => {
@@ -11112,13 +11541,13 @@ const associativeMemoryPlugin = {
11112
11541
  } catch (err) {
11113
11542
  log.warn(`Failed to register consolidation cron: ${err instanceof Error ? err.message : String(err)}`);
11114
11543
  }
11115
- }, { name: "memory-associative-consolidation-cron" });
11544
+ }, { name: "formative-memory-consolidation-cron" });
11116
11545
  api.on("before_agent_reply", async (event, ctx) => {
11117
11546
  const body = event?.cleanedBody;
11118
11547
  if (!body) return;
11119
11548
  if (body.includes(TEMPORAL_CRON_TRIGGER) && !body.includes(CONSOLIDATION_CRON_TRIGGER)) try {
11120
11549
  const db = getWorkspace(ctx?.workspaceDir ?? ".").manager.getDatabase();
11121
- const count = db.transaction(() => applyTemporalTransitions(db));
11550
+ const count = db.transaction(() => applyTemporalTransitions(db, log));
11122
11551
  if (count > 0) log.info(`Scheduled temporal transitions: ${count} transitioned`);
11123
11552
  return {
11124
11553
  handled: true,
@@ -11144,20 +11573,14 @@ const associativeMemoryPlugin = {
11144
11573
  };
11145
11574
  } : void 0;
11146
11575
  const db = ws.manager.getDatabase();
11147
- const temporalCount = db.transaction(() => applyTemporalTransitions(db));
11148
11576
  log.debug("consolidation: starting trigger=cron");
11149
- const result = await runConsolidation({
11150
- db,
11151
- mergeContentProducer
11152
- });
11153
- log.debug(`consolidation: completed trigger=cron durationMs=${result.durationMs}`);
11154
- const s = result.summary;
11155
- const catchUpInfo = s.catchUpDecayed > 0 ? `Catch-up decayed: ${s.catchUpDecayed}, ` : "";
11156
- const temporalInfo = temporalCount > 0 ? `, Temporal transitions (extra): ${temporalCount}` : "";
11157
- log.info(`Scheduled consolidation complete (${result.durationMs}ms): ${catchUpInfo}Reinforced: ${s.reinforced}, Decayed: ${s.decayed}, Pruned: ${s.pruned}+${s.prunedAssociations}, Merged: ${s.merged}${temporalInfo}`);
11158
11577
  return {
11159
11578
  handled: true,
11160
- reply: { text: `Memory consolidation complete (${result.durationMs}ms).` },
11579
+ reply: { text: `Memory consolidation complete (${(await runConsolidation({
11580
+ db,
11581
+ mergeContentProducer,
11582
+ logger: log
11583
+ })).durationMs}ms).` },
11161
11584
  reason: "associative-memory-consolidation"
11162
11585
  };
11163
11586
  } catch (err) {
@@ -11170,7 +11593,7 @@ const associativeMemoryPlugin = {
11170
11593
  }
11171
11594
  });
11172
11595
  api.registerService({
11173
- id: "memory-associative-startup",
11596
+ id: "formative-memory-startup",
11174
11597
  async start(ctx) {
11175
11598
  runtimePaths.stateDir = ctx.stateDir;
11176
11599
  }