formative-memory 0.1.0 → 0.2.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.js CHANGED
@@ -1,4 +1,4 @@
1
- import { n as MemorySourceGuard, r as TemporalStateGuard, t as MemoryDatabase } from "./db-D2pzT6fw.js";
1
+ import { n as MemorySourceGuard, r as TemporalStateGuard, t as MemoryDatabase } from "./db-D1Sc76VE.js";
2
2
  import { basename, dirname, isAbsolute, join, relative, sep } from "node:path";
3
3
  import { appendFileSync, copyFileSync, existsSync, mkdirSync, readFileSync, readdirSync, realpathSync, statSync, writeFileSync } from "node:fs";
4
4
  import { createGeminiEmbeddingProvider, createOpenAiEmbeddingProvider, getMemoryEmbeddingProvider, listMemoryEmbeddingProviders } from "openclaw/plugin-sdk/memory-core-host-engine-embeddings";
@@ -2795,7 +2795,8 @@ const memoryConfigSchema = { parse(value) {
2795
2795
  "autoCapture",
2796
2796
  "autoRecall",
2797
2797
  "verbose",
2798
- "logQueries"
2798
+ "logQueries",
2799
+ "requireEmbedding"
2799
2800
  ], "memory config");
2800
2801
  let provider = "auto";
2801
2802
  let model;
@@ -2812,13 +2813,74 @@ const memoryConfigSchema = { parse(value) {
2812
2813
  model
2813
2814
  },
2814
2815
  dbPath: typeof cfg.dbPath === "string" ? cfg.dbPath : "~/.openclaw/memory/associative",
2815
- autoCapture: cfg.autoCapture === true,
2816
+ autoCapture: cfg.autoCapture !== false,
2816
2817
  autoRecall: cfg.autoRecall !== false,
2817
2818
  verbose: cfg.verbose === true,
2818
- logQueries: cfg.logQueries === true
2819
+ logQueries: cfg.logQueries === true,
2820
+ requireEmbedding: cfg.requireEmbedding !== false
2819
2821
  };
2820
2822
  } };
2821
2823
  //#endregion
2824
//#region src/logger.ts
const LEVEL_ORDER = {
	debug: 0,
	info: 1,
	warn: 2,
	error: 3
};
/**
 * Create a logger that forwards to the host logger at or above the
 * configured minimum level (debug is only enabled when `opts.verbose`).
 */
function createLogger(opts) {
	const host = opts.host;
	const minOrder = opts.verbose ? LEVEL_ORDER.debug : LEVEL_ORDER.info;
	// Render one extra log argument as text: stack for errors, JSON for
	// plain objects (with a fallback for unserializable/circular values).
	const stringifyArg = (value) => {
		if (value instanceof Error) return value.stack || value.message;
		if (value !== null && typeof value === "object") {
			try {
				return JSON.stringify(value);
			} catch {
				return "[Unserializable]";
			}
		}
		return String(value);
	};
	const emit = (level, msg, args) => {
		if (LEVEL_ORDER[level] < minOrder) return;
		const prefix = `[formative-memory] [${level}]`;
		if (host) {
			// Host path: fold all args into a single line. Hosts without a
			// dedicated debug channel receive debug output via info.
			let line = `${prefix} ${msg}`;
			if (args.length > 0) line += " " + args.map(stringifyArg).join(" ");
			if (level === "debug") (host.debug ?? host.info)(line);
			else host[level](line);
			return;
		}
		// Console fallback: route by severity, pass args through untouched.
		const line = `${prefix} ${msg}`;
		if (level === "error") console.error(line, ...args);
		else if (level === "warn") console.warn(line, ...args);
		else console.log(line, ...args);
	};
	return {
		debug: (msg, ...args) => emit("debug", msg, args),
		info: (msg, ...args) => emit("info", msg, args),
		warn: (msg, ...args) => emit("warn", msg, args),
		error: (msg, ...args) => emit("error", msg, args),
		isDebugEnabled: () => minOrder === LEVEL_ORDER.debug
	};
}
/** Sanitize content for log output: collapse whitespace/control chars, truncate. */
function preview(text, max = 60) {
	const compact = text.replace(/[\x00-\x1F\x7F\s]+/g, " ").trim();
	if (compact.length === 0) return "<empty>";
	if (compact.length <= max) return compact;
	return compact.slice(0, max - 1) + "…";
}
/** A silent logger that discards all output. Useful for tests. */
const nullLogger = {
	debug: () => {},
	info: () => {},
	warn: () => {},
	error: () => {},
	isDebugEnabled: () => false
};
//#endregion
2822
2884
  //#region src/consolidation-steps.ts
2823
2885
  /** Retrieval reinforcement learning rate (η). */
2824
2886
  const ETA = .7;
@@ -2843,9 +2905,10 @@ const MODE_WEIGHT_BM25_ONLY = .5;
2843
2905
  * @param lastConsolidationMs Timestamp (ms) of last consolidation run, or null if never run.
2844
2906
  * @param nowMs Current time in ms (injectable for testing).
2845
2907
  */
2846
- function applyCatchUpDecay(db, lastConsolidationMs, nowMs = Date.now()) {
2908
+ function applyCatchUpDecay(db, lastConsolidationMs, nowMs = Date.now(), log = nullLogger) {
2847
2909
  if (lastConsolidationMs == null || !Number.isFinite(lastConsolidationMs)) return 0;
2848
2910
  const allMemories = db.getAllMemories();
2911
+ const debug = log.isDebugEnabled();
2849
2912
  let count = 0;
2850
2913
  const dayMs = 1e3 * 60 * 60 * 24;
2851
2914
  for (const mem of allMemories) {
@@ -2856,6 +2919,7 @@ function applyCatchUpDecay(db, lastConsolidationMs, nowMs = Date.now()) {
2856
2919
  const effectiveCycles = Math.min(cycles, 30);
2857
2920
  const factor = mem.consolidated ? DECAY_CONSOLIDATED : DECAY_WORKING;
2858
2921
  const newStrength = mem.strength * Math.pow(factor, effectiveCycles);
2922
+ if (debug) log.debug(`catch-up decay: "${preview(mem.content)}" ${mem.strength.toFixed(3)} → ${newStrength.toFixed(3)} (${effectiveCycles} cycles, ${mem.consolidated ? "consolidated" : "working"})`);
2859
2923
  db.updateStrength(mem.id, newStrength);
2860
2924
  count++;
2861
2925
  }
@@ -2863,8 +2927,11 @@ function applyCatchUpDecay(db, lastConsolidationMs, nowMs = Date.now()) {
2863
2927
  const globalCycles = Math.max(0, Math.floor(globalDaysSince) - 1);
2864
2928
  if (globalCycles > 0) {
2865
2929
  const effectiveGlobal = Math.min(globalCycles, 30);
2866
- db.decayAllAssociationWeights(Math.pow(DECAY_ASSOCIATION, effectiveGlobal));
2930
+ const associationFactor = Math.pow(DECAY_ASSOCIATION, effectiveGlobal);
2931
+ if (debug) log.debug(`catch-up decay: associations ×${associationFactor.toFixed(4)} (${effectiveGlobal} cycles)`);
2932
+ db.decayAllAssociationWeights(associationFactor);
2867
2933
  }
2934
+ if (count > 0) log.info(`catch-up decay: ${count} memories adjusted for missed cycles`);
2868
2935
  return count;
2869
2936
  }
2870
2937
  /**
@@ -2879,7 +2946,7 @@ function applyCatchUpDecay(db, lastConsolidationMs, nowMs = Date.now()) {
2879
2946
  *
2880
2947
  * Returns count of memories reinforced.
2881
2948
  */
2882
- function applyReinforcement(db) {
2949
+ function applyReinforcement(db, log = nullLogger) {
2883
2950
  const pendingAttrs = db.getUnreinforcedAttributions();
2884
2951
  if (pendingAttrs.length === 0) return 0;
2885
2952
  const reinforcements = /* @__PURE__ */ new Map();
@@ -2890,6 +2957,7 @@ function applyReinforcement(db) {
2890
2957
  reinforcements.set(attr.memory_id, current + reinforcement);
2891
2958
  }
2892
2959
  return db.transaction(() => {
2960
+ const debug = log.isDebugEnabled();
2893
2961
  let count = 0;
2894
2962
  for (const [memoryId, totalReinforcement] of reinforcements) {
2895
2963
  if (totalReinforcement === 0) continue;
@@ -2898,10 +2966,12 @@ function applyReinforcement(db) {
2898
2966
  const newStrength = Math.max(0, Math.min(mem.strength + totalReinforcement, 1));
2899
2967
  if (newStrength !== mem.strength) {
2900
2968
  db.updateStrength(memoryId, newStrength);
2969
+ if (debug) log.debug(`reinforce: "${preview(mem.content)}" ${mem.strength.toFixed(3)} → ${newStrength.toFixed(3)} (+${totalReinforcement.toFixed(3)})`);
2901
2970
  count++;
2902
2971
  }
2903
2972
  }
2904
2973
  for (const attr of pendingAttrs) db.markAttributionsReinforced(attr.message_id, attr.memory_id);
2974
+ if (count > 0) log.info(`reinforce: ${count} memories strengthened from ${pendingAttrs.length} attributions`);
2905
2975
  return count;
2906
2976
  });
2907
2977
  }
@@ -2913,16 +2983,20 @@ function applyReinforcement(db) {
2913
2983
  *
2914
2984
  * Returns count of memories decayed.
2915
2985
  */
2916
- function applyDecay(db) {
2986
+ function applyDecay(db, log = nullLogger) {
2917
2987
  const allMemories = db.getAllMemories();
2988
+ const debug = log.isDebugEnabled();
2918
2989
  let count = 0;
2919
2990
  for (const mem of allMemories) {
2920
2991
  const factor = mem.consolidated ? DECAY_CONSOLIDATED : DECAY_WORKING;
2921
2992
  const newStrength = mem.strength * factor;
2993
+ if (debug) log.debug(`decay: "${preview(mem.content)}" ${mem.strength.toFixed(3)} → ${newStrength.toFixed(3)} (×${factor})`);
2922
2994
  db.updateStrength(mem.id, newStrength);
2923
2995
  count++;
2924
2996
  }
2997
+ if (debug) log.debug(`decay: associations ×${DECAY_ASSOCIATION}`);
2925
2998
  applyAssociationDecay(db);
2999
+ if (count > 0) log.info(`decay: ${count} memories decayed`);
2926
3000
  return count;
2927
3001
  }
2928
3002
  /**
@@ -2946,19 +3020,22 @@ const CO_RETRIEVAL_BASE_WEIGHT = .1;
2946
3020
  *
2947
3021
  * Returns count of associations updated.
2948
3022
  */
2949
- function updateCoRetrievalAssociations(db) {
3023
+ function updateCoRetrievalAssociations(db, log = nullLogger) {
2950
3024
  const groups = db.getCoRetrievalGroups();
2951
3025
  const now = (/* @__PURE__ */ new Date()).toISOString();
2952
3026
  const validIds = new Set(db.getAllMemories().map((m) => m.id));
3027
+ const debug = log.isDebugEnabled();
2953
3028
  let count = 0;
2954
3029
  for (const group of groups) {
2955
3030
  const ids = group.memory_ids.filter((id) => validIds.has(id));
2956
3031
  if (ids.length < 2) continue;
2957
3032
  for (let i = 0; i < ids.length; i++) for (let j = i + 1; j < ids.length; j++) {
2958
3033
  db.upsertAssociationProbOr(ids[i], ids[j], CO_RETRIEVAL_BASE_WEIGHT, now);
3034
+ if (debug) log.debug(`associate: co-retrieval ${ids[i].slice(0, 8)}…↔${ids[j].slice(0, 8)}… (+${CO_RETRIEVAL_BASE_WEIGHT})`);
2959
3035
  count++;
2960
3036
  }
2961
3037
  }
3038
+ if (count > 0) log.info(`associate: ${count} co-retrieval associations updated from ${groups.length} turn groups`);
2962
3039
  return count;
2963
3040
  }
2964
3041
  /**
@@ -2971,9 +3048,10 @@ function updateCoRetrievalAssociations(db) {
2971
3048
  *
2972
3049
  * Returns count of associations created/updated.
2973
3050
  */
2974
- function updateTransitiveAssociations(db, maxUpdates = 100) {
3051
+ function updateTransitiveAssociations(db, maxUpdates = 100, log = nullLogger) {
2975
3052
  const allMemories = db.getAllMemories();
2976
3053
  const now = (/* @__PURE__ */ new Date()).toISOString();
3054
+ const debug = log.isDebugEnabled();
2977
3055
  let count = 0;
2978
3056
  for (const mem of allMemories) {
2979
3057
  if (count >= maxUpdates) break;
@@ -2990,9 +3068,11 @@ function updateTransitiveAssociations(db, maxUpdates = 100) {
2990
3068
  const newWeight = existing + transitiveWeight - existing * transitiveWeight;
2991
3069
  if (newWeight - existing < 1e-9) continue;
2992
3070
  db.upsertAssociation(otherId1, otherId2, newWeight, now);
3071
+ if (debug) log.debug(`associate: transitive ${otherId1.slice(0, 8)}…↔${otherId2.slice(0, 8)}… via ${mem.id.slice(0, 8)}… weight=${newWeight.toFixed(3)}`);
2993
3072
  count++;
2994
3073
  }
2995
3074
  }
3075
+ if (count > 0) log.info(`associate: ${count} transitive associations created/updated`);
2996
3076
  return count;
2997
3077
  }
2998
3078
  /** Association weight threshold below which associations are pruned. */
@@ -3005,14 +3085,17 @@ const PRUNE_ASSOCIATION_THRESHOLD = .01;
3005
3085
  *
3006
3086
  * Returns count of memories pruned.
3007
3087
  */
3008
- function applyPruning(db) {
3088
+ function applyPruning(db, log = nullLogger) {
3009
3089
  const allMemories = db.getAllMemories();
3090
+ const debug = log.isDebugEnabled();
3010
3091
  let memoriesPruned = 0;
3011
3092
  for (const mem of allMemories) if (mem.strength <= .05) {
3093
+ if (debug) log.debug(`prune: removing "${preview(mem.content, 80)}" (strength=${mem.strength.toFixed(3)}, type=${mem.type})`);
3012
3094
  db.deleteMemory(mem.id);
3013
3095
  memoriesPruned++;
3014
3096
  }
3015
3097
  const associationsPruned = db.pruneWeakAssociations(PRUNE_ASSOCIATION_THRESHOLD);
3098
+ if (memoriesPruned > 0 || associationsPruned > 0) log.info(`prune: ${memoriesPruned} memories, ${associationsPruned} associations removed`);
3016
3099
  return {
3017
3100
  memoriesPruned,
3018
3101
  associationsPruned
@@ -3025,9 +3108,10 @@ function applyPruning(db) {
3025
3108
  *
3026
3109
  * Returns count of memories transitioned.
3027
3110
  */
3028
- function applyTemporalTransitions(db) {
3111
+ function applyTemporalTransitions(db, log = nullLogger) {
3029
3112
  const now = /* @__PURE__ */ new Date();
3030
3113
  const allMemories = db.getAllMemories();
3114
+ const debug = log.isDebugEnabled();
3031
3115
  let count = 0;
3032
3116
  for (const mem of allMemories) {
3033
3117
  if (!mem.temporal_anchor) continue;
@@ -3039,10 +3123,12 @@ function applyTemporalTransitions(db) {
3039
3123
  if ((now.getTime() - anchor.getTime()) / (1e3 * 60 * 60) >= 24) newState = "past";
3040
3124
  }
3041
3125
  if (newState) {
3126
+ if (debug) log.debug(`temporal: "${preview(mem.content)}" ${mem.temporal_state} → ${newState}`);
3042
3127
  db.updateTemporalState(mem.id, newState);
3043
3128
  count++;
3044
3129
  }
3045
3130
  }
3131
+ if (count > 0) log.info(`temporal: ${count} memories transitioned`);
3046
3132
  return count;
3047
3133
  }
3048
3134
  /**
@@ -3053,9 +3139,11 @@ function applyTemporalTransitions(db) {
3053
3139
  *
3054
3140
  * Returns count of exposure rows deleted.
3055
3141
  */
3056
- function provenanceGC(db, cutoffDays = 30) {
3142
+ function provenanceGC(db, cutoffDays = 30, log = nullLogger) {
3057
3143
  const cutoffDate = (/* @__PURE__ */ new Date(Date.now() - cutoffDays * 24 * 60 * 60 * 1e3)).toISOString();
3058
- return db.deleteExposuresOlderThan(cutoffDate);
3144
+ const count = db.deleteExposuresOlderThan(cutoffDate);
3145
+ if (count > 0) log.debug(`gc: ${count} exposure rows older than ${cutoffDays}d removed`);
3146
+ return count;
3059
3147
  }
3060
3148
  //#endregion
3061
3149
  //#region src/merge-candidates.ts
@@ -3183,11 +3271,12 @@ function contentHash(content) {
3183
3271
  * refresh strength to 1.0, handle sources normally.
3184
3272
  * 3. newId is novel: create new memory with source="consolidation".
3185
3273
  */
3186
- async function executeMerge(db, pair, contentProducer, embedder) {
3274
+ async function executeMerge(db, pair, contentProducer, embedder, log = nullLogger) {
3187
3275
  if (pair.a === pair.b) throw new Error(`Merge failed: cannot merge memory with itself (${pair.a})`);
3188
3276
  const memA = db.getMemory(pair.a);
3189
3277
  const memB = db.getMemory(pair.b);
3190
3278
  if (!memA || !memB) throw new Error(`Merge failed: memory not found (${pair.a}, ${pair.b})`);
3279
+ if (log.isDebugEnabled()) log.debug(`merge: combining: A: "${preview(memA.content, 100)}" B: "${preview(memB.content, 100)}"`);
3191
3280
  const merged = await contentProducer({
3192
3281
  id: memA.id,
3193
3282
  content: memA.content,
@@ -3208,15 +3297,19 @@ async function executeMerge(db, pair, contentProducer, embedder) {
3208
3297
  } catch {}
3209
3298
  return db.transaction(() => {
3210
3299
  let canonicalId;
3300
+ let outcome;
3211
3301
  if (isAbsorption) {
3302
+ outcome = "absorption";
3212
3303
  canonicalId = newId;
3213
3304
  db.updateStrength(canonicalId, 1);
3214
3305
  } else {
3215
3306
  const existing = db.getMemory(newId);
3216
3307
  if (existing) {
3308
+ outcome = "reuse";
3217
3309
  if (existing.content !== merged.content) throw new Error(`Merge failed: hash collision for ${newId} with different content`);
3218
3310
  db.updateStrength(newId, 1);
3219
3311
  } else {
3312
+ outcome = "new";
3220
3313
  db.insertMemory({
3221
3314
  id: newId,
3222
3315
  type: merged.type,
@@ -3250,6 +3343,11 @@ async function executeMerge(db, pair, contentProducer, embedder) {
3250
3343
  }
3251
3344
  }
3252
3345
  inheritAssociations(db, [memA.id, memB.id], canonicalId, now);
3346
+ log.info(`merge: ${outcome} (${pair.a.slice(0, 8)}… + ${pair.b.slice(0, 8)}…) → "${preview(merged.content, 80)}" (${canonicalId.slice(0, 8)}…)`);
3347
+ if (log.isDebugEnabled()) {
3348
+ if (originalsWeakened.length > 0) log.debug(`merge: weakened originals: ${originalsWeakened.map((id) => id.slice(0, 8) + "…").join(", ")}`);
3349
+ if (intermediatesDeleted.length > 0) log.debug(`merge: deleted intermediates: ${intermediatesDeleted.map((id) => id.slice(0, 8) + "…").join(", ")}`);
3350
+ }
3253
3351
  return {
3254
3352
  newMemoryId: canonicalId,
3255
3353
  mergedFrom: [pair.a, pair.b],
@@ -3265,18 +3363,19 @@ async function executeMerge(db, pair, contentProducer, embedder) {
3265
3363
  * Pairs are processed in order (highest score first).
3266
3364
  * Skips pairs where either memory was already consumed.
3267
3365
  */
3268
- async function executeMerges(db, pairs, contentProducer, embedder) {
3366
+ async function executeMerges(db, pairs, contentProducer, embedder, log = nullLogger) {
3269
3367
  const consumed = /* @__PURE__ */ new Set();
3270
3368
  const results = [];
3271
3369
  for (const pair of pairs) {
3272
3370
  if (consumed.has(pair.a) || consumed.has(pair.b)) continue;
3273
3371
  if (!db.getMemory(pair.a) || !db.getMemory(pair.b)) continue;
3274
- const result = await executeMerge(db, pair, contentProducer, embedder);
3372
+ const result = await executeMerge(db, pair, contentProducer, embedder, log);
3275
3373
  results.push(result);
3276
3374
  consumed.add(pair.a);
3277
3375
  consumed.add(pair.b);
3278
3376
  consumed.add(result.newMemoryId);
3279
3377
  }
3378
+ if (results.length > 0) log.info(`merge: ${results.length} merges completed`);
3280
3379
  return results;
3281
3380
  }
3282
3381
  function isIntermediate(mem) {
@@ -3339,6 +3438,7 @@ function inheritAssociations(db, sourceIds, canonicalId, now) {
3339
3438
  */
3340
3439
  async function runConsolidation(params) {
3341
3440
  const start = Date.now();
3441
+ const log = params.logger ?? nullLogger;
3342
3442
  const consolidationCutoff = (/* @__PURE__ */ new Date()).toISOString();
3343
3443
  const summary = {
3344
3444
  catchUpDecayed: 0,
@@ -3350,6 +3450,7 @@ async function runConsolidation(params) {
3350
3450
  transitioned: 0,
3351
3451
  exposuresGc: 0
3352
3452
  };
3453
+ log.info("consolidation: starting");
3353
3454
  params.db.transaction(() => {
3354
3455
  const lastAt = params.db.getState("last_consolidation_at");
3355
3456
  let lastConsolidationMs = null;
@@ -3357,13 +3458,13 @@ async function runConsolidation(params) {
3357
3458
  const ms = new Date(lastAt).getTime();
3358
3459
  if (Number.isFinite(ms)) lastConsolidationMs = ms;
3359
3460
  }
3360
- summary.catchUpDecayed = applyCatchUpDecay(params.db, lastConsolidationMs);
3361
- summary.reinforced = applyReinforcement(params.db);
3362
- summary.decayed = applyDecay(params.db);
3363
- updateCoRetrievalAssociations(params.db);
3364
- updateTransitiveAssociations(params.db);
3365
- summary.transitioned = applyTemporalTransitions(params.db);
3366
- const pruneResult = applyPruning(params.db);
3461
+ summary.catchUpDecayed = applyCatchUpDecay(params.db, lastConsolidationMs, Date.now(), log);
3462
+ summary.reinforced = applyReinforcement(params.db, log);
3463
+ summary.decayed = applyDecay(params.db, log);
3464
+ updateCoRetrievalAssociations(params.db, log);
3465
+ updateTransitiveAssociations(params.db, 100, log);
3466
+ summary.transitioned = applyTemporalTransitions(params.db, log);
3467
+ const pruneResult = applyPruning(params.db, log);
3367
3468
  summary.pruned = pruneResult.memoriesPruned;
3368
3469
  summary.prunedAssociations = pruneResult.associationsPruned;
3369
3470
  });
@@ -3372,6 +3473,7 @@ async function runConsolidation(params) {
3372
3473
  const lastAt = params.db.getState("last_consolidation_at");
3373
3474
  const sourceMems = params.db.getMergeSources(MERGE_SOURCE_MIN_STRENGTH, lastAt);
3374
3475
  const targetMems = params.db.getMergeTargets(MERGE_TARGET_MIN_STRENGTH);
3476
+ log.debug(`merge: ${sourceMems.length} sources, ${targetMems.length} targets`);
3375
3477
  const uniqueIds = [...new Set([...sourceMems, ...targetMems].map((m) => m.id))];
3376
3478
  const embeddingMap = params.db.getEmbeddingsByIds(uniqueIds);
3377
3479
  const toCandidate = (m) => ({
@@ -3381,14 +3483,19 @@ async function runConsolidation(params) {
3381
3483
  embedding: embeddingMap.get(m.id) ?? null
3382
3484
  });
3383
3485
  const pairs = findMergeCandidatesDelta(sourceMems.map(toCandidate), targetMems.map(toCandidate));
3384
- if (pairs.length > 0) summary.merged = (await executeMerges(params.db, pairs, params.mergeContentProducer, params.embedder)).length;
3486
+ if (pairs.length > 0) {
3487
+ log.info(`merge: ${pairs.length} candidate pairs found`);
3488
+ summary.merged = (await executeMerges(params.db, pairs, params.mergeContentProducer, params.embedder, log)).length;
3489
+ }
3385
3490
  }
3386
3491
  } finally {
3387
3492
  params.db.transaction(() => {
3388
- summary.exposuresGc = provenanceGC(params.db);
3493
+ summary.exposuresGc = provenanceGC(params.db, 30, log);
3389
3494
  params.db.setState("last_consolidation_at", consolidationCutoff);
3390
3495
  });
3391
3496
  }
3497
+ const s = summary;
3498
+ log.info(`consolidation: done in ${Date.now() - start}ms — reinforced=${s.reinforced} decayed=${s.decayed} pruned=${s.pruned}+${s.prunedAssociations} merged=${s.merged} transitioned=${s.transitioned}`);
3392
3499
  return {
3393
3500
  ok: true,
3394
3501
  summary,
@@ -3581,6 +3688,111 @@ function feedbackEvidenceForRating(rating) {
3581
3688
  };
3582
3689
  }
3583
3690
  //#endregion
3691
+ //#region src/llm-caller.ts
3692
+ const DEFAULTS = {
3693
+ anthropic: {
3694
+ model: "claude-haiku-4-5-20251001",
3695
+ maxTokens: 2048
3696
+ },
3697
+ openai: {
3698
+ model: "gpt-4o-mini",
3699
+ maxTokens: 2048
3700
+ }
3701
+ };
3702
+ /**
3703
+ * Call an LLM with a simple prompt → text response.
3704
+ * Throws on failure (network, auth, rate limit, timeout).
3705
+ */
3706
+ async function callLlm(prompt, config) {
3707
+ const { provider, apiKey } = config;
3708
+ const model = config.model ?? DEFAULTS[provider].model;
3709
+ const maxTokens = config.maxTokens ?? DEFAULTS[provider].maxTokens;
3710
+ const timeoutMs = config.timeoutMs ?? 3e4;
3711
+ if (provider === "anthropic") return callAnthropic(prompt, apiKey, model, maxTokens, timeoutMs);
3712
+ return callOpenAi(prompt, apiKey, model, maxTokens, timeoutMs);
3713
+ }
3714
/**
 * POST a single user message to the Anthropic Messages API and return the
 * concatenated text blocks of the reply.
 * Throws on HTTP error status or an empty/text-free response.
 */
async function callAnthropic(prompt, apiKey, model, maxTokens, timeoutMs) {
	const payload = {
		model,
		max_tokens: maxTokens,
		messages: [{
			role: "user",
			content: prompt
		}]
	};
	const response = await fetchWithTimeout("https://api.anthropic.com/v1/messages", {
		method: "POST",
		headers: {
			"Content-Type": "application/json",
			"x-api-key": apiKey,
			"anthropic-version": "2023-06-01"
		},
		body: JSON.stringify(payload)
	}, timeoutMs);
	if (!response.ok) {
		// Include a truncated body excerpt to aid debugging auth/rate-limit errors.
		const body = await response.text().catch(() => "");
		throw new Error(`Anthropic API error ${response.status}: ${body.slice(0, 200)}`);
	}
	const data = await response.json();
	const text = data.content?.filter((b) => b.type === "text")?.map((b) => b.text)?.join("") ?? "";
	if (!text) throw new Error("Anthropic returned empty response");
	return text;
}
3739
/**
 * POST a single user message to the OpenAI chat-completions API and return the
 * first choice's message content.
 * Throws on HTTP error status or an empty response.
 */
async function callOpenAi(prompt, apiKey, model, maxTokens, timeoutMs) {
	const payload = {
		model,
		max_tokens: maxTokens,
		messages: [{
			role: "user",
			content: prompt
		}]
	};
	const response = await fetchWithTimeout("https://api.openai.com/v1/chat/completions", {
		method: "POST",
		headers: {
			"Content-Type": "application/json",
			Authorization: `Bearer ${apiKey}`
		},
		body: JSON.stringify(payload)
	}, timeoutMs);
	if (!response.ok) {
		// Include a truncated body excerpt to aid debugging auth/rate-limit errors.
		const body = await response.text().catch(() => "");
		throw new Error(`OpenAI API error ${response.status}: ${body.slice(0, 200)}`);
	}
	const data = await response.json();
	const text = data.choices?.[0]?.message?.content ?? "";
	if (!text) throw new Error("OpenAI returned empty response");
	return text;
}
3763
/**
 * fetch() with a hard timeout enforced via AbortController.
 *
 * @param url Request URL.
 * @param init fetch init options; the abort signal is added here.
 * @param timeoutMs Milliseconds before the in-flight request is aborted.
 * @returns The fetch Response.
 * @throws Error with a descriptive timeout message (original abort error
 *   preserved as `cause`) when the timer fires; any other fetch failure is
 *   rethrown unchanged.
 */
async function fetchWithTimeout(url, init, timeoutMs) {
	const controller = new AbortController();
	const timer = setTimeout(() => controller.abort(), timeoutMs);
	try {
		return await fetch(url, {
			...init,
			signal: controller.signal
		});
	} catch (error) {
		// Detect aborts by name rather than `instanceof DOMException`:
		// DOMException is not a global in older runtimes (the instanceof check
		// would throw ReferenceError inside this catch and mask the real
		// failure), and some fetch polyfills throw plain Errors named
		// "AbortError". Preserve the original error as `cause`.
		if (error instanceof Error && error.name === "AbortError") {
			throw new Error(`LLM API call timed out after ${timeoutMs}ms`, { cause: error });
		}
		throw error;
	} finally {
		// Always release the timer so the process can exit promptly.
		clearTimeout(timer);
	}
}
3778
/**
 * Extract an API key from OpenClaw auth profiles.
 * Tries the specified provider, falls back to the other.
 */
function resolveApiKey(authProfiles, preferredProvider) {
	if (!authProfiles) return null;
	// First profile matching the provider with a non-empty key wins.
	const pick = (provider) => {
		for (const profile of Object.values(authProfiles)) {
			if (profile.provider === provider && profile.key) {
				return {
					provider,
					apiKey: profile.key
				};
			}
		}
		return null;
	};
	const fallback = preferredProvider === "anthropic" ? "openai" : "anthropic";
	return pick(preferredProvider) ?? pick(fallback);
}
3795
+ //#endregion
3584
3796
  //#region src/context-engine.ts
3585
3797
  /**
3586
3798
  * Associative Memory Context Engine
@@ -3841,26 +4053,31 @@ function createAssociativeMemoryContextEngine(options) {
3841
4053
  };
3842
4054
  },
3843
4055
  async afterTurn(params) {
3844
- if (!options.getDb || !options.ledger) {
3845
- options.logger?.warn("afterTurn() disabled: missing getDb or ledger");
3846
- return;
4056
+ if (options.getDb && options.ledger) {
4057
+ const turnFingerprint = userTurnKey(params.messages) ?? "empty";
4058
+ const turnId = `${params.sessionId}:${params.prePromptMessageCount}:${turnFingerprint}`;
4059
+ try {
4060
+ processAfterTurn({
4061
+ sessionId: params.sessionId,
4062
+ turnId,
4063
+ messages: params.messages,
4064
+ prePromptMessageCount: params.prePromptMessageCount,
4065
+ ledger: options.ledger,
4066
+ db: options.getDb(),
4067
+ logPath: options.getLogPath?.(),
4068
+ isBm25Only: options.isBm25Only?.() ?? false
4069
+ });
4070
+ options.logger?.debug?.(`afterTurn: autoInjected=${options.ledger.autoInjected.size} searchResults=${options.ledger.searchResults.size} explicitlyOpened=${options.ledger.explicitlyOpened.size} storedThisTurn=${options.ledger.storedThisTurn.size}`);
4071
+ } catch (error) {
4072
+ options.logger?.warn("afterTurn() provenance write failed", error);
4073
+ }
3847
4074
  }
3848
- const turnFingerprint = userTurnKey(params.messages) ?? "empty";
3849
- const turnId = `${params.sessionId}:${params.prePromptMessageCount}:${turnFingerprint}`;
3850
- try {
3851
- processAfterTurn({
3852
- sessionId: params.sessionId,
3853
- turnId,
3854
- messages: params.messages,
3855
- prePromptMessageCount: params.prePromptMessageCount,
3856
- ledger: options.ledger,
3857
- db: options.getDb(),
3858
- logPath: options.getLogPath?.(),
3859
- isBm25Only: options.isBm25Only?.() ?? false
3860
- });
3861
- options.logger?.debug?.(`afterTurn: autoInjected=${options.ledger.autoInjected.size} searchResults=${options.ledger.searchResults.size} explicitlyOpened=${options.ledger.explicitlyOpened.size} storedThisTurn=${options.ledger.storedThisTurn.size}`);
3862
- } catch (error) {
3863
- options.logger?.warn("afterTurn() provenance write failed", error);
4075
+ if (options.autoCapture) {
4076
+ const turnContent = extractTurnContent(params.messages, params.prePromptMessageCount);
4077
+ if (turnContent) {
4078
+ const llmConfig = options.getLlmConfig?.();
4079
+ if (llmConfig) extractAndStoreMemories(turnContent, llmConfig, getManager(), options.logger);
4080
+ }
3864
4081
  }
3865
4082
  },
3866
4083
  async ingest(_params) {
@@ -3927,6 +4144,158 @@ function checkSleepDebt(getDb) {
3927
4144
  return "";
3928
4145
  }
3929
4146
  }
4147
/** Maximum character length for the turn transcript sent to the extraction LLM. */
const AUTO_CAPTURE_MAX_CHARS = 4e3;
/**
 * Build a "User: ... / Assistant: ..." transcript for the current turn.
 * Returns null when the turn lacks a meaningful user+assistant exchange.
 *
 * Only messages after `prePromptMessageCount` (the current turn) are
 * considered, and ALL user/assistant messages are aggregated — not just
 * the last of each. The pair is clipped to AUTO_CAPTURE_MAX_CHARS total
 * so the extraction prompt stays manageable.
 */
function extractTurnContent(messages, prePromptMessageCount) {
	const currentTurn = messages.slice(prePromptMessageCount);
	const fromUser = extractRoleText(currentTurn, "user");
	const fromAssistant = extractRoleText(currentTurn, "assistant");
	if (!fromUser || !fromAssistant) return null;
	// Skip trivially short exchanges — nothing worth extracting.
	const tooShort = fromUser.trim().length < 10 && fromAssistant.trim().length < 20;
	if (tooShort) return null;
	const clipped = truncatePair(fromUser, fromAssistant, AUTO_CAPTURE_MAX_CHARS);
	return `User: ${clipped.user}\n\nAssistant: ${clipped.assistant}`;
}
4166
/**
 * Aggregate all text content from messages with the given role.
 * Iterates forward so chronological order is preserved; returns null
 * when no message of that role contributed any text.
 */
function extractRoleText(messages, role) {
	const collected = [];
	for (const candidate of messages) {
		if (candidate == null || typeof candidate !== "object") continue;
		if (candidate.role !== role || !candidate.content) continue;
		const { content } = candidate;
		if (typeof content === "string") {
			collected.push(content);
		} else if (Array.isArray(content)) {
			// Multimodal message: keep only the text blocks.
			const textParts = content.filter(isTextBlock).map((block) => block.text);
			if (textParts.length > 0) collected.push(textParts.join("\n"));
		}
	}
	if (collected.length === 0) return null;
	return collected.join("\n\n");
}
4184
/**
 * Split a shared character budget between the user and assistant texts.
 * If both fit, neither is touched. Otherwise the shorter side keeps its
 * full text and the longer side absorbs the remaining budget; when both
 * exceed half the budget, each side is clipped to half.
 */
function truncatePair(userText, assistantText, maxTotal) {
	const combined = userText.length + assistantText.length;
	if (combined <= maxTotal) {
		return { user: userText, assistant: assistantText };
	}
	const half = Math.floor(maxTotal / 2);
	if (userText.length <= half) {
		// User side is short — give the assistant the leftover budget.
		return {
			user: userText,
			assistant: truncate(assistantText, maxTotal - userText.length)
		};
	}
	if (assistantText.length <= half) {
		// Assistant side is short — give the user the leftover budget.
		return {
			user: truncate(userText, maxTotal - assistantText.length),
			assistant: assistantText
		};
	}
	return {
		user: truncate(userText, half),
		assistant: truncate(assistantText, half)
	};
}
4203
/**
 * Truncate `text` to at most `maxLen` characters, appending "..." when cut.
 *
 * Fix: for `maxLen <= 3` the original computed `slice(0, maxLen - 3)`,
 * whose zero/negative end index produced "" + "..." or an end-relative
 * slice — output LONGER than maxLen. Now a tiny budget just hard-clips.
 *
 * @param {string} text - Input string.
 * @param {number} maxLen - Maximum allowed length of the result.
 * @returns {string} A string of at most `maxLen` characters.
 */
function truncate(text, maxLen) {
	if (text.length <= maxLen) return text;
	if (maxLen <= 3) return text.slice(0, Math.max(0, maxLen));
	return text.slice(0, maxLen - 3) + "...";
}
4207
/**
 * Build the prompt sent to the extraction LLM to distill durable facts
 * from one conversation turn.
 *
 * The model is instructed to return ONLY a JSON array of
 * {type, content} objects — the shape consumed by
 * parseExtractionResponse — and that an empty array is the expected
 * result for most turns. The valid `type` values listed in the prompt
 * mirror VALID_FACT_TYPES.
 *
 * @param {string} turnContent - "User: ... / Assistant: ..." transcript.
 * @returns {string} The complete extraction prompt.
 */
function buildExtractionPrompt(turnContent) {
	return `You are a memory extraction system. Read the following conversation exchange and extract facts worth remembering long-term.

Rules:
- Extract durable information: preferences, personal background, goals, plans, work/project context, relationships, recurring patterns, commitments, events, or corrections to prior knowledge.
- Facts can come from any source: user statements, tool outputs, environment details, project configuration, or confirmed assistant observations.
- Do NOT extract: the current task request itself, ephemeral implementation details, assistant reasoning, pleasantries, or transient operational context.
- Each fact should be a single, self-contained statement.
- Most conversation turns contain nothing worth remembering long-term — returning an empty array is expected and correct. Only extract when there is clearly durable information.
- Return a JSON array of objects, each with "type" and "content" fields.
- Valid types: "preference", "about", "person", "event", "goal", "work", "fact"
- preference: tastes, values, styles, dislikes
- about: background, identity, skills, life situation
- person: people and relationships
- event: events, schedules, deadlines
- goal: objectives, plans, aspirations
- work: durable work/project context, constraints, architecture
- fact: other durable information (fallback)
- Return ONLY the JSON array, nothing else.

Example output:
[{"type": "preference", "content": "User prefers TypeScript over JavaScript for backend work"}, {"type": "event", "content": "User is moving to Berlin in May 2026"}, {"type": "work", "content": "Project must support SQLite only, no Postgres"}]

Conversation:
${turnContent}`;
}
4234
/** Fact types the extraction LLM may emit; anything else is coerced to "fact". */
const VALID_FACT_TYPES = new Set([
	"preference",
	"about",
	"person",
	"event",
	"goal",
	"work",
	"fact"
]);
/**
 * Parse the LLM's extraction response into validated {type, content} facts.
 * Tolerant of markdown fences and leading/trailing chatter around the JSON
 * array; any malformed payload yields [] rather than throwing.
 */
function parseExtractionResponse(response) {
	// Strip an optional ```/```json fence, then locate the outermost array.
	const unfenced = response
		.trim()
		.replace(/^```(?:json)?\s*/i, "")
		.replace(/\s*```\s*$/, "");
	const open = unfenced.indexOf("[");
	const close = unfenced.lastIndexOf("]");
	if (open === -1 || close === -1 || close <= open) return [];
	let decoded;
	try {
		decoded = JSON.parse(unfenced.slice(open, close + 1));
	} catch {
		return [];
	}
	if (!Array.isArray(decoded)) return [];
	const validated = [];
	for (const candidate of decoded) {
		if (candidate == null || typeof candidate !== "object") continue;
		const { type, content } = candidate;
		if (typeof type !== "string" || typeof content !== "string") continue;
		const trimmed = content.trim();
		if (!trimmed) continue;
		validated.push({
			type: VALID_FACT_TYPES.has(type) ? type : "fact",
			content: trimmed
		});
	}
	return validated;
}
4274
/**
 * Extract facts from a conversation turn via the LLM and persist each one
 * as an "auto_capture" memory. Designed to be fire-and-forget: every
 * failure path is caught and logged, nothing propagates to the caller.
 */
async function extractAndStoreMemories(turnContent, llmConfig, manager, logger) {
	try {
		const prompt = buildExtractionPrompt(turnContent);
		const rawResponse = await callLlm(prompt, llmConfig);
		const facts = parseExtractionResponse(rawResponse);
		if (facts.length === 0) {
			logger?.debug?.("auto-capture: LLM extracted 0 facts");
			return;
		}
		for (const fact of facts) {
			// Store facts independently — one failure must not drop the rest.
			try {
				await manager.store({
					content: fact.content,
					type: fact.type,
					source: "auto_capture"
				});
			} catch (error) {
				logger?.warn(`auto-capture: failed to store fact: ${fact.content.slice(0, 50)}`, error);
			}
		}
		logger?.debug?.(`auto-capture: stored ${facts.length} facts`);
	} catch (error) {
		logger?.warn("auto-capture: LLM extraction failed", error);
	}
}
3930
4299
  /** Type guard for text content blocks in multimodal messages. */
3931
4300
  function isTextBlock(v) {
3932
4301
  if (v == null || typeof v !== "object") return false;
@@ -4057,111 +4426,6 @@ var EmbeddingTimeoutError = class extends Error {
4057
4426
  }
4058
4427
  };
4059
4428
  //#endregion
4060
- //#region src/llm-caller.ts
4061
- const DEFAULTS = {
4062
- anthropic: {
4063
- model: "claude-haiku-4-5-20251001",
4064
- maxTokens: 2048
4065
- },
4066
- openai: {
4067
- model: "gpt-4o-mini",
4068
- maxTokens: 2048
4069
- }
4070
- };
4071
- /**
4072
- * Call an LLM with a simple prompt → text response.
4073
- * Throws on failure (network, auth, rate limit, timeout).
4074
- */
4075
- async function callLlm(prompt, config) {
4076
- const { provider, apiKey } = config;
4077
- const model = config.model ?? DEFAULTS[provider].model;
4078
- const maxTokens = config.maxTokens ?? DEFAULTS[provider].maxTokens;
4079
- const timeoutMs = config.timeoutMs ?? 3e4;
4080
- if (provider === "anthropic") return callAnthropic(prompt, apiKey, model, maxTokens, timeoutMs);
4081
- return callOpenAi(prompt, apiKey, model, maxTokens, timeoutMs);
4082
- }
4083
- async function callAnthropic(prompt, apiKey, model, maxTokens, timeoutMs) {
4084
- const response = await fetchWithTimeout("https://api.anthropic.com/v1/messages", {
4085
- method: "POST",
4086
- headers: {
4087
- "Content-Type": "application/json",
4088
- "x-api-key": apiKey,
4089
- "anthropic-version": "2023-06-01"
4090
- },
4091
- body: JSON.stringify({
4092
- model,
4093
- max_tokens: maxTokens,
4094
- messages: [{
4095
- role: "user",
4096
- content: prompt
4097
- }]
4098
- })
4099
- }, timeoutMs);
4100
- if (!response.ok) {
4101
- const body = await response.text().catch(() => "");
4102
- throw new Error(`Anthropic API error ${response.status}: ${body.slice(0, 200)}`);
4103
- }
4104
- const text = (await response.json()).content?.filter((b) => b.type === "text")?.map((b) => b.text)?.join("") ?? "";
4105
- if (!text) throw new Error("Anthropic returned empty response");
4106
- return text;
4107
- }
4108
- async function callOpenAi(prompt, apiKey, model, maxTokens, timeoutMs) {
4109
- const response = await fetchWithTimeout("https://api.openai.com/v1/chat/completions", {
4110
- method: "POST",
4111
- headers: {
4112
- "Content-Type": "application/json",
4113
- Authorization: `Bearer ${apiKey}`
4114
- },
4115
- body: JSON.stringify({
4116
- model,
4117
- max_tokens: maxTokens,
4118
- messages: [{
4119
- role: "user",
4120
- content: prompt
4121
- }]
4122
- })
4123
- }, timeoutMs);
4124
- if (!response.ok) {
4125
- const body = await response.text().catch(() => "");
4126
- throw new Error(`OpenAI API error ${response.status}: ${body.slice(0, 200)}`);
4127
- }
4128
- const text = (await response.json()).choices?.[0]?.message?.content ?? "";
4129
- if (!text) throw new Error("OpenAI returned empty response");
4130
- return text;
4131
- }
4132
- async function fetchWithTimeout(url, init, timeoutMs) {
4133
- const controller = new AbortController();
4134
- const timer = setTimeout(() => controller.abort(), timeoutMs);
4135
- try {
4136
- return await fetch(url, {
4137
- ...init,
4138
- signal: controller.signal
4139
- });
4140
- } catch (error) {
4141
- if (error instanceof DOMException && error.name === "AbortError") throw new Error(`LLM API call timed out after ${timeoutMs}ms`);
4142
- throw error;
4143
- } finally {
4144
- clearTimeout(timer);
4145
- }
4146
- }
4147
- /**
4148
- * Extract an API key from OpenClaw auth profiles.
4149
- * Tries the specified provider, falls back to the other.
4150
- */
4151
- function resolveApiKey(authProfiles, preferredProvider) {
4152
- if (!authProfiles) return null;
4153
- for (const profile of Object.values(authProfiles)) if (profile.provider === preferredProvider && profile.key) return {
4154
- provider: preferredProvider,
4155
- apiKey: profile.key
4156
- };
4157
- const fallback = preferredProvider === "anthropic" ? "openai" : "anthropic";
4158
- for (const profile of Object.values(authProfiles)) if (profile.provider === fallback && profile.key) return {
4159
- provider: fallback,
4160
- apiKey: profile.key
4161
- };
4162
- return null;
4163
- }
4164
- //#endregion
4165
4429
  //#region src/memory-manager.ts
4166
4430
  var MemoryManager = class {
4167
4431
  db;
@@ -9786,6 +10050,7 @@ MarkdownIt.prototype.renderInline = function(src, env) {
9786
10050
  * Uses markdown-it for reliable parsing (handles code blocks, frontmatter, CRLF).
9787
10051
  */
9788
10052
  const DATE_FILENAME_RE = /^(\d{4}-\d{2}-\d{2})\.md$/;
10053
+ const DATE_ISO_RE = /^\d{4}-\d{2}-\d{2}/;
9789
10054
  const FRONTMATTER_RE = /^\uFEFF?---[ \t]*\r?\n[\s\S]*?\r?\n---[ \t]*\r?\n/;
9790
10055
  const MAX_SEGMENT_CHARS = 2e3;
9791
10056
  const MIN_SEGMENT_CHARS = 200;
@@ -9882,6 +10147,7 @@ function segmentMarkdown(content, filePath, workspaceDir) {
9882
10147
  heading_level: seg.headingLevel,
9883
10148
  date: fileDate,
9884
10149
  evergreen: isEvergreen,
10150
+ session: false,
9885
10151
  content: seg.content,
9886
10152
  char_count: seg.content.length
9887
10153
  }));
@@ -10019,6 +10285,126 @@ function mergeSmallSegments(segments) {
10019
10285
  }
10020
10286
  return result;
10021
10287
  }
10288
/**
 * Discover JSONL session files in the agent's sessions directory.
 * Only imports canonical live session files (*.jsonl).
 * Excludes archive variants (.reset.*, .deleted.*), backups (.bak.*)
 * to avoid duplicate extraction from the same session.
 *
 * Fixes:
 * - The doc above promised exclusion of archive/backup variants, but the
 *   code only checked the ".jsonl" suffix — a "<id>.reset.<ts>.jsonl"
 *   file would have been imported twice. The marker segments are now
 *   filtered explicitly.
 * - statSync could throw if the path vanished (or became unreadable)
 *   between the existsSync check and the call; all fs probing is now
 *   inside the try, returning [] on any failure.
 *
 * @param {string} sessionsDir - Directory to scan (non-recursive).
 * @returns {string[]} Absolute paths, sorted for deterministic import order.
 */
function discoverSessionFiles(sessionsDir) {
	let entries;
	try {
		if (!existsSync(sessionsDir) || !statSync(sessionsDir).isDirectory()) return [];
		entries = readdirSync(sessionsDir, { withFileTypes: true });
	} catch {
		return [];
	}
	const found = [];
	for (const entry of entries) {
		if (!entry.isFile()) continue;
		const name = entry.name;
		if (!name.endsWith(".jsonl") || name === "sessions.json") continue;
		// Archive/backup variants embed a marker segment in the filename
		// (e.g. "<id>.reset.<ts>.jsonl") — skip them per the contract above.
		if (/\.(reset|deleted|bak)\./.test(name)) continue;
		found.push(join(sessionsDir, name));
	}
	found.sort((a, b) => a.localeCompare(b));
	return found;
}
10311
/**
 * Parse a JSONL session file and extract conversation turns as ImportSegments.
 *
 * Strategy:
 * - Keep only user and assistant text messages (tool calls, thinking and
 *   system entries are dropped)
 * - Group consecutive messages into conversation exchanges
 * - Date each exchange from its own timestamp, falling back to the first
 *   timestamp seen anywhere in the session
 * - Corrupt or partial JSONL lines are silently skipped
 */
function parseSessionJsonl(content, filePath, sessionsDir) {
	const rawLines = content.split("\n").filter((line) => line.trim());
	if (rawLines.length === 0) return [];
	const relPath = toPosixRelative(dirname(sessionsDir), filePath);
	const turns = [];
	let sessionDate = null;
	for (const rawLine of rawLines) {
		let record;
		try {
			record = JSON.parse(rawLine);
		} catch {
			continue;
		}
		// First timestamp in the file dates the whole session (fallback).
		if (!sessionDate && record.timestamp) {
			const dateMatch = DATE_ISO_RE.exec(record.timestamp);
			if (dateMatch) sessionDate = dateMatch[0];
		}
		if (record.type !== "message" || !record.message) continue;
		const role = record.message.role;
		if (role !== "user" && role !== "assistant") continue;
		const text = extractTextFromContent(record.message.content).trim();
		if (!text) continue;
		turns.push({
			role,
			text,
			timestamp: record.timestamp ?? null
		});
	}
	return turns.length === 0 ? [] : groupTurnsIntoExchanges(turns, relPath, sessionDate);
}
10351
/**
 * Flatten a message `content` field to plain text.
 * Accepts a string (returned as-is) or an array of content blocks, from
 * which only `{type: "text"}` blocks are kept, joined by blank lines.
 * Anything else yields "".
 *
 * Fix: a null/non-object entry inside the content array previously threw
 * (`block.type` on null); such entries are now skipped.
 *
 * @param {unknown} content - Raw message content from a JSONL record.
 * @returns {string} Concatenated text, or "" when none.
 */
function extractTextFromContent(content) {
	if (!content) return "";
	if (typeof content === "string") return content;
	if (!Array.isArray(content)) return "";
	return content
		.filter((block) => block != null && typeof block === "object" && block.type === "text" && block.text)
		.map((block) => block.text)
		.join("\n\n");
}
10357
/**
 * Group parsed turns into exchanges and produce one ImportSegment per exchange.
 *
 * An exchange starts at a user turn and collects all following assistant
 * turns until the next user turn. Consecutive user turns without an
 * assistant reply merge into a single exchange with the next assistant
 * response; an exchange is only emitted once it has BOTH sides.
 *
 * Each exchange becomes one segment formatted as "User: ...\n\nAssistant: ..."
 * — preserving the natural conversational boundary for LLM fact extraction.
 */
function groupTurnsIntoExchanges(turns, relPath, sessionDate) {
	const exchanges = [];
	let userParts = [];
	let assistantParts = [];
	let pendingTimestamp = null;
	const flush = () => {
		if (userParts.length > 0 && assistantParts.length > 0) {
			exchanges.push({
				userText: userParts.join("\n\n"),
				assistantText: assistantParts.join("\n\n"),
				timestamp: pendingTimestamp
			});
		}
		userParts = [];
		assistantParts = [];
		pendingTimestamp = null;
	};
	for (const turn of turns) {
		if (turn.role === "user") {
			// A user turn arriving after assistant output closes the exchange.
			if (assistantParts.length > 0) flush();
			userParts.push(turn.text);
			if (!pendingTimestamp) pendingTimestamp = turn.timestamp;
		} else {
			assistantParts.push(turn.text);
		}
	}
	flush();
	return exchanges.map((exchange, index) => {
		const content = `User: ${exchange.userText}\n\nAssistant: ${exchange.assistantText}`;
		return {
			id: index,
			source_file: relPath,
			heading: null,
			heading_level: null,
			date: extractDateFromTimestamp(exchange.timestamp) ?? sessionDate,
			evergreen: false,
			session: true,
			content,
			char_count: content.length
		};
	});
}
10403
/** Pull the leading YYYY-MM-DD date out of an ISO timestamp, or null. */
function extractDateFromTimestamp(timestamp) {
	if (!timestamp) return null;
	return DATE_ISO_RE.exec(timestamp)?.[0] ?? null;
}
10022
10408
  function isEvergreenFile(filePath, workspaceDir) {
10023
10409
  const rel = relative(workspaceDir, filePath);
10024
10410
  return basename(filePath).toLowerCase() === "memory.md" && dirname(rel) === ".";
@@ -10031,8 +10417,11 @@ function extractDateFromFilename(filename) {
10031
10417
  * Preprocess memory-core files: discover, segment, extract metadata.
10032
10418
  * Returns segments ready for LLM enrichment and storage.
10033
10419
  * Continues processing on per-file errors; collects errors in result.
10420
+ *
10421
+ * @param sessionsDir - Optional path to OpenClaw sessions directory
10422
+ * (e.g. ~/.openclaw/agents/<agentId>/sessions/) for JSONL import.
10034
10423
  */
10035
- function prepareImport(workspaceDir, extraPaths) {
10424
+ function prepareImport(workspaceDir, extraPaths, sessionsDir) {
10036
10425
  const files = discoverMemoryFiles(workspaceDir, extraPaths);
10037
10426
  const allSegments = [];
10038
10427
  const fileInfos = [];
@@ -10056,6 +10445,27 @@ function prepareImport(workspaceDir, extraPaths) {
10056
10445
  error: err instanceof Error ? err.message : String(err)
10057
10446
  });
10058
10447
  }
10448
+ if (sessionsDir) {
10449
+ const sessionFiles = discoverSessionFiles(sessionsDir);
10450
+ for (const filePath of sessionFiles) try {
10451
+ const renumbered = parseSessionJsonl(readFileSync(filePath, "utf8"), filePath, sessionsDir).map((seg) => ({
10452
+ ...seg,
10453
+ id: globalId++
10454
+ }));
10455
+ allSegments.push(...renumbered);
10456
+ fileInfos.push({
10457
+ path: toPosixRelative(dirname(sessionsDir), filePath),
10458
+ segmentCount: renumbered.length,
10459
+ evergreen: false,
10460
+ date: renumbered[0]?.date ?? null
10461
+ });
10462
+ } catch (err) {
10463
+ errors.push({
10464
+ path: toPosixRelative(dirname(sessionsDir), filePath),
10465
+ error: err instanceof Error ? err.message : String(err)
10466
+ });
10467
+ }
10468
+ }
10059
10469
  return {
10060
10470
  segments: allSegments,
10061
10471
  files: fileInfos,
@@ -10099,14 +10509,14 @@ function calculateImportStrength(segmentDate) {
10099
10509
  * Run the memory-core migration. Idempotent — checks db state before proceeding.
10100
10510
  */
10101
10511
  async function runMigration(deps) {
10102
- const { workspaceDir, store, dbState, enrich, logger, extraPaths } = deps;
10512
+ const { workspaceDir, store, dbState, enrich, logger, extraPaths, sessionsDir } = deps;
10103
10513
  const completedAt = dbState.get(STATE_KEY_COMPLETED);
10104
10514
  if (completedAt) {
10105
10515
  logger.info(`Memory migration already completed at ${completedAt}`);
10106
10516
  return { status: "skipped" };
10107
10517
  }
10108
10518
  logger.info("Scanning for memory-core files...");
10109
- const result = prepareImport(workspaceDir, extraPaths);
10519
+ const result = prepareImport(workspaceDir, extraPaths, sessionsDir);
10110
10520
  if (result.errors.length > 0) for (const err of result.errors) logger.warn(`Could not read ${err.path}: ${err.error}`);
10111
10521
  if (result.totalSegments === 0) {
10112
10522
  logger.info("No memory-core files found. Will re-check on next startup.");
@@ -10117,10 +10527,12 @@ async function runMigration(deps) {
10117
10527
  };
10118
10528
  }
10119
10529
  logger.info(`Found ${result.files.length} files, ${result.totalSegments} segments. Starting migration...`);
10530
+ const mdSegments = result.segments.filter((s) => !s.session);
10531
+ const sessionSegments = result.segments.filter((s) => s.session);
10120
10532
  let importedCount = 0;
10121
10533
  const importErrors = [];
10122
- for (let i = 0; i < result.segments.length; i += BATCH_SIZE) {
10123
- const batch = result.segments.slice(i, i + BATCH_SIZE);
10534
+ for (let i = 0; i < mdSegments.length; i += BATCH_SIZE) {
10535
+ const batch = mdSegments.slice(i, i + BATCH_SIZE);
10124
10536
  try {
10125
10537
  const stored = await storeBatch(batch, await enrichBatch(batch, enrich, logger), store, logger, deps.updateStrength);
10126
10538
  importedCount += stored;
@@ -10138,6 +10550,11 @@ async function runMigration(deps) {
10138
10550
  }
10139
10551
  }
10140
10552
  }
10553
+ if (sessionSegments.length > 0) {
10554
+ const extracted = await extractSessionSegments(sessionSegments, store, deps.llmCall, logger, deps.updateStrength);
10555
+ importedCount += extracted.stored;
10556
+ if (extracted.errors.length > 0) importErrors.push(...extracted.errors);
10557
+ }
10141
10558
  if (importErrors.length === 0) dbState.set(STATE_KEY_COMPLETED, (/* @__PURE__ */ new Date()).toISOString());
10142
10559
  dbState.set(STATE_KEY_SOURCE_COUNT, String(result.files.length));
10143
10560
  dbState.set(STATE_KEY_SEGMENT_COUNT, String(importedCount));
@@ -10209,6 +10626,47 @@ function applyImportDecay(id, date, updateStrength) {
10209
10626
  const strength = calculateImportStrength(date);
10210
10627
  if (strength < 1) updateStrength(id, strength);
10211
10628
  }
10629
+ /**
10630
+ * Process JSONL session segments by extracting facts via LLM.
10631
+ * Uses the same extraction pipeline as autoCapture (buildExtractionPrompt/parseExtractionResponse).
10632
+ * Each segment's conversation exchange is sent to the LLM, which distills durable facts.
10633
+ * Requires llmCall — session import is skipped without LLM.
10634
+ */
10635
+ async function extractSessionSegments(segments, store, llmCall, logger, updateStrength) {
10636
+ if (!llmCall) {
10637
+ logger.warn("Session import skipped: LLM extraction required but llmCall not available");
10638
+ return {
10639
+ stored: 0,
10640
+ errors: []
10641
+ };
10642
+ }
10643
+ let stored = 0;
10644
+ const errors = [];
10645
+ for (const seg of segments) try {
10646
+ const facts = parseExtractionResponse(await llmCall(buildExtractionPrompt(seg.content)));
10647
+ if (facts.length === 0) continue;
10648
+ for (const fact of facts) try {
10649
+ applyImportDecay((await store({
10650
+ content: fact.content,
10651
+ type: fact.type,
10652
+ source: "import",
10653
+ temporal_state: seg.date ? "past" : "none",
10654
+ temporal_anchor: seg.date
10655
+ })).id, seg.date, updateStrength);
10656
+ stored++;
10657
+ } catch (err) {
10658
+ logger.warn(`Failed to store extracted fact from segment ${seg.id}: ${err instanceof Error ? err.message : String(err)}`);
10659
+ }
10660
+ } catch (err) {
10661
+ const msg = `Session segment ${seg.id} extraction failed: ${err instanceof Error ? err.message : String(err)}`;
10662
+ logger.error(msg);
10663
+ errors.push(msg);
10664
+ }
10665
+ return {
10666
+ stored,
10667
+ errors
10668
+ };
10669
+ }
10212
10670
  function inferType(seg) {
10213
10671
  if (seg.evergreen) return "fact";
10214
10672
  if (seg.date) return "observation";
@@ -10353,11 +10811,11 @@ async function cleanupWorkspaceFiles(deps) {
10353
10811
  hadFailure = true;
10354
10812
  continue;
10355
10813
  }
10356
- const backupPath = `${file.path}.pre-associative-memory`;
10814
+ const backupPath = `${file.path}.pre-formative-memory`;
10357
10815
  if (!existsSync(backupPath)) copyFileSync(file.path, backupPath);
10358
10816
  writeFileSync(file.path, cleaned, "utf-8");
10359
10817
  modified.push(file.name);
10360
- logger.info(`Cleaned ${file.name} (backup: ${file.name}.pre-associative-memory)`);
10818
+ logger.info(`Cleaned ${file.name} (backup: ${file.name}.pre-formative-memory)`);
10361
10819
  } catch (err) {
10362
10820
  logger.warn(`Failed to clean ${file.name}: ${err instanceof Error ? err.message : String(err)}`);
10363
10821
  hadFailure = true;
@@ -10396,53 +10854,6 @@ Here is the current content of ${fileName}:
10396
10854
  ${content}`;
10397
10855
  }
10398
10856
  //#endregion
10399
- //#region src/logger.ts
10400
- const LEVEL_ORDER = {
10401
- debug: 0,
10402
- info: 1,
10403
- warn: 2,
10404
- error: 3
10405
- };
10406
- /**
10407
- * Create a logger that forwards to the host logger at or above the
10408
- * configured minimum level.
10409
- */
10410
- function createLogger(opts) {
10411
- const envDebug = process.env.FORMATIVE_MEMORY_DEBUG === "1";
10412
- const minOrder = LEVEL_ORDER[opts.verbose || envDebug ? "debug" : "info"];
10413
- const host = opts.host;
10414
- function stringifyArg(a) {
10415
- if (a instanceof Error) return a.stack || a.message;
10416
- if (typeof a === "object" && a !== null) try {
10417
- return JSON.stringify(a);
10418
- } catch {
10419
- return "[Unserializable]";
10420
- }
10421
- return String(a);
10422
- }
10423
- function emit(level, msg, args) {
10424
- if (LEVEL_ORDER[level] < minOrder) return;
10425
- const prefix = `[formative-memory] [${level}]`;
10426
- if (host) {
10427
- const line = `${prefix} ${msg}${args.length > 0 ? " " + args.map(stringifyArg).join(" ") : ""}`;
10428
- if (level === "debug") (host.debug ?? host.info)(line);
10429
- else host[level](line);
10430
- } else {
10431
- const line = `${prefix} ${msg}`;
10432
- if (level === "error") console.error(line, ...args);
10433
- else if (level === "warn") console.warn(line, ...args);
10434
- else console.log(line, ...args);
10435
- }
10436
- }
10437
- return {
10438
- debug: (msg, ...args) => emit("debug", msg, args),
10439
- info: (msg, ...args) => emit("info", msg, args),
10440
- warn: (msg, ...args) => emit("warn", msg, args),
10441
- error: (msg, ...args) => emit("error", msg, args),
10442
- isDebugEnabled: () => minOrder === LEVEL_ORDER.debug
10443
- };
10444
- }
10445
- //#endregion
10446
10857
  //#region src/turn-memory-ledger.ts
10447
10858
  /**
10448
10859
  * Turn Memory Ledger (Phase 3.4)
@@ -10590,6 +11001,7 @@ function resolveMemoryDir(config, workspaceDir, pathResolver) {
10590
11001
  if (isAbsolute(dbPath)) return dbPath;
10591
11002
  return join(workspaceDir, dbPath);
10592
11003
  }
11004
+ const EMBEDDING_REQUIRED_HINT = "Embedding provider required but not available.\nSet one of: OPENAI_API_KEY, GEMINI_API_KEY, VOYAGE_API_KEY, or MISTRAL_API_KEY.\nTo run without embeddings (BM25-only), set \"requireEmbedding\": false in plugin config.";
10593
11005
  /**
10594
11006
  * Try to create an embedding provider directly via SDK factory functions.
10595
11007
  * Used when the memory-core plugin is disabled and the global registry is empty.
@@ -10676,11 +11088,13 @@ function createWorkspace(config, workspaceDir, openclawConfig, agentDir, logger,
10676
11088
  let providerPromise = null;
10677
11089
  const getProvider = () => {
10678
11090
  if (!providerPromise) providerPromise = resolveEmbeddingProvider(config.embedding.provider, openclawConfig, agentDir, config.embedding.model).catch((err) => {
11091
+ if (config.requireEmbedding) throw new Error(`${EMBEDDING_REQUIRED_HINT}\nDetails: ${err instanceof Error ? err.message : String(err)}`);
10679
11092
  providerPromise = null;
10680
11093
  throw err;
10681
11094
  });
10682
11095
  return providerPromise;
10683
11096
  };
11097
+ if (config.requireEmbedding) getProvider().catch(() => {});
10684
11098
  const ws = {
10685
11099
  manager: new MemoryManager(memoryDir, { async embed(text) {
10686
11100
  const provider = await getProvider();
@@ -10709,6 +11123,12 @@ function createWorkspace(config, workspaceDir, openclawConfig, agentDir, logger,
10709
11123
  } catch (err) {
10710
11124
  logger?.warn(`Workspace cleanup failed: ${err instanceof Error ? err.message : String(err)}`);
10711
11125
  }
11126
+ if (config.requireEmbedding) try {
11127
+ await getProvider();
11128
+ } catch (err) {
11129
+ logger?.error(`Migration aborted: embedding required but unavailable. ${err instanceof Error ? err.message : String(err)}`);
11130
+ return;
11131
+ }
10712
11132
  try {
10713
11133
  const userLanguage = detectUserLanguage(workspaceDir);
10714
11134
  const enrichFn = initDeps.llmConfig ? createDirectLlmEnrichFn(initDeps.llmConfig, userLanguage) : async (segments) => segments.map((seg) => ({
@@ -10717,6 +11137,7 @@ function createWorkspace(config, workspaceDir, openclawConfig, agentDir, logger,
10717
11137
  temporal_state: seg.date ? "past" : "none",
10718
11138
  temporal_anchor: seg.date
10719
11139
  }));
11140
+ const sessionsDir = agentDir ? join(agentDir, "sessions") : void 0;
10720
11141
  const migrationResult = await runMigration({
10721
11142
  workspaceDir,
10722
11143
  stateDir: initDeps.stateDir ?? workspaceDir,
@@ -10728,7 +11149,9 @@ function createWorkspace(config, workspaceDir, openclawConfig, agentDir, logger,
10728
11149
  info: () => {},
10729
11150
  warn: () => {},
10730
11151
  error: () => {}
10731
- }
11152
+ },
11153
+ sessionsDir,
11154
+ llmCall: initDeps.llmConfig ? (prompt) => callLlm(prompt, initDeps.llmConfig) : void 0
10732
11155
  });
10733
11156
  if (migrationResult.status === "completed") logger?.info(`Migration: imported ${migrationResult.segmentsImported} memories from ${migrationResult.filesFound} files`);
10734
11157
  } catch (err) {
@@ -10872,8 +11295,8 @@ function createMemoryTools(getManager, getLogPath, ledger) {
10872
11295
  ];
10873
11296
  }
10874
11297
  const associativeMemoryPlugin = {
10875
- id: "memory-associative",
10876
- name: "Memory (Associative)",
11298
+ id: "formative-memory",
11299
+ name: "Formative Memory",
10877
11300
  description: "Biologically-inspired associative memory with consolidation and temporal awareness",
10878
11301
  kind: "memory",
10879
11302
  configSchema: memoryConfigSchema,
@@ -10946,7 +11369,9 @@ const associativeMemoryPlugin = {
10946
11369
  logger: log,
10947
11370
  ledger,
10948
11371
  getDb: () => getWorkspace(".").manager.getDatabase(),
10949
- getLogPath: () => join(getWorkspace(".").memoryDir, "retrieval.log")
11372
+ getLogPath: () => join(getWorkspace(".").memoryDir, "retrieval.log"),
11373
+ autoCapture: config.autoCapture,
11374
+ getLlmConfig: () => resolveLlmConfig(runtimePaths.stateDir, runtimePaths.agentDir, log)
10950
11375
  }));
10951
11376
  api.registerCommand({
10952
11377
  name: "memory-sleep",
@@ -10965,9 +11390,9 @@ const associativeMemoryPlugin = {
10965
11390
  log.debug("consolidation: starting trigger=command");
10966
11391
  const result = await runConsolidation({
10967
11392
  db: ws.manager.getDatabase(),
10968
- mergeContentProducer
11393
+ mergeContentProducer,
11394
+ logger: log
10969
11395
  });
10970
- log.debug(`consolidation: completed trigger=command durationMs=${result.durationMs}`);
10971
11396
  const s = result.summary;
10972
11397
  const catchUpInfo = s.catchUpDecayed > 0 ? `Catch-up decayed: ${s.catchUpDecayed}, ` : "";
10973
11398
  return { text: `Memory consolidation complete (${result.durationMs}ms).\n` + catchUpInfo + `Reinforced: ${s.reinforced}, Decayed: ${s.decayed}, Pruned: ${s.pruned} memories + ${s.prunedAssociations} associations, Merged: ${s.merged}, Transitioned: ${s.transitioned}, Exposure GC: ${s.exposuresGc}` };
@@ -10992,6 +11417,7 @@ const associativeMemoryPlugin = {
10992
11417
  temporal_anchor: seg.date
10993
11418
  }));
10994
11419
  db.setState("migration_completed_at", "");
11420
+ const sessionsDir = runtimePaths.agentDir ? join(runtimePaths.agentDir, "sessions") : void 0;
10995
11421
  const result = await runMigration({
10996
11422
  workspaceDir: ".",
10997
11423
  stateDir: runtimePaths.stateDir ?? ".",
@@ -11002,7 +11428,9 @@ const associativeMemoryPlugin = {
11002
11428
  set: (key, value) => db.setState(key, value)
11003
11429
  },
11004
11430
  enrich: enrichFn,
11005
- logger: log
11431
+ logger: log,
11432
+ sessionsDir,
11433
+ llmCall: llmConfig ? (prompt) => callLlm(prompt, llmConfig) : void 0
11006
11434
  });
11007
11435
  if (result.status === "completed") return { text: `Migration complete: imported ${result.segmentsImported} memories from ${result.filesFound} files` + (result.errors?.length ? ` (${result.errors.length} errors)` : "") };
11008
11436
  return { text: `Migration: ${result.status}` };
@@ -11034,7 +11462,7 @@ const associativeMemoryPlugin = {
11034
11462
  /** Cron job name (used to find/update managed jobs). */
11035
11463
  const CONSOLIDATION_CRON_NAME = "Associative Memory Consolidation";
11036
11464
  /** Tag in description to identify managed jobs. */
11037
- const CONSOLIDATION_CRON_TAG = "[managed-by=memory-associative.consolidation]";
11465
+ const CONSOLIDATION_CRON_TAG = "[managed-by=formative-memory.consolidation]";
11038
11466
  /** Default cron expression: daily at 03:00. */
11039
11467
  const DEFAULT_CONSOLIDATION_CRON = "0 3 * * *";
11040
11468
  /** System event token for cron-triggered temporal transitions. */
@@ -11042,7 +11470,7 @@ const associativeMemoryPlugin = {
11042
11470
  /** Cron job name for temporal transitions. */
11043
11471
  const TEMPORAL_CRON_NAME = "Associative Memory Temporal Transitions";
11044
11472
  /** Tag to identify managed temporal jobs. */
11045
- const TEMPORAL_CRON_TAG = "[managed-by=memory-associative.temporal]";
11473
+ const TEMPORAL_CRON_TAG = "[managed-by=formative-memory.temporal]";
11046
11474
  /** Cron expression: daily at 15:00 (03:00 is covered by full consolidation). */
11047
11475
  const DEFAULT_TEMPORAL_CRON = "0 15 * * *";
11048
11476
  api.registerHook("gateway:startup", async (event) => {
@@ -11112,13 +11540,13 @@ const associativeMemoryPlugin = {
11112
11540
  } catch (err) {
11113
11541
  log.warn(`Failed to register consolidation cron: ${err instanceof Error ? err.message : String(err)}`);
11114
11542
  }
11115
- }, { name: "memory-associative-consolidation-cron" });
11543
+ }, { name: "formative-memory-consolidation-cron" });
11116
11544
  api.on("before_agent_reply", async (event, ctx) => {
11117
11545
  const body = event?.cleanedBody;
11118
11546
  if (!body) return;
11119
11547
  if (body.includes(TEMPORAL_CRON_TRIGGER) && !body.includes(CONSOLIDATION_CRON_TRIGGER)) try {
11120
11548
  const db = getWorkspace(ctx?.workspaceDir ?? ".").manager.getDatabase();
11121
- const count = db.transaction(() => applyTemporalTransitions(db));
11549
+ const count = db.transaction(() => applyTemporalTransitions(db, log));
11122
11550
  if (count > 0) log.info(`Scheduled temporal transitions: ${count} transitioned`);
11123
11551
  return {
11124
11552
  handled: true,
@@ -11144,20 +11572,14 @@ const associativeMemoryPlugin = {
11144
11572
  };
11145
11573
  } : void 0;
11146
11574
  const db = ws.manager.getDatabase();
11147
- const temporalCount = db.transaction(() => applyTemporalTransitions(db));
11148
11575
  log.debug("consolidation: starting trigger=cron");
11149
- const result = await runConsolidation({
11150
- db,
11151
- mergeContentProducer
11152
- });
11153
- log.debug(`consolidation: completed trigger=cron durationMs=${result.durationMs}`);
11154
- const s = result.summary;
11155
- const catchUpInfo = s.catchUpDecayed > 0 ? `Catch-up decayed: ${s.catchUpDecayed}, ` : "";
11156
- const temporalInfo = temporalCount > 0 ? `, Temporal transitions (extra): ${temporalCount}` : "";
11157
- log.info(`Scheduled consolidation complete (${result.durationMs}ms): ${catchUpInfo}Reinforced: ${s.reinforced}, Decayed: ${s.decayed}, Pruned: ${s.pruned}+${s.prunedAssociations}, Merged: ${s.merged}${temporalInfo}`);
11158
11576
  return {
11159
11577
  handled: true,
11160
- reply: { text: `Memory consolidation complete (${result.durationMs}ms).` },
11578
+ reply: { text: `Memory consolidation complete (${(await runConsolidation({
11579
+ db,
11580
+ mergeContentProducer,
11581
+ logger: log
11582
+ })).durationMs}ms).` },
11161
11583
  reason: "associative-memory-consolidation"
11162
11584
  };
11163
11585
  } catch (err) {
@@ -11170,7 +11592,7 @@ const associativeMemoryPlugin = {
11170
11592
  }
11171
11593
  });
11172
11594
  api.registerService({
11173
- id: "memory-associative-startup",
11595
+ id: "formative-memory-startup",
11174
11596
  async start(ctx) {
11175
11597
  runtimePaths.stateDir = ctx.stateDir;
11176
11598
  }