clementine-agent 1.6.2 → 1.7.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -18,33 +18,79 @@ import { existsSync, readFileSync, readdirSync, writeFileSync, mkdirSync } from
18
18
  import path from 'node:path';
19
19
  import matter from 'gray-matter';
20
20
  import yaml from 'js-yaml';
21
- import { CRON_FILE, WORKFLOWS_DIR } from '../../config.js';
21
+ import { CRON_FILE, WORKFLOWS_DIR, AGENTS_DIR } from '../../config.js';
22
22
  import { snapshotWorkflow } from './snapshots.js';
23
23
  // ── ID scheme ───────────────────────────────────────────────────────
24
+ //
25
+ // Global: `cron:<key>` / `workflow:<key>`
26
+ // Agent-scoped: `cron@<slug>:<key>` / `workflow@<slug>:<key>`
27
+ // `@` is reserved (slugs and workflow keys are kebab-case `[a-z0-9-]+`),
28
+ // so it cleanly distinguishes scoped ids from global ones without
29
+ // breaking any existing global id parser.
24
30
// Builder-id prefixes. Global ids are `<origin>:<key>`; agent-scoped ids are
// `<origin>@<slug>:<key>`. `@` never occurs in kebab-case slugs/keys, so the
// two families cannot collide.
const CRON_ID_PREFIX = 'cron:';
const WORKFLOW_ID_PREFIX = 'workflow:';
const CRON_AGENT_PREFIX = 'cron@';
const WORKFLOW_AGENT_PREFIX = 'workflow@';
/**
 * Builder id for a cron entry. With `agentSlug` the id is agent-scoped
 * (`cron@<slug>:<name>`); otherwise it is global (`cron:<name>`).
 */
export function cronId(name, agentSlug) {
    if (agentSlug)
        return CRON_AGENT_PREFIX + agentSlug + ':' + name;
    return CRON_ID_PREFIX + name;
}
/**
 * Builder id for a workflow file. A trailing `.md` is stripped from
 * `filename` first. With `agentSlug` the id is `workflow@<slug>:<key>`;
 * otherwise it is global (`workflow:<key>`).
 */
export function workflowId(filename, agentSlug) {
    const base = filename.endsWith('.md') ? filename.slice(0, -3) : filename;
    if (agentSlug)
        return WORKFLOW_AGENT_PREFIX + agentSlug + ':' + base;
    return WORKFLOW_ID_PREFIX + base;
}
/**
 * Parse the `<slug>:<key>` tail of an agent-scoped id. The slug/key
 * separator is the FIRST ':', so keys may themselves contain ':'.
 * Returns null when either part is empty.
 */
function parseAgentScopedId(rest, origin) {
    const colon = rest.indexOf(':');
    if (colon < 1 || colon === rest.length - 1)
        return null;
    return { origin, scope: 'agent', agentSlug: rest.slice(0, colon), key: rest.slice(colon + 1) };
}
/**
 * Parse a builder id into `{ origin, scope, key }` (plus `agentSlug` for
 * agent-scoped ids). Returns null for malformed or unrecognized ids.
 * Agent-scoped prefixes are checked first; they cannot match the global
 * prefixes because `@` differs from `:` at the same position.
 */
export function parseBuilderId(id) {
    if (id.startsWith(CRON_AGENT_PREFIX))
        return parseAgentScopedId(id.slice(CRON_AGENT_PREFIX.length), 'cron');
    if (id.startsWith(WORKFLOW_AGENT_PREFIX))
        return parseAgentScopedId(id.slice(WORKFLOW_AGENT_PREFIX.length), 'workflow');
    if (id.startsWith(CRON_ID_PREFIX))
        return { origin: 'cron', scope: 'global', key: id.slice(CRON_ID_PREFIX.length) };
    if (id.startsWith(WORKFLOW_ID_PREFIX))
        return { origin: 'workflow', scope: 'global', key: id.slice(WORKFLOW_ID_PREFIX.length) };
    return null;
}
66
// ── Agent path helpers ──────────────────────────────────────────────
/** On-disk CRON.md path inside one agent's directory under AGENTS_DIR. */
function agentCronFile(slug) {
    return path.join(AGENTS_DIR, slug, 'CRON.md');
}
/** On-disk workflows directory inside one agent's directory. */
function agentWorkflowsDir(slug) {
    return path.join(AGENTS_DIR, slug, 'workflows');
}
/**
 * Enumerate agent slugs, i.e. the subdirectory names of AGENTS_DIR.
 * Best-effort: a missing or unreadable AGENTS_DIR yields [].
 */
function listAgentSlugs() {
    if (!existsSync(AGENTS_DIR))
        return [];
    try {
        const entries = readdirSync(AGENTS_DIR, { withFileTypes: true });
        return entries
            .filter(entry => entry.isDirectory())
            .map(entry => entry.name);
    }
    catch {
        return [];
    }
}
40
85
  // ── List ────────────────────────────────────────────────────────────
41
86
  export function listAllForBuilder() {
42
87
  const out = [];
43
- // Crons from CRON.md
44
- for (const job of readCronJobs()) {
88
+ // Global crons from CRON.md
89
+ for (const job of readCronJobsFromFile(CRON_FILE)) {
45
90
  out.push({
46
91
  id: cronId(job.name),
47
92
  origin: 'cron',
93
+ scope: 'global',
48
94
  name: job.name,
49
95
  description: '',
50
96
  enabled: job.enabled,
@@ -54,7 +100,7 @@ export function listAllForBuilder() {
54
100
  agentSlug: job.agentSlug,
55
101
  });
56
102
  }
57
- // Workflows from workflows dir
103
+ // Global workflows from workflows dir
58
104
  if (existsSync(WORKFLOWS_DIR)) {
59
105
  for (const file of readdirSync(WORKFLOWS_DIR).filter(f => f.endsWith('.md'))) {
60
106
  try {
@@ -62,6 +108,7 @@ export function listAllForBuilder() {
62
108
  out.push({
63
109
  id: workflowId(file),
64
110
  origin: 'workflow',
111
+ scope: 'global',
65
112
  name: wf.name,
66
113
  description: wf.description,
67
114
  enabled: wf.enabled,
@@ -76,6 +123,47 @@ export function listAllForBuilder() {
76
123
  }
77
124
  }
78
125
  }
126
+ // Agent-scoped crons + workflows from <AGENTS_DIR>/<slug>/...
127
+ for (const slug of listAgentSlugs()) {
128
+ const cronFile = agentCronFile(slug);
129
+ for (const job of readCronJobsFromFile(cronFile)) {
130
+ out.push({
131
+ id: cronId(job.name, slug),
132
+ origin: 'cron',
133
+ scope: 'agent',
134
+ name: job.name,
135
+ description: '',
136
+ enabled: job.enabled,
137
+ schedule: job.schedule,
138
+ stepCount: 1,
139
+ sourceFile: cronFile,
140
+ agentSlug: slug,
141
+ });
142
+ }
143
+ const wfDir = agentWorkflowsDir(slug);
144
+ if (existsSync(wfDir)) {
145
+ for (const file of readdirSync(wfDir).filter(f => f.endsWith('.md'))) {
146
+ try {
147
+ const wf = parseWorkflowFile(path.join(wfDir, file));
148
+ out.push({
149
+ id: workflowId(file, slug),
150
+ origin: 'workflow',
151
+ scope: 'agent',
152
+ name: wf.name,
153
+ description: wf.description,
154
+ enabled: wf.enabled,
155
+ schedule: wf.trigger.schedule,
156
+ stepCount: wf.steps.length,
157
+ sourceFile: wf.sourceFile,
158
+ agentSlug: slug,
159
+ });
160
+ }
161
+ catch {
162
+ // Skip unparseable workflow files
163
+ }
164
+ }
165
+ }
166
+ }
79
167
  return out.sort((a, b) => a.name.localeCompare(b.name));
80
168
  }
81
169
  // ── Read ────────────────────────────────────────────────────────────
@@ -84,25 +172,31 @@ export function readWorkflow(id) {
84
172
  if (!parsed)
85
173
  return null;
86
174
  if (parsed.origin === 'cron') {
87
- const job = readCronJobs().find(j => j.name === parsed.key);
175
+ const cronFile = parsed.scope === 'agent' ? agentCronFile(parsed.agentSlug) : CRON_FILE;
176
+ const slug = parsed.scope === 'agent' ? parsed.agentSlug : undefined;
177
+ const job = readCronJobsFromFile(cronFile).find(j => j.name === parsed.key);
88
178
  if (!job)
89
179
  return null;
90
- return cronJobToWorkflow(job);
180
+ return cronJobToWorkflow(slug ? { ...job, agentSlug: slug } : job, { sourceFile: cronFile });
91
181
  }
92
- const file = path.join(WORKFLOWS_DIR, parsed.key + '.md');
182
+ const wfDir = parsed.scope === 'agent' ? agentWorkflowsDir(parsed.agentSlug) : WORKFLOWS_DIR;
183
+ const file = path.join(wfDir, parsed.key + '.md');
93
184
  if (!existsSync(file))
94
185
  return null;
95
186
  try {
96
- return parseWorkflowFile(file);
187
+ const wf = parseWorkflowFile(file);
188
+ if (parsed.scope === 'agent' && !wf.agentSlug)
189
+ wf.agentSlug = parsed.agentSlug;
190
+ return wf;
97
191
  }
98
192
  catch {
99
193
  return null;
100
194
  }
101
195
  }
102
- function readCronJobs() {
103
- if (!existsSync(CRON_FILE))
196
+ function readCronJobsFromFile(cronFile) {
197
+ if (!existsSync(cronFile))
104
198
  return [];
105
- const raw = readFileSync(CRON_FILE, 'utf-8');
199
+ const raw = readFileSync(cronFile, 'utf-8');
106
200
  let parsed;
107
201
  try {
108
202
  parsed = matter(raw);
@@ -219,7 +313,7 @@ function parseWorkflowFile(filePath) {
219
313
  };
220
314
  }
221
315
  // ── Cron ⇄ Workflow ─────────────────────────────────────────────────
222
- export function cronJobToWorkflow(job) {
316
+ export function cronJobToWorkflow(job, opts = {}) {
223
317
  const step = {
224
318
  id: 'main',
225
319
  prompt: job.prompt,
@@ -237,7 +331,7 @@ export function cronJobToWorkflow(job) {
237
331
  trigger: { schedule: job.schedule, manual: false },
238
332
  inputs: {},
239
333
  steps: [step],
240
- sourceFile: CRON_FILE,
334
+ sourceFile: opts.sourceFile ?? CRON_FILE,
241
335
  agentSlug: job.agentSlug,
242
336
  };
243
337
  }
@@ -265,23 +359,41 @@ export function saveWorkflow(id, wf) {
265
359
  if (!isCronShape(wf)) {
266
360
  return { ok: false, error: 'Cron entry must remain a single prompt step with a cron schedule' };
267
361
  }
268
- return saveCronEntry(parsed.key, wf);
362
+ const cronFile = parsed.scope === 'agent' ? agentCronFile(parsed.agentSlug) : CRON_FILE;
363
+ const slug = parsed.scope === 'agent' ? parsed.agentSlug : undefined;
364
+ return saveCronEntry(parsed.key, wf, { cronFile, agentSlug: slug });
269
365
  }
270
- return saveWorkflowFile(parsed.key, wf);
366
+ const wfDir = parsed.scope === 'agent' ? agentWorkflowsDir(parsed.agentSlug) : WORKFLOWS_DIR;
367
+ const slug = parsed.scope === 'agent' ? parsed.agentSlug : undefined;
368
+ return saveWorkflowFile(parsed.key, wf, { dir: wfDir, agentSlug: slug });
271
369
  }
272
- /** Resolve the on-disk file path for a builder id (cron entries all share CRON_FILE). */
370
/**
 * Resolve the on-disk file path for a builder id. Agent-scoped ids live
 * under the agent's directory; global ids use CRON_FILE / WORKFLOWS_DIR.
 * Pass a pre-parsed id via `parsedHint` to skip re-parsing. Returns null
 * for unparseable ids.
 */
export function sourceFileForId(id, parsedHint) {
    const parsed = parsedHint ?? parseBuilderId(id);
    if (parsed == null)
        return null;
    const scopedToAgent = parsed.scope === 'agent';
    if (parsed.origin === 'cron')
        return scopedToAgent ? agentCronFile(parsed.agentSlug) : CRON_FILE;
    const baseDir = scopedToAgent ? agentWorkflowsDir(parsed.agentSlug) : WORKFLOWS_DIR;
    return path.join(baseDir, parsed.key + '.md');
}
281
- function saveCronEntry(originalName, wf) {
282
- if (!existsSync(CRON_FILE))
283
- return { ok: false, error: 'CRON.md does not exist' };
284
- const raw = readFileSync(CRON_FILE, 'utf-8');
381
/**
 * Normalize a UI-entered name for an agent-scoped entity. The builder UI
 * may have stored an `<agentSlug>:` prefix in `wf.name`; the on-disk name
 * is always the bare key — the slug is encoded in the file path, never in
 * the name. Global entities (no `agentSlug`) pass through unchanged.
 */
function stripAgentPrefix(name, agentSlug) {
    if (!agentSlug)
        return name;
    const scoped = agentSlug + ':';
    if (!name.startsWith(scoped))
        return name;
    return name.slice(scoped.length);
}
392
+ function saveCronEntry(originalName, wf, opts) {
393
+ const { cronFile, agentSlug } = opts;
394
+ if (!existsSync(cronFile))
395
+ return { ok: false, error: 'CRON.md does not exist: ' + cronFile };
396
+ const raw = readFileSync(cronFile, 'utf-8');
285
397
  let parsed;
286
398
  try {
287
399
  parsed = matter(raw);
@@ -297,7 +409,7 @@ function saveCronEntry(originalName, wf) {
297
409
  const prev = jobs[idx];
298
410
  const updated = {
299
411
  ...prev,
300
- name: wf.name,
412
+ name: stripAgentPrefix(wf.name, agentSlug),
301
413
  schedule: wf.trigger.schedule,
302
414
  prompt: step.prompt,
303
415
  enabled: wf.enabled,
@@ -309,18 +421,21 @@ function saveCronEntry(originalName, wf) {
309
421
  updated.model = step.model;
310
422
  if (step.workDir != null)
311
423
  updated.work_dir = step.workDir;
312
- if (wf.agentSlug)
424
+ // Agent slug for global crons that are bound to a specific agent (legacy
425
+ // shape). For agent-dir crons the slug lives in the path, not the entry.
426
+ if (!agentSlug && wf.agentSlug)
313
427
  updated.agentSlug = wf.agentSlug;
314
428
  jobs[idx] = updated;
315
429
  parsed.data.jobs = jobs;
316
430
  const out = matter.stringify(parsed.content ?? '', parsed.data);
317
- writeFileSync(CRON_FILE, out, 'utf-8');
431
+ writeFileSync(cronFile, out, 'utf-8');
318
432
  return { ok: true };
319
433
  }
320
- function saveWorkflowFile(key, wf) {
321
- if (!existsSync(WORKFLOWS_DIR))
322
- mkdirSync(WORKFLOWS_DIR, { recursive: true });
323
- const file = path.join(WORKFLOWS_DIR, key + '.md');
434
+ function saveWorkflowFile(key, wf, opts) {
435
+ const { dir, agentSlug } = opts;
436
+ if (!existsSync(dir))
437
+ mkdirSync(dir, { recursive: true });
438
+ const file = path.join(dir, key + '.md');
324
439
  // Preserve body content if the file exists; otherwise empty body.
325
440
  let body = '';
326
441
  if (existsSync(file)) {
@@ -334,12 +449,14 @@ function saveWorkflowFile(key, wf) {
334
449
  }
335
450
  const data = {
336
451
  type: 'workflow',
337
- name: wf.name,
452
+ name: stripAgentPrefix(wf.name, agentSlug),
338
453
  description: wf.description,
339
454
  enabled: wf.enabled,
340
455
  trigger: wf.trigger,
341
456
  };
342
- if (wf.agentSlug)
457
+ // Agent slug for legacy global workflows that target a specific agent.
458
+ // For agent-dir workflows the slug lives in the path, not the frontmatter.
459
+ if (!agentSlug && wf.agentSlug)
343
460
  data.agentSlug = wf.agentSlug;
344
461
  if (Object.keys(wf.inputs).length > 0)
345
462
  data.inputs = wf.inputs;
@@ -20,6 +20,9 @@ export declare class HeartbeatScheduler {
20
20
  private lastAgentSiRuns;
21
21
  private cronScheduler;
22
22
  private runLog;
23
+ private lastDenseBackfillAt;
24
+ private denseBackfillInFlight;
25
+ private lastSalienceDecayDate;
23
26
  /** Wire up the cron scheduler so daily plan suggestions can be applied. */
24
27
  setCronScheduler(cs: CronScheduler): void;
25
28
  private getLastAgentSiRun;
@@ -33,6 +36,23 @@ export declare class HeartbeatScheduler {
33
36
  * Proactive insight check — gather signals, evaluate urgency, send if warranted.
34
37
  * Runs as a lightweight Haiku call, separate from the main heartbeat LLM invocation.
35
38
  */
39
+ /**
40
+ * Self-healing dense embedding backfill. Runs a small batch each tick when:
41
+ * - chat lane has 0 active sessions (don't compete with response latency)
42
+ * - dense model is available
43
+ * - >= 10 minutes since last backfill (cooldown)
44
+ * - not already running
45
+ *
46
+ * Per-pass cap (50 chunks) keeps each tick under ~30s on Apple Silicon CPU.
47
+ * Coverage climbs over hours/days without user action.
48
+ */
49
+ private maybeIdleDenseBackfill;
50
+ /**
51
+ * Daily salience decay. Multiplies salience by 0.95 on chunks unaccessed
52
+ * for >30 days. Date-gated (one pass per calendar day), persisted in
53
+ * HeartbeatState. Pinned chunks exempt; soft-deleted and superseded skipped.
54
+ */
55
+ private maybeRunSalienceDecay;
36
56
  private runInsightCheck;
37
57
  /** Called when user replies to a proactive message — resets cooldown. */
38
58
  recordInsightAcknowledged(): void;
@@ -30,6 +30,9 @@ export class HeartbeatScheduler {
30
30
  lastAgentSiRuns = new Map();
31
31
  cronScheduler = null;
32
32
  runLog = new CronRunLog();
33
+ lastDenseBackfillAt = 0;
34
+ denseBackfillInFlight = false;
35
+ lastSalienceDecayDate = '';
33
36
  /** Wire up the cron scheduler so daily plan suggestions can be applied. */
34
37
  setCronScheduler(cs) { this.cronScheduler = cs; }
35
38
  getLastAgentSiRun(slug) {
@@ -48,6 +51,8 @@ export class HeartbeatScheduler {
48
51
  this.lastSelfImproveDate = this.lastState.lastSelfImproveDate;
49
52
  if (this.lastState.lastConsolidationDate)
50
53
  this.lastConsolidationDate = this.lastState.lastConsolidationDate;
54
+ if (this.lastState.lastSalienceDecayDate)
55
+ this.lastSalienceDecayDate = this.lastState.lastSalienceDecayDate;
51
56
  if (this.lastState.lastAgentSiRuns) {
52
57
  this.lastAgentSiRuns = new Map(Object.entries(this.lastState.lastAgentSiRuns));
53
58
  }
@@ -115,6 +120,18 @@ export class HeartbeatScheduler {
115
120
  logger.warn({ err }, 'Failure sweep failed');
116
121
  });
117
122
  }).catch(err => logger.warn({ err }, 'Failure sweep import failed'));
123
+ // Idle dense-embedding backfill. Retrieval quality silently degrades when
124
+ // chunks are stuck on TF-IDF only — this self-healing loop keeps coverage
125
+ // climbing during quiet periods. Skipped when chat lane is busy (CPU
126
+ // would compete with response latency) or already on cooldown.
127
+ this.maybeIdleDenseBackfill().catch(err => {
128
+ logger.debug({ err }, 'Idle dense backfill failed (non-fatal)');
129
+ });
130
+ // Daily salience decay — fades stale, unaccessed chunks so retrieval
131
+ // doesn't keep boosting facts that aren't earning their context budget.
132
+ // Pinned + soft-deleted + superseded chunks are exempt. One UPDATE per
133
+ // day, gated by a date stamp on HeartbeatState.
134
+ this.maybeRunSalienceDecay();
118
135
  // Claim verification sweep — auto-verify pending claims whose due
119
136
  // times have passed (e.g. "I scheduled X for 8am" → check at 9am).
120
137
  import('./claim-tracker.js').then(async ({ verifyDueClaims, drainLLMFallback }) => {
@@ -713,6 +730,81 @@ export class HeartbeatScheduler {
713
730
  * Proactive insight check — gather signals, evaluate urgency, send if warranted.
714
731
  * Runs as a lightweight Haiku call, separate from the main heartbeat LLM invocation.
715
732
  */
733
+ /**
734
+ * Self-healing dense embedding backfill. Runs a small batch each tick when:
735
+ * - chat lane has 0 active sessions (don't compete with response latency)
736
+ * - dense model is available
737
+ * - >= 10 minutes since last backfill (cooldown)
738
+ * - not already running
739
+ *
740
+ * Per-pass cap (50 chunks) keeps each tick under ~30s on Apple Silicon CPU.
741
+ * Coverage climbs over hours/days without user action.
742
+ */
743
+ async maybeIdleDenseBackfill() {
744
+ if (this.denseBackfillInFlight)
745
+ return;
746
+ const sinceLastMs = Date.now() - this.lastDenseBackfillAt;
747
+ if (sinceLastMs < 10 * 60 * 1000)
748
+ return;
749
+ const { lanes } = await import('./lanes.js');
750
+ if (lanes.status().chat.active > 0)
751
+ return;
752
+ const store = this.gateway.getMemoryStore();
753
+ if (!store)
754
+ return;
755
+ const stats = store.getMemoryStats();
756
+ if (stats.totalChunks === 0)
757
+ return;
758
+ if (stats.chunksWithDenseEmbeddings >= stats.totalChunks)
759
+ return;
760
+ const embeddings = await import('../memory/embeddings.js');
761
+ if (!embeddings.isDenseReady()) {
762
+ // First time: try to load the model in the background. Don't block the
763
+ // tick — if download is needed (~440MB), it can take a while.
764
+ embeddings.probeDenseReady().catch(() => { });
765
+ return;
766
+ }
767
+ this.denseBackfillInFlight = true;
768
+ this.lastDenseBackfillAt = Date.now();
769
+ try {
770
+ const result = await store.backfillDenseEmbeddings({ limit: 50 });
771
+ if (result.embedded > 0) {
772
+ const after = store.getMemoryStats();
773
+ const pct = after.totalChunks > 0
774
+ ? Math.round((after.chunksWithDenseEmbeddings / after.totalChunks) * 100)
775
+ : 0;
776
+ logger.info({ embedded: result.embedded, failed: result.failed, coveragePct: pct, model: result.model }, 'Idle dense backfill batch complete');
777
+ }
778
+ }
779
+ finally {
780
+ this.denseBackfillInFlight = false;
781
+ }
782
+ }
783
+ /**
784
+ * Daily salience decay. Multiplies salience by 0.95 on chunks unaccessed
785
+ * for >30 days. Date-gated (one pass per calendar day), persisted in
786
+ * HeartbeatState. Pinned chunks exempt; soft-deleted and superseded skipped.
787
+ */
788
+ maybeRunSalienceDecay() {
789
+ const today = todayISO();
790
+ if (this.lastSalienceDecayDate === today)
791
+ return;
792
+ const store = this.gateway.getMemoryStore();
793
+ if (!store || typeof store.decayStaleSalience !== 'function')
794
+ return;
795
+ try {
796
+ const decayed = store
797
+ .decayStaleSalience({ staleDays: 30, decayFactor: 0.95, floor: 0 });
798
+ this.lastSalienceDecayDate = today;
799
+ this.lastState.lastSalienceDecayDate = today;
800
+ this.saveState();
801
+ if (decayed > 0)
802
+ logger.info({ decayed }, 'Daily salience decay sweep complete');
803
+ }
804
+ catch (err) {
805
+ logger.debug({ err }, 'Salience decay sweep failed (non-fatal)');
806
+ }
807
+ }
716
808
  async runInsightCheck() {
717
809
  // Initialize insight state if needed
718
810
  if (!this.lastState.insightState) {
@@ -275,6 +275,7 @@ export declare class MemoryStore {
275
275
  chunkIds: number[];
276
276
  scores: number[];
277
277
  agentSlug?: string | null;
278
+ matchTypes?: string[];
278
279
  }): void;
279
280
  /** Internal sync recall_trace insert. Called by the WriteQueue. */
280
281
  _logRecallTraceSync(opts: {
@@ -284,6 +285,7 @@ export declare class MemoryStore {
284
285
  chunkIds: number[];
285
286
  scores: number[];
286
287
  agentSlug?: string | null;
288
+ matchTypes?: string[];
287
289
  }): void;
288
290
  /**
289
291
  * Fetch recent recall traces for a session, newest first.
@@ -891,6 +893,104 @@ export declare class MemoryStore {
891
893
  * so frequently-mentioned facts surface higher in search results.
892
894
  */
893
895
  bumpChunkSalience(chunkId: number, boost?: number): void;
896
+ /**
897
+ * Mark a chunk as superseded by a newer one. The old chunk becomes
898
+ * invisible to retrieval but stays in the DB for provenance. Idempotent:
899
+ * supersede(A, B) followed by supersede(A, C) records C as the new
900
+ * pointer. Won't link a chunk to itself or to a missing chunk.
901
+ */
902
+ markChunkSuperseded(oldChunkId: number, newChunkId: number, opts?: {
903
+ reason?: string;
904
+ agent?: string;
905
+ }): boolean;
906
+ /**
907
+ * Daily salience-decay sweep. Multiplies salience by `decayFactor` (default
908
+ * 0.95) on chunks that haven't been accessed or written-to in `staleDays`.
909
+ * Pinned chunks are exempt — pinning is the user's explicit "keep this hot."
910
+ * Soft-deleted and superseded chunks are also skipped (they're already out
911
+ * of circulation). Returns count of rows updated.
912
+ */
913
+ decayStaleSalience(opts?: {
914
+ staleDays?: number;
915
+ decayFactor?: number;
916
+ floor?: number;
917
+ }): number;
918
+ /**
919
+ * Composition / graph stats — wikilink density + per-match-type recall
920
+ * contribution over a recent window. Surfaces whether entity linking is
921
+ * paying off in retrieval.
922
+ */
923
+ getGraphStats(opts?: {
924
+ topN?: number;
925
+ lookbackHours?: number;
926
+ }): {
927
+ wikilinkCount: number;
928
+ topLinkedTargets: Array<{
929
+ target: string;
930
+ count: number;
931
+ }>;
932
+ recallContributionByType: Record<string, number>;
933
+ tracesAnalyzed: number;
934
+ };
935
+ /**
936
+ * Cross-channel session bridge — the most recent N session summaries per
937
+ * channel (Discord / dashboard / cron / etc). Channel inferred from the
938
+ * sessionKey prefix (everything before first ':'). Powers the dashboard
939
+ * "Cross-channel handoff" panel so we can see whether continuity is
940
+ * actually flowing between sources.
941
+ */
942
+ getRecentSummariesByChannel(limitPerChannel?: number): Record<string, SessionSummary[]>;
943
+ /**
944
+ * Stats for the supersede graph — count of superseded chunks (excluded
945
+ * from retrieval) for the dashboard.
946
+ */
947
+ getSupersedeStats(): {
948
+ superseded: number;
949
+ recent: Array<{
950
+ oldId: number;
951
+ newId: number;
952
+ reason: string | null;
953
+ supersededAt: string;
954
+ }>;
955
+ };
956
+ /**
957
+ * Apply an agent-supplied salience hint to chunks freshly written by
958
+ * memory_write. Called after incrementalSync so the new chunks exist.
959
+ * Sets salience to MAX(existing, hint) — a higher hint wins, but we
960
+ * never trample reinforcement that already accumulated. Allowed range
961
+ * 0.5–2.0; values >1.0 are reserved for explicit "this is critical"
962
+ * signals from the agent (e.g. user identity, hard preferences,
963
+ * irreversible decisions).
964
+ */
965
+ applyWriteSalience(sourceFile: string, section: string | null, hint: number): number;
966
+ /**
967
+ * Apply an agent-supplied confidence value to chunks freshly written by
968
+ * memory_write. Confidence (0.0–1.0) is orthogonal to salience: salience
969
+ * = "how important if true," confidence = "how certain it's still true."
970
+ * Defaults to 1.0 on insert; agents lower it for tentative facts. Used as
971
+ * a multiplier in retrieval scoring so uncertain chunks lose ranking.
972
+ */
973
+ applyWriteConfidence(sourceFile: string, section: string | null, confidence: number): number;
974
+ /**
975
+ * Recent writes panel — surfaces what the agent has been capturing and why.
976
+ * Joins memory_extractions to chunks (best-effort match by source_file +
977
+ * section if the tool_input carries those). Limit defaults to 50 since this
978
+ * powers a dashboard panel, not analytics.
979
+ */
980
+ getRecentWrites(limit?: number): Array<{
981
+ id: number;
982
+ extractedAt: string;
983
+ sessionKey: string;
984
+ agentSlug: string | null;
985
+ toolName: string;
986
+ action: string | null;
987
+ section: string | null;
988
+ filePath: string | null;
989
+ reason: string | null;
990
+ salienceHint: number | null;
991
+ status: string;
992
+ userMessage: string;
993
+ }>;
894
994
  /**
895
995
  * Log a memory extraction event for transparency tracking.
896
996
  */
@@ -1160,6 +1260,16 @@ export declare class MemoryStore {
1160
1260
  } | null;
1161
1261
  dbSizeBytes: number;
1162
1262
  lastVacuumAt: string | null;
1263
+ denseEmbeddings: {
1264
+ withDense: number;
1265
+ total: number;
1266
+ models: Array<{
1267
+ model: string;
1268
+ count: number;
1269
+ }>;
1270
+ currentModel: string;
1271
+ ready: boolean;
1272
+ };
1163
1273
  };
1164
1274
  /**
1165
1275
  * Get consolidation stats for monitoring.