audrey 0.3.3 → 0.5.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -46,7 +46,7 @@ npx audrey status
46
46
  npx audrey uninstall
47
47
  ```
48
48
 
49
- Every Claude Code session now has 5 memory tools: `memory_encode`, `memory_recall`, `memory_consolidate`, `memory_introspect`, `memory_resolve_truth`.
49
+ Every Claude Code session now has 7 memory tools: `memory_encode`, `memory_recall`, `memory_consolidate`, `memory_introspect`, `memory_resolve_truth`, `memory_export`, `memory_import`.
50
50
 
51
51
  ### SDK in Your Code
52
52
 
@@ -398,6 +398,7 @@ brain.on('contradiction', ({ episodeId, contradictionId, semanticId, resolution
398
398
  brain.on('consolidation', ({ runId, principlesExtracted }) => { ... });
399
399
  brain.on('decay', ({ totalEvaluated, transitionedToDormant }) => { ... });
400
400
  brain.on('rollback', ({ runId, rolledBackMemories }) => { ... });
401
+ brain.on('migration', ({ episodes, semantics, procedures }) => { ... });
401
402
  brain.on('error', (err) => { ... });
402
403
  ```
403
404
 
@@ -429,10 +430,14 @@ src/
429
430
  rollback.js Undo consolidation runs.
430
431
  utils.js Date math, safe JSON parse.
431
432
  validate.js KNN validation + LLM contradiction detection.
433
+ migrate.js Dimension migration re-embedding.
434
+ adaptive.js Adaptive consolidation parameter suggestions.
435
+ export.js Memory export (JSON snapshots).
436
+ import.js Memory import with re-embedding.
432
437
  index.js Barrel export.
433
438
 
434
439
  mcp-server/
435
- index.js MCP tool server (5 tools, stdio transport) + CLI subcommands.
440
+ index.js MCP tool server (7 tools, stdio transport) + CLI subcommands.
436
441
  config.js Shared config (env var parsing, install arg builder).
437
442
  ```
438
443
 
@@ -456,7 +461,7 @@ All mutations use SQLite transactions. CHECK constraints enforce valid states an
456
461
  ## Running Tests
457
462
 
458
463
  ```bash
459
- npm test # 208 tests across 17 files
464
+ npm test # 243 tests across 22 files
460
465
  npm run test:watch
461
466
  ```
462
467
 
@@ -533,14 +538,18 @@ Demonstrates the full pipeline: encode 3 rate-limit observations → consolidate
533
538
  - [x] Published to npm with proper package metadata
534
539
  - [x] 194 tests across 17 test files
535
540
 
536
- ### v0.3.3 — Hardening (current)
541
+ ### v0.5.0 — Feature Depth (current)
537
542
 
538
- - [x] Fix status command dimension mismatch (read stored dimensions from existing database)
539
- - [x] Safe JSON parsing in LLM providers (descriptive errors on malformed responses)
540
- - [x] Fetch timeouts on all API calls (configurable, default 30s)
541
- - [x] Config validation in Audrey constructor (dormantThreshold, minEpisodes)
542
- - [x] encodeBatch error isolation tests
543
- - [x] 208 tests across 17 test files
543
+ - [x] Configurable confidence weights per Audrey instance
544
+ - [x] Configurable decay rates (half-lives) per Audrey instance
545
+ - [x] Confidence config wired through constructor to recall and decay
546
+ - [x] Memory export (JSON snapshot of all tables, no raw embeddings)
547
+ - [x] Memory import with automatic re-embedding via current provider
548
+ - [x] `memory_export` and `memory_import` MCP tools (7 tools total)
549
+ - [x] Auto-consolidation scheduling (`startAutoConsolidate` / `stopAutoConsolidate`)
550
+ - [x] Consolidation metrics tracking (per-run params and results)
551
+ - [x] Adaptive consolidation parameter suggestions based on historical yield
552
+ - [x] 243 tests across 22 test files
544
553
 
545
554
  ### v0.4.0 — Type Safety & Developer Experience
546
555
 
@@ -554,16 +563,6 @@ Demonstrates the full pipeline: encode 3 rate-limit observations → consolidate
554
563
  - [ ] Embedding migration pipeline (re-embed when models change)
555
564
  - [ ] Re-consolidation queue (re-run consolidation with new embedding model)
556
565
 
557
- ### v0.5.0 — Advanced Memory Features
558
-
559
- - [ ] Adaptive consolidation threshold (learn optimal N per domain, not fixed N=3)
560
- - [ ] Source-aware confidence for semantic memories (track strongest source composition)
561
- - [ ] Configurable decay rates per Audrey instance
562
- - [ ] Configurable confidence weights per Audrey instance
563
- - [ ] PII detection and redaction (opt-in)
564
- - [ ] Memory export/import (JSON snapshot)
565
- - [ ] Auto-consolidation scheduling (setInterval with configurable interval)
566
-
567
566
  ### v0.6.0 — Scale
568
567
 
569
568
  - [ ] pgvector adapter for PostgreSQL backend
@@ -1,7 +1,7 @@
1
1
  import { homedir } from 'node:os';
2
2
  import { join } from 'node:path';
3
3
 
4
- export const VERSION = '0.3.3';
4
+ export const VERSION = '0.5.1';
5
5
  export const SERVER_NAME = 'audrey-memory';
6
6
  export const DEFAULT_DATA_DIR = join(homedir(), '.audrey', 'data');
7
7
 
@@ -1,4 +1,4 @@
1
- #!/usr/bin/env node
1
+ #!/usr/bin/env node
2
2
  import { McpServer } from '@modelcontextprotocol/sdk/server/mcp.js';
3
3
  import { StdioServerTransport } from '@modelcontextprotocol/sdk/server/stdio.js';
4
4
  import { z } from 'zod';
@@ -65,12 +65,14 @@ function install() {
65
65
  console.log(`
66
66
  Audrey registered as "${SERVER_NAME}" with Claude Code.
67
67
 
68
- 5 tools available in every session:
68
+ 7 tools available in every session:
69
69
  memory_encode — Store observations, facts, preferences
70
70
  memory_recall — Search memories by semantic similarity
71
71
  memory_consolidate — Extract principles from accumulated episodes
72
72
  memory_introspect — Check memory system health
73
73
  memory_resolve_truth — Resolve contradictions between claims
74
+ memory_export — Export all memories as JSON snapshot
75
+ memory_import — Import a snapshot into a fresh database
74
76
 
75
77
  Data stored in: ${DEFAULT_DATA_DIR}
76
78
  Verify: claude mcp list
@@ -239,6 +241,44 @@ async function main() {
239
241
  },
240
242
  );
241
243
 
244
+ server.tool(
245
+ 'memory_export',
246
+ {},
247
+ async () => {
248
+ try {
249
+ const snapshot = audrey.export();
250
+ return toolResult(snapshot);
251
+ } catch (err) {
252
+ return toolError(err);
253
+ }
254
+ },
255
+ );
256
+
257
+ server.tool(
258
+ 'memory_import',
259
+ {
260
+ snapshot: z.object({
261
+ version: z.string(),
262
+ episodes: z.array(z.any()),
263
+ semantics: z.array(z.any()).optional(),
264
+ procedures: z.array(z.any()).optional(),
265
+ causalLinks: z.array(z.any()).optional(),
266
+ contradictions: z.array(z.any()).optional(),
267
+ consolidationRuns: z.array(z.any()).optional(),
268
+ config: z.record(z.string()).optional(),
269
+ }).passthrough().describe('A snapshot from memory_export'),
270
+ },
271
+ async ({ snapshot }) => {
272
+ try {
273
+ await audrey.import(snapshot);
274
+ const stats = audrey.introspect();
275
+ return toolResult({ imported: true, stats });
276
+ } catch (err) {
277
+ return toolError(err);
278
+ }
279
+ },
280
+ );
281
+
242
282
  const transport = new StdioServerTransport();
243
283
  await server.connect(transport);
244
284
  console.error('[audrey-mcp] connected via stdio');
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "audrey",
3
- "version": "0.3.3",
3
+ "version": "0.5.1",
4
4
  "description": "Biological memory architecture for AI agents — encode, consolidate, and recall memories with confidence decay, contradiction detection, and causal graphs",
5
5
  "type": "module",
6
6
  "main": "src/index.js",
@@ -0,0 +1,53 @@
1
+ export function suggestConsolidationParams(db) {
2
+ const runs = db.prepare(`
3
+ SELECT min_cluster_size, similarity_threshold, clusters_found, principles_extracted, episodes_evaluated
4
+ FROM consolidation_metrics
5
+ ORDER BY created_at DESC
6
+ LIMIT 20
7
+ `).all();
8
+
9
+ if (runs.length === 0) {
10
+ return {
11
+ minClusterSize: 3,
12
+ similarityThreshold: 0.85,
13
+ confidence: 'no_data',
14
+ };
15
+ }
16
+
17
+ const paramScores = new Map();
18
+ for (const run of runs) {
19
+ if (run.episodes_evaluated === 0) continue;
20
+ const key = `${run.min_cluster_size}:${run.similarity_threshold}`;
21
+ if (!paramScores.has(key)) {
22
+ paramScores.set(key, {
23
+ minClusterSize: run.min_cluster_size,
24
+ similarityThreshold: run.similarity_threshold,
25
+ yields: [],
26
+ });
27
+ }
28
+ paramScores.get(key).yields.push(run.principles_extracted / run.episodes_evaluated);
29
+ }
30
+
31
+ let bestKey = null;
32
+ let bestAvgYield = -1;
33
+ for (const [key, data] of paramScores) {
34
+ const avg = data.yields.reduce((a, b) => a + b, 0) / data.yields.length;
35
+ if (avg > bestAvgYield) {
36
+ bestAvgYield = avg;
37
+ bestKey = key;
38
+ }
39
+ }
40
+
41
+ if (!bestKey) {
42
+ return { minClusterSize: 3, similarityThreshold: 0.85, confidence: 'no_data' };
43
+ }
44
+
45
+ const best = paramScores.get(bestKey);
46
+ const confidence = runs.length >= 5 ? 'high' : runs.length >= 2 ? 'medium' : 'low';
47
+
48
+ return {
49
+ minClusterSize: best.minClusterSize,
50
+ similarityThreshold: best.similarityThreshold,
51
+ confidence,
52
+ };
53
+ }
package/src/audrey.js CHANGED
@@ -10,6 +10,10 @@ import { applyDecay } from './decay.js';
10
10
  import { rollbackConsolidation, getConsolidationHistory } from './rollback.js';
11
11
  import { introspect as introspectFn } from './introspect.js';
12
12
  import { buildContextResolutionPrompt } from './prompts.js';
13
+ import { exportMemories } from './export.js';
14
+ import { importMemories } from './import.js';
15
+ import { suggestConsolidationParams as suggestParamsFn } from './adaptive.js';
16
+ import { reembedAll } from './migrate.js';
13
17
 
14
18
  /**
15
19
  * @typedef {'direct-observation' | 'told-by-user' | 'tool-result' | 'inference' | 'model-generated'} SourceType
@@ -77,6 +81,7 @@ export class Audrey extends EventEmitter {
77
81
  agent = 'default',
78
82
  embedding = { provider: 'mock', dimensions: 64 },
79
83
  llm,
84
+ confidence = {},
80
85
  consolidation = {},
81
86
  decay = {},
82
87
  } = {}) {
@@ -95,10 +100,27 @@ export class Audrey extends EventEmitter {
95
100
  this.agent = agent;
96
101
  this.dataDir = dataDir;
97
102
  this.embeddingProvider = createEmbeddingProvider(embedding);
98
- this.db = createDatabase(dataDir, { dimensions: this.embeddingProvider.dimensions });
103
+ const { db, migrated } = createDatabase(dataDir, { dimensions: this.embeddingProvider.dimensions });
104
+ this.db = db;
105
+ this._migrationPending = migrated;
99
106
  this.llmProvider = llm ? createLLMProvider(llm) : null;
100
- this.consolidationConfig = { minEpisodes };
101
- this.decayConfig = { dormantThreshold };
107
+ this.confidenceConfig = {
108
+ weights: confidence.weights,
109
+ halfLives: confidence.halfLives,
110
+ sourceReliability: confidence.sourceReliability,
111
+ };
112
+ this.consolidationConfig = {
113
+ minEpisodes: consolidation.minEpisodes || 3,
114
+ };
115
+ this.decayConfig = { dormantThreshold: decay.dormantThreshold || 0.1 };
116
+ this._autoConsolidateTimer = null;
117
+ }
118
+
119
+ async _ensureMigrated() {
120
+ if (!this._migrationPending) return;
121
+ const counts = await reembedAll(this.db, this.embeddingProvider);
122
+ this._migrationPending = false;
123
+ this.emit('migration', counts);
102
124
  }
103
125
 
104
126
  _emitValidation(id, params) {
@@ -130,6 +152,7 @@ export class Audrey extends EventEmitter {
130
152
  * @returns {Promise<string>}
131
153
  */
132
154
  async encode(params) {
155
+ await this._ensureMigrated();
133
156
  const id = await encodeEpisode(this.db, this.embeddingProvider, params);
134
157
  this.emit('encode', { id, ...params });
135
158
  this._emitValidation(id, params);
@@ -141,6 +164,7 @@ export class Audrey extends EventEmitter {
141
164
  * @returns {Promise<string[]>}
142
165
  */
143
166
  async encodeBatch(paramsList) {
167
+ await this._ensureMigrated();
144
168
  const ids = [];
145
169
  for (const params of paramsList) {
146
170
  const id = await encodeEpisode(this.db, this.embeddingProvider, params);
@@ -160,8 +184,12 @@ export class Audrey extends EventEmitter {
160
184
  * @param {RecallOptions} [options]
161
185
  * @returns {Promise<RecallResult[]>}
162
186
  */
163
- recall(query, options = {}) {
164
- return recallFn(this.db, this.embeddingProvider, query, options);
187
+ async recall(query, options = {}) {
188
+ await this._ensureMigrated();
189
+ return recallFn(this.db, this.embeddingProvider, query, {
190
+ ...options,
191
+ confidenceConfig: options.confidenceConfig ?? this.confidenceConfig,
192
+ });
165
193
  }
166
194
 
167
195
  /**
@@ -170,7 +198,11 @@ export class Audrey extends EventEmitter {
170
198
  * @returns {AsyncGenerator<RecallResult>}
171
199
  */
172
200
  async *recallStream(query, options = {}) {
173
- yield* recallStreamFn(this.db, this.embeddingProvider, query, options);
201
+ await this._ensureMigrated();
202
+ yield* recallStreamFn(this.db, this.embeddingProvider, query, {
203
+ ...options,
204
+ confidenceConfig: options.confidenceConfig ?? this.confidenceConfig,
205
+ });
174
206
  }
175
207
 
176
208
  /**
@@ -178,6 +210,7 @@ export class Audrey extends EventEmitter {
178
210
  * @returns {Promise<ConsolidationResult>}
179
211
  */
180
212
  async consolidate(options = {}) {
213
+ await this._ensureMigrated();
181
214
  const result = await runConsolidation(this.db, this.embeddingProvider, {
182
215
  minClusterSize: options.minClusterSize || this.consolidationConfig.minEpisodes,
183
216
  similarityThreshold: options.similarityThreshold || 0.80,
@@ -197,6 +230,7 @@ export class Audrey extends EventEmitter {
197
230
  decay(options = {}) {
198
231
  const result = applyDecay(this.db, {
199
232
  dormantThreshold: options.dormantThreshold || this.decayConfig.dormantThreshold,
233
+ halfLives: options.halfLives ?? this.confidenceConfig.halfLives,
200
234
  });
201
235
  this.emit('decay', result);
202
236
  return result;
@@ -278,8 +312,40 @@ export class Audrey extends EventEmitter {
278
312
  return introspectFn(this.db);
279
313
  }
280
314
 
315
+ export() {
316
+ return exportMemories(this.db);
317
+ }
318
+
319
+ async import(snapshot) {
320
+ return importMemories(this.db, this.embeddingProvider, snapshot);
321
+ }
322
+
323
+ startAutoConsolidate(intervalMs, options = {}) {
324
+ if (intervalMs < 1000) {
325
+ throw new Error('Auto-consolidation interval must be at least 1000ms');
326
+ }
327
+ if (this._autoConsolidateTimer) {
328
+ throw new Error('Auto-consolidation is already running');
329
+ }
330
+ this._autoConsolidateTimer = setInterval(() => {
331
+ this.consolidate(options).catch(err => this.emit('error', err));
332
+ }, intervalMs);
333
+ }
334
+
335
+ stopAutoConsolidate() {
336
+ if (this._autoConsolidateTimer) {
337
+ clearInterval(this._autoConsolidateTimer);
338
+ this._autoConsolidateTimer = null;
339
+ }
340
+ }
341
+
342
+ suggestConsolidationParams() {
343
+ return suggestParamsFn(this.db);
344
+ }
345
+
281
346
  /** @returns {void} */
282
347
  close() {
348
+ this.stopAutoConsolidate();
283
349
  closeDatabase(this.db);
284
350
  }
285
351
  }
@@ -213,6 +213,15 @@ export async function runConsolidation(db, embeddingProvider, options = {}) {
213
213
 
214
214
  promoteAll();
215
215
 
216
+ db.prepare(`
217
+ INSERT INTO consolidation_metrics (id, run_id, min_cluster_size, similarity_threshold,
218
+ episodes_evaluated, clusters_found, principles_extracted, created_at)
219
+ VALUES (?, ?, ?, ?, ?, ?, ?, ?)
220
+ `).run(
221
+ generateId(), runId, minClusterSize, similarityThreshold,
222
+ episodesEvaluated, clusters.length, principlesExtracted, new Date().toISOString(),
223
+ );
224
+
216
225
  return {
217
226
  runId,
218
227
  episodesEvaluated,
package/src/db.js CHANGED
@@ -104,6 +104,18 @@ const SCHEMA = `
104
104
  value TEXT NOT NULL
105
105
  );
106
106
 
107
+ CREATE TABLE IF NOT EXISTS consolidation_metrics (
108
+ id TEXT PRIMARY KEY,
109
+ run_id TEXT NOT NULL,
110
+ min_cluster_size INTEGER NOT NULL,
111
+ similarity_threshold REAL NOT NULL,
112
+ episodes_evaluated INTEGER NOT NULL,
113
+ clusters_found INTEGER NOT NULL,
114
+ principles_extracted INTEGER NOT NULL,
115
+ created_at TEXT NOT NULL,
116
+ FOREIGN KEY (run_id) REFERENCES consolidation_runs(id)
117
+ );
118
+
107
119
  CREATE INDEX IF NOT EXISTS idx_episodes_created ON episodes(created_at);
108
120
  CREATE INDEX IF NOT EXISTS idx_episodes_consolidated ON episodes(consolidated);
109
121
  CREATE INDEX IF NOT EXISTS idx_episodes_source ON episodes(source);
@@ -138,6 +150,12 @@ function createVec0Tables(db, dimensions) {
138
150
  `);
139
151
  }
140
152
 
153
+ function dropVec0Tables(db) {
154
+ db.exec('DROP TABLE IF EXISTS vec_episodes');
155
+ db.exec('DROP TABLE IF EXISTS vec_semantics');
156
+ db.exec('DROP TABLE IF EXISTS vec_procedures');
157
+ }
158
+
141
159
  function migrateTable(db, { source, target, selectCols, insertCols, placeholders, transform }) {
142
160
  const count = db.prepare(`SELECT COUNT(*) as c FROM ${target}`).get().c;
143
161
  if (count > 0) return;
@@ -186,10 +204,11 @@ function migrateEmbeddingsToVec0(db) {
186
204
  /**
187
205
  * @param {string} dataDir
188
206
  * @param {{ dimensions?: number }} [options]
189
- * @returns {import('better-sqlite3').Database}
207
+ * @returns {{ db: import('better-sqlite3').Database, migrated: boolean }}
190
208
  */
191
209
  export function createDatabase(dataDir, options = {}) {
192
210
  const { dimensions } = options;
211
+ let migrated = false;
193
212
 
194
213
  mkdirSync(dataDir, { recursive: true });
195
214
  const dbPath = join(dataDir, 'audrey.db');
@@ -213,10 +232,11 @@ export function createDatabase(dataDir, options = {}) {
213
232
  if (existing) {
214
233
  const storedDims = parseInt(existing.value, 10);
215
234
  if (storedDims !== dimensions) {
216
- db.close();
217
- throw new Error(
218
- `Dimension mismatch: database was created with ${storedDims} dimensions, but ${dimensions} were requested`
219
- );
235
+ dropVec0Tables(db);
236
+ db.prepare(
237
+ "UPDATE audrey_config SET value = ? WHERE key = 'dimensions'"
238
+ ).run(String(dimensions));
239
+ migrated = true;
220
240
  }
221
241
  } else {
222
242
  db.prepare(
@@ -226,10 +246,12 @@ export function createDatabase(dataDir, options = {}) {
226
246
 
227
247
  createVec0Tables(db, dimensions);
228
248
 
229
- migrateEmbeddingsToVec0(db);
249
+ if (!migrated) {
250
+ migrateEmbeddingsToVec0(db);
251
+ }
230
252
  }
231
253
 
232
- return db;
254
+ return { db, migrated };
233
255
  }
234
256
 
235
257
  export function readStoredDimensions(dataDir) {
package/src/decay.js CHANGED
@@ -6,7 +6,7 @@ import { daysBetween } from './utils.js';
6
6
  * @param {{ dormantThreshold?: number }} [options]
7
7
  * @returns {{ totalEvaluated: number, transitionedToDormant: number, timestamp: string }}
8
8
  */
9
- export function applyDecay(db, { dormantThreshold = 0.1 } = {}) {
9
+ export function applyDecay(db, { dormantThreshold = 0.1, halfLives } = {}) {
10
10
  const now = new Date();
11
11
  let totalEvaluated = 0;
12
12
  let transitionedToDormant = 0;
@@ -31,7 +31,7 @@ export function applyDecay(db, { dormantThreshold = 0.1 } = {}) {
31
31
  supportingCount: sem.supporting_count || 0,
32
32
  contradictingCount: sem.contradicting_count || 0,
33
33
  ageDays,
34
- halfLifeDays: DEFAULT_HALF_LIVES.semantic,
34
+ halfLifeDays: halfLives?.semantic ?? DEFAULT_HALF_LIVES.semantic,
35
35
  retrievalCount: sem.retrieval_count || 0,
36
36
  daysSinceRetrieval,
37
37
  });
@@ -62,7 +62,7 @@ export function applyDecay(db, { dormantThreshold = 0.1 } = {}) {
62
62
  supportingCount: proc.success_count || 0,
63
63
  contradictingCount: proc.failure_count || 0,
64
64
  ageDays,
65
- halfLifeDays: DEFAULT_HALF_LIVES.procedural,
65
+ halfLifeDays: halfLives?.procedural ?? DEFAULT_HALF_LIVES.procedural,
66
66
  retrievalCount: proc.retrieval_count || 0,
67
67
  daysSinceRetrieval,
68
68
  });
package/src/export.js ADDED
@@ -0,0 +1,59 @@
1
+ import { readFileSync } from 'node:fs';
2
+ import { fileURLToPath } from 'node:url';
3
+ import { join, dirname } from 'node:path';
4
+ import { safeJsonParse } from './utils.js';
5
+
6
+ const __dirname = dirname(fileURLToPath(import.meta.url));
7
+ const pkg = JSON.parse(readFileSync(join(__dirname, '../package.json'), 'utf-8'));
8
+
9
+ export function exportMemories(db) {
10
+ const episodes = db.prepare(
11
+ 'SELECT id, content, source, source_reliability, salience, tags, causal_trigger, causal_consequence, created_at, supersedes, superseded_by, consolidated FROM episodes'
12
+ ).all().map(ep => ({
13
+ ...ep,
14
+ tags: safeJsonParse(ep.tags, null),
15
+ }));
16
+
17
+ const semantics = db.prepare(
18
+ 'SELECT id, content, state, conditions, evidence_episode_ids, evidence_count, supporting_count, contradicting_count, source_type_diversity, consolidation_checkpoint, created_at, last_reinforced_at, retrieval_count, challenge_count FROM semantics'
19
+ ).all().map(sem => ({
20
+ ...sem,
21
+ evidence_episode_ids: safeJsonParse(sem.evidence_episode_ids, []),
22
+ }));
23
+
24
+ const procedures = db.prepare(
25
+ 'SELECT id, content, state, trigger_conditions, evidence_episode_ids, success_count, failure_count, created_at, last_reinforced_at, retrieval_count FROM procedures'
26
+ ).all().map(proc => ({
27
+ ...proc,
28
+ evidence_episode_ids: safeJsonParse(proc.evidence_episode_ids, []),
29
+ }));
30
+
31
+ const causalLinks = db.prepare('SELECT * FROM causal_links').all();
32
+
33
+ const contradictions = db.prepare(
34
+ 'SELECT id, claim_a_id, claim_a_type, claim_b_id, claim_b_type, state, resolution, resolved_at, reopened_at, reopen_evidence_id, created_at FROM contradictions'
35
+ ).all();
36
+
37
+ const consolidationRuns = db.prepare(
38
+ 'SELECT id, input_episode_ids, output_memory_ids, started_at, completed_at, status FROM consolidation_runs'
39
+ ).all().map(run => ({
40
+ ...run,
41
+ input_episode_ids: safeJsonParse(run.input_episode_ids, []),
42
+ output_memory_ids: safeJsonParse(run.output_memory_ids, []),
43
+ }));
44
+
45
+ const configRows = db.prepare('SELECT key, value FROM audrey_config').all();
46
+ const config = Object.fromEntries(configRows.map(r => [r.key, r.value]));
47
+
48
+ return {
49
+ version: pkg.version,
50
+ exportedAt: new Date().toISOString(),
51
+ episodes,
52
+ semantics,
53
+ procedures,
54
+ causalLinks,
55
+ contradictions,
56
+ consolidationRuns,
57
+ config,
58
+ };
59
+ }
package/src/import.js ADDED
@@ -0,0 +1,116 @@
1
/**
 * Restore a snapshot produced by exportMemories() into an empty database.
 *
 * Embeddings are not stored in snapshots, so every episode, semantic, and
 * procedure is re-embedded with the current provider as it is inserted.
 *
 * NOTE(review): inserts are not wrapped in a transaction — better-sqlite3
 * transactions cannot span `await` points, and embedding is async. A failure
 * mid-import can leave partial data; consider embedding everything first and
 * then inserting synchronously inside db.transaction(). TODO confirm.
 *
 * @param {import('better-sqlite3').Database} db
 * @param {{ embed(text: string): Promise<number[]>, vectorToBuffer(v: number[]): Buffer }} embeddingProvider
 * @param {object} snapshot - Snapshot object from exportMemories().
 * @throws {Error} If the target database already contains any memories.
 */
export async function importMemories(db, embeddingProvider, snapshot) {
  // Refuse to merge: import is only defined for a fresh database. Check all
  // three memory tables, not just episodes — a database holding only
  // consolidated semantics/procedures must be rejected too.
  const countOf = (table) => db.prepare(`SELECT COUNT(*) as c FROM ${table}`).get().c;
  if (countOf('episodes') > 0 || countOf('semantics') > 0 || countOf('procedures') > 0) {
    throw new Error('Cannot import into a database that is not empty');
  }

  const insertEpisode = db.prepare(`
    INSERT INTO episodes (id, content, source, source_reliability, salience, tags,
      causal_trigger, causal_consequence, created_at, supersedes, superseded_by, consolidated)
    VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
  `);

  const insertVecEpisode = db.prepare(
    'INSERT INTO vec_episodes(id, embedding, source, consolidated) VALUES (?, ?, ?, ?)'
  );

  const insertSemantic = db.prepare(`
    INSERT INTO semantics (id, content, state, conditions, evidence_episode_ids,
      evidence_count, supporting_count, contradicting_count, source_type_diversity,
      consolidation_checkpoint, created_at, last_reinforced_at, retrieval_count, challenge_count)
    VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
  `);

  const insertVecSemantic = db.prepare(
    'INSERT INTO vec_semantics(id, embedding, state) VALUES (?, ?, ?)'
  );

  const insertProcedure = db.prepare(`
    INSERT INTO procedures (id, content, state, trigger_conditions, evidence_episode_ids,
      success_count, failure_count, created_at, last_reinforced_at, retrieval_count)
    VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
  `);

  const insertVecProcedure = db.prepare(
    'INSERT INTO vec_procedures(id, embedding, state) VALUES (?, ?, ?)'
  );

  const insertCausalLink = db.prepare(`
    INSERT INTO causal_links (id, cause_id, effect_id, link_type, mechanism, confidence, evidence_count, created_at)
    VALUES (?, ?, ?, ?, ?, ?, ?, ?)
  `);

  const insertContradiction = db.prepare(`
    INSERT INTO contradictions (id, claim_a_id, claim_a_type, claim_b_id, claim_b_type,
      state, resolution, resolved_at, reopened_at, reopen_evidence_id, created_at)
    VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
  `);

  const insertConsolidationRun = db.prepare(`
    INSERT INTO consolidation_runs (id, input_episode_ids, output_memory_ids, started_at, completed_at, status)
    VALUES (?, ?, ?, ?, ?, ?)
  `);

  // Episodes: row insert + re-embedded vector row. Default the same optional
  // columns exportMemories() may have omitted from older snapshots.
  for (const ep of (snapshot.episodes || [])) {
    const tags = ep.tags ? JSON.stringify(ep.tags) : null;
    insertEpisode.run(
      ep.id, ep.content, ep.source, ep.source_reliability, ep.salience ?? 0.5,
      tags, ep.causal_trigger ?? null, ep.causal_consequence ?? null,
      ep.created_at, ep.supersedes ?? null, ep.superseded_by ?? null, ep.consolidated ?? 0,
    );

    const vector = await embeddingProvider.embed(ep.content);
    const buffer = embeddingProvider.vectorToBuffer(vector);
    // vec0 integer partition columns require BigInt in better-sqlite3.
    insertVecEpisode.run(ep.id, buffer, ep.source, BigInt(ep.consolidated ?? 0));
  }

  for (const sem of (snapshot.semantics || [])) {
    insertSemantic.run(
      sem.id, sem.content, sem.state, sem.conditions ?? null,
      JSON.stringify(sem.evidence_episode_ids || []),
      sem.evidence_count ?? 0, sem.supporting_count ?? 0, sem.contradicting_count ?? 0,
      sem.source_type_diversity ?? 0, sem.consolidation_checkpoint ?? null,
      sem.created_at, sem.last_reinforced_at ?? null, sem.retrieval_count ?? 0, sem.challenge_count ?? 0,
    );

    const vector = await embeddingProvider.embed(sem.content);
    const buffer = embeddingProvider.vectorToBuffer(vector);
    insertVecSemantic.run(sem.id, buffer, sem.state);
  }

  for (const proc of (snapshot.procedures || [])) {
    insertProcedure.run(
      proc.id, proc.content, proc.state, proc.trigger_conditions ?? null,
      JSON.stringify(proc.evidence_episode_ids || []),
      proc.success_count ?? 0, proc.failure_count ?? 0,
      proc.created_at, proc.last_reinforced_at ?? null, proc.retrieval_count ?? 0,
    );

    const vector = await embeddingProvider.embed(proc.content);
    const buffer = embeddingProvider.vectorToBuffer(vector);
    insertVecProcedure.run(proc.id, buffer, proc.state);
  }

  // The remaining tables carry no embeddings — plain row copies.
  for (const link of (snapshot.causalLinks || [])) {
    insertCausalLink.run(
      link.id, link.cause_id, link.effect_id, link.link_type ?? 'causal',
      link.mechanism ?? null, link.confidence ?? null, link.evidence_count ?? 1, link.created_at,
    );
  }

  for (const con of (snapshot.contradictions || [])) {
    insertContradiction.run(
      con.id, con.claim_a_id, con.claim_a_type, con.claim_b_id, con.claim_b_type,
      con.state, con.resolution ?? null, con.resolved_at ?? null,
      con.reopened_at ?? null, con.reopen_evidence_id ?? null, con.created_at,
    );
  }

  for (const run of (snapshot.consolidationRuns || [])) {
    insertConsolidationRun.run(
      run.id, JSON.stringify(run.input_episode_ids || []),
      JSON.stringify(run.output_memory_ids || []),
      run.started_at ?? null, run.completed_at ?? null, run.status,
    );
  }
}
package/src/index.js CHANGED
@@ -10,3 +10,7 @@ export {
10
10
  buildCausalArticulationPrompt,
11
11
  buildContextResolutionPrompt,
12
12
  } from './prompts.js';
13
+ export { exportMemories } from './export.js';
14
+ export { importMemories } from './import.js';
15
+ export { suggestConsolidationParams } from './adaptive.js';
16
+ export { reembedAll } from './migrate.js';
package/src/migrate.js ADDED
@@ -0,0 +1,32 @@
1
/**
 * Re-embed every memory with the current embedding provider and rebuild the
 * vec0 index rows. Called after a dimension change has dropped the vec_*
 * tables (see db.js), so plain INSERTs into them are safe here.
 *
 * NOTE(review): assumes the base tables still carry an `embedding` column
 * alongside the vec_* virtual tables — confirm against the schema in db.js.
 *
 * @param {import('better-sqlite3').Database} db
 * @param {{ embed(text: string): Promise<number[]>, vectorToBuffer(v: number[]): Buffer }} embeddingProvider
 * @returns {Promise<{ episodes: number, semantics: number, procedures: number }>} counts of re-embedded rows
 */
export async function reembedAll(db, embeddingProvider) {
  // Select `consolidated` too: the rebuilt vec_episodes row must preserve each
  // episode's consolidation flag. Hardcoding 0 would make already-consolidated
  // episodes reappear as unconsolidated in vector-search filters.
  const episodes = db.prepare('SELECT id, content, source, consolidated FROM episodes').all();
  const semantics = db.prepare('SELECT id, content, state FROM semantics').all();
  const procedures = db.prepare('SELECT id, content, state FROM procedures').all();

  // Prepare each statement once instead of re-preparing on every iteration.
  const updateEpisode = db.prepare('UPDATE episodes SET embedding = ? WHERE id = ?');
  const insertVecEpisode = db.prepare('INSERT INTO vec_episodes(id, embedding, source, consolidated) VALUES (?, ?, ?, ?)');
  const updateSemantic = db.prepare('UPDATE semantics SET embedding = ? WHERE id = ?');
  const insertVecSemantic = db.prepare('INSERT INTO vec_semantics(id, embedding, state) VALUES (?, ?, ?)');
  const updateProcedure = db.prepare('UPDATE procedures SET embedding = ? WHERE id = ?');
  const insertVecProcedure = db.prepare('INSERT INTO vec_procedures(id, embedding, state) VALUES (?, ?, ?)');

  for (const ep of episodes) {
    const vector = await embeddingProvider.embed(ep.content);
    const buffer = embeddingProvider.vectorToBuffer(vector);
    updateEpisode.run(buffer, ep.id);
    // vec0 integer partition columns require BigInt in better-sqlite3.
    insertVecEpisode.run(ep.id, buffer, ep.source, BigInt(ep.consolidated ?? 0));
  }

  for (const sem of semantics) {
    const vector = await embeddingProvider.embed(sem.content);
    const buffer = embeddingProvider.vectorToBuffer(vector);
    updateSemantic.run(buffer, sem.id);
    insertVecSemantic.run(sem.id, buffer, sem.state);
  }

  for (const proc of procedures) {
    const vector = await embeddingProvider.embed(proc.content);
    const buffer = embeddingProvider.vectorToBuffer(vector);
    updateProcedure.run(buffer, proc.id);
    insertVecProcedure.run(proc.id, buffer, proc.state);
  }

  return {
    episodes: episodes.length,
    semantics: semantics.length,
    procedures: procedures.length,
  };
}
package/src/recall.js CHANGED
@@ -1,48 +1,57 @@
1
1
  import { computeConfidence, DEFAULT_HALF_LIVES } from './confidence.js';
2
2
  import { daysBetween, safeJsonParse } from './utils.js';
3
3
 
4
- function computeEpisodicConfidence(ep, now) {
4
+ function computeEpisodicConfidence(ep, now, confidenceConfig = {}) {
5
5
  const ageDays = daysBetween(ep.created_at, now);
6
+ const halfLives = confidenceConfig.halfLives || DEFAULT_HALF_LIVES;
6
7
  return computeConfidence({
7
8
  sourceType: ep.source,
8
9
  supportingCount: 1,
9
10
  contradictingCount: 0,
10
11
  ageDays,
11
- halfLifeDays: DEFAULT_HALF_LIVES.episodic,
12
+ halfLifeDays: halfLives.episodic ?? DEFAULT_HALF_LIVES.episodic,
12
13
  retrievalCount: 0,
13
14
  daysSinceRetrieval: ageDays,
15
+ weights: confidenceConfig.weights,
16
+ customSourceReliability: confidenceConfig.sourceReliability,
14
17
  });
15
18
  }
16
19
 
17
- function computeSemanticConfidence(sem, now) {
20
+ function computeSemanticConfidence(sem, now, confidenceConfig = {}) {
18
21
  const ageDays = daysBetween(sem.created_at, now);
19
22
  const daysSinceRetrieval = sem.last_reinforced_at
20
23
  ? daysBetween(sem.last_reinforced_at, now)
21
24
  : ageDays;
25
+ const halfLives = confidenceConfig.halfLives || DEFAULT_HALF_LIVES;
22
26
  return computeConfidence({
23
27
  sourceType: 'tool-result',
24
28
  supportingCount: sem.supporting_count || 0,
25
29
  contradictingCount: sem.contradicting_count || 0,
26
30
  ageDays,
27
- halfLifeDays: DEFAULT_HALF_LIVES.semantic,
31
+ halfLifeDays: halfLives.semantic ?? DEFAULT_HALF_LIVES.semantic,
28
32
  retrievalCount: sem.retrieval_count || 0,
29
33
  daysSinceRetrieval,
34
+ weights: confidenceConfig.weights,
35
+ customSourceReliability: confidenceConfig.sourceReliability,
30
36
  });
31
37
  }
32
38
 
33
- function computeProceduralConfidence(proc, now) {
39
+ function computeProceduralConfidence(proc, now, confidenceConfig = {}) {
34
40
  const ageDays = daysBetween(proc.created_at, now);
35
41
  const daysSinceRetrieval = proc.last_reinforced_at
36
42
  ? daysBetween(proc.last_reinforced_at, now)
37
43
  : ageDays;
44
+ const halfLives = confidenceConfig.halfLives || DEFAULT_HALF_LIVES;
38
45
  return computeConfidence({
39
46
  sourceType: 'tool-result',
40
47
  supportingCount: proc.success_count || 0,
41
48
  contradictingCount: proc.failure_count || 0,
42
49
  ageDays,
43
- halfLifeDays: DEFAULT_HALF_LIVES.procedural,
50
+ halfLifeDays: halfLives.procedural ?? DEFAULT_HALF_LIVES.procedural,
44
51
  retrievalCount: proc.retrieval_count || 0,
45
52
  daysSinceRetrieval,
53
+ weights: confidenceConfig.weights,
54
+ customSourceReliability: confidenceConfig.sourceReliability,
46
55
  });
47
56
  }
48
57
 
@@ -112,7 +121,7 @@ function buildProceduralEntry(proc, confidence, score, includeProvenance) {
112
121
  return entry;
113
122
  }
114
123
 
115
- function knnEpisodic(db, queryBuffer, candidateK, now, minConfidence, includeProvenance) {
124
+ function knnEpisodic(db, queryBuffer, candidateK, now, minConfidence, includeProvenance, confidenceConfig) {
116
125
  const rows = db.prepare(`
117
126
  SELECT e.*, (1.0 - v.distance) AS similarity
118
127
  FROM vec_episodes v
@@ -124,7 +133,7 @@ function knnEpisodic(db, queryBuffer, candidateK, now, minConfidence, includePro
124
133
 
125
134
  const results = [];
126
135
  for (const row of rows) {
127
- const confidence = computeEpisodicConfidence(row, now);
136
+ const confidence = computeEpisodicConfidence(row, now, confidenceConfig);
128
137
  if (confidence < minConfidence) continue;
129
138
  const score = row.similarity * confidence;
130
139
  results.push(buildEpisodicEntry(row, confidence, score, includeProvenance));
@@ -132,7 +141,7 @@ function knnEpisodic(db, queryBuffer, candidateK, now, minConfidence, includePro
132
141
  return results;
133
142
  }
134
143
 
135
- function knnSemantic(db, queryBuffer, candidateK, now, minConfidence, includeProvenance, includeDormant) {
144
+ function knnSemantic(db, queryBuffer, candidateK, now, minConfidence, includeProvenance, includeDormant, confidenceConfig) {
136
145
  let stateFilter;
137
146
  if (includeDormant) {
138
147
  stateFilter = "AND (v.state = 'active' OR v.state = 'context_dependent' OR v.state = 'dormant')";
@@ -152,7 +161,7 @@ function knnSemantic(db, queryBuffer, candidateK, now, minConfidence, includePro
152
161
  const results = [];
153
162
  const matchedIds = [];
154
163
  for (const row of rows) {
155
- const confidence = computeSemanticConfidence(row, now);
164
+ const confidence = computeSemanticConfidence(row, now, confidenceConfig);
156
165
  if (confidence < minConfidence) continue;
157
166
  const score = row.similarity * confidence;
158
167
  matchedIds.push(row.id);
@@ -161,7 +170,7 @@ function knnSemantic(db, queryBuffer, candidateK, now, minConfidence, includePro
161
170
  return { results, matchedIds };
162
171
  }
163
172
 
164
- function knnProcedural(db, queryBuffer, candidateK, now, minConfidence, includeProvenance, includeDormant) {
173
+ function knnProcedural(db, queryBuffer, candidateK, now, minConfidence, includeProvenance, includeDormant, confidenceConfig) {
165
174
  let stateFilter;
166
175
  if (includeDormant) {
167
176
  stateFilter = "AND (v.state = 'active' OR v.state = 'context_dependent' OR v.state = 'dormant')";
@@ -181,7 +190,7 @@ function knnProcedural(db, queryBuffer, candidateK, now, minConfidence, includeP
181
190
  const results = [];
182
191
  const matchedIds = [];
183
192
  for (const row of rows) {
184
- const confidence = computeProceduralConfidence(row, now);
193
+ const confidence = computeProceduralConfidence(row, now, confidenceConfig);
185
194
  if (confidence < minConfidence) continue;
186
195
  const score = row.similarity * confidence;
187
196
  matchedIds.push(row.id);
@@ -204,6 +213,7 @@ export async function* recallStream(db, embeddingProvider, query, options = {})
204
213
  limit = 10,
205
214
  includeProvenance = false,
206
215
  includeDormant = false,
216
+ confidenceConfig,
207
217
  } = options;
208
218
 
209
219
  const queryVector = await embeddingProvider.embed(query);
@@ -215,13 +225,13 @@ export async function* recallStream(db, embeddingProvider, query, options = {})
215
225
  const allResults = [];
216
226
 
217
227
  if (searchTypes.includes('episodic')) {
218
- const episodic = knnEpisodic(db, queryBuffer, candidateK, now, minConfidence, includeProvenance);
228
+ const episodic = knnEpisodic(db, queryBuffer, candidateK, now, minConfidence, includeProvenance, confidenceConfig);
219
229
  allResults.push(...episodic);
220
230
  }
221
231
 
222
232
  if (searchTypes.includes('semantic')) {
223
233
  const { results: semResults, matchedIds: semIds } =
224
- knnSemantic(db, queryBuffer, candidateK, now, minConfidence, includeProvenance, includeDormant);
234
+ knnSemantic(db, queryBuffer, candidateK, now, minConfidence, includeProvenance, includeDormant, confidenceConfig);
225
235
  allResults.push(...semResults);
226
236
 
227
237
  if (semIds.length > 0) {
@@ -237,7 +247,7 @@ export async function* recallStream(db, embeddingProvider, query, options = {})
237
247
 
238
248
  if (searchTypes.includes('procedural')) {
239
249
  const { results: procResults, matchedIds: procIds } =
240
- knnProcedural(db, queryBuffer, candidateK, now, minConfidence, includeProvenance, includeDormant);
250
+ knnProcedural(db, queryBuffer, candidateK, now, minConfidence, includeProvenance, includeDormant, confidenceConfig);
241
251
  allResults.push(...procResults);
242
252
 
243
253
  if (procIds.length > 0) {