audrey 0.3.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/package.json ADDED
@@ -0,0 +1,74 @@
1
+ {
2
+ "name": "audrey",
3
+ "version": "0.3.0",
4
+ "description": "Biological memory architecture for AI agents — encode, consolidate, and recall memories with confidence decay, contradiction detection, and causal graphs",
5
+ "type": "module",
6
+ "main": "src/index.js",
7
+ "exports": {
8
+ ".": "./src/index.js",
9
+ "./mcp": "./mcp-server/index.js"
10
+ },
11
+ "bin": {
12
+ "audrey-mcp": "./mcp-server/index.js"
13
+ },
14
+ "files": [
15
+ "src/",
16
+ "mcp-server/",
17
+ "README.md",
18
+ "LICENSE"
19
+ ],
20
+ "scripts": {
21
+ "test": "vitest run",
22
+ "test:watch": "vitest"
23
+ },
24
+ "keywords": [
25
+ "ai",
26
+ "memory",
27
+ "agents",
28
+ "llm",
29
+ "cognitive",
30
+ "sqlite",
31
+ "embedding",
32
+ "mcp",
33
+ "biological-memory",
34
+ "episodic-memory",
35
+ "semantic-memory",
36
+ "forgetting-curve",
37
+ "vector-search",
38
+ "knowledge-graph",
39
+ "causal-graph",
40
+ "ai-agent",
41
+ "model-context-protocol",
42
+ "recall",
43
+ "consolidation",
44
+ "confidence",
45
+ "long-term-memory",
46
+ "persistent-memory",
47
+ "rag",
48
+ "claude",
49
+ "agent-framework"
50
+ ],
51
+ "repository": {
52
+ "type": "git",
53
+ "url": "https://github.com/Evilander/Audrey.git"
54
+ },
55
+ "homepage": "https://github.com/Evilander/Audrey",
56
+ "bugs": {
57
+ "url": "https://github.com/Evilander/Audrey/issues"
58
+ },
59
+ "author": "Tyler Eveland <j.tyler.eveland@gmail.com>",
60
+ "engines": {
61
+ "node": ">=18"
62
+ },
63
+ "license": "MIT",
64
+ "dependencies": {
65
+ "@modelcontextprotocol/sdk": "^1.26.0",
66
+ "better-sqlite3": "^12.6.2",
67
+ "sqlite-vec": "^0.1.7-alpha.2",
68
+ "ulid": "^3.0.2",
69
+ "zod": "^4.3.6"
70
+ },
71
+ "devDependencies": {
72
+ "vitest": "^4.0.18"
73
+ }
74
+ }
package/src/audrey.js ADDED
@@ -0,0 +1,179 @@
1
+ import { EventEmitter } from 'node:events';
2
+ import { createDatabase, closeDatabase } from './db.js';
3
+ import { createEmbeddingProvider } from './embedding.js';
4
+ import { createLLMProvider } from './llm.js';
5
+ import { encodeEpisode } from './encode.js';
6
+ import { recall as recallFn, recallStream as recallStreamFn } from './recall.js';
7
+ import { validateMemory } from './validate.js';
8
+ import { runConsolidation } from './consolidate.js';
9
+ import { applyDecay } from './decay.js';
10
+ import { rollbackConsolidation, getConsolidationHistory } from './rollback.js';
11
+ import { introspect as introspectFn } from './introspect.js';
12
+ import { buildContextResolutionPrompt } from './prompts.js';
13
+
14
/**
 * Biological memory engine for AI agents.
 *
 * Wires together the embedding provider, the SQLite store, and an optional
 * LLM provider, and exposes the memory lifecycle: encode -> validate ->
 * recall -> consolidate -> decay, plus rollback, contradiction resolution,
 * and introspection.
 *
 * Events emitted: 'encode', 'reinforcement', 'contradiction',
 * 'consolidation', 'decay', 'rollback', 'error'.
 */
export class Audrey extends EventEmitter {
  /**
   * @param {object} [config]
   * @param {string} [config.dataDir='./audrey-data'] - Directory for the database files.
   * @param {string} [config.agent='default'] - Agent namespace label.
   * @param {object} [config.embedding] - Embedding provider config; defaults to a 64-dim mock provider.
   * @param {object} [config.llm] - Optional LLM provider config; required for resolveTruth and LLM-backed consolidation.
   * @param {object} [config.consolidation] - { minEpisodes }: minimum cluster size for consolidation (default 3).
   * @param {object} [config.decay] - { dormantThreshold }: confidence below which memories become dormant (default 0.1).
   */
  constructor({
    dataDir = './audrey-data',
    agent = 'default',
    embedding = { provider: 'mock', dimensions: 64 },
    llm,
    consolidation = {},
    decay = {},
  } = {}) {
    super();
    this.agent = agent;
    this.dataDir = dataDir;
    this.embeddingProvider = createEmbeddingProvider(embedding);
    this.db = createDatabase(dataDir, { dimensions: this.embeddingProvider.dimensions });
    this.llmProvider = llm ? createLLMProvider(llm) : null;
    // ?? (not ||) so an explicit, valid falsy value like 0 is honored
    // instead of being silently replaced by the default.
    this.consolidationConfig = {
      minEpisodes: consolidation.minEpisodes ?? 3,
    };
    this.decayConfig = { dormantThreshold: decay.dormantThreshold ?? 0.1 };
  }

  /**
   * Fire-and-forget validation of a freshly encoded episode against existing
   * semantic memory. Emits 'reinforcement' or 'contradiction' depending on
   * the outcome, and 'error' if validation itself fails. Never throws.
   */
  _emitValidation(id, params) {
    validateMemory(this.db, this.embeddingProvider, { id, ...params }, {
      llmProvider: this.llmProvider,
    })
      .then(validation => {
        if (validation.action === 'reinforced') {
          this.emit('reinforcement', {
            episodeId: id,
            targetId: validation.semanticId,
            similarity: validation.similarity,
          });
        } else if (validation.action === 'contradiction') {
          this.emit('contradiction', {
            episodeId: id,
            contradictionId: validation.contradictionId,
            semanticId: validation.semanticId,
            similarity: validation.similarity,
            resolution: validation.resolution,
          });
        }
      })
      .catch(err => this.emit('error', err));
  }

  /**
   * Encode a single episodic memory. Emits 'encode' and kicks off
   * asynchronous validation (see _emitValidation).
   * @returns {Promise<string>} The new episode id.
   */
  async encode(params) {
    const id = await encodeEpisode(this.db, this.embeddingProvider, params);
    this.emit('encode', { id, ...params });
    this._emitValidation(id, params);
    return id;
  }

  /**
   * Encode several episodes sequentially. Validation is started only after
   * every episode in the batch has been stored.
   * @returns {Promise<string[]>} Ids in input order.
   */
  async encodeBatch(paramsList) {
    const ids = [];
    for (const params of paramsList) {
      const id = await encodeEpisode(this.db, this.embeddingProvider, params);
      ids.push(id);
      this.emit('encode', { id, ...params });
    }

    for (let i = 0; i < ids.length; i++) {
      this._emitValidation(ids[i], paramsList[i]);
    }

    return ids;
  }

  /** Retrieve memories relevant to a query. */
  recall(query, options = {}) {
    return recallFn(this.db, this.embeddingProvider, query, options);
  }

  /** Streaming variant of recall; yields results as they are found. */
  async *recallStream(query, options = {}) {
    yield* recallStreamFn(this.db, this.embeddingProvider, query, options);
  }

  /**
   * Cluster unconsolidated episodes and promote recurring patterns to
   * semantic memories. Emits 'consolidation' with the run summary.
   */
  async consolidate(options = {}) {
    const result = await runConsolidation(this.db, this.embeddingProvider, {
      // ?? so explicit 0 thresholds/sizes are passed through, not defaulted.
      minClusterSize: options.minClusterSize ?? this.consolidationConfig.minEpisodes,
      similarityThreshold: options.similarityThreshold ?? 0.80,
      extractPrinciple: options.extractPrinciple,
      llmProvider: options.llmProvider ?? this.llmProvider,
    });
    const run = this.db.prepare('SELECT status FROM consolidation_runs WHERE id = ?').get(result.runId);
    const output = { ...result, status: run?.status ?? 'completed' };
    this.emit('consolidation', output);
    return output;
  }

  /** Apply confidence decay across memories. Emits 'decay'. */
  decay(options = {}) {
    const result = applyDecay(this.db, {
      dormantThreshold: options.dormantThreshold ?? this.decayConfig.dormantThreshold,
    });
    this.emit('decay', result);
    return result;
  }

  /** Undo a consolidation run by id. Emits 'rollback'. */
  rollback(runId) {
    const result = rollbackConsolidation(this.db, runId);
    this.emit('rollback', { runId, ...result });
    return result;
  }

  /**
   * Ask the LLM to adjudicate a recorded contradiction, then persist the
   * verdict: the winning semantic claim is reactivated, or both claims are
   * marked context-dependent with the LLM-supplied conditions.
   * @throws {Error} If no LLM provider is configured or the contradiction is unknown.
   */
  async resolveTruth(contradictionId) {
    if (!this.llmProvider) {
      throw new Error('resolveTruth requires an LLM provider');
    }

    const contradiction = this.db.prepare(
      'SELECT * FROM contradictions WHERE id = ?'
    ).get(contradictionId);
    if (!contradiction) throw new Error(`Contradiction not found: ${contradictionId}`);

    const claimA = this._loadClaimContent(contradiction.claim_a_id, contradiction.claim_a_type);
    const claimB = this._loadClaimContent(contradiction.claim_b_id, contradiction.claim_b_type);

    const messages = buildContextResolutionPrompt(claimA, claimB);
    const result = await this.llmProvider.json(messages);

    const now = new Date().toISOString();
    const newState = result.resolution === 'context_dependent' ? 'context_dependent' : 'resolved';
    this.db.prepare(`
      UPDATE contradictions SET state = ?, resolution = ?, resolved_at = ?
      WHERE id = ?
    `).run(newState, JSON.stringify(result), now, contradictionId);

    if (result.resolution === 'a_wins' && contradiction.claim_a_type === 'semantic') {
      this.db.prepare("UPDATE semantics SET state = 'active' WHERE id = ?").run(contradiction.claim_a_id);
    }
    if (result.resolution === 'b_wins' && contradiction.claim_b_type === 'semantic') {
      this.db.prepare("UPDATE semantics SET state = 'active' WHERE id = ?").run(contradiction.claim_b_id);
    }
    if (result.resolution === 'context_dependent') {
      // NOTE(review): only claim A is updated here; claim B keeps its prior
      // state even when context-dependent — confirm this is intentional.
      if (contradiction.claim_a_type === 'semantic' && result.conditions) {
        this.db.prepare("UPDATE semantics SET state = 'context_dependent', conditions = ? WHERE id = ?")
          .run(JSON.stringify(result.conditions), contradiction.claim_a_id);
      }
    }

    return result;
  }

  /**
   * Load the textual content of a contradiction claim by id and type
   * ('semantic' or 'episodic').
   * @throws {Error} On unknown id or unknown claim type.
   */
  _loadClaimContent(claimId, claimType) {
    if (claimType === 'semantic') {
      const row = this.db.prepare('SELECT content FROM semantics WHERE id = ?').get(claimId);
      if (!row) throw new Error(`Semantic memory not found: ${claimId}`);
      return row.content;
    } else if (claimType === 'episodic') {
      const row = this.db.prepare('SELECT content FROM episodes WHERE id = ?').get(claimId);
      if (!row) throw new Error(`Episode not found: ${claimId}`);
      return row.content;
    }
    throw new Error(`Unknown claim type: ${claimType}`);
  }

  /** List past consolidation runs. */
  consolidationHistory() {
    return getConsolidationHistory(this.db);
  }

  /** Return summary statistics about the memory store. */
  introspect() {
    return introspectFn(this.db);
  }

  /** Close the underlying database. Call when done with this instance. */
  close() {
    closeDatabase(this.db);
  }
}
package/src/causal.js ADDED
@@ -0,0 +1,77 @@
1
+ import { generateId } from './ulid.js';
2
+ import { buildCausalArticulationPrompt } from './prompts.js';
3
+
4
/**
 * Persist a directed causal edge between two memories.
 *
 * @param {object} db - better-sqlite3 database handle.
 * @param {object} link
 * @param {string} link.causeId - Id of the cause memory.
 * @param {string} link.effectId - Id of the effect memory.
 * @param {string} [link.linkType='causal'] - Edge type label.
 * @param {string} link.mechanism - Textual explanation of the mechanism.
 * @param {number} link.confidence - Confidence in the link (0..1).
 * @returns {string} The new link id.
 */
export function addCausalLink(db, { causeId, effectId, linkType = 'causal', mechanism, confidence }) {
  const linkId = generateId();
  const createdAt = new Date().toISOString();

  const insertLink = db.prepare(`
    INSERT INTO causal_links (id, cause_id, effect_id, link_type, mechanism, confidence, created_at)
    VALUES (?, ?, ?, ?, ?, ?, ?)
  `);
  insertLink.run(linkId, causeId, effectId, linkType, mechanism, confidence, createdAt);

  return linkId;
}
15
+
16
+ export function getCausalChain(db, memoryId, options = {}) {
17
+ const { depth = 10 } = options;
18
+ const results = [];
19
+ const visited = new Set();
20
+ const queue = [memoryId];
21
+ let currentDepth = 0;
22
+
23
+ while (queue.length > 0 && currentDepth < depth) {
24
+ const nextQueue = [];
25
+ for (const nodeId of queue) {
26
+ if (visited.has(nodeId)) continue;
27
+ visited.add(nodeId);
28
+
29
+ const links = db.prepare(
30
+ 'SELECT * FROM causal_links WHERE cause_id = ?'
31
+ ).all(nodeId);
32
+
33
+ for (const link of links) {
34
+ if (!visited.has(link.effect_id)) {
35
+ results.push(link);
36
+ nextQueue.push(link.effect_id);
37
+ }
38
+ }
39
+ }
40
+ queue.length = 0;
41
+ queue.push(...nextQueue);
42
+ currentDepth++;
43
+ }
44
+
45
+ return results;
46
+ }
47
+
48
/**
 * Ask the LLM to articulate the mechanism behind a candidate cause/effect
 * pair. If the LLM judges the relation spurious, nothing is persisted and
 * linkId is null; otherwise the link is stored via addCausalLink.
 *
 * @param {object} db - better-sqlite3 database handle.
 * @param {object} llmProvider - Provider exposing json(messages).
 * @param {object} cause - Memory acting as cause (must have .id).
 * @param {object} effect - Memory acting as effect (must have .id).
 * @returns {Promise<{linkId: string|null, mechanism: string, linkType: string, confidence: number, spurious: boolean}>}
 */
export async function articulateCausalLink(db, llmProvider, cause, effect) {
  const prompt = buildCausalArticulationPrompt(cause, effect);
  const verdict = await llmProvider.json(prompt);

  const summary = {
    mechanism: verdict.mechanism,
    linkType: verdict.linkType,
    confidence: verdict.confidence,
  };

  if (verdict.spurious) {
    return { linkId: null, ...summary, spurious: true };
  }

  // NOTE(review): the stored linkType falls back to 'correlational' while the
  // returned summary carries the raw (possibly undefined) value — confirm
  // callers expect the raw value rather than the stored one.
  const linkId = addCausalLink(db, {
    causeId: cause.id,
    effectId: effect.id,
    linkType: verdict.linkType || 'correlational',
    mechanism: verdict.mechanism,
    confidence: verdict.confidence,
  });

  return { linkId, ...summary, spurious: false };
}
@@ -0,0 +1,75 @@
1
// Prior reliability (0..1) assigned to each memory source type.
// Higher values mean more trustworthy provenance.
export const DEFAULT_SOURCE_RELIABILITY = {
  'direct-observation': 0.95,
  'told-by-user': 0.90,
  'tool-result': 0.85,
  'inference': 0.60,
  'model-generated': 0.40,
};

// Relative weights of the four confidence components used by
// computeConfidence; they sum to 1.0.
export const DEFAULT_WEIGHTS = {
  source: 0.30,
  evidence: 0.35,
  recency: 0.20,
  retrieval: 0.15,
};

// Default recency half-lives in days, per memory type.
export const DEFAULT_HALF_LIVES = {
  episodic: 7,
  semantic: 30,
  procedural: 90,
};

// Hard ceiling applied to the confidence of model-generated memories.
export const MODEL_GENERATED_CONFIDENCE_CAP = 0.6;
23
+
24
/**
 * Look up the reliability prior for a source type.
 *
 * @param {string} sourceType - One of the keys in the reliability table.
 * @param {object} [customReliability] - Optional override table; falls back to DEFAULT_SOURCE_RELIABILITY.
 * @returns {number} Reliability in the 0..1 range.
 * @throws {Error} When the source type is not present in the table.
 */
export function sourceReliability(sourceType, customReliability) {
  const lookup = customReliability || DEFAULT_SOURCE_RELIABILITY;
  const reliability = lookup[sourceType];
  if (reliability !== undefined) {
    return reliability;
  }
  throw new Error(`Unknown source type: ${sourceType}. Valid types: ${Object.keys(lookup).join(', ')}`);
}
32
+
33
/**
 * Fraction of the evidence that supports a claim.
 * Returns 1.0 when there is no evidence at all (benefit of the doubt).
 */
export function evidenceAgreement(supportingCount, contradictingCount) {
  const totalEvidence = supportingCount + contradictingCount;
  return totalEvidence === 0 ? 1.0 : supportingCount / totalEvidence;
}
38
+
39
/**
 * Exponential recency decay: e^(-(ln 2 / halfLife) * age).
 * Evaluates to 1.0 at age 0 and ~0.5 after one half-life.
 */
export function recencyDecay(ageDays, halfLifeDays) {
  return Math.exp(-(Math.LN2 / halfLifeDays) * ageDays);
}
43
+
44
/**
 * Reinforcement earned from past retrievals: grows logarithmically with the
 * retrieval count, decays exponentially (14-day half-life) with time since
 * the last retrieval, and is capped at 1.0. Zero retrievals yield 0.
 */
export function retrievalReinforcement(retrievalCount, daysSinceRetrieval) {
  if (retrievalCount === 0) {
    return 0;
  }
  // 14-day half-life on the time since the memory was last retrieved.
  const staleness = Math.exp(-(Math.LN2 / 14) * daysSinceRetrieval);
  const reinforcement = 0.3 * Math.log(1 + retrievalCount) * staleness;
  return Math.min(1.0, reinforcement);
}
49
+
50
/**
 * Combine the four confidence components — source reliability, evidence
 * agreement, recency decay, and retrieval reinforcement — into a single
 * weighted score, clamped to [0, 1]. Model-generated memories are further
 * capped at MODEL_GENERATED_CONFIDENCE_CAP.
 *
 * @param {object} params
 * @param {string} params.sourceType - Provenance key (see DEFAULT_SOURCE_RELIABILITY).
 * @param {number} params.supportingCount / params.contradictingCount - Evidence tallies.
 * @param {number} params.ageDays / params.halfLifeDays - Recency inputs.
 * @param {number} params.retrievalCount / params.daysSinceRetrieval - Retrieval inputs.
 * @param {object} [params.weights] - Component weights; defaults to DEFAULT_WEIGHTS.
 * @param {object} [params.customSourceReliability] - Optional reliability table override.
 * @returns {number} Confidence in [0, 1].
 */
export function computeConfidence({
  sourceType,
  supportingCount,
  contradictingCount,
  ageDays,
  halfLifeDays,
  retrievalCount,
  daysSinceRetrieval,
  weights,
  customSourceReliability,
}) {
  const { source, evidence, recency, retrieval } = weights || DEFAULT_WEIGHTS;

  const weighted =
    source * sourceReliability(sourceType, customSourceReliability) +
    evidence * evidenceAgreement(supportingCount, contradictingCount) +
    recency * recencyDecay(ageDays, halfLifeDays) +
    retrieval * retrievalReinforcement(retrievalCount, daysSinceRetrieval);

  // Model output is never trusted beyond the cap, regardless of evidence.
  const capped = sourceType === 'model-generated'
    ? Math.min(weighted, MODEL_GENERATED_CONFIDENCE_CAP)
    : weighted;

  return Math.max(0, Math.min(1, capped));
}
@@ -0,0 +1,219 @@
1
+ import { generateId } from './ulid.js';
2
+ import { buildPrincipleExtractionPrompt } from './prompts.js';
3
+
4
/**
 * Group episodes into clusters of mutually similar memories.
 *
 * For each episode, runs a KNN query against the vec_episodes table and
 * unions the episode with any neighbor whose cosine similarity
 * (1 - distance) meets the threshold. Connected components are gathered
 * with a union-find (path halving), and components smaller than
 * minClusterSize are discarded.
 *
 * @returns {object[][]} Arrays of episode rows, one per surviving cluster.
 */
function clusterViaKNN(db, episodes, similarityThreshold, minClusterSize) {
  const total = episodes.length;
  const neighborLimit = Math.min(50, total);
  const indexOfId = new Map(episodes.map((ep, idx) => [ep.id, idx]));

  // Union-find over episode indices.
  const root = Array.from({ length: total }, (_, idx) => idx);

  const findRoot = (x) => {
    while (root[x] !== x) {
      root[x] = root[root[x]]; // path halving
      x = root[x];
    }
    return x;
  };

  const merge = (a, b) => {
    const rootA = findRoot(a);
    const rootB = findRoot(b);
    if (rootA !== rootB) root[rootA] = rootB;
  };

  const getEmbedding = db.prepare('SELECT embedding FROM vec_episodes WHERE id = ?');
  const knnQuery = db.prepare(`
    SELECT id, distance
    FROM vec_episodes
    WHERE embedding MATCH ? AND k = ? AND consolidated = 0
  `);

  episodes.forEach((ep, i) => {
    const vecRow = getEmbedding.get(ep.id);
    if (!vecRow) return; // episode has no stored vector; skip

    for (const neighbor of knnQuery.all(vecRow.embedding, neighborLimit)) {
      if (neighbor.id === ep.id) continue;
      const j = indexOfId.get(neighbor.id);
      if (j === undefined) continue;
      if (1.0 - neighbor.distance >= similarityThreshold) {
        merge(i, j);
      }
    }
  });

  // Bucket episodes by their component root, preserving first-seen order.
  const members = new Map();
  episodes.forEach((ep, i) => {
    const r = findRoot(i);
    if (!members.has(r)) members.set(r, []);
    members.get(r).push(ep);
  });

  return [...members.values()].filter((group) => group.length >= minClusterSize);
}
65
+
66
/**
 * Fetch every unconsolidated, non-superseded episode that has an embedding
 * and cluster them by KNN similarity. Returns [] when nothing is eligible.
 *
 * @param {object} db - better-sqlite3 database handle.
 * @param {object} embeddingProvider - Unused here; kept for interface symmetry.
 * @param {object} [options] - { similarityThreshold = 0.85, minClusterSize = 3 }.
 * @returns {object[][]} Clusters of episode rows.
 */
export function clusterEpisodes(db, embeddingProvider, options = {}) {
  const { similarityThreshold = 0.85, minClusterSize = 3 } = options;

  const candidates = db.prepare(
    'SELECT * FROM episodes WHERE consolidated = 0 AND superseded_by IS NULL AND embedding IS NOT NULL'
  ).all();

  return candidates.length === 0
    ? []
    : clusterViaKNN(db, candidates, similarityThreshold, minClusterSize);
}
80
+
81
// Fallback principle extraction used when neither a custom extractor nor an
// LLM is available: join the distinct episode contents into one
// "recurring pattern" statement.
function defaultExtractPrinciple(episodes) {
  const distinct = new Set(episodes.map((ep) => ep.content));
  return {
    content: `Recurring pattern: ${[...distinct].join('; ')}`,
    type: 'semantic',
  };
}
88
+
89
// Ask the configured LLM to distill a cluster of episodes into a principle
// object, parsed from its JSON response.
async function llmExtractPrinciple(llmProvider, episodes) {
  const prompt = buildPrincipleExtractionPrompt(episodes);
  const principle = await llmProvider.json(prompt);
  return principle;
}
93
+
94
/**
 * Run one consolidation pass: cluster unconsolidated episodes, extract a
 * principle per cluster (custom callback > LLM > heuristic fallback), and
 * atomically promote the principles to semantic memories while marking the
 * source episodes as consolidated. The run is journaled in
 * consolidation_runs and flipped to 'failed' if anything throws.
 *
 * Fix: the five SQL statements used inside the promotion transaction were
 * re-prepared for every cluster (and per episode for the mark statements);
 * they are now prepared once before the transaction runs.
 *
 * @param {object} db - better-sqlite3 database handle.
 * @param {object} embeddingProvider - Exposes embed(), vectorToBuffer(), modelName, modelVersion.
 * @param {object} [options]
 * @param {number} [options.similarityThreshold=0.85] - Min cosine similarity for clustering.
 * @param {number} [options.minClusterSize=3] - Min episodes per cluster.
 * @param {function} [options.extractPrinciple] - Custom (episodes) => { content, type }.
 * @param {object} [options.llmProvider] - LLM used when no custom extractor is supplied.
 * @returns {Promise<{runId: string, episodesEvaluated: number, clustersFound: number, principlesExtracted: number}>}
 */
export async function runConsolidation(db, embeddingProvider, options = {}) {
  const {
    similarityThreshold = 0.85,
    minClusterSize = 3,
    extractPrinciple,
    llmProvider,
  } = options;

  const runId = generateId();
  const now = new Date().toISOString();

  db.prepare(`
    INSERT INTO consolidation_runs (id, started_at, status, input_episode_ids, output_memory_ids, consolidation_model)
    VALUES (?, ?, 'running', '[]', '[]', ?)
  `).run(runId, now, llmProvider?.modelName || null);

  try {
    const clusters = clusterEpisodes(db, embeddingProvider, { similarityThreshold, minClusterSize });

    const episodesEvaluated = db.prepare(
      'SELECT COUNT(*) as count FROM episodes WHERE consolidated = 0 AND superseded_by IS NULL AND embedding IS NOT NULL'
    ).get().count;

    // Phase 1 (async, outside the transaction): extract a principle and
    // compute its embedding for each cluster. Clusters whose extractor
    // yields no content are skipped.
    const clusterData = [];
    for (const cluster of clusters) {
      let principle;
      if (extractPrinciple) {
        principle = extractPrinciple(cluster);
      } else if (llmProvider) {
        principle = await llmExtractPrinciple(llmProvider, cluster);
      } else {
        principle = defaultExtractPrinciple(cluster);
      }

      if (!principle || !principle.content) continue;

      const clusterIds = cluster.map((ep) => ep.id);
      const sourceTypes = new Set(cluster.map((ep) => ep.source));
      const vector = await embeddingProvider.embed(principle.content);
      const embeddingBuffer = embeddingProvider.vectorToBuffer(vector);

      clusterData.push({
        cluster,
        principle,
        clusterIds,
        sourceTypeDiversity: sourceTypes.size,
        embeddingBuffer,
        semanticId: generateId(),
        semanticNow: new Date().toISOString(),
      });
    }

    const allInputIds = [];
    const allOutputIds = [];
    let principlesExtracted = 0;

    // Statements prepared once, outside the transaction body (previously
    // re-prepared on every loop iteration).
    const insertSemantic = db.prepare(`
      INSERT INTO semantics (
        id, content, embedding, state, evidence_episode_ids,
        evidence_count, supporting_count, source_type_diversity,
        consolidation_checkpoint, embedding_model, embedding_version,
        consolidation_model, created_at
      ) VALUES (?, ?, ?, 'active', ?, ?, ?, ?, ?, ?, ?, ?, ?)
    `);
    const insertVecSemantic = db.prepare('INSERT INTO vec_semantics(id, embedding, state) VALUES (?, ?, ?)');
    const markEpisode = db.prepare('UPDATE episodes SET consolidated = 1 WHERE id = ?');
    const markVecEpisode = db.prepare('UPDATE vec_episodes SET consolidated = ? WHERE id = ?');
    const finishRun = db.prepare(`
      UPDATE consolidation_runs
      SET status = 'completed',
          completed_at = ?,
          input_episode_ids = ?,
          output_memory_ids = ?
      WHERE id = ?
    `);

    // Phase 2 (synchronous): promote every principle and mark its source
    // episodes, atomically, then record the completed run.
    const promoteAll = db.transaction(() => {
      for (const entry of clusterData) {
        allInputIds.push(...entry.clusterIds);

        insertSemantic.run(
          entry.semanticId,
          entry.principle.content,
          entry.embeddingBuffer,
          JSON.stringify(entry.clusterIds),
          entry.cluster.length,
          entry.cluster.length,
          entry.sourceTypeDiversity,
          runId,
          embeddingProvider.modelName,
          embeddingProvider.modelVersion,
          llmProvider?.modelName || null,
          entry.semanticNow,
        );

        insertVecSemantic.run(entry.semanticId, entry.embeddingBuffer, 'active');

        allOutputIds.push(entry.semanticId);
        principlesExtracted++;

        for (const ep of entry.cluster) {
          markEpisode.run(ep.id);
          // BigInt(1) preserved from original — the vec virtual table
          // apparently requires integer binding; TODO confirm.
          markVecEpisode.run(BigInt(1), ep.id);
        }
      }

      const completedAt = new Date().toISOString();
      finishRun.run(completedAt, JSON.stringify(allInputIds), JSON.stringify(allOutputIds), runId);
    });

    promoteAll();

    return {
      runId,
      episodesEvaluated,
      clustersFound: clusters.length,
      principlesExtracted,
    };
  } catch (err) {
    // Journal the failure so the run never stays stuck in 'running'.
    const failedAt = new Date().toISOString();
    db.prepare(`
      UPDATE consolidation_runs
      SET status = 'failed', completed_at = ?
      WHERE id = ?
    `).run(failedAt, runId);
    throw err;
  }
}