audrey 0.5.1 → 0.9.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/src/affect.js ADDED
@@ -0,0 +1,64 @@
1
/**
 * Salience boost from emotional arousal, following an inverted-U
 * (Yerkes-Dodson) curve: a Gaussian peaking at arousal 0.7 with sigma 0.3.
 *
 * @param {number|null|undefined} arousal - Arousal level, nominally in [0, 1].
 * @returns {number} Boost in (0, 1]; 0 when arousal is null or undefined.
 */
export function arousalSalienceBoost(arousal) {
  if (arousal == null) return 0;
  const PEAK = 0.7;
  const SIGMA = 0.3;
  const deviation = arousal - PEAK;
  return Math.exp(-(deviation * deviation) / (2 * SIGMA * SIGMA));
}
6
+
7
/**
 * Similarity between two affect states. Valence (nominal range [-1, 1])
 * contributes 70% and arousal (nominal range [0, 1]) 30%, per the
 * mood-congruence weighting attributed to Bower 1981. When either side
 * lacks arousal, the score is valence-only.
 *
 * @param {{valence?: number, arousal?: number}|null|undefined} a
 * @param {{valence?: number, arousal?: number}|null|undefined} b
 * @returns {number} 0 when either argument or its valence is missing.
 */
export function affectSimilarity(a, b) {
  if (a?.valence === undefined || b?.valence === undefined) return 0;
  const valenceSim = 1.0 - Math.abs(a.valence - b.valence) / 2.0;
  if (a.arousal === undefined || b.arousal === undefined) return valenceSim;
  const arousalSim = 1.0 - Math.abs(a.arousal - b.arousal);
  return 0.7 * valenceSim + 0.3 * arousalSim;
}

/**
 * Retrieval multiplier for mood-congruent memory: memories encoded in an
 * affective state similar to the current mood get a boost.
 *
 * @param {{valence?: number, arousal?: number}|null|undefined} encodingAffect
 * @param {{valence?: number, arousal?: number}|null|undefined} retrievalMood
 * @param {number} [weight=0.2] - Maximum proportional boost.
 * @returns {number} Multiplier; 1.0 (neutral) when either state is absent
 *   or the similarity is zero, otherwise 1.0 + weight * similarity.
 */
export function moodCongruenceModifier(encodingAffect, retrievalMood, weight = 0.2) {
  if (!encodingAffect || !retrievalMood) return 1.0;
  const similarity = affectSimilarity(encodingAffect, retrievalMood);
  return similarity === 0 ? 1.0 : 1.0 + weight * similarity;
}
24
+
25
/**
 * Find prior episodes that "resonate" with a newly encoded episode:
 * close in embedding space AND emotionally similar in affect.
 *
 * @param {import('better-sqlite3').Database} db
 * @param {object} embeddingProvider - Must expose embed() and vectorToBuffer().
 * @param {string} episodeId - Id of the new episode; excluded from matches.
 * @param {{content: string, affect?: {valence?: number, arousal?: number}}} episode
 * @param {{enabled?: boolean, k?: number, threshold?: number, affectThreshold?: number}} [config]
 * @returns {Promise<Array<object>>} Resonance records; empty when disabled
 *   or the new episode carries no valence.
 */
export async function detectResonance(db, embeddingProvider, episodeId, { content, affect }, config = {}) {
  const { enabled = true, k = 5, threshold = 0.5, affectThreshold = 0.6 } = config;
  if (!enabled || !affect || affect.valence === undefined) return [];

  // NOTE(review): this re-embeds content that encode already embedded —
  // acceptable here, but a shared embedding would save a provider call.
  const embedded = await embeddingProvider.embed(content);
  const queryBuffer = embeddingProvider.vectorToBuffer(embedded);

  const rows = db.prepare(`
    SELECT e.*, (1.0 - v.distance) AS similarity
    FROM vec_episodes v
    JOIN episodes e ON e.id = v.id
    WHERE v.embedding MATCH ?
    AND k = ?
    AND e.id != ?
    AND e.superseded_by IS NULL
  `).all(queryBuffer, k, episodeId);

  const echoes = [];
  for (const row of rows) {
    if (row.similarity < threshold) continue;

    let priorAffect;
    try {
      priorAffect = JSON.parse(row.affect || '{}');
    } catch {
      // Unreadable affect payload — skip this candidate rather than fail.
      continue;
    }
    if (priorAffect.valence === undefined) continue;

    const emotionalSimilarity = affectSimilarity(affect, priorAffect);
    if (emotionalSimilarity < affectThreshold) continue;

    echoes.push({
      priorEpisodeId: row.id,
      priorContent: row.content,
      priorAffect,
      semanticSimilarity: row.similarity,
      emotionalSimilarity,
      timeDelta: Date.now() - new Date(row.created_at).getTime(),
      priorCreatedAt: row.created_at,
    });
  }

  return echoes;
}
package/src/audrey.js CHANGED
@@ -8,12 +8,15 @@ import { validateMemory } from './validate.js';
8
8
  import { runConsolidation } from './consolidate.js';
9
9
  import { applyDecay } from './decay.js';
10
10
  import { rollbackConsolidation, getConsolidationHistory } from './rollback.js';
11
+ import { forgetMemory, forgetByQuery as forgetByQueryFn, purgeMemories } from './forget.js';
11
12
  import { introspect as introspectFn } from './introspect.js';
12
13
  import { buildContextResolutionPrompt } from './prompts.js';
13
14
  import { exportMemories } from './export.js';
14
15
  import { importMemories } from './import.js';
15
16
  import { suggestConsolidationParams as suggestParamsFn } from './adaptive.js';
16
17
  import { reembedAll } from './migrate.js';
18
+ import { applyInterference } from './interference.js';
19
+ import { detectResonance } from './affect.js';
17
20
 
18
21
  /**
19
22
  * @typedef {'direct-observation' | 'told-by-user' | 'tool-result' | 'inference' | 'model-generated'} SourceType
@@ -33,6 +36,10 @@ import { reembedAll } from './migrate.js';
33
36
  * @property {number} [limit]
34
37
  * @property {boolean} [includeProvenance]
35
38
  * @property {boolean} [includeDormant]
39
+ * @property {string[]} [tags]
40
+ * @property {string[]} [sources]
41
+ * @property {string} [after]
42
+ * @property {string} [before]
36
43
  *
37
44
  * @typedef {Object} RecallResult
38
45
  * @property {string} id
@@ -84,6 +91,9 @@ export class Audrey extends EventEmitter {
84
91
  confidence = {},
85
92
  consolidation = {},
86
93
  decay = {},
94
+ interference = {},
95
+ context = {},
96
+ affect = {},
87
97
  } = {}) {
88
98
  super();
89
99
 
@@ -108,12 +118,36 @@ export class Audrey extends EventEmitter {
108
118
  weights: confidence.weights,
109
119
  halfLives: confidence.halfLives,
110
120
  sourceReliability: confidence.sourceReliability,
121
+ interferenceWeight: interference.weight ?? 0.1,
122
+ contextWeight: context.weight ?? 0.3,
123
+ affectWeight: affect.weight ?? 0.2,
111
124
  };
112
125
  this.consolidationConfig = {
113
126
  minEpisodes: consolidation.minEpisodes || 3,
114
127
  };
115
128
  this.decayConfig = { dormantThreshold: decay.dormantThreshold || 0.1 };
116
129
  this._autoConsolidateTimer = null;
130
+ this.interferenceConfig = {
131
+ enabled: interference.enabled ?? true,
132
+ k: interference.k ?? 5,
133
+ threshold: interference.threshold ?? 0.6,
134
+ weight: interference.weight ?? 0.1,
135
+ };
136
+ this.contextConfig = {
137
+ enabled: context.enabled ?? true,
138
+ weight: context.weight ?? 0.3,
139
+ };
140
+ this.affectConfig = {
141
+ enabled: affect.enabled ?? true,
142
+ weight: affect.weight ?? 0.2,
143
+ arousalWeight: affect.arousalWeight ?? 0.3,
144
+ resonance: {
145
+ enabled: affect.resonance?.enabled ?? true,
146
+ k: affect.resonance?.k ?? 5,
147
+ threshold: affect.resonance?.threshold ?? 0.5,
148
+ affectThreshold: affect.resonance?.affectThreshold ?? 0.6,
149
+ },
150
+ };
117
151
  }
118
152
 
119
153
  async _ensureMigrated() {
@@ -153,8 +187,27 @@ export class Audrey extends EventEmitter {
153
187
  */
154
188
  async encode(params) {
155
189
  await this._ensureMigrated();
156
- const id = await encodeEpisode(this.db, this.embeddingProvider, params);
190
+ const encodeParams = { ...params, arousalWeight: this.affectConfig.arousalWeight };
191
+ const id = await encodeEpisode(this.db, this.embeddingProvider, encodeParams);
157
192
  this.emit('encode', { id, ...params });
193
+ if (this.interferenceConfig.enabled) {
194
+ applyInterference(this.db, this.embeddingProvider, id, params, this.interferenceConfig)
195
+ .then(affected => {
196
+ if (affected.length > 0) {
197
+ this.emit('interference', { episodeId: id, affected });
198
+ }
199
+ })
200
+ .catch(err => this.emit('error', err));
201
+ }
202
+ if (this.affectConfig.enabled && this.affectConfig.resonance.enabled && params.affect?.valence !== undefined) {
203
+ detectResonance(this.db, this.embeddingProvider, id, params, this.affectConfig.resonance)
204
+ .then(echoes => {
205
+ if (echoes.length > 0) {
206
+ this.emit('resonance', { episodeId: id, affect: params.affect, echoes });
207
+ }
208
+ })
209
+ .catch(err => this.emit('error', err));
210
+ }
158
211
  this._emitValidation(id, params);
159
212
  return id;
160
213
  }
@@ -188,7 +241,7 @@ export class Audrey extends EventEmitter {
188
241
  await this._ensureMigrated();
189
242
  return recallFn(this.db, this.embeddingProvider, query, {
190
243
  ...options,
191
- confidenceConfig: options.confidenceConfig ?? this.confidenceConfig,
244
+ confidenceConfig: this._recallConfig(options),
192
245
  });
193
246
  }
194
247
 
@@ -201,10 +254,21 @@ export class Audrey extends EventEmitter {
201
254
  await this._ensureMigrated();
202
255
  yield* recallStreamFn(this.db, this.embeddingProvider, query, {
203
256
  ...options,
204
- confidenceConfig: options.confidenceConfig ?? this.confidenceConfig,
257
+ confidenceConfig: this._recallConfig(options),
205
258
  });
206
259
  }
207
260
 
261
+ _recallConfig(options) {
262
+ let config = options.confidenceConfig ?? this.confidenceConfig;
263
+ if (this.contextConfig.enabled && options.context) {
264
+ config = { ...config, retrievalContext: options.context };
265
+ }
266
+ if (this.affectConfig.enabled && options.mood) {
267
+ config = { ...config, retrievalMood: options.mood };
268
+ }
269
+ return config;
270
+ }
271
+
208
272
  /**
209
273
  * @param {{ minClusterSize?: number, similarityThreshold?: number, extractPrinciple?: Function, llmProvider?: import('./llm.js').LLMProvider }} [options]
210
274
  * @returns {Promise<ConsolidationResult>}
@@ -343,6 +407,25 @@ export class Audrey extends EventEmitter {
343
407
  return suggestParamsFn(this.db);
344
408
  }
345
409
 
410
+ forget(id, options = {}) {
411
+ const result = forgetMemory(this.db, id, options);
412
+ this.emit('forget', result);
413
+ return result;
414
+ }
415
+
416
+ async forgetByQuery(query, options = {}) {
417
+ await this._ensureMigrated();
418
+ const result = await forgetByQueryFn(this.db, this.embeddingProvider, query, options);
419
+ if (result) this.emit('forget', result);
420
+ return result;
421
+ }
422
+
423
+ purge() {
424
+ const result = purgeMemories(this.db);
425
+ this.emit('purge', result);
426
+ return result;
427
+ }
428
+
346
429
  /** @returns {void} */
347
430
  close() {
348
431
  this.stopAutoConsolidate();
package/src/confidence.js CHANGED
@@ -67,8 +67,16 @@ export function recencyDecay(ageDays, halfLifeDays) {
67
67
  */
68
68
export function retrievalReinforcement(retrievalCount, daysSinceRetrieval) {
  if (retrievalCount === 0) return 0;
  // Retrieval practice strengthens logarithmically with count, decays with a
  // 14-day half-life since the last retrieval, and earns a small spacing
  // bonus (capped at 0.15) that grows with the retrieval gap.
  const RETRIEVAL_HALF_LIFE_DAYS = 14;
  const decayRate = Math.LN2 / RETRIEVAL_HALF_LIFE_DAYS;
  const practiceStrength = 0.3 * Math.log(1 + retrievalCount);
  const recencyFactor = Math.exp(-decayRate * daysSinceRetrieval);
  const spacingBonus = Math.min(0.15, 0.02 * Math.log(1 + daysSinceRetrieval));
  return Math.min(1.0, practiceStrength * recencyFactor + spacingBonus);
}
76
+
77
/**
 * Confidence multiplier derived from salience: 0.5x at salience 0,
 * neutral (1.0x) at the default salience of 0.5, and 1.5x at salience 1.
 * @param {number|null|undefined} salience - Nominally in [0, 1]; null/undefined defaults to 0.5.
 * @returns {number} Multiplier in [0.5, 1.5] for in-range salience.
 */
export function salienceModifier(salience) {
  return 0.5 + (salience ?? 0.5);
}
73
81
 
74
82
  /**
@@ -152,6 +152,7 @@ export async function runConsolidation(db, embeddingProvider, options = {}) {
152
152
  embeddingBuffer,
153
153
  semanticId: generateId(),
154
154
  semanticNow: new Date().toISOString(),
155
+ maxSalience: Math.max(...cluster.map(ep => ep.salience ?? 0.5)),
155
156
  });
156
157
  }
157
158
 
@@ -168,8 +169,8 @@ export async function runConsolidation(db, embeddingProvider, options = {}) {
168
169
  id, content, embedding, state, evidence_episode_ids,
169
170
  evidence_count, supporting_count, source_type_diversity,
170
171
  consolidation_checkpoint, embedding_model, embedding_version,
171
- consolidation_model, created_at
172
- ) VALUES (?, ?, ?, 'active', ?, ?, ?, ?, ?, ?, ?, ?, ?)
172
+ consolidation_model, created_at, salience
173
+ ) VALUES (?, ?, ?, 'active', ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
173
174
  `).run(
174
175
  entry.semanticId,
175
176
  entry.principle.content,
@@ -183,6 +184,7 @@ export async function runConsolidation(db, embeddingProvider, options = {}) {
183
184
  embeddingProvider.modelVersion,
184
185
  llmProvider?.modelName || null,
185
186
  entry.semanticNow,
187
+ entry.maxSalience,
186
188
  );
187
189
 
188
190
  db.prepare('INSERT INTO vec_semantics(id, embedding, state) VALUES (?, ?, ?)').run(
package/src/context.js ADDED
@@ -0,0 +1,15 @@
1
/**
 * Fraction of retrieval-context keys whose values strictly equal the
 * corresponding encoding-context values. Keys present only at retrieval
 * count against the match (denominator is all retrieval keys).
 * @param {Object<string, *>|null|undefined} encodingContext
 * @param {Object<string, *>|null|undefined} retrievalContext
 * @returns {number} Ratio in [0, 1]; 0 when either context is missing,
 *   retrieval context is empty, or no keys overlap.
 */
export function contextMatchRatio(encodingContext, retrievalContext) {
  if (!encodingContext || !retrievalContext) return 0;
  const retrievalKeys = Object.keys(retrievalContext);
  if (retrievalKeys.length === 0) return 0;
  let shared = 0;
  let matched = 0;
  for (const key of retrievalKeys) {
    if (!(key in encodingContext)) continue;
    shared += 1;
    if (encodingContext[key] === retrievalContext[key]) matched += 1;
  }
  if (shared === 0) return 0;
  return matched / retrievalKeys.length;
}

/**
 * Retrieval multiplier for context reinstatement: neutral 1.0 when either
 * context is absent, scaling up to 1.0 + weight on a full match.
 * @param {Object<string, *>|null|undefined} encodingContext
 * @param {Object<string, *>|null|undefined} retrievalContext
 * @param {number} [weight=0.3] - Maximum proportional boost.
 * @returns {number}
 */
export function contextModifier(encodingContext, retrievalContext, weight = 0.3) {
  if (!encodingContext || !retrievalContext) return 1.0;
  return 1.0 + weight * contextMatchRatio(encodingContext, retrievalContext);
}
package/src/db.js CHANGED
@@ -11,6 +11,8 @@ const SCHEMA = `
11
11
  source TEXT NOT NULL CHECK(source IN ('direct-observation','told-by-user','tool-result','inference','model-generated')),
12
12
  source_reliability REAL NOT NULL,
13
13
  salience REAL DEFAULT 0.5,
14
+ context TEXT DEFAULT '{}',
15
+ affect TEXT DEFAULT '{}',
14
16
  tags TEXT,
15
17
  causal_trigger TEXT,
16
18
  causal_consequence TEXT,
@@ -42,7 +44,9 @@ const SCHEMA = `
42
44
  created_at TEXT NOT NULL,
43
45
  last_reinforced_at TEXT,
44
46
  retrieval_count INTEGER DEFAULT 0,
45
- challenge_count INTEGER DEFAULT 0
47
+ challenge_count INTEGER DEFAULT 0,
48
+ interference_count INTEGER DEFAULT 0,
49
+ salience REAL DEFAULT 0.5
46
50
  );
47
51
 
48
52
  CREATE TABLE IF NOT EXISTS procedures (
@@ -58,7 +62,9 @@ const SCHEMA = `
58
62
  embedding_version TEXT,
59
63
  created_at TEXT NOT NULL,
60
64
  last_reinforced_at TEXT,
61
- retrieval_count INTEGER DEFAULT 0
65
+ retrieval_count INTEGER DEFAULT 0,
66
+ interference_count INTEGER DEFAULT 0,
67
+ salience REAL DEFAULT 0.5
62
68
  );
63
69
 
64
70
  CREATE TABLE IF NOT EXISTS causal_links (
package/src/decay.js CHANGED
@@ -1,4 +1,5 @@
1
- import { computeConfidence, DEFAULT_HALF_LIVES } from './confidence.js';
1
+ import { computeConfidence, DEFAULT_HALF_LIVES, salienceModifier } from './confidence.js';
2
+ import { interferenceModifier } from './interference.js';
2
3
  import { daysBetween } from './utils.js';
3
4
 
4
5
  /**
@@ -13,7 +14,7 @@ export function applyDecay(db, { dormantThreshold = 0.1, halfLives } = {}) {
13
14
 
14
15
  const semantics = db.prepare(`
15
16
  SELECT id, supporting_count, contradicting_count, created_at,
16
- last_reinforced_at, retrieval_count
17
+ last_reinforced_at, retrieval_count, interference_count, salience
17
18
  FROM semantics WHERE state = 'active'
18
19
  `).all();
19
20
 
@@ -26,7 +27,7 @@ export function applyDecay(db, { dormantThreshold = 0.1, halfLives } = {}) {
26
27
  ? daysBetween(sem.last_reinforced_at, now)
27
28
  : ageDays;
28
29
 
29
- const confidence = computeConfidence({
30
+ let confidence = computeConfidence({
30
31
  sourceType: 'tool-result',
31
32
  supportingCount: sem.supporting_count || 0,
32
33
  contradictingCount: sem.contradicting_count || 0,
@@ -35,6 +36,9 @@ export function applyDecay(db, { dormantThreshold = 0.1, halfLives } = {}) {
35
36
  retrievalCount: sem.retrieval_count || 0,
36
37
  daysSinceRetrieval,
37
38
  });
39
+ confidence *= interferenceModifier(sem.interference_count || 0);
40
+ confidence *= salienceModifier(sem.salience ?? 0.5);
41
+ confidence = Math.max(0, Math.min(1, confidence));
38
42
 
39
43
  if (confidence < dormantThreshold) {
40
44
  markDormantSem.run('dormant', sem.id);
@@ -44,7 +48,7 @@ export function applyDecay(db, { dormantThreshold = 0.1, halfLives } = {}) {
44
48
 
45
49
  const procedures = db.prepare(`
46
50
  SELECT id, success_count, failure_count, created_at,
47
- last_reinforced_at, retrieval_count
51
+ last_reinforced_at, retrieval_count, interference_count, salience
48
52
  FROM procedures WHERE state = 'active'
49
53
  `).all();
50
54
 
@@ -57,7 +61,7 @@ export function applyDecay(db, { dormantThreshold = 0.1, halfLives } = {}) {
57
61
  ? daysBetween(proc.last_reinforced_at, now)
58
62
  : ageDays;
59
63
 
60
- const confidence = computeConfidence({
64
+ let confidence = computeConfidence({
61
65
  sourceType: 'tool-result',
62
66
  supportingCount: proc.success_count || 0,
63
67
  contradictingCount: proc.failure_count || 0,
@@ -66,6 +70,9 @@ export function applyDecay(db, { dormantThreshold = 0.1, halfLives } = {}) {
66
70
  retrievalCount: proc.retrieval_count || 0,
67
71
  daysSinceRetrieval,
68
72
  });
73
+ confidence *= interferenceModifier(proc.interference_count || 0);
74
+ confidence *= salienceModifier(proc.salience ?? 0.5);
75
+ confidence = Math.max(0, Math.min(1, confidence));
69
76
 
70
77
  if (confidence < dormantThreshold) {
71
78
  markDormantProc.run('dormant', proc.id);
package/src/encode.js CHANGED
@@ -1,5 +1,6 @@
1
1
  import { generateId } from './ulid.js';
2
2
  import { sourceReliability } from './confidence.js';
3
+ import { arousalSalienceBoost } from './affect.js';
3
4
 
4
5
  /**
5
6
  * @param {import('better-sqlite3').Database} db
@@ -14,6 +15,9 @@ export async function encodeEpisode(db, embeddingProvider, {
14
15
  causal,
15
16
  tags,
16
17
  supersedes,
18
+ context = {},
19
+ affect = {},
20
+ arousalWeight = 0.3,
17
21
  }) {
18
22
  if (!content || typeof content !== 'string') throw new Error('content must be a non-empty string');
19
23
  if (salience < 0 || salience > 1) throw new Error('salience must be between 0 and 1');
@@ -25,15 +29,20 @@ export async function encodeEpisode(db, embeddingProvider, {
25
29
  const id = generateId();
26
30
  const now = new Date().toISOString();
27
31
 
32
+ const boost = arousalSalienceBoost(affect.arousal);
33
+ const effectiveSalience = Math.min(1.0, salience + (boost * arousalWeight));
34
+
28
35
  const insertAndLink = db.transaction(() => {
29
36
  db.prepare(`
30
37
  INSERT INTO episodes (
31
- id, content, embedding, source, source_reliability, salience,
38
+ id, content, embedding, source, source_reliability, salience, context, affect,
32
39
  tags, causal_trigger, causal_consequence, created_at,
33
40
  embedding_model, embedding_version, supersedes
34
- ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
41
+ ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
35
42
  `).run(
36
- id, content, embeddingBuffer, source, reliability, salience,
43
+ id, content, embeddingBuffer, source, reliability, effectiveSalience,
44
+ JSON.stringify(context),
45
+ JSON.stringify(affect),
37
46
  tags ? JSON.stringify(tags) : null,
38
47
  causal?.trigger || null, causal?.consequence || null,
39
48
  now, embeddingProvider.modelName, embeddingProvider.modelVersion,
package/src/forget.js ADDED
@@ -0,0 +1,111 @@
1
/**
 * Forget a single memory by id across the three memory stores.
 *
 * By default this is a soft delete: the row is marked superseded (episodes:
 * superseded_by = 'forgotten'; semantics/procedures: state = 'superseded')
 * and only its vector-index entry is removed, preserving provenance.
 * With purge=true the base-table row is deleted as well.
 *
 * Fixes over the previous version: the per-store statement pair now runs in
 * a single transaction (a failure between the two statements could leave the
 * vector index and base table out of sync — purgeMemories already did this),
 * and the three near-identical branches are collapsed into one loop.
 *
 * @param {import('better-sqlite3').Database} db
 * @param {string} id - Memory id (episodic, semantic, or procedural).
 * @param {{purge?: boolean}} [options] - purge=true removes the row entirely.
 * @returns {{id: string, type: string, purged: boolean}}
 * @throws {Error} When no memory with that id exists in any store.
 */
export function forgetMemory(db, id, { purge = false } = {}) {
  const stores = [
    { table: 'episodes', vec: 'vec_episodes', type: 'episodic',
      softDelete: "UPDATE episodes SET superseded_by = 'forgotten' WHERE id = ?" },
    { table: 'semantics', vec: 'vec_semantics', type: 'semantic',
      softDelete: "UPDATE semantics SET state = 'superseded' WHERE id = ?" },
    { table: 'procedures', vec: 'vec_procedures', type: 'procedural',
      softDelete: "UPDATE procedures SET state = 'superseded' WHERE id = ?" },
  ];

  for (const store of stores) {
    const row = db.prepare(`SELECT id FROM ${store.table} WHERE id = ?`).get(id);
    if (!row) continue;

    // Both statements succeed or neither does.
    db.transaction(() => {
      if (purge) {
        db.prepare(`DELETE FROM ${store.table} WHERE id = ?`).run(id);
      } else {
        db.prepare(store.softDelete).run(id);
      }
      db.prepare(`DELETE FROM ${store.vec} WHERE id = ?`).run(id);
    })();

    return { id, type: store.type, purged: purge };
  }

  throw new Error(`Memory not found: ${id}`);
}
40
+
41
/**
 * Permanently delete every soft-deleted memory — superseded episodes, and
 * superseded/dormant/rolled-back semantics and procedures — together with
 * their vector-index rows, inside one transaction.
 * @param {import('better-sqlite3').Database} db
 * @returns {{episodes: number, semantics: number, procedures: number}} Deletion counts.
 */
export function purgeMemories(db) {
  const stores = [
    { key: 'episodes', table: 'episodes', vec: 'vec_episodes',
      where: 'superseded_by IS NOT NULL' },
    { key: 'semantics', table: 'semantics', vec: 'vec_semantics',
      where: "state IN ('superseded', 'dormant', 'rolled_back')" },
    { key: 'procedures', table: 'procedures', vec: 'vec_procedures',
      where: "state IN ('superseded', 'dormant', 'rolled_back')" },
  ];

  const counts = { episodes: 0, semantics: 0, procedures: 0 };

  db.transaction(() => {
    for (const { key, table, vec, where } of stores) {
      const dead = db.prepare(`SELECT id FROM ${table} WHERE ${where}`).all();
      const deleteVec = db.prepare(`DELETE FROM ${vec} WHERE id = ?`);
      const deleteRow = db.prepare(`DELETE FROM ${table} WHERE id = ?`);
      for (const { id } of dead) {
        deleteVec.run(id);
        deleteRow.run(id);
      }
      counts[key] = dead.length;
    }
  })();

  return counts;
}
75
+
76
/**
 * Soft-delete (or purge) the single memory most semantically similar to a
 * free-text query, searching all three memory stores.
 * @param {import('better-sqlite3').Database} db
 * @param {object} embeddingProvider - Must expose embed() and vectorToBuffer().
 * @param {string} query
 * @param {{minSimilarity?: number, purge?: boolean}} [options]
 * @returns {Promise<object|null>} forgetMemory() result, or null when no
 *   candidate reaches minSimilarity.
 */
export async function forgetByQuery(db, embeddingProvider, query, { minSimilarity = 0.9, purge = false } = {}) {
  const embedded = await embeddingProvider.embed(query);
  const queryBuffer = embeddingProvider.vectorToBuffer(embedded);

  // Nearest live neighbor (k = 1) from each store; filter out empty stores.
  const nearest = [
    db.prepare(`
      SELECT e.id, (1.0 - v.distance) AS similarity, 'episodic' AS type
      FROM vec_episodes v JOIN episodes e ON e.id = v.id
      WHERE v.embedding MATCH ? AND k = 1 AND e.superseded_by IS NULL
    `).get(queryBuffer),
    db.prepare(`
      SELECT s.id, (1.0 - v.distance) AS similarity, 'semantic' AS type
      FROM vec_semantics v JOIN semantics s ON s.id = v.id
      WHERE v.embedding MATCH ? AND k = 1 AND (v.state = 'active' OR v.state = 'context_dependent')
    `).get(queryBuffer),
    db.prepare(`
      SELECT p.id, (1.0 - v.distance) AS similarity, 'procedural' AS type
      FROM vec_procedures v JOIN procedures p ON p.id = v.id
      WHERE v.embedding MATCH ? AND k = 1 AND (v.state = 'active' OR v.state = 'context_dependent')
    `).get(queryBuffer),
  ].filter(Boolean);

  if (nearest.length === 0) return null;

  // Highest similarity across stores wins; strict '>' keeps the earlier
  // store on ties, matching the previous stable-sort behavior.
  const best = nearest.reduce((top, row) => (row.similarity > top.similarity ? row : top));
  if (best.similarity < minSimilarity) return null;

  return forgetMemory(db, best.id, { purge });
}
package/src/index.js CHANGED
@@ -1,5 +1,5 @@
1
1
  export { Audrey } from './audrey.js';
2
- export { computeConfidence, sourceReliability, DEFAULT_SOURCE_RELIABILITY, DEFAULT_WEIGHTS, DEFAULT_HALF_LIVES } from './confidence.js';
2
+ export { computeConfidence, sourceReliability, salienceModifier, DEFAULT_SOURCE_RELIABILITY, DEFAULT_WEIGHTS, DEFAULT_HALF_LIVES } from './confidence.js';
3
3
  export { createEmbeddingProvider, MockEmbeddingProvider, OpenAIEmbeddingProvider } from './embedding.js';
4
4
  export { createLLMProvider, MockLLMProvider, AnthropicLLMProvider, OpenAILLMProvider } from './llm.js';
5
5
  export { recall, recallStream } from './recall.js';
@@ -14,3 +14,7 @@ export { exportMemories } from './export.js';
14
14
  export { importMemories } from './import.js';
15
15
  export { suggestConsolidationParams } from './adaptive.js';
16
16
  export { reembedAll } from './migrate.js';
17
+ export { forgetMemory, forgetByQuery, purgeMemories } from './forget.js';
18
+ export { applyInterference, interferenceModifier } from './interference.js';
19
+ export { contextMatchRatio, contextModifier } from './context.js';
20
+ export { arousalSalienceBoost, affectSimilarity, moodCongruenceModifier, detectResonance } from './affect.js';
@@ -0,0 +1,51 @@
1
/**
 * Confidence penalty from retroactive interference: each interfering
 * encoding shrinks the modifier hyperbolically (1, 1/1.1, 1/1.2, ...
 * at the default weight of 0.1).
 * @param {number} interferenceCount - Number of interference events recorded.
 * @param {number} [weight=0.1] - Penalty strength per event.
 * @returns {number} Multiplier in (0, 1].
 */
export function interferenceModifier(interferenceCount, weight = 0.1) {
  const penalty = weight * interferenceCount;
  return 1 / (1 + penalty);
}
4
+
5
/**
 * Retroactive interference: a newly encoded episode weakens semantically
 * similar consolidated memories by bumping their interference_count, which
 * interferenceModifier() later folds into confidence.
 *
 * Fixes over the previous version: the two near-identical per-store
 * query/update passes are collapsed into one loop, all counter updates run
 * in a single transaction (previously a mid-loop failure could leave some
 * counters bumped and others not), the count is null-guarded, and the unused
 * `weight` config destructure is dropped.
 *
 * @param {import('better-sqlite3').Database} db
 * @param {object} embeddingProvider - Must expose embed() and vectorToBuffer().
 * @param {string} episodeId - Id of the newly encoded episode (not used in
 *   matching; kept for interface symmetry with detectResonance).
 * @param {{content: string}} episode - Content of the competing new episode.
 * @param {{enabled?: boolean, k?: number, threshold?: number, weight?: number}} [config]
 * @returns {Promise<Array<{id: string, type: string, newCount: number, similarity: number}>>}
 *   Memories whose interference_count was incremented.
 */
export async function applyInterference(db, embeddingProvider, episodeId, { content }, config = {}) {
  const { enabled = true, k = 5, threshold = 0.6 } = config;

  if (!enabled) return [];

  const vector = await embeddingProvider.embed(content);
  const buffer = embeddingProvider.vectorToBuffer(vector);

  // Identical similarity query shape for both consolidated stores.
  const stores = [
    { type: 'semantic', vec: 'vec_semantics', table: 'semantics', alias: 's' },
    { type: 'procedural', vec: 'vec_procedures', table: 'procedures', alias: 'p' },
  ];

  const affected = [];

  db.transaction(() => {
    for (const { type, vec, table, alias } of stores) {
      const hits = db.prepare(`
        SELECT ${alias}.id, ${alias}.interference_count, (1.0 - v.distance) AS similarity
        FROM ${vec} v
        JOIN ${table} ${alias} ON ${alias}.id = v.id
        WHERE v.embedding MATCH ?
        AND k = ?
        AND (v.state = 'active' OR v.state = 'context_dependent')
      `).all(buffer, k);

      const update = db.prepare(`UPDATE ${table} SET interference_count = ? WHERE id = ?`);

      for (const hit of hits) {
        if (hit.similarity < threshold) continue;
        const newCount = (hit.interference_count ?? 0) + 1;
        update.run(newCount, hit.id);
        affected.push({ id: hit.id, type, newCount, similarity: hit.similarity });
      }
    }
  })();

  return affected;
}