audrey 0.3.1 → 0.3.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -403,7 +403,7 @@ brain.on('error', (err) => { ... });
403
403
 
404
404
  ### `brain.close()`
405
405
 
406
- Close the database connection and stop auto-consolidation.
406
+ Close the database connection.
407
407
 
408
408
  ## Architecture
409
409
 
@@ -456,7 +456,7 @@ All mutations use SQLite transactions. CHECK constraints enforce valid states an
456
456
  ## Running Tests
457
457
 
458
458
  ```bash
459
- npm test # 194 tests across 17 files
459
+ npm test # 208 tests across 17 files
460
460
  npm run test:watch
461
461
  ```
462
462
 
@@ -522,28 +522,37 @@ Demonstrates the full pipeline: encode 3 rate-limit observations → consolidate
522
522
  - [x] Automatic migration from v0.2.0 embedding BLOBs to vec0 tables
523
523
  - [x] 168 tests across 16 test files
524
524
 
525
- ### v0.3.1 — MCP Server (current)
525
+ ### v0.3.1 — MCP Server + JSDoc Types
526
526
 
527
527
  - [x] MCP tool server via `@modelcontextprotocol/sdk` with stdio transport
528
528
  - [x] 5 tools: `memory_encode`, `memory_recall`, `memory_consolidate`, `memory_introspect`, `memory_resolve_truth`
529
529
  - [x] Configuration via environment variables (data dir, embedding provider, LLM provider)
530
530
  - [x] One-command install: `npx audrey install` (auto-detects API keys)
531
531
  - [x] CLI subcommands: `install`, `uninstall`, `status`
532
+ - [x] JSDoc type annotations on all public exports (16 source files)
533
+ - [x] Published to npm with proper package metadata
532
534
  - [x] 194 tests across 17 test files
533
535
 
534
- ### v0.3.5 — Embedding Migration (deferred from v0.3.0)
536
+ ### v0.3.3 — Hardening (current)
535
537
 
536
- - [ ] Embedding migration pipeline (re-embed when models change)
537
- - [ ] Re-consolidation queue (re-run consolidation with new embedding model)
538
+ - [x] Fix status command dimension mismatch (read stored dimensions from existing database)
539
+ - [x] Safe JSON parsing in LLM providers (descriptive errors on malformed responses)
540
+ - [x] Fetch timeouts on all API calls (configurable, default 30s)
541
+ - [x] Config validation in Audrey constructor (dormantThreshold, minEpisodes)
542
+ - [x] encodeBatch error isolation tests
543
+ - [x] 208 tests across 17 test files
538
544
 
539
545
  ### v0.4.0 — Type Safety & Developer Experience
540
546
 
541
547
  - [ ] Full TypeScript conversion with strict mode
542
- - [ ] JSDoc types on all exports (interim before TS conversion)
543
548
  - [ ] Published type declarations (.d.ts)
544
549
  - [ ] Schema versioning and migration system
545
550
  - [ ] Structured logging (optional, pluggable)
546
- - [ ] npm publish with proper package metadata
551
+
552
+ ### v0.4.5 — Embedding Migration (deferred from v0.3.0)
553
+
554
+ - [ ] Embedding migration pipeline (re-embed when models change)
555
+ - [ ] Re-consolidation queue (re-run consolidation with new embedding model)
547
556
 
548
557
  ### v0.5.0 — Advanced Memory Features
549
558
 
@@ -1,7 +1,7 @@
1
1
  import { homedir } from 'node:os';
2
2
  import { join } from 'node:path';
3
3
 
4
- export const VERSION = '0.3.1';
4
+ export const VERSION = '0.3.3';
5
5
  export const SERVER_NAME = 'audrey-memory';
6
6
  export const DEFAULT_DATA_DIR = join(homedir(), '.audrey', 'data');
7
7
 
@@ -7,6 +7,7 @@ import { join } from 'node:path';
7
7
  import { existsSync, readFileSync } from 'node:fs';
8
8
  import { execFileSync } from 'node:child_process';
9
9
  import { Audrey } from '../src/index.js';
10
+ import { readStoredDimensions } from '../src/db.js';
10
11
  import { VERSION, SERVER_NAME, DEFAULT_DATA_DIR, buildAudreyConfig, buildInstallArgs } from './config.js';
11
12
 
12
13
  const VALID_SOURCES = ['direct-observation', 'told-by-user', 'tool-result', 'inference', 'model-generated'];
@@ -45,6 +46,13 @@ function install() {
45
46
  console.log('Detected ANTHROPIC_API_KEY — enabling LLM-powered consolidation + contradiction detection');
46
47
  }
47
48
 
49
+ // Remove existing entry first so re-installs work cleanly
50
+ try {
51
+ execFileSync('claude', ['mcp', 'remove', SERVER_NAME], { stdio: 'ignore' });
52
+ } catch {
53
+ // Not registered yet — that's fine
54
+ }
55
+
48
56
  const args = buildInstallArgs(process.env);
49
57
 
50
58
  try {
@@ -100,10 +108,11 @@ function status() {
100
108
 
101
109
  if (existsSync(DEFAULT_DATA_DIR)) {
102
110
  try {
111
+ const dimensions = readStoredDimensions(DEFAULT_DATA_DIR) || 8;
103
112
  const audrey = new Audrey({
104
113
  dataDir: DEFAULT_DATA_DIR,
105
114
  agent: 'status-check',
106
- embedding: { provider: 'mock', dimensions: 8 },
115
+ embedding: { provider: 'mock', dimensions },
107
116
  });
108
117
  const stats = audrey.introspect();
109
118
  audrey.close();
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "audrey",
3
- "version": "0.3.1",
3
+ "version": "0.3.3",
4
4
  "description": "Biological memory architecture for AI agents — encode, consolidate, and recall memories with confidence decay, contradiction detection, and causal graphs",
5
5
  "type": "module",
6
6
  "main": "src/index.js",
package/src/audrey.js CHANGED
@@ -81,15 +81,24 @@ export class Audrey extends EventEmitter {
81
81
  decay = {},
82
82
  } = {}) {
83
83
  super();
84
+
85
+ const dormantThreshold = decay.dormantThreshold ?? 0.1;
86
+ if (dormantThreshold < 0 || dormantThreshold > 1) {
87
+ throw new Error(`dormantThreshold must be between 0 and 1, got: ${dormantThreshold}`);
88
+ }
89
+
90
+ const minEpisodes = consolidation.minEpisodes ?? 3;
91
+ if (!Number.isInteger(minEpisodes) || minEpisodes < 1) {
92
+ throw new Error(`minEpisodes must be a positive integer, got: ${minEpisodes}`);
93
+ }
94
+
84
95
  this.agent = agent;
85
96
  this.dataDir = dataDir;
86
97
  this.embeddingProvider = createEmbeddingProvider(embedding);
87
98
  this.db = createDatabase(dataDir, { dimensions: this.embeddingProvider.dimensions });
88
99
  this.llmProvider = llm ? createLLMProvider(llm) : null;
89
- this.consolidationConfig = {
90
- minEpisodes: consolidation.minEpisodes || 3,
91
- };
92
- this.decayConfig = { dormantThreshold: decay.dormantThreshold || 0.1 };
100
+ this.consolidationConfig = { minEpisodes };
101
+ this.decayConfig = { dormantThreshold };
93
102
  }
94
103
 
95
104
  _emitValidation(id, params) {
package/src/db.js CHANGED
@@ -1,7 +1,7 @@
1
1
  import Database from 'better-sqlite3';
2
2
  import * as sqliteVec from 'sqlite-vec';
3
3
  import { join } from 'node:path';
4
- import { mkdirSync } from 'node:fs';
4
+ import { mkdirSync, existsSync } from 'node:fs';
5
5
 
6
6
  const SCHEMA = `
7
7
  CREATE TABLE IF NOT EXISTS episodes (
@@ -232,10 +232,21 @@ export function createDatabase(dataDir, options = {}) {
232
232
  return db;
233
233
  }
234
234
 
235
- /**
236
- * @param {import('better-sqlite3').Database} db
237
- * @returns {void}
238
- */
235
+ export function readStoredDimensions(dataDir) {
236
+ const dbPath = join(dataDir, 'audrey.db');
237
+ if (!existsSync(dbPath)) return null;
238
+ const db = new Database(dbPath, { readonly: true });
239
+ try {
240
+ const row = db.prepare("SELECT value FROM audrey_config WHERE key = 'dimensions'").get();
241
+ return row ? parseInt(row.value, 10) : null;
242
+ } catch (err) {
243
+ if (err.message?.includes('no such table')) return null;
244
+ throw err;
245
+ } finally {
246
+ db.close();
247
+ }
248
+ }
249
+
239
250
  export function closeDatabase(db) {
240
251
  if (db && db.open) {
241
252
  db.close();
package/src/embedding.js CHANGED
@@ -76,10 +76,11 @@ export class MockEmbeddingProvider {
76
76
  /** @implements {EmbeddingProvider} */
77
77
  export class OpenAIEmbeddingProvider {
78
78
  /** @param {Partial<OpenAIEmbeddingConfig>} [config={}] */
79
- constructor({ apiKey, model = 'text-embedding-3-small', dimensions = 1536 } = {}) {
79
+ constructor({ apiKey, model = 'text-embedding-3-small', dimensions = 1536, timeout = 30000 } = {}) {
80
80
  this.apiKey = apiKey || process.env.OPENAI_API_KEY;
81
81
  this.model = model;
82
82
  this.dimensions = dimensions;
83
+ this.timeout = timeout;
83
84
  this.modelName = model;
84
85
  this.modelVersion = 'latest';
85
86
  }
@@ -89,17 +90,24 @@ export class OpenAIEmbeddingProvider {
89
90
  * @returns {Promise<number[]>}
90
91
  */
91
92
  async embed(text) {
92
- const response = await fetch('https://api.openai.com/v1/embeddings', {
93
- method: 'POST',
94
- headers: {
95
- 'Authorization': `Bearer ${this.apiKey}`,
96
- 'Content-Type': 'application/json',
97
- },
98
- body: JSON.stringify({ input: text, model: this.model, dimensions: this.dimensions }),
99
- });
100
- if (!response.ok) throw new Error(`OpenAI embedding failed: ${response.status}`);
101
- const data = await response.json();
102
- return data.data[0].embedding;
93
+ const controller = new AbortController();
94
+ const timer = setTimeout(() => controller.abort(), this.timeout);
95
+ try {
96
+ const response = await fetch('https://api.openai.com/v1/embeddings', {
97
+ method: 'POST',
98
+ headers: {
99
+ 'Authorization': `Bearer ${this.apiKey}`,
100
+ 'Content-Type': 'application/json',
101
+ },
102
+ body: JSON.stringify({ input: text, model: this.model, dimensions: this.dimensions }),
103
+ signal: controller.signal,
104
+ });
105
+ if (!response.ok) throw new Error(`OpenAI embedding failed: ${response.status}`);
106
+ const data = await response.json();
107
+ return data.data[0].embedding;
108
+ } finally {
109
+ clearTimeout(timer);
110
+ }
103
111
  }
104
112
 
105
113
  /**
@@ -107,17 +115,24 @@ export class OpenAIEmbeddingProvider {
107
115
  * @returns {Promise<number[][]>}
108
116
  */
109
117
  async embedBatch(texts) {
110
- const response = await fetch('https://api.openai.com/v1/embeddings', {
111
- method: 'POST',
112
- headers: {
113
- 'Authorization': `Bearer ${this.apiKey}`,
114
- 'Content-Type': 'application/json',
115
- },
116
- body: JSON.stringify({ input: texts, model: this.model, dimensions: this.dimensions }),
117
- });
118
- if (!response.ok) throw new Error(`OpenAI embedding failed: ${response.status}`);
119
- const data = await response.json();
120
- return data.data.map(d => d.embedding);
118
+ const controller = new AbortController();
119
+ const timer = setTimeout(() => controller.abort(), this.timeout);
120
+ try {
121
+ const response = await fetch('https://api.openai.com/v1/embeddings', {
122
+ method: 'POST',
123
+ headers: {
124
+ 'Authorization': `Bearer ${this.apiKey}`,
125
+ 'Content-Type': 'application/json',
126
+ },
127
+ body: JSON.stringify({ input: texts, model: this.model, dimensions: this.dimensions }),
128
+ signal: controller.signal,
129
+ });
130
+ if (!response.ok) throw new Error(`OpenAI embedding failed: ${response.status}`);
131
+ const data = await response.json();
132
+ return data.data.map(d => d.embedding);
133
+ } finally {
134
+ clearTimeout(timer);
135
+ }
121
136
  }
122
137
 
123
138
  /**
package/src/llm.js CHANGED
@@ -92,10 +92,11 @@ export class MockLLMProvider {
92
92
  /** @implements {LLMProvider} */
93
93
  export class AnthropicLLMProvider {
94
94
  /** @param {Partial<AnthropicLLMConfig>} [config={}] */
95
- constructor({ apiKey, model = 'claude-sonnet-4-6', maxTokens = 1024 } = {}) {
95
+ constructor({ apiKey, model = 'claude-sonnet-4-6', maxTokens = 1024, timeout = 30000 } = {}) {
96
96
  this.apiKey = apiKey || process.env.ANTHROPIC_API_KEY;
97
97
  this.model = model;
98
98
  this.maxTokens = maxTokens;
99
+ this.timeout = timeout;
99
100
  this.modelName = model;
100
101
  this.modelVersion = 'latest';
101
102
  }
@@ -116,23 +117,30 @@ export class AnthropicLLMProvider {
116
117
  };
117
118
  if (systemMsg) body.system = systemMsg;
118
119
 
119
- const response = await fetch('https://api.anthropic.com/v1/messages', {
120
- method: 'POST',
121
- headers: {
122
- 'x-api-key': this.apiKey,
123
- 'anthropic-version': '2023-06-01',
124
- 'content-type': 'application/json',
125
- },
126
- body: JSON.stringify(body),
127
- });
128
-
129
- if (!response.ok) {
130
- throw new Error(`Anthropic API error: ${response.status}`);
120
+ const controller = new AbortController();
121
+ const timer = setTimeout(() => controller.abort(), this.timeout);
122
+ try {
123
+ const response = await fetch('https://api.anthropic.com/v1/messages', {
124
+ method: 'POST',
125
+ headers: {
126
+ 'x-api-key': this.apiKey,
127
+ 'anthropic-version': '2023-06-01',
128
+ 'content-type': 'application/json',
129
+ },
130
+ body: JSON.stringify(body),
131
+ signal: controller.signal,
132
+ });
133
+
134
+ if (!response.ok) {
135
+ throw new Error(`Anthropic API error: ${response.status}`);
136
+ }
137
+
138
+ const data = await response.json();
139
+ const text = data.content?.[0]?.text || '';
140
+ return { content: text };
141
+ } finally {
142
+ clearTimeout(timer);
131
143
  }
132
-
133
- const data = await response.json();
134
- const text = data.content?.[0]?.text || '';
135
- return { content: text };
136
144
  }
137
145
 
138
146
  /**
@@ -142,17 +150,22 @@ export class AnthropicLLMProvider {
142
150
  */
143
151
  async json(messages, options = {}) {
144
152
  const result = await this.complete(messages, options);
145
- return JSON.parse(result.content);
153
+ try {
154
+ return JSON.parse(result.content);
155
+ } catch {
156
+ throw new Error(`Failed to parse LLM response as JSON: ${result.content.slice(0, 200)}`);
157
+ }
146
158
  }
147
159
  }
148
160
 
149
161
  /** @implements {LLMProvider} */
150
162
  export class OpenAILLMProvider {
151
163
  /** @param {Partial<OpenAILLMConfig>} [config={}] */
152
- constructor({ apiKey, model = 'gpt-4o', maxTokens = 1024 } = {}) {
164
+ constructor({ apiKey, model = 'gpt-4o', maxTokens = 1024, timeout = 30000 } = {}) {
153
165
  this.apiKey = apiKey || process.env.OPENAI_API_KEY;
154
166
  this.model = model;
155
167
  this.maxTokens = maxTokens;
168
+ this.timeout = timeout;
156
169
  this.modelName = model;
157
170
  this.modelVersion = 'latest';
158
171
  }
@@ -169,22 +182,29 @@ export class OpenAILLMProvider {
169
182
  messages,
170
183
  };
171
184
 
172
- const response = await fetch('https://api.openai.com/v1/chat/completions', {
173
- method: 'POST',
174
- headers: {
175
- 'Authorization': `Bearer ${this.apiKey}`,
176
- 'Content-Type': 'application/json',
177
- },
178
- body: JSON.stringify(body),
179
- });
180
-
181
- if (!response.ok) {
182
- throw new Error(`OpenAI API error: ${response.status}`);
185
+ const controller = new AbortController();
186
+ const timer = setTimeout(() => controller.abort(), this.timeout);
187
+ try {
188
+ const response = await fetch('https://api.openai.com/v1/chat/completions', {
189
+ method: 'POST',
190
+ headers: {
191
+ 'Authorization': `Bearer ${this.apiKey}`,
192
+ 'Content-Type': 'application/json',
193
+ },
194
+ body: JSON.stringify(body),
195
+ signal: controller.signal,
196
+ });
197
+
198
+ if (!response.ok) {
199
+ throw new Error(`OpenAI API error: ${response.status}`);
200
+ }
201
+
202
+ const data = await response.json();
203
+ const text = data.choices?.[0]?.message?.content || '';
204
+ return { content: text };
205
+ } finally {
206
+ clearTimeout(timer);
183
207
  }
184
-
185
- const data = await response.json();
186
- const text = data.choices?.[0]?.message?.content || '';
187
- return { content: text };
188
208
  }
189
209
 
190
210
  /**
@@ -194,7 +214,11 @@ export class OpenAILLMProvider {
194
214
  */
195
215
  async json(messages, options = {}) {
196
216
  const result = await this.complete(messages, options);
197
- return JSON.parse(result.content);
217
+ try {
218
+ return JSON.parse(result.content);
219
+ } catch {
220
+ throw new Error(`Failed to parse LLM response as JSON: ${result.content.slice(0, 200)}`);
221
+ }
198
222
  }
199
223
  }
200
224