audrey 0.3.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/src/db.js ADDED
@@ -0,0 +1,234 @@
1
+ import Database from 'better-sqlite3';
2
+ import * as sqliteVec from 'sqlite-vec';
3
+ import { join } from 'node:path';
4
+ import { mkdirSync } from 'node:fs';
5
+
6
// Relational schema for audrey's long-term memory store (SQLite).
//
// Tables:
//   episodes           - raw observations with provenance (source + reliability),
//                        optional causal annotations, and supersession links.
//   semantics          - consolidated beliefs distilled from episodes, with
//                        evidence counters and a lifecycle `state`.
//   procedures         - learned how-to memories with success/failure tallies.
//   causal_links       - typed cause->effect edges between memories.
//   contradictions     - tracked conflicts between two claims and their resolution.
//   consolidation_runs - audit log of consolidation passes (inputs, outputs, status).
//   audrey_config      - simple key/value store (e.g. the 'dimensions' setting
//                        written by createDatabase).
//
// Conventions: ids are TEXT primary keys; timestamps are ISO-8601 TEXT;
// `tags` holds a JSON-encoded array (see encodeEpisode). The other
// list-valued columns (*_episode_ids, *_memory_ids) presumably hold JSON
// arrays as well — NOTE(review): confirm against their writers, not visible here.
// Embedding BLOBs are Float32 vectors produced by an embedding provider.
const SCHEMA = `
CREATE TABLE IF NOT EXISTS episodes (
  id TEXT PRIMARY KEY,
  content TEXT NOT NULL,
  embedding BLOB,
  source TEXT NOT NULL CHECK(source IN ('direct-observation','told-by-user','tool-result','inference','model-generated')),
  source_reliability REAL NOT NULL,
  salience REAL DEFAULT 0.5,
  tags TEXT,
  causal_trigger TEXT,
  causal_consequence TEXT,
  created_at TEXT NOT NULL,
  embedding_model TEXT,
  embedding_version TEXT,
  supersedes TEXT,
  superseded_by TEXT,
  consolidated INTEGER DEFAULT 0,
  FOREIGN KEY (supersedes) REFERENCES episodes(id)
);

CREATE TABLE IF NOT EXISTS semantics (
  id TEXT PRIMARY KEY,
  content TEXT NOT NULL,
  embedding BLOB,
  state TEXT DEFAULT 'active' CHECK(state IN ('active','disputed','superseded','context_dependent','dormant','rolled_back')),
  conditions TEXT,
  evidence_episode_ids TEXT,
  evidence_count INTEGER DEFAULT 0,
  supporting_count INTEGER DEFAULT 0,
  contradicting_count INTEGER DEFAULT 0,
  source_type_diversity INTEGER DEFAULT 0,
  consolidation_checkpoint TEXT,
  embedding_model TEXT,
  embedding_version TEXT,
  consolidation_model TEXT,
  consolidation_prompt_hash TEXT,
  created_at TEXT NOT NULL,
  last_reinforced_at TEXT,
  retrieval_count INTEGER DEFAULT 0,
  challenge_count INTEGER DEFAULT 0
);

CREATE TABLE IF NOT EXISTS procedures (
  id TEXT PRIMARY KEY,
  content TEXT NOT NULL,
  embedding BLOB,
  state TEXT DEFAULT 'active' CHECK(state IN ('active','disputed','superseded','context_dependent','dormant','rolled_back')),
  trigger_conditions TEXT,
  evidence_episode_ids TEXT,
  success_count INTEGER DEFAULT 0,
  failure_count INTEGER DEFAULT 0,
  embedding_model TEXT,
  embedding_version TEXT,
  created_at TEXT NOT NULL,
  last_reinforced_at TEXT,
  retrieval_count INTEGER DEFAULT 0
);

CREATE TABLE IF NOT EXISTS causal_links (
  id TEXT PRIMARY KEY,
  cause_id TEXT NOT NULL,
  effect_id TEXT NOT NULL,
  link_type TEXT DEFAULT 'causal' CHECK(link_type IN ('causal','correlational','temporal')),
  mechanism TEXT,
  confidence REAL,
  evidence_count INTEGER DEFAULT 1,
  created_at TEXT NOT NULL
);

CREATE TABLE IF NOT EXISTS contradictions (
  id TEXT PRIMARY KEY,
  claim_a_id TEXT NOT NULL,
  claim_b_id TEXT NOT NULL,
  claim_a_type TEXT NOT NULL,
  claim_b_type TEXT NOT NULL,
  state TEXT DEFAULT 'open' CHECK(state IN ('open','resolved','context_dependent','reopened')),
  resolution TEXT,
  resolved_at TEXT,
  reopened_at TEXT,
  reopen_evidence_id TEXT,
  created_at TEXT NOT NULL
);

CREATE TABLE IF NOT EXISTS consolidation_runs (
  id TEXT PRIMARY KEY,
  checkpoint_cursor TEXT,
  input_episode_ids TEXT,
  output_memory_ids TEXT,
  confidence_deltas TEXT,
  consolidation_model TEXT,
  consolidation_prompt_hash TEXT,
  started_at TEXT,
  completed_at TEXT,
  status TEXT DEFAULT 'running' CHECK(status IN ('running','completed','failed','rolled_back'))
);

CREATE TABLE IF NOT EXISTS audrey_config (
  key TEXT PRIMARY KEY,
  value TEXT NOT NULL
);

CREATE INDEX IF NOT EXISTS idx_episodes_created ON episodes(created_at);
CREATE INDEX IF NOT EXISTS idx_episodes_consolidated ON episodes(consolidated);
CREATE INDEX IF NOT EXISTS idx_episodes_source ON episodes(source);
CREATE INDEX IF NOT EXISTS idx_semantics_state ON semantics(state);
CREATE INDEX IF NOT EXISTS idx_procedures_state ON procedures(state);
CREATE INDEX IF NOT EXISTS idx_contradictions_state ON contradictions(state);
CREATE INDEX IF NOT EXISTS idx_consolidation_status ON consolidation_runs(status);
`;
115
+
116
// Create the three sqlite-vec (vec0) virtual tables that shadow the
// relational stores for vector similarity search. `dimensions` is validated
// by the caller (createDatabase) before reaching this interpolation.
// Each table keys on the row id, stores a cosine-metric float vector, and
// carries the columns the recall queries filter on (source/consolidated for
// episodes, lifecycle state for semantics and procedures).
function createVec0Tables(db, dimensions) {
  db.exec(`
    CREATE VIRTUAL TABLE IF NOT EXISTS vec_episodes USING vec0(
      id text primary key,
      embedding float[${dimensions}] distance_metric=cosine,
      source text,
      consolidated integer
    );
  `);
  db.exec(`
    CREATE VIRTUAL TABLE IF NOT EXISTS vec_semantics USING vec0(
      id text primary key,
      embedding float[${dimensions}] distance_metric=cosine,
      state text
    );
  `);
  db.exec(`
    CREATE VIRTUAL TABLE IF NOT EXISTS vec_procedures USING vec0(
      id text primary key,
      embedding float[${dimensions}] distance_metric=cosine,
      state text
    );
  `);
}
140
+
141
/**
 * Copy embedded rows from a relational table into its vec0 shadow table.
 *
 * No-ops when the target already contains rows (backfill already happened)
 * or when the source has nothing with an embedding. The copy runs inside a
 * single transaction so a partial backfill is never left behind.
 *
 * @param {object} db   better-sqlite3 connection
 * @param {object} spec source/target names, column lists, a placeholder
 *                      string, and a transform mapping a source row to the
 *                      insert's bind values
 */
function migrateTable(db, { source, target, selectCols, insertCols, placeholders, transform }) {
  const existingCount = db.prepare(`SELECT COUNT(*) as c FROM ${target}`).get().c;
  if (existingCount > 0) return;

  const pending = db.prepare(`SELECT ${selectCols} FROM ${source} WHERE embedding IS NOT NULL`).all();
  if (pending.length === 0) return;

  const insertStmt = db.prepare(`INSERT INTO ${target}(${insertCols}) VALUES (${placeholders})`);
  const copyAll = db.transaction(() => {
    pending.forEach((row) => insertStmt.run(...transform(row)));
  });
  copyAll();
}
156
+
157
/**
 * Backfill every vec0 shadow table from rows that already carry embeddings.
 * Each backfill is independently skipped when its target is already
 * populated (see migrateTable).
 */
function migrateEmbeddingsToVec0(db) {
  const backfills = [
    {
      source: 'episodes',
      target: 'vec_episodes',
      selectCols: 'id, embedding, source, consolidated',
      insertCols: 'id, embedding, source, consolidated',
      placeholders: '?, ?, ?, ?',
      // The integer aux column is bound as BigInt — NOTE(review): presumably
      // required by better-sqlite3/vec0 for integer columns; confirm.
      transform: (row) => [row.id, row.embedding, row.source, BigInt(row.consolidated ?? 0)],
    },
    {
      source: 'semantics',
      target: 'vec_semantics',
      selectCols: 'id, embedding, state',
      insertCols: 'id, embedding, state',
      placeholders: '?, ?, ?',
      transform: (row) => [row.id, row.embedding, row.state],
    },
    {
      source: 'procedures',
      target: 'vec_procedures',
      selectCols: 'id, embedding, state',
      insertCols: 'id, embedding, state',
      placeholders: '?, ?, ?',
      transform: (row) => [row.id, row.embedding, row.state],
    },
  ];

  for (const spec of backfills) {
    migrateTable(db, spec);
  }
}
185
+
186
/**
 * Open (creating if necessary) the audrey SQLite database under `dataDir`.
 *
 * Always applies the relational schema and standard pragmas (WAL, foreign
 * keys, 5s busy timeout). When `options.dimensions` is given, additionally
 * loads the sqlite-vec extension, records/validates the embedding dimension
 * in audrey_config, creates the vec0 shadow tables, and backfills them from
 * existing embedded rows.
 *
 * @param {string} dataDir directory for the database file (created if missing)
 * @param {{dimensions?: number}} options embedding dimension for vec0 tables
 * @returns {Database} an open better-sqlite3 connection (caller closes it)
 * @throws {Error} when `dimensions` is not a positive integer, or disagrees
 *                 with the dimension the database was originally created with
 */
export function createDatabase(dataDir, options = {}) {
  const { dimensions } = options;

  // Fix: validate before touching the filesystem or opening a connection,
  // so an invalid value can no longer leak an open database handle.
  if (dimensions != null && (!Number.isInteger(dimensions) || dimensions <= 0)) {
    throw new Error(`dimensions must be a positive integer, got: ${dimensions}`);
  }

  mkdirSync(dataDir, { recursive: true });
  const dbPath = join(dataDir, 'audrey.db');
  const db = new Database(dbPath);
  db.pragma('journal_mode = WAL');
  db.pragma('foreign_keys = ON');
  db.pragma('busy_timeout = 5000');
  db.exec(SCHEMA);

  if (dimensions != null) {
    try {
      sqliteVec.load(db);

      const existing = db.prepare(
        "SELECT value FROM audrey_config WHERE key = 'dimensions'"
      ).get();

      if (existing) {
        const storedDims = Number.parseInt(existing.value, 10);
        if (storedDims !== dimensions) {
          throw new Error(
            `Dimension mismatch: database was created with ${storedDims} dimensions, but ${dimensions} were requested`
          );
        }
      } else {
        // First vec-enabled open: pin the dimension for all future opens.
        db.prepare(
          "INSERT INTO audrey_config (key, value) VALUES ('dimensions', ?)"
        ).run(String(dimensions));
      }

      createVec0Tables(db, dimensions);
      migrateEmbeddingsToVec0(db);
    } catch (err) {
      // Fix: close the handle on ANY failure during vec setup — the original
      // only closed it on the dimension-mismatch path, leaking it if
      // sqliteVec.load or the backfill threw.
      db.close();
      throw err;
    }
  }

  return db;
}
229
+
230
/**
 * Close a database connection if it is still open.
 * Safe to call with null/undefined or an already-closed connection (no-op).
 */
export function closeDatabase(db) {
  if (db?.open) {
    db.close();
  }
}
package/src/decay.js ADDED
@@ -0,0 +1,72 @@
1
+ import { computeConfidence, DEFAULT_HALF_LIVES } from './confidence.js';
2
+ import { daysBetween } from './utils.js';
3
+
4
/**
 * Sweep active semantic and procedural memories, recompute each one's
 * confidence, and flip any that fall below `dormantThreshold` to 'dormant'.
 *
 * @param {object} db better-sqlite3 connection
 * @param {{dormantThreshold?: number}} opts confidence cutoff (default 0.1)
 * @returns {{totalEvaluated: number, transitionedToDormant: number, timestamp: string}}
 */
export function applyDecay(db, { dormantThreshold = 0.1 } = {}) {
  const now = new Date();

  // The two passes were duplicated loops; factored into one helper that is
  // parameterized by table name, evidence columns, and half-life.
  const semantic = decayMemoryTable(db, {
    table: 'semantics',
    supportColumn: 'supporting_count',
    contradictColumn: 'contradicting_count',
    halfLifeDays: DEFAULT_HALF_LIVES.semantic,
    now,
    dormantThreshold,
  });

  const procedural = decayMemoryTable(db, {
    table: 'procedures',
    supportColumn: 'success_count',
    contradictColumn: 'failure_count',
    halfLifeDays: DEFAULT_HALF_LIVES.procedural,
    now,
    dormantThreshold,
  });

  return {
    totalEvaluated: semantic.evaluated + procedural.evaluated,
    transitionedToDormant: semantic.dormant + procedural.dormant,
    timestamp: now.toISOString(),
  };
}

// Decay pass over one memory table: evaluate every 'active' row and mark the
// low-confidence ones dormant. Returns {evaluated, dormant} tallies.
function decayMemoryTable(db, { table, supportColumn, contradictColumn, halfLifeDays, now, dormantThreshold }) {
  const rows = db.prepare(`
    SELECT id, ${supportColumn}, ${contradictColumn}, created_at,
           last_reinforced_at, retrieval_count
    FROM ${table} WHERE state = 'active'
  `).all();

  const markDormant = db.prepare(`UPDATE ${table} SET state = ? WHERE id = ?`);
  let dormant = 0;

  for (const row of rows) {
    const ageDays = daysBetween(row.created_at, now);
    // A never-reinforced memory decays from its creation time.
    const daysSinceRetrieval = row.last_reinforced_at
      ? daysBetween(row.last_reinforced_at, now)
      : ageDays;

    const confidence = computeConfidence({
      // NOTE(review): decay scores every memory as 'tool-result' sourced
      // regardless of its actual provenance — confirm this is intentional.
      sourceType: 'tool-result',
      supportingCount: row[supportColumn] || 0,
      contradictingCount: row[contradictColumn] || 0,
      ageDays,
      halfLifeDays,
      retrievalCount: row.retrieval_count || 0,
      daysSinceRetrieval,
    });

    if (confidence < dormantThreshold) {
      markDormant.run('dormant', row.id);
      dormant++;
    }
  }

  return { evaluated: rows.length, dormant };
}
@@ -0,0 +1,88 @@
1
+ import { createHash } from 'node:crypto';
2
+
3
/**
 * Deterministic, network-free embedding provider for tests.
 * Embeds text by hashing it (sha256) and spreading the digest bytes over an
 * L2-normalized vector, so equal texts always map to equal vectors.
 */
export class MockEmbeddingProvider {
  constructor({ dimensions = 64 } = {}) {
    this.dimensions = dimensions;
    this.modelName = 'mock-embedding';
    this.modelVersion = '1.0.0';
  }

  /** Map text to a unit-length vector of `this.dimensions` floats in [-1, 1]. */
  async embed(text) {
    const digest = createHash('sha256').update(text).digest();
    const raw = Array.from(
      { length: this.dimensions },
      (_, i) => (digest[i % digest.length] / 255) * 2 - 1,
    );
    const norm = Math.sqrt(raw.reduce((sum, component) => sum + component * component, 0));
    return raw.map((component) => component / norm);
  }

  /** Embed each text; results are returned in input order. */
  async embedBatch(texts) {
    const vectors = [];
    for (const text of texts) {
      vectors.push(await this.embed(text));
    }
    return vectors;
  }

  /** Serialize a vector as a native-endian Float32 BLOB for SQLite storage. */
  vectorToBuffer(vector) {
    const floats = new Float32Array(vector);
    return Buffer.from(floats.buffer);
  }

  /** Inverse of vectorToBuffer: reinterpret a BLOB as a float array. */
  bufferToVector(buffer) {
    const view = new Float32Array(buffer.buffer, buffer.byteOffset, buffer.byteLength / 4);
    return [...view];
  }
}
32
+
33
/**
 * Embedding provider backed by the OpenAI embeddings API.
 * Reads OPENAI_API_KEY from the environment when no apiKey is given.
 */
export class OpenAIEmbeddingProvider {
  constructor({ apiKey, model = 'text-embedding-3-small', dimensions = 1536 } = {}) {
    this.apiKey = apiKey || process.env.OPENAI_API_KEY;
    this.model = model;
    this.dimensions = dimensions;
    this.modelName = model;
    this.modelVersion = 'latest';
  }

  /**
   * Shared request path (fix: embed/embedBatch previously duplicated the
   * whole fetch call). `input` may be a string or an array of strings,
   * exactly as the API accepts both.
   * @returns {Promise<number[][]>} one embedding per input
   * @throws {Error} with the HTTP status on a non-2xx response
   */
  async _requestEmbeddings(input) {
    const response = await fetch('https://api.openai.com/v1/embeddings', {
      method: 'POST',
      headers: {
        'Authorization': `Bearer ${this.apiKey}`,
        'Content-Type': 'application/json',
      },
      body: JSON.stringify({ input, model: this.model, dimensions: this.dimensions }),
    });
    if (!response.ok) throw new Error(`OpenAI embedding failed: ${response.status}`);
    const data = await response.json();
    return data.data.map((d) => d.embedding);
  }

  /** Embed a single text; returns one vector. */
  async embed(text) {
    const [vector] = await this._requestEmbeddings(text);
    return vector;
  }

  /** Embed many texts in one request; vectors come back in input order. */
  async embedBatch(texts) {
    return this._requestEmbeddings(texts);
  }

  /** Serialize a vector as a native-endian Float32 BLOB for SQLite storage. */
  vectorToBuffer(vector) {
    return Buffer.from(new Float32Array(vector).buffer);
  }

  /** Inverse of vectorToBuffer: reinterpret a BLOB as a float array. */
  bufferToVector(buffer) {
    return Array.from(new Float32Array(buffer.buffer, buffer.byteOffset, buffer.byteLength / 4));
  }
}
78
+
79
/**
 * Instantiate an embedding provider from config. `config.provider` selects
 * the implementation; the full config object is forwarded to its constructor.
 * @throws {Error} for unrecognized provider names
 */
export function createEmbeddingProvider(config) {
  const registry = new Map([
    ['mock', MockEmbeddingProvider],
    ['openai', OpenAIEmbeddingProvider],
  ]);
  const Provider = registry.get(config.provider);
  if (!Provider) {
    throw new Error(`Unknown embedding provider: ${config.provider}. Valid: mock, openai`);
  }
  return new Provider(config);
}
package/src/encode.js ADDED
@@ -0,0 +1,46 @@
1
+ import { generateId } from './ulid.js';
2
+ import { sourceReliability } from './confidence.js';
3
+
4
/**
 * Encode a new episodic memory: embed the content, insert the episode row
 * plus its vec0 shadow row in one transaction, and maintain the
 * supersedes/superseded_by back-link when replacing an older episode.
 *
 * @param {object} db better-sqlite3 connection (with vec_episodes present)
 * @param {object} embeddingProvider provider with embed()/vectorToBuffer()
 * @param {object} episode content, source, optional salience [0,1], causal
 *                 {trigger, consequence}, tags array, and supersedes id
 * @returns {Promise<string>} the new episode's id
 * @throws {Error} on invalid content, salience, or tags
 */
export async function encodeEpisode(db, embeddingProvider, {
  content,
  source,
  salience = 0.5,
  causal,
  tags,
  supersedes,
}) {
  if (!content || typeof content !== 'string') throw new Error('content must be a non-empty string');
  // Fix: the original range check (`salience < 0 || salience > 1`) silently
  // accepted NaN and non-number values, since both comparisons are false.
  if (typeof salience !== 'number' || Number.isNaN(salience) || salience < 0 || salience > 1) {
    throw new Error('salience must be between 0 and 1');
  }
  if (tags && !Array.isArray(tags)) throw new Error('tags must be an array');

  const reliability = sourceReliability(source);
  const vector = await embeddingProvider.embed(content);
  const embeddingBuffer = embeddingProvider.vectorToBuffer(vector);
  const id = generateId();
  const now = new Date().toISOString();

  // Relational row, vec0 shadow row, and supersession back-link must land
  // atomically; a failure of any statement rolls back all three.
  const insertAndLink = db.transaction(() => {
    db.prepare(`
      INSERT INTO episodes (
        id, content, embedding, source, source_reliability, salience,
        tags, causal_trigger, causal_consequence, created_at,
        embedding_model, embedding_version, supersedes
      ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
    `).run(
      id, content, embeddingBuffer, source, reliability, salience,
      tags ? JSON.stringify(tags) : null,
      causal?.trigger || null, causal?.consequence || null,
      now, embeddingProvider.modelName, embeddingProvider.modelVersion,
      supersedes || null,
    );
    db.prepare(
      'INSERT INTO vec_episodes(id, embedding, source, consolidated) VALUES (?, ?, ?, ?)'
    ).run(id, embeddingBuffer, source, BigInt(0));
    if (supersedes) {
      db.prepare('UPDATE episodes SET superseded_by = ? WHERE id = ?').run(id, supersedes);
    }
  });

  insertAndLink();
  return id;
}
package/src/index.js ADDED
@@ -0,0 +1,12 @@
1
// Public API surface of the audrey package.
// Re-exports the main Audrey class plus the lower-level building blocks
// (confidence math, embedding/LLM providers, recall, causal-graph helpers,
// and the consolidation prompt builders) so consumers can compose them
// directly without reaching into src/ paths.
export { Audrey } from './audrey.js';
export { computeConfidence, sourceReliability, DEFAULT_SOURCE_RELIABILITY, DEFAULT_WEIGHTS, DEFAULT_HALF_LIVES } from './confidence.js';
export { createEmbeddingProvider, MockEmbeddingProvider, OpenAIEmbeddingProvider } from './embedding.js';
export { createLLMProvider, MockLLMProvider, AnthropicLLMProvider, OpenAILLMProvider } from './llm.js';
export { recall, recallStream } from './recall.js';
export { addCausalLink, getCausalChain, articulateCausalLink } from './causal.js';
export {
  buildPrincipleExtractionPrompt,
  buildContradictionDetectionPrompt,
  buildCausalArticulationPrompt,
  buildContextResolutionPrompt,
} from './prompts.js';
@@ -0,0 +1,44 @@
1
+ import { safeJsonParse } from './utils.js';
2
+
3
/**
 * Read-only snapshot of the memory store: per-type counts, contradiction
 * tallies by state, and consolidation-run history.
 *
 * @param {object} db better-sqlite3 connection
 * @returns {object} counts, contradiction breakdown, last completed
 *                   consolidation timestamp (or null), and total run count
 */
export function introspect(db) {
  // Single round-trip for all memory counts; rolled-back memories are
  // excluded from the semantic/procedural totals.
  const totals = db.prepare(`
    SELECT
      (SELECT COUNT(*) FROM episodes) as episodic,
      (SELECT COUNT(*) FROM semantics WHERE state != 'rolled_back') as semantic,
      (SELECT COUNT(*) FROM procedures WHERE state != 'rolled_back') as procedural,
      (SELECT COUNT(*) FROM causal_links) as causal_links,
      (SELECT COUNT(*) FROM semantics WHERE state = 'dormant')
      + (SELECT COUNT(*) FROM procedures WHERE state = 'dormant') as dormant
  `).get();

  // SUM(CASE ...) yields NULL on an empty table, hence the ?? 0 below.
  const tally = db.prepare(`
    SELECT
      SUM(CASE WHEN state = 'open' THEN 1 ELSE 0 END) as open,
      SUM(CASE WHEN state = 'resolved' THEN 1 ELSE 0 END) as resolved,
      SUM(CASE WHEN state = 'context_dependent' THEN 1 ELSE 0 END) as context_dependent,
      SUM(CASE WHEN state = 'reopened' THEN 1 ELSE 0 END) as reopened
    FROM contradictions
  `).get();

  const latestRun = db.prepare(`
    SELECT completed_at FROM consolidation_runs
    WHERE status = 'completed' ORDER BY completed_at DESC LIMIT 1
  `).get();
  const runTotal = db.prepare('SELECT COUNT(*) as count FROM consolidation_runs').get().count;

  return {
    episodic: totals.episodic,
    semantic: totals.semantic,
    procedural: totals.procedural,
    causalLinks: totals.causal_links,
    dormant: totals.dormant,
    contradictions: {
      open: tally?.open ?? 0,
      resolved: tally?.resolved ?? 0,
      context_dependent: tally?.context_dependent ?? 0,
      reopened: tally?.reopened ?? 0,
    },
    lastConsolidation: latestRun?.completed_at ?? null,
    totalConsolidationRuns: runTotal,
  };
}
package/src/llm.js ADDED
@@ -0,0 +1,132 @@
1
// Marker identifiers used by MockLLMProvider to decide which canned response
// to return: each key is matched as a substring of the incoming system
// message (see MockLLMProvider._matchPromptType).
// NOTE(review): this assumes the prompt builders in prompts.js embed these
// exact identifiers in their system prompts — confirm when editing either side.
const PROMPT_TYPE_KEYS = [
  'principleExtraction',
  'contradictionDetection',
  'causalArticulation',
  'contextResolution',
];
7
+
8
/**
 * Canned-response LLM provider for tests. Configure with a `responses` map
 * keyed by prompt type (see PROMPT_TYPE_KEYS); the provider serves the entry
 * whose key appears as a substring of the request's system message.
 */
export class MockLLMProvider {
  constructor({ responses = {} } = {}) {
    this.responses = responses;
    this.modelName = 'mock-llm';
    this.modelVersion = '1.0.0';
  }

  /** First PROMPT_TYPE_KEYS entry contained in the system message, or null. */
  _matchPromptType(messages) {
    const systemMsg = messages.find((m) => m.role === 'system')?.content || '';
    return PROMPT_TYPE_KEYS.find((key) => systemMsg.includes(key)) ?? null;
  }

  /** Resolve the canned response as a value; `{}` when nothing matches. */
  async json(messages) {
    const promptType = this._matchPromptType(messages);
    const cannedResponse = promptType ? this.responses[promptType] : undefined;
    return cannedResponse !== undefined ? cannedResponse : {};
  }

  /**
   * Resolve the canned response as serialized text.
   * Fix: delegates to json() instead of duplicating the lookup logic; the
   * previous '{}' fallback is preserved because JSON.stringify({}) === '{}'.
   */
  async complete(messages) {
    return { content: JSON.stringify(await this.json(messages)) };
  }
}
35
+
36
/**
 * LLM provider backed by the Anthropic Messages API.
 * Reads ANTHROPIC_API_KEY from the environment when no apiKey is given.
 */
export class AnthropicLLMProvider {
  constructor({ apiKey, model = 'claude-sonnet-4-6', maxTokens = 1024 } = {}) {
    this.apiKey = apiKey || process.env.ANTHROPIC_API_KEY;
    this.model = model;
    this.maxTokens = maxTokens;
    this.modelName = model;
    this.modelVersion = 'latest';
  }

  /**
   * Send a chat completion request. Anthropic takes the system prompt as a
   * top-level `system` field, so it is split out of the message list here.
   * @returns {{content: string}} first content block's text ('' when absent)
   * @throws {Error} with the HTTP status on a non-2xx response
   */
  async complete(messages, options = {}) {
    const systemPrompt = messages.find((m) => m.role === 'system')?.content;
    const chatMessages = messages.filter((m) => m.role !== 'system');

    const payload = {
      model: this.model,
      max_tokens: options.maxTokens || this.maxTokens,
      messages: chatMessages,
    };
    if (systemPrompt) {
      payload.system = systemPrompt;
    }

    const response = await fetch('https://api.anthropic.com/v1/messages', {
      method: 'POST',
      headers: {
        'x-api-key': this.apiKey,
        'anthropic-version': '2023-06-01',
        'content-type': 'application/json',
      },
      body: JSON.stringify(payload),
    });

    if (!response.ok) {
      throw new Error(`Anthropic API error: ${response.status}`);
    }

    const data = await response.json();
    return { content: data.content?.[0]?.text || '' };
  }

  /** complete() then parse the response text as JSON (throws on invalid JSON). */
  async json(messages, options = {}) {
    const { content } = await this.complete(messages, options);
    return JSON.parse(content);
  }
}
80
+
81
/**
 * LLM provider backed by the OpenAI chat completions API.
 * Reads OPENAI_API_KEY from the environment when no apiKey is given.
 */
export class OpenAILLMProvider {
  constructor({ apiKey, model = 'gpt-4o', maxTokens = 1024 } = {}) {
    this.apiKey = apiKey || process.env.OPENAI_API_KEY;
    this.model = model;
    this.maxTokens = maxTokens;
    this.modelName = model;
    this.modelVersion = 'latest';
  }

  /**
   * Send a chat completion request. Unlike Anthropic, OpenAI accepts system
   * messages inline in the message list, so no splitting is needed.
   * @returns {{content: string}} first choice's message text ('' when absent)
   * @throws {Error} with the HTTP status on a non-2xx response
   */
  async complete(messages, options = {}) {
    const payload = {
      model: this.model,
      max_tokens: options.maxTokens || this.maxTokens,
      messages,
    };

    const response = await fetch('https://api.openai.com/v1/chat/completions', {
      method: 'POST',
      headers: {
        'Authorization': `Bearer ${this.apiKey}`,
        'Content-Type': 'application/json',
      },
      body: JSON.stringify(payload),
    });

    if (!response.ok) {
      throw new Error(`OpenAI API error: ${response.status}`);
    }

    const data = await response.json();
    return { content: data.choices?.[0]?.message?.content || '' };
  }

  /** complete() then parse the response text as JSON (throws on invalid JSON). */
  async json(messages, options = {}) {
    const { content } = await this.complete(messages, options);
    return JSON.parse(content);
  }
}
120
+
121
/**
 * Instantiate an LLM provider from config. `config.provider` selects the
 * implementation; the full config object is forwarded to its constructor.
 * @throws {Error} for unrecognized provider names
 */
export function createLLMProvider(config) {
  const registry = new Map([
    ['mock', MockLLMProvider],
    ['anthropic', AnthropicLLMProvider],
    ['openai', OpenAILLMProvider],
  ]);
  const Provider = registry.get(config.provider);
  if (!Provider) {
    throw new Error(`Unknown LLM provider: ${config.provider}. Valid: mock, anthropic, openai`);
  }
  return new Provider(config);
}