@mzhub/mem-ts 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +335 -0
- package/dist/BaseAdapter-BoRh1T7O.d.mts +75 -0
- package/dist/BaseAdapter-CQVX-gcA.d.ts +75 -0
- package/dist/BaseProvider-CEoiLGj5.d.ts +34 -0
- package/dist/BaseProvider-edMh_R9t.d.mts +34 -0
- package/dist/adapters/index.d.mts +259 -0
- package/dist/adapters/index.d.ts +259 -0
- package/dist/adapters/index.js +1570 -0
- package/dist/adapters/index.js.map +1 -0
- package/dist/adapters/index.mjs +1542 -0
- package/dist/adapters/index.mjs.map +1 -0
- package/dist/index-Ci5Q9G9H.d.mts +289 -0
- package/dist/index-Dl-Q2au9.d.ts +289 -0
- package/dist/index.d.mts +1206 -0
- package/dist/index.d.ts +1206 -0
- package/dist/index.js +5126 -0
- package/dist/index.js.map +1 -0
- package/dist/index.mjs +5058 -0
- package/dist/index.mjs.map +1 -0
- package/dist/middleware/index.d.mts +4 -0
- package/dist/middleware/index.d.ts +4 -0
- package/dist/middleware/index.js +63 -0
- package/dist/middleware/index.js.map +1 -0
- package/dist/middleware/index.mjs +59 -0
- package/dist/middleware/index.mjs.map +1 -0
- package/dist/providers/index.d.mts +96 -0
- package/dist/providers/index.d.ts +96 -0
- package/dist/providers/index.js +379 -0
- package/dist/providers/index.js.map +1 -0
- package/dist/providers/index.mjs +370 -0
- package/dist/providers/index.mjs.map +1 -0
- package/dist/types-G9qmfSeZ.d.mts +260 -0
- package/dist/types-G9qmfSeZ.d.ts +260 -0
- package/logo.png +0 -0
- package/package.json +114 -0
|
@@ -0,0 +1,370 @@
|
|
|
1
|
+
// esbuild CJS-interop shim: resolves `require` at call time so this ESM bundle
// can still perform dynamic requires where a CommonJS `require` exists, and
// fails with a clear error otherwise.
var __require = /* @__PURE__ */ ((x) => typeof require !== "undefined" ? require : typeof Proxy !== "undefined" ? new Proxy(x, {
  // Property access (e.g. __require.resolve) is forwarded to the real
  // `require` when present, otherwise to the throwing fallback function below.
  get: (a, b) => (typeof require !== "undefined" ? require : a)[b]
}) : x)(function(x) {
  if (typeof require !== "undefined") return require.apply(this, arguments);
  // Pure-ESM environment (and no Proxy to defer the check): dynamic require
  // is impossible, so fail loudly with the requested specifier.
  throw Error('Dynamic require of "' + x + '" is not supported');
});
|
|
7
|
+
|
|
8
|
+
// src/providers/BaseProvider.ts
/**
 * Common base for all LLM providers: validates and stores credentials,
 * the model id, and an optional base-URL override.
 */
var BaseProvider = class {
  apiKey;
  model;
  baseUrl;
  /**
   * @param config.apiKey  required API key; the constructor throws without it
   * @param config.model   optional model id; falls back to getDefaultModel()
   * @param config.baseUrl optional API endpoint override
   */
  constructor(config) {
    const { apiKey, model, baseUrl } = config;
    if (!apiKey) {
      throw new Error("API key is required");
    }
    this.apiKey = apiKey;
    // `||` (not `??`) on purpose: an empty-string model also falls back.
    this.model = model || this.getDefaultModel();
    this.baseUrl = baseUrl;
  }
  /**
   * Check if the provider SDK is available
   */
  static isAvailable() {
    return true;
  }
};
|
|
28
|
+
|
|
29
|
+
// src/providers/OpenAIProvider.ts
/**
 * OpenAI provider that talks to the REST API with the built-in fetch,
 * so no SDK dependency is required.
 */
var OpenAIProvider = class extends BaseProvider {
  endpoint;
  constructor(config) {
    super(config);
    // A custom gateway may be supplied via baseUrl; default to the public API.
    this.endpoint = this.baseUrl || "https://api.openai.com/v1";
  }
  getDefaultModel() {
    return "gpt-4o-mini";
  }
  getName() {
    return "openai";
  }
  /**
   * POST a chat completion and normalize the result to { content, usage }.
   * Throws on any non-2xx response, preferring the API-provided message.
   */
  async complete(options) {
    const { systemPrompt, userPrompt, maxTokens = 1000, temperature = 0.3, jsonMode = true } = options;
    const chatMessages = [
      { role: "system", content: systemPrompt },
      { role: "user", content: userPrompt }
    ];
    const requestBody = JSON.stringify({
      model: this.model,
      messages: chatMessages,
      max_tokens: maxTokens,
      temperature,
      // Only request strict JSON output when jsonMode is enabled.
      ...jsonMode && { response_format: { type: "json_object" } }
    });
    const response = await fetch(`${this.endpoint}/chat/completions`, {
      method: "POST",
      headers: {
        "Content-Type": "application/json",
        Authorization: `Bearer ${this.apiKey}`
      },
      body: requestBody
    });
    if (response.ok) {
      const data = await response.json();
      return {
        content: data.choices[0]?.message?.content || "",
        usage: {
          inputTokens: data.usage?.prompt_tokens || 0,
          outputTokens: data.usage?.completion_tokens || 0
        }
      };
    }
    // Fall back to the HTTP status text when the error body is not JSON.
    const errorData = await response.json().catch(() => ({ error: { message: response.statusText } }));
    throw new Error(
      `OpenAI API error: ${errorData.error?.message || response.statusText}`
    );
  }
};
|
|
83
|
+
|
|
84
|
+
// src/providers/AnthropicProvider.ts
/**
 * Anthropic provider backed by the optional @anthropic-ai/sdk package.
 * The SDK is loaded via dynamic import so it stays an optional dependency.
 */
var AnthropicProvider = class extends BaseProvider {
  client;
  constructor(config) {
    super(config);
    // Start loading the SDK eagerly, but swallow the rejection here: a missing
    // SDK would otherwise become an unhandled promise rejection (which crashes
    // modern Node by default) before the caller ever invokes complete().
    // complete() re-runs initClient() and rethrows the install-hint error.
    this.initClient().catch(() => {});
  }
  // Dynamically import the SDK and construct the client.
  // Throws with an install hint when the package cannot be loaded.
  async initClient() {
    try {
      const { default: Anthropic } = await import('@anthropic-ai/sdk');
      this.client = new Anthropic({
        apiKey: this.apiKey,
        ...this.baseUrl && { baseURL: this.baseUrl }
      });
    } catch {
      throw new Error(
        "Anthropic SDK not installed. Run: npm install @anthropic-ai/sdk"
      );
    }
  }
  getDefaultModel() {
    return "claude-3-haiku-20240307";
  }
  getName() {
    return "anthropic";
  }
  // True when @anthropic-ai/sdk can be resolved in this environment.
  static isAvailable() {
    try {
      __require.resolve("@anthropic-ai/sdk");
      return true;
    } catch {
      return false;
    }
  }
  /**
   * Generate a completion. Lazily (re-)initializes the client if needed.
   * @returns {{ content: string, usage: { inputTokens, outputTokens } }}
   */
  async complete(options) {
    const {
      systemPrompt,
      userPrompt,
      maxTokens = 1e3,
      temperature = 0.3
    } = options;
    if (!this.client) {
      await this.initClient();
    }
    const client = this.client;
    const message = await client.messages.create({
      model: this.model,
      max_tokens: maxTokens,
      temperature,
      system: systemPrompt,
      messages: [{ role: "user", content: userPrompt }]
    });
    return {
      // Assumes the first content block is a text block — NOTE(review):
      // confirm for responses that may lead with non-text (e.g. tool_use).
      content: message.content[0]?.text || "",
      usage: {
        inputTokens: message.usage?.input_tokens || 0,
        outputTokens: message.usage?.output_tokens || 0
      }
    };
  }
};
|
|
145
|
+
|
|
146
|
+
// src/providers/GeminiProvider.ts
/**
 * Google Gemini provider backed by the optional @google/generative-ai package.
 * The SDK is loaded via dynamic import so it stays an optional dependency.
 */
var GeminiProvider = class extends BaseProvider {
  genAI;
  constructor(config) {
    super(config);
    // Start loading the SDK eagerly, but swallow the rejection here: a missing
    // SDK would otherwise become an unhandled promise rejection (which crashes
    // modern Node by default) before the caller ever invokes complete().
    // complete() re-runs initClient() and rethrows the install-hint error.
    this.initClient().catch(() => {});
  }
  // Dynamically import the SDK and construct the client.
  // NOTE(review): baseUrl is not applied here — the client is built with the
  // API key only; confirm whether an endpoint override should be supported.
  async initClient() {
    try {
      const { GoogleGenerativeAI } = await import('@google/generative-ai');
      this.genAI = new GoogleGenerativeAI(this.apiKey);
    } catch {
      throw new Error(
        "Google Generative AI SDK not installed. Run: npm install @google/generative-ai"
      );
    }
  }
  getDefaultModel() {
    return "gemini-2.0-flash";
  }
  getName() {
    return "gemini";
  }
  // True when @google/generative-ai can be resolved in this environment.
  static isAvailable() {
    try {
      __require.resolve("@google/generative-ai");
      return true;
    } catch {
      return false;
    }
  }
  /**
   * Generate a completion. Lazily (re-)initializes the client if needed.
   * @returns {{ content: string, usage: { inputTokens, outputTokens } }}
   */
  async complete(options) {
    const {
      systemPrompt,
      userPrompt,
      maxTokens = 1e3,
      temperature = 0.3
    } = options;
    if (!this.genAI) {
      await this.initClient();
    }
    const genAI = this.genAI;
    const model = genAI.getGenerativeModel({
      model: this.model,
      systemInstruction: systemPrompt
    });
    const result = await model.generateContent({
      contents: [{ role: "user", parts: [{ text: userPrompt }] }],
      generationConfig: {
        maxOutputTokens: maxTokens,
        temperature
      }
    });
    const response = result.response;
    return {
      content: response.text() || "",
      usage: {
        inputTokens: response.usageMetadata?.promptTokenCount || 0,
        outputTokens: response.usageMetadata?.candidatesTokenCount || 0
      }
    };
  }
};
|
|
209
|
+
|
|
210
|
+
// src/providers/GroqProvider.ts
/**
 * Groq provider backed by the optional groq-sdk package.
 * The SDK is loaded via dynamic import so it stays an optional dependency.
 */
var GroqProvider = class extends BaseProvider {
  client;
  constructor(config) {
    super(config);
    // Start loading the SDK eagerly, but swallow the rejection here: a missing
    // SDK would otherwise become an unhandled promise rejection (which crashes
    // modern Node by default) before the caller ever invokes complete().
    // complete() re-runs initClient() and rethrows the install-hint error.
    this.initClient().catch(() => {});
  }
  // Dynamically import the SDK and construct the client.
  // Throws with an install hint when the package cannot be loaded.
  async initClient() {
    try {
      const { default: Groq } = await import('groq-sdk');
      this.client = new Groq({
        apiKey: this.apiKey,
        ...this.baseUrl && { baseURL: this.baseUrl }
      });
    } catch {
      throw new Error("Groq SDK not installed. Run: npm install groq-sdk");
    }
  }
  getDefaultModel() {
    return "llama-3.3-70b-versatile";
  }
  getName() {
    return "groq";
  }
  // True when groq-sdk can be resolved in this environment.
  static isAvailable() {
    try {
      __require.resolve("groq-sdk");
      return true;
    } catch {
      return false;
    }
  }
  /**
   * Generate a completion. Lazily (re-)initializes the client if needed.
   * @returns {{ content: string, usage: { inputTokens, outputTokens } }}
   */
  async complete(options) {
    const {
      systemPrompt,
      userPrompt,
      maxTokens = 1e3,
      temperature = 0.3
    } = options;
    if (!this.client) {
      await this.initClient();
    }
    const client = this.client;
    const completion = await client.chat.completions.create({
      model: this.model,
      messages: [
        { role: "system", content: systemPrompt },
        { role: "user", content: userPrompt }
      ],
      max_tokens: maxTokens,
      temperature
    });
    return {
      content: completion.choices[0]?.message?.content || "",
      usage: {
        inputTokens: completion.usage?.prompt_tokens || 0,
        outputTokens: completion.usage?.completion_tokens || 0
      }
    };
  }
};
|
|
271
|
+
|
|
272
|
+
// src/providers/CerebrasProvider.ts
/**
 * Cerebras provider backed by the optional @cerebras/cerebras_cloud_sdk
 * package. The SDK is loaded via dynamic import so it stays optional.
 */
var CerebrasProvider = class extends BaseProvider {
  client;
  constructor(config) {
    super(config);
    // Start loading the SDK eagerly, but swallow the rejection here: a missing
    // SDK would otherwise become an unhandled promise rejection (which crashes
    // modern Node by default) before the caller ever invokes complete().
    // complete() re-runs initClient() and rethrows the install-hint error.
    this.initClient().catch(() => {});
  }
  // Dynamically import the SDK and construct the client.
  // Throws with an install hint when the package cannot be loaded.
  async initClient() {
    try {
      const { default: Cerebras } = await import('@cerebras/cerebras_cloud_sdk');
      this.client = new Cerebras({
        apiKey: this.apiKey,
        ...this.baseUrl && { baseURL: this.baseUrl }
      });
    } catch {
      throw new Error(
        "Cerebras SDK not installed. Run: npm install @cerebras/cerebras_cloud_sdk"
      );
    }
  }
  getDefaultModel() {
    return "llama-3.3-70b";
  }
  getName() {
    return "cerebras";
  }
  // True when @cerebras/cerebras_cloud_sdk can be resolved in this environment.
  static isAvailable() {
    try {
      __require.resolve("@cerebras/cerebras_cloud_sdk");
      return true;
    } catch {
      return false;
    }
  }
  /**
   * Generate a completion. Lazily (re-)initializes the client if needed.
   * Note: Cerebras uses `max_completion_tokens` (not `max_tokens`).
   * @returns {{ content: string, usage: { inputTokens, outputTokens } }}
   */
  async complete(options) {
    const {
      systemPrompt,
      userPrompt,
      maxTokens = 1e3,
      temperature = 0.3
    } = options;
    if (!this.client) {
      await this.initClient();
    }
    const client = this.client;
    const completion = await client.chat.completions.create({
      model: this.model,
      messages: [
        { role: "system", content: systemPrompt },
        { role: "user", content: userPrompt }
      ],
      max_completion_tokens: maxTokens,
      temperature
    });
    return {
      content: completion.choices[0]?.message?.content || "",
      usage: {
        inputTokens: completion.usage?.prompt_tokens || 0,
        outputTokens: completion.usage?.completion_tokens || 0
      }
    };
  }
};
|
|
335
|
+
|
|
336
|
+
// src/providers/index.ts
// Maps each supported provider name to its concrete class.
// Consulted by createProvider() to instantiate a provider from config.
var providerRegistry = {
  openai: OpenAIProvider,
  anthropic: AnthropicProvider,
  gemini: GeminiProvider,
  groq: GroqProvider,
  cerebras: CerebrasProvider
};
|
|
344
|
+
/**
 * Instantiate a provider from configuration.
 * @param config { provider, apiKey, model?, baseUrl? }
 * @throws Error when config.provider is not a registered provider name.
 */
function createProvider(config) {
  const ProviderClass = providerRegistry[config.provider];
  if (!ProviderClass) {
    const known = Object.keys(providerRegistry).join(", ");
    throw new Error(`Unknown provider: ${config.provider}. Available: ${known}`);
  }
  const { apiKey, model, baseUrl } = config;
  return new ProviderClass({ apiKey, model, baseUrl });
}
|
|
359
|
+
/**
 * List the provider names whose SDKs are resolvable in this environment.
 * "openai" is always included because it uses fetch rather than an SDK.
 */
function getAvailableProviders() {
  // Table of optional providers, in the order they should be reported.
  const optional = [
    ["anthropic", AnthropicProvider],
    ["gemini", GeminiProvider],
    ["groq", GroqProvider],
    ["cerebras", CerebrasProvider]
  ];
  const names = ["openai"];
  for (const [name, ProviderClass] of optional) {
    if (ProviderClass.isAvailable()) names.push(name);
  }
  return names;
}
|
|
367
|
+
|
|
368
|
+
export { AnthropicProvider, BaseProvider, CerebrasProvider, GeminiProvider, GroqProvider, OpenAIProvider, createProvider, getAvailableProviders };
|
|
369
|
+
//# sourceMappingURL=index.mjs.map
|
|
370
|
+
//# sourceMappingURL=index.mjs.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../../src/providers/BaseProvider.ts","../../src/providers/OpenAIProvider.ts","../../src/providers/AnthropicProvider.ts","../../src/providers/GeminiProvider.ts","../../src/providers/GroqProvider.ts","../../src/providers/CerebrasProvider.ts","../../src/providers/index.ts"],"names":[],"mappings":";;;;;;;;AAMO,IAAe,eAAf,MAA4B;AAAA,EACvB,MAAA;AAAA,EACA,KAAA;AAAA,EACA,OAAA;AAAA,EAEV,YAAY,MAAA,EAA8D;AACxE,IAAA,IAAI,CAAC,OAAO,MAAA,EAAQ;AAClB,MAAA,MAAM,IAAI,MAAM,qBAAqB,CAAA;AAAA,IACvC;AACA,IAAA,IAAA,CAAK,SAAS,MAAA,CAAO,MAAA;AACrB,IAAA,IAAA,CAAK,KAAA,GAAQ,MAAA,CAAO,KAAA,IAAS,IAAA,CAAK,eAAA,EAAgB;AAClD,IAAA,IAAA,CAAK,UAAU,MAAA,CAAO,OAAA;AAAA,EACxB;AAAA;AAAA;AAAA;AAAA,EAoBA,OAAO,WAAA,GAAuB;AAC5B,IAAA,OAAO,IAAA;AAAA,EACT;AACF;;;ACnCO,IAAM,cAAA,GAAN,cAA6B,YAAA,CAAa;AAAA,EACvC,QAAA;AAAA,EAER,YAAY,MAAA,EAA8D;AACxE,IAAA,KAAA,CAAM,MAAM,CAAA;AACZ,IAAA,IAAA,CAAK,QAAA,GAAW,KAAK,OAAA,IAAW,2BAAA;AAAA,EAClC;AAAA,EAEA,eAAA,GAA0B;AACxB,IAAA,OAAO,aAAA;AAAA,EACT;AAAA,EAEA,OAAA,GAAkB;AAChB,IAAA,OAAO,QAAA;AAAA,EACT;AAAA,EAEA,MAAM,SAAS,OAAA,EAAuD;AACpE,IAAA,MAAM;AAAA,MACJ,YAAA;AAAA,MACA,UAAA;AAAA,MACA,SAAA,GAAY,GAAA;AAAA,MACZ,WAAA,GAAc,GAAA;AAAA,MACd,QAAA,GAAW;AAAA,KACb,GAAI,OAAA;AAEJ,IAAA,MAAM,WAAW,MAAM,KAAA,CAAM,CAAA,EAAG,IAAA,CAAK,QAAQ,CAAA,iBAAA,CAAA,EAAqB;AAAA,MAChE,MAAA,EAAQ,MAAA;AAAA,MACR,OAAA,EAAS;AAAA,QACP,cAAA,EAAgB,kBAAA;AAAA,QAChB,aAAA,EAAe,CAAA,OAAA,EAAU,IAAA,CAAK,MAAM,CAAA;AAAA,OACtC;AAAA,MACA,IAAA,EAAM,KAAK,SAAA,CAAU;AAAA,QACnB,OAAO,IAAA,CAAK,KAAA;AAAA,QACZ,QAAA,EAAU;AAAA,UACR,EAAE,IAAA,EAAM,QAAA,EAAU,OAAA,EAAS,YAAA,EAAa;AAAA,UACxC,EAAE,IAAA,EAAM,MAAA,EAAQ,OAAA,EAAS,UAAA;AAAW,SACtC;AAAA,QACA,UAAA,EAAY,SAAA;AAAA,QACZ,WAAA;AAAA,QACA,GAAI,QAAA,IAAY,EAAE,iBAAiB,EAAE,IAAA,EAAM,eAAc;AAAE,OAC5D;AAAA,KACF,CAAA;AAED,IAAA,IAAI,CAAC,SAAS,EAAA,EAAI;AAChB,MAAA,MAAM,SAAA,GAAa,MAAM,QAAA,CACtB,IAAA,GACA,KAAA,CAAM,OAAO,EAAE,KAAA,EAAO,EAAE,OAAA,EAAS,QAAA,CAAS,UAAA,IAAa,CAAE,CAAA;AAG5D,MAAA,MAAM,IAAI,KAAA;AAAA,QACR,CAAA,kBAAA,EAAqB,SAAA,CAAU,KAAA,EAAO,OAAA,IAAW,SAAS,UAAU,CAAA
;AAAA,OACtE;AAAA,IACF;AAOA,IAAA,MAAM,IAAA,GAAQ,MAAM,QAAA,CAAS,IAAA,EAAK;AAElC,IAAA,OAAO;AAAA,MACL,SAAS,IAAA,CAAK,OAAA,CAAQ,CAAC,CAAA,EAAG,SAAS,OAAA,IAAW,EAAA;AAAA,MAC9C,KAAA,EAAO;AAAA,QACL,WAAA,EAAa,IAAA,CAAK,KAAA,EAAO,aAAA,IAAiB,CAAA;AAAA,QAC1C,YAAA,EAAc,IAAA,CAAK,KAAA,EAAO,iBAAA,IAAqB;AAAA;AACjD,KACF;AAAA,EACF;AACF;;;ACrEO,IAAM,iBAAA,GAAN,cAAgC,YAAA,CAAa;AAAA,EAC1C,MAAA;AAAA,EAER,YAAY,MAAA,EAA8D;AACxE,IAAA,KAAA,CAAM,MAAM,CAAA;AACZ,IAAA,IAAA,CAAK,UAAA,EAAW;AAAA,EAClB;AAAA,EAEA,MAAc,UAAA,GAA4B;AACxC,IAAA,IAAI;AAEF,MAAA,MAAM,EAAE,OAAA,EAAS,SAAA,EAAU,GAAI,MAAM,OAAO,mBAAmB,CAAA;AAC/D,MAAA,IAAA,CAAK,MAAA,GAAS,IAAI,SAAA,CAAU;AAAA,QAC1B,QAAQ,IAAA,CAAK,MAAA;AAAA,QACb,GAAI,IAAA,CAAK,OAAA,IAAW,EAAE,OAAA,EAAS,KAAK,OAAA;AAAQ,OAC7C,CAAA;AAAA,IACH,CAAA,CAAA,MAAQ;AACN,MAAA,MAAM,IAAI,KAAA;AAAA,QACR;AAAA,OACF;AAAA,IACF;AAAA,EACF;AAAA,EAEA,eAAA,GAA0B;AACxB,IAAA,OAAO,yBAAA;AAAA,EACT;AAAA,EAEA,OAAA,GAAkB;AAChB,IAAA,OAAO,WAAA;AAAA,EACT;AAAA,EAEA,OAAO,WAAA,GAAuB;AAC5B,IAAA,IAAI;AACF,MAAA,SAAA,CAAQ,QAAQ,mBAAmB,CAAA;AACnC,MAAA,OAAO,IAAA;AAAA,IACT,CAAA,CAAA,MAAQ;AACN,MAAA,OAAO,KAAA;AAAA,IACT;AAAA,EACF;AAAA,EAEA,MAAM,SAAS,OAAA,EAAuD;AACpE,IAAA,MAAM;AAAA,MACJ,YAAA;AAAA,MACA,UAAA;AAAA,MACA,SAAA,GAAY,GAAA;AAAA,MACZ,WAAA,GAAc;AAAA,KAChB,GAAI,OAAA;AAEJ,IAAA,IAAI,CAAC,KAAK,MAAA,EAAQ;AAChB,MAAA,MAAM,KAAK,UAAA,EAAW;AAAA,IACxB;AAGA,IAAA,MAAM,SAAS,IAAA,CAAK,MAAA;AACpB,IAAA,MAAM,OAAA,GAAU,MAAM,MAAA,CAAO,QAAA,CAAS,MAAA,CAAO;AAAA,MAC3C,OAAO,IAAA,CAAK,KAAA;AAAA,MACZ,UAAA,EAAY,SAAA;AAAA,MACZ,WAAA;AAAA,MACA,MAAA,EAAQ,YAAA;AAAA,MACR,UAAU,CAAC,EAAE,MAAM,MAAA,EAAQ,OAAA,EAAS,YAAY;AAAA,KACjD,CAAA;AAED,IAAA,OAAO;AAAA,MACL,OAAA,EAAS,OAAA,CAAQ,OAAA,CAAQ,CAAC,GAAG,IAAA,IAAQ,EAAA;AAAA,MACrC,KAAA,EAAO;AAAA,QACL,WAAA,EAAa,OAAA,CAAQ,KAAA,EAAO,YAAA,IAAgB,CAAA;AAAA,QAC5C,YAAA,EAAc,OAAA,CAAQ,KAAA,EAAO,aAAA,IAAiB;AAAA;AAChD,KACF;AAAA,EACF;AACF;;;ACtEO,IAAM,cAAA,GAAN,cAA6B,YAAA,CAAa;AAAA,EACvC,KAAA;AAAA,EAER,YAAY,MAAA,EAA8D;AACxE,IAAA,KAAA,CAAM,MAAM,CAAA;AACZ,IAAA,IAAA,CAAK,UAAA,EAAW;AAAA,EAClB;AAAA,EAEA,MAAc,UAAA,GAA4B;AAC
xC,IAAA,IAAI;AAEF,MAAA,MAAM,EAAE,kBAAA,EAAmB,GAAI,MAAM,OAAO,uBAAuB,CAAA;AACnE,MAAA,IAAA,CAAK,KAAA,GAAQ,IAAI,kBAAA,CAAmB,IAAA,CAAK,MAAM,CAAA;AAAA,IACjD,CAAA,CAAA,MAAQ;AACN,MAAA,MAAM,IAAI,KAAA;AAAA,QACR;AAAA,OACF;AAAA,IACF;AAAA,EACF;AAAA,EAEA,eAAA,GAA0B;AACxB,IAAA,OAAO,kBAAA;AAAA,EACT;AAAA,EAEA,OAAA,GAAkB;AAChB,IAAA,OAAO,QAAA;AAAA,EACT;AAAA,EAEA,OAAO,WAAA,GAAuB;AAC5B,IAAA,IAAI;AACF,MAAA,SAAA,CAAQ,QAAQ,uBAAuB,CAAA;AACvC,MAAA,OAAO,IAAA;AAAA,IACT,CAAA,CAAA,MAAQ;AACN,MAAA,OAAO,KAAA;AAAA,IACT;AAAA,EACF;AAAA,EAEA,MAAM,SAAS,OAAA,EAAuD;AACpE,IAAA,MAAM;AAAA,MACJ,YAAA;AAAA,MACA,UAAA;AAAA,MACA,SAAA,GAAY,GAAA;AAAA,MACZ,WAAA,GAAc;AAAA,KAChB,GAAI,OAAA;AAEJ,IAAA,IAAI,CAAC,KAAK,KAAA,EAAO;AACf,MAAA,MAAM,KAAK,UAAA,EAAW;AAAA,IACxB;AAGA,IAAA,MAAM,QAAQ,IAAA,CAAK,KAAA;AACnB,IAAA,MAAM,KAAA,GAAQ,MAAM,kBAAA,CAAmB;AAAA,MACrC,OAAO,IAAA,CAAK,KAAA;AAAA,MACZ,iBAAA,EAAmB;AAAA,KACpB,CAAA;AAED,IAAA,MAAM,MAAA,GAAS,MAAM,KAAA,CAAM,eAAA,CAAgB;AAAA,MACzC,QAAA,EAAU,CAAC,EAAE,IAAA,EAAM,MAAA,EAAQ,KAAA,EAAO,CAAC,EAAE,IAAA,EAAM,UAAA,EAAY,CAAA,EAAG,CAAA;AAAA,MAC1D,gBAAA,EAAkB;AAAA,QAChB,eAAA,EAAiB,SAAA;AAAA,QACjB;AAAA;AACF,KACD,CAAA;AAED,IAAA,MAAM,WAAW,MAAA,CAAO,QAAA;AAExB,IAAA,OAAO;AAAA,MACL,OAAA,EAAS,QAAA,CAAS,IAAA,EAAK,IAAK,EAAA;AAAA,MAC5B,KAAA,EAAO;AAAA,QACL,WAAA,EAAa,QAAA,CAAS,aAAA,EAAe,gBAAA,IAAoB,CAAA;AAAA,QACzD,YAAA,EAAc,QAAA,CAAS,aAAA,EAAe,oBAAA,IAAwB;AAAA;AAChE,KACF;AAAA,EACF;AACF;;;AC1EO,IAAM,YAAA,GAAN,cAA2B,YAAA,CAAa;AAAA,EACrC,MAAA;AAAA,EAER,YAAY,MAAA,EAA8D;AACxE,IAAA,KAAA,CAAM,MAAM,CAAA;AACZ,IAAA,IAAA,CAAK,UAAA,EAAW;AAAA,EAClB;AAAA,EAEA,MAAc,UAAA,GAA4B;AACxC,IAAA,IAAI;AAEF,MAAA,MAAM,EAAE,OAAA,EAAS,IAAA,EAAK,GAAI,MAAM,OAAO,UAAU,CAAA;AACjD,MAAA,IAAA,CAAK,MAAA,GAAS,IAAI,IAAA,CAAK;AAAA,QACrB,QAAQ,IAAA,CAAK,MAAA;AAAA,QACb,GAAI,IAAA,CAAK,OAAA,IAAW,EAAE,OAAA,EAAS,KAAK,OAAA;AAAQ,OAC7C,CAAA;AAAA,IACH,CAAA,CAAA,MAAQ;AACN,MAAA,MAAM,IAAI,MAAM,mDAAmD,CAAA;AAAA,IACrE;AAAA,EACF;AAAA,EAEA,eAAA,GAA0B;AACxB,IAAA,OAAO,yBAAA;AAAA,EACT;AAAA,EAEA,OAAA,GAAkB;AAChB,IAAA,OAAO,MAAA;AAAA,EACT;AAAA,EAEA,OAAO,WAAA,GAAuB;AAC5B
,IAAA,IAAI;AACF,MAAA,SAAA,CAAQ,QAAQ,UAAU,CAAA;AAC1B,MAAA,OAAO,IAAA;AAAA,IACT,CAAA,CAAA,MAAQ;AACN,MAAA,OAAO,KAAA;AAAA,IACT;AAAA,EACF;AAAA,EAEA,MAAM,SAAS,OAAA,EAAuD;AACpE,IAAA,MAAM;AAAA,MACJ,YAAA;AAAA,MACA,UAAA;AAAA,MACA,SAAA,GAAY,GAAA;AAAA,MACZ,WAAA,GAAc;AAAA,KAChB,GAAI,OAAA;AAEJ,IAAA,IAAI,CAAC,KAAK,MAAA,EAAQ;AAChB,MAAA,MAAM,KAAK,UAAA,EAAW;AAAA,IACxB;AAGA,IAAA,MAAM,SAAS,IAAA,CAAK,MAAA;AACpB,IAAA,MAAM,UAAA,GAAa,MAAM,MAAA,CAAO,IAAA,CAAK,YAAY,MAAA,CAAO;AAAA,MACtD,OAAO,IAAA,CAAK,KAAA;AAAA,MACZ,QAAA,EAAU;AAAA,QACR,EAAE,IAAA,EAAM,QAAA,EAAU,OAAA,EAAS,YAAA,EAAa;AAAA,QACxC,EAAE,IAAA,EAAM,MAAA,EAAQ,OAAA,EAAS,UAAA;AAAW,OACtC;AAAA,MACA,UAAA,EAAY,SAAA;AAAA,MACZ;AAAA,KACD,CAAA;AAED,IAAA,OAAO;AAAA,MACL,SAAS,UAAA,CAAW,OAAA,CAAQ,CAAC,CAAA,EAAG,SAAS,OAAA,IAAW,EAAA;AAAA,MACpD,KAAA,EAAO;AAAA,QACL,WAAA,EAAa,UAAA,CAAW,KAAA,EAAO,aAAA,IAAiB,CAAA;AAAA,QAChD,YAAA,EAAc,UAAA,CAAW,KAAA,EAAO,iBAAA,IAAqB;AAAA;AACvD,KACF;AAAA,EACF;AACF;;;ACtEO,IAAM,gBAAA,GAAN,cAA+B,YAAA,CAAa;AAAA,EACzC,MAAA;AAAA,EAER,YAAY,MAAA,EAA8D;AACxE,IAAA,KAAA,CAAM,MAAM,CAAA;AACZ,IAAA,IAAA,CAAK,UAAA,EAAW;AAAA,EAClB;AAAA,EAEA,MAAc,UAAA,GAA4B;AACxC,IAAA,IAAI;AAEF,MAAA,MAAM,EAAE,OAAA,EAAS,QAAA,EAAS,GAAI,MAAM,OAClC,8BACF,CAAA;AACA,MAAA,IAAA,CAAK,MAAA,GAAS,IAAI,QAAA,CAAS;AAAA,QACzB,QAAQ,IAAA,CAAK,MAAA;AAAA,QACb,GAAI,IAAA,CAAK,OAAA,IAAW,EAAE,OAAA,EAAS,KAAK,OAAA;AAAQ,OAC7C,CAAA;AAAA,IACH,CAAA,CAAA,MAAQ;AACN,MAAA,MAAM,IAAI,KAAA;AAAA,QACR;AAAA,OACF;AAAA,IACF;AAAA,EACF;AAAA,EAEA,eAAA,GAA0B;AACxB,IAAA,OAAO,eAAA;AAAA,EACT;AAAA,EAEA,OAAA,GAAkB;AAChB,IAAA,OAAO,UAAA;AAAA,EACT;AAAA,EAEA,OAAO,WAAA,GAAuB;AAC5B,IAAA,IAAI;AACF,MAAA,SAAA,CAAQ,QAAQ,8BAA8B,CAAA;AAC9C,MAAA,OAAO,IAAA;AAAA,IACT,CAAA,CAAA,MAAQ;AACN,MAAA,OAAO,KAAA;AAAA,IACT;AAAA,EACF;AAAA,EAEA,MAAM,SAAS,OAAA,EAAuD;AACpE,IAAA,MAAM;AAAA,MACJ,YAAA;AAAA,MACA,UAAA;AAAA,MACA,SAAA,GAAY,GAAA;AAAA,MACZ,WAAA,GAAc;AAAA,KAChB,GAAI,OAAA;AAEJ,IAAA,IAAI,CAAC,KAAK,MAAA,EAAQ;AAChB,MAAA,MAAM,KAAK,UAAA,EAAW;AAAA,IACxB;AAGA,IAAA,MAAM,SAAS,IAAA,CAAK,MAAA;AACpB,IAAA,MAAM,UAAA,GAAa,MAAM,MAAA,CAAO,IAAA,CAAK,YAA
Y,MAAA,CAAO;AAAA,MACtD,OAAO,IAAA,CAAK,KAAA;AAAA,MACZ,QAAA,EAAU;AAAA,QACR,EAAE,IAAA,EAAM,QAAA,EAAU,OAAA,EAAS,YAAA,EAAa;AAAA,QACxC,EAAE,IAAA,EAAM,MAAA,EAAQ,OAAA,EAAS,UAAA;AAAW,OACtC;AAAA,MACA,qBAAA,EAAuB,SAAA;AAAA,MACvB;AAAA,KACD,CAAA;AAED,IAAA,OAAO;AAAA,MACL,SAAS,UAAA,CAAW,OAAA,CAAQ,CAAC,CAAA,EAAG,SAAS,OAAA,IAAW,EAAA;AAAA,MACpD,KAAA,EAAO;AAAA,QACL,WAAA,EAAa,UAAA,CAAW,KAAA,EAAO,aAAA,IAAiB,CAAA;AAAA,QAChD,YAAA,EAAc,UAAA,CAAW,KAAA,EAAO,iBAAA,IAAqB;AAAA;AACvD,KACF;AAAA,EACF;AACF;;;AC5DA,IAAM,gBAAA,GAOF;AAAA,EACF,MAAA,EAAQ,cAAA;AAAA,EACR,SAAA,EAAW,iBAAA;AAAA,EACX,MAAA,EAAQ,cAAA;AAAA,EACR,IAAA,EAAM,YAAA;AAAA,EACN,QAAA,EAAU;AACZ,CAAA;AAKO,SAAS,eAAe,MAAA,EAAsC;AACnE,EAAA,MAAM,aAAA,GAAgB,gBAAA,CAAiB,MAAA,CAAO,QAAQ,CAAA;AACtD,EAAA,IAAI,CAAC,aAAA,EAAe;AAClB,IAAA,MAAM,IAAI,KAAA;AAAA,MACR,CAAA,kBAAA,EAAqB,MAAA,CAAO,QAAQ,CAAA,aAAA,EAAgB,MAAA,CAAO,IAAA;AAAA,QACzD;AAAA,OACF,CAAE,IAAA,CAAK,IAAI,CAAC,CAAA;AAAA,KACd;AAAA,EACF;AAEA,EAAA,OAAO,IAAI,aAAA,CAAc;AAAA,IACvB,QAAQ,MAAA,CAAO,MAAA;AAAA,IACf,OAAO,MAAA,CAAO,KAAA;AAAA,IACd,SAAS,MAAA,CAAO;AAAA,GACjB,CAAA;AACH;AAKO,SAAS,qBAAA,GAAwC;AACtD,EAAA,MAAM,SAAA,GAA4B,CAAC,QAAQ,CAAA;AAE3C,EAAA,IAAI,iBAAA,CAAkB,WAAA,EAAY,EAAG,SAAA,CAAU,KAAK,WAAW,CAAA;AAC/D,EAAA,IAAI,cAAA,CAAe,WAAA,EAAY,EAAG,SAAA,CAAU,KAAK,QAAQ,CAAA;AACzD,EAAA,IAAI,YAAA,CAAa,WAAA,EAAY,EAAG,SAAA,CAAU,KAAK,MAAM,CAAA;AACrD,EAAA,IAAI,gBAAA,CAAiB,WAAA,EAAY,EAAG,SAAA,CAAU,KAAK,UAAU,CAAA;AAE7D,EAAA,OAAO,SAAA;AACT","file":"index.mjs","sourcesContent":["import type { CompletionOptions, CompletionResult } from \"../types\";\r\n\r\n/**\r\n * Abstract base class for LLM providers.\r\n * All provider implementations must extend this class.\r\n */\r\nexport abstract class BaseProvider {\r\n protected apiKey: string;\r\n protected model: string;\r\n protected baseUrl?: string;\r\n\r\n constructor(config: { apiKey: string; model?: string; baseUrl?: string }) {\r\n if (!config.apiKey) {\r\n throw new Error(\"API key is required\");\r\n }\r\n this.apiKey = config.apiKey;\r\n this.model = 
config.model || this.getDefaultModel();\r\n this.baseUrl = config.baseUrl;\r\n }\r\n\r\n /**\r\n * Get the default model for this provider\r\n */\r\n abstract getDefaultModel(): string;\r\n\r\n /**\r\n * Get the provider name\r\n */\r\n abstract getName(): string;\r\n\r\n /**\r\n * Generate a completion from the LLM\r\n */\r\n abstract complete(options: CompletionOptions): Promise<CompletionResult>;\r\n\r\n /**\r\n * Check if the provider SDK is available\r\n */\r\n static isAvailable(): boolean {\r\n return true;\r\n }\r\n}\r\n","import { BaseProvider } from \"./BaseProvider\";\r\nimport type { CompletionOptions, CompletionResult } from \"../types\";\r\n\r\n/**\r\n * OpenAI provider using native fetch (no SDK required)\r\n */\r\nexport class OpenAIProvider extends BaseProvider {\r\n private endpoint: string;\r\n\r\n constructor(config: { apiKey: string; model?: string; baseUrl?: string }) {\r\n super(config);\r\n this.endpoint = this.baseUrl || \"https://api.openai.com/v1\";\r\n }\r\n\r\n getDefaultModel(): string {\r\n return \"gpt-4o-mini\";\r\n }\r\n\r\n getName(): string {\r\n return \"openai\";\r\n }\r\n\r\n async complete(options: CompletionOptions): Promise<CompletionResult> {\r\n const {\r\n systemPrompt,\r\n userPrompt,\r\n maxTokens = 1000,\r\n temperature = 0.3,\r\n jsonMode = true,\r\n } = options;\r\n\r\n const response = await fetch(`${this.endpoint}/chat/completions`, {\r\n method: \"POST\",\r\n headers: {\r\n \"Content-Type\": \"application/json\",\r\n Authorization: `Bearer ${this.apiKey}`,\r\n },\r\n body: JSON.stringify({\r\n model: this.model,\r\n messages: [\r\n { role: \"system\", content: systemPrompt },\r\n { role: \"user\", content: userPrompt },\r\n ],\r\n max_tokens: maxTokens,\r\n temperature,\r\n ...(jsonMode && { response_format: { type: \"json_object\" } }),\r\n }),\r\n });\r\n\r\n if (!response.ok) {\r\n const errorData = (await response\r\n .json()\r\n .catch(() => ({ error: { message: response.statusText } }))) as {\r\n error?: { 
message?: string };\r\n };\r\n throw new Error(\r\n `OpenAI API error: ${errorData.error?.message || response.statusText}`\r\n );\r\n }\r\n\r\n interface OpenAIResponse {\r\n choices: Array<{ message?: { content?: string } }>;\r\n usage?: { prompt_tokens?: number; completion_tokens?: number };\r\n }\r\n\r\n const data = (await response.json()) as OpenAIResponse;\r\n\r\n return {\r\n content: data.choices[0]?.message?.content || \"\",\r\n usage: {\r\n inputTokens: data.usage?.prompt_tokens || 0,\r\n outputTokens: data.usage?.completion_tokens || 0,\r\n },\r\n };\r\n }\r\n}\r\n","import { BaseProvider } from \"./BaseProvider\";\r\nimport type { CompletionOptions, CompletionResult } from \"../types\";\r\n\r\n/**\r\n * Anthropic provider using the official @anthropic-ai/sdk package\r\n */\r\nexport class AnthropicProvider extends BaseProvider {\r\n private client: unknown;\r\n\r\n constructor(config: { apiKey: string; model?: string; baseUrl?: string }) {\r\n super(config);\r\n this.initClient();\r\n }\r\n\r\n private async initClient(): Promise<void> {\r\n try {\r\n // Dynamic import to make the SDK optional\r\n const { default: Anthropic } = await import(\"@anthropic-ai/sdk\");\r\n this.client = new Anthropic({\r\n apiKey: this.apiKey,\r\n ...(this.baseUrl && { baseURL: this.baseUrl }),\r\n });\r\n } catch {\r\n throw new Error(\r\n \"Anthropic SDK not installed. 
Run: npm install @anthropic-ai/sdk\"\r\n );\r\n }\r\n }\r\n\r\n getDefaultModel(): string {\r\n return \"claude-3-haiku-20240307\";\r\n }\r\n\r\n getName(): string {\r\n return \"anthropic\";\r\n }\r\n\r\n static isAvailable(): boolean {\r\n try {\r\n require.resolve(\"@anthropic-ai/sdk\");\r\n return true;\r\n } catch {\r\n return false;\r\n }\r\n }\r\n\r\n async complete(options: CompletionOptions): Promise<CompletionResult> {\r\n const {\r\n systemPrompt,\r\n userPrompt,\r\n maxTokens = 1000,\r\n temperature = 0.3,\r\n } = options;\r\n\r\n if (!this.client) {\r\n await this.initClient();\r\n }\r\n\r\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\r\n const client = this.client as any;\r\n const message = await client.messages.create({\r\n model: this.model,\r\n max_tokens: maxTokens,\r\n temperature,\r\n system: systemPrompt,\r\n messages: [{ role: \"user\", content: userPrompt }],\r\n });\r\n\r\n return {\r\n content: message.content[0]?.text || \"\",\r\n usage: {\r\n inputTokens: message.usage?.input_tokens || 0,\r\n outputTokens: message.usage?.output_tokens || 0,\r\n },\r\n };\r\n }\r\n}\r\n","import { BaseProvider } from \"./BaseProvider\";\r\nimport type { CompletionOptions, CompletionResult } from \"../types\";\r\n\r\n/**\r\n * Google Gemini provider using the official @google/generative-ai package\r\n */\r\nexport class GeminiProvider extends BaseProvider {\r\n private genAI: unknown;\r\n\r\n constructor(config: { apiKey: string; model?: string; baseUrl?: string }) {\r\n super(config);\r\n this.initClient();\r\n }\r\n\r\n private async initClient(): Promise<void> {\r\n try {\r\n // Dynamic import to make the SDK optional\r\n const { GoogleGenerativeAI } = await import(\"@google/generative-ai\");\r\n this.genAI = new GoogleGenerativeAI(this.apiKey);\r\n } catch {\r\n throw new Error(\r\n \"Google Generative AI SDK not installed. 
Run: npm install @google/generative-ai\"\r\n );\r\n }\r\n }\r\n\r\n getDefaultModel(): string {\r\n return \"gemini-2.0-flash\";\r\n }\r\n\r\n getName(): string {\r\n return \"gemini\";\r\n }\r\n\r\n static isAvailable(): boolean {\r\n try {\r\n require.resolve(\"@google/generative-ai\");\r\n return true;\r\n } catch {\r\n return false;\r\n }\r\n }\r\n\r\n async complete(options: CompletionOptions): Promise<CompletionResult> {\r\n const {\r\n systemPrompt,\r\n userPrompt,\r\n maxTokens = 1000,\r\n temperature = 0.3,\r\n } = options;\r\n\r\n if (!this.genAI) {\r\n await this.initClient();\r\n }\r\n\r\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\r\n const genAI = this.genAI as any;\r\n const model = genAI.getGenerativeModel({\r\n model: this.model,\r\n systemInstruction: systemPrompt,\r\n });\r\n\r\n const result = await model.generateContent({\r\n contents: [{ role: \"user\", parts: [{ text: userPrompt }] }],\r\n generationConfig: {\r\n maxOutputTokens: maxTokens,\r\n temperature,\r\n },\r\n });\r\n\r\n const response = result.response;\r\n\r\n return {\r\n content: response.text() || \"\",\r\n usage: {\r\n inputTokens: response.usageMetadata?.promptTokenCount || 0,\r\n outputTokens: response.usageMetadata?.candidatesTokenCount || 0,\r\n },\r\n };\r\n }\r\n}\r\n","import { BaseProvider } from \"./BaseProvider\";\r\nimport type { CompletionOptions, CompletionResult } from \"../types\";\r\n\r\n/**\r\n * Groq provider using the official groq-sdk package\r\n */\r\nexport class GroqProvider extends BaseProvider {\r\n private client: unknown;\r\n\r\n constructor(config: { apiKey: string; model?: string; baseUrl?: string }) {\r\n super(config);\r\n this.initClient();\r\n }\r\n\r\n private async initClient(): Promise<void> {\r\n try {\r\n // Dynamic import to make the SDK optional\r\n const { default: Groq } = await import(\"groq-sdk\");\r\n this.client = new Groq({\r\n apiKey: this.apiKey,\r\n ...(this.baseUrl && { baseURL: this.baseUrl }),\r\n });\r\n 
} catch {\r\n throw new Error(\"Groq SDK not installed. Run: npm install groq-sdk\");\r\n }\r\n }\r\n\r\n getDefaultModel(): string {\r\n return \"llama-3.3-70b-versatile\";\r\n }\r\n\r\n getName(): string {\r\n return \"groq\";\r\n }\r\n\r\n static isAvailable(): boolean {\r\n try {\r\n require.resolve(\"groq-sdk\");\r\n return true;\r\n } catch {\r\n return false;\r\n }\r\n }\r\n\r\n async complete(options: CompletionOptions): Promise<CompletionResult> {\r\n const {\r\n systemPrompt,\r\n userPrompt,\r\n maxTokens = 1000,\r\n temperature = 0.3,\r\n } = options;\r\n\r\n if (!this.client) {\r\n await this.initClient();\r\n }\r\n\r\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\r\n const client = this.client as any;\r\n const completion = await client.chat.completions.create({\r\n model: this.model,\r\n messages: [\r\n { role: \"system\", content: systemPrompt },\r\n { role: \"user\", content: userPrompt },\r\n ],\r\n max_tokens: maxTokens,\r\n temperature,\r\n });\r\n\r\n return {\r\n content: completion.choices[0]?.message?.content || \"\",\r\n usage: {\r\n inputTokens: completion.usage?.prompt_tokens || 0,\r\n outputTokens: completion.usage?.completion_tokens || 0,\r\n },\r\n };\r\n }\r\n}\r\n","import { BaseProvider } from \"./BaseProvider\";\r\nimport type { CompletionOptions, CompletionResult } from \"../types\";\r\n\r\n/**\r\n * Cerebras provider using the official @cerebras/cerebras_cloud_sdk package\r\n */\r\nexport class CerebrasProvider extends BaseProvider {\r\n private client: unknown;\r\n\r\n constructor(config: { apiKey: string; model?: string; baseUrl?: string }) {\r\n super(config);\r\n this.initClient();\r\n }\r\n\r\n private async initClient(): Promise<void> {\r\n try {\r\n // Dynamic import to make the SDK optional\r\n const { default: Cerebras } = await import(\r\n \"@cerebras/cerebras_cloud_sdk\"\r\n );\r\n this.client = new Cerebras({\r\n apiKey: this.apiKey,\r\n ...(this.baseUrl && { baseURL: this.baseUrl }),\r\n });\r\n } 
catch {\r\n throw new Error(\r\n \"Cerebras SDK not installed. Run: npm install @cerebras/cerebras_cloud_sdk\"\r\n );\r\n }\r\n }\r\n\r\n getDefaultModel(): string {\r\n return \"llama-3.3-70b\";\r\n }\r\n\r\n getName(): string {\r\n return \"cerebras\";\r\n }\r\n\r\n static isAvailable(): boolean {\r\n try {\r\n require.resolve(\"@cerebras/cerebras_cloud_sdk\");\r\n return true;\r\n } catch {\r\n return false;\r\n }\r\n }\r\n\r\n async complete(options: CompletionOptions): Promise<CompletionResult> {\r\n const {\r\n systemPrompt,\r\n userPrompt,\r\n maxTokens = 1000,\r\n temperature = 0.3,\r\n } = options;\r\n\r\n if (!this.client) {\r\n await this.initClient();\r\n }\r\n\r\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\r\n const client = this.client as any;\r\n const completion = await client.chat.completions.create({\r\n model: this.model,\r\n messages: [\r\n { role: \"system\", content: systemPrompt },\r\n { role: \"user\", content: userPrompt },\r\n ],\r\n max_completion_tokens: maxTokens,\r\n temperature,\r\n });\r\n\r\n return {\r\n content: completion.choices[0]?.message?.content || \"\",\r\n usage: {\r\n inputTokens: completion.usage?.prompt_tokens || 0,\r\n outputTokens: completion.usage?.completion_tokens || 0,\r\n },\r\n };\r\n }\r\n}\r\n","import { BaseProvider } from \"./BaseProvider\";\r\nimport { OpenAIProvider } from \"./OpenAIProvider\";\r\nimport { AnthropicProvider } from \"./AnthropicProvider\";\r\nimport { GeminiProvider } from \"./GeminiProvider\";\r\nimport { GroqProvider } from \"./GroqProvider\";\r\nimport { CerebrasProvider } from \"./CerebrasProvider\";\r\nimport type { ProviderConfig, ProviderName } from \"../types\";\r\n\r\nexport {\r\n BaseProvider,\r\n OpenAIProvider,\r\n AnthropicProvider,\r\n GeminiProvider,\r\n GroqProvider,\r\n CerebrasProvider,\r\n};\r\n\r\n/**\r\n * Provider registry for creating providers by name\r\n */\r\nconst providerRegistry: Record<\r\n ProviderName,\r\n new (config: {\r\n apiKey: 
string;\r\n model?: string;\r\n baseUrl?: string;\r\n }) => BaseProvider\r\n> = {\r\n openai: OpenAIProvider,\r\n anthropic: AnthropicProvider,\r\n gemini: GeminiProvider,\r\n groq: GroqProvider,\r\n cerebras: CerebrasProvider,\r\n};\r\n\r\n/**\r\n * Create a provider instance from configuration\r\n */\r\nexport function createProvider(config: ProviderConfig): BaseProvider {\r\n const ProviderClass = providerRegistry[config.provider];\r\n if (!ProviderClass) {\r\n throw new Error(\r\n `Unknown provider: ${config.provider}. Available: ${Object.keys(\r\n providerRegistry\r\n ).join(\", \")}`\r\n );\r\n }\r\n\r\n return new ProviderClass({\r\n apiKey: config.apiKey,\r\n model: config.model,\r\n baseUrl: config.baseUrl,\r\n });\r\n}\r\n\r\n/**\r\n * Check which providers are available (have their SDKs installed)\r\n */\r\nexport function getAvailableProviders(): ProviderName[] {\r\n const available: ProviderName[] = [\"openai\"]; // OpenAI uses fetch, always available\r\n\r\n if (AnthropicProvider.isAvailable()) available.push(\"anthropic\");\r\n if (GeminiProvider.isAvailable()) available.push(\"gemini\");\r\n if (GroqProvider.isAvailable()) available.push(\"groq\");\r\n if (CerebrasProvider.isAvailable()) available.push(\"cerebras\");\r\n\r\n return available;\r\n}\r\n"]}
|
|
@@ -0,0 +1,260 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Core type definitions for mem-ts Memory OS
|
|
3
|
+
*/
|
|
4
|
+
/**
|
|
5
|
+
* A single memory fact representing a relationship between entities
|
|
6
|
+
*/
|
|
7
|
+
interface MemoryFact {
|
|
8
|
+
/** Unique identifier (UUID) */
|
|
9
|
+
id: string;
|
|
10
|
+
/** The entity (e.g., "User", "Project:WebApp") */
|
|
11
|
+
subject: string;
|
|
12
|
+
/** The relationship (e.g., "HAS_ALLERGY", "PREFERS", "WORKS_AT") */
|
|
13
|
+
predicate: string;
|
|
14
|
+
/** The value (e.g., "Peanuts", "Dark Mode", "Acme Corp") */
|
|
15
|
+
object: string;
|
|
16
|
+
/** Confidence score 0-1 */
|
|
17
|
+
confidence: number;
|
|
18
|
+
/**
|
|
19
|
+
* Importance score 1-10 (Amygdala pattern)
|
|
20
|
+
* 1-3: Trivia (preferences, minor details)
|
|
21
|
+
* 4-6: Standard (work, location, relationships)
|
|
22
|
+
* 7-8: Important (strong preferences, constraints)
|
|
23
|
+
* 9-10: Critical (allergies, safety, medical, boundaries)
|
|
24
|
+
*/
|
|
25
|
+
importance: number;
|
|
26
|
+
/** Conversation ID that created/updated this fact */
|
|
27
|
+
source: string;
|
|
28
|
+
/** Specific conversation exchange ID for episodic linking */
|
|
29
|
+
sourceConversationId?: string;
|
|
30
|
+
/** When this fact was first created */
|
|
31
|
+
createdAt: Date;
|
|
32
|
+
/** When this fact was last updated */
|
|
33
|
+
updatedAt: Date;
|
|
34
|
+
/** When this fact was superseded (null if still valid) */
|
|
35
|
+
invalidatedAt: Date | null;
|
|
36
|
+
/** Number of times this fact has been accessed (Hebbian learning) */
|
|
37
|
+
accessCount?: number;
|
|
38
|
+
/** Last time this fact was accessed */
|
|
39
|
+
lastAccessedAt?: Date;
|
|
40
|
+
/**
|
|
41
|
+
* Emotional context when fact was learned (Emotional Coloring)
|
|
42
|
+
* Tracks whether the fact was learned in a positive or negative context
|
|
43
|
+
*/
|
|
44
|
+
sentiment?: "positive" | "negative" | "neutral";
|
|
45
|
+
/** Description of the emotional context */
|
|
46
|
+
emotionalContext?: string;
|
|
47
|
+
/**
|
|
48
|
+
* Memory consolidation stage (Memory Consolidation Levels)
|
|
49
|
+
* - short-term: Just learned, may not persist
|
|
50
|
+
* - working: Being actively used, intermediate storage
|
|
51
|
+
* - long-term: Consolidated through reinforcement
|
|
52
|
+
*/
|
|
53
|
+
memoryStage?: "short-term" | "working" | "long-term";
|
|
54
|
+
/**
|
|
55
|
+
* IDs of related facts (Associative Linking / Knowledge Graph)
|
|
56
|
+
* Creates connections between facts for richer context
|
|
57
|
+
*/
|
|
58
|
+
relatedFactIds?: string[];
|
|
59
|
+
/**
|
|
60
|
+
* Vector embedding for semantic search (Attention Filtering)
|
|
61
|
+
* Stored as array of numbers for similarity matching
|
|
62
|
+
*/
|
|
63
|
+
embedding?: number[];
|
|
64
|
+
/**
|
|
65
|
+
* Memory level in the hierarchy pyramid:
|
|
66
|
+
* - raw_log: Raw conversation data (ephemeral, auto-deleted)
|
|
67
|
+
* - fact: Specific discrete facts (your normal facts)
|
|
68
|
+
* - pattern: Synthesized patterns/traits from multiple facts
|
|
69
|
+
* - core_belief: Unchangeable truths, always loaded (allergies, identity)
|
|
70
|
+
*/
|
|
71
|
+
memoryLevel?: "raw_log" | "fact" | "pattern" | "core_belief";
|
|
72
|
+
/**
|
|
73
|
+
* IDs of lower-level facts that this pattern/belief was derived from.
|
|
74
|
+
* Creates provenance chain for "why do I believe this?"
|
|
75
|
+
*/
|
|
76
|
+
childrenIds?: string[];
|
|
77
|
+
/** Additional metadata */
|
|
78
|
+
metadata?: Record<string, unknown>;
|
|
79
|
+
}
|
|
80
|
+
/**
|
|
81
|
+
* A memory operation for updating the fact graph
|
|
82
|
+
*/
|
|
83
|
+
interface MemoryOperation {
|
|
84
|
+
/** Operation type */
|
|
85
|
+
op: "INSERT" | "UPDATE" | "DELETE";
|
|
86
|
+
/** The entity */
|
|
87
|
+
subject: string;
|
|
88
|
+
/** The relationship */
|
|
89
|
+
predicate: string;
|
|
90
|
+
/** The value */
|
|
91
|
+
object: string;
|
|
92
|
+
/** Reason for this operation (especially for DELETEs) */
|
|
93
|
+
reason?: string;
|
|
94
|
+
/** Confidence score 0-1 */
|
|
95
|
+
confidence?: number;
|
|
96
|
+
/** Importance score 1-10 (for safety-critical facts) */
|
|
97
|
+
importance?: number;
|
|
98
|
+
/** Sentiment context when fact was learned */
|
|
99
|
+
sentiment?: "positive" | "negative" | "neutral";
|
|
100
|
+
}
|
|
101
|
+
/**
|
|
102
|
+
* Result from the fact extraction LLM
|
|
103
|
+
*/
|
|
104
|
+
interface ExtractionResult {
|
|
105
|
+
/** List of operations to apply */
|
|
106
|
+
operations: MemoryOperation[];
|
|
107
|
+
/** Reasoning for the extractions */
|
|
108
|
+
reasoning?: string;
|
|
109
|
+
}
|
|
110
|
+
/**
|
|
111
|
+
* A single message in a conversation
|
|
112
|
+
*/
|
|
113
|
+
interface Message {
|
|
114
|
+
role: "user" | "assistant" | "system";
|
|
115
|
+
content: string;
|
|
116
|
+
timestamp: Date;
|
|
117
|
+
}
|
|
118
|
+
/**
|
|
119
|
+
* A conversation exchange (user message + assistant response)
|
|
120
|
+
*/
|
|
121
|
+
interface ConversationExchange {
|
|
122
|
+
id: string;
|
|
123
|
+
userId: string;
|
|
124
|
+
sessionId: string;
|
|
125
|
+
userMessage: string;
|
|
126
|
+
assistantResponse: string;
|
|
127
|
+
timestamp: Date;
|
|
128
|
+
metadata?: Record<string, unknown>;
|
|
129
|
+
}
|
|
130
|
+
/**
|
|
131
|
+
* Session information
|
|
132
|
+
*/
|
|
133
|
+
interface Session {
|
|
134
|
+
id: string;
|
|
135
|
+
userId: string;
|
|
136
|
+
startedAt: Date;
|
|
137
|
+
endedAt: Date | null;
|
|
138
|
+
messageCount: number;
|
|
139
|
+
summary?: string;
|
|
140
|
+
}
|
|
141
|
+
/**
|
|
142
|
+
* Result from hydrating context before an LLM call
|
|
143
|
+
*/
|
|
144
|
+
interface HydratedContext {
|
|
145
|
+
/** Compiled prompt ready for injection into system message */
|
|
146
|
+
compiledPrompt: string;
|
|
147
|
+
/** Raw facts that were retrieved */
|
|
148
|
+
facts: MemoryFact[];
|
|
149
|
+
/** Recent conversation history */
|
|
150
|
+
recentHistory: ConversationExchange[];
|
|
151
|
+
/** Token estimate for the compiled context */
|
|
152
|
+
estimatedTokens: number;
|
|
153
|
+
/** Whether this was served from cache */
|
|
154
|
+
fromCache: boolean;
|
|
155
|
+
}
|
|
156
|
+
/**
|
|
157
|
+
* Options for hydration
|
|
158
|
+
*/
|
|
159
|
+
interface HydrateOptions {
|
|
160
|
+
/** Maximum number of facts to include */
|
|
161
|
+
maxFacts?: number;
|
|
162
|
+
/** Maximum number of recent messages to include */
|
|
163
|
+
maxHistory?: number;
|
|
164
|
+
/** Specific predicates to filter for */
|
|
165
|
+
predicates?: string[];
|
|
166
|
+
/** Whether to include invalidated facts */
|
|
167
|
+
includeInvalidated?: boolean;
|
|
168
|
+
}
|
|
169
|
+
/**
|
|
170
|
+
* Options for LLM completion
|
|
171
|
+
*/
|
|
172
|
+
interface CompletionOptions {
|
|
173
|
+
/** System prompt */
|
|
174
|
+
systemPrompt: string;
|
|
175
|
+
/** User prompt */
|
|
176
|
+
userPrompt: string;
|
|
177
|
+
/** Maximum tokens in response */
|
|
178
|
+
maxTokens?: number;
|
|
179
|
+
/** Temperature (0-1, lower = more deterministic) */
|
|
180
|
+
temperature?: number;
|
|
181
|
+
/** Force JSON output mode */
|
|
182
|
+
jsonMode?: boolean;
|
|
183
|
+
}
|
|
184
|
+
/**
|
|
185
|
+
* Result from LLM completion
|
|
186
|
+
*/
|
|
187
|
+
interface CompletionResult {
|
|
188
|
+
/** The generated content */
|
|
189
|
+
content: string;
|
|
190
|
+
/** Token usage */
|
|
191
|
+
usage: {
|
|
192
|
+
inputTokens: number;
|
|
193
|
+
outputTokens: number;
|
|
194
|
+
};
|
|
195
|
+
}
|
|
196
|
+
/**
|
|
197
|
+
* Supported LLM providers
|
|
198
|
+
*/
|
|
199
|
+
type ProviderName = "openai" | "anthropic" | "gemini" | "groq" | "cerebras";
|
|
200
|
+
/**
|
|
201
|
+
* LLM provider configuration
|
|
202
|
+
*/
|
|
203
|
+
interface ProviderConfig {
|
|
204
|
+
/** Provider name */
|
|
205
|
+
provider: ProviderName;
|
|
206
|
+
/** API key */
|
|
207
|
+
apiKey: string;
|
|
208
|
+
/** Model to use (each provider has its own default) */
|
|
209
|
+
model?: string;
|
|
210
|
+
/** Base URL override (for proxies or self-hosted) */
|
|
211
|
+
baseUrl?: string;
|
|
212
|
+
}
|
|
213
|
+
/**
|
|
214
|
+
* Filter options for querying facts
|
|
215
|
+
*/
|
|
216
|
+
interface FactFilter {
|
|
217
|
+
/** Filter by subject */
|
|
218
|
+
subject?: string;
|
|
219
|
+
/** Filter by predicate */
|
|
220
|
+
predicate?: string;
|
|
221
|
+
/** Filter by predicates (OR) */
|
|
222
|
+
predicates?: string[];
|
|
223
|
+
/** Only valid (non-invalidated) facts */
|
|
224
|
+
validOnly?: boolean;
|
|
225
|
+
/** Limit number of results */
|
|
226
|
+
limit?: number;
|
|
227
|
+
/** Order by field */
|
|
228
|
+
orderBy?: "createdAt" | "updatedAt" | "confidence";
|
|
229
|
+
/** Order direction */
|
|
230
|
+
orderDir?: "asc" | "desc";
|
|
231
|
+
}
|
|
232
|
+
/**
|
|
233
|
+
* Options for MemoryOS behavior
|
|
234
|
+
*/
|
|
235
|
+
interface MemoryOSOptions {
|
|
236
|
+
/** Auto-summarize conversations after this many messages */
|
|
237
|
+
autoSummarizeAfter?: number;
|
|
238
|
+
/** Conflict resolution strategy */
|
|
239
|
+
conflictStrategy?: "latest" | "merge" | "keep_both";
|
|
240
|
+
/** Enable semantic caching */
|
|
241
|
+
enableCache?: boolean;
|
|
242
|
+
/** Cache TTL in seconds */
|
|
243
|
+
cacheTtl?: number;
|
|
244
|
+
/** Debug mode */
|
|
245
|
+
debug?: boolean;
|
|
246
|
+
}
|
|
247
|
+
/**
|
|
248
|
+
* Events emitted by MemoryOS
|
|
249
|
+
*/
|
|
250
|
+
interface MemoryOSEvents {
|
|
251
|
+
"fact:created": (fact: MemoryFact) => void;
|
|
252
|
+
"fact:updated": (fact: MemoryFact, oldFact: MemoryFact) => void;
|
|
253
|
+
"fact:deleted": (fact: MemoryFact, reason: string) => void;
|
|
254
|
+
"session:start": (session: Session) => void;
|
|
255
|
+
"session:end": (session: Session) => void;
|
|
256
|
+
"extraction:complete": (result: ExtractionResult) => void;
|
|
257
|
+
error: (error: Error) => void;
|
|
258
|
+
}
|
|
259
|
+
|
|
260
|
+
export type { ConversationExchange as C, ExtractionResult as E, FactFilter as F, HydrateOptions as H, MemoryFact as M, ProviderConfig as P, Session as S, CompletionOptions as a, CompletionResult as b, MemoryOSOptions as c, HydratedContext as d, ProviderName as e, MemoryOperation as f, Message as g, MemoryOSEvents as h };
|