@mzhub/mem-ts 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +335 -0
- package/dist/BaseAdapter-BoRh1T7O.d.mts +75 -0
- package/dist/BaseAdapter-CQVX-gcA.d.ts +75 -0
- package/dist/BaseProvider-CEoiLGj5.d.ts +34 -0
- package/dist/BaseProvider-edMh_R9t.d.mts +34 -0
- package/dist/adapters/index.d.mts +259 -0
- package/dist/adapters/index.d.ts +259 -0
- package/dist/adapters/index.js +1570 -0
- package/dist/adapters/index.js.map +1 -0
- package/dist/adapters/index.mjs +1542 -0
- package/dist/adapters/index.mjs.map +1 -0
- package/dist/index-Ci5Q9G9H.d.mts +289 -0
- package/dist/index-Dl-Q2au9.d.ts +289 -0
- package/dist/index.d.mts +1206 -0
- package/dist/index.d.ts +1206 -0
- package/dist/index.js +5126 -0
- package/dist/index.js.map +1 -0
- package/dist/index.mjs +5058 -0
- package/dist/index.mjs.map +1 -0
- package/dist/middleware/index.d.mts +4 -0
- package/dist/middleware/index.d.ts +4 -0
- package/dist/middleware/index.js +63 -0
- package/dist/middleware/index.js.map +1 -0
- package/dist/middleware/index.mjs +59 -0
- package/dist/middleware/index.mjs.map +1 -0
- package/dist/providers/index.d.mts +96 -0
- package/dist/providers/index.d.ts +96 -0
- package/dist/providers/index.js +379 -0
- package/dist/providers/index.js.map +1 -0
- package/dist/providers/index.mjs +370 -0
- package/dist/providers/index.mjs.map +1 -0
- package/dist/types-G9qmfSeZ.d.mts +260 -0
- package/dist/types-G9qmfSeZ.d.ts +260 -0
- package/logo.png +0 -0
- package/package.json +114 -0
|
@@ -0,0 +1,379 @@
|
|
|
1
|
+
'use strict';
|
|
2
|
+
|
|
3
|
+
// esbuild CJS-interop shim: exposes a `require`-like function that delegates to
// the real CommonJS `require` when one exists. In environments without it
// (pure ESM), a Proxy stands in so property reads like `__require.resolve`
// still forward correctly, and any actual dynamic require throws a clear error.
var __require = /* @__PURE__ */ ((x) => typeof require !== "undefined" ? require : typeof Proxy !== "undefined" ? new Proxy(x, {
  // Forward property access (e.g. `__require.resolve`) to the real `require`
  // when available; otherwise fall back to the wrapped function itself.
  get: (a, b) => (typeof require !== "undefined" ? require : a)[b]
}) : x)(function(x) {
  if (typeof require !== "undefined") return require.apply(this, arguments);
  throw Error('Dynamic require of "' + x + '" is not supported');
});
|
|
9
|
+
|
|
10
|
+
// src/providers/BaseProvider.ts
|
|
11
|
+
var BaseProvider = class {
|
|
12
|
+
apiKey;
|
|
13
|
+
model;
|
|
14
|
+
baseUrl;
|
|
15
|
+
constructor(config) {
|
|
16
|
+
if (!config.apiKey) {
|
|
17
|
+
throw new Error("API key is required");
|
|
18
|
+
}
|
|
19
|
+
this.apiKey = config.apiKey;
|
|
20
|
+
this.model = config.model || this.getDefaultModel();
|
|
21
|
+
this.baseUrl = config.baseUrl;
|
|
22
|
+
}
|
|
23
|
+
/**
|
|
24
|
+
* Check if the provider SDK is available
|
|
25
|
+
*/
|
|
26
|
+
static isAvailable() {
|
|
27
|
+
return true;
|
|
28
|
+
}
|
|
29
|
+
};
|
|
30
|
+
|
|
31
|
+
// src/providers/OpenAIProvider.ts
/**
 * Provider backed by the OpenAI Chat Completions REST API, called with plain
 * fetch so no SDK dependency is required. Supports JSON-mode responses via
 * the `response_format` request field (enabled by default).
 */
var OpenAIProvider = class extends BaseProvider {
  endpoint;
  constructor(config) {
    super(config);
    // baseUrl lets callers point at a proxy/gateway; default is the public API.
    this.endpoint = this.baseUrl || "https://api.openai.com/v1";
  }
  getDefaultModel() {
    return "gpt-4o-mini";
  }
  getName() {
    return "openai";
  }
  /**
   * Run a single system+user chat completion and return the generated text
   * plus token usage. Throws on any non-2xx HTTP response, including the
   * API-supplied error message when one can be parsed from the body.
   */
  async complete(options) {
    const {
      systemPrompt,
      userPrompt,
      maxTokens = 1e3,
      temperature = 0.3,
      jsonMode = true
    } = options;
    const payload = {
      model: this.model,
      messages: [
        { role: "system", content: systemPrompt },
        { role: "user", content: userPrompt }
      ],
      max_tokens: maxTokens,
      temperature
    };
    if (jsonMode) {
      payload.response_format = { type: "json_object" };
    }
    const response = await fetch(`${this.endpoint}/chat/completions`, {
      method: "POST",
      headers: {
        "Content-Type": "application/json",
        Authorization: `Bearer ${this.apiKey}`
      },
      body: JSON.stringify(payload)
    });
    if (!response.ok) {
      // Body may not be JSON (e.g. gateway errors) — fall back to statusText.
      const errorData = await response.json().catch(() => ({ error: { message: response.statusText } }));
      throw new Error(
        `OpenAI API error: ${errorData.error?.message || response.statusText}`
      );
    }
    const data = await response.json();
    const firstChoice = data.choices[0];
    return {
      content: firstChoice?.message?.content || "",
      usage: {
        inputTokens: data.usage?.prompt_tokens || 0,
        outputTokens: data.usage?.completion_tokens || 0
      }
    };
  }
};
|
|
85
|
+
|
|
86
|
+
// src/providers/AnthropicProvider.ts
/**
 * Provider backed by the optional `@anthropic-ai/sdk` package. The SDK is
 * loaded lazily via dynamic import so it remains an optional dependency.
 */
var AnthropicProvider = class extends BaseProvider {
  client;
  constructor(config) {
    super(config);
    // Warm up the SDK eagerly, but swallow rejection here: leaving this
    // floating promise unhandled would crash Node with an unhandled promise
    // rejection when the SDK is not installed. complete() re-runs
    // initClient() and surfaces the real error to the caller instead.
    this.initClient().catch(() => {});
  }
  /**
   * Dynamically import the Anthropic SDK and construct the client.
   * Throws a descriptive error if `@anthropic-ai/sdk` is not installed.
   */
  async initClient() {
    try {
      const { default: Anthropic } = await import('@anthropic-ai/sdk');
      this.client = new Anthropic({
        apiKey: this.apiKey,
        ...this.baseUrl && { baseURL: this.baseUrl }
      });
    } catch {
      throw new Error(
        "Anthropic SDK not installed. Run: npm install @anthropic-ai/sdk"
      );
    }
  }
  getDefaultModel() {
    return "claude-3-haiku-20240307";
  }
  getName() {
    return "anthropic";
  }
  /** True when `@anthropic-ai/sdk` can be resolved without loading it. */
  static isAvailable() {
    try {
      __require.resolve("@anthropic-ai/sdk");
      return true;
    } catch {
      return false;
    }
  }
  /**
   * Send one system+user exchange via the Messages API and return the first
   * content block's text plus token usage.
   */
  async complete(options) {
    const {
      systemPrompt,
      userPrompt,
      maxTokens = 1e3,
      temperature = 0.3
    } = options;
    // Client may still be missing if construction-time init failed or is
    // still in flight; retry so the install error propagates to the caller.
    if (!this.client) {
      await this.initClient();
    }
    const client = this.client;
    const message = await client.messages.create({
      model: this.model,
      max_tokens: maxTokens,
      temperature,
      system: systemPrompt,
      messages: [{ role: "user", content: userPrompt }]
    });
    return {
      content: message.content[0]?.text || "",
      usage: {
        inputTokens: message.usage?.input_tokens || 0,
        outputTokens: message.usage?.output_tokens || 0
      }
    };
  }
};
|
|
147
|
+
|
|
148
|
+
// src/providers/GeminiProvider.ts
/**
 * Provider backed by the optional `@google/generative-ai` package. The SDK
 * is loaded lazily via dynamic import so it remains an optional dependency.
 */
var GeminiProvider = class extends BaseProvider {
  genAI;
  constructor(config) {
    super(config);
    // Warm up the SDK eagerly, but swallow rejection here: leaving this
    // floating promise unhandled would crash Node with an unhandled promise
    // rejection when the SDK is not installed. complete() re-runs
    // initClient() and surfaces the real error to the caller instead.
    this.initClient().catch(() => {});
  }
  /**
   * Dynamically import the Google Generative AI SDK and construct the client.
   * Throws a descriptive error if `@google/generative-ai` is not installed.
   */
  async initClient() {
    try {
      const { GoogleGenerativeAI } = await import('@google/generative-ai');
      this.genAI = new GoogleGenerativeAI(this.apiKey);
    } catch {
      throw new Error(
        "Google Generative AI SDK not installed. Run: npm install @google/generative-ai"
      );
    }
  }
  getDefaultModel() {
    return "gemini-2.0-flash";
  }
  getName() {
    return "gemini";
  }
  /** True when `@google/generative-ai` can be resolved without loading it. */
  static isAvailable() {
    try {
      __require.resolve("@google/generative-ai");
      return true;
    } catch {
      return false;
    }
  }
  /**
   * Generate content for one system+user exchange and return the response
   * text plus token usage metadata.
   */
  async complete(options) {
    const {
      systemPrompt,
      userPrompt,
      maxTokens = 1e3,
      temperature = 0.3
    } = options;
    // Client may still be missing if construction-time init failed or is
    // still in flight; retry so the install error propagates to the caller.
    if (!this.genAI) {
      await this.initClient();
    }
    const genAI = this.genAI;
    const model = genAI.getGenerativeModel({
      model: this.model,
      systemInstruction: systemPrompt
    });
    const result = await model.generateContent({
      contents: [{ role: "user", parts: [{ text: userPrompt }] }],
      generationConfig: {
        maxOutputTokens: maxTokens,
        temperature
      }
    });
    const response = result.response;
    return {
      content: response.text() || "",
      usage: {
        inputTokens: response.usageMetadata?.promptTokenCount || 0,
        outputTokens: response.usageMetadata?.candidatesTokenCount || 0
      }
    };
  }
};
|
|
211
|
+
|
|
212
|
+
// src/providers/GroqProvider.ts
/**
 * Provider backed by the optional `groq-sdk` package. The SDK is loaded
 * lazily via dynamic import so it remains an optional dependency.
 */
var GroqProvider = class extends BaseProvider {
  client;
  constructor(config) {
    super(config);
    // Warm up the SDK eagerly, but swallow rejection here: leaving this
    // floating promise unhandled would crash Node with an unhandled promise
    // rejection when the SDK is not installed. complete() re-runs
    // initClient() and surfaces the real error to the caller instead.
    this.initClient().catch(() => {});
  }
  /**
   * Dynamically import the Groq SDK and construct the client.
   * Throws a descriptive error if `groq-sdk` is not installed.
   */
  async initClient() {
    try {
      const { default: Groq } = await import('groq-sdk');
      this.client = new Groq({
        apiKey: this.apiKey,
        ...this.baseUrl && { baseURL: this.baseUrl }
      });
    } catch {
      throw new Error("Groq SDK not installed. Run: npm install groq-sdk");
    }
  }
  getDefaultModel() {
    return "llama-3.3-70b-versatile";
  }
  getName() {
    return "groq";
  }
  /** True when `groq-sdk` can be resolved without loading it. */
  static isAvailable() {
    try {
      __require.resolve("groq-sdk");
      return true;
    } catch {
      return false;
    }
  }
  /**
   * Run one system+user chat completion and return the generated text plus
   * token usage.
   */
  async complete(options) {
    const {
      systemPrompt,
      userPrompt,
      maxTokens = 1e3,
      temperature = 0.3
    } = options;
    // Client may still be missing if construction-time init failed or is
    // still in flight; retry so the install error propagates to the caller.
    if (!this.client) {
      await this.initClient();
    }
    const client = this.client;
    const completion = await client.chat.completions.create({
      model: this.model,
      messages: [
        { role: "system", content: systemPrompt },
        { role: "user", content: userPrompt }
      ],
      max_tokens: maxTokens,
      temperature
    });
    return {
      content: completion.choices[0]?.message?.content || "",
      usage: {
        inputTokens: completion.usage?.prompt_tokens || 0,
        outputTokens: completion.usage?.completion_tokens || 0
      }
    };
  }
};
|
|
273
|
+
|
|
274
|
+
// src/providers/CerebrasProvider.ts
/**
 * Provider backed by the optional `@cerebras/cerebras_cloud_sdk` package.
 * The SDK is loaded lazily via dynamic import so it remains optional.
 */
var CerebrasProvider = class extends BaseProvider {
  client;
  constructor(config) {
    super(config);
    // Warm up the SDK eagerly, but swallow rejection here: leaving this
    // floating promise unhandled would crash Node with an unhandled promise
    // rejection when the SDK is not installed. complete() re-runs
    // initClient() and surfaces the real error to the caller instead.
    this.initClient().catch(() => {});
  }
  /**
   * Dynamically import the Cerebras SDK and construct the client.
   * Throws a descriptive error if `@cerebras/cerebras_cloud_sdk` is missing.
   */
  async initClient() {
    try {
      const { default: Cerebras } = await import('@cerebras/cerebras_cloud_sdk');
      this.client = new Cerebras({
        apiKey: this.apiKey,
        ...this.baseUrl && { baseURL: this.baseUrl }
      });
    } catch {
      throw new Error(
        "Cerebras SDK not installed. Run: npm install @cerebras/cerebras_cloud_sdk"
      );
    }
  }
  getDefaultModel() {
    return "llama-3.3-70b";
  }
  getName() {
    return "cerebras";
  }
  /** True when `@cerebras/cerebras_cloud_sdk` can be resolved without loading it. */
  static isAvailable() {
    try {
      __require.resolve("@cerebras/cerebras_cloud_sdk");
      return true;
    } catch {
      return false;
    }
  }
  /**
   * Run one system+user chat completion and return the generated text plus
   * token usage. Note the Cerebras API uses `max_completion_tokens`.
   */
  async complete(options) {
    const {
      systemPrompt,
      userPrompt,
      maxTokens = 1e3,
      temperature = 0.3
    } = options;
    // Client may still be missing if construction-time init failed or is
    // still in flight; retry so the install error propagates to the caller.
    if (!this.client) {
      await this.initClient();
    }
    const client = this.client;
    const completion = await client.chat.completions.create({
      model: this.model,
      messages: [
        { role: "system", content: systemPrompt },
        { role: "user", content: userPrompt }
      ],
      max_completion_tokens: maxTokens,
      temperature
    });
    return {
      content: completion.choices[0]?.message?.content || "",
      usage: {
        inputTokens: completion.usage?.prompt_tokens || 0,
        outputTokens: completion.usage?.completion_tokens || 0
      }
    };
  }
};
|
|
337
|
+
|
|
338
|
+
// src/providers/index.ts
/**
 * Registry mapping provider id -> provider class. OpenAI is always usable
 * (fetch-based); the others require their optional SDK packages.
 */
var providerRegistry = {
  openai: OpenAIProvider,
  anthropic: AnthropicProvider,
  gemini: GeminiProvider,
  groq: GroqProvider,
  cerebras: CerebrasProvider
};
/**
 * Instantiate the provider named by `config.provider`, passing through the
 * apiKey/model/baseUrl settings. Throws when the provider id is unknown,
 * listing the valid ids in the error message.
 */
function createProvider(config) {
  const ProviderClass = providerRegistry[config.provider];
  if (!ProviderClass) {
    const known = Object.keys(providerRegistry).join(", ");
    throw new Error(`Unknown provider: ${config.provider}. Available: ${known}`);
  }
  const { apiKey, model, baseUrl } = config;
  return new ProviderClass({ apiKey, model, baseUrl });
}
/**
 * List the provider ids whose SDKs are resolvable in this environment.
 * "openai" is always included since it needs no SDK.
 */
function getAvailableProviders() {
  const available = ["openai"];
  const optional = [
    ["anthropic", AnthropicProvider],
    ["gemini", GeminiProvider],
    ["groq", GroqProvider],
    ["cerebras", CerebrasProvider]
  ];
  for (const [name, ProviderClass] of optional) {
    if (ProviderClass.isAvailable()) {
      available.push(name);
    }
  }
  return available;
}
|
|
369
|
+
|
|
370
|
+
// Public CommonJS surface of the providers entry point: the base class, the
// five concrete providers, and the factory/discovery helpers.
exports.AnthropicProvider = AnthropicProvider;
exports.BaseProvider = BaseProvider;
exports.CerebrasProvider = CerebrasProvider;
exports.GeminiProvider = GeminiProvider;
exports.GroqProvider = GroqProvider;
exports.OpenAIProvider = OpenAIProvider;
exports.createProvider = createProvider;
exports.getAvailableProviders = getAvailableProviders;
//# sourceMappingURL=index.js.map
//# sourceMappingURL=index.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../../src/providers/BaseProvider.ts","../../src/providers/OpenAIProvider.ts","../../src/providers/AnthropicProvider.ts","../../src/providers/GeminiProvider.ts","../../src/providers/GroqProvider.ts","../../src/providers/CerebrasProvider.ts","../../src/providers/index.ts"],"names":[],"mappings":";;;;;;;;;;AAMO,IAAe,eAAf,MAA4B;AAAA,EACvB,MAAA;AAAA,EACA,KAAA;AAAA,EACA,OAAA;AAAA,EAEV,YAAY,MAAA,EAA8D;AACxE,IAAA,IAAI,CAAC,OAAO,MAAA,EAAQ;AAClB,MAAA,MAAM,IAAI,MAAM,qBAAqB,CAAA;AAAA,IACvC;AACA,IAAA,IAAA,CAAK,SAAS,MAAA,CAAO,MAAA;AACrB,IAAA,IAAA,CAAK,KAAA,GAAQ,MAAA,CAAO,KAAA,IAAS,IAAA,CAAK,eAAA,EAAgB;AAClD,IAAA,IAAA,CAAK,UAAU,MAAA,CAAO,OAAA;AAAA,EACxB;AAAA;AAAA;AAAA;AAAA,EAoBA,OAAO,WAAA,GAAuB;AAC5B,IAAA,OAAO,IAAA;AAAA,EACT;AACF;;;ACnCO,IAAM,cAAA,GAAN,cAA6B,YAAA,CAAa;AAAA,EACvC,QAAA;AAAA,EAER,YAAY,MAAA,EAA8D;AACxE,IAAA,KAAA,CAAM,MAAM,CAAA;AACZ,IAAA,IAAA,CAAK,QAAA,GAAW,KAAK,OAAA,IAAW,2BAAA;AAAA,EAClC;AAAA,EAEA,eAAA,GAA0B;AACxB,IAAA,OAAO,aAAA;AAAA,EACT;AAAA,EAEA,OAAA,GAAkB;AAChB,IAAA,OAAO,QAAA;AAAA,EACT;AAAA,EAEA,MAAM,SAAS,OAAA,EAAuD;AACpE,IAAA,MAAM;AAAA,MACJ,YAAA;AAAA,MACA,UAAA;AAAA,MACA,SAAA,GAAY,GAAA;AAAA,MACZ,WAAA,GAAc,GAAA;AAAA,MACd,QAAA,GAAW;AAAA,KACb,GAAI,OAAA;AAEJ,IAAA,MAAM,WAAW,MAAM,KAAA,CAAM,CAAA,EAAG,IAAA,CAAK,QAAQ,CAAA,iBAAA,CAAA,EAAqB;AAAA,MAChE,MAAA,EAAQ,MAAA;AAAA,MACR,OAAA,EAAS;AAAA,QACP,cAAA,EAAgB,kBAAA;AAAA,QAChB,aAAA,EAAe,CAAA,OAAA,EAAU,IAAA,CAAK,MAAM,CAAA;AAAA,OACtC;AAAA,MACA,IAAA,EAAM,KAAK,SAAA,CAAU;AAAA,QACnB,OAAO,IAAA,CAAK,KAAA;AAAA,QACZ,QAAA,EAAU;AAAA,UACR,EAAE,IAAA,EAAM,QAAA,EAAU,OAAA,EAAS,YAAA,EAAa;AAAA,UACxC,EAAE,IAAA,EAAM,MAAA,EAAQ,OAAA,EAAS,UAAA;AAAW,SACtC;AAAA,QACA,UAAA,EAAY,SAAA;AAAA,QACZ,WAAA;AAAA,QACA,GAAI,QAAA,IAAY,EAAE,iBAAiB,EAAE,IAAA,EAAM,eAAc;AAAE,OAC5D;AAAA,KACF,CAAA;AAED,IAAA,IAAI,CAAC,SAAS,EAAA,EAAI;AAChB,MAAA,MAAM,SAAA,GAAa,MAAM,QAAA,CACtB,IAAA,GACA,KAAA,CAAM,OAAO,EAAE,KAAA,EAAO,EAAE,OAAA,EAAS,QAAA,CAAS,UAAA,IAAa,CAAE,CAAA;AAG5D,MAAA,MAAM,IAAI,KAAA;AAAA,QACR,CAAA,kBAAA,EAAqB,SAAA,CAAU,KAAA,EAAO,OAAA,IAAW,SAAS,UAAU,CA
AA;AAAA,OACtE;AAAA,IACF;AAOA,IAAA,MAAM,IAAA,GAAQ,MAAM,QAAA,CAAS,IAAA,EAAK;AAElC,IAAA,OAAO;AAAA,MACL,SAAS,IAAA,CAAK,OAAA,CAAQ,CAAC,CAAA,EAAG,SAAS,OAAA,IAAW,EAAA;AAAA,MAC9C,KAAA,EAAO;AAAA,QACL,WAAA,EAAa,IAAA,CAAK,KAAA,EAAO,aAAA,IAAiB,CAAA;AAAA,QAC1C,YAAA,EAAc,IAAA,CAAK,KAAA,EAAO,iBAAA,IAAqB;AAAA;AACjD,KACF;AAAA,EACF;AACF;;;ACrEO,IAAM,iBAAA,GAAN,cAAgC,YAAA,CAAa;AAAA,EAC1C,MAAA;AAAA,EAER,YAAY,MAAA,EAA8D;AACxE,IAAA,KAAA,CAAM,MAAM,CAAA;AACZ,IAAA,IAAA,CAAK,UAAA,EAAW;AAAA,EAClB;AAAA,EAEA,MAAc,UAAA,GAA4B;AACxC,IAAA,IAAI;AAEF,MAAA,MAAM,EAAE,OAAA,EAAS,SAAA,EAAU,GAAI,MAAM,OAAO,mBAAmB,CAAA;AAC/D,MAAA,IAAA,CAAK,MAAA,GAAS,IAAI,SAAA,CAAU;AAAA,QAC1B,QAAQ,IAAA,CAAK,MAAA;AAAA,QACb,GAAI,IAAA,CAAK,OAAA,IAAW,EAAE,OAAA,EAAS,KAAK,OAAA;AAAQ,OAC7C,CAAA;AAAA,IACH,CAAA,CAAA,MAAQ;AACN,MAAA,MAAM,IAAI,KAAA;AAAA,QACR;AAAA,OACF;AAAA,IACF;AAAA,EACF;AAAA,EAEA,eAAA,GAA0B;AACxB,IAAA,OAAO,yBAAA;AAAA,EACT;AAAA,EAEA,OAAA,GAAkB;AAChB,IAAA,OAAO,WAAA;AAAA,EACT;AAAA,EAEA,OAAO,WAAA,GAAuB;AAC5B,IAAA,IAAI;AACF,MAAA,SAAA,CAAQ,QAAQ,mBAAmB,CAAA;AACnC,MAAA,OAAO,IAAA;AAAA,IACT,CAAA,CAAA,MAAQ;AACN,MAAA,OAAO,KAAA;AAAA,IACT;AAAA,EACF;AAAA,EAEA,MAAM,SAAS,OAAA,EAAuD;AACpE,IAAA,MAAM;AAAA,MACJ,YAAA;AAAA,MACA,UAAA;AAAA,MACA,SAAA,GAAY,GAAA;AAAA,MACZ,WAAA,GAAc;AAAA,KAChB,GAAI,OAAA;AAEJ,IAAA,IAAI,CAAC,KAAK,MAAA,EAAQ;AAChB,MAAA,MAAM,KAAK,UAAA,EAAW;AAAA,IACxB;AAGA,IAAA,MAAM,SAAS,IAAA,CAAK,MAAA;AACpB,IAAA,MAAM,OAAA,GAAU,MAAM,MAAA,CAAO,QAAA,CAAS,MAAA,CAAO;AAAA,MAC3C,OAAO,IAAA,CAAK,KAAA;AAAA,MACZ,UAAA,EAAY,SAAA;AAAA,MACZ,WAAA;AAAA,MACA,MAAA,EAAQ,YAAA;AAAA,MACR,UAAU,CAAC,EAAE,MAAM,MAAA,EAAQ,OAAA,EAAS,YAAY;AAAA,KACjD,CAAA;AAED,IAAA,OAAO;AAAA,MACL,OAAA,EAAS,OAAA,CAAQ,OAAA,CAAQ,CAAC,GAAG,IAAA,IAAQ,EAAA;AAAA,MACrC,KAAA,EAAO;AAAA,QACL,WAAA,EAAa,OAAA,CAAQ,KAAA,EAAO,YAAA,IAAgB,CAAA;AAAA,QAC5C,YAAA,EAAc,OAAA,CAAQ,KAAA,EAAO,aAAA,IAAiB;AAAA;AAChD,KACF;AAAA,EACF;AACF;;;ACtEO,IAAM,cAAA,GAAN,cAA6B,YAAA,CAAa;AAAA,EACvC,KAAA;AAAA,EAER,YAAY,MAAA,EAA8D;AACxE,IAAA,KAAA,CAAM,MAAM,CAAA;AACZ,IAAA,IAAA,CAAK,UAAA,EAAW;AAAA,EAClB;AAAA,EAEA,MAAc,UAAA,GAA4B;A
ACxC,IAAA,IAAI;AAEF,MAAA,MAAM,EAAE,kBAAA,EAAmB,GAAI,MAAM,OAAO,uBAAuB,CAAA;AACnE,MAAA,IAAA,CAAK,KAAA,GAAQ,IAAI,kBAAA,CAAmB,IAAA,CAAK,MAAM,CAAA;AAAA,IACjD,CAAA,CAAA,MAAQ;AACN,MAAA,MAAM,IAAI,KAAA;AAAA,QACR;AAAA,OACF;AAAA,IACF;AAAA,EACF;AAAA,EAEA,eAAA,GAA0B;AACxB,IAAA,OAAO,kBAAA;AAAA,EACT;AAAA,EAEA,OAAA,GAAkB;AAChB,IAAA,OAAO,QAAA;AAAA,EACT;AAAA,EAEA,OAAO,WAAA,GAAuB;AAC5B,IAAA,IAAI;AACF,MAAA,SAAA,CAAQ,QAAQ,uBAAuB,CAAA;AACvC,MAAA,OAAO,IAAA;AAAA,IACT,CAAA,CAAA,MAAQ;AACN,MAAA,OAAO,KAAA;AAAA,IACT;AAAA,EACF;AAAA,EAEA,MAAM,SAAS,OAAA,EAAuD;AACpE,IAAA,MAAM;AAAA,MACJ,YAAA;AAAA,MACA,UAAA;AAAA,MACA,SAAA,GAAY,GAAA;AAAA,MACZ,WAAA,GAAc;AAAA,KAChB,GAAI,OAAA;AAEJ,IAAA,IAAI,CAAC,KAAK,KAAA,EAAO;AACf,MAAA,MAAM,KAAK,UAAA,EAAW;AAAA,IACxB;AAGA,IAAA,MAAM,QAAQ,IAAA,CAAK,KAAA;AACnB,IAAA,MAAM,KAAA,GAAQ,MAAM,kBAAA,CAAmB;AAAA,MACrC,OAAO,IAAA,CAAK,KAAA;AAAA,MACZ,iBAAA,EAAmB;AAAA,KACpB,CAAA;AAED,IAAA,MAAM,MAAA,GAAS,MAAM,KAAA,CAAM,eAAA,CAAgB;AAAA,MACzC,QAAA,EAAU,CAAC,EAAE,IAAA,EAAM,MAAA,EAAQ,KAAA,EAAO,CAAC,EAAE,IAAA,EAAM,UAAA,EAAY,CAAA,EAAG,CAAA;AAAA,MAC1D,gBAAA,EAAkB;AAAA,QAChB,eAAA,EAAiB,SAAA;AAAA,QACjB;AAAA;AACF,KACD,CAAA;AAED,IAAA,MAAM,WAAW,MAAA,CAAO,QAAA;AAExB,IAAA,OAAO;AAAA,MACL,OAAA,EAAS,QAAA,CAAS,IAAA,EAAK,IAAK,EAAA;AAAA,MAC5B,KAAA,EAAO;AAAA,QACL,WAAA,EAAa,QAAA,CAAS,aAAA,EAAe,gBAAA,IAAoB,CAAA;AAAA,QACzD,YAAA,EAAc,QAAA,CAAS,aAAA,EAAe,oBAAA,IAAwB;AAAA;AAChE,KACF;AAAA,EACF;AACF;;;AC1EO,IAAM,YAAA,GAAN,cAA2B,YAAA,CAAa;AAAA,EACrC,MAAA;AAAA,EAER,YAAY,MAAA,EAA8D;AACxE,IAAA,KAAA,CAAM,MAAM,CAAA;AACZ,IAAA,IAAA,CAAK,UAAA,EAAW;AAAA,EAClB;AAAA,EAEA,MAAc,UAAA,GAA4B;AACxC,IAAA,IAAI;AAEF,MAAA,MAAM,EAAE,OAAA,EAAS,IAAA,EAAK,GAAI,MAAM,OAAO,UAAU,CAAA;AACjD,MAAA,IAAA,CAAK,MAAA,GAAS,IAAI,IAAA,CAAK;AAAA,QACrB,QAAQ,IAAA,CAAK,MAAA;AAAA,QACb,GAAI,IAAA,CAAK,OAAA,IAAW,EAAE,OAAA,EAAS,KAAK,OAAA;AAAQ,OAC7C,CAAA;AAAA,IACH,CAAA,CAAA,MAAQ;AACN,MAAA,MAAM,IAAI,MAAM,mDAAmD,CAAA;AAAA,IACrE;AAAA,EACF;AAAA,EAEA,eAAA,GAA0B;AACxB,IAAA,OAAO,yBAAA;AAAA,EACT;AAAA,EAEA,OAAA,GAAkB;AAChB,IAAA,OAAO,MAAA;AAAA,EACT;AAAA,EAEA,OAAO,WAAA,GAAuB;AAC
5B,IAAA,IAAI;AACF,MAAA,SAAA,CAAQ,QAAQ,UAAU,CAAA;AAC1B,MAAA,OAAO,IAAA;AAAA,IACT,CAAA,CAAA,MAAQ;AACN,MAAA,OAAO,KAAA;AAAA,IACT;AAAA,EACF;AAAA,EAEA,MAAM,SAAS,OAAA,EAAuD;AACpE,IAAA,MAAM;AAAA,MACJ,YAAA;AAAA,MACA,UAAA;AAAA,MACA,SAAA,GAAY,GAAA;AAAA,MACZ,WAAA,GAAc;AAAA,KAChB,GAAI,OAAA;AAEJ,IAAA,IAAI,CAAC,KAAK,MAAA,EAAQ;AAChB,MAAA,MAAM,KAAK,UAAA,EAAW;AAAA,IACxB;AAGA,IAAA,MAAM,SAAS,IAAA,CAAK,MAAA;AACpB,IAAA,MAAM,UAAA,GAAa,MAAM,MAAA,CAAO,IAAA,CAAK,YAAY,MAAA,CAAO;AAAA,MACtD,OAAO,IAAA,CAAK,KAAA;AAAA,MACZ,QAAA,EAAU;AAAA,QACR,EAAE,IAAA,EAAM,QAAA,EAAU,OAAA,EAAS,YAAA,EAAa;AAAA,QACxC,EAAE,IAAA,EAAM,MAAA,EAAQ,OAAA,EAAS,UAAA;AAAW,OACtC;AAAA,MACA,UAAA,EAAY,SAAA;AAAA,MACZ;AAAA,KACD,CAAA;AAED,IAAA,OAAO;AAAA,MACL,SAAS,UAAA,CAAW,OAAA,CAAQ,CAAC,CAAA,EAAG,SAAS,OAAA,IAAW,EAAA;AAAA,MACpD,KAAA,EAAO;AAAA,QACL,WAAA,EAAa,UAAA,CAAW,KAAA,EAAO,aAAA,IAAiB,CAAA;AAAA,QAChD,YAAA,EAAc,UAAA,CAAW,KAAA,EAAO,iBAAA,IAAqB;AAAA;AACvD,KACF;AAAA,EACF;AACF;;;ACtEO,IAAM,gBAAA,GAAN,cAA+B,YAAA,CAAa;AAAA,EACzC,MAAA;AAAA,EAER,YAAY,MAAA,EAA8D;AACxE,IAAA,KAAA,CAAM,MAAM,CAAA;AACZ,IAAA,IAAA,CAAK,UAAA,EAAW;AAAA,EAClB;AAAA,EAEA,MAAc,UAAA,GAA4B;AACxC,IAAA,IAAI;AAEF,MAAA,MAAM,EAAE,OAAA,EAAS,QAAA,EAAS,GAAI,MAAM,OAClC,8BACF,CAAA;AACA,MAAA,IAAA,CAAK,MAAA,GAAS,IAAI,QAAA,CAAS;AAAA,QACzB,QAAQ,IAAA,CAAK,MAAA;AAAA,QACb,GAAI,IAAA,CAAK,OAAA,IAAW,EAAE,OAAA,EAAS,KAAK,OAAA;AAAQ,OAC7C,CAAA;AAAA,IACH,CAAA,CAAA,MAAQ;AACN,MAAA,MAAM,IAAI,KAAA;AAAA,QACR;AAAA,OACF;AAAA,IACF;AAAA,EACF;AAAA,EAEA,eAAA,GAA0B;AACxB,IAAA,OAAO,eAAA;AAAA,EACT;AAAA,EAEA,OAAA,GAAkB;AAChB,IAAA,OAAO,UAAA;AAAA,EACT;AAAA,EAEA,OAAO,WAAA,GAAuB;AAC5B,IAAA,IAAI;AACF,MAAA,SAAA,CAAQ,QAAQ,8BAA8B,CAAA;AAC9C,MAAA,OAAO,IAAA;AAAA,IACT,CAAA,CAAA,MAAQ;AACN,MAAA,OAAO,KAAA;AAAA,IACT;AAAA,EACF;AAAA,EAEA,MAAM,SAAS,OAAA,EAAuD;AACpE,IAAA,MAAM;AAAA,MACJ,YAAA;AAAA,MACA,UAAA;AAAA,MACA,SAAA,GAAY,GAAA;AAAA,MACZ,WAAA,GAAc;AAAA,KAChB,GAAI,OAAA;AAEJ,IAAA,IAAI,CAAC,KAAK,MAAA,EAAQ;AAChB,MAAA,MAAM,KAAK,UAAA,EAAW;AAAA,IACxB;AAGA,IAAA,MAAM,SAAS,IAAA,CAAK,MAAA;AACpB,IAAA,MAAM,UAAA,GAAa,MAAM,MAAA,CAAO,IAAA,CAAK,Y
AAY,MAAA,CAAO;AAAA,MACtD,OAAO,IAAA,CAAK,KAAA;AAAA,MACZ,QAAA,EAAU;AAAA,QACR,EAAE,IAAA,EAAM,QAAA,EAAU,OAAA,EAAS,YAAA,EAAa;AAAA,QACxC,EAAE,IAAA,EAAM,MAAA,EAAQ,OAAA,EAAS,UAAA;AAAW,OACtC;AAAA,MACA,qBAAA,EAAuB,SAAA;AAAA,MACvB;AAAA,KACD,CAAA;AAED,IAAA,OAAO;AAAA,MACL,SAAS,UAAA,CAAW,OAAA,CAAQ,CAAC,CAAA,EAAG,SAAS,OAAA,IAAW,EAAA;AAAA,MACpD,KAAA,EAAO;AAAA,QACL,WAAA,EAAa,UAAA,CAAW,KAAA,EAAO,aAAA,IAAiB,CAAA;AAAA,QAChD,YAAA,EAAc,UAAA,CAAW,KAAA,EAAO,iBAAA,IAAqB;AAAA;AACvD,KACF;AAAA,EACF;AACF;;;AC5DA,IAAM,gBAAA,GAOF;AAAA,EACF,MAAA,EAAQ,cAAA;AAAA,EACR,SAAA,EAAW,iBAAA;AAAA,EACX,MAAA,EAAQ,cAAA;AAAA,EACR,IAAA,EAAM,YAAA;AAAA,EACN,QAAA,EAAU;AACZ,CAAA;AAKO,SAAS,eAAe,MAAA,EAAsC;AACnE,EAAA,MAAM,aAAA,GAAgB,gBAAA,CAAiB,MAAA,CAAO,QAAQ,CAAA;AACtD,EAAA,IAAI,CAAC,aAAA,EAAe;AAClB,IAAA,MAAM,IAAI,KAAA;AAAA,MACR,CAAA,kBAAA,EAAqB,MAAA,CAAO,QAAQ,CAAA,aAAA,EAAgB,MAAA,CAAO,IAAA;AAAA,QACzD;AAAA,OACF,CAAE,IAAA,CAAK,IAAI,CAAC,CAAA;AAAA,KACd;AAAA,EACF;AAEA,EAAA,OAAO,IAAI,aAAA,CAAc;AAAA,IACvB,QAAQ,MAAA,CAAO,MAAA;AAAA,IACf,OAAO,MAAA,CAAO,KAAA;AAAA,IACd,SAAS,MAAA,CAAO;AAAA,GACjB,CAAA;AACH;AAKO,SAAS,qBAAA,GAAwC;AACtD,EAAA,MAAM,SAAA,GAA4B,CAAC,QAAQ,CAAA;AAE3C,EAAA,IAAI,iBAAA,CAAkB,WAAA,EAAY,EAAG,SAAA,CAAU,KAAK,WAAW,CAAA;AAC/D,EAAA,IAAI,cAAA,CAAe,WAAA,EAAY,EAAG,SAAA,CAAU,KAAK,QAAQ,CAAA;AACzD,EAAA,IAAI,YAAA,CAAa,WAAA,EAAY,EAAG,SAAA,CAAU,KAAK,MAAM,CAAA;AACrD,EAAA,IAAI,gBAAA,CAAiB,WAAA,EAAY,EAAG,SAAA,CAAU,KAAK,UAAU,CAAA;AAE7D,EAAA,OAAO,SAAA;AACT","file":"index.js","sourcesContent":["import type { CompletionOptions, CompletionResult } from \"../types\";\r\n\r\n/**\r\n * Abstract base class for LLM providers.\r\n * All provider implementations must extend this class.\r\n */\r\nexport abstract class BaseProvider {\r\n protected apiKey: string;\r\n protected model: string;\r\n protected baseUrl?: string;\r\n\r\n constructor(config: { apiKey: string; model?: string; baseUrl?: string }) {\r\n if (!config.apiKey) {\r\n throw new Error(\"API key is required\");\r\n }\r\n this.apiKey = config.apiKey;\r\n this.model = 
config.model || this.getDefaultModel();\r\n this.baseUrl = config.baseUrl;\r\n }\r\n\r\n /**\r\n * Get the default model for this provider\r\n */\r\n abstract getDefaultModel(): string;\r\n\r\n /**\r\n * Get the provider name\r\n */\r\n abstract getName(): string;\r\n\r\n /**\r\n * Generate a completion from the LLM\r\n */\r\n abstract complete(options: CompletionOptions): Promise<CompletionResult>;\r\n\r\n /**\r\n * Check if the provider SDK is available\r\n */\r\n static isAvailable(): boolean {\r\n return true;\r\n }\r\n}\r\n","import { BaseProvider } from \"./BaseProvider\";\r\nimport type { CompletionOptions, CompletionResult } from \"../types\";\r\n\r\n/**\r\n * OpenAI provider using native fetch (no SDK required)\r\n */\r\nexport class OpenAIProvider extends BaseProvider {\r\n private endpoint: string;\r\n\r\n constructor(config: { apiKey: string; model?: string; baseUrl?: string }) {\r\n super(config);\r\n this.endpoint = this.baseUrl || \"https://api.openai.com/v1\";\r\n }\r\n\r\n getDefaultModel(): string {\r\n return \"gpt-4o-mini\";\r\n }\r\n\r\n getName(): string {\r\n return \"openai\";\r\n }\r\n\r\n async complete(options: CompletionOptions): Promise<CompletionResult> {\r\n const {\r\n systemPrompt,\r\n userPrompt,\r\n maxTokens = 1000,\r\n temperature = 0.3,\r\n jsonMode = true,\r\n } = options;\r\n\r\n const response = await fetch(`${this.endpoint}/chat/completions`, {\r\n method: \"POST\",\r\n headers: {\r\n \"Content-Type\": \"application/json\",\r\n Authorization: `Bearer ${this.apiKey}`,\r\n },\r\n body: JSON.stringify({\r\n model: this.model,\r\n messages: [\r\n { role: \"system\", content: systemPrompt },\r\n { role: \"user\", content: userPrompt },\r\n ],\r\n max_tokens: maxTokens,\r\n temperature,\r\n ...(jsonMode && { response_format: { type: \"json_object\" } }),\r\n }),\r\n });\r\n\r\n if (!response.ok) {\r\n const errorData = (await response\r\n .json()\r\n .catch(() => ({ error: { message: response.statusText } }))) as {\r\n error?: { 
message?: string };\r\n };\r\n throw new Error(\r\n `OpenAI API error: ${errorData.error?.message || response.statusText}`\r\n );\r\n }\r\n\r\n interface OpenAIResponse {\r\n choices: Array<{ message?: { content?: string } }>;\r\n usage?: { prompt_tokens?: number; completion_tokens?: number };\r\n }\r\n\r\n const data = (await response.json()) as OpenAIResponse;\r\n\r\n return {\r\n content: data.choices[0]?.message?.content || \"\",\r\n usage: {\r\n inputTokens: data.usage?.prompt_tokens || 0,\r\n outputTokens: data.usage?.completion_tokens || 0,\r\n },\r\n };\r\n }\r\n}\r\n","import { BaseProvider } from \"./BaseProvider\";\r\nimport type { CompletionOptions, CompletionResult } from \"../types\";\r\n\r\n/**\r\n * Anthropic provider using the official @anthropic-ai/sdk package\r\n */\r\nexport class AnthropicProvider extends BaseProvider {\r\n private client: unknown;\r\n\r\n constructor(config: { apiKey: string; model?: string; baseUrl?: string }) {\r\n super(config);\r\n this.initClient();\r\n }\r\n\r\n private async initClient(): Promise<void> {\r\n try {\r\n // Dynamic import to make the SDK optional\r\n const { default: Anthropic } = await import(\"@anthropic-ai/sdk\");\r\n this.client = new Anthropic({\r\n apiKey: this.apiKey,\r\n ...(this.baseUrl && { baseURL: this.baseUrl }),\r\n });\r\n } catch {\r\n throw new Error(\r\n \"Anthropic SDK not installed. 
Run: npm install @anthropic-ai/sdk\"\r\n );\r\n }\r\n }\r\n\r\n getDefaultModel(): string {\r\n return \"claude-3-haiku-20240307\";\r\n }\r\n\r\n getName(): string {\r\n return \"anthropic\";\r\n }\r\n\r\n static isAvailable(): boolean {\r\n try {\r\n require.resolve(\"@anthropic-ai/sdk\");\r\n return true;\r\n } catch {\r\n return false;\r\n }\r\n }\r\n\r\n async complete(options: CompletionOptions): Promise<CompletionResult> {\r\n const {\r\n systemPrompt,\r\n userPrompt,\r\n maxTokens = 1000,\r\n temperature = 0.3,\r\n } = options;\r\n\r\n if (!this.client) {\r\n await this.initClient();\r\n }\r\n\r\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\r\n const client = this.client as any;\r\n const message = await client.messages.create({\r\n model: this.model,\r\n max_tokens: maxTokens,\r\n temperature,\r\n system: systemPrompt,\r\n messages: [{ role: \"user\", content: userPrompt }],\r\n });\r\n\r\n return {\r\n content: message.content[0]?.text || \"\",\r\n usage: {\r\n inputTokens: message.usage?.input_tokens || 0,\r\n outputTokens: message.usage?.output_tokens || 0,\r\n },\r\n };\r\n }\r\n}\r\n","import { BaseProvider } from \"./BaseProvider\";\r\nimport type { CompletionOptions, CompletionResult } from \"../types\";\r\n\r\n/**\r\n * Google Gemini provider using the official @google/generative-ai package\r\n */\r\nexport class GeminiProvider extends BaseProvider {\r\n private genAI: unknown;\r\n\r\n constructor(config: { apiKey: string; model?: string; baseUrl?: string }) {\r\n super(config);\r\n this.initClient();\r\n }\r\n\r\n private async initClient(): Promise<void> {\r\n try {\r\n // Dynamic import to make the SDK optional\r\n const { GoogleGenerativeAI } = await import(\"@google/generative-ai\");\r\n this.genAI = new GoogleGenerativeAI(this.apiKey);\r\n } catch {\r\n throw new Error(\r\n \"Google Generative AI SDK not installed. 
Run: npm install @google/generative-ai\"\r\n );\r\n }\r\n }\r\n\r\n getDefaultModel(): string {\r\n return \"gemini-2.0-flash\";\r\n }\r\n\r\n getName(): string {\r\n return \"gemini\";\r\n }\r\n\r\n static isAvailable(): boolean {\r\n try {\r\n require.resolve(\"@google/generative-ai\");\r\n return true;\r\n } catch {\r\n return false;\r\n }\r\n }\r\n\r\n async complete(options: CompletionOptions): Promise<CompletionResult> {\r\n const {\r\n systemPrompt,\r\n userPrompt,\r\n maxTokens = 1000,\r\n temperature = 0.3,\r\n } = options;\r\n\r\n if (!this.genAI) {\r\n await this.initClient();\r\n }\r\n\r\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\r\n const genAI = this.genAI as any;\r\n const model = genAI.getGenerativeModel({\r\n model: this.model,\r\n systemInstruction: systemPrompt,\r\n });\r\n\r\n const result = await model.generateContent({\r\n contents: [{ role: \"user\", parts: [{ text: userPrompt }] }],\r\n generationConfig: {\r\n maxOutputTokens: maxTokens,\r\n temperature,\r\n },\r\n });\r\n\r\n const response = result.response;\r\n\r\n return {\r\n content: response.text() || \"\",\r\n usage: {\r\n inputTokens: response.usageMetadata?.promptTokenCount || 0,\r\n outputTokens: response.usageMetadata?.candidatesTokenCount || 0,\r\n },\r\n };\r\n }\r\n}\r\n","import { BaseProvider } from \"./BaseProvider\";\r\nimport type { CompletionOptions, CompletionResult } from \"../types\";\r\n\r\n/**\r\n * Groq provider using the official groq-sdk package\r\n */\r\nexport class GroqProvider extends BaseProvider {\r\n private client: unknown;\r\n\r\n constructor(config: { apiKey: string; model?: string; baseUrl?: string }) {\r\n super(config);\r\n this.initClient();\r\n }\r\n\r\n private async initClient(): Promise<void> {\r\n try {\r\n // Dynamic import to make the SDK optional\r\n const { default: Groq } = await import(\"groq-sdk\");\r\n this.client = new Groq({\r\n apiKey: this.apiKey,\r\n ...(this.baseUrl && { baseURL: this.baseUrl }),\r\n });\r\n 
} catch {\r\n throw new Error(\"Groq SDK not installed. Run: npm install groq-sdk\");\r\n }\r\n }\r\n\r\n getDefaultModel(): string {\r\n return \"llama-3.3-70b-versatile\";\r\n }\r\n\r\n getName(): string {\r\n return \"groq\";\r\n }\r\n\r\n static isAvailable(): boolean {\r\n try {\r\n require.resolve(\"groq-sdk\");\r\n return true;\r\n } catch {\r\n return false;\r\n }\r\n }\r\n\r\n async complete(options: CompletionOptions): Promise<CompletionResult> {\r\n const {\r\n systemPrompt,\r\n userPrompt,\r\n maxTokens = 1000,\r\n temperature = 0.3,\r\n } = options;\r\n\r\n if (!this.client) {\r\n await this.initClient();\r\n }\r\n\r\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\r\n const client = this.client as any;\r\n const completion = await client.chat.completions.create({\r\n model: this.model,\r\n messages: [\r\n { role: \"system\", content: systemPrompt },\r\n { role: \"user\", content: userPrompt },\r\n ],\r\n max_tokens: maxTokens,\r\n temperature,\r\n });\r\n\r\n return {\r\n content: completion.choices[0]?.message?.content || \"\",\r\n usage: {\r\n inputTokens: completion.usage?.prompt_tokens || 0,\r\n outputTokens: completion.usage?.completion_tokens || 0,\r\n },\r\n };\r\n }\r\n}\r\n","import { BaseProvider } from \"./BaseProvider\";\r\nimport type { CompletionOptions, CompletionResult } from \"../types\";\r\n\r\n/**\r\n * Cerebras provider using the official @cerebras/cerebras_cloud_sdk package\r\n */\r\nexport class CerebrasProvider extends BaseProvider {\r\n private client: unknown;\r\n\r\n constructor(config: { apiKey: string; model?: string; baseUrl?: string }) {\r\n super(config);\r\n this.initClient();\r\n }\r\n\r\n private async initClient(): Promise<void> {\r\n try {\r\n // Dynamic import to make the SDK optional\r\n const { default: Cerebras } = await import(\r\n \"@cerebras/cerebras_cloud_sdk\"\r\n );\r\n this.client = new Cerebras({\r\n apiKey: this.apiKey,\r\n ...(this.baseUrl && { baseURL: this.baseUrl }),\r\n });\r\n } 
catch {\r\n throw new Error(\r\n \"Cerebras SDK not installed. Run: npm install @cerebras/cerebras_cloud_sdk\"\r\n );\r\n }\r\n }\r\n\r\n getDefaultModel(): string {\r\n return \"llama-3.3-70b\";\r\n }\r\n\r\n getName(): string {\r\n return \"cerebras\";\r\n }\r\n\r\n static isAvailable(): boolean {\r\n try {\r\n require.resolve(\"@cerebras/cerebras_cloud_sdk\");\r\n return true;\r\n } catch {\r\n return false;\r\n }\r\n }\r\n\r\n async complete(options: CompletionOptions): Promise<CompletionResult> {\r\n const {\r\n systemPrompt,\r\n userPrompt,\r\n maxTokens = 1000,\r\n temperature = 0.3,\r\n } = options;\r\n\r\n if (!this.client) {\r\n await this.initClient();\r\n }\r\n\r\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\r\n const client = this.client as any;\r\n const completion = await client.chat.completions.create({\r\n model: this.model,\r\n messages: [\r\n { role: \"system\", content: systemPrompt },\r\n { role: \"user\", content: userPrompt },\r\n ],\r\n max_completion_tokens: maxTokens,\r\n temperature,\r\n });\r\n\r\n return {\r\n content: completion.choices[0]?.message?.content || \"\",\r\n usage: {\r\n inputTokens: completion.usage?.prompt_tokens || 0,\r\n outputTokens: completion.usage?.completion_tokens || 0,\r\n },\r\n };\r\n }\r\n}\r\n","import { BaseProvider } from \"./BaseProvider\";\r\nimport { OpenAIProvider } from \"./OpenAIProvider\";\r\nimport { AnthropicProvider } from \"./AnthropicProvider\";\r\nimport { GeminiProvider } from \"./GeminiProvider\";\r\nimport { GroqProvider } from \"./GroqProvider\";\r\nimport { CerebrasProvider } from \"./CerebrasProvider\";\r\nimport type { ProviderConfig, ProviderName } from \"../types\";\r\n\r\nexport {\r\n BaseProvider,\r\n OpenAIProvider,\r\n AnthropicProvider,\r\n GeminiProvider,\r\n GroqProvider,\r\n CerebrasProvider,\r\n};\r\n\r\n/**\r\n * Provider registry for creating providers by name\r\n */\r\nconst providerRegistry: Record<\r\n ProviderName,\r\n new (config: {\r\n apiKey: 
string;\r\n model?: string;\r\n baseUrl?: string;\r\n }) => BaseProvider\r\n> = {\r\n openai: OpenAIProvider,\r\n anthropic: AnthropicProvider,\r\n gemini: GeminiProvider,\r\n groq: GroqProvider,\r\n cerebras: CerebrasProvider,\r\n};\r\n\r\n/**\r\n * Create a provider instance from configuration\r\n */\r\nexport function createProvider(config: ProviderConfig): BaseProvider {\r\n const ProviderClass = providerRegistry[config.provider];\r\n if (!ProviderClass) {\r\n throw new Error(\r\n `Unknown provider: ${config.provider}. Available: ${Object.keys(\r\n providerRegistry\r\n ).join(\", \")}`\r\n );\r\n }\r\n\r\n return new ProviderClass({\r\n apiKey: config.apiKey,\r\n model: config.model,\r\n baseUrl: config.baseUrl,\r\n });\r\n}\r\n\r\n/**\r\n * Check which providers are available (have their SDKs installed)\r\n */\r\nexport function getAvailableProviders(): ProviderName[] {\r\n const available: ProviderName[] = [\"openai\"]; // OpenAI uses fetch, always available\r\n\r\n if (AnthropicProvider.isAvailable()) available.push(\"anthropic\");\r\n if (GeminiProvider.isAvailable()) available.push(\"gemini\");\r\n if (GroqProvider.isAvailable()) available.push(\"groq\");\r\n if (CerebrasProvider.isAvailable()) available.push(\"cerebras\");\r\n\r\n return available;\r\n}\r\n"]}
|