ai.libx.js 0.1.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +339 -0
- package/build/@Module.d.ts +6 -0
- package/build/@Module.js +14 -0
- package/build/@Module.js.map +1 -0
- package/build/AIClient.d.ts +19 -0
- package/build/AIClient.js +132 -0
- package/build/AIClient.js.map +1 -0
- package/build/Extensions.d.ts +3 -0
- package/build/Extensions.js +4 -0
- package/build/Extensions.js.map +1 -0
- package/build/adapters/ai21.d.ts +8 -0
- package/build/adapters/ai21.js +83 -0
- package/build/adapters/ai21.js.map +1 -0
- package/build/adapters/anthropic.d.ts +9 -0
- package/build/adapters/anthropic.js +162 -0
- package/build/adapters/anthropic.js.map +1 -0
- package/build/adapters/base/BaseAdapter.d.ts +13 -0
- package/build/adapters/base/BaseAdapter.js +56 -0
- package/build/adapters/base/BaseAdapter.js.map +1 -0
- package/build/adapters/cloudflare.d.ts +8 -0
- package/build/adapters/cloudflare.js +129 -0
- package/build/adapters/cloudflare.js.map +1 -0
- package/build/adapters/cohere.d.ts +9 -0
- package/build/adapters/cohere.js +158 -0
- package/build/adapters/cohere.js.map +1 -0
- package/build/adapters/deepseek.d.ts +8 -0
- package/build/adapters/deepseek.js +142 -0
- package/build/adapters/deepseek.js.map +1 -0
- package/build/adapters/google.d.ts +9 -0
- package/build/adapters/google.js +166 -0
- package/build/adapters/google.js.map +1 -0
- package/build/adapters/groq.d.ts +8 -0
- package/build/adapters/groq.js +142 -0
- package/build/adapters/groq.js.map +1 -0
- package/build/adapters/index.d.ts +12 -0
- package/build/adapters/index.js +28 -0
- package/build/adapters/index.js.map +1 -0
- package/build/adapters/mistral.d.ts +8 -0
- package/build/adapters/mistral.js +139 -0
- package/build/adapters/mistral.js.map +1 -0
- package/build/adapters/openai.d.ts +9 -0
- package/build/adapters/openai.js +145 -0
- package/build/adapters/openai.js.map +1 -0
- package/build/adapters/openrouter.d.ts +8 -0
- package/build/adapters/openrouter.js +145 -0
- package/build/adapters/openrouter.js.map +1 -0
- package/build/adapters/xai.d.ts +8 -0
- package/build/adapters/xai.js +138 -0
- package/build/adapters/xai.js.map +1 -0
- package/build/index.d.ts +12 -0
- package/build/index.js +29 -0
- package/build/index.js.map +1 -0
- package/build/models.d.ts +6 -0
- package/build/models.js +103 -0
- package/build/models.js.map +1 -0
- package/build/types/index.d.ts +66 -0
- package/build/types/index.js +3 -0
- package/build/types/index.js.map +1 -0
- package/build/types/provider.d.ts +8 -0
- package/build/types/provider.js +3 -0
- package/build/types/provider.js.map +1 -0
- package/build/types/streaming.d.ts +8 -0
- package/build/types/streaming.js +33 -0
- package/build/types/streaming.js.map +1 -0
- package/build/utils/errors.d.ts +21 -0
- package/build/utils/errors.js +70 -0
- package/build/utils/errors.js.map +1 -0
- package/build/utils/model-normalization.d.ts +9 -0
- package/build/utils/model-normalization.js +59 -0
- package/build/utils/model-normalization.js.map +1 -0
- package/build/utils/request-logger.d.ts +43 -0
- package/build/utils/request-logger.js +96 -0
- package/build/utils/request-logger.js.map +1 -0
- package/build/utils/stream.d.ts +8 -0
- package/build/utils/stream.js +109 -0
- package/build/utils/stream.js.map +1 -0
- package/build/utils/validation.d.ts +4 -0
- package/build/utils/validation.js +57 -0
- package/build/utils/validation.js.map +1 -0
- package/example.ts +166 -0
- package/jest.config.js +26 -0
- package/package.json +68 -0
- package/src/@Module.ts +9 -0
- package/src/AIClient.ts +210 -0
- package/src/Extensions.ts +7 -0
- package/src/adapters/ai21.ts +99 -0
- package/src/adapters/anthropic.ts +152 -0
- package/src/adapters/base/BaseAdapter.ts +78 -0
- package/src/adapters/cloudflare.ts +115 -0
- package/src/adapters/cohere.ts +158 -0
- package/src/adapters/deepseek.ts +108 -0
- package/src/adapters/google.ts +170 -0
- package/src/adapters/groq.ts +108 -0
- package/src/adapters/index.ts +14 -0
- package/src/adapters/mistral.ts +108 -0
- package/src/adapters/openai.ts +129 -0
- package/src/adapters/openrouter.ts +110 -0
- package/src/adapters/xai.ts +106 -0
- package/src/index.ts +66 -0
- package/src/models.ts +116 -0
- package/src/types/index.ts +81 -0
- package/src/types/provider.ts +19 -0
- package/src/types/streaming.ts +32 -0
- package/src/utils/errors.ts +76 -0
- package/src/utils/model-normalization.ts +100 -0
- package/src/utils/request-logger.ts +179 -0
- package/src/utils/stream.ts +93 -0
- package/src/utils/validation.ts +69 -0
package/LICENSE
ADDED
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
MIT License
|
|
2
|
+
|
|
3
|
+
Copyright (c) 2020 Elya Livshitz
|
|
4
|
+
|
|
5
|
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
6
|
+
of this software and associated documentation files (the "Software"), to deal
|
|
7
|
+
in the Software without restriction, including without limitation the rights
|
|
8
|
+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
9
|
+
copies of the Software, and to permit persons to whom the Software is
|
|
10
|
+
furnished to do so, subject to the following conditions:
|
|
11
|
+
|
|
12
|
+
The above copyright notice and this permission notice shall be included in all
|
|
13
|
+
copies or substantial portions of the Software.
|
|
14
|
+
|
|
15
|
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
16
|
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
17
|
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
18
|
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
19
|
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
20
|
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|
21
|
+
SOFTWARE.
|
package/README.md
ADDED
|
@@ -0,0 +1,339 @@
|
|
|
1
|
+
# ai.libx.js
|
|
2
|
+
|
|
3
|
+
A unified, stateless API bridge for various AI models including LLMs, image/video generation, TTS, and STT. Edge-compatible and designed for use in serverless environments like Vercel Edge Functions and Cloudflare Workers.
|
|
4
|
+
|
|
5
|
+
## Features
|
|
6
|
+
|
|
7
|
+
- 🚀 **Unified API** - Single interface for multiple AI providers
|
|
8
|
+
- 🔌 **11 Providers** - OpenAI, Anthropic, Google, Groq, Mistral, Cohere, XAI, DeepSeek, AI21, OpenRouter, Cloudflare
|
|
9
|
+
- 🌊 **Streaming Support** - Real-time streaming responses from all compatible providers
|
|
10
|
+
- 📝 **Plain Text Mode** - Raw text output without JSON wrapping
|
|
11
|
+
- 🤖 **Model Normalization** - Intelligent alias resolution (e.g., `gpt-5` → `chatgpt-4o-latest`)
|
|
12
|
+
- 🧠 **Reasoning Model Support** - Automatic detection and parameter adjustment for o1/o3/R1 models
|
|
13
|
+
- 📊 **Request Logging** - Built-in metrics tracking with detailed statistics
|
|
14
|
+
- 🖼️ **Multi-Modal Ready** - Framework support for images (implementation per adapter)
|
|
15
|
+
- 🪶 **Stateless Design** - No state management, pass API keys per request or globally
|
|
16
|
+
- 🌐 **Edge Compatible** - Works with Vercel Edge Functions and Cloudflare Workers
|
|
17
|
+
- 📦 **Tree-Shakeable** - Import only what you need
|
|
18
|
+
- 🔒 **Type-Safe** - Full TypeScript support with comprehensive types
|
|
19
|
+
- ⚡ **Zero Dependencies** - No external runtime dependencies
|
|
20
|
+
|
|
21
|
+
## Installation
|
|
22
|
+
|
|
23
|
+
```bash
|
|
24
|
+
npm install ai.libx.js
|
|
25
|
+
```
|
|
26
|
+
|
|
27
|
+
## Usage
|
|
28
|
+
|
|
29
|
+
### Pattern 1: Generic Client (Runtime Model Selection)
|
|
30
|
+
|
|
31
|
+
```typescript
|
|
32
|
+
import AIClient from 'ai.libx.js';
|
|
33
|
+
|
|
34
|
+
// Initialize with API keys and enable logging
|
|
35
|
+
const ai = new AIClient({
|
|
36
|
+
apiKeys: {
|
|
37
|
+
openai: process.env.OPENAI_API_KEY,
|
|
38
|
+
anthropic: process.env.ANTHROPIC_API_KEY,
|
|
39
|
+
google: process.env.GOOGLE_API_KEY,
|
|
40
|
+
},
|
|
41
|
+
enableLogging: true // Track metrics
|
|
42
|
+
});
|
|
43
|
+
|
|
44
|
+
// Non-streaming chat
|
|
45
|
+
const response = await ai.chat({
|
|
46
|
+
model: 'openai/gpt-4o',
|
|
47
|
+
messages: [
|
|
48
|
+
{ role: 'user', content: 'Hello!' }
|
|
49
|
+
],
|
|
50
|
+
});
|
|
51
|
+
|
|
52
|
+
console.log(response.content);
|
|
53
|
+
console.log(ai.getStats()); // View metrics
|
|
54
|
+
|
|
55
|
+
// Streaming chat
|
|
56
|
+
const stream = await ai.chat({
|
|
57
|
+
model: 'anthropic/claude-3-5-sonnet-latest',
|
|
58
|
+
messages: [
|
|
59
|
+
{ role: 'user', content: 'Write a story' }
|
|
60
|
+
],
|
|
61
|
+
stream: true,
|
|
62
|
+
});
|
|
63
|
+
|
|
64
|
+
for await (const chunk of stream) {
|
|
65
|
+
process.stdout.write(chunk.content);
|
|
66
|
+
}
|
|
67
|
+
|
|
68
|
+
// Plain text mode (raw output)
|
|
69
|
+
const plainResponse = await ai.chat({
|
|
70
|
+
model: 'openai/gpt-4o',
|
|
71
|
+
messages: [{ role: 'user', content: 'Hello!' }],
|
|
72
|
+
plain: true // Returns plain text
|
|
73
|
+
});
|
|
74
|
+
```
|
|
75
|
+
|
|
76
|
+
### Pattern 2: Direct Provider Adapter
|
|
77
|
+
|
|
78
|
+
```typescript
|
|
79
|
+
import { OpenAIAdapter, AnthropicAdapter } from 'ai.libx.js/adapters';
|
|
80
|
+
|
|
81
|
+
// Work directly with a specific provider
|
|
82
|
+
const openai = new OpenAIAdapter({
|
|
83
|
+
apiKey: process.env.OPENAI_API_KEY
|
|
84
|
+
});
|
|
85
|
+
|
|
86
|
+
const response = await openai.chat({
|
|
87
|
+
model: 'gpt-4o', // No vendor prefix needed
|
|
88
|
+
messages: [{ role: 'user', content: 'Hello!' }],
|
|
89
|
+
temperature: 0.7,
|
|
90
|
+
});
|
|
91
|
+
```
|
|
92
|
+
|
|
93
|
+
## API Reference
|
|
94
|
+
|
|
95
|
+
### AIClient
|
|
96
|
+
|
|
97
|
+
#### Constructor Options
|
|
98
|
+
|
|
99
|
+
```typescript
|
|
100
|
+
const ai = new AIClient({
|
|
101
|
+
apiKeys?: Record<string, string>; // API keys by provider
|
|
102
|
+
baseUrls?: Record<string, string>; // Custom base URLs
|
|
103
|
+
cloudflareAccountId?: string; // For Cloudflare Workers AI
|
|
104
|
+
});
|
|
105
|
+
```
|
|
106
|
+
|
|
107
|
+
#### chat(options)
|
|
108
|
+
|
|
109
|
+
```typescript
|
|
110
|
+
await ai.chat({
|
|
111
|
+
model: string; // Format: "provider/model-name"
|
|
112
|
+
messages: Message[]; // Conversation messages
|
|
113
|
+
apiKey?: string; // Override global API key
|
|
114
|
+
temperature?: number; // 0-2, default varies by provider
|
|
115
|
+
maxTokens?: number; // Max tokens to generate
|
|
116
|
+
topP?: number; // 0-1, nucleus sampling
|
|
117
|
+
topK?: number; // Top-k sampling
|
|
118
|
+
frequencyPenalty?: number; // -2 to 2
|
|
119
|
+
presencePenalty?: number; // -2 to 2
|
|
120
|
+
stop?: string | string[]; // Stop sequences
|
|
121
|
+
stream?: boolean; // Enable streaming
|
|
122
|
+
providerOptions?: object; // Provider-specific options
|
|
123
|
+
});
|
|
124
|
+
```
|
|
125
|
+
|
|
126
|
+
### Supported Providers
|
|
127
|
+
|
|
128
|
+
| Provider | Prefix | Models |
|
|
129
|
+
|----------|--------|---------|
|
|
130
|
+
| OpenAI | `openai/` | GPT-4, GPT-3.5, etc. |
|
|
131
|
+
| Anthropic | `anthropic/` | Claude 3/4 series |
|
|
132
|
+
| Google | `google/` | Gemini 1.0/1.5/2.0 |
|
|
133
|
+
| Groq | `groq/` | LLaMA, Mixtral, Gemma |
|
|
134
|
+
| Mistral | `mistral/` | Mistral, Mixtral series |
|
|
135
|
+
| Cohere | `cohere/` | Command series |
|
|
136
|
+
| XAI | `xai/` | Grok series |
|
|
137
|
+
| DeepSeek | `deepseek/` | DeepSeek V3, R1 |
|
|
138
|
+
| AI21 | `ai21/` | Jamba, Jurassic |
|
|
139
|
+
| OpenRouter | `openrouter/` | Multi-model proxy |
|
|
140
|
+
| Cloudflare | `cloudflare/` | Workers AI models |
|
|
141
|
+
|
|
142
|
+
### Types
|
|
143
|
+
|
|
144
|
+
```typescript
|
|
145
|
+
interface Message {
|
|
146
|
+
role: 'system' | 'user' | 'assistant' | 'tool';
|
|
147
|
+
content: string;
|
|
148
|
+
name?: string;
|
|
149
|
+
tool_call_id?: string;
|
|
150
|
+
}
|
|
151
|
+
|
|
152
|
+
interface ChatResponse {
|
|
153
|
+
content: string;
|
|
154
|
+
finishReason?: string;
|
|
155
|
+
usage?: {
|
|
156
|
+
promptTokens: number;
|
|
157
|
+
completionTokens: number;
|
|
158
|
+
totalTokens: number;
|
|
159
|
+
};
|
|
160
|
+
model: string;
|
|
161
|
+
raw?: any; // Original provider response
|
|
162
|
+
}
|
|
163
|
+
|
|
164
|
+
interface StreamChunk {
|
|
165
|
+
content: string;
|
|
166
|
+
finishReason?: string;
|
|
167
|
+
index?: number;
|
|
168
|
+
}
|
|
169
|
+
```
|
|
170
|
+
|
|
171
|
+
## Provider-Specific Notes
|
|
172
|
+
|
|
173
|
+
### Cloudflare Workers AI
|
|
174
|
+
|
|
175
|
+
Requires account ID:
|
|
176
|
+
|
|
177
|
+
```typescript
|
|
178
|
+
const ai = new AIClient({
|
|
179
|
+
apiKeys: { cloudflare: 'YOUR_API_KEY' },
|
|
180
|
+
cloudflareAccountId: 'YOUR_ACCOUNT_ID',
|
|
181
|
+
});
|
|
182
|
+
```
|
|
183
|
+
|
|
184
|
+
### OpenRouter
|
|
185
|
+
|
|
186
|
+
Supports custom headers:
|
|
187
|
+
|
|
188
|
+
```typescript
|
|
189
|
+
await ai.chat({
|
|
190
|
+
model: 'openrouter/meta-llama/llama-3-70b-instruct',
|
|
191
|
+
messages: [...],
|
|
192
|
+
providerOptions: {
|
|
193
|
+
httpReferer: 'https://yourapp.com',
|
|
194
|
+
xTitle: 'Your App Name',
|
|
195
|
+
}
|
|
196
|
+
});
|
|
197
|
+
```
|
|
198
|
+
|
|
199
|
+
## Edge Runtime Compatibility
|
|
200
|
+
|
|
201
|
+
This library is designed to work in edge environments:
|
|
202
|
+
|
|
203
|
+
```typescript
|
|
204
|
+
// Vercel Edge Function
|
|
205
|
+
export const config = { runtime: 'edge' };
|
|
206
|
+
|
|
207
|
+
export default async function handler(req: Request) {
|
|
208
|
+
const ai = new AIClient({
|
|
209
|
+
apiKeys: { openai: process.env.OPENAI_API_KEY }
|
|
210
|
+
});
|
|
211
|
+
|
|
212
|
+
const response = await ai.chat({
|
|
213
|
+
model: 'openai/gpt-4o-mini',
|
|
214
|
+
messages: [{ role: 'user', content: 'Hello!' }],
|
|
215
|
+
});
|
|
216
|
+
|
|
217
|
+
return new Response(JSON.stringify(response));
|
|
218
|
+
}
|
|
219
|
+
```
|
|
220
|
+
|
|
221
|
+
## Error Handling
|
|
222
|
+
|
|
223
|
+
```typescript
|
|
224
|
+
import {
|
|
225
|
+
AILibError,
|
|
226
|
+
AuthenticationError,
|
|
227
|
+
InvalidRequestError,
|
|
228
|
+
RateLimitError,
|
|
229
|
+
ModelNotFoundError,
|
|
230
|
+
ProviderError
|
|
231
|
+
} from 'ai.libx.js';
|
|
232
|
+
|
|
233
|
+
try {
|
|
234
|
+
const response = await ai.chat({...});
|
|
235
|
+
} catch (error) {
|
|
236
|
+
if (error instanceof AuthenticationError) {
|
|
237
|
+
console.error('Invalid API key');
|
|
238
|
+
} else if (error instanceof RateLimitError) {
|
|
239
|
+
console.error('Rate limit exceeded');
|
|
240
|
+
} else if (error instanceof ModelNotFoundError) {
|
|
241
|
+
console.error('Model not found');
|
|
242
|
+
}
|
|
243
|
+
}
|
|
244
|
+
```
|
|
245
|
+
|
|
246
|
+
## Model Utilities
|
|
247
|
+
|
|
248
|
+
### Model Registry
|
|
249
|
+
|
|
250
|
+
```typescript
|
|
251
|
+
import {
|
|
252
|
+
supportedModels,
|
|
253
|
+
getModelInfo,
|
|
254
|
+
listModels,
|
|
255
|
+
isModelSupported,
|
|
256
|
+
getProviderFromModel
|
|
257
|
+
} from 'ai.libx.js';
|
|
258
|
+
|
|
259
|
+
// Get model info
|
|
260
|
+
const info = getModelInfo('openai/gpt-4o');
|
|
261
|
+
console.log(info?.displayName); // "GPT-4o"
|
|
262
|
+
|
|
263
|
+
// List all models for a provider
|
|
264
|
+
const openaiModels = listModels('openai');
|
|
265
|
+
|
|
266
|
+
// Check if model is supported
|
|
267
|
+
if (isModelSupported('anthropic/claude-3-5-sonnet-latest')) {
|
|
268
|
+
// ...
|
|
269
|
+
}
|
|
270
|
+
|
|
271
|
+
// Extract provider from model string
|
|
272
|
+
const provider = getProviderFromModel('openai/gpt-4o'); // "openai"
|
|
273
|
+
```
|
|
274
|
+
|
|
275
|
+
### Model Normalization
|
|
276
|
+
|
|
277
|
+
```typescript
|
|
278
|
+
import {
|
|
279
|
+
normalizeModelName,
|
|
280
|
+
isReasoningModel,
|
|
281
|
+
supportsSystemMessages,
|
|
282
|
+
getReasoningModelAdjustments
|
|
283
|
+
} from 'ai.libx.js';
|
|
284
|
+
|
|
285
|
+
// Resolve model aliases
|
|
286
|
+
normalizeModelName('gpt-5'); // → 'chatgpt-4o-latest'
|
|
287
|
+
normalizeModelName('claude-4'); // → 'claude-sonnet-4-0'
|
|
288
|
+
normalizeModelName('gemini'); // → 'models/gemini-2.0-flash'
|
|
289
|
+
|
|
290
|
+
// Check for reasoning models
|
|
291
|
+
isReasoningModel('openai/o1-preview'); // true
|
|
292
|
+
isReasoningModel('deepseek/deepseek-reasoner'); // true
|
|
293
|
+
|
|
294
|
+
// Check system message support
|
|
295
|
+
supportsSystemMessages('openai/o1-preview'); // false (o1 doesn't support)
|
|
296
|
+
supportsSystemMessages('openai/gpt-4o'); // true
|
|
297
|
+
|
|
298
|
+
// Get required parameter adjustments
|
|
299
|
+
const adjustments = getReasoningModelAdjustments('openai/o3-mini');
|
|
300
|
+
// { temperature: 1, topP: 1, useMaxCompletionTokens: true }
|
|
301
|
+
```
|
|
302
|
+
|
|
303
|
+
### Request Logging
|
|
304
|
+
|
|
305
|
+
```typescript
|
|
306
|
+
const ai = new AIClient({
|
|
307
|
+
apiKeys: { openai: 'sk-...' },
|
|
308
|
+
enableLogging: true
|
|
309
|
+
});
|
|
310
|
+
|
|
311
|
+
// Make requests...
|
|
312
|
+
await ai.chat({ model: 'openai/gpt-4o', messages: [...] });
|
|
313
|
+
|
|
314
|
+
// Get statistics
|
|
315
|
+
const stats = ai.getStats();
|
|
316
|
+
console.log(stats);
|
|
317
|
+
// {
|
|
318
|
+
// totalRequests: 10,
|
|
319
|
+
// successfulRequests: 9,
|
|
320
|
+
// failedRequests: 1,
|
|
321
|
+
// averageLatency: 1234,
|
|
322
|
+
// totalTokensUsed: 12500,
|
|
323
|
+
// providerBreakdown: {
|
|
324
|
+
// openai: { requests: 6, avgLatency: 1100, tokens: 8000 }
|
|
325
|
+
// }
|
|
326
|
+
// }
|
|
327
|
+
```
|
|
328
|
+
|
|
329
|
+
## Examples
|
|
330
|
+
|
|
331
|
+
See [example.ts](./example.ts) for complete usage examples.
|
|
332
|
+
|
|
333
|
+
## License
|
|
334
|
+
|
|
335
|
+
MIT
|
|
336
|
+
|
|
337
|
+
## Contributing
|
|
338
|
+
|
|
339
|
+
Contributions are welcome! This library is designed to be extensible - adding new providers is straightforward by implementing the `IProviderAdapter` interface.
|
package/build/@Module.js
ADDED
|
@@ -0,0 +1,14 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.ModuleOptions = exports.Module = void 0;
|
|
4
|
+
class Module {
|
|
5
|
+
constructor(options) {
|
|
6
|
+
this.options = options;
|
|
7
|
+
this.options = Object.assign(Object.assign({}, new ModuleOptions()), options);
|
|
8
|
+
}
|
|
9
|
+
}
|
|
10
|
+
exports.Module = Module;
|
|
11
|
+
class ModuleOptions {
|
|
12
|
+
}
|
|
13
|
+
exports.ModuleOptions = ModuleOptions;
|
|
14
|
+
//# sourceMappingURL=@Module.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"@Module.js","sourceRoot":"","sources":["../src/@Module.ts"],"names":[],"mappings":";;;AAEA,MAAa,MAAM;IACf,YAA0B,OAAgC;QAAhC,YAAO,GAAP,OAAO,CAAyB;QACtD,IAAI,CAAC,OAAO,mCAAQ,IAAI,aAAa,EAAE,GAAK,OAAO,CAAE,CAAC;IAC1D,CAAC;CACJ;AAJD,wBAIC;AAED,MAAa,aAAa;CAAG;AAA7B,sCAA6B","sourcesContent":["// Module template\n\nexport class Module {\n public constructor(public options?: Partial<ModuleOptions>) {\n this.options = { ...new ModuleOptions(), ...options };\n }\n}\n\nexport class ModuleOptions {}\n"]}
|
|
@@ -0,0 +1,19 @@
|
|
|
1
|
+
import { ChatOptions, ChatResponse, StreamChunk } from './types';
|
|
2
|
+
import { RequestLogger } from './utils/request-logger';
|
|
3
|
+
export interface AIClientConfig {
|
|
4
|
+
apiKeys?: Record<string, string>;
|
|
5
|
+
baseUrls?: Record<string, string>;
|
|
6
|
+
cloudflareAccountId?: string;
|
|
7
|
+
enableLogging?: boolean;
|
|
8
|
+
}
|
|
9
|
+
export declare class AIClient {
|
|
10
|
+
private config;
|
|
11
|
+
private adapters;
|
|
12
|
+
private logger;
|
|
13
|
+
constructor(config?: AIClientConfig);
|
|
14
|
+
chat(options: ChatOptions): Promise<ChatResponse | AsyncIterable<StreamChunk>>;
|
|
15
|
+
private getAdapter;
|
|
16
|
+
clearAdapters(): void;
|
|
17
|
+
getLogger(): RequestLogger;
|
|
18
|
+
getStats(): import("./utils/request-logger").LoggerStats;
|
|
19
|
+
}
|
|
@@ -0,0 +1,132 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
|
3
|
+
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
|
4
|
+
return new (P || (P = Promise))(function (resolve, reject) {
|
|
5
|
+
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
|
6
|
+
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
|
7
|
+
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
|
8
|
+
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
|
9
|
+
});
|
|
10
|
+
};
|
|
11
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
12
|
+
exports.AIClient = void 0;
|
|
13
|
+
const validation_1 = require("./utils/validation");
|
|
14
|
+
const errors_1 = require("./utils/errors");
|
|
15
|
+
const models_1 = require("./models");
|
|
16
|
+
const model_normalization_1 = require("./utils/model-normalization");
|
|
17
|
+
const request_logger_1 = require("./utils/request-logger");
|
|
18
|
+
const openai_1 = require("./adapters/openai");
|
|
19
|
+
const anthropic_1 = require("./adapters/anthropic");
|
|
20
|
+
const google_1 = require("./adapters/google");
|
|
21
|
+
const groq_1 = require("./adapters/groq");
|
|
22
|
+
const mistral_1 = require("./adapters/mistral");
|
|
23
|
+
const cohere_1 = require("./adapters/cohere");
|
|
24
|
+
const xai_1 = require("./adapters/xai");
|
|
25
|
+
const deepseek_1 = require("./adapters/deepseek");
|
|
26
|
+
const ai21_1 = require("./adapters/ai21");
|
|
27
|
+
const openrouter_1 = require("./adapters/openrouter");
|
|
28
|
+
const cloudflare_1 = require("./adapters/cloudflare");
|
|
29
|
+
class AIClient {
|
|
30
|
+
constructor(config = {}) {
|
|
31
|
+
this.adapters = new Map();
|
|
32
|
+
this.config = config;
|
|
33
|
+
this.logger = (0, request_logger_1.getRequestLogger)(config.enableLogging || false);
|
|
34
|
+
}
|
|
35
|
+
chat(options) {
|
|
36
|
+
return __awaiter(this, void 0, void 0, function* () {
|
|
37
|
+
var _a, _b;
|
|
38
|
+
const normalizedModel = (0, model_normalization_1.normalizeModelName)(options.model);
|
|
39
|
+
options = Object.assign(Object.assign({}, options), { model: normalizedModel });
|
|
40
|
+
(0, validation_1.validateChatOptions)(options);
|
|
41
|
+
const provider = (0, models_1.getProviderFromModel)(options.model);
|
|
42
|
+
if (!provider) {
|
|
43
|
+
throw new errors_1.InvalidRequestError(`Invalid model format: "${options.model}". Expected format: "provider/model-name"`);
|
|
44
|
+
}
|
|
45
|
+
if (!(0, models_1.isModelSupported)(options.model)) {
|
|
46
|
+
throw new errors_1.ModelNotFoundError(options.model);
|
|
47
|
+
}
|
|
48
|
+
const adapter = this.getAdapter(provider);
|
|
49
|
+
const chatOptions = Object.assign(Object.assign({}, options), { apiKey: options.apiKey || ((_a = this.config.apiKeys) === null || _a === void 0 ? void 0 : _a[provider]) });
|
|
50
|
+
const tracker = this.logger.startRequest(provider, options.model);
|
|
51
|
+
try {
|
|
52
|
+
const result = yield adapter.chat(chatOptions);
|
|
53
|
+
if ('content' in result) {
|
|
54
|
+
const tokens = (_b = result.usage) === null || _b === void 0 ? void 0 : _b.totalTokens;
|
|
55
|
+
this.logger.logRequest(tracker, true, tokens);
|
|
56
|
+
}
|
|
57
|
+
else {
|
|
58
|
+
this.logger.logRequest(tracker, true);
|
|
59
|
+
}
|
|
60
|
+
return result;
|
|
61
|
+
}
|
|
62
|
+
catch (error) {
|
|
63
|
+
this.logger.logRequest(tracker, false, undefined, error instanceof Error ? error.message : 'Unknown error');
|
|
64
|
+
throw error;
|
|
65
|
+
}
|
|
66
|
+
});
|
|
67
|
+
}
|
|
68
|
+
getAdapter(provider) {
|
|
69
|
+
var _a, _b;
|
|
70
|
+
if (this.adapters.has(provider)) {
|
|
71
|
+
return this.adapters.get(provider);
|
|
72
|
+
}
|
|
73
|
+
const adapterConfig = {
|
|
74
|
+
apiKey: (_a = this.config.apiKeys) === null || _a === void 0 ? void 0 : _a[provider],
|
|
75
|
+
baseUrl: (_b = this.config.baseUrls) === null || _b === void 0 ? void 0 : _b[provider],
|
|
76
|
+
};
|
|
77
|
+
if (provider === 'cloudflare') {
|
|
78
|
+
adapterConfig.cloudflareAccountId = this.config.cloudflareAccountId;
|
|
79
|
+
}
|
|
80
|
+
let adapter;
|
|
81
|
+
switch (provider) {
|
|
82
|
+
case 'openai':
|
|
83
|
+
adapter = new openai_1.OpenAIAdapter(adapterConfig);
|
|
84
|
+
break;
|
|
85
|
+
case 'anthropic':
|
|
86
|
+
adapter = new anthropic_1.AnthropicAdapter(adapterConfig);
|
|
87
|
+
break;
|
|
88
|
+
case 'google':
|
|
89
|
+
adapter = new google_1.GoogleAdapter(adapterConfig);
|
|
90
|
+
break;
|
|
91
|
+
case 'groq':
|
|
92
|
+
adapter = new groq_1.GroqAdapter(adapterConfig);
|
|
93
|
+
break;
|
|
94
|
+
case 'mistral':
|
|
95
|
+
adapter = new mistral_1.MistralAdapter(adapterConfig);
|
|
96
|
+
break;
|
|
97
|
+
case 'cohere':
|
|
98
|
+
adapter = new cohere_1.CohereAdapter(adapterConfig);
|
|
99
|
+
break;
|
|
100
|
+
case 'xai':
|
|
101
|
+
adapter = new xai_1.XAIAdapter(adapterConfig);
|
|
102
|
+
break;
|
|
103
|
+
case 'deepseek':
|
|
104
|
+
adapter = new deepseek_1.DeepSeekAdapter(adapterConfig);
|
|
105
|
+
break;
|
|
106
|
+
case 'ai21':
|
|
107
|
+
adapter = new ai21_1.AI21Adapter(adapterConfig);
|
|
108
|
+
break;
|
|
109
|
+
case 'openrouter':
|
|
110
|
+
adapter = new openrouter_1.OpenRouterAdapter(adapterConfig);
|
|
111
|
+
break;
|
|
112
|
+
case 'cloudflare':
|
|
113
|
+
adapter = new cloudflare_1.CloudflareAdapter(adapterConfig);
|
|
114
|
+
break;
|
|
115
|
+
default:
|
|
116
|
+
throw new errors_1.InvalidRequestError(`Unsupported provider: ${provider}`);
|
|
117
|
+
}
|
|
118
|
+
this.adapters.set(provider, adapter);
|
|
119
|
+
return adapter;
|
|
120
|
+
}
|
|
121
|
+
clearAdapters() {
|
|
122
|
+
this.adapters.clear();
|
|
123
|
+
}
|
|
124
|
+
getLogger() {
|
|
125
|
+
return this.logger;
|
|
126
|
+
}
|
|
127
|
+
getStats() {
|
|
128
|
+
return this.logger.getStats();
|
|
129
|
+
}
|
|
130
|
+
}
|
|
131
|
+
exports.AIClient = AIClient;
|
|
132
|
+
//# sourceMappingURL=AIClient.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"AIClient.js","sourceRoot":"","sources":["../src/AIClient.ts"],"names":[],"mappings":";;;;;;;;;;;;AAEA,mDAAyD;AACzD,2CAAyE;AACzE,qCAAkE;AAClE,qEAAiE;AACjE,2DAAyE;AAGzE,8CAAkD;AAClD,oDAAwD;AACxD,8CAAkD;AAClD,0CAA8C;AAC9C,gDAAoD;AACpD,8CAAkD;AAClD,wCAA4C;AAC5C,kDAAsD;AACtD,0CAA8C;AAC9C,sDAA0D;AAC1D,sDAA0D;AA4B1D,MAAa,QAAQ;IAKpB,YAAY,SAAyB,EAAE;QAH/B,aAAQ,GAAkC,IAAI,GAAG,EAAE,CAAC;QAI3D,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC;QACrB,IAAI,CAAC,MAAM,GAAG,IAAA,iCAAgB,EAAC,MAAM,CAAC,aAAa,IAAI,KAAK,CAAC,CAAC;IAC/D,CAAC;IAOK,IAAI,CAAC,OAAoB;;;YAE9B,MAAM,eAAe,GAAG,IAAA,wCAAkB,EAAC,OAAO,CAAC,KAAK,CAAC,CAAC;YAC1D,OAAO,mCAAQ,OAAO,KAAE,KAAK,EAAE,eAAe,GAAE,CAAC;YAGjD,IAAA,gCAAmB,EAAC,OAAO,CAAC,CAAC;YAG7B,MAAM,QAAQ,GAAG,IAAA,6BAAoB,EAAC,OAAO,CAAC,KAAK,CAAC,CAAC;YACrD,IAAI,CAAC,QAAQ,EAAE,CAAC;gBACf,MAAM,IAAI,4BAAmB,CAC5B,0BAA0B,OAAO,CAAC,KAAK,2CAA2C,CAClF,CAAC;YACH,CAAC;YAGD,IAAI,CAAC,IAAA,yBAAgB,EAAC,OAAO,CAAC,KAAK,CAAC,EAAE,CAAC;gBACtC,MAAM,IAAI,2BAAkB,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC;YAC7C,CAAC;YAGD,MAAM,OAAO,GAAG,IAAI,CAAC,UAAU,CAAC,QAAQ,CAAC,CAAC;YAG1C,MAAM,WAAW,mCACb,OAAO,KACV,MAAM,EAAE,OAAO,CAAC,MAAM,KAAI,MAAA,IAAI,CAAC,MAAM,CAAC,OAAO,0CAAG,QAAQ,CAAC,CAAA,GACzD,CAAC;YAGF,MAAM,OAAO,GAAG,IAAI,CAAC,MAAM,CAAC,YAAY,CAAC,QAAQ,EAAE,OAAO,CAAC,KAAK,CAAC,CAAC;YAElE,IAAI,CAAC;gBAEJ,MAAM,MAAM,GAAG,MAAM,OAAO,CAAC,IAAI,CAAC,WAAW,CAAC,CAAC;gBAG/C,IAAI,SAAS,IAAI,MAAM,EAAE,CAAC;oBACzB,MAAM,MAAM,GAAG,MAAA,MAAM,CAAC,KAAK,0CAAE,WAAW,CAAC;oBACzC,IAAI,CAAC,MAAM,CAAC,UAAU,CAAC,OAAO,EAAE,IAAI,EAAE,MAAM,CAAC,CAAC;gBAC/C,CAAC;qBAAM,CAAC;oBAEP,IAAI,CAAC,MAAM,CAAC,UAAU,CAAC,OAAO,EAAE,IAAI,CAAC,CAAC;gBACvC,CAAC;gBAED,OAAO,MAAM,CAAC;YACf,CAAC;YAAC,OAAO,KAAK,EAAE,CAAC;gBAEhB,IAAI,CAAC,MAAM,CAAC,UAAU,CACrB,OAAO,EACP,KAAK,EACL,SAAS,EACT,KAAK,YAAY,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,eAAe,CACxD,CAAC;gBACF,MAAM,KAAK,CAAC;YACb,CAAC;QACF,CAAC;KAAA;IAKO,UAAU,CAAC,QAAgB;;QAElC,IAAI,IAAI,CAAC,QAAQ,CAAC,GAAG,CAAC,QAAQ,CAAC,EAAE,CAAC;YACjC,OAAO,IAAI,CAAC,QAAQ,CAAC,GAAG,CAAC,QAAQ,CAAE,CAAC;QACrC,CAA
C;QAGD,MAAM,aAAa,GAAmB;YACrC,MAAM,EAAE,MAAA,IAAI,CAAC,MAAM,CAAC,OAAO,0CAAG,QAAQ,CAAC;YACvC,OAAO,EAAE,MAAA,IAAI,CAAC,MAAM,CAAC,QAAQ,0CAAG,QAAQ,CAAC;SACzC,CAAC;QAGF,IAAI,QAAQ,KAAK,YAAY,EAAE,CAAC;YAC/B,aAAa,CAAC,mBAAmB,GAAG,IAAI,CAAC,MAAM,CAAC,mBAAmB,CAAC;QACrE,CAAC;QAGD,IAAI,OAAyB,CAAC;QAE9B,QAAQ,QAAQ,EAAE,CAAC;YAClB,KAAK,QAAQ;gBACZ,OAAO,GAAG,IAAI,sBAAa,CAAC,aAAa,CAAC,CAAC;gBAC3C,MAAM;YACP,KAAK,WAAW;gBACf,OAAO,GAAG,IAAI,4BAAgB,CAAC,aAAa,CAAC,CAAC;gBAC9C,MAAM;YACP,KAAK,QAAQ;gBACZ,OAAO,GAAG,IAAI,sBAAa,CAAC,aAAa,CAAC,CAAC;gBAC3C,MAAM;YACP,KAAK,MAAM;gBACV,OAAO,GAAG,IAAI,kBAAW,CAAC,aAAa,CAAC,CAAC;gBACzC,MAAM;YACP,KAAK,SAAS;gBACb,OAAO,GAAG,IAAI,wBAAc,CAAC,aAAa,CAAC,CAAC;gBAC5C,MAAM;YACP,KAAK,QAAQ;gBACZ,OAAO,GAAG,IAAI,sBAAa,CAAC,aAAa,CAAC,CAAC;gBAC3C,MAAM;YACP,KAAK,KAAK;gBACT,OAAO,GAAG,IAAI,gBAAU,CAAC,aAAa,CAAC,CAAC;gBACxC,MAAM;YACP,KAAK,UAAU;gBACd,OAAO,GAAG,IAAI,0BAAe,CAAC,aAAa,CAAC,CAAC;gBAC7C,MAAM;YACP,KAAK,MAAM;gBACV,OAAO,GAAG,IAAI,kBAAW,CAAC,aAAa,CAAC,CAAC;gBACzC,MAAM;YACP,KAAK,YAAY;gBAChB,OAAO,GAAG,IAAI,8BAAiB,CAAC,aAAa,CAAC,CAAC;gBAC/C,MAAM;YACP,KAAK,YAAY;gBAChB,OAAO,GAAG,IAAI,8BAAiB,CAAC,aAAa,CAAC,CAAC;gBAC/C,MAAM;YACP;gBACC,MAAM,IAAI,4BAAmB,CAAC,yBAAyB,QAAQ,EAAE,CAAC,CAAC;QACrE,CAAC;QAGD,IAAI,CAAC,QAAQ,CAAC,GAAG,CAAC,QAAQ,EAAE,OAAO,CAAC,CAAC;QAErC,OAAO,OAAO,CAAC;IAChB,CAAC;IAKD,aAAa;QACZ,IAAI,CAAC,QAAQ,CAAC,KAAK,EAAE,CAAC;IACvB,CAAC;IAKD,SAAS;QACR,OAAO,IAAI,CAAC,MAAM,CAAC;IACpB,CAAC;IAKD,QAAQ;QACP,OAAO,IAAI,CAAC,MAAM,CAAC,QAAQ,EAAE,CAAC;IAC/B,CAAC;CACD;AAjKD,4BAiKC","sourcesContent":["import { ChatOptions, ChatResponse, StreamChunk, ProviderConfig } from './types';\nimport { IProviderAdapter } from './types/provider';\nimport { validateChatOptions } from './utils/validation';\nimport { ModelNotFoundError, InvalidRequestError } from './utils/errors';\nimport { getProviderFromModel, isModelSupported } from './models';\nimport { normalizeModelName } from './utils/model-normalization';\nimport { RequestLogger, getRequestLogger } from './utils/request-logger';\n\n// 
Lazy import adapters\nimport { OpenAIAdapter } from './adapters/openai';\nimport { AnthropicAdapter } from './adapters/anthropic';\nimport { GoogleAdapter } from './adapters/google';\nimport { GroqAdapter } from './adapters/groq';\nimport { MistralAdapter } from './adapters/mistral';\nimport { CohereAdapter } from './adapters/cohere';\nimport { XAIAdapter } from './adapters/xai';\nimport { DeepSeekAdapter } from './adapters/deepseek';\nimport { AI21Adapter } from './adapters/ai21';\nimport { OpenRouterAdapter } from './adapters/openrouter';\nimport { CloudflareAdapter } from './adapters/cloudflare';\n\nexport interface AIClientConfig {\n\t/**\n\t * API keys for different providers\n\t * Example: { openai: 'sk-...', anthropic: 'sk-ant-...', google: '...' }\n\t */\n\tapiKeys?: Record<string, string>;\n\n\t/**\n\t * Base URLs for providers (optional, for custom endpoints)\n\t */\n\tbaseUrls?: Record<string, string>;\n\n\t/**\n\t * Cloudflare account ID (required for Cloudflare Workers AI)\n\t */\n\tcloudflareAccountId?: string;\n\n\t/**\n\t * Enable request logging for metrics tracking\n\t */\n\tenableLogging?: boolean;\n}\n\n/**\n * Main AI client class providing unified access to multiple AI providers\n */\nexport class AIClient {\n\tprivate config: AIClientConfig;\n\tprivate adapters: Map<string, IProviderAdapter> = new Map();\n\tprivate logger: RequestLogger;\n\n\tconstructor(config: AIClientConfig = {}) {\n\t\tthis.config = config;\n\t\tthis.logger = getRequestLogger(config.enableLogging || false);\n\t}\n\n\t/**\n\t * Execute a chat completion request\n\t * @param options Chat options including model, messages, and parameters\n\t * @returns ChatResponse for non-streaming, AsyncIterable<StreamChunk> for streaming\n\t */\n\tasync chat(options: ChatOptions): Promise<ChatResponse | AsyncIterable<StreamChunk>> {\n\t\t// Normalize model name (resolve aliases)\n\t\tconst normalizedModel = normalizeModelName(options.model);\n\t\toptions = { ...options, model: 
normalizedModel };\n\n\t\t// Validate options\n\t\tvalidateChatOptions(options);\n\n\t\t// Extract provider from model string\n\t\tconst provider = getProviderFromModel(options.model);\n\t\tif (!provider) {\n\t\t\tthrow new InvalidRequestError(\n\t\t\t\t`Invalid model format: \"${options.model}\". Expected format: \"provider/model-name\"`\n\t\t\t);\n\t\t}\n\n\t\t// Check if model is supported\n\t\tif (!isModelSupported(options.model)) {\n\t\t\tthrow new ModelNotFoundError(options.model);\n\t\t}\n\n\t\t// Get or create adapter\n\t\tconst adapter = this.getAdapter(provider);\n\n\t\t// Merge API keys from config if not provided in options\n\t\tconst chatOptions: ChatOptions = {\n\t\t\t...options,\n\t\t\tapiKey: options.apiKey || this.config.apiKeys?.[provider],\n\t\t};\n\n\t\t// Start request tracking\n\t\tconst tracker = this.logger.startRequest(provider, options.model);\n\n\t\ttry {\n\t\t\t// Execute chat\n\t\t\tconst result = await adapter.chat(chatOptions);\n\n\t\t\t// Log successful request\n\t\t\tif ('content' in result) {\n\t\t\t\tconst tokens = result.usage?.totalTokens;\n\t\t\t\tthis.logger.logRequest(tracker, true, tokens);\n\t\t\t} else {\n\t\t\t\t// Streaming - log without token count\n\t\t\t\tthis.logger.logRequest(tracker, true);\n\t\t\t}\n\n\t\t\treturn result;\n\t\t} catch (error) {\n\t\t\t// Log failed request\n\t\t\tthis.logger.logRequest(\n\t\t\t\ttracker,\n\t\t\t\tfalse,\n\t\t\t\tundefined,\n\t\t\t\terror instanceof Error ? 
error.message : 'Unknown error'\n\t\t\t);\n\t\t\tthrow error;\n\t\t}\n\t}\n\n\t/**\n\t * Get adapter for a provider (creates if not exists)\n\t */\n\tprivate getAdapter(provider: string): IProviderAdapter {\n\t\t// Return cached adapter if exists\n\t\tif (this.adapters.has(provider)) {\n\t\t\treturn this.adapters.get(provider)!;\n\t\t}\n\n\t\t// Create adapter config\n\t\tconst adapterConfig: ProviderConfig = {\n\t\t\tapiKey: this.config.apiKeys?.[provider],\n\t\t\tbaseUrl: this.config.baseUrls?.[provider],\n\t\t};\n\n\t\t// Add cloudflare-specific config\n\t\tif (provider === 'cloudflare') {\n\t\t\tadapterConfig.cloudflareAccountId = this.config.cloudflareAccountId;\n\t\t}\n\n\t\t// Create new adapter\n\t\tlet adapter: IProviderAdapter;\n\n\t\tswitch (provider) {\n\t\t\tcase 'openai':\n\t\t\t\tadapter = new OpenAIAdapter(adapterConfig);\n\t\t\t\tbreak;\n\t\t\tcase 'anthropic':\n\t\t\t\tadapter = new AnthropicAdapter(adapterConfig);\n\t\t\t\tbreak;\n\t\t\tcase 'google':\n\t\t\t\tadapter = new GoogleAdapter(adapterConfig);\n\t\t\t\tbreak;\n\t\t\tcase 'groq':\n\t\t\t\tadapter = new GroqAdapter(adapterConfig);\n\t\t\t\tbreak;\n\t\t\tcase 'mistral':\n\t\t\t\tadapter = new MistralAdapter(adapterConfig);\n\t\t\t\tbreak;\n\t\t\tcase 'cohere':\n\t\t\t\tadapter = new CohereAdapter(adapterConfig);\n\t\t\t\tbreak;\n\t\t\tcase 'xai':\n\t\t\t\tadapter = new XAIAdapter(adapterConfig);\n\t\t\t\tbreak;\n\t\t\tcase 'deepseek':\n\t\t\t\tadapter = new DeepSeekAdapter(adapterConfig);\n\t\t\t\tbreak;\n\t\t\tcase 'ai21':\n\t\t\t\tadapter = new AI21Adapter(adapterConfig);\n\t\t\t\tbreak;\n\t\t\tcase 'openrouter':\n\t\t\t\tadapter = new OpenRouterAdapter(adapterConfig);\n\t\t\t\tbreak;\n\t\t\tcase 'cloudflare':\n\t\t\t\tadapter = new CloudflareAdapter(adapterConfig);\n\t\t\t\tbreak;\n\t\t\tdefault:\n\t\t\t\tthrow new InvalidRequestError(`Unsupported provider: ${provider}`);\n\t\t}\n\n\t\t// Cache adapter\n\t\tthis.adapters.set(provider, adapter);\n\n\t\treturn adapter;\n\t}\n\n\t/**\n\t * 
Clear cached adapters (useful for testing or resetting state)\n\t */\n\tclearAdapters(): void {\n\t\tthis.adapters.clear();\n\t}\n\n\t/**\n\t * Get request logger for metrics\n\t */\n\tgetLogger(): RequestLogger {\n\t\treturn this.logger;\n\t}\n\n\t/**\n\t * Get request statistics\n\t */\n\tgetStats() {\n\t\treturn this.logger.getStats();\n\t}\n}\n\n"]}
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"Extensions.js","sourceRoot":"","sources":["../src/Extensions.ts"],"names":[],"mappings":"AAIA,MAAM,CAAC,SAAS,CAAC,SAAS,GAAG,UAAU,KAAK,EAAE,WAAW;IACrD,OAAO,IAAI,CAAC,MAAM,CAAC,CAAC,EAAE,KAAK,CAAC,GAAG,WAAW,GAAG,IAAI,CAAC,MAAM,CAAC,KAAK,GAAG,WAAW,CAAC,MAAM,CAAC,CAAC;AACzF,CAAC,CAAC","sourcesContent":["declare interface String {\n replaceAt(index: number, replacement: string): string;\n}\n\nString.prototype.replaceAt = function (index, replacement) {\n return this.substr(0, index) + replacement + this.substr(index + replacement.length);\n};\n"]}
|
|
@@ -0,0 +1,8 @@
|
|
|
1
|
+
import { BaseAdapter } from './base/BaseAdapter';
import { ChatOptions, ChatResponse, StreamChunk } from '../types';
/**
 * Provider adapter for the AI21 Studio chat-completions API.
 */
export declare class AI21Adapter extends BaseAdapter {
    /** Provider identifier ('ai21'), used for API-key lookup and error reporting. */
    get name(): string;
    /**
     * Execute a chat completion request against AI21.
     * NOTE(review): the declared return type includes AsyncIterable<StreamChunk>,
     * but the compiled implementation only returns a ChatResponse — confirm
     * whether streaming is intended to be supported by this adapter.
     */
    chat(options: ChatOptions): Promise<ChatResponse | AsyncIterable<StreamChunk>>;
    private transformMessages;
    private handleNonStreamResponse;
}
|
|
@@ -0,0 +1,83 @@
|
|
|
1
|
+
"use strict";
// TypeScript down-level emit helper: drives a generator function as a Promise,
// resuming it with each resolved value so that `yield` behaves like `await`.
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    // Wrap a yielded value in the Promise implementation P unless it already is one.
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        // Step the generator: resolve when done, otherwise await the yielded value and continue.
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
|
|
11
|
+
// CommonJS module preamble emitted by the TypeScript compiler.
Object.defineProperty(exports, "__esModule", { value: true });
exports.AI21Adapter = void 0;
// Project-local dependencies: shared adapter base class and error normalization.
const BaseAdapter_1 = require("./base/BaseAdapter");
const errors_1 = require("../utils/errors");
|
|
15
|
+
/**
 * Adapter for the AI21 Studio chat-completions endpoint
 * (OpenAI-compatible `POST {baseUrl}/chat/completions`).
 */
class AI21Adapter extends BaseAdapter_1.BaseAdapter {
    /** Provider identifier used for API-key lookup and error reporting. */
    get name() {
        return 'ai21';
    }
    /**
     * Execute a chat completion request against AI21.
     * @param options Unified chat options; `model` may carry an `ai21/` prefix.
     * @returns The normalized ChatResponse (this adapter does not implement
     *          streaming even though the declared type allows it).
     */
    chat(options) {
        return __awaiter(this, void 0, void 0, function* () {
            try {
                const apiKey = this.getApiKey(options);
                const baseUrl = this.getBaseUrl('https://api.ai21.com/studio/v1');
                // Strip the provider prefix; AI21 expects the bare model name.
                const model = options.model.replace(/^ai21\//, '');
                const request = {
                    model,
                    messages: this.transformMessages(options.messages),
                };
                if (options.temperature !== undefined)
                    request.temperature = options.temperature;
                if (options.maxTokens !== undefined)
                    request.max_tokens = options.maxTokens;
                if (options.topP !== undefined)
                    request.top_p = options.topP;
                // FIX: previously only array-valued `stop` was forwarded, so a plain
                // string stop sequence was silently dropped. AI21's OpenAI-compatible
                // API accepts either a string or an array, so pass both through.
                if (options.stop != null) {
                    request.stop = options.stop;
                }
                // Provider-specific passthrough options override the fields above.
                if (options.providerOptions) {
                    Object.assign(request, options.providerOptions);
                }
                const response = yield this.fetchWithErrorHandling(`${baseUrl}/chat/completions`, {
                    method: 'POST',
                    headers: {
                        'Content-Type': 'application/json',
                        'Authorization': `Bearer ${apiKey}`,
                    },
                    body: JSON.stringify(request),
                }, this.name);
                const data = yield response.json();
                return this.handleNonStreamResponse(data, model);
            }
            catch (error) {
                // handleProviderError is expected to rethrow a normalized error;
                // if it ever returns instead, this method resolves to undefined —
                // TODO confirm against utils/errors.
                (0, errors_1.handleProviderError)(error, this.name);
            }
        });
    }
    /**
     * Reduce unified messages to the { role, content } shape sent to AI21,
     * dropping any additional fields.
     */
    transformMessages(messages) {
        return messages.map((msg) => ({
            role: msg.role,
            content: msg.content,
        }));
    }
    /**
     * Normalize a non-streaming AI21 response body into the unified
     * ChatResponse shape. The raw provider payload is preserved in `raw`.
     * @throws Error when the response contains no choices.
     */
    handleNonStreamResponse(data, model) {
        var _a, _b;
        const choice = (_a = data.choices) === null || _a === void 0 ? void 0 : _a[0];
        if (!choice) {
            throw new Error('No choices in response');
        }
        return {
            content: ((_b = choice.message) === null || _b === void 0 ? void 0 : _b.content) || '',
            finishReason: choice.finish_reason,
            usage: data.usage ? {
                promptTokens: data.usage.prompt_tokens || 0,
                completionTokens: data.usage.completion_tokens || 0,
                totalTokens: data.usage.total_tokens || 0,
            } : undefined,
            model,
            raw: data,
        };
    }
}
|
|
82
|
+
// Named CommonJS export (corresponds to `export class AI21Adapter` in the TS source).
exports.AI21Adapter = AI21Adapter;
//# sourceMappingURL=ai21.js.map
|