dev-ai-sdk 0.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.env.example +4 -0
- package/LICENSE +20 -0
- package/README.md +491 -0
- package/dist/client.d.ts +8 -0
- package/dist/client.d.ts.map +1 -0
- package/dist/client.js +71 -0
- package/dist/client.js.map +1 -0
- package/dist/core/config.d.ts +16 -0
- package/dist/core/config.d.ts.map +1 -0
- package/dist/core/config.js +2 -0
- package/dist/core/config.js.map +1 -0
- package/dist/core/error.d.ts +5 -0
- package/dist/core/error.d.ts.map +1 -0
- package/dist/core/error.js +8 -0
- package/dist/core/error.js.map +1 -0
- package/dist/core/fallbackEngine.d.ts +4 -0
- package/dist/core/fallbackEngine.d.ts.map +1 -0
- package/dist/core/fallbackEngine.js +89 -0
- package/dist/core/fallbackEngine.js.map +1 -0
- package/dist/core/validate.d.ts +5 -0
- package/dist/core/validate.d.ts.map +1 -0
- package/dist/core/validate.js +73 -0
- package/dist/core/validate.js.map +1 -0
- package/dist/index.d.ts +5 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +2 -0
- package/dist/index.js.map +1 -0
- package/dist/providers/deepseek-stream.d.ts +3 -0
- package/dist/providers/deepseek-stream.d.ts.map +1 -0
- package/dist/providers/deepseek-stream.js +70 -0
- package/dist/providers/deepseek-stream.js.map +1 -0
- package/dist/providers/deepseek.d.ts +3 -0
- package/dist/providers/deepseek.d.ts.map +1 -0
- package/dist/providers/deepseek.js +45 -0
- package/dist/providers/deepseek.js.map +1 -0
- package/dist/providers/google-core.d.ts +3 -0
- package/dist/providers/google-core.d.ts.map +1 -0
- package/dist/providers/google-core.js +48 -0
- package/dist/providers/google-core.js.map +1 -0
- package/dist/providers/google-stream.d.ts +3 -0
- package/dist/providers/google-stream.d.ts.map +1 -0
- package/dist/providers/google-stream.js +51 -0
- package/dist/providers/google-stream.js.map +1 -0
- package/dist/providers/google.d.ts +3 -0
- package/dist/providers/google.d.ts.map +1 -0
- package/dist/providers/google.js +10 -0
- package/dist/providers/google.js.map +1 -0
- package/dist/providers/mistral-stream.d.ts +3 -0
- package/dist/providers/mistral-stream.d.ts.map +1 -0
- package/dist/providers/mistral-stream.js +67 -0
- package/dist/providers/mistral-stream.js.map +1 -0
- package/dist/providers/mistral.d.ts +3 -0
- package/dist/providers/mistral.d.ts.map +1 -0
- package/dist/providers/mistral.js +43 -0
- package/dist/providers/mistral.js.map +1 -0
- package/dist/providers/openai-stream.d.ts +3 -0
- package/dist/providers/openai-stream.d.ts.map +1 -0
- package/dist/providers/openai-stream.js +65 -0
- package/dist/providers/openai-stream.js.map +1 -0
- package/dist/providers/openai.d.ts +3 -0
- package/dist/providers/openai.d.ts.map +1 -0
- package/dist/providers/openai.js +40 -0
- package/dist/providers/openai.js.map +1 -0
- package/dist/types/types.d.ts +45 -0
- package/dist/types/types.d.ts.map +1 -0
- package/dist/types/types.js +2 -0
- package/dist/types/types.js.map +1 -0
- package/package.json +50 -0
package/.env.example
ADDED
package/LICENSE
ADDED
@@ -0,0 +1,20 @@
MIT License

Copyright (c) 2026 Shujan Islam

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, OR IN CONNECTION
WITH THE SOFTWARE OR THE USE OF OTHER SOFTWARE OR DEALINGS IN THE SOFTWARE.
package/README.md
ADDED
@@ -0,0 +1,491 @@
# dev-ai-sdk

Universal AI SDK with a single syntax for multiple LLM providers.

This project aims to give you a small, provider-agnostic layer for text generation across different APIs using a consistent TypeScript interface.

It is still in an early, experimental phase.

Currently supported providers:

- OpenAI (Responses API)
- Google Gemini (Generative Language API)
- DeepSeek (chat completions, OpenAI-like)
- Mistral (chat completions, OpenAI-like)

---

## Features (Current)

- Unified interface for multiple providers (OpenAI, Google, DeepSeek, Mistral)
- Simple `genChat` client with a single `generate` method
- Strongly typed configuration and request/response types
- Centralized validation of configuration and provider calls
- Basic support for:
  - `system` prompt (per provider)
  - `temperature` and `maxTokens` (per provider)
- Optional `raw` responses to inspect full provider JSON
- Normalized error type (`SDKError`) with provider tagging
- Tiny, dependency-light TypeScript codebase

Planned (not implemented yet):

- Rich message/chat abstractions
- JSON / structured output helpers
- React / Next.js integrations
- More providers (Anthropic, Azure OpenAI, etc.)

---

## Installation

> This project is not yet published to npm; these instructions assume you are developing or consuming it locally.

Clone the repository and install dependencies:

```bash
npm install
# or
yarn install
# or
pnpm install
```

Build the TypeScript sources:

```bash
npm run build
```

This outputs compiled files to `dist/` as configured in `package.json`.

---

## Core Concepts

The library exposes a single main client class today: `genChat`.

- You configure the client with API keys for the providers you want to use.
- You call `generate` with exactly one provider payload (`google`, `openai`, `deepseek`, or `mistral`).
- The client validates the configuration and the request, then calls the appropriate provider adapter.

Key files:

- `src/client.ts` – main `genChat` class
- `src/providers/google.ts` – Google Gemini implementation
- `src/providers/openai.ts` – OpenAI Responses API implementation
- `src/providers/deepseek.ts` – DeepSeek chat completions implementation
- `src/providers/mistral.ts` – Mistral chat completions implementation
- `src/core/config.ts` – SDK configuration types
- `src/core/validate.ts` – configuration and provider validation
- `src/core/error.ts` – `SDKError` implementation
- `src/types/types.ts` – request/response types

---

## Configuration

The client is configured via an `SDKConfig` object (defined in `src/core/config.ts`):

```ts
export type SDKConfig = {
  google?: {
    apiKey: string;
  };

  openai?: {
    apiKey: string;
  };

  deepseek?: {
    apiKey: string;
  };

  mistral?: {
    apiKey: string;
  };
};
```

Rules:

- At least one provider (`google`, `openai`, `deepseek`, or `mistral`) must be configured.
- Each configured provider must have a non-empty `apiKey` string.
- If these rules are violated, the SDK throws an `SDKError` from `validateConfig`.
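
For example, constructing the client with no providers at all is rejected immediately (a minimal sketch; the exact message comes from `validateConfig`):

```ts
import { genChat } from './src/client';

try {
  // Invalid: no provider configured – validateConfig throws from the constructor.
  new genChat({});
} catch (err) {
  console.error(err); // SDKError
}
```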

Example configuration:

```ts
import { genChat } from './src/client';

const ai = new genChat({
  google: {
    apiKey: process.env.GOOGLE_API_KEY!,
  },
  openai: {
    apiKey: process.env.OPENAI_API_KEY!,
  },
  deepseek: {
    apiKey: process.env.DEEPSEEK_API_KEY!,
  },
  mistral: {
    apiKey: process.env.MISTRAL_API_KEY!,
  },
});
```

You can also configure only the providers you actually intend to use.

---

## Provider Request Shape

Requests are described by the `Provider` type in `src/types/types.ts`:

```ts
export type Provider = {
  google?: {
    model: string;
    prompt: string;
    system?: string;
    temperature?: number;
    maxTokens?: number;
    raw?: boolean;
    stream?: boolean; // stream text from Gemini
  };

  openai?: {
    model: string;
    prompt: string;
    system?: string;
    temperature?: number;
    maxTokens?: number;
    raw?: boolean;
    stream?: boolean; // stream text from OpenAI
  };

  deepseek?: {
    model: string;
    prompt: string;
    system?: string;
    temperature?: number;
    maxTokens?: number;
    raw?: boolean;
    stream?: boolean; // stream text from DeepSeek
  };

  mistral?: {
    model: string;
    prompt: string;
    system?: string;
    temperature?: number;
    maxTokens?: number;
    raw?: boolean;
    stream?: boolean; // stream text from Mistral
  };
}
```

Common fields per provider:

- `model` (**required**) – model name for that provider.
- `prompt` (**required**) – the main user message.
- `system` (optional) – high-level system instruction (currently only passed through if you add support in the provider).
- `temperature` (optional) – sampling temperature (0–2, provider-specific behavior).
- `maxTokens` (optional) – maximum output tokens (provider-specific naming under the hood).
- `raw` (optional) – if `true`, include the full raw provider response in `Output.raw`.
- `stream` (optional) – if `true`, return an async iterable of raw provider events instead of a single `Output` (see "Streaming vs non-streaming" under Usage).

Rules enforced by `validateProvider`:

- Exactly one provider must be present per call:
  - Either `provider.google`, `provider.openai`, `provider.deepseek`, or `provider.mistral`, but not more than one at a time.
- For the selected provider:
  - `model` must be a non-empty string.
  - `prompt` must be a non-empty string.

If these rules are not met, an `SDKError` is thrown.
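
For example, passing two provider payloads in a single call fails validation before any request is sent (a minimal sketch, reusing the `ai` client from the configuration example; the exact error message comes from `validateProvider`):

```ts
try {
  // Invalid: two providers in one generate() call.
  await ai.generate({
    google: { model: 'gemini-2.5-flash-lite', prompt: 'Hello' },
    openai: { model: 'gpt-4.1-mini', prompt: 'Hello' },
  });
} catch (err) {
  console.error(err); // SDKError
}
```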

---

## Response Shape

Responses use the `Output` type from `src/types/types.ts`:

```ts
export type Output = {
  data: string;
  provider: string;
  model: string;
  raw?: any;
}
```

Fields:

- `data`: the main text content returned by the model (extracted from each provider-specific response format).
- `provider`: the provider identifier (for example, `'google'`, `'openai'`, `'deepseek'`, `'mistral'`).
- `model`: the model name that was used.
- `raw` (optional): the full raw JSON response from the provider, included only when `raw: true` is set on the request.

> Note: Internally, some providers may temporarily return `{ text: ... }` instead of `{ data: ... }`, but the long-term intention is to normalize around `data` as the main text field.
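
Until that normalization is finished, a small defensive accessor can hide the difference (a sketch only; it assumes the interim field is named `text`, per the note above):

```ts
// Hypothetical helper: prefer the normalized `data` field and fall back to a
// provisional `text` field for providers that still return that shape.
function getText(result: { data?: string; text?: string }): string {
  return result.data ?? result.text ?? '';
}
```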

---

## Usage

### 0. Streaming vs non-streaming

`genChat.generate` returns either a single `Output` (non-streaming) or an async iterable of chunks (streaming), depending on the per-provider `stream` flag:

- If `stream` is **not** set or `false`, `generate` resolves to an `Output`:
  - `{ data, provider, model, raw? }`.
- If `stream` is `true` for a provider (`google`, `openai`, `deepseek`, or `mistral`), `generate` resolves to an async iterable of chunks:
  - You can use `for await (const chunk of result) { ... }`.
  - For Gemini, each `chunk` is a JSON event; you can drill into `candidates[0].content.parts[0].text` to get only the text.
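
A minimal sketch of branching on the two shapes (it reuses the `Symbol.asyncIterator` check from the Gemini streaming example below; chunks are typed as `any` because their shape is still provider-specific):

```ts
const result = await ai.generate({
  google: { model: 'gemini-2.5-flash-lite', prompt: 'Hello', stream: false },
});

if (Symbol.asyncIterator in Object(result)) {
  // Streaming: iterate over provider-specific chunk events.
  for await (const chunk of result as AsyncIterable<any>) {
    console.log(chunk); // extract the text you need from each event
  }
} else {
  // Non-streaming: a single Output object.
  console.log((result as { data: string }).data);
}
```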

### 1. Creating the Client

Create a new `genChat` instance with the providers you want to use:

```ts
import { genChat } from './src/client';

const ai = new genChat({
  google: {
    apiKey: process.env.GOOGLE_API_KEY!,
  },
  openai: {
    apiKey: process.env.OPENAI_API_KEY!,
  },
  deepseek: {
    apiKey: process.env.DEEPSEEK_API_KEY!,
  },
  mistral: {
    apiKey: process.env.MISTRAL_API_KEY!,
  },
});
```

You can also configure just one provider, e.g. only Mistral:

```ts
const ai = new genChat({
  mistral: {
    apiKey: process.env.MISTRAL_API_KEY!,
  },
});
```

### 2. Calling Google Gemini

#### Non-streaming

```ts
const result = await ai.generate({
  google: {
    model: 'gemini-2.5-flash-lite',
    prompt: 'Summarize the benefits of TypeScript in 3 bullet points.',
    temperature: 0.4,
    maxTokens: 256,
    raw: false, // set to true to include full raw response
  },
});

console.log(result.provider); // 'google'
console.log(result.model); // 'gemini-2.5-flash-lite'
console.log(result.data); // summarized text
```

#### Streaming (Gemini)

```ts
const res = await ai.generate({
  google: {
    model: 'gemini-2.5-flash-lite',
    prompt: 'Explain Vercel in 5 lines.',
    system: 'Act like you are the maker of Vercel and answer accordingly.',
    maxTokens: 500,
    stream: true,
  },
});

if (!(Symbol.asyncIterator in Object(res))) {
  throw new Error('Expected streaming result to be async iterable');
}

for await (const chunk of res as AsyncIterable<any>) {
  const text =
    chunk?.candidates?.[0]?.content?.parts?.[0]?.text ?? '';

  if (text) {
    console.log(text); // only the text from each streamed event
  }
}
```

### 3. Calling OpenAI (Responses API)

```ts
const result = await ai.generate({
  openai: {
    model: 'gpt-4.1-mini',
    prompt: 'Generate a creative product name for a note-taking app.',
    temperature: 0.7,
    maxTokens: 128,
    raw: false, // set to true to include full raw response
  },
});

console.log(result.provider); // 'openai'
console.log(result.model); // 'gpt-4.1-mini'
console.log(result.data); // generated product name
```

### 4. Calling DeepSeek

```ts
const result = await ai.generate({
  deepseek: {
    model: 'deepseek-chat',
    prompt: 'Explain RAG in simple terms.',
    temperature: 0.5,
    maxTokens: 256,
    raw: true, // include full raw DeepSeek response
  },
});

console.log(result.provider); // 'deepseek'
console.log(result.model); // 'deepseek-chat'
console.log(result.data); // explanation text
console.log(result.raw); // full DeepSeek JSON (for debugging)
```

### 5. Calling Mistral

```ts
const result = await ai.generate({
  mistral: {
    model: 'mistral-tiny',
    prompt: 'Give me a short haiku about TypeScript.',
    temperature: 0.8,
    maxTokens: 64,
    raw: true,
  },
});

console.log(result.provider); // 'mistral'
console.log(result.model); // 'mistral-tiny'
console.log(result.data); // haiku text (once the provider normalizes to `data`)
console.log(result.raw); // full Mistral JSON (for inspecting choices/message)
```

> Note: The provider implementations for DeepSeek and Mistral are still evolving. They currently make plain HTTP calls to each provider's chat completions endpoint and focus on returning raw responses for inspection while the exact output normalization is finalized.

---

## Error Handling

All SDK-level errors are represented by the `SDKError` class (`src/core/error.ts`):

```ts
export class SDKError extends Error {
  provider: string;

  constructor(message: string, provider: string) {
    super(message);
    this.provider = provider;
  }
}
```

Examples of when `SDKError` is thrown:

- No providers configured in `SDKConfig`.
- API key is missing or an empty string for a configured provider.
- No provider passed to `generate`.
- More than one provider passed in a single `generate` call.
- `model` or `prompt` is missing/empty for the chosen provider.
- Provider HTTP response is not OK (`res.ok === false`), in which case the error message includes the status code and response data.

You can catch and inspect `SDKError` like this:

```ts
import { SDKError } from './src/core/error';

try {
  const result = await ai.generate({
    google: {
      model: 'gemini-2.5-flash-lite',
      prompt: '', // invalid: empty prompt
    },
  });
} catch (err) {
  if (err instanceof SDKError) {
    console.error('SDK error from provider:', err.provider);
    console.error('Message:', err.message);
  } else {
    console.error('Unknown error:', err);
  }
}
```

---

## Development

### Scripts

Defined in `package.json`:

- `npm run dev` – run `src/index.ts` with `tsx`.
- `npm run build` – run TypeScript compiler (`tsc`).
- `npm run start` – run the built `dist/index.js` with Node.
- `npm run clean` – remove the `dist` directory.

### TypeScript Configuration

`tsconfig.json` is set up with:

- `target`: `ES2022`
- `module`: `ESNext`
- `moduleResolution`: `Bundler`
- `strict`: `true`
- `allowImportingTsExtensions`: `true`
- `noEmit`: `true` (for development; the build step can be adjusted as the project evolves)

The `src/` directory is included for compilation.

---

## Limitations (Current)

This project is currently in an early stage and has several limitations:

- Only single-prompt text generation is supported (no explicit chat/history abstraction yet).
- Streaming is basic and low-level:
  - It returns provider-specific JSON events (for example, Gemini `candidates[].content.parts[].text`).
  - You are responsible for extracting the text you care about from each chunk.
- No structured/JSON output helpers are provided.
- No React/Next.js integrations or hooks are included.
- Output normalization across providers (e.g. always using `data`) is still being finalized.

These limitations are intentional for now to keep the core small and focused while the API surface is still evolving.

---

## Future Directions

The long-term goal is to move toward a feature set closer to the Vercel AI SDK, while staying provider-agnostic and simple. Potential future improvements include:

- `generateText`, `streamText`, and `generateObject` helper functions.
- Unified message-based chat interface and history management.
- First-class streaming support with helpers for Node, browser, and Edge runtimes.
- JSON/structured output helpers, with optional schema validation.
- Tool/function calling abstraction across providers.
- Middleware/hooks for logging, metrics, retries, rate limiting, and caching.
- Official React/Next.js integrations and example apps.
- Support for more providers (Anthropic, Azure OpenAI, etc.).

Contributions and ideas are welcome as the design evolves.
package/dist/client.d.ts
ADDED
@@ -0,0 +1,8 @@
import type { Provider, Output } from './types/types';
import type { SDKConfig } from './core/config';
export declare class genChat {
    private sdkConfig;
    constructor(sdkConfig: SDKConfig);
    generate(provider: Provider): Promise<Output | AsyncIterable<string>>;
}
//# sourceMappingURL=client.d.ts.map
package/dist/client.d.ts.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"client.d.ts","sourceRoot":"","sources":["../src/client.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,QAAQ,EAAE,MAAM,EAAE,MAAM,eAAe,CAAC;AAWtD,OAAO,KAAK,EAAE,SAAS,EAAE,MAAM,eAAe,CAAC;AAG/C,qBAAa,OAAO;IAClB,OAAO,CAAC,SAAS,CAAY;gBAEjB,SAAS,EAAE,SAAS;IAY1B,QAAQ,CAAC,QAAQ,EAAE,QAAQ,GAAG,OAAO,CAAC,MAAM,GAAG,aAAa,CAAC,MAAM,CAAC,CAAC;CAwD5E"}
package/dist/client.js
ADDED
@@ -0,0 +1,71 @@
import { googleCoreProvider } from './providers/google-core';
import { googleStreamProvider } from './providers/google-stream';
import { openaiProvider } from './providers/openai';
import { openaiStreamProvider } from './providers/openai-stream';
import { deepseekProvider } from './providers/deepseek';
import { deepseekStreamProvider } from './providers/deepseek-stream';
import { mistralProvider } from './providers/mistral';
import { mistralStreamProvider } from './providers/mistral-stream';
import { SDKError } from './core/error';
import { validateConfig, validateProvider } from './core/validate';
import { fallbackEngine } from './core/fallbackEngine';
export class genChat {
    sdkConfig;
    constructor(sdkConfig) {
        validateConfig(sdkConfig);
        this.sdkConfig = {
            google: sdkConfig.google ? { ...sdkConfig.google } : undefined,
            openai: sdkConfig.openai ? { ...sdkConfig.openai } : undefined,
            deepseek: sdkConfig.deepseek ? { ...sdkConfig.deepseek } : undefined,
            mistral: sdkConfig.mistral ? { ...sdkConfig.mistral } : undefined,
            fallback: sdkConfig.fallback,
        };
    }
    async generate(provider) {
        validateProvider(provider);
        try {
            if (provider.google) {
                if (provider.google.stream === true) {
                    return googleStreamProvider(provider, this.sdkConfig.google.apiKey);
                }
                return await googleCoreProvider(provider, this.sdkConfig.google.apiKey);
            }
            if (provider.openai) {
                if (provider.openai.stream === true) {
                    return openaiStreamProvider(provider, this.sdkConfig.openai.apiKey);
                }
                return await openaiProvider(provider, this.sdkConfig.openai.apiKey);
            }
            if (provider.deepseek) {
                if (provider.deepseek.stream === true) {
                    return deepseekStreamProvider(provider, this.sdkConfig.deepseek.apiKey);
                }
                return await deepseekProvider(provider, this.sdkConfig.deepseek.apiKey);
            }
            if (provider.mistral) {
                if (provider.mistral.stream === true) {
                    return mistralStreamProvider(provider, this.sdkConfig.mistral.apiKey);
                }
                return await mistralProvider(provider, this.sdkConfig.mistral.apiKey);
            }
            throw new SDKError('No provider passed', 'core');
        }
        catch (err) {
            const isStreaming = provider.google?.stream === true ||
                provider.openai?.stream === true ||
                provider.deepseek?.stream === true ||
                provider.mistral?.stream === true;
            if (!isStreaming &&
                err instanceof SDKError &&
                this.sdkConfig.fallback === true) {
                // non-streaming calls can use fallback engine
                return await fallbackEngine(err.provider, this.sdkConfig, provider);
            }
            if (err instanceof SDKError) {
                throw err;
            }
            throw new SDKError('Unexpected Error', 'core');
        }
    }
}
//# sourceMappingURL=client.js.map
package/dist/client.js.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"client.js","sourceRoot":"","sources":["../src/client.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,kBAAkB,EAAE,MAAM,yBAAyB,CAAC;AAC7D,OAAO,EAAE,oBAAoB,EAAE,MAAM,2BAA2B,CAAC;AACjE,OAAO,EAAE,cAAc,EAAE,MAAM,oBAAoB,CAAC;AACpD,OAAO,EAAE,oBAAoB,EAAE,MAAM,2BAA2B,CAAC;AACjE,OAAO,EAAE,gBAAgB,EAAE,MAAM,sBAAsB,CAAC;AACxD,OAAO,EAAE,sBAAsB,EAAE,MAAM,6BAA6B,CAAC;AACrE,OAAO,EAAE,eAAe,EAAE,MAAM,qBAAqB,CAAC;AACtD,OAAO,EAAE,qBAAqB,EAAE,MAAM,4BAA4B,CAAC;AACnE,OAAO,EAAE,QAAQ,EAAE,MAAM,cAAc,CAAC;AACxC,OAAO,EAAE,cAAc,EAAE,gBAAgB,EAAE,MAAM,iBAAiB,CAAC;AAEnE,OAAO,EAAE,cAAc,EAAE,MAAM,uBAAuB,CAAC;AAEvD,MAAM,OAAO,OAAO;IACV,SAAS,CAAY;IAE7B,YAAY,SAAoB;QAC9B,cAAc,CAAC,SAAS,CAAC,CAAC;QAE1B,IAAI,CAAC,SAAS,GAAG;YACf,MAAM,EAAE,SAAS,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,GAAG,SAAS,CAAC,MAAM,EAAE,CAAC,CAAC,CAAC,SAAS;YAC9D,MAAM,EAAE,SAAS,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,GAAG,SAAS,CAAC,MAAM,EAAE,CAAC,CAAC,CAAC,SAAS;YAC9D,QAAQ,EAAE,SAAS,CAAC,QAAQ,CAAC,CAAC,CAAC,EAAE,GAAG,SAAS,CAAC,QAAQ,EAAE,CAAC,CAAC,CAAC,SAAS;YACpE,OAAO,EAAE,SAAS,CAAC,OAAO,CAAC,CAAC,CAAC,EAAE,GAAG,SAAS,CAAC,OAAO,EAAE,CAAC,CAAC,CAAC,SAAS;YACjE,QAAQ,EAAE,SAAS,CAAC,QAAQ;SAC7B,CAAC;IACJ,CAAC;IAED,KAAK,CAAC,QAAQ,CAAC,QAAkB;QAC/B,gBAAgB,CAAC,QAAQ,CAAC,CAAC;QAE3B,IAAI,CAAC;YACH,IAAI,QAAQ,CAAC,MAAM,EAAE,CAAC;gBACpB,IAAI,QAAQ,CAAC,MAAM,CAAC,MAAM,KAAK,IAAI,EAAE,CAAC;oBACpC,OAAO,oBAAoB,CAAC,QAAQ,EAAE,IAAI,CAAC,SAAS,CAAC,MAAO,CAAC,MAAM,CAAC,CAAC;gBACvE,CAAC;gBACD,OAAO,MAAM,kBAAkB,CAAC,QAAQ,EAAE,IAAI,CAAC,SAAS,CAAC,MAAO,CAAC,MAAM,CAAC,CAAC;YAC3E,CAAC;YAED,IAAI,QAAQ,CAAC,MAAM,EAAE,CAAC;gBACpB,IAAI,QAAQ,CAAC,MAAM,CAAC,MAAM,KAAK,IAAI,EAAE,CAAC;oBACpC,OAAO,oBAAoB,CAAC,QAAQ,EAAE,IAAI,CAAC,SAAS,CAAC,MAAO,CAAC,MAAM,CAAC,CAAC;gBACvE,CAAC;gBACD,OAAO,MAAM,cAAc,CAAC,QAAQ,EAAE,IAAI,CAAC,SAAS,CAAC,MAAO,CAAC,MAAM,CAAC,CAAC;YACvE,CAAC;YAED,IAAI,QAAQ,CAAC,QAAQ,EAAE,CAAC;gBACtB,IAAI,QAAQ,CAAC,QAAQ,CAAC,MAAM,KAAK,IAAI,EAAE,CAAC;oBACtC,OAAO,sBAAsB,CAAC,QAAQ,EAAE,IAAI,CAAC,SAAS,CAAC,QAAS,CAAC,MAAM,CAAC,CAAC;gBAC3E,CAAC;gBACD,OAAO,MAAM,gBAAgB,CAAC,QAAQ,EAAE,IAAI,CAAC,SAAS,CAAC,QAAS,CAAC,MAAM,CAAC,CAAC;YAC3E,CAAC;YAED,IAAI,QAAQ,CAAC,OAAO,EAAE,CAAC;gBACrB,IAAI,QAAQ,CAAC,OAAO,CAAC,MAAM,KAAK,IAAI,EAAE,CAAC;oBACrC,OAAO,qBAAqB,CAAC,QAAQ,EAAE,IAAI,CAAC,SAAS,CAAC,OAAQ,CAAC,MAAM,CAAC,CAAC;gBACzE,CAAC;gBACD,OAAO,MAAM,eAAe,CAAC,QAAQ,EAAE,IAAI,CAAC,SAAS,CAAC,OAAQ,CAAC,MAAM,CAAC,CAAC;YACzE,CAAC;YAED,MAAM,IAAI,QAAQ,CAAC,oBAAoB,EAAE,MAAM,CAAC,CAAC;QACnD,CAAC;QAAC,OAAO,GAAG,EAAE,CAAC;YACb,MAAM,WAAW,GACf,QAAQ,CAAC,MAAM,EAAE,MAAM,KAAK,IAAI;gBAChC,QAAQ,CAAC,MAAM,EAAE,MAAM,KAAK,IAAI;gBAChC,QAAQ,CAAC,QAAQ,EAAE,MAAM,KAAK,IAAI;gBAClC,QAAQ,CAAC,OAAO,EAAE,MAAM,KAAK,IAAI,CAAC;YAEpC,IACE,CAAC,WAAW;gBACZ,GAAG,YAAY,QAAQ;gBACvB,IAAI,CAAC,SAAS,CAAC,QAAQ,KAAK,IAAI,EAChC,CAAC;gBACD,8CAA8C;gBAC9C,OAAO,MAAM,cAAc,CAAC,GAAG,CAAC,QAAQ,EAAE,IAAI,CAAC,SAAS,EAAE,QAAQ,CAAC,CAAC;YACtE,CAAC;YAED,IAAI,GAAG,YAAY,QAAQ,EAAE,CAAC;gBAC5B,MAAM,GAAG,CAAC;YACZ,CAAC;YAED,MAAM,IAAI,QAAQ,CAAC,kBAAkB,EAAE,MAAM,CAAC,CAAC;QACjD,CAAC;IACH,CAAC;CACF"}
package/dist/core/config.d.ts.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"config.d.ts","sourceRoot":"","sources":["../../src/core/config.ts"],"names":[],"mappings":"AAAA,MAAM,MAAM,SAAS,GAAG;IACtB,MAAM,CAAC,EAAE;QACP,MAAM,EAAE,MAAM,CAAC;KAChB,CAAC;IAEF,MAAM,CAAC,EAAE;QACP,MAAM,EAAE,MAAM,CAAC;KAChB,CAAC;IAEF,QAAQ,CAAC,EAAE;QACT,MAAM,EAAE,MAAM,CAAC;KAChB,CAAC;IAEF,OAAO,CAAC,EAAE;QACR,MAAM,EAAE,MAAM,CAAC;KAChB,CAAC;IAEF,QAAQ,CAAC,EAAE,OAAO,CAAC;CACpB,CAAC"}
package/dist/core/config.js.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"config.js","sourceRoot":"","sources":["../../src/core/config.ts"],"names":[],"mappings":""}
package/dist/core/error.d.ts.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"error.d.ts","sourceRoot":"","sources":["../../src/core/error.ts"],"names":[],"mappings":"AAAA,qBAAa,QAAS,SAAQ,KAAK;IACjC,QAAQ,EAAE,MAAM,CAAC;gBAEL,OAAO,EAAE,MAAM,EAAE,QAAQ,EAAE,MAAM;CAI9C"}
package/dist/core/error.js.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"error.js","sourceRoot":"","sources":["../../src/core/error.ts"],"names":[],"mappings":"AAAA,MAAM,OAAO,QAAS,SAAQ,KAAK;IACjC,QAAQ,CAAS;IAEjB,YAAY,OAAe,EAAE,QAAgB;QAC3C,KAAK,CAAC,OAAO,CAAC,CAAC;QACf,IAAI,CAAC,QAAQ,GAAG,QAAQ,CAAC;IAC3B,CAAC;CACF"}
package/dist/core/fallbackEngine.d.ts
ADDED
@@ -0,0 +1,4 @@
import type { Provider, Output } from '../types/types';
import type { SDKConfig } from './config';
export declare function fallbackEngine(failedProvider: string, sdkConfig: SDKConfig, originalProvider: Provider): Promise<Output>;
//# sourceMappingURL=fallbackEngine.d.ts.map
package/dist/core/fallbackEngine.d.ts.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"fallbackEngine.d.ts","sourceRoot":"","sources":["../../src/core/fallbackEngine.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,QAAQ,EAAE,MAAM,EAAE,MAAM,gBAAgB,CAAC;AAMvD,OAAO,KAAK,EAAE,SAAS,EAAE,MAAM,UAAU,CAAC;AAE1C,wBAAsB,cAAc,CAClC,cAAc,EAAE,MAAM,EACtB,SAAS,EAAE,SAAS,EACpB,gBAAgB,EAAE,QAAQ,GACzB,OAAO,CAAC,MAAM,CAAC,CA4FjB"}
|