@node-llm/core 1.6.1 → 1.6.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +64 -249
- package/dist/errors/index.d.ts +19 -0
- package/dist/errors/index.d.ts.map +1 -1
- package/dist/errors/index.js +28 -0
- package/dist/index.d.ts +1 -0
- package/dist/index.d.ts.map +1 -1
- package/dist/index.js +1 -0
- package/package.json +1 -1
package/README.md CHANGED
@@ -1,312 +1,127 @@
+ # @node-llm/core
+
  <p align="left">
    <a href="https://node-llm.eshaiju.com/">
      <img src="https://node-llm.eshaiju.com/assets/images/logo.jpg" alt="NodeLLM logo" width="300" />
    </a>
  </p>

- # NodeLLM
-
- **An architectural layer for integrating Large Language Models in Node.js.**
-
- **Provider-agnostic by design.**
-
- Integrating multiple LLM providers often means juggling different SDKs, API styles, and update cycles. NodeLLM provides a single, unified, production-oriented API for interacting with over 540+ models across multiple providers (OpenAI, Gemini, Anthropic, DeepSeek, OpenRouter, Ollama, etc.) that stays consistent even when providers change.
-
- <p align="left">
-   <img src="https://registry.npmmirror.com/@lobehub/icons-static-svg/latest/files/icons/openai.svg" height="28" />
-   <img src="https://registry.npmmirror.com/@lobehub/icons-static-svg/latest/files/icons/openai-text.svg" height="22" />
-
-   <img src="https://registry.npmmirror.com/@lobehub/icons-static-svg/latest/files/icons/anthropic-text.svg" height="18" />
-
-   <img src="https://registry.npmmirror.com/@lobehub/icons-static-svg/latest/files/icons/gemini-color.svg" height="28" />
-   <img src="https://registry.npmmirror.com/@lobehub/icons-static-svg/latest/files/icons/gemini-text.svg" height="20" />
-
-   <img src="https://registry.npmmirror.com/@lobehub/icons-static-svg/latest/files/icons/deepseek-color.svg" height="28" />
-   <img src="https://registry.npmmirror.com/@lobehub/icons-static-svg/latest/files/icons/deepseek-text.svg" height="20" />
-
-   <img src="https://registry.npmmirror.com/@lobehub/icons-static-svg/latest/files/icons/openrouter.svg" height="28" />
-   <img src="https://registry.npmmirror.com/@lobehub/icons-static-svg/latest/files/icons/openrouter-text.svg" height="22" />
-
-   <img src="https://registry.npmmirror.com/@lobehub/icons-static-svg/latest/files/icons/ollama.svg" height="28" />
-   <img src="https://registry.npmmirror.com/@lobehub/icons-static-svg/latest/files/icons/ollama-text.svg" height="18" />
- </p>
-
- <br/>
-
  [](https://www.npmjs.com/package/@node-llm/core)
  [](https://opensource.org/licenses/MIT)
  [](https://www.typescriptlang.org/)

-
-
- ## What NodeLLM is NOT
-
- NodeLLM represents a clear architectural boundary between your system and LLM vendors.
-
- NodeLLM is **NOT**:
+ **The production-grade LLM engine for Node.js. Provider-agnostic by design.**

- -
- - A prompt-engineering framework
- - An agent playground or experimental toy
+ `@node-llm/core` provides a single, unified API for interacting with over 540+ models across all major providers. It is built for developers who need stable infrastructure, standard streaming, and automated tool execution without vendor lock-in.

  ---

- ##
-
- Direct integrations often become tightly coupled to specific providers, making it difficult to adapt as models evolve. **LLMs should be treated as infrastructure**, and NodeLLM helps you build a stable foundation that persists regardless of which model is currently "state of the art."
-
- NodeLLM helps solve **architectural problems**, not just provide API access. It serves as the core integration layer for LLMs in the Node.js ecosystem.
-
- ### Strategic Goals
-
- - **Provider Isolation**: Decouple your services from vendor SDKs.
- - **Production-Ready**: Native support for streaming, automatic retries, and unified error handling.
- - **Predictable API**: Consistent behavior for Tools, Vision, and Structured Outputs across all models, **now including full parity for streaming**.
-
- ---
-
- ## The Architectural Path
-
- ```ts
- import { NodeLLM } from "@node-llm/core";
-
- // 1. Zero-Config (NodeLLM automatically reads NODELLM_PROVIDER and API keys)
- const chat = NodeLLM.chat("gpt-4o");
-
- // 2. Chat (High-level request/response)
- const response = await chat.ask("Explain event-driven architecture");
- console.log(response.content);
-
- // 3. Streaming (Standard AsyncIterator)
- for await (const chunk of chat.stream("Explain event-driven architecture")) {
-   process.stdout.write(chunk.content);
- }
- ```
-
- ### Real-World Example: Brand Perception Checker
-
- Built with NodeLLM - Multi-provider AI analysis, tool calling, and structured outputs working together.
+ ## Key Features

- **
+ - **Unified API**: One interface for OpenAI, Anthropic, Gemini, DeepSeek, OpenRouter, and Ollama.
+ - **Automated Tool Loops**: Recursive tool execution handled automatically - no manual loops required.
+ - **Streaming + Tools**: Seamlessly execute tools and continue the stream with the final response.
+ - **Structured Output**: Native Zod support for rigorous schema validation (`.withSchema()`).
+ - **Multimodal engine**: Built-in handling for Vision, Audio (Whisper), and Video (Gemini).
+ - **Security-First**: Integrated circuit breakers for timeouts, max tokens, and infinite tool loops.

  ---

- ##
-
- NodeLLM provides a flexible, **lazy-initialized** configuration system designed for enterprise usage. It is safe for ESM and resolved only when your first request is made, eliminating the common `dotenv` race condition.
-
- ```ts
- // Recommended for multi-provider pipelines
- const llm = createLLM({
-   openaiApiKey: process.env.OPENAI_API_KEY,
-   anthropicApiKey: process.env.ANTHROPIC_API_KEY,
-   ollamaApiBase: process.env.OLLAMA_API_BASE
- });
-
- // Support for Custom Endpoints (e.g., Azure or LocalAI)
- const llm = createLLM({
-   openaiApiKey: process.env.AZURE_KEY,
-   openaiApiBase: "https://your-resource.openai.azure.com/openai/deployments/..."
- });
- ```
-
- **[Full Configuration Guide →](docs/getting_started/configuration.md)**
+ ## Supported Providers

-
+ | Provider | Supported Features |
+ | :------------- | :--------------------------------------------------------------- |
+ | **OpenAI** | Chat, Streaming, Tools, Vision, Audio, Images, Reasoning (o1/o3) |
+ | **Anthropic** | Chat, Streaming, Tools, Vision, PDF Support (Claude 3.5) |
+ | **Gemini** | Chat, Streaming, Tools, Vision, Audio, Video, Embeddings |
+ | **DeepSeek** | Chat (V3), Reasoning (R1), Streaming + Tools |
+ | **OpenRouter** | 540+ models via a single API with automatic capability detection |
+ | **Ollama** | Local LLM inference with full Tool and Vision support |

  ---

- ##
-
- ### Unified Chat
+ ## Quick Start

-
+ ### Installation

- ```
-
-
- // Uses NODELLM_PROVIDER from environment (defaults to GPT-4o)
- const chat = NodeLLM.chat();
- await chat.ask("Hello world");
+ ```bash
+ npm install @node-llm/core
  ```

- ###
+ ### Basic Chat & Streaming

-
+ NodeLLM automatically reads your API keys from environment variables (e.g., `OPENAI_API_KEY`).

  ```ts
-
-   files: ["./screenshot.png", "https://example.com/spec.pdf"]
- });
- ```
+ import { createLLM } from "@node-llm/core";

-
-
- Define tools once; `NodeLLM` manages the recursive execution loop for you, keeping your controller logic clean. **Works seamlessly with both regular chat and streaming!**
-
- ```ts
- import { Tool, z } from "@node-llm/core";
+ const llm = createLLM({ provider: "openai" });

- //
-
-
-   description = "Get current weather";
-   schema = z.object({ location: z.string() });
+ // 1. Standard Request
+ const res = await llm.chat("gpt-4o").ask("What is the speed of light?");
+ console.log(res.content);

-
-
-
+ // 2. Real-time Streaming
+ for await (const chunk of llm.chat().stream("Tell me a long story")) {
+   process.stdout.write(chunk.content);
  }
-
- // Now the model can use it automatically
- await chat.withTool(WeatherTool).ask("What's the weather in Tokyo?");
-
- // Lifecycle Hooks for Error & Flow Control
- chat.onToolCallError((call, err) => "STOP");
  ```

-
-
- ### Comprehensive Debug Logging
+ ### Structured Output (Zod)

-
-
- ```ts
- // Set environment variable
- process.env.NODELLM_DEBUG = "true";
-
- // Now see detailed logs for every API call:
- // [NodeLLM] [OpenAI] Request: POST https://api.openai.com/v1/chat/completions
- // { "model": "gpt-4o", "messages": [...] }
- // [NodeLLM] [OpenAI] Response: 200 OK
- // { "id": "chatcmpl-123", ... }
- ```
-
- **Covers:** Chat, Streaming, Images, Embeddings, Transcription, Moderation - across all providers!
-
- ### Structured Output
-
- Get type-safe, validated JSON back using **Zod** schemas.
+ Stop parsing markdown. Get typed objects directly.

  ```ts
  import { z } from "@node-llm/core";
- const Product = z.object({ name: z.string(), price: z.number() });
-
- const res = await chat.withSchema(Product).ask("Generate a gadget");
- console.log(res.parsed.name); // Full type-safety
- ```
-
- ### Image Generation

-
-
-
-
-
-
- ```ts
- await NodeLLM.transcribe("meeting-recording.wav");
- ```
-
- ### Persistence Layer
-
- Automatically track chat history, tool executions, and API metrics with **@node-llm/orm**.
-
- ```ts
- import { createChat } from "@node-llm/orm/prisma";
+ const PlayerSchema = z.object({
+   name: z.string(),
+   powerLevel: z.number(),
+   abilities: z.array(z.string())
+ });

-
- const
+ const chat = llm.chat("gpt-4o-mini").withSchema(PlayerSchema);
+ const response = await chat.ask("Generate a random RPG character");

-
- // -> Saves User Message
- // -> Saves Assistant Response
- // -> Tracks Token Usage & Cost
- // -> Logs Tool Calls & Results
+ console.log(response.parsed.name); // Fully typed!
  ```

-
-
- Run multiple providers in parallel safely without global configuration side effects using isolated contexts.
-
- ```ts
- const [gpt, claude] = await Promise.all([
-   // Each call branch off into its own isolated context
-   NodeLLM.withProvider("openai").chat("gpt-4o").ask(prompt),
-   NodeLLM.withProvider("anthropic").chat("claude-3-5-sonnet").ask(prompt)
- ]);
- ```
+ ---

-
+ ## Security Circuit Breakers

-
+ NodeLLM protects your production environment with four built-in safety pillars:

  ```ts
- const
-
+ const llm = createLLM({
+   requestTimeout: 15000, // 15s DoS Protection
+   maxTokens: 4096, // Cost Protection
+   maxRetries: 3, // Retry Storm Protection
+   maxToolCalls: 5 // Infinite Loop Protection
+ });
  ```

  ---

- ##
+ ## Ecosystem

-
- | :-------------------- | :---------------------------- | :-------------------------- | :------------------------ |
- | **Provider Logic** | Transparently Handled | Exposed to your code | **Low Coupling** |
- | **Streaming** | Standard `AsyncIterator` | Vendor-specific Events | **Predictable Data Flow** |
- | **Streaming + Tools** | Automated Execution | Manual implementation | **Seamless UX** |
- | **Tool Loops** | Automated Recursion | Manual implementation | **Reduced Boilerplate** |
- | **Files/Vision** | Intelligent Path/URL handling | Base64/Buffer management | **Cleaner Service Layer** |
- | **Configuration** | Centralized & Global | Per-instance initialization | **Easier Lifecycle Mgmt** |
+ Looking for persistence? use **[@node-llm/orm](https://www.npmjs.com/package/@node-llm/orm)**.

-
-
- ## Supported Providers
-
- | Provider | Supported Features |
- | :------------------------------------------------------------------------------------------------------------------------ | :-------------------------------------------------------------------------------- |
- | <img src="https://registry.npmmirror.com/@lobehub/icons-static-svg/latest/files/icons/openai.svg" height="18"> **OpenAI** | Chat, **Streaming + Tools**, Vision, Audio, Images, Transcription, **Reasoning** |
- | <img src="https://registry.npmmirror.com/@lobehub/icons-static-svg/latest/files/icons/gemini-color.svg" height="18"> **Gemini** | Chat, **Streaming + Tools**, Vision, Audio, Video, Embeddings |
- | <img src="https://registry.npmmirror.com/@lobehub/icons-static-svg/latest/files/icons/anthropic-text.svg" height="12"> **Anthropic** | Chat, **Streaming + Tools**, Vision, PDF, Structured Output |
- | <img src="https://registry.npmmirror.com/@lobehub/icons-static-svg/latest/files/icons/deepseek-color.svg" height="18"> **DeepSeek** | Chat (V3), **Reasoning (R1)**, **Streaming + Tools** |
- | <img src="https://registry.npmmirror.com/@lobehub/icons-static-svg/latest/files/icons/openrouter.svg" height="18"> **OpenRouter** | **Aggregator**, Chat, Streaming, Tools, Vision, Embeddings, **Reasoning** |
- | <img src="https://registry.npmmirror.com/@lobehub/icons-static-svg/latest/files/icons/ollama.svg" height="18"> **Ollama** | **Local Inference**, Chat, Streaming, Tools, Vision, Embeddings |
+ - Automatically saves chat history to PostgreSQL/MySQL/SQLite via Prisma.
+ - Tracks tool execution results and API metrics (latency, cost, tokens).

  ---

- ## Documentation
-
- ```bash
- npm install @node-llm/core
- ```
-
- **[View Full Documentation →](https://node-llm.eshaiju.com/)**
-
- ### Try the Live Demo
-
- Want to see it in action? Run this in your terminal:
-
- ```bash
- git clone https://github.com/node-llm/node-llm.git
- cd node-llm
- npm install
- npm run demo
- ```
-
- ---
-
- ## Contributing
-
- We welcome contributions! Please see our **[Contributing Guide](CONTRIBUTING.md)** for more details on how to get started.
-
- ---
+ ## Full Documentation

-
+ Visit **[node-llm.eshaiju.com](https://node-llm.eshaiju.com/)** for:

-
+ - [Deep Dive into Tool Calling](https://node-llm.eshaiju.com/core-features/tools)
+ - [Multi-modal Vision & Audio Guide](https://node-llm.eshaiju.com/core-features/multimodal)
+ - [Custom Provider Plugin System](https://node-llm.eshaiju.com/advanced/custom-providers)

  ---

- ##
+ ## License

- MIT © [NodeLLM
+ MIT © [NodeLLM Contributors]
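The 1.6.2 README keeps **Automated Tool Loops** and **Streaming + Tools** in its feature list but no longer carries the tool example the 1.6.1 README had (removed above: `import { Tool, z }`, `chat.withTool(WeatherTool)`, `chat.onToolCallError(...)`). A minimal sketch of that flow, pieced together from the removed snippet; the base class, the `execute()` method name, and its return shape are assumptions, since those lines are truncated in this view:

```ts
import { createLLM, Tool, z } from "@node-llm/core";

// Assumed shape: the removed 1.6.1 snippet shows `description` and `schema`
// fields on a tool class; `extends Tool` and `execute()` are assumptions here.
class WeatherTool extends Tool {
  description = "Get current weather";
  schema = z.object({ location: z.string() });

  async execute({ location }: { location: string }) {
    // A real implementation would call a weather API; a canned value keeps the sketch self-contained.
    return { location, forecast: "sunny", temperatureC: 22 };
  }
}

const llm = createLLM({ provider: "openai" });
const chat = llm.chat("gpt-4o").withTool(WeatherTool);

// Per the removed snippet, returning "STOP" from this hook halts the tool loop on errors.
chat.onToolCallError((call, err) => "STOP");

// The library runs the recursive tool loop; the final answer already reflects the tool result.
const res = await chat.ask("What's the weather in Tokyo?");
console.log(res.content);
```

The "Streaming + Tools" feature entry suggests the same chat can instead be consumed with `chat.stream(...)`, with tool calls resolved before the final chunks arrive.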
package/dist/errors/index.d.ts CHANGED
@@ -35,8 +35,27 @@ export declare class APIError extends LLMError {
  export declare class BadRequestError extends APIError {
      constructor(message: string, body: unknown, provider?: string, model?: string);
  }
+ /**
+  * 401 - Invalid or missing API key
+  */
+ export declare class UnauthorizedError extends APIError {
+     constructor(message: string, body: unknown, provider?: string);
+ }
+ /**
+  * 402 - Payment required (billing issues)
+  */
+ export declare class PaymentRequiredError extends APIError {
+     constructor(message: string, body: unknown, provider?: string);
+ }
+ /**
+  * 403 - Permission denied
+  */
+ export declare class ForbiddenError extends APIError {
+     constructor(message: string, body: unknown, provider?: string);
+ }
  /**
   * 401/403 - API key or permission issues
+  * @deprecated Use UnauthorizedError (401) or ForbiddenError (403) for granular handling
   */
  export declare class AuthenticationError extends APIError {
      constructor(message: string, status: number, body: unknown, provider?: string);
package/dist/errors/index.d.ts.map CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/errors/index.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;GAaG;AAEH;;GAEG;AACH,qBAAa,QAAS,SAAQ,KAAK;aAGf,IAAI,CAAC,EAAE,MAAM;gBAD7B,OAAO,EAAE,MAAM,EACC,IAAI,CAAC,EAAE,MAAM,YAAA;CAMhC;AAED;;GAEG;AACH,qBAAa,QAAS,SAAQ,QAAQ;aAGlB,MAAM,EAAE,MAAM;aACd,IAAI,EAAE,OAAO;aACb,QAAQ,CAAC,EAAE,MAAM;aACjB,KAAK,CAAC,EAAE,MAAM;gBAJ9B,OAAO,EAAE,MAAM,EACC,MAAM,EAAE,MAAM,EACd,IAAI,EAAE,OAAO,EACb,QAAQ,CAAC,EAAE,MAAM,YAAA,EACjB,KAAK,CAAC,EAAE,MAAM,YAAA;CAIjC;AAED;;GAEG;AACH,qBAAa,eAAgB,SAAQ,QAAQ;gBAC/B,OAAO,EAAE,MAAM,EAAE,IAAI,EAAE,OAAO,EAAE,QAAQ,CAAC,EAAE,MAAM,EAAE,KAAK,CAAC,EAAE,MAAM;CAI9E;AAED;;GAEG;AACH,qBAAa,mBAAoB,SAAQ,QAAQ;gBACnC,OAAO,EAAE,MAAM,EAAE,MAAM,EAAE,MAAM,EAAE,IAAI,EAAE,OAAO,EAAE,QAAQ,CAAC,EAAE,MAAM;CAI9E;AAED;;GAEG;AACH,qBAAa,cAAe,SAAQ,QAAQ;gBAC9B,OAAO,EAAE,MAAM,EAAE,IAAI,EAAE,OAAO,EAAE,QAAQ,CAAC,EAAE,MAAM,EAAE,KAAK,CAAC,EAAE,MAAM;CAI9E;AAED;;GAEG;AACH,qBAAa,WAAY,SAAQ,QAAQ;gBAC3B,OAAO,EAAE,MAAM,EAAE,MAAM,EAAE,MAAM,EAAE,IAAI,EAAE,OAAO,EAAE,QAAQ,CAAC,EAAE,MAAM,EAAE,KAAK,CAAC,EAAE,MAAM;CAI9F;AAED;;GAEG;AACH,qBAAa,uBAAwB,SAAQ,WAAW;gBAC1C,OAAO,EAAE,MAAM,EAAE,MAAM,EAAE,MAAM,EAAE,IAAI,EAAE,OAAO,EAAE,QAAQ,CAAC,EAAE,MAAM,EAAE,KAAK,CAAC,EAAE,MAAM;CAI9F;AAED;;GAEG;AACH,qBAAa,kBAAmB,SAAQ,QAAQ;gBAClC,OAAO,EAAE,MAAM;CAG5B;AAED;;GAEG;AACH,qBAAa,aAAc,SAAQ,QAAQ;gBAC7B,OAAO,EAAE,MAAM;CAG5B;AAED;;GAEG;AACH,qBAAa,eAAgB,SAAQ,QAAQ;gBAC/B,OAAO,EAAE,MAAM;CAG5B;AAED;;GAEG;AACH,qBAAa,0BAA2B,SAAQ,QAAQ;;CAIvD;AAED;;GAEG;AACH,qBAAa,uBAAwB,SAAQ,QAAQ;aAEjC,QAAQ,EAAE,MAAM;aAChB,OAAO,EAAE,MAAM;gBADf,QAAQ,EAAE,MAAM,EAChB,OAAO,EAAE,MAAM;CAIlC;AAED;;GAEG;AACH,qBAAa,oBAAqB,SAAQ,QAAQ;aAE9B,KAAK,EAAE,MAAM;aACb,UAAU,EAAE,MAAM;gBADlB,KAAK,EAAE,MAAM,EACb,UAAU,EAAE,MAAM;CAIrC;AACD;;GAEG;AACH,qBAAa,SAAU,SAAQ,QAAQ;aAGnB,QAAQ,CAAC,EAAE,MAAM;aACjB,KAAK,EAAE,OAAO;gBAF9B,OAAO,EAAE,MAAM,EACC,QAAQ,CAAC,EAAE,MAAM,YAAA,EACjB,KAAK,GAAE,OAAe;CAKzC"}
+
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/errors/index.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;GAaG;AAEH;;GAEG;AACH,qBAAa,QAAS,SAAQ,KAAK;aAGf,IAAI,CAAC,EAAE,MAAM;gBAD7B,OAAO,EAAE,MAAM,EACC,IAAI,CAAC,EAAE,MAAM,YAAA;CAMhC;AAED;;GAEG;AACH,qBAAa,QAAS,SAAQ,QAAQ;aAGlB,MAAM,EAAE,MAAM;aACd,IAAI,EAAE,OAAO;aACb,QAAQ,CAAC,EAAE,MAAM;aACjB,KAAK,CAAC,EAAE,MAAM;gBAJ9B,OAAO,EAAE,MAAM,EACC,MAAM,EAAE,MAAM,EACd,IAAI,EAAE,OAAO,EACb,QAAQ,CAAC,EAAE,MAAM,YAAA,EACjB,KAAK,CAAC,EAAE,MAAM,YAAA;CAIjC;AAED;;GAEG;AACH,qBAAa,eAAgB,SAAQ,QAAQ;gBAC/B,OAAO,EAAE,MAAM,EAAE,IAAI,EAAE,OAAO,EAAE,QAAQ,CAAC,EAAE,MAAM,EAAE,KAAK,CAAC,EAAE,MAAM;CAI9E;AAED;;GAEG;AACH,qBAAa,iBAAkB,SAAQ,QAAQ;gBACjC,OAAO,EAAE,MAAM,EAAE,IAAI,EAAE,OAAO,EAAE,QAAQ,CAAC,EAAE,MAAM;CAI9D;AAED;;GAEG;AACH,qBAAa,oBAAqB,SAAQ,QAAQ;gBACpC,OAAO,EAAE,MAAM,EAAE,IAAI,EAAE,OAAO,EAAE,QAAQ,CAAC,EAAE,MAAM;CAI9D;AAED;;GAEG;AACH,qBAAa,cAAe,SAAQ,QAAQ;gBAC9B,OAAO,EAAE,MAAM,EAAE,IAAI,EAAE,OAAO,EAAE,QAAQ,CAAC,EAAE,MAAM;CAI9D;AAED;;;GAGG;AACH,qBAAa,mBAAoB,SAAQ,QAAQ;gBACnC,OAAO,EAAE,MAAM,EAAE,MAAM,EAAE,MAAM,EAAE,IAAI,EAAE,OAAO,EAAE,QAAQ,CAAC,EAAE,MAAM;CAI9E;AAED;;GAEG;AACH,qBAAa,cAAe,SAAQ,QAAQ;gBAC9B,OAAO,EAAE,MAAM,EAAE,IAAI,EAAE,OAAO,EAAE,QAAQ,CAAC,EAAE,MAAM,EAAE,KAAK,CAAC,EAAE,MAAM;CAI9E;AAED;;GAEG;AACH,qBAAa,WAAY,SAAQ,QAAQ;gBAC3B,OAAO,EAAE,MAAM,EAAE,MAAM,EAAE,MAAM,EAAE,IAAI,EAAE,OAAO,EAAE,QAAQ,CAAC,EAAE,MAAM,EAAE,KAAK,CAAC,EAAE,MAAM;CAI9F;AAED;;GAEG;AACH,qBAAa,uBAAwB,SAAQ,WAAW;gBAC1C,OAAO,EAAE,MAAM,EAAE,MAAM,EAAE,MAAM,EAAE,IAAI,EAAE,OAAO,EAAE,QAAQ,CAAC,EAAE,MAAM,EAAE,KAAK,CAAC,EAAE,MAAM;CAI9F;AAED;;GAEG;AACH,qBAAa,kBAAmB,SAAQ,QAAQ;gBAClC,OAAO,EAAE,MAAM;CAG5B;AAED;;GAEG;AACH,qBAAa,aAAc,SAAQ,QAAQ;gBAC7B,OAAO,EAAE,MAAM;CAG5B;AAED;;GAEG;AACH,qBAAa,eAAgB,SAAQ,QAAQ;gBAC/B,OAAO,EAAE,MAAM;CAG5B;AAED;;GAEG;AACH,qBAAa,0BAA2B,SAAQ,QAAQ;;CAIvD;AAED;;GAEG;AACH,qBAAa,uBAAwB,SAAQ,QAAQ;aAEjC,QAAQ,EAAE,MAAM;aAChB,OAAO,EAAE,MAAM;gBADf,QAAQ,EAAE,MAAM,EAChB,OAAO,EAAE,MAAM;CAIlC;AAED;;GAEG;AACH,qBAAa,oBAAqB,SAAQ,QAAQ;aAE9B,KAAK,EAAE,MAAM;aACb,UAAU,EAAE,MAAM;gBADlB,KAAK,EAAE,MAAM,EACb,UAAU,EAAE,MAAM;CAIrC;AACD;;GAEG;AACH,qBAAa,SAAU,SAAQ,QAAQ;aAGnB,QAAQ,CAAC,EAAE,MAAM;aACjB,KAAK,EAAE,OAAO;gBAF9B,OAAO,EAAE,MAAM,EACC,QAAQ,CAAC,EAAE,MAAM,YAAA,EACjB,KAAK,GAAE,OAAe;CAKzC"}
package/dist/errors/index.js CHANGED
@@ -49,8 +49,36 @@ export class BadRequestError extends APIError {
          this.name = "BadRequestError";
      }
  }
+ /**
+  * 401 - Invalid or missing API key
+  */
+ export class UnauthorizedError extends APIError {
+     constructor(message, body, provider) {
+         super(message, 401, body, provider);
+         this.name = "UnauthorizedError";
+     }
+ }
+ /**
+  * 402 - Payment required (billing issues)
+  */
+ export class PaymentRequiredError extends APIError {
+     constructor(message, body, provider) {
+         super(message, 402, body, provider);
+         this.name = "PaymentRequiredError";
+     }
+ }
+ /**
+  * 403 - Permission denied
+  */
+ export class ForbiddenError extends APIError {
+     constructor(message, body, provider) {
+         super(message, 403, body, provider);
+         this.name = "ForbiddenError";
+     }
+ }
  /**
   * 401/403 - API key or permission issues
+  * @deprecated Use UnauthorizedError (401) or ForbiddenError (403) for granular handling
   */
  export class AuthenticationError extends APIError {
      constructor(message, status, body, provider) {
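Version 1.6.2 splits the former catch-all `AuthenticationError` into status-specific classes (`UnauthorizedError` for 401, `PaymentRequiredError` for 402, `ForbiddenError` for 403) and marks the old class as deprecated. A sketch of the granular handling the deprecation note points to; importing the classes from the package root is an assumption (the diff only shows them in `dist/errors/`), and which class a given provider failure actually throws in 1.6.2 is not visible here, so the legacy check is kept as a fallback:

```ts
// Assumption: the error classes are re-exported from the package root alongside
// the rest of the public API; adjust the import path if they are not.
import {
  createLLM,
  UnauthorizedError,
  PaymentRequiredError,
  ForbiddenError,
  AuthenticationError, // deprecated catch-all for 401/403, still exported
} from "@node-llm/core";

const llm = createLLM({ provider: "openai" });

try {
  const res = await llm.chat("gpt-4o").ask("ping");
  console.log(res.content);
} catch (err) {
  if (err instanceof UnauthorizedError) {
    console.error("401 - check the API key:", err.message);
  } else if (err instanceof PaymentRequiredError) {
    console.error("402 - billing issue on the provider account:", err.message);
  } else if (err instanceof ForbiddenError) {
    console.error("403 - key lacks permission for this model or endpoint:", err.message);
  } else if (err instanceof AuthenticationError) {
    console.error("Auth failure (legacy 401/403 class):", err.message);
  } else {
    throw err;
  }
}
```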
package/dist/index.d.ts CHANGED
@@ -16,4 +16,5 @@ export { BaseProvider } from "./providers/BaseProvider.js";
  export { resolveModelAlias } from "./model_aliases.js";
  export { default as MODEL_ALIASES } from "./aliases.js";
  export { ToolExecutionMode, DEFAULT_MAX_TOOL_CALLS, DEFAULT_MAX_RETRIES, DEFAULT_TOOL_EXECUTION, DEFAULT_OLLAMA_BASE_URL, DEFAULT_MODELS } from "./constants.js";
+ export { fetchWithTimeout } from "./utils/fetch.js";
  //# sourceMappingURL=index.d.ts.map
package/dist/index.d.ts.map CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAAA,cAAc,mBAAmB,CAAC;AAClC,cAAc,mBAAmB,CAAC;AAClC,cAAc,gBAAgB,CAAC;AAC/B,cAAc,uBAAuB,CAAC;AACtC,cAAc,wBAAwB,CAAC;AACvC,cAAc,gBAAgB,CAAC;AAC/B,cAAc,sBAAsB,CAAC;AACrC,cAAc,uBAAuB,CAAC;AAEtC,OAAO,EAAE,CAAC,EAAE,MAAM,KAAK,CAAC;AACxB,OAAO,EACL,OAAO,EACP,aAAa,EACb,SAAS,EACT,WAAW,EACX,aAAa,EACb,UAAU,EACV,SAAS,EACT,aAAa,EACb,eAAe,EAChB,MAAM,UAAU,CAAC;AAClB,OAAO,EAAE,MAAM,EAAE,MAAM,aAAa,CAAC;AACrC,YAAY,EAAE,aAAa,EAAE,MAAM,aAAa,CAAC;AACjD,OAAO,EAAE,gBAAgB,EAAE,MAAM,yBAAyB,CAAC;AAC3D,OAAO,EAAE,MAAM,EAAE,MAAM,oBAAoB,CAAC;AAC5C,OAAO,EAAE,YAAY,EAAE,MAAM,6BAA6B,CAAC;AAC3D,OAAO,EAAE,iBAAiB,EAAE,MAAM,oBAAoB,CAAC;AACvD,OAAO,EAAE,OAAO,IAAI,aAAa,EAAE,MAAM,cAAc,CAAC;AACxD,OAAO,EACL,iBAAiB,EACjB,sBAAsB,EACtB,mBAAmB,EACnB,sBAAsB,EACtB,uBAAuB,EACvB,cAAc,EACf,MAAM,gBAAgB,CAAC"}
+
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAAA,cAAc,mBAAmB,CAAC;AAClC,cAAc,mBAAmB,CAAC;AAClC,cAAc,gBAAgB,CAAC;AAC/B,cAAc,uBAAuB,CAAC;AACtC,cAAc,wBAAwB,CAAC;AACvC,cAAc,gBAAgB,CAAC;AAC/B,cAAc,sBAAsB,CAAC;AACrC,cAAc,uBAAuB,CAAC;AAEtC,OAAO,EAAE,CAAC,EAAE,MAAM,KAAK,CAAC;AACxB,OAAO,EACL,OAAO,EACP,aAAa,EACb,SAAS,EACT,WAAW,EACX,aAAa,EACb,UAAU,EACV,SAAS,EACT,aAAa,EACb,eAAe,EAChB,MAAM,UAAU,CAAC;AAClB,OAAO,EAAE,MAAM,EAAE,MAAM,aAAa,CAAC;AACrC,YAAY,EAAE,aAAa,EAAE,MAAM,aAAa,CAAC;AACjD,OAAO,EAAE,gBAAgB,EAAE,MAAM,yBAAyB,CAAC;AAC3D,OAAO,EAAE,MAAM,EAAE,MAAM,oBAAoB,CAAC;AAC5C,OAAO,EAAE,YAAY,EAAE,MAAM,6BAA6B,CAAC;AAC3D,OAAO,EAAE,iBAAiB,EAAE,MAAM,oBAAoB,CAAC;AACvD,OAAO,EAAE,OAAO,IAAI,aAAa,EAAE,MAAM,cAAc,CAAC;AACxD,OAAO,EACL,iBAAiB,EACjB,sBAAsB,EACtB,mBAAmB,EACnB,sBAAsB,EACtB,uBAAuB,EACvB,cAAc,EACf,MAAM,gBAAgB,CAAC;AAExB,OAAO,EAAE,gBAAgB,EAAE,MAAM,kBAAkB,CAAC"}
package/dist/index.js CHANGED
@@ -15,3 +15,4 @@ export { BaseProvider } from "./providers/BaseProvider.js";
  export { resolveModelAlias } from "./model_aliases.js";
  export { default as MODEL_ALIASES } from "./aliases.js";
  export { ToolExecutionMode, DEFAULT_MAX_TOOL_CALLS, DEFAULT_MAX_RETRIES, DEFAULT_TOOL_EXECUTION, DEFAULT_OLLAMA_BASE_URL, DEFAULT_MODELS } from "./constants.js";
+ export { fetchWithTimeout } from "./utils/fetch.js";
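`dist/index.js` and `dist/index.d.ts` now re-export `fetchWithTimeout` from `./utils/fetch.js`, but its signature is not part of this diff. For orientation only, a generic sketch of what a fetch-with-timeout helper typically looks like (an `AbortController` wired to a timer); the parameter names and the 15s default here are assumptions echoing the README's `requestTimeout` example, not the package's actual API:

```ts
// Illustrative sketch only; the real fetchWithTimeout in @node-llm/core may differ.
async function fetchWithTimeoutSketch(
  url: string,
  init: RequestInit = {},
  timeoutMs = 15_000,
): Promise<Response> {
  const controller = new AbortController();
  const timer = setTimeout(() => controller.abort(), timeoutMs);
  try {
    // The abort signal rejects the fetch with an AbortError once the timer fires.
    return await fetch(url, { ...init, signal: controller.signal });
  } finally {
    clearTimeout(timer); // always clear the timer, on success and on abort alike
  }
}

// Usage: fail the request if the provider does not answer within 5 seconds.
const res = await fetchWithTimeoutSketch("https://api.openai.com/v1/models", {}, 5_000);
console.log(res.status);
```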