@node-llm/core 1.5.4 → 1.6.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +76 -43
- package/dist/aliases.d.ts +4 -0
- package/dist/aliases.d.ts.map +1 -1
- package/dist/aliases.js +4 -0
- package/dist/chat/Chat.d.ts +28 -17
- package/dist/chat/Chat.d.ts.map +1 -1
- package/dist/chat/Chat.js +75 -42
- package/dist/chat/ChatOptions.d.ts +8 -9
- package/dist/chat/ChatOptions.d.ts.map +1 -1
- package/dist/chat/ChatResponse.d.ts +26 -1
- package/dist/chat/ChatResponse.d.ts.map +1 -1
- package/dist/chat/ChatResponse.js +54 -8
- package/dist/chat/ChatStream.d.ts.map +1 -1
- package/dist/chat/ChatStream.js +14 -21
- package/dist/chat/Content.d.ts +3 -3
- package/dist/chat/Content.d.ts.map +1 -1
- package/dist/chat/Content.js +3 -6
- package/dist/chat/Message.d.ts +3 -1
- package/dist/chat/Message.d.ts.map +1 -1
- package/dist/chat/Role.d.ts.map +1 -1
- package/dist/chat/Tool.d.ts +8 -8
- package/dist/chat/Tool.d.ts.map +1 -1
- package/dist/chat/Tool.js +9 -7
- package/dist/chat/ToolHandler.d.ts +4 -3
- package/dist/chat/ToolHandler.d.ts.map +1 -1
- package/dist/chat/ToolHandler.js +10 -15
- package/dist/chat/Validation.d.ts.map +1 -1
- package/dist/chat/Validation.js +9 -3
- package/dist/config.d.ts +4 -0
- package/dist/config.d.ts.map +1 -1
- package/dist/config.js +80 -25
- package/dist/constants.js +1 -1
- package/dist/errors/index.d.ts +21 -7
- package/dist/errors/index.d.ts.map +1 -1
- package/dist/errors/index.js +14 -0
- package/dist/index.d.ts +1 -1
- package/dist/index.d.ts.map +1 -1
- package/dist/index.js +1 -1
- package/dist/llm.d.ts +44 -46
- package/dist/llm.d.ts.map +1 -1
- package/dist/llm.js +201 -130
- package/dist/model_aliases.d.ts.map +1 -1
- package/dist/models/ModelRegistry.d.ts.map +1 -1
- package/dist/models/ModelRegistry.js +13 -10
- package/dist/models/PricingRegistry.d.ts +31 -0
- package/dist/models/PricingRegistry.d.ts.map +1 -0
- package/dist/models/PricingRegistry.js +109 -0
- package/dist/models/models.d.ts.map +1 -1
- package/dist/models/models.js +230 -138
- package/dist/models/types.d.ts +37 -34
- package/dist/models/types.d.ts.map +1 -1
- package/dist/moderation/Moderation.d.ts.map +1 -1
- package/dist/moderation/Moderation.js +15 -5
- package/dist/providers/BaseProvider.d.ts +12 -8
- package/dist/providers/BaseProvider.d.ts.map +1 -1
- package/dist/providers/BaseProvider.js +17 -7
- package/dist/providers/Provider.d.ts +20 -5
- package/dist/providers/Provider.d.ts.map +1 -1
- package/dist/providers/anthropic/AnthropicProvider.d.ts +1 -1
- package/dist/providers/anthropic/AnthropicProvider.d.ts.map +1 -1
- package/dist/providers/anthropic/AnthropicProvider.js +3 -3
- package/dist/providers/anthropic/Capabilities.d.ts +2 -1
- package/dist/providers/anthropic/Capabilities.d.ts.map +1 -1
- package/dist/providers/anthropic/Capabilities.js +3 -20
- package/dist/providers/anthropic/Chat.d.ts.map +1 -1
- package/dist/providers/anthropic/Chat.js +27 -17
- package/dist/providers/anthropic/Errors.d.ts.map +1 -1
- package/dist/providers/anthropic/Errors.js +5 -2
- package/dist/providers/anthropic/Models.d.ts.map +1 -1
- package/dist/providers/anthropic/Models.js +6 -6
- package/dist/providers/anthropic/Streaming.d.ts.map +1 -1
- package/dist/providers/anthropic/Streaming.js +17 -12
- package/dist/providers/anthropic/Utils.js +8 -5
- package/dist/providers/anthropic/index.d.ts.map +1 -1
- package/dist/providers/anthropic/index.js +4 -3
- package/dist/providers/anthropic/types.d.ts +11 -4
- package/dist/providers/anthropic/types.d.ts.map +1 -1
- package/dist/providers/deepseek/Capabilities.d.ts +7 -5
- package/dist/providers/deepseek/Capabilities.d.ts.map +1 -1
- package/dist/providers/deepseek/Capabilities.js +9 -5
- package/dist/providers/deepseek/Chat.d.ts.map +1 -1
- package/dist/providers/deepseek/Chat.js +10 -9
- package/dist/providers/deepseek/DeepSeekProvider.d.ts +1 -1
- package/dist/providers/deepseek/DeepSeekProvider.d.ts.map +1 -1
- package/dist/providers/deepseek/DeepSeekProvider.js +4 -4
- package/dist/providers/deepseek/Models.d.ts.map +1 -1
- package/dist/providers/deepseek/Models.js +7 -7
- package/dist/providers/deepseek/Streaming.d.ts.map +1 -1
- package/dist/providers/deepseek/Streaming.js +11 -8
- package/dist/providers/deepseek/index.d.ts.map +1 -1
- package/dist/providers/deepseek/index.js +5 -4
- package/dist/providers/gemini/Capabilities.d.ts +5 -33
- package/dist/providers/gemini/Capabilities.d.ts.map +1 -1
- package/dist/providers/gemini/Capabilities.js +7 -30
- package/dist/providers/gemini/Chat.d.ts.map +1 -1
- package/dist/providers/gemini/Chat.js +24 -19
- package/dist/providers/gemini/ChatUtils.d.ts.map +1 -1
- package/dist/providers/gemini/ChatUtils.js +10 -10
- package/dist/providers/gemini/Embeddings.d.ts.map +1 -1
- package/dist/providers/gemini/Embeddings.js +2 -2
- package/dist/providers/gemini/Errors.d.ts.map +1 -1
- package/dist/providers/gemini/Errors.js +5 -2
- package/dist/providers/gemini/GeminiProvider.d.ts +1 -1
- package/dist/providers/gemini/GeminiProvider.d.ts.map +1 -1
- package/dist/providers/gemini/GeminiProvider.js +3 -3
- package/dist/providers/gemini/Image.d.ts.map +1 -1
- package/dist/providers/gemini/Image.js +7 -7
- package/dist/providers/gemini/Models.d.ts.map +1 -1
- package/dist/providers/gemini/Models.js +6 -6
- package/dist/providers/gemini/Streaming.d.ts.map +1 -1
- package/dist/providers/gemini/Streaming.js +18 -14
- package/dist/providers/gemini/Transcription.d.ts.map +1 -1
- package/dist/providers/gemini/Transcription.js +11 -11
- package/dist/providers/gemini/index.d.ts +1 -1
- package/dist/providers/gemini/index.d.ts.map +1 -1
- package/dist/providers/gemini/index.js +5 -4
- package/dist/providers/gemini/types.d.ts +4 -4
- package/dist/providers/gemini/types.d.ts.map +1 -1
- package/dist/providers/ollama/Capabilities.d.ts.map +1 -1
- package/dist/providers/ollama/Capabilities.js +6 -2
- package/dist/providers/ollama/Models.d.ts.map +1 -1
- package/dist/providers/ollama/Models.js +1 -1
- package/dist/providers/ollama/OllamaProvider.d.ts +1 -1
- package/dist/providers/ollama/OllamaProvider.d.ts.map +1 -1
- package/dist/providers/ollama/OllamaProvider.js +2 -2
- package/dist/providers/ollama/index.d.ts +1 -1
- package/dist/providers/ollama/index.d.ts.map +1 -1
- package/dist/providers/ollama/index.js +7 -3
- package/dist/providers/openai/Capabilities.d.ts +2 -1
- package/dist/providers/openai/Capabilities.d.ts.map +1 -1
- package/dist/providers/openai/Capabilities.js +9 -21
- package/dist/providers/openai/Chat.d.ts.map +1 -1
- package/dist/providers/openai/Chat.js +18 -15
- package/dist/providers/openai/Embedding.d.ts.map +1 -1
- package/dist/providers/openai/Embedding.js +11 -7
- package/dist/providers/openai/Errors.d.ts.map +1 -1
- package/dist/providers/openai/Errors.js +5 -2
- package/dist/providers/openai/Image.d.ts.map +1 -1
- package/dist/providers/openai/Image.js +6 -6
- package/dist/providers/openai/Models.d.ts +1 -1
- package/dist/providers/openai/Models.d.ts.map +1 -1
- package/dist/providers/openai/Models.js +12 -8
- package/dist/providers/openai/Moderation.d.ts.map +1 -1
- package/dist/providers/openai/Moderation.js +6 -6
- package/dist/providers/openai/OpenAIProvider.d.ts +2 -3
- package/dist/providers/openai/OpenAIProvider.d.ts.map +1 -1
- package/dist/providers/openai/OpenAIProvider.js +4 -4
- package/dist/providers/openai/Streaming.d.ts.map +1 -1
- package/dist/providers/openai/Streaming.js +18 -13
- package/dist/providers/openai/Transcription.d.ts.map +1 -1
- package/dist/providers/openai/Transcription.js +15 -12
- package/dist/providers/openai/index.d.ts +1 -1
- package/dist/providers/openai/index.d.ts.map +1 -1
- package/dist/providers/openai/index.js +6 -5
- package/dist/providers/openai/types.d.ts +1 -1
- package/dist/providers/openai/utils.js +2 -2
- package/dist/providers/openrouter/Capabilities.d.ts +3 -3
- package/dist/providers/openrouter/Capabilities.d.ts.map +1 -1
- package/dist/providers/openrouter/Capabilities.js +21 -24
- package/dist/providers/openrouter/Models.d.ts.map +1 -1
- package/dist/providers/openrouter/Models.js +20 -16
- package/dist/providers/openrouter/OpenRouterProvider.d.ts.map +1 -1
- package/dist/providers/openrouter/OpenRouterProvider.js +1 -1
- package/dist/providers/openrouter/index.d.ts +1 -1
- package/dist/providers/openrouter/index.d.ts.map +1 -1
- package/dist/providers/openrouter/index.js +6 -5
- package/dist/providers/registry.d.ts +18 -2
- package/dist/providers/registry.d.ts.map +1 -1
- package/dist/providers/registry.js +17 -2
- package/dist/providers/utils.js +1 -1
- package/dist/schema/Schema.d.ts +3 -3
- package/dist/schema/Schema.d.ts.map +1 -1
- package/dist/schema/Schema.js +2 -2
- package/dist/schema/to-json-schema.d.ts +1 -1
- package/dist/schema/to-json-schema.d.ts.map +1 -1
- package/dist/streaming/Stream.d.ts.map +1 -1
- package/dist/streaming/Stream.js +3 -3
- package/dist/utils/Binary.d.ts.map +1 -1
- package/dist/utils/Binary.js +23 -13
- package/dist/utils/FileLoader.d.ts.map +1 -1
- package/dist/utils/FileLoader.js +25 -4
- package/dist/utils/audio.js +1 -1
- package/dist/utils/fetch.d.ts.map +1 -1
- package/dist/utils/fetch.js +3 -2
- package/dist/utils/logger.d.ts +3 -3
- package/dist/utils/logger.d.ts.map +1 -1
- package/dist/utils/logger.js +2 -2
- package/package.json +1 -1
package/README.md
CHANGED
@@ -1,6 +1,6 @@
 <p align="left">
 <a href="https://node-llm.eshaiju.com/">
-<img src="
+<img src="https://node-llm.eshaiju.com/assets/images/logo.jpg" alt="NodeLLM logo" width="300" />
 </a>
 </p>

@@ -44,9 +44,10 @@ Integrating multiple LLM providers often means juggling different SDKs, API styl
 NodeLLM represents a clear architectural boundary between your system and LLM vendors.

 NodeLLM is **NOT**:
-
-
-
+
+- A wrapper around a single provider SDK (like `openai` or `@google/generative-ai`)
+- A prompt-engineering framework
+- An agent playground or experimental toy

 ---

@@ -57,6 +58,7 @@ Direct integrations often become tightly coupled to specific providers, making i
 NodeLLM helps solve **architectural problems**, not just provide API access. It serves as the core integration layer for LLMs in the Node.js ecosystem.

 ### Strategic Goals
+
 - **Provider Isolation**: Decouple your services from vendor SDKs.
 - **Production-Ready**: Native support for streaming, automatic retries, and unified error handling.
 - **Predictable API**: Consistent behavior for Tools, Vision, and Structured Outputs across all models, **now including full parity for streaming**.
@@ -68,16 +70,14 @@ NodeLLM helps solve **architectural problems**, not just provide API access. It
 ```ts
 import { NodeLLM } from "@node-llm/core";

-// 1.
-NodeLLM.
+// 1. Zero-Config (NodeLLM automatically reads NODELLM_PROVIDER and API keys)
+const chat = NodeLLM.chat("gpt-4o");

 // 2. Chat (High-level request/response)
-const chat = NodeLLM.chat("gpt-4o");
 const response = await chat.ask("Explain event-driven architecture");
 console.log(response.content);

 // 3. Streaming (Standard AsyncIterator)
-// NOW with full support for Tools, Vision, and Schemas!
 for await (const chunk of chat.stream("Explain event-driven architecture")) {
 process.stdout.write(chunk.content);
 }
@@ -85,14 +85,9 @@ for await (const chunk of chat.stream("Explain event-driven architecture")) {

 ### 🎯 Real-World Example: Brand Perception Checker

-Built with NodeLLM - Multi-provider AI analysis, tool calling, and structured outputs working together
-
-<p align="center">
-<img src="assets/brand-perception-checker.png" alt="Brand Perception Checker" width="800" />
-</p>
-
-**[View Example →](examples/brand-perception-checker/)**
+Built with NodeLLM - Multi-provider AI analysis, tool calling, and structured outputs working together.

+**[View Example →](https://github.com/node-llm/node-llm/tree/main/examples/applications/brand-perception-checker)**

 ---

@@ -102,16 +97,16 @@ NodeLLM provides a flexible, **lazy-initialized** configuration system designed

 ```ts
 // Recommended for multi-provider pipelines
-
+const llm = createLLM({
 openaiApiKey: process.env.OPENAI_API_KEY,
 anthropicApiKey: process.env.ANTHROPIC_API_KEY,
-ollamaApiBase: process.env.OLLAMA_API_BASE
+ollamaApiBase: process.env.OLLAMA_API_BASE
 });

 // Support for Custom Endpoints (e.g., Azure or LocalAI)
-
+const llm = createLLM({
 openaiApiKey: process.env.AZURE_KEY,
-openaiApiBase: "https://your-resource.openai.azure.com/openai/deployments/..."
+openaiApiBase: "https://your-resource.openai.azure.com/openai/deployments/..."
 });
 ```

@@ -124,21 +119,29 @@ NodeLLM.configure({
 ## 🔮 Capabilities

 ### 💬 Unified Chat
+
 Stop rewriting code for every provider. `NodeLLM` normalizes inputs and outputs into a single, predictable mental model.
+
 ```ts
-
+import { NodeLLM } from "@node-llm/core";
+
+// Uses NODELLM_PROVIDER from environment (defaults to GPT-4o)
+const chat = NodeLLM.chat();
 await chat.ask("Hello world");
 ```

 ### 👁️ Smart Vision & Files
+
 Pass images, PDFs, or audio files directly to **both `ask()` and `stream()`**. We handle the heavy lifting: fetching remote URLs, base64 encoding, and MIME type mapping.
+
 ```ts
-await chat.ask("Analyze this interface", {
-files: ["./screenshot.png", "https://example.com/spec.pdf"]
+await chat.ask("Analyze this interface", {
+files: ["./screenshot.png", "https://example.com/spec.pdf"]
 });
 ```

 ### 🛠️ Auto-Executing Tools
+
 Define tools once;`NodeLLM` manages the recursive execution loop for you, keeping your controller logic clean. **Works seamlessly with both regular chat and streaming!**

 ```ts
@@ -150,8 +153,8 @@ class WeatherTool extends Tool {
 description = "Get current weather";
 schema = z.object({ location: z.string() });

-async execute({ location }) {
-return `Sunny in ${location}`;
+async execute({ location }) {
+return `Sunny in ${location}`;
 }
 }

@@ -159,12 +162,15 @@ class WeatherTool extends Tool {
 await chat.withTool(WeatherTool).ask("What's the weather in Tokyo?");

 // Lifecycle Hooks for Error & Flow Control
-chat.onToolCallError((call, err) => "STOP");
+chat.onToolCallError((call, err) => "STOP");
 ```
+
 **[Full Tool Calling Guide →](https://node-llm.eshaiju.com/core-features/tool-calling)**

 ### 🔍 Comprehensive Debug Logging
+
 Enable detailed logging for all API requests and responses across every feature and provider:
+
 ```ts
 // Set environment variable
 process.env.NODELLM_DEBUG = "true";
@@ -175,10 +181,13 @@ process.env.NODELLM_DEBUG = "true";
 // [NodeLLM] [OpenAI] Response: 200 OK
 // { "id": "chatcmpl-123", ... }
 ```
+
 **Covers:** Chat, Streaming, Images, Embeddings, Transcription, Moderation - across all providers!

 ### ✨ Structured Output
+
 Get type-safe, validated JSON back using **Zod** schemas.
+
 ```ts
 import { z } from "@node-llm/core";
 const Product = z.object({ name: z.string(), price: z.number() });
@@ -188,27 +197,50 @@ console.log(res.parsed.name); // Full type-safety
 ```

 ### 🎨 Image Generation
+
 ```ts
 await NodeLLM.paint("A cyberpunk city in rain");
 ```

 ### 🎤 Audio Transcription
+
 ```ts
 await NodeLLM.transcribe("meeting-recording.wav");
 ```

+### 💾 Persistence Layer
+
+Automatically track chat history, tool executions, and API metrics with **@node-llm/orm**.
+
+```ts
+import { createChat } from "@node-llm/orm/prisma";
+
+// Chat state is automatically saved to your database (Postgres/MySQL/SQLite)
+const chat = await createChat(prisma, llm, { model: "gpt-4o" });
+
+await chat.ask("Hello");
+// -> Saves User Message
+// -> Saves Assistant Response
+// -> Tracks Token Usage & Cost
+// -> Logs Tool Calls & Results
+```
+
 ### ⚡ Scoped Parallelism
+
 Run multiple providers in parallel safely without global configuration side effects using isolated contexts.
+
 ```ts
 const [gpt, claude] = await Promise.all([
 // Each call branch off into its own isolated context
 NodeLLM.withProvider("openai").chat("gpt-4o").ask(prompt),
-NodeLLM.withProvider("anthropic").chat("claude-3-5-sonnet").ask(prompt)
+NodeLLM.withProvider("anthropic").chat("claude-3-5-sonnet").ask(prompt)
 ]);
 ```

 ### 🧠 Deep Reasoning
+
 Direct access to the thought process of models like **DeepSeek R1** or **OpenAI o1/o3** using the `.reasoning` field.
+
 ```ts
 const res = await NodeLLM.chat("deepseek-reasoner").ask("Solve this logical puzzle");
 console.log(res.reasoning); // Chain-of-thought
@@ -218,27 +250,27 @@ console.log(res.reasoning); // Chain-of-thought

 ## 🚀 Why use this over official SDKs?

-| Feature
-|
-| **Provider Logic**
-| **Streaming**
-| **Streaming + Tools** | Automated Execution
-| **Tool Loops**
-| **Files/Vision**
-| **Configuration**
+| Feature | NodeLLM | Official SDKs | Architectural Impact |
+| :-------------------- | :---------------------------- | :-------------------------- | :------------------------ |
+| **Provider Logic** | Transparently Handled | Exposed to your code | **Low Coupling** |
+| **Streaming** | Standard `AsyncIterator` | Vendor-specific Events | **Predictable Data Flow** |
+| **Streaming + Tools** | Automated Execution | Manual implementation | **Seamless UX** |
+| **Tool Loops** | Automated Recursion | Manual implementation | **Reduced Boilerplate** |
+| **Files/Vision** | Intelligent Path/URL handling | Base64/Buffer management | **Cleaner Service Layer** |
+| **Configuration** | Centralized & Global | Per-instance initialization | **Easier Lifecycle Mgmt** |

 ---

 ## 📋 Supported Providers

-| Provider
-|
-| <img src="https://registry.npmmirror.com/@lobehub/icons-static-svg/latest/files/icons/openai.svg" height="18"> **OpenAI**
-| <img src="https://registry.npmmirror.com/@lobehub/icons-static-svg/latest/files/icons/gemini-color.svg" height="18"> **Gemini**
-| <img src="https://registry.npmmirror.com/@lobehub/icons-static-svg/latest/files/icons/anthropic-text.svg" height="12"> **Anthropic** | Chat, **Streaming + Tools**, Vision, PDF, Structured Output
-| <img src="https://registry.npmmirror.com/@lobehub/icons-static-svg/latest/files/icons/deepseek-color.svg" height="18"> **DeepSeek**
-| <img src="https://registry.npmmirror.com/@lobehub/icons-static-svg/latest/files/icons/openrouter.svg" height="18"> **OpenRouter**
-| <img src="https://registry.npmmirror.com/@lobehub/icons-static-svg/latest/files/icons/ollama.svg" height="18"> **Ollama**
+| Provider | Supported Features |
+| :----------------------------------------------------------------------------------------------------------------------------------- | :------------------------------------------------------------------------------- |
+| <img src="https://registry.npmmirror.com/@lobehub/icons-static-svg/latest/files/icons/openai.svg" height="18"> **OpenAI** | Chat, **Streaming + Tools**, Vision, Audio, Images, Transcription, **Reasoning** |
+| <img src="https://registry.npmmirror.com/@lobehub/icons-static-svg/latest/files/icons/gemini-color.svg" height="18"> **Gemini** | Chat, **Streaming + Tools**, Vision, Audio, Video, Embeddings |
+| <img src="https://registry.npmmirror.com/@lobehub/icons-static-svg/latest/files/icons/anthropic-text.svg" height="12"> **Anthropic** | Chat, **Streaming + Tools**, Vision, PDF, Structured Output |
+| <img src="https://registry.npmmirror.com/@lobehub/icons-static-svg/latest/files/icons/deepseek-color.svg" height="18"> **DeepSeek** | Chat (V3), **Reasoning (R1)**, **Streaming + Tools** |
+| <img src="https://registry.npmmirror.com/@lobehub/icons-static-svg/latest/files/icons/openrouter.svg" height="18"> **OpenRouter** | **Aggregator**, Chat, Streaming, Tools, Vision, Embeddings, **Reasoning** |
+| <img src="https://registry.npmmirror.com/@lobehub/icons-static-svg/latest/files/icons/ollama.svg" height="18"> **Ollama** | **Local Inference**, Chat, Streaming, Tools, Vision, Embeddings |

 ---

@@ -251,7 +283,9 @@ npm install @node-llm/core
 **[View Full Documentation ↗](https://node-llm.eshaiju.com/)**

 ### 🍿 Try the Live Demo
+
 Want to see it in action? Run this in your terminal:
+
 ```bash
 git clone https://github.com/node-llm/node-llm.git
 cd node-llm
@@ -259,7 +293,6 @@ npm install
 npm run demo
 ```

-
 ---

 ## 🤝 Contributing
package/dist/aliases.d.ts
CHANGED
@@ -375,6 +375,10 @@ declare const _default: {
 readonly openai: "gpt-5.2-chat-latest";
 readonly openrouter: "openai/gpt-5.2-chat-latest";
 };
+readonly "gpt-5.2-codex": {
+readonly openai: "gpt-5.2-codex";
+readonly openrouter: "openai/gpt-5.2-codex";
+};
 readonly "gpt-5.2-pro": {
 readonly openai: "gpt-5.2-pro";
 readonly openrouter: "openai/gpt-5.2-pro";
package/dist/aliases.d.ts.map
CHANGED
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"aliases.d.ts","sourceRoot":"","sources":["../src/aliases.ts"],"names":[],"mappings":"
|
|
1
|
+
{"version":3,"file":"aliases.d.ts","sourceRoot":"","sources":["../src/aliases.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,wBAmnBW"}
|
package/dist/aliases.js
CHANGED
@@ -375,6 +375,10 @@ export default {
 "openai": "gpt-5.2-chat-latest",
 "openrouter": "openai/gpt-5.2-chat-latest"
 },
+"gpt-5.2-codex": {
+"openai": "gpt-5.2-codex",
+"openrouter": "openai/gpt-5.2-codex"
+},
 "gpt-5.2-pro": {
 "openai": "gpt-5.2-pro",
 "openrouter": "openai/gpt-5.2-pro"
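The new `gpt-5.2-codex` alias maps the bare model name to `gpt-5.2-codex` on OpenAI and `openai/gpt-5.2-codex` on OpenRouter. A minimal usage sketch, following the chat API shown in the README above (the exact resolution behaviour per provider is an assumption based on this alias table):

```ts
import { NodeLLM } from "@node-llm/core";

// The alias added above lets the same model name work on either provider.
const viaOpenAI = await NodeLLM.withProvider("openai")
  .chat("gpt-5.2-codex")
  .ask("Refactor this loop into a reduce call");

const viaOpenRouter = await NodeLLM.withProvider("openrouter")
  .chat("gpt-5.2-codex")
  .ask("Refactor this loop into a reduce call");

console.log(viaOpenAI.content, viaOpenRouter.content);
```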
package/dist/chat/Chat.d.ts
CHANGED
@@ -44,13 +44,13 @@ export declare class Chat {
 */
 withTool(tool: ToolResolvable): this;
 /**
-
-
-
-
-
-
-
+* Add multiple tools to the chat session.
+* Supports passing Tool classes (which will be instantiated) or instances.
+* Can replace existing tools if options.replace is true.
+*
+* @example
+* chat.withTools([WeatherTool, new CalculatorTool()], { replace: true });
+*/
 withTools(tools: ToolResolvable[], options?: {
 replace?: boolean;
 }): this;
@@ -68,6 +68,17 @@ export declare class Chat {
 withSystemPrompt(instruction: string, options?: {
 replace?: boolean;
 }): this;
+/**
+* Alias for withInstructions
+*/
+system(instruction: string, options?: {
+replace?: boolean;
+}): this;
+/**
+* Add a message manually to the chat history.
+* Useful for rehydrating sessions from a database.
+*/
+add(role: "user" | "assistant" | "system" | "developer", content: string): this;
 /**
 * Set the temperature for the chat session.
 * Controls randomness: 0.0 (deterministic) to 1.0 (creative).
@@ -83,31 +94,31 @@ export declare class Chat {
 */
 withRequestOptions(options: {
 headers?: Record<string, string>;
-responseFormat?:
+responseFormat?: unknown;
 }): this;
 /**
 * Set provider-specific parameters.
 * These will be merged into the final request payload.
 */
-withParams(params: Record<string,
+withParams(params: Record<string, unknown>): this;
 /**
 * Enforce a specific schema for the output.
 * Can accept a Schema object or a Zod schema/JSON Schema directly.
 */
-withSchema(schema: Schema | z.ZodType<
+withSchema(schema: Schema | z.ZodType<unknown> | Record<string, unknown> | null): this;
 onNewMessage(handler: () => void): this;
 onEndMessage(handler: (message: ChatResponseString) => void): this;
-onToolCall(handler: (toolCall:
-onToolResult(handler: (result:
+onToolCall(handler: (toolCall: unknown) => void): this;
+onToolResult(handler: (result: unknown) => void): this;
 /**
 * Called when a tool call starts.
 */
-onToolCallStart(handler: (toolCall:
+onToolCallStart(handler: (toolCall: unknown) => void): this;
 /**
 * Called when a tool call ends successfully.
 */
-onToolCallEnd(handler: (toolCall:
-onToolCallError(handler: (toolCall:
+onToolCallEnd(handler: (toolCall: unknown, result: unknown) => void): this;
+onToolCallError(handler: (toolCall: unknown, error: Error) => "STOP" | "CONTINUE" | "RETRY" | void | Promise<"STOP" | "CONTINUE" | "RETRY" | void>): this;
 /**
 * Set the tool execution mode.
 * - "auto": (Default) Automatically execute all tool calls.
@@ -119,7 +130,7 @@ export declare class Chat {
 * Hook for confirming tool execution in "confirm" mode.
 * Return true to proceed, false to cancel the specific call.
 */
-onConfirmToolCall(handler: (toolCall:
+onConfirmToolCall(handler: (toolCall: unknown) => Promise<boolean> | boolean): this;
 /**
 * Add a hook to process messages before sending to the LLM.
 * Useful for PII detection, redaction, and input moderation.
@@ -133,7 +144,7 @@ export declare class Chat {
 /**
 * Ask the model a question
 */
-ask(content: string |
+ask(content: string | ContentPart[], options?: AskOptions): Promise<ChatResponseString>;
 /**
 * Streams the model's response to a user question.
 */
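The new `system()` alias and `add()` method declared above let a caller seed a chat with a system prompt and previously stored turns before issuing the next request. A minimal sketch of how they would be combined (the stored rows and the `gpt-4o` model are hypothetical):

```ts
import { NodeLLM } from "@node-llm/core";

// Hypothetical rows previously persisted by your own storage layer.
const saved = [
  { role: "user" as const, content: "What is our refund window?" },
  { role: "assistant" as const, content: "Refunds are accepted within 14 days." }
];

// system() is an alias for withInstructions(); add() pushes a message into
// history without triggering a request, so a session can be rehydrated.
const chat = NodeLLM.chat("gpt-4o").system("You are a support assistant.");
for (const m of saved) {
  chat.add(m.role, m.content);
}

const reply = await chat.ask("And for digital purchases?");
console.log(reply.content);
```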
package/dist/chat/Chat.d.ts.map
CHANGED
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"Chat.d.ts","sourceRoot":"","sources":["../../src/chat/Chat.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,OAAO,EAAE,MAAM,cAAc,CAAC;AACvC,OAAO,
|
|
1
|
+
{"version":3,"file":"Chat.d.ts","sourceRoot":"","sources":["../../src/chat/Chat.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,OAAO,EAAE,MAAM,cAAc,CAAC;AACvC,OAAO,EACL,WAAW,EAIZ,MAAM,cAAc,CAAC;AACtB,OAAO,EAAE,WAAW,EAAE,MAAM,kBAAkB,CAAC;AAC/C,OAAO,EAAE,QAAQ,EAAE,KAAK,EAAE,SAAS,EAAkB,MAAM,0BAA0B,CAAC;AAGtF,OAAO,EAAE,MAAM,EAAE,MAAM,wBAAwB,CAAC;AAChD,OAAO,EAAkB,cAAc,EAAE,MAAM,WAAW,CAAC;AAC3D,OAAO,EAAE,MAAM,EAAE,MAAM,qBAAqB,CAAC;AAE7C,OAAO,EAAE,CAAC,EAAE,MAAM,KAAK,CAAC;AAExB,OAAO,EAAE,iBAAiB,EAAE,MAAM,iBAAiB,CAAC;AAMpD,MAAM,WAAW,UAAU;IACzB,MAAM,CAAC,EAAE,MAAM,EAAE,CAAC;IAClB,KAAK,CAAC,EAAE,MAAM,EAAE,CAAC;IACjB,WAAW,CAAC,EAAE,MAAM,CAAC;IACrB,SAAS,CAAC,EAAE,MAAM,CAAC;IACnB,OAAO,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;IACjC,YAAY,CAAC,EAAE,MAAM,CAAC;IACtB,cAAc,CAAC,EAAE,MAAM,CAAC;IACxB,MAAM,CAAC,EAAE,WAAW,CAAC;CACtB;AAED,OAAO,EAAE,kBAAkB,EAAE,MAAM,mBAAmB,CAAC;AAEvD,qBAAa,IAAI;IAMb,OAAO,CAAC,QAAQ,CAAC,QAAQ;IACzB,OAAO,CAAC,KAAK;IACb,OAAO,CAAC,QAAQ,CAAC,OAAO;IAP1B,OAAO,CAAC,QAAQ,CAAiB;IACjC,OAAO,CAAC,cAAc,CAAiB;IACvC,OAAO,CAAC,QAAQ,CAAW;gBAGR,QAAQ,EAAE,QAAQ,EAC3B,KAAK,EAAE,MAAM,EACJ,OAAO,GAAE,WAAgB,EAC1C,WAAW,GAAE;QAAE,QAAQ,EAAE,MAAM,CAAC;QAAC,OAAO,EAAE,MAAM,CAAA;KAAgC;IA6BlF;;OAEG;IACH,IAAI,OAAO,IAAI,SAAS,OAAO,EAAE,CAEhC;IAED,IAAI,OAAO,IAAI,MAAM,CAEpB;IAED;;OAEG;IACH,IAAI,UAAU,IAAI,KAAK,CAuBtB;IAED;;;OAGG;IACH,QAAQ,CAAC,IAAI,EAAE,cAAc,GAAG,IAAI;IAIpC;;;;;;;OAOG;IACH,SAAS,CAAC,KAAK,EAAE,cAAc,EAAE,EAAE,OAAO,CAAC,EAAE;QAAE,OAAO,CAAC,EAAE,OAAO,CAAA;KAAE,GAAG,IAAI;IAkBzE;;;;OAIG;IACH,gBAAgB,CAAC,WAAW,EAAE,MAAM,EAAE,OAAO,CAAC,EAAE;QAAE,OAAO,CAAC,EAAE,OAAO,CAAA;KAAE,GAAG,IAAI;IAW5E;;OAEG;IACH,gBAAgB,CAAC,WAAW,EAAE,MAAM,EAAE,OAAO,CAAC,EAAE;QAAE,OAAO,CAAC,EAAE,OAAO,CAAA;KAAE,GAAG,IAAI;IAI5E;;OAEG;IACH,MAAM,CAAC,WAAW,EAAE,MAAM,EAAE,OAAO,CAAC,EAAE;QAAE,OAAO,CAAC,EAAE,OAAO,CAAA;KAAE,GAAG,IAAI;IAIlE;;;OAGG;IACH,GAAG,CAAC,IAAI,EAAE,MAAM,GAAG,WAAW,GAAG,QAAQ,GAAG,WAAW,EAAE,OAAO,EAAE,MAAM,GAAG,IAAI;IAS/E;;;OAGG;IACH,eAAe,CAAC,IAAI,EAAE,MAAM,GAAG,IAAI;IAKnC;;OAEG;IACH,SAAS,CAAC,KAAK,EAAE,MAAM,GAAG,IAAI;IAK9B;;;OAGG;IACH,kBAAkB,CAAC,OAAO,EAAE;QAC1B,OAAO,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QACjC,cAAc,CAAC,EAAE,OAAO,CAAC;KAC1B,GAAG,IAAI;IAUR;;;OAGG;IACH,UAAU,CAAC,MAAM,EAAE,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,GAAG,IAAI;IAKjD;;;OAGG;IACH,UAAU,CAAC,MAAM,EAAE,MAAM,GAAG,CAAC,CAAC,OAAO,CAAC,OAAO,CAAC,GAAG,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,GAAG,IAAI,GAAG,IAAI;IAkBtF,YAAY,CAAC,OAAO,EAAE,MAAM,IAAI,GAAG,IAAI;IAKvC,YAAY,CAAC,OAAO,EAAE,CAAC,OAAO,EAAE,kBAAkB,KAAK,IAAI,GAAG,IAAI;IAKlE,UAAU,CAAC,OAAO,EAAE,CAAC,QAAQ,EAAE,OAAO,KAAK,IAAI,GAAG,IAAI;IAItD,YAAY,CAAC,OAAO,EAAE,CAAC,MAAM,EAAE,OAAO,KAAK,IAAI,GAAG,IAAI;IAItD;;OAEG;IACH,eAAe,CAAC,OAAO,EAAE,CAAC,QAAQ,EAAE,OAAO,KAAK,IAAI,GAAG,IAAI;IAK3D;;OAEG;IACH,aAAa,CAAC,OAAO,EAAE,CAAC,QAAQ,EAAE,OAAO,EAAE,MAAM,EAAE,OAAO,KAAK,IAAI,GAAG,IAAI;IAK1E,eAAe,CACb,OAAO,EAAE,CACP,QAAQ,EAAE,OAAO,EACjB,KAAK,EAAE,KAAK,KACT,MAAM,GAAG,UAAU,GAAG,OAAO,GAAG,IAAI,GAAG,OAAO,CAAC,MAAM,GAAG,UAAU,GAAG,OAAO,GAAG,IAAI,CAAC,GACxF,IAAI;IAKP;;;;;OAKG;IACH,iBAAiB,CAAC,IAAI,EAAE,iBAAiB,GAAG,IAAI;IAKhD;;;OAGG;IACH,iBAAiB,CAAC,OAAO,EAAE,CAAC,QAAQ,EAAE,OAAO,KAAK,OAAO,CAAC,OAAO,CAAC,GAAG,OAAO,GAAG,IAAI;IAKnF;;;OAGG;IACH,aAAa,CAAC,OAAO,EAAE,CAAC,QAAQ,EAAE,OAAO,EAAE,KAAK,OAAO,CAAC,OAAO,EAAE,GAAG,IAAI,CAAC,GAAG,IAAI;IAKhF;;;OAGG;IACH,aAAa,CACX,OAAO,EAAE,CAAC,QAAQ,EAAE,kBAAkB,KAAK,OAAO,CAAC,kBAAkB,GAAG,IAAI,CAAC,GAC5E,IAAI;IAKP;;OAEG;IACG,GAAG,CAAC,OAAO,EAAE,MAAM,GAAG,WAAW,EAAE,EAAE,OAAO,CAAC,EAAE,UAAU,GAAG,OAAO,CAAC,kBAAkB,CAAC;IAmQ7F;;OAEG;IACH,MAAM,CAAC,OAAO,EAAE,MAAM,GAAG,WAAW,EAAE,EAAE,OAAO,GAAE,UAAe,GAAG,MAAM,CAAC,SAAS,CAAC;IAW
pF;;OAEG;IACH,OAAO,CAAC,aAAa;CAoDtB"}
|
package/dist/chat/Chat.js
CHANGED
@@ -65,11 +65,19 @@ export class Chat {
 acc.output_tokens += msg.usage.output_tokens;
 acc.total_tokens += msg.usage.total_tokens;
 acc.cached_tokens = (acc.cached_tokens ?? 0) + (msg.usage.cached_tokens ?? 0);
-acc.cache_creation_tokens =
+acc.cache_creation_tokens =
+(acc.cache_creation_tokens ?? 0) + (msg.usage.cache_creation_tokens ?? 0);
 acc.cost = (acc.cost ?? 0) + (msg.usage.cost ?? 0);
 }
 return acc;
-}, {
+}, {
+input_tokens: 0,
+output_tokens: 0,
+total_tokens: 0,
+cached_tokens: 0,
+cache_creation_tokens: 0,
+cost: 0
+});
 }
 /**
 * Add a tool to the chat session (fluent API)
@@ -79,13 +87,13 @@ export class Chat {
 return this.withTools([tool]);
 }
 /**
-
-
-
-
-
-
-
+* Add multiple tools to the chat session.
+* Supports passing Tool classes (which will be instantiated) or instances.
+* Can replace existing tools if options.replace is true.
+*
+* @example
+* chat.withTools([WeatherTool, new CalculatorTool()], { replace: true });
+*/
 withTools(tools, options) {
 if (options?.replace) {
 this.options.tools = [];
@@ -120,6 +128,25 @@ export class Chat {
 withSystemPrompt(instruction, options) {
 return this.withInstructions(instruction, options);
 }
+/**
+* Alias for withInstructions
+*/
+system(instruction, options) {
+return this.withInstructions(instruction, options);
+}
+/**
+* Add a message manually to the chat history.
+* Useful for rehydrating sessions from a database.
+*/
+add(role, content) {
+if (role === "system" || role === "developer") {
+this.systemMessages.push({ role, content });
+}
+else {
+this.messages.push({ role, content });
+}
+return this;
+}
 /**
 * Set the temperature for the chat session.
 * Controls randomness: 0.0 (deterministic) to 1.0 (creative).
@@ -260,7 +287,7 @@ export class Chat {
 }
 this.messages.push({
 role: "user",
-content: messageContent
+content: messageContent
 });
 // Process Schema/Structured Output
 let responseFormat = this.options.responseFormat;
@@ -273,7 +300,7 @@ export class Chat {
 name: this.options.schema.definition.name,
 description: this.options.schema.definition.description,
 strict: this.options.schema.definition.strict ?? true,
-schema: jsonSchema
+schema: jsonSchema
 }
 };
 }
@@ -287,7 +314,7 @@ export class Chat {
 response_format: responseFormat, // Pass to provider
 requestTimeout: options?.requestTimeout ?? this.options.requestTimeout ?? config.requestTimeout,
 signal: options?.signal,
-...this.options.params
+...this.options.params
 };
 // --- Content Policy Hooks (Input) ---
 if (this.options.onBeforeRequest) {
@@ -298,7 +325,7 @@ export class Chat {
 executeOptions.messages = result;
 }
 }
-
+const totalUsage = { input_tokens: 0, output_tokens: 0, total_tokens: 0 };
 const trackUsage = (u) => {
 if (u) {
 totalUsage.input_tokens += u.input_tokens;
@@ -327,9 +354,9 @@
 }
 this.messages.push({
 role: "assistant",
-content: assistantMessage || null,
+content: assistantMessage?.toString() || null,
 tool_calls: response.tool_calls,
-usage: response.usage
+usage: response.usage
 });
 if (this.options.onEndMessage && (!response.tool_calls || response.tool_calls.length === 0)) {
 this.options.onEndMessage(assistantMessage);
@@ -350,37 +377,42 @@
 if (this.options.toolExecution === ToolExecutionMode.CONFIRM) {
 const approved = await ToolHandler.requestToolConfirmation(toolCall, this.options.onConfirmToolCall);
 if (!approved) {
-this.messages.push(
-role: "tool",
-tool_call_id: toolCall.id,
-content: "Action cancelled by user.",
-});
+this.messages.push(this.provider.formatToolResultMessage(toolCall.id, "Action cancelled by user."));
 continue;
 }
 }
 try {
 const toolResult = await ToolHandler.execute(toolCall, this.options.tools, this.options.onToolCallStart, this.options.onToolCallEnd);
-this.messages.push(toolResult);
+this.messages.push(this.provider.formatToolResultMessage(toolResult.tool_call_id, toolResult.content));
 }
 catch (error) {
-
-
-
+let currentError = error;
+const directive = await this.options.onToolCallError?.(toolCall, currentError);
+if (directive === "STOP") {
+throw currentError;
+}
+if (directive === "RETRY") {
+try {
+const toolResult = await ToolHandler.execute(toolCall, this.options.tools, this.options.onToolCallStart, this.options.onToolCallEnd);
+this.messages.push(this.provider.formatToolResultMessage(toolResult.tool_call_id, toolResult.content));
+continue;
+}
+catch (retryError) {
+// If retry also fails, fall through to default logic
+currentError = retryError;
+}
 }
-this.messages.push({
-
-tool_call_id: toolCall.id,
-content: `Fatal error executing tool '${toolCall.function.name}': ${error.message}`,
-});
-if (directive === 'CONTINUE') {
+this.messages.push(this.provider.formatToolResultMessage(toolCall.id, `Fatal error executing tool '${toolCall.function.name}': ${currentError.message}`, { isError: true }));
+if (directive === "CONTINUE") {
 continue;
 }
 // Default short-circuit logic
-const
+const errorObj = currentError;
+const isFatal = errorObj.fatal === true || errorObj.status === 401 || errorObj.status === 403;
 if (isFatal) {
-throw
+throw currentError;
 }
-logger.error(`Tool execution failed for '${toolCall.function.name}':`,
+logger.error(`Tool execution failed for '${toolCall.function.name}':`, currentError);
 }
 }
 response = await this.executor.executeChat({
@@ -393,7 +425,7 @@
 response_format: responseFormat,
 requestTimeout: options?.requestTimeout ?? this.options.requestTimeout ?? config.requestTimeout,
 signal: options?.signal,
-...this.options.params
+...this.options.params
 });
 trackUsage(response.usage);
 assistantMessage = new ChatResponseString(response.content ?? "", response.usage ?? { input_tokens: 0, output_tokens: 0, total_tokens: 0 }, this.model, this.provider.id, response.reasoning);
@@ -406,15 +438,15 @@
 }
 this.messages.push({
 role: "assistant",
-content: assistantMessage || null,
+content: assistantMessage?.toString() || null,
 tool_calls: response.tool_calls,
-usage: response.usage
+usage: response.usage
 });
 if (this.options.onEndMessage && (!response.tool_calls || response.tool_calls.length === 0)) {
 this.options.onEndMessage(assistantMessage);
 }
 }
-// For the final return, we might want to aggregate reasoning too if it happened in multiple turns?
+// For the final return, we might want to aggregate reasoning too if it happened in multiple turns?
 // Usually reasoning only happens once or we just want the last one.
 return new ChatResponseString(assistantMessage.toString() || "", totalUsage, this.model, this.provider.id, assistantMessage.reasoning, response.tool_calls);
 }
@@ -449,17 +481,18 @@
 if (typeof toolInstance.toLLMTool === "function") {
 return toolInstance.toLLMTool();
 }
-
+const toolObj = toolInstance;
+if (!toolObj.function || !toolObj.function.name) {
 // 1. Validate structure
 throw new ConfigurationError(`[NodeLLM] Tool validation failed: 'function.name' is required for raw tool objects.`);
 }
-if (
+if (toolObj.type !== "function") {
 // 2. Ensure 'type: function' exists (standardize for providers)
-
+toolObj.type = "function";
 }
-if (typeof
+if (typeof toolObj.handler !== "function") {
 // 3. Validate handler existence
-throw new ConfigurationError(`[NodeLLM] Tool validation failed: Tool '${
+throw new ConfigurationError(`[NodeLLM] Tool validation failed: Tool '${toolObj.function.name}' must have a 'handler' function. (Note: Only Tool subclasses use 'execute()')`);
 }
 return toolInstance;
 }
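The reworked catch block above wires `onToolCallError` to three directives: "STOP" rethrows the error, "RETRY" re-executes the failed tool once before falling back, and "CONTINUE" records the failure as a tool result and keeps the loop going. A minimal sketch of how a caller might use this, reusing the `WeatherTool` class from the README example (the 429 status check is an illustrative assumption):

```ts
import { NodeLLM } from "@node-llm/core";

// WeatherTool: the Tool subclass defined in the README example above.
const chat = NodeLLM.chat("gpt-4o").withTool(WeatherTool);

chat.onToolCallError((toolCall, error) => {
  // Retry once on a hypothetical rate-limit error; otherwise report the
  // failure back to the model as a tool result and continue the loop.
  if ((error as { status?: number }).status === 429) {
    return "RETRY";
  }
  return "CONTINUE";
});

await chat.ask("What's the weather in Tokyo?");
```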