@node-llm/core 0.2.2 → 0.4.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +288 -16
- package/dist/chat/Chat.d.ts +58 -17
- package/dist/chat/Chat.d.ts.map +1 -1
- package/dist/chat/Chat.js +185 -33
- package/dist/chat/ChatOptions.d.ts +10 -0
- package/dist/chat/ChatOptions.d.ts.map +1 -1
- package/dist/chat/ChatResponse.d.ts +23 -0
- package/dist/chat/ChatResponse.d.ts.map +1 -0
- package/dist/chat/ChatResponse.js +38 -0
- package/dist/chat/Stream.d.ts.map +1 -1
- package/dist/chat/Stream.js +10 -0
- package/dist/constants.d.ts +7 -0
- package/dist/constants.d.ts.map +1 -0
- package/dist/constants.js +6 -0
- package/dist/embedding/Embedding.d.ts +17 -0
- package/dist/embedding/Embedding.d.ts.map +1 -0
- package/dist/embedding/Embedding.js +24 -0
- package/dist/index.d.ts +1 -1
- package/dist/index.d.ts.map +1 -1
- package/dist/index.js +1 -1
- package/dist/llm.d.ts +35 -3
- package/dist/llm.d.ts.map +1 -1
- package/dist/llm.js +88 -14
- package/dist/models/ModelRegistry.d.ts +23 -0
- package/dist/models/ModelRegistry.d.ts.map +1 -0
- package/dist/models/ModelRegistry.js +54 -0
- package/dist/moderation/Moderation.d.ts +56 -0
- package/dist/moderation/Moderation.d.ts.map +1 -0
- package/dist/moderation/Moderation.js +92 -0
- package/dist/providers/Embedding.d.ts +20 -0
- package/dist/providers/Embedding.d.ts.map +1 -0
- package/dist/providers/Provider.d.ts +50 -4
- package/dist/providers/Provider.d.ts.map +1 -1
- package/dist/providers/gemini/Capabilities.d.ts +30 -0
- package/dist/providers/gemini/Capabilities.d.ts.map +1 -0
- package/dist/providers/gemini/Capabilities.js +148 -0
- package/dist/providers/gemini/Chat.d.ts +8 -0
- package/dist/providers/gemini/Chat.d.ts.map +1 -0
- package/dist/providers/gemini/Chat.js +69 -0
- package/dist/providers/gemini/ChatUtils.d.ts +9 -0
- package/dist/providers/gemini/ChatUtils.d.ts.map +1 -0
- package/dist/providers/gemini/ChatUtils.js +83 -0
- package/dist/providers/gemini/Embeddings.d.ts +8 -0
- package/dist/providers/gemini/Embeddings.d.ts.map +1 -0
- package/dist/providers/gemini/Embeddings.js +44 -0
- package/dist/providers/gemini/Errors.d.ts +2 -0
- package/dist/providers/gemini/Errors.d.ts.map +1 -0
- package/dist/providers/gemini/Errors.js +34 -0
- package/dist/providers/gemini/GeminiProvider.d.ts +34 -0
- package/dist/providers/gemini/GeminiProvider.d.ts.map +1 -0
- package/dist/providers/gemini/GeminiProvider.js +55 -0
- package/dist/providers/gemini/Image.d.ts +8 -0
- package/dist/providers/gemini/Image.d.ts.map +1 -0
- package/dist/providers/gemini/Image.js +47 -0
- package/dist/providers/gemini/Models.d.ts +8 -0
- package/dist/providers/gemini/Models.d.ts.map +1 -0
- package/dist/providers/gemini/Models.js +38 -0
- package/dist/providers/gemini/Streaming.d.ts +8 -0
- package/dist/providers/gemini/Streaming.d.ts.map +1 -0
- package/dist/providers/gemini/Streaming.js +70 -0
- package/dist/providers/gemini/Transcription.d.ts +9 -0
- package/dist/providers/gemini/Transcription.d.ts.map +1 -0
- package/dist/providers/gemini/Transcription.js +63 -0
- package/dist/providers/gemini/index.d.ts +11 -0
- package/dist/providers/gemini/index.d.ts.map +1 -0
- package/dist/providers/gemini/index.js +24 -0
- package/dist/providers/gemini/types.d.ts +118 -0
- package/dist/providers/gemini/types.d.ts.map +1 -0
- package/dist/providers/gemini/types.js +1 -0
- package/dist/providers/openai/Capabilities.d.ts +7 -2
- package/dist/providers/openai/Capabilities.d.ts.map +1 -1
- package/dist/providers/openai/Capabilities.js +52 -214
- package/dist/providers/openai/Chat.d.ts.map +1 -1
- package/dist/providers/openai/Chat.js +4 -0
- package/dist/providers/openai/Embedding.d.ts +8 -0
- package/dist/providers/openai/Embedding.d.ts.map +1 -0
- package/dist/providers/openai/Embedding.js +48 -0
- package/dist/providers/openai/ModelDefinitions.d.ts +25 -0
- package/dist/providers/openai/ModelDefinitions.d.ts.map +1 -0
- package/dist/providers/openai/ModelDefinitions.js +211 -0
- package/dist/providers/openai/Moderation.d.ts +8 -0
- package/dist/providers/openai/Moderation.d.ts.map +1 -0
- package/dist/providers/openai/Moderation.js +27 -0
- package/dist/providers/openai/OpenAIProvider.d.ts +13 -1
- package/dist/providers/openai/OpenAIProvider.d.ts.map +1 -1
- package/dist/providers/openai/OpenAIProvider.js +22 -0
- package/dist/providers/openai/Streaming.d.ts.map +1 -1
- package/dist/providers/openai/Streaming.js +19 -8
- package/dist/providers/openai/Transcription.d.ts +10 -0
- package/dist/providers/openai/Transcription.d.ts.map +1 -0
- package/dist/providers/openai/Transcription.js +162 -0
- package/dist/providers/openai/index.d.ts +8 -0
- package/dist/providers/openai/index.d.ts.map +1 -1
- package/dist/providers/openai/index.js +12 -0
- package/dist/schema/Schema.d.ts +20 -0
- package/dist/schema/Schema.d.ts.map +1 -0
- package/dist/schema/Schema.js +22 -0
- package/dist/schema/to-json-schema.d.ts +3 -0
- package/dist/schema/to-json-schema.d.ts.map +1 -0
- package/dist/schema/to-json-schema.js +10 -0
- package/dist/transcription/Transcription.d.ts +11 -0
- package/dist/transcription/Transcription.d.ts.map +1 -0
- package/dist/transcription/Transcription.js +21 -0
- package/dist/utils/Binary.d.ts +12 -0
- package/dist/utils/Binary.d.ts.map +1 -0
- package/dist/utils/Binary.js +71 -0
- package/dist/utils/FileLoader.d.ts.map +1 -1
- package/dist/utils/FileLoader.js +12 -1
- package/dist/utils/audio.d.ts +10 -0
- package/dist/utils/audio.d.ts.map +1 -0
- package/dist/utils/audio.js +46 -0
- package/package.json +18 -7
- package/dist/providers/openai/register.d.ts +0 -2
- package/dist/providers/openai/register.d.ts.map +0 -1
- package/dist/providers/openai/register.js +0 -15
- package/dist/tools/Tool.d.ts +0 -8
- package/dist/tools/Tool.d.ts.map +0 -1
- package/dist/tools/ToolSet.d.ts +0 -15
- package/dist/tools/ToolSet.d.ts.map +0 -1
- package/dist/tools/ToolSet.js +0 -29
- package/dist/tools/index.d.ts +0 -2
- package/dist/tools/index.d.ts.map +0 -1
- package/dist/tools/index.js +0 -1
- package/dist/tools/runCommandTool.d.ts +0 -8
- package/dist/tools/runCommandTool.d.ts.map +0 -1
- package/dist/tools/runCommandTool.js +0 -19
- /package/dist/{tools/Tool.js → providers/Embedding.js} +0 -0
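Taken together, the new `embedding/`, `moderation/`, `transcription/`, and `providers/gemini/` modules extend the top-level `LLM` facade. A minimal sketch of how that surface fits together, using only the configuration keys and methods documented in the README diff below (the concrete inputs are illustrative):

```ts
// Sketch based on the README changes shown below; option and method names
// (LLM.configure, LLM.embed, LLM.transcribe, LLM.moderate) come from that
// diff, while the specific inputs here are illustrative.
import { LLM } from "@node-llm/core";
import "dotenv/config";

LLM.configure({
  provider: "openai", // or "gemini" (uses GEMINI_API_KEY)
  retry: { attempts: 3, delayMs: 500 },
  defaultModerationModel: "text-moderation-latest",
  defaultTranscriptionModel: "whisper-1",
  defaultEmbeddingModel: "text-embedding-3-small"
});

const embedding = await LLM.embed("semantic search text");  // new Embedding module
const transcript = await LLM.transcribe("meeting.mp3");     // new Transcription module
const verdict = await LLM.moderate("user supplied text");   // new Moderation module

console.log(embedding.dimensions, verdict.flagged, transcript);
```

The removed `tools/` files also show that the bundled `ToolSet` and `runCommandTool` helpers no longer ship in `dist/`; tools are instead attached to a chat session via `withTool`/`withTools` (see the `Chat.d.ts` diff further down).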
package/README.md
CHANGED
@@ -15,6 +15,7 @@ A provider-agnostic LLM core for Node.js, heavily inspired by the elegant design
 - **Provider-Agnostic**: Switch between OpenAI, Anthropic, and others with a single line of config.
 - **Streaming-First**: Native `AsyncIterator` support for real-time token delivery.
 - **Tool Calling**: Automatic execution loop for model-requested functions.
+- **Content Moderation**: Built-in safety checks for user input and model output.
 - **Multi-modal & Smart Files**: Built-in support for Vision (images), Audio, and Text files.
 - **Fluent API**: Chainable methods like `.withTool()` for dynamic registration.
 - **Resilient**: Configurable retry logic at the execution layer.
@@ -41,8 +42,11 @@ import { LLM } from "@node-llm/core";
 import "dotenv/config";

 LLM.configure({
-  provider: "openai", //
-  retry: { attempts: 3, delayMs: 500 }
+  provider: "openai", // or "gemini" (Uses GEMINI_API_KEY)
+  retry: { attempts: 3, delayMs: 500 },
+  defaultModerationModel: "text-moderation-latest",
+  defaultTranscriptionModel: "whisper-1",
+  defaultEmbeddingModel: "text-embedding-3-small"
 });
 ```

@@ -103,25 +107,175 @@ console.log(response.output_tokens); // 5
 console.log(chat.totalUsage.total_tokens);
 ```

+### 6. Embeddings
+
+Generate vector representations of text for semantic search, clustering, and similarity comparisons.
+
+```ts
+// Single text embedding
+const embedding = await LLM.embed("Ruby is a programmer's best friend");
+
+console.log(embedding.vector); // Array of floats (e.g., 1536 dimensions)
+console.log(embedding.dimensions); // 1536
+console.log(embedding.model); // "text-embedding-3-small"
+console.log(embedding.input_tokens); // Token count
+
+// Batch embeddings
+const embeddings = await LLM.embed([
+  "First text",
+  "Second text",
+  "Third text"
+]);
+
+console.log(embeddings.vectors); // Array of vectors
+console.log(embeddings.vectors.length); // 3
+
+// Custom model and dimensions
+const customEmbedding = await LLM.embed("Semantic search text", {
+  model: "text-embedding-3-large",
+  dimensions: 256 // Reduce dimensions for faster processing
+});
+```
+
+### 7. Audio Transcription (Transcribe)
+
+Convert audio files to text using specialized models like Whisper.
+
+```ts
+const text = await LLM.transcribe("meeting.mp3");
+console.log(text);
+```
+
+### 8. Content Moderation (Moderate)
+
+Check if text content violates safety policies.
+
+```ts
+const result = await LLM.moderate("I want to help everyone!");
+if (result.flagged) {
+  console.log(`❌ Flagged for: ${result.flaggedCategories.join(", ")}`);
+} else {
+  console.log("✅ Content appears safe");
+}
+```
+
+Learn how to implement [custom risk thresholds](../../examples/openai/12-risk-assessment.mjs) for more granular control.
+
+### 9. Chat Event Handlers
+
+Hook into the chat lifecycle for logging, UI updates, or auditing.
+
+```ts
+chat
+  .onNewMessage(() => console.log("AI started typing..."))
+  .onToolCall((tool) => console.log(`Calling ${tool.function.name}...`))
+  .onToolResult((result) => console.log(`Tool returned: ${result}`))
+  .onEndMessage((response) => console.log(`Done. Usage: ${response.total_tokens}`));
+
+await chat.ask("What's the weather?");
+```
+
+### 10. System Prompts (Instructions)
+
+Guide the AI's behavior, personality, or constraints.
+
+```ts
+// Set initial instructions
+chat.withInstructions("You are a helpful assistant that explains simply.");
+
+// Update instructions mid-conversation (replace: true removes previous ones)
+chat.withInstructions("Now assume the persona of a pirate.", { replace: true });
+
+await chat.ask("Hello");
+// => "Ahoy matey!"
+```
+
+### 11. Temperature Control (Creativity)
+
+Adjust the randomness of the model's responses.
+
+```ts
+// Factual (0.0 - 0.3)
+const factual = LLM.chat("gpt-4o").withTemperature(0.2);
+
+// Creative (0.7 - 1.0)
+const creative = LLM.chat("gpt-4o").withTemperature(0.9);
+```
+
 ---

 ## 📚 Examples

-Check the [examples](
+Check the [examples](../../examples) directory for focused scripts organized by provider:

 ### OpenAI Examples
+
+#### 💬 Chat
+| Example | Description |
+| :--- | :--- |
+| [Basic & Streaming](../../examples/openai/chat/basic.mjs) | Standard completions and real-time streaming |
+| [System Instructions](../../examples/openai/chat/instructions.mjs) | Tuning behavior with system prompts and temperature |
+| [Tool Calling](../../examples/openai/chat/tools.mjs) | Automatic execution of model-requested functions |
+| [Parallel Tool Calling](../../examples/openai/chat/parallel-tools.mjs) | Executing multiple tools in a single turn |
+| [Lifecycle Events](../../examples/openai/chat/events.mjs) | Hooks for specific chat events (onNewMessage, onToolCall) |
+| [Token Usage](../../examples/openai/chat/usage.mjs) | Tracking costs and token counts |
+| [Max Tokens](../../examples/openai/chat/max-tokens.mjs) | Limiting response length with `maxTokens` |
+
+#### 🖼️ Multimodal
+| Example | Description |
+| :--- | :--- |
+| [Vision Analysis](../../examples/openai/multimodal/vision.mjs) | Analyzing images via URLs |
+| [Multi-Image Analysis](../../examples/openai/multimodal/multi-image.mjs) | Comparing multiple images in one request |
+| [File Context](../../examples/openai/multimodal/files.mjs) | Reading and analyzing local project files |
+| [Audio Transcription](../../examples/openai/multimodal/transcribe.mjs) | Converting audio files to text (Whisper) |
+
+#### 🎨 Images
+| Example | Description |
+| :--- | :--- |
+| [Generate & Save](../../examples/openai/images/generate.mjs) | Creating images with DALL-E 3 and saving to disk |
+
+#### 🛡️ Safety
+| Example | Description |
+| :--- | :--- |
+| [Moderation](../../examples/openai/safety/moderation.mjs) | Content safety checks and risk assessment |
+
+#### 🧠 Discovery
+| Example | Description |
+| :--- | :--- |
+| [Models & Capabilities](../../examples/openai/discovery/models.mjs) | Listing models and inspecting their specs |
+| [Embeddings](../../examples/openai/embeddings/create.mjs) | Generating semantic vector embeddings |
+
+### Gemini Examples
+
+#### 💬 Chat
+| Example | Description |
+| :--- | :--- |
+| [Basic & Streaming](../../examples/gemini/chat/basic.mjs) | Standard completions and real-time streaming |
+| [System Instructions](../../examples/gemini/chat/instructions.mjs) | Behavior tuning and creativity control |
+| [Tool Calling](../../examples/gemini/chat/tools.mjs) | Function calling with automatic execution |
+| [Lifecycle Events](../../examples/gemini/chat/events.mjs) | Event hooks for chat interactions |
+| [Token Usage](../../examples/gemini/chat/usage.mjs) | Tracking conversation costs |
+
+#### 🖼️ Multimodal
 | Example | Description |
 | :--- | :--- |
-| [
-| [
-| [
-
-
-
-
-| [
-
-
+| [Vision Analysis](../../examples/gemini/multimodal/vision.mjs) | Understanding images |
+| [File Context](../../examples/gemini/multimodal/files.mjs) | Reading multiple local files |
+| [Audio Transcription](../../examples/gemini/multimodal/transcribe.mjs) | Native audio understanding |
+
+#### 🎨 Images
+| Example | Description |
+| :--- | :--- |
+| [Generate & Save](../../examples/gemini/images/generate.mjs) | Creating images with Imagen |
+
+#### 🧠 Discovery
+| Example | Description |
+| :--- | :--- |
+| [Models & Capabilities](../../examples/gemini/discovery/models.mjs) | Listing models and capabilities |
+| [Embeddings](../../examples/gemini/embeddings/create.mjs) | Creating vector embeddings |
+
+
+To run an example:
 ```bash
 node examples/openai/01-basic-chat.mjs
 ```
@@ -155,9 +309,75 @@ const reply = await chat
   .ask("What is the weather in London?");
 ```

+### Structured Output (Schemas)
+
+Ensure the AI returns data exactly matching a specific structure. Supports strict schema validation using Zod.
+
+**Using Zod (Recommended):**
+
+```ts
+import { z } from "zod";
+
+const personSchema = z.object({
+  name: z.string(),
+  age: z.number(),
+  hobbies: z.array(z.string())
+});
+
+const response = await chat
+  .withSchema(personSchema)
+  .ask("Generate a person named Alice who likes hiking");
+
+// Type-safe access to parsed data
+const person = response.parsed;
+console.log(person.name); // "Alice"
+```
+
+**Using Manual JSON Schema:**
+
+```ts
+const schema = {
+  type: "object",
+  properties: {
+    name: { type: "string" },
+    age: { type: "integer" }
+  },
+  required: ["name", "age"],
+  additionalProperties: false // Required for strict mode in OpenAI
+};
+
+const response = await chat
+  .withSchema(schema)
+  .ask("Generate a person");
+
+console.log(response.parsed); // { name: "...", age: ... }
+```
+
+### JSON Mode
+
+Guarantee valid JSON output without enforcing a strict schema.
+
+```ts
+chat.withRequestOptions({
+  responseFormat: { type: "json_object" }
+});
+
+const response = await chat.ask("Generate a JSON object with a greeting");
+console.log(response.parsed); // { greeting: "..." }
+```
+
+
 ### Multi-modal & File Support

-Pass local paths or URLs directly. The library handles reading, MIME detection, and encoding.
+Pass local paths or URLs directly. The library handles reading, MIME detection, and encoding for a wide variety of file types.
+
+**Supported File Types:**
+- **Images**: `.jpg`, `.jpeg`, `.png`, `.gif`, `.webp`
+- **Videos**: `.mp4`, `.mpeg`, `.mov`
+- **Audio**: `.wav`, `.mp3`
+- **Documents**: `.csv`, `.json`
+- **Code**: `.js`, `.mjs`, `.cjs`, `.ts`, `.py`, `.rb`, `.go`, `.java`, `.c`, `.cpp`, `.rs`, `.swift`, `.kt`
+- **Text**: `.txt`, `.md`, `.html`, `.css`, `.xml`, `.yml`, `.yaml`

 ```ts
 // Vision
@@ -170,10 +390,43 @@ await chat.ask("Transcribe this", {
   files: ["./meeting.mp3"]
 });

-//
+// Code Analysis
 await chat.ask("Explain this code", {
   files: ["./app.ts"]
 });
+
+// Multiple files at once
+await chat.ask("Analyze these files", {
+  files: ["diagram.png", "data.json", "notes.txt"]
+});
+```
+
+### Custom HTTP Headers (Proxies/Observability)
+
+Inject custom headers into requests, useful for tools like Helicone or Portkey.
+
+```ts
+chat.withRequestOptions({
+  headers: {
+    "Helicone-Auth": "Bearer my-key",
+    "X-Custom-Trace": "123"
+  }
+});
+```
+
+### Model Capabilities & Pricing
+
+Get up-to-date information about context windows, pricing, and capabilities directly from the Parsera API.
+
+```javascript
+// Refresh model information from the API
+await LLM.models.refresh();
+
+// Use the data programmatically
+const model = LLM.models.find("gpt-4o-mini");
+console.log(model.context_window); // => 128000
+console.log(model.capabilities); // => ["function_calling", "structured_output", "streaming", "batch"]
+console.log(model.pricing.text_tokens.standard.input_per_million); // => 0.15
 ```

 ---
@@ -182,7 +435,8 @@ await chat.ask("Explain this code", {

 | Provider | Status | Notes |
 | :--- | :--- | :--- |
-| **OpenAI** | ✅ Supported | Chat, Streaming, Tools, Vision, Audio, Images
+| **OpenAI** | ✅ Supported | Chat, Streaming, Tools, Vision, Audio, Images, Transcription, Moderation |
+| **Gemini** | ✅ Supported | Chat, Streaming, Tools, Vision, Audio, Video, Embeddings, Transcription |
 | **Anthropic** | 🏗️ Roadmap | Coming soon |
 | **Azure OpenAI** | 🏗️ Roadmap | Coming soon |

@@ -197,6 +451,24 @@ await chat.ask("Explain this code", {

 ---

+## 🧪 Testing
+
+`node-llm` uses VCR-style testing (via Polly.js) for robust, deterministic integration tests. This allows us to record real LLM provider interactions once and replay them during tests without making actual API calls.
+
+- **Replay Mode (Default)**: Runs tests using recorded cassettes. Fast, deterministic, and requires no API keys.
+  ```bash
+  npm test
+  ```
+
+- **Record Mode**: Hits real APIs and updates cassettes. Requires a valid API key.
+  ```bash
+  VCR_MODE=record OPENAI_API_KEY=your_key npm test
+  ```
+
+*All recordings are automatically scrubbed of sensitive data (API keys, org IDs) before being saved to disk.*
+
+---
+
 ## 📄 License

 MIT © [node-llm contributors]
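The embeddings section above positions vectors for semantic search and similarity comparison but stops at printing them. A minimal cosine-similarity sketch over the documented `LLM.embed` batch result (only the `vectors` property is taken from the README; the ranking logic is plain arithmetic):

```ts
import { LLM } from "@node-llm/core";

// Cosine similarity between two equal-length vectors.
function cosineSimilarity(a: number[], b: number[]): number {
  let dot = 0;
  let normA = 0;
  let normB = 0;
  for (let i = 0; i < a.length; i++) {
    dot += a[i] * b[i];
    normA += a[i] * a[i];
    normB += b[i] * b[i];
  }
  return dot / (Math.sqrt(normA) * Math.sqrt(normB));
}

// Embed a query plus two candidate documents in one batch call.
const { vectors } = await LLM.embed([
  "How do I reset my password?",
  "Step-by-step password reset instructions",
  "Quarterly revenue grew by 12%"
]);

const [query, docA, docB] = vectors;
console.log("docA score:", cosineSimilarity(query, docA)); // expected to rank higher
console.log("docB score:", cosineSimilarity(query, docB));
```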
package/dist/chat/Chat.d.ts
CHANGED
@@ -1,31 +1,20 @@
 import { Message } from "./Message.js";
 import { ChatOptions } from "./ChatOptions.js";
 import { Provider, Usage } from "../providers/Provider.js";
+import { Tool } from "./Tool.js";
+import { Schema } from "../schema/Schema.js";
+import { z } from "zod";
 export interface AskOptions {
     images?: string[];
     files?: string[];
     temperature?: number;
     maxTokens?: number;
+    headers?: Record<string, string>;
 }
-
- * Enhanced string that includes token usage metadata.
- * Behaves like a regular string but has .usage and .input_tokens etc.
- */
-export declare class ChatResponseString extends String {
-    readonly usage: Usage;
-    readonly model: string;
-    constructor(content: string, usage: Usage, model: string);
-    get input_tokens(): number;
-    get output_tokens(): number;
-    get total_tokens(): number;
-    get cached_tokens(): number | undefined;
-    get content(): string;
-    get model_id(): string;
-    toString(): string;
-}
+import { ChatResponseString } from "./ChatResponse.js";
 export declare class Chat {
     private readonly provider;
-    private
+    private model;
     private readonly options;
     private messages;
     private executor;
@@ -40,8 +29,60 @@ export declare class Chat {
     get totalUsage(): Usage;
     /**
      * Add a tool to the chat session (fluent API)
+     * Supports passing a tool instance or a tool class (which will be instantiated).
      */
     withTool(tool: any): this;
+    /**
+     * Add multiple tools to the chat session.
+     * Supports passing tool instances or classes (which will be instantiated).
+     * Can replace existing tools if options.replace is true.
+     *
+     * @example
+     * chat.withTools([WeatherTool, new CalculatorTool()], { replace: true });
+     */
+    withTools(tools: (Tool | any)[], options?: {
+        replace?: boolean;
+    }): this;
+    /**
+     * Add instructions (system prompt) to the chat.
+     * By default, it appends a new system message.
+     * If { replace: true } is passed, it removes all previous system messages first.
+     */
+    withInstructions(instruction: string, options?: {
+        replace?: boolean;
+    }): this;
+    /**
+     * Alias for withInstructions
+     */
+    withSystemPrompt(instruction: string, options?: {
+        replace?: boolean;
+    }): this;
+    /**
+     * Set the temperature for the chat session.
+     * Controls randomness: 0.0 (deterministic) to 1.0 (creative).
+     */
+    withTemperature(temp: number): this;
+    /**
+     * Switch the model used for this chat session.
+     */
+    withModel(model: string): this;
+    /**
+     * Set custom headers for the chat session.
+     * Merges with existing headers.
+     */
+    withRequestOptions(options: {
+        headers?: Record<string, string>;
+        responseFormat?: any;
+    }): this;
+    /**
+     * Enforce a specific schema for the output.
+     * Can accept a Schema object or a Zod schema/JSON Schema directly.
+     */
+    withSchema(schema: Schema | z.ZodType<any> | Record<string, any> | null): this;
+    onNewMessage(handler: () => void): this;
+    onEndMessage(handler: (message: ChatResponseString) => void): this;
+    onToolCall(handler: (toolCall: any) => void): this;
+    onToolResult(handler: (result: any) => void): this;
     /**
      * Ask the model a question
      */
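The declarations above add a set of fluent configuration methods and lifecycle hooks to `Chat`. A short usage sketch assembled from those signatures and the README examples earlier in this diff (the model name, prompt, and handler bodies are illustrative):

```ts
import { z } from "zod";
import { LLM } from "@node-llm/core";

// Chain the new fluent methods; each returns `this` per the declarations above.
const chat = LLM.chat("gpt-4o-mini") // model name is illustrative
  .withInstructions("You are a terse assistant.", { replace: true })
  .withTemperature(0.2)
  .withRequestOptions({ headers: { "X-Custom-Trace": "123" } })
  .withSchema(z.object({ answer: z.string() })) // Zod schemas are accepted directly
  .onNewMessage(() => console.log("model started responding"))
  .onEndMessage((response) => console.log("total tokens:", response.total_tokens));

const response = await chat.ask("In one word, what color is the sky?");
console.log(response.parsed); // { answer: "..." }
```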
package/dist/chat/Chat.d.ts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"Chat.d.ts","sourceRoot":"","sources":["../../src/chat/Chat.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,OAAO,EAAE,MAAM,cAAc,CAAC;AACvC,OAAO,EAAE,WAAW,EAAE,MAAM,kBAAkB,CAAC;AAC/C,OAAO,EAAE,QAAQ,EAAE,KAAK,EAAE,MAAM,0BAA0B,CAAC;
+{"version":3,"file":"Chat.d.ts","sourceRoot":"","sources":["../../src/chat/Chat.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,OAAO,EAAE,MAAM,cAAc,CAAC;AACvC,OAAO,EAAE,WAAW,EAAE,MAAM,kBAAkB,CAAC;AAC/C,OAAO,EAAE,QAAQ,EAAE,KAAK,EAAE,MAAM,0BAA0B,CAAC;AAI3D,OAAO,EAAE,IAAI,EAAE,MAAM,WAAW,CAAC;AACjC,OAAO,EAAE,MAAM,EAAE,MAAM,qBAAqB,CAAC;AAE7C,OAAO,EAAE,CAAC,EAAE,MAAM,KAAK,CAAC;AAExB,MAAM,WAAW,UAAU;IACzB,MAAM,CAAC,EAAE,MAAM,EAAE,CAAC;IAClB,KAAK,CAAC,EAAE,MAAM,EAAE,CAAC;IACjB,WAAW,CAAC,EAAE,MAAM,CAAC;IACrB,SAAS,CAAC,EAAE,MAAM,CAAC;IACnB,OAAO,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;CAClC;AAED,OAAO,EAAE,kBAAkB,EAAE,MAAM,mBAAmB,CAAC;AAEvD,qBAAa,IAAI;IAKb,OAAO,CAAC,QAAQ,CAAC,QAAQ;IACzB,OAAO,CAAC,KAAK;IACb,OAAO,CAAC,QAAQ,CAAC,OAAO;IAN1B,OAAO,CAAC,QAAQ,CAAiB;IACjC,OAAO,CAAC,QAAQ,CAAW;gBAGR,QAAQ,EAAE,QAAQ,EAC3B,KAAK,EAAE,MAAM,EACJ,OAAO,GAAE,WAAgB;IAmB5C;;OAEG;IACH,IAAI,OAAO,IAAI,SAAS,OAAO,EAAE,CAEhC;IAED;;OAEG;IACH,IAAI,UAAU,IAAI,KAAK,CAatB;IAED;;;OAGG;IACH,QAAQ,CAAC,IAAI,EAAE,GAAG,GAAG,IAAI;IAIzB;;;;;;;OAOG;IACH,SAAS,CAAC,KAAK,EAAE,CAAC,IAAI,GAAG,GAAG,CAAC,EAAE,EAAE,OAAO,CAAC,EAAE;QAAE,OAAO,CAAC,EAAE,OAAO,CAAA;KAAE,GAAG,IAAI;IA2BvE;;;;OAIG;IACH,gBAAgB,CAAC,WAAW,EAAE,MAAM,EAAE,OAAO,CAAC,EAAE;QAAE,OAAO,CAAC,EAAE,OAAO,CAAA;KAAE,GAAG,IAAI;IAmB5E;;OAEG;IACH,gBAAgB,CAAC,WAAW,EAAE,MAAM,EAAE,OAAO,CAAC,EAAE;QAAE,OAAO,CAAC,EAAE,OAAO,CAAA;KAAE,GAAG,IAAI;IAI5E;;;OAGG;IACH,eAAe,CAAC,IAAI,EAAE,MAAM,GAAG,IAAI;IAKnC;;OAEG;IACH,SAAS,CAAC,KAAK,EAAE,MAAM,GAAG,IAAI;IAK9B;;;OAGG;IACH,kBAAkB,CAAC,OAAO,EAAE;QAAE,OAAO,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QAAC,cAAc,CAAC,EAAE,GAAG,CAAA;KAAE,GAAG,IAAI;IAU7F;;;OAGG;IACH,UAAU,CAAC,MAAM,EAAE,MAAM,GAAG,CAAC,CAAC,OAAO,CAAC,GAAG,CAAC,GAAG,MAAM,CAAC,MAAM,EAAE,GAAG,CAAC,GAAG,IAAI,GAAG,IAAI;IAkB9E,YAAY,CAAC,OAAO,EAAE,MAAM,IAAI,GAAG,IAAI;IAKvC,YAAY,CAAC,OAAO,EAAE,CAAC,OAAO,EAAE,kBAAkB,KAAK,IAAI,GAAG,IAAI;IAKlE,UAAU,CAAC,OAAO,EAAE,CAAC,QAAQ,EAAE,GAAG,KAAK,IAAI,GAAG,IAAI;IAKlD,YAAY,CAAC,OAAO,EAAE,CAAC,MAAM,EAAE,GAAG,KAAK,IAAI,GAAG,IAAI;IAKlD;;OAEG;IACG,GAAG,CAAC,OAAO,EAAE,MAAM,EAAE,OAAO,CAAC,EAAE,UAAU,GAAG,OAAO,CAAC,kBAAkB,CAAC;IA8K7E;;OAEG;IACI,MAAM,CAAC,OAAO,EAAE,MAAM;CAI9B"}
|