@node-llm/core 0.4.1 → 0.6.0
- package/README.md +89 -27
- package/dist/chat/Chat.d.ts +6 -1
- package/dist/chat/Chat.d.ts.map +1 -1
- package/dist/chat/Chat.js +27 -4
- package/dist/chat/ChatOptions.d.ts +3 -0
- package/dist/chat/ChatOptions.d.ts.map +1 -1
- package/dist/chat/ChatResponse.d.ts +3 -0
- package/dist/chat/ChatResponse.d.ts.map +1 -1
- package/dist/chat/ChatResponse.js +3 -0
- package/dist/index.d.ts +2 -0
- package/dist/index.d.ts.map +1 -1
- package/dist/index.js +2 -0
- package/dist/llm.d.ts +5 -1
- package/dist/llm.d.ts.map +1 -1
- package/dist/llm.js +22 -6
- package/dist/models/ModelRegistry.d.ts +39 -12
- package/dist/models/ModelRegistry.d.ts.map +1 -1
- package/dist/models/ModelRegistry.js +50 -40
- package/dist/models/models.d.ts +972 -0
- package/dist/models/models.d.ts.map +1 -0
- package/dist/models/models.js +7026 -0
- package/dist/models/types.d.ts +50 -0
- package/dist/models/types.d.ts.map +1 -0
- package/dist/models/types.js +1 -0
- package/dist/providers/Provider.d.ts +5 -0
- package/dist/providers/Provider.d.ts.map +1 -1
- package/dist/providers/anthropic/AnthropicProvider.d.ts +32 -0
- package/dist/providers/anthropic/AnthropicProvider.d.ts.map +1 -0
- package/dist/providers/anthropic/AnthropicProvider.js +49 -0
- package/dist/providers/anthropic/Capabilities.d.ts +11 -0
- package/dist/providers/anthropic/Capabilities.d.ts.map +1 -0
- package/dist/providers/anthropic/Capabilities.js +82 -0
- package/dist/providers/anthropic/Chat.d.ts +8 -0
- package/dist/providers/anthropic/Chat.d.ts.map +1 -0
- package/dist/providers/anthropic/Chat.js +97 -0
- package/dist/providers/anthropic/Errors.d.ts +2 -0
- package/dist/providers/anthropic/Errors.d.ts.map +1 -0
- package/dist/providers/anthropic/Errors.js +33 -0
- package/dist/providers/anthropic/Models.d.ts +9 -0
- package/dist/providers/anthropic/Models.d.ts.map +1 -0
- package/dist/providers/anthropic/Models.js +58 -0
- package/dist/providers/anthropic/Streaming.d.ts +8 -0
- package/dist/providers/anthropic/Streaming.d.ts.map +1 -0
- package/dist/providers/anthropic/Streaming.js +113 -0
- package/dist/providers/anthropic/Utils.d.ts +5 -0
- package/dist/providers/anthropic/Utils.d.ts.map +1 -0
- package/dist/providers/anthropic/Utils.js +125 -0
- package/dist/providers/anthropic/index.d.ts +2 -0
- package/dist/providers/anthropic/index.d.ts.map +1 -0
- package/dist/providers/anthropic/index.js +11 -0
- package/dist/providers/anthropic/types.d.ts +57 -0
- package/dist/providers/anthropic/types.d.ts.map +1 -0
- package/dist/providers/anthropic/types.js +1 -0
- package/dist/providers/gemini/Capabilities.d.ts +28 -7
- package/dist/providers/gemini/Capabilities.d.ts.map +1 -1
- package/dist/providers/gemini/Capabilities.js +37 -22
- package/dist/providers/gemini/Chat.d.ts +1 -0
- package/dist/providers/gemini/Chat.d.ts.map +1 -1
- package/dist/providers/gemini/Chat.js +40 -3
- package/dist/providers/gemini/GeminiProvider.d.ts +2 -1
- package/dist/providers/gemini/GeminiProvider.d.ts.map +1 -1
- package/dist/providers/gemini/GeminiProvider.js +3 -0
- package/dist/providers/gemini/Models.d.ts +1 -0
- package/dist/providers/gemini/Models.d.ts.map +1 -1
- package/dist/providers/gemini/Models.js +46 -26
- package/dist/providers/gemini/Streaming.d.ts +1 -0
- package/dist/providers/gemini/Streaming.d.ts.map +1 -1
- package/dist/providers/gemini/Streaming.js +34 -4
- package/dist/providers/openai/Capabilities.d.ts +3 -11
- package/dist/providers/openai/Capabilities.d.ts.map +1 -1
- package/dist/providers/openai/Capabilities.js +119 -122
- package/dist/providers/openai/Chat.d.ts.map +1 -1
- package/dist/providers/openai/Chat.js +19 -17
- package/dist/providers/openai/Embedding.d.ts.map +1 -1
- package/dist/providers/openai/Embedding.js +2 -1
- package/dist/providers/openai/Image.d.ts.map +1 -1
- package/dist/providers/openai/Image.js +2 -1
- package/dist/providers/openai/ModelDefinitions.d.ts +1 -24
- package/dist/providers/openai/ModelDefinitions.d.ts.map +1 -1
- package/dist/providers/openai/ModelDefinitions.js +1 -211
- package/dist/providers/openai/Models.d.ts +1 -0
- package/dist/providers/openai/Models.d.ts.map +1 -1
- package/dist/providers/openai/Models.js +46 -22
- package/dist/providers/openai/Moderation.d.ts.map +1 -1
- package/dist/providers/openai/Moderation.js +2 -1
- package/dist/providers/openai/Streaming.d.ts.map +1 -1
- package/dist/providers/openai/Streaming.js +5 -1
- package/dist/providers/openai/Transcription.d.ts.map +1 -1
- package/dist/providers/openai/Transcription.js +3 -2
- package/dist/providers/openai/index.d.ts.map +1 -1
- package/dist/providers/openai/index.js +2 -1
- package/dist/providers/openai/utils.d.ts +20 -0
- package/dist/providers/openai/utils.d.ts.map +1 -0
- package/dist/providers/openai/utils.js +25 -0
- package/dist/providers/registry.js +1 -1
- package/dist/utils/FileLoader.d.ts.map +1 -1
- package/dist/utils/FileLoader.js +1 -0
- package/package.json +1 -1
package/README.md
CHANGED
@@ -12,13 +12,13 @@ A provider-agnostic LLM core for Node.js, heavily inspired by the elegant design
 
 ## 🚀 Features
 
-- **Provider-Agnostic**: Switch between OpenAI, Anthropic, and
+- **Provider-Agnostic**: Switch between OpenAI (GPT-4o), Anthropic (Claude 3.5), and Gemini (2.0) with a single line of config.
 - **Streaming-First**: Native `AsyncIterator` support for real-time token delivery.
-- **Tool Calling**: Automatic execution loop for model-requested functions.
-- **
-- **Multi-modal & Smart Files**: Built-in support for Vision (images), Audio, and
-- **Fluent API**: Chainable methods like `.withTool()` for dynamic registration.
-- **Resilient**: Configurable retry logic
+- **Tool Calling**: Automatic execution loop for model-requested functions (OpenAI, Anthropic, Gemini).
+- **Structured Output**: Strict Zod-based JSON schema enforcement across all major providers.
+- **Multi-modal & Smart Files**: Built-in support for Vision (images), Audio, and Documents (PDFs for Claude).
+- **Fluent API**: Chainable methods like `.withTool()` and `.withSchema()` for dynamic registration.
+- **Resilient**: Configurable retry logic and detailed error handling for API outages.
 - **Type-Safe**: Written in TypeScript with full ESM support.
 
 ---
@@ -42,7 +42,7 @@ import { LLM } from "@node-llm/core";
 import "dotenv/config";
 
 LLM.configure({
-  provider: "openai", // or "gemini"
+  provider: "openai", // or "anthropic", "gemini"
   retry: { attempts: 3, delayMs: 500 },
   defaultModerationModel: "text-moderation-latest",
   defaultTranscriptionModel: "whisper-1",
@@ -66,6 +66,7 @@ console.log(response);
 console.log(response.content);
 console.log(`Model: ${response.model_id}`);
 console.log(`Tokens: ${response.input_tokens} in, ${response.output_tokens} out`);
+console.log(`Cost: $${response.cost}`);
 ```
 
 ### 3. Streaming Responses
@@ -102,9 +103,11 @@ const response = await chat.ask("Hello!");
 
 console.log(response.input_tokens); // 10
 console.log(response.output_tokens); // 5
+console.log(response.cost); // 0.000185
 
 // Access aggregated usage for the whole session
 console.log(chat.totalUsage.total_tokens);
+console.log(chat.totalUsage.cost);
 ```
 
 ### 6. Embeddings
@@ -202,6 +205,44 @@ const factual = LLM.chat("gpt-4o").withTemperature(0.2);
 const creative = LLM.chat("gpt-4o").withTemperature(0.9);
 ```
 
+### 11. Provider-Specific Parameters
+
+Access unique provider features while maintaining the unified interface. Parameters passed via `withParams()` will override any defaults set by the library.
+
+```ts
+// OpenAI: Set seed for deterministic output
+const chat = LLM.chat("gpt-4o-mini")
+  .withParams({
+    seed: 42,
+    user: "user-123",
+    presence_penalty: 0.5
+  });
+
+// Gemini: Configure safety settings and generation params
+const geminiChat = LLM.chat("gemini-2.0-flash")
+  .withParams({
+    generationConfig: { topP: 0.8, topK: 40 },
+    safetySettings: [
+      { category: "HARM_CATEGORY_HARASSMENT", threshold: "BLOCK_LOW_AND_ABOVE" }
+    ]
+  });
+
+// Anthropic: Custom headers or beta features
+const claudeChat = LLM.chat("claude-3-5-sonnet-20241022")
+  .withParams({
+    top_k: 50,
+    top_p: 0.9
+  });
+```
+
+**⚠️ Important Notes:**
+- Parameters from `withParams()` take precedence over library defaults
+- Always consult the provider's API documentation for supported parameters
+- The library passes these parameters through without validation
+- Enable debug mode to see the exact request: `process.env.NODELLM_DEBUG = "true"`
+
+See examples: [OpenAI](../../examples/openai/chat/params.mjs) | [Gemini](../../examples/gemini/chat/params.mjs)
+
 ---
 
 ## 📚 Examples
@@ -220,6 +261,7 @@ Check the [examples](../../examples) directory for focused scripts organized by
 | [Lifecycle Events](../../examples/openai/chat/events.mjs) | Hooks for specific chat events (onNewMessage, onToolCall) |
 | [Token Usage](../../examples/openai/chat/usage.mjs) | Tracking costs and token counts |
 | [Max Tokens](../../examples/openai/chat/max-tokens.mjs) | Limiting response length with `maxTokens` |
+| [Structured Output](../../examples/openai/chat/structured.mjs) | Zod-based JSON schema enforcement |
 
 #### 🖼️ Multimodal
 | Example | Description |
@@ -255,6 +297,7 @@ Check the [examples](../../examples) directory for focused scripts organized by
 | [Tool Calling](../../examples/gemini/chat/tools.mjs) | Function calling with automatic execution |
 | [Lifecycle Events](../../examples/gemini/chat/events.mjs) | Event hooks for chat interactions |
 | [Token Usage](../../examples/gemini/chat/usage.mjs) | Tracking conversation costs |
+| [Structured Output](../../examples/gemini/chat/structured.mjs) | Native JSON schema support |
 
 #### 🖼️ Multimodal
 | Example | Description |
@@ -271,8 +314,26 @@ Check the [examples](../../examples) directory for focused scripts organized by
 #### 🧠 Discovery
 | Example | Description |
 | :--- | :--- |
-| [Models & Capabilities](../../examples/gemini/discovery/models.mjs) | Listing models and
-| [Embeddings](../../examples/gemini/embeddings/create.mjs) |
+| [Models & Capabilities](../../examples/gemini/discovery/models.mjs) | Listing models and inspecting their specs |
+| [Embeddings](../../examples/gemini/embeddings/create.mjs) | Generating semantic vector embeddings |
+
+### Anthropic Examples
+
+#### 💬 Chat
+| Example | Description |
+| :--- | :--- |
+| [Basic & Streaming](../../examples/anthropic/chat/basic.mjs) | Chatting with Claude 3.5 Models |
+| [Tool Calling](../../examples/anthropic/chat/tools.mjs) | Native tool use with automatic execution |
+| [Parallel Tools](../../examples/anthropic/chat/parallel-tools.mjs) | Handling multiple tool requests in one turn |
+| [Token Usage](../../examples/anthropic/chat/usage.mjs) | Tracking Claude-specific token metrics |
+| [Structured Output](../../examples/anthropic/chat/structured.mjs) | Prompt-based JSON schema enforcement |
+
+#### 🖼️ Multimodal
+| Example | Description |
+| :--- | :--- |
+| [Vision Analysis](../../examples/anthropic/multimodal/vision.mjs) | Analyzing images with Claude Vision |
+| [PDF Analysis](../../examples/anthropic/multimodal/pdf.mjs) | Native PDF document processing |
+| [File Context](../../examples/anthropic/multimodal/files.mjs) | Passing local file contents to Claude |
 
 
 To run an example:
@@ -316,7 +377,7 @@ Ensure the AI returns data exactly matching a specific structure. Supports stric
 **Using Zod (Recommended):**
 
 ```ts
-import { z } from "
+import { LLM, z } from "@node-llm/core";
 
 const personSchema = z.object({
   name: z.string(),
@@ -418,15 +479,13 @@ chat.withRequestOptions({
 
 Get up-to-date information about context windows, pricing, and capabilities directly from the Parsera API.
 
-```javascript
-// Refresh model information from the API
-await LLM.models.refresh();
-
 // Use the data programmatically
 const model = LLM.models.find("gpt-4o-mini");
-
-console.log(model.
-console.log(model.
+if (model) {
+  console.log(model.context_window); // => 128000
+  console.log(model.capabilities); // => ["function_calling", "structured_output", "streaming", "batch", "json_mode"]
+  console.log(model.pricing.text_tokens.standard.input_per_million); // => 0.15
+}
 ```
 
 ---
@@ -437,7 +496,7 @@ console.log(model.pricing.text_tokens.standard.input_per_million); // => 0.15
 | :--- | :--- | :--- |
 | **OpenAI** | ✅ Supported | Chat, Streaming, Tools, Vision, Audio, Images, Transcription, Moderation |
 | **Gemini** | ✅ Supported | Chat, Streaming, Tools, Vision, Audio, Video, Embeddings, Transcription |
-| **Anthropic** |
+| **Anthropic** | ✅ Supported | Chat, Streaming, Tools, Vision, PDF Support, Structured Output |
 | **Azure OpenAI** | 🏗️ Roadmap | Coming soon |
 
 ---
@@ -451,19 +510,22 @@ console.log(model.pricing.text_tokens.standard.input_per_million); // => 0.15
 
 ---
 
-
-
-`node-llm` uses VCR-style testing (via Polly.js) for robust, deterministic integration tests. This allows us to record real LLM provider interactions once and replay them during tests without making actual API calls.
+`node-llm` features a comprehensive test suite including high-level integration tests and granular unit tests.
 
-- **
+- **Unit Tests**: Test core logic and provider handlers in isolation without hitting any APIs.
   ```bash
-  npm test
+  npm run test:unit
   ```
 
-- **
-
-
-
+- **Integration Tests (VCR)**: Uses Polly.js to record and replay real LLM interactions.
+  - **Replay Mode (Default)**: Runs against recorded cassettes. Fast and requires no API keys.
+    ```bash
+    npm run test:integration
+    ```
+  - **Record Mode**: Update cassettes by hitting real APIs (requires API keys).
+    ```bash
+    VCR_MODE=record npm run test:integration
+    ```
 
 *All recordings are automatically scrubbed of sensitive data (API keys, org IDs) before being saved to disk.*
 
package/dist/chat/Chat.d.ts
CHANGED
@@ -74,6 +74,11 @@ export declare class Chat {
         headers?: Record<string, string>;
         responseFormat?: any;
     }): this;
+    /**
+     * Set provider-specific parameters.
+     * These will be merged into the final request payload.
+     */
+    withParams(params: Record<string, any>): this;
     /**
      * Enforce a specific schema for the output.
      * Can accept a Schema object or a Zod schema/JSON Schema directly.
@@ -86,7 +91,7 @@ export declare class Chat {
     /**
      * Ask the model a question
      */
-    ask(content: string, options?: AskOptions): Promise<ChatResponseString>;
+    ask(content: string | any[], options?: AskOptions): Promise<ChatResponseString>;
     /**
      * Streams the model's response to a user question.
      */
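Taken together, the widened `ask(content: string | any[], ...)` signature and the new `withParams()` method compose as in the following sketch. This is illustrative only: the exact content-part shape accepted by the array form is not shown in this diff, so the parts below are assumptions.

```ts
import { LLM } from "@node-llm/core";

// withParams() values are spread into the request payload last, so they
// override library defaults (see the ...this.options.params line added in Chat.js below).
const chat = LLM.chat("gpt-4o-mini").withParams({ seed: 42 });

// ask() now also accepts an array of content parts instead of a plain string.
const res = await chat.ask([
  { type: "text", text: "Describe this image." },                            // part shape assumed
  { type: "image_url", image_url: { url: "https://example.com/cat.png" } },  // part shape assumed
]);
console.log(res.content);
```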
package/dist/chat/Chat.d.ts.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"Chat.d.ts","sourceRoot":"","sources":["../../src/chat/Chat.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,OAAO,EAAE,MAAM,cAAc,CAAC;AACvC,OAAO,EAAE,WAAW,EAAE,MAAM,kBAAkB,CAAC;AAC/C,OAAO,EAAE,QAAQ,EAAE,KAAK,EAAE,MAAM,0BAA0B,CAAC;AAI3D,OAAO,EAAE,IAAI,EAAE,MAAM,WAAW,CAAC;AACjC,OAAO,EAAE,MAAM,EAAE,MAAM,qBAAqB,CAAC;AAE7C,OAAO,EAAE,CAAC,EAAE,MAAM,KAAK,CAAC;AAExB,MAAM,WAAW,UAAU;IACzB,MAAM,CAAC,EAAE,MAAM,EAAE,CAAC;IAClB,KAAK,CAAC,EAAE,MAAM,EAAE,CAAC;IACjB,WAAW,CAAC,EAAE,MAAM,CAAC;IACrB,SAAS,CAAC,EAAE,MAAM,CAAC;IACnB,OAAO,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;CAClC;AAED,OAAO,EAAE,kBAAkB,EAAE,MAAM,mBAAmB,CAAC;AAEvD,qBAAa,IAAI;IAKb,OAAO,CAAC,QAAQ,CAAC,QAAQ;IACzB,OAAO,CAAC,KAAK;IACb,OAAO,CAAC,QAAQ,CAAC,OAAO;IAN1B,OAAO,CAAC,QAAQ,CAAiB;IACjC,OAAO,CAAC,QAAQ,CAAW;gBAGR,QAAQ,EAAE,QAAQ,EAC3B,KAAK,EAAE,MAAM,EACJ,OAAO,GAAE,WAAgB;IAmB5C;;OAEG;IACH,IAAI,OAAO,IAAI,SAAS,OAAO,EAAE,CAEhC;IAED;;OAEG;IACH,IAAI,UAAU,IAAI,KAAK,
+
{"version":3,"file":"Chat.d.ts","sourceRoot":"","sources":["../../src/chat/Chat.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,OAAO,EAAE,MAAM,cAAc,CAAC;AACvC,OAAO,EAAE,WAAW,EAAE,MAAM,kBAAkB,CAAC;AAC/C,OAAO,EAAE,QAAQ,EAAE,KAAK,EAAE,MAAM,0BAA0B,CAAC;AAI3D,OAAO,EAAE,IAAI,EAAE,MAAM,WAAW,CAAC;AACjC,OAAO,EAAE,MAAM,EAAE,MAAM,qBAAqB,CAAC;AAE7C,OAAO,EAAE,CAAC,EAAE,MAAM,KAAK,CAAC;AAExB,MAAM,WAAW,UAAU;IACzB,MAAM,CAAC,EAAE,MAAM,EAAE,CAAC;IAClB,KAAK,CAAC,EAAE,MAAM,EAAE,CAAC;IACjB,WAAW,CAAC,EAAE,MAAM,CAAC;IACrB,SAAS,CAAC,EAAE,MAAM,CAAC;IACnB,OAAO,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;CAClC;AAED,OAAO,EAAE,kBAAkB,EAAE,MAAM,mBAAmB,CAAC;AAEvD,qBAAa,IAAI;IAKb,OAAO,CAAC,QAAQ,CAAC,QAAQ;IACzB,OAAO,CAAC,KAAK;IACb,OAAO,CAAC,QAAQ,CAAC,OAAO;IAN1B,OAAO,CAAC,QAAQ,CAAiB;IACjC,OAAO,CAAC,QAAQ,CAAW;gBAGR,QAAQ,EAAE,QAAQ,EAC3B,KAAK,EAAE,MAAM,EACJ,OAAO,GAAE,WAAgB;IAmB5C;;OAEG;IACH,IAAI,OAAO,IAAI,SAAS,OAAO,EAAE,CAEhC;IAED;;OAEG;IACH,IAAI,UAAU,IAAI,KAAK,CAetB;IAED;;;OAGG;IACH,QAAQ,CAAC,IAAI,EAAE,GAAG,GAAG,IAAI;IAIzB;;;;;;;OAOG;IACH,SAAS,CAAC,KAAK,EAAE,CAAC,IAAI,GAAG,GAAG,CAAC,EAAE,EAAE,OAAO,CAAC,EAAE;QAAE,OAAO,CAAC,EAAE,OAAO,CAAA;KAAE,GAAG,IAAI;IA2BvE;;;;OAIG;IACH,gBAAgB,CAAC,WAAW,EAAE,MAAM,EAAE,OAAO,CAAC,EAAE;QAAE,OAAO,CAAC,EAAE,OAAO,CAAA;KAAE,GAAG,IAAI;IAmB5E;;OAEG;IACH,gBAAgB,CAAC,WAAW,EAAE,MAAM,EAAE,OAAO,CAAC,EAAE;QAAE,OAAO,CAAC,EAAE,OAAO,CAAA;KAAE,GAAG,IAAI;IAI5E;;;OAGG;IACH,eAAe,CAAC,IAAI,EAAE,MAAM,GAAG,IAAI;IAKnC;;OAEG;IACH,SAAS,CAAC,KAAK,EAAE,MAAM,GAAG,IAAI;IAK9B;;;OAGG;IACH,kBAAkB,CAAC,OAAO,EAAE;QAAE,OAAO,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QAAC,cAAc,CAAC,EAAE,GAAG,CAAA;KAAE,GAAG,IAAI;IAU7F;;;OAGG;IACH,UAAU,CAAC,MAAM,EAAE,MAAM,CAAC,MAAM,EAAE,GAAG,CAAC,GAAG,IAAI;IAK7C;;;OAGG;IACH,UAAU,CAAC,MAAM,EAAE,MAAM,GAAG,CAAC,CAAC,OAAO,CAAC,GAAG,CAAC,GAAG,MAAM,CAAC,MAAM,EAAE,GAAG,CAAC,GAAG,IAAI,GAAG,IAAI;IAkB9E,YAAY,CAAC,OAAO,EAAE,MAAM,IAAI,GAAG,IAAI;IAKvC,YAAY,CAAC,OAAO,EAAE,CAAC,OAAO,EAAE,kBAAkB,KAAK,IAAI,GAAG,IAAI;IAKlE,UAAU,CAAC,OAAO,EAAE,CAAC,QAAQ,EAAE,GAAG,KAAK,IAAI,GAAG,IAAI;IAKlD,YAAY,CAAC,OAAO,EAAE,CAAC,MAAM,EAAE,GAAG,KAAK,IAAI,GAAG,IAAI;IAKlD;;OAEG;IACG,GAAG,CAAC,OAAO,EAAE,MAAM,GAAG,GAAG,EAAE,EAAE,OAAO,CAAC,EAAE,UAAU,GAAG,OAAO,CAAC,kBAAkB,CAAC;IA4LrF;;OAEG;IACI,MAAM,CAAC,OAAO,EAAE,MAAM;CAI9B"}
package/dist/chat/Chat.js
CHANGED
@@ -43,9 +43,11 @@ export class Chat {
             acc.output_tokens += msg.usage.output_tokens;
             acc.total_tokens += msg.usage.total_tokens;
             acc.cached_tokens = (acc.cached_tokens ?? 0) + (msg.usage.cached_tokens ?? 0);
+            acc.cache_creation_tokens = (acc.cache_creation_tokens ?? 0) + (msg.usage.cache_creation_tokens ?? 0);
+            acc.cost = (acc.cost ?? 0) + (msg.usage.cost ?? 0);
         }
         return acc;
-    }, { input_tokens: 0, output_tokens: 0, total_tokens: 0, cached_tokens: 0 });
+    }, { input_tokens: 0, output_tokens: 0, total_tokens: 0, cached_tokens: 0, cache_creation_tokens: 0, cost: 0 });
     }
     /**
      * Add a tool to the chat session (fluent API)
@@ -144,6 +146,14 @@ export class Chat {
         }
         return this;
     }
+    /**
+     * Set provider-specific parameters.
+     * These will be merged into the final request payload.
+     */
+    withParams(params) {
+        this.options.params = { ...this.options.params, ...params };
+        return this;
+    }
     /**
      * Enforce a specific schema for the output.
      * Can accept a Schema object or a Zod schema/JSON Schema directly.
@@ -190,9 +200,12 @@ export class Chat {
         if (files.length > 0) {
             const processedFiles = await Promise.all(files.map(f => FileLoader.load(f)));
             const hasBinary = processedFiles.some(p => p.type === "image_url" || p.type === "input_audio" || p.type === "video_url");
-            if (hasBinary && this.provider.capabilities && !this.provider.capabilities.supportsVision(this.model)) {
+            if (hasBinary && !this.options.assumeModelExists && this.provider.capabilities && !this.provider.capabilities.supportsVision(this.model)) {
                 throw new Error(`Model ${this.model} does not support vision/binary files.`);
             }
+            if (hasBinary && this.options.assumeModelExists) {
+                console.warn(`[NodeLLM] Skipping vision capability validation for model ${this.model}`);
+            }
             // Separate text files from binary files
             const textFiles = processedFiles.filter(p => p.type === "text");
             const binaryFiles = processedFiles.filter(p => p.type !== "text");
@@ -214,9 +227,12 @@ export class Chat {
             }
         }
         if (this.options.tools && this.options.tools.length > 0) {
-            if (this.provider.capabilities && !this.provider.capabilities.supportsTools(this.model)) {
+            if (!this.options.assumeModelExists && this.provider.capabilities && !this.provider.capabilities.supportsTools(this.model)) {
                 throw new Error(`Model ${this.model} does not support tool calling.`);
             }
+            if (this.options.assumeModelExists) {
+                console.warn(`[NodeLLM] Skipping tool capability validation for model ${this.model}`);
+            }
         }
         this.messages.push({
             role: "user",
@@ -225,9 +241,12 @@ export class Chat {
         // Process Schema/Structured Output
         let responseFormat = this.options.responseFormat;
         if (this.options.schema) {
-            if (this.provider.capabilities && !this.provider.capabilities.supportsStructuredOutput(this.model)) {
+            if (!this.options.assumeModelExists && this.provider.capabilities && !this.provider.capabilities.supportsStructuredOutput(this.model)) {
                 throw new Error(`Model ${this.model} does not support structured output.`);
             }
+            if (this.options.assumeModelExists) {
+                console.warn(`[NodeLLM] Skipping structured output capability validation for model ${this.model}`);
+            }
             const jsonSchema = toJsonSchema(this.options.schema.definition.schema);
             responseFormat = {
                 type: "json_schema",
@@ -247,6 +266,7 @@ export class Chat {
             max_tokens: options?.maxTokens ?? this.options.maxTokens,
             headers: { ...this.options.headers, ...options?.headers },
             response_format: responseFormat, // Pass to provider
+            ...this.options.params,
         };
         let totalUsage = { input_tokens: 0, output_tokens: 0, total_tokens: 0 };
         const trackUsage = (u) => {
@@ -257,6 +277,9 @@ export class Chat {
             if (u.cached_tokens) {
                 totalUsage.cached_tokens = (totalUsage.cached_tokens ?? 0) + u.cached_tokens;
             }
+            if (u.cost !== undefined) {
+                totalUsage.cost = (totalUsage.cost ?? 0) + u.cost;
+            }
         }
     };
     // First round
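The `assumeModelExists` checks added above turn hard capability errors into warnings. A minimal sketch of how the flag might be used (Chat.js reads `this.options.assumeModelExists`; the exact way the option is passed into the chat is assumed here, and the model name is hypothetical):

```ts
import { LLM, z } from "@node-llm/core";

// Hypothetical custom/unlisted model; flag assumed to be settable via chat options.
const chat = LLM.chat("my-custom-finetune", { assumeModelExists: true })
  .withSchema(z.object({ answer: z.string() }));

// With the flag set, Chat.js logs
// "[NodeLLM] Skipping structured output capability validation for model my-custom-finetune"
// instead of throwing "Model ... does not support structured output.".
const res = await chat.ask("Answer in JSON.");
```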
package/dist/chat/ChatOptions.d.ts.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"ChatOptions.d.ts","sourceRoot":"","sources":["../../src/chat/ChatOptions.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,OAAO,EAAE,MAAM,cAAc,CAAC;AACvC,OAAO,EAAE,IAAI,EAAE,MAAM,WAAW,CAAC;AACjC,OAAO,EAAE,MAAM,EAAE,MAAM,qBAAqB,CAAC;AAE7C,MAAM,WAAW,WAAW;IAC1B,YAAY,CAAC,EAAE,MAAM,CAAC;IACtB,QAAQ,CAAC,EAAE,OAAO,EAAE,CAAC;IACrB,KAAK,CAAC,EAAE,IAAI,EAAE,CAAC;IACf,WAAW,CAAC,EAAE,MAAM,CAAC;IACrB,SAAS,CAAC,EAAE,MAAM,CAAC;IACnB,YAAY,CAAC,EAAE,MAAM,IAAI,CAAC;IAC1B,YAAY,CAAC,EAAE,CAAC,OAAO,EAAE,GAAG,KAAK,IAAI,CAAC;IACtC,UAAU,CAAC,EAAE,CAAC,QAAQ,EAAE,GAAG,KAAK,IAAI,CAAC;IACrC,YAAY,CAAC,EAAE,CAAC,MAAM,EAAE,GAAG,KAAK,IAAI,CAAC;IACrC,OAAO,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;IACjC,MAAM,CAAC,EAAE,MAAM,CAAC;IAChB,cAAc,CAAC,EAAE;QAAE,IAAI,EAAE,aAAa,GAAG,MAAM,CAAA;KAAE,CAAC;
+
{"version":3,"file":"ChatOptions.d.ts","sourceRoot":"","sources":["../../src/chat/ChatOptions.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,OAAO,EAAE,MAAM,cAAc,CAAC;AACvC,OAAO,EAAE,IAAI,EAAE,MAAM,WAAW,CAAC;AACjC,OAAO,EAAE,MAAM,EAAE,MAAM,qBAAqB,CAAC;AAE7C,MAAM,WAAW,WAAW;IAC1B,YAAY,CAAC,EAAE,MAAM,CAAC;IACtB,QAAQ,CAAC,EAAE,OAAO,EAAE,CAAC;IACrB,KAAK,CAAC,EAAE,IAAI,EAAE,CAAC;IACf,WAAW,CAAC,EAAE,MAAM,CAAC;IACrB,SAAS,CAAC,EAAE,MAAM,CAAC;IACnB,YAAY,CAAC,EAAE,MAAM,IAAI,CAAC;IAC1B,YAAY,CAAC,EAAE,CAAC,OAAO,EAAE,GAAG,KAAK,IAAI,CAAC;IACtC,UAAU,CAAC,EAAE,CAAC,QAAQ,EAAE,GAAG,KAAK,IAAI,CAAC;IACrC,YAAY,CAAC,EAAE,CAAC,MAAM,EAAE,GAAG,KAAK,IAAI,CAAC;IACrC,OAAO,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;IACjC,MAAM,CAAC,EAAE,MAAM,CAAC;IAChB,cAAc,CAAC,EAAE;QAAE,IAAI,EAAE,aAAa,GAAG,MAAM,CAAA;KAAE,CAAC;IAClD,MAAM,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,GAAG,CAAC,CAAC;IAC7B,iBAAiB,CAAC,EAAE,OAAO,CAAC;IAC5B,QAAQ,CAAC,EAAE,MAAM,CAAC;CACnB"}
package/dist/chat/ChatResponse.d.ts
CHANGED
@@ -11,6 +11,9 @@ export declare class ChatResponseString extends String {
     get output_tokens(): number;
     get total_tokens(): number;
     get cached_tokens(): number | undefined;
+    get cost(): number | undefined;
+    get input_cost(): number | undefined;
+    get output_cost(): number | undefined;
     get content(): string;
     get model_id(): string;
     toString(): string;
package/dist/chat/ChatResponse.d.ts.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"ChatResponse.d.ts","sourceRoot":"","sources":["../../src/chat/ChatResponse.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,KAAK,EAAE,MAAM,0BAA0B,CAAC;AAEjD;;;GAGG;AACH,qBAAa,kBAAmB,SAAQ,MAAM;aAG1B,KAAK,EAAE,KAAK;aACZ,KAAK,EAAE,MAAM;gBAF7B,OAAO,EAAE,MAAM,EACC,KAAK,EAAE,KAAK,EACZ,KAAK,EAAE,MAAM;IAK/B,IAAI,YAAY,WAAsC;IACtD,IAAI,aAAa,WAAuC;IACxD,IAAI,YAAY,WAAsC;IACtD,IAAI,aAAa,uBAAuC;
+
{"version":3,"file":"ChatResponse.d.ts","sourceRoot":"","sources":["../../src/chat/ChatResponse.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,KAAK,EAAE,MAAM,0BAA0B,CAAC;AAEjD;;;GAGG;AACH,qBAAa,kBAAmB,SAAQ,MAAM;aAG1B,KAAK,EAAE,KAAK;aACZ,KAAK,EAAE,MAAM;gBAF7B,OAAO,EAAE,MAAM,EACC,KAAK,EAAE,KAAK,EACZ,KAAK,EAAE,MAAM;IAK/B,IAAI,YAAY,WAAsC;IACtD,IAAI,aAAa,WAAuC;IACxD,IAAI,YAAY,WAAsC;IACtD,IAAI,aAAa,uBAAuC;IACxD,IAAI,IAAI,uBAA8B;IACtC,IAAI,UAAU,uBAAoC;IAClD,IAAI,WAAW,uBAAqC;IAEpD,IAAI,OAAO,IAAI,MAAM,CAEpB;IAED,IAAI,QAAQ,IAAI,MAAM,CAErB;IAED,QAAQ;IAIR;;;OAGG;IACH,IAAI,MAAM,IAAI,GAAG,CAMhB;CACF"}
package/dist/chat/ChatResponse.js
CHANGED
@@ -14,6 +14,9 @@ export class ChatResponseString extends String {
     get output_tokens() { return this.usage.output_tokens; }
     get total_tokens() { return this.usage.total_tokens; }
     get cached_tokens() { return this.usage.cached_tokens; }
+    get cost() { return this.usage.cost; }
+    get input_cost() { return this.usage.input_cost; }
+    get output_cost() { return this.usage.output_cost; }
     get content() {
         return this.valueOf();
     }
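The new getters surface per-response cost alongside token counts. A small usage sketch (cost values depend on the pricing data bundled in the model registry, so they may be undefined for unknown models):

```ts
import { LLM } from "@node-llm/core";

const res = await LLM.chat("gpt-4o-mini").ask("Hello!");

console.log(res.input_tokens, res.output_tokens, res.total_tokens);
console.log(res.input_cost);  // may be undefined when pricing is unknown
console.log(res.output_cost);
console.log(res.cost);
```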
package/dist/index.d.ts
CHANGED
@@ -6,10 +6,12 @@ export type { Role } from "./chat/Role.js";
 export type { ChatOptions } from "./chat/ChatOptions.js";
 export type { Tool, ToolCall } from "./chat/Tool.js";
 export type { MessageContent, ContentPart } from "./chat/Content.js";
+export { z } from "zod";
 export { LLM, Transcription, Moderation, Embedding } from "./llm.js";
 export { providerRegistry } from "./providers/registry.js";
 export { OpenAIProvider } from "./providers/openai/OpenAIProvider.js";
 export { registerOpenAIProvider } from "./providers/openai/index.js";
+export { registerAnthropicProvider } from "./providers/anthropic/index.js";
 export type { ImageRequest, ImageResponse } from "./providers/Provider.js";
 export * from "./errors/index.js";
 //# sourceMappingURL=index.d.ts.map
package/dist/index.d.ts.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,IAAI,EAAE,MAAM,gBAAgB,CAAC;AACtC,OAAO,EAAE,MAAM,EAAE,MAAM,kBAAkB,CAAC;AAC1C,OAAO,EAAE,cAAc,EAAE,MAAM,2BAA2B,CAAC;AAC3D,YAAY,EAAE,OAAO,EAAE,MAAM,mBAAmB,CAAC;AACjD,YAAY,EAAE,IAAI,EAAE,MAAM,gBAAgB,CAAC;AAC3C,YAAY,EAAE,WAAW,EAAE,MAAM,uBAAuB,CAAC;AACzD,YAAY,EAAE,IAAI,EAAE,QAAQ,EAAE,MAAM,gBAAgB,CAAC;AACrD,YAAY,EAAE,cAAc,EAAE,WAAW,EAAE,MAAM,mBAAmB,CAAC;AAErE,OAAO,EAAE,GAAG,EAAE,aAAa,EAAE,UAAU,EAAE,SAAS,EAAE,MAAM,UAAU,CAAC;AACrE,OAAO,EAAE,gBAAgB,EAAE,MAAM,yBAAyB,CAAC;AAE3D,OAAO,EAAE,cAAc,EAAE,MAAM,sCAAsC,CAAC;AACtE,OAAO,EAAE,sBAAsB,EAAE,MAAM,6BAA6B,CAAC;AACrE,YAAY,EAAE,YAAY,EAAE,aAAa,EAAE,MAAM,yBAAyB,CAAC;AAC3E,cAAc,mBAAmB,CAAC"}
+
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,IAAI,EAAE,MAAM,gBAAgB,CAAC;AACtC,OAAO,EAAE,MAAM,EAAE,MAAM,kBAAkB,CAAC;AAC1C,OAAO,EAAE,cAAc,EAAE,MAAM,2BAA2B,CAAC;AAC3D,YAAY,EAAE,OAAO,EAAE,MAAM,mBAAmB,CAAC;AACjD,YAAY,EAAE,IAAI,EAAE,MAAM,gBAAgB,CAAC;AAC3C,YAAY,EAAE,WAAW,EAAE,MAAM,uBAAuB,CAAC;AACzD,YAAY,EAAE,IAAI,EAAE,QAAQ,EAAE,MAAM,gBAAgB,CAAC;AACrD,YAAY,EAAE,cAAc,EAAE,WAAW,EAAE,MAAM,mBAAmB,CAAC;AAErE,OAAO,EAAE,CAAC,EAAE,MAAM,KAAK,CAAC;AACxB,OAAO,EAAE,GAAG,EAAE,aAAa,EAAE,UAAU,EAAE,SAAS,EAAE,MAAM,UAAU,CAAC;AACrE,OAAO,EAAE,gBAAgB,EAAE,MAAM,yBAAyB,CAAC;AAE3D,OAAO,EAAE,cAAc,EAAE,MAAM,sCAAsC,CAAC;AACtE,OAAO,EAAE,sBAAsB,EAAE,MAAM,6BAA6B,CAAC;AACrE,OAAO,EAAE,yBAAyB,EAAE,MAAM,gCAAgC,CAAC;AAC3E,YAAY,EAAE,YAAY,EAAE,aAAa,EAAE,MAAM,yBAAyB,CAAC;AAC3E,cAAc,mBAAmB,CAAC"}
package/dist/index.js
CHANGED
@@ -1,8 +1,10 @@
 export { Chat } from "./chat/Chat.js";
 export { Stream } from "./chat/Stream.js";
 export { GeneratedImage } from "./image/GeneratedImage.js";
+export { z } from "zod";
 export { LLM, Transcription, Moderation, Embedding } from "./llm.js";
 export { providerRegistry } from "./providers/registry.js";
 export { OpenAIProvider } from "./providers/openai/OpenAIProvider.js";
 export { registerOpenAIProvider } from "./providers/openai/index.js";
+export { registerAnthropicProvider } from "./providers/anthropic/index.js";
 export * from "./errors/index.js";
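With the new entry-point exports, the Zod instance the library uses and the Anthropic registration helper can be imported directly from the package. A minimal sketch:

```ts
import { LLM, z, registerAnthropicProvider } from "@node-llm/core";

// Explicit registration is optional here; LLM.configure({ provider: "anthropic" })
// calls registerAnthropicProvider() itself (see llm.js below).
registerAnthropicProvider();
LLM.configure({ provider: "anthropic" });

const schema = z.object({ city: z.string(), country: z.string() });
const res = await LLM.chat("claude-3-5-sonnet-20241022")
  .withSchema(schema)
  .ask("Where is the Louvre?");
console.log(res.content);
```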
package/dist/llm.d.ts
CHANGED
@@ -24,7 +24,7 @@ type LLMConfig = {
     defaultEmbeddingModel?: string;
 };
 declare class LLMCore {
-    readonly models: ModelRegistry;
+    readonly models: typeof ModelRegistry;
     private provider?;
     private defaultTranscriptionModelId?;
     private defaultModerationModelId?;
@@ -38,6 +38,7 @@ declare class LLMCore {
         model?: string;
         size?: string;
         quality?: string;
+        assumeModelExists?: boolean;
     }): Promise<GeneratedImage>;
     transcribe(file: string, options?: {
         model?: string;
@@ -45,6 +46,7 @@ declare class LLMCore {
         language?: string;
         speakerNames?: string[];
         speakerReferences?: string[];
+        assumeModelExists?: boolean;
     }): Promise<Transcription>;
     get defaultTranscriptionModel(): string | undefined;
     get defaultModerationModel(): string | undefined;
@@ -52,10 +54,12 @@ declare class LLMCore {
     getRetryConfig(): Required<RetryOptions>;
     moderate(input: string | string[], options?: {
         model?: string;
+        assumeModelExists?: boolean;
     }): Promise<Moderation>;
     embed(input: string | string[], options?: {
         model?: string;
         dimensions?: number;
+        assumeModelExists?: boolean;
     }): Promise<Embedding>;
 }
 export { Transcription, Moderation, Embedding };
package/dist/llm.d.ts.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"llm.d.ts","sourceRoot":"","sources":["../src/llm.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,IAAI,EAAE,MAAM,gBAAgB,CAAC;AACtC,OAAO,EAAE,WAAW,EAAE,MAAM,uBAAuB,CAAC;AACpD,OAAO,EACL,QAAQ,EACR,SAAS,EAKV,MAAM,yBAAyB,CAAC;
+
{"version":3,"file":"llm.d.ts","sourceRoot":"","sources":["../src/llm.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,IAAI,EAAE,MAAM,gBAAgB,CAAC;AACtC,OAAO,EAAE,WAAW,EAAE,MAAM,uBAAuB,CAAC;AACpD,OAAO,EACL,QAAQ,EACR,SAAS,EAKV,MAAM,yBAAyB,CAAC;AAKjC,OAAO,EAAE,cAAc,EAAE,MAAM,2BAA2B,CAAC;AAC3D,OAAO,EAAE,aAAa,EAAE,MAAM,2BAA2B,CAAC;AAC1D,OAAO,EAAE,aAAa,EAAE,MAAM,kCAAkC,CAAC;AACjE,OAAO,EAAE,UAAU,EAAE,MAAM,4BAA4B,CAAC;AACxD,OAAO,EAAE,SAAS,EAAE,MAAM,0BAA0B,CAAC;AAIrD,MAAM,WAAW,YAAY;IAC3B,QAAQ,CAAC,EAAE,MAAM,CAAC;IAClB,OAAO,CAAC,EAAE,MAAM,CAAC;CAClB;AAED,KAAK,SAAS,GACV;IAAE,QAAQ,EAAE,QAAQ,CAAC;IAAC,KAAK,CAAC,EAAE,YAAY,CAAC;IAAC,yBAAyB,CAAC,EAAE,MAAM,CAAC;IAAC,sBAAsB,CAAC,EAAE,MAAM,CAAC;IAAC,qBAAqB,CAAC,EAAE,MAAM,CAAA;CAAE,GACjJ;IAAE,QAAQ,EAAE,MAAM,CAAC;IAAC,KAAK,CAAC,EAAE,YAAY,CAAC;IAAC,yBAAyB,CAAC,EAAE,MAAM,CAAC;IAAC,sBAAsB,CAAC,EAAE,MAAM,CAAC;IAAC,qBAAqB,CAAC,EAAE,MAAM,CAAA;CAAE,CAAC;AAEpJ,cAAM,OAAO;IACX,SAAgB,MAAM,uBAAiB;IACvC,OAAO,CAAC,QAAQ,CAAC,CAAW;IAC5B,OAAO,CAAC,2BAA2B,CAAC,CAAS;IAC7C,OAAO,CAAC,wBAAwB,CAAC,CAAS;IAC1C,OAAO,CAAC,uBAAuB,CAAC,CAAS;IAEzC,OAAO,CAAC,KAAK,CAGX;IAEF,SAAS,CAAC,MAAM,EAAE,SAAS;IAuC3B,OAAO,CAAC,qBAAqB;IAU7B,IAAI,CAAC,KAAK,EAAE,MAAM,EAAE,OAAO,CAAC,EAAE,WAAW,GAAG,IAAI;IAQ1C,UAAU,IAAI,OAAO,CAAC,SAAS,EAAE,CAAC;IAKlC,KAAK,CAAC,MAAM,EAAE,MAAM,EAAE,OAAO,CAAC,EAAE;QAAE,KAAK,CAAC,EAAE,MAAM,CAAC;QAAC,IAAI,CAAC,EAAE,MAAM,CAAC;QAAC,OAAO,CAAC,EAAE,MAAM,CAAC;QAAC,iBAAiB,CAAC,EAAE,OAAO,CAAA;KAAE,GAAG,OAAO,CAAC,cAAc,CAAC;IAkB1I,UAAU,CACd,IAAI,EAAE,MAAM,EACZ,OAAO,CAAC,EAAE;QACR,KAAK,CAAC,EAAE,MAAM,CAAC;QACf,MAAM,CAAC,EAAE,MAAM,CAAC;QAChB,QAAQ,CAAC,EAAE,MAAM,CAAC;QAClB,YAAY,CAAC,EAAE,MAAM,EAAE,CAAC;QACxB,iBAAiB,CAAC,EAAE,MAAM,EAAE,CAAC;QAC7B,iBAAiB,CAAC,EAAE,OAAO,CAAC;KAC7B,GACA,OAAO,CAAC,aAAa,CAAC;IAmBzB,IAAI,yBAAyB,IAAI,MAAM,GAAG,SAAS,CAElD;IAED,IAAI,sBAAsB,IAAI,MAAM,GAAG,SAAS,CAE/C;IAED,IAAI,qBAAqB,IAAI,MAAM,GAAG,SAAS,CAE9C;IAED,cAAc;IAIR,QAAQ,CAAC,KAAK,EAAE,MAAM,GAAG,MAAM,EAAE,EAAE,OAAO,CAAC,EAAE;QAAE,KAAK,CAAC,EAAE,MAAM,CAAC;QAAC,iBAAiB,CAAC,EAAE,OAAO,CAAA;KAAE,GAAG,OAAO,CAAC,UAAU,CAAC;IAmBlH,KAAK,CACT,KAAK,EAAE,MAAM,GAAG,MAAM,EAAE,EACxB,OAAO,CAAC,EAAE;QAAE,KAAK,CAAC,EAAE,MAAM,CAAC;QAAC,UAAU,CAAC,EAAE,MAAM,CAAC;QAAC,iBAAiB,CAAC,EAAE,OAAO,CAAA;KAAE,GAC7E,OAAO,CAAC,SAAS,CAAC;CAoBtB;AAED,OAAO,EAAE,aAAa,EAAE,UAAU,EAAE,SAAS,EAAE,CAAC;AAEhD,eAAO,MAAM,GAAG,SAAgB,CAAC"}
package/dist/llm.js
CHANGED
@@ -2,13 +2,14 @@ import { Chat } from "./chat/Chat.js";
 import { providerRegistry } from "./providers/registry.js";
 import { ensureOpenAIRegistered } from "./providers/openai/index.js";
 import { registerGeminiProvider } from "./providers/gemini/index.js";
+import { registerAnthropicProvider } from "./providers/anthropic/index.js";
 import { GeneratedImage } from "./image/GeneratedImage.js";
-import {
+import { ModelRegistry } from "./models/ModelRegistry.js";
 import { Transcription } from "./transcription/Transcription.js";
 import { Moderation } from "./moderation/Moderation.js";
 import { Embedding } from "./embedding/Embedding.js";
 class LLMCore {
-    models =
+    models = ModelRegistry;
     provider;
     defaultTranscriptionModelId;
     defaultModerationModelId;
@@ -40,6 +41,9 @@ class LLMCore {
             if (config.provider === "gemini") {
                 registerGeminiProvider();
             }
+            if (config.provider === "anthropic") {
+                registerAnthropicProvider();
+            }
             this.provider = providerRegistry.resolve(config.provider);
         }
         else {
@@ -68,7 +72,10 @@ class LLMCore {
     async paint(prompt, options) {
         const provider = this.ensureProviderSupport("paint");
         const model = options?.model;
-        if (
+        if (options?.assumeModelExists) {
+            console.warn(`[NodeLLM] Skipping validation for model ${model}`);
+        }
+        else if (model && provider.capabilities && !provider.capabilities.supportsImageGeneration(model)) {
             throw new Error(`Model ${model} does not support image generation.`);
         }
         const response = await provider.paint({
@@ -80,7 +87,10 @@ class LLMCore {
     async transcribe(file, options) {
         const provider = this.ensureProviderSupport("transcribe");
         const model = options?.model || this.defaultTranscriptionModelId;
-        if (
+        if (options?.assumeModelExists) {
+            console.warn(`[NodeLLM] Skipping validation for model ${model}`);
+        }
+        else if (model && provider.capabilities && !provider.capabilities.supportsTranscription(model)) {
             throw new Error(`Model ${model} does not support transcription.`);
         }
         const response = await provider.transcribe({
@@ -105,7 +115,10 @@ class LLMCore {
     async moderate(input, options) {
         const provider = this.ensureProviderSupport("moderate");
         const model = options?.model || this.defaultModerationModelId;
-        if (
+        if (options?.assumeModelExists) {
+            console.warn(`[NodeLLM] Skipping validation for model ${model}`);
+        }
+        else if (model && provider.capabilities && !provider.capabilities.supportsModeration(model)) {
             throw new Error(`Model ${model} does not support moderation.`);
         }
         const response = await provider.moderate({
@@ -123,7 +136,10 @@ class LLMCore {
             model,
             dimensions: options?.dimensions,
         };
-        if (
+        if (options?.assumeModelExists) {
+            console.warn(`[NodeLLM] Skipping validation for model ${request.model}`);
+        }
+        else if (request.model && provider.capabilities && !provider.capabilities.supportsEmbeddings(request.model)) {
            throw new Error(`Model ${request.model} does not support embeddings.`);
         }
         const response = await provider.embed(request);
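The same escape hatch is exposed on the non-chat entry points; the option names come from the llm.d.ts hunks above, while the model IDs below are hypothetical. A brief sketch:

```ts
import { LLM } from "@node-llm/core";

// With assumeModelExists, the capability check is skipped and
// "[NodeLLM] Skipping validation for model ..." is logged instead of throwing.
const image = await LLM.paint("a lighthouse at dusk", {
  model: "my-private-image-model",
  assumeModelExists: true,
});

const embedding = await LLM.embed("hello world", {
  model: "my-private-embedding-model",
  assumeModelExists: true,
});
```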
package/dist/models/ModelRegistry.d.ts
CHANGED
@@ -1,23 +1,50 @@
-import {
+import { Model } from "./types.js";
 export declare class ModelRegistry {
-    private models;
-    private static readonly API_URL;
+    private static models;
     /**
-     *
+     * Find a model by its ID.
      */
-
+    static find(modelId: string, provider?: string): Model | undefined;
     /**
-     *
+     * Get all available models.
      */
-
+    static all(): Model[];
     /**
-     *
+     * Get output tokens limit for a model.
      */
-
+    static getMaxOutputTokens(modelId: string, provider: string): number | undefined;
     /**
-     *
+     * Check if a model supports a capability.
      */
-
+    static supports(modelId: string, capability: string, provider: string): boolean;
+    /**
+     * Get context window size.
+     */
+    static getContextWindow(modelId: string, provider: string): number | undefined;
+    /**
+     * Calculate cost for usage.
+     */
+    static calculateCost(usage: {
+        input_tokens: number;
+        output_tokens: number;
+        total_tokens: number;
+        cached_tokens?: number;
+        reasoning_tokens?: number;
+    }, modelId: string, provider: string): {
+        input_tokens: number;
+        output_tokens: number;
+        total_tokens: number;
+        cached_tokens?: number;
+        reasoning_tokens?: number;
+    } | {
+        input_cost: number;
+        output_cost: number;
+        cost: number;
+        input_tokens: number;
+        output_tokens: number;
+        total_tokens: number;
+        cached_tokens?: number;
+        reasoning_tokens?: number;
+    };
 }
-export declare const models: ModelRegistry;
 //# sourceMappingURL=ModelRegistry.d.ts.map
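Since llm.d.ts now types `LLM.models` as `typeof ModelRegistry`, the static helpers declared above can be called straight off `LLM.models`. A sketch; the `"openai"` provider argument and the capability string follow the declarations above and the README example, and the token numbers are made up:

```ts
import { LLM } from "@node-llm/core";

const model = LLM.models.find("gpt-4o-mini", "openai");
console.log(model?.context_window);

console.log(LLM.models.getContextWindow("gpt-4o-mini", "openai"));
console.log(LLM.models.supports("gpt-4o-mini", "function_calling", "openai"));

// calculateCost() returns the usage object enriched with input_cost/output_cost/cost
// when pricing data exists for the model.
const priced = LLM.models.calculateCost(
  { input_tokens: 1200, output_tokens: 300, total_tokens: 1500 },
  "gpt-4o-mini",
  "openai"
);
console.log(priced);
```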
package/dist/models/ModelRegistry.d.ts.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"ModelRegistry.d.ts","sourceRoot":"","sources":["../../src/models/ModelRegistry.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,
+
{"version":3,"file":"ModelRegistry.d.ts","sourceRoot":"","sources":["../../src/models/ModelRegistry.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,KAAK,EAAE,MAAM,YAAY,CAAC;AAGnC,qBAAa,aAAa;IACtB,OAAO,CAAC,MAAM,CAAC,MAAM,CAA6C;IAElE;;OAEG;IACH,MAAM,CAAC,IAAI,CAAC,OAAO,EAAE,MAAM,EAAE,QAAQ,CAAC,EAAE,MAAM,GAAG,KAAK,GAAG,SAAS;IAMlE;;OAEG;IACH,MAAM,CAAC,GAAG,IAAI,KAAK,EAAE;IAIrB;;OAEG;IACH,MAAM,CAAC,kBAAkB,CAAC,OAAO,EAAE,MAAM,EAAE,QAAQ,EAAE,MAAM,GAAG,MAAM,GAAG,SAAS;IAKhF;;OAEG;IACH,MAAM,CAAC,QAAQ,CAAC,OAAO,EAAE,MAAM,EAAE,UAAU,EAAE,MAAM,EAAE,QAAQ,EAAE,MAAM,GAAG,OAAO;IAK/E;;OAEG;IACH,MAAM,CAAC,gBAAgB,CAAC,OAAO,EAAE,MAAM,EAAE,QAAQ,EAAE,MAAM,GAAG,MAAM,GAAG,SAAS;IAK9E;;OAEG;IACH,MAAM,CAAC,aAAa,CAAC,KAAK,EAAE;QAAE,YAAY,EAAE,MAAM,CAAC;QAAC,aAAa,EAAE,MAAM,CAAC;QAAC,YAAY,EAAE,MAAM,CAAC;QAAC,aAAa,CAAC,EAAE,MAAM,CAAC;QAAC,gBAAgB,CAAC,EAAE,MAAM,CAAA;KAAE,EAAE,OAAO,EAAE,MAAM,EAAE,QAAQ,EAAE,MAAM;sBAA3I,MAAM;uBAAiB,MAAM;sBAAgB,MAAM;wBAAkB,MAAM;2BAAqB,MAAM;;;;;sBAAtG,MAAM;uBAAiB,MAAM;sBAAgB,MAAM;wBAAkB,MAAM;2BAAqB,MAAM;;CA+BrJ"}