@node-llm/core 1.2.0 → 1.4.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (58) hide show
  1. package/LICENSE +21 -0
  2. package/README.md +245 -0
  3. package/dist/aliases.d.ts +134 -0
  4. package/dist/aliases.d.ts.map +1 -0
  5. package/dist/{aliases.json → aliases.js} +8 -2
  6. package/dist/chat/Chat.d.ts +11 -8
  7. package/dist/chat/Chat.d.ts.map +1 -1
  8. package/dist/chat/Chat.js +27 -17
  9. package/dist/chat/ChatOptions.d.ts +2 -2
  10. package/dist/chat/ChatOptions.d.ts.map +1 -1
  11. package/dist/chat/ChatResponse.d.ts +2 -1
  12. package/dist/chat/ChatResponse.d.ts.map +1 -1
  13. package/dist/chat/ChatResponse.js +3 -1
  14. package/dist/chat/ChatStream.js +1 -2
  15. package/dist/chat/Message.d.ts +1 -0
  16. package/dist/chat/Message.d.ts.map +1 -1
  17. package/dist/chat/Tool.d.ts +35 -1
  18. package/dist/chat/Tool.d.ts.map +1 -1
  19. package/dist/chat/Tool.js +38 -1
  20. package/dist/config.d.ts +2 -0
  21. package/dist/config.d.ts.map +1 -1
  22. package/dist/config.js +1 -0
  23. package/dist/errors/index.d.ts +1 -1
  24. package/dist/errors/index.js +1 -1
  25. package/dist/index.d.ts +3 -0
  26. package/dist/index.d.ts.map +1 -1
  27. package/dist/index.js +3 -0
  28. package/dist/llm.d.ts +45 -15
  29. package/dist/llm.d.ts.map +1 -1
  30. package/dist/llm.js +57 -5
  31. package/dist/model_aliases.d.ts.map +1 -1
  32. package/dist/model_aliases.js +6 -9
  33. package/dist/providers/BaseProvider.d.ts +1 -1
  34. package/dist/providers/BaseProvider.d.ts.map +1 -1
  35. package/dist/providers/BaseProvider.js +3 -0
  36. package/dist/providers/Provider.d.ts +3 -2
  37. package/dist/providers/Provider.d.ts.map +1 -1
  38. package/dist/providers/anthropic/AnthropicProvider.d.ts +1 -0
  39. package/dist/providers/anthropic/AnthropicProvider.d.ts.map +1 -1
  40. package/dist/providers/anthropic/AnthropicProvider.js +3 -0
  41. package/dist/providers/deepseek/DeepSeekProvider.d.ts +1 -0
  42. package/dist/providers/deepseek/DeepSeekProvider.d.ts.map +1 -1
  43. package/dist/providers/deepseek/DeepSeekProvider.js +3 -0
  44. package/dist/providers/gemini/GeminiProvider.d.ts +1 -0
  45. package/dist/providers/gemini/GeminiProvider.d.ts.map +1 -1
  46. package/dist/providers/gemini/GeminiProvider.js +3 -0
  47. package/dist/providers/ollama/OllamaProvider.d.ts +1 -0
  48. package/dist/providers/ollama/OllamaProvider.d.ts.map +1 -1
  49. package/dist/providers/ollama/OllamaProvider.js +3 -0
  50. package/dist/providers/openai/OpenAIProvider.d.ts +1 -0
  51. package/dist/providers/openai/OpenAIProvider.d.ts.map +1 -1
  52. package/dist/providers/openai/OpenAIProvider.js +3 -0
  53. package/dist/utils/FileLoader.d.ts.map +1 -1
  54. package/dist/utils/FileLoader.js +2 -1
  55. package/dist/utils/logger.d.ts +0 -3
  56. package/dist/utils/logger.d.ts.map +1 -1
  57. package/dist/utils/logger.js +2 -4
  58. package/package.json +2 -2
package/LICENSE ADDED
@@ -0,0 +1,21 @@
1
+ MIT License
2
+
3
+ Copyright (c) 2026 NodeLLM contributors
4
+
5
+ Permission is hereby granted, free of charge, to any person obtaining a copy
6
+ of this software and associated documentation files (the "Software"), to deal
7
+ in the Software without restriction, including without limitation the rights
8
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9
+ copies of the Software, and to permit persons to whom the Software is
10
+ furnished to do so, subject to the following conditions:
11
+
12
+ The above copyright notice and this permission notice shall be included in all
13
+ copies or substantial portions of the Software.
14
+
15
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21
+ SOFTWARE.
package/README.md CHANGED
@@ -0,0 +1,245 @@
1
+ <p align="left">
2
+ <img src="https://github.com/eshaiju/node-llm/raw/main/docs/assets/images/logo.jpg" alt="NodeLLMlogo" width="300" />
3
+ </p>
4
+
5
+ # NodeLLM
6
+ **An opinionated architectural layer for using Large Language Models in Node.js.**
7
+
8
+ Build chatbots, autonomous agents, and RAG pipelines without the SDK fatigue. NodeLLM provides a unified, production-oriented API for interacting with **540+ models** across multiple providers (OpenAI, Gemini, Anthropic, DeepSeek, OpenRouter, Ollama, etc.) without coupling your application to any single SDK.
9
+
10
+ <br/>
11
+
12
+ <p align="left">
13
+ <img src="https://registry.npmmirror.com/@lobehub/icons-static-svg/latest/files/icons/openai.svg" height="28" />
14
+ <img src="https://registry.npmmirror.com/@lobehub/icons-static-svg/latest/files/icons/openai-text.svg" height="22" />
15
+ &nbsp;&nbsp;&nbsp;&nbsp;
16
+ <img src="https://registry.npmmirror.com/@lobehub/icons-static-svg/latest/files/icons/anthropic-text.svg" height="18" />
17
+ &nbsp;&nbsp;&nbsp;&nbsp;
18
+ <img src="https://registry.npmmirror.com/@lobehub/icons-static-svg/latest/files/icons/gemini-color.svg" height="28" />
19
+ <img src="https://registry.npmmirror.com/@lobehub/icons-static-svg/latest/files/icons/gemini-text.svg" height="20" />
20
+ &nbsp;&nbsp;&nbsp;&nbsp;
21
+ <img src="https://registry.npmmirror.com/@lobehub/icons-static-svg/latest/files/icons/deepseek-color.svg" height="28" />
22
+ <img src="https://registry.npmmirror.com/@lobehub/icons-static-svg/latest/files/icons/deepseek-text.svg" height="20" />
23
+ &nbsp;&nbsp;&nbsp;&nbsp;
24
+ <img src="https://registry.npmmirror.com/@lobehub/icons-static-svg/latest/files/icons/openrouter.svg" height="28" />
25
+ <img src="https://registry.npmmirror.com/@lobehub/icons-static-svg/latest/files/icons/openrouter-text.svg" height="22" />
26
+ &nbsp;&nbsp;&nbsp;&nbsp;
27
+ <img src="https://registry.npmmirror.com/@lobehub/icons-static-svg/latest/files/icons/ollama.svg" height="28" />
28
+ <img src="https://registry.npmmirror.com/@lobehub/icons-static-svg/latest/files/icons/ollama-text.svg" height="18" />
29
+ </p>
30
+
31
+ <br/>
32
+
33
+ [![npm version](https://img.shields.io/npm/v/@node-llm/core.svg)](https://www.npmjs.com/package/@node-llm/core)
34
+ [![License: MIT](https://img.shields.io/badge/License-MIT-yellow.svg)](https://opensource.org/licenses/MIT)
35
+ [![TypeScript](https://img.shields.io/badge/TypeScript-Ready-blue.svg)](https://www.typescriptlang.org/)
36
+
37
+ ---
38
+
39
+ ## ⚡ The Golden Path
40
+
41
+ ```ts
42
+ import { NodeLLM } from "@node-llm/core";
43
+
44
+ // 1. Configure once
45
+ NodeLLM.configure({ provider: "openai" });
46
+
47
+ // 2. Chat (High-level request/response)
48
+ const chat = NodeLLM.chat("gpt-4o");
49
+ const response = await chat.ask("Explain event-driven architecture");
50
+ console.log(response.content);
51
+
52
+ // 3. Streaming (Standard AsyncIterator)
53
+ for await (const chunk of chat.stream("Explain event-driven architecture")) {
54
+ process.stdout.write(chunk.content);
55
+ }
56
+ ```
57
+
58
+ ---
59
+
60
+ ## Why `NodeLLM`?
61
+
62
+ Most AI integrations today are provider-specific, SDK-driven, and leaky at abstraction boundaries. This creates long-term architectural risk. Switching models should not mean a total rewrite of your business logic.
63
+
64
+ NodeLLM exists to solve **architectural problems**, not just provide API access.
65
+
66
+ ### Strategic Goals
67
+
68
+ - **Provider Isolation**: Your application logic never touches a provider-specific SDK.
69
+ - **Unified Mental Model**: Chat, streaming, tools, and structured outputs feel identical across providers.
70
+ - **Production-Ready**: Streaming, retries, and errors are first-class concerns.
71
+ - **The "Standard Library" Voice**: It provides a beautiful, native-feeling API for modern Node.js.
72
+
73
+ ### Non-Goals
74
+
75
+ - It is **not** a thin wrapper that mirrors every provider's unique API knobs.
76
+ - It is **not** a UI framework or a simple chatbot builder.
77
+ - It prioritizes **architectural clarity** over raw SDK convenience.
78
+
79
+ ---
80
+
81
+ ## 🔧 Strategic Configuration
82
+
83
+ NodeLLM provides a flexible configuration system designed for enterprise usage:
84
+
85
+ ```ts
86
+ // Recommended for multi-provider pipelines
87
+ NodeLLM.configure((config) => {
88
+ config.openaiApiKey = process.env.OPENAI_API_KEY;
89
+ config.anthropicApiKey = process.env.ANTHROPIC_API_KEY;
90
+ config.ollamaApiBase = process.env.OLLAMA_API_BASE;
91
+ });
92
+
93
+ // Switch providers at the framework level
94
+ NodeLLM.configure({ provider: "anthropic" });
95
+
96
+ // Support for Custom Endpoints (e.g., Azure or LocalAI)
97
+ NodeLLM.configure({
98
+ openaiApiKey: process.env.AZURE_KEY,
99
+ openaiApiBase: "https://your-resource.openai.azure.com/openai/deployments/...",
100
+ });
101
+ ```
102
+
103
+ **[Full Configuration Guide →](docs/getting_started/configuration.md)**
104
+
105
+ ---
106
+
107
+ ---
108
+
109
+ ## 🔮 Capabilities
110
+
111
+ ### 💬 Unified Chat
112
+ Stop rewriting code for every provider. `NodeLLM` normalizes inputs and outputs into a single, predictable mental model.
113
+ ```ts
114
+ const chat = NodeLLM.chat(); // Defaults to GPT-4o
115
+ await chat.ask("Hello world");
116
+ ```
117
+
118
+ ### 👁️ Smart Vision & Files
119
+ Pass images, PDFs, or audio files directly. We handle the heavy lifting: fetching remote URLs, base64 encoding, and MIME type mapping.
120
+ ```ts
121
+ await chat.ask("Analyze this interface", {
122
+ files: ["./screenshot.png", "https://example.com/spec.pdf"]
123
+ });
124
+ ```
125
+
126
+ ### 🛠️ Auto-Executing Tools
127
+ Define tools once; `NodeLLM` manages the recursive execution loop for you, keeping your controller logic clean. **Works seamlessly with both regular chat and streaming!**
128
+
129
+ ```ts
130
+ // Class-based DSL
131
+ class WeatherTool extends Tool {
132
+ name = "get_weather";
133
+ description = "Get current weather";
134
+ schema = z.object({ location: z.string() });
135
+ async execute({ location }) { return `Sunny in ${location}`; }
136
+ }
137
+
138
+ // Register tools
139
+ chat.withTools([WeatherTool]);
140
+
141
+ // Now the model can use it automatically
142
+ await chat.ask("What's the weather in Tokyo?");
143
+ ```
144
+ **[Full Tool Calling Guide →](https://node-llm.eshaiju.com/core-features/tool-calling)**
145
+
146
+ ### 🔍 Comprehensive Debug Logging
147
+ Enable detailed logging for all API requests and responses across every feature and provider:
148
+ ```ts
149
+ // Set environment variable
150
+ process.env.NODELLM_DEBUG = "true";
151
+
152
+ // Now see detailed logs for every API call:
153
+ // [NodeLLM] [OpenAI] Request: POST https://api.openai.com/v1/chat/completions
154
+ // { "model": "gpt-4o", "messages": [...] }
155
+ // [NodeLLM] [OpenAI] Response: 200 OK
156
+ // { "id": "chatcmpl-123", ... }
157
+ ```
158
+ **Covers:** Chat, Streaming, Images, Embeddings, Transcription, Moderation - across all providers!
159
+
160
+ ### ✨ Structured Output
161
+ Get type-safe, validated JSON back using **Zod** schemas.
162
+ ```ts
163
+ import { z } from "@node-llm/core";
164
+ const Product = z.object({ name: z.string(), price: z.number() });
165
+
166
+ const res = await chat.withSchema(Product).ask("Generate a gadget");
167
+ console.log(res.parsed.name); // Full type-safety
168
+ ```
169
+
170
+ ### 🎨 Image Generation
171
+ ```ts
172
+ await NodeLLM.paint("A cyberpunk city in rain");
173
+ ```
174
+
175
+ ### 🎤 Audio Transcription
176
+ ```ts
177
+ await NodeLLM.transcribe("meeting-recording.wav");
178
+ ```
179
+
180
+ ### ⚡ Scoped Parallelism
181
+ Run multiple providers in parallel safely without global configuration side effects using isolated contexts. You can also override credentials (API keys) for specific instances.
182
+
183
+ ```ts
184
+ const [gpt, claude] = await Promise.all([
185
+ // Each call branches off into its own isolated context
186
+ NodeLLM.withProvider("openai").chat("gpt-4o").ask(prompt),
187
+ NodeLLM.withProvider("anthropic", { anthropicApiKey: "..." }).chat("claude-3.5-sonnet").ask(prompt),
188
+ ]);
189
+ ```
190
+
191
+
192
+ ### 🧠 Deep Reasoning
193
+ Direct access to the thought process of models like **DeepSeek R1** or **OpenAI o1/o3** using the `.reasoning` field.
194
+ ```ts
195
+ const res = await NodeLLM.chat("deepseek-reasoner").ask("Solve this logical puzzle");
196
+ console.log(res.reasoning); // Chain-of-thought
197
+ ```
198
+
199
+ ---
200
+
201
+ ## 🚀 Why use this over official SDKs?
202
+
203
+ | Feature | `NodeLLM` | Official SDKs | Architectural Impact |
204
+ | :--- | :--- | :--- | :--- |
205
+ | **Provider Logic** | Transparently Handled | Exposed to your code | **Low Coupling** |
206
+ | **Streaming** | Standard `AsyncIterator` | Vendor-specific Events | **Predictable Data Flow** |
207
+ | **Streaming + Tools** | Automated Execution | Manual implementation | **Seamless UX** |
208
+ | **Tool Loops** | Automated Recursion | Manual implementation | **Reduced Boilerplate** |
209
+ | **Files/Vision** | Intelligent Path/URL handling | Base64/Buffer management | **Cleaner Service Layer** |
210
+ | **Configuration** | Centralized & Global | Per-instance initialization | **Easier Lifecycle Mgmt** |
211
+
212
+ ---
213
+
214
+ ## 📋 Supported Providers
215
+
216
+ | Provider | Supported Features |
217
+ | :--- | :--- |
218
+ | <img src="https://registry.npmmirror.com/@lobehub/icons-static-svg/latest/files/icons/openai.svg" height="18"> **OpenAI** | Chat, **Streaming + Tools**, Vision, Audio, Images, Transcription, **Reasoning** |
219
+ | <img src="https://registry.npmmirror.com/@lobehub/icons-static-svg/latest/files/icons/gemini-color.svg" height="18"> **Gemini** | Chat, **Streaming + Tools**, Vision, Audio, Video, Embeddings |
220
+ | <img src="https://registry.npmmirror.com/@lobehub/icons-static-svg/latest/files/icons/anthropic-text.svg" height="12"> **Anthropic** | Chat, **Streaming + Tools**, Vision, PDF, Structured Output |
221
+ | <img src="https://registry.npmmirror.com/@lobehub/icons-static-svg/latest/files/icons/deepseek-color.svg" height="18"> **DeepSeek** | Chat (V3), **Reasoning (R1)**, **Streaming + Tools** |
222
+ | <img src="https://registry.npmmirror.com/@lobehub/icons-static-svg/latest/files/icons/openrouter.svg" height="18"> **OpenRouter** | **Aggregator**, Chat, Streaming, Tools, Vision, Embeddings, **Reasoning** |
223
+ | <img src="https://registry.npmmirror.com/@lobehub/icons-static-svg/latest/files/icons/ollama.svg" height="18"> **Ollama** | **Local Inference**, Chat, Streaming, Tools, Vision, Embeddings |
224
+
225
+ ---
226
+
227
+ ## 📚 Documentation & Installation
228
+
229
+ ```bash
230
+ npm install @node-llm/core
231
+ ```
232
+
233
+ **[View Full Documentation ↗](https://node-llm.eshaiju.com/)**
234
+
235
+ ---
236
+
237
+ ## 🫶 Credits
238
+
239
+ Heavily inspired by the elegant design of [RubyLLM](https://rubyllm.com/).
240
+
241
+ ---
242
+
243
+ ## 📄 License
244
+
245
+ MIT © [NodeLLM contributors]
@@ -0,0 +1,134 @@
1
+ declare const _default: {
2
+ readonly "chatgpt-4o": {
3
+ readonly openai: "chatgpt-4o-latest";
4
+ readonly openrouter: "openai/chatgpt-4o-latest";
5
+ };
6
+ readonly "claude-3-5-haiku": {
7
+ readonly anthropic: "claude-3-5-haiku-20241022";
8
+ readonly openrouter: "anthropic/claude-3.5-haiku";
9
+ readonly bedrock: "anthropic.claude-3-5-haiku-20241022-v1:0";
10
+ };
11
+ readonly "claude-3-5-sonnet": {
12
+ readonly anthropic: "claude-3-5-sonnet-20240620";
13
+ readonly openrouter: "anthropic/claude-3.5-sonnet";
14
+ readonly bedrock: "anthropic.claude-3-5-sonnet-20240620-v1:0";
15
+ };
16
+ readonly "claude-sonnet-4-5": {
17
+ readonly anthropic: "claude-sonnet-4-5-20250929";
18
+ };
19
+ readonly "claude-sonnet-4": {
20
+ readonly anthropic: "claude-sonnet-4-20250514";
21
+ };
22
+ readonly "claude-3-7-sonnet": {
23
+ readonly anthropic: "claude-3-7-sonnet-20250219";
24
+ readonly openrouter: "anthropic/claude-3.7-sonnet";
25
+ readonly bedrock: "us.anthropic.claude-3-7-sonnet-20250219-v1:0";
26
+ };
27
+ readonly "claude-3-haiku": {
28
+ readonly anthropic: "claude-3-haiku-20240307";
29
+ readonly openrouter: "anthropic/claude-3-haiku";
30
+ readonly bedrock: "anthropic.claude-3-haiku-20240307-v1:0:200k";
31
+ };
32
+ readonly "claude-3-opus": {
33
+ readonly anthropic: "claude-3-opus-20240229";
34
+ readonly openrouter: "anthropic/claude-3-opus";
35
+ readonly bedrock: "anthropic.claude-3-opus-20240229-v1:0:200k";
36
+ };
37
+ readonly "claude-3-sonnet": {
38
+ readonly bedrock: "anthropic.claude-3-sonnet-20240229-v1:0";
39
+ };
40
+ readonly "deepseek-chat": {
41
+ readonly deepseek: "deepseek-chat";
42
+ readonly openrouter: "deepseek/deepseek-chat";
43
+ };
44
+ readonly "gemini-flash": {
45
+ readonly gemini: "gemini-flash-latest";
46
+ readonly vertexai: "gemini-flash-latest";
47
+ readonly openrouter: "google/gemini-flash-latest";
48
+ };
49
+ readonly "gemini-pro": {
50
+ readonly gemini: "gemini-1.5-pro-001";
51
+ readonly vertexai: "gemini-1.5-pro-001";
52
+ readonly openrouter: "google/gemini-1.5-pro-001";
53
+ };
54
+ readonly "gemini-1.5-flash": {
55
+ readonly gemini: "gemini-1.5-flash-001";
56
+ readonly vertexai: "gemini-1.5-flash-001";
57
+ readonly openrouter: "google/gemini-1.5-flash-001";
58
+ };
59
+ readonly "gemini-1.5-pro": {
60
+ readonly gemini: "gemini-1.5-pro-001";
61
+ readonly vertexai: "gemini-1.5-pro-001";
62
+ readonly openrouter: "google/gemini-1.5-pro-001";
63
+ };
64
+ readonly "gemini-2.0-flash": {
65
+ readonly gemini: "gemini-2.0-flash";
66
+ readonly vertexai: "gemini-2.0-flash";
67
+ };
68
+ readonly "gemini-2.0-flash-001": {
69
+ readonly gemini: "gemini-2.0-flash-001";
70
+ readonly openrouter: "google/gemini-2.0-flash-001";
71
+ readonly vertexai: "gemini-2.0-flash-001";
72
+ };
73
+ readonly "gpt-3.5-turbo": {
74
+ readonly openai: "gpt-3.5-turbo";
75
+ readonly openrouter: "openai/gpt-3.5-turbo";
76
+ };
77
+ readonly "gpt-4": {
78
+ readonly openai: "gpt-4";
79
+ readonly openrouter: "openai/gpt-4";
80
+ };
81
+ readonly "gpt-4-turbo": {
82
+ readonly openai: "gpt-4-turbo";
83
+ readonly openrouter: "openai/gpt-4-turbo";
84
+ };
85
+ readonly "gpt-4o": {
86
+ readonly openai: "gpt-4o";
87
+ readonly openrouter: "openai/gpt-4o";
88
+ };
89
+ readonly "gpt-4o-mini": {
90
+ readonly openai: "gpt-4o-mini";
91
+ readonly openrouter: "openai/gpt-4o-mini";
92
+ };
93
+ readonly "llama-3-1-405b": {
94
+ readonly openrouter: "meta-llama/llama-3.1-405b";
95
+ };
96
+ readonly "llama-3-1-405b-instruct": {
97
+ readonly openrouter: "meta-llama/llama-3.1-405b-instruct";
98
+ };
99
+ readonly "llama-3-1-70b": {
100
+ readonly openrouter: "meta-llama/llama-3.1-70b";
101
+ };
102
+ readonly "llama-3-1-70b-instruct": {
103
+ readonly openrouter: "meta-llama/llama-3.1-70b-instruct";
104
+ };
105
+ readonly "llama-3-1-8b": {
106
+ readonly openrouter: "meta-llama/llama-3.1-8b";
107
+ };
108
+ readonly "llama-3-1-8b-instruct": {
109
+ readonly openrouter: "meta-llama/llama-3.1-8b-instruct";
110
+ };
111
+ readonly "llama-3-2-1b-instruct": {
112
+ readonly openrouter: "meta-llama/llama-3.2-1b-instruct";
113
+ };
114
+ readonly "llama-3-2-3b-instruct": {
115
+ readonly openrouter: "meta-llama/llama-3.2-3b-instruct";
116
+ };
117
+ readonly "llama-3-3-70b-instruct": {
118
+ readonly openrouter: "meta-llama/llama-3.3-70b-instruct";
119
+ };
120
+ readonly "mistral-large": {
121
+ readonly mistral: "mistral-large-latest";
122
+ readonly openrouter: "mistralai/mistral-large";
123
+ };
124
+ readonly "mistral-medium": {
125
+ readonly mistral: "mistral-medium-latest";
126
+ readonly openrouter: "mistralai/mistral-medium";
127
+ };
128
+ readonly "mistral-small": {
129
+ readonly mistral: "mistral-small-latest";
130
+ readonly openrouter: "mistralai/mistral-small";
131
+ };
132
+ };
133
+ export default _default;
134
+ //# sourceMappingURL=aliases.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"aliases.d.ts","sourceRoot":"","sources":["../src/aliases.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,wBAmIW"}
@@ -1,4 +1,4 @@
1
- {
1
+ export default {
2
2
  "chatgpt-4o": {
3
3
  "openai": "chatgpt-4o-latest",
4
4
  "openrouter": "openai/chatgpt-4o-latest"
@@ -13,6 +13,12 @@
13
13
  "openrouter": "anthropic/claude-3.5-sonnet",
14
14
  "bedrock": "anthropic.claude-3-5-sonnet-20240620-v1:0"
15
15
  },
16
+ "claude-sonnet-4-5": {
17
+ "anthropic": "claude-sonnet-4-5-20250929"
18
+ },
19
+ "claude-sonnet-4": {
20
+ "anthropic": "claude-sonnet-4-20250514"
21
+ },
16
22
  "claude-3-7-sonnet": {
17
23
  "anthropic": "claude-3-7-sonnet-20250219",
18
24
  "openrouter": "anthropic/claude-3.7-sonnet",
@@ -123,4 +129,4 @@
123
129
  "mistral": "mistral-small-latest",
124
130
  "openrouter": "mistralai/mistral-small"
125
131
  }
126
- }
132
+ };
@@ -27,6 +27,7 @@ export declare class Chat {
27
27
  * Read-only access to message history
28
28
  */
29
29
  get history(): readonly Message[];
30
+ get modelId(): string;
30
31
  /**
31
32
  * Aggregate usage across the entire conversation
32
33
  */
@@ -37,14 +38,16 @@ export declare class Chat {
37
38
  */
38
39
  withTool(tool: any): this;
39
40
  /**
40
- * Add multiple tools to the chat session.
41
- * Supports passing tool instances or classes (which will be instantiated).
42
- * Can replace existing tools if options.replace is true.
43
- *
44
- * @example
45
- * chat.withTools([WeatherTool, new CalculatorTool()], { replace: true });
46
- */
47
- withTools(tools: (Tool | any)[], options?: {
41
+ * Add multiple tools to the chat session.
42
+ * Supports passing Tool classes (which will be instantiated) or instances.
43
+ * Can replace existing tools if options.replace is true.
44
+ *
45
+ * @example
46
+ * chat.withTools([WeatherTool, new CalculatorTool()], { replace: true });
47
+ */
48
+ withTools(tools: (Tool | {
49
+ new (): Tool;
50
+ } | any)[], options?: {
48
51
  replace?: boolean;
49
52
  }): this;
50
53
  /**
@@ -1 +1 @@
1
- {"version":3,"file":"Chat.d.ts","sourceRoot":"","sources":["../../src/chat/Chat.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,OAAO,EAAE,MAAM,cAAc,CAAC;AACvC,OAAO,EAAE,WAAW,EAAE,MAAM,kBAAkB,CAAC;AAC/C,OAAO,EAAE,QAAQ,EAAE,KAAK,EAAE,SAAS,EAAE,MAAM,0BAA0B,CAAC;AAGtE,OAAO,EAAE,MAAM,EAAE,MAAM,wBAAwB,CAAC;AAChD,OAAO,EAAE,IAAI,EAAE,MAAM,WAAW,CAAC;AACjC,OAAO,EAAE,MAAM,EAAE,MAAM,qBAAqB,CAAC;AAE7C,OAAO,EAAE,CAAC,EAAE,MAAM,KAAK,CAAC;AAExB,MAAM,WAAW,UAAU;IACzB,MAAM,CAAC,EAAE,MAAM,EAAE,CAAC;IAClB,KAAK,CAAC,EAAE,MAAM,EAAE,CAAC;IACjB,WAAW,CAAC,EAAE,MAAM,CAAC;IACrB,SAAS,CAAC,EAAE,MAAM,CAAC;IACnB,OAAO,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;CAClC;AAED,OAAO,EAAE,kBAAkB,EAAE,MAAM,mBAAmB,CAAC;AAEvD,qBAAa,IAAI;IAKb,OAAO,CAAC,QAAQ,CAAC,QAAQ;IACzB,OAAO,CAAC,KAAK;IACb,OAAO,CAAC,QAAQ,CAAC,OAAO;IAN1B,OAAO,CAAC,QAAQ,CAAiB;IACjC,OAAO,CAAC,QAAQ,CAAW;gBAGR,QAAQ,EAAE,QAAQ,EAC3B,KAAK,EAAE,MAAM,EACJ,OAAO,GAAE,WAAgB,EAC1C,WAAW,GAAE;QAAE,QAAQ,EAAE,MAAM,CAAC;QAAC,OAAO,EAAE,MAAM,CAAA;KAAgC;IAmBlF;;OAEG;IACH,IAAI,OAAO,IAAI,SAAS,OAAO,EAAE,CAEhC;IAED;;OAEG;IACH,IAAI,UAAU,IAAI,KAAK,CAetB;IAED;;;OAGG;IACH,QAAQ,CAAC,IAAI,EAAE,GAAG,GAAG,IAAI;IAIzB;;;;;;;OAOG;IACH,SAAS,CAAC,KAAK,EAAE,CAAC,IAAI,GAAG,GAAG,CAAC,EAAE,EAAE,OAAO,CAAC,EAAE;QAAE,OAAO,CAAC,EAAE,OAAO,CAAA;KAAE,GAAG,IAAI;IA2BvE;;;;OAIG;IACH,gBAAgB,CAAC,WAAW,EAAE,MAAM,EAAE,OAAO,CAAC,EAAE;QAAE,OAAO,CAAC,EAAE,OAAO,CAAA;KAAE,GAAG,IAAI;IAmB5E;;OAEG;IACH,gBAAgB,CAAC,WAAW,EAAE,MAAM,EAAE,OAAO,CAAC,EAAE;QAAE,OAAO,CAAC,EAAE,OAAO,CAAA;KAAE,GAAG,IAAI;IAI5E;;;OAGG;IACH,eAAe,CAAC,IAAI,EAAE,MAAM,GAAG,IAAI;IAKnC;;OAEG;IACH,SAAS,CAAC,KAAK,EAAE,MAAM,GAAG,IAAI;IAK9B;;;OAGG;IACH,kBAAkB,CAAC,OAAO,EAAE;QAAE,OAAO,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QAAC,cAAc,CAAC,EAAE,GAAG,CAAA;KAAE,GAAG,IAAI;IAU7F;;;OAGG;IACH,UAAU,CAAC,MAAM,EAAE,MAAM,CAAC,MAAM,EAAE,GAAG,CAAC,GAAG,IAAI;IAK7C;;;OAGG;IACH,UAAU,CAAC,MAAM,EAAE,MAAM,GAAG,CAAC,CAAC,OAAO,CAAC,GAAG,CAAC,GAAG,MAAM,CAAC,MAAM,EAAE,GAAG,CAAC,GAAG,IAAI,GAAG,IAAI;IAkB9E,YAAY,CAAC,OAAO,EAAE,MAAM,IAAI,GAAG,IAAI;IAKvC,YAAY,CAAC,OAA
O,EAAE,CAAC,OAAO,EAAE,kBAAkB,KAAK,IAAI,GAAG,IAAI;IAKlE,UAAU,CAAC,OAAO,EAAE,CAAC,QAAQ,EAAE,GAAG,KAAK,IAAI,GAAG,IAAI;IAKlD,YAAY,CAAC,OAAO,EAAE,CAAC,MAAM,EAAE,GAAG,KAAK,IAAI,GAAG,IAAI;IAKlD;;OAEG;IACG,GAAG,CAAC,OAAO,EAAE,MAAM,GAAG,GAAG,EAAE,EAAE,OAAO,CAAC,EAAE,UAAU,GAAG,OAAO,CAAC,kBAAkB,CAAC;IAgMrF;;OAEG;IACH,MAAM,CAAC,OAAO,EAAE,MAAM,GAAG,MAAM,CAAC,SAAS,CAAC;CAI3C"}
1
+ {"version":3,"file":"Chat.d.ts","sourceRoot":"","sources":["../../src/chat/Chat.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,OAAO,EAAE,MAAM,cAAc,CAAC;AACvC,OAAO,EAAE,WAAW,EAAE,MAAM,kBAAkB,CAAC;AAC/C,OAAO,EAAE,QAAQ,EAAE,KAAK,EAAE,SAAS,EAAE,MAAM,0BAA0B,CAAC;AAGtE,OAAO,EAAE,MAAM,EAAE,MAAM,wBAAwB,CAAC;AAChD,OAAO,EAAE,IAAI,EAAkB,MAAM,WAAW,CAAC;AACjD,OAAO,EAAE,MAAM,EAAE,MAAM,qBAAqB,CAAC;AAE7C,OAAO,EAAE,CAAC,EAAE,MAAM,KAAK,CAAC;AAExB,MAAM,WAAW,UAAU;IACzB,MAAM,CAAC,EAAE,MAAM,EAAE,CAAC;IAClB,KAAK,CAAC,EAAE,MAAM,EAAE,CAAC;IACjB,WAAW,CAAC,EAAE,MAAM,CAAC;IACrB,SAAS,CAAC,EAAE,MAAM,CAAC;IACnB,OAAO,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;CAClC;AAED,OAAO,EAAE,kBAAkB,EAAE,MAAM,mBAAmB,CAAC;AAEvD,qBAAa,IAAI;IAKb,OAAO,CAAC,QAAQ,CAAC,QAAQ;IACzB,OAAO,CAAC,KAAK;IACb,OAAO,CAAC,QAAQ,CAAC,OAAO;IAN1B,OAAO,CAAC,QAAQ,CAAiB;IACjC,OAAO,CAAC,QAAQ,CAAW;gBAGR,QAAQ,EAAE,QAAQ,EAC3B,KAAK,EAAE,MAAM,EACJ,OAAO,GAAE,WAAgB,EAC1C,WAAW,GAAE;QAAE,QAAQ,EAAE,MAAM,CAAC;QAAC,OAAO,EAAE,MAAM,CAAA;KAAgC;IAmBlF;;OAEG;IACH,IAAI,OAAO,IAAI,SAAS,OAAO,EAAE,CAEhC;IAED,IAAI,OAAO,IAAI,MAAM,CAEpB;IAED;;OAEG;IACH,IAAI,UAAU,IAAI,KAAK,CAetB;IAED;;;OAGG;IACH,QAAQ,CAAC,IAAI,EAAE,GAAG,GAAG,IAAI;IAIvB;;;;;;;KAOC;IACH,SAAS,CAAC,KAAK,EAAE,CAAC,IAAI,GAAG;QAAE,QAAO,IAAI,CAAA;KAAE,GAAG,GAAG,CAAC,EAAE,EAAE,OAAO,CAAC,EAAE;QAAE,OAAO,CAAC,EAAE,OAAO,CAAA;KAAE,GAAG,IAAI;IAmCzF;;;;OAIG;IACH,gBAAgB,CAAC,WAAW,EAAE,MAAM,EAAE,OAAO,CAAC,EAAE;QAAE,OAAO,CAAC,EAAE,OAAO,CAAA;KAAE,GAAG,IAAI;IAmB5E;;OAEG;IACH,gBAAgB,CAAC,WAAW,EAAE,MAAM,EAAE,OAAO,CAAC,EAAE;QAAE,OAAO,CAAC,EAAE,OAAO,CAAA;KAAE,GAAG,IAAI;IAI5E;;;OAGG;IACH,eAAe,CAAC,IAAI,EAAE,MAAM,GAAG,IAAI;IAKnC;;OAEG;IACH,SAAS,CAAC,KAAK,EAAE,MAAM,GAAG,IAAI;IAK9B;;;OAGG;IACH,kBAAkB,CAAC,OAAO,EAAE;QAAE,OAAO,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QAAC,cAAc,CAAC,EAAE,GAAG,CAAA;KAAE,GAAG,IAAI;IAU7F;;;OAGG;IACH,UAAU,CAAC,MAAM,EAAE,MAAM,CAAC,MAAM,EAAE,GAAG,CAAC,GAAG,IAAI;IAK7C;;;OAGG;IACH,UAAU,CAAC,MAAM,EAAE,MAAM,GAAG,CAAC,CAAC,OAAO,CAAC,GAAG,CAAC,GAAG,MAAM,CAAC,MAAM,EAAE,GAAG,CAAC,GAAG,IAAI,GAAG,IAAI;IAkB
9E,YAAY,CAAC,OAAO,EAAE,MAAM,IAAI,GAAG,IAAI;IAKvC,YAAY,CAAC,OAAO,EAAE,CAAC,OAAO,EAAE,kBAAkB,KAAK,IAAI,GAAG,IAAI;IAKlE,UAAU,CAAC,OAAO,EAAE,CAAC,QAAQ,EAAE,GAAG,KAAK,IAAI,GAAG,IAAI;IAKlD,YAAY,CAAC,OAAO,EAAE,CAAC,MAAM,EAAE,GAAG,KAAK,IAAI,GAAG,IAAI;IAKlD;;OAEG;IACG,GAAG,CAAC,OAAO,EAAE,MAAM,GAAG,GAAG,EAAE,EAAE,OAAO,CAAC,EAAE,UAAU,GAAG,OAAO,CAAC,kBAAkB,CAAC;IAkMrF;;OAEG;IACH,MAAM,CAAC,OAAO,EAAE,MAAM,GAAG,MAAM,CAAC,SAAS,CAAC;CAI3C"}
package/dist/chat/Chat.js CHANGED
@@ -32,6 +32,9 @@ export class Chat {
32
32
  get history() {
33
33
  return this.messages;
34
34
  }
35
+ get modelId() {
36
+ return this.model;
37
+ }
35
38
  /**
36
39
  * Aggregate usage across the entire conversation
37
40
  */
@@ -56,13 +59,13 @@ export class Chat {
56
59
  return this.withTools([tool]);
57
60
  }
58
61
  /**
59
- * Add multiple tools to the chat session.
60
- * Supports passing tool instances or classes (which will be instantiated).
61
- * Can replace existing tools if options.replace is true.
62
- *
63
- * @example
64
- * chat.withTools([WeatherTool, new CalculatorTool()], { replace: true });
65
- */
62
+ * Add multiple tools to the chat session.
63
+ * Supports passing Tool classes (which will be instantiated) or instances.
64
+ * Can replace existing tools if options.replace is true.
65
+ *
66
+ * @example
67
+ * chat.withTools([WeatherTool, new CalculatorTool()], { replace: true });
68
+ */
66
69
  withTools(tools, options) {
67
70
  if (options?.replace) {
68
71
  this.options.tools = [];
@@ -71,20 +74,27 @@ export class Chat {
71
74
  this.options.tools = [];
72
75
  }
73
76
  for (const tool of tools) {
77
+ let toolInstance;
78
+ // Handle class constructor
74
79
  if (typeof tool === "function") {
75
80
  try {
76
- // Attempt to instantiate if it's a class
77
- this.options.tools.push(new tool());
81
+ toolInstance = new tool();
78
82
  }
79
83
  catch (e) {
80
- // If instantiation fails, it might be a function tool or require args?
81
- // For now, assuming classes with no-arg constructors as per convention.
82
- console.warn("Attempted to instantiate tool class but failed, adding as-is", e);
83
- this.options.tools.push(tool);
84
+ console.error(`[NodeLLM] Failed to instantiate tool class: ${tool.name}`, e);
85
+ continue;
84
86
  }
85
87
  }
86
88
  else {
87
- this.options.tools.push(tool);
89
+ toolInstance = tool;
90
+ }
91
+ // Normalized to standard ToolDefinition interface if it's a Tool class instance
92
+ if (toolInstance && typeof toolInstance.toLLMTool === "function") {
93
+ this.options.tools.push(toolInstance.toLLMTool());
94
+ }
95
+ else {
96
+ // Fallback for legacy raw tool objects (defined as objects with type: 'function')
97
+ this.options.tools.push(toolInstance);
88
98
  }
89
99
  }
90
100
  return this;
@@ -286,7 +296,7 @@ export class Chat {
286
296
  this.options.onNewMessage();
287
297
  let response = await this.executor.executeChat(executeOptions);
288
298
  trackUsage(response.usage);
289
- const firstAssistantMessage = new ChatResponseString(response.content ?? "", response.usage ?? { input_tokens: 0, output_tokens: 0, total_tokens: 0 }, this.model, response.reasoning);
299
+ const firstAssistantMessage = new ChatResponseString(response.content ?? "", response.usage ?? { input_tokens: 0, output_tokens: 0, total_tokens: 0 }, this.model, this.provider.id, response.reasoning);
290
300
  this.messages.push({
291
301
  role: "assistant",
292
302
  content: firstAssistantMessage,
@@ -336,7 +346,7 @@ export class Chat {
336
346
  headers: this.options.headers,
337
347
  });
338
348
  trackUsage(response.usage);
339
- const assistantMessage = new ChatResponseString(response.content ?? "", response.usage ?? { input_tokens: 0, output_tokens: 0, total_tokens: 0 }, this.model, response.reasoning);
349
+ const assistantMessage = new ChatResponseString(response.content ?? "", response.usage ?? { input_tokens: 0, output_tokens: 0, total_tokens: 0 }, this.model, this.provider.id, response.reasoning);
340
350
  this.messages.push({
341
351
  role: "assistant",
342
352
  content: assistantMessage,
@@ -349,7 +359,7 @@ export class Chat {
349
359
  }
350
360
  // For the final return, we might want to aggregate reasoning too if it happened in multiple turns?
351
361
  // Usually reasoning only happens once or we just want the last one.
352
- return new ChatResponseString(response.content ?? "", totalUsage, this.model, response.reasoning);
362
+ return new ChatResponseString(response.content ?? "", totalUsage, this.model, this.provider.id, response.reasoning);
353
363
  }
354
364
  /**
355
365
  * Streams the model's response to a user question.
@@ -1,10 +1,10 @@
1
1
  import { Message } from "./Message.js";
2
- import { Tool } from "./Tool.js";
2
+ import { ToolDefinition } from "./Tool.js";
3
3
  import { Schema } from "../schema/Schema.js";
4
4
  export interface ChatOptions {
5
5
  systemPrompt?: string;
6
6
  messages?: Message[];
7
- tools?: Tool[];
7
+ tools?: ToolDefinition[];
8
8
  temperature?: number;
9
9
  maxTokens?: number;
10
10
  onNewMessage?: () => void;
@@ -1 +1 @@
1
- {"version":3,"file":"ChatOptions.d.ts","sourceRoot":"","sources":["../../src/chat/ChatOptions.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,OAAO,EAAE,MAAM,cAAc,CAAC;AACvC,OAAO,EAAE,IAAI,EAAE,MAAM,WAAW,CAAC;AACjC,OAAO,EAAE,MAAM,EAAE,MAAM,qBAAqB,CAAC;AAE7C,MAAM,WAAW,WAAW;IAC1B,YAAY,CAAC,EAAE,MAAM,CAAC;IACtB,QAAQ,CAAC,EAAE,OAAO,EAAE,CAAC;IACrB,KAAK,CAAC,EAAE,IAAI,EAAE,CAAC;IACf,WAAW,CAAC,EAAE,MAAM,CAAC;IACrB,SAAS,CAAC,EAAE,MAAM,CAAC;IACnB,YAAY,CAAC,EAAE,MAAM,IAAI,CAAC;IAC1B,YAAY,CAAC,EAAE,CAAC,OAAO,EAAE,GAAG,KAAK,IAAI,CAAC;IACtC,UAAU,CAAC,EAAE,CAAC,QAAQ,EAAE,GAAG,KAAK,IAAI,CAAC;IACrC,YAAY,CAAC,EAAE,CAAC,MAAM,EAAE,GAAG,KAAK,IAAI,CAAC;IACrC,OAAO,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;IACjC,MAAM,CAAC,EAAE,MAAM,CAAC;IAChB,cAAc,CAAC,EAAE;QAAE,IAAI,EAAE,aAAa,GAAG,MAAM,CAAA;KAAE,CAAC;IAClD,MAAM,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,GAAG,CAAC,CAAC;IAC7B,iBAAiB,CAAC,EAAE,OAAO,CAAC;IAC5B,QAAQ,CAAC,EAAE,MAAM,CAAC;CACnB"}
1
+ {"version":3,"file":"ChatOptions.d.ts","sourceRoot":"","sources":["../../src/chat/ChatOptions.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,OAAO,EAAE,MAAM,cAAc,CAAC;AACvC,OAAO,EAAE,cAAc,EAAE,MAAM,WAAW,CAAC;AAC3C,OAAO,EAAE,MAAM,EAAE,MAAM,qBAAqB,CAAC;AAE7C,MAAM,WAAW,WAAW;IAC1B,YAAY,CAAC,EAAE,MAAM,CAAC;IACtB,QAAQ,CAAC,EAAE,OAAO,EAAE,CAAC;IACrB,KAAK,CAAC,EAAE,cAAc,EAAE,CAAC;IACzB,WAAW,CAAC,EAAE,MAAM,CAAC;IACrB,SAAS,CAAC,EAAE,MAAM,CAAC;IACnB,YAAY,CAAC,EAAE,MAAM,IAAI,CAAC;IAC1B,YAAY,CAAC,EAAE,CAAC,OAAO,EAAE,GAAG,KAAK,IAAI,CAAC;IACtC,UAAU,CAAC,EAAE,CAAC,QAAQ,EAAE,GAAG,KAAK,IAAI,CAAC;IACrC,YAAY,CAAC,EAAE,CAAC,MAAM,EAAE,GAAG,KAAK,IAAI,CAAC;IACrC,OAAO,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;IACjC,MAAM,CAAC,EAAE,MAAM,CAAC;IAChB,cAAc,CAAC,EAAE;QAAE,IAAI,EAAE,aAAa,GAAG,MAAM,CAAA;KAAE,CAAC;IAClD,MAAM,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,GAAG,CAAC,CAAC;IAC7B,iBAAiB,CAAC,EAAE,OAAO,CAAC;IAC5B,QAAQ,CAAC,EAAE,MAAM,CAAC;CACnB"}
@@ -6,8 +6,9 @@ import { Usage } from "../providers/Provider.js";
6
6
  export declare class ChatResponseString extends String {
7
7
  readonly usage: Usage;
8
8
  readonly model: string;
9
+ readonly provider: string;
9
10
  readonly reasoning?: string | null | undefined;
10
- constructor(content: string, usage: Usage, model: string, reasoning?: string | null | undefined);
11
+ constructor(content: string, usage: Usage, model: string, provider: string, reasoning?: string | null | undefined);
11
12
  get input_tokens(): number;
12
13
  get output_tokens(): number;
13
14
  get total_tokens(): number;
@@ -1 +1 @@
1
- {"version":3,"file":"ChatResponse.d.ts","sourceRoot":"","sources":["../../src/chat/ChatResponse.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,KAAK,EAAE,MAAM,0BAA0B,CAAC;AAEjD;;;GAGG;AACH,qBAAa,kBAAmB,SAAQ,MAAM;aAG1B,KAAK,EAAE,KAAK;aACZ,KAAK,EAAE,MAAM;aACb,SAAS,CAAC,EAAE,MAAM,GAAG,IAAI;gBAHzC,OAAO,EAAE,MAAM,EACC,KAAK,EAAE,KAAK,EACZ,KAAK,EAAE,MAAM,EACb,SAAS,CAAC,EAAE,MAAM,GAAG,IAAI,YAAA;IAK3C,IAAI,YAAY,WAAsC;IACtD,IAAI,aAAa,WAAuC;IACxD,IAAI,YAAY,WAAsC;IACtD,IAAI,aAAa,uBAAuC;IACxD,IAAI,IAAI,uBAA8B;IACtC,IAAI,UAAU,uBAAoC;IAClD,IAAI,WAAW,uBAAqC;IAEpD,IAAI,OAAO,IAAI,MAAM,CAEpB;IAED,IAAI,QAAQ,IAAI,MAAM,CAErB;IAED,QAAQ;IAIR;;;OAGG;IACH,IAAI,MAAM,IAAI,GAAG,CAMhB;CACF"}
1
+ {"version":3,"file":"ChatResponse.d.ts","sourceRoot":"","sources":["../../src/chat/ChatResponse.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,KAAK,EAAE,MAAM,0BAA0B,CAAC;AAEjD;;;GAGG;AACH,qBAAa,kBAAmB,SAAQ,MAAM;aAG1B,KAAK,EAAE,KAAK;aACZ,KAAK,EAAE,MAAM;aACb,QAAQ,EAAE,MAAM;aAChB,SAAS,CAAC,EAAE,MAAM,GAAG,IAAI;gBAJzC,OAAO,EAAE,MAAM,EACC,KAAK,EAAE,KAAK,EACZ,KAAK,EAAE,MAAM,EACb,QAAQ,EAAE,MAAM,EAChB,SAAS,CAAC,EAAE,MAAM,GAAG,IAAI,YAAA;IAK3C,IAAI,YAAY,WAAsC;IACtD,IAAI,aAAa,WAAuC;IACxD,IAAI,YAAY,WAAsC;IACtD,IAAI,aAAa,uBAAuC;IACxD,IAAI,IAAI,uBAA8B;IACtC,IAAI,UAAU,uBAAoC;IAClD,IAAI,WAAW,uBAAqC;IAEpD,IAAI,OAAO,IAAI,MAAM,CAEpB;IAED,IAAI,QAAQ,IAAI,MAAM,CAErB;IAED,QAAQ;IAIR;;;OAGG;IACH,IAAI,MAAM,IAAI,GAAG,CAMhB;CACF"}
@@ -5,11 +5,13 @@
5
5
  export class ChatResponseString extends String {
6
6
  usage;
7
7
  model;
8
+ provider;
8
9
  reasoning;
9
- constructor(content, usage, model, reasoning) {
10
+ constructor(content, usage, model, provider, reasoning) {
10
11
  super(content);
11
12
  this.usage = usage;
12
13
  this.model = model;
14
+ this.provider = provider;
13
15
  this.reasoning = reasoning;
14
16
  }
15
17
  get input_tokens() { return this.usage.input_tokens; }
@@ -86,13 +86,12 @@ export class ChatStream {
86
86
  role: "assistant",
87
87
  content: fullContent || null,
88
88
  tool_calls: toolCalls,
89
- // @ts-ignore
90
89
  reasoning: fullReasoning || undefined
91
90
  });
92
91
  // If no tool calls, we're done
93
92
  if (!toolCalls || toolCalls.length === 0) {
94
93
  if (options.onEndMessage) {
95
- options.onEndMessage(new ChatResponseString(fullContent, { input_tokens: 0, output_tokens: 0, total_tokens: 0 }, model, fullReasoning || undefined));
94
+ options.onEndMessage(new ChatResponseString(fullContent, { input_tokens: 0, output_tokens: 0, total_tokens: 0 }, model, provider.id, fullReasoning || undefined));
96
95
  }
97
96
  break;
98
97
  }
@@ -9,5 +9,6 @@ export interface Message {
9
9
  tool_call_id?: string;
10
10
  name?: string;
11
11
  usage?: Usage;
12
+ reasoning?: string;
12
13
  }
13
14
  //# sourceMappingURL=Message.d.ts.map
@@ -1 +1 @@
1
- {"version":3,"file":"Message.d.ts","sourceRoot":"","sources":["../../src/chat/Message.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,IAAI,EAAE,MAAM,WAAW,CAAC;AACjC,OAAO,EAAE,QAAQ,EAAE,MAAM,WAAW,CAAC;AACrC,OAAO,EAAE,cAAc,EAAE,MAAM,cAAc,CAAC;AAC9C,OAAO,EAAE,KAAK,EAAE,MAAM,0BAA0B,CAAC;AAEjD,MAAM,WAAW,OAAO;IACtB,IAAI,EAAE,IAAI,CAAC;IACX,OAAO,EAAE,cAAc,GAAG,IAAI,CAAC;IAC/B,UAAU,CAAC,EAAE,QAAQ,EAAE,CAAC;IACxB,YAAY,CAAC,EAAE,MAAM,CAAC;IACtB,IAAI,CAAC,EAAE,MAAM,CAAC;IACd,KAAK,CAAC,EAAE,KAAK,CAAC;CACf"}
1
+ {"version":3,"file":"Message.d.ts","sourceRoot":"","sources":["../../src/chat/Message.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,IAAI,EAAE,MAAM,WAAW,CAAC;AACjC,OAAO,EAAE,QAAQ,EAAE,MAAM,WAAW,CAAC;AACrC,OAAO,EAAE,cAAc,EAAE,MAAM,cAAc,CAAC;AAC9C,OAAO,EAAE,KAAK,EAAE,MAAM,0BAA0B,CAAC;AAEjD,MAAM,WAAW,OAAO;IACtB,IAAI,EAAE,IAAI,CAAC;IACX,OAAO,EAAE,cAAc,GAAG,IAAI,CAAC;IAC/B,UAAU,CAAC,EAAE,QAAQ,EAAE,CAAC;IACxB,YAAY,CAAC,EAAE,MAAM,CAAC;IACtB,IAAI,CAAC,EAAE,MAAM,CAAC;IACd,KAAK,CAAC,EAAE,KAAK,CAAC;IACd,SAAS,CAAC,EAAE,MAAM,CAAC;CACpB"}