@node-llm/core 1.1.0 → 1.3.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (75) hide show
  1. package/README.md +243 -0
  2. package/dist/chat/Chat.d.ts +11 -8
  3. package/dist/chat/Chat.d.ts.map +1 -1
  4. package/dist/chat/Chat.js +24 -14
  5. package/dist/chat/ChatOptions.d.ts +2 -2
  6. package/dist/chat/ChatOptions.d.ts.map +1 -1
  7. package/dist/chat/ChatStream.d.ts.map +1 -1
  8. package/dist/chat/ChatStream.js +85 -34
  9. package/dist/chat/Tool.d.ts +35 -1
  10. package/dist/chat/Tool.d.ts.map +1 -1
  11. package/dist/chat/Tool.js +38 -1
  12. package/dist/config.d.ts +1 -1
  13. package/dist/errors/index.d.ts +2 -2
  14. package/dist/errors/index.js +3 -3
  15. package/dist/llm.d.ts +27 -15
  16. package/dist/llm.d.ts.map +1 -1
  17. package/dist/llm.js +37 -5
  18. package/dist/models/models.js +15 -15
  19. package/dist/providers/BaseProvider.d.ts +2 -1
  20. package/dist/providers/BaseProvider.d.ts.map +1 -1
  21. package/dist/providers/BaseProvider.js +4 -1
  22. package/dist/providers/Provider.d.ts +4 -2
  23. package/dist/providers/Provider.d.ts.map +1 -1
  24. package/dist/providers/anthropic/AnthropicProvider.d.ts +1 -0
  25. package/dist/providers/anthropic/AnthropicProvider.d.ts.map +1 -1
  26. package/dist/providers/anthropic/AnthropicProvider.js +3 -0
  27. package/dist/providers/anthropic/Chat.d.ts.map +1 -1
  28. package/dist/providers/anthropic/Chat.js +5 -1
  29. package/dist/providers/anthropic/Streaming.d.ts.map +1 -1
  30. package/dist/providers/anthropic/Streaming.js +49 -2
  31. package/dist/providers/deepseek/Chat.d.ts.map +1 -1
  32. package/dist/providers/deepseek/Chat.js +5 -4
  33. package/dist/providers/deepseek/DeepSeekProvider.d.ts +1 -0
  34. package/dist/providers/deepseek/DeepSeekProvider.d.ts.map +1 -1
  35. package/dist/providers/deepseek/DeepSeekProvider.js +3 -0
  36. package/dist/providers/deepseek/Streaming.d.ts.map +1 -1
  37. package/dist/providers/deepseek/Streaming.js +49 -3
  38. package/dist/providers/gemini/Chat.d.ts.map +1 -1
  39. package/dist/providers/gemini/Chat.js +3 -0
  40. package/dist/providers/gemini/Embeddings.d.ts.map +1 -1
  41. package/dist/providers/gemini/Embeddings.js +3 -0
  42. package/dist/providers/gemini/GeminiProvider.d.ts +1 -0
  43. package/dist/providers/gemini/GeminiProvider.d.ts.map +1 -1
  44. package/dist/providers/gemini/GeminiProvider.js +3 -0
  45. package/dist/providers/gemini/Image.d.ts.map +1 -1
  46. package/dist/providers/gemini/Image.js +3 -0
  47. package/dist/providers/gemini/Streaming.d.ts.map +1 -1
  48. package/dist/providers/gemini/Streaming.js +32 -1
  49. package/dist/providers/gemini/Transcription.d.ts.map +1 -1
  50. package/dist/providers/gemini/Transcription.js +3 -0
  51. package/dist/providers/ollama/OllamaProvider.d.ts +1 -0
  52. package/dist/providers/ollama/OllamaProvider.d.ts.map +1 -1
  53. package/dist/providers/ollama/OllamaProvider.js +3 -0
  54. package/dist/providers/openai/Chat.d.ts.map +1 -1
  55. package/dist/providers/openai/Chat.js +5 -4
  56. package/dist/providers/openai/Embedding.d.ts.map +1 -1
  57. package/dist/providers/openai/Embedding.js +5 -1
  58. package/dist/providers/openai/Image.d.ts.map +1 -1
  59. package/dist/providers/openai/Image.js +5 -1
  60. package/dist/providers/openai/Moderation.d.ts.map +1 -1
  61. package/dist/providers/openai/Moderation.js +12 -6
  62. package/dist/providers/openai/OpenAIProvider.d.ts +1 -0
  63. package/dist/providers/openai/OpenAIProvider.d.ts.map +1 -1
  64. package/dist/providers/openai/OpenAIProvider.js +3 -0
  65. package/dist/providers/openai/Streaming.d.ts.map +1 -1
  66. package/dist/providers/openai/Streaming.js +53 -4
  67. package/dist/providers/openai/Transcription.d.ts.map +1 -1
  68. package/dist/providers/openai/Transcription.js +9 -2
  69. package/dist/providers/registry.js +1 -1
  70. package/dist/utils/FileLoader.d.ts.map +1 -1
  71. package/dist/utils/FileLoader.js +2 -1
  72. package/dist/utils/logger.d.ts +9 -1
  73. package/dist/utils/logger.d.ts.map +1 -1
  74. package/dist/utils/logger.js +23 -1
  75. package/package.json +3 -3
package/README.md CHANGED
@@ -0,0 +1,243 @@
1
+ <p align="left">
2
+ <img src="https://github.com/eshaiju/node-llm/raw/main/docs/assets/images/logo.jpg" alt="NodeLLM logo" width="300" />
3
+ </p>
4
+
5
+ # NodeLLM
6
+ **An opinionated architectural layer for using Large Language Models in Node.js.**
7
+
8
+ Build chatbots, autonomous agents, and RAG pipelines without the SDK fatigue. NodeLLM provides a unified, production-oriented API for interacting with **540+ models** across multiple providers (OpenAI, Gemini, Anthropic, DeepSeek, OpenRouter, Ollama, etc.) without coupling your application to any single SDK.
9
+
10
+ <br/>
11
+
12
+ <p align="left">
13
+ <img src="https://registry.npmmirror.com/@lobehub/icons-static-svg/latest/files/icons/openai.svg" height="28" />
14
+ <img src="https://registry.npmmirror.com/@lobehub/icons-static-svg/latest/files/icons/openai-text.svg" height="22" />
15
+ &nbsp;&nbsp;&nbsp;&nbsp;
16
+ <img src="https://registry.npmmirror.com/@lobehub/icons-static-svg/latest/files/icons/anthropic-text.svg" height="18" />
17
+ &nbsp;&nbsp;&nbsp;&nbsp;
18
+ <img src="https://registry.npmmirror.com/@lobehub/icons-static-svg/latest/files/icons/gemini-color.svg" height="28" />
19
+ <img src="https://registry.npmmirror.com/@lobehub/icons-static-svg/latest/files/icons/gemini-text.svg" height="20" />
20
+ &nbsp;&nbsp;&nbsp;&nbsp;
21
+ <img src="https://registry.npmmirror.com/@lobehub/icons-static-svg/latest/files/icons/deepseek-color.svg" height="28" />
22
+ <img src="https://registry.npmmirror.com/@lobehub/icons-static-svg/latest/files/icons/deepseek-text.svg" height="20" />
23
+ &nbsp;&nbsp;&nbsp;&nbsp;
24
+ <img src="https://registry.npmmirror.com/@lobehub/icons-static-svg/latest/files/icons/openrouter.svg" height="28" />
25
+ <img src="https://registry.npmmirror.com/@lobehub/icons-static-svg/latest/files/icons/openrouter-text.svg" height="22" />
26
+ &nbsp;&nbsp;&nbsp;&nbsp;
27
+ <img src="https://registry.npmmirror.com/@lobehub/icons-static-svg/latest/files/icons/ollama.svg" height="28" />
28
+ <img src="https://registry.npmmirror.com/@lobehub/icons-static-svg/latest/files/icons/ollama-text.svg" height="18" />
29
+ </p>
30
+
31
+ <br/>
32
+
33
+ [![npm version](https://img.shields.io/npm/v/@node-llm/core.svg)](https://www.npmjs.com/package/@node-llm/core)
34
+ [![License: MIT](https://img.shields.io/badge/License-MIT-yellow.svg)](https://opensource.org/licenses/MIT)
35
+ [![TypeScript](https://img.shields.io/badge/TypeScript-Ready-blue.svg)](https://www.typescriptlang.org/)
36
+
37
+ ---
38
+
39
+ ## ⚡ The Golden Path
40
+
41
+ ```ts
42
+ import { NodeLLM } from "@node-llm/core";
43
+
44
+ // 1. Configure once
45
+ NodeLLM.configure({ provider: "openai" });
46
+
47
+ // 2. Chat (High-level request/response)
48
+ const chat = NodeLLM.chat("gpt-4o");
49
+ const response = await chat.ask("Explain event-driven architecture");
50
+ console.log(response.content);
51
+
52
+ // 3. Streaming (Standard AsyncIterator)
53
+ for await (const chunk of chat.stream("Explain event-driven architecture")) {
54
+ process.stdout.write(chunk.content);
55
+ }
56
+ ```
57
+
58
+ ---
59
+
60
+ ## Why `NodeLLM`?
61
+
62
+ Most AI integrations today are provider-specific, SDK-driven, and leaky at abstraction boundaries. This creates long-term architectural risk. Switching models should not mean a total rewrite of your business logic.
63
+
64
+ NodeLLM exists to solve **architectural problems**, not just provide API access.
65
+
66
+ ### Strategic Goals
67
+
68
+ - **Provider Isolation**: Your application logic never touches a provider-specific SDK.
69
+ - **Unified Mental Model**: Chat, streaming, tools, and structured outputs feel identical across providers.
70
+ - **Production-Ready**: Streaming, retries, and errors are first-class concerns.
71
+ - **The "Standard Library" Voice**: It provides a beautiful, native-feeling API for modern Node.js.
72
+
73
+ ### Non-Goals
74
+
75
+ - It is **not** a thin wrapper that mirrors every provider's unique API knobs.
76
+ - It is **not** a UI framework or a simple chatbot builder.
77
+ - It prioritizes **architectural clarity** over raw SDK convenience.
78
+
79
+ ---
80
+
81
+ ## 🔧 Strategic Configuration
82
+
83
+ NodeLLM provides a flexible configuration system designed for enterprise usage:
84
+
85
+ ```ts
86
+ // Recommended for multi-provider pipelines
87
+ NodeLLM.configure((config) => {
88
+ config.openaiApiKey = process.env.OPENAI_API_KEY;
89
+ config.anthropicApiKey = process.env.ANTHROPIC_API_KEY;
90
+ config.ollamaApiBase = process.env.OLLAMA_API_BASE;
91
+ });
92
+
93
+ // Switch providers at the framework level
94
+ NodeLLM.configure({ provider: "anthropic" });
95
+
96
+ // Support for Custom Endpoints (e.g., Azure or LocalAI)
97
+ NodeLLM.configure({
98
+ openaiApiKey: process.env.AZURE_KEY,
99
+ openaiApiBase: "https://your-resource.openai.azure.com/openai/deployments/...",
100
+ });
101
+ ```
102
+
103
+ **[Full Configuration Guide →](docs/getting_started/configuration.md)**
104
+
105
+ ---
106
+
107
+ ---
108
+
109
+ ## 🔮 Capabilities
110
+
111
+ ### 💬 Unified Chat
112
+ Stop rewriting code for every provider. `NodeLLM` normalizes inputs and outputs into a single, predictable mental model.
113
+ ```ts
114
+ const chat = NodeLLM.chat(); // Defaults to GPT-4o
115
+ await chat.ask("Hello world");
116
+ ```
117
+
118
+ ### 👁️ Smart Vision & Files
119
+ Pass images, PDFs, or audio files directly. We handle the heavy lifting: fetching remote URLs, base64 encoding, and MIME type mapping.
120
+ ```ts
121
+ await chat.ask("Analyze this interface", {
122
+ files: ["./screenshot.png", "https://example.com/spec.pdf"]
123
+ });
124
+ ```
125
+
126
+ ### 🛠️ Auto-Executing Tools
127
+ Define tools once; `NodeLLM` manages the recursive execution loop for you, keeping your controller logic clean. **Works seamlessly with both regular chat and streaming!**
128
+
129
+ ```ts
130
+ // Class-based DSL
131
+ class WeatherTool extends Tool {
132
+ name = "get_weather";
133
+ description = "Get current weather";
134
+ schema = z.object({ location: z.string() });
135
+ async execute({ location }) { return `Sunny in ${location}`; }
136
+ }
137
+
138
+ // Register tools
139
+ chat.withTools([WeatherTool]);
140
+
141
+ // Now the model can use it automatically
142
+ await chat.ask("What's the weather in Tokyo?");
143
+ ```
144
+ **[Full Tool Calling Guide →](https://node-llm.eshaiju.com/core-features/tool-calling)**
145
+
146
+ ### 🔍 Comprehensive Debug Logging
147
+ Enable detailed logging for all API requests and responses across every feature and provider:
148
+ ```ts
149
+ // Set environment variable
150
+ process.env.NODELLM_DEBUG = "true";
151
+
152
+ // Now see detailed logs for every API call:
153
+ // [NodeLLM] [OpenAI] Request: POST https://api.openai.com/v1/chat/completions
154
+ // { "model": "gpt-4o", "messages": [...] }
155
+ // [NodeLLM] [OpenAI] Response: 200 OK
156
+ // { "id": "chatcmpl-123", ... }
157
+ ```
158
+ **Covers:** Chat, Streaming, Images, Embeddings, Transcription, Moderation - across all providers!
159
+
160
+ ### ✨ Structured Output
161
+ Get type-safe, validated JSON back using **Zod** schemas.
162
+ ```ts
163
+ import { z } from "@node-llm/core";
164
+ const Product = z.object({ name: z.string(), price: z.number() });
165
+
166
+ const res = await chat.withSchema(Product).ask("Generate a gadget");
167
+ console.log(res.parsed.name); // Full type-safety
168
+ ```
169
+
170
+ ### 🎨 Image Generation
171
+ ```ts
172
+ await NodeLLM.paint("A cyberpunk city in rain");
173
+ ```
174
+
175
+ ### 🎤 Audio Transcription
176
+ ```ts
177
+ await NodeLLM.transcribe("meeting-recording.wav");
178
+ ```
179
+
180
+ ### ⚡ Scoped Parallelism
181
+ Run multiple providers in parallel safely without global configuration side effects using isolated contexts.
182
+ ```ts
183
+ const [gpt, claude] = await Promise.all([
184
+ // Each call branches off into its own isolated context
185
+ NodeLLM.withProvider("openai").chat("gpt-4o").ask(prompt),
186
+ NodeLLM.withProvider("anthropic").chat("claude-3-5-sonnet").ask(prompt),
187
+ ]);
188
+ ```
189
+
190
+ ### 🧠 Deep Reasoning
191
+ Direct access to the thought process of models like **DeepSeek R1** or **OpenAI o1/o3** using the `.reasoning` field.
192
+ ```ts
193
+ const res = await NodeLLM.chat("deepseek-reasoner").ask("Solve this logical puzzle");
194
+ console.log(res.reasoning); // Chain-of-thought
195
+ ```
196
+
197
+ ---
198
+
199
+ ## 🚀 Why use this over official SDKs?
200
+
201
+ | Feature | `NodeLLM` | Official SDKs | Architectural Impact |
202
+ | :--- | :--- | :--- | :--- |
203
+ | **Provider Logic** | Transparently Handled | Exposed to your code | **Low Coupling** |
204
+ | **Streaming** | Standard `AsyncIterator` | Vendor-specific Events | **Predictable Data Flow** |
205
+ | **Streaming + Tools** | Automated Execution | Manual implementation | **Seamless UX** |
206
+ | **Tool Loops** | Automated Recursion | Manual implementation | **Reduced Boilerplate** |
207
+ | **Files/Vision** | Intelligent Path/URL handling | Base64/Buffer management | **Cleaner Service Layer** |
208
+ | **Configuration** | Centralized & Global | Per-instance initialization | **Easier Lifecycle Mgmt** |
209
+
210
+ ---
211
+
212
+ ## 📋 Supported Providers
213
+
214
+ | Provider | Supported Features |
215
+ | :--- | :--- |
216
+ | <img src="https://registry.npmmirror.com/@lobehub/icons-static-svg/latest/files/icons/openai.svg" height="18"> **OpenAI** | Chat, **Streaming + Tools**, Vision, Audio, Images, Transcription, **Reasoning** |
217
+ | <img src="https://registry.npmmirror.com/@lobehub/icons-static-svg/latest/files/icons/gemini-color.svg" height="18"> **Gemini** | Chat, **Streaming + Tools**, Vision, Audio, Video, Embeddings |
218
+ | <img src="https://registry.npmmirror.com/@lobehub/icons-static-svg/latest/files/icons/anthropic-text.svg" height="12"> **Anthropic** | Chat, **Streaming + Tools**, Vision, PDF, Structured Output |
219
+ | <img src="https://registry.npmmirror.com/@lobehub/icons-static-svg/latest/files/icons/deepseek-color.svg" height="18"> **DeepSeek** | Chat (V3), **Reasoning (R1)**, **Streaming + Tools** |
220
+ | <img src="https://registry.npmmirror.com/@lobehub/icons-static-svg/latest/files/icons/openrouter.svg" height="18"> **OpenRouter** | **Aggregator**, Chat, Streaming, Tools, Vision, Embeddings, **Reasoning** |
221
+ | <img src="https://registry.npmmirror.com/@lobehub/icons-static-svg/latest/files/icons/ollama.svg" height="18"> **Ollama** | **Local Inference**, Chat, Streaming, Tools, Vision, Embeddings |
222
+
223
+ ---
224
+
225
+ ## 📚 Documentation & Installation
226
+
227
+ ```bash
228
+ npm install @node-llm/core
229
+ ```
230
+
231
+ **[View Full Documentation ↗](https://node-llm.eshaiju.com/)**
232
+
233
+ ---
234
+
235
+ ## 🫶 Credits
236
+
237
+ Heavily inspired by the elegant design of [RubyLLM](https://rubyllm.com/).
238
+
239
+ ---
240
+
241
+ ## 📄 License
242
+
243
+ MIT © [NodeLLM contributors]
@@ -27,6 +27,7 @@ export declare class Chat {
27
27
  * Read-only access to message history
28
28
  */
29
29
  get history(): readonly Message[];
30
+ get modelId(): string;
30
31
  /**
31
32
  * Aggregate usage across the entire conversation
32
33
  */
@@ -37,14 +38,16 @@ export declare class Chat {
37
38
  */
38
39
  withTool(tool: any): this;
39
40
  /**
40
- * Add multiple tools to the chat session.
41
- * Supports passing tool instances or classes (which will be instantiated).
42
- * Can replace existing tools if options.replace is true.
43
- *
44
- * @example
45
- * chat.withTools([WeatherTool, new CalculatorTool()], { replace: true });
46
- */
47
- withTools(tools: (Tool | any)[], options?: {
41
+ * Add multiple tools to the chat session.
42
+ * Supports passing Tool classes (which will be instantiated) or instances.
43
+ * Can replace existing tools if options.replace is true.
44
+ *
45
+ * @example
46
+ * chat.withTools([WeatherTool, new CalculatorTool()], { replace: true });
47
+ */
48
+ withTools(tools: (Tool | {
49
+ new (): Tool;
50
+ } | any)[], options?: {
48
51
  replace?: boolean;
49
52
  }): this;
50
53
  /**
@@ -1 +1 @@
1
- {"version":3,"file":"Chat.d.ts","sourceRoot":"","sources":["../../src/chat/Chat.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,OAAO,EAAE,MAAM,cAAc,CAAC;AACvC,OAAO,EAAE,WAAW,EAAE,MAAM,kBAAkB,CAAC;AAC/C,OAAO,EAAE,QAAQ,EAAE,KAAK,EAAE,SAAS,EAAE,MAAM,0BAA0B,CAAC;AAGtE,OAAO,EAAE,MAAM,EAAE,MAAM,wBAAwB,CAAC;AAChD,OAAO,EAAE,IAAI,EAAE,MAAM,WAAW,CAAC;AACjC,OAAO,EAAE,MAAM,EAAE,MAAM,qBAAqB,CAAC;AAE7C,OAAO,EAAE,CAAC,EAAE,MAAM,KAAK,CAAC;AAExB,MAAM,WAAW,UAAU;IACzB,MAAM,CAAC,EAAE,MAAM,EAAE,CAAC;IAClB,KAAK,CAAC,EAAE,MAAM,EAAE,CAAC;IACjB,WAAW,CAAC,EAAE,MAAM,CAAC;IACrB,SAAS,CAAC,EAAE,MAAM,CAAC;IACnB,OAAO,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;CAClC;AAED,OAAO,EAAE,kBAAkB,EAAE,MAAM,mBAAmB,CAAC;AAEvD,qBAAa,IAAI;IAKb,OAAO,CAAC,QAAQ,CAAC,QAAQ;IACzB,OAAO,CAAC,KAAK;IACb,OAAO,CAAC,QAAQ,CAAC,OAAO;IAN1B,OAAO,CAAC,QAAQ,CAAiB;IACjC,OAAO,CAAC,QAAQ,CAAW;gBAGR,QAAQ,EAAE,QAAQ,EAC3B,KAAK,EAAE,MAAM,EACJ,OAAO,GAAE,WAAgB,EAC1C,WAAW,GAAE;QAAE,QAAQ,EAAE,MAAM,CAAC;QAAC,OAAO,EAAE,MAAM,CAAA;KAAgC;IAmBlF;;OAEG;IACH,IAAI,OAAO,IAAI,SAAS,OAAO,EAAE,CAEhC;IAED;;OAEG;IACH,IAAI,UAAU,IAAI,KAAK,CAetB;IAED;;;OAGG;IACH,QAAQ,CAAC,IAAI,EAAE,GAAG,GAAG,IAAI;IAIzB;;;;;;;OAOG;IACH,SAAS,CAAC,KAAK,EAAE,CAAC,IAAI,GAAG,GAAG,CAAC,EAAE,EAAE,OAAO,CAAC,EAAE;QAAE,OAAO,CAAC,EAAE,OAAO,CAAA;KAAE,GAAG,IAAI;IA2BvE;;;;OAIG;IACH,gBAAgB,CAAC,WAAW,EAAE,MAAM,EAAE,OAAO,CAAC,EAAE;QAAE,OAAO,CAAC,EAAE,OAAO,CAAA;KAAE,GAAG,IAAI;IAmB5E;;OAEG;IACH,gBAAgB,CAAC,WAAW,EAAE,MAAM,EAAE,OAAO,CAAC,EAAE;QAAE,OAAO,CAAC,EAAE,OAAO,CAAA;KAAE,GAAG,IAAI;IAI5E;;;OAGG;IACH,eAAe,CAAC,IAAI,EAAE,MAAM,GAAG,IAAI;IAKnC;;OAEG;IACH,SAAS,CAAC,KAAK,EAAE,MAAM,GAAG,IAAI;IAK9B;;;OAGG;IACH,kBAAkB,CAAC,OAAO,EAAE;QAAE,OAAO,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QAAC,cAAc,CAAC,EAAE,GAAG,CAAA;KAAE,GAAG,IAAI;IAU7F;;;OAGG;IACH,UAAU,CAAC,MAAM,EAAE,MAAM,CAAC,MAAM,EAAE,GAAG,CAAC,GAAG,IAAI;IAK7C;;;OAGG;IACH,UAAU,CAAC,MAAM,EAAE,MAAM,GAAG,CAAC,CAAC,OAAO,CAAC,GAAG,CAAC,GAAG,MAAM,CAAC,MAAM,EAAE,GAAG,CAAC,GAAG,IAAI,GAAG,IAAI;IAkB9E,YAAY,CAAC,OAAO,EAAE,MAAM,IAAI,GAAG,IAAI;IAKvC,YAAY,CAAC,OAA
O,EAAE,CAAC,OAAO,EAAE,kBAAkB,KAAK,IAAI,GAAG,IAAI;IAKlE,UAAU,CAAC,OAAO,EAAE,CAAC,QAAQ,EAAE,GAAG,KAAK,IAAI,GAAG,IAAI;IAKlD,YAAY,CAAC,OAAO,EAAE,CAAC,MAAM,EAAE,GAAG,KAAK,IAAI,GAAG,IAAI;IAKlD;;OAEG;IACG,GAAG,CAAC,OAAO,EAAE,MAAM,GAAG,GAAG,EAAE,EAAE,OAAO,CAAC,EAAE,UAAU,GAAG,OAAO,CAAC,kBAAkB,CAAC;IAgMrF;;OAEG;IACH,MAAM,CAAC,OAAO,EAAE,MAAM,GAAG,MAAM,CAAC,SAAS,CAAC;CAI3C"}
1
+ {"version":3,"file":"Chat.d.ts","sourceRoot":"","sources":["../../src/chat/Chat.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,OAAO,EAAE,MAAM,cAAc,CAAC;AACvC,OAAO,EAAE,WAAW,EAAE,MAAM,kBAAkB,CAAC;AAC/C,OAAO,EAAE,QAAQ,EAAE,KAAK,EAAE,SAAS,EAAE,MAAM,0BAA0B,CAAC;AAGtE,OAAO,EAAE,MAAM,EAAE,MAAM,wBAAwB,CAAC;AAChD,OAAO,EAAE,IAAI,EAAkB,MAAM,WAAW,CAAC;AACjD,OAAO,EAAE,MAAM,EAAE,MAAM,qBAAqB,CAAC;AAE7C,OAAO,EAAE,CAAC,EAAE,MAAM,KAAK,CAAC;AAExB,MAAM,WAAW,UAAU;IACzB,MAAM,CAAC,EAAE,MAAM,EAAE,CAAC;IAClB,KAAK,CAAC,EAAE,MAAM,EAAE,CAAC;IACjB,WAAW,CAAC,EAAE,MAAM,CAAC;IACrB,SAAS,CAAC,EAAE,MAAM,CAAC;IACnB,OAAO,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;CAClC;AAED,OAAO,EAAE,kBAAkB,EAAE,MAAM,mBAAmB,CAAC;AAEvD,qBAAa,IAAI;IAKb,OAAO,CAAC,QAAQ,CAAC,QAAQ;IACzB,OAAO,CAAC,KAAK;IACb,OAAO,CAAC,QAAQ,CAAC,OAAO;IAN1B,OAAO,CAAC,QAAQ,CAAiB;IACjC,OAAO,CAAC,QAAQ,CAAW;gBAGR,QAAQ,EAAE,QAAQ,EAC3B,KAAK,EAAE,MAAM,EACJ,OAAO,GAAE,WAAgB,EAC1C,WAAW,GAAE;QAAE,QAAQ,EAAE,MAAM,CAAC;QAAC,OAAO,EAAE,MAAM,CAAA;KAAgC;IAmBlF;;OAEG;IACH,IAAI,OAAO,IAAI,SAAS,OAAO,EAAE,CAEhC;IAED,IAAI,OAAO,IAAI,MAAM,CAEpB;IAED;;OAEG;IACH,IAAI,UAAU,IAAI,KAAK,CAetB;IAED;;;OAGG;IACH,QAAQ,CAAC,IAAI,EAAE,GAAG,GAAG,IAAI;IAIvB;;;;;;;KAOC;IACH,SAAS,CAAC,KAAK,EAAE,CAAC,IAAI,GAAG;QAAE,QAAO,IAAI,CAAA;KAAE,GAAG,GAAG,CAAC,EAAE,EAAE,OAAO,CAAC,EAAE;QAAE,OAAO,CAAC,EAAE,OAAO,CAAA;KAAE,GAAG,IAAI;IAmCzF;;;;OAIG;IACH,gBAAgB,CAAC,WAAW,EAAE,MAAM,EAAE,OAAO,CAAC,EAAE;QAAE,OAAO,CAAC,EAAE,OAAO,CAAA;KAAE,GAAG,IAAI;IAmB5E;;OAEG;IACH,gBAAgB,CAAC,WAAW,EAAE,MAAM,EAAE,OAAO,CAAC,EAAE;QAAE,OAAO,CAAC,EAAE,OAAO,CAAA;KAAE,GAAG,IAAI;IAI5E;;;OAGG;IACH,eAAe,CAAC,IAAI,EAAE,MAAM,GAAG,IAAI;IAKnC;;OAEG;IACH,SAAS,CAAC,KAAK,EAAE,MAAM,GAAG,IAAI;IAK9B;;;OAGG;IACH,kBAAkB,CAAC,OAAO,EAAE;QAAE,OAAO,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QAAC,cAAc,CAAC,EAAE,GAAG,CAAA;KAAE,GAAG,IAAI;IAU7F;;;OAGG;IACH,UAAU,CAAC,MAAM,EAAE,MAAM,CAAC,MAAM,EAAE,GAAG,CAAC,GAAG,IAAI;IAK7C;;;OAGG;IACH,UAAU,CAAC,MAAM,EAAE,MAAM,GAAG,CAAC,CAAC,OAAO,CAAC,GAAG,CAAC,GAAG,MAAM,CAAC,MAAM,EAAE,GAAG,CAAC,GAAG,IAAI,GAAG,IAAI;IAkB
9E,YAAY,CAAC,OAAO,EAAE,MAAM,IAAI,GAAG,IAAI;IAKvC,YAAY,CAAC,OAAO,EAAE,CAAC,OAAO,EAAE,kBAAkB,KAAK,IAAI,GAAG,IAAI;IAKlE,UAAU,CAAC,OAAO,EAAE,CAAC,QAAQ,EAAE,GAAG,KAAK,IAAI,GAAG,IAAI;IAKlD,YAAY,CAAC,OAAO,EAAE,CAAC,MAAM,EAAE,GAAG,KAAK,IAAI,GAAG,IAAI;IAKlD;;OAEG;IACG,GAAG,CAAC,OAAO,EAAE,MAAM,GAAG,GAAG,EAAE,EAAE,OAAO,CAAC,EAAE,UAAU,GAAG,OAAO,CAAC,kBAAkB,CAAC;IAgMrF;;OAEG;IACH,MAAM,CAAC,OAAO,EAAE,MAAM,GAAG,MAAM,CAAC,SAAS,CAAC;CAI3C"}
package/dist/chat/Chat.js CHANGED
@@ -32,6 +32,9 @@ export class Chat {
32
32
  get history() {
33
33
  return this.messages;
34
34
  }
35
+ get modelId() {
36
+ return this.model;
37
+ }
35
38
  /**
36
39
  * Aggregate usage across the entire conversation
37
40
  */
@@ -56,13 +59,13 @@ export class Chat {
56
59
  return this.withTools([tool]);
57
60
  }
58
61
  /**
59
- * Add multiple tools to the chat session.
60
- * Supports passing tool instances or classes (which will be instantiated).
61
- * Can replace existing tools if options.replace is true.
62
- *
63
- * @example
64
- * chat.withTools([WeatherTool, new CalculatorTool()], { replace: true });
65
- */
62
+ * Add multiple tools to the chat session.
63
+ * Supports passing Tool classes (which will be instantiated) or instances.
64
+ * Can replace existing tools if options.replace is true.
65
+ *
66
+ * @example
67
+ * chat.withTools([WeatherTool, new CalculatorTool()], { replace: true });
68
+ */
66
69
  withTools(tools, options) {
67
70
  if (options?.replace) {
68
71
  this.options.tools = [];
@@ -71,20 +74,27 @@ export class Chat {
71
74
  this.options.tools = [];
72
75
  }
73
76
  for (const tool of tools) {
77
+ let toolInstance;
78
+ // Handle class constructor
74
79
  if (typeof tool === "function") {
75
80
  try {
76
- // Attempt to instantiate if it's a class
77
- this.options.tools.push(new tool());
81
+ toolInstance = new tool();
78
82
  }
79
83
  catch (e) {
80
- // If instantiation fails, it might be a function tool or require args?
81
- // For now, assuming classes with no-arg constructors as per convention.
82
- console.warn("Attempted to instantiate tool class but failed, adding as-is", e);
83
- this.options.tools.push(tool);
84
+ console.error(`[NodeLLM] Failed to instantiate tool class: ${tool.name}`, e);
85
+ continue;
84
86
  }
85
87
  }
86
88
  else {
87
- this.options.tools.push(tool);
89
+ toolInstance = tool;
90
+ }
91
+ // Normalized to standard ToolDefinition interface if it's a Tool class instance
92
+ if (toolInstance && typeof toolInstance.toLLMTool === "function") {
93
+ this.options.tools.push(toolInstance.toLLMTool());
94
+ }
95
+ else {
96
+ // Fallback for legacy raw tool objects (defined as objects with type: 'function')
97
+ this.options.tools.push(toolInstance);
88
98
  }
89
99
  }
90
100
  return this;
@@ -1,10 +1,10 @@
1
1
  import { Message } from "./Message.js";
2
- import { Tool } from "./Tool.js";
2
+ import { ToolDefinition } from "./Tool.js";
3
3
  import { Schema } from "../schema/Schema.js";
4
4
  export interface ChatOptions {
5
5
  systemPrompt?: string;
6
6
  messages?: Message[];
7
- tools?: Tool[];
7
+ tools?: ToolDefinition[];
8
8
  temperature?: number;
9
9
  maxTokens?: number;
10
10
  onNewMessage?: () => void;
@@ -1 +1 @@
1
- {"version":3,"file":"ChatOptions.d.ts","sourceRoot":"","sources":["../../src/chat/ChatOptions.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,OAAO,EAAE,MAAM,cAAc,CAAC;AACvC,OAAO,EAAE,IAAI,EAAE,MAAM,WAAW,CAAC;AACjC,OAAO,EAAE,MAAM,EAAE,MAAM,qBAAqB,CAAC;AAE7C,MAAM,WAAW,WAAW;IAC1B,YAAY,CAAC,EAAE,MAAM,CAAC;IACtB,QAAQ,CAAC,EAAE,OAAO,EAAE,CAAC;IACrB,KAAK,CAAC,EAAE,IAAI,EAAE,CAAC;IACf,WAAW,CAAC,EAAE,MAAM,CAAC;IACrB,SAAS,CAAC,EAAE,MAAM,CAAC;IACnB,YAAY,CAAC,EAAE,MAAM,IAAI,CAAC;IAC1B,YAAY,CAAC,EAAE,CAAC,OAAO,EAAE,GAAG,KAAK,IAAI,CAAC;IACtC,UAAU,CAAC,EAAE,CAAC,QAAQ,EAAE,GAAG,KAAK,IAAI,CAAC;IACrC,YAAY,CAAC,EAAE,CAAC,MAAM,EAAE,GAAG,KAAK,IAAI,CAAC;IACrC,OAAO,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;IACjC,MAAM,CAAC,EAAE,MAAM,CAAC;IAChB,cAAc,CAAC,EAAE;QAAE,IAAI,EAAE,aAAa,GAAG,MAAM,CAAA;KAAE,CAAC;IAClD,MAAM,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,GAAG,CAAC,CAAC;IAC7B,iBAAiB,CAAC,EAAE,OAAO,CAAC;IAC5B,QAAQ,CAAC,EAAE,MAAM,CAAC;CACnB"}
1
+ {"version":3,"file":"ChatOptions.d.ts","sourceRoot":"","sources":["../../src/chat/ChatOptions.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,OAAO,EAAE,MAAM,cAAc,CAAC;AACvC,OAAO,EAAE,cAAc,EAAE,MAAM,WAAW,CAAC;AAC3C,OAAO,EAAE,MAAM,EAAE,MAAM,qBAAqB,CAAC;AAE7C,MAAM,WAAW,WAAW;IAC1B,YAAY,CAAC,EAAE,MAAM,CAAC;IACtB,QAAQ,CAAC,EAAE,OAAO,EAAE,CAAC;IACrB,KAAK,CAAC,EAAE,cAAc,EAAE,CAAC;IACzB,WAAW,CAAC,EAAE,MAAM,CAAC;IACrB,SAAS,CAAC,EAAE,MAAM,CAAC;IACnB,YAAY,CAAC,EAAE,MAAM,IAAI,CAAC;IAC1B,YAAY,CAAC,EAAE,CAAC,OAAO,EAAE,GAAG,KAAK,IAAI,CAAC;IACtC,UAAU,CAAC,EAAE,CAAC,QAAQ,EAAE,GAAG,KAAK,IAAI,CAAC;IACrC,YAAY,CAAC,EAAE,CAAC,MAAM,EAAE,GAAG,KAAK,IAAI,CAAC;IACrC,OAAO,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;IACjC,MAAM,CAAC,EAAE,MAAM,CAAC;IAChB,cAAc,CAAC,EAAE;QAAE,IAAI,EAAE,aAAa,GAAG,MAAM,CAAA;KAAE,CAAC;IAClD,MAAM,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,GAAG,CAAC,CAAC;IAC7B,iBAAiB,CAAC,EAAE,OAAO,CAAC;IAC5B,QAAQ,CAAC,EAAE,MAAM,CAAC;CACnB"}
@@ -1 +1 @@
1
- {"version":3,"file":"ChatStream.d.ts","sourceRoot":"","sources":["../../src/chat/ChatStream.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,OAAO,EAAE,MAAM,cAAc,CAAC;AACvC,OAAO,EAAE,WAAW,EAAE,MAAM,kBAAkB,CAAC;AAC/C,OAAO,EAAE,QAAQ,EAAE,SAAS,EAAE,MAAM,0BAA0B,CAAC;AAE/D,OAAO,EAAE,MAAM,EAAE,MAAM,wBAAwB,CAAC;AAEhD;;;GAGG;AACH,qBAAa,UAAU;IAInB,OAAO,CAAC,QAAQ,CAAC,QAAQ;IACzB,OAAO,CAAC,QAAQ,CAAC,KAAK;IACtB,OAAO,CAAC,QAAQ,CAAC,OAAO;IAL1B,OAAO,CAAC,QAAQ,CAAY;gBAGT,QAAQ,EAAE,QAAQ,EAClB,KAAK,EAAE,MAAM,EACb,OAAO,GAAE,WAAgB,EAC1C,QAAQ,CAAC,EAAE,OAAO,EAAE;IAmBtB;;OAEG;IACH,IAAI,OAAO,IAAI,SAAS,OAAO,EAAE,CAEhC;IAED;;;OAGG;IACH,MAAM,CAAC,OAAO,EAAE,MAAM,GAAG,MAAM,CAAC,SAAS,CAAC;CAyE3C"}
1
+ {"version":3,"file":"ChatStream.d.ts","sourceRoot":"","sources":["../../src/chat/ChatStream.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,OAAO,EAAE,MAAM,cAAc,CAAC;AACvC,OAAO,EAAE,WAAW,EAAE,MAAM,kBAAkB,CAAC;AAC/C,OAAO,EAAE,QAAQ,EAAE,SAAS,EAAE,MAAM,0BAA0B,CAAC;AAE/D,OAAO,EAAE,MAAM,EAAE,MAAM,wBAAwB,CAAC;AAEhD;;;GAGG;AACH,qBAAa,UAAU;IAInB,OAAO,CAAC,QAAQ,CAAC,QAAQ;IACzB,OAAO,CAAC,QAAQ,CAAC,KAAK;IACtB,OAAO,CAAC,QAAQ,CAAC,OAAO;IAL1B,OAAO,CAAC,QAAQ,CAAY;gBAGT,QAAQ,EAAE,QAAQ,EAClB,KAAK,EAAE,MAAM,EACb,OAAO,GAAE,WAAgB,EAC1C,QAAQ,CAAC,EAAE,OAAO,EAAE;IAmBtB;;OAEG;IACH,IAAI,OAAO,IAAI,SAAS,OAAO,EAAE,CAEhC;IAED;;;OAGG;IACH,MAAM,CAAC,OAAO,EAAE,MAAM,GAAG,MAAM,CAAC,SAAS,CAAC;CAkI3C"}
@@ -45,47 +45,98 @@ export class ChatStream {
45
45
  if (!provider.stream) {
46
46
  throw new Error("Streaming not supported by provider");
47
47
  }
48
- let full = "";
48
+ let fullContent = "";
49
49
  let fullReasoning = "";
50
+ let toolCalls;
50
51
  let isFirst = true;
51
- try {
52
- for await (const chunk of provider.stream({
53
- model,
54
- messages,
55
- temperature: options.temperature,
56
- max_tokens: options.maxTokens,
57
- signal: abortController.signal,
58
- })) {
59
- if (isFirst) {
60
- if (options.onNewMessage)
61
- options.onNewMessage();
62
- isFirst = false;
52
+ // Main streaming loop - may iterate multiple times for tool calls
53
+ while (true) {
54
+ fullContent = "";
55
+ fullReasoning = "";
56
+ toolCalls = undefined;
57
+ try {
58
+ for await (const chunk of provider.stream({
59
+ model,
60
+ messages,
61
+ tools: options.tools,
62
+ temperature: options.temperature,
63
+ max_tokens: options.maxTokens,
64
+ signal: abortController.signal,
65
+ })) {
66
+ if (isFirst) {
67
+ if (options.onNewMessage)
68
+ options.onNewMessage();
69
+ isFirst = false;
70
+ }
71
+ if (chunk.content) {
72
+ fullContent += chunk.content;
73
+ yield chunk;
74
+ }
75
+ if (chunk.reasoning) {
76
+ fullReasoning += chunk.reasoning;
77
+ yield { content: "", reasoning: chunk.reasoning };
78
+ }
79
+ // Accumulate tool calls from the final chunk
80
+ if (chunk.tool_calls) {
81
+ toolCalls = chunk.tool_calls;
82
+ }
63
83
  }
64
- if (chunk.content) {
65
- full += chunk.content;
84
+ // Add assistant message to history
85
+ messages.push({
86
+ role: "assistant",
87
+ content: fullContent || null,
88
+ tool_calls: toolCalls,
89
+ // @ts-ignore
90
+ reasoning: fullReasoning || undefined
91
+ });
92
+ // If no tool calls, we're done
93
+ if (!toolCalls || toolCalls.length === 0) {
94
+ if (options.onEndMessage) {
95
+ options.onEndMessage(new ChatResponseString(fullContent, { input_tokens: 0, output_tokens: 0, total_tokens: 0 }, model, fullReasoning || undefined));
96
+ }
97
+ break;
66
98
  }
67
- if (chunk.reasoning) {
68
- fullReasoning += chunk.reasoning;
99
+ // Execute tool calls
100
+ for (const toolCall of toolCalls) {
101
+ if (options.onToolCall)
102
+ options.onToolCall(toolCall);
103
+ const tool = options.tools?.find((t) => t.function.name === toolCall.function.name);
104
+ if (tool?.handler) {
105
+ try {
106
+ const args = JSON.parse(toolCall.function.arguments);
107
+ const result = await tool.handler(args);
108
+ if (options.onToolResult)
109
+ options.onToolResult(result);
110
+ messages.push({
111
+ role: "tool",
112
+ tool_call_id: toolCall.id,
113
+ content: result,
114
+ });
115
+ }
116
+ catch (error) {
117
+ messages.push({
118
+ role: "tool",
119
+ tool_call_id: toolCall.id,
120
+ content: `Error executing tool: ${error.message}`,
121
+ });
122
+ }
123
+ }
124
+ else {
125
+ messages.push({
126
+ role: "tool",
127
+ tool_call_id: toolCall.id,
128
+ content: "Error: Tool not found or no handler provided",
129
+ });
130
+ }
69
131
  }
70
- yield chunk;
132
+ // Continue loop to stream the next response after tool execution
71
133
  }
72
- // Finalize history
73
- messages.push({
74
- role: "assistant",
75
- content: full,
76
- // @ts-ignore
77
- reasoning: fullReasoning || undefined
78
- });
79
- if (options.onEndMessage) {
80
- options.onEndMessage(new ChatResponseString(full, { input_tokens: 0, output_tokens: 0, total_tokens: 0 }, model, fullReasoning || undefined));
81
- }
82
- }
83
- catch (error) {
84
- if (error instanceof Error && error.name === 'AbortError') {
85
- // Stream was aborted, we might still want to save what we got?
86
- // For now just rethrow or handle as needed
134
+ catch (error) {
135
+ if (error instanceof Error && error.name === 'AbortError') {
136
+ // Stream was aborted
137
+ }
138
+ throw error;
87
139
  }
88
- throw error;
89
140
  }
90
141
  };
91
142
  return new Stream(() => sideEffectGenerator(this.provider, this.model, this.messages, this.options, controller), controller);
@@ -1,3 +1,4 @@
1
+ import { z } from "zod";
1
2
  export interface ToolCall {
2
3
  id: string;
3
4
  type: 'function';
@@ -6,7 +7,7 @@ export interface ToolCall {
6
7
  arguments: string;
7
8
  };
8
9
  }
9
- export interface Tool {
10
+ export interface ToolDefinition {
10
11
  type: 'function';
11
12
  function: {
12
13
  name: string;
@@ -15,4 +16,37 @@ export interface Tool {
15
16
  };
16
17
  handler?: (args: any) => Promise<string>;
17
18
  }
19
+ /**
20
+ * Subclass this to create tools with auto-generated schemas and type safety.
21
+ */
22
+ export declare abstract class Tool<T = any> {
23
+ /**
24
+ * The name of the tool (must match [a-zA-Z0-9_-]+).
25
+ */
26
+ abstract name: string;
27
+ /**
28
+ * A clear description of what the tool does, used by the LLM to decide when to call it.
29
+ */
30
+ abstract description: string;
31
+ /**
32
+ * Parameters the tool accepts.
33
+ * Can be a Zod object (for auto-schema + type safety) or a raw JSON Schema.
34
+ */
35
+ abstract schema: z.ZodObject<any> | Record<string, any>;
36
+ /**
37
+ * The core logic for the tool.
38
+ * 'args' will be parsed and validated based on 'schema'.
39
+ */
40
+ abstract execute(args: T): Promise<any>;
41
+ /**
42
+ * Internal handler to bridge with LLM providers.
43
+ * Converts any result to a string (usually JSON).
44
+ */
45
+ handler(args: T): Promise<string>;
46
+ /**
47
+ * Converts the tool definition and logic into a standard ToolDefinition interface.
48
+ * This is called automatically by NodeLLM when registering tools.
49
+ */
50
+ toLLMTool(): ToolDefinition;
51
+ }
18
52
  //# sourceMappingURL=Tool.d.ts.map
@@ -1 +1 @@
1
- {"version":3,"file":"Tool.d.ts","sourceRoot":"","sources":["../../src/chat/Tool.ts"],"names":[],"mappings":"AAAA,MAAM,WAAW,QAAQ;IACvB,EAAE,EAAE,MAAM,CAAC;IACX,IAAI,EAAE,UAAU,CAAC;IACjB,QAAQ,EAAE;QACR,IAAI,EAAE,MAAM,CAAC;QACb,SAAS,EAAE,MAAM,CAAC;KACnB,CAAC;CACH;AAED,MAAM,WAAW,IAAI;IACnB,IAAI,EAAE,UAAU,CAAC;IACjB,QAAQ,EAAE;QACR,IAAI,EAAE,MAAM,CAAC;QACb,WAAW,CAAC,EAAE,MAAM,CAAC;QACrB,UAAU,EAAE,MAAM,CAAC,MAAM,EAAE,GAAG,CAAC,CAAC;KACjC,CAAC;IACF,OAAO,CAAC,EAAE,CAAC,IAAI,EAAE,GAAG,KAAK,OAAO,CAAC,MAAM,CAAC,CAAC;CAC1C"}
1
+ {"version":3,"file":"Tool.d.ts","sourceRoot":"","sources":["../../src/chat/Tool.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,CAAC,EAAE,MAAM,KAAK,CAAC;AAGxB,MAAM,WAAW,QAAQ;IACvB,EAAE,EAAE,MAAM,CAAC;IACX,IAAI,EAAE,UAAU,CAAC;IACjB,QAAQ,EAAE;QACR,IAAI,EAAE,MAAM,CAAC;QACb,SAAS,EAAE,MAAM,CAAC;KACnB,CAAC;CACH;AAED,MAAM,WAAW,cAAc;IAC7B,IAAI,EAAE,UAAU,CAAC;IACjB,QAAQ,EAAE;QACR,IAAI,EAAE,MAAM,CAAC;QACb,WAAW,CAAC,EAAE,MAAM,CAAC;QACrB,UAAU,EAAE,MAAM,CAAC,MAAM,EAAE,GAAG,CAAC,CAAC;KACjC,CAAC;IACF,OAAO,CAAC,EAAE,CAAC,IAAI,EAAE,GAAG,KAAK,OAAO,CAAC,MAAM,CAAC,CAAC;CAC1C;AAED;;GAEG;AACH,8BAAsB,IAAI,CAAC,CAAC,GAAG,GAAG;IAChC;;OAEG;IACH,SAAgB,IAAI,EAAE,MAAM,CAAC;IAE7B;;OAEG;IACH,SAAgB,WAAW,EAAE,MAAM,CAAC;IAEpC;;;OAGG;IACH,SAAgB,MAAM,EAAE,CAAC,CAAC,SAAS,CAAC,GAAG,CAAC,GAAG,MAAM,CAAC,MAAM,EAAE,GAAG,CAAC,CAAC;IAE/D;;;OAGG;aACa,OAAO,CAAC,IAAI,EAAE,CAAC,GAAG,OAAO,CAAC,GAAG,CAAC;IAE9C;;;OAGG;IACU,OAAO,CAAC,IAAI,EAAE,CAAC,GAAG,OAAO,CAAC,MAAM,CAAC;IAM9C;;;OAGG;IACI,SAAS,IAAI,cAAc;CAoBnC"}
package/dist/chat/Tool.js CHANGED
@@ -1 +1,38 @@
1
- export {};
1
+ import { toJsonSchema } from "../schema/to-json-schema.js";
2
+ /**
3
+ * Subclass this to create tools with auto-generated schemas and type safety.
4
+ */
5
+ export class Tool {
6
+ /**
7
+ * Internal handler to bridge with LLM providers.
8
+ * Converts any result to a string (usually JSON).
9
+ */
10
+ async handler(args) {
11
+ const result = await this.execute(args);
12
+ if (result === undefined || result === null)
13
+ return "";
14
+ return typeof result === "string" ? result : JSON.stringify(result);
15
+ }
16
+ /**
17
+ * Converts the tool definition and logic into a standard ToolDefinition interface.
18
+ * This is called automatically by NodeLLM when registering tools.
19
+ */
20
+ toLLMTool() {
21
+ const rawSchema = toJsonSchema(this.schema);
22
+ // We want the 'properties' and 'required' parts, not the full JSON Schema wrapper if present
23
+ const parameters = rawSchema.type === "object" ? rawSchema : {
24
+ type: "object",
25
+ properties: rawSchema.properties || {},
26
+ required: rawSchema.required || []
27
+ };
28
+ return {
29
+ type: "function",
30
+ function: {
31
+ name: this.name,
32
+ description: this.description,
33
+ parameters: parameters,
34
+ },
35
+ handler: this.handler.bind(this),
36
+ };
37
+ }
38
+ }
package/dist/config.d.ts CHANGED
@@ -1,5 +1,5 @@
1
1
  /**
2
- * Global configuration for Node-NodeLLM providers.
2
+ * Global configuration for LLM providers.
3
3
  * Values are initialized from environment variables but can be overridden programmatically.
4
4
  */
5
5
  export interface NodeLLMConfig {
@@ -1,5 +1,5 @@
1
1
  /**
2
- * Base class for all node-llm errors
2
+ * Base class for all NodeLLM errors
3
3
  */
4
4
  export declare class LLMError extends Error {
5
5
  readonly code?: string | undefined;
@@ -64,7 +64,7 @@ export declare class CapabilityError extends LLMError {
64
64
  constructor(message: string);
65
65
  }
66
66
  /**
67
- * Thrown when NodeLLM provider is not configured
67
+ * Thrown when LLM provider is not configured
68
68
  */
69
69
  export declare class ProviderNotConfiguredError extends LLMError {
70
70
  constructor();
@@ -1,5 +1,5 @@
1
1
  /**
2
- * Base class for all node-llm errors
2
+ * Base class for all NodeLLM errors
3
3
  */
4
4
  export class LLMError extends Error {
5
5
  code;
@@ -96,11 +96,11 @@ export class CapabilityError extends LLMError {
96
96
  }
97
97
  }
98
98
  /**
99
- * Thrown when NodeLLM provider is not configured
99
+ * Thrown when LLM provider is not configured
100
100
  */
101
101
  export class ProviderNotConfiguredError extends LLMError {
102
102
  constructor() {
103
- super("NodeLLM provider not configured", "PROVIDER_NOT_CONFIGURED");
103
+ super("LLM provider not configured", "PROVIDER_NOT_CONFIGURED");
104
104
  }
105
105
  }
106
106
  /**