@node-llm/core 1.4.2 → 1.4.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +22 -17
- package/package.json +11 -12
- package/dist/aliases.json +0 -132
- package/dist/utils/sanitize.d.ts +0 -21
- package/dist/utils/sanitize.d.ts.map +0 -1
- package/dist/utils/sanitize.js +0 -76
package/README.md
CHANGED
@@ -1,18 +1,17 @@
 <p align="left">
-  <
+  <a href="https://node-llm.eshaiju.com/">
+    <img src="https://github.com/node-llm/node-llm/raw/main/docs/assets/images/logo.jpg" alt="NodeLLM logo" width="300" />
+  </a>
 </p>
 
 # NodeLLM
 
-
 **An opinionated architectural layer for integrating Large Language Models in Node.js.**
 
 **Provider-agnostic by design.**
 
 Most LLM SDKs **tightly couple** your application to vendors, APIs, and churn. NodeLLM provides a unified, production-oriented API for interacting with over 540+ models across multiple providers (OpenAI, Gemini, Anthropic, DeepSeek, OpenRouter, Ollama, etc.) without the SDK fatigue.
 
-<br/>
-
 <p align="left">
   <img src="https://registry.npmmirror.com/@lobehub/icons-static-svg/latest/files/icons/openai.svg" height="28" />
   <img src="https://registry.npmmirror.com/@lobehub/icons-static-svg/latest/files/icons/openai-text.svg" height="22" />
@@ -88,7 +87,7 @@ for await (const chunk of chat.stream("Explain event-driven architecture")) {
 
 ## 🔧 Strategic Configuration
 
-
+NodeLLM provides a flexible configuration system designed for enterprise usage:
 
 ```ts
 // Recommended for multi-provider pipelines
@@ -108,7 +107,7 @@ NodeLLM.configure({
 });
 ```
 
-**[Full Configuration Guide →](
+**[Full Configuration Guide →](https://node-llm.eshaiju.com/getting-started/configuration)**
 
 ---
 
@@ -135,19 +134,21 @@ await chat.ask("Analyze this interface", {
 Define tools once;`NodeLLM` manages the recursive execution loop for you, keeping your controller logic clean. **Works seamlessly with both regular chat and streaming!**
 
 ```ts
+import { Tool, z } from "@node-llm/core";
+
 // Class-based DSL
 class WeatherTool extends Tool {
   name = "get_weather";
   description = "Get current weather";
   schema = z.object({ location: z.string() });
-  async execute({ location }) { return `Sunny in ${location}`; }
-}
 
-
-
+  async handler({ location }) {
+    return `Sunny in ${location}`;
+  }
+}
 
 // Now the model can use it automatically
-await chat.ask("What's the weather in Tokyo?");
+await chat.withTool(WeatherTool).ask("What's the weather in Tokyo?");
 ```
 **[Full Tool Calling Guide →](https://node-llm.eshaiju.com/core-features/tool-calling)**
 
@@ -186,17 +187,15 @@ await NodeLLM.transcribe("meeting-recording.wav");
 ```
 
 ### ⚡ Scoped Parallelism
-Run multiple providers in parallel safely without global configuration side effects using isolated contexts.
-
+Run multiple providers in parallel safely without global configuration side effects using isolated contexts.
 ```ts
 const [gpt, claude] = await Promise.all([
   // Each call branch off into its own isolated context
   NodeLLM.withProvider("openai").chat("gpt-4o").ask(prompt),
-  NodeLLM.withProvider("anthropic"
+  NodeLLM.withProvider("anthropic").chat("claude-3-5-sonnet").ask(prompt),
 ]);
 ```
 
-
 ### 🧠 Deep Reasoning
 Direct access to the thought process of models like **DeepSeek R1** or **OpenAI o1/o3** using the `.reasoning` field.
 ```ts
@@ -208,7 +207,7 @@ console.log(res.reasoning); // Chain-of-thought
 
 ## 🚀 Why use this over official SDKs?
 
-| Feature
+| Feature | NodeLLM | Official SDKs | Architectural Impact |
 | :--- | :--- | :--- | :--- |
 | **Provider Logic** | Transparently Handled | Exposed to your code | **Low Coupling** |
 | **Streaming** | Standard `AsyncIterator` | Vendor-specific Events | **Predictable Data Flow** |
@@ -242,6 +241,12 @@ npm install @node-llm/core
 
 ---
 
+## 🤝 Contributing
+
+We welcome contributions! Please see our **[Contributing Guide](https://github.com/node-llm/node-llm/blob/main/CONTRIBUTING.md)** for more details on how to get started.
+
+---
+
 ## 🫶 Credits
 
 Heavily inspired by the elegant design of [RubyLLM](https://rubyllm.com/).
@@ -250,4 +255,4 @@ Heavily inspired by the elegant design of [RubyLLM](https://rubyllm.com/).
 
 ## 📄 License
 
-MIT © [
+MIT © [NodeLLM contributors]
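
Note on the tool-calling hunk above: the tool's entry point is renamed from `execute` to `handler`, and the tool is now attached explicitly with `chat.withTool(...)`. A minimal sketch of how the snippets in this README combine, assuming `NodeLLM` is a named export and that `withProvider(...).chat(...)` returns the chat object used here (neither is verified against 1.4.3):

```ts
// Sketch only: stitches together snippets from the README diff above.
import { NodeLLM, Tool, z } from "@node-llm/core"; // assumes `NodeLLM` is a named export

class WeatherTool extends Tool {
  name = "get_weather";
  description = "Get current weather";
  schema = z.object({ location: z.string() });

  // 1.4.3's README uses `handler` where 1.4.2 used `execute`
  async handler({ location }: { location: string }) {
    return `Sunny in ${location}`;
  }
}

// Scoped provider context plus explicit tool registration, as shown in the README
const chat = NodeLLM.withProvider("openai").chat("gpt-4o");
const res = await chat.withTool(WeatherTool).ask("What's the weather in Tokyo?");
console.log(res);
```
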
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@node-llm/core",
-  "version": "1.4.2",
+  "version": "1.4.3",
   "type": "module",
   "main": "./dist/index.js",
   "types": "./dist/index.d.ts",
@@ -37,12 +37,12 @@
   "license": "MIT",
   "repository": {
     "type": "git",
-    "url": "https://github.com/
+    "url": "https://github.com/node-llm/node-llm",
     "directory": "packages/core"
   },
   "homepage": "https://node-llm.eshaiju.com",
   "bugs": {
-    "url": "https://github.com/
+    "url": "https://github.com/node-llm/node-llm/issues"
   },
   "engines": {
     "node": ">=20.0.0"
@@ -52,14 +52,6 @@
     "README.md",
     "LICENSE"
   ],
-  "scripts": {
-    "build": "tsc -p tsconfig.json",
-    "dev": "tsc -w",
-    "lint": "tsc --noEmit",
-    "test": "vitest run",
-    "test:watch": "vitest",
-    "prepublishOnly": "npm run build"
-  },
   "dependencies": {
     "zod": "^3.23.8",
     "zod-to-json-schema": "^3.25.1"
@@ -69,5 +61,12 @@
     "@pollyjs/adapter-node-http": "^6.0.6",
     "@pollyjs/core": "^6.0.6",
     "@pollyjs/persister-fs": "^6.0.6"
+  },
+  "scripts": {
+    "build": "tsc -p tsconfig.json",
+    "dev": "tsc -w",
+    "lint": "tsc --noEmit",
+    "test": "vitest run",
+    "test:watch": "vitest"
   }
-}
+}
package/dist/aliases.json
DELETED
@@ -1,132 +0,0 @@
-{
-  "chatgpt-4o": {
-    "openai": "chatgpt-4o-latest",
-    "openrouter": "openai/chatgpt-4o-latest"
-  },
-  "claude-3-5-haiku": {
-    "anthropic": "claude-3-5-haiku-20241022",
-    "openrouter": "anthropic/claude-3.5-haiku",
-    "bedrock": "anthropic.claude-3-5-haiku-20241022-v1:0"
-  },
-  "claude-3-5-sonnet": {
-    "anthropic": "claude-3-5-sonnet-20240620",
-    "openrouter": "anthropic/claude-3.5-sonnet",
-    "bedrock": "anthropic.claude-3-5-sonnet-20240620-v1:0"
-  },
-  "claude-sonnet-4-5": {
-    "anthropic": "claude-sonnet-4-5-20250929"
-  },
-  "claude-sonnet-4": {
-    "anthropic": "claude-sonnet-4-20250514"
-  },
-  "claude-3-7-sonnet": {
-    "anthropic": "claude-3-7-sonnet-20250219",
-    "openrouter": "anthropic/claude-3.7-sonnet",
-    "bedrock": "us.anthropic.claude-3-7-sonnet-20250219-v1:0"
-  },
-  "claude-3-haiku": {
-    "anthropic": "claude-3-haiku-20240307",
-    "openrouter": "anthropic/claude-3-haiku",
-    "bedrock": "anthropic.claude-3-haiku-20240307-v1:0:200k"
-  },
-  "claude-3-opus": {
-    "anthropic": "claude-3-opus-20240229",
-    "openrouter": "anthropic/claude-3-opus",
-    "bedrock": "anthropic.claude-3-opus-20240229-v1:0:200k"
-  },
-  "claude-3-sonnet": {
-    "bedrock": "anthropic.claude-3-sonnet-20240229-v1:0"
-  },
-  "deepseek-chat": {
-    "deepseek": "deepseek-chat",
-    "openrouter": "deepseek/deepseek-chat"
-  },
-  "gemini-flash": {
-    "gemini": "gemini-flash-latest",
-    "vertexai": "gemini-flash-latest",
-    "openrouter": "google/gemini-flash-latest"
-  },
-  "gemini-pro": {
-    "gemini": "gemini-1.5-pro-001",
-    "vertexai": "gemini-1.5-pro-001",
-    "openrouter": "google/gemini-1.5-pro-001"
-  },
-  "gemini-1.5-flash": {
-    "gemini": "gemini-1.5-flash-001",
-    "vertexai": "gemini-1.5-flash-001",
-    "openrouter": "google/gemini-1.5-flash-001"
-  },
-  "gemini-1.5-pro": {
-    "gemini": "gemini-1.5-pro-001",
-    "vertexai": "gemini-1.5-pro-001",
-    "openrouter": "google/gemini-1.5-pro-001"
-  },
-  "gemini-2.0-flash": {
-    "gemini": "gemini-2.0-flash",
-    "vertexai": "gemini-2.0-flash"
-  },
-  "gemini-2.0-flash-001": {
-    "gemini": "gemini-2.0-flash-001",
-    "openrouter": "google/gemini-2.0-flash-001",
-    "vertexai": "gemini-2.0-flash-001"
-  },
-  "gpt-3.5-turbo": {
-    "openai": "gpt-3.5-turbo",
-    "openrouter": "openai/gpt-3.5-turbo"
-  },
-  "gpt-4": {
-    "openai": "gpt-4",
-    "openrouter": "openai/gpt-4"
-  },
-  "gpt-4-turbo": {
-    "openai": "gpt-4-turbo",
-    "openrouter": "openai/gpt-4-turbo"
-  },
-  "gpt-4o": {
-    "openai": "gpt-4o",
-    "openrouter": "openai/gpt-4o"
-  },
-  "gpt-4o-mini": {
-    "openai": "gpt-4o-mini",
-    "openrouter": "openai/gpt-4o-mini"
-  },
-  "llama-3-1-405b": {
-    "openrouter": "meta-llama/llama-3.1-405b"
-  },
-  "llama-3-1-405b-instruct": {
-    "openrouter": "meta-llama/llama-3.1-405b-instruct"
-  },
-  "llama-3-1-70b": {
-    "openrouter": "meta-llama/llama-3.1-70b"
-  },
-  "llama-3-1-70b-instruct": {
-    "openrouter": "meta-llama/llama-3.1-70b-instruct"
-  },
-  "llama-3-1-8b": {
-    "openrouter": "meta-llama/llama-3.1-8b"
-  },
-  "llama-3-1-8b-instruct": {
-    "openrouter": "meta-llama/llama-3.1-8b-instruct"
-  },
-  "llama-3-2-1b-instruct": {
-    "openrouter": "meta-llama/llama-3.2-1b-instruct"
-  },
-  "llama-3-2-3b-instruct": {
-    "openrouter": "meta-llama/llama-3.2-3b-instruct"
-  },
-  "llama-3-3-70b-instruct": {
-    "openrouter": "meta-llama/llama-3.3-70b-instruct"
-  },
-  "mistral-large": {
-    "mistral": "mistral-large-latest",
-    "openrouter": "mistralai/mistral-large"
-  },
-  "mistral-medium": {
-    "mistral": "mistral-medium-latest",
-    "openrouter": "mistralai/mistral-medium"
-  },
-  "mistral-small": {
-    "mistral": "mistral-small-latest",
-    "openrouter": "mistralai/mistral-small"
-  }
-}
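
`dist/aliases.json` (removed in this release) was a lookup table from a friendly model name to each provider's concrete model id. A hedged sketch of how such a table resolves names; the data comes from the deleted file above, while `resolveAlias` is a hypothetical helper written here for illustration, not part of the package's API:

```ts
// Illustrative only: shape and sample data taken from the deleted aliases.json.
type AliasTable = Record<string, Record<string, string>>;

const aliases: AliasTable = {
  "claude-3-5-sonnet": {
    anthropic: "claude-3-5-sonnet-20240620",
    openrouter: "anthropic/claude-3.5-sonnet",
    bedrock: "anthropic.claude-3-5-sonnet-20240620-v1:0",
  },
};

// Hypothetical resolver: fall back to the raw model id when no alias entry exists.
function resolveAlias(table: AliasTable, model: string, provider: string): string {
  return table[model]?.[provider] ?? model;
}

console.log(resolveAlias(aliases, "claude-3-5-sonnet", "openrouter")); // "anthropic/claude-3.5-sonnet"
```
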
package/dist/utils/sanitize.d.ts
DELETED
@@ -1,21 +0,0 @@
-/**
- * Utility functions for sanitizing sensitive data from error messages and objects
- */
-/**
- * Sanitizes API keys and other sensitive data from strings
- * @param text - The text to sanitize
- * @returns Sanitized text with sensitive data redacted
- */
-export declare function sanitizeText(text: string): string;
-/**
- * Sanitizes an error object by redacting sensitive data
- * @param error - The error object to sanitize
- * @returns Sanitized error object
- */
-export declare function sanitizeError(error: any): any;
-/**
- * Logs an error with sanitized output
- * @param error - The error to log
- */
-export declare function logSanitizedError(error: any): void;
-//# sourceMappingURL=sanitize.d.ts.map
package/dist/utils/sanitize.d.ts.map
DELETED
@@ -1 +0,0 @@
-{"version":3,"file":"sanitize.d.ts","sourceRoot":"","sources":["../../src/utils/sanitize.ts"],"names":[],"mappings":"AAAA;;GAEG;AAEH;;;;GAIG;AACH,wBAAgB,YAAY,CAAC,IAAI,EAAE,MAAM,GAAG,MAAM,CAiCjD;AAED;;;;GAIG;AACH,wBAAgB,aAAa,CAAC,KAAK,EAAE,GAAG,GAAG,GAAG,CA+B7C;AAiBD;;;GAGG;AACH,wBAAgB,iBAAiB,CAAC,KAAK,EAAE,GAAG,GAAG,IAAI,CAGlD"}
package/dist/utils/sanitize.js
DELETED
@@ -1,76 +0,0 @@
-/**
- * Utility functions for sanitizing sensitive data from error messages and objects
- */
-/**
- * Sanitizes API keys and other sensitive data from strings
- * @param text - The text to sanitize
- * @returns Sanitized text with sensitive data redacted
- */
-export function sanitizeText(text) {
-    if (!text)
-        return text;
-    // Sanitize API keys (various formats)
-    // OpenAI: sk-...
-    // Anthropic: sk-ant-...
-    // Generic patterns
-    let sanitized = text;
-    // Pattern: sk-ant-api03-... or sk-...
-    sanitized = sanitized.replace(/sk-ant-[a-zA-Z0-9_-]{8,}/g, (match) => `sk-ant-***${match.slice(-4)}`);
-    sanitized = sanitized.replace(/sk-[a-zA-Z0-9_-]{20,}/g, (match) => `sk-***${match.slice(-4)}`);
-    // Pattern: Bearer tokens
-    sanitized = sanitized.replace(/Bearer\s+[a-zA-Z0-9_-]{20,}/gi, (match) => `Bearer ***${match.slice(-4)}`);
-    // Pattern: API keys in error messages like "Incorrect API key provided: 8TAXX2TT***..."
-    sanitized = sanitized.replace(/API key[^:]*:\s*[a-zA-Z0-9]{4,}\**/gi, 'API key: [REDACTED]');
-    return sanitized;
-}
-/**
- * Sanitizes an error object by redacting sensitive data
- * @param error - The error object to sanitize
- * @returns Sanitized error object
- */
-export function sanitizeError(error) {
-    if (!error)
-        return error;
-    // Handle Error instances
-    if (error instanceof Error) {
-        const sanitized = new Error(sanitizeText(error.message));
-        sanitized.name = error.name;
-        sanitized.stack = error.stack ? sanitizeText(error.stack) : undefined;
-        // Copy other properties
-        Object.keys(error).forEach(key => {
-            if (key !== 'message' && key !== 'stack' && key !== 'name') {
-                sanitized[key] = sanitizeErrorValue(error[key]);
-            }
-        });
-        return sanitized;
-    }
-    // Handle plain objects
-    if (typeof error === 'object') {
-        const sanitized = Array.isArray(error) ? [] : {};
-        for (const key in error) {
-            sanitized[key] = sanitizeErrorValue(error[key]);
-        }
-        return sanitized;
-    }
-    return error;
-}
-/**
- * Sanitizes a single value (recursive helper)
- */
-function sanitizeErrorValue(value) {
-    if (typeof value === 'string') {
-        return sanitizeText(value);
-    }
-    if (typeof value === 'object' && value !== null) {
-        return sanitizeError(value);
-    }
-    return value;
-}
-/**
- * Logs an error with sanitized output
- * @param error - The error to log
- */
-export function logSanitizedError(error) {
-    const sanitized = sanitizeError(error);
-    console.error('[NodeLLM Error]', sanitized);
-}
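
For reference, a usage sketch of the sanitization helpers removed above, as they behaved in 1.4.2's compiled output. The deep import path is a guess based on the deleted file's location; these look like internal utilities and may never have been part of the public API:

```ts
// Sketch based on the deleted 1.4.2 code above; the import path is hypothetical.
import { sanitizeText, logSanitizedError } from "@node-llm/core/dist/utils/sanitize.js";

// The sk-... pattern is masked down to its last four characters.
console.log(sanitizeText("Incorrect API key provided: sk-abcdefghijklmnopqrstuvwx"));
// "Incorrect API key provided: sk-***uvwx"

try {
  throw new Error(`Request failed with Bearer ${"x".repeat(32)}`);
} catch (err) {
  // Logs "[NodeLLM Error]" with the bearer token redacted to "Bearer ***xxxx".
  logSanitizedError(err);
}
```
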