@n8n/ai-node-sdk 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE.md +88 -0
- package/README.md +349 -0
- package/dist/build.tsbuildinfo +1 -0
- package/dist/index.d.ts +15 -0
- package/dist/index.js +20 -0
- package/dist/index.js.map +1 -0
- package/package.json +31 -0
package/LICENSE.md
ADDED
|
@@ -0,0 +1,88 @@
|
|
|
1
|
+
# License
|
|
2
|
+
|
|
3
|
+
Portions of this software are licensed as follows:
|
|
4
|
+
|
|
5
|
+
- Content of branches other than the main branch (i.e. "master") are not licensed.
|
|
6
|
+
- Source code files that contain ".ee." in their filename or ".ee" in their dirname are NOT licensed under
|
|
7
|
+
the Sustainable Use License.
|
|
8
|
+
To use source code files that contain ".ee." in their filename or ".ee" in their dirname you must hold a
|
|
9
|
+
valid n8n Enterprise License specifically allowing you access to such source code files and as defined
|
|
10
|
+
in "LICENSE_EE.md".
|
|
11
|
+
- All third party components incorporated into the n8n Software are licensed under the original license
|
|
12
|
+
provided by the owner of the applicable component.
|
|
13
|
+
- Content outside of the above mentioned files or restrictions is available under the "Sustainable Use
|
|
14
|
+
License" as defined below.
|
|
15
|
+
|
|
16
|
+
## Sustainable Use License
|
|
17
|
+
|
|
18
|
+
Version 1.0
|
|
19
|
+
|
|
20
|
+
### Acceptance
|
|
21
|
+
|
|
22
|
+
By using the software, you agree to all of the terms and conditions below.
|
|
23
|
+
|
|
24
|
+
### Copyright License
|
|
25
|
+
|
|
26
|
+
The licensor grants you a non-exclusive, royalty-free, worldwide, non-sublicensable, non-transferable license
|
|
27
|
+
to use, copy, distribute, make available, and prepare derivative works of the software, in each case subject
|
|
28
|
+
to the limitations below.
|
|
29
|
+
|
|
30
|
+
### Limitations
|
|
31
|
+
|
|
32
|
+
You may use or modify the software only for your own internal business purposes or for non-commercial or
|
|
33
|
+
personal use. You may distribute the software or provide it to others only if you do so free of charge for
|
|
34
|
+
non-commercial purposes. You may not alter, remove, or obscure any licensing, copyright, or other notices of
|
|
35
|
+
the licensor in the software. Any use of the licensor’s trademarks is subject to applicable law.
|
|
36
|
+
|
|
37
|
+
### Patents
|
|
38
|
+
|
|
39
|
+
The licensor grants you a license, under any patent claims the licensor can license, or becomes able to
|
|
40
|
+
license, to make, have made, use, sell, offer for sale, import and have imported the software, in each case
|
|
41
|
+
subject to the limitations and conditions in this license. This license does not cover any patent claims that
|
|
42
|
+
you cause to be infringed by modifications or additions to the software. If you or your company make any
|
|
43
|
+
written claim that the software infringes or contributes to infringement of any patent, your patent license
|
|
44
|
+
for the software granted under these terms ends immediately. If your company makes such a claim, your patent
|
|
45
|
+
license ends immediately for work on behalf of your company.
|
|
46
|
+
|
|
47
|
+
### Notices
|
|
48
|
+
|
|
49
|
+
You must ensure that anyone who gets a copy of any part of the software from you also gets a copy of these
|
|
50
|
+
terms. If you modify the software, you must include in any modified copies of the software a prominent notice
|
|
51
|
+
stating that you have modified the software.
|
|
52
|
+
|
|
53
|
+
### No Other Rights
|
|
54
|
+
|
|
55
|
+
These terms do not imply any licenses other than those expressly granted in these terms.
|
|
56
|
+
|
|
57
|
+
### Termination
|
|
58
|
+
|
|
59
|
+
If you use the software in violation of these terms, such use is not licensed, and your license will
|
|
60
|
+
automatically terminate. If the licensor provides you with a notice of your violation, and you cease all
|
|
61
|
+
violation of this license no later than 30 days after you receive that notice, your license will be reinstated
|
|
62
|
+
retroactively. However, if you violate these terms after such reinstatement, any additional violation of these
|
|
63
|
+
terms will cause your license to terminate automatically and permanently.
|
|
64
|
+
|
|
65
|
+
### No Liability
|
|
66
|
+
|
|
67
|
+
As far as the law allows, the software comes as is, without any warranty or condition, and the licensor will
|
|
68
|
+
not be liable to you for any damages arising out of these terms or the use or nature of the software, under
|
|
69
|
+
any kind of legal claim.
|
|
70
|
+
|
|
71
|
+
### Definitions
|
|
72
|
+
|
|
73
|
+
The “licensor” is the entity offering these terms.
|
|
74
|
+
|
|
75
|
+
The “software” is the software the licensor makes available under these terms, including any portion of it.
|
|
76
|
+
|
|
77
|
+
“You” refers to the individual or entity agreeing to these terms.
|
|
78
|
+
|
|
79
|
+
“Your company” is any legal entity, sole proprietorship, or other kind of organization that you work for, plus
|
|
80
|
+
all organizations that have control over, are under the control of, or are under common control with that
|
|
81
|
+
organization. Control means ownership of substantially all the assets of an entity, or the power to direct its
|
|
82
|
+
management and policies by vote, contract, or otherwise. Control can be direct or indirect.
|
|
83
|
+
|
|
84
|
+
“Your license” is the license granted to you for the software under these terms.
|
|
85
|
+
|
|
86
|
+
“Use” means anything you do with the software requiring your license.
|
|
87
|
+
|
|
88
|
+
“Trademark” means trademarks, service marks, and similar rights.
|
package/README.md
ADDED
|
@@ -0,0 +1,349 @@
|
|
|
1
|
+
# @n8n/ai-node-sdk
|
|
2
|
+
|
|
3
|
+
Public SDK for building AI nodes in n8n. This package provides a simplified API for creating chat model and memory nodes without LangChain dependencies.
|
|
4
|
+
|
|
5
|
+
## Installation in n8n Node Packages
|
|
6
|
+
|
|
7
|
+
Include the package in your n8n node package by adding it to `peerDependencies` in `package.json`:
|
|
8
|
+
|
|
9
|
+
```json
|
|
10
|
+
{
|
|
11
|
+
"peerDependencies": {
|
|
12
|
+
"n8n-workflow": "*",
|
|
13
|
+
"@n8n/ai-node-sdk": "*"
|
|
14
|
+
}
|
|
15
|
+
}
|
|
16
|
+
```
|
|
17
|
+
|
|
18
|
+
## Development
|
|
19
|
+
|
|
20
|
+
```bash
|
|
21
|
+
# Build the package
|
|
22
|
+
pnpm build
|
|
23
|
+
|
|
24
|
+
# Run tests
|
|
25
|
+
pnpm test
|
|
26
|
+
|
|
27
|
+
# Run in watch mode
|
|
28
|
+
pnpm dev
|
|
29
|
+
```
|
|
30
|
+
|
|
31
|
+
## Chat Model Nodes
|
|
32
|
+
|
|
33
|
+
Chat model nodes implement the `INodeType` interface and use `supplyModel` to provide model instances.
|
|
34
|
+
|
|
35
|
+
### Simple Pattern: OpenAI-Compatible Providers
|
|
36
|
+
|
|
37
|
+
For OpenAI-compatible providers, use the config object pattern with `supplyModel`:
|
|
38
|
+
|
|
39
|
+
```typescript
|
|
40
|
+
import { supplyModel } from '@n8n/ai-node-sdk';
|
|
41
|
+
import {
|
|
42
|
+
type INodeType,
|
|
43
|
+
type INodeTypeDescription,
|
|
44
|
+
NodeConnectionTypes,
|
|
45
|
+
type SupplyData,
|
|
46
|
+
type ISupplyDataFunctions,
|
|
47
|
+
} from 'n8n-workflow';
|
|
48
|
+
|
|
49
|
+
export class LmChatMyProvider implements INodeType {
|
|
50
|
+
description: INodeTypeDescription = {
|
|
51
|
+
displayName: 'MyProvider Chat Model',
|
|
52
|
+
name: 'lmChatMyProvider',
|
|
53
|
+
icon: 'fa:robot',
|
|
54
|
+
group: ['transform'],
|
|
55
|
+
version: [1],
|
|
56
|
+
description: 'For advanced usage with an AI chain',
|
|
57
|
+
defaults: {
|
|
58
|
+
name: 'MyProvider Chat Model',
|
|
59
|
+
},
|
|
60
|
+
inputs: [],
|
|
61
|
+
outputs: [NodeConnectionTypes.AiLanguageModel],
|
|
62
|
+
credentials: [{ name: 'myProviderApi', required: true }],
|
|
63
|
+
properties: [
|
|
64
|
+
{
|
|
65
|
+
displayName: 'Model',
|
|
66
|
+
name: 'model',
|
|
67
|
+
type: 'string',
|
|
68
|
+
default: 'my-model',
|
|
69
|
+
},
|
|
70
|
+
{
|
|
71
|
+
displayName: 'Temperature',
|
|
72
|
+
name: 'temperature',
|
|
73
|
+
type: 'number',
|
|
74
|
+
default: 0.7,
|
|
75
|
+
},
|
|
76
|
+
],
|
|
77
|
+
};
|
|
78
|
+
|
|
79
|
+
async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise<SupplyData> {
|
|
80
|
+
const credentials = await this.getCredentials('myProviderApi');
|
|
81
|
+
const model = this.getNodeParameter('model', itemIndex) as string;
|
|
82
|
+
const temperature = this.getNodeParameter('temperature', itemIndex) as number;
|
|
83
|
+
|
|
84
|
+
// Return config for OpenAI-compatible providers
|
|
85
|
+
return supplyModel(this, {
|
|
86
|
+
type: 'openai',
|
|
87
|
+
baseUrl: credentials.url as string,
|
|
88
|
+
apiKey: credentials.apiKey as string,
|
|
89
|
+
model,
|
|
90
|
+
temperature,
|
|
91
|
+
});
|
|
92
|
+
}
|
|
93
|
+
}
|
|
94
|
+
```
|
|
95
|
+
|
|
96
|
+
### Advanced Pattern: Custom Model Class
|
|
97
|
+
|
|
98
|
+
For providers with custom APIs, extend `BaseChatModel` and pass an instance to `supplyModel`:
|
|
99
|
+
|
|
100
|
+
```typescript
|
|
101
|
+
import {
|
|
102
|
+
BaseChatModel,
|
|
103
|
+
supplyModel,
|
|
104
|
+
type Message,
|
|
105
|
+
type GenerateResult,
|
|
106
|
+
type StreamChunk,
|
|
107
|
+
type ChatModelConfig,
|
|
108
|
+
} from '@n8n/ai-node-sdk';
|
|
109
|
+
import {
|
|
110
|
+
type INodeType,
|
|
111
|
+
type INodeTypeDescription,
|
|
112
|
+
NodeConnectionTypes,
|
|
113
|
+
type IHttpRequestMethods,
|
|
114
|
+
type ISupplyDataFunctions,
|
|
115
|
+
type SupplyData,
|
|
116
|
+
} from 'n8n-workflow';
|
|
117
|
+
import type Stream from 'node:stream';
|
|
118
|
+
import { Readable } from 'node:stream';
|
|
119
|
+
|
|
120
|
+
// Custom model implementation
|
|
121
|
+
class MyProviderChatModel extends BaseChatModel {
|
|
122
|
+
constructor(
|
|
123
|
+
modelId: string,
|
|
124
|
+
private requests: {
|
|
125
|
+
httpRequest: (
|
|
126
|
+
method: IHttpRequestMethods,
|
|
127
|
+
url: string,
|
|
128
|
+
body?: object,
|
|
129
|
+
headers?: Record<string, string>,
|
|
130
|
+
) => Promise<{ body: unknown }>;
|
|
131
|
+
openStream: (
|
|
132
|
+
method: IHttpRequestMethods,
|
|
133
|
+
url: string,
|
|
134
|
+
body?: object,
|
|
135
|
+
headers?: Record<string, string>,
|
|
136
|
+
) => Promise<{ body: ReadableStream<Uint8Array> }>;
|
|
137
|
+
},
|
|
138
|
+
config?: ChatModelConfig,
|
|
139
|
+
) {
|
|
140
|
+
super('my-provider', modelId, config);
|
|
141
|
+
}
|
|
142
|
+
|
|
143
|
+
async generate(messages: Message[], config?: ChatModelConfig): Promise<GenerateResult> {
|
|
144
|
+
// Convert n8n messages to provider format
|
|
145
|
+
const providerMessages = messages.map(m => ({
|
|
146
|
+
role: m.role,
|
|
147
|
+
content: m.content.find(c => c.type === 'text')?.text ?? '',
|
|
148
|
+
}));
|
|
149
|
+
|
|
150
|
+
// Call the provider API
|
|
151
|
+
const response = await this.requests.httpRequest('POST', '/chat', {
|
|
152
|
+
model: this.modelId,
|
|
153
|
+
messages: providerMessages,
|
|
154
|
+
temperature: config?.temperature,
|
|
155
|
+
});
|
|
156
|
+
|
|
157
|
+
const body = response.body as any;
|
|
158
|
+
|
|
159
|
+
return {
|
|
160
|
+
finishReason: 'stop',
|
|
161
|
+
message: {
|
|
162
|
+
id: body.id,
|
|
163
|
+
role: 'assistant',
|
|
164
|
+
content: [{ type: 'text', text: body.content }],
|
|
165
|
+
},
|
|
166
|
+
usage: {
|
|
167
|
+
promptTokens: body.usage.prompt_tokens,
|
|
168
|
+
completionTokens: body.usage.completion_tokens,
|
|
169
|
+
totalTokens: body.usage.total_tokens,
|
|
170
|
+
},
|
|
171
|
+
};
|
|
172
|
+
}
|
|
173
|
+
|
|
174
|
+
async *stream(messages: Message[], config?: ChatModelConfig): AsyncIterable<StreamChunk> {
|
|
175
|
+
// Implement streaming...
|
|
176
|
+
yield { type: 'text-delta', delta: 'response text' };
|
|
177
|
+
yield { type: 'finish', finishReason: 'stop' };
|
|
178
|
+
}
|
|
179
|
+
}
|
|
180
|
+
|
|
181
|
+
// Node definition
|
|
182
|
+
export class LmChatMyProvider implements INodeType {
|
|
183
|
+
description: INodeTypeDescription = {
|
|
184
|
+
displayName: 'MyProvider Chat Model',
|
|
185
|
+
name: 'lmChatMyProvider',
|
|
186
|
+
icon: 'fa:robot',
|
|
187
|
+
group: ['transform'],
|
|
188
|
+
version: [1],
|
|
189
|
+
description: 'For advanced usage with an AI chain',
|
|
190
|
+
defaults: {
|
|
191
|
+
name: 'MyProvider Chat Model',
|
|
192
|
+
},
|
|
193
|
+
inputs: [],
|
|
194
|
+
outputs: [NodeConnectionTypes.AiLanguageModel],
|
|
195
|
+
credentials: [{ name: 'myProviderApi', required: true }],
|
|
196
|
+
properties: [
|
|
197
|
+
{ displayName: 'Model', name: 'model', type: 'string', default: 'my-model' },
|
|
198
|
+
{ displayName: 'Temperature', name: 'temperature', type: 'number', default: 0.7 },
|
|
199
|
+
],
|
|
200
|
+
};
|
|
201
|
+
|
|
202
|
+
async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise<SupplyData> {
|
|
203
|
+
const model = this.getNodeParameter('model', itemIndex) as string;
|
|
204
|
+
const temperature = this.getNodeParameter('temperature', itemIndex) as number;
|
|
205
|
+
|
|
206
|
+
const chatModel = new MyProviderChatModel(
|
|
207
|
+
model,
|
|
208
|
+
{
|
|
209
|
+
httpRequest: async (method, url, body, headers) => {
|
|
210
|
+
const response = await this.helpers.httpRequestWithAuthentication.call(
|
|
211
|
+
this,
|
|
212
|
+
'myProviderApi',
|
|
213
|
+
{ method, url, body, headers },
|
|
214
|
+
);
|
|
215
|
+
return { body: response };
|
|
216
|
+
},
|
|
217
|
+
openStream: async (method, url, body, headers) => {
|
|
218
|
+
const response = (await this.helpers.httpRequestWithAuthentication.call(
|
|
219
|
+
this,
|
|
220
|
+
'myProviderApi',
|
|
221
|
+
{ method, url, body, headers, encoding: 'stream' },
|
|
222
|
+
)) as Stream.Readable;
|
|
223
|
+
return { body: Readable.toWeb(response) as ReadableStream<Uint8Array> };
|
|
224
|
+
},
|
|
225
|
+
},
|
|
226
|
+
{ temperature },
|
|
227
|
+
);
|
|
228
|
+
|
|
229
|
+
return supplyModel(this, chatModel);
|
|
230
|
+
}
|
|
231
|
+
}
|
|
232
|
+
```
|
|
233
|
+
|
|
234
|
+
## Memory Nodes
|
|
235
|
+
|
|
236
|
+
Memory nodes implement the `INodeType` interface and use `supplyMemory` to provide memory instances.
|
|
237
|
+
|
|
238
|
+
### Pattern: Custom Storage with Windowed Memory
|
|
239
|
+
|
|
240
|
+
Extend `BaseChatHistory` to implement storage, then wrap it with `WindowedChatMemory` and pass the result to `supplyMemory`:
|
|
241
|
+
|
|
242
|
+
```typescript
|
|
243
|
+
import {
|
|
244
|
+
BaseChatHistory,
|
|
245
|
+
WindowedChatMemory,
|
|
246
|
+
supplyMemory,
|
|
247
|
+
type Message,
|
|
248
|
+
} from '@n8n/ai-node-sdk';
|
|
249
|
+
import {
|
|
250
|
+
type INodeType,
|
|
251
|
+
type INodeTypeDescription,
|
|
252
|
+
NodeConnectionTypes,
|
|
253
|
+
type ISupplyDataFunctions,
|
|
254
|
+
type SupplyData,
|
|
255
|
+
} from 'n8n-workflow';
|
|
256
|
+
|
|
257
|
+
// Custom storage implementation
|
|
258
|
+
class MyDbChatHistory extends BaseChatHistory {
|
|
259
|
+
constructor(
|
|
260
|
+
private sessionId: string,
|
|
261
|
+
private apiKey: string,
|
|
262
|
+
private httpRequest: any,
|
|
263
|
+
) {
|
|
264
|
+
super();
|
|
265
|
+
}
|
|
266
|
+
|
|
267
|
+
async getMessages(): Promise<Message[]> {
|
|
268
|
+
const data = await this.httpRequest({
|
|
269
|
+
method: 'GET',
|
|
270
|
+
url: `/sessions/${this.sessionId}/messages`,
|
|
271
|
+
headers: { Authorization: `Bearer ${this.apiKey}` },
|
|
272
|
+
json: true,
|
|
273
|
+
});
|
|
274
|
+
|
|
275
|
+
return data.messages.map((m: any) => ({
|
|
276
|
+
role: m.role,
|
|
277
|
+
content: [{ type: 'text', text: m.content }],
|
|
278
|
+
}));
|
|
279
|
+
}
|
|
280
|
+
|
|
281
|
+
async addMessage(message: Message): Promise<void> {
|
|
282
|
+
const text = message.content.find(c => c.type === 'text')?.text ?? '';
|
|
283
|
+
await this.httpRequest({
|
|
284
|
+
method: 'POST',
|
|
285
|
+
url: `/sessions/${this.sessionId}/messages`,
|
|
286
|
+
headers: { Authorization: `Bearer ${this.apiKey}` },
|
|
287
|
+
body: { role: message.role, content: text },
|
|
288
|
+
json: true,
|
|
289
|
+
});
|
|
290
|
+
}
|
|
291
|
+
|
|
292
|
+
async clear(): Promise<void> {
|
|
293
|
+
await this.httpRequest({
|
|
294
|
+
method: 'DELETE',
|
|
295
|
+
url: `/sessions/${this.sessionId}`,
|
|
296
|
+
headers: { Authorization: `Bearer ${this.apiKey}` },
|
|
297
|
+
});
|
|
298
|
+
}
|
|
299
|
+
}
|
|
300
|
+
|
|
301
|
+
// Memory node
|
|
302
|
+
export class MemoryMyDb implements INodeType {
|
|
303
|
+
description: INodeTypeDescription = {
|
|
304
|
+
displayName: 'MyDB Memory',
|
|
305
|
+
name: 'memoryMyDb',
|
|
306
|
+
icon: 'fa:database',
|
|
307
|
+
group: ['transform'],
|
|
308
|
+
version: [1],
|
|
309
|
+
description: 'Store conversation history in MyDB',
|
|
310
|
+
defaults: {
|
|
311
|
+
name: 'MyDB Memory',
|
|
312
|
+
},
|
|
313
|
+
inputs: [],
|
|
314
|
+
outputs: [NodeConnectionTypes.AiMemory],
|
|
315
|
+
credentials: [{ name: 'myDbApi', required: true }],
|
|
316
|
+
properties: [
|
|
317
|
+
{
|
|
318
|
+
displayName: 'Session ID',
|
|
319
|
+
name: 'sessionId',
|
|
320
|
+
type: 'string',
|
|
321
|
+
default: '={{ $json.sessionId }}',
|
|
322
|
+
},
|
|
323
|
+
{
|
|
324
|
+
displayName: 'Window Size',
|
|
325
|
+
name: 'windowSize',
|
|
326
|
+
type: 'number',
|
|
327
|
+
default: 10,
|
|
328
|
+
description: 'Number of recent message pairs to keep',
|
|
329
|
+
},
|
|
330
|
+
],
|
|
331
|
+
};
|
|
332
|
+
|
|
333
|
+
async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise<SupplyData> {
|
|
334
|
+
const credentials = await this.getCredentials('myDbApi');
|
|
335
|
+
const sessionId = this.getNodeParameter('sessionId', itemIndex) as string;
|
|
336
|
+
const windowSize = this.getNodeParameter('windowSize', itemIndex) as number;
|
|
337
|
+
|
|
338
|
+
const history = new MyDbChatHistory(
|
|
339
|
+
sessionId,
|
|
340
|
+
credentials.apiKey as string,
|
|
341
|
+
this.helpers.httpRequest,
|
|
342
|
+
);
|
|
343
|
+
|
|
344
|
+
const memory = new WindowedChatMemory(history, { windowSize });
|
|
345
|
+
|
|
346
|
+
return supplyMemory(this, memory);
|
|
347
|
+
}
|
|
348
|
+
}
|
|
349
|
+
```
|