@n8n/ai-utilities 0.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE.md +88 -0
- package/README.md +468 -0
- package/dist/adapters/langchain-chat-model.d.ts +20 -0
- package/dist/adapters/langchain-chat-model.js +180 -0
- package/dist/adapters/langchain-chat-model.js.map +1 -0
- package/dist/adapters/langchain-history.d.ts +12 -0
- package/dist/adapters/langchain-history.js +27 -0
- package/dist/adapters/langchain-history.js.map +1 -0
- package/dist/adapters/langchain-memory.d.ts +11 -0
- package/dist/adapters/langchain-memory.js +36 -0
- package/dist/adapters/langchain-memory.js.map +1 -0
- package/dist/build.tsbuildinfo +1 -0
- package/dist/chat-model/base.d.ts +15 -0
- package/dist/chat-model/base.js +25 -0
- package/dist/chat-model/base.js.map +1 -0
- package/dist/converters/message.d.ts +5 -0
- package/dist/converters/message.js +365 -0
- package/dist/converters/message.js.map +1 -0
- package/dist/converters/tool.d.ts +5 -0
- package/dist/converters/tool.js +59 -0
- package/dist/converters/tool.js.map +1 -0
- package/dist/guards.d.ts +8 -0
- package/dist/guards.js +27 -0
- package/dist/guards.js.map +1 -0
- package/dist/index.d.ts +28 -0
- package/dist/index.js +56 -0
- package/dist/index.js.map +1 -0
- package/dist/memory/base-chat-history.d.ts +8 -0
- package/dist/memory/base-chat-history.js +12 -0
- package/dist/memory/base-chat-history.js.map +1 -0
- package/dist/memory/base-chat-memory.d.ts +8 -0
- package/dist/memory/base-chat-memory.js +7 -0
- package/dist/memory/base-chat-memory.js.map +1 -0
- package/dist/memory/windowed-chat-memory.d.ts +14 -0
- package/dist/memory/windowed-chat-memory.js +38 -0
- package/dist/memory/windowed-chat-memory.js.map +1 -0
- package/dist/suppliers/supplyMemory.d.ts +6 -0
- package/dist/suppliers/supplyMemory.js +14 -0
- package/dist/suppliers/supplyMemory.js.map +1 -0
- package/dist/suppliers/supplyModel.d.ts +15 -0
- package/dist/suppliers/supplyModel.js +78 -0
- package/dist/suppliers/supplyModel.js.map +1 -0
- package/dist/types/chat-model.d.ts +26 -0
- package/dist/types/chat-model.js +3 -0
- package/dist/types/chat-model.js.map +1 -0
- package/dist/types/json.d.ts +5 -0
- package/dist/types/json.js +3 -0
- package/dist/types/json.js.map +1 -0
- package/dist/types/memory.d.ts +13 -0
- package/dist/types/memory.js +3 -0
- package/dist/types/memory.js.map +1 -0
- package/dist/types/message.d.ts +49 -0
- package/dist/types/message.js +3 -0
- package/dist/types/message.js.map +1 -0
- package/dist/types/openai.d.ts +39 -0
- package/dist/types/openai.js +3 -0
- package/dist/types/openai.js.map +1 -0
- package/dist/types/output.d.ts +37 -0
- package/dist/types/output.js +3 -0
- package/dist/types/output.js.map +1 -0
- package/dist/types/tool.d.ts +28 -0
- package/dist/types/tool.js +3 -0
- package/dist/types/tool.js.map +1 -0
- package/dist/utils/embeddings-input-validation.d.ts +3 -0
- package/dist/utils/embeddings-input-validation.js +28 -0
- package/dist/utils/embeddings-input-validation.js.map +1 -0
- package/dist/utils/failed-attempt-handler/n8nDefaultFailedAttemptHandler.d.ts +1 -0
- package/dist/utils/failed-attempt-handler/n8nDefaultFailedAttemptHandler.js +30 -0
- package/dist/utils/failed-attempt-handler/n8nDefaultFailedAttemptHandler.js.map +1 -0
- package/dist/utils/failed-attempt-handler/n8nLlmFailedAttemptHandler.d.ts +3 -0
- package/dist/utils/failed-attempt-handler/n8nLlmFailedAttemptHandler.js +28 -0
- package/dist/utils/failed-attempt-handler/n8nLlmFailedAttemptHandler.js.map +1 -0
- package/dist/utils/helpers.d.ts +3 -0
- package/dist/utils/helpers.js +53 -0
- package/dist/utils/helpers.js.map +1 -0
- package/dist/utils/http-proxy-agent.d.ts +10 -0
- package/dist/utils/http-proxy-agent.js +48 -0
- package/dist/utils/http-proxy-agent.js.map +1 -0
- package/dist/utils/log-ai-event.d.ts +2 -0
- package/dist/utils/log-ai-event.js +13 -0
- package/dist/utils/log-ai-event.js.map +1 -0
- package/dist/utils/log-wrapper.d.ts +28 -0
- package/dist/utils/log-wrapper.js +329 -0
- package/dist/utils/log-wrapper.js.map +1 -0
- package/dist/utils/n8n-binary-loader.d.ts +18 -0
- package/dist/utils/n8n-binary-loader.js +159 -0
- package/dist/utils/n8n-binary-loader.js.map +1 -0
- package/dist/utils/n8n-json-loader.d.ts +11 -0
- package/dist/utils/n8n-json-loader.js +66 -0
- package/dist/utils/n8n-json-loader.js.map +1 -0
- package/dist/utils/n8n-llm-tracing.d.ts +46 -0
- package/dist/utils/n8n-llm-tracing.js +157 -0
- package/dist/utils/n8n-llm-tracing.js.map +1 -0
- package/dist/utils/sse.d.ts +8 -0
- package/dist/utils/sse.js +107 -0
- package/dist/utils/sse.js.map +1 -0
- package/dist/utils/tokenizer/cl100k_base.json +1 -0
- package/dist/utils/tokenizer/o200k_base.json +1 -0
- package/dist/utils/tokenizer/tiktoken.d.ts +4 -0
- package/dist/utils/tokenizer/tiktoken.js +40 -0
- package/dist/utils/tokenizer/tiktoken.js.map +1 -0
- package/dist/utils/tokenizer/token-estimator.d.ts +4 -0
- package/dist/utils/tokenizer/token-estimator.js +98 -0
- package/dist/utils/tokenizer/token-estimator.js.map +1 -0
- package/package.json +51 -0
|
@@ -0,0 +1,4 @@
|
|
|
1
|
+
import type { TiktokenEncoding, TiktokenModel } from 'js-tiktoken/lite';
import { Tiktoken } from 'js-tiktoken/lite';
/**
 * Resolves the shared `Tiktoken` instance for an encoding name, loading its
 * JSON definition from disk on first use and caching it afterwards.
 */
export declare function getEncoding(encoding: TiktokenEncoding): Promise<Tiktoken>;
/**
 * Resolves the `Tiktoken` instance for an OpenAI model name by mapping the
 * model to its encoding and delegating to `getEncoding`.
 */
export declare function encodingForModel(model: TiktokenModel): Promise<Tiktoken>;
|
|
@@ -0,0 +1,40 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.getEncoding = getEncoding;
|
|
4
|
+
exports.encodingForModel = encodingForModel;
|
|
5
|
+
const promises_1 = require("fs/promises");
|
|
6
|
+
const lite_1 = require("js-tiktoken/lite");
|
|
7
|
+
const n8n_workflow_1 = require("n8n-workflow");
|
|
8
|
+
const path_1 = require("path");
|
|
9
|
+
// Promise cache keyed by encoding name. getEncoding() stores the in-flight
// load promise here so each tokenizer JSON is read and parsed at most once.
const cache = {};
// Reads a tokenizer-definition JSON that ships next to this compiled file
// and parses it with n8n-workflow's jsonParse.
const loadJSONFile = async (filename) => {
    const absolutePath = (0, path_1.join)(__dirname, filename);
    const raw = await (0, promises_1.readFile)(absolutePath, 'utf-8');
    return (0, n8n_workflow_1.jsonParse)(raw);
};
|
|
15
|
+
// Returns the Tiktoken instance for the requested encoding, loading its JSON
// definition at most once. The in-flight promise itself is cached so that
// concurrent callers share a single load; on failure the cache entry is
// evicted so a later call can retry.
async function getEncoding(encoding) {
    if (!(encoding in cache)) {
        const load = async () => {
            // Only o200k_base ships its own data file; every other encoding
            // (including the explicit cl100k_base case and the default branch
            // of the original switch) resolves to cl100k_base.
            const file = encoding === 'o200k_base' ? './o200k_base.json' : './cl100k_base.json';
            const jsonData = await loadJSONFile(file);
            return new lite_1.Tiktoken(jsonData);
        };
        cache[encoding] = load().catch((error) => {
            delete cache[encoding];
            throw error;
        });
    }
    return await cache[encoding];
}
|
|
37
|
+
// Maps an OpenAI model name to its encoding name (via js-tiktoken) and
// returns the shared Tiktoken instance for that encoding.
async function encodingForModel(model) {
    const encodingName = (0, lite_1.getEncodingNameForModel)(model);
    return await getEncoding(encodingName);
}
|
|
40
|
+
//# sourceMappingURL=tiktoken.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"tiktoken.js","sourceRoot":"","sources":["../../../src/utils/tokenizer/tiktoken.ts"],"names":[],"mappings":";;AAcA,kCA0BC;AAED,4CAEC;AA5CD,0CAAuC;AAEvC,2CAAqE;AACrE,+CAAyC;AACzC,+BAA4B;AAE5B,MAAM,KAAK,GAAsC,EAAE,CAAC;AAEpD,MAAM,YAAY,GAAG,KAAK,EAAE,QAAgB,EAAwB,EAAE;IACrE,MAAM,QAAQ,GAAG,IAAA,WAAI,EAAC,SAAS,EAAE,QAAQ,CAAC,CAAC;IAC3C,MAAM,OAAO,GAAG,MAAM,IAAA,mBAAQ,EAAC,QAAQ,EAAE,OAAO,CAAC,CAAC;IAClD,OAAO,MAAM,IAAA,wBAAS,EAAC,OAAO,CAAC,CAAC;AACjC,CAAC,CAAC;AAEK,KAAK,UAAU,WAAW,CAAC,QAA0B;IAC3D,IAAI,CAAC,CAAC,QAAQ,IAAI,KAAK,CAAC,EAAE,CAAC;QAE1B,KAAK,CAAC,QAAQ,CAAC,GAAG,CAAC,KAAK,IAAI,EAAE;YAC7B,IAAI,QAAqB,CAAC;YAE1B,QAAQ,QAAQ,EAAE,CAAC;gBAClB,KAAK,YAAY;oBAChB,QAAQ,GAAG,MAAM,YAAY,CAAC,mBAAmB,CAAC,CAAC;oBACnD,MAAM;gBACP,KAAK,aAAa;oBACjB,QAAQ,GAAG,MAAM,YAAY,CAAC,oBAAoB,CAAC,CAAC;oBACpD,MAAM;gBACP;oBAEC,QAAQ,GAAG,MAAM,YAAY,CAAC,oBAAoB,CAAC,CAAC;YACtD,CAAC;YAED,OAAO,IAAI,eAAQ,CAAC,QAAQ,CAAC,CAAC;QAC/B,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,KAAK,EAAE,EAAE;YACpB,OAAO,KAAK,CAAC,QAAQ,CAAC,CAAC;YACvB,MAAM,KAAK,CAAC;QACb,CAAC,CAAC,CAAC;IACJ,CAAC;IAED,OAAO,MAAM,KAAK,CAAC,QAAQ,CAAC,CAAC;AAC9B,CAAC;AAEM,KAAK,UAAU,gBAAgB,CAAC,KAAoB;IAC1D,OAAO,MAAM,WAAW,CAAC,IAAA,8BAAuB,EAAC,KAAK,CAAC,CAAC,CAAC;AAC1D,CAAC"}
|
|
@@ -0,0 +1,4 @@
|
|
|
1
|
+
import type { TiktokenModel } from 'js-tiktoken';
/**
 * Estimates the token count of `text` from its character length, using a
 * per-model characters-per-token ratio (falls back to 4 chars/token for
 * unknown models). Returns 0 for empty or non-string input; never throws.
 */
export declare function estimateTokensByCharCount(text: string, model?: string): number;
/**
 * Splits `text` into chunks of roughly `chunkSize` tokens with
 * `chunkOverlap` tokens of overlap, where chunk boundaries are computed from
 * the characters-per-token ratio for `model` rather than a real tokenizer.
 */
export declare function estimateTextSplitsByTokens(text: string, chunkSize: number, chunkOverlap: number, model?: string): string[];
/**
 * Sums token counts over `list` using the real tiktoken encoder per item,
 * falling back to the character-based estimate for items with long repeated
 * runs or when encoding fails. Resolves to 0 on any unexpected error.
 */
export declare function estimateTokensFromStringList(list: string[], model: TiktokenModel): Promise<number>;
|
|
@@ -0,0 +1,98 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.estimateTokensByCharCount = estimateTokensByCharCount;
|
|
4
|
+
exports.estimateTextSplitsByTokens = estimateTextSplitsByTokens;
|
|
5
|
+
exports.estimateTokensFromStringList = estimateTokensFromStringList;
|
|
6
|
+
const tiktoken_1 = require("./tiktoken");
|
|
7
|
+
const helpers_1 = require("../helpers");
|
|
8
|
+
// Average characters-per-token ratios per model / encoding family, used to
// approximate token counts without running the real tokenizer.
const MODEL_CHAR_PER_TOKEN_RATIOS = {
    'gpt-4o': 3.8,
    'gpt-4': 4.0,
    'gpt-3.5-turbo': 4.0,
    cl100k_base: 4.0,
    o200k_base: 3.5,
    p50k_base: 4.2,
    r50k_base: 4.2,
};
/**
 * Estimates the token count of `text` from its character length.
 *
 * Looks up the characters-per-token ratio for `model` and falls back to
 * 4 chars/token when the model is unknown or the ratio is unusable.
 * Returns 0 for empty or non-string input and never throws.
 */
function estimateTokensByCharCount(text, model = 'cl100k_base') {
    const FALLBACK_RATIO = 4.0;
    try {
        if (typeof text !== 'string' || text.length === 0) {
            return 0;
        }
        const ratio = MODEL_CHAR_PER_TOKEN_RATIOS[model] || FALLBACK_RATIO;
        // Guard against non-numeric table entries (e.g. inherited properties).
        const divisor = Number.isFinite(ratio) && ratio > 0 ? ratio : FALLBACK_RATIO;
        return Math.ceil(text.length / divisor);
    }
    catch (error) {
        // Defensive: estimation must never throw; assume ~4 chars per token.
        return Math.ceil((text?.length || 0) / 4.0);
    }
}
|
|
34
|
+
/**
 * Splits `text` into chunks of roughly `chunkSize` tokens with
 * `chunkOverlap` tokens of overlap, sizing chunks by the characters-per-token
 * ratio for `model` instead of running a real tokenizer.
 *
 * Returns [] for empty/non-string input, the whole text as one chunk when
 * `chunkSize` is not a positive finite number, and never throws.
 */
function estimateTextSplitsByTokens(text, chunkSize, chunkOverlap, model = 'cl100k_base') {
    try {
        if (typeof text !== 'string' || text.length === 0) {
            return [];
        }
        if (!Number.isFinite(chunkSize) || chunkSize <= 0) {
            return [text];
        }
        // Clamp the overlap into [0, chunkSize - 1] so the scan always advances.
        let overlapTokens = 0;
        if (Number.isFinite(chunkOverlap) && chunkOverlap >= 0) {
            overlapTokens = Math.min(chunkOverlap, chunkSize - 1);
        }
        const ratio = MODEL_CHAR_PER_TOKEN_RATIOS[model] || 4.0;
        const windowChars = Math.floor(chunkSize * ratio);
        const overlapChars = Math.floor(overlapTokens * ratio);
        const pieces = [];
        for (let cursor = 0; cursor < text.length;) {
            const sliceEnd = Math.min(cursor + windowChars, text.length);
            pieces.push(text.slice(cursor, sliceEnd));
            if (sliceEnd >= text.length) {
                break;
            }
            // Step back by the overlap, but move at least one character forward.
            cursor = Math.max(sliceEnd - overlapChars, cursor + 1);
        }
        return pieces;
    }
    catch (error) {
        // Best effort: on any unexpected failure, return the text unsplit.
        return text ? [text] : [];
    }
}
|
|
64
|
+
/**
 * Sums token counts over `list` using the real tiktoken encoder for `model`.
 *
 * Per item: non-string/empty items count as 0; items with long sequential
 * repeats (pathologically slow for BPE encoding) use the character-based
 * estimate; items the encoder rejects also fall back to the estimate.
 * Resolves to 0 for non-array input or on any unexpected error.
 */
async function estimateTokensFromStringList(list, model) {
    try {
        if (!Array.isArray(list)) {
            return 0;
        }
        const encoder = await (0, tiktoken_1.encodingForModel)(model);
        const countOne = async (text) => {
            try {
                if (!text || typeof text !== 'string') {
                    return 0;
                }
                // Long repeated runs make real BPE encoding very slow;
                // use the cheap character-based estimate for those.
                if ((0, helpers_1.hasLongSequentialRepeat)(text)) {
                    return estimateTokensByCharCount(text, model);
                }
                try {
                    return encoder.encode(text).length;
                }
                catch (encodingError) {
                    return estimateTokensByCharCount(text, model);
                }
            }
            catch (itemError) {
                return 0;
            }
        };
        const perItem = await Promise.all(list.map(countOne));
        return perItem.reduce((sum, count) => sum + count, 0);
    }
    catch (error) {
        return 0;
    }
}
|
|
98
|
+
//# sourceMappingURL=token-estimator.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"token-estimator.js","sourceRoot":"","sources":["../../../src/utils/tokenizer/token-estimator.ts"],"names":[],"mappings":";;AAgCA,8DAyBC;AAYD,gEAgDC;AAWD,oEA+CC;AAxKD,yCAA8C;AAC9C,wCAAqD;AAMrD,MAAM,2BAA2B,GAA2B;IAC3D,QAAQ,EAAE,GAAG;IACb,OAAO,EAAE,GAAG;IACZ,eAAe,EAAE,GAAG;IACpB,WAAW,EAAE,GAAG;IAChB,UAAU,EAAE,GAAG;IACf,SAAS,EAAE,GAAG;IACd,SAAS,EAAE,GAAG;CACd,CAAC;AAUF,SAAgB,yBAAyB,CAAC,IAAY,EAAE,QAAgB,aAAa;IACpF,IAAI,CAAC;QAEJ,IAAI,CAAC,IAAI,IAAI,OAAO,IAAI,KAAK,QAAQ,IAAI,IAAI,CAAC,MAAM,KAAK,CAAC,EAAE,CAAC;YAC5D,OAAO,CAAC,CAAC;QACV,CAAC;QAGD,MAAM,aAAa,GAAG,2BAA2B,CAAC,KAAK,CAAC,IAAI,GAAG,CAAC;QAGhE,IAAI,CAAC,MAAM,CAAC,QAAQ,CAAC,aAAa,CAAC,IAAI,aAAa,IAAI,CAAC,EAAE,CAAC;YAE3D,MAAM,eAAe,GAAG,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,GAAG,GAAG,CAAC,CAAC;YACrD,OAAO,eAAe,CAAC;QACxB,CAAC;QAGD,MAAM,eAAe,GAAG,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,GAAG,aAAa,CAAC,CAAC;QAE/D,OAAO,eAAe,CAAC;IACxB,CAAC;IAAC,OAAO,KAAK,EAAE,CAAC;QAEhB,OAAO,IAAI,CAAC,IAAI,CAAC,CAAC,IAAI,EAAE,MAAM,IAAI,CAAC,CAAC,GAAG,GAAG,CAAC,CAAC;IAC7C,CAAC;AACF,CAAC;AAYD,SAAgB,0BAA0B,CACzC,IAAY,EACZ,SAAiB,EACjB,YAAoB,EACpB,QAAgB,aAAa;IAE7B,IAAI,CAAC;QAEJ,IAAI,CAAC,IAAI,IAAI,OAAO,IAAI,KAAK,QAAQ,IAAI,IAAI,CAAC,MAAM,KAAK,CAAC,EAAE,CAAC;YAC5D,OAAO,EAAE,CAAC;QACX,CAAC;QAGD,IAAI,CAAC,MAAM,CAAC,QAAQ,CAAC,SAAS,CAAC,IAAI,SAAS,IAAI,CAAC,EAAE,CAAC;YAEnD,OAAO,CAAC,IAAI,CAAC,CAAC;QACf,CAAC;QAGD,MAAM,YAAY,GACjB,MAAM,CAAC,QAAQ,CAAC,YAAY,CAAC,IAAI,YAAY,IAAI,CAAC;YACjD,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,YAAY,EAAE,SAAS,GAAG,CAAC,CAAC;YACvC,CAAC,CAAC,CAAC,CAAC;QAEN,MAAM,aAAa,GAAG,2BAA2B,CAAC,KAAK,CAAC,IAAI,GAAG,CAAC;QAChE,MAAM,gBAAgB,GAAG,IAAI,CAAC,KAAK,CAAC,SAAS,GAAG,aAAa,CAAC,CAAC;QAC/D,MAAM,cAAc,GAAG,IAAI,CAAC,KAAK,CAAC,YAAY,GAAG,aAAa,CAAC,CAAC;QAEhE,MAAM,MAAM,GAAa,EAAE,CAAC;QAC5B,IAAI,KAAK,GAAG,CAAC,CAAC;QAEd,OAAO,KAAK,GAAG,IAAI,CAAC,MAAM,EAAE,CAAC;YAC5B,MAAM,GAAG,GAAG,IAAI,CAAC,GAAG,CAAC,KAAK,GAAG,gBAAgB,EAAE,IAAI,CAAC,MAAM,CAAC,CAAC;YAC5D,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC,KAAK,EAAE,GAAG,CAAC,CAAC,CAAC;YAEpC,IAAI,GAAG,IA
AI,IAAI,CAAC,MAAM,EAAE,CAAC;gBACxB,MAAM;YACP,CAAC;YAGD,KAAK,GAAG,IAAI,CAAC,GAAG,CAAC,GAAG,GAAG,cAAc,EAAE,KAAK,GAAG,CAAC,CAAC,CAAC;QACnD,CAAC;QAED,OAAO,MAAM,CAAC;IACf,CAAC;IAAC,OAAO,KAAK,EAAE,CAAC;QAEhB,OAAO,IAAI,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC;IAC3B,CAAC;AACF,CAAC;AAWM,KAAK,UAAU,4BAA4B,CACjD,IAAc,EACd,KAAoB;IAEpB,IAAI,CAAC;QAEJ,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,IAAI,CAAC,EAAE,CAAC;YAC1B,OAAO,CAAC,CAAC;QACV,CAAC;QAED,MAAM,OAAO,GAAG,MAAM,IAAA,2BAAgB,EAAC,KAAK,CAAC,CAAC;QAC9C,MAAM,iBAAiB,GAAG,MAAM,OAAO,CAAC,GAAG,CAC1C,IAAI,CAAC,GAAG,CAAC,KAAK,EAAE,IAAI,EAAE,EAAE;YACvB,IAAI,CAAC;gBAEJ,IAAI,CAAC,IAAI,IAAI,OAAO,IAAI,KAAK,QAAQ,EAAE,CAAC;oBACvC,OAAO,CAAC,CAAC;gBACV,CAAC;gBAGD,IAAI,IAAA,iCAAuB,EAAC,IAAI,CAAC,EAAE,CAAC;oBACnC,MAAM,eAAe,GAAG,yBAAyB,CAAC,IAAI,EAAE,KAAK,CAAC,CAAC;oBAC/D,OAAO,eAAe,CAAC;gBACxB,CAAC;gBAGD,IAAI,CAAC;oBACJ,MAAM,MAAM,GAAG,OAAO,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC;oBACpC,OAAO,MAAM,CAAC,MAAM,CAAC;gBACtB,CAAC;gBAAC,OAAO,aAAa,EAAE,CAAC;oBAExB,OAAO,yBAAyB,CAAC,IAAI,EAAE,KAAK,CAAC,CAAC;gBAC/C,CAAC;YACF,CAAC;YAAC,OAAO,SAAS,EAAE,CAAC;gBAEpB,OAAO,CAAC,CAAC;YACV,CAAC;QACF,CAAC,CAAC,CACF,CAAC;QAEF,MAAM,WAAW,GAAG,iBAAiB,CAAC,MAAM,CAAC,CAAC,GAAG,EAAE,IAAI,EAAE,EAAE,CAAC,GAAG,GAAG,IAAI,EAAE,CAAC,CAAC,CAAC;QAE3E,OAAO,WAAW,CAAC;IACpB,CAAC;IAAC,OAAO,KAAK,EAAE,CAAC;QAEhB,OAAO,CAAC,CAAC;IACV,CAAC;AACF,CAAC"}
|
package/package.json
ADDED
|
@@ -0,0 +1,51 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "@n8n/ai-utilities",
|
|
3
|
+
"version": "0.2.0",
|
|
4
|
+
"description": "Utilities for building AI nodes in n8n",
|
|
5
|
+
"main": "dist/index.js",
|
|
6
|
+
"module": "src/index.ts",
|
|
7
|
+
"types": "dist/index.d.ts",
|
|
8
|
+
"files": [
|
|
9
|
+
"dist"
|
|
10
|
+
],
|
|
11
|
+
"devDependencies": {
|
|
12
|
+
"@types/json-schema": "^7.0.15",
|
|
13
|
+
"jest-mock-extended": "^3.0.4",
|
|
14
|
+
"@types/mime-types": "3.0.1",
|
|
15
|
+
"tsx": "^4.19.3"
|
|
16
|
+
},
|
|
17
|
+
"dependencies": {
|
|
18
|
+
"zod": "3.25.67",
|
|
19
|
+
"zod-to-json-schema": "3.23.3",
|
|
20
|
+
"@langchain/core": "1.1.8",
|
|
21
|
+
"@langchain/classic": "1.0.5",
|
|
22
|
+
"@langchain/community": "1.0.5",
|
|
23
|
+
"@langchain/textsplitters": "1.0.1",
|
|
24
|
+
"@langchain/openai": "1.1.3",
|
|
25
|
+
"langchain": "1.2.3",
|
|
26
|
+
"tmp-promise": "3.0.3",
|
|
27
|
+
"js-tiktoken": "1.0.12",
|
|
28
|
+
"https-proxy-agent": "7.0.6",
|
|
29
|
+
"proxy-from-env": "^1.1.0",
|
|
30
|
+
"undici": "^6.21.0",
|
|
31
|
+
"n8n-workflow": "2.8.0",
|
|
32
|
+
"@n8n/config": "2.7.0",
|
|
33
|
+
"@n8n/typescript-config": "1.3.0"
|
|
34
|
+
},
|
|
35
|
+
"scripts": {
|
|
36
|
+
"example:run": "tsx examples/run.ts",
|
|
37
|
+
"clean": "rimraf dist .turbo",
|
|
38
|
+
"dev": "pnpm run watch",
|
|
39
|
+
"typecheck": "tsc --noEmit",
|
|
40
|
+
"copy-tokenizer-json": "node scripts/copy-tokenizer-json.js .",
|
|
41
|
+
"build": "tsc --build tsconfig.build.json && tsc-alias -p tsconfig.build.json && pnpm copy-tokenizer-json",
|
|
42
|
+
"format": "biome format --write .",
|
|
43
|
+
"format:check": "biome ci .",
|
|
44
|
+
"lint": "eslint . --quiet",
|
|
45
|
+
"lint:fix": "eslint . --fix",
|
|
46
|
+
"watch": "tsc-watch -p tsconfig.build.json",
|
|
47
|
+
"test": "jest",
|
|
48
|
+
"test:unit": "jest",
|
|
49
|
+
"test:dev": "jest --watch"
|
|
50
|
+
}
|
|
51
|
+
}
|