@n8n/n8n-nodes-langchain 1.101.2 → 1.102.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/credentials/CohereApi.credentials.js +7 -1
- package/dist/credentials/CohereApi.credentials.js.map +1 -1
- package/dist/known/credentials.json +2 -0
- package/dist/known/nodes.json +12 -0
- package/dist/nodes/agents/Agent/Agent.node.js +1 -0
- package/dist/nodes/agents/Agent/Agent.node.js.map +1 -1
- package/dist/nodes/agents/Agent/AgentTool.node.js +57 -0
- package/dist/nodes/agents/Agent/AgentTool.node.js.map +1 -0
- package/dist/nodes/agents/Agent/V2/AgentToolV2.node.js +104 -0
- package/dist/nodes/agents/Agent/V2/AgentToolV2.node.js.map +1 -0
- package/dist/nodes/agents/Agent/V2/AgentV2.node.js +4 -66
- package/dist/nodes/agents/Agent/V2/AgentV2.node.js.map +1 -1
- package/dist/nodes/agents/Agent/V2/utils.js +92 -0
- package/dist/nodes/agents/Agent/V2/utils.js.map +1 -0
- package/dist/nodes/agents/Agent/agents/ToolsAgent/V2/description.js +13 -10
- package/dist/nodes/agents/Agent/agents/ToolsAgent/V2/description.js.map +1 -1
- package/dist/nodes/agents/Agent/agents/ToolsAgent/V2/execute.js +14 -7
- package/dist/nodes/agents/Agent/agents/ToolsAgent/V2/execute.js.map +1 -1
- package/dist/nodes/agents/Agent/agents/ToolsAgent/common.js.map +1 -1
- package/dist/nodes/chains/SentimentAnalysis/SentimentAnalysis.node.js +1 -1
- package/dist/nodes/chains/SentimentAnalysis/SentimentAnalysis.node.js.map +1 -1
- package/dist/nodes/document_loaders/DocumentGithubLoader/DocumentGithubLoader.node.js +1 -1
- package/dist/nodes/document_loaders/DocumentGithubLoader/DocumentGithubLoader.node.js.map +1 -1
- package/dist/nodes/embeddings/EmbeddingsOpenAI/EmbeddingsOpenAi.node.js +6 -0
- package/dist/nodes/embeddings/EmbeddingsOpenAI/EmbeddingsOpenAi.node.js.map +1 -1
- package/dist/nodes/llms/LMChatAnthropic/LmChatAnthropic.node.js +2 -2
- package/dist/nodes/llms/LMChatAnthropic/LmChatAnthropic.node.js.map +1 -1
- package/dist/nodes/llms/LmChatCohere/LmChatCohere.node.js +181 -0
- package/dist/nodes/llms/LmChatCohere/LmChatCohere.node.js.map +1 -0
- package/dist/nodes/llms/LmChatCohere/cohere.dark.svg +5 -0
- package/dist/nodes/llms/LmChatCohere/cohere.svg +5 -0
- package/dist/nodes/llms/N8nLlmTracing.js +4 -4
- package/dist/nodes/llms/N8nLlmTracing.js.map +1 -1
- package/dist/nodes/mcp/McpTrigger/McpTrigger.node.js +1 -1
- package/dist/nodes/mcp/McpTrigger/McpTrigger.node.js.map +1 -1
- package/dist/nodes/tools/ToolCode/ToolCode.node.js +1 -2
- package/dist/nodes/tools/ToolCode/ToolCode.node.js.map +1 -1
- package/dist/nodes/tools/ToolVectorStore/ToolVectorStore.node.js +1 -2
- package/dist/nodes/tools/ToolVectorStore/ToolVectorStore.node.js.map +1 -1
- package/dist/nodes/tools/ToolWorkflow/v2/ToolWorkflowV2.node.js +2 -2
- package/dist/nodes/tools/ToolWorkflow/v2/ToolWorkflowV2.node.js.map +1 -1
- package/dist/nodes/trigger/ChatTrigger/ChatTrigger.node.js +2 -1
- package/dist/nodes/trigger/ChatTrigger/ChatTrigger.node.js.map +1 -1
- package/dist/nodes/trigger/ChatTrigger/templates.js +3 -1
- package/dist/nodes/trigger/ChatTrigger/templates.js.map +1 -1
- package/dist/nodes/vector_store/shared/createVectorStoreNode/operations/retrieveAsToolOperation.js +2 -1
- package/dist/nodes/vector_store/shared/createVectorStoreNode/operations/retrieveAsToolOperation.js.map +1 -1
- package/dist/nodes/vendors/GoogleGemini/GoogleGemini.node.js +42 -0
- package/dist/nodes/vendors/GoogleGemini/GoogleGemini.node.js.map +1 -0
- package/dist/nodes/vendors/GoogleGemini/actions/audio/analyze.operation.js +125 -0
- package/dist/nodes/vendors/GoogleGemini/actions/audio/analyze.operation.js.map +1 -0
- package/dist/nodes/vendors/GoogleGemini/actions/audio/index.js +74 -0
- package/dist/nodes/vendors/GoogleGemini/actions/audio/index.js.map +1 -0
- package/dist/nodes/vendors/GoogleGemini/actions/audio/transcribe.operation.js +184 -0
- package/dist/nodes/vendors/GoogleGemini/actions/audio/transcribe.operation.js.map +1 -0
- package/dist/nodes/vendors/GoogleGemini/actions/descriptions.js +52 -0
- package/dist/nodes/vendors/GoogleGemini/actions/descriptions.js.map +1 -0
- package/dist/nodes/vendors/GoogleGemini/actions/document/analyze.operation.js +125 -0
- package/dist/nodes/vendors/GoogleGemini/actions/document/analyze.operation.js.map +1 -0
- package/dist/nodes/vendors/GoogleGemini/actions/document/index.js +64 -0
- package/dist/nodes/vendors/GoogleGemini/actions/document/index.js.map +1 -0
- package/dist/nodes/vendors/GoogleGemini/actions/file/index.js +64 -0
- package/dist/nodes/vendors/GoogleGemini/actions/file/index.js.map +1 -0
- package/dist/nodes/vendors/GoogleGemini/actions/file/upload.operation.js +119 -0
- package/dist/nodes/vendors/GoogleGemini/actions/file/upload.operation.js.map +1 -0
- package/dist/nodes/vendors/GoogleGemini/actions/image/analyze.operation.js +125 -0
- package/dist/nodes/vendors/GoogleGemini/actions/image/analyze.operation.js.map +1 -0
- package/dist/nodes/vendors/GoogleGemini/actions/image/generate.operation.js +167 -0
- package/dist/nodes/vendors/GoogleGemini/actions/image/generate.operation.js.map +1 -0
- package/dist/nodes/vendors/GoogleGemini/actions/image/index.js +74 -0
- package/dist/nodes/vendors/GoogleGemini/actions/image/index.js.map +1 -0
- package/dist/nodes/vendors/GoogleGemini/actions/node.type.js +17 -0
- package/dist/nodes/vendors/GoogleGemini/actions/node.type.js.map +1 -0
- package/dist/nodes/vendors/GoogleGemini/actions/router.js +97 -0
- package/dist/nodes/vendors/GoogleGemini/actions/router.js.map +1 -0
- package/dist/nodes/vendors/GoogleGemini/actions/text/index.js +64 -0
- package/dist/nodes/vendors/GoogleGemini/actions/text/index.js.map +1 -0
- package/dist/nodes/vendors/GoogleGemini/actions/text/message.operation.js +339 -0
- package/dist/nodes/vendors/GoogleGemini/actions/text/message.operation.js.map +1 -0
- package/dist/nodes/vendors/GoogleGemini/actions/versionDescription.js +131 -0
- package/dist/nodes/vendors/GoogleGemini/actions/versionDescription.js.map +1 -0
- package/dist/nodes/vendors/GoogleGemini/actions/video/analyze.operation.js +125 -0
- package/dist/nodes/vendors/GoogleGemini/actions/video/analyze.operation.js.map +1 -0
- package/dist/nodes/vendors/GoogleGemini/actions/video/download.operation.js +88 -0
- package/dist/nodes/vendors/GoogleGemini/actions/video/download.operation.js.map +1 -0
- package/dist/nodes/vendors/GoogleGemini/actions/video/generate.operation.js +228 -0
- package/dist/nodes/vendors/GoogleGemini/actions/video/generate.operation.js.map +1 -0
- package/dist/nodes/vendors/GoogleGemini/actions/video/index.js +84 -0
- package/dist/nodes/vendors/GoogleGemini/actions/video/index.js.map +1 -0
- package/dist/nodes/vendors/GoogleGemini/gemini.svg +1 -0
- package/dist/nodes/vendors/GoogleGemini/helpers/baseAnalyze.js +100 -0
- package/dist/nodes/vendors/GoogleGemini/helpers/baseAnalyze.js.map +1 -0
- package/dist/nodes/vendors/GoogleGemini/helpers/interfaces.js +17 -0
- package/dist/nodes/vendors/GoogleGemini/helpers/interfaces.js.map +1 -0
- package/dist/nodes/vendors/GoogleGemini/helpers/utils.js +91 -0
- package/dist/nodes/vendors/GoogleGemini/helpers/utils.js.map +1 -0
- package/dist/nodes/vendors/GoogleGemini/methods/index.js +39 -0
- package/dist/nodes/vendors/GoogleGemini/methods/index.js.map +1 -0
- package/dist/nodes/vendors/GoogleGemini/methods/listSearch.js +74 -0
- package/dist/nodes/vendors/GoogleGemini/methods/listSearch.js.map +1 -0
- package/dist/nodes/vendors/GoogleGemini/transport/index.js +48 -0
- package/dist/nodes/vendors/GoogleGemini/transport/index.js.map +1 -0
- package/dist/nodes/vendors/OpenAi/actions/router.js +1 -1
- package/dist/nodes/vendors/OpenAi/actions/router.js.map +1 -1
- package/dist/types/credentials.json +2 -2
- package/dist/types/nodes.json +4 -1
- package/dist/utils/descriptions.js +13 -2
- package/dist/utils/descriptions.js.map +1 -1
- package/dist/utils/helpers.js +2 -7
- package/dist/utils/helpers.js.map +1 -1
- package/dist/utils/output_parsers/N8nOutputParser.js.map +1 -1
- package/package.json +10 -5
package/dist/nodes/agents/Agent/agents/ToolsAgent/V2/execute.js

@@ -61,6 +61,8 @@ function createAgentExecutor(model, tools, prompt, options, outputParser, memory
     (0, import_common.getAgentStepsParser)(outputParser, memory),
     import_common.fixEmptyContentMessage
   ]);
+  runnableAgent.singleAction = false;
+  runnableAgent.streamRunnable = false;
   return import_agents.AgentExecutor.fromAgentAndTools({
     agent: runnableAgent,
     memory,
@@ -69,14 +71,14 @@ function createAgentExecutor(model, tools, prompt, options, outputParser, memory
     maxIterations: options.maxIterations ?? 10
   });
 }
-async function processEventStream(ctx, eventStream, returnIntermediateSteps = false) {
+async function processEventStream(ctx, eventStream, itemIndex, returnIntermediateSteps = false) {
   const agentResult = {
     output: ""
   };
   if (returnIntermediateSteps) {
     agentResult.intermediateSteps = [];
   }
-  ctx.sendChunk("begin");
+  ctx.sendChunk("begin", itemIndex);
   for await (const event of eventStream) {
     switch (event.event) {
       case "on_chat_model_stream":
@@ -91,7 +93,7 @@ async function processEventStream(ctx, eventStream, returnIntermediateSteps = fa
           } else if (typeof chunkContent === "string") {
             chunkText = chunkContent;
           }
-          ctx.sendChunk("item", chunkText);
+          ctx.sendChunk("item", itemIndex, chunkText);
           agentResult.output += chunkText;
         }
         break;
@@ -131,7 +133,7 @@ async function processEventStream(ctx, eventStream, returnIntermediateSteps = fa
         break;
     }
   }
-  ctx.sendChunk("end");
+  ctx.sendChunk("end", itemIndex);
   return agentResult;
 }
 async function toolsAgentExecute() {
@@ -193,8 +195,8 @@ async function toolsAgentExecute() {
       formatting_instructions: "IMPORTANT: For your response to user, you MUST use the `format_final_json_response` tool with your complete answer formatted according to the required schema. Do not attempt to format the JSON manually - always use this tool. Your response will be rejected if it is not properly formatted through this tool. Only use this tool once you are ready to provide your final answer."
     };
     const executeOptions = { signal: this.getExecutionCancelSignal() };
-    const isStreamingAvailable = this.isStreaming();
-    if (enableStreaming && isStreamingAvailable && this.getNode().typeVersion >= 2.1) {
+    const isStreamingAvailable = "isStreaming" in this ? this.isStreaming?.() : void 0;
+    if ("isStreaming" in this && enableStreaming && isStreamingAvailable && this.getNode().typeVersion >= 2.1) {
       const chatHistory = await memory?.chatHistory.getMessages();
       const eventStream = executor.streamEvents(
         {
@@ -206,7 +208,12 @@ async function toolsAgentExecute() {
           ...executeOptions
         }
       );
-      return await processEventStream(this, eventStream, options.returnIntermediateSteps);
+      return await processEventStream(
+        this,
+        eventStream,
+        itemIndex,
+        options.returnIntermediateSteps
+      );
     } else {
       return await executor.invoke(invokeParams, executeOptions);
     }
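The hunks above change the chat-streaming contract of the Tools Agent executor: processEventStream now receives the itemIndex of the item being processed and forwards it to every sendChunk call, and the streaming branch only runs when the execution context actually exposes isStreaming (per the updated source map below, toolsAgentExecute can now also be called with an ISupplyDataFunctions context, which has no streaming support). The TypeScript sketch below illustrates that shape only; StreamingContext, streamToContext, and canStream are hypothetical stand-ins for this note, not the real IExecuteFunctions/ISupplyDataFunctions interfaces from n8n-workflow.

// Minimal sketch of the new streaming contract (hypothetical stand-in types,
// not the real n8n-workflow interfaces).
type ChunkType = 'begin' | 'item' | 'end';

interface StreamingContext {
  // Assumption: mirrors the sendChunk(type, itemIndex, content?) calls in the diff above.
  sendChunk(type: ChunkType, itemIndex: number, content?: string): void;
  // Present on execute contexts, absent on supply-data contexts.
  isStreaming?(): boolean;
}

// Mirrors processEventStream's new parameter order: the item index is threaded
// through so every emitted chunk is attributed to the correct input item.
async function streamToContext(
  ctx: StreamingContext,
  chunks: AsyncIterable<string>,
  itemIndex: number,
): Promise<{ output: string }> {
  const result = { output: '' };
  ctx.sendChunk('begin', itemIndex);        // was sendChunk('begin') in 1.101.2
  for await (const text of chunks) {
    ctx.sendChunk('item', itemIndex, text); // was sendChunk('item', text)
    result.output += text;
  }
  ctx.sendChunk('end', itemIndex);          // was sendChunk('end')
  return result;
}

// Guard corresponding to the new `"isStreaming" in this` check: only stream when
// the context exposes isStreaming(), streaming is enabled, and the node is >= 2.1.
function canStream(ctx: StreamingContext, enableStreaming: boolean, typeVersion: number): boolean {
  return 'isStreaming' in ctx && enableStreaming && (ctx.isStreaming?.() ?? false) && typeVersion >= 2.1;
}

Threading the item index through the chunk events lets a multi-item execution attribute streamed output to the correct paired item, consistent with the pairedItem: { item: itemIndex } handling elsewhere in execute.js.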
package/dist/nodes/agents/Agent/agents/ToolsAgent/V2/execute.js.map

@@ -1 +1 @@
-
{"version":3,"sources":["../../../../../../../nodes/agents/Agent/agents/ToolsAgent/V2/execute.ts"],"sourcesContent":["import type { StreamEvent } from '@langchain/core/dist/tracers/event_stream';\nimport type { IterableReadableStream } from '@langchain/core/dist/utils/stream';\nimport type { BaseChatModel } from '@langchain/core/language_models/chat_models';\nimport type { AIMessageChunk, MessageContentText } from '@langchain/core/messages';\nimport type { ChatPromptTemplate } from '@langchain/core/prompts';\nimport { RunnableSequence } from '@langchain/core/runnables';\nimport {\n\tAgentExecutor,\n\ttype AgentRunnableSequence,\n\tcreateToolCallingAgent,\n} from 'langchain/agents';\nimport type { BaseChatMemory } from 'langchain/memory';\nimport type { DynamicStructuredTool, Tool } from 'langchain/tools';\nimport omit from 'lodash/omit';\nimport { jsonParse, NodeOperationError, sleep } from 'n8n-workflow';\nimport type { IExecuteFunctions, INodeExecutionData } from 'n8n-workflow';\nimport assert from 'node:assert';\n\nimport { getPromptInputByType } from '@utils/helpers';\nimport {\n\tgetOptionalOutputParser,\n\ttype N8nOutputParser,\n} from '@utils/output_parsers/N8nOutputParser';\n\nimport {\n\tfixEmptyContentMessage,\n\tgetAgentStepsParser,\n\tgetChatModel,\n\tgetOptionalMemory,\n\tgetTools,\n\tprepareMessages,\n\tpreparePrompt,\n} from '../common';\nimport { SYSTEM_MESSAGE } from '../prompt';\n\n/**\n * Creates an agent executor with the given configuration\n */\nfunction createAgentExecutor(\n\tmodel: BaseChatModel,\n\ttools: Array<DynamicStructuredTool | Tool>,\n\tprompt: ChatPromptTemplate,\n\toptions: { maxIterations?: number; returnIntermediateSteps?: boolean },\n\toutputParser?: N8nOutputParser,\n\tmemory?: BaseChatMemory,\n\tfallbackModel?: BaseChatModel | null,\n) {\n\tconst agent = createToolCallingAgent({\n\t\tllm: model,\n\t\ttools,\n\t\tprompt,\n\t\tstreamRunnable: false,\n\t});\n\n\tlet fallbackAgent: AgentRunnableSequence | undefined;\n\tif (fallbackModel) {\n\t\tfallbackAgent = createToolCallingAgent({\n\t\t\tllm: fallbackModel,\n\t\t\ttools,\n\t\t\tprompt,\n\t\t\tstreamRunnable: false,\n\t\t});\n\t}\n\tconst runnableAgent = RunnableSequence.from([\n\t\tfallbackAgent ? agent.withFallbacks([fallbackAgent]) : agent,\n\t\tgetAgentStepsParser(outputParser, memory),\n\t\tfixEmptyContentMessage,\n\t]);\n\n\treturn AgentExecutor.fromAgentAndTools({\n\t\tagent: runnableAgent,\n\t\tmemory,\n\t\ttools,\n\t\treturnIntermediateSteps: options.returnIntermediateSteps === true,\n\t\tmaxIterations: options.maxIterations ?? 
10,\n\t});\n}\n\nasync function processEventStream(\n\tctx: IExecuteFunctions,\n\teventStream: IterableReadableStream<StreamEvent>,\n\treturnIntermediateSteps: boolean = false,\n): Promise<{ output: string; intermediateSteps?: any[] }> {\n\tconst agentResult: { output: string; intermediateSteps?: any[] } = {\n\t\toutput: '',\n\t};\n\n\tif (returnIntermediateSteps) {\n\t\tagentResult.intermediateSteps = [];\n\t}\n\n\tctx.sendChunk('begin');\n\tfor await (const event of eventStream) {\n\t\t// Stream chat model tokens as they come in\n\t\tswitch (event.event) {\n\t\t\tcase 'on_chat_model_stream':\n\t\t\t\tconst chunk = event.data?.chunk as AIMessageChunk;\n\t\t\t\tif (chunk?.content) {\n\t\t\t\t\tconst chunkContent = chunk.content;\n\t\t\t\t\tlet chunkText = '';\n\t\t\t\t\tif (Array.isArray(chunkContent)) {\n\t\t\t\t\t\tfor (const message of chunkContent) {\n\t\t\t\t\t\t\tchunkText += (message as MessageContentText)?.text;\n\t\t\t\t\t\t}\n\t\t\t\t\t} else if (typeof chunkContent === 'string') {\n\t\t\t\t\t\tchunkText = chunkContent;\n\t\t\t\t\t}\n\t\t\t\t\tctx.sendChunk('item', chunkText);\n\n\t\t\t\t\tagentResult.output += chunkText;\n\t\t\t\t}\n\t\t\t\tbreak;\n\t\t\tcase 'on_chat_model_end':\n\t\t\t\t// Capture full LLM response with tool calls for intermediate steps\n\t\t\t\tif (returnIntermediateSteps && event.data) {\n\t\t\t\t\tconst chatModelData = event.data as any;\n\t\t\t\t\tconst output = chatModelData.output;\n\n\t\t\t\t\t// Check if this LLM response contains tool calls\n\t\t\t\t\tif (output?.tool_calls && output.tool_calls.length > 0) {\n\t\t\t\t\t\tfor (const toolCall of output.tool_calls) {\n\t\t\t\t\t\t\tagentResult.intermediateSteps!.push({\n\t\t\t\t\t\t\t\taction: {\n\t\t\t\t\t\t\t\t\ttool: toolCall.name,\n\t\t\t\t\t\t\t\t\ttoolInput: toolCall.args,\n\t\t\t\t\t\t\t\t\tlog:\n\t\t\t\t\t\t\t\t\t\toutput.content ||\n\t\t\t\t\t\t\t\t\t\t`Calling ${toolCall.name} with input: ${JSON.stringify(toolCall.args)}`,\n\t\t\t\t\t\t\t\t\tmessageLog: [output], // Include the full LLM response\n\t\t\t\t\t\t\t\t\ttoolCallId: toolCall.id,\n\t\t\t\t\t\t\t\t\ttype: toolCall.type,\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t});\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t\tbreak;\n\t\t\tcase 'on_tool_end':\n\t\t\t\t// Capture tool execution results and match with action\n\t\t\t\tif (returnIntermediateSteps && event.data && agentResult.intermediateSteps!.length > 0) {\n\t\t\t\t\tconst toolData = event.data as any;\n\t\t\t\t\t// Find the matching intermediate step for this tool call\n\t\t\t\t\tconst matchingStep = agentResult.intermediateSteps!.find(\n\t\t\t\t\t\t(step) => !step.observation && step.action.tool === event.name,\n\t\t\t\t\t);\n\t\t\t\t\tif (matchingStep) {\n\t\t\t\t\t\tmatchingStep.observation = toolData.output;\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t\tbreak;\n\t\t\tdefault:\n\t\t\t\tbreak;\n\t\t}\n\t}\n\tctx.sendChunk('end');\n\n\treturn agentResult;\n}\n\n/* -----------------------------------------------------------\n Main Executor Function\n----------------------------------------------------------- */\n/**\n * The main executor method for the Tools Agent.\n *\n * This function retrieves necessary components (model, memory, tools), prepares the prompt,\n * creates the agent, and processes each input item. 
The error handling for each item is also\n * managed here based on the node's continueOnFail setting.\n *\n * @returns The array of execution data for all processed items\n */\nexport async function toolsAgentExecute(this: IExecuteFunctions): Promise<INodeExecutionData[][]> {\n\tthis.logger.debug('Executing Tools Agent V2');\n\n\tconst returnData: INodeExecutionData[] = [];\n\tconst items = this.getInputData();\n\tconst batchSize = this.getNodeParameter('options.batching.batchSize', 0, 1) as number;\n\tconst delayBetweenBatches = this.getNodeParameter(\n\t\t'options.batching.delayBetweenBatches',\n\t\t0,\n\t\t0,\n\t) as number;\n\tconst needsFallback = this.getNodeParameter('needsFallback', 0, false) as boolean;\n\tconst memory = await getOptionalMemory(this);\n\tconst model = await getChatModel(this, 0);\n\tassert(model, 'Please connect a model to the Chat Model input');\n\tconst fallbackModel = needsFallback ? await getChatModel(this, 1) : null;\n\n\tif (needsFallback && !fallbackModel) {\n\t\tthrow new NodeOperationError(\n\t\t\tthis.getNode(),\n\t\t\t'Please connect a model to the Fallback Model input or disable the fallback option',\n\t\t);\n\t}\n\n\t// Check if streaming is enabled\n\tconst enableStreaming = this.getNodeParameter('options.enableStreaming', 0, true) as boolean;\n\n\tfor (let i = 0; i < items.length; i += batchSize) {\n\t\tconst batch = items.slice(i, i + batchSize);\n\t\tconst batchPromises = batch.map(async (_item, batchItemIndex) => {\n\t\t\tconst itemIndex = i + batchItemIndex;\n\n\t\t\tconst input = getPromptInputByType({\n\t\t\t\tctx: this,\n\t\t\t\ti: itemIndex,\n\t\t\t\tinputKey: 'text',\n\t\t\t\tpromptTypeKey: 'promptType',\n\t\t\t});\n\t\t\tif (input === undefined) {\n\t\t\t\tthrow new NodeOperationError(this.getNode(), 'The \"text\" parameter is empty.');\n\t\t\t}\n\t\t\tconst outputParser = await getOptionalOutputParser(this, itemIndex);\n\t\t\tconst tools = await getTools(this, outputParser);\n\t\t\tconst options = this.getNodeParameter('options', itemIndex, {}) as {\n\t\t\t\tsystemMessage?: string;\n\t\t\t\tmaxIterations?: number;\n\t\t\t\treturnIntermediateSteps?: boolean;\n\t\t\t\tpassthroughBinaryImages?: boolean;\n\t\t\t};\n\n\t\t\t// Prepare the prompt messages and prompt template.\n\t\t\tconst messages = await prepareMessages(this, itemIndex, {\n\t\t\t\tsystemMessage: options.systemMessage,\n\t\t\t\tpassthroughBinaryImages: options.passthroughBinaryImages ?? true,\n\t\t\t\toutputParser,\n\t\t\t});\n\t\t\tconst prompt: ChatPromptTemplate = preparePrompt(messages);\n\n\t\t\t// Create executors for primary and fallback models\n\t\t\tconst executor = createAgentExecutor(\n\t\t\t\tmodel,\n\t\t\t\ttools,\n\t\t\t\tprompt,\n\t\t\t\toptions,\n\t\t\t\toutputParser,\n\t\t\t\tmemory,\n\t\t\t\tfallbackModel,\n\t\t\t);\n\t\t\t// Invoke with fallback logic\n\t\t\tconst invokeParams = {\n\t\t\t\tinput,\n\t\t\t\tsystem_message: options.systemMessage ?? SYSTEM_MESSAGE,\n\t\t\t\tformatting_instructions:\n\t\t\t\t\t'IMPORTANT: For your response to user, you MUST use the `format_final_json_response` tool with your complete answer formatted according to the required schema. Do not attempt to format the JSON manually - always use this tool. Your response will be rejected if it is not properly formatted through this tool. 
Only use this tool once you are ready to provide your final answer.',\n\t\t\t};\n\t\t\tconst executeOptions = { signal: this.getExecutionCancelSignal() };\n\n\t\t\t// Check if streaming is actually available\n\t\t\tconst isStreamingAvailable = this.isStreaming();\n\n\t\t\tif (enableStreaming && isStreamingAvailable && this.getNode().typeVersion >= 2.1) {\n\t\t\t\tconst chatHistory = await memory?.chatHistory.getMessages();\n\t\t\t\tconst eventStream = executor.streamEvents(\n\t\t\t\t\t{\n\t\t\t\t\t\t...invokeParams,\n\t\t\t\t\t\tchat_history: chatHistory ?? undefined,\n\t\t\t\t\t},\n\t\t\t\t\t{\n\t\t\t\t\t\tversion: 'v2',\n\t\t\t\t\t\t...executeOptions,\n\t\t\t\t\t},\n\t\t\t\t);\n\n\t\t\t\treturn await processEventStream(this, eventStream, options.returnIntermediateSteps);\n\t\t\t} else {\n\t\t\t\t// Handle regular execution\n\t\t\t\treturn await executor.invoke(invokeParams, executeOptions);\n\t\t\t}\n\t\t});\n\n\t\tconst batchResults = await Promise.allSettled(batchPromises);\n\t\t// This is only used to check if the output parser is connected\n\t\t// so we can parse the output if needed. Actual output parsing is done in the loop above\n\t\tconst outputParser = await getOptionalOutputParser(this, 0);\n\t\tbatchResults.forEach((result, index) => {\n\t\t\tconst itemIndex = i + index;\n\t\t\tif (result.status === 'rejected') {\n\t\t\t\tconst error = result.reason as Error;\n\t\t\t\tif (this.continueOnFail()) {\n\t\t\t\t\treturnData.push({\n\t\t\t\t\t\tjson: { error: error.message },\n\t\t\t\t\t\tpairedItem: { item: itemIndex },\n\t\t\t\t\t});\n\t\t\t\t\treturn;\n\t\t\t\t} else {\n\t\t\t\t\tthrow new NodeOperationError(this.getNode(), error);\n\t\t\t\t}\n\t\t\t}\n\t\t\tconst response = result.value;\n\t\t\t// If memory and outputParser are connected, parse the output.\n\t\t\tif (memory && outputParser) {\n\t\t\t\tconst parsedOutput = jsonParse<{ output: Record<string, unknown> }>(\n\t\t\t\t\tresponse.output as string,\n\t\t\t\t);\n\t\t\t\tresponse.output = parsedOutput?.output ?? 
parsedOutput;\n\t\t\t}\n\n\t\t\t// Omit internal keys before returning the result.\n\t\t\tconst itemResult = {\n\t\t\t\tjson: omit(\n\t\t\t\t\tresponse,\n\t\t\t\t\t'system_message',\n\t\t\t\t\t'formatting_instructions',\n\t\t\t\t\t'input',\n\t\t\t\t\t'chat_history',\n\t\t\t\t\t'agent_scratchpad',\n\t\t\t\t),\n\t\t\t\tpairedItem: { item: itemIndex },\n\t\t\t};\n\n\t\t\treturnData.push(itemResult);\n\t\t});\n\n\t\tif (i + batchSize < items.length && delayBetweenBatches > 0) {\n\t\t\tawait sleep(delayBetweenBatches);\n\t\t}\n\t}\n\n\treturn [returnData];\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAKA,uBAAiC;AACjC,oBAIO;AAGP,kBAAiB;AACjB,0BAAqD;AAErD,yBAAmB;AAEnB,qBAAqC;AACrC,6BAGO;AAEP,oBAQO;AACP,oBAA+B;AAK/B,SAAS,oBACR,OACA,OACA,QACA,SACA,cACA,QACA,eACC;AACD,QAAM,YAAQ,sCAAuB;AAAA,IACpC,KAAK;AAAA,IACL;AAAA,IACA;AAAA,IACA,gBAAgB;AAAA,EACjB,CAAC;AAED,MAAI;AACJ,MAAI,eAAe;AAClB,wBAAgB,sCAAuB;AAAA,MACtC,KAAK;AAAA,MACL;AAAA,MACA;AAAA,MACA,gBAAgB;AAAA,IACjB,CAAC;AAAA,EACF;AACA,QAAM,gBAAgB,kCAAiB,KAAK;AAAA,IAC3C,gBAAgB,MAAM,cAAc,CAAC,aAAa,CAAC,IAAI;AAAA,QACvD,mCAAoB,cAAc,MAAM;AAAA,IACxC;AAAA,EACD,CAAC;AAED,SAAO,4BAAc,kBAAkB;AAAA,IACtC,OAAO;AAAA,IACP;AAAA,IACA;AAAA,IACA,yBAAyB,QAAQ,4BAA4B;AAAA,IAC7D,eAAe,QAAQ,iBAAiB;AAAA,EACzC,CAAC;AACF;AAEA,eAAe,mBACd,KACA,aACA,0BAAmC,OACsB;AACzD,QAAM,cAA6D;AAAA,IAClE,QAAQ;AAAA,EACT;AAEA,MAAI,yBAAyB;AAC5B,gBAAY,oBAAoB,CAAC;AAAA,EAClC;AAEA,MAAI,UAAU,OAAO;AACrB,mBAAiB,SAAS,aAAa;AAEtC,YAAQ,MAAM,OAAO;AAAA,MACpB,KAAK;AACJ,cAAM,QAAQ,MAAM,MAAM;AAC1B,YAAI,OAAO,SAAS;AACnB,gBAAM,eAAe,MAAM;AAC3B,cAAI,YAAY;AAChB,cAAI,MAAM,QAAQ,YAAY,GAAG;AAChC,uBAAW,WAAW,cAAc;AACnC,2BAAc,SAAgC;AAAA,YAC/C;AAAA,UACD,WAAW,OAAO,iBAAiB,UAAU;AAC5C,wBAAY;AAAA,UACb;AACA,cAAI,UAAU,QAAQ,SAAS;AAE/B,sBAAY,UAAU;AAAA,QACvB;AACA;AAAA,MACD,KAAK;AAEJ,YAAI,2BAA2B,MAAM,MAAM;AAC1C,gBAAM,gBAAgB,MAAM;AAC5B,gBAAM,SAAS,cAAc;AAG7B,cAAI,QAAQ,cAAc,OAAO,WAAW,SAAS,GAAG;AACvD,uBAAW,YAAY,OAAO,YAAY;AACzC,0BAAY,kBAAmB,KAAK;AAAA,gBACnC,QAAQ;AAAA,kBACP,MAAM,SAAS;AAAA,kBACf,WAAW,SAAS;AAAA,kBACpB,KACC,OAAO,WACP,WAAW,SAAS,IAAI,gBAAgB,KAAK,UAAU,SAAS,IAAI,CAAC;AAAA,kBACtE,YAAY,CAAC,MAAM;AAAA;AAAA,kBACnB,YAAY,SAAS;AAAA,kBACrB,MAAM,SAAS;AAAA,gBAChB;AAAA,cACD,CAAC;AAAA,YACF;AAAA,UACD;AAAA,QACD;AACA;AAAA,MACD,KAAK;AAEJ,YAAI,2BAA2B,MAAM,QAAQ,YAAY,kBAAmB,SAAS,GAAG;AACvF,gBAAM,WAAW,MAAM;AAEvB,gBAAM,eAAe,YAAY,kBAAmB;AAAA,YACnD,CAAC,SAAS,CAAC,KAAK,eAAe,KAAK,OAAO,SAAS,MAAM;AAAA,UAC3D;AACA,cAAI,cAAc;AACjB,yBAAa,cAAc,SAAS;AAAA,UACrC;AAAA,QACD;AACA;AAAA,MACD;AACC;AAAA,IACF;AAAA,EACD;AACA,MAAI,UAAU,KAAK;AAEnB,SAAO;AACR;AAcA,eAAsB,oBAA4E;AACjG,OAAK,OAAO,MAAM,0BAA0B;AAE5C,QAAM,aAAmC,CAAC;AAC1C,QAAM,QAAQ,KAAK,aAAa;AAChC,QAAM,YAAY,KAAK,iBAAiB,8BAA8B,GAAG,CAAC;AAC1E,QAAM,sBAAsB,KAAK;AAAA,IAChC;AAAA,IACA;AAAA,IACA;AAAA,EACD;AACA,QAAM,gBAAgB,KAAK,iBAAiB,iBAAiB,GAAG,KAAK;AACrE,QAAM,SAAS,UAAM,iCAAkB,IAAI;AAC3C,QAAM,QAAQ,UAAM,4BAAa,MAAM,CAAC;AACxC,yBAAAA,SAAO,OAAO,gDAAgD;AAC9D,QAAM,gBAAgB,gBAAgB,UAAM,4BAAa,MAAM,CAAC,IAAI;AAEpE,MAAI,iBAAiB,CAAC,eAAe;AACpC,UAAM,IAAI;AAAA,MACT,KAAK,QAAQ;AAAA,MACb;AAAA,IACD;AAAA,EACD;AAGA,QAAM,kBAAkB,KAAK,iBAAiB,2BAA2B,GAAG,IAAI;AAEhF,WAAS,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK,WAAW;AACjD,UAAM,QAAQ,MAAM,MAAM,GAAG,IAAI,SAAS;AAC1C,UAAM,gBAAgB,MAAM,IAAI,OAAO,OAAO,mBAAmB;AAChE,YAAM,YAAY,IAAI;AAEtB,YAAM,YAAQ,qCAAqB;AAAA,QAClC,KAAK;AAAA,QACL,GAAG;AAAA,QACH,UAAU;AAAA,QACV,eAAe;AAAA,MAChB,CAAC;AACD,UAAI,UAAU,QAAW;AACxB,cAAM,IAAI,uCAAmB,KAAK,QAAQ,GAAG,gCAAgC;AAAA,MAC9E;AACA,YAAMC,gBAAe,UAAM,gDAAwB,MAAM,SAAS;AAClE,YAAM,QAAQ,UAAM,wBAAS,MAAMA,aAAY;AAC/C,YAAM,UAAU,KAAK,iBAAiB,WAAW,WAAW,CAAC,CAAC;AAQ9D,YAAM,WAAW,UAAM,+BAAgB
,MAAM,WAAW;AAAA,QACvD,eAAe,QAAQ;AAAA,QACvB,yBAAyB,QAAQ,2BAA2B;AAAA,QAC5D,cAAAA;AAAA,MACD,CAAC;AACD,YAAM,aAA6B,6BAAc,QAAQ;AAGzD,YAAM,WAAW;AAAA,QAChB;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACAA;AAAA,QACA;AAAA,QACA;AAAA,MACD;AAEA,YAAM,eAAe;AAAA,QACpB;AAAA,QACA,gBAAgB,QAAQ,iBAAiB;AAAA,QACzC,yBACC;AAAA,MACF;AACA,YAAM,iBAAiB,EAAE,QAAQ,KAAK,yBAAyB,EAAE;AAGjE,YAAM,uBAAuB,KAAK,YAAY;AAE9C,UAAI,mBAAmB,wBAAwB,KAAK,QAAQ,EAAE,eAAe,KAAK;AACjF,cAAM,cAAc,MAAM,QAAQ,YAAY,YAAY;AAC1D,cAAM,cAAc,SAAS;AAAA,UAC5B;AAAA,YACC,GAAG;AAAA,YACH,cAAc,eAAe;AAAA,UAC9B;AAAA,UACA;AAAA,YACC,SAAS;AAAA,YACT,GAAG;AAAA,UACJ;AAAA,QACD;AAEA,eAAO,MAAM,mBAAmB,MAAM,aAAa,QAAQ,uBAAuB;AAAA,MACnF,OAAO;AAEN,eAAO,MAAM,SAAS,OAAO,cAAc,cAAc;AAAA,MAC1D;AAAA,IACD,CAAC;AAED,UAAM,eAAe,MAAM,QAAQ,WAAW,aAAa;AAG3D,UAAM,eAAe,UAAM,gDAAwB,MAAM,CAAC;AAC1D,iBAAa,QAAQ,CAAC,QAAQ,UAAU;AACvC,YAAM,YAAY,IAAI;AACtB,UAAI,OAAO,WAAW,YAAY;AACjC,cAAM,QAAQ,OAAO;AACrB,YAAI,KAAK,eAAe,GAAG;AAC1B,qBAAW,KAAK;AAAA,YACf,MAAM,EAAE,OAAO,MAAM,QAAQ;AAAA,YAC7B,YAAY,EAAE,MAAM,UAAU;AAAA,UAC/B,CAAC;AACD;AAAA,QACD,OAAO;AACN,gBAAM,IAAI,uCAAmB,KAAK,QAAQ,GAAG,KAAK;AAAA,QACnD;AAAA,MACD;AACA,YAAM,WAAW,OAAO;AAExB,UAAI,UAAU,cAAc;AAC3B,cAAM,mBAAe;AAAA,UACpB,SAAS;AAAA,QACV;AACA,iBAAS,SAAS,cAAc,UAAU;AAAA,MAC3C;AAGA,YAAM,aAAa;AAAA,QAClB,UAAM,YAAAC;AAAA,UACL;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,QACD;AAAA,QACA,YAAY,EAAE,MAAM,UAAU;AAAA,MAC/B;AAEA,iBAAW,KAAK,UAAU;AAAA,IAC3B,CAAC;AAED,QAAI,IAAI,YAAY,MAAM,UAAU,sBAAsB,GAAG;AAC5D,gBAAM,2BAAM,mBAAmB;AAAA,IAChC;AAAA,EACD;AAEA,SAAO,CAAC,UAAU;AACnB;","names":["assert","outputParser","omit"]}
+
{"version":3,"sources":["../../../../../../../nodes/agents/Agent/agents/ToolsAgent/V2/execute.ts"],"sourcesContent":["import type { StreamEvent } from '@langchain/core/dist/tracers/event_stream';\nimport type { IterableReadableStream } from '@langchain/core/dist/utils/stream';\nimport type { BaseChatModel } from '@langchain/core/language_models/chat_models';\nimport type { AIMessageChunk, MessageContentText } from '@langchain/core/messages';\nimport type { ChatPromptTemplate } from '@langchain/core/prompts';\nimport { RunnableSequence } from '@langchain/core/runnables';\nimport {\n\tAgentExecutor,\n\ttype AgentRunnableSequence,\n\tcreateToolCallingAgent,\n} from 'langchain/agents';\nimport type { BaseChatMemory } from 'langchain/memory';\nimport type { DynamicStructuredTool, Tool } from 'langchain/tools';\nimport omit from 'lodash/omit';\nimport { jsonParse, NodeOperationError, sleep } from 'n8n-workflow';\nimport type { IExecuteFunctions, INodeExecutionData, ISupplyDataFunctions } from 'n8n-workflow';\nimport assert from 'node:assert';\n\nimport { getPromptInputByType } from '@utils/helpers';\nimport {\n\tgetOptionalOutputParser,\n\ttype N8nOutputParser,\n} from '@utils/output_parsers/N8nOutputParser';\n\nimport {\n\tfixEmptyContentMessage,\n\tgetAgentStepsParser,\n\tgetChatModel,\n\tgetOptionalMemory,\n\tgetTools,\n\tprepareMessages,\n\tpreparePrompt,\n} from '../common';\nimport { SYSTEM_MESSAGE } from '../prompt';\n\n/**\n * Creates an agent executor with the given configuration\n */\nfunction createAgentExecutor(\n\tmodel: BaseChatModel,\n\ttools: Array<DynamicStructuredTool | Tool>,\n\tprompt: ChatPromptTemplate,\n\toptions: { maxIterations?: number; returnIntermediateSteps?: boolean },\n\toutputParser?: N8nOutputParser,\n\tmemory?: BaseChatMemory,\n\tfallbackModel?: BaseChatModel | null,\n) {\n\tconst agent = createToolCallingAgent({\n\t\tllm: model,\n\t\ttools,\n\t\tprompt,\n\t\tstreamRunnable: false,\n\t});\n\n\tlet fallbackAgent: AgentRunnableSequence | undefined;\n\tif (fallbackModel) {\n\t\tfallbackAgent = createToolCallingAgent({\n\t\t\tllm: fallbackModel,\n\t\t\ttools,\n\t\t\tprompt,\n\t\t\tstreamRunnable: false,\n\t\t});\n\t}\n\tconst runnableAgent = RunnableSequence.from([\n\t\tfallbackAgent ? agent.withFallbacks([fallbackAgent]) : agent,\n\t\tgetAgentStepsParser(outputParser, memory),\n\t\tfixEmptyContentMessage,\n\t]) as AgentRunnableSequence;\n\n\trunnableAgent.singleAction = false;\n\trunnableAgent.streamRunnable = false;\n\n\treturn AgentExecutor.fromAgentAndTools({\n\t\tagent: runnableAgent,\n\t\tmemory,\n\t\ttools,\n\t\treturnIntermediateSteps: options.returnIntermediateSteps === true,\n\t\tmaxIterations: options.maxIterations ?? 
10,\n\t});\n}\n\nasync function processEventStream(\n\tctx: IExecuteFunctions,\n\teventStream: IterableReadableStream<StreamEvent>,\n\titemIndex: number,\n\treturnIntermediateSteps: boolean = false,\n): Promise<{ output: string; intermediateSteps?: any[] }> {\n\tconst agentResult: { output: string; intermediateSteps?: any[] } = {\n\t\toutput: '',\n\t};\n\n\tif (returnIntermediateSteps) {\n\t\tagentResult.intermediateSteps = [];\n\t}\n\n\tctx.sendChunk('begin', itemIndex);\n\tfor await (const event of eventStream) {\n\t\t// Stream chat model tokens as they come in\n\t\tswitch (event.event) {\n\t\t\tcase 'on_chat_model_stream':\n\t\t\t\tconst chunk = event.data?.chunk as AIMessageChunk;\n\t\t\t\tif (chunk?.content) {\n\t\t\t\t\tconst chunkContent = chunk.content;\n\t\t\t\t\tlet chunkText = '';\n\t\t\t\t\tif (Array.isArray(chunkContent)) {\n\t\t\t\t\t\tfor (const message of chunkContent) {\n\t\t\t\t\t\t\tchunkText += (message as MessageContentText)?.text;\n\t\t\t\t\t\t}\n\t\t\t\t\t} else if (typeof chunkContent === 'string') {\n\t\t\t\t\t\tchunkText = chunkContent;\n\t\t\t\t\t}\n\t\t\t\t\tctx.sendChunk('item', itemIndex, chunkText);\n\n\t\t\t\t\tagentResult.output += chunkText;\n\t\t\t\t}\n\t\t\t\tbreak;\n\t\t\tcase 'on_chat_model_end':\n\t\t\t\t// Capture full LLM response with tool calls for intermediate steps\n\t\t\t\tif (returnIntermediateSteps && event.data) {\n\t\t\t\t\tconst chatModelData = event.data as any;\n\t\t\t\t\tconst output = chatModelData.output;\n\n\t\t\t\t\t// Check if this LLM response contains tool calls\n\t\t\t\t\tif (output?.tool_calls && output.tool_calls.length > 0) {\n\t\t\t\t\t\tfor (const toolCall of output.tool_calls) {\n\t\t\t\t\t\t\tagentResult.intermediateSteps!.push({\n\t\t\t\t\t\t\t\taction: {\n\t\t\t\t\t\t\t\t\ttool: toolCall.name,\n\t\t\t\t\t\t\t\t\ttoolInput: toolCall.args,\n\t\t\t\t\t\t\t\t\tlog:\n\t\t\t\t\t\t\t\t\t\toutput.content ||\n\t\t\t\t\t\t\t\t\t\t`Calling ${toolCall.name} with input: ${JSON.stringify(toolCall.args)}`,\n\t\t\t\t\t\t\t\t\tmessageLog: [output], // Include the full LLM response\n\t\t\t\t\t\t\t\t\ttoolCallId: toolCall.id,\n\t\t\t\t\t\t\t\t\ttype: toolCall.type,\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t});\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t\tbreak;\n\t\t\tcase 'on_tool_end':\n\t\t\t\t// Capture tool execution results and match with action\n\t\t\t\tif (returnIntermediateSteps && event.data && agentResult.intermediateSteps!.length > 0) {\n\t\t\t\t\tconst toolData = event.data as any;\n\t\t\t\t\t// Find the matching intermediate step for this tool call\n\t\t\t\t\tconst matchingStep = agentResult.intermediateSteps!.find(\n\t\t\t\t\t\t(step) => !step.observation && step.action.tool === event.name,\n\t\t\t\t\t);\n\t\t\t\t\tif (matchingStep) {\n\t\t\t\t\t\tmatchingStep.observation = toolData.output;\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t\tbreak;\n\t\t\tdefault:\n\t\t\t\tbreak;\n\t\t}\n\t}\n\tctx.sendChunk('end', itemIndex);\n\n\treturn agentResult;\n}\n\n/* -----------------------------------------------------------\n Main Executor Function\n----------------------------------------------------------- */\n/**\n * The main executor method for the Tools Agent.\n *\n * This function retrieves necessary components (model, memory, tools), prepares the prompt,\n * creates the agent, and processes each input item. The error handling for each item is also\n * managed here based on the node's continueOnFail setting.\n *\n * @param this Execute context. 
SupplyDataContext is passed when agent is as a tool\n *\n * @returns The array of execution data for all processed items\n */\nexport async function toolsAgentExecute(\n\tthis: IExecuteFunctions | ISupplyDataFunctions,\n): Promise<INodeExecutionData[][]> {\n\tthis.logger.debug('Executing Tools Agent V2');\n\n\tconst returnData: INodeExecutionData[] = [];\n\tconst items = this.getInputData();\n\tconst batchSize = this.getNodeParameter('options.batching.batchSize', 0, 1) as number;\n\tconst delayBetweenBatches = this.getNodeParameter(\n\t\t'options.batching.delayBetweenBatches',\n\t\t0,\n\t\t0,\n\t) as number;\n\tconst needsFallback = this.getNodeParameter('needsFallback', 0, false) as boolean;\n\tconst memory = await getOptionalMemory(this);\n\tconst model = await getChatModel(this, 0);\n\tassert(model, 'Please connect a model to the Chat Model input');\n\tconst fallbackModel = needsFallback ? await getChatModel(this, 1) : null;\n\n\tif (needsFallback && !fallbackModel) {\n\t\tthrow new NodeOperationError(\n\t\t\tthis.getNode(),\n\t\t\t'Please connect a model to the Fallback Model input or disable the fallback option',\n\t\t);\n\t}\n\n\t// Check if streaming is enabled\n\tconst enableStreaming = this.getNodeParameter('options.enableStreaming', 0, true) as boolean;\n\n\tfor (let i = 0; i < items.length; i += batchSize) {\n\t\tconst batch = items.slice(i, i + batchSize);\n\t\tconst batchPromises = batch.map(async (_item, batchItemIndex) => {\n\t\t\tconst itemIndex = i + batchItemIndex;\n\n\t\t\tconst input = getPromptInputByType({\n\t\t\t\tctx: this,\n\t\t\t\ti: itemIndex,\n\t\t\t\tinputKey: 'text',\n\t\t\t\tpromptTypeKey: 'promptType',\n\t\t\t});\n\t\t\tif (input === undefined) {\n\t\t\t\tthrow new NodeOperationError(this.getNode(), 'The \"text\" parameter is empty.');\n\t\t\t}\n\t\t\tconst outputParser = await getOptionalOutputParser(this, itemIndex);\n\t\t\tconst tools = await getTools(this, outputParser);\n\t\t\tconst options = this.getNodeParameter('options', itemIndex, {}) as {\n\t\t\t\tsystemMessage?: string;\n\t\t\t\tmaxIterations?: number;\n\t\t\t\treturnIntermediateSteps?: boolean;\n\t\t\t\tpassthroughBinaryImages?: boolean;\n\t\t\t};\n\n\t\t\t// Prepare the prompt messages and prompt template.\n\t\t\tconst messages = await prepareMessages(this, itemIndex, {\n\t\t\t\tsystemMessage: options.systemMessage,\n\t\t\t\tpassthroughBinaryImages: options.passthroughBinaryImages ?? true,\n\t\t\t\toutputParser,\n\t\t\t});\n\t\t\tconst prompt: ChatPromptTemplate = preparePrompt(messages);\n\n\t\t\t// Create executors for primary and fallback models\n\t\t\tconst executor = createAgentExecutor(\n\t\t\t\tmodel,\n\t\t\t\ttools,\n\t\t\t\tprompt,\n\t\t\t\toptions,\n\t\t\t\toutputParser,\n\t\t\t\tmemory,\n\t\t\t\tfallbackModel,\n\t\t\t);\n\t\t\t// Invoke with fallback logic\n\t\t\tconst invokeParams = {\n\t\t\t\tinput,\n\t\t\t\tsystem_message: options.systemMessage ?? SYSTEM_MESSAGE,\n\t\t\t\tformatting_instructions:\n\t\t\t\t\t'IMPORTANT: For your response to user, you MUST use the `format_final_json_response` tool with your complete answer formatted according to the required schema. Do not attempt to format the JSON manually - always use this tool. Your response will be rejected if it is not properly formatted through this tool. Only use this tool once you are ready to provide your final answer.',\n\t\t\t};\n\t\t\tconst executeOptions = { signal: this.getExecutionCancelSignal() };\n\n\t\t\t// Check if streaming is actually available\n\t\t\tconst isStreamingAvailable = 'isStreaming' in this ? 
this.isStreaming?.() : undefined;\n\n\t\t\tif (\n\t\t\t\t'isStreaming' in this &&\n\t\t\t\tenableStreaming &&\n\t\t\t\tisStreamingAvailable &&\n\t\t\t\tthis.getNode().typeVersion >= 2.1\n\t\t\t) {\n\t\t\t\tconst chatHistory = await memory?.chatHistory.getMessages();\n\t\t\t\tconst eventStream = executor.streamEvents(\n\t\t\t\t\t{\n\t\t\t\t\t\t...invokeParams,\n\t\t\t\t\t\tchat_history: chatHistory ?? undefined,\n\t\t\t\t\t},\n\t\t\t\t\t{\n\t\t\t\t\t\tversion: 'v2',\n\t\t\t\t\t\t...executeOptions,\n\t\t\t\t\t},\n\t\t\t\t);\n\n\t\t\t\treturn await processEventStream(\n\t\t\t\t\tthis,\n\t\t\t\t\teventStream,\n\t\t\t\t\titemIndex,\n\t\t\t\t\toptions.returnIntermediateSteps,\n\t\t\t\t);\n\t\t\t} else {\n\t\t\t\t// Handle regular execution\n\t\t\t\treturn await executor.invoke(invokeParams, executeOptions);\n\t\t\t}\n\t\t});\n\n\t\tconst batchResults = await Promise.allSettled(batchPromises);\n\t\t// This is only used to check if the output parser is connected\n\t\t// so we can parse the output if needed. Actual output parsing is done in the loop above\n\t\tconst outputParser = await getOptionalOutputParser(this, 0);\n\t\tbatchResults.forEach((result, index) => {\n\t\t\tconst itemIndex = i + index;\n\t\t\tif (result.status === 'rejected') {\n\t\t\t\tconst error = result.reason as Error;\n\t\t\t\tif (this.continueOnFail()) {\n\t\t\t\t\treturnData.push({\n\t\t\t\t\t\tjson: { error: error.message },\n\t\t\t\t\t\tpairedItem: { item: itemIndex },\n\t\t\t\t\t});\n\t\t\t\t\treturn;\n\t\t\t\t} else {\n\t\t\t\t\tthrow new NodeOperationError(this.getNode(), error);\n\t\t\t\t}\n\t\t\t}\n\t\t\tconst response = result.value;\n\t\t\t// If memory and outputParser are connected, parse the output.\n\t\t\tif (memory && outputParser) {\n\t\t\t\tconst parsedOutput = jsonParse<{ output: Record<string, unknown> }>(\n\t\t\t\t\tresponse.output as string,\n\t\t\t\t);\n\t\t\t\tresponse.output = parsedOutput?.output ?? 
parsedOutput;\n\t\t\t}\n\n\t\t\t// Omit internal keys before returning the result.\n\t\t\tconst itemResult = {\n\t\t\t\tjson: omit(\n\t\t\t\t\tresponse,\n\t\t\t\t\t'system_message',\n\t\t\t\t\t'formatting_instructions',\n\t\t\t\t\t'input',\n\t\t\t\t\t'chat_history',\n\t\t\t\t\t'agent_scratchpad',\n\t\t\t\t),\n\t\t\t\tpairedItem: { item: itemIndex },\n\t\t\t};\n\n\t\t\treturnData.push(itemResult);\n\t\t});\n\n\t\tif (i + batchSize < items.length && delayBetweenBatches > 0) {\n\t\t\tawait sleep(delayBetweenBatches);\n\t\t}\n\t}\n\n\treturn [returnData];\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAKA,uBAAiC;AACjC,oBAIO;AAGP,kBAAiB;AACjB,0BAAqD;AAErD,yBAAmB;AAEnB,qBAAqC;AACrC,6BAGO;AAEP,oBAQO;AACP,oBAA+B;AAK/B,SAAS,oBACR,OACA,OACA,QACA,SACA,cACA,QACA,eACC;AACD,QAAM,YAAQ,sCAAuB;AAAA,IACpC,KAAK;AAAA,IACL;AAAA,IACA;AAAA,IACA,gBAAgB;AAAA,EACjB,CAAC;AAED,MAAI;AACJ,MAAI,eAAe;AAClB,wBAAgB,sCAAuB;AAAA,MACtC,KAAK;AAAA,MACL;AAAA,MACA;AAAA,MACA,gBAAgB;AAAA,IACjB,CAAC;AAAA,EACF;AACA,QAAM,gBAAgB,kCAAiB,KAAK;AAAA,IAC3C,gBAAgB,MAAM,cAAc,CAAC,aAAa,CAAC,IAAI;AAAA,QACvD,mCAAoB,cAAc,MAAM;AAAA,IACxC;AAAA,EACD,CAAC;AAED,gBAAc,eAAe;AAC7B,gBAAc,iBAAiB;AAE/B,SAAO,4BAAc,kBAAkB;AAAA,IACtC,OAAO;AAAA,IACP;AAAA,IACA;AAAA,IACA,yBAAyB,QAAQ,4BAA4B;AAAA,IAC7D,eAAe,QAAQ,iBAAiB;AAAA,EACzC,CAAC;AACF;AAEA,eAAe,mBACd,KACA,aACA,WACA,0BAAmC,OACsB;AACzD,QAAM,cAA6D;AAAA,IAClE,QAAQ;AAAA,EACT;AAEA,MAAI,yBAAyB;AAC5B,gBAAY,oBAAoB,CAAC;AAAA,EAClC;AAEA,MAAI,UAAU,SAAS,SAAS;AAChC,mBAAiB,SAAS,aAAa;AAEtC,YAAQ,MAAM,OAAO;AAAA,MACpB,KAAK;AACJ,cAAM,QAAQ,MAAM,MAAM;AAC1B,YAAI,OAAO,SAAS;AACnB,gBAAM,eAAe,MAAM;AAC3B,cAAI,YAAY;AAChB,cAAI,MAAM,QAAQ,YAAY,GAAG;AAChC,uBAAW,WAAW,cAAc;AACnC,2BAAc,SAAgC;AAAA,YAC/C;AAAA,UACD,WAAW,OAAO,iBAAiB,UAAU;AAC5C,wBAAY;AAAA,UACb;AACA,cAAI,UAAU,QAAQ,WAAW,SAAS;AAE1C,sBAAY,UAAU;AAAA,QACvB;AACA;AAAA,MACD,KAAK;AAEJ,YAAI,2BAA2B,MAAM,MAAM;AAC1C,gBAAM,gBAAgB,MAAM;AAC5B,gBAAM,SAAS,cAAc;AAG7B,cAAI,QAAQ,cAAc,OAAO,WAAW,SAAS,GAAG;AACvD,uBAAW,YAAY,OAAO,YAAY;AACzC,0BAAY,kBAAmB,KAAK;AAAA,gBACnC,QAAQ;AAAA,kBACP,MAAM,SAAS;AAAA,kBACf,WAAW,SAAS;AAAA,kBACpB,KACC,OAAO,WACP,WAAW,SAAS,IAAI,gBAAgB,KAAK,UAAU,SAAS,IAAI,CAAC;AAAA,kBACtE,YAAY,CAAC,MAAM;AAAA;AAAA,kBACnB,YAAY,SAAS;AAAA,kBACrB,MAAM,SAAS;AAAA,gBAChB;AAAA,cACD,CAAC;AAAA,YACF;AAAA,UACD;AAAA,QACD;AACA;AAAA,MACD,KAAK;AAEJ,YAAI,2BAA2B,MAAM,QAAQ,YAAY,kBAAmB,SAAS,GAAG;AACvF,gBAAM,WAAW,MAAM;AAEvB,gBAAM,eAAe,YAAY,kBAAmB;AAAA,YACnD,CAAC,SAAS,CAAC,KAAK,eAAe,KAAK,OAAO,SAAS,MAAM;AAAA,UAC3D;AACA,cAAI,cAAc;AACjB,yBAAa,cAAc,SAAS;AAAA,UACrC;AAAA,QACD;AACA;AAAA,MACD;AACC;AAAA,IACF;AAAA,EACD;AACA,MAAI,UAAU,OAAO,SAAS;AAE9B,SAAO;AACR;AAgBA,eAAsB,oBAEa;AAClC,OAAK,OAAO,MAAM,0BAA0B;AAE5C,QAAM,aAAmC,CAAC;AAC1C,QAAM,QAAQ,KAAK,aAAa;AAChC,QAAM,YAAY,KAAK,iBAAiB,8BAA8B,GAAG,CAAC;AAC1E,QAAM,sBAAsB,KAAK;AAAA,IAChC;AAAA,IACA;AAAA,IACA;AAAA,EACD;AACA,QAAM,gBAAgB,KAAK,iBAAiB,iBAAiB,GAAG,KAAK;AACrE,QAAM,SAAS,UAAM,iCAAkB,IAAI;AAC3C,QAAM,QAAQ,UAAM,4BAAa,MAAM,CAAC;AACxC,yBAAAA,SAAO,OAAO,gDAAgD;AAC9D,QAAM,gBAAgB,gBAAgB,UAAM,4BAAa,MAAM,CAAC,IAAI;AAEpE,MAAI,iBAAiB,CAAC,eAAe;AACpC,UAAM,IAAI;AAAA,MACT,KAAK,QAAQ;AAAA,MACb;AAAA,IACD;AAAA,EACD;AAGA,QAAM,kBAAkB,KAAK,iBAAiB,2BAA2B,GAAG,IAAI;AAEhF,WAAS,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK,WAAW;AACjD,UAAM,QAAQ,MAAM,MAAM,GAAG,IAAI,SAAS;AAC1C,UAAM,gBAAgB,MAAM,IAAI,OAAO,OAAO,mBAAmB;AAChE,YAAM,YAAY,IAAI;AAEtB,YAAM,YAAQ,qCAAqB;AAAA,QAClC,KAAK;AAAA,QACL,GAAG;AAAA,QACH,UAAU;AAAA,QACV,eAAe;AAAA,MAChB,CAAC;AACD,UAAI,UAAU,QAAW;AACxB,cAAM,IAAI,uCAAmB,KAAK,QAAQ,GAAG,gCAAgC;AAAA,MAC9E;AACA,YAAMC,gBAAe,UAAM,gDAAwB,MAAM,SAAS;AAClE,YAAM,QAAQ,UAAM,wBAAS,MAAMA,aAAY;AAC/C,YAAM,UAAU,KAA
K,iBAAiB,WAAW,WAAW,CAAC,CAAC;AAQ9D,YAAM,WAAW,UAAM,+BAAgB,MAAM,WAAW;AAAA,QACvD,eAAe,QAAQ;AAAA,QACvB,yBAAyB,QAAQ,2BAA2B;AAAA,QAC5D,cAAAA;AAAA,MACD,CAAC;AACD,YAAM,aAA6B,6BAAc,QAAQ;AAGzD,YAAM,WAAW;AAAA,QAChB;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACAA;AAAA,QACA;AAAA,QACA;AAAA,MACD;AAEA,YAAM,eAAe;AAAA,QACpB;AAAA,QACA,gBAAgB,QAAQ,iBAAiB;AAAA,QACzC,yBACC;AAAA,MACF;AACA,YAAM,iBAAiB,EAAE,QAAQ,KAAK,yBAAyB,EAAE;AAGjE,YAAM,uBAAuB,iBAAiB,OAAO,KAAK,cAAc,IAAI;AAE5E,UACC,iBAAiB,QACjB,mBACA,wBACA,KAAK,QAAQ,EAAE,eAAe,KAC7B;AACD,cAAM,cAAc,MAAM,QAAQ,YAAY,YAAY;AAC1D,cAAM,cAAc,SAAS;AAAA,UAC5B;AAAA,YACC,GAAG;AAAA,YACH,cAAc,eAAe;AAAA,UAC9B;AAAA,UACA;AAAA,YACC,SAAS;AAAA,YACT,GAAG;AAAA,UACJ;AAAA,QACD;AAEA,eAAO,MAAM;AAAA,UACZ;AAAA,UACA;AAAA,UACA;AAAA,UACA,QAAQ;AAAA,QACT;AAAA,MACD,OAAO;AAEN,eAAO,MAAM,SAAS,OAAO,cAAc,cAAc;AAAA,MAC1D;AAAA,IACD,CAAC;AAED,UAAM,eAAe,MAAM,QAAQ,WAAW,aAAa;AAG3D,UAAM,eAAe,UAAM,gDAAwB,MAAM,CAAC;AAC1D,iBAAa,QAAQ,CAAC,QAAQ,UAAU;AACvC,YAAM,YAAY,IAAI;AACtB,UAAI,OAAO,WAAW,YAAY;AACjC,cAAM,QAAQ,OAAO;AACrB,YAAI,KAAK,eAAe,GAAG;AAC1B,qBAAW,KAAK;AAAA,YACf,MAAM,EAAE,OAAO,MAAM,QAAQ;AAAA,YAC7B,YAAY,EAAE,MAAM,UAAU;AAAA,UAC/B,CAAC;AACD;AAAA,QACD,OAAO;AACN,gBAAM,IAAI,uCAAmB,KAAK,QAAQ,GAAG,KAAK;AAAA,QACnD;AAAA,MACD;AACA,YAAM,WAAW,OAAO;AAExB,UAAI,UAAU,cAAc;AAC3B,cAAM,mBAAe;AAAA,UACpB,SAAS;AAAA,QACV;AACA,iBAAS,SAAS,cAAc,UAAU;AAAA,MAC3C;AAGA,YAAM,aAAa;AAAA,QAClB,UAAM,YAAAC;AAAA,UACL;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,QACD;AAAA,QACA,YAAY,EAAE,MAAM,UAAU;AAAA,MAC/B;AAEA,iBAAW,KAAK,UAAU;AAAA,IAC3B,CAAC;AAED,QAAI,IAAI,YAAY,MAAM,UAAU,sBAAsB,GAAG;AAC5D,gBAAM,2BAAM,mBAAmB;AAAA,IAChC;AAAA,EACD;AAEA,SAAO,CAAC,UAAU;AACnB;","names":["assert","outputParser","omit"]}
package/dist/nodes/agents/Agent/agents/ToolsAgent/common.js.map

@@ -1 +1 @@
-
{"version":3,"sources":["../../../../../../nodes/agents/Agent/agents/ToolsAgent/common.ts"],"sourcesContent":["import type { BaseChatModel } from '@langchain/core/language_models/chat_models';\nimport { HumanMessage } from '@langchain/core/messages';\nimport type { BaseMessage } from '@langchain/core/messages';\nimport { ChatPromptTemplate, type BaseMessagePromptTemplateLike } from '@langchain/core/prompts';\nimport type { AgentAction, AgentFinish } from 'langchain/agents';\nimport type { ToolsAgentAction } from 'langchain/dist/agents/tool_calling/output_parser';\nimport type { BaseChatMemory } from 'langchain/memory';\nimport { DynamicStructuredTool, type Tool } from 'langchain/tools';\nimport { BINARY_ENCODING, jsonParse, NodeConnectionTypes, NodeOperationError } from 'n8n-workflow';\nimport type { IExecuteFunctions } from 'n8n-workflow';\nimport type { ZodObject } from 'zod';\nimport { z } from 'zod';\n\nimport { isChatInstance, getConnectedTools } from '@utils/helpers';\nimport { type N8nOutputParser } from '@utils/output_parsers/N8nOutputParser';\n/* -----------------------------------------------------------\n Output Parser Helper\n----------------------------------------------------------- */\n/**\n * Retrieve the output parser schema.\n * If the parser does not return a valid schema, default to a schema with a single text field.\n */\nexport function getOutputParserSchema(\n\toutputParser: N8nOutputParser,\n\t// eslint-disable-next-line @typescript-eslint/no-explicit-any\n): ZodObject<any, any, any, any> {\n\tconst schema =\n\t\t// eslint-disable-next-line @typescript-eslint/no-explicit-any\n\t\t(outputParser.getSchema() as ZodObject<any, any, any, any>) ?? z.object({ text: z.string() });\n\treturn schema;\n}\n\n/* -----------------------------------------------------------\n Binary Data Helpers\n----------------------------------------------------------- */\n/**\n * Extracts binary image messages from the input data.\n * When operating in filesystem mode, the binary stream is first converted to a buffer.\n *\n * @param ctx - The execution context\n * @param itemIndex - The current item index\n * @returns A HumanMessage containing the binary image messages.\n */\nexport async function extractBinaryMessages(\n\tctx: IExecuteFunctions,\n\titemIndex: number,\n): Promise<HumanMessage> {\n\tconst binaryData = ctx.getInputData()?.[itemIndex]?.binary ?? {};\n\tconst binaryMessages = await Promise.all(\n\t\tObject.values(binaryData)\n\t\t\t.filter((data) => data.mimeType.startsWith('image/'))\n\t\t\t.map(async (data) => {\n\t\t\t\tlet binaryUrlString: string;\n\n\t\t\t\t// In filesystem mode we need to get binary stream by id before converting it to buffer\n\t\t\t\tif (data.id) {\n\t\t\t\t\tconst binaryBuffer = await ctx.helpers.binaryToBuffer(\n\t\t\t\t\t\tawait ctx.helpers.getBinaryStream(data.id),\n\t\t\t\t\t);\n\t\t\t\t\tbinaryUrlString = `data:${data.mimeType};base64,${Buffer.from(binaryBuffer).toString(\n\t\t\t\t\t\tBINARY_ENCODING,\n\t\t\t\t\t)}`;\n\t\t\t\t} else {\n\t\t\t\t\tbinaryUrlString = data.data.includes('base64')\n\t\t\t\t\t\t? 
data.data\n\t\t\t\t\t\t: `data:${data.mimeType};base64,${data.data}`;\n\t\t\t\t}\n\n\t\t\t\treturn {\n\t\t\t\t\ttype: 'image_url',\n\t\t\t\t\timage_url: {\n\t\t\t\t\t\turl: binaryUrlString,\n\t\t\t\t\t},\n\t\t\t\t};\n\t\t\t}),\n\t);\n\treturn new HumanMessage({\n\t\tcontent: [...binaryMessages],\n\t});\n}\n\n/* -----------------------------------------------------------\n Agent Output Format Helpers\n----------------------------------------------------------- */\n/**\n * Fixes empty content messages in agent steps.\n *\n * This function is necessary when using RunnableSequence.from in LangChain.\n * If a tool doesn't have any arguments, LangChain returns input: '' (empty string).\n * This can throw an error for some providers (like Anthropic) which expect the input to always be an object.\n * This function replaces empty string inputs with empty objects to prevent such errors.\n *\n * @param steps - The agent steps to fix\n * @returns The fixed agent steps\n */\nexport function fixEmptyContentMessage(\n\tsteps: AgentFinish | ToolsAgentAction[],\n): AgentFinish | ToolsAgentAction[] {\n\tif (!Array.isArray(steps)) return steps;\n\n\tsteps.forEach((step) => {\n\t\tif ('messageLog' in step && step.messageLog !== undefined) {\n\t\t\tif (Array.isArray(step.messageLog)) {\n\t\t\t\tstep.messageLog.forEach((message: BaseMessage) => {\n\t\t\t\t\tif ('content' in message && Array.isArray(message.content)) {\n\t\t\t\t\t\t(message.content as Array<{ input?: string | object }>).forEach((content) => {\n\t\t\t\t\t\t\tif (content.input === '') {\n\t\t\t\t\t\t\t\tcontent.input = {};\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t});\n\t\t\t\t\t}\n\t\t\t\t});\n\t\t\t}\n\t\t}\n\t});\n\n\treturn steps;\n}\n\n/**\n * Ensures consistent handling of outputs regardless of the model used,\n * providing a unified output format for further processing.\n *\n * This method is necessary to handle different output formats from various language models.\n * Specifically, it checks if the agent step is the final step (contains returnValues) and determines\n * if the output is a simple string (e.g., from OpenAI models) or an array of outputs (e.g., from Anthropic models).\n *\n * Examples:\n * 1. Anthropic model output:\n * ```json\n * {\n * \"output\": [\n * {\n * \"index\": 0,\n * \"type\": \"text\",\n * \"text\": \"The result of the calculation is approximately 1001.8166...\"\n * }\n * ]\n * }\n *```\n * 2. 
OpenAI model output:\n * ```json\n * {\n * \"output\": \"The result of the calculation is approximately 1001.82...\"\n * }\n * ```\n *\n * @param steps - The agent finish or agent action steps.\n * @returns The modified agent finish steps or the original steps.\n */\nexport function handleAgentFinishOutput(\n\tsteps: AgentFinish | AgentAction[],\n): AgentFinish | AgentAction[] {\n\ttype AgentMultiOutputFinish = AgentFinish & {\n\t\treturnValues: { output: Array<{ text: string; type: string; index: number }> };\n\t};\n\tconst agentFinishSteps = steps as AgentMultiOutputFinish | AgentFinish;\n\n\tif (agentFinishSteps.returnValues) {\n\t\tconst isMultiOutput = Array.isArray(agentFinishSteps.returnValues?.output);\n\t\tif (isMultiOutput) {\n\t\t\t// If all items in the multi-output array are of type 'text', merge them into a single string\n\t\t\tconst multiOutputSteps = agentFinishSteps.returnValues.output as Array<{\n\t\t\t\tindex: number;\n\t\t\t\ttype: string;\n\t\t\t\ttext: string;\n\t\t\t}>;\n\t\t\tconst isTextOnly = multiOutputSteps.every((output) => 'text' in output);\n\t\t\tif (isTextOnly) {\n\t\t\t\tagentFinishSteps.returnValues.output = multiOutputSteps\n\t\t\t\t\t.map((output) => output.text)\n\t\t\t\t\t.join('\\n')\n\t\t\t\t\t.trim();\n\t\t\t}\n\t\t\treturn agentFinishSteps;\n\t\t}\n\t}\n\n\treturn agentFinishSteps;\n}\n\n/**\n * Wraps the parsed output so that it can be stored in memory.\n * If memory is connected, the output is stringified.\n *\n * @param output - The parsed output object\n * @param memory - The connected memory (if any)\n * @returns The formatted output object\n */\nexport function handleParsedStepOutput(\n\toutput: Record<string, unknown>,\n\tmemory?: BaseChatMemory,\n): { returnValues: Record<string, unknown>; log: string } {\n\treturn {\n\t\treturnValues: memory ? { output: JSON.stringify(output) } : output,\n\t\tlog: 'Final response formatted',\n\t};\n}\n\n/**\n * Parses agent steps using the provided output parser.\n * If the agent used the 'format_final_json_response' tool, the output is parsed accordingly.\n *\n * @param steps - The agent finish or action steps\n * @param outputParser - The output parser (if defined)\n * @param memory - The connected memory (if any)\n * @returns The parsed steps with the final output\n */\nexport const getAgentStepsParser =\n\t(outputParser?: N8nOutputParser, memory?: BaseChatMemory) =>\n\tasync (steps: AgentFinish | AgentAction[]): Promise<AgentFinish | AgentAction[]> => {\n\t\t// Check if the steps contain the 'format_final_json_response' tool invocation.\n\t\tif (Array.isArray(steps)) {\n\t\t\tconst responseParserTool = steps.find((step) => step.tool === 'format_final_json_response');\n\t\t\tif (responseParserTool && outputParser) {\n\t\t\t\tconst toolInput = responseParserTool.toolInput;\n\t\t\t\t// Ensure the tool input is a string\n\t\t\t\tconst parserInput = toolInput instanceof Object ? 
JSON.stringify(toolInput) : toolInput;\n\t\t\t\tconst returnValues = (await outputParser.parse(parserInput)) as Record<string, unknown>;\n\t\t\t\treturn handleParsedStepOutput(returnValues, memory);\n\t\t\t}\n\t\t}\n\n\t\t// Otherwise, if the steps contain a returnValues field, try to parse them manually.\n\t\tif (outputParser && typeof steps === 'object' && (steps as AgentFinish).returnValues) {\n\t\t\tconst finalResponse = (steps as AgentFinish).returnValues;\n\t\t\tlet parserInput: string;\n\n\t\t\tif (finalResponse instanceof Object) {\n\t\t\t\tif ('output' in finalResponse) {\n\t\t\t\t\ttry {\n\t\t\t\t\t\t// If the output is an object, we will try to parse it as JSON\n\t\t\t\t\t\t// this is because parser expects stringified JSON object like { \"output\": { .... } }\n\t\t\t\t\t\t// so we try to parse the output before wrapping it and then stringify it\n\t\t\t\t\t\tparserInput = JSON.stringify({ output: jsonParse(finalResponse.output) });\n\t\t\t\t\t} catch (error) {\n\t\t\t\t\t\t// Fallback to the raw output if parsing fails.\n\t\t\t\t\t\tparserInput = finalResponse.output;\n\t\t\t\t\t}\n\t\t\t\t} else {\n\t\t\t\t\t// If the output is not an object, we will stringify it as it is\n\t\t\t\t\tparserInput = JSON.stringify(finalResponse);\n\t\t\t\t}\n\t\t\t} else {\n\t\t\t\tparserInput = finalResponse;\n\t\t\t}\n\n\t\t\tconst returnValues = (await outputParser.parse(parserInput)) as Record<string, unknown>;\n\t\t\treturn handleParsedStepOutput(returnValues, memory);\n\t\t}\n\n\t\treturn handleAgentFinishOutput(steps);\n\t};\n\n/* -----------------------------------------------------------\n Agent Setup Helpers\n----------------------------------------------------------- */\n/**\n * Retrieves the language model from the input connection.\n * Throws an error if the model is not a valid chat instance or does not support tools.\n *\n * @param ctx - The execution context\n * @returns The validated chat model\n */\nexport async function getChatModel(\n\tctx: IExecuteFunctions,\n\tindex: number = 0,\n): Promise<BaseChatModel | undefined> {\n\tconst connectedModels = await ctx.getInputConnectionData(NodeConnectionTypes.AiLanguageModel, 0);\n\n\tlet model;\n\n\tif (Array.isArray(connectedModels) && index !== undefined) {\n\t\tif (connectedModels.length <= index) {\n\t\t\treturn undefined;\n\t\t}\n\t\t// We get the models in reversed order from the workflow so we need to reverse them to match the right index\n\t\tconst reversedModels = [...connectedModels].reverse();\n\t\tmodel = reversedModels[index] as BaseChatModel;\n\t} else {\n\t\tmodel = connectedModels as BaseChatModel;\n\t}\n\n\tif (!isChatInstance(model) || !model.bindTools) {\n\t\tthrow new NodeOperationError(\n\t\t\tctx.getNode(),\n\t\t\t'Tools Agent requires Chat Model which supports Tools calling',\n\t\t);\n\t}\n\treturn model;\n}\n\n/**\n * Retrieves the memory instance from the input connection if it is connected\n *\n * @param ctx - The execution context\n * @returns The connected memory (if any)\n */\nexport async function getOptionalMemory(\n\tctx: IExecuteFunctions,\n): Promise<BaseChatMemory | undefined> {\n\treturn (await ctx.getInputConnectionData(NodeConnectionTypes.AiMemory, 0)) as\n\t\t| BaseChatMemory\n\t\t| undefined;\n}\n\n/**\n * Retrieves the connected tools and (if an output parser is defined)\n * appends a structured output parser tool.\n *\n * @param ctx - The execution context\n * @param outputParser - The optional output parser\n * @returns The array of connected tools\n */\nexport async function getTools(\n\tctx: 
IExecuteFunctions,\n\toutputParser?: N8nOutputParser,\n): Promise<Array<DynamicStructuredTool | Tool>> {\n\tconst tools = (await getConnectedTools(ctx, true, false)) as Array<DynamicStructuredTool | Tool>;\n\n\t// If an output parser is available, create a dynamic tool to validate the final output.\n\tif (outputParser) {\n\t\tconst schema = getOutputParserSchema(outputParser);\n\t\tconst structuredOutputParserTool = new DynamicStructuredTool({\n\t\t\tschema,\n\t\t\tname: 'format_final_json_response',\n\t\t\tdescription:\n\t\t\t\t'Use this tool to format your final response to the user in a structured JSON format. This tool validates your output against a schema to ensure it meets the required format. ONLY use this tool when you have completed all necessary reasoning and are ready to provide your final answer. Do not use this tool for intermediate steps or for asking questions. The output from this tool will be directly returned to the user.',\n\t\t\t// We do not use a function here because we intercept the output with the parser.\n\t\t\tfunc: async () => '',\n\t\t});\n\t\ttools.push(structuredOutputParserTool);\n\t}\n\treturn tools;\n}\n\n/**\n * Prepares the prompt messages for the agent.\n *\n * @param ctx - The execution context\n * @param itemIndex - The current item index\n * @param options - Options containing systemMessage and other parameters\n * @returns The array of prompt messages\n */\nexport async function prepareMessages(\n\tctx: IExecuteFunctions,\n\titemIndex: number,\n\toptions: {\n\t\tsystemMessage?: string;\n\t\tpassthroughBinaryImages?: boolean;\n\t\toutputParser?: N8nOutputParser;\n\t},\n): Promise<BaseMessagePromptTemplateLike[]> {\n\tconst useSystemMessage = options.systemMessage ?? ctx.getNode().typeVersion < 1.9;\n\n\tconst messages: BaseMessagePromptTemplateLike[] = [];\n\n\tif (useSystemMessage) {\n\t\tmessages.push([\n\t\t\t'system',\n\t\t\t`{system_message}${options.outputParser ? 
'\\n\\n{formatting_instructions}' : ''}`,\n\t\t]);\n\t} else if (options.outputParser) {\n\t\tmessages.push(['system', '{formatting_instructions}']);\n\t}\n\n\tmessages.push(['placeholder', '{chat_history}'], ['human', '{input}']);\n\n\t// If there is binary data and the node option permits it, add a binary message\n\tconst hasBinaryData = ctx.getInputData()?.[itemIndex]?.binary !== undefined;\n\tif (hasBinaryData && options.passthroughBinaryImages) {\n\t\tconst binaryMessage = await extractBinaryMessages(ctx, itemIndex);\n\t\tif (binaryMessage.content.length !== 0) {\n\t\t\tmessages.push(binaryMessage);\n\t\t} else {\n\t\t\tctx.logger.debug('Not attaching binary message, since its content was empty');\n\t\t}\n\t}\n\n\t// We add the agent scratchpad last, so that the agent will not run in loops\n\t// by adding binary messages between each interaction\n\tmessages.push(['placeholder', '{agent_scratchpad}']);\n\treturn messages;\n}\n\n/**\n * Creates the chat prompt from messages.\n *\n * @param messages - The messages array\n * @returns The ChatPromptTemplate instance\n */\nexport function preparePrompt(messages: BaseMessagePromptTemplateLike[]): ChatPromptTemplate {\n\treturn ChatPromptTemplate.fromMessages(messages);\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AACA,sBAA6B;AAE7B,qBAAuE;AAIvE,mBAAiD;AACjD,0BAAoF;AAGpF,iBAAkB;AAElB,qBAAkD;AAS3C,SAAS,sBACf,cAEgC;AAChC,QAAM;AAAA;AAAA,IAEJ,aAAa,UAAU,KAAuC,aAAE,OAAO,EAAE,MAAM,aAAE,OAAO,EAAE,CAAC;AAAA;AAC7F,SAAO;AACR;AAaA,eAAsB,sBACrB,KACA,WACwB;AACxB,QAAM,aAAa,IAAI,aAAa,IAAI,SAAS,GAAG,UAAU,CAAC;AAC/D,QAAM,iBAAiB,MAAM,QAAQ;AAAA,IACpC,OAAO,OAAO,UAAU,EACtB,OAAO,CAAC,SAAS,KAAK,SAAS,WAAW,QAAQ,CAAC,EACnD,IAAI,OAAO,SAAS;AACpB,UAAI;AAGJ,UAAI,KAAK,IAAI;AACZ,cAAM,eAAe,MAAM,IAAI,QAAQ;AAAA,UACtC,MAAM,IAAI,QAAQ,gBAAgB,KAAK,EAAE;AAAA,QAC1C;AACA,0BAAkB,QAAQ,KAAK,QAAQ,WAAW,OAAO,KAAK,YAAY,EAAE;AAAA,UAC3E;AAAA,QACD,CAAC;AAAA,MACF,OAAO;AACN,0BAAkB,KAAK,KAAK,SAAS,QAAQ,IAC1C,KAAK,OACL,QAAQ,KAAK,QAAQ,WAAW,KAAK,IAAI;AAAA,MAC7C;AAEA,aAAO;AAAA,QACN,MAAM;AAAA,QACN,WAAW;AAAA,UACV,KAAK;AAAA,QACN;AAAA,MACD;AAAA,IACD,CAAC;AAAA,EACH;AACA,SAAO,IAAI,6BAAa;AAAA,IACvB,SAAS,CAAC,GAAG,cAAc;AAAA,EAC5B,CAAC;AACF;AAgBO,SAAS,uBACf,OACmC;AACnC,MAAI,CAAC,MAAM,QAAQ,KAAK,EAAG,QAAO;AAElC,QAAM,QAAQ,CAAC,SAAS;AACvB,QAAI,gBAAgB,QAAQ,KAAK,eAAe,QAAW;AAC1D,UAAI,MAAM,QAAQ,KAAK,UAAU,GAAG;AACnC,aAAK,WAAW,QAAQ,CAAC,YAAyB;AACjD,cAAI,aAAa,WAAW,MAAM,QAAQ,QAAQ,OAAO,GAAG;AAC3D,YAAC,QAAQ,QAA+C,QAAQ,CAAC,YAAY;AAC5E,kBAAI,QAAQ,UAAU,IAAI;AACzB,wBAAQ,QAAQ,CAAC;AAAA,cAClB;AAAA,YACD,CAAC;AAAA,UACF;AAAA,QACD,CAAC;AAAA,MACF;AAAA,IACD;AAAA,EACD,CAAC;AAED,SAAO;AACR;AAiCO,SAAS,wBACf,OAC8B;AAI9B,QAAM,mBAAmB;AAEzB,MAAI,iBAAiB,cAAc;AAClC,UAAM,gBAAgB,MAAM,QAAQ,iBAAiB,cAAc,MAAM;AACzE,QAAI,eAAe;AAElB,YAAM,mBAAmB,iBAAiB,aAAa;AAKvD,YAAM,aAAa,iBAAiB,MAAM,CAAC,WAAW,UAAU,MAAM;AACtE,UAAI,YAAY;AACf,yBAAiB,aAAa,SAAS,iBACrC,IAAI,CAAC,WAAW,OAAO,IAAI,EAC3B,KAAK,IAAI,EACT,KAAK;AAAA,MACR;AACA,aAAO;AAAA,IACR;AAAA,EACD;AAEA,SAAO;AACR;AAUO,SAAS,uBACf,QACA,QACyD;AACzD,SAAO;AAAA,IACN,cAAc,SAAS,EAAE,QAAQ,KAAK,UAAU,MAAM,EAAE,IAAI;AAAA,IAC5D,KAAK;AAAA,EACN;AACD;AAWO,MAAM,sBACZ,CAAC,cAAgC,WACjC,OAAO,UAA6E;AAEnF,MAAI,MAAM,QAAQ,KAAK,GAAG;AACzB,UAAM,qBAAqB,MAAM,KAAK,CAAC,SAAS,KAAK,SAAS,4BAA4B;AAC1F,QAAI,sBAAsB,cAAc;AACvC,YAAM,YAAY,mBAAmB;AAErC,YAAM,cAAc,qBAAqB,SAAS,KAAK,UAAU,SAAS,IAAI;AAC9E,YAAM,eAAgB,MAAM,aAAa,MAAM,WAAW;AAC1D,aAAO,uBAAuB,cAAc,MAAM;AAAA,IACnD;AAAA,EACD;AAGA,MAAI,gBAAgB,OAAO,UAAU,YAAa,MAAsB,cAAc;AACrF,UAAM,gBAAiB,MAAsB;AAC7C,QAAI;AAEJ,QAAI,yBAAyB,QAAQ;AACpC,UAAI,YAAY
,eAAe;AAC9B,YAAI;AAIH,wBAAc,KAAK,UAAU,EAAE,YAAQ,+BAAU,cAAc,MAAM,EAAE,CAAC;AAAA,QACzE,SAAS,OAAO;AAEf,wBAAc,cAAc;AAAA,QAC7B;AAAA,MACD,OAAO;AAEN,sBAAc,KAAK,UAAU,aAAa;AAAA,MAC3C;AAAA,IACD,OAAO;AACN,oBAAc;AAAA,IACf;AAEA,UAAM,eAAgB,MAAM,aAAa,MAAM,WAAW;AAC1D,WAAO,uBAAuB,cAAc,MAAM;AAAA,EACnD;AAEA,SAAO,wBAAwB,KAAK;AACrC;AAYD,eAAsB,aACrB,KACA,QAAgB,GACqB;AACrC,QAAM,kBAAkB,MAAM,IAAI,uBAAuB,wCAAoB,iBAAiB,CAAC;AAE/F,MAAI;AAEJ,MAAI,MAAM,QAAQ,eAAe,KAAK,UAAU,QAAW;AAC1D,QAAI,gBAAgB,UAAU,OAAO;AACpC,aAAO;AAAA,IACR;AAEA,UAAM,iBAAiB,CAAC,GAAG,eAAe,EAAE,QAAQ;AACpD,YAAQ,eAAe,KAAK;AAAA,EAC7B,OAAO;AACN,YAAQ;AAAA,EACT;AAEA,MAAI,KAAC,+BAAe,KAAK,KAAK,CAAC,MAAM,WAAW;AAC/C,UAAM,IAAI;AAAA,MACT,IAAI,QAAQ;AAAA,MACZ;AAAA,IACD;AAAA,EACD;AACA,SAAO;AACR;AAQA,eAAsB,kBACrB,KACsC;AACtC,SAAQ,MAAM,IAAI,uBAAuB,wCAAoB,UAAU,CAAC;AAGzE;AAUA,eAAsB,SACrB,KACA,cAC+C;AAC/C,QAAM,QAAS,UAAM,kCAAkB,KAAK,MAAM,KAAK;AAGvD,MAAI,cAAc;AACjB,UAAM,SAAS,sBAAsB,YAAY;AACjD,UAAM,6BAA6B,IAAI,mCAAsB;AAAA,MAC5D;AAAA,MACA,MAAM;AAAA,MACN,aACC;AAAA;AAAA,MAED,MAAM,YAAY;AAAA,IACnB,CAAC;AACD,UAAM,KAAK,0BAA0B;AAAA,EACtC;AACA,SAAO;AACR;AAUA,eAAsB,gBACrB,KACA,WACA,SAK2C;AAC3C,QAAM,mBAAmB,QAAQ,iBAAiB,IAAI,QAAQ,EAAE,cAAc;AAE9E,QAAM,WAA4C,CAAC;AAEnD,MAAI,kBAAkB;AACrB,aAAS,KAAK;AAAA,MACb;AAAA,MACA,mBAAmB,QAAQ,eAAe,kCAAkC,EAAE;AAAA,IAC/E,CAAC;AAAA,EACF,WAAW,QAAQ,cAAc;AAChC,aAAS,KAAK,CAAC,UAAU,2BAA2B,CAAC;AAAA,EACtD;AAEA,WAAS,KAAK,CAAC,eAAe,gBAAgB,GAAG,CAAC,SAAS,SAAS,CAAC;AAGrE,QAAM,gBAAgB,IAAI,aAAa,IAAI,SAAS,GAAG,WAAW;AAClE,MAAI,iBAAiB,QAAQ,yBAAyB;AACrD,UAAM,gBAAgB,MAAM,sBAAsB,KAAK,SAAS;AAChE,QAAI,cAAc,QAAQ,WAAW,GAAG;AACvC,eAAS,KAAK,aAAa;AAAA,IAC5B,OAAO;AACN,UAAI,OAAO,MAAM,2DAA2D;AAAA,IAC7E;AAAA,EACD;AAIA,WAAS,KAAK,CAAC,eAAe,oBAAoB,CAAC;AACnD,SAAO;AACR;AAQO,SAAS,cAAc,UAA+D;AAC5F,SAAO,kCAAmB,aAAa,QAAQ;AAChD;","names":[]}
+
{"version":3,"sources":["../../../../../../nodes/agents/Agent/agents/ToolsAgent/common.ts"],"sourcesContent":["import type { BaseChatModel } from '@langchain/core/language_models/chat_models';\nimport { HumanMessage } from '@langchain/core/messages';\nimport type { BaseMessage } from '@langchain/core/messages';\nimport { ChatPromptTemplate, type BaseMessagePromptTemplateLike } from '@langchain/core/prompts';\nimport type { AgentAction, AgentFinish } from 'langchain/agents';\nimport type { ToolsAgentAction } from 'langchain/dist/agents/tool_calling/output_parser';\nimport type { BaseChatMemory } from 'langchain/memory';\nimport { DynamicStructuredTool, type Tool } from 'langchain/tools';\nimport { BINARY_ENCODING, jsonParse, NodeConnectionTypes, NodeOperationError } from 'n8n-workflow';\nimport type { IExecuteFunctions, ISupplyDataFunctions } from 'n8n-workflow';\nimport type { ZodObject } from 'zod';\nimport { z } from 'zod';\n\nimport { isChatInstance, getConnectedTools } from '@utils/helpers';\nimport { type N8nOutputParser } from '@utils/output_parsers/N8nOutputParser';\n/* -----------------------------------------------------------\n Output Parser Helper\n----------------------------------------------------------- */\n/**\n * Retrieve the output parser schema.\n * If the parser does not return a valid schema, default to a schema with a single text field.\n */\nexport function getOutputParserSchema(\n\toutputParser: N8nOutputParser,\n\t// eslint-disable-next-line @typescript-eslint/no-explicit-any\n): ZodObject<any, any, any, any> {\n\tconst schema =\n\t\t// eslint-disable-next-line @typescript-eslint/no-explicit-any\n\t\t(outputParser.getSchema() as ZodObject<any, any, any, any>) ?? z.object({ text: z.string() });\n\treturn schema;\n}\n\n/* -----------------------------------------------------------\n Binary Data Helpers\n----------------------------------------------------------- */\n/**\n * Extracts binary image messages from the input data.\n * When operating in filesystem mode, the binary stream is first converted to a buffer.\n *\n * @param ctx - The execution context\n * @param itemIndex - The current item index\n * @returns A HumanMessage containing the binary image messages.\n */\nexport async function extractBinaryMessages(\n\tctx: IExecuteFunctions | ISupplyDataFunctions,\n\titemIndex: number,\n): Promise<HumanMessage> {\n\tconst binaryData = ctx.getInputData()?.[itemIndex]?.binary ?? {};\n\tconst binaryMessages = await Promise.all(\n\t\tObject.values(binaryData)\n\t\t\t.filter((data) => data.mimeType.startsWith('image/'))\n\t\t\t.map(async (data) => {\n\t\t\t\tlet binaryUrlString: string;\n\n\t\t\t\t// In filesystem mode we need to get binary stream by id before converting it to buffer\n\t\t\t\tif (data.id) {\n\t\t\t\t\tconst binaryBuffer = await ctx.helpers.binaryToBuffer(\n\t\t\t\t\t\tawait ctx.helpers.getBinaryStream(data.id),\n\t\t\t\t\t);\n\t\t\t\t\tbinaryUrlString = `data:${data.mimeType};base64,${Buffer.from(binaryBuffer).toString(\n\t\t\t\t\t\tBINARY_ENCODING,\n\t\t\t\t\t)}`;\n\t\t\t\t} else {\n\t\t\t\t\tbinaryUrlString = data.data.includes('base64')\n\t\t\t\t\t\t? 
data.data\n\t\t\t\t\t\t: `data:${data.mimeType};base64,${data.data}`;\n\t\t\t\t}\n\n\t\t\t\treturn {\n\t\t\t\t\ttype: 'image_url',\n\t\t\t\t\timage_url: {\n\t\t\t\t\t\turl: binaryUrlString,\n\t\t\t\t\t},\n\t\t\t\t};\n\t\t\t}),\n\t);\n\treturn new HumanMessage({\n\t\tcontent: [...binaryMessages],\n\t});\n}\n\n/* -----------------------------------------------------------\n Agent Output Format Helpers\n----------------------------------------------------------- */\n/**\n * Fixes empty content messages in agent steps.\n *\n * This function is necessary when using RunnableSequence.from in LangChain.\n * If a tool doesn't have any arguments, LangChain returns input: '' (empty string).\n * This can throw an error for some providers (like Anthropic) which expect the input to always be an object.\n * This function replaces empty string inputs with empty objects to prevent such errors.\n *\n * @param steps - The agent steps to fix\n * @returns The fixed agent steps\n */\nexport function fixEmptyContentMessage(\n\tsteps: AgentFinish | ToolsAgentAction[],\n): AgentFinish | ToolsAgentAction[] {\n\tif (!Array.isArray(steps)) return steps;\n\n\tsteps.forEach((step) => {\n\t\tif ('messageLog' in step && step.messageLog !== undefined) {\n\t\t\tif (Array.isArray(step.messageLog)) {\n\t\t\t\tstep.messageLog.forEach((message: BaseMessage) => {\n\t\t\t\t\tif ('content' in message && Array.isArray(message.content)) {\n\t\t\t\t\t\t(message.content as Array<{ input?: string | object }>).forEach((content) => {\n\t\t\t\t\t\t\tif (content.input === '') {\n\t\t\t\t\t\t\t\tcontent.input = {};\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t});\n\t\t\t\t\t}\n\t\t\t\t});\n\t\t\t}\n\t\t}\n\t});\n\n\treturn steps;\n}\n\n/**\n * Ensures consistent handling of outputs regardless of the model used,\n * providing a unified output format for further processing.\n *\n * This method is necessary to handle different output formats from various language models.\n * Specifically, it checks if the agent step is the final step (contains returnValues) and determines\n * if the output is a simple string (e.g., from OpenAI models) or an array of outputs (e.g., from Anthropic models).\n *\n * Examples:\n * 1. Anthropic model output:\n * ```json\n * {\n * \"output\": [\n * {\n * \"index\": 0,\n * \"type\": \"text\",\n * \"text\": \"The result of the calculation is approximately 1001.8166...\"\n * }\n * ]\n * }\n *```\n * 2. 
OpenAI model output:\n * ```json\n * {\n * \"output\": \"The result of the calculation is approximately 1001.82...\"\n * }\n * ```\n *\n * @param steps - The agent finish or agent action steps.\n * @returns The modified agent finish steps or the original steps.\n */\nexport function handleAgentFinishOutput(\n\tsteps: AgentFinish | AgentAction[],\n): AgentFinish | AgentAction[] {\n\ttype AgentMultiOutputFinish = AgentFinish & {\n\t\treturnValues: { output: Array<{ text: string; type: string; index: number }> };\n\t};\n\tconst agentFinishSteps = steps as AgentMultiOutputFinish | AgentFinish;\n\n\tif (agentFinishSteps.returnValues) {\n\t\tconst isMultiOutput = Array.isArray(agentFinishSteps.returnValues?.output);\n\t\tif (isMultiOutput) {\n\t\t\t// If all items in the multi-output array are of type 'text', merge them into a single string\n\t\t\tconst multiOutputSteps = agentFinishSteps.returnValues.output as Array<{\n\t\t\t\tindex: number;\n\t\t\t\ttype: string;\n\t\t\t\ttext: string;\n\t\t\t}>;\n\t\t\tconst isTextOnly = multiOutputSteps.every((output) => 'text' in output);\n\t\t\tif (isTextOnly) {\n\t\t\t\tagentFinishSteps.returnValues.output = multiOutputSteps\n\t\t\t\t\t.map((output) => output.text)\n\t\t\t\t\t.join('\\n')\n\t\t\t\t\t.trim();\n\t\t\t}\n\t\t\treturn agentFinishSteps;\n\t\t}\n\t}\n\n\treturn agentFinishSteps;\n}\n\n/**\n * Wraps the parsed output so that it can be stored in memory.\n * If memory is connected, the output is stringified.\n *\n * @param output - The parsed output object\n * @param memory - The connected memory (if any)\n * @returns The formatted output object\n */\nexport function handleParsedStepOutput(\n\toutput: Record<string, unknown>,\n\tmemory?: BaseChatMemory,\n): { returnValues: Record<string, unknown>; log: string } {\n\treturn {\n\t\treturnValues: memory ? { output: JSON.stringify(output) } : output,\n\t\tlog: 'Final response formatted',\n\t};\n}\n\n/**\n * Parses agent steps using the provided output parser.\n * If the agent used the 'format_final_json_response' tool, the output is parsed accordingly.\n *\n * @param steps - The agent finish or action steps\n * @param outputParser - The output parser (if defined)\n * @param memory - The connected memory (if any)\n * @returns The parsed steps with the final output\n */\nexport const getAgentStepsParser =\n\t(outputParser?: N8nOutputParser, memory?: BaseChatMemory) =>\n\tasync (steps: AgentFinish | AgentAction[]): Promise<AgentFinish | AgentAction[]> => {\n\t\t// Check if the steps contain the 'format_final_json_response' tool invocation.\n\t\tif (Array.isArray(steps)) {\n\t\t\tconst responseParserTool = steps.find((step) => step.tool === 'format_final_json_response');\n\t\t\tif (responseParserTool && outputParser) {\n\t\t\t\tconst toolInput = responseParserTool.toolInput;\n\t\t\t\t// Ensure the tool input is a string\n\t\t\t\tconst parserInput = toolInput instanceof Object ? 
JSON.stringify(toolInput) : toolInput;\n\t\t\t\tconst returnValues = (await outputParser.parse(parserInput)) as Record<string, unknown>;\n\t\t\t\treturn handleParsedStepOutput(returnValues, memory);\n\t\t\t}\n\t\t}\n\n\t\t// Otherwise, if the steps contain a returnValues field, try to parse them manually.\n\t\tif (outputParser && typeof steps === 'object' && (steps as AgentFinish).returnValues) {\n\t\t\tconst finalResponse = (steps as AgentFinish).returnValues;\n\t\t\tlet parserInput: string;\n\n\t\t\tif (finalResponse instanceof Object) {\n\t\t\t\tif ('output' in finalResponse) {\n\t\t\t\t\ttry {\n\t\t\t\t\t\t// If the output is an object, we will try to parse it as JSON\n\t\t\t\t\t\t// this is because parser expects stringified JSON object like { \"output\": { .... } }\n\t\t\t\t\t\t// so we try to parse the output before wrapping it and then stringify it\n\t\t\t\t\t\tparserInput = JSON.stringify({ output: jsonParse(finalResponse.output) });\n\t\t\t\t\t} catch (error) {\n\t\t\t\t\t\t// Fallback to the raw output if parsing fails.\n\t\t\t\t\t\tparserInput = finalResponse.output;\n\t\t\t\t\t}\n\t\t\t\t} else {\n\t\t\t\t\t// If the output is not an object, we will stringify it as it is\n\t\t\t\t\tparserInput = JSON.stringify(finalResponse);\n\t\t\t\t}\n\t\t\t} else {\n\t\t\t\tparserInput = finalResponse;\n\t\t\t}\n\n\t\t\tconst returnValues = (await outputParser.parse(parserInput)) as Record<string, unknown>;\n\t\t\treturn handleParsedStepOutput(returnValues, memory);\n\t\t}\n\n\t\treturn handleAgentFinishOutput(steps);\n\t};\n\n/* -----------------------------------------------------------\n Agent Setup Helpers\n----------------------------------------------------------- */\n/**\n * Retrieves the language model from the input connection.\n * Throws an error if the model is not a valid chat instance or does not support tools.\n *\n * @param ctx - The execution context\n * @returns The validated chat model\n */\nexport async function getChatModel(\n\tctx: IExecuteFunctions | ISupplyDataFunctions,\n\tindex: number = 0,\n): Promise<BaseChatModel | undefined> {\n\tconst connectedModels = await ctx.getInputConnectionData(NodeConnectionTypes.AiLanguageModel, 0);\n\n\tlet model;\n\n\tif (Array.isArray(connectedModels) && index !== undefined) {\n\t\tif (connectedModels.length <= index) {\n\t\t\treturn undefined;\n\t\t}\n\t\t// We get the models in reversed order from the workflow so we need to reverse them to match the right index\n\t\tconst reversedModels = [...connectedModels].reverse();\n\t\tmodel = reversedModels[index] as BaseChatModel;\n\t} else {\n\t\tmodel = connectedModels as BaseChatModel;\n\t}\n\n\tif (!isChatInstance(model) || !model.bindTools) {\n\t\tthrow new NodeOperationError(\n\t\t\tctx.getNode(),\n\t\t\t'Tools Agent requires Chat Model which supports Tools calling',\n\t\t);\n\t}\n\treturn model;\n}\n\n/**\n * Retrieves the memory instance from the input connection if it is connected\n *\n * @param ctx - The execution context\n * @returns The connected memory (if any)\n */\nexport async function getOptionalMemory(\n\tctx: IExecuteFunctions | ISupplyDataFunctions,\n): Promise<BaseChatMemory | undefined> {\n\treturn (await ctx.getInputConnectionData(NodeConnectionTypes.AiMemory, 0)) as\n\t\t| BaseChatMemory\n\t\t| undefined;\n}\n\n/**\n * Retrieves the connected tools and (if an output parser is defined)\n * appends a structured output parser tool.\n *\n * @param ctx - The execution context\n * @param outputParser - The optional output parser\n * @returns The array of connected 
tools\n */\nexport async function getTools(\n\tctx: IExecuteFunctions | ISupplyDataFunctions,\n\toutputParser?: N8nOutputParser,\n): Promise<Array<DynamicStructuredTool | Tool>> {\n\tconst tools = (await getConnectedTools(ctx, true, false)) as Array<DynamicStructuredTool | Tool>;\n\n\t// If an output parser is available, create a dynamic tool to validate the final output.\n\tif (outputParser) {\n\t\tconst schema = getOutputParserSchema(outputParser);\n\t\tconst structuredOutputParserTool = new DynamicStructuredTool({\n\t\t\tschema,\n\t\t\tname: 'format_final_json_response',\n\t\t\tdescription:\n\t\t\t\t'Use this tool to format your final response to the user in a structured JSON format. This tool validates your output against a schema to ensure it meets the required format. ONLY use this tool when you have completed all necessary reasoning and are ready to provide your final answer. Do not use this tool for intermediate steps or for asking questions. The output from this tool will be directly returned to the user.',\n\t\t\t// We do not use a function here because we intercept the output with the parser.\n\t\t\tfunc: async () => '',\n\t\t});\n\t\ttools.push(structuredOutputParserTool);\n\t}\n\treturn tools;\n}\n\n/**\n * Prepares the prompt messages for the agent.\n *\n * @param ctx - The execution context\n * @param itemIndex - The current item index\n * @param options - Options containing systemMessage and other parameters\n * @returns The array of prompt messages\n */\nexport async function prepareMessages(\n\tctx: IExecuteFunctions | ISupplyDataFunctions,\n\titemIndex: number,\n\toptions: {\n\t\tsystemMessage?: string;\n\t\tpassthroughBinaryImages?: boolean;\n\t\toutputParser?: N8nOutputParser;\n\t},\n): Promise<BaseMessagePromptTemplateLike[]> {\n\tconst useSystemMessage = options.systemMessage ?? ctx.getNode().typeVersion < 1.9;\n\n\tconst messages: BaseMessagePromptTemplateLike[] = [];\n\n\tif (useSystemMessage) {\n\t\tmessages.push([\n\t\t\t'system',\n\t\t\t`{system_message}${options.outputParser ? 
'\\n\\n{formatting_instructions}' : ''}`,\n\t\t]);\n\t} else if (options.outputParser) {\n\t\tmessages.push(['system', '{formatting_instructions}']);\n\t}\n\n\tmessages.push(['placeholder', '{chat_history}'], ['human', '{input}']);\n\n\t// If there is binary data and the node option permits it, add a binary message\n\tconst hasBinaryData = ctx.getInputData()?.[itemIndex]?.binary !== undefined;\n\tif (hasBinaryData && options.passthroughBinaryImages) {\n\t\tconst binaryMessage = await extractBinaryMessages(ctx, itemIndex);\n\t\tif (binaryMessage.content.length !== 0) {\n\t\t\tmessages.push(binaryMessage);\n\t\t} else {\n\t\t\tctx.logger.debug('Not attaching binary message, since its content was empty');\n\t\t}\n\t}\n\n\t// We add the agent scratchpad last, so that the agent will not run in loops\n\t// by adding binary messages between each interaction\n\tmessages.push(['placeholder', '{agent_scratchpad}']);\n\treturn messages;\n}\n\n/**\n * Creates the chat prompt from messages.\n *\n * @param messages - The messages array\n * @returns The ChatPromptTemplate instance\n */\nexport function preparePrompt(messages: BaseMessagePromptTemplateLike[]): ChatPromptTemplate {\n\treturn ChatPromptTemplate.fromMessages(messages);\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AACA,sBAA6B;AAE7B,qBAAuE;AAIvE,mBAAiD;AACjD,0BAAoF;AAGpF,iBAAkB;AAElB,qBAAkD;AAS3C,SAAS,sBACf,cAEgC;AAChC,QAAM;AAAA;AAAA,IAEJ,aAAa,UAAU,KAAuC,aAAE,OAAO,EAAE,MAAM,aAAE,OAAO,EAAE,CAAC;AAAA;AAC7F,SAAO;AACR;AAaA,eAAsB,sBACrB,KACA,WACwB;AACxB,QAAM,aAAa,IAAI,aAAa,IAAI,SAAS,GAAG,UAAU,CAAC;AAC/D,QAAM,iBAAiB,MAAM,QAAQ;AAAA,IACpC,OAAO,OAAO,UAAU,EACtB,OAAO,CAAC,SAAS,KAAK,SAAS,WAAW,QAAQ,CAAC,EACnD,IAAI,OAAO,SAAS;AACpB,UAAI;AAGJ,UAAI,KAAK,IAAI;AACZ,cAAM,eAAe,MAAM,IAAI,QAAQ;AAAA,UACtC,MAAM,IAAI,QAAQ,gBAAgB,KAAK,EAAE;AAAA,QAC1C;AACA,0BAAkB,QAAQ,KAAK,QAAQ,WAAW,OAAO,KAAK,YAAY,EAAE;AAAA,UAC3E;AAAA,QACD,CAAC;AAAA,MACF,OAAO;AACN,0BAAkB,KAAK,KAAK,SAAS,QAAQ,IAC1C,KAAK,OACL,QAAQ,KAAK,QAAQ,WAAW,KAAK,IAAI;AAAA,MAC7C;AAEA,aAAO;AAAA,QACN,MAAM;AAAA,QACN,WAAW;AAAA,UACV,KAAK;AAAA,QACN;AAAA,MACD;AAAA,IACD,CAAC;AAAA,EACH;AACA,SAAO,IAAI,6BAAa;AAAA,IACvB,SAAS,CAAC,GAAG,cAAc;AAAA,EAC5B,CAAC;AACF;AAgBO,SAAS,uBACf,OACmC;AACnC,MAAI,CAAC,MAAM,QAAQ,KAAK,EAAG,QAAO;AAElC,QAAM,QAAQ,CAAC,SAAS;AACvB,QAAI,gBAAgB,QAAQ,KAAK,eAAe,QAAW;AAC1D,UAAI,MAAM,QAAQ,KAAK,UAAU,GAAG;AACnC,aAAK,WAAW,QAAQ,CAAC,YAAyB;AACjD,cAAI,aAAa,WAAW,MAAM,QAAQ,QAAQ,OAAO,GAAG;AAC3D,YAAC,QAAQ,QAA+C,QAAQ,CAAC,YAAY;AAC5E,kBAAI,QAAQ,UAAU,IAAI;AACzB,wBAAQ,QAAQ,CAAC;AAAA,cAClB;AAAA,YACD,CAAC;AAAA,UACF;AAAA,QACD,CAAC;AAAA,MACF;AAAA,IACD;AAAA,EACD,CAAC;AAED,SAAO;AACR;AAiCO,SAAS,wBACf,OAC8B;AAI9B,QAAM,mBAAmB;AAEzB,MAAI,iBAAiB,cAAc;AAClC,UAAM,gBAAgB,MAAM,QAAQ,iBAAiB,cAAc,MAAM;AACzE,QAAI,eAAe;AAElB,YAAM,mBAAmB,iBAAiB,aAAa;AAKvD,YAAM,aAAa,iBAAiB,MAAM,CAAC,WAAW,UAAU,MAAM;AACtE,UAAI,YAAY;AACf,yBAAiB,aAAa,SAAS,iBACrC,IAAI,CAAC,WAAW,OAAO,IAAI,EAC3B,KAAK,IAAI,EACT,KAAK;AAAA,MACR;AACA,aAAO;AAAA,IACR;AAAA,EACD;AAEA,SAAO;AACR;AAUO,SAAS,uBACf,QACA,QACyD;AACzD,SAAO;AAAA,IACN,cAAc,SAAS,EAAE,QAAQ,KAAK,UAAU,MAAM,EAAE,IAAI;AAAA,IAC5D,KAAK;AAAA,EACN;AACD;AAWO,MAAM,sBACZ,CAAC,cAAgC,WACjC,OAAO,UAA6E;AAEnF,MAAI,MAAM,QAAQ,KAAK,GAAG;AACzB,UAAM,qBAAqB,MAAM,KAAK,CAAC,SAAS,KAAK,SAAS,4BAA4B;AAC1F,QAAI,sBAAsB,cAAc;AACvC,YAAM,YAAY,mBAAmB;AAErC,YAAM,cAAc,qBAAqB,SAAS,KAAK,UAAU,SAAS,IAAI;AAC9E,YAAM,eAAgB,MAAM,aAAa,MAAM,WAAW;AAC1D,aAAO,uBAAuB,cAAc,MAAM;AAAA,IACnD;AAAA,EACD;AAGA,MAAI,gBAAgB,OAAO,UAAU,YAAa,MAAsB,cAAc;AACrF,UAAM,gBAAiB,MAAsB;AAC7C,QAAI;AAEJ,QAAI,yBAAyB,QAAQ;AACpC,UAAI,YAAY
,eAAe;AAC9B,YAAI;AAIH,wBAAc,KAAK,UAAU,EAAE,YAAQ,+BAAU,cAAc,MAAM,EAAE,CAAC;AAAA,QACzE,SAAS,OAAO;AAEf,wBAAc,cAAc;AAAA,QAC7B;AAAA,MACD,OAAO;AAEN,sBAAc,KAAK,UAAU,aAAa;AAAA,MAC3C;AAAA,IACD,OAAO;AACN,oBAAc;AAAA,IACf;AAEA,UAAM,eAAgB,MAAM,aAAa,MAAM,WAAW;AAC1D,WAAO,uBAAuB,cAAc,MAAM;AAAA,EACnD;AAEA,SAAO,wBAAwB,KAAK;AACrC;AAYD,eAAsB,aACrB,KACA,QAAgB,GACqB;AACrC,QAAM,kBAAkB,MAAM,IAAI,uBAAuB,wCAAoB,iBAAiB,CAAC;AAE/F,MAAI;AAEJ,MAAI,MAAM,QAAQ,eAAe,KAAK,UAAU,QAAW;AAC1D,QAAI,gBAAgB,UAAU,OAAO;AACpC,aAAO;AAAA,IACR;AAEA,UAAM,iBAAiB,CAAC,GAAG,eAAe,EAAE,QAAQ;AACpD,YAAQ,eAAe,KAAK;AAAA,EAC7B,OAAO;AACN,YAAQ;AAAA,EACT;AAEA,MAAI,KAAC,+BAAe,KAAK,KAAK,CAAC,MAAM,WAAW;AAC/C,UAAM,IAAI;AAAA,MACT,IAAI,QAAQ;AAAA,MACZ;AAAA,IACD;AAAA,EACD;AACA,SAAO;AACR;AAQA,eAAsB,kBACrB,KACsC;AACtC,SAAQ,MAAM,IAAI,uBAAuB,wCAAoB,UAAU,CAAC;AAGzE;AAUA,eAAsB,SACrB,KACA,cAC+C;AAC/C,QAAM,QAAS,UAAM,kCAAkB,KAAK,MAAM,KAAK;AAGvD,MAAI,cAAc;AACjB,UAAM,SAAS,sBAAsB,YAAY;AACjD,UAAM,6BAA6B,IAAI,mCAAsB;AAAA,MAC5D;AAAA,MACA,MAAM;AAAA,MACN,aACC;AAAA;AAAA,MAED,MAAM,YAAY;AAAA,IACnB,CAAC;AACD,UAAM,KAAK,0BAA0B;AAAA,EACtC;AACA,SAAO;AACR;AAUA,eAAsB,gBACrB,KACA,WACA,SAK2C;AAC3C,QAAM,mBAAmB,QAAQ,iBAAiB,IAAI,QAAQ,EAAE,cAAc;AAE9E,QAAM,WAA4C,CAAC;AAEnD,MAAI,kBAAkB;AACrB,aAAS,KAAK;AAAA,MACb;AAAA,MACA,mBAAmB,QAAQ,eAAe,kCAAkC,EAAE;AAAA,IAC/E,CAAC;AAAA,EACF,WAAW,QAAQ,cAAc;AAChC,aAAS,KAAK,CAAC,UAAU,2BAA2B,CAAC;AAAA,EACtD;AAEA,WAAS,KAAK,CAAC,eAAe,gBAAgB,GAAG,CAAC,SAAS,SAAS,CAAC;AAGrE,QAAM,gBAAgB,IAAI,aAAa,IAAI,SAAS,GAAG,WAAW;AAClE,MAAI,iBAAiB,QAAQ,yBAAyB;AACrD,UAAM,gBAAgB,MAAM,sBAAsB,KAAK,SAAS;AAChE,QAAI,cAAc,QAAQ,WAAW,GAAG;AACvC,eAAS,KAAK,aAAa;AAAA,IAC5B,OAAO;AACN,UAAI,OAAO,MAAM,2DAA2D;AAAA,IAC7E;AAAA,EACD;AAIA,WAAS,KAAK,CAAC,eAAe,oBAAoB,CAAC;AACnD,SAAO;AACR;AAQO,SAAS,cAAc,UAA+D;AAC5F,SAAO,kCAAmB,aAAa,QAAQ;AAChD;","names":[]}
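The visible change in the common.js.map sources above is that the shared Tools Agent helpers (getChatModel, getOptionalMemory, getTools, prepareMessages) now accept `IExecuteFunctions | ISupplyDataFunctions` rather than `IExecuteFunctions` alone, presumably so the same helpers can run when the agent is supplied as a tool (supply-data context) as well as when it executes directly. A minimal TypeScript sketch of the pattern, mirroring getOptionalMemory from the source; the `loadMemory` name is hypothetical:

```ts
// Sketch only: a union-typed context parameter, mirroring the widened helper
// signatures in common.ts above. `loadMemory` is a hypothetical name; the
// imports and the AiMemory connection lookup follow the source shown in the map.
import { NodeConnectionTypes } from 'n8n-workflow';
import type { IExecuteFunctions, ISupplyDataFunctions } from 'n8n-workflow';
import type { BaseChatMemory } from 'langchain/memory';

export async function loadMemory(
	ctx: IExecuteFunctions | ISupplyDataFunctions,
): Promise<BaseChatMemory | undefined> {
	// Both context types expose getInputConnectionData, so one helper can be
	// shared by an execute() implementation and a supplyData() implementation.
	return (await ctx.getInputConnectionData(NodeConnectionTypes.AiMemory, 0)) as
		| BaseChatMemory
		| undefined;
}
```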
@@ -264,7 +264,7 @@ class SentimentAnalysis {
       } else {
         throw error;
       }
-    } else if (result.resultItem && result.sentimentIndex) {
+    } else if (result.resultItem && result.sentimentIndex !== -1) {
       const sentimentIndex = result.sentimentIndex;
       const resultItem = result.resultItem;
       returnData[sentimentIndex].push(resultItem);
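The hunk above replaces a truthiness check with an explicit `!== -1` comparison: `findIndex` returns 0 when the first sentiment category matches, and 0 is falsy, so the old guard silently dropped items classified into the first category in batched processing. A standalone sketch of the pitfall (illustrative only, not the node's code):

```ts
// Sketch only: why `result.sentimentIndex` alone is not a safe guard.
const categories = ['Positive', 'Neutral', 'Negative'];
const sentimentIndex = categories.findIndex((c) => c.toLowerCase() === 'positive'); // 0

if (sentimentIndex) {
	// old guard: never entered when the match is the first category (index 0 is falsy)
}

if (sentimentIndex !== -1) {
	// new guard: entered for every successful match, including index 0
}
```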
@@ -1 +1 @@
-
{"version":3,"sources":["../../../../nodes/chains/SentimentAnalysis/SentimentAnalysis.node.ts"],"sourcesContent":["import type { BaseLanguageModel } from '@langchain/core/language_models/base';\nimport { HumanMessage } from '@langchain/core/messages';\nimport { SystemMessagePromptTemplate, ChatPromptTemplate } from '@langchain/core/prompts';\nimport { OutputFixingParser, StructuredOutputParser } from 'langchain/output_parsers';\nimport { NodeConnectionTypes, NodeOperationError, sleep } from 'n8n-workflow';\nimport type {\n\tIDataObject,\n\tIExecuteFunctions,\n\tINodeExecutionData,\n\tINodeParameters,\n\tINodeType,\n\tINodeTypeDescription,\n} from 'n8n-workflow';\nimport { z } from 'zod';\n\nimport { getBatchingOptionFields } from '@utils/sharedFields';\nimport { getTracingConfig } from '@utils/tracing';\n\nconst DEFAULT_SYSTEM_PROMPT_TEMPLATE =\n\t'You are highly intelligent and accurate sentiment analyzer. Analyze the sentiment of the provided text. Categorize it into one of the following: {categories}. Use the provided formatting instructions. Only output the JSON.';\n\nconst DEFAULT_CATEGORIES = 'Positive, Neutral, Negative';\nconst configuredOutputs = (parameters: INodeParameters, defaultCategories: string) => {\n\tconst options = (parameters?.options ?? {}) as IDataObject;\n\tconst categories = (options?.categories as string) ?? defaultCategories;\n\tconst categoriesArray = categories.split(',').map((cat) => cat.trim());\n\n\tconst ret = categoriesArray.map((cat) => ({ type: 'main', displayName: cat }));\n\treturn ret;\n};\n\nexport class SentimentAnalysis implements INodeType {\n\tdescription: INodeTypeDescription = {\n\t\tdisplayName: 'Sentiment Analysis',\n\t\tname: 'sentimentAnalysis',\n\t\ticon: 'fa:balance-scale-left',\n\t\ticonColor: 'black',\n\t\tgroup: ['transform'],\n\t\tversion: [1, 1.1],\n\t\tdescription: 'Analyze the sentiment of your text',\n\t\tcodex: {\n\t\t\tcategories: ['AI'],\n\t\t\tsubcategories: {\n\t\t\t\tAI: ['Chains', 'Root Nodes'],\n\t\t\t},\n\t\t\tresources: {\n\t\t\t\tprimaryDocumentation: [\n\t\t\t\t\t{\n\t\t\t\t\t\turl: 'https://docs.n8n.io/integrations/builtin/cluster-nodes/root-nodes/n8n-nodes-langchain.sentimentanalysis/',\n\t\t\t\t\t},\n\t\t\t\t],\n\t\t\t},\n\t\t},\n\t\tdefaults: {\n\t\t\tname: 'Sentiment Analysis',\n\t\t},\n\t\tinputs: [\n\t\t\t{ displayName: '', type: NodeConnectionTypes.Main },\n\t\t\t{\n\t\t\t\tdisplayName: 'Model',\n\t\t\t\tmaxConnections: 1,\n\t\t\t\ttype: NodeConnectionTypes.AiLanguageModel,\n\t\t\t\trequired: true,\n\t\t\t},\n\t\t],\n\t\toutputs: `={{(${configuredOutputs})($parameter, \"${DEFAULT_CATEGORIES}\")}}`,\n\t\tproperties: [\n\t\t\t{\n\t\t\t\tdisplayName: 'Text to Analyze',\n\t\t\t\tname: 'inputText',\n\t\t\t\ttype: 'string',\n\t\t\t\trequired: true,\n\t\t\t\tdefault: '',\n\t\t\t\tdescription: 'Use an expression to reference data in previous nodes or enter static text',\n\t\t\t\ttypeOptions: {\n\t\t\t\t\trows: 2,\n\t\t\t\t},\n\t\t\t},\n\t\t\t{\n\t\t\t\tdisplayName:\n\t\t\t\t\t'Sentiment scores are LLM-generated estimates, not statistically rigorous measurements. 
They may be inconsistent across runs and should be used as rough indicators only.',\n\t\t\t\tname: 'detailedResultsNotice',\n\t\t\t\ttype: 'notice',\n\t\t\t\tdefault: '',\n\t\t\t\tdisplayOptions: {\n\t\t\t\t\tshow: {\n\t\t\t\t\t\t'/options.includeDetailedResults': [true],\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t},\n\t\t\t{\n\t\t\t\tdisplayName: 'Options',\n\t\t\t\tname: 'options',\n\t\t\t\ttype: 'collection',\n\t\t\t\tdefault: {},\n\t\t\t\tplaceholder: 'Add Option',\n\t\t\t\toptions: [\n\t\t\t\t\t{\n\t\t\t\t\t\tdisplayName: 'Sentiment Categories',\n\t\t\t\t\t\tname: 'categories',\n\t\t\t\t\t\ttype: 'string',\n\t\t\t\t\t\tdefault: DEFAULT_CATEGORIES,\n\t\t\t\t\t\tdescription: 'A comma-separated list of categories to analyze',\n\t\t\t\t\t\tnoDataExpression: true,\n\t\t\t\t\t\ttypeOptions: {\n\t\t\t\t\t\t\trows: 2,\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t\t{\n\t\t\t\t\t\tdisplayName: 'System Prompt Template',\n\t\t\t\t\t\tname: 'systemPromptTemplate',\n\t\t\t\t\t\ttype: 'string',\n\t\t\t\t\t\tdefault: DEFAULT_SYSTEM_PROMPT_TEMPLATE,\n\t\t\t\t\t\tdescription: 'String to use directly as the system prompt template',\n\t\t\t\t\t\ttypeOptions: {\n\t\t\t\t\t\t\trows: 6,\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t\t{\n\t\t\t\t\t\tdisplayName: 'Include Detailed Results',\n\t\t\t\t\t\tname: 'includeDetailedResults',\n\t\t\t\t\t\ttype: 'boolean',\n\t\t\t\t\t\tdefault: false,\n\t\t\t\t\t\tdescription:\n\t\t\t\t\t\t\t'Whether to include sentiment strength and confidence scores in the output',\n\t\t\t\t\t},\n\t\t\t\t\t{\n\t\t\t\t\t\tdisplayName: 'Enable Auto-Fixing',\n\t\t\t\t\t\tname: 'enableAutoFixing',\n\t\t\t\t\t\ttype: 'boolean',\n\t\t\t\t\t\tdefault: true,\n\t\t\t\t\t\tdescription:\n\t\t\t\t\t\t\t'Whether to enable auto-fixing (may trigger an additional LLM call if output is broken)',\n\t\t\t\t\t},\n\t\t\t\t\tgetBatchingOptionFields({\n\t\t\t\t\t\tshow: {\n\t\t\t\t\t\t\t'@version': [{ _cnd: { gte: 1.1 } }],\n\t\t\t\t\t\t},\n\t\t\t\t\t}),\n\t\t\t\t],\n\t\t\t},\n\t\t],\n\t};\n\n\tasync execute(this: IExecuteFunctions): Promise<INodeExecutionData[][]> {\n\t\tconst items = this.getInputData();\n\n\t\tconst llm = (await this.getInputConnectionData(\n\t\t\tNodeConnectionTypes.AiLanguageModel,\n\t\t\t0,\n\t\t)) as BaseLanguageModel;\n\n\t\tconst returnData: INodeExecutionData[][] = [];\n\n\t\tconst batchSize = this.getNodeParameter('options.batching.batchSize', 0, 5) as number;\n\t\tconst delayBetweenBatches = this.getNodeParameter(\n\t\t\t'options.batching.delayBetweenBatches',\n\t\t\t0,\n\t\t\t0,\n\t\t) as number;\n\n\t\tif (this.getNode().typeVersion >= 1.1 && batchSize > 1) {\n\t\t\tfor (let i = 0; i < items.length; i += batchSize) {\n\t\t\t\tconst batch = items.slice(i, i + batchSize);\n\t\t\t\tconst batchPromises = batch.map(async (_item, batchItemIndex) => {\n\t\t\t\t\tconst itemIndex = i + batchItemIndex;\n\t\t\t\t\tconst sentimentCategories = this.getNodeParameter(\n\t\t\t\t\t\t'options.categories',\n\t\t\t\t\t\titemIndex,\n\t\t\t\t\t\tDEFAULT_CATEGORIES,\n\t\t\t\t\t) as string;\n\n\t\t\t\t\tconst categories = sentimentCategories\n\t\t\t\t\t\t.split(',')\n\t\t\t\t\t\t.map((cat) => cat.trim())\n\t\t\t\t\t\t.filter(Boolean);\n\n\t\t\t\t\tif (categories.length === 0) {\n\t\t\t\t\t\treturn {\n\t\t\t\t\t\t\tresult: null,\n\t\t\t\t\t\t\titemIndex,\n\t\t\t\t\t\t\terror: new NodeOperationError(this.getNode(), 'No sentiment categories provided', {\n\t\t\t\t\t\t\t\titemIndex,\n\t\t\t\t\t\t\t}),\n\t\t\t\t\t\t};\n\t\t\t\t\t}\n\n\t\t\t\t\t// Initialize returnData with empty arrays for each category\n\t\t\t\t\tif 
(returnData.length === 0) {\n\t\t\t\t\t\treturnData.push(...Array.from({ length: categories.length }, () => []));\n\t\t\t\t\t}\n\n\t\t\t\t\tconst options = this.getNodeParameter('options', itemIndex, {}) as {\n\t\t\t\t\t\tsystemPromptTemplate?: string;\n\t\t\t\t\t\tincludeDetailedResults?: boolean;\n\t\t\t\t\t\tenableAutoFixing?: boolean;\n\t\t\t\t\t};\n\n\t\t\t\t\tconst schema = z.object({\n\t\t\t\t\t\tsentiment: z.enum(categories as [string, ...string[]]),\n\t\t\t\t\t\tstrength: z\n\t\t\t\t\t\t\t.number()\n\t\t\t\t\t\t\t.min(0)\n\t\t\t\t\t\t\t.max(1)\n\t\t\t\t\t\t\t.describe('Strength score for sentiment in relation to the category'),\n\t\t\t\t\t\tconfidence: z.number().min(0).max(1),\n\t\t\t\t\t});\n\n\t\t\t\t\tconst structuredParser = StructuredOutputParser.fromZodSchema(schema);\n\n\t\t\t\t\tconst parser = options.enableAutoFixing\n\t\t\t\t\t\t? OutputFixingParser.fromLLM(llm, structuredParser)\n\t\t\t\t\t\t: structuredParser;\n\n\t\t\t\t\tconst systemPromptTemplate = SystemMessagePromptTemplate.fromTemplate(\n\t\t\t\t\t\t`${options.systemPromptTemplate ?? DEFAULT_SYSTEM_PROMPT_TEMPLATE}\n\t\t\t\t{format_instructions}`,\n\t\t\t\t\t);\n\n\t\t\t\t\tconst input = this.getNodeParameter('inputText', itemIndex) as string;\n\t\t\t\t\tconst inputPrompt = new HumanMessage(input);\n\t\t\t\t\tconst messages = [\n\t\t\t\t\t\tawait systemPromptTemplate.format({\n\t\t\t\t\t\t\tcategories: sentimentCategories,\n\t\t\t\t\t\t\tformat_instructions: parser.getFormatInstructions(),\n\t\t\t\t\t\t}),\n\t\t\t\t\t\tinputPrompt,\n\t\t\t\t\t];\n\n\t\t\t\t\tconst prompt = ChatPromptTemplate.fromMessages(messages);\n\t\t\t\t\tconst chain = prompt.pipe(llm).pipe(parser).withConfig(getTracingConfig(this));\n\n\t\t\t\t\ttry {\n\t\t\t\t\t\tconst output = await chain.invoke(messages);\n\t\t\t\t\t\tconst sentimentIndex = categories.findIndex(\n\t\t\t\t\t\t\t(s) => s.toLowerCase() === output.sentiment.toLowerCase(),\n\t\t\t\t\t\t);\n\n\t\t\t\t\t\tif (sentimentIndex !== -1) {\n\t\t\t\t\t\t\tconst resultItem = { ...items[itemIndex] };\n\t\t\t\t\t\t\tconst sentimentAnalysis: IDataObject = {\n\t\t\t\t\t\t\t\tcategory: output.sentiment,\n\t\t\t\t\t\t\t};\n\t\t\t\t\t\t\tif (options.includeDetailedResults) {\n\t\t\t\t\t\t\t\tsentimentAnalysis.strength = output.strength;\n\t\t\t\t\t\t\t\tsentimentAnalysis.confidence = output.confidence;\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\tresultItem.json = {\n\t\t\t\t\t\t\t\t...resultItem.json,\n\t\t\t\t\t\t\t\tsentimentAnalysis,\n\t\t\t\t\t\t\t};\n\n\t\t\t\t\t\t\treturn {\n\t\t\t\t\t\t\t\tresult: {\n\t\t\t\t\t\t\t\t\tresultItem,\n\t\t\t\t\t\t\t\t\tsentimentIndex,\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\titemIndex,\n\t\t\t\t\t\t\t};\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\treturn {\n\t\t\t\t\t\t\tresult: {},\n\t\t\t\t\t\t\titemIndex,\n\t\t\t\t\t\t};\n\t\t\t\t\t} catch (error) {\n\t\t\t\t\t\treturn {\n\t\t\t\t\t\t\tresult: null,\n\t\t\t\t\t\t\titemIndex,\n\t\t\t\t\t\t\terror: new NodeOperationError(\n\t\t\t\t\t\t\t\tthis.getNode(),\n\t\t\t\t\t\t\t\t'Error during parsing of LLM output, please check your LLM model and configuration',\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\titemIndex,\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t),\n\t\t\t\t\t\t};\n\t\t\t\t\t}\n\t\t\t\t});\n\t\t\t\tconst batchResults = await Promise.all(batchPromises);\n\n\t\t\t\tbatchResults.forEach(({ result, itemIndex, error }) => {\n\t\t\t\t\tif (error) {\n\t\t\t\t\t\tif (this.continueOnFail()) {\n\t\t\t\t\t\t\tconst executionErrorData = this.helpers.constructExecutionMetaData(\n\t\t\t\t\t\t\t\tthis.helpers.returnJsonArray({ error: error.message 
}),\n\t\t\t\t\t\t\t\t{ itemData: { item: itemIndex } },\n\t\t\t\t\t\t\t);\n\n\t\t\t\t\t\t\treturnData[0].push(...executionErrorData);\n\t\t\t\t\t\t\treturn;\n\t\t\t\t\t\t} else {\n\t\t\t\t\t\t\tthrow error;\n\t\t\t\t\t\t}\n\t\t\t\t\t} else if (result.resultItem && result.sentimentIndex) {\n\t\t\t\t\t\tconst sentimentIndex = result.sentimentIndex;\n\t\t\t\t\t\tconst resultItem = result.resultItem;\n\t\t\t\t\t\treturnData[sentimentIndex].push(resultItem);\n\t\t\t\t\t}\n\t\t\t\t});\n\n\t\t\t\t// Add delay between batches if not the last batch\n\t\t\t\tif (i + batchSize < items.length && delayBetweenBatches > 0) {\n\t\t\t\t\tawait sleep(delayBetweenBatches);\n\t\t\t\t}\n\t\t\t}\n\t\t} else {\n\t\t\t// Sequential Processing\n\t\t\tfor (let i = 0; i < items.length; i++) {\n\t\t\t\ttry {\n\t\t\t\t\tconst sentimentCategories = this.getNodeParameter(\n\t\t\t\t\t\t'options.categories',\n\t\t\t\t\t\ti,\n\t\t\t\t\t\tDEFAULT_CATEGORIES,\n\t\t\t\t\t) as string;\n\n\t\t\t\t\tconst categories = sentimentCategories\n\t\t\t\t\t\t.split(',')\n\t\t\t\t\t\t.map((cat) => cat.trim())\n\t\t\t\t\t\t.filter(Boolean);\n\n\t\t\t\t\tif (categories.length === 0) {\n\t\t\t\t\t\tthrow new NodeOperationError(this.getNode(), 'No sentiment categories provided', {\n\t\t\t\t\t\t\titemIndex: i,\n\t\t\t\t\t\t});\n\t\t\t\t\t}\n\n\t\t\t\t\t// Initialize returnData with empty arrays for each category\n\t\t\t\t\tif (returnData.length === 0) {\n\t\t\t\t\t\treturnData.push(...Array.from({ length: categories.length }, () => []));\n\t\t\t\t\t}\n\n\t\t\t\t\tconst options = this.getNodeParameter('options', i, {}) as {\n\t\t\t\t\t\tsystemPromptTemplate?: string;\n\t\t\t\t\t\tincludeDetailedResults?: boolean;\n\t\t\t\t\t\tenableAutoFixing?: boolean;\n\t\t\t\t\t};\n\n\t\t\t\t\tconst schema = z.object({\n\t\t\t\t\t\tsentiment: z.enum(categories as [string, ...string[]]),\n\t\t\t\t\t\tstrength: z\n\t\t\t\t\t\t\t.number()\n\t\t\t\t\t\t\t.min(0)\n\t\t\t\t\t\t\t.max(1)\n\t\t\t\t\t\t\t.describe('Strength score for sentiment in relation to the category'),\n\t\t\t\t\t\tconfidence: z.number().min(0).max(1),\n\t\t\t\t\t});\n\n\t\t\t\t\tconst structuredParser = StructuredOutputParser.fromZodSchema(schema);\n\n\t\t\t\t\tconst parser = options.enableAutoFixing\n\t\t\t\t\t\t? OutputFixingParser.fromLLM(llm, structuredParser)\n\t\t\t\t\t\t: structuredParser;\n\n\t\t\t\t\tconst systemPromptTemplate = SystemMessagePromptTemplate.fromTemplate(\n\t\t\t\t\t\t`${options.systemPromptTemplate ?? 
DEFAULT_SYSTEM_PROMPT_TEMPLATE}\n\t\t\t{format_instructions}`,\n\t\t\t\t\t);\n\n\t\t\t\t\tconst input = this.getNodeParameter('inputText', i) as string;\n\t\t\t\t\tconst inputPrompt = new HumanMessage(input);\n\t\t\t\t\tconst messages = [\n\t\t\t\t\t\tawait systemPromptTemplate.format({\n\t\t\t\t\t\t\tcategories: sentimentCategories,\n\t\t\t\t\t\t\tformat_instructions: parser.getFormatInstructions(),\n\t\t\t\t\t\t}),\n\t\t\t\t\t\tinputPrompt,\n\t\t\t\t\t];\n\n\t\t\t\t\tconst prompt = ChatPromptTemplate.fromMessages(messages);\n\t\t\t\t\tconst chain = prompt.pipe(llm).pipe(parser).withConfig(getTracingConfig(this));\n\n\t\t\t\t\ttry {\n\t\t\t\t\t\tconst output = await chain.invoke(messages);\n\t\t\t\t\t\tconst sentimentIndex = categories.findIndex(\n\t\t\t\t\t\t\t(s) => s.toLowerCase() === output.sentiment.toLowerCase(),\n\t\t\t\t\t\t);\n\n\t\t\t\t\t\tif (sentimentIndex !== -1) {\n\t\t\t\t\t\t\tconst resultItem = { ...items[i] };\n\t\t\t\t\t\t\tconst sentimentAnalysis: IDataObject = {\n\t\t\t\t\t\t\t\tcategory: output.sentiment,\n\t\t\t\t\t\t\t};\n\t\t\t\t\t\t\tif (options.includeDetailedResults) {\n\t\t\t\t\t\t\t\tsentimentAnalysis.strength = output.strength;\n\t\t\t\t\t\t\t\tsentimentAnalysis.confidence = output.confidence;\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\tresultItem.json = {\n\t\t\t\t\t\t\t\t...resultItem.json,\n\t\t\t\t\t\t\t\tsentimentAnalysis,\n\t\t\t\t\t\t\t};\n\t\t\t\t\t\t\treturnData[sentimentIndex].push(resultItem);\n\t\t\t\t\t\t}\n\t\t\t\t\t} catch (error) {\n\t\t\t\t\t\tthrow new NodeOperationError(\n\t\t\t\t\t\t\tthis.getNode(),\n\t\t\t\t\t\t\t'Error during parsing of LLM output, please check your LLM model and configuration',\n\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\titemIndex: i,\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t);\n\t\t\t\t\t}\n\t\t\t\t} catch (error) {\n\t\t\t\t\tif (this.continueOnFail()) {\n\t\t\t\t\t\tconst executionErrorData = this.helpers.constructExecutionMetaData(\n\t\t\t\t\t\t\tthis.helpers.returnJsonArray({ error: error.message }),\n\t\t\t\t\t\t\t{ itemData: { item: i } },\n\t\t\t\t\t\t);\n\t\t\t\t\t\treturnData[0].push(...executionErrorData);\n\t\t\t\t\t\tcontinue;\n\t\t\t\t\t}\n\t\t\t\t\tthrow error;\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t\treturn 
returnData;\n\t}\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AACA,sBAA6B;AAC7B,qBAAgE;AAChE,4BAA2D;AAC3D,0BAA+D;AAS/D,iBAAkB;AAElB,0BAAwC;AACxC,qBAAiC;AAEjC,MAAM,iCACL;AAED,MAAM,qBAAqB;AAC3B,MAAM,oBAAoB,CAAC,YAA6B,sBAA8B;AACrF,QAAM,UAAW,YAAY,WAAW,CAAC;AACzC,QAAM,aAAc,SAAS,cAAyB;AACtD,QAAM,kBAAkB,WAAW,MAAM,GAAG,EAAE,IAAI,CAAC,QAAQ,IAAI,KAAK,CAAC;AAErE,QAAM,MAAM,gBAAgB,IAAI,CAAC,SAAS,EAAE,MAAM,QAAQ,aAAa,IAAI,EAAE;AAC7E,SAAO;AACR;AAEO,MAAM,kBAAuC;AAAA,EAA7C;AACN,uBAAoC;AAAA,MACnC,aAAa;AAAA,MACb,MAAM;AAAA,MACN,MAAM;AAAA,MACN,WAAW;AAAA,MACX,OAAO,CAAC,WAAW;AAAA,MACnB,SAAS,CAAC,GAAG,GAAG;AAAA,MAChB,aAAa;AAAA,MACb,OAAO;AAAA,QACN,YAAY,CAAC,IAAI;AAAA,QACjB,eAAe;AAAA,UACd,IAAI,CAAC,UAAU,YAAY;AAAA,QAC5B;AAAA,QACA,WAAW;AAAA,UACV,sBAAsB;AAAA,YACrB;AAAA,cACC,KAAK;AAAA,YACN;AAAA,UACD;AAAA,QACD;AAAA,MACD;AAAA,MACA,UAAU;AAAA,QACT,MAAM;AAAA,MACP;AAAA,MACA,QAAQ;AAAA,QACP,EAAE,aAAa,IAAI,MAAM,wCAAoB,KAAK;AAAA,QAClD;AAAA,UACC,aAAa;AAAA,UACb,gBAAgB;AAAA,UAChB,MAAM,wCAAoB;AAAA,UAC1B,UAAU;AAAA,QACX;AAAA,MACD;AAAA,MACA,SAAS,OAAO,iBAAiB,kBAAkB,kBAAkB;AAAA,MACrE,YAAY;AAAA,QACX;AAAA,UACC,aAAa;AAAA,UACb,MAAM;AAAA,UACN,MAAM;AAAA,UACN,UAAU;AAAA,UACV,SAAS;AAAA,UACT,aAAa;AAAA,UACb,aAAa;AAAA,YACZ,MAAM;AAAA,UACP;AAAA,QACD;AAAA,QACA;AAAA,UACC,aACC;AAAA,UACD,MAAM;AAAA,UACN,MAAM;AAAA,UACN,SAAS;AAAA,UACT,gBAAgB;AAAA,YACf,MAAM;AAAA,cACL,mCAAmC,CAAC,IAAI;AAAA,YACzC;AAAA,UACD;AAAA,QACD;AAAA,QACA;AAAA,UACC,aAAa;AAAA,UACb,MAAM;AAAA,UACN,MAAM;AAAA,UACN,SAAS,CAAC;AAAA,UACV,aAAa;AAAA,UACb,SAAS;AAAA,YACR;AAAA,cACC,aAAa;AAAA,cACb,MAAM;AAAA,cACN,MAAM;AAAA,cACN,SAAS;AAAA,cACT,aAAa;AAAA,cACb,kBAAkB;AAAA,cAClB,aAAa;AAAA,gBACZ,MAAM;AAAA,cACP;AAAA,YACD;AAAA,YACA;AAAA,cACC,aAAa;AAAA,cACb,MAAM;AAAA,cACN,MAAM;AAAA,cACN,SAAS;AAAA,cACT,aAAa;AAAA,cACb,aAAa;AAAA,gBACZ,MAAM;AAAA,cACP;AAAA,YACD;AAAA,YACA;AAAA,cACC,aAAa;AAAA,cACb,MAAM;AAAA,cACN,MAAM;AAAA,cACN,SAAS;AAAA,cACT,aACC;AAAA,YACF;AAAA,YACA;AAAA,cACC,aAAa;AAAA,cACb,MAAM;AAAA,cACN,MAAM;AAAA,cACN,SAAS;AAAA,cACT,aACC;AAAA,YACF;AAAA,gBACA,6CAAwB;AAAA,cACvB,MAAM;AAAA,gBACL,YAAY,CAAC,EAAE,MAAM,EAAE,KAAK,IAAI,EAAE,CAAC;AAAA,cACpC;AAAA,YACD,CAAC;AAAA,UACF;AAAA,QACD;AAAA,MACD;AAAA,IACD;AAAA;AAAA,EAEA,MAAM,UAAkE;AACvE,UAAM,QAAQ,KAAK,aAAa;AAEhC,UAAM,MAAO,MAAM,KAAK;AAAA,MACvB,wCAAoB;AAAA,MACpB;AAAA,IACD;AAEA,UAAM,aAAqC,CAAC;AAE5C,UAAM,YAAY,KAAK,iBAAiB,8BAA8B,GAAG,CAAC;AAC1E,UAAM,sBAAsB,KAAK;AAAA,MAChC;AAAA,MACA;AAAA,MACA;AAAA,IACD;AAEA,QAAI,KAAK,QAAQ,EAAE,eAAe,OAAO,YAAY,GAAG;AACvD,eAAS,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK,WAAW;AACjD,cAAM,QAAQ,MAAM,MAAM,GAAG,IAAI,SAAS;AAC1C,cAAM,gBAAgB,MAAM,IAAI,OAAO,OAAO,mBAAmB;AAChE,gBAAM,YAAY,IAAI;AACtB,gBAAM,sBAAsB,KAAK;AAAA,YAChC;AAAA,YACA;AAAA,YACA;AAAA,UACD;AAEA,gBAAM,aAAa,oBACjB,MAAM,GAAG,EACT,IAAI,CAAC,QAAQ,IAAI,KAAK,CAAC,EACvB,OAAO,OAAO;AAEhB,cAAI,WAAW,WAAW,GAAG;AAC5B,mBAAO;AAAA,cACN,QAAQ;AAAA,cACR;AAAA,cACA,OAAO,IAAI,uCAAmB,KAAK,QAAQ,GAAG,oCAAoC;AAAA,gBACjF;AAAA,cACD,CAAC;AAAA,YACF;AAAA,UACD;AAGA,cAAI,WAAW,WAAW,GAAG;AAC5B,uBAAW,KAAK,GAAG,MAAM,KAAK,EAAE,QAAQ,WAAW,OAAO,GAAG,MAAM,CAAC,CAAC,CAAC;AAAA,UACvE;AAEA,gBAAM,UAAU,KAAK,iBAAiB,WAAW,WAAW,CAAC,CAAC;AAM9D,gBAAM,SAAS,aAAE,OAAO;AAAA,YACvB,WAAW,aAAE,KAAK,UAAmC;AAAA,YACrD,UAAU,aACR,OAAO,EACP,IAAI,CAAC,EACL,IAAI,CAAC,EACL,SAAS,0DAA0D;AAAA,YACrE,YAAY,aAAE,OAAO,EAAE,IAAI,CAAC,EAAE,IAAI,CAAC;AAAA,UACpC,CAAC;AAED,gBAAM,mBAAmB,6CAAuB,cAAc,MAAM;AAEpE,gBAAM,SAAS,QAAQ,mBACpB,yCAAmB,QAAQ,KAAK,gBAAgB,IAChD;AAEH,gBAAM,uBAAuB,2CAA4B;AAAA,YACxD,GAAG,QAAQ,wBAAwB,8BAA8B;AAAA;AAAA,UAElE;AAEA,gBAAM,QAAQ,KAAK,iBAAiB,aAAa,SAAS;AAC1D,gBAAM,cAAc,IAAI,6BAAa,KAAK;AAC1C,gBAAM,WAAW;AAAA,YAChB,MAAM,qBAAqB,OAAO;AAAA,cACjC,YAAY;AAAA,cACZ,
qBAAqB,OAAO,sBAAsB;AAAA,YACnD,CAAC;AAAA,YACD;AAAA,UACD;AAEA,gBAAM,SAAS,kCAAmB,aAAa,QAAQ;AACvD,gBAAM,QAAQ,OAAO,KAAK,GAAG,EAAE,KAAK,MAAM,EAAE,eAAW,iCAAiB,IAAI,CAAC;AAE7E,cAAI;AACH,kBAAM,SAAS,MAAM,MAAM,OAAO,QAAQ;AAC1C,kBAAM,iBAAiB,WAAW;AAAA,cACjC,CAAC,MAAM,EAAE,YAAY,MAAM,OAAO,UAAU,YAAY;AAAA,YACzD;AAEA,gBAAI,mBAAmB,IAAI;AAC1B,oBAAM,aAAa,EAAE,GAAG,MAAM,SAAS,EAAE;AACzC,oBAAM,oBAAiC;AAAA,gBACtC,UAAU,OAAO;AAAA,cAClB;AACA,kBAAI,QAAQ,wBAAwB;AACnC,kCAAkB,WAAW,OAAO;AACpC,kCAAkB,aAAa,OAAO;AAAA,cACvC;AACA,yBAAW,OAAO;AAAA,gBACjB,GAAG,WAAW;AAAA,gBACd;AAAA,cACD;AAEA,qBAAO;AAAA,gBACN,QAAQ;AAAA,kBACP;AAAA,kBACA;AAAA,gBACD;AAAA,gBACA;AAAA,cACD;AAAA,YACD;AAEA,mBAAO;AAAA,cACN,QAAQ,CAAC;AAAA,cACT;AAAA,YACD;AAAA,UACD,SAAS,OAAO;AACf,mBAAO;AAAA,cACN,QAAQ;AAAA,cACR;AAAA,cACA,OAAO,IAAI;AAAA,gBACV,KAAK,QAAQ;AAAA,gBACb;AAAA,gBACA;AAAA,kBACC;AAAA,gBACD;AAAA,cACD;AAAA,YACD;AAAA,UACD;AAAA,QACD,CAAC;AACD,cAAM,eAAe,MAAM,QAAQ,IAAI,aAAa;AAEpD,qBAAa,QAAQ,CAAC,EAAE,QAAQ,WAAW,MAAM,MAAM;AACtD,cAAI,OAAO;AACV,gBAAI,KAAK,eAAe,GAAG;AAC1B,oBAAM,qBAAqB,KAAK,QAAQ;AAAA,gBACvC,KAAK,QAAQ,gBAAgB,EAAE,OAAO,MAAM,QAAQ,CAAC;AAAA,gBACrD,EAAE,UAAU,EAAE,MAAM,UAAU,EAAE;AAAA,cACjC;AAEA,yBAAW,CAAC,EAAE,KAAK,GAAG,kBAAkB;AACxC;AAAA,YACD,OAAO;AACN,oBAAM;AAAA,YACP;AAAA,UACD,WAAW,OAAO,cAAc,OAAO,gBAAgB;AACtD,kBAAM,iBAAiB,OAAO;AAC9B,kBAAM,aAAa,OAAO;AAC1B,uBAAW,cAAc,EAAE,KAAK,UAAU;AAAA,UAC3C;AAAA,QACD,CAAC;AAGD,YAAI,IAAI,YAAY,MAAM,UAAU,sBAAsB,GAAG;AAC5D,oBAAM,2BAAM,mBAAmB;AAAA,QAChC;AAAA,MACD;AAAA,IACD,OAAO;AAEN,eAAS,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK;AACtC,YAAI;AACH,gBAAM,sBAAsB,KAAK;AAAA,YAChC;AAAA,YACA;AAAA,YACA;AAAA,UACD;AAEA,gBAAM,aAAa,oBACjB,MAAM,GAAG,EACT,IAAI,CAAC,QAAQ,IAAI,KAAK,CAAC,EACvB,OAAO,OAAO;AAEhB,cAAI,WAAW,WAAW,GAAG;AAC5B,kBAAM,IAAI,uCAAmB,KAAK,QAAQ,GAAG,oCAAoC;AAAA,cAChF,WAAW;AAAA,YACZ,CAAC;AAAA,UACF;AAGA,cAAI,WAAW,WAAW,GAAG;AAC5B,uBAAW,KAAK,GAAG,MAAM,KAAK,EAAE,QAAQ,WAAW,OAAO,GAAG,MAAM,CAAC,CAAC,CAAC;AAAA,UACvE;AAEA,gBAAM,UAAU,KAAK,iBAAiB,WAAW,GAAG,CAAC,CAAC;AAMtD,gBAAM,SAAS,aAAE,OAAO;AAAA,YACvB,WAAW,aAAE,KAAK,UAAmC;AAAA,YACrD,UAAU,aACR,OAAO,EACP,IAAI,CAAC,EACL,IAAI,CAAC,EACL,SAAS,0DAA0D;AAAA,YACrE,YAAY,aAAE,OAAO,EAAE,IAAI,CAAC,EAAE,IAAI,CAAC;AAAA,UACpC,CAAC;AAED,gBAAM,mBAAmB,6CAAuB,cAAc,MAAM;AAEpE,gBAAM,SAAS,QAAQ,mBACpB,yCAAmB,QAAQ,KAAK,gBAAgB,IAChD;AAEH,gBAAM,uBAAuB,2CAA4B;AAAA,YACxD,GAAG,QAAQ,wBAAwB,8BAA8B;AAAA;AAAA,UAElE;AAEA,gBAAM,QAAQ,KAAK,iBAAiB,aAAa,CAAC;AAClD,gBAAM,cAAc,IAAI,6BAAa,KAAK;AAC1C,gBAAM,WAAW;AAAA,YAChB,MAAM,qBAAqB,OAAO;AAAA,cACjC,YAAY;AAAA,cACZ,qBAAqB,OAAO,sBAAsB;AAAA,YACnD,CAAC;AAAA,YACD;AAAA,UACD;AAEA,gBAAM,SAAS,kCAAmB,aAAa,QAAQ;AACvD,gBAAM,QAAQ,OAAO,KAAK,GAAG,EAAE,KAAK,MAAM,EAAE,eAAW,iCAAiB,IAAI,CAAC;AAE7E,cAAI;AACH,kBAAM,SAAS,MAAM,MAAM,OAAO,QAAQ;AAC1C,kBAAM,iBAAiB,WAAW;AAAA,cACjC,CAAC,MAAM,EAAE,YAAY,MAAM,OAAO,UAAU,YAAY;AAAA,YACzD;AAEA,gBAAI,mBAAmB,IAAI;AAC1B,oBAAM,aAAa,EAAE,GAAG,MAAM,CAAC,EAAE;AACjC,oBAAM,oBAAiC;AAAA,gBACtC,UAAU,OAAO;AAAA,cAClB;AACA,kBAAI,QAAQ,wBAAwB;AACnC,kCAAkB,WAAW,OAAO;AACpC,kCAAkB,aAAa,OAAO;AAAA,cACvC;AACA,yBAAW,OAAO;AAAA,gBACjB,GAAG,WAAW;AAAA,gBACd;AAAA,cACD;AACA,yBAAW,cAAc,EAAE,KAAK,UAAU;AAAA,YAC3C;AAAA,UACD,SAAS,OAAO;AACf,kBAAM,IAAI;AAAA,cACT,KAAK,QAAQ;AAAA,cACb;AAAA,cACA;AAAA,gBACC,WAAW;AAAA,cACZ;AAAA,YACD;AAAA,UACD;AAAA,QACD,SAAS,OAAO;AACf,cAAI,KAAK,eAAe,GAAG;AAC1B,kBAAM,qBAAqB,KAAK,QAAQ;AAAA,cACvC,KAAK,QAAQ,gBAAgB,EAAE,OAAO,MAAM,QAAQ,CAAC;AAAA,cACrD,EAAE,UAAU,EAAE,MAAM,EAAE,EAAE;AAAA,YACzB;AACA,uBAAW,CAAC,EAAE,KAAK,GAAG,kBAAkB;AACxC;AAAA,UACD;AACA,gBAAM;AAAA,QACP;AAAA,MACD;AAAA,IACD;AACA,WAAO;AAAA,EACR;AACD;","names":[]}
+
{"version":3,"sources":["../../../../nodes/chains/SentimentAnalysis/SentimentAnalysis.node.ts"],"sourcesContent":["import type { BaseLanguageModel } from '@langchain/core/language_models/base';\nimport { HumanMessage } from '@langchain/core/messages';\nimport { SystemMessagePromptTemplate, ChatPromptTemplate } from '@langchain/core/prompts';\nimport { OutputFixingParser, StructuredOutputParser } from 'langchain/output_parsers';\nimport { NodeConnectionTypes, NodeOperationError, sleep } from 'n8n-workflow';\nimport type {\n\tIDataObject,\n\tIExecuteFunctions,\n\tINodeExecutionData,\n\tINodeParameters,\n\tINodeType,\n\tINodeTypeDescription,\n} from 'n8n-workflow';\nimport { z } from 'zod';\n\nimport { getBatchingOptionFields } from '@utils/sharedFields';\nimport { getTracingConfig } from '@utils/tracing';\n\nconst DEFAULT_SYSTEM_PROMPT_TEMPLATE =\n\t'You are highly intelligent and accurate sentiment analyzer. Analyze the sentiment of the provided text. Categorize it into one of the following: {categories}. Use the provided formatting instructions. Only output the JSON.';\n\nconst DEFAULT_CATEGORIES = 'Positive, Neutral, Negative';\nconst configuredOutputs = (parameters: INodeParameters, defaultCategories: string) => {\n\tconst options = (parameters?.options ?? {}) as IDataObject;\n\tconst categories = (options?.categories as string) ?? defaultCategories;\n\tconst categoriesArray = categories.split(',').map((cat) => cat.trim());\n\n\tconst ret = categoriesArray.map((cat) => ({ type: 'main', displayName: cat }));\n\treturn ret;\n};\n\nexport class SentimentAnalysis implements INodeType {\n\tdescription: INodeTypeDescription = {\n\t\tdisplayName: 'Sentiment Analysis',\n\t\tname: 'sentimentAnalysis',\n\t\ticon: 'fa:balance-scale-left',\n\t\ticonColor: 'black',\n\t\tgroup: ['transform'],\n\t\tversion: [1, 1.1],\n\t\tdescription: 'Analyze the sentiment of your text',\n\t\tcodex: {\n\t\t\tcategories: ['AI'],\n\t\t\tsubcategories: {\n\t\t\t\tAI: ['Chains', 'Root Nodes'],\n\t\t\t},\n\t\t\tresources: {\n\t\t\t\tprimaryDocumentation: [\n\t\t\t\t\t{\n\t\t\t\t\t\turl: 'https://docs.n8n.io/integrations/builtin/cluster-nodes/root-nodes/n8n-nodes-langchain.sentimentanalysis/',\n\t\t\t\t\t},\n\t\t\t\t],\n\t\t\t},\n\t\t},\n\t\tdefaults: {\n\t\t\tname: 'Sentiment Analysis',\n\t\t},\n\t\tinputs: [\n\t\t\t{ displayName: '', type: NodeConnectionTypes.Main },\n\t\t\t{\n\t\t\t\tdisplayName: 'Model',\n\t\t\t\tmaxConnections: 1,\n\t\t\t\ttype: NodeConnectionTypes.AiLanguageModel,\n\t\t\t\trequired: true,\n\t\t\t},\n\t\t],\n\t\toutputs: `={{(${configuredOutputs})($parameter, \"${DEFAULT_CATEGORIES}\")}}`,\n\t\tproperties: [\n\t\t\t{\n\t\t\t\tdisplayName: 'Text to Analyze',\n\t\t\t\tname: 'inputText',\n\t\t\t\ttype: 'string',\n\t\t\t\trequired: true,\n\t\t\t\tdefault: '',\n\t\t\t\tdescription: 'Use an expression to reference data in previous nodes or enter static text',\n\t\t\t\ttypeOptions: {\n\t\t\t\t\trows: 2,\n\t\t\t\t},\n\t\t\t},\n\t\t\t{\n\t\t\t\tdisplayName:\n\t\t\t\t\t'Sentiment scores are LLM-generated estimates, not statistically rigorous measurements. 
They may be inconsistent across runs and should be used as rough indicators only.',\n\t\t\t\tname: 'detailedResultsNotice',\n\t\t\t\ttype: 'notice',\n\t\t\t\tdefault: '',\n\t\t\t\tdisplayOptions: {\n\t\t\t\t\tshow: {\n\t\t\t\t\t\t'/options.includeDetailedResults': [true],\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t},\n\t\t\t{\n\t\t\t\tdisplayName: 'Options',\n\t\t\t\tname: 'options',\n\t\t\t\ttype: 'collection',\n\t\t\t\tdefault: {},\n\t\t\t\tplaceholder: 'Add Option',\n\t\t\t\toptions: [\n\t\t\t\t\t{\n\t\t\t\t\t\tdisplayName: 'Sentiment Categories',\n\t\t\t\t\t\tname: 'categories',\n\t\t\t\t\t\ttype: 'string',\n\t\t\t\t\t\tdefault: DEFAULT_CATEGORIES,\n\t\t\t\t\t\tdescription: 'A comma-separated list of categories to analyze',\n\t\t\t\t\t\tnoDataExpression: true,\n\t\t\t\t\t\ttypeOptions: {\n\t\t\t\t\t\t\trows: 2,\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t\t{\n\t\t\t\t\t\tdisplayName: 'System Prompt Template',\n\t\t\t\t\t\tname: 'systemPromptTemplate',\n\t\t\t\t\t\ttype: 'string',\n\t\t\t\t\t\tdefault: DEFAULT_SYSTEM_PROMPT_TEMPLATE,\n\t\t\t\t\t\tdescription: 'String to use directly as the system prompt template',\n\t\t\t\t\t\ttypeOptions: {\n\t\t\t\t\t\t\trows: 6,\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t\t{\n\t\t\t\t\t\tdisplayName: 'Include Detailed Results',\n\t\t\t\t\t\tname: 'includeDetailedResults',\n\t\t\t\t\t\ttype: 'boolean',\n\t\t\t\t\t\tdefault: false,\n\t\t\t\t\t\tdescription:\n\t\t\t\t\t\t\t'Whether to include sentiment strength and confidence scores in the output',\n\t\t\t\t\t},\n\t\t\t\t\t{\n\t\t\t\t\t\tdisplayName: 'Enable Auto-Fixing',\n\t\t\t\t\t\tname: 'enableAutoFixing',\n\t\t\t\t\t\ttype: 'boolean',\n\t\t\t\t\t\tdefault: true,\n\t\t\t\t\t\tdescription:\n\t\t\t\t\t\t\t'Whether to enable auto-fixing (may trigger an additional LLM call if output is broken)',\n\t\t\t\t\t},\n\t\t\t\t\tgetBatchingOptionFields({\n\t\t\t\t\t\tshow: {\n\t\t\t\t\t\t\t'@version': [{ _cnd: { gte: 1.1 } }],\n\t\t\t\t\t\t},\n\t\t\t\t\t}),\n\t\t\t\t],\n\t\t\t},\n\t\t],\n\t};\n\n\tasync execute(this: IExecuteFunctions): Promise<INodeExecutionData[][]> {\n\t\tconst items = this.getInputData();\n\n\t\tconst llm = (await this.getInputConnectionData(\n\t\t\tNodeConnectionTypes.AiLanguageModel,\n\t\t\t0,\n\t\t)) as BaseLanguageModel;\n\n\t\tconst returnData: INodeExecutionData[][] = [];\n\n\t\tconst batchSize = this.getNodeParameter('options.batching.batchSize', 0, 5) as number;\n\t\tconst delayBetweenBatches = this.getNodeParameter(\n\t\t\t'options.batching.delayBetweenBatches',\n\t\t\t0,\n\t\t\t0,\n\t\t) as number;\n\n\t\tif (this.getNode().typeVersion >= 1.1 && batchSize > 1) {\n\t\t\tfor (let i = 0; i < items.length; i += batchSize) {\n\t\t\t\tconst batch = items.slice(i, i + batchSize);\n\t\t\t\tconst batchPromises = batch.map(async (_item, batchItemIndex) => {\n\t\t\t\t\tconst itemIndex = i + batchItemIndex;\n\t\t\t\t\tconst sentimentCategories = this.getNodeParameter(\n\t\t\t\t\t\t'options.categories',\n\t\t\t\t\t\titemIndex,\n\t\t\t\t\t\tDEFAULT_CATEGORIES,\n\t\t\t\t\t) as string;\n\n\t\t\t\t\tconst categories = sentimentCategories\n\t\t\t\t\t\t.split(',')\n\t\t\t\t\t\t.map((cat) => cat.trim())\n\t\t\t\t\t\t.filter(Boolean);\n\n\t\t\t\t\tif (categories.length === 0) {\n\t\t\t\t\t\treturn {\n\t\t\t\t\t\t\tresult: null,\n\t\t\t\t\t\t\titemIndex,\n\t\t\t\t\t\t\terror: new NodeOperationError(this.getNode(), 'No sentiment categories provided', {\n\t\t\t\t\t\t\t\titemIndex,\n\t\t\t\t\t\t\t}),\n\t\t\t\t\t\t};\n\t\t\t\t\t}\n\n\t\t\t\t\t// Initialize returnData with empty arrays for each category\n\t\t\t\t\tif 
(returnData.length === 0) {\n\t\t\t\t\t\treturnData.push(...Array.from({ length: categories.length }, () => []));\n\t\t\t\t\t}\n\n\t\t\t\t\tconst options = this.getNodeParameter('options', itemIndex, {}) as {\n\t\t\t\t\t\tsystemPromptTemplate?: string;\n\t\t\t\t\t\tincludeDetailedResults?: boolean;\n\t\t\t\t\t\tenableAutoFixing?: boolean;\n\t\t\t\t\t};\n\n\t\t\t\t\tconst schema = z.object({\n\t\t\t\t\t\tsentiment: z.enum(categories as [string, ...string[]]),\n\t\t\t\t\t\tstrength: z\n\t\t\t\t\t\t\t.number()\n\t\t\t\t\t\t\t.min(0)\n\t\t\t\t\t\t\t.max(1)\n\t\t\t\t\t\t\t.describe('Strength score for sentiment in relation to the category'),\n\t\t\t\t\t\tconfidence: z.number().min(0).max(1),\n\t\t\t\t\t});\n\n\t\t\t\t\tconst structuredParser = StructuredOutputParser.fromZodSchema(schema);\n\n\t\t\t\t\tconst parser = options.enableAutoFixing\n\t\t\t\t\t\t? OutputFixingParser.fromLLM(llm, structuredParser)\n\t\t\t\t\t\t: structuredParser;\n\n\t\t\t\t\tconst systemPromptTemplate = SystemMessagePromptTemplate.fromTemplate(\n\t\t\t\t\t\t`${options.systemPromptTemplate ?? DEFAULT_SYSTEM_PROMPT_TEMPLATE}\n\t\t\t\t{format_instructions}`,\n\t\t\t\t\t);\n\n\t\t\t\t\tconst input = this.getNodeParameter('inputText', itemIndex) as string;\n\t\t\t\t\tconst inputPrompt = new HumanMessage(input);\n\t\t\t\t\tconst messages = [\n\t\t\t\t\t\tawait systemPromptTemplate.format({\n\t\t\t\t\t\t\tcategories: sentimentCategories,\n\t\t\t\t\t\t\tformat_instructions: parser.getFormatInstructions(),\n\t\t\t\t\t\t}),\n\t\t\t\t\t\tinputPrompt,\n\t\t\t\t\t];\n\n\t\t\t\t\tconst prompt = ChatPromptTemplate.fromMessages(messages);\n\t\t\t\t\tconst chain = prompt.pipe(llm).pipe(parser).withConfig(getTracingConfig(this));\n\n\t\t\t\t\ttry {\n\t\t\t\t\t\tconst output = await chain.invoke(messages);\n\t\t\t\t\t\tconst sentimentIndex = categories.findIndex(\n\t\t\t\t\t\t\t(s) => s.toLowerCase() === output.sentiment.toLowerCase(),\n\t\t\t\t\t\t);\n\n\t\t\t\t\t\tif (sentimentIndex !== -1) {\n\t\t\t\t\t\t\tconst resultItem = { ...items[itemIndex] };\n\t\t\t\t\t\t\tconst sentimentAnalysis: IDataObject = {\n\t\t\t\t\t\t\t\tcategory: output.sentiment,\n\t\t\t\t\t\t\t};\n\t\t\t\t\t\t\tif (options.includeDetailedResults) {\n\t\t\t\t\t\t\t\tsentimentAnalysis.strength = output.strength;\n\t\t\t\t\t\t\t\tsentimentAnalysis.confidence = output.confidence;\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\tresultItem.json = {\n\t\t\t\t\t\t\t\t...resultItem.json,\n\t\t\t\t\t\t\t\tsentimentAnalysis,\n\t\t\t\t\t\t\t};\n\n\t\t\t\t\t\t\treturn {\n\t\t\t\t\t\t\t\tresult: {\n\t\t\t\t\t\t\t\t\tresultItem,\n\t\t\t\t\t\t\t\t\tsentimentIndex,\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\titemIndex,\n\t\t\t\t\t\t\t};\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\treturn {\n\t\t\t\t\t\t\tresult: {},\n\t\t\t\t\t\t\titemIndex,\n\t\t\t\t\t\t};\n\t\t\t\t\t} catch (error) {\n\t\t\t\t\t\treturn {\n\t\t\t\t\t\t\tresult: null,\n\t\t\t\t\t\t\titemIndex,\n\t\t\t\t\t\t\terror: new NodeOperationError(\n\t\t\t\t\t\t\t\tthis.getNode(),\n\t\t\t\t\t\t\t\t'Error during parsing of LLM output, please check your LLM model and configuration',\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\titemIndex,\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t),\n\t\t\t\t\t\t};\n\t\t\t\t\t}\n\t\t\t\t});\n\t\t\t\tconst batchResults = await Promise.all(batchPromises);\n\n\t\t\t\tbatchResults.forEach(({ result, itemIndex, error }) => {\n\t\t\t\t\tif (error) {\n\t\t\t\t\t\tif (this.continueOnFail()) {\n\t\t\t\t\t\t\tconst executionErrorData = this.helpers.constructExecutionMetaData(\n\t\t\t\t\t\t\t\tthis.helpers.returnJsonArray({ error: error.message 
}),\n\t\t\t\t\t\t\t\t{ itemData: { item: itemIndex } },\n\t\t\t\t\t\t\t);\n\n\t\t\t\t\t\t\treturnData[0].push(...executionErrorData);\n\t\t\t\t\t\t\treturn;\n\t\t\t\t\t\t} else {\n\t\t\t\t\t\t\tthrow error;\n\t\t\t\t\t\t}\n\t\t\t\t\t} else if (result.resultItem && result.sentimentIndex !== -1) {\n\t\t\t\t\t\tconst sentimentIndex = result.sentimentIndex;\n\t\t\t\t\t\tconst resultItem = result.resultItem;\n\t\t\t\t\t\treturnData[sentimentIndex].push(resultItem);\n\t\t\t\t\t}\n\t\t\t\t});\n\n\t\t\t\t// Add delay between batches if not the last batch\n\t\t\t\tif (i + batchSize < items.length && delayBetweenBatches > 0) {\n\t\t\t\t\tawait sleep(delayBetweenBatches);\n\t\t\t\t}\n\t\t\t}\n\t\t} else {\n\t\t\t// Sequential Processing\n\t\t\tfor (let i = 0; i < items.length; i++) {\n\t\t\t\ttry {\n\t\t\t\t\tconst sentimentCategories = this.getNodeParameter(\n\t\t\t\t\t\t'options.categories',\n\t\t\t\t\t\ti,\n\t\t\t\t\t\tDEFAULT_CATEGORIES,\n\t\t\t\t\t) as string;\n\n\t\t\t\t\tconst categories = sentimentCategories\n\t\t\t\t\t\t.split(',')\n\t\t\t\t\t\t.map((cat) => cat.trim())\n\t\t\t\t\t\t.filter(Boolean);\n\n\t\t\t\t\tif (categories.length === 0) {\n\t\t\t\t\t\tthrow new NodeOperationError(this.getNode(), 'No sentiment categories provided', {\n\t\t\t\t\t\t\titemIndex: i,\n\t\t\t\t\t\t});\n\t\t\t\t\t}\n\n\t\t\t\t\t// Initialize returnData with empty arrays for each category\n\t\t\t\t\tif (returnData.length === 0) {\n\t\t\t\t\t\treturnData.push(...Array.from({ length: categories.length }, () => []));\n\t\t\t\t\t}\n\n\t\t\t\t\tconst options = this.getNodeParameter('options', i, {}) as {\n\t\t\t\t\t\tsystemPromptTemplate?: string;\n\t\t\t\t\t\tincludeDetailedResults?: boolean;\n\t\t\t\t\t\tenableAutoFixing?: boolean;\n\t\t\t\t\t};\n\n\t\t\t\t\tconst schema = z.object({\n\t\t\t\t\t\tsentiment: z.enum(categories as [string, ...string[]]),\n\t\t\t\t\t\tstrength: z\n\t\t\t\t\t\t\t.number()\n\t\t\t\t\t\t\t.min(0)\n\t\t\t\t\t\t\t.max(1)\n\t\t\t\t\t\t\t.describe('Strength score for sentiment in relation to the category'),\n\t\t\t\t\t\tconfidence: z.number().min(0).max(1),\n\t\t\t\t\t});\n\n\t\t\t\t\tconst structuredParser = StructuredOutputParser.fromZodSchema(schema);\n\n\t\t\t\t\tconst parser = options.enableAutoFixing\n\t\t\t\t\t\t? OutputFixingParser.fromLLM(llm, structuredParser)\n\t\t\t\t\t\t: structuredParser;\n\n\t\t\t\t\tconst systemPromptTemplate = SystemMessagePromptTemplate.fromTemplate(\n\t\t\t\t\t\t`${options.systemPromptTemplate ?? 
DEFAULT_SYSTEM_PROMPT_TEMPLATE}\n\t\t\t{format_instructions}`,\n\t\t\t\t\t);\n\n\t\t\t\t\tconst input = this.getNodeParameter('inputText', i) as string;\n\t\t\t\t\tconst inputPrompt = new HumanMessage(input);\n\t\t\t\t\tconst messages = [\n\t\t\t\t\t\tawait systemPromptTemplate.format({\n\t\t\t\t\t\t\tcategories: sentimentCategories,\n\t\t\t\t\t\t\tformat_instructions: parser.getFormatInstructions(),\n\t\t\t\t\t\t}),\n\t\t\t\t\t\tinputPrompt,\n\t\t\t\t\t];\n\n\t\t\t\t\tconst prompt = ChatPromptTemplate.fromMessages(messages);\n\t\t\t\t\tconst chain = prompt.pipe(llm).pipe(parser).withConfig(getTracingConfig(this));\n\n\t\t\t\t\ttry {\n\t\t\t\t\t\tconst output = await chain.invoke(messages);\n\t\t\t\t\t\tconst sentimentIndex = categories.findIndex(\n\t\t\t\t\t\t\t(s) => s.toLowerCase() === output.sentiment.toLowerCase(),\n\t\t\t\t\t\t);\n\n\t\t\t\t\t\tif (sentimentIndex !== -1) {\n\t\t\t\t\t\t\tconst resultItem = { ...items[i] };\n\t\t\t\t\t\t\tconst sentimentAnalysis: IDataObject = {\n\t\t\t\t\t\t\t\tcategory: output.sentiment,\n\t\t\t\t\t\t\t};\n\t\t\t\t\t\t\tif (options.includeDetailedResults) {\n\t\t\t\t\t\t\t\tsentimentAnalysis.strength = output.strength;\n\t\t\t\t\t\t\t\tsentimentAnalysis.confidence = output.confidence;\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\tresultItem.json = {\n\t\t\t\t\t\t\t\t...resultItem.json,\n\t\t\t\t\t\t\t\tsentimentAnalysis,\n\t\t\t\t\t\t\t};\n\t\t\t\t\t\t\treturnData[sentimentIndex].push(resultItem);\n\t\t\t\t\t\t}\n\t\t\t\t\t} catch (error) {\n\t\t\t\t\t\tthrow new NodeOperationError(\n\t\t\t\t\t\t\tthis.getNode(),\n\t\t\t\t\t\t\t'Error during parsing of LLM output, please check your LLM model and configuration',\n\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\titemIndex: i,\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t);\n\t\t\t\t\t}\n\t\t\t\t} catch (error) {\n\t\t\t\t\tif (this.continueOnFail()) {\n\t\t\t\t\t\tconst executionErrorData = this.helpers.constructExecutionMetaData(\n\t\t\t\t\t\t\tthis.helpers.returnJsonArray({ error: error.message }),\n\t\t\t\t\t\t\t{ itemData: { item: i } },\n\t\t\t\t\t\t);\n\t\t\t\t\t\treturnData[0].push(...executionErrorData);\n\t\t\t\t\t\tcontinue;\n\t\t\t\t\t}\n\t\t\t\t\tthrow error;\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t\treturn 
returnData;\n\t}\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AACA,sBAA6B;AAC7B,qBAAgE;AAChE,4BAA2D;AAC3D,0BAA+D;AAS/D,iBAAkB;AAElB,0BAAwC;AACxC,qBAAiC;AAEjC,MAAM,iCACL;AAED,MAAM,qBAAqB;AAC3B,MAAM,oBAAoB,CAAC,YAA6B,sBAA8B;AACrF,QAAM,UAAW,YAAY,WAAW,CAAC;AACzC,QAAM,aAAc,SAAS,cAAyB;AACtD,QAAM,kBAAkB,WAAW,MAAM,GAAG,EAAE,IAAI,CAAC,QAAQ,IAAI,KAAK,CAAC;AAErE,QAAM,MAAM,gBAAgB,IAAI,CAAC,SAAS,EAAE,MAAM,QAAQ,aAAa,IAAI,EAAE;AAC7E,SAAO;AACR;AAEO,MAAM,kBAAuC;AAAA,EAA7C;AACN,uBAAoC;AAAA,MACnC,aAAa;AAAA,MACb,MAAM;AAAA,MACN,MAAM;AAAA,MACN,WAAW;AAAA,MACX,OAAO,CAAC,WAAW;AAAA,MACnB,SAAS,CAAC,GAAG,GAAG;AAAA,MAChB,aAAa;AAAA,MACb,OAAO;AAAA,QACN,YAAY,CAAC,IAAI;AAAA,QACjB,eAAe;AAAA,UACd,IAAI,CAAC,UAAU,YAAY;AAAA,QAC5B;AAAA,QACA,WAAW;AAAA,UACV,sBAAsB;AAAA,YACrB;AAAA,cACC,KAAK;AAAA,YACN;AAAA,UACD;AAAA,QACD;AAAA,MACD;AAAA,MACA,UAAU;AAAA,QACT,MAAM;AAAA,MACP;AAAA,MACA,QAAQ;AAAA,QACP,EAAE,aAAa,IAAI,MAAM,wCAAoB,KAAK;AAAA,QAClD;AAAA,UACC,aAAa;AAAA,UACb,gBAAgB;AAAA,UAChB,MAAM,wCAAoB;AAAA,UAC1B,UAAU;AAAA,QACX;AAAA,MACD;AAAA,MACA,SAAS,OAAO,iBAAiB,kBAAkB,kBAAkB;AAAA,MACrE,YAAY;AAAA,QACX;AAAA,UACC,aAAa;AAAA,UACb,MAAM;AAAA,UACN,MAAM;AAAA,UACN,UAAU;AAAA,UACV,SAAS;AAAA,UACT,aAAa;AAAA,UACb,aAAa;AAAA,YACZ,MAAM;AAAA,UACP;AAAA,QACD;AAAA,QACA;AAAA,UACC,aACC;AAAA,UACD,MAAM;AAAA,UACN,MAAM;AAAA,UACN,SAAS;AAAA,UACT,gBAAgB;AAAA,YACf,MAAM;AAAA,cACL,mCAAmC,CAAC,IAAI;AAAA,YACzC;AAAA,UACD;AAAA,QACD;AAAA,QACA;AAAA,UACC,aAAa;AAAA,UACb,MAAM;AAAA,UACN,MAAM;AAAA,UACN,SAAS,CAAC;AAAA,UACV,aAAa;AAAA,UACb,SAAS;AAAA,YACR;AAAA,cACC,aAAa;AAAA,cACb,MAAM;AAAA,cACN,MAAM;AAAA,cACN,SAAS;AAAA,cACT,aAAa;AAAA,cACb,kBAAkB;AAAA,cAClB,aAAa;AAAA,gBACZ,MAAM;AAAA,cACP;AAAA,YACD;AAAA,YACA;AAAA,cACC,aAAa;AAAA,cACb,MAAM;AAAA,cACN,MAAM;AAAA,cACN,SAAS;AAAA,cACT,aAAa;AAAA,cACb,aAAa;AAAA,gBACZ,MAAM;AAAA,cACP;AAAA,YACD;AAAA,YACA;AAAA,cACC,aAAa;AAAA,cACb,MAAM;AAAA,cACN,MAAM;AAAA,cACN,SAAS;AAAA,cACT,aACC;AAAA,YACF;AAAA,YACA;AAAA,cACC,aAAa;AAAA,cACb,MAAM;AAAA,cACN,MAAM;AAAA,cACN,SAAS;AAAA,cACT,aACC;AAAA,YACF;AAAA,gBACA,6CAAwB;AAAA,cACvB,MAAM;AAAA,gBACL,YAAY,CAAC,EAAE,MAAM,EAAE,KAAK,IAAI,EAAE,CAAC;AAAA,cACpC;AAAA,YACD,CAAC;AAAA,UACF;AAAA,QACD;AAAA,MACD;AAAA,IACD;AAAA;AAAA,EAEA,MAAM,UAAkE;AACvE,UAAM,QAAQ,KAAK,aAAa;AAEhC,UAAM,MAAO,MAAM,KAAK;AAAA,MACvB,wCAAoB;AAAA,MACpB;AAAA,IACD;AAEA,UAAM,aAAqC,CAAC;AAE5C,UAAM,YAAY,KAAK,iBAAiB,8BAA8B,GAAG,CAAC;AAC1E,UAAM,sBAAsB,KAAK;AAAA,MAChC;AAAA,MACA;AAAA,MACA;AAAA,IACD;AAEA,QAAI,KAAK,QAAQ,EAAE,eAAe,OAAO,YAAY,GAAG;AACvD,eAAS,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK,WAAW;AACjD,cAAM,QAAQ,MAAM,MAAM,GAAG,IAAI,SAAS;AAC1C,cAAM,gBAAgB,MAAM,IAAI,OAAO,OAAO,mBAAmB;AAChE,gBAAM,YAAY,IAAI;AACtB,gBAAM,sBAAsB,KAAK;AAAA,YAChC;AAAA,YACA;AAAA,YACA;AAAA,UACD;AAEA,gBAAM,aAAa,oBACjB,MAAM,GAAG,EACT,IAAI,CAAC,QAAQ,IAAI,KAAK,CAAC,EACvB,OAAO,OAAO;AAEhB,cAAI,WAAW,WAAW,GAAG;AAC5B,mBAAO;AAAA,cACN,QAAQ;AAAA,cACR;AAAA,cACA,OAAO,IAAI,uCAAmB,KAAK,QAAQ,GAAG,oCAAoC;AAAA,gBACjF;AAAA,cACD,CAAC;AAAA,YACF;AAAA,UACD;AAGA,cAAI,WAAW,WAAW,GAAG;AAC5B,uBAAW,KAAK,GAAG,MAAM,KAAK,EAAE,QAAQ,WAAW,OAAO,GAAG,MAAM,CAAC,CAAC,CAAC;AAAA,UACvE;AAEA,gBAAM,UAAU,KAAK,iBAAiB,WAAW,WAAW,CAAC,CAAC;AAM9D,gBAAM,SAAS,aAAE,OAAO;AAAA,YACvB,WAAW,aAAE,KAAK,UAAmC;AAAA,YACrD,UAAU,aACR,OAAO,EACP,IAAI,CAAC,EACL,IAAI,CAAC,EACL,SAAS,0DAA0D;AAAA,YACrE,YAAY,aAAE,OAAO,EAAE,IAAI,CAAC,EAAE,IAAI,CAAC;AAAA,UACpC,CAAC;AAED,gBAAM,mBAAmB,6CAAuB,cAAc,MAAM;AAEpE,gBAAM,SAAS,QAAQ,mBACpB,yCAAmB,QAAQ,KAAK,gBAAgB,IAChD;AAEH,gBAAM,uBAAuB,2CAA4B;AAAA,YACxD,GAAG,QAAQ,wBAAwB,8BAA8B;AAAA;AAAA,UAElE;AAEA,gBAAM,QAAQ,KAAK,iBAAiB,aAAa,SAAS;AAC1D,gBAAM,cAAc,IAAI,6BAAa,KAAK;AAC1C,gBAAM,WAAW;AAAA,YAChB,MAAM,qBAAqB,OAAO;AAAA,cACjC,YAAY;AAAA,cACZ,
qBAAqB,OAAO,sBAAsB;AAAA,YACnD,CAAC;AAAA,YACD;AAAA,UACD;AAEA,gBAAM,SAAS,kCAAmB,aAAa,QAAQ;AACvD,gBAAM,QAAQ,OAAO,KAAK,GAAG,EAAE,KAAK,MAAM,EAAE,eAAW,iCAAiB,IAAI,CAAC;AAE7E,cAAI;AACH,kBAAM,SAAS,MAAM,MAAM,OAAO,QAAQ;AAC1C,kBAAM,iBAAiB,WAAW;AAAA,cACjC,CAAC,MAAM,EAAE,YAAY,MAAM,OAAO,UAAU,YAAY;AAAA,YACzD;AAEA,gBAAI,mBAAmB,IAAI;AAC1B,oBAAM,aAAa,EAAE,GAAG,MAAM,SAAS,EAAE;AACzC,oBAAM,oBAAiC;AAAA,gBACtC,UAAU,OAAO;AAAA,cAClB;AACA,kBAAI,QAAQ,wBAAwB;AACnC,kCAAkB,WAAW,OAAO;AACpC,kCAAkB,aAAa,OAAO;AAAA,cACvC;AACA,yBAAW,OAAO;AAAA,gBACjB,GAAG,WAAW;AAAA,gBACd;AAAA,cACD;AAEA,qBAAO;AAAA,gBACN,QAAQ;AAAA,kBACP;AAAA,kBACA;AAAA,gBACD;AAAA,gBACA;AAAA,cACD;AAAA,YACD;AAEA,mBAAO;AAAA,cACN,QAAQ,CAAC;AAAA,cACT;AAAA,YACD;AAAA,UACD,SAAS,OAAO;AACf,mBAAO;AAAA,cACN,QAAQ;AAAA,cACR;AAAA,cACA,OAAO,IAAI;AAAA,gBACV,KAAK,QAAQ;AAAA,gBACb;AAAA,gBACA;AAAA,kBACC;AAAA,gBACD;AAAA,cACD;AAAA,YACD;AAAA,UACD;AAAA,QACD,CAAC;AACD,cAAM,eAAe,MAAM,QAAQ,IAAI,aAAa;AAEpD,qBAAa,QAAQ,CAAC,EAAE,QAAQ,WAAW,MAAM,MAAM;AACtD,cAAI,OAAO;AACV,gBAAI,KAAK,eAAe,GAAG;AAC1B,oBAAM,qBAAqB,KAAK,QAAQ;AAAA,gBACvC,KAAK,QAAQ,gBAAgB,EAAE,OAAO,MAAM,QAAQ,CAAC;AAAA,gBACrD,EAAE,UAAU,EAAE,MAAM,UAAU,EAAE;AAAA,cACjC;AAEA,yBAAW,CAAC,EAAE,KAAK,GAAG,kBAAkB;AACxC;AAAA,YACD,OAAO;AACN,oBAAM;AAAA,YACP;AAAA,UACD,WAAW,OAAO,cAAc,OAAO,mBAAmB,IAAI;AAC7D,kBAAM,iBAAiB,OAAO;AAC9B,kBAAM,aAAa,OAAO;AAC1B,uBAAW,cAAc,EAAE,KAAK,UAAU;AAAA,UAC3C;AAAA,QACD,CAAC;AAGD,YAAI,IAAI,YAAY,MAAM,UAAU,sBAAsB,GAAG;AAC5D,oBAAM,2BAAM,mBAAmB;AAAA,QAChC;AAAA,MACD;AAAA,IACD,OAAO;AAEN,eAAS,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK;AACtC,YAAI;AACH,gBAAM,sBAAsB,KAAK;AAAA,YAChC;AAAA,YACA;AAAA,YACA;AAAA,UACD;AAEA,gBAAM,aAAa,oBACjB,MAAM,GAAG,EACT,IAAI,CAAC,QAAQ,IAAI,KAAK,CAAC,EACvB,OAAO,OAAO;AAEhB,cAAI,WAAW,WAAW,GAAG;AAC5B,kBAAM,IAAI,uCAAmB,KAAK,QAAQ,GAAG,oCAAoC;AAAA,cAChF,WAAW;AAAA,YACZ,CAAC;AAAA,UACF;AAGA,cAAI,WAAW,WAAW,GAAG;AAC5B,uBAAW,KAAK,GAAG,MAAM,KAAK,EAAE,QAAQ,WAAW,OAAO,GAAG,MAAM,CAAC,CAAC,CAAC;AAAA,UACvE;AAEA,gBAAM,UAAU,KAAK,iBAAiB,WAAW,GAAG,CAAC,CAAC;AAMtD,gBAAM,SAAS,aAAE,OAAO;AAAA,YACvB,WAAW,aAAE,KAAK,UAAmC;AAAA,YACrD,UAAU,aACR,OAAO,EACP,IAAI,CAAC,EACL,IAAI,CAAC,EACL,SAAS,0DAA0D;AAAA,YACrE,YAAY,aAAE,OAAO,EAAE,IAAI,CAAC,EAAE,IAAI,CAAC;AAAA,UACpC,CAAC;AAED,gBAAM,mBAAmB,6CAAuB,cAAc,MAAM;AAEpE,gBAAM,SAAS,QAAQ,mBACpB,yCAAmB,QAAQ,KAAK,gBAAgB,IAChD;AAEH,gBAAM,uBAAuB,2CAA4B;AAAA,YACxD,GAAG,QAAQ,wBAAwB,8BAA8B;AAAA;AAAA,UAElE;AAEA,gBAAM,QAAQ,KAAK,iBAAiB,aAAa,CAAC;AAClD,gBAAM,cAAc,IAAI,6BAAa,KAAK;AAC1C,gBAAM,WAAW;AAAA,YAChB,MAAM,qBAAqB,OAAO;AAAA,cACjC,YAAY;AAAA,cACZ,qBAAqB,OAAO,sBAAsB;AAAA,YACnD,CAAC;AAAA,YACD;AAAA,UACD;AAEA,gBAAM,SAAS,kCAAmB,aAAa,QAAQ;AACvD,gBAAM,QAAQ,OAAO,KAAK,GAAG,EAAE,KAAK,MAAM,EAAE,eAAW,iCAAiB,IAAI,CAAC;AAE7E,cAAI;AACH,kBAAM,SAAS,MAAM,MAAM,OAAO,QAAQ;AAC1C,kBAAM,iBAAiB,WAAW;AAAA,cACjC,CAAC,MAAM,EAAE,YAAY,MAAM,OAAO,UAAU,YAAY;AAAA,YACzD;AAEA,gBAAI,mBAAmB,IAAI;AAC1B,oBAAM,aAAa,EAAE,GAAG,MAAM,CAAC,EAAE;AACjC,oBAAM,oBAAiC;AAAA,gBACtC,UAAU,OAAO;AAAA,cAClB;AACA,kBAAI,QAAQ,wBAAwB;AACnC,kCAAkB,WAAW,OAAO;AACpC,kCAAkB,aAAa,OAAO;AAAA,cACvC;AACA,yBAAW,OAAO;AAAA,gBACjB,GAAG,WAAW;AAAA,gBACd;AAAA,cACD;AACA,yBAAW,cAAc,EAAE,KAAK,UAAU;AAAA,YAC3C;AAAA,UACD,SAAS,OAAO;AACf,kBAAM,IAAI;AAAA,cACT,KAAK,QAAQ;AAAA,cACb;AAAA,cACA;AAAA,gBACC,WAAW;AAAA,cACZ;AAAA,YACD;AAAA,UACD;AAAA,QACD,SAAS,OAAO;AACf,cAAI,KAAK,eAAe,GAAG;AAC1B,kBAAM,qBAAqB,KAAK,QAAQ;AAAA,cACvC,KAAK,QAAQ,gBAAgB,EAAE,OAAO,MAAM,QAAQ,CAAC;AAAA,cACrD,EAAE,UAAU,EAAE,MAAM,EAAE,EAAE;AAAA,YACzB;AACA,uBAAW,CAAC,EAAE,KAAK,GAAG,kBAAkB;AACxC;AAAA,UACD;AACA,gBAAM;AAAA,QACP;AAAA,MACD;AAAA,IACD;AACA,WAAO;AAAA,EACR;AACD;","names":[]}
@@ -23,9 +23,9 @@ __export(DocumentGithubLoader_node_exports, {
 module.exports = __toCommonJS(DocumentGithubLoader_node_exports);
 var import_github = require("@langchain/community/document_loaders/web/github");
 var import_textsplitters = require("@langchain/textsplitters");
-var import_n8n_workflow = require("n8n-workflow");
 var import_logWrapper = require("../../../utils/logWrapper");
 var import_sharedFields = require("../../../utils/sharedFields");
+var import_n8n_workflow = require("n8n-workflow");
 function getInputs(parameters) {
 const inputs = [];
 const textSplittingMode = parameters?.textSplittingMode;
@@ -1 +1 @@
-
{"version":3,"sources":["../../../../nodes/document_loaders/DocumentGithubLoader/DocumentGithubLoader.node.ts"],"sourcesContent":["import { GithubRepoLoader } from '@langchain/community/document_loaders/web/github';\nimport type { TextSplitter } from '@langchain/textsplitters';\nimport { RecursiveCharacterTextSplitter } from '@langchain/textsplitters';\nimport {\n\tNodeConnectionTypes,\n\ttype INodeType,\n\ttype INodeTypeDescription,\n\ttype ISupplyDataFunctions,\n\ttype SupplyData,\n\ttype IDataObject,\n\ttype INodeInputConfiguration,\n} from 'n8n-workflow';\n\
+
{"version":3,"sources":["../../../../nodes/document_loaders/DocumentGithubLoader/DocumentGithubLoader.node.ts"],"sourcesContent":["import { GithubRepoLoader } from '@langchain/community/document_loaders/web/github';\nimport type { TextSplitter } from '@langchain/textsplitters';\nimport { RecursiveCharacterTextSplitter } from '@langchain/textsplitters';\nimport { logWrapper } from '@utils/logWrapper';\nimport { getConnectionHintNoticeField } from '@utils/sharedFields';\nimport {\n\tNodeConnectionTypes,\n\ttype INodeType,\n\ttype INodeTypeDescription,\n\ttype ISupplyDataFunctions,\n\ttype SupplyData,\n\ttype IDataObject,\n\ttype INodeInputConfiguration,\n} from 'n8n-workflow';\n\nfunction getInputs(parameters: IDataObject) {\n\tconst inputs: INodeInputConfiguration[] = [];\n\n\tconst textSplittingMode = parameters?.textSplittingMode;\n\t// If text splitting mode is 'custom' or does not exist (v1), we need to add an input for the text splitter\n\tif (!textSplittingMode || textSplittingMode === 'custom') {\n\t\tinputs.push({\n\t\t\tdisplayName: 'Text Splitter',\n\t\t\tmaxConnections: 1,\n\t\t\ttype: 'ai_textSplitter',\n\t\t\trequired: true,\n\t\t});\n\t}\n\n\treturn inputs;\n}\n\nexport class DocumentGithubLoader implements INodeType {\n\tdescription: INodeTypeDescription = {\n\t\tdisplayName: 'GitHub Document Loader',\n\t\tname: 'documentGithubLoader',\n\t\ticon: 'file:github.svg',\n\t\tgroup: ['transform'],\n\t\tversion: [1, 1.1],\n\t\tdefaultVersion: 1.1,\n\t\tdescription: 'Use GitHub data as input to this chain',\n\t\tdefaults: {\n\t\t\tname: 'GitHub Document Loader',\n\t\t},\n\t\tcodex: {\n\t\t\tcategories: ['AI'],\n\t\t\tsubcategories: {\n\t\t\t\tAI: ['Document Loaders'],\n\t\t\t},\n\t\t\tresources: {\n\t\t\t\tprimaryDocumentation: [\n\t\t\t\t\t{\n\t\t\t\t\t\turl: 'https://docs.n8n.io/integrations/builtin/cluster-nodes/sub-nodes/n8n-nodes-langchain.documentgithubloader/',\n\t\t\t\t\t},\n\t\t\t\t],\n\t\t\t},\n\t\t},\n\t\tcredentials: [\n\t\t\t{\n\t\t\t\tname: 'githubApi',\n\t\t\t\trequired: true,\n\t\t\t},\n\t\t],\n\n\t\tinputs: `={{ ((parameter) => { ${getInputs.toString()}; return getInputs(parameter) })($parameter) }}`,\n\t\tinputNames: ['Text Splitter'],\n\n\t\toutputs: [NodeConnectionTypes.AiDocument],\n\t\toutputNames: ['Document'],\n\t\tproperties: [\n\t\t\tgetConnectionHintNoticeField([NodeConnectionTypes.AiVectorStore]),\n\t\t\t{\n\t\t\t\tdisplayName: 'Repository Link',\n\t\t\t\tname: 'repository',\n\t\t\t\ttype: 'string',\n\t\t\t\tdefault: '',\n\t\t\t},\n\t\t\t{\n\t\t\t\tdisplayName: 'Branch',\n\t\t\t\tname: 'branch',\n\t\t\t\ttype: 'string',\n\t\t\t\tdefault: 'main',\n\t\t\t},\n\t\t\t{\n\t\t\t\tdisplayName: 'Text Splitting',\n\t\t\t\tname: 'textSplittingMode',\n\t\t\t\ttype: 'options',\n\t\t\t\tdefault: 'simple',\n\t\t\t\trequired: true,\n\t\t\t\tnoDataExpression: true,\n\t\t\t\tdisplayOptions: {\n\t\t\t\t\tshow: {\n\t\t\t\t\t\t'@version': [1.1],\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t\toptions: [\n\t\t\t\t\t{\n\t\t\t\t\t\tname: 'Simple',\n\t\t\t\t\t\tvalue: 'simple',\n\t\t\t\t\t\tdescription: 'Splits every 1000 characters with a 200 character overlap',\n\t\t\t\t\t},\n\t\t\t\t\t{\n\t\t\t\t\t\tname: 'Custom',\n\t\t\t\t\t\tvalue: 'custom',\n\t\t\t\t\t\tdescription: 'Connect a custom text-splitting sub-node',\n\t\t\t\t\t},\n\t\t\t\t],\n\t\t\t},\n\t\t\t{\n\t\t\t\tdisplayName: 'Options',\n\t\t\t\tname: 'additionalOptions',\n\t\t\t\ttype: 'collection',\n\t\t\t\tplaceholder: 'Add Option',\n\t\t\t\tdefault: {},\n\n\t\t\t\toptions: [\n\t\t\t\t\t{\n\t\t\t\t\t\tdisplayName: 
'Recursive',\n\t\t\t\t\t\tname: 'recursive',\n\t\t\t\t\t\ttype: 'boolean',\n\t\t\t\t\t\tdefault: false,\n\t\t\t\t\t},\n\t\t\t\t\t{\n\t\t\t\t\t\tdisplayName: 'Ignore Paths',\n\t\t\t\t\t\tname: 'ignorePaths',\n\t\t\t\t\t\ttype: 'string',\n\t\t\t\t\t\tdescription: 'Comma-separated list of paths to ignore, e.g. \"docs, src/tests',\n\t\t\t\t\t\tdefault: '',\n\t\t\t\t\t},\n\t\t\t\t],\n\t\t\t},\n\t\t],\n\t};\n\n\tasync supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise<SupplyData> {\n\t\tthis.logger.debug('Supplying data for Github Document Loader');\n\t\tconst node = this.getNode();\n\n\t\tconst repository = this.getNodeParameter('repository', itemIndex) as string;\n\t\tconst branch = this.getNodeParameter('branch', itemIndex) as string;\n\t\tconst credentials = await this.getCredentials('githubApi');\n\t\tconst { ignorePaths, recursive } = this.getNodeParameter('additionalOptions', 0) as {\n\t\t\trecursive: boolean;\n\t\t\tignorePaths: string;\n\t\t};\n\t\tlet textSplitter: TextSplitter | undefined;\n\n\t\tif (node.typeVersion === 1.1) {\n\t\t\tconst textSplittingMode = this.getNodeParameter('textSplittingMode', itemIndex, 'simple') as\n\t\t\t\t| 'simple'\n\t\t\t\t| 'custom';\n\n\t\t\tif (textSplittingMode === 'simple') {\n\t\t\t\ttextSplitter = new RecursiveCharacterTextSplitter({ chunkSize: 1000, chunkOverlap: 200 });\n\t\t\t} else if (textSplittingMode === 'custom') {\n\t\t\t\ttextSplitter = (await this.getInputConnectionData(NodeConnectionTypes.AiTextSplitter, 0)) as\n\t\t\t\t\t| TextSplitter\n\t\t\t\t\t| undefined;\n\t\t\t}\n\t\t} else {\n\t\t\ttextSplitter = (await this.getInputConnectionData(NodeConnectionTypes.AiTextSplitter, 0)) as\n\t\t\t\t| TextSplitter\n\t\t\t\t| undefined;\n\t\t}\n\n\t\tconst { index } = this.addInputData(NodeConnectionTypes.AiDocument, [\n\t\t\t[{ json: { repository, branch, ignorePaths, recursive } }],\n\t\t]);\n\t\tconst docs = new GithubRepoLoader(repository, {\n\t\t\tbranch,\n\t\t\tignorePaths: (ignorePaths ?? '').split(',').map((p) => p.trim()),\n\t\t\trecursive,\n\t\t\taccessToken: (credentials.accessToken as string) || '',\n\t\t\tapiUrl: credentials.server as string,\n\t\t});\n\n\t\tconst loadedDocs = textSplitter\n\t\t\t? 
await textSplitter.splitDocuments(await docs.load())\n\t\t\t: await docs.load();\n\n\t\tthis.addOutputData(NodeConnectionTypes.AiDocument, index, [[{ json: { loadedDocs } }]]);\n\t\treturn {\n\t\t\tresponse: logWrapper(loadedDocs, this),\n\t\t};\n\t}\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,oBAAiC;AAEjC,2BAA+C;AAC/C,wBAA2B;AAC3B,0BAA6C;AAC7C,0BAQO;AAEP,SAAS,UAAU,YAAyB;AAC3C,QAAM,SAAoC,CAAC;AAE3C,QAAM,oBAAoB,YAAY;AAEtC,MAAI,CAAC,qBAAqB,sBAAsB,UAAU;AACzD,WAAO,KAAK;AAAA,MACX,aAAa;AAAA,MACb,gBAAgB;AAAA,MAChB,MAAM;AAAA,MACN,UAAU;AAAA,IACX,CAAC;AAAA,EACF;AAEA,SAAO;AACR;AAEO,MAAM,qBAA0C;AAAA,EAAhD;AACN,uBAAoC;AAAA,MACnC,aAAa;AAAA,MACb,MAAM;AAAA,MACN,MAAM;AAAA,MACN,OAAO,CAAC,WAAW;AAAA,MACnB,SAAS,CAAC,GAAG,GAAG;AAAA,MAChB,gBAAgB;AAAA,MAChB,aAAa;AAAA,MACb,UAAU;AAAA,QACT,MAAM;AAAA,MACP;AAAA,MACA,OAAO;AAAA,QACN,YAAY,CAAC,IAAI;AAAA,QACjB,eAAe;AAAA,UACd,IAAI,CAAC,kBAAkB;AAAA,QACxB;AAAA,QACA,WAAW;AAAA,UACV,sBAAsB;AAAA,YACrB;AAAA,cACC,KAAK;AAAA,YACN;AAAA,UACD;AAAA,QACD;AAAA,MACD;AAAA,MACA,aAAa;AAAA,QACZ;AAAA,UACC,MAAM;AAAA,UACN,UAAU;AAAA,QACX;AAAA,MACD;AAAA,MAEA,QAAQ,yBAAyB,UAAU,SAAS,CAAC;AAAA,MACrD,YAAY,CAAC,eAAe;AAAA,MAE5B,SAAS,CAAC,wCAAoB,UAAU;AAAA,MACxC,aAAa,CAAC,UAAU;AAAA,MACxB,YAAY;AAAA,YACX,kDAA6B,CAAC,wCAAoB,aAAa,CAAC;AAAA,QAChE;AAAA,UACC,aAAa;AAAA,UACb,MAAM;AAAA,UACN,MAAM;AAAA,UACN,SAAS;AAAA,QACV;AAAA,QACA;AAAA,UACC,aAAa;AAAA,UACb,MAAM;AAAA,UACN,MAAM;AAAA,UACN,SAAS;AAAA,QACV;AAAA,QACA;AAAA,UACC,aAAa;AAAA,UACb,MAAM;AAAA,UACN,MAAM;AAAA,UACN,SAAS;AAAA,UACT,UAAU;AAAA,UACV,kBAAkB;AAAA,UAClB,gBAAgB;AAAA,YACf,MAAM;AAAA,cACL,YAAY,CAAC,GAAG;AAAA,YACjB;AAAA,UACD;AAAA,UACA,SAAS;AAAA,YACR;AAAA,cACC,MAAM;AAAA,cACN,OAAO;AAAA,cACP,aAAa;AAAA,YACd;AAAA,YACA;AAAA,cACC,MAAM;AAAA,cACN,OAAO;AAAA,cACP,aAAa;AAAA,YACd;AAAA,UACD;AAAA,QACD;AAAA,QACA;AAAA,UACC,aAAa;AAAA,UACb,MAAM;AAAA,UACN,MAAM;AAAA,UACN,aAAa;AAAA,UACb,SAAS,CAAC;AAAA,UAEV,SAAS;AAAA,YACR;AAAA,cACC,aAAa;AAAA,cACb,MAAM;AAAA,cACN,MAAM;AAAA,cACN,SAAS;AAAA,YACV;AAAA,YACA;AAAA,cACC,aAAa;AAAA,cACb,MAAM;AAAA,cACN,MAAM;AAAA,cACN,aAAa;AAAA,cACb,SAAS;AAAA,YACV;AAAA,UACD;AAAA,QACD;AAAA,MACD;AAAA,IACD;AAAA;AAAA,EAEA,MAAM,WAAuC,WAAwC;AACpF,SAAK,OAAO,MAAM,2CAA2C;AAC7D,UAAM,OAAO,KAAK,QAAQ;AAE1B,UAAM,aAAa,KAAK,iBAAiB,cAAc,SAAS;AAChE,UAAM,SAAS,KAAK,iBAAiB,UAAU,SAAS;AACxD,UAAM,cAAc,MAAM,KAAK,eAAe,WAAW;AACzD,UAAM,EAAE,aAAa,UAAU,IAAI,KAAK,iBAAiB,qBAAqB,CAAC;AAI/E,QAAI;AAEJ,QAAI,KAAK,gBAAgB,KAAK;AAC7B,YAAM,oBAAoB,KAAK,iBAAiB,qBAAqB,WAAW,QAAQ;AAIxF,UAAI,sBAAsB,UAAU;AACnC,uBAAe,IAAI,oDAA+B,EAAE,WAAW,KAAM,cAAc,IAAI,CAAC;AAAA,MACzF,WAAW,sBAAsB,UAAU;AAC1C,uBAAgB,MAAM,KAAK,uBAAuB,wCAAoB,gBAAgB,CAAC;AAAA,MAGxF;AAAA,IACD,OAAO;AACN,qBAAgB,MAAM,KAAK,uBAAuB,wCAAoB,gBAAgB,CAAC;AAAA,IAGxF;AAEA,UAAM,EAAE,MAAM,IAAI,KAAK,aAAa,wCAAoB,YAAY;AAAA,MACnE,CAAC,EAAE,MAAM,EAAE,YAAY,QAAQ,aAAa,UAAU,EAAE,CAAC;AAAA,IAC1D,CAAC;AACD,UAAM,OAAO,IAAI,+BAAiB,YAAY;AAAA,MAC7C;AAAA,MACA,cAAc,eAAe,IAAI,MAAM,GAAG,EAAE,IAAI,CAAC,MAAM,EAAE,KAAK,CAAC;AAAA,MAC/D;AAAA,MACA,aAAc,YAAY,eAA0B;AAAA,MACpD,QAAQ,YAAY;AAAA,IACrB,CAAC;AAED,UAAM,aAAa,eAChB,MAAM,aAAa,eAAe,MAAM,KAAK,KAAK,CAAC,IACnD,MAAM,KAAK,KAAK;AAEnB,SAAK,cAAc,wCAAoB,YAAY,OAAO,CAAC,CAAC,EAAE,MAAM,EAAE,WAAW,EAAE,CAAC,CAAC,CAAC;AACtF,WAAO;AAAA,MACN,cAAU,8BAAW,YAAY,IAAI;AAAA,IACtC;AAAA,EACD;AACD;","names":[]}
@@ -24,6 +24,7 @@ module.exports = __toCommonJS(EmbeddingsOpenAi_node_exports);
 var import_openai = require("@langchain/openai");
 var import_n8n_workflow = require("n8n-workflow");
 var import_logWrapper = require("../../../utils/logWrapper");
+var import_httpProxyAgent = require("../../../utils/httpProxyAgent");
 var import_sharedFields = require("../../../utils/sharedFields");
 const modelParameter = {
 displayName: "Model",
@@ -223,6 +224,11 @@ class EmbeddingsOpenAi {
 } else if (credentials.url) {
 configuration.baseURL = credentials.url;
 }
+if (configuration.baseURL) {
+configuration.fetchOptions = {
+dispatcher: (0, import_httpProxyAgent.getProxyAgent)(configuration.baseURL ?? "https://api.openai.com/v1")
+};
+}
 const embeddings = new import_openai.OpenAIEmbeddings({
 modelName: this.getNodeParameter("model", itemIndex, "text-embedding-3-small"),
 openAIApiKey: credentials.apiKey,
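The second hunk above is the functional change in this file: once a base URL has been resolved (from the node's options or the credentials), the embeddings client is handed a proxy-aware dispatcher through `configuration.fetchOptions`. The `(0, import_httpProxyAgent.getProxyAgent)(...)` form is simply the bundler's way of calling the imported helper as a plain function rather than as a method of the namespace object. The real `getProxyAgent` implementation lives in `utils/httpProxyAgent` and is not included in this diff, so the sketch below is only an approximation of what such a helper typically does; the helper name, undici usage, and environment-variable handling here are assumptions.

```ts
// Minimal sketch, assuming undici and the conventional proxy environment variables.
// This approximates, but is not, the package's own getProxyAgent helper.
import { ProxyAgent, type Dispatcher } from 'undici';

function getProxyAgentSketch(_targetUrl: string): Dispatcher | undefined {
	const proxyUrl =
		process.env.HTTPS_PROXY ??
		process.env.https_proxy ??
		process.env.HTTP_PROXY ??
		process.env.http_proxy;

	// No proxy configured: let the client fall back to its default dispatcher.
	if (!proxyUrl) return undefined;

	// NO_PROXY rules and per-host exclusions are deliberately omitted in this sketch.
	return new ProxyAgent(proxyUrl);
}

// Usage mirroring the new code path in EmbeddingsOpenAi.supplyData:
// a dispatcher is only attached when a custom baseURL was resolved.
const configuration: { baseURL?: string; fetchOptions?: { dispatcher?: Dispatcher } } = {
	baseURL: 'https://openai-gateway.example.internal/v1', // hypothetical value
};
if (configuration.baseURL) {
	configuration.fetchOptions = {
		dispatcher: getProxyAgentSketch(configuration.baseURL ?? 'https://api.openai.com/v1'),
	};
}
```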
@@ -1 +1 @@
-
{"version":3,"sources":["../../../../nodes/embeddings/EmbeddingsOpenAI/EmbeddingsOpenAi.node.ts"],"sourcesContent":["import { OpenAIEmbeddings } from '@langchain/openai';\nimport {\n\tNodeConnectionTypes,\n\ttype INodeType,\n\ttype INodeTypeDescription,\n\ttype SupplyData,\n\ttype ISupplyDataFunctions,\n\ttype INodeProperties,\n} from 'n8n-workflow';\nimport type { ClientOptions } from 'openai';\n\nimport { logWrapper } from '@utils/logWrapper';\nimport { getConnectionHintNoticeField } from '@utils/sharedFields';\n\nconst modelParameter: INodeProperties = {\n\tdisplayName: 'Model',\n\tname: 'model',\n\ttype: 'options',\n\tdescription:\n\t\t'The model which will generate the embeddings. <a href=\"https://platform.openai.com/docs/models/overview\">Learn more</a>.',\n\ttypeOptions: {\n\t\tloadOptions: {\n\t\t\trouting: {\n\t\t\t\trequest: {\n\t\t\t\t\tmethod: 'GET',\n\t\t\t\t\turl: '={{ $parameter.options?.baseURL?.split(\"/\").slice(-1).pop() || $credentials?.url?.split(\"/\").slice(-1).pop() || \"v1\" }}/models',\n\t\t\t\t},\n\t\t\t\toutput: {\n\t\t\t\t\tpostReceive: [\n\t\t\t\t\t\t{\n\t\t\t\t\t\t\ttype: 'rootProperty',\n\t\t\t\t\t\t\tproperties: {\n\t\t\t\t\t\t\t\tproperty: 'data',\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t},\n\t\t\t\t\t\t{\n\t\t\t\t\t\t\ttype: 'filter',\n\t\t\t\t\t\t\tproperties: {\n\t\t\t\t\t\t\t\tpass: \"={{ $responseItem.id.includes('embed') }}\",\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t},\n\t\t\t\t\t\t{\n\t\t\t\t\t\t\ttype: 'setKeyValue',\n\t\t\t\t\t\t\tproperties: {\n\t\t\t\t\t\t\t\tname: '={{$responseItem.id}}',\n\t\t\t\t\t\t\t\tvalue: '={{$responseItem.id}}',\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t},\n\t\t\t\t\t\t{\n\t\t\t\t\t\t\ttype: 'sort',\n\t\t\t\t\t\t\tproperties: {\n\t\t\t\t\t\t\t\tkey: 'name',\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t},\n\t\t\t\t\t],\n\t\t\t\t},\n\t\t\t},\n\t\t},\n\t},\n\trouting: {\n\t\tsend: {\n\t\t\ttype: 'body',\n\t\t\tproperty: 'model',\n\t\t},\n\t},\n\tdefault: 'text-embedding-3-small',\n};\n\nexport class EmbeddingsOpenAi implements INodeType {\n\tdescription: INodeTypeDescription = {\n\t\tdisplayName: 'Embeddings OpenAI',\n\t\tname: 'embeddingsOpenAi',\n\t\ticon: { light: 'file:openAiLight.svg', dark: 'file:openAiLight.dark.svg' },\n\t\tcredentials: [\n\t\t\t{\n\t\t\t\tname: 'openAiApi',\n\t\t\t\trequired: true,\n\t\t\t},\n\t\t],\n\t\tgroup: ['transform'],\n\t\tversion: [1, 1.1, 1.2],\n\t\tdescription: 'Use Embeddings OpenAI',\n\t\tdefaults: {\n\t\t\tname: 'Embeddings OpenAI',\n\t\t},\n\n\t\tcodex: {\n\t\t\tcategories: ['AI'],\n\t\t\tsubcategories: {\n\t\t\t\tAI: ['Embeddings'],\n\t\t\t},\n\t\t\tresources: {\n\t\t\t\tprimaryDocumentation: [\n\t\t\t\t\t{\n\t\t\t\t\t\turl: 'https://docs.n8n.io/integrations/builtin/cluster-nodes/sub-nodes/n8n-nodes-langchain.embeddingsopenai/',\n\t\t\t\t\t},\n\t\t\t\t],\n\t\t\t},\n\t\t},\n\n\t\tinputs: [],\n\n\t\toutputs: [NodeConnectionTypes.AiEmbedding],\n\t\toutputNames: ['Embeddings'],\n\t\trequestDefaults: {\n\t\t\tignoreHttpStatusErrors: true,\n\t\t\tbaseURL:\n\t\t\t\t'={{ $parameter.options?.baseURL?.split(\"/\").slice(0,-1).join(\"/\") || $credentials.url?.split(\"/\").slice(0,-1).join(\"/\") || \"https://api.openai.com\" }}',\n\t\t},\n\t\tproperties: [\n\t\t\tgetConnectionHintNoticeField([NodeConnectionTypes.AiVectorStore]),\n\t\t\t{\n\t\t\t\t...modelParameter,\n\t\t\t\tdefault: 'text-embedding-ada-002',\n\t\t\t\tdisplayOptions: {\n\t\t\t\t\tshow: {\n\t\t\t\t\t\t'@version': [1],\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t},\n\t\t\t{\n\t\t\t\t...modelParameter,\n\t\t\t\tdisplayOptions: {\n\t\t\t\t\thide: {\n\t\t\t\t\t\t'@version': 
[1],\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t},\n\t\t\t{\n\t\t\t\tdisplayName: 'Options',\n\t\t\t\tname: 'options',\n\t\t\t\tplaceholder: 'Add Option',\n\t\t\t\tdescription: 'Additional options to add',\n\t\t\t\ttype: 'collection',\n\t\t\t\tdefault: {},\n\t\t\t\toptions: [\n\t\t\t\t\t{\n\t\t\t\t\t\tdisplayName: 'Dimensions',\n\t\t\t\t\t\tname: 'dimensions',\n\t\t\t\t\t\tdefault: undefined,\n\t\t\t\t\t\tdescription:\n\t\t\t\t\t\t\t'The number of dimensions the resulting output embeddings should have. Only supported in text-embedding-3 and later models.',\n\t\t\t\t\t\ttype: 'options',\n\t\t\t\t\t\toptions: [\n\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\tname: '256',\n\t\t\t\t\t\t\t\tvalue: 256,\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\tname: '512',\n\t\t\t\t\t\t\t\tvalue: 512,\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\tname: '1024',\n\t\t\t\t\t\t\t\tvalue: 1024,\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\tname: '1536',\n\t\t\t\t\t\t\t\tvalue: 1536,\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\tname: '3072',\n\t\t\t\t\t\t\t\tvalue: 3072,\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t],\n\t\t\t\t\t},\n\t\t\t\t\t{\n\t\t\t\t\t\tdisplayName: 'Base URL',\n\t\t\t\t\t\tname: 'baseURL',\n\t\t\t\t\t\tdefault: 'https://api.openai.com/v1',\n\t\t\t\t\t\tdescription: 'Override the default base URL for the API',\n\t\t\t\t\t\ttype: 'string',\n\t\t\t\t\t\tdisplayOptions: {\n\t\t\t\t\t\t\thide: {\n\t\t\t\t\t\t\t\t'@version': [{ _cnd: { gte: 1.2 } }],\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t\t{\n\t\t\t\t\t\tdisplayName: 'Batch Size',\n\t\t\t\t\t\tname: 'batchSize',\n\t\t\t\t\t\tdefault: 512,\n\t\t\t\t\t\ttypeOptions: { maxValue: 2048 },\n\t\t\t\t\t\tdescription: 'Maximum number of documents to send in each request',\n\t\t\t\t\t\ttype: 'number',\n\t\t\t\t\t},\n\t\t\t\t\t{\n\t\t\t\t\t\tdisplayName: 'Strip New Lines',\n\t\t\t\t\t\tname: 'stripNewLines',\n\t\t\t\t\t\tdefault: true,\n\t\t\t\t\t\tdescription: 'Whether to strip new lines from the input text',\n\t\t\t\t\t\ttype: 'boolean',\n\t\t\t\t\t},\n\t\t\t\t\t{\n\t\t\t\t\t\tdisplayName: 'Timeout',\n\t\t\t\t\t\tname: 'timeout',\n\t\t\t\t\t\tdefault: -1,\n\t\t\t\t\t\tdescription:\n\t\t\t\t\t\t\t'Maximum amount of time a request is allowed to take in seconds. 
Set to -1 for no timeout.',\n\t\t\t\t\t\ttype: 'number',\n\t\t\t\t\t},\n\t\t\t\t],\n\t\t\t},\n\t\t],\n\t};\n\n\tasync supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise<SupplyData> {\n\t\tthis.logger.debug('Supply data for embeddings');\n\t\tconst credentials = await this.getCredentials('openAiApi');\n\n\t\tconst options = this.getNodeParameter('options', itemIndex, {}) as {\n\t\t\tbaseURL?: string;\n\t\t\tbatchSize?: number;\n\t\t\tstripNewLines?: boolean;\n\t\t\ttimeout?: number;\n\t\t\tdimensions?: number | undefined;\n\t\t};\n\n\t\tif (options.timeout === -1) {\n\t\t\toptions.timeout = undefined;\n\t\t}\n\n\t\tconst configuration: ClientOptions = {};\n\t\tif (options.baseURL) {\n\t\t\tconfiguration.baseURL = options.baseURL;\n\t\t} else if (credentials.url) {\n\t\t\tconfiguration.baseURL = credentials.url as string;\n\t\t}\n\n\t\tconst embeddings = new OpenAIEmbeddings({\n\t\t\tmodelName: this.getNodeParameter('model', itemIndex, 'text-embedding-3-small') as string,\n\t\t\topenAIApiKey: credentials.apiKey as string,\n\t\t\t...options,\n\t\t\tconfiguration,\n\t\t});\n\n\t\treturn {\n\t\t\tresponse: logWrapper(embeddings, this),\n\t\t};\n\t}\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,oBAAiC;AACjC,0BAOO;AAGP,wBAA2B;AAC3B,0BAA6C;AAE7C,MAAM,iBAAkC;AAAA,EACvC,aAAa;AAAA,EACb,MAAM;AAAA,EACN,MAAM;AAAA,EACN,aACC;AAAA,EACD,aAAa;AAAA,IACZ,aAAa;AAAA,MACZ,SAAS;AAAA,QACR,SAAS;AAAA,UACR,QAAQ;AAAA,UACR,KAAK;AAAA,QACN;AAAA,QACA,QAAQ;AAAA,UACP,aAAa;AAAA,YACZ;AAAA,cACC,MAAM;AAAA,cACN,YAAY;AAAA,gBACX,UAAU;AAAA,cACX;AAAA,YACD;AAAA,YACA;AAAA,cACC,MAAM;AAAA,cACN,YAAY;AAAA,gBACX,MAAM;AAAA,cACP;AAAA,YACD;AAAA,YACA;AAAA,cACC,MAAM;AAAA,cACN,YAAY;AAAA,gBACX,MAAM;AAAA,gBACN,OAAO;AAAA,cACR;AAAA,YACD;AAAA,YACA;AAAA,cACC,MAAM;AAAA,cACN,YAAY;AAAA,gBACX,KAAK;AAAA,cACN;AAAA,YACD;AAAA,UACD;AAAA,QACD;AAAA,MACD;AAAA,IACD;AAAA,EACD;AAAA,EACA,SAAS;AAAA,IACR,MAAM;AAAA,MACL,MAAM;AAAA,MACN,UAAU;AAAA,IACX;AAAA,EACD;AAAA,EACA,SAAS;AACV;AAEO,MAAM,iBAAsC;AAAA,EAA5C;AACN,uBAAoC;AAAA,MACnC,aAAa;AAAA,MACb,MAAM;AAAA,MACN,MAAM,EAAE,OAAO,wBAAwB,MAAM,4BAA4B;AAAA,MACzE,aAAa;AAAA,QACZ;AAAA,UACC,MAAM;AAAA,UACN,UAAU;AAAA,QACX;AAAA,MACD;AAAA,MACA,OAAO,CAAC,WAAW;AAAA,MACnB,SAAS,CAAC,GAAG,KAAK,GAAG;AAAA,MACrB,aAAa;AAAA,MACb,UAAU;AAAA,QACT,MAAM;AAAA,MACP;AAAA,MAEA,OAAO;AAAA,QACN,YAAY,CAAC,IAAI;AAAA,QACjB,eAAe;AAAA,UACd,IAAI,CAAC,YAAY;AAAA,QAClB;AAAA,QACA,WAAW;AAAA,UACV,sBAAsB;AAAA,YACrB;AAAA,cACC,KAAK;AAAA,YACN;AAAA,UACD;AAAA,QACD;AAAA,MACD;AAAA,MAEA,QAAQ,CAAC;AAAA,MAET,SAAS,CAAC,wCAAoB,WAAW;AAAA,MACzC,aAAa,CAAC,YAAY;AAAA,MAC1B,iBAAiB;AAAA,QAChB,wBAAwB;AAAA,QACxB,SACC;AAAA,MACF;AAAA,MACA,YAAY;AAAA,YACX,kDAA6B,CAAC,wCAAoB,aAAa,CAAC;AAAA,QAChE;AAAA,UACC,GAAG;AAAA,UACH,SAAS;AAAA,UACT,gBAAgB;AAAA,YACf,MAAM;AAAA,cACL,YAAY,CAAC,CAAC;AAAA,YACf;AAAA,UACD;AAAA,QACD;AAAA,QACA;AAAA,UACC,GAAG;AAAA,UACH,gBAAgB;AAAA,YACf,MAAM;AAAA,cACL,YAAY,CAAC,CAAC;AAAA,YACf;AAAA,UACD;AAAA,QACD;AAAA,QACA;AAAA,UACC,aAAa;AAAA,UACb,MAAM;AAAA,UACN,aAAa;AAAA,UACb,aAAa;AAAA,UACb,MAAM;AAAA,UACN,SAAS,CAAC;AAAA,UACV,SAAS;AAAA,YACR;AAAA,cACC,aAAa;AAAA,cACb,MAAM;AAAA,cACN,SAAS;AAAA,cACT,aACC;AAAA,cACD,MAAM;AAAA,cACN,SAAS;AAAA,gBACR;AAAA,kBACC,MAAM;AAAA,kBACN,OAAO;AAAA,gBACR;AAAA,gBACA;AAAA,kBACC,MAAM;AAAA,kBACN,OAAO;AAAA,gBACR;AAAA,gBACA;AAAA,kBACC,MAAM;AAAA,kBACN,OAAO;AAAA,gBACR;AAAA,gBACA;AAAA,kBACC,MAAM;AAAA,kBACN,OAAO;AAAA,gBACR;AAAA,gBACA;AAAA,kBACC,MAAM;AAAA,kBACN,OAAO;AAAA,gBACR;AAAA,cACD;AAAA,YACD;AAAA,YACA;AAAA,cACC,aAAa;AAAA,cACb,MAAM;AAAA,cACN,SAAS;AAAA,cACT,aAAa;AAAA,cACb,MAAM;AAAA,cACN,gBAAgB;AAAA,gBACf,MAAM;AAAA,kBACL,YAAY,CAAC,EAAE,M
AAM,EAAE,KAAK,IAAI,EAAE,CAAC;AAAA,gBACpC;AAAA,cACD;AAAA,YACD;AAAA,YACA;AAAA,cACC,aAAa;AAAA,cACb,MAAM;AAAA,cACN,SAAS;AAAA,cACT,aAAa,EAAE,UAAU,KAAK;AAAA,cAC9B,aAAa;AAAA,cACb,MAAM;AAAA,YACP;AAAA,YACA;AAAA,cACC,aAAa;AAAA,cACb,MAAM;AAAA,cACN,SAAS;AAAA,cACT,aAAa;AAAA,cACb,MAAM;AAAA,YACP;AAAA,YACA;AAAA,cACC,aAAa;AAAA,cACb,MAAM;AAAA,cACN,SAAS;AAAA,cACT,aACC;AAAA,cACD,MAAM;AAAA,YACP;AAAA,UACD;AAAA,QACD;AAAA,MACD;AAAA,IACD;AAAA;AAAA,EAEA,MAAM,WAAuC,WAAwC;AACpF,SAAK,OAAO,MAAM,4BAA4B;AAC9C,UAAM,cAAc,MAAM,KAAK,eAAe,WAAW;AAEzD,UAAM,UAAU,KAAK,iBAAiB,WAAW,WAAW,CAAC,CAAC;AAQ9D,QAAI,QAAQ,YAAY,IAAI;AAC3B,cAAQ,UAAU;AAAA,IACnB;AAEA,UAAM,gBAA+B,CAAC;AACtC,QAAI,QAAQ,SAAS;AACpB,oBAAc,UAAU,QAAQ;AAAA,IACjC,WAAW,YAAY,KAAK;AAC3B,oBAAc,UAAU,YAAY;AAAA,IACrC;AAEA,UAAM,aAAa,IAAI,+BAAiB;AAAA,MACvC,WAAW,KAAK,iBAAiB,SAAS,WAAW,wBAAwB;AAAA,MAC7E,cAAc,YAAY;AAAA,MAC1B,GAAG;AAAA,MACH;AAAA,IACD,CAAC;AAED,WAAO;AAAA,MACN,cAAU,8BAAW,YAAY,IAAI;AAAA,IACtC;AAAA,EACD;AACD;","names":[]}
+
{"version":3,"sources":["../../../../nodes/embeddings/EmbeddingsOpenAI/EmbeddingsOpenAi.node.ts"],"sourcesContent":["import { OpenAIEmbeddings } from '@langchain/openai';\nimport {\n\tNodeConnectionTypes,\n\ttype INodeType,\n\ttype INodeTypeDescription,\n\ttype SupplyData,\n\ttype ISupplyDataFunctions,\n\ttype INodeProperties,\n} from 'n8n-workflow';\nimport type { ClientOptions } from 'openai';\n\nimport { logWrapper } from '@utils/logWrapper';\n\nimport { getProxyAgent } from '@utils/httpProxyAgent';\nimport { getConnectionHintNoticeField } from '@utils/sharedFields';\n\nconst modelParameter: INodeProperties = {\n\tdisplayName: 'Model',\n\tname: 'model',\n\ttype: 'options',\n\tdescription:\n\t\t'The model which will generate the embeddings. <a href=\"https://platform.openai.com/docs/models/overview\">Learn more</a>.',\n\ttypeOptions: {\n\t\tloadOptions: {\n\t\t\trouting: {\n\t\t\t\trequest: {\n\t\t\t\t\tmethod: 'GET',\n\t\t\t\t\turl: '={{ $parameter.options?.baseURL?.split(\"/\").slice(-1).pop() || $credentials?.url?.split(\"/\").slice(-1).pop() || \"v1\" }}/models',\n\t\t\t\t},\n\t\t\t\toutput: {\n\t\t\t\t\tpostReceive: [\n\t\t\t\t\t\t{\n\t\t\t\t\t\t\ttype: 'rootProperty',\n\t\t\t\t\t\t\tproperties: {\n\t\t\t\t\t\t\t\tproperty: 'data',\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t},\n\t\t\t\t\t\t{\n\t\t\t\t\t\t\ttype: 'filter',\n\t\t\t\t\t\t\tproperties: {\n\t\t\t\t\t\t\t\tpass: \"={{ $responseItem.id.includes('embed') }}\",\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t},\n\t\t\t\t\t\t{\n\t\t\t\t\t\t\ttype: 'setKeyValue',\n\t\t\t\t\t\t\tproperties: {\n\t\t\t\t\t\t\t\tname: '={{$responseItem.id}}',\n\t\t\t\t\t\t\t\tvalue: '={{$responseItem.id}}',\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t},\n\t\t\t\t\t\t{\n\t\t\t\t\t\t\ttype: 'sort',\n\t\t\t\t\t\t\tproperties: {\n\t\t\t\t\t\t\t\tkey: 'name',\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t},\n\t\t\t\t\t],\n\t\t\t\t},\n\t\t\t},\n\t\t},\n\t},\n\trouting: {\n\t\tsend: {\n\t\t\ttype: 'body',\n\t\t\tproperty: 'model',\n\t\t},\n\t},\n\tdefault: 'text-embedding-3-small',\n};\n\nexport class EmbeddingsOpenAi implements INodeType {\n\tdescription: INodeTypeDescription = {\n\t\tdisplayName: 'Embeddings OpenAI',\n\t\tname: 'embeddingsOpenAi',\n\t\ticon: { light: 'file:openAiLight.svg', dark: 'file:openAiLight.dark.svg' },\n\t\tcredentials: [\n\t\t\t{\n\t\t\t\tname: 'openAiApi',\n\t\t\t\trequired: true,\n\t\t\t},\n\t\t],\n\t\tgroup: ['transform'],\n\t\tversion: [1, 1.1, 1.2],\n\t\tdescription: 'Use Embeddings OpenAI',\n\t\tdefaults: {\n\t\t\tname: 'Embeddings OpenAI',\n\t\t},\n\n\t\tcodex: {\n\t\t\tcategories: ['AI'],\n\t\t\tsubcategories: {\n\t\t\t\tAI: ['Embeddings'],\n\t\t\t},\n\t\t\tresources: {\n\t\t\t\tprimaryDocumentation: [\n\t\t\t\t\t{\n\t\t\t\t\t\turl: 'https://docs.n8n.io/integrations/builtin/cluster-nodes/sub-nodes/n8n-nodes-langchain.embeddingsopenai/',\n\t\t\t\t\t},\n\t\t\t\t],\n\t\t\t},\n\t\t},\n\n\t\tinputs: [],\n\n\t\toutputs: [NodeConnectionTypes.AiEmbedding],\n\t\toutputNames: ['Embeddings'],\n\t\trequestDefaults: {\n\t\t\tignoreHttpStatusErrors: true,\n\t\t\tbaseURL:\n\t\t\t\t'={{ $parameter.options?.baseURL?.split(\"/\").slice(0,-1).join(\"/\") || $credentials.url?.split(\"/\").slice(0,-1).join(\"/\") || \"https://api.openai.com\" }}',\n\t\t},\n\t\tproperties: [\n\t\t\tgetConnectionHintNoticeField([NodeConnectionTypes.AiVectorStore]),\n\t\t\t{\n\t\t\t\t...modelParameter,\n\t\t\t\tdefault: 'text-embedding-ada-002',\n\t\t\t\tdisplayOptions: {\n\t\t\t\t\tshow: {\n\t\t\t\t\t\t'@version': 
[1],\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t},\n\t\t\t{\n\t\t\t\t...modelParameter,\n\t\t\t\tdisplayOptions: {\n\t\t\t\t\thide: {\n\t\t\t\t\t\t'@version': [1],\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t},\n\t\t\t{\n\t\t\t\tdisplayName: 'Options',\n\t\t\t\tname: 'options',\n\t\t\t\tplaceholder: 'Add Option',\n\t\t\t\tdescription: 'Additional options to add',\n\t\t\t\ttype: 'collection',\n\t\t\t\tdefault: {},\n\t\t\t\toptions: [\n\t\t\t\t\t{\n\t\t\t\t\t\tdisplayName: 'Dimensions',\n\t\t\t\t\t\tname: 'dimensions',\n\t\t\t\t\t\tdefault: undefined,\n\t\t\t\t\t\tdescription:\n\t\t\t\t\t\t\t'The number of dimensions the resulting output embeddings should have. Only supported in text-embedding-3 and later models.',\n\t\t\t\t\t\ttype: 'options',\n\t\t\t\t\t\toptions: [\n\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\tname: '256',\n\t\t\t\t\t\t\t\tvalue: 256,\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\tname: '512',\n\t\t\t\t\t\t\t\tvalue: 512,\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\tname: '1024',\n\t\t\t\t\t\t\t\tvalue: 1024,\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\tname: '1536',\n\t\t\t\t\t\t\t\tvalue: 1536,\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\tname: '3072',\n\t\t\t\t\t\t\t\tvalue: 3072,\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t],\n\t\t\t\t\t},\n\t\t\t\t\t{\n\t\t\t\t\t\tdisplayName: 'Base URL',\n\t\t\t\t\t\tname: 'baseURL',\n\t\t\t\t\t\tdefault: 'https://api.openai.com/v1',\n\t\t\t\t\t\tdescription: 'Override the default base URL for the API',\n\t\t\t\t\t\ttype: 'string',\n\t\t\t\t\t\tdisplayOptions: {\n\t\t\t\t\t\t\thide: {\n\t\t\t\t\t\t\t\t'@version': [{ _cnd: { gte: 1.2 } }],\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t\t{\n\t\t\t\t\t\tdisplayName: 'Batch Size',\n\t\t\t\t\t\tname: 'batchSize',\n\t\t\t\t\t\tdefault: 512,\n\t\t\t\t\t\ttypeOptions: { maxValue: 2048 },\n\t\t\t\t\t\tdescription: 'Maximum number of documents to send in each request',\n\t\t\t\t\t\ttype: 'number',\n\t\t\t\t\t},\n\t\t\t\t\t{\n\t\t\t\t\t\tdisplayName: 'Strip New Lines',\n\t\t\t\t\t\tname: 'stripNewLines',\n\t\t\t\t\t\tdefault: true,\n\t\t\t\t\t\tdescription: 'Whether to strip new lines from the input text',\n\t\t\t\t\t\ttype: 'boolean',\n\t\t\t\t\t},\n\t\t\t\t\t{\n\t\t\t\t\t\tdisplayName: 'Timeout',\n\t\t\t\t\t\tname: 'timeout',\n\t\t\t\t\t\tdefault: -1,\n\t\t\t\t\t\tdescription:\n\t\t\t\t\t\t\t'Maximum amount of time a request is allowed to take in seconds. Set to -1 for no timeout.',\n\t\t\t\t\t\ttype: 'number',\n\t\t\t\t\t},\n\t\t\t\t],\n\t\t\t},\n\t\t],\n\t};\n\n\tasync supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise<SupplyData> {\n\t\tthis.logger.debug('Supply data for embeddings');\n\t\tconst credentials = await this.getCredentials('openAiApi');\n\n\t\tconst options = this.getNodeParameter('options', itemIndex, {}) as {\n\t\t\tbaseURL?: string;\n\t\t\tbatchSize?: number;\n\t\t\tstripNewLines?: boolean;\n\t\t\ttimeout?: number;\n\t\t\tdimensions?: number | undefined;\n\t\t};\n\n\t\tif (options.timeout === -1) {\n\t\t\toptions.timeout = undefined;\n\t\t}\n\n\t\tconst configuration: ClientOptions = {};\n\t\tif (options.baseURL) {\n\t\t\tconfiguration.baseURL = options.baseURL;\n\t\t} else if (credentials.url) {\n\t\t\tconfiguration.baseURL = credentials.url as string;\n\t\t}\n\n\t\tif (configuration.baseURL) {\n\t\t\tconfiguration.fetchOptions = {\n\t\t\t\tdispatcher: getProxyAgent(configuration.baseURL ?? 
'https://api.openai.com/v1'),\n\t\t\t};\n\t\t}\n\n\t\tconst embeddings = new OpenAIEmbeddings({\n\t\t\tmodelName: this.getNodeParameter('model', itemIndex, 'text-embedding-3-small') as string,\n\t\t\topenAIApiKey: credentials.apiKey as string,\n\t\t\t...options,\n\t\t\tconfiguration,\n\t\t});\n\n\t\treturn {\n\t\t\tresponse: logWrapper(embeddings, this),\n\t\t};\n\t}\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,oBAAiC;AACjC,0BAOO;AAGP,wBAA2B;AAE3B,4BAA8B;AAC9B,0BAA6C;AAE7C,MAAM,iBAAkC;AAAA,EACvC,aAAa;AAAA,EACb,MAAM;AAAA,EACN,MAAM;AAAA,EACN,aACC;AAAA,EACD,aAAa;AAAA,IACZ,aAAa;AAAA,MACZ,SAAS;AAAA,QACR,SAAS;AAAA,UACR,QAAQ;AAAA,UACR,KAAK;AAAA,QACN;AAAA,QACA,QAAQ;AAAA,UACP,aAAa;AAAA,YACZ;AAAA,cACC,MAAM;AAAA,cACN,YAAY;AAAA,gBACX,UAAU;AAAA,cACX;AAAA,YACD;AAAA,YACA;AAAA,cACC,MAAM;AAAA,cACN,YAAY;AAAA,gBACX,MAAM;AAAA,cACP;AAAA,YACD;AAAA,YACA;AAAA,cACC,MAAM;AAAA,cACN,YAAY;AAAA,gBACX,MAAM;AAAA,gBACN,OAAO;AAAA,cACR;AAAA,YACD;AAAA,YACA;AAAA,cACC,MAAM;AAAA,cACN,YAAY;AAAA,gBACX,KAAK;AAAA,cACN;AAAA,YACD;AAAA,UACD;AAAA,QACD;AAAA,MACD;AAAA,IACD;AAAA,EACD;AAAA,EACA,SAAS;AAAA,IACR,MAAM;AAAA,MACL,MAAM;AAAA,MACN,UAAU;AAAA,IACX;AAAA,EACD;AAAA,EACA,SAAS;AACV;AAEO,MAAM,iBAAsC;AAAA,EAA5C;AACN,uBAAoC;AAAA,MACnC,aAAa;AAAA,MACb,MAAM;AAAA,MACN,MAAM,EAAE,OAAO,wBAAwB,MAAM,4BAA4B;AAAA,MACzE,aAAa;AAAA,QACZ;AAAA,UACC,MAAM;AAAA,UACN,UAAU;AAAA,QACX;AAAA,MACD;AAAA,MACA,OAAO,CAAC,WAAW;AAAA,MACnB,SAAS,CAAC,GAAG,KAAK,GAAG;AAAA,MACrB,aAAa;AAAA,MACb,UAAU;AAAA,QACT,MAAM;AAAA,MACP;AAAA,MAEA,OAAO;AAAA,QACN,YAAY,CAAC,IAAI;AAAA,QACjB,eAAe;AAAA,UACd,IAAI,CAAC,YAAY;AAAA,QAClB;AAAA,QACA,WAAW;AAAA,UACV,sBAAsB;AAAA,YACrB;AAAA,cACC,KAAK;AAAA,YACN;AAAA,UACD;AAAA,QACD;AAAA,MACD;AAAA,MAEA,QAAQ,CAAC;AAAA,MAET,SAAS,CAAC,wCAAoB,WAAW;AAAA,MACzC,aAAa,CAAC,YAAY;AAAA,MAC1B,iBAAiB;AAAA,QAChB,wBAAwB;AAAA,QACxB,SACC;AAAA,MACF;AAAA,MACA,YAAY;AAAA,YACX,kDAA6B,CAAC,wCAAoB,aAAa,CAAC;AAAA,QAChE;AAAA,UACC,GAAG;AAAA,UACH,SAAS;AAAA,UACT,gBAAgB;AAAA,YACf,MAAM;AAAA,cACL,YAAY,CAAC,CAAC;AAAA,YACf;AAAA,UACD;AAAA,QACD;AAAA,QACA;AAAA,UACC,GAAG;AAAA,UACH,gBAAgB;AAAA,YACf,MAAM;AAAA,cACL,YAAY,CAAC,CAAC;AAAA,YACf;AAAA,UACD;AAAA,QACD;AAAA,QACA;AAAA,UACC,aAAa;AAAA,UACb,MAAM;AAAA,UACN,aAAa;AAAA,UACb,aAAa;AAAA,UACb,MAAM;AAAA,UACN,SAAS,CAAC;AAAA,UACV,SAAS;AAAA,YACR;AAAA,cACC,aAAa;AAAA,cACb,MAAM;AAAA,cACN,SAAS;AAAA,cACT,aACC;AAAA,cACD,MAAM;AAAA,cACN,SAAS;AAAA,gBACR;AAAA,kBACC,MAAM;AAAA,kBACN,OAAO;AAAA,gBACR;AAAA,gBACA;AAAA,kBACC,MAAM;AAAA,kBACN,OAAO;AAAA,gBACR;AAAA,gBACA;AAAA,kBACC,MAAM;AAAA,kBACN,OAAO;AAAA,gBACR;AAAA,gBACA;AAAA,kBACC,MAAM;AAAA,kBACN,OAAO;AAAA,gBACR;AAAA,gBACA;AAAA,kBACC,MAAM;AAAA,kBACN,OAAO;AAAA,gBACR;AAAA,cACD;AAAA,YACD;AAAA,YACA;AAAA,cACC,aAAa;AAAA,cACb,MAAM;AAAA,cACN,SAAS;AAAA,cACT,aAAa;AAAA,cACb,MAAM;AAAA,cACN,gBAAgB;AAAA,gBACf,MAAM;AAAA,kBACL,YAAY,CAAC,EAAE,MAAM,EAAE,KAAK,IAAI,EAAE,CAAC;AAAA,gBACpC;AAAA,cACD;AAAA,YACD;AAAA,YACA;AAAA,cACC,aAAa;AAAA,cACb,MAAM;AAAA,cACN,SAAS;AAAA,cACT,aAAa,EAAE,UAAU,KAAK;AAAA,cAC9B,aAAa;AAAA,cACb,MAAM;AAAA,YACP;AAAA,YACA;AAAA,cACC,aAAa;AAAA,cACb,MAAM;AAAA,cACN,SAAS;AAAA,cACT,aAAa;AAAA,cACb,MAAM;AAAA,YACP;AAAA,YACA;AAAA,cACC,aAAa;AAAA,cACb,MAAM;AAAA,cACN,SAAS;AAAA,cACT,aACC;AAAA,cACD,MAAM;AAAA,YACP;AAAA,UACD;AAAA,QACD;AAAA,MACD;AAAA,IACD;AAAA;AAAA,EAEA,MAAM,WAAuC,WAAwC;AACpF,SAAK,OAAO,MAAM,4BAA4B;AAC9C,UAAM,cAAc,MAAM,KAAK,eAAe,WAAW;AAEzD,UAAM,UAAU,KAAK,iBAAiB,WAAW,WAAW,CAAC,CAAC;AAQ9D,QAAI,QAAQ,YAAY,IAAI;AAC3B,cAAQ,UAAU;AAAA,IACnB;AAEA,UAAM,gBAA+B,CAAC;AACtC,QAAI,QAAQ,SAAS;AACpB,oBAAc,UAAU,QAAQ;AAAA,IACjC,WAAW,YAAY,KAAK;AAC3B,oBAAc,UAAU,YAAY;AAAA,IACrC;AAEA,QAAI,cAAc,SAAS;AAC1B,oBAAc,eAAe;AAAA,QAC5B,gBAAY,qCA
Ac,cAAc,WAAW,2BAA2B;AAAA,MAC/E;AAAA,IACD;AAEA,UAAM,aAAa,IAAI,+BAAiB;AAAA,MACvC,WAAW,KAAK,iBAAiB,SAAS,WAAW,wBAAwB;AAAA,MAC7E,cAAc,YAAY;AAAA,MAC1B,GAAG;AAAA,MACH;AAAA,IACD,CAAC;AAED,WAAO;AAAA,MACN,cAAU,8BAAW,YAAY,IAAI;AAAA,IACtC;AAAA,EACD;AACD;","names":[]}
@@ -275,8 +275,8 @@ class LmChatAnthropic {
 const modelName = version >= 1.3 ? this.getNodeParameter("model.value", itemIndex) : this.getNodeParameter("model", itemIndex);
 const options = this.getNodeParameter("options", itemIndex, {});
 let invocationKwargs = {};
-const tokensUsageParser = (
-const usage = llmOutput?.usage ?? {
+const tokensUsageParser = (result) => {
+const usage = result?.llmOutput?.usage ?? {
 input_tokens: 0,
 output_tokens: 0
 };
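This hunk is a small signature fix rather than a feature: the compiled token-usage callback previously received `llmOutput` directly, but it is now handed the whole LLM result and reads `result?.llmOutput?.usage` itself, presumably matching how the tracing callback now invokes it. A hedged TypeScript sketch of the updated callback follows; the return block is reconstructed from the unchanged lines visible in the embedded source in the accompanying source map, and the exact typings are assumptions.

```ts
// Sketch of the updated usage parser, assuming LangChain's LLMResult type.
// Only the parameter shape changed; the token arithmetic is unchanged.
import type { LLMResult } from '@langchain/core/outputs';

interface AnthropicUsage {
	input_tokens: number;
	output_tokens: number;
}

const tokensUsageParser = (result: LLMResult) => {
	// The callback now receives the full result and digs into llmOutput itself.
	const usage = (result?.llmOutput?.usage as AnthropicUsage | undefined) ?? {
		input_tokens: 0,
		output_tokens: 0,
	};
	return {
		completionTokens: usage.output_tokens,
		promptTokens: usage.input_tokens,
		totalTokens: usage.input_tokens + usage.output_tokens,
	};
};
```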
@@ -1 +1 @@
-
{"version":3,"sources":["../../../../nodes/llms/LMChatAnthropic/LmChatAnthropic.node.ts"],"sourcesContent":["import { ChatAnthropic } from '@langchain/anthropic';\nimport type { LLMResult } from '@langchain/core/outputs';\nimport {\n\tNodeConnectionTypes,\n\ttype INodePropertyOptions,\n\ttype INodeProperties,\n\ttype ISupplyDataFunctions,\n\ttype INodeType,\n\ttype INodeTypeDescription,\n\ttype SupplyData,\n} from 'n8n-workflow';\n\nimport { getProxyAgent } from '@utils/httpProxyAgent';\nimport { getConnectionHintNoticeField } from '@utils/sharedFields';\n\nimport { makeN8nLlmFailedAttemptHandler } from '../n8nLlmFailedAttemptHandler';\nimport { N8nLlmTracing } from '../N8nLlmTracing';\nimport { searchModels } from './methods/searchModels';\n\nconst modelField: INodeProperties = {\n\tdisplayName: 'Model',\n\tname: 'model',\n\ttype: 'options',\n\t// eslint-disable-next-line n8n-nodes-base/node-param-options-type-unsorted-items\n\toptions: [\n\t\t{\n\t\t\tname: 'Claude 3.5 Sonnet(20241022)',\n\t\t\tvalue: 'claude-3-5-sonnet-20241022',\n\t\t},\n\t\t{\n\t\t\tname: 'Claude 3 Opus(20240229)',\n\t\t\tvalue: 'claude-3-opus-20240229',\n\t\t},\n\t\t{\n\t\t\tname: 'Claude 3.5 Sonnet(20240620)',\n\t\t\tvalue: 'claude-3-5-sonnet-20240620',\n\t\t},\n\t\t{\n\t\t\tname: 'Claude 3 Sonnet(20240229)',\n\t\t\tvalue: 'claude-3-sonnet-20240229',\n\t\t},\n\t\t{\n\t\t\tname: 'Claude 3.5 Haiku(20241022)',\n\t\t\tvalue: 'claude-3-5-haiku-20241022',\n\t\t},\n\t\t{\n\t\t\tname: 'Claude 3 Haiku(20240307)',\n\t\t\tvalue: 'claude-3-haiku-20240307',\n\t\t},\n\t\t{\n\t\t\tname: 'LEGACY: Claude 2',\n\t\t\tvalue: 'claude-2',\n\t\t},\n\t\t{\n\t\t\tname: 'LEGACY: Claude 2.1',\n\t\t\tvalue: 'claude-2.1',\n\t\t},\n\t\t{\n\t\t\tname: 'LEGACY: Claude Instant 1.2',\n\t\t\tvalue: 'claude-instant-1.2',\n\t\t},\n\t\t{\n\t\t\tname: 'LEGACY: Claude Instant 1',\n\t\t\tvalue: 'claude-instant-1',\n\t\t},\n\t],\n\tdescription:\n\t\t'The model which will generate the completion. 
<a href=\"https://docs.anthropic.com/claude/docs/models-overview\">Learn more</a>.',\n\tdefault: 'claude-2',\n};\n\nconst MIN_THINKING_BUDGET = 1024;\nconst DEFAULT_MAX_TOKENS = 4096;\nexport class LmChatAnthropic implements INodeType {\n\tmethods = {\n\t\tlistSearch: {\n\t\t\tsearchModels,\n\t\t},\n\t};\n\n\tdescription: INodeTypeDescription = {\n\t\tdisplayName: 'Anthropic Chat Model',\n\n\t\tname: 'lmChatAnthropic',\n\t\ticon: 'file:anthropic.svg',\n\t\tgroup: ['transform'],\n\t\tversion: [1, 1.1, 1.2, 1.3],\n\t\tdefaultVersion: 1.3,\n\t\tdescription: 'Language Model Anthropic',\n\t\tdefaults: {\n\t\t\tname: 'Anthropic Chat Model',\n\t\t},\n\t\tcodex: {\n\t\t\tcategories: ['AI'],\n\t\t\tsubcategories: {\n\t\t\t\tAI: ['Language Models', 'Root Nodes'],\n\t\t\t\t'Language Models': ['Chat Models (Recommended)'],\n\t\t\t},\n\t\t\tresources: {\n\t\t\t\tprimaryDocumentation: [\n\t\t\t\t\t{\n\t\t\t\t\t\turl: 'https://docs.n8n.io/integrations/builtin/cluster-nodes/sub-nodes/n8n-nodes-langchain.lmchatanthropic/',\n\t\t\t\t\t},\n\t\t\t\t],\n\t\t\t},\n\t\t\talias: ['claude', 'sonnet', 'opus'],\n\t\t},\n\n\t\tinputs: [],\n\n\t\toutputs: [NodeConnectionTypes.AiLanguageModel],\n\t\toutputNames: ['Model'],\n\t\tcredentials: [\n\t\t\t{\n\t\t\t\tname: 'anthropicApi',\n\t\t\t\trequired: true,\n\t\t\t},\n\t\t],\n\t\tproperties: [\n\t\t\tgetConnectionHintNoticeField([NodeConnectionTypes.AiChain, NodeConnectionTypes.AiChain]),\n\t\t\t{\n\t\t\t\t...modelField,\n\t\t\t\tdisplayOptions: {\n\t\t\t\t\tshow: {\n\t\t\t\t\t\t'@version': [1],\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t},\n\t\t\t{\n\t\t\t\t...modelField,\n\t\t\t\tdefault: 'claude-3-sonnet-20240229',\n\t\t\t\tdisplayOptions: {\n\t\t\t\t\tshow: {\n\t\t\t\t\t\t'@version': [1.1],\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t},\n\t\t\t{\n\t\t\t\t...modelField,\n\t\t\t\tdefault: 'claude-3-5-sonnet-20240620',\n\t\t\t\toptions: (modelField.options ?? []).filter(\n\t\t\t\t\t(o): o is INodePropertyOptions => 'name' in o && !o.name.toString().startsWith('LEGACY'),\n\t\t\t\t),\n\t\t\t\tdisplayOptions: {\n\t\t\t\t\tshow: {\n\t\t\t\t\t\t'@version': [{ _cnd: { lte: 1.2 } }],\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t},\n\t\t\t{\n\t\t\t\tdisplayName: 'Model',\n\t\t\t\tname: 'model',\n\t\t\t\ttype: 'resourceLocator',\n\t\t\t\tdefault: {\n\t\t\t\t\tmode: 'list',\n\t\t\t\t\tvalue: 'claude-sonnet-4-20250514',\n\t\t\t\t\tcachedResultName: 'Claude 4 Sonnet',\n\t\t\t\t},\n\t\t\t\trequired: true,\n\t\t\t\tmodes: [\n\t\t\t\t\t{\n\t\t\t\t\t\tdisplayName: 'From List',\n\t\t\t\t\t\tname: 'list',\n\t\t\t\t\t\ttype: 'list',\n\t\t\t\t\t\tplaceholder: 'Select a model...',\n\t\t\t\t\t\ttypeOptions: {\n\t\t\t\t\t\t\tsearchListMethod: 'searchModels',\n\t\t\t\t\t\t\tsearchable: true,\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t\t{\n\t\t\t\t\t\tdisplayName: 'ID',\n\t\t\t\t\t\tname: 'id',\n\t\t\t\t\t\ttype: 'string',\n\t\t\t\t\t\tplaceholder: 'Claude Sonnet',\n\t\t\t\t\t},\n\t\t\t\t],\n\t\t\t\tdescription:\n\t\t\t\t\t'The model. Choose from the list, or specify an ID. 
<a href=\"https://docs.anthropic.com/claude/docs/models-overview\">Learn more</a>.',\n\t\t\t\tdisplayOptions: {\n\t\t\t\t\tshow: {\n\t\t\t\t\t\t'@version': [{ _cnd: { gte: 1.3 } }],\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t},\n\t\t\t{\n\t\t\t\tdisplayName: 'Options',\n\t\t\t\tname: 'options',\n\t\t\t\tplaceholder: 'Add Option',\n\t\t\t\tdescription: 'Additional options to add',\n\t\t\t\ttype: 'collection',\n\t\t\t\tdefault: {},\n\t\t\t\toptions: [\n\t\t\t\t\t{\n\t\t\t\t\t\tdisplayName: 'Maximum Number of Tokens',\n\t\t\t\t\t\tname: 'maxTokensToSample',\n\t\t\t\t\t\tdefault: DEFAULT_MAX_TOKENS,\n\t\t\t\t\t\tdescription: 'The maximum number of tokens to generate in the completion',\n\t\t\t\t\t\ttype: 'number',\n\t\t\t\t\t},\n\t\t\t\t\t{\n\t\t\t\t\t\tdisplayName: 'Sampling Temperature',\n\t\t\t\t\t\tname: 'temperature',\n\t\t\t\t\t\tdefault: 0.7,\n\t\t\t\t\t\ttypeOptions: { maxValue: 1, minValue: 0, numberPrecision: 1 },\n\t\t\t\t\t\tdescription:\n\t\t\t\t\t\t\t'Controls randomness: Lowering results in less random completions. As the temperature approaches zero, the model will become deterministic and repetitive.',\n\t\t\t\t\t\ttype: 'number',\n\t\t\t\t\t\tdisplayOptions: {\n\t\t\t\t\t\t\thide: {\n\t\t\t\t\t\t\t\tthinking: [true],\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t\t{\n\t\t\t\t\t\tdisplayName: 'Top K',\n\t\t\t\t\t\tname: 'topK',\n\t\t\t\t\t\tdefault: -1,\n\t\t\t\t\t\ttypeOptions: { maxValue: 1, minValue: -1, numberPrecision: 1 },\n\t\t\t\t\t\tdescription:\n\t\t\t\t\t\t\t'Used to remove \"long tail\" low probability responses. Defaults to -1, which disables it.',\n\t\t\t\t\t\ttype: 'number',\n\t\t\t\t\t\tdisplayOptions: {\n\t\t\t\t\t\t\thide: {\n\t\t\t\t\t\t\t\tthinking: [true],\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t\t{\n\t\t\t\t\t\tdisplayName: 'Top P',\n\t\t\t\t\t\tname: 'topP',\n\t\t\t\t\t\tdefault: 1,\n\t\t\t\t\t\ttypeOptions: { maxValue: 1, minValue: 0, numberPrecision: 1 },\n\t\t\t\t\t\tdescription:\n\t\t\t\t\t\t\t'Controls diversity via nucleus sampling: 0.5 means half of all likelihood-weighted options are considered. We generally recommend altering this or temperature but not both.',\n\t\t\t\t\t\ttype: 'number',\n\t\t\t\t\t\tdisplayOptions: {\n\t\t\t\t\t\t\thide: {\n\t\t\t\t\t\t\t\tthinking: [true],\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t\t{\n\t\t\t\t\t\tdisplayName: 'Enable Thinking',\n\t\t\t\t\t\tname: 'thinking',\n\t\t\t\t\t\ttype: 'boolean',\n\t\t\t\t\t\tdefault: false,\n\t\t\t\t\t\tdescription: 'Whether to enable thinking mode for the model',\n\t\t\t\t\t},\n\t\t\t\t\t{\n\t\t\t\t\t\tdisplayName: 'Thinking Budget (Tokens)',\n\t\t\t\t\t\tname: 'thinkingBudget',\n\t\t\t\t\t\ttype: 'number',\n\t\t\t\t\t\tdefault: MIN_THINKING_BUDGET,\n\t\t\t\t\t\tdescription: 'The maximum number of tokens to use for thinking',\n\t\t\t\t\t\tdisplayOptions: {\n\t\t\t\t\t\t\tshow: {\n\t\t\t\t\t\t\t\tthinking: [true],\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t],\n\t\t\t},\n\t\t],\n\t};\n\n\tasync supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise<SupplyData> {\n\t\tconst credentials = await this.getCredentials<{ url?: string; apiKey?: string }>(\n\t\t\t'anthropicApi',\n\t\t);\n\t\tconst baseURL = credentials.url ?? 'https://api.anthropic.com';\n\t\tconst version = this.getNode().typeVersion;\n\t\tconst modelName =\n\t\t\tversion >= 1.3\n\t\t\t\t? 
(this.getNodeParameter('model.value', itemIndex) as string)\n\t\t\t\t: (this.getNodeParameter('model', itemIndex) as string);\n\n\t\tconst options = this.getNodeParameter('options', itemIndex, {}) as {\n\t\t\tmaxTokensToSample?: number;\n\t\t\ttemperature: number;\n\t\t\ttopK?: number;\n\t\t\ttopP?: number;\n\t\t\tthinking?: boolean;\n\t\t\tthinkingBudget?: number;\n\t\t};\n\t\tlet invocationKwargs = {};\n\n\t\tconst tokensUsageParser = (llmOutput: LLMResult['llmOutput']) => {\n\t\t\tconst usage = (llmOutput?.usage as { input_tokens: number; output_tokens: number }) ?? {\n\t\t\t\tinput_tokens: 0,\n\t\t\t\toutput_tokens: 0,\n\t\t\t};\n\t\t\treturn {\n\t\t\t\tcompletionTokens: usage.output_tokens,\n\t\t\t\tpromptTokens: usage.input_tokens,\n\t\t\t\ttotalTokens: usage.input_tokens + usage.output_tokens,\n\t\t\t};\n\t\t};\n\n\t\tif (options.thinking) {\n\t\t\tinvocationKwargs = {\n\t\t\t\tthinking: {\n\t\t\t\t\ttype: 'enabled',\n\t\t\t\t\t// If thinking is enabled, we need to set a budget.\n\t\t\t\t\t// We fallback to 1024 as that is the minimum\n\t\t\t\t\tbudget_tokens: options.thinkingBudget ?? MIN_THINKING_BUDGET,\n\t\t\t\t},\n\t\t\t\t// The default Langchain max_tokens is -1 (no limit) but Anthropic requires a number\n\t\t\t\t// higher than budget_tokens\n\t\t\t\tmax_tokens: options.maxTokensToSample ?? DEFAULT_MAX_TOKENS,\n\t\t\t\t// These need to be unset when thinking is enabled.\n\t\t\t\t// Because the invocationKwargs will override the model options\n\t\t\t\t// we can pass options to the model and then override them here\n\t\t\t\ttop_k: undefined,\n\t\t\t\ttop_p: undefined,\n\t\t\t\ttemperature: undefined,\n\t\t\t};\n\t\t}\n\n\t\tconst model = new ChatAnthropic({\n\t\t\tanthropicApiKey: credentials.apiKey,\n\t\t\tmodel: modelName,\n\t\t\tanthropicApiUrl: baseURL,\n\t\t\tmaxTokens: options.maxTokensToSample,\n\t\t\ttemperature: options.temperature,\n\t\t\ttopK: options.topK,\n\t\t\ttopP: options.topP,\n\t\t\tcallbacks: [new N8nLlmTracing(this, { tokensUsageParser })],\n\t\t\tonFailedAttempt: makeN8nLlmFailedAttemptHandler(this),\n\t\t\tinvocationKwargs,\n\t\t\tclientOptions: {\n\t\t\t\tfetchOptions: {\n\t\t\t\t\tdispatcher: getProxyAgent(baseURL),\n\t\t\t\t},\n\t\t\t},\n\t\t});\n\n\t\treturn {\n\t\t\tresponse: 
model,\n\t\t};\n\t}\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,uBAA8B;AAE9B,0BAQO;AAEP,4BAA8B;AAC9B,0BAA6C;AAE7C,wCAA+C;AAC/C,2BAA8B;AAC9B,0BAA6B;AAE7B,MAAM,aAA8B;AAAA,EACnC,aAAa;AAAA,EACb,MAAM;AAAA,EACN,MAAM;AAAA;AAAA,EAEN,SAAS;AAAA,IACR;AAAA,MACC,MAAM;AAAA,MACN,OAAO;AAAA,IACR;AAAA,IACA;AAAA,MACC,MAAM;AAAA,MACN,OAAO;AAAA,IACR;AAAA,IACA;AAAA,MACC,MAAM;AAAA,MACN,OAAO;AAAA,IACR;AAAA,IACA;AAAA,MACC,MAAM;AAAA,MACN,OAAO;AAAA,IACR;AAAA,IACA;AAAA,MACC,MAAM;AAAA,MACN,OAAO;AAAA,IACR;AAAA,IACA;AAAA,MACC,MAAM;AAAA,MACN,OAAO;AAAA,IACR;AAAA,IACA;AAAA,MACC,MAAM;AAAA,MACN,OAAO;AAAA,IACR;AAAA,IACA;AAAA,MACC,MAAM;AAAA,MACN,OAAO;AAAA,IACR;AAAA,IACA;AAAA,MACC,MAAM;AAAA,MACN,OAAO;AAAA,IACR;AAAA,IACA;AAAA,MACC,MAAM;AAAA,MACN,OAAO;AAAA,IACR;AAAA,EACD;AAAA,EACA,aACC;AAAA,EACD,SAAS;AACV;AAEA,MAAM,sBAAsB;AAC5B,MAAM,qBAAqB;AACpB,MAAM,gBAAqC;AAAA,EAA3C;AACN,mBAAU;AAAA,MACT,YAAY;AAAA,QACX;AAAA,MACD;AAAA,IACD;AAEA,uBAAoC;AAAA,MACnC,aAAa;AAAA,MAEb,MAAM;AAAA,MACN,MAAM;AAAA,MACN,OAAO,CAAC,WAAW;AAAA,MACnB,SAAS,CAAC,GAAG,KAAK,KAAK,GAAG;AAAA,MAC1B,gBAAgB;AAAA,MAChB,aAAa;AAAA,MACb,UAAU;AAAA,QACT,MAAM;AAAA,MACP;AAAA,MACA,OAAO;AAAA,QACN,YAAY,CAAC,IAAI;AAAA,QACjB,eAAe;AAAA,UACd,IAAI,CAAC,mBAAmB,YAAY;AAAA,UACpC,mBAAmB,CAAC,2BAA2B;AAAA,QAChD;AAAA,QACA,WAAW;AAAA,UACV,sBAAsB;AAAA,YACrB;AAAA,cACC,KAAK;AAAA,YACN;AAAA,UACD;AAAA,QACD;AAAA,QACA,OAAO,CAAC,UAAU,UAAU,MAAM;AAAA,MACnC;AAAA,MAEA,QAAQ,CAAC;AAAA,MAET,SAAS,CAAC,wCAAoB,eAAe;AAAA,MAC7C,aAAa,CAAC,OAAO;AAAA,MACrB,aAAa;AAAA,QACZ;AAAA,UACC,MAAM;AAAA,UACN,UAAU;AAAA,QACX;AAAA,MACD;AAAA,MACA,YAAY;AAAA,YACX,kDAA6B,CAAC,wCAAoB,SAAS,wCAAoB,OAAO,CAAC;AAAA,QACvF;AAAA,UACC,GAAG;AAAA,UACH,gBAAgB;AAAA,YACf,MAAM;AAAA,cACL,YAAY,CAAC,CAAC;AAAA,YACf;AAAA,UACD;AAAA,QACD;AAAA,QACA;AAAA,UACC,GAAG;AAAA,UACH,SAAS;AAAA,UACT,gBAAgB;AAAA,YACf,MAAM;AAAA,cACL,YAAY,CAAC,GAAG;AAAA,YACjB;AAAA,UACD;AAAA,QACD;AAAA,QACA;AAAA,UACC,GAAG;AAAA,UACH,SAAS;AAAA,UACT,UAAU,WAAW,WAAW,CAAC,GAAG;AAAA,YACnC,CAAC,MAAiC,UAAU,KAAK,CAAC,EAAE,KAAK,SAAS,EAAE,WAAW,QAAQ;AAAA,UACxF;AAAA,UACA,gBAAgB;AAAA,YACf,MAAM;AAAA,cACL,YAAY,CAAC,EAAE,MAAM,EAAE,KAAK,IAAI,EAAE,CAAC;AAAA,YACpC;AAAA,UACD;AAAA,QACD;AAAA,QACA;AAAA,UACC,aAAa;AAAA,UACb,MAAM;AAAA,UACN,MAAM;AAAA,UACN,SAAS;AAAA,YACR,MAAM;AAAA,YACN,OAAO;AAAA,YACP,kBAAkB;AAAA,UACnB;AAAA,UACA,UAAU;AAAA,UACV,OAAO;AAAA,YACN;AAAA,cACC,aAAa;AAAA,cACb,MAAM;AAAA,cACN,MAAM;AAAA,cACN,aAAa;AAAA,cACb,aAAa;AAAA,gBACZ,kBAAkB;AAAA,gBAClB,YAAY;AAAA,cACb;AAAA,YACD;AAAA,YACA;AAAA,cACC,aAAa;AAAA,cACb,MAAM;AAAA,cACN,MAAM;AAAA,cACN,aAAa;AAAA,YACd;AAAA,UACD;AAAA,UACA,aACC;AAAA,UACD,gBAAgB;AAAA,YACf,MAAM;AAAA,cACL,YAAY,CAAC,EAAE,MAAM,EAAE,KAAK,IAAI,EAAE,CAAC;AAAA,YACpC;AAAA,UACD;AAAA,QACD;AAAA,QACA;AAAA,UACC,aAAa;AAAA,UACb,MAAM;AAAA,UACN,aAAa;AAAA,UACb,aAAa;AAAA,UACb,MAAM;AAAA,UACN,SAAS,CAAC;AAAA,UACV,SAAS;AAAA,YACR;AAAA,cACC,aAAa;AAAA,cACb,MAAM;AAAA,cACN,SAAS;AAAA,cACT,aAAa;AAAA,cACb,MAAM;AAAA,YACP;AAAA,YACA;AAAA,cACC,aAAa;AAAA,cACb,MAAM;AAAA,cACN,SAAS;AAAA,cACT,aAAa,EAAE,UAAU,GAAG,UAAU,GAAG,iBAAiB,EAAE;AAAA,cAC5D,aACC;AAAA,cACD,MAAM;AAAA,cACN,gBAAgB;AAAA,gBACf,MAAM;AAAA,kBACL,UAAU,CAAC,IAAI;AAAA,gBAChB;AAAA,cACD;AAAA,YACD;AAAA,YACA;AAAA,cACC,aAAa;AAAA,cACb,MAAM;AAAA,cACN,SAAS;AAAA,cACT,aAAa,EAAE,UAAU,GAAG,UAAU,IAAI,iBAAiB,EAAE;AAAA,cAC7D,aACC;AAAA,cACD,MAAM;AAAA,cACN,gBAAgB;AAAA,gBACf,MAAM;AAAA,kBACL,UAAU,CAAC,IAAI;AAAA,gBAChB;AAAA,cACD;AAAA,YACD;AAAA,YACA;AAAA,cACC,aAAa;AAAA,cACb,MAAM;AAAA,cACN,SAAS;AAAA,cACT,aAAa,EAAE,UAAU,GAAG,UAAU,GAAG,iBAAiB,EAAE;AAAA,cAC5D,aACC;AAAA,cACD,MAAM;AAAA,cACN,gBAAgB;AAAA,gBACf,MAAM;AAAA,kBACL,UAAU,CAAC,IAAI;AAAA,gBAChB;AAAA,cACD;AAAA,YACD;AAAA,YACA;AAAA,cACC,a
AAa;AAAA,cACb,MAAM;AAAA,cACN,MAAM;AAAA,cACN,SAAS;AAAA,cACT,aAAa;AAAA,YACd;AAAA,YACA;AAAA,cACC,aAAa;AAAA,cACb,MAAM;AAAA,cACN,MAAM;AAAA,cACN,SAAS;AAAA,cACT,aAAa;AAAA,cACb,gBAAgB;AAAA,gBACf,MAAM;AAAA,kBACL,UAAU,CAAC,IAAI;AAAA,gBAChB;AAAA,cACD;AAAA,YACD;AAAA,UACD;AAAA,QACD;AAAA,MACD;AAAA,IACD;AAAA;AAAA,EAEA,MAAM,WAAuC,WAAwC;AACpF,UAAM,cAAc,MAAM,KAAK;AAAA,MAC9B;AAAA,IACD;AACA,UAAM,UAAU,YAAY,OAAO;AACnC,UAAM,UAAU,KAAK,QAAQ,EAAE;AAC/B,UAAM,YACL,WAAW,MACP,KAAK,iBAAiB,eAAe,SAAS,IAC9C,KAAK,iBAAiB,SAAS,SAAS;AAE7C,UAAM,UAAU,KAAK,iBAAiB,WAAW,WAAW,CAAC,CAAC;AAQ9D,QAAI,mBAAmB,CAAC;AAExB,UAAM,oBAAoB,CAAC,cAAsC;AAChE,YAAM,QAAS,WAAW,SAA6D;AAAA,QACtF,cAAc;AAAA,QACd,eAAe;AAAA,MAChB;AACA,aAAO;AAAA,QACN,kBAAkB,MAAM;AAAA,QACxB,cAAc,MAAM;AAAA,QACpB,aAAa,MAAM,eAAe,MAAM;AAAA,MACzC;AAAA,IACD;AAEA,QAAI,QAAQ,UAAU;AACrB,yBAAmB;AAAA,QAClB,UAAU;AAAA,UACT,MAAM;AAAA;AAAA;AAAA,UAGN,eAAe,QAAQ,kBAAkB;AAAA,QAC1C;AAAA;AAAA;AAAA,QAGA,YAAY,QAAQ,qBAAqB;AAAA;AAAA;AAAA;AAAA,QAIzC,OAAO;AAAA,QACP,OAAO;AAAA,QACP,aAAa;AAAA,MACd;AAAA,IACD;AAEA,UAAM,QAAQ,IAAI,+BAAc;AAAA,MAC/B,iBAAiB,YAAY;AAAA,MAC7B,OAAO;AAAA,MACP,iBAAiB;AAAA,MACjB,WAAW,QAAQ;AAAA,MACnB,aAAa,QAAQ;AAAA,MACrB,MAAM,QAAQ;AAAA,MACd,MAAM,QAAQ;AAAA,MACd,WAAW,CAAC,IAAI,mCAAc,MAAM,EAAE,kBAAkB,CAAC,CAAC;AAAA,MAC1D,qBAAiB,kEAA+B,IAAI;AAAA,MACpD;AAAA,MACA,eAAe;AAAA,QACd,cAAc;AAAA,UACb,gBAAY,qCAAc,OAAO;AAAA,QAClC;AAAA,MACD;AAAA,IACD,CAAC;AAED,WAAO;AAAA,MACN,UAAU;AAAA,IACX;AAAA,EACD;AACD;","names":[]}
+
{"version":3,"sources":["../../../../nodes/llms/LMChatAnthropic/LmChatAnthropic.node.ts"],"sourcesContent":["import { ChatAnthropic } from '@langchain/anthropic';\nimport type { LLMResult } from '@langchain/core/outputs';\nimport {\n\tNodeConnectionTypes,\n\ttype INodePropertyOptions,\n\ttype INodeProperties,\n\ttype ISupplyDataFunctions,\n\ttype INodeType,\n\ttype INodeTypeDescription,\n\ttype SupplyData,\n} from 'n8n-workflow';\n\nimport { getProxyAgent } from '@utils/httpProxyAgent';\nimport { getConnectionHintNoticeField } from '@utils/sharedFields';\n\nimport { makeN8nLlmFailedAttemptHandler } from '../n8nLlmFailedAttemptHandler';\nimport { N8nLlmTracing } from '../N8nLlmTracing';\nimport { searchModels } from './methods/searchModels';\n\nconst modelField: INodeProperties = {\n\tdisplayName: 'Model',\n\tname: 'model',\n\ttype: 'options',\n\t// eslint-disable-next-line n8n-nodes-base/node-param-options-type-unsorted-items\n\toptions: [\n\t\t{\n\t\t\tname: 'Claude 3.5 Sonnet(20241022)',\n\t\t\tvalue: 'claude-3-5-sonnet-20241022',\n\t\t},\n\t\t{\n\t\t\tname: 'Claude 3 Opus(20240229)',\n\t\t\tvalue: 'claude-3-opus-20240229',\n\t\t},\n\t\t{\n\t\t\tname: 'Claude 3.5 Sonnet(20240620)',\n\t\t\tvalue: 'claude-3-5-sonnet-20240620',\n\t\t},\n\t\t{\n\t\t\tname: 'Claude 3 Sonnet(20240229)',\n\t\t\tvalue: 'claude-3-sonnet-20240229',\n\t\t},\n\t\t{\n\t\t\tname: 'Claude 3.5 Haiku(20241022)',\n\t\t\tvalue: 'claude-3-5-haiku-20241022',\n\t\t},\n\t\t{\n\t\t\tname: 'Claude 3 Haiku(20240307)',\n\t\t\tvalue: 'claude-3-haiku-20240307',\n\t\t},\n\t\t{\n\t\t\tname: 'LEGACY: Claude 2',\n\t\t\tvalue: 'claude-2',\n\t\t},\n\t\t{\n\t\t\tname: 'LEGACY: Claude 2.1',\n\t\t\tvalue: 'claude-2.1',\n\t\t},\n\t\t{\n\t\t\tname: 'LEGACY: Claude Instant 1.2',\n\t\t\tvalue: 'claude-instant-1.2',\n\t\t},\n\t\t{\n\t\t\tname: 'LEGACY: Claude Instant 1',\n\t\t\tvalue: 'claude-instant-1',\n\t\t},\n\t],\n\tdescription:\n\t\t'The model which will generate the completion. 
<a href=\"https://docs.anthropic.com/claude/docs/models-overview\">Learn more</a>.',\n\tdefault: 'claude-2',\n};\n\nconst MIN_THINKING_BUDGET = 1024;\nconst DEFAULT_MAX_TOKENS = 4096;\nexport class LmChatAnthropic implements INodeType {\n\tmethods = {\n\t\tlistSearch: {\n\t\t\tsearchModels,\n\t\t},\n\t};\n\n\tdescription: INodeTypeDescription = {\n\t\tdisplayName: 'Anthropic Chat Model',\n\n\t\tname: 'lmChatAnthropic',\n\t\ticon: 'file:anthropic.svg',\n\t\tgroup: ['transform'],\n\t\tversion: [1, 1.1, 1.2, 1.3],\n\t\tdefaultVersion: 1.3,\n\t\tdescription: 'Language Model Anthropic',\n\t\tdefaults: {\n\t\t\tname: 'Anthropic Chat Model',\n\t\t},\n\t\tcodex: {\n\t\t\tcategories: ['AI'],\n\t\t\tsubcategories: {\n\t\t\t\tAI: ['Language Models', 'Root Nodes'],\n\t\t\t\t'Language Models': ['Chat Models (Recommended)'],\n\t\t\t},\n\t\t\tresources: {\n\t\t\t\tprimaryDocumentation: [\n\t\t\t\t\t{\n\t\t\t\t\t\turl: 'https://docs.n8n.io/integrations/builtin/cluster-nodes/sub-nodes/n8n-nodes-langchain.lmchatanthropic/',\n\t\t\t\t\t},\n\t\t\t\t],\n\t\t\t},\n\t\t\talias: ['claude', 'sonnet', 'opus'],\n\t\t},\n\n\t\tinputs: [],\n\n\t\toutputs: [NodeConnectionTypes.AiLanguageModel],\n\t\toutputNames: ['Model'],\n\t\tcredentials: [\n\t\t\t{\n\t\t\t\tname: 'anthropicApi',\n\t\t\t\trequired: true,\n\t\t\t},\n\t\t],\n\t\tproperties: [\n\t\t\tgetConnectionHintNoticeField([NodeConnectionTypes.AiChain, NodeConnectionTypes.AiChain]),\n\t\t\t{\n\t\t\t\t...modelField,\n\t\t\t\tdisplayOptions: {\n\t\t\t\t\tshow: {\n\t\t\t\t\t\t'@version': [1],\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t},\n\t\t\t{\n\t\t\t\t...modelField,\n\t\t\t\tdefault: 'claude-3-sonnet-20240229',\n\t\t\t\tdisplayOptions: {\n\t\t\t\t\tshow: {\n\t\t\t\t\t\t'@version': [1.1],\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t},\n\t\t\t{\n\t\t\t\t...modelField,\n\t\t\t\tdefault: 'claude-3-5-sonnet-20240620',\n\t\t\t\toptions: (modelField.options ?? []).filter(\n\t\t\t\t\t(o): o is INodePropertyOptions => 'name' in o && !o.name.toString().startsWith('LEGACY'),\n\t\t\t\t),\n\t\t\t\tdisplayOptions: {\n\t\t\t\t\tshow: {\n\t\t\t\t\t\t'@version': [{ _cnd: { lte: 1.2 } }],\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t},\n\t\t\t{\n\t\t\t\tdisplayName: 'Model',\n\t\t\t\tname: 'model',\n\t\t\t\ttype: 'resourceLocator',\n\t\t\t\tdefault: {\n\t\t\t\t\tmode: 'list',\n\t\t\t\t\tvalue: 'claude-sonnet-4-20250514',\n\t\t\t\t\tcachedResultName: 'Claude 4 Sonnet',\n\t\t\t\t},\n\t\t\t\trequired: true,\n\t\t\t\tmodes: [\n\t\t\t\t\t{\n\t\t\t\t\t\tdisplayName: 'From List',\n\t\t\t\t\t\tname: 'list',\n\t\t\t\t\t\ttype: 'list',\n\t\t\t\t\t\tplaceholder: 'Select a model...',\n\t\t\t\t\t\ttypeOptions: {\n\t\t\t\t\t\t\tsearchListMethod: 'searchModels',\n\t\t\t\t\t\t\tsearchable: true,\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t\t{\n\t\t\t\t\t\tdisplayName: 'ID',\n\t\t\t\t\t\tname: 'id',\n\t\t\t\t\t\ttype: 'string',\n\t\t\t\t\t\tplaceholder: 'Claude Sonnet',\n\t\t\t\t\t},\n\t\t\t\t],\n\t\t\t\tdescription:\n\t\t\t\t\t'The model. Choose from the list, or specify an ID. 
<a href=\"https://docs.anthropic.com/claude/docs/models-overview\">Learn more</a>.',\n\t\t\t\tdisplayOptions: {\n\t\t\t\t\tshow: {\n\t\t\t\t\t\t'@version': [{ _cnd: { gte: 1.3 } }],\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t},\n\t\t\t{\n\t\t\t\tdisplayName: 'Options',\n\t\t\t\tname: 'options',\n\t\t\t\tplaceholder: 'Add Option',\n\t\t\t\tdescription: 'Additional options to add',\n\t\t\t\ttype: 'collection',\n\t\t\t\tdefault: {},\n\t\t\t\toptions: [\n\t\t\t\t\t{\n\t\t\t\t\t\tdisplayName: 'Maximum Number of Tokens',\n\t\t\t\t\t\tname: 'maxTokensToSample',\n\t\t\t\t\t\tdefault: DEFAULT_MAX_TOKENS,\n\t\t\t\t\t\tdescription: 'The maximum number of tokens to generate in the completion',\n\t\t\t\t\t\ttype: 'number',\n\t\t\t\t\t},\n\t\t\t\t\t{\n\t\t\t\t\t\tdisplayName: 'Sampling Temperature',\n\t\t\t\t\t\tname: 'temperature',\n\t\t\t\t\t\tdefault: 0.7,\n\t\t\t\t\t\ttypeOptions: { maxValue: 1, minValue: 0, numberPrecision: 1 },\n\t\t\t\t\t\tdescription:\n\t\t\t\t\t\t\t'Controls randomness: Lowering results in less random completions. As the temperature approaches zero, the model will become deterministic and repetitive.',\n\t\t\t\t\t\ttype: 'number',\n\t\t\t\t\t\tdisplayOptions: {\n\t\t\t\t\t\t\thide: {\n\t\t\t\t\t\t\t\tthinking: [true],\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t\t{\n\t\t\t\t\t\tdisplayName: 'Top K',\n\t\t\t\t\t\tname: 'topK',\n\t\t\t\t\t\tdefault: -1,\n\t\t\t\t\t\ttypeOptions: { maxValue: 1, minValue: -1, numberPrecision: 1 },\n\t\t\t\t\t\tdescription:\n\t\t\t\t\t\t\t'Used to remove \"long tail\" low probability responses. Defaults to -1, which disables it.',\n\t\t\t\t\t\ttype: 'number',\n\t\t\t\t\t\tdisplayOptions: {\n\t\t\t\t\t\t\thide: {\n\t\t\t\t\t\t\t\tthinking: [true],\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t\t{\n\t\t\t\t\t\tdisplayName: 'Top P',\n\t\t\t\t\t\tname: 'topP',\n\t\t\t\t\t\tdefault: 1,\n\t\t\t\t\t\ttypeOptions: { maxValue: 1, minValue: 0, numberPrecision: 1 },\n\t\t\t\t\t\tdescription:\n\t\t\t\t\t\t\t'Controls diversity via nucleus sampling: 0.5 means half of all likelihood-weighted options are considered. We generally recommend altering this or temperature but not both.',\n\t\t\t\t\t\ttype: 'number',\n\t\t\t\t\t\tdisplayOptions: {\n\t\t\t\t\t\t\thide: {\n\t\t\t\t\t\t\t\tthinking: [true],\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t\t{\n\t\t\t\t\t\tdisplayName: 'Enable Thinking',\n\t\t\t\t\t\tname: 'thinking',\n\t\t\t\t\t\ttype: 'boolean',\n\t\t\t\t\t\tdefault: false,\n\t\t\t\t\t\tdescription: 'Whether to enable thinking mode for the model',\n\t\t\t\t\t},\n\t\t\t\t\t{\n\t\t\t\t\t\tdisplayName: 'Thinking Budget (Tokens)',\n\t\t\t\t\t\tname: 'thinkingBudget',\n\t\t\t\t\t\ttype: 'number',\n\t\t\t\t\t\tdefault: MIN_THINKING_BUDGET,\n\t\t\t\t\t\tdescription: 'The maximum number of tokens to use for thinking',\n\t\t\t\t\t\tdisplayOptions: {\n\t\t\t\t\t\t\tshow: {\n\t\t\t\t\t\t\t\tthinking: [true],\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t],\n\t\t\t},\n\t\t],\n\t};\n\n\tasync supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise<SupplyData> {\n\t\tconst credentials = await this.getCredentials<{ url?: string; apiKey?: string }>(\n\t\t\t'anthropicApi',\n\t\t);\n\t\tconst baseURL = credentials.url ?? 'https://api.anthropic.com';\n\t\tconst version = this.getNode().typeVersion;\n\t\tconst modelName =\n\t\t\tversion >= 1.3\n\t\t\t\t? 
(this.getNodeParameter('model.value', itemIndex) as string)\n\t\t\t\t: (this.getNodeParameter('model', itemIndex) as string);\n\n\t\tconst options = this.getNodeParameter('options', itemIndex, {}) as {\n\t\t\tmaxTokensToSample?: number;\n\t\t\ttemperature: number;\n\t\t\ttopK?: number;\n\t\t\ttopP?: number;\n\t\t\tthinking?: boolean;\n\t\t\tthinkingBudget?: number;\n\t\t};\n\t\tlet invocationKwargs = {};\n\n\t\tconst tokensUsageParser = (result: LLMResult) => {\n\t\t\tconst usage = (result?.llmOutput?.usage as {\n\t\t\t\tinput_tokens: number;\n\t\t\t\toutput_tokens: number;\n\t\t\t}) ?? {\n\t\t\t\tinput_tokens: 0,\n\t\t\t\toutput_tokens: 0,\n\t\t\t};\n\t\t\treturn {\n\t\t\t\tcompletionTokens: usage.output_tokens,\n\t\t\t\tpromptTokens: usage.input_tokens,\n\t\t\t\ttotalTokens: usage.input_tokens + usage.output_tokens,\n\t\t\t};\n\t\t};\n\n\t\tif (options.thinking) {\n\t\t\tinvocationKwargs = {\n\t\t\t\tthinking: {\n\t\t\t\t\ttype: 'enabled',\n\t\t\t\t\t// If thinking is enabled, we need to set a budget.\n\t\t\t\t\t// We fallback to 1024 as that is the minimum\n\t\t\t\t\tbudget_tokens: options.thinkingBudget ?? MIN_THINKING_BUDGET,\n\t\t\t\t},\n\t\t\t\t// The default Langchain max_tokens is -1 (no limit) but Anthropic requires a number\n\t\t\t\t// higher than budget_tokens\n\t\t\t\tmax_tokens: options.maxTokensToSample ?? DEFAULT_MAX_TOKENS,\n\t\t\t\t// These need to be unset when thinking is enabled.\n\t\t\t\t// Because the invocationKwargs will override the model options\n\t\t\t\t// we can pass options to the model and then override them here\n\t\t\t\ttop_k: undefined,\n\t\t\t\ttop_p: undefined,\n\t\t\t\ttemperature: undefined,\n\t\t\t};\n\t\t}\n\n\t\tconst model = new ChatAnthropic({\n\t\t\tanthropicApiKey: credentials.apiKey,\n\t\t\tmodel: modelName,\n\t\t\tanthropicApiUrl: baseURL,\n\t\t\tmaxTokens: options.maxTokensToSample,\n\t\t\ttemperature: options.temperature,\n\t\t\ttopK: options.topK,\n\t\t\ttopP: options.topP,\n\t\t\tcallbacks: [new N8nLlmTracing(this, { tokensUsageParser })],\n\t\t\tonFailedAttempt: makeN8nLlmFailedAttemptHandler(this),\n\t\t\tinvocationKwargs,\n\t\t\tclientOptions: {\n\t\t\t\tfetchOptions: {\n\t\t\t\t\tdispatcher: getProxyAgent(baseURL),\n\t\t\t\t},\n\t\t\t},\n\t\t});\n\n\t\treturn {\n\t\t\tresponse: 
model,\n\t\t};\n\t}\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,uBAA8B;AAE9B,0BAQO;AAEP,4BAA8B;AAC9B,0BAA6C;AAE7C,wCAA+C;AAC/C,2BAA8B;AAC9B,0BAA6B;AAE7B,MAAM,aAA8B;AAAA,EACnC,aAAa;AAAA,EACb,MAAM;AAAA,EACN,MAAM;AAAA;AAAA,EAEN,SAAS;AAAA,IACR;AAAA,MACC,MAAM;AAAA,MACN,OAAO;AAAA,IACR;AAAA,IACA;AAAA,MACC,MAAM;AAAA,MACN,OAAO;AAAA,IACR;AAAA,IACA;AAAA,MACC,MAAM;AAAA,MACN,OAAO;AAAA,IACR;AAAA,IACA;AAAA,MACC,MAAM;AAAA,MACN,OAAO;AAAA,IACR;AAAA,IACA;AAAA,MACC,MAAM;AAAA,MACN,OAAO;AAAA,IACR;AAAA,IACA;AAAA,MACC,MAAM;AAAA,MACN,OAAO;AAAA,IACR;AAAA,IACA;AAAA,MACC,MAAM;AAAA,MACN,OAAO;AAAA,IACR;AAAA,IACA;AAAA,MACC,MAAM;AAAA,MACN,OAAO;AAAA,IACR;AAAA,IACA;AAAA,MACC,MAAM;AAAA,MACN,OAAO;AAAA,IACR;AAAA,IACA;AAAA,MACC,MAAM;AAAA,MACN,OAAO;AAAA,IACR;AAAA,EACD;AAAA,EACA,aACC;AAAA,EACD,SAAS;AACV;AAEA,MAAM,sBAAsB;AAC5B,MAAM,qBAAqB;AACpB,MAAM,gBAAqC;AAAA,EAA3C;AACN,mBAAU;AAAA,MACT,YAAY;AAAA,QACX;AAAA,MACD;AAAA,IACD;AAEA,uBAAoC;AAAA,MACnC,aAAa;AAAA,MAEb,MAAM;AAAA,MACN,MAAM;AAAA,MACN,OAAO,CAAC,WAAW;AAAA,MACnB,SAAS,CAAC,GAAG,KAAK,KAAK,GAAG;AAAA,MAC1B,gBAAgB;AAAA,MAChB,aAAa;AAAA,MACb,UAAU;AAAA,QACT,MAAM;AAAA,MACP;AAAA,MACA,OAAO;AAAA,QACN,YAAY,CAAC,IAAI;AAAA,QACjB,eAAe;AAAA,UACd,IAAI,CAAC,mBAAmB,YAAY;AAAA,UACpC,mBAAmB,CAAC,2BAA2B;AAAA,QAChD;AAAA,QACA,WAAW;AAAA,UACV,sBAAsB;AAAA,YACrB;AAAA,cACC,KAAK;AAAA,YACN;AAAA,UACD;AAAA,QACD;AAAA,QACA,OAAO,CAAC,UAAU,UAAU,MAAM;AAAA,MACnC;AAAA,MAEA,QAAQ,CAAC;AAAA,MAET,SAAS,CAAC,wCAAoB,eAAe;AAAA,MAC7C,aAAa,CAAC,OAAO;AAAA,MACrB,aAAa;AAAA,QACZ;AAAA,UACC,MAAM;AAAA,UACN,UAAU;AAAA,QACX;AAAA,MACD;AAAA,MACA,YAAY;AAAA,YACX,kDAA6B,CAAC,wCAAoB,SAAS,wCAAoB,OAAO,CAAC;AAAA,QACvF;AAAA,UACC,GAAG;AAAA,UACH,gBAAgB;AAAA,YACf,MAAM;AAAA,cACL,YAAY,CAAC,CAAC;AAAA,YACf;AAAA,UACD;AAAA,QACD;AAAA,QACA;AAAA,UACC,GAAG;AAAA,UACH,SAAS;AAAA,UACT,gBAAgB;AAAA,YACf,MAAM;AAAA,cACL,YAAY,CAAC,GAAG;AAAA,YACjB;AAAA,UACD;AAAA,QACD;AAAA,QACA;AAAA,UACC,GAAG;AAAA,UACH,SAAS;AAAA,UACT,UAAU,WAAW,WAAW,CAAC,GAAG;AAAA,YACnC,CAAC,MAAiC,UAAU,KAAK,CAAC,EAAE,KAAK,SAAS,EAAE,WAAW,QAAQ;AAAA,UACxF;AAAA,UACA,gBAAgB;AAAA,YACf,MAAM;AAAA,cACL,YAAY,CAAC,EAAE,MAAM,EAAE,KAAK,IAAI,EAAE,CAAC;AAAA,YACpC;AAAA,UACD;AAAA,QACD;AAAA,QACA;AAAA,UACC,aAAa;AAAA,UACb,MAAM;AAAA,UACN,MAAM;AAAA,UACN,SAAS;AAAA,YACR,MAAM;AAAA,YACN,OAAO;AAAA,YACP,kBAAkB;AAAA,UACnB;AAAA,UACA,UAAU;AAAA,UACV,OAAO;AAAA,YACN;AAAA,cACC,aAAa;AAAA,cACb,MAAM;AAAA,cACN,MAAM;AAAA,cACN,aAAa;AAAA,cACb,aAAa;AAAA,gBACZ,kBAAkB;AAAA,gBAClB,YAAY;AAAA,cACb;AAAA,YACD;AAAA,YACA;AAAA,cACC,aAAa;AAAA,cACb,MAAM;AAAA,cACN,MAAM;AAAA,cACN,aAAa;AAAA,YACd;AAAA,UACD;AAAA,UACA,aACC;AAAA,UACD,gBAAgB;AAAA,YACf,MAAM;AAAA,cACL,YAAY,CAAC,EAAE,MAAM,EAAE,KAAK,IAAI,EAAE,CAAC;AAAA,YACpC;AAAA,UACD;AAAA,QACD;AAAA,QACA;AAAA,UACC,aAAa;AAAA,UACb,MAAM;AAAA,UACN,aAAa;AAAA,UACb,aAAa;AAAA,UACb,MAAM;AAAA,UACN,SAAS,CAAC;AAAA,UACV,SAAS;AAAA,YACR;AAAA,cACC,aAAa;AAAA,cACb,MAAM;AAAA,cACN,SAAS;AAAA,cACT,aAAa;AAAA,cACb,MAAM;AAAA,YACP;AAAA,YACA;AAAA,cACC,aAAa;AAAA,cACb,MAAM;AAAA,cACN,SAAS;AAAA,cACT,aAAa,EAAE,UAAU,GAAG,UAAU,GAAG,iBAAiB,EAAE;AAAA,cAC5D,aACC;AAAA,cACD,MAAM;AAAA,cACN,gBAAgB;AAAA,gBACf,MAAM;AAAA,kBACL,UAAU,CAAC,IAAI;AAAA,gBAChB;AAAA,cACD;AAAA,YACD;AAAA,YACA;AAAA,cACC,aAAa;AAAA,cACb,MAAM;AAAA,cACN,SAAS;AAAA,cACT,aAAa,EAAE,UAAU,GAAG,UAAU,IAAI,iBAAiB,EAAE;AAAA,cAC7D,aACC;AAAA,cACD,MAAM;AAAA,cACN,gBAAgB;AAAA,gBACf,MAAM;AAAA,kBACL,UAAU,CAAC,IAAI;AAAA,gBAChB;AAAA,cACD;AAAA,YACD;AAAA,YACA;AAAA,cACC,aAAa;AAAA,cACb,MAAM;AAAA,cACN,SAAS;AAAA,cACT,aAAa,EAAE,UAAU,GAAG,UAAU,GAAG,iBAAiB,EAAE;AAAA,cAC5D,aACC;AAAA,cACD,MAAM;AAAA,cACN,gBAAgB;AAAA,gBACf,MAAM;AAAA,kBACL,UAAU,CAAC,IAAI;AAAA,gBAChB;AAAA,cACD;AAAA,YACD;AAAA,YACA;AAAA,cACC,a
AAa;AAAA,cACb,MAAM;AAAA,cACN,MAAM;AAAA,cACN,SAAS;AAAA,cACT,aAAa;AAAA,YACd;AAAA,YACA;AAAA,cACC,aAAa;AAAA,cACb,MAAM;AAAA,cACN,MAAM;AAAA,cACN,SAAS;AAAA,cACT,aAAa;AAAA,cACb,gBAAgB;AAAA,gBACf,MAAM;AAAA,kBACL,UAAU,CAAC,IAAI;AAAA,gBAChB;AAAA,cACD;AAAA,YACD;AAAA,UACD;AAAA,QACD;AAAA,MACD;AAAA,IACD;AAAA;AAAA,EAEA,MAAM,WAAuC,WAAwC;AACpF,UAAM,cAAc,MAAM,KAAK;AAAA,MAC9B;AAAA,IACD;AACA,UAAM,UAAU,YAAY,OAAO;AACnC,UAAM,UAAU,KAAK,QAAQ,EAAE;AAC/B,UAAM,YACL,WAAW,MACP,KAAK,iBAAiB,eAAe,SAAS,IAC9C,KAAK,iBAAiB,SAAS,SAAS;AAE7C,UAAM,UAAU,KAAK,iBAAiB,WAAW,WAAW,CAAC,CAAC;AAQ9D,QAAI,mBAAmB,CAAC;AAExB,UAAM,oBAAoB,CAAC,WAAsB;AAChD,YAAM,QAAS,QAAQ,WAAW,SAG5B;AAAA,QACL,cAAc;AAAA,QACd,eAAe;AAAA,MAChB;AACA,aAAO;AAAA,QACN,kBAAkB,MAAM;AAAA,QACxB,cAAc,MAAM;AAAA,QACpB,aAAa,MAAM,eAAe,MAAM;AAAA,MACzC;AAAA,IACD;AAEA,QAAI,QAAQ,UAAU;AACrB,yBAAmB;AAAA,QAClB,UAAU;AAAA,UACT,MAAM;AAAA;AAAA;AAAA,UAGN,eAAe,QAAQ,kBAAkB;AAAA,QAC1C;AAAA;AAAA;AAAA,QAGA,YAAY,QAAQ,qBAAqB;AAAA;AAAA;AAAA;AAAA,QAIzC,OAAO;AAAA,QACP,OAAO;AAAA,QACP,aAAa;AAAA,MACd;AAAA,IACD;AAEA,UAAM,QAAQ,IAAI,+BAAc;AAAA,MAC/B,iBAAiB,YAAY;AAAA,MAC7B,OAAO;AAAA,MACP,iBAAiB;AAAA,MACjB,WAAW,QAAQ;AAAA,MACnB,aAAa,QAAQ;AAAA,MACrB,MAAM,QAAQ;AAAA,MACd,MAAM,QAAQ;AAAA,MACd,WAAW,CAAC,IAAI,mCAAc,MAAM,EAAE,kBAAkB,CAAC,CAAC;AAAA,MAC1D,qBAAiB,kEAA+B,IAAI;AAAA,MACpD;AAAA,MACA,eAAe;AAAA,QACd,cAAc;AAAA,UACb,gBAAY,qCAAc,OAAO;AAAA,QAClC;AAAA,MACD;AAAA,IACD,CAAC;AAED,WAAO;AAAA,MACN,UAAU;AAAA,IACX;AAAA,EACD;AACD;","names":[]}
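
Note on the LmChatAnthropic.node.js.map change above: the sourcesContent embedded in the added map shows the node's tokensUsageParser reading token counts through optional chaining (result?.llmOutput?.usage) with a zeroed fallback, so the tracing callback no longer depends on the provider returning usage metadata. Below is a minimal TypeScript sketch of that guarded parser; the interfaces are narrowed stand-ins introduced only for illustration (the real node imports LLMResult from '@langchain/core/outputs' and wires the parser into N8nLlmTracing), not the package's actual API surface.

```ts
// Narrowed stand-in types for illustration; the node itself uses LLMResult
// from '@langchain/core/outputs'.
interface TokenUsage {
	input_tokens: number;
	output_tokens: number;
}

interface LLMResultLike {
	llmOutput?: { usage?: TokenUsage };
}

const tokensUsageParser = (result: LLMResultLike) => {
	// Optional chaining plus a zeroed default: if the response carries no
	// usage metadata, the parser reports 0 tokens instead of throwing.
	const usage = result?.llmOutput?.usage ?? { input_tokens: 0, output_tokens: 0 };

	return {
		completionTokens: usage.output_tokens,
		promptTokens: usage.input_tokens,
		totalTokens: usage.input_tokens + usage.output_tokens,
	};
};

// Example: a result without llmOutput yields all-zero counts.
console.log(tokensUsageParser({})); // { completionTokens: 0, promptTokens: 0, totalTokens: 0 }
```

The apparent design choice is defensive parsing: token accounting degrades to zeros for responses that omit usage data rather than interrupting the model callback chain.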