@atom8n/n8n-nodes-langchain 2.5.5 → 2.5.7

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,443 @@
1
+ "use strict";
2
+ var __defProp = Object.defineProperty;
3
+ var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
4
+ var __getOwnPropNames = Object.getOwnPropertyNames;
5
+ var __hasOwnProp = Object.prototype.hasOwnProperty;
6
+ var __export = (target, all) => {
7
+ for (var name in all)
8
+ __defProp(target, name, { get: all[name], enumerable: true });
9
+ };
10
+ var __copyProps = (to, from, except, desc) => {
11
+ if (from && typeof from === "object" || typeof from === "function") {
12
+ for (let key of __getOwnPropNames(from))
13
+ if (!__hasOwnProp.call(to, key) && key !== except)
14
+ __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
15
+ }
16
+ return to;
17
+ };
18
+ var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
19
// Module namespace object for this file's single export; populated via
// lazy getters and then wrapped for CommonJS consumers. Exports are wired
// up BEFORE the require() calls below — esbuild emits this order to
// tolerate circular imports.
var LmChatOpenCodeCli_node_exports = {};
__export(LmChatOpenCodeCli_node_exports, {
  LmChatOpenCodeCli: () => LmChatOpenCodeCli
});
module.exports = __toCommonJS(LmChatOpenCodeCli_node_exports);
// LangChain base class + message types used to build the prompt.
var import_chat_models = require("@langchain/core/language_models/chat_models");
var import_messages = require("@langchain/core/messages");
// n8n runtime types (NodeConnectionTypes, ApplicationError).
var import_n8n_workflow = require("n8n-workflow");
// Project-local helpers: connection-hint UI field and LLM tracing callback.
var import_sharedFields = require("../../../utils/sharedFields");
var import_N8nLlmTracing = require("../N8nLlmTracing");
// Node builtins: spawn the opencode binary, validate the working directory.
var import_child_process = require("child_process");
var import_fs = require("fs");
// Instruction block prepended as a system message whenever tools are bound
// (see ChatOpenCodeCLI._generate). It tells the model to emit tool calls
// ONLY as a fenced ```tool_calls JSON array, which extractToolCalls()
// parses back out of the raw CLI response. The backticks are escaped
// because this is itself a template literal; the per-tool descriptions are
// appended after the trailing "Available tools:" line.
const TOOL_CALL_SYSTEM_PROMPT = `You have access to the following tools. When you need to call a tool, respond ONLY with a JSON block in this exact format (no other text before or after):

\`\`\`tool_calls
[{"id": "call_1", "name": "tool_name", "args": {"param": "value"}}]
\`\`\`

When you do NOT need to call a tool, respond normally with text. Never mix tool calls and text in the same response.

Available tools:
`;
/**
 * Custom LangChain chat model that wraps the local OpenCode CLI binary.
 * Each _generate() call shells out to `opencode run --format json` and
 * parses the JSONL event stream it prints. Tool calling is emulated: tool
 * schemas are injected into the prompt as text, and tool calls are parsed
 * back out of a fenced ```tool_calls JSON block in the response.
 */
class ChatOpenCodeCLI extends import_chat_models.BaseChatModel {
  /**
   * @param {{model: string, binaryPath: string, workingDirectory: string}} fields
   */
  constructor(fields) {
    super({});
    // Tools attached via bindTools(); empty array means plain text chat.
    this.boundTools = [];
    this.model = fields.model;
    this.binaryPath = fields.binaryPath;
    this.workingDirectory = fields.workingDirectory;
  }
  // LangChain identifier for this model type.
  _llmType() {
    return "opencode-cli";
  }
  /**
   * Returns a copy of this model carrying the given tools. A clone is made
   * (rather than mutating `this`) so the unbound instance stays reusable;
   * callbacks are carried over so tracing still fires on the clone.
   */
  bindTools(tools, kwargs) {
    console.log("[LmChatOpenCodeCli] bindTools called, tool count:", tools.length);
    const clone = new ChatOpenCodeCLI({
      model: this.model,
      binaryPath: this.binaryPath,
      workingDirectory: this.workingDirectory
    });
    clone.boundTools = tools;
    clone.callbacks = this.callbacks;
    if (kwargs) {
      // Delegate extra call options to Runnable.bind on the clone.
      return clone.bind(kwargs);
    }
    return clone;
  }
  /**
   * Core LangChain generation hook: flattens the message history into a
   * single text prompt, runs the CLI, and returns either parsed tool calls
   * or a plain text AIMessage.
   */
  async _generate(messages, _options, _runManager) {
    console.log("[LmChatOpenCodeCli] _generate called", {
      messageCount: messages.length,
      boundToolCount: this.boundTools.length,
      model: this.model
    });
    // Work on a copy so the caller's message array is never mutated.
    const processedMessages = [...messages];
    if (this.boundTools.length > 0) {
      // Describe each bound tool (name, description, JSON schema) as text.
      // Tools may expose their schema as `parameters` or `schema` depending
      // on how they were constructed.
      const toolDescriptions = this.boundTools.map((tool) => {
        const t = tool;
        const name = t.name ?? "";
        const description = t.description ?? "";
        const schema = t.parameters ?? t.schema ?? {};
        return `- ${name}: ${description}
  Parameters: ${JSON.stringify(schema)}`;
      }).join("\n\n");
      const systemPrompt = TOOL_CALL_SYSTEM_PROMPT + toolDescriptions;
      // Prepend as a system message so it precedes the conversation.
      processedMessages.unshift(new import_messages.SystemMessage(systemPrompt));
      console.log(
        "[LmChatOpenCodeCli] injected tool system prompt, tool count:",
        this.boundTools.length
      );
    }
    // Serialize the chat history into a role-tagged plain-text transcript;
    // non-string content (e.g. multimodal parts) is JSON-stringified.
    const prompt = processedMessages.map((m) => {
      const content = typeof m.content === "string" ? m.content : JSON.stringify(m.content);
      if (m instanceof import_messages.SystemMessage) return `[system]: ${content}`;
      if (m instanceof import_messages.HumanMessage) return `[user]: ${content}`;
      if (m instanceof import_messages.AIMessage) return `[assistant]: ${content}`;
      return `[${m._getType()}]: ${content}`;
    }).join("\n\n");
    console.log("[LmChatOpenCodeCli] prompt built, length:", prompt.length);
    const rawResponse = await this.executeOpenCodeCli(prompt);
    console.log("[LmChatOpenCodeCli] raw response received, length:", rawResponse.length);
    // If tools are bound, prefer a tool-call interpretation of the reply.
    if (this.boundTools.length > 0) {
      const toolCalls = this.extractToolCalls(rawResponse);
      if (toolCalls.length > 0) {
        console.log("[LmChatOpenCodeCli] extracted tool calls:", toolCalls.length);
        const aiMessage2 = new import_messages.AIMessage({
          content: "",
          tool_calls: toolCalls.map((tc) => ({
            id: tc.id,
            name: tc.name,
            args: tc.args,
            type: "tool_call"
          }))
        });
        return {
          generations: [{ message: aiMessage2, text: "" }]
        };
      }
    }
    // Normal text response path.
    console.log("[LmChatOpenCodeCli] returning text response");
    const aiMessage = new import_messages.AIMessage({ content: rawResponse });
    return {
      generations: [{ message: aiMessage, text: rawResponse }]
    };
  }
  /**
   * Extract the first fenced ```tool_calls JSON block from `text`.
   * Returns [] when no block is present, the block is not valid JSON, or
   * the parsed value is not an array. Missing ids get `call_<index>`,
   * missing args default to {}.
   */
  extractToolCalls(text) {
    const toolCallRegex = /```tool_calls\s*\n([\s\S]*?)\n```/;
    const match = toolCallRegex.exec(text);
    if (!match) return [];
    try {
      const parsed = JSON.parse(match[1]);
      if (!Array.isArray(parsed)) return [];
      console.log("[LmChatOpenCodeCli] parsed tool calls from response:", parsed.length);
      return parsed.map((tc, i) => ({
        id: tc.id ?? `call_${i}`,
        name: tc.name,
        args: tc.args ?? {}
      }));
    } catch {
      // Malformed JSON inside the fence — treat as a plain text response.
      console.log("[LmChatOpenCodeCli] failed to parse tool calls JSON block");
      return [];
    }
  }
  /**
   * Run `opencode run --format json [--model <m>] <prompt>` and resolve
   * with the assistant text parsed from its JSONL output; rejects on spawn
   * failure, CLI-reported errors, or non-zero exit with no usable output.
   * NOTE(review): the prompt is passed as a positional argument, so a very
   * long transcript could exceed the OS argv limit — confirm against the
   * CLI's stdin support before changing.
   */
  async executeOpenCodeCli(prompt) {
    const args = ["run", "--format", "json"];
    // "auto" means: let opencode use its configured default model.
    if (this.model && this.model !== "auto") {
      args.push("--model", this.model);
    }
    args.push(prompt);
    // Empty/whitespace-only working directory falls back to undefined
    // (spawn's default cwd).
    const cwd = this.workingDirectory?.trim() || void 0;
    console.log("[LmChatOpenCodeCli] spawning opencode run", {
      binaryPath: this.binaryPath,
      // Redact the prompt (last arg) from logs; only its length is shown.
      args: args.map((a, i) => i === args.length - 1 ? `<prompt len=${a.length}>` : a),
      model: this.model,
      cwd
    });
    return await new Promise((resolve, reject) => {
      const child = (0, import_child_process.spawn)(this.binaryPath, args, {
        cwd,
        stdio: ["pipe", "pipe", "pipe"],
        env: { ...process.env }
      });
      let stdout = "";
      let stderr = "";
      child.stdout.on("data", (data) => {
        stdout += data.toString();
      });
      child.stderr.on("data", (data) => {
        stderr += data.toString();
      });
      // Fires when the binary itself cannot be started (e.g. not on PATH).
      child.on("error", (err) => {
        console.error("[LmChatOpenCodeCli] spawn error:", err.message);
        reject(
          new Error(
            `Failed to spawn opencode: ${err.message}. Make sure OpenCode CLI is installed (brew install opencode-ai/tap/opencode or curl -fsSL https://opencode.ai/install | bash) and accessible. Working directory: ${cwd ?? "<default>"}`
          )
        );
      });
      child.on("close", (code) => {
        console.log("[LmChatOpenCodeCli] opencode run exited", {
          code,
          stdoutLength: stdout.length,
          stderrLength: stderr.length
        });
        // Parse stdout even on non-zero exit — it may still contain usable
        // events (including provider error messages).
        const parseResult = this.parseJsonEventOutput(stdout);
        if (parseResult.assistantText) {
          console.log(
            "[LmChatOpenCodeCli] parsed assistant content, length:",
            parseResult.assistantText.length
          );
          resolve(parseResult.assistantText);
          return;
        }
        // No assistant text — surface the most specific error available:
        // parsed error events first, then stderr/exit code, then a generic
        // "no response" failure.
        if (parseResult.errorMessage) {
          console.error("[LmChatOpenCodeCli] opencode returned error:", parseResult.errorMessage);
          reject(new Error(`OpenCode CLI error: ${parseResult.errorMessage}`));
          return;
        }
        if (code !== 0) {
          const stderrMsg = stderr.trim();
          const errorMsg = stderrMsg || `opencode run exited with code ${code}`;
          console.error("[LmChatOpenCodeCli] opencode run failed with code", code, ":", errorMsg);
          reject(new Error(errorMsg));
          return;
        }
        console.error(
          "[LmChatOpenCodeCli] no assistant response parsed from output, stdout preview:",
          stdout.substring(0, 500)
        );
        reject(new Error("No assistant response received from opencode run"));
      });
      // Close stdin immediately — the prompt travels as a positional arg.
      if (child.stdin) {
        child.stdin.end();
      }
    });
  }
  /**
   * Parse JSON event output from `opencode run --format json`.
   *
   * Actual event types from opencode run --format json (verified empirically):
   * - {"type":"step_start","part":{"type":"step-start",...}}
   * - {"type":"text","part":{"type":"text","text":"...the response...",...}}
   * - {"type":"step_finish","part":{"type":"step-finish","reason":"stop","cost":...,"tokens":{...}}}
   *
   * "message" and "assistant" events are handled as compatibility fallbacks
   * for alternative output shapes. Returns both the concatenated assistant
   * text and any error messages found (joined with "; ").
   */
  parseJsonEventOutput(output) {
    // One JSON event per non-blank line (JSONL).
    const lines = output.split("\n").filter((line) => line.trim());
    const assistantParts = [];
    const errorParts = [];
    console.log("[LmChatOpenCodeCli] parsing JSON event output, line count:", lines.length);
    for (const line of lines) {
      try {
        const parsed = JSON.parse(line);
        const eventType = parsed.type;
        console.log(
          "[LmChatOpenCodeCli] JSON event:",
          eventType,
          "| keys:",
          Object.keys(parsed).join(",")
        );
        // Primary path: "text" events carry the response in part.text.
        if (eventType === "text") {
          const part = parsed.part;
          if (part?.type === "text" && typeof part.text === "string") {
            console.log(
              "[LmChatOpenCodeCli] found text event, text length:",
              part.text.length
            );
            assistantParts.push(part.text);
          }
        }
        // Fallback: "message" events (part.text or role/content shape).
        if (eventType === "message") {
          const part = parsed.part;
          if (part && typeof part.text === "string") {
            assistantParts.push(part.text);
          }
          if (parsed.role === "assistant" && typeof parsed.content === "string") {
            assistantParts.push(parsed.content);
          }
        }
        // Fallback: "assistant" events with string or content-part arrays.
        if (eventType === "assistant") {
          const message = parsed.message;
          if (message?.content) {
            if (Array.isArray(message.content)) {
              for (const c of message.content) {
                if (c.type === "text" && typeof c.text === "string") {
                  assistantParts.push(c.text);
                }
              }
            } else if (typeof message.content === "string") {
              assistantParts.push(message.content);
            }
          }
        }
        // Top-level error events ("message" or "error" string field).
        if (eventType === "error") {
          const errMsg = typeof parsed.message === "string" ? parsed.message : typeof parsed.error === "string" ? parsed.error : void 0;
          if (errMsg) {
            console.error("[LmChatOpenCodeCli] error event received:", errMsg);
            errorParts.push(errMsg);
          }
        }
        // step_finish may also carry an error reason.
        if (eventType === "step_finish") {
          const part = parsed.part;
          if (part) {
            console.log(
              "[LmChatOpenCodeCli] step_finish event, reason:",
              part.reason,
              "cost:",
              part.cost
            );
            if (part.reason === "error" && typeof part.error === "string") {
              errorParts.push(part.error);
            }
          }
        }
      } catch {
        // Skip non-JSON lines (e.g. progress output, banners).
      }
    }
    return {
      assistantText: assistantParts.join(""),
      errorMessage: errorParts.join("; ")
    };
  }
}
/**
 * n8n node type exposing ChatOpenCodeCLI as an AI language-model supplier.
 * Has no inputs; its single output connects to AI chains/agents, which call
 * supplyData() to obtain the configured model instance.
 */
class LmChatOpenCodeCli {
  constructor() {
    // Static n8n node description: UI metadata, connection types, and the
    // user-configurable parameters (model selector + optional overrides).
    this.description = {
      displayName: "OpenCode CLI Chat Model",
      name: "lmChatOpenCodeCli",
      icon: "file:openCodeCli.svg",
      group: ["transform"],
      version: [1],
      description: "Chat model powered by the OpenCode CLI. Requires opencode to be installed locally (brew install opencode-ai/tap/opencode or curl -fsSL https://opencode.ai/install | bash).",
      defaults: {
        name: "OpenCode CLI Chat Model"
      },
      codex: {
        categories: ["AI"],
        subcategories: {
          AI: ["Language Models", "Root Nodes"],
          "Language Models": ["Chat Models (Recommended)"]
        },
        resources: {}
      },
      inputs: [],
      outputs: [import_n8n_workflow.NodeConnectionTypes.AiLanguageModel],
      outputNames: ["Model"],
      properties: [
        (0, import_sharedFields.getConnectionHintNoticeField)([import_n8n_workflow.NodeConnectionTypes.AiChain, import_n8n_workflow.NodeConnectionTypes.AiAgent]),
        {
          displayName: "Model",
          name: "model",
          type: "options",
          description: 'The model to use via opencode CLI. Format: provider/model (e.g. anthropic/claude-sonnet-4-20250514). Select "Auto" to use the default model configured in opencode.',
          // eslint-disable-next-line n8n-nodes-base/node-param-options-type-unsorted-items
          options: [
            { name: "Auto (Default)", value: "auto" },
            // OpenCode built-in models
            { name: "OpenCode Big Pickle", value: "opencode/big-pickle" },
            { name: "OpenCode GPT-5 Nano", value: "opencode/gpt-5-nano" },
            { name: "OpenCode Hy3 Preview Free", value: "opencode/hy3-preview-free" },
            // Anthropic models
            { name: "Claude Sonnet 4 (Anthropic)", value: "anthropic/claude-sonnet-4-20250514" },
            { name: "Claude Opus 4 (Anthropic)", value: "anthropic/claude-opus-4-20250918" },
            {
              name: "Claude 3.5 Sonnet (Anthropic)",
              value: "anthropic/claude-3-5-sonnet-20241022"
            },
            // OpenAI models
            { name: "GPT-4o (OpenAI)", value: "openai/gpt-4o" },
            { name: "GPT-4o Mini (OpenAI)", value: "openai/gpt-4o-mini" },
            { name: "o3 (OpenAI)", value: "openai/o3" },
            { name: "o3 Mini (OpenAI)", value: "openai/o3-mini" },
            // Google models
            { name: "Gemini 2.5 Pro (Google)", value: "google/gemini-2.5-pro" },
            { name: "Gemini 2.5 Flash (Google)", value: "google/gemini-2.5-flash" },
            { name: "Gemini 2.0 Flash (Google)", value: "google/gemini-2.0-flash" },
            // xAI models
            { name: "Grok 3 (xAI)", value: "xai/grok-3" },
            { name: "Grok 3 Mini (xAI)", value: "xai/grok-3-mini" },
            // DeepSeek models
            { name: "DeepSeek Chat (DeepSeek)", value: "deepseek/deepseek-chat" },
            { name: "DeepSeek Reasoner (DeepSeek)", value: "deepseek/deepseek-reasoner" }
          ],
          default: "auto"
        },
        {
          displayName: "Options",
          name: "options",
          placeholder: "Add Option",
          description: "Additional options to configure",
          type: "collection",
          default: {},
          options: [
            {
              displayName: "Binary Path",
              name: "binaryPath",
              default: "opencode",
              description: 'Path to the opencode binary. Defaults to "opencode" (must be in PATH).',
              type: "string"
            },
            {
              displayName: "Working Directory",
              name: "workingDirectory",
              default: "",
              description: "Working directory for the opencode process. Leave empty to use the default.",
              type: "string"
            }
          ]
        }
      ]
    };
  }
  /**
   * n8n sub-node hook: resolves parameters for `itemIndex`, validates the
   * working directory, and returns a ChatOpenCodeCLI wired with n8n's LLM
   * tracing callback.
   * @throws ApplicationError when the working-directory expression resolved
   *   empty, was left unresolved, or does not point at an existing directory.
   */
  async supplyData(itemIndex) {
    const modelName = this.getNodeParameter("model", itemIndex);
    const binaryPath = this.getNodeParameter("options.binaryPath", itemIndex, "opencode");
    // Fetch the working directory twice: once raw (unevaluated expression
    // text, used to detect expression usage) and once resolved.
    const rawWorkingDirectory = this.getNodeParameter("options.workingDirectory", itemIndex, "", {
      rawExpressions: true
    });
    const workingDirectory = this.getNodeParameter("options.workingDirectory", itemIndex, "");
    const normalizedWorkingDirectory = (workingDirectory ?? "").trim();
    const rawWorkingDirectoryValue = rawWorkingDirectory ?? "";
    // Heuristic: "=" prefix, "{{ }}" template, or "$workspace" indicate the
    // user entered an n8n expression rather than a literal path.
    const isWorkingDirectoryExpression = rawWorkingDirectoryValue.startsWith("=") || rawWorkingDirectoryValue.includes("{{") || rawWorkingDirectoryValue.includes("$workspace");
    console.log("[LmChatOpenCodeCli] resolved OpenCode CLI options", {
      itemIndex,
      modelName,
      binaryPath,
      rawWorkingDirectory,
      workingDirectory: normalizedWorkingDirectory
    });
    // An expression that resolved to nothing is almost certainly a mistake.
    if (isWorkingDirectoryExpression && !normalizedWorkingDirectory) {
      throw new import_n8n_workflow.ApplicationError(
        `OpenCode CLI working directory expression resolved to an empty value: ${rawWorkingDirectoryValue}`
      );
    }
    // Guard against expression syntax leaking through unresolved.
    if (normalizedWorkingDirectory.includes("{{") || normalizedWorkingDirectory.includes("$workspace")) {
      throw new import_n8n_workflow.ApplicationError(
        `OpenCode CLI working directory was not resolved before execution: ${normalizedWorkingDirectory}`
      );
    }
    // Fail fast on a nonexistent/non-directory path rather than at spawn time.
    if (normalizedWorkingDirectory && (!(0, import_fs.existsSync)(normalizedWorkingDirectory) || !(0, import_fs.statSync)(normalizedWorkingDirectory).isDirectory())) {
      throw new import_n8n_workflow.ApplicationError(
        `OpenCode CLI working directory does not exist or is not a directory: ${normalizedWorkingDirectory}`
      );
    }
    console.log("[LmChatOpenCodeCli] creating ChatOpenCodeCLI instance", {
      model: modelName,
      binaryPath,
      workingDirectory: normalizedWorkingDirectory
    });
    const model = new ChatOpenCodeCLI({
      model: modelName,
      binaryPath,
      workingDirectory: normalizedWorkingDirectory
    });
    // Attach n8n's tracing callback so token/usage events surface in the UI.
    model.callbacks = [new import_N8nLlmTracing.N8nLlmTracing(this)];
    return {
      response: model
    };
  }
}
// Annotate the CommonJS export names for ESM import in node:
// (dead code — `0 &&` never executes; it exists only so Node's CJS named-
// exports static analysis can discover `LmChatOpenCodeCli`.)
0 && (module.exports = {
  LmChatOpenCodeCli
});
//# sourceMappingURL=LmChatOpenCodeCli.node.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../../../../nodes/llms/LmChatOpenCodeCli/LmChatOpenCodeCli.node.ts"],"sourcesContent":["import { BaseChatModel } from '@langchain/core/language_models/chat_models';\nimport type { BaseMessage } from '@langchain/core/messages';\nimport { AIMessage, HumanMessage, SystemMessage } from '@langchain/core/messages';\nimport type { ChatResult } from '@langchain/core/outputs';\nimport type { CallbackManagerForLLMRun } from '@langchain/core/callbacks/manager';\nimport type { BindToolsInput } from '@langchain/core/language_models/chat_models';\nimport {\n\tApplicationError,\n\tNodeConnectionTypes,\n\ttype INodeType,\n\ttype INodeTypeDescription,\n\ttype ISupplyDataFunctions,\n\ttype SupplyData,\n} from 'n8n-workflow';\n\nimport { getConnectionHintNoticeField } from '@utils/sharedFields';\n\nimport { N8nLlmTracing } from '../N8nLlmTracing';\nimport { spawn } from 'child_process';\nimport { existsSync, statSync } from 'fs';\n\ninterface OpenCodeCliFields {\n\tmodel: string;\n\tbinaryPath: string;\n\tworkingDirectory: string;\n}\n\ninterface ParsedToolCall {\n\tid: string;\n\tname: string;\n\targs: Record<string, unknown>;\n}\n\ninterface ParsedJsonEventResult {\n\tassistantText: string;\n\terrorMessage: string;\n}\n\nconst TOOL_CALL_SYSTEM_PROMPT = `You have access to the following tools. When you need to call a tool, respond ONLY with a JSON block in this exact format (no other text before or after):\n\n\\`\\`\\`tool_calls\n[{\"id\": \"call_1\", \"name\": \"tool_name\", \"args\": {\"param\": \"value\"}}]\n\\`\\`\\`\n\nWhen you do NOT need to call a tool, respond normally with text. 
Never mix tool calls and text in the same response.\n\nAvailable tools:\n`;\n\n/**\n * Custom LangChain chat model that wraps the OpenCode CLI binary.\n * Uses `opencode run --format json` for non-interactive execution.\n * Supports tool calling by injecting tool schemas into the prompt\n * and parsing structured JSON responses for tool calls.\n *\n * OpenCode CLI JSON output event types (verified empirically):\n * - {\"type\":\"step_start\", \"part\":{\"type\":\"step-start\",...}}\n * - {\"type\":\"text\", \"part\":{\"type\":\"text\",\"text\":\"...the response...\",...}}\n * - {\"type\":\"step_finish\", \"part\":{\"type\":\"step-finish\",\"reason\":\"stop\",\"cost\":...,\"tokens\":{...}}}\n */\nclass ChatOpenCodeCLI extends BaseChatModel {\n\tmodel: string;\n\n\tbinaryPath: string;\n\n\tworkingDirectory: string;\n\n\tboundTools: BindToolsInput[] = [];\n\n\tconstructor(fields: OpenCodeCliFields) {\n\t\tsuper({});\n\t\tthis.model = fields.model;\n\t\tthis.binaryPath = fields.binaryPath;\n\t\tthis.workingDirectory = fields.workingDirectory;\n\t}\n\n\t_llmType(): string {\n\t\treturn 'opencode-cli';\n\t}\n\n\toverride bindTools(tools: BindToolsInput[], kwargs?: Partial<this['ParsedCallOptions']>) {\n\t\tconsole.log('[LmChatOpenCodeCli] bindTools called, tool count:', tools.length);\n\t\tconst clone = new ChatOpenCodeCLI({\n\t\t\tmodel: this.model,\n\t\t\tbinaryPath: this.binaryPath,\n\t\t\tworkingDirectory: this.workingDirectory,\n\t\t});\n\t\tclone.boundTools = tools;\n\t\tclone.callbacks = this.callbacks;\n\t\tif (kwargs) {\n\t\t\treturn (\n\t\t\t\tclone as unknown as {\n\t\t\t\t\tbind: (kwargs: Record<string, unknown>) => ChatOpenCodeCLI;\n\t\t\t\t}\n\t\t\t).bind(kwargs as Record<string, unknown>);\n\t\t}\n\t\treturn clone;\n\t}\n\n\tasync _generate(\n\t\tmessages: BaseMessage[],\n\t\t_options: this['ParsedCallOptions'],\n\t\t_runManager?: CallbackManagerForLLMRun,\n\t): Promise<ChatResult> {\n\t\tconsole.log('[LmChatOpenCodeCli] _generate called', 
{\n\t\t\tmessageCount: messages.length,\n\t\t\tboundToolCount: this.boundTools.length,\n\t\t\tmodel: this.model,\n\t\t});\n\n\t\t// If tools are bound, inject tool schemas into a system message\n\t\tconst processedMessages = [...messages];\n\t\tif (this.boundTools.length > 0) {\n\t\t\tconst toolDescriptions = this.boundTools\n\t\t\t\t.map((tool) => {\n\t\t\t\t\tconst t = tool as Record<string, unknown>;\n\t\t\t\t\tconst name = (t.name as string) ?? '';\n\t\t\t\t\tconst description = (t.description as string) ?? '';\n\t\t\t\t\tconst schema = t.parameters ?? t.schema ?? {};\n\t\t\t\t\treturn `- ${name}: ${description}\\n Parameters: ${JSON.stringify(schema)}`;\n\t\t\t\t})\n\t\t\t\t.join('\\n\\n');\n\n\t\t\tconst systemPrompt = TOOL_CALL_SYSTEM_PROMPT + toolDescriptions;\n\t\t\tprocessedMessages.unshift(new SystemMessage(systemPrompt));\n\t\t\tconsole.log(\n\t\t\t\t'[LmChatOpenCodeCli] injected tool system prompt, tool count:',\n\t\t\t\tthis.boundTools.length,\n\t\t\t);\n\t\t}\n\n\t\t// Build prompt from messages\n\t\tconst prompt = processedMessages\n\t\t\t.map((m) => {\n\t\t\t\tconst content = typeof m.content === 'string' ? 
m.content : JSON.stringify(m.content);\n\t\t\t\tif (m instanceof SystemMessage) return `[system]: ${content}`;\n\t\t\t\tif (m instanceof HumanMessage) return `[user]: ${content}`;\n\t\t\t\tif (m instanceof AIMessage) return `[assistant]: ${content}`;\n\t\t\t\treturn `[${m._getType()}]: ${content}`;\n\t\t\t})\n\t\t\t.join('\\n\\n');\n\n\t\tconsole.log('[LmChatOpenCodeCli] prompt built, length:', prompt.length);\n\n\t\t// Execute opencode CLI\n\t\tconst rawResponse = await this.executeOpenCodeCli(prompt);\n\n\t\tconsole.log('[LmChatOpenCodeCli] raw response received, length:', rawResponse.length);\n\n\t\t// Check for tool calls in response\n\t\tif (this.boundTools.length > 0) {\n\t\t\tconst toolCalls = this.extractToolCalls(rawResponse);\n\t\t\tif (toolCalls.length > 0) {\n\t\t\t\tconsole.log('[LmChatOpenCodeCli] extracted tool calls:', toolCalls.length);\n\t\t\t\tconst aiMessage = new AIMessage({\n\t\t\t\t\tcontent: '',\n\t\t\t\t\ttool_calls: toolCalls.map((tc) => ({\n\t\t\t\t\t\tid: tc.id,\n\t\t\t\t\t\tname: tc.name,\n\t\t\t\t\t\targs: tc.args,\n\t\t\t\t\t\ttype: 'tool_call' as const,\n\t\t\t\t\t})),\n\t\t\t\t});\n\n\t\t\t\treturn {\n\t\t\t\t\tgenerations: [{ message: aiMessage, text: '' }],\n\t\t\t\t};\n\t\t\t}\n\t\t}\n\n\t\t// Normal text response\n\t\tconsole.log('[LmChatOpenCodeCli] returning text response');\n\t\tconst aiMessage = new AIMessage({ content: rawResponse });\n\t\treturn {\n\t\t\tgenerations: [{ message: aiMessage, text: rawResponse }],\n\t\t};\n\t}\n\n\tprivate extractToolCalls(text: string): ParsedToolCall[] {\n\t\t// Look for tool_calls JSON block\n\t\tconst toolCallRegex = /```tool_calls\\s*\\n([\\s\\S]*?)\\n```/;\n\t\tconst match = toolCallRegex.exec(text);\n\t\tif (!match) return [];\n\n\t\ttry {\n\t\t\tconst parsed = JSON.parse(match[1]) as Array<{\n\t\t\t\tid?: string;\n\t\t\t\tname: string;\n\t\t\t\targs: Record<string, unknown>;\n\t\t\t}>;\n\t\t\tif (!Array.isArray(parsed)) return [];\n\n\t\t\tconsole.log('[LmChatOpenCodeCli] parsed tool 
calls from response:', parsed.length);\n\t\t\treturn parsed.map((tc, i) => ({\n\t\t\t\tid: tc.id ?? `call_${i}`,\n\t\t\t\tname: tc.name,\n\t\t\t\targs: tc.args ?? {},\n\t\t\t}));\n\t\t} catch {\n\t\t\tconsole.log('[LmChatOpenCodeCli] failed to parse tool calls JSON block');\n\t\t\treturn [];\n\t\t}\n\t}\n\n\tprivate async executeOpenCodeCli(prompt: string): Promise<string> {\n\t\t// Build args: opencode run --format json [--model <provider/model>] <prompt>\n\t\t// The prompt is passed as a positional argument to `opencode run`\n\t\t// We use --format json to get structured JSONL output\n\t\tconst args = ['run', '--format', 'json'];\n\n\t\tif (this.model && this.model !== 'auto') {\n\t\t\targs.push('--model', this.model);\n\t\t}\n\n\t\t// The prompt is passed via stdin-like mechanism — actually as positional arg\n\t\t// But since prompts can be very long, we pass it as a positional argument\n\t\targs.push(prompt);\n\n\t\tconst cwd = this.workingDirectory?.trim() || undefined;\n\n\t\tconsole.log('[LmChatOpenCodeCli] spawning opencode run', {\n\t\t\tbinaryPath: this.binaryPath,\n\t\t\targs: args.map((a, i) => (i === args.length - 1 ? `<prompt len=${a.length}>` : a)),\n\t\t\tmodel: this.model,\n\t\t\tcwd,\n\t\t});\n\n\t\treturn await new Promise<string>((resolve, reject) => {\n\t\t\tconst child = spawn(this.binaryPath, args, {\n\t\t\t\tcwd,\n\t\t\t\tstdio: ['pipe', 'pipe', 'pipe'],\n\t\t\t\tenv: { ...process.env },\n\t\t\t});\n\n\t\t\tlet stdout = '';\n\t\t\tlet stderr = '';\n\n\t\t\tchild.stdout.on('data', (data: Buffer) => {\n\t\t\t\tstdout += data.toString();\n\t\t\t});\n\n\t\t\tchild.stderr.on('data', (data: Buffer) => {\n\t\t\t\tstderr += data.toString();\n\t\t\t});\n\n\t\t\tchild.on('error', (err: Error) => {\n\t\t\t\tconsole.error('[LmChatOpenCodeCli] spawn error:', err.message);\n\t\t\t\treject(\n\t\t\t\t\tnew Error(\n\t\t\t\t\t\t`Failed to spawn opencode: ${err.message}. 
Make sure OpenCode CLI is installed (brew install opencode-ai/tap/opencode or curl -fsSL https://opencode.ai/install | bash) and accessible. Working directory: ${cwd ?? '<default>'}`,\n\t\t\t\t\t),\n\t\t\t\t);\n\t\t\t});\n\n\t\t\tchild.on('close', (code: number | null) => {\n\t\t\t\tconsole.log('[LmChatOpenCodeCli] opencode run exited', {\n\t\t\t\t\tcode,\n\t\t\t\t\tstdoutLength: stdout.length,\n\t\t\t\t\tstderrLength: stderr.length,\n\t\t\t\t});\n\n\t\t\t\t// Parse the JSON event output — even on non-zero exit, stdout may\n\t\t\t\t// contain useful events (e.g. error messages from the provider)\n\t\t\t\tconst parseResult = this.parseJsonEventOutput(stdout);\n\n\t\t\t\tif (parseResult.assistantText) {\n\t\t\t\t\tconsole.log(\n\t\t\t\t\t\t'[LmChatOpenCodeCli] parsed assistant content, length:',\n\t\t\t\t\t\tparseResult.assistantText.length,\n\t\t\t\t\t);\n\t\t\t\t\tresolve(parseResult.assistantText);\n\t\t\t\t\treturn;\n\t\t\t\t}\n\n\t\t\t\t// No assistant response — build a meaningful error from available info\n\t\t\t\tif (parseResult.errorMessage) {\n\t\t\t\t\tconsole.error('[LmChatOpenCodeCli] opencode returned error:', parseResult.errorMessage);\n\t\t\t\t\treject(new Error(`OpenCode CLI error: ${parseResult.errorMessage}`));\n\t\t\t\t\treturn;\n\t\t\t\t}\n\n\t\t\t\tif (code !== 0) {\n\t\t\t\t\tconst stderrMsg = stderr.trim();\n\t\t\t\t\tconst errorMsg = stderrMsg || `opencode run exited with code ${code}`;\n\t\t\t\t\tconsole.error('[LmChatOpenCodeCli] opencode run failed with code', code, ':', errorMsg);\n\t\t\t\t\treject(new Error(errorMsg));\n\t\t\t\t\treturn;\n\t\t\t\t}\n\n\t\t\t\tconsole.error(\n\t\t\t\t\t'[LmChatOpenCodeCli] no assistant response parsed from output, stdout preview:',\n\t\t\t\t\tstdout.substring(0, 500),\n\t\t\t\t);\n\t\t\t\treject(new Error('No assistant response received from opencode run'));\n\t\t\t});\n\n\t\t\t// Close stdin immediately — opencode run takes the prompt as a positional arg\n\t\t\tif (child.stdin) 
{\n\t\t\t\tchild.stdin.end();\n\t\t\t}\n\t\t});\n\t}\n\n\t/**\n\t * Parse JSON event output from `opencode run --format json`.\n\t *\n\t * Actual event types from opencode run --format json (verified empirically):\n\t * - {\"type\":\"step_start\",\"part\":{\"type\":\"step-start\",...}}\n\t * - {\"type\":\"text\",\"part\":{\"type\":\"text\",\"text\":\"...the response...\",...}}\n\t * - {\"type\":\"step_finish\",\"part\":{\"type\":\"step-finish\",\"reason\":\"stop\",\"cost\":...,\"tokens\":{...}}}\n\t *\n\t * Returns both assistant text and any error messages found.\n\t */\n\tprivate parseJsonEventOutput(output: string): ParsedJsonEventResult {\n\t\tconst lines = output.split('\\n').filter((line) => line.trim());\n\t\tconst assistantParts: string[] = [];\n\t\tconst errorParts: string[] = [];\n\n\t\tconsole.log('[LmChatOpenCodeCli] parsing JSON event output, line count:', lines.length);\n\n\t\tfor (const line of lines) {\n\t\t\ttry {\n\t\t\t\tconst parsed = JSON.parse(line) as Record<string, unknown>;\n\t\t\t\tconst eventType = parsed.type as string | undefined;\n\n\t\t\t\tconsole.log(\n\t\t\t\t\t'[LmChatOpenCodeCli] JSON event:',\n\t\t\t\t\teventType,\n\t\t\t\t\t'| keys:',\n\t\t\t\t\tObject.keys(parsed).join(','),\n\t\t\t\t);\n\n\t\t\t\t// \"text\" event — contains assistant response text in part.text\n\t\t\t\tif (eventType === 'text') {\n\t\t\t\t\tconst part = parsed.part as Record<string, unknown> | undefined;\n\t\t\t\t\tif (part?.type === 'text' && typeof part.text === 'string') {\n\t\t\t\t\t\tconsole.log(\n\t\t\t\t\t\t\t'[LmChatOpenCodeCli] found text event, text length:',\n\t\t\t\t\t\t\t(part.text as string).length,\n\t\t\t\t\t\t);\n\t\t\t\t\t\tassistantParts.push(part.text as string);\n\t\t\t\t\t}\n\t\t\t\t}\n\n\t\t\t\t// \"message\" event — fallback for alternative output formats\n\t\t\t\tif (eventType === 'message') {\n\t\t\t\t\tconst part = parsed.part as Record<string, unknown> | undefined;\n\t\t\t\t\tif (part && typeof part.text === 'string') 
{\n\t\t\t\t\t\tassistantParts.push(part.text as string);\n\t\t\t\t\t}\n\t\t\t\t\t// Also handle role-based messages\n\t\t\t\t\tif (parsed.role === 'assistant' && typeof parsed.content === 'string') {\n\t\t\t\t\t\tassistantParts.push(parsed.content as string);\n\t\t\t\t\t}\n\t\t\t\t}\n\n\t\t\t\t// \"assistant\" event — compatibility fallback\n\t\t\t\tif (eventType === 'assistant') {\n\t\t\t\t\tconst message = parsed.message as Record<string, unknown> | undefined;\n\t\t\t\t\tif (message?.content) {\n\t\t\t\t\t\tif (Array.isArray(message.content)) {\n\t\t\t\t\t\t\tfor (const c of message.content as Array<Record<string, unknown>>) {\n\t\t\t\t\t\t\t\tif (c.type === 'text' && typeof c.text === 'string') {\n\t\t\t\t\t\t\t\t\tassistantParts.push(c.text as string);\n\t\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t} else if (typeof message.content === 'string') {\n\t\t\t\t\t\t\tassistantParts.push(message.content);\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t}\n\n\t\t\t\t// Error events\n\t\t\t\tif (eventType === 'error') {\n\t\t\t\t\tconst errMsg =\n\t\t\t\t\t\ttypeof parsed.message === 'string'\n\t\t\t\t\t\t\t? parsed.message\n\t\t\t\t\t\t\t: typeof parsed.error === 'string'\n\t\t\t\t\t\t\t\t? 
parsed.error\n\t\t\t\t\t\t\t\t: undefined;\n\t\t\t\t\tif (errMsg) {\n\t\t\t\t\t\tconsole.error('[LmChatOpenCodeCli] error event received:', errMsg);\n\t\t\t\t\t\terrorParts.push(errMsg);\n\t\t\t\t\t}\n\t\t\t\t}\n\n\t\t\t\t// step_finish with error info\n\t\t\t\tif (eventType === 'step_finish') {\n\t\t\t\t\tconst part = parsed.part as Record<string, unknown> | undefined;\n\t\t\t\t\tif (part) {\n\t\t\t\t\t\tconsole.log(\n\t\t\t\t\t\t\t'[LmChatOpenCodeCli] step_finish event, reason:',\n\t\t\t\t\t\t\tpart.reason,\n\t\t\t\t\t\t\t'cost:',\n\t\t\t\t\t\t\tpart.cost,\n\t\t\t\t\t\t);\n\t\t\t\t\t\tif (part.reason === 'error' && typeof part.error === 'string') {\n\t\t\t\t\t\t\terrorParts.push(part.error as string);\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t} catch {\n\t\t\t\t// Skip non-JSON lines (e.g. progress output, banners)\n\t\t\t}\n\t\t}\n\n\t\treturn {\n\t\t\tassistantText: assistantParts.join(''),\n\t\t\terrorMessage: errorParts.join('; '),\n\t\t};\n\t}\n}\n\nexport class LmChatOpenCodeCli implements INodeType {\n\tdescription: INodeTypeDescription = {\n\t\tdisplayName: 'OpenCode CLI Chat Model',\n\n\t\tname: 'lmChatOpenCodeCli',\n\t\ticon: 'file:openCodeCli.svg',\n\t\tgroup: ['transform'],\n\t\tversion: [1],\n\t\tdescription:\n\t\t\t'Chat model powered by the OpenCode CLI. 
Requires opencode to be installed locally (brew install opencode-ai/tap/opencode or curl -fsSL https://opencode.ai/install | bash).',\n\t\tdefaults: {\n\t\t\tname: 'OpenCode CLI Chat Model',\n\t\t},\n\t\tcodex: {\n\t\t\tcategories: ['AI'],\n\t\t\tsubcategories: {\n\t\t\t\tAI: ['Language Models', 'Root Nodes'],\n\t\t\t\t'Language Models': ['Chat Models (Recommended)'],\n\t\t\t},\n\t\t\tresources: {},\n\t\t},\n\n\t\tinputs: [],\n\n\t\toutputs: [NodeConnectionTypes.AiLanguageModel],\n\t\toutputNames: ['Model'],\n\t\tproperties: [\n\t\t\tgetConnectionHintNoticeField([NodeConnectionTypes.AiChain, NodeConnectionTypes.AiAgent]),\n\t\t\t{\n\t\t\t\tdisplayName: 'Model',\n\t\t\t\tname: 'model',\n\t\t\t\ttype: 'options',\n\t\t\t\tdescription:\n\t\t\t\t\t'The model to use via opencode CLI. Format: provider/model (e.g. anthropic/claude-sonnet-4-20250514). Select \"Auto\" to use the default model configured in opencode.',\n\t\t\t\t// eslint-disable-next-line n8n-nodes-base/node-param-options-type-unsorted-items\n\t\t\t\toptions: [\n\t\t\t\t\t{ name: 'Auto (Default)', value: 'auto' },\n\t\t\t\t\t// OpenCode built-in models\n\t\t\t\t\t{ name: 'OpenCode Big Pickle', value: 'opencode/big-pickle' },\n\t\t\t\t\t{ name: 'OpenCode GPT-5 Nano', value: 'opencode/gpt-5-nano' },\n\t\t\t\t\t{ name: 'OpenCode Hy3 Preview Free', value: 'opencode/hy3-preview-free' },\n\t\t\t\t\t// Anthropic models\n\t\t\t\t\t{ name: 'Claude Sonnet 4 (Anthropic)', value: 'anthropic/claude-sonnet-4-20250514' },\n\t\t\t\t\t{ name: 'Claude Opus 4 (Anthropic)', value: 'anthropic/claude-opus-4-20250918' },\n\t\t\t\t\t{\n\t\t\t\t\t\tname: 'Claude 3.5 Sonnet (Anthropic)',\n\t\t\t\t\t\tvalue: 'anthropic/claude-3-5-sonnet-20241022',\n\t\t\t\t\t},\n\t\t\t\t\t// OpenAI models\n\t\t\t\t\t{ name: 'GPT-4o (OpenAI)', value: 'openai/gpt-4o' },\n\t\t\t\t\t{ name: 'GPT-4o Mini (OpenAI)', value: 'openai/gpt-4o-mini' },\n\t\t\t\t\t{ name: 'o3 (OpenAI)', value: 'openai/o3' },\n\t\t\t\t\t{ name: 'o3 Mini (OpenAI)', value: 
'openai/o3-mini' },\n\t\t\t\t\t// Google models\n\t\t\t\t\t{ name: 'Gemini 2.5 Pro (Google)', value: 'google/gemini-2.5-pro' },\n\t\t\t\t\t{ name: 'Gemini 2.5 Flash (Google)', value: 'google/gemini-2.5-flash' },\n\t\t\t\t\t{ name: 'Gemini 2.0 Flash (Google)', value: 'google/gemini-2.0-flash' },\n\t\t\t\t\t// xAI models\n\t\t\t\t\t{ name: 'Grok 3 (xAI)', value: 'xai/grok-3' },\n\t\t\t\t\t{ name: 'Grok 3 Mini (xAI)', value: 'xai/grok-3-mini' },\n\t\t\t\t\t// DeepSeek models\n\t\t\t\t\t{ name: 'DeepSeek Chat (DeepSeek)', value: 'deepseek/deepseek-chat' },\n\t\t\t\t\t{ name: 'DeepSeek Reasoner (DeepSeek)', value: 'deepseek/deepseek-reasoner' },\n\t\t\t\t],\n\t\t\t\tdefault: 'auto',\n\t\t\t},\n\t\t\t{\n\t\t\t\tdisplayName: 'Options',\n\t\t\t\tname: 'options',\n\t\t\t\tplaceholder: 'Add Option',\n\t\t\t\tdescription: 'Additional options to configure',\n\t\t\t\ttype: 'collection',\n\t\t\t\tdefault: {},\n\t\t\t\toptions: [\n\t\t\t\t\t{\n\t\t\t\t\t\tdisplayName: 'Binary Path',\n\t\t\t\t\t\tname: 'binaryPath',\n\t\t\t\t\t\tdefault: 'opencode',\n\t\t\t\t\t\tdescription: 'Path to the opencode binary. Defaults to \"opencode\" (must be in PATH).',\n\t\t\t\t\t\ttype: 'string',\n\t\t\t\t\t},\n\t\t\t\t\t{\n\t\t\t\t\t\tdisplayName: 'Working Directory',\n\t\t\t\t\t\tname: 'workingDirectory',\n\t\t\t\t\t\tdefault: '',\n\t\t\t\t\t\tdescription:\n\t\t\t\t\t\t\t'Working directory for the opencode process. 
Leave empty to use the default.',\n\t\t\t\t\t\ttype: 'string',\n\t\t\t\t\t},\n\t\t\t\t],\n\t\t\t},\n\t\t],\n\t};\n\n\tasync supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise<SupplyData> {\n\t\tconst modelName = this.getNodeParameter('model', itemIndex) as string;\n\n\t\tconst binaryPath = this.getNodeParameter('options.binaryPath', itemIndex, 'opencode') as string;\n\t\tconst rawWorkingDirectory = this.getNodeParameter('options.workingDirectory', itemIndex, '', {\n\t\t\trawExpressions: true,\n\t\t}) as string | undefined;\n\t\tconst workingDirectory = this.getNodeParameter('options.workingDirectory', itemIndex, '') as\n\t\t\t| string\n\t\t\t| undefined;\n\t\tconst normalizedWorkingDirectory = (workingDirectory ?? '').trim();\n\t\tconst rawWorkingDirectoryValue = rawWorkingDirectory ?? '';\n\t\tconst isWorkingDirectoryExpression =\n\t\t\trawWorkingDirectoryValue.startsWith('=') ||\n\t\t\trawWorkingDirectoryValue.includes('{{') ||\n\t\t\trawWorkingDirectoryValue.includes('$workspace');\n\n\t\tconsole.log('[LmChatOpenCodeCli] resolved OpenCode CLI options', {\n\t\t\titemIndex,\n\t\t\tmodelName,\n\t\t\tbinaryPath,\n\t\t\trawWorkingDirectory,\n\t\t\tworkingDirectory: normalizedWorkingDirectory,\n\t\t});\n\n\t\tif (isWorkingDirectoryExpression && !normalizedWorkingDirectory) {\n\t\t\tthrow new ApplicationError(\n\t\t\t\t`OpenCode CLI working directory expression resolved to an empty value: ${rawWorkingDirectoryValue}`,\n\t\t\t);\n\t\t}\n\n\t\tif (\n\t\t\tnormalizedWorkingDirectory.includes('{{') ||\n\t\t\tnormalizedWorkingDirectory.includes('$workspace')\n\t\t) {\n\t\t\tthrow new ApplicationError(\n\t\t\t\t`OpenCode CLI working directory was not resolved before execution: ${normalizedWorkingDirectory}`,\n\t\t\t);\n\t\t}\n\n\t\tif (\n\t\t\tnormalizedWorkingDirectory &&\n\t\t\t(!existsSync(normalizedWorkingDirectory) ||\n\t\t\t\t!statSync(normalizedWorkingDirectory).isDirectory())\n\t\t) {\n\t\t\tthrow new ApplicationError(\n\t\t\t\t`OpenCode CLI working 
directory does not exist or is not a directory: ${normalizedWorkingDirectory}`,\n\t\t\t);\n\t\t}\n\n\t\tconsole.log('[LmChatOpenCodeCli] creating ChatOpenCodeCLI instance', {\n\t\t\tmodel: modelName,\n\t\t\tbinaryPath,\n\t\t\tworkingDirectory: normalizedWorkingDirectory,\n\t\t});\n\n\t\tconst model = new ChatOpenCodeCLI({\n\t\t\tmodel: modelName,\n\t\t\tbinaryPath,\n\t\t\tworkingDirectory: normalizedWorkingDirectory,\n\t\t});\n\n\t\tmodel.callbacks = [new N8nLlmTracing(this)];\n\n\t\treturn {\n\t\t\tresponse: model,\n\t\t};\n\t}\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,yBAA8B;AAE9B,sBAAuD;AAIvD,0BAOO;AAEP,0BAA6C;AAE7C,2BAA8B;AAC9B,2BAAsB;AACtB,gBAAqC;AAmBrC,MAAM,0BAA0B;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAsBhC,MAAM,wBAAwB,iCAAc;AAAA,EAS3C,YAAY,QAA2B;AACtC,UAAM,CAAC,CAAC;AAHT,sBAA+B,CAAC;AAI/B,SAAK,QAAQ,OAAO;AACpB,SAAK,aAAa,OAAO;AACzB,SAAK,mBAAmB,OAAO;AAAA,EAChC;AAAA,EAEA,WAAmB;AAClB,WAAO;AAAA,EACR;AAAA,EAES,UAAU,OAAyB,QAA6C;AACxF,YAAQ,IAAI,qDAAqD,MAAM,MAAM;AAC7E,UAAM,QAAQ,IAAI,gBAAgB;AAAA,MACjC,OAAO,KAAK;AAAA,MACZ,YAAY,KAAK;AAAA,MACjB,kBAAkB,KAAK;AAAA,IACxB,CAAC;AACD,UAAM,aAAa;AACnB,UAAM,YAAY,KAAK;AACvB,QAAI,QAAQ;AACX,aACC,MAGC,KAAK,MAAiC;AAAA,IACzC;AACA,WAAO;AAAA,EACR;AAAA,EAEA,MAAM,UACL,UACA,UACA,aACsB;AACtB,YAAQ,IAAI,wCAAwC;AAAA,MACnD,cAAc,SAAS;AAAA,MACvB,gBAAgB,KAAK,WAAW;AAAA,MAChC,OAAO,KAAK;AAAA,IACb,CAAC;AAGD,UAAM,oBAAoB,CAAC,GAAG,QAAQ;AACtC,QAAI,KAAK,WAAW,SAAS,GAAG;AAC/B,YAAM,mBAAmB,KAAK,WAC5B,IAAI,CAAC,SAAS;AACd,cAAM,IAAI;AACV,cAAM,OAAQ,EAAE,QAAmB;AACnC,cAAM,cAAe,EAAE,eAA0B;AACjD,cAAM,SAAS,EAAE,cAAc,EAAE,UAAU,CAAC;AAC5C,eAAO,KAAK,IAAI,KAAK,WAAW;AAAA,gBAAmB,KAAK,UAAU,MAAM,CAAC;AAAA,MAC1E,CAAC,EACA,KAAK,MAAM;AAEb,YAAM,eAAe,0BAA0B;AAC/C,wBAAkB,QAAQ,IAAI,8BAAc,YAAY,CAAC;AACzD,cAAQ;AAAA,QACP;AAAA,QACA,KAAK,WAAW;AAAA,MACjB;AAAA,IACD;AAGA,UAAM,SAAS,kBACb,IAAI,CAAC,MAAM;AACX,YAAM,UAAU,OAAO,EAAE,YAAY,WAAW,EAAE,UAAU,KAAK,UAAU,EAAE,OAAO;AACpF,UAAI,aAAa,8BAAe,QAAO,aAAa,OAAO;AAC3D,UAAI,aAAa,6BAAc,QAAO,WAAW,OAAO;AACxD,UAAI,aAAa,0BAAW,QA
AO,gBAAgB,OAAO;AAC1D,aAAO,IAAI,EAAE,SAAS,CAAC,MAAM,OAAO;AAAA,IACrC,CAAC,EACA,KAAK,MAAM;AAEb,YAAQ,IAAI,6CAA6C,OAAO,MAAM;AAGtE,UAAM,cAAc,MAAM,KAAK,mBAAmB,MAAM;AAExD,YAAQ,IAAI,sDAAsD,YAAY,MAAM;AAGpF,QAAI,KAAK,WAAW,SAAS,GAAG;AAC/B,YAAM,YAAY,KAAK,iBAAiB,WAAW;AACnD,UAAI,UAAU,SAAS,GAAG;AACzB,gBAAQ,IAAI,6CAA6C,UAAU,MAAM;AACzE,cAAMA,aAAY,IAAI,0BAAU;AAAA,UAC/B,SAAS;AAAA,UACT,YAAY,UAAU,IAAI,CAAC,QAAQ;AAAA,YAClC,IAAI,GAAG;AAAA,YACP,MAAM,GAAG;AAAA,YACT,MAAM,GAAG;AAAA,YACT,MAAM;AAAA,UACP,EAAE;AAAA,QACH,CAAC;AAED,eAAO;AAAA,UACN,aAAa,CAAC,EAAE,SAASA,YAAW,MAAM,GAAG,CAAC;AAAA,QAC/C;AAAA,MACD;AAAA,IACD;AAGA,YAAQ,IAAI,6CAA6C;AACzD,UAAM,YAAY,IAAI,0BAAU,EAAE,SAAS,YAAY,CAAC;AACxD,WAAO;AAAA,MACN,aAAa,CAAC,EAAE,SAAS,WAAW,MAAM,YAAY,CAAC;AAAA,IACxD;AAAA,EACD;AAAA,EAEQ,iBAAiB,MAAgC;AAExD,UAAM,gBAAgB;AACtB,UAAM,QAAQ,cAAc,KAAK,IAAI;AACrC,QAAI,CAAC,MAAO,QAAO,CAAC;AAEpB,QAAI;AACH,YAAM,SAAS,KAAK,MAAM,MAAM,CAAC,CAAC;AAKlC,UAAI,CAAC,MAAM,QAAQ,MAAM,EAAG,QAAO,CAAC;AAEpC,cAAQ,IAAI,wDAAwD,OAAO,MAAM;AACjF,aAAO,OAAO,IAAI,CAAC,IAAI,OAAO;AAAA,QAC7B,IAAI,GAAG,MAAM,QAAQ,CAAC;AAAA,QACtB,MAAM,GAAG;AAAA,QACT,MAAM,GAAG,QAAQ,CAAC;AAAA,MACnB,EAAE;AAAA,IACH,QAAQ;AACP,cAAQ,IAAI,2DAA2D;AACvE,aAAO,CAAC;AAAA,IACT;AAAA,EACD;AAAA,EAEA,MAAc,mBAAmB,QAAiC;AAIjE,UAAM,OAAO,CAAC,OAAO,YAAY,MAAM;AAEvC,QAAI,KAAK,SAAS,KAAK,UAAU,QAAQ;AACxC,WAAK,KAAK,WAAW,KAAK,KAAK;AAAA,IAChC;AAIA,SAAK,KAAK,MAAM;AAEhB,UAAM,MAAM,KAAK,kBAAkB,KAAK,KAAK;AAE7C,YAAQ,IAAI,6CAA6C;AAAA,MACxD,YAAY,KAAK;AAAA,MACjB,MAAM,KAAK,IAAI,CAAC,GAAG,MAAO,MAAM,KAAK,SAAS,IAAI,eAAe,EAAE,MAAM,MAAM,CAAE;AAAA,MACjF,OAAO,KAAK;AAAA,MACZ;AAAA,IACD,CAAC;AAED,WAAO,MAAM,IAAI,QAAgB,CAAC,SAAS,WAAW;AACrD,YAAM,YAAQ,4BAAM,KAAK,YAAY,MAAM;AAAA,QAC1C;AAAA,QACA,OAAO,CAAC,QAAQ,QAAQ,MAAM;AAAA,QAC9B,KAAK,EAAE,GAAG,QAAQ,IAAI;AAAA,MACvB,CAAC;AAED,UAAI,SAAS;AACb,UAAI,SAAS;AAEb,YAAM,OAAO,GAAG,QAAQ,CAAC,SAAiB;AACzC,kBAAU,KAAK,SAAS;AAAA,MACzB,CAAC;AAED,YAAM,OAAO,GAAG,QAAQ,CAAC,SAAiB;AACzC,kBAAU,KAAK,SAAS;AAAA,MACzB,CAAC;AAED,YAAM,GAAG,SAAS,CAAC,QAAe;AACjC,gBAAQ,MAAM,oCAAoC,IAAI,OAAO;AAC7D;AAAA,UACC,IAA
I;AAAA,YACH,6BAA6B,IAAI,OAAO,qKAAqK,OAAO,WAAW;AAAA,UAChO;AAAA,QACD;AAAA,MACD,CAAC;AAED,YAAM,GAAG,SAAS,CAAC,SAAwB;AAC1C,gBAAQ,IAAI,2CAA2C;AAAA,UACtD;AAAA,UACA,cAAc,OAAO;AAAA,UACrB,cAAc,OAAO;AAAA,QACtB,CAAC;AAID,cAAM,cAAc,KAAK,qBAAqB,MAAM;AAEpD,YAAI,YAAY,eAAe;AAC9B,kBAAQ;AAAA,YACP;AAAA,YACA,YAAY,cAAc;AAAA,UAC3B;AACA,kBAAQ,YAAY,aAAa;AACjC;AAAA,QACD;AAGA,YAAI,YAAY,cAAc;AAC7B,kBAAQ,MAAM,gDAAgD,YAAY,YAAY;AACtF,iBAAO,IAAI,MAAM,uBAAuB,YAAY,YAAY,EAAE,CAAC;AACnE;AAAA,QACD;AAEA,YAAI,SAAS,GAAG;AACf,gBAAM,YAAY,OAAO,KAAK;AAC9B,gBAAM,WAAW,aAAa,iCAAiC,IAAI;AACnE,kBAAQ,MAAM,qDAAqD,MAAM,KAAK,QAAQ;AACtF,iBAAO,IAAI,MAAM,QAAQ,CAAC;AAC1B;AAAA,QACD;AAEA,gBAAQ;AAAA,UACP;AAAA,UACA,OAAO,UAAU,GAAG,GAAG;AAAA,QACxB;AACA,eAAO,IAAI,MAAM,kDAAkD,CAAC;AAAA,MACrE,CAAC;AAGD,UAAI,MAAM,OAAO;AAChB,cAAM,MAAM,IAAI;AAAA,MACjB;AAAA,IACD,CAAC;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAYQ,qBAAqB,QAAuC;AACnE,UAAM,QAAQ,OAAO,MAAM,IAAI,EAAE,OAAO,CAAC,SAAS,KAAK,KAAK,CAAC;AAC7D,UAAM,iBAA2B,CAAC;AAClC,UAAM,aAAuB,CAAC;AAE9B,YAAQ,IAAI,8DAA8D,MAAM,MAAM;AAEtF,eAAW,QAAQ,OAAO;AACzB,UAAI;AACH,cAAM,SAAS,KAAK,MAAM,IAAI;AAC9B,cAAM,YAAY,OAAO;AAEzB,gBAAQ;AAAA,UACP;AAAA,UACA;AAAA,UACA;AAAA,UACA,OAAO,KAAK,MAAM,EAAE,KAAK,GAAG;AAAA,QAC7B;AAGA,YAAI,cAAc,QAAQ;AACzB,gBAAM,OAAO,OAAO;AACpB,cAAI,MAAM,SAAS,UAAU,OAAO,KAAK,SAAS,UAAU;AAC3D,oBAAQ;AAAA,cACP;AAAA,cACC,KAAK,KAAgB;AAAA,YACvB;AACA,2BAAe,KAAK,KAAK,IAAc;AAAA,UACxC;AAAA,QACD;AAGA,YAAI,cAAc,WAAW;AAC5B,gBAAM,OAAO,OAAO;AACpB,cAAI,QAAQ,OAAO,KAAK,SAAS,UAAU;AAC1C,2BAAe,KAAK,KAAK,IAAc;AAAA,UACxC;AAEA,cAAI,OAAO,SAAS,eAAe,OAAO,OAAO,YAAY,UAAU;AACtE,2BAAe,KAAK,OAAO,OAAiB;AAAA,UAC7C;AAAA,QACD;AAGA,YAAI,cAAc,aAAa;AAC9B,gBAAM,UAAU,OAAO;AACvB,cAAI,SAAS,SAAS;AACrB,gBAAI,MAAM,QAAQ,QAAQ,OAAO,GAAG;AACnC,yBAAW,KAAK,QAAQ,SAA2C;AAClE,oBAAI,EAAE,SAAS,UAAU,OAAO,EAAE,SAAS,UAAU;AACpD,iCAAe,KAAK,EAAE,IAAc;AAAA,gBACrC;AAAA,cACD;AAAA,YACD,WAAW,OAAO,QAAQ,YAAY,UAAU;AAC/C,6BAAe,KAAK,QAAQ,OAAO;AAAA,YACpC;AAAA,UACD;AAAA,QACD;AAGA,YAAI,cAAc,SAAS;AAC1B,gBAAM,SACL,OAAO,OAAO,YAAY,WACvB,OAAO,UACP,OA
AO,OAAO,UAAU,WACvB,OAAO,QACP;AACL,cAAI,QAAQ;AACX,oBAAQ,MAAM,6CAA6C,MAAM;AACjE,uBAAW,KAAK,MAAM;AAAA,UACvB;AAAA,QACD;AAGA,YAAI,cAAc,eAAe;AAChC,gBAAM,OAAO,OAAO;AACpB,cAAI,MAAM;AACT,oBAAQ;AAAA,cACP;AAAA,cACA,KAAK;AAAA,cACL;AAAA,cACA,KAAK;AAAA,YACN;AACA,gBAAI,KAAK,WAAW,WAAW,OAAO,KAAK,UAAU,UAAU;AAC9D,yBAAW,KAAK,KAAK,KAAe;AAAA,YACrC;AAAA,UACD;AAAA,QACD;AAAA,MACD,QAAQ;AAAA,MAER;AAAA,IACD;AAEA,WAAO;AAAA,MACN,eAAe,eAAe,KAAK,EAAE;AAAA,MACrC,cAAc,WAAW,KAAK,IAAI;AAAA,IACnC;AAAA,EACD;AACD;AAEO,MAAM,kBAAuC;AAAA,EAA7C;AACN,uBAAoC;AAAA,MACnC,aAAa;AAAA,MAEb,MAAM;AAAA,MACN,MAAM;AAAA,MACN,OAAO,CAAC,WAAW;AAAA,MACnB,SAAS,CAAC,CAAC;AAAA,MACX,aACC;AAAA,MACD,UAAU;AAAA,QACT,MAAM;AAAA,MACP;AAAA,MACA,OAAO;AAAA,QACN,YAAY,CAAC,IAAI;AAAA,QACjB,eAAe;AAAA,UACd,IAAI,CAAC,mBAAmB,YAAY;AAAA,UACpC,mBAAmB,CAAC,2BAA2B;AAAA,QAChD;AAAA,QACA,WAAW,CAAC;AAAA,MACb;AAAA,MAEA,QAAQ,CAAC;AAAA,MAET,SAAS,CAAC,wCAAoB,eAAe;AAAA,MAC7C,aAAa,CAAC,OAAO;AAAA,MACrB,YAAY;AAAA,YACX,kDAA6B,CAAC,wCAAoB,SAAS,wCAAoB,OAAO,CAAC;AAAA,QACvF;AAAA,UACC,aAAa;AAAA,UACb,MAAM;AAAA,UACN,MAAM;AAAA,UACN,aACC;AAAA;AAAA,UAED,SAAS;AAAA,YACR,EAAE,MAAM,kBAAkB,OAAO,OAAO;AAAA;AAAA,YAExC,EAAE,MAAM,uBAAuB,OAAO,sBAAsB;AAAA,YAC5D,EAAE,MAAM,uBAAuB,OAAO,sBAAsB;AAAA,YAC5D,EAAE,MAAM,6BAA6B,OAAO,4BAA4B;AAAA;AAAA,YAExE,EAAE,MAAM,+BAA+B,OAAO,qCAAqC;AAAA,YACnF,EAAE,MAAM,6BAA6B,OAAO,mCAAmC;AAAA,YAC/E;AAAA,cACC,MAAM;AAAA,cACN,OAAO;AAAA,YACR;AAAA;AAAA,YAEA,EAAE,MAAM,mBAAmB,OAAO,gBAAgB;AAAA,YAClD,EAAE,MAAM,wBAAwB,OAAO,qBAAqB;AAAA,YAC5D,EAAE,MAAM,eAAe,OAAO,YAAY;AAAA,YAC1C,EAAE,MAAM,oBAAoB,OAAO,iBAAiB;AAAA;AAAA,YAEpD,EAAE,MAAM,2BAA2B,OAAO,wBAAwB;AAAA,YAClE,EAAE,MAAM,6BAA6B,OAAO,0BAA0B;AAAA,YACtE,EAAE,MAAM,6BAA6B,OAAO,0BAA0B;AAAA;AAAA,YAEtE,EAAE,MAAM,gBAAgB,OAAO,aAAa;AAAA,YAC5C,EAAE,MAAM,qBAAqB,OAAO,kBAAkB;AAAA;AAAA,YAEtD,EAAE,MAAM,4BAA4B,OAAO,yBAAyB;AAAA,YACpE,EAAE,MAAM,gCAAgC,OAAO,6BAA6B;AAAA,UAC7E;AAAA,UACA,SAAS;AAAA,QACV;AAAA,QACA;AAAA,UACC,aAAa;AAAA,UACb,MAAM;AAAA,UACN,aAAa;AAAA,UACb,aAAa;AAAA,UACb,MAAM;AAAA,UACN,SAAS,CAAC;AAAA,UACV,SAAS;AAAA,YACR;AAAA,cACC,aAAa;A
AAA,cACb,MAAM;AAAA,cACN,SAAS;AAAA,cACT,aAAa;AAAA,cACb,MAAM;AAAA,YACP;AAAA,YACA;AAAA,cACC,aAAa;AAAA,cACb,MAAM;AAAA,cACN,SAAS;AAAA,cACT,aACC;AAAA,cACD,MAAM;AAAA,YACP;AAAA,UACD;AAAA,QACD;AAAA,MACD;AAAA,IACD;AAAA;AAAA,EAEA,MAAM,WAAuC,WAAwC;AACpF,UAAM,YAAY,KAAK,iBAAiB,SAAS,SAAS;AAE1D,UAAM,aAAa,KAAK,iBAAiB,sBAAsB,WAAW,UAAU;AACpF,UAAM,sBAAsB,KAAK,iBAAiB,4BAA4B,WAAW,IAAI;AAAA,MAC5F,gBAAgB;AAAA,IACjB,CAAC;AACD,UAAM,mBAAmB,KAAK,iBAAiB,4BAA4B,WAAW,EAAE;AAGxF,UAAM,8BAA8B,oBAAoB,IAAI,KAAK;AACjE,UAAM,2BAA2B,uBAAuB;AACxD,UAAM,+BACL,yBAAyB,WAAW,GAAG,KACvC,yBAAyB,SAAS,IAAI,KACtC,yBAAyB,SAAS,YAAY;AAE/C,YAAQ,IAAI,qDAAqD;AAAA,MAChE;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA,kBAAkB;AAAA,IACnB,CAAC;AAED,QAAI,gCAAgC,CAAC,4BAA4B;AAChE,YAAM,IAAI;AAAA,QACT,yEAAyE,wBAAwB;AAAA,MAClG;AAAA,IACD;AAEA,QACC,2BAA2B,SAAS,IAAI,KACxC,2BAA2B,SAAS,YAAY,GAC/C;AACD,YAAM,IAAI;AAAA,QACT,qEAAqE,0BAA0B;AAAA,MAChG;AAAA,IACD;AAEA,QACC,+BACC,KAAC,sBAAW,0BAA0B,KACtC,KAAC,oBAAS,0BAA0B,EAAE,YAAY,IAClD;AACD,YAAM,IAAI;AAAA,QACT,wEAAwE,0BAA0B;AAAA,MACnG;AAAA,IACD;AAEA,YAAQ,IAAI,yDAAyD;AAAA,MACpE,OAAO;AAAA,MACP;AAAA,MACA,kBAAkB;AAAA,IACnB,CAAC;AAED,UAAM,QAAQ,IAAI,gBAAgB;AAAA,MACjC,OAAO;AAAA,MACP;AAAA,MACA,kBAAkB;AAAA,IACnB,CAAC;AAED,UAAM,YAAY,CAAC,IAAI,mCAAc,IAAI,CAAC;AAE1C,WAAO;AAAA,MACN,UAAU;AAAA,IACX;AAAA,EACD;AACD;","names":["aiMessage"]}
@@ -0,0 +1 @@
1
+ <svg fill="none" height="512" viewBox="0 0 512 512" width="512" xmlns="http://www.w3.org/2000/svg"><rect width="512" height="512" rx="80" fill="#0f172a"/><rect x="4" y="4" width="504" height="504" rx="76" stroke="#38bdf8" stroke-opacity=".2" stroke-width="8"/><path d="M160 176l-56 80 56 80" stroke="#38bdf8" stroke-width="28" stroke-linecap="round" stroke-linejoin="round"/><path d="M352 176l56 80-56 80" stroke="#38bdf8" stroke-width="28" stroke-linecap="round" stroke-linejoin="round"/><path d="M288 148l-64 216" stroke="#7dd3fc" stroke-width="24" stroke-linecap="round"/><circle cx="256" cy="256" r="120" stroke="#38bdf8" stroke-width="8" stroke-dasharray="16 12" opacity=".35"/></svg>
@@ -14,6 +14,7 @@
14
14
  {"name":"mistralCloudApi","displayName":"Mistral Cloud API","documentationUrl":"mistral","properties":[{"displayName":"API Key","name":"apiKey","type":"string","typeOptions":{"password":true},"required":true,"default":""}],"authenticate":{"type":"generic","properties":{"headers":{"Authorization":"=Bearer {{$credentials.apiKey}}"}}},"test":{"request":{"baseURL":"https://api.mistral.ai/v1","url":"/models","method":"GET"}},"supportedNodes":["embeddingsMistralCloud","lmChatMistralCloud"],"iconUrl":"icons/@n8n/n8n-nodes-langchain/dist/nodes/embeddings/EmbeddingsMistralCloud/mistral.svg"},
15
15
  {"name":"lemonadeApi","displayName":"Lemonade","documentationUrl":"lemonade","properties":[{"displayName":"Base URL","name":"baseUrl","required":true,"type":"string","default":"http://localhost:8000/api/v1"},{"displayName":"API Key","hint":"Optional API key for Lemonade server authentication. Not required for default Lemonade installation","name":"apiKey","type":"string","typeOptions":{"password":true},"default":"","required":false}],"test":{"request":{"baseURL":"={{ $credentials.baseUrl }}","url":"/models","method":"GET"}},"supportedNodes":["embeddingsLemonade","lmChatLemonade","lmLemonade"],"iconUrl":"icons/@n8n/n8n-nodes-langchain/dist/nodes/embeddings/EmbeddingsLemonade/lemonade.svg","authenticate":{}},
16
16
  {"name":"ollamaApi","displayName":"Ollama","documentationUrl":"ollama","properties":[{"displayName":"Base URL","name":"baseUrl","required":true,"type":"string","default":"http://localhost:11434"},{"displayName":"API Key","hint":"When using Ollama behind a proxy with authentication (such as Open WebUI), provide the Bearer token/API key here. This is not required for the default Ollama installation","name":"apiKey","type":"string","typeOptions":{"password":true},"default":"","required":false}],"authenticate":{"type":"generic","properties":{"headers":{"Authorization":"=Bearer {{$credentials.apiKey}}"}}},"test":{"request":{"baseURL":"={{ $credentials.baseUrl }}","url":"/api/tags","method":"GET"}},"supportedNodes":["ollama","embeddingsOllama","lmChatOllama","lmOllama"],"iconUrl":"icons/@n8n/n8n-nodes-langchain/dist/nodes/vendors/Ollama/ollama.svg"},
17
+ {"name":"nineRouterApi","displayName":"9Router","documentationUrl":"nineRouter","properties":[{"displayName":"API Key","name":"apiKey","type":"string","typeOptions":{"password":true},"required":false,"default":"","description":"Optional API key if REQUIRE_API_KEY is enabled on your 9Router instance"},{"displayName":"Base URL","name":"url","type":"string","default":"http://localhost:20128/api/v1","description":"Base URL of your 9Router instance"}],"authenticate":{"type":"generic","properties":{"headers":{"Authorization":"=Bearer {{$credentials.apiKey}}"}}},"test":{"request":{"baseURL":"={{ $credentials.url }}","url":"/models"}},"supportedNodes":["lmChat9Router"],"iconUrl":{"light":"icons/@n8n/n8n-nodes-langchain/dist/nodes/llms/LmChat9Router/9router.svg","dark":"icons/@n8n/n8n-nodes-langchain/dist/nodes/llms/LmChat9Router/9router.dark.svg"}},
17
18
  {"name":"openRouterApi","displayName":"OpenRouter","documentationUrl":"openrouter","properties":[{"displayName":"API Key","name":"apiKey","type":"string","typeOptions":{"password":true},"required":true,"default":""},{"displayName":"Base URL","name":"url","type":"hidden","default":"https://openrouter.ai/api/v1"}],"authenticate":{"type":"generic","properties":{"headers":{"Authorization":"=Bearer {{$credentials.apiKey}}"}}},"test":{"request":{"baseURL":"={{ $credentials.url }}","url":"/key"}},"supportedNodes":["lmChatOpenRouter"],"iconUrl":{"light":"icons/@n8n/n8n-nodes-langchain/dist/nodes/llms/LmChatOpenRouter/openrouter.svg","dark":"icons/@n8n/n8n-nodes-langchain/dist/nodes/llms/LmChatOpenRouter/openrouter.dark.svg"}},
18
19
  {"name":"pineconeApi","displayName":"PineconeApi","documentationUrl":"pinecone","properties":[{"displayName":"API Key","name":"apiKey","type":"string","typeOptions":{"password":true},"required":true,"default":""}],"authenticate":{"type":"generic","properties":{"headers":{"Api-Key":"={{$credentials.apiKey}}"}}},"test":{"request":{"baseURL":"https://api.pinecone.io/indexes","headers":{"accept":"application/json; charset=utf-8"}}},"supportedNodes":["vectorStorePinecone","vectorStorePineconeInsert","vectorStorePineconeLoad"],"iconUrl":{"light":"icons/@n8n/n8n-nodes-langchain/dist/nodes/vector_store/VectorStorePinecone/pinecone.svg","dark":"icons/@n8n/n8n-nodes-langchain/dist/nodes/vector_store/VectorStorePinecone/pinecone.dark.svg"}},
19
20
  {"name":"qdrantApi","displayName":"QdrantApi","documentationUrl":"https://docs.n8n.io/integrations/builtin/credentials/qdrant/","properties":[{"displayName":"API Key","name":"apiKey","type":"string","typeOptions":{"password":true},"required":false,"default":""},{"displayName":"Qdrant URL","name":"qdrantUrl","type":"string","required":true,"default":""}],"authenticate":{"type":"generic","properties":{"headers":{"api-key":"={{$credentials.apiKey}}"}}},"test":{"request":{"baseURL":"={{$credentials.qdrantUrl}}","url":"/collections"}},"supportedNodes":["vectorStoreQdrant"],"iconUrl":"icons/@n8n/n8n-nodes-langchain/dist/nodes/vector_store/VectorStoreQdrant/qdrant.svg"},