@atom8n/n8n-nodes-langchain 2.5.6 → 2.5.8

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (37) hide show
  1. package/dist/known/nodes.json +32 -0
  2. package/dist/methods/defined.json +3 -0
  3. package/dist/methods/referenced.json +3 -0
  4. package/dist/nodes/agents/OpenClawAgent/OpenClawAgent.node.js +62 -0
  5. package/dist/nodes/agents/OpenClawAgent/OpenClawAgent.node.js.map +1 -0
  6. package/dist/nodes/agents/OpenClawAgent/V1/OpenClawAgentV1.node.js +821 -0
  7. package/dist/nodes/agents/OpenClawAgent/V1/OpenClawAgentV1.node.js.map +1 -0
  8. package/dist/nodes/agents/OpenClawAgent/V2/OpenClawAgentV2.node.js +2059 -0
  9. package/dist/nodes/agents/OpenClawAgent/V2/OpenClawAgentV2.node.js.map +1 -0
  10. package/dist/nodes/agents/OpenClawAgent/channels/TelegramChannel/TelegramChannel.node.js +329 -0
  11. package/dist/nodes/agents/OpenClawAgent/channels/TelegramChannel/TelegramChannel.node.js.map +1 -0
  12. package/dist/nodes/agents/OpenClawAgent/channels/TelegramChannel/telegram-channel.svg +4 -0
  13. package/dist/nodes/agents/OpenClawAgent/channels/WhatsAppChannel/WhatsAppChannel.node.js +108 -0
  14. package/dist/nodes/agents/OpenClawAgent/channels/WhatsAppChannel/WhatsAppChannel.node.js.map +1 -0
  15. package/dist/nodes/agents/OpenClawAgent/channels/WhatsAppChannel/whatsapp-channel.svg +3 -0
  16. package/dist/nodes/agents/OpenClawAgent/mcpServers/OpenClawMcpServer/OpenClawMcpServer.node.js +228 -0
  17. package/dist/nodes/agents/OpenClawAgent/mcpServers/OpenClawMcpServer/OpenClawMcpServer.node.js.map +1 -0
  18. package/dist/nodes/agents/OpenClawAgent/mcpServers/OpenClawMcpServer/openclaw-mcp-server.svg +9 -0
  19. package/dist/nodes/agents/OpenClawAgent/models/OpenCodeFreeModel/OpenCodeFreeModel.node.js +97 -0
  20. package/dist/nodes/agents/OpenClawAgent/models/OpenCodeFreeModel/OpenCodeFreeModel.node.js.map +1 -0
  21. package/dist/nodes/agents/OpenClawAgent/models/OpenCodeFreeModel/opencode-free-model.svg +1 -0
  22. package/dist/nodes/agents/OpenClawAgent/openclaw.svg +8 -0
  23. package/dist/nodes/agents/OpenClawAgent/plugins/OpenClawPlugin/OpenClawPlugin.node.js +261 -0
  24. package/dist/nodes/agents/OpenClawAgent/plugins/OpenClawPlugin/OpenClawPlugin.node.js.map +1 -0
  25. package/dist/nodes/agents/OpenClawAgent/plugins/OpenClawPlugin/openclaw-plugin.svg +3 -0
  26. package/dist/nodes/llms/LmChat9Router/LmChat9Router.node.js +40 -3
  27. package/dist/nodes/llms/LmChat9Router/LmChat9Router.node.js.map +1 -1
  28. package/dist/nodes/llms/LmChatCodexCli/LmChatCodexCli.node.js +456 -0
  29. package/dist/nodes/llms/LmChatCodexCli/LmChatCodexCli.node.js.map +1 -0
  30. package/dist/nodes/llms/LmChatCodexCli/codexCli.svg +1 -0
  31. package/dist/nodes/llms/LmChatCursorAgent/LmChatCursorAgent.node.js +46 -6
  32. package/dist/nodes/llms/LmChatCursorAgent/LmChatCursorAgent.node.js.map +1 -1
  33. package/dist/nodes/llms/LmChatOpenCodeCli/LmChatOpenCodeCli.node.js +443 -0
  34. package/dist/nodes/llms/LmChatOpenCodeCli/LmChatOpenCodeCli.node.js.map +1 -0
  35. package/dist/nodes/llms/LmChatOpenCodeCli/openCodeCli.svg +1 -0
  36. package/dist/types/nodes.json +11 -2
  37. package/package.json +19 -11
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../../../../../../nodes/agents/OpenClawAgent/plugins/OpenClawPlugin/OpenClawPlugin.node.ts"],"sourcesContent":["import { existsSync, readFileSync } from 'fs';\nimport { join } from 'path';\n\nimport {\n\tNodeConnectionTypes,\n\tNodeOperationError,\n\ttype IDataObject,\n\ttype INodeType,\n\ttype INodeTypeDescription,\n\ttype ISupplyDataFunctions,\n\ttype SupplyData,\n} from 'n8n-workflow';\n\nimport type { PluginConfig } from '../../V2/OpenClawAgentV2.node';\n\nconst PLUGIN_MANIFEST_FILENAME = 'openclaw.plugin.json';\n\n/**\n * Attempt to read and parse an openclaw.plugin.json manifest from a directory.\n * Returns the parsed manifest object or undefined if not found / invalid.\n */\nfunction loadLocalPluginManifest(dirPath: string): PluginConfig['pluginManifest'] | undefined {\n\tconst manifestPath = join(dirPath, PLUGIN_MANIFEST_FILENAME);\n\tconsole.log('[OpenClawPlugin] Scanning for local manifest', {\n\t\tdirPath,\n\t\tmanifestPath,\n\t\texists: existsSync(manifestPath),\n\t});\n\n\tif (!existsSync(manifestPath)) {\n\t\tconsole.log('[OpenClawPlugin] No openclaw.plugin.json found at path', {\n\t\t\tdirPath,\n\t\t\tmanifestPath,\n\t\t});\n\t\treturn undefined;\n\t}\n\n\ttry {\n\t\tconst raw = readFileSync(manifestPath, 'utf8').trim();\n\t\tif (!raw) {\n\t\t\tconsole.log('[OpenClawPlugin] Manifest file is empty', { manifestPath });\n\t\t\treturn undefined;\n\t\t}\n\n\t\tconst parsed = JSON.parse(raw) as IDataObject;\n\t\tif (typeof parsed !== 'object' || parsed === null || Array.isArray(parsed)) {\n\t\t\tconsole.log('[OpenClawPlugin] Manifest is not a valid JSON object', {\n\t\t\t\tmanifestPath,\n\t\t\t\ttype: typeof parsed,\n\t\t\t});\n\t\t\treturn undefined;\n\t\t}\n\n\t\tconst manifest: PluginConfig['pluginManifest'] = {\n\t\t\tid: typeof parsed.id === 'string' ? parsed.id : undefined,\n\t\t\tname: typeof parsed.name === 'string' ? parsed.name : undefined,\n\t\t\tdescription: typeof parsed.description === 'string' ? 
parsed.description : undefined,\n\t\t\tversion: typeof parsed.version === 'string' ? parsed.version : undefined,\n\t\t\tproviders: Array.isArray(parsed.providers)\n\t\t\t\t? (parsed.providers as unknown[]).filter((p): p is string => typeof p === 'string')\n\t\t\t\t: undefined,\n\t\t\tchannels: Array.isArray(parsed.channels)\n\t\t\t\t? (parsed.channels as unknown[]).filter((c): c is string => typeof c === 'string')\n\t\t\t\t: undefined,\n\t\t};\n\n\t\tconsole.log('[OpenClawPlugin] Successfully loaded local manifest', {\n\t\t\tmanifestPath,\n\t\t\tid: manifest.id,\n\t\t\tname: manifest.name,\n\t\t\tversion: manifest.version,\n\t\t\tproviderCount: manifest.providers?.length ?? 0,\n\t\t\tchannelCount: manifest.channels?.length ?? 0,\n\t\t});\n\n\t\treturn manifest;\n\t} catch (error) {\n\t\tconsole.log('[OpenClawPlugin] Failed to parse manifest file', {\n\t\t\tmanifestPath,\n\t\t\terror: error instanceof Error ? error.message : String(error),\n\t\t});\n\t\treturn undefined;\n\t}\n}\n\n/**\n * OpenClaw Plugin sub-node for OpenClaw Agent.\n *\n * This node provides plugin configuration to the OpenClaw AI Agent\n * via the AiTool connection. It supports two plugin sources:\n *\n * - **Local**: scans a directory path for `openclaw.plugin.json` and\n * loads manifest info (id, name, providers, channels, etc.).\n * Uses `$workspace.__dirPath` by default to scan the workflow directory.\n *\n * - **Cloud**: references a plugin from ClawHub by package name\n * (e.g. 
\"openai\" or \"@scope/pkg\") and optional version.\n */\nexport class OpenClawPlugin implements INodeType {\n\tdescription: INodeTypeDescription = {\n\t\tdisplayName: 'OpenClaw Plugin',\n\t\tname: 'openClawPlugin',\n\t\ticon: 'file:openclaw-plugin.svg',\n\t\ticonColor: 'purple',\n\t\tgroup: ['transform'],\n\t\tversion: 1,\n\t\tdescription: 'Provides plugin configuration to an OpenClaw AI Agent (local or ClawHub)',\n\t\tdefaults: {\n\t\t\tname: 'OpenClaw Plugin',\n\t\t},\n\t\tcodex: {\n\t\t\talias: ['OpenClaw', 'Plugin', 'Extension', 'ClawHub'],\n\t\t\tcategories: ['AI'],\n\t\t\tsubcategories: {\n\t\t\t\tAI: ['Other'],\n\t\t\t},\n\t\t\tresources: {\n\t\t\t\tprimaryDocumentation: [\n\t\t\t\t\t{\n\t\t\t\t\t\turl: 'https://docs.openclaw.ai/plugins',\n\t\t\t\t\t},\n\t\t\t\t],\n\t\t\t},\n\t\t},\n\t\tinputs: [],\n\t\toutputs: [NodeConnectionTypes.AiTool],\n\t\toutputNames: ['Plugin'],\n\t\t// No credentials needed — local plugins are filesystem-based, cloud uses public ClawHub\n\t\tproperties: [\n\t\t\t{\n\t\t\t\tdisplayName:\n\t\t\t\t\t'Connect this node to an OpenClaw AI Agent to provide plugin configuration. 
Local plugins are scanned from a directory; Cloud plugins are loaded from ClawHub.',\n\t\t\t\tname: 'pluginNotice',\n\t\t\t\ttype: 'notice',\n\t\t\t\tdefault: '',\n\t\t\t},\n\t\t\t{\n\t\t\t\tdisplayName: 'Plugin Source',\n\t\t\t\tname: 'pluginSource',\n\t\t\t\ttype: 'options',\n\t\t\t\tdefault: 'local',\n\t\t\t\tnoDataExpression: true,\n\t\t\t\tdescription: 'Where to load the plugin from',\n\t\t\t\toptions: [\n\t\t\t\t\t{\n\t\t\t\t\t\tname: 'Local',\n\t\t\t\t\t\tvalue: 'local',\n\t\t\t\t\t\tdescription: 'Scan a directory for openclaw.plugin.json and load plugin info from it',\n\t\t\t\t\t},\n\t\t\t\t\t{\n\t\t\t\t\t\tname: 'Cloud (ClawHub)',\n\t\t\t\t\t\tvalue: 'cloud',\n\t\t\t\t\t\tdescription: 'Load a plugin from the ClawHub marketplace',\n\t\t\t\t\t},\n\t\t\t\t],\n\t\t\t},\n\t\t\t// ── Local source fields ──\n\t\t\t{\n\t\t\t\tdisplayName: 'Plugin Directory',\n\t\t\t\tname: 'pluginDirectory',\n\t\t\t\ttype: 'string',\n\t\t\t\trequired: true,\n\t\t\t\tdefault: '={{ $workspace.__dirPath }}',\n\t\t\t\tdescription:\n\t\t\t\t\t'Directory path to scan for openclaw.plugin.json. Supports expressions like {{ $workspace.__dirPath }}.',\n\t\t\t\tdisplayOptions: {\n\t\t\t\t\tshow: {\n\t\t\t\t\t\tpluginSource: ['local'],\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t},\n\t\t\t// ── Cloud source fields ──\n\t\t\t{\n\t\t\t\tdisplayName: 'Plugin ID',\n\t\t\t\tname: 'pluginId',\n\t\t\t\ttype: 'string',\n\t\t\t\trequired: true,\n\t\t\t\tdefault: '',\n\t\t\t\tplaceholder: 'e.g. openai, @scope/my-plugin',\n\t\t\t\tdescription: 'ClawHub plugin package name',\n\t\t\t\tdisplayOptions: {\n\t\t\t\t\tshow: {\n\t\t\t\t\t\tpluginSource: ['cloud'],\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t},\n\t\t\t{\n\t\t\t\tdisplayName: 'Version',\n\t\t\t\tname: 'pluginVersion',\n\t\t\t\ttype: 'string',\n\t\t\t\tdefault: '',\n\t\t\t\tplaceholder: 'latest',\n\t\t\t\tdescription: 'ClawHub plugin version. 
Leave empty to use the latest available version.',\n\t\t\t\tdisplayOptions: {\n\t\t\t\t\tshow: {\n\t\t\t\t\t\tpluginSource: ['cloud'],\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t},\n\t\t],\n\t};\n\n\tasync supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise<SupplyData> {\n\t\tconst node = this.getNode();\n\t\tconsole.log('[OpenClawPlugin] supplyData ENTRY', {\n\t\t\tnodeName: node.name,\n\t\t\tnodeId: node.id,\n\t\t\tnodeType: node.type,\n\t\t\titemIndex,\n\t\t\tparameterNames: Object.keys(node.parameters ?? {}),\n\t\t\trawParameters: JSON.stringify(node.parameters ?? {}).slice(0, 500),\n\t\t});\n\n\t\tconst pluginSource = this.getNodeParameter('pluginSource', itemIndex, 'local') as\n\t\t\t| 'local'\n\t\t\t| 'cloud';\n\n\t\tconsole.log('[OpenClawPlugin] supplyData called', {\n\t\t\titemIndex,\n\t\t\tpluginSource,\n\t\t});\n\n\t\tif (pluginSource === 'local') {\n\t\t\tconst rawPluginDirectory = this.getNodeParameter('pluginDirectory', itemIndex, '') as string;\n\t\t\tconst pluginDirectory = rawPluginDirectory.trim();\n\n\t\t\tconsole.log('[OpenClawPlugin] Local source: scanning directory', {\n\t\t\t\titemIndex,\n\t\t\t\trawPluginDirectory,\n\t\t\t\tpluginDirectory,\n\t\t\t\tisEmpty: !pluginDirectory,\n\t\t\t\tlength: pluginDirectory.length,\n\t\t\t});\n\n\t\t\tif (!pluginDirectory) {\n\t\t\t\tthrow new NodeOperationError(\n\t\t\t\t\tthis.getNode(),\n\t\t\t\t\t'Plugin Directory must not be empty for local plugin source',\n\t\t\t\t\t{ itemIndex },\n\t\t\t\t);\n\t\t\t}\n\n\t\t\t// Scan the directory for openclaw.plugin.json\n\t\t\tconst pluginManifest = loadLocalPluginManifest(pluginDirectory);\n\n\t\t\tconst pluginConfig: PluginConfig = {\n\t\t\t\tpluginSource: 'local',\n\t\t\t\tpluginPath: pluginDirectory,\n\t\t\t\tpluginManifest,\n\t\t\t};\n\n\t\t\tconsole.log('[OpenClawPlugin] Returning local plugin config', {\n\t\t\t\tpluginPath: pluginConfig.pluginPath,\n\t\t\t\thasManifest: !!pluginManifest,\n\t\t\t\tmanifestId: pluginManifest?.id,\n\t\t\t\tmanifestName: 
pluginManifest?.name,\n\t\t\t\tmanifestVersion: pluginManifest?.version,\n\t\t\t});\n\n\t\t\treturn { response: pluginConfig };\n\t\t}\n\n\t\t// Cloud source\n\t\tconst pluginId = (this.getNodeParameter('pluginId', itemIndex, '') as string).trim();\n\t\tconst pluginVersion =\n\t\t\t(this.getNodeParameter('pluginVersion', itemIndex, '') as string).trim() || undefined;\n\n\t\tconsole.log('[OpenClawPlugin] Cloud source: ClawHub plugin', {\n\t\t\titemIndex,\n\t\t\tpluginId,\n\t\t\tpluginVersion: pluginVersion ?? '(latest)',\n\t\t});\n\n\t\tif (!pluginId) {\n\t\t\tthrow new NodeOperationError(\n\t\t\t\tthis.getNode(),\n\t\t\t\t'Plugin ID must not be empty for cloud plugin source',\n\t\t\t\t{ itemIndex },\n\t\t\t);\n\t\t}\n\n\t\tconst pluginConfig: PluginConfig = {\n\t\t\tpluginSource: 'cloud',\n\t\t\tpluginId,\n\t\t\tpluginVersion,\n\t\t};\n\n\t\tconsole.log('[OpenClawPlugin] Returning cloud plugin config', {\n\t\t\tpluginId: pluginConfig.pluginId,\n\t\t\tpluginVersion: pluginConfig.pluginVersion ?? 
'(latest)',\n\t\t});\n\n\t\treturn { response: pluginConfig };\n\t}\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,gBAAyC;AACzC,kBAAqB;AAErB,0BAQO;AAIP,MAAM,2BAA2B;AAMjC,SAAS,wBAAwB,SAA6D;AAC7F,QAAM,mBAAe,kBAAK,SAAS,wBAAwB;AAC3D,UAAQ,IAAI,gDAAgD;AAAA,IAC3D;AAAA,IACA;AAAA,IACA,YAAQ,sBAAW,YAAY;AAAA,EAChC,CAAC;AAED,MAAI,KAAC,sBAAW,YAAY,GAAG;AAC9B,YAAQ,IAAI,0DAA0D;AAAA,MACrE;AAAA,MACA;AAAA,IACD,CAAC;AACD,WAAO;AAAA,EACR;AAEA,MAAI;AACH,UAAM,UAAM,wBAAa,cAAc,MAAM,EAAE,KAAK;AACpD,QAAI,CAAC,KAAK;AACT,cAAQ,IAAI,2CAA2C,EAAE,aAAa,CAAC;AACvE,aAAO;AAAA,IACR;AAEA,UAAM,SAAS,KAAK,MAAM,GAAG;AAC7B,QAAI,OAAO,WAAW,YAAY,WAAW,QAAQ,MAAM,QAAQ,MAAM,GAAG;AAC3E,cAAQ,IAAI,wDAAwD;AAAA,QACnE;AAAA,QACA,MAAM,OAAO;AAAA,MACd,CAAC;AACD,aAAO;AAAA,IACR;AAEA,UAAM,WAA2C;AAAA,MAChD,IAAI,OAAO,OAAO,OAAO,WAAW,OAAO,KAAK;AAAA,MAChD,MAAM,OAAO,OAAO,SAAS,WAAW,OAAO,OAAO;AAAA,MACtD,aAAa,OAAO,OAAO,gBAAgB,WAAW,OAAO,cAAc;AAAA,MAC3E,SAAS,OAAO,OAAO,YAAY,WAAW,OAAO,UAAU;AAAA,MAC/D,WAAW,MAAM,QAAQ,OAAO,SAAS,IACrC,OAAO,UAAwB,OAAO,CAAC,MAAmB,OAAO,MAAM,QAAQ,IAChF;AAAA,MACH,UAAU,MAAM,QAAQ,OAAO,QAAQ,IACnC,OAAO,SAAuB,OAAO,CAAC,MAAmB,OAAO,MAAM,QAAQ,IAC/E;AAAA,IACJ;AAEA,YAAQ,IAAI,uDAAuD;AAAA,MAClE;AAAA,MACA,IAAI,SAAS;AAAA,MACb,MAAM,SAAS;AAAA,MACf,SAAS,SAAS;AAAA,MAClB,eAAe,SAAS,WAAW,UAAU;AAAA,MAC7C,cAAc,SAAS,UAAU,UAAU;AAAA,IAC5C,CAAC;AAED,WAAO;AAAA,EACR,SAAS,OAAO;AACf,YAAQ,IAAI,kDAAkD;AAAA,MAC7D;AAAA,MACA,OAAO,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;AAAA,IAC7D,CAAC;AACD,WAAO;AAAA,EACR;AACD;AAeO,MAAM,eAAoC;AAAA,EAA1C;AACN,uBAAoC;AAAA,MACnC,aAAa;AAAA,MACb,MAAM;AAAA,MACN,MAAM;AAAA,MACN,WAAW;AAAA,MACX,OAAO,CAAC,WAAW;AAAA,MACnB,SAAS;AAAA,MACT,aAAa;AAAA,MACb,UAAU;AAAA,QACT,MAAM;AAAA,MACP;AAAA,MACA,OAAO;AAAA,QACN,OAAO,CAAC,YAAY,UAAU,aAAa,SAAS;AAAA,QACpD,YAAY,CAAC,IAAI;AAAA,QACjB,eAAe;AAAA,UACd,IAAI,CAAC,OAAO;AAAA,QACb;AAAA,QACA,WAAW;AAAA,UACV,sBAAsB;AAAA,YACrB;AAAA,cACC,KAAK;AAAA,YACN;AAAA,UACD;AAAA,QACD;AAAA,MACD;AAAA,MACA,QAAQ,CAAC;AAAA,MACT,SAAS,CAAC,wCAAoB,MAAM;AAAA,MACpC,aAAa,CAAC,QAAQ;AAAA;AAAA,MAEtB,YAAY;AAAA,QACX;AAAA,UAC
C,aACC;AAAA,UACD,MAAM;AAAA,UACN,MAAM;AAAA,UACN,SAAS;AAAA,QACV;AAAA,QACA;AAAA,UACC,aAAa;AAAA,UACb,MAAM;AAAA,UACN,MAAM;AAAA,UACN,SAAS;AAAA,UACT,kBAAkB;AAAA,UAClB,aAAa;AAAA,UACb,SAAS;AAAA,YACR;AAAA,cACC,MAAM;AAAA,cACN,OAAO;AAAA,cACP,aAAa;AAAA,YACd;AAAA,YACA;AAAA,cACC,MAAM;AAAA,cACN,OAAO;AAAA,cACP,aAAa;AAAA,YACd;AAAA,UACD;AAAA,QACD;AAAA;AAAA,QAEA;AAAA,UACC,aAAa;AAAA,UACb,MAAM;AAAA,UACN,MAAM;AAAA,UACN,UAAU;AAAA,UACV,SAAS;AAAA,UACT,aACC;AAAA,UACD,gBAAgB;AAAA,YACf,MAAM;AAAA,cACL,cAAc,CAAC,OAAO;AAAA,YACvB;AAAA,UACD;AAAA,QACD;AAAA;AAAA,QAEA;AAAA,UACC,aAAa;AAAA,UACb,MAAM;AAAA,UACN,MAAM;AAAA,UACN,UAAU;AAAA,UACV,SAAS;AAAA,UACT,aAAa;AAAA,UACb,aAAa;AAAA,UACb,gBAAgB;AAAA,YACf,MAAM;AAAA,cACL,cAAc,CAAC,OAAO;AAAA,YACvB;AAAA,UACD;AAAA,QACD;AAAA,QACA;AAAA,UACC,aAAa;AAAA,UACb,MAAM;AAAA,UACN,MAAM;AAAA,UACN,SAAS;AAAA,UACT,aAAa;AAAA,UACb,aAAa;AAAA,UACb,gBAAgB;AAAA,YACf,MAAM;AAAA,cACL,cAAc,CAAC,OAAO;AAAA,YACvB;AAAA,UACD;AAAA,QACD;AAAA,MACD;AAAA,IACD;AAAA;AAAA,EAEA,MAAM,WAAuC,WAAwC;AACpF,UAAM,OAAO,KAAK,QAAQ;AAC1B,YAAQ,IAAI,qCAAqC;AAAA,MAChD,UAAU,KAAK;AAAA,MACf,QAAQ,KAAK;AAAA,MACb,UAAU,KAAK;AAAA,MACf;AAAA,MACA,gBAAgB,OAAO,KAAK,KAAK,cAAc,CAAC,CAAC;AAAA,MACjD,eAAe,KAAK,UAAU,KAAK,cAAc,CAAC,CAAC,EAAE,MAAM,GAAG,GAAG;AAAA,IAClE,CAAC;AAED,UAAM,eAAe,KAAK,iBAAiB,gBAAgB,WAAW,OAAO;AAI7E,YAAQ,IAAI,sCAAsC;AAAA,MACjD;AAAA,MACA;AAAA,IACD,CAAC;AAED,QAAI,iBAAiB,SAAS;AAC7B,YAAM,qBAAqB,KAAK,iBAAiB,mBAAmB,WAAW,EAAE;AACjF,YAAM,kBAAkB,mBAAmB,KAAK;AAEhD,cAAQ,IAAI,qDAAqD;AAAA,QAChE;AAAA,QACA;AAAA,QACA;AAAA,QACA,SAAS,CAAC;AAAA,QACV,QAAQ,gBAAgB;AAAA,MACzB,CAAC;AAED,UAAI,CAAC,iBAAiB;AACrB,cAAM,IAAI;AAAA,UACT,KAAK,QAAQ;AAAA,UACb;AAAA,UACA,EAAE,UAAU;AAAA,QACb;AAAA,MACD;AAGA,YAAM,iBAAiB,wBAAwB,eAAe;AAE9D,YAAMA,gBAA6B;AAAA,QAClC,cAAc;AAAA,QACd,YAAY;AAAA,QACZ;AAAA,MACD;AAEA,cAAQ,IAAI,kDAAkD;AAAA,QAC7D,YAAYA,cAAa;AAAA,QACzB,aAAa,CAAC,CAAC;AAAA,QACf,YAAY,gBAAgB;AAAA,QAC5B,cAAc,gBAAgB;AAAA,QAC9B,iBAAiB,gBAAgB;AAAA,MAClC,CAAC;AAED,aAAO,EAAE,UAAUA,cAAa;AAAA,IACjC;AAGA,UAAM,WAAY,KAAK,iBAAiB,YAAY,WAAW,EAAE,EAAa,KAAK;AACnF,UAAM,gBA
CJ,KAAK,iBAAiB,iBAAiB,WAAW,EAAE,EAAa,KAAK,KAAK;AAE7E,YAAQ,IAAI,iDAAiD;AAAA,MAC5D;AAAA,MACA;AAAA,MACA,eAAe,iBAAiB;AAAA,IACjC,CAAC;AAED,QAAI,CAAC,UAAU;AACd,YAAM,IAAI;AAAA,QACT,KAAK,QAAQ;AAAA,QACb;AAAA,QACA,EAAE,UAAU;AAAA,MACb;AAAA,IACD;AAEA,UAAM,eAA6B;AAAA,MAClC,cAAc;AAAA,MACd;AAAA,MACA;AAAA,IACD;AAEA,YAAQ,IAAI,kDAAkD;AAAA,MAC7D,UAAU,aAAa;AAAA,MACvB,eAAe,aAAa,iBAAiB;AAAA,IAC9C,CAAC;AAED,WAAO,EAAE,UAAU,aAAa;AAAA,EACjC;AACD;","names":["pluginConfig"]}
@@ -0,0 +1,3 @@
1
+ <svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round">
2
+ <path d="M14.7 6.3a1 1 0 0 0 0 1.4l1.6 1.6a1 1 0 0 0 1.4 0l3.77-3.77a6 6 0 0 1-7.94 7.94l-6.91 6.91a2.12 2.12 0 0 1-3-3l6.91-6.91a6 6 0 0 1 7.94-7.94l-3.76 3.76z"/>
3
+ </svg>
@@ -28,6 +28,22 @@ var import_sharedFields = require("../../../utils/sharedFields");
28
28
  var import_error_handling = require("../../vendors/OpenAi/helpers/error-handling");
29
29
  var import_n8nLlmFailedAttemptHandler = require("../n8nLlmFailedAttemptHandler");
30
30
  var import_N8nLlmTracing = require("../N8nLlmTracing");
31
+ const NINE_ROUTER_OPENCLAW_PROVIDER = "9router";
32
+ const NINE_ROUTER_OPENCLAW_MODEL_SOURCE = "9router";
33
+ const NINE_ROUTER_OPENCLAW_API = "openai-completions";
34
+ const NINE_ROUTER_DEFAULT_BASE_URL = "http://localhost:20128/api/v1";
35
+ function normalizeOptionalString(value) {
36
+ if (typeof value !== "string") {
37
+ return void 0;
38
+ }
39
+ const trimmed = value.trim();
40
+ return trimmed || void 0;
41
+ }
42
+ function toOpenClawNineRouterModelId(modelName) {
43
+ const normalizedModelName = normalizeOptionalString(modelName) ?? "auto";
44
+ const providerPrefix = `${NINE_ROUTER_OPENCLAW_PROVIDER}/`;
45
+ return normalizedModelName.toLowerCase().startsWith(providerPrefix) ? normalizedModelName : `${NINE_ROUTER_OPENCLAW_PROVIDER}/${normalizedModelName}`;
46
+ }
31
47
  class LmChat9Router {
32
48
  constructor() {
33
49
  this.description = {
@@ -84,7 +100,7 @@ class LmChat9Router {
84
100
  displayName: "Model",
85
101
  name: "model",
86
102
  type: "options",
87
- description: 'The model which will generate the completion. <a href="http://localhost:20128">Learn more</a>.',
103
+ description: 'The model which will generate the completion. <a href="https://github.com/9router/9router">Learn more</a>.',
88
104
  typeOptions: {
89
105
  loadOptions: {
90
106
  routing: {
@@ -216,11 +232,13 @@ class LmChat9Router {
216
232
  async supplyData(itemIndex) {
217
233
  const credentials = await this.getCredentials("nineRouterApi");
218
234
  const modelName = this.getNodeParameter("model", itemIndex);
235
+ const baseURL = normalizeOptionalString(credentials.url) ?? NINE_ROUTER_DEFAULT_BASE_URL;
236
+ const hasApiKey = normalizeOptionalString(credentials.apiKey) !== void 0;
219
237
  const options = this.getNodeParameter("options", itemIndex, {});
220
238
  const configuration = {
221
- baseURL: credentials.url,
239
+ baseURL,
222
240
  fetchOptions: {
223
- dispatcher: (0, import_httpProxyAgent.getProxyAgent)(credentials.url)
241
+ dispatcher: (0, import_httpProxyAgent.getProxyAgent)(baseURL)
224
242
  }
225
243
  };
226
244
  const model = new import_openai.ChatOpenAI({
@@ -236,6 +254,25 @@ class LmChat9Router {
236
254
  } : void 0,
237
255
  onFailedAttempt: (0, import_n8nLlmFailedAttemptHandler.makeN8nLlmFailedAttemptHandler)(this, import_error_handling.openAiFailedAttemptHandler)
238
256
  });
257
+ const openClawModelConfig = {
258
+ modelId: toOpenClawNineRouterModelId(modelName),
259
+ modelSource: NINE_ROUTER_OPENCLAW_MODEL_SOURCE,
260
+ extra: {
261
+ baseUrl: baseURL,
262
+ api: NINE_ROUTER_OPENCLAW_API,
263
+ hasApiKey
264
+ }
265
+ };
266
+ Object.assign(model, openClawModelConfig);
267
+ console.log("[LmChat9Router] returning model with OpenClaw metadata", {
268
+ itemIndex,
269
+ modelName,
270
+ openClawModelId: openClawModelConfig.modelId,
271
+ modelSource: openClawModelConfig.modelSource,
272
+ baseUrl: baseURL,
273
+ api: NINE_ROUTER_OPENCLAW_API,
274
+ hasApiKey
275
+ });
239
276
  return {
240
277
  response: model
241
278
  };
@@ -1 +1 @@
1
- {"version":3,"sources":["../../../../nodes/llms/LmChat9Router/LmChat9Router.node.ts"],"sourcesContent":["import { ChatOpenAI, type ClientOptions } from '@langchain/openai';\nimport {\n\tNodeConnectionTypes,\n\ttype INodeType,\n\ttype INodeTypeDescription,\n\ttype ISupplyDataFunctions,\n\ttype SupplyData,\n} from 'n8n-workflow';\n\nimport { getProxyAgent } from '@utils/httpProxyAgent';\nimport { getConnectionHintNoticeField } from '@utils/sharedFields';\n\nimport type { OpenAICompatibleCredential } from '../../../types/types';\nimport { openAiFailedAttemptHandler } from '../../vendors/OpenAi/helpers/error-handling';\nimport { makeN8nLlmFailedAttemptHandler } from '../n8nLlmFailedAttemptHandler';\nimport { N8nLlmTracing } from '../N8nLlmTracing';\n\nexport class LmChat9Router implements INodeType {\n\tdescription: INodeTypeDescription = {\n\t\tdisplayName: '9Router Chat Model',\n\t\tname: 'lmChat9Router',\n\t\ticon: { light: 'file:9router.svg', dark: 'file:9router.dark.svg' },\n\t\tgroup: ['transform'],\n\t\tversion: [1],\n\t\tdescription: 'For advanced usage with an AI chain',\n\t\tdefaults: {\n\t\t\tname: '9Router Chat Model',\n\t\t},\n\t\tcodex: {\n\t\t\tcategories: ['AI'],\n\t\t\tsubcategories: {\n\t\t\t\tAI: ['Language Models', 'Root Nodes'],\n\t\t\t\t'Language Models': ['Chat Models (Recommended)'],\n\t\t\t},\n\t\t\tresources: {\n\t\t\t\tprimaryDocumentation: [\n\t\t\t\t\t{\n\t\t\t\t\t\turl: 'https://github.com/9router/9router',\n\t\t\t\t\t},\n\t\t\t\t],\n\t\t\t},\n\t\t},\n\n\t\tinputs: [],\n\n\t\toutputs: [NodeConnectionTypes.AiLanguageModel],\n\t\toutputNames: ['Model'],\n\t\tcredentials: [\n\t\t\t{\n\t\t\t\tname: 'nineRouterApi',\n\t\t\t\trequired: true,\n\t\t\t},\n\t\t],\n\t\trequestDefaults: {\n\t\t\tignoreHttpStatusErrors: true,\n\t\t\tbaseURL: '={{ $credentials?.url }}',\n\t\t},\n\t\tproperties: [\n\t\t\tgetConnectionHintNoticeField([NodeConnectionTypes.AiChain, NodeConnectionTypes.AiAgent]),\n\t\t\t{\n\t\t\t\tdisplayName:\n\t\t\t\t\t'If using JSON 
response format, you must include word \"json\" in the prompt in your chain or agent. Also, make sure to select latest models released post November 2023.',\n\t\t\t\tname: 'notice',\n\t\t\t\ttype: 'notice',\n\t\t\t\tdefault: '',\n\t\t\t\tdisplayOptions: {\n\t\t\t\t\tshow: {\n\t\t\t\t\t\t'/options.responseFormat': ['json_object'],\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t},\n\t\t\t{\n\t\t\t\tdisplayName: 'Model',\n\t\t\t\tname: 'model',\n\t\t\t\ttype: 'options',\n\t\t\t\tdescription:\n\t\t\t\t\t'The model which will generate the completion. <a href=\"http://localhost:20128\">Learn more</a>.',\n\t\t\t\ttypeOptions: {\n\t\t\t\t\tloadOptions: {\n\t\t\t\t\t\trouting: {\n\t\t\t\t\t\t\trequest: {\n\t\t\t\t\t\t\t\tmethod: 'GET',\n\t\t\t\t\t\t\t\turl: '/models',\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\toutput: {\n\t\t\t\t\t\t\t\tpostReceive: [\n\t\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\ttype: 'rootProperty',\n\t\t\t\t\t\t\t\t\t\tproperties: {\n\t\t\t\t\t\t\t\t\t\t\tproperty: 'data',\n\t\t\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\ttype: 'setKeyValue',\n\t\t\t\t\t\t\t\t\t\tproperties: {\n\t\t\t\t\t\t\t\t\t\t\tname: '={{$responseItem.id}}',\n\t\t\t\t\t\t\t\t\t\t\tvalue: '={{$responseItem.id}}',\n\t\t\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\ttype: 'sort',\n\t\t\t\t\t\t\t\t\t\tproperties: {\n\t\t\t\t\t\t\t\t\t\t\tkey: 'name',\n\t\t\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t],\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t\trouting: {\n\t\t\t\t\tsend: {\n\t\t\t\t\t\ttype: 'body',\n\t\t\t\t\t\tproperty: 'model',\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t\tdefault: 'openai/gpt-4o',\n\t\t\t},\n\t\t\t{\n\t\t\t\tdisplayName: 'Options',\n\t\t\t\tname: 'options',\n\t\t\t\tplaceholder: 'Add Option',\n\t\t\t\tdescription: 'Additional options to add',\n\t\t\t\ttype: 'collection',\n\t\t\t\tdefault: {},\n\t\t\t\toptions: [\n\t\t\t\t\t{\n\t\t\t\t\t\tdisplayName: 'Frequency Penalty',\n\t\t\t\t\t\tname: 
'frequencyPenalty',\n\t\t\t\t\t\tdefault: 0,\n\t\t\t\t\t\ttypeOptions: { maxValue: 2, minValue: -2, numberPrecision: 1 },\n\t\t\t\t\t\tdescription:\n\t\t\t\t\t\t\t\"Positive values penalize new tokens based on their existing frequency in the text so far, decreasing the model's likelihood to repeat the same line verbatim\",\n\t\t\t\t\t\ttype: 'number',\n\t\t\t\t\t},\n\t\t\t\t\t{\n\t\t\t\t\t\tdisplayName: 'Maximum Number of Tokens',\n\t\t\t\t\t\tname: 'maxTokens',\n\t\t\t\t\t\tdefault: -1,\n\t\t\t\t\t\tdescription:\n\t\t\t\t\t\t\t'The maximum number of tokens to generate in the completion. Most models have a context length of 2048 tokens (except for the newest models, which support 32,768).',\n\t\t\t\t\t\ttype: 'number',\n\t\t\t\t\t\ttypeOptions: {\n\t\t\t\t\t\t\tmaxValue: 32768,\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t\t{\n\t\t\t\t\t\tdisplayName: 'Response Format',\n\t\t\t\t\t\tname: 'responseFormat',\n\t\t\t\t\t\tdefault: 'text',\n\t\t\t\t\t\ttype: 'options',\n\t\t\t\t\t\toptions: [\n\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\tname: 'Text',\n\t\t\t\t\t\t\t\tvalue: 'text',\n\t\t\t\t\t\t\t\tdescription: 'Regular text response',\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\tname: 'JSON',\n\t\t\t\t\t\t\t\tvalue: 'json_object',\n\t\t\t\t\t\t\t\tdescription:\n\t\t\t\t\t\t\t\t\t'Enables JSON mode, which should guarantee the message the model generates is valid JSON',\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t],\n\t\t\t\t\t},\n\t\t\t\t\t{\n\t\t\t\t\t\tdisplayName: 'Presence Penalty',\n\t\t\t\t\t\tname: 'presencePenalty',\n\t\t\t\t\t\tdefault: 0,\n\t\t\t\t\t\ttypeOptions: { maxValue: 2, minValue: -2, numberPrecision: 1 },\n\t\t\t\t\t\tdescription:\n\t\t\t\t\t\t\t\"Positive values penalize new tokens based on whether they appear in the text so far, increasing the model's likelihood to talk about new topics\",\n\t\t\t\t\t\ttype: 'number',\n\t\t\t\t\t},\n\t\t\t\t\t{\n\t\t\t\t\t\tdisplayName: 'Sampling Temperature',\n\t\t\t\t\t\tname: 'temperature',\n\t\t\t\t\t\tdefault: 
0.7,\n\t\t\t\t\t\ttypeOptions: { maxValue: 2, minValue: 0, numberPrecision: 1 },\n\t\t\t\t\t\tdescription:\n\t\t\t\t\t\t\t'Controls randomness: Lowering results in less random completions. As the temperature approaches zero, the model will become deterministic and repetitive.',\n\t\t\t\t\t\ttype: 'number',\n\t\t\t\t\t},\n\t\t\t\t\t{\n\t\t\t\t\t\tdisplayName: 'Timeout',\n\t\t\t\t\t\tname: 'timeout',\n\t\t\t\t\t\tdefault: 360000,\n\t\t\t\t\t\tdescription: 'Maximum amount of time a request is allowed to take in milliseconds',\n\t\t\t\t\t\ttype: 'number',\n\t\t\t\t\t},\n\t\t\t\t\t{\n\t\t\t\t\t\tdisplayName: 'Max Retries',\n\t\t\t\t\t\tname: 'maxRetries',\n\t\t\t\t\t\tdefault: 2,\n\t\t\t\t\t\tdescription: 'Maximum number of retries to attempt',\n\t\t\t\t\t\ttype: 'number',\n\t\t\t\t\t},\n\t\t\t\t\t{\n\t\t\t\t\t\tdisplayName: 'Top P',\n\t\t\t\t\t\tname: 'topP',\n\t\t\t\t\t\tdefault: 1,\n\t\t\t\t\t\ttypeOptions: { maxValue: 1, minValue: 0, numberPrecision: 1 },\n\t\t\t\t\t\tdescription:\n\t\t\t\t\t\t\t'Controls diversity via nucleus sampling: 0.5 means half of all likelihood-weighted options are considered. 
We generally recommend altering this or temperature but not both.',\n\t\t\t\t\t\ttype: 'number',\n\t\t\t\t\t},\n\t\t\t\t],\n\t\t\t},\n\t\t],\n\t};\n\n\tasync supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise<SupplyData> {\n\t\tconst credentials = await this.getCredentials<OpenAICompatibleCredential>('nineRouterApi');\n\n\t\tconst modelName = this.getNodeParameter('model', itemIndex) as string;\n\n\t\tconst options = this.getNodeParameter('options', itemIndex, {}) as {\n\t\t\tfrequencyPenalty?: number;\n\t\t\tmaxTokens?: number;\n\t\t\tmaxRetries: number;\n\t\t\ttimeout: number;\n\t\t\tpresencePenalty?: number;\n\t\t\ttemperature?: number;\n\t\t\ttopP?: number;\n\t\t\tresponseFormat?: 'text' | 'json_object';\n\t\t};\n\n\t\tconst configuration: ClientOptions = {\n\t\t\tbaseURL: credentials.url,\n\t\t\tfetchOptions: {\n\t\t\t\tdispatcher: getProxyAgent(credentials.url),\n\t\t\t},\n\t\t};\n\n\t\tconst model = new ChatOpenAI({\n\t\t\tapiKey: credentials.apiKey || 'no-key',\n\t\t\tmodel: modelName,\n\t\t\t...options,\n\t\t\ttimeout: options.timeout ?? 60000,\n\t\t\tmaxRetries: options.maxRetries ?? 2,\n\t\t\tconfiguration,\n\t\t\tcallbacks: [new N8nLlmTracing(this)],\n\t\t\tmodelKwargs: options.responseFormat\n\t\t\t\t? 
{\n\t\t\t\t\t\tresponse_format: { type: options.responseFormat },\n\t\t\t\t\t}\n\t\t\t\t: undefined,\n\t\t\tonFailedAttempt: makeN8nLlmFailedAttemptHandler(this, openAiFailedAttemptHandler),\n\t\t});\n\n\t\treturn {\n\t\t\tresponse: model,\n\t\t};\n\t}\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,oBAA+C;AAC/C,0BAMO;AAEP,4BAA8B;AAC9B,0BAA6C;AAG7C,4BAA2C;AAC3C,wCAA+C;AAC/C,2BAA8B;AAEvB,MAAM,cAAmC;AAAA,EAAzC;AACN,uBAAoC;AAAA,MACnC,aAAa;AAAA,MACb,MAAM;AAAA,MACN,MAAM,EAAE,OAAO,oBAAoB,MAAM,wBAAwB;AAAA,MACjE,OAAO,CAAC,WAAW;AAAA,MACnB,SAAS,CAAC,CAAC;AAAA,MACX,aAAa;AAAA,MACb,UAAU;AAAA,QACT,MAAM;AAAA,MACP;AAAA,MACA,OAAO;AAAA,QACN,YAAY,CAAC,IAAI;AAAA,QACjB,eAAe;AAAA,UACd,IAAI,CAAC,mBAAmB,YAAY;AAAA,UACpC,mBAAmB,CAAC,2BAA2B;AAAA,QAChD;AAAA,QACA,WAAW;AAAA,UACV,sBAAsB;AAAA,YACrB;AAAA,cACC,KAAK;AAAA,YACN;AAAA,UACD;AAAA,QACD;AAAA,MACD;AAAA,MAEA,QAAQ,CAAC;AAAA,MAET,SAAS,CAAC,wCAAoB,eAAe;AAAA,MAC7C,aAAa,CAAC,OAAO;AAAA,MACrB,aAAa;AAAA,QACZ;AAAA,UACC,MAAM;AAAA,UACN,UAAU;AAAA,QACX;AAAA,MACD;AAAA,MACA,iBAAiB;AAAA,QAChB,wBAAwB;AAAA,QACxB,SAAS;AAAA,MACV;AAAA,MACA,YAAY;AAAA,YACX,kDAA6B,CAAC,wCAAoB,SAAS,wCAAoB,OAAO,CAAC;AAAA,QACvF;AAAA,UACC,aACC;AAAA,UACD,MAAM;AAAA,UACN,MAAM;AAAA,UACN,SAAS;AAAA,UACT,gBAAgB;AAAA,YACf,MAAM;AAAA,cACL,2BAA2B,CAAC,aAAa;AAAA,YAC1C;AAAA,UACD;AAAA,QACD;AAAA,QACA;AAAA,UACC,aAAa;AAAA,UACb,MAAM;AAAA,UACN,MAAM;AAAA,UACN,aACC;AAAA,UACD,aAAa;AAAA,YACZ,aAAa;AAAA,cACZ,SAAS;AAAA,gBACR,SAAS;AAAA,kBACR,QAAQ;AAAA,kBACR,KAAK;AAAA,gBACN;AAAA,gBACA,QAAQ;AAAA,kBACP,aAAa;AAAA,oBACZ;AAAA,sBACC,MAAM;AAAA,sBACN,YAAY;AAAA,wBACX,UAAU;AAAA,sBACX;AAAA,oBACD;AAAA,oBACA;AAAA,sBACC,MAAM;AAAA,sBACN,YAAY;AAAA,wBACX,MAAM;AAAA,wBACN,OAAO;AAAA,sBACR;AAAA,oBACD;AAAA,oBACA;AAAA,sBACC,MAAM;AAAA,sBACN,YAAY;AAAA,wBACX,KAAK;AAAA,sBACN;AAAA,oBACD;AAAA,kBACD;AAAA,gBACD;AAAA,cACD;AAAA,YACD;AAAA,UACD;AAAA,UACA,SAAS;AAAA,YACR,MAAM;AAAA,cACL,MAAM;AAAA,cACN,UAAU;AAAA,YACX;AAAA,UACD;AAAA,UACA,SAAS;AAAA,QACV;AAAA,QACA;AAAA,UACC,aAAa;AAAA,UACb,MAAM;AAAA,UACN,aAAa;AAAA,UACb,aAAa;AAAA,UACb,MAAM
;AAAA,UACN,SAAS,CAAC;AAAA,UACV,SAAS;AAAA,YACR;AAAA,cACC,aAAa;AAAA,cACb,MAAM;AAAA,cACN,SAAS;AAAA,cACT,aAAa,EAAE,UAAU,GAAG,UAAU,IAAI,iBAAiB,EAAE;AAAA,cAC7D,aACC;AAAA,cACD,MAAM;AAAA,YACP;AAAA,YACA;AAAA,cACC,aAAa;AAAA,cACb,MAAM;AAAA,cACN,SAAS;AAAA,cACT,aACC;AAAA,cACD,MAAM;AAAA,cACN,aAAa;AAAA,gBACZ,UAAU;AAAA,cACX;AAAA,YACD;AAAA,YACA;AAAA,cACC,aAAa;AAAA,cACb,MAAM;AAAA,cACN,SAAS;AAAA,cACT,MAAM;AAAA,cACN,SAAS;AAAA,gBACR;AAAA,kBACC,MAAM;AAAA,kBACN,OAAO;AAAA,kBACP,aAAa;AAAA,gBACd;AAAA,gBACA;AAAA,kBACC,MAAM;AAAA,kBACN,OAAO;AAAA,kBACP,aACC;AAAA,gBACF;AAAA,cACD;AAAA,YACD;AAAA,YACA;AAAA,cACC,aAAa;AAAA,cACb,MAAM;AAAA,cACN,SAAS;AAAA,cACT,aAAa,EAAE,UAAU,GAAG,UAAU,IAAI,iBAAiB,EAAE;AAAA,cAC7D,aACC;AAAA,cACD,MAAM;AAAA,YACP;AAAA,YACA;AAAA,cACC,aAAa;AAAA,cACb,MAAM;AAAA,cACN,SAAS;AAAA,cACT,aAAa,EAAE,UAAU,GAAG,UAAU,GAAG,iBAAiB,EAAE;AAAA,cAC5D,aACC;AAAA,cACD,MAAM;AAAA,YACP;AAAA,YACA;AAAA,cACC,aAAa;AAAA,cACb,MAAM;AAAA,cACN,SAAS;AAAA,cACT,aAAa;AAAA,cACb,MAAM;AAAA,YACP;AAAA,YACA;AAAA,cACC,aAAa;AAAA,cACb,MAAM;AAAA,cACN,SAAS;AAAA,cACT,aAAa;AAAA,cACb,MAAM;AAAA,YACP;AAAA,YACA;AAAA,cACC,aAAa;AAAA,cACb,MAAM;AAAA,cACN,SAAS;AAAA,cACT,aAAa,EAAE,UAAU,GAAG,UAAU,GAAG,iBAAiB,EAAE;AAAA,cAC5D,aACC;AAAA,cACD,MAAM;AAAA,YACP;AAAA,UACD;AAAA,QACD;AAAA,MACD;AAAA,IACD;AAAA;AAAA,EAEA,MAAM,WAAuC,WAAwC;AACpF,UAAM,cAAc,MAAM,KAAK,eAA2C,eAAe;AAEzF,UAAM,YAAY,KAAK,iBAAiB,SAAS,SAAS;AAE1D,UAAM,UAAU,KAAK,iBAAiB,WAAW,WAAW,CAAC,CAAC;AAW9D,UAAM,gBAA+B;AAAA,MACpC,SAAS,YAAY;AAAA,MACrB,cAAc;AAAA,QACb,gBAAY,qCAAc,YAAY,GAAG;AAAA,MAC1C;AAAA,IACD;AAEA,UAAM,QAAQ,IAAI,yBAAW;AAAA,MAC5B,QAAQ,YAAY,UAAU;AAAA,MAC9B,OAAO;AAAA,MACP,GAAG;AAAA,MACH,SAAS,QAAQ,WAAW;AAAA,MAC5B,YAAY,QAAQ,cAAc;AAAA,MAClC;AAAA,MACA,WAAW,CAAC,IAAI,mCAAc,IAAI,CAAC;AAAA,MACnC,aAAa,QAAQ,iBAClB;AAAA,QACA,iBAAiB,EAAE,MAAM,QAAQ,eAAe;AAAA,MACjD,IACC;AAAA,MACH,qBAAiB,kEAA+B,MAAM,gDAA0B;AAAA,IACjF,CAAC;AAED,WAAO;AAAA,MACN,UAAU;AAAA,IACX;AAAA,EACD;AACD;","names":[]}
1
+ {"version":3,"sources":["../../../../nodes/llms/LmChat9Router/LmChat9Router.node.ts"],"sourcesContent":["import { ChatOpenAI, type ClientOptions } from '@langchain/openai';\nimport {\n\ttype IDataObject,\n\tNodeConnectionTypes,\n\ttype INodeType,\n\ttype INodeTypeDescription,\n\ttype ISupplyDataFunctions,\n\ttype SupplyData,\n} from 'n8n-workflow';\n\nimport { getProxyAgent } from '@utils/httpProxyAgent';\nimport { getConnectionHintNoticeField } from '@utils/sharedFields';\n\nimport type { OpenAICompatibleCredential } from '../../../types/types';\nimport { openAiFailedAttemptHandler } from '../../vendors/OpenAi/helpers/error-handling';\nimport { makeN8nLlmFailedAttemptHandler } from '../n8nLlmFailedAttemptHandler';\nimport { N8nLlmTracing } from '../N8nLlmTracing';\nimport type { ModelConfig } from '../../agents/OpenClawAgent/V2/OpenClawAgentV2.node';\n\nconst NINE_ROUTER_OPENCLAW_PROVIDER = '9router';\nconst NINE_ROUTER_OPENCLAW_MODEL_SOURCE = '9router';\nconst NINE_ROUTER_OPENCLAW_API = 'openai-completions';\nconst NINE_ROUTER_DEFAULT_BASE_URL = 'http://localhost:20128/api/v1';\n\nfunction normalizeOptionalString(value: unknown): string | undefined {\n\tif (typeof value !== 'string') {\n\t\treturn undefined;\n\t}\n\tconst trimmed = value.trim();\n\treturn trimmed || undefined;\n}\n\nfunction toOpenClawNineRouterModelId(modelName: string): string {\n\tconst normalizedModelName = normalizeOptionalString(modelName) ?? 'auto';\n\tconst providerPrefix = `${NINE_ROUTER_OPENCLAW_PROVIDER}/`;\n\treturn normalizedModelName.toLowerCase().startsWith(providerPrefix)\n\t\t? 
normalizedModelName\n\t\t: `${NINE_ROUTER_OPENCLAW_PROVIDER}/${normalizedModelName}`;\n}\n\nexport class LmChat9Router implements INodeType {\n\tdescription: INodeTypeDescription = {\n\t\tdisplayName: '9Router Chat Model',\n\t\tname: 'lmChat9Router',\n\t\ticon: { light: 'file:9router.svg', dark: 'file:9router.dark.svg' },\n\t\tgroup: ['transform'],\n\t\tversion: [1],\n\t\tdescription: 'For advanced usage with an AI chain',\n\t\tdefaults: {\n\t\t\tname: '9Router Chat Model',\n\t\t},\n\t\tcodex: {\n\t\t\tcategories: ['AI'],\n\t\t\tsubcategories: {\n\t\t\t\tAI: ['Language Models', 'Root Nodes'],\n\t\t\t\t'Language Models': ['Chat Models (Recommended)'],\n\t\t\t},\n\t\t\tresources: {\n\t\t\t\tprimaryDocumentation: [\n\t\t\t\t\t{\n\t\t\t\t\t\turl: 'https://github.com/9router/9router',\n\t\t\t\t\t},\n\t\t\t\t],\n\t\t\t},\n\t\t},\n\n\t\tinputs: [],\n\n\t\toutputs: [NodeConnectionTypes.AiLanguageModel],\n\t\toutputNames: ['Model'],\n\t\tcredentials: [\n\t\t\t{\n\t\t\t\tname: 'nineRouterApi',\n\t\t\t\trequired: true,\n\t\t\t},\n\t\t],\n\t\trequestDefaults: {\n\t\t\tignoreHttpStatusErrors: true,\n\t\t\tbaseURL: '={{ $credentials?.url }}',\n\t\t},\n\t\tproperties: [\n\t\t\tgetConnectionHintNoticeField([NodeConnectionTypes.AiChain, NodeConnectionTypes.AiAgent]),\n\t\t\t{\n\t\t\t\tdisplayName:\n\t\t\t\t\t'If using JSON response format, you must include word \"json\" in the prompt in your chain or agent. Also, make sure to select latest models released post November 2023.',\n\t\t\t\tname: 'notice',\n\t\t\t\ttype: 'notice',\n\t\t\t\tdefault: '',\n\t\t\t\tdisplayOptions: {\n\t\t\t\t\tshow: {\n\t\t\t\t\t\t'/options.responseFormat': ['json_object'],\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t},\n\t\t\t{\n\t\t\t\tdisplayName: 'Model',\n\t\t\t\tname: 'model',\n\t\t\t\ttype: 'options',\n\t\t\t\tdescription:\n\t\t\t\t\t'The model which will generate the completion. 
<a href=\"https://github.com/9router/9router\">Learn more</a>.',\n\t\t\t\ttypeOptions: {\n\t\t\t\t\tloadOptions: {\n\t\t\t\t\t\trouting: {\n\t\t\t\t\t\t\trequest: {\n\t\t\t\t\t\t\t\tmethod: 'GET',\n\t\t\t\t\t\t\t\turl: '/models',\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\toutput: {\n\t\t\t\t\t\t\t\tpostReceive: [\n\t\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\ttype: 'rootProperty',\n\t\t\t\t\t\t\t\t\t\tproperties: {\n\t\t\t\t\t\t\t\t\t\t\tproperty: 'data',\n\t\t\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\ttype: 'setKeyValue',\n\t\t\t\t\t\t\t\t\t\tproperties: {\n\t\t\t\t\t\t\t\t\t\t\tname: '={{$responseItem.id}}',\n\t\t\t\t\t\t\t\t\t\t\tvalue: '={{$responseItem.id}}',\n\t\t\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\ttype: 'sort',\n\t\t\t\t\t\t\t\t\t\tproperties: {\n\t\t\t\t\t\t\t\t\t\t\tkey: 'name',\n\t\t\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t],\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t\trouting: {\n\t\t\t\t\tsend: {\n\t\t\t\t\t\ttype: 'body',\n\t\t\t\t\t\tproperty: 'model',\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t\tdefault: 'openai/gpt-4o',\n\t\t\t},\n\t\t\t{\n\t\t\t\tdisplayName: 'Options',\n\t\t\t\tname: 'options',\n\t\t\t\tplaceholder: 'Add Option',\n\t\t\t\tdescription: 'Additional options to add',\n\t\t\t\ttype: 'collection',\n\t\t\t\tdefault: {},\n\t\t\t\toptions: [\n\t\t\t\t\t{\n\t\t\t\t\t\tdisplayName: 'Frequency Penalty',\n\t\t\t\t\t\tname: 'frequencyPenalty',\n\t\t\t\t\t\tdefault: 0,\n\t\t\t\t\t\ttypeOptions: { maxValue: 2, minValue: -2, numberPrecision: 1 },\n\t\t\t\t\t\tdescription:\n\t\t\t\t\t\t\t\"Positive values penalize new tokens based on their existing frequency in the text so far, decreasing the model's likelihood to repeat the same line verbatim\",\n\t\t\t\t\t\ttype: 'number',\n\t\t\t\t\t},\n\t\t\t\t\t{\n\t\t\t\t\t\tdisplayName: 'Maximum Number of Tokens',\n\t\t\t\t\t\tname: 'maxTokens',\n\t\t\t\t\t\tdefault: 
-1,\n\t\t\t\t\t\tdescription:\n\t\t\t\t\t\t\t'The maximum number of tokens to generate in the completion. Most models have a context length of 2048 tokens (except for the newest models, which support 32,768).',\n\t\t\t\t\t\ttype: 'number',\n\t\t\t\t\t\ttypeOptions: {\n\t\t\t\t\t\t\tmaxValue: 32768,\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t\t{\n\t\t\t\t\t\tdisplayName: 'Response Format',\n\t\t\t\t\t\tname: 'responseFormat',\n\t\t\t\t\t\tdefault: 'text',\n\t\t\t\t\t\ttype: 'options',\n\t\t\t\t\t\toptions: [\n\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\tname: 'Text',\n\t\t\t\t\t\t\t\tvalue: 'text',\n\t\t\t\t\t\t\t\tdescription: 'Regular text response',\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\tname: 'JSON',\n\t\t\t\t\t\t\t\tvalue: 'json_object',\n\t\t\t\t\t\t\t\tdescription:\n\t\t\t\t\t\t\t\t\t'Enables JSON mode, which should guarantee the message the model generates is valid JSON',\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t],\n\t\t\t\t\t},\n\t\t\t\t\t{\n\t\t\t\t\t\tdisplayName: 'Presence Penalty',\n\t\t\t\t\t\tname: 'presencePenalty',\n\t\t\t\t\t\tdefault: 0,\n\t\t\t\t\t\ttypeOptions: { maxValue: 2, minValue: -2, numberPrecision: 1 },\n\t\t\t\t\t\tdescription:\n\t\t\t\t\t\t\t\"Positive values penalize new tokens based on whether they appear in the text so far, increasing the model's likelihood to talk about new topics\",\n\t\t\t\t\t\ttype: 'number',\n\t\t\t\t\t},\n\t\t\t\t\t{\n\t\t\t\t\t\tdisplayName: 'Sampling Temperature',\n\t\t\t\t\t\tname: 'temperature',\n\t\t\t\t\t\tdefault: 0.7,\n\t\t\t\t\t\ttypeOptions: { maxValue: 2, minValue: 0, numberPrecision: 1 },\n\t\t\t\t\t\tdescription:\n\t\t\t\t\t\t\t'Controls randomness: Lowering results in less random completions. 
As the temperature approaches zero, the model will become deterministic and repetitive.',\n\t\t\t\t\t\ttype: 'number',\n\t\t\t\t\t},\n\t\t\t\t\t{\n\t\t\t\t\t\tdisplayName: 'Timeout',\n\t\t\t\t\t\tname: 'timeout',\n\t\t\t\t\t\tdefault: 360000,\n\t\t\t\t\t\tdescription: 'Maximum amount of time a request is allowed to take in milliseconds',\n\t\t\t\t\t\ttype: 'number',\n\t\t\t\t\t},\n\t\t\t\t\t{\n\t\t\t\t\t\tdisplayName: 'Max Retries',\n\t\t\t\t\t\tname: 'maxRetries',\n\t\t\t\t\t\tdefault: 2,\n\t\t\t\t\t\tdescription: 'Maximum number of retries to attempt',\n\t\t\t\t\t\ttype: 'number',\n\t\t\t\t\t},\n\t\t\t\t\t{\n\t\t\t\t\t\tdisplayName: 'Top P',\n\t\t\t\t\t\tname: 'topP',\n\t\t\t\t\t\tdefault: 1,\n\t\t\t\t\t\ttypeOptions: { maxValue: 1, minValue: 0, numberPrecision: 1 },\n\t\t\t\t\t\tdescription:\n\t\t\t\t\t\t\t'Controls diversity via nucleus sampling: 0.5 means half of all likelihood-weighted options are considered. We generally recommend altering this or temperature but not both.',\n\t\t\t\t\t\ttype: 'number',\n\t\t\t\t\t},\n\t\t\t\t],\n\t\t\t},\n\t\t],\n\t};\n\n\tasync supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise<SupplyData> {\n\t\tconst credentials = await this.getCredentials<OpenAICompatibleCredential>('nineRouterApi');\n\n\t\tconst modelName = this.getNodeParameter('model', itemIndex) as string;\n\t\tconst baseURL = normalizeOptionalString(credentials.url) ?? 
NINE_ROUTER_DEFAULT_BASE_URL;\n\t\tconst hasApiKey = normalizeOptionalString(credentials.apiKey) !== undefined;\n\n\t\tconst options = this.getNodeParameter('options', itemIndex, {}) as {\n\t\t\tfrequencyPenalty?: number;\n\t\t\tmaxTokens?: number;\n\t\t\tmaxRetries: number;\n\t\t\ttimeout: number;\n\t\t\tpresencePenalty?: number;\n\t\t\ttemperature?: number;\n\t\t\ttopP?: number;\n\t\t\tresponseFormat?: 'text' | 'json_object';\n\t\t};\n\n\t\tconst configuration: ClientOptions = {\n\t\t\tbaseURL,\n\t\t\tfetchOptions: {\n\t\t\t\tdispatcher: getProxyAgent(baseURL),\n\t\t\t},\n\t\t};\n\n\t\tconst model = new ChatOpenAI({\n\t\t\tapiKey: credentials.apiKey || 'no-key',\n\t\t\tmodel: modelName,\n\t\t\t...options,\n\t\t\ttimeout: options.timeout ?? 60000,\n\t\t\tmaxRetries: options.maxRetries ?? 2,\n\t\t\tconfiguration,\n\t\t\tcallbacks: [new N8nLlmTracing(this)],\n\t\t\tmodelKwargs: options.responseFormat\n\t\t\t\t? {\n\t\t\t\t\t\tresponse_format: { type: options.responseFormat },\n\t\t\t\t\t}\n\t\t\t\t: undefined,\n\t\t\tonFailedAttempt: makeN8nLlmFailedAttemptHandler(this, openAiFailedAttemptHandler),\n\t\t});\n\n\t\tconst openClawModelConfig: ModelConfig = {\n\t\t\tmodelId: toOpenClawNineRouterModelId(modelName),\n\t\t\tmodelSource: NINE_ROUTER_OPENCLAW_MODEL_SOURCE,\n\t\t\textra: {\n\t\t\t\tbaseUrl: baseURL,\n\t\t\t\tapi: NINE_ROUTER_OPENCLAW_API,\n\t\t\t\thasApiKey,\n\t\t\t} satisfies IDataObject,\n\t\t};\n\n\t\tObject.assign(model, openClawModelConfig);\n\n\t\tconsole.log('[LmChat9Router] returning model with OpenClaw metadata', {\n\t\t\titemIndex,\n\t\t\tmodelName,\n\t\t\topenClawModelId: openClawModelConfig.modelId,\n\t\t\tmodelSource: openClawModelConfig.modelSource,\n\t\t\tbaseUrl: baseURL,\n\t\t\tapi: NINE_ROUTER_OPENCLAW_API,\n\t\t\thasApiKey,\n\t\t});\n\n\t\treturn {\n\t\t\tresponse: 
model,\n\t\t};\n\t}\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,oBAA+C;AAC/C,0BAOO;AAEP,4BAA8B;AAC9B,0BAA6C;AAG7C,4BAA2C;AAC3C,wCAA+C;AAC/C,2BAA8B;AAG9B,MAAM,gCAAgC;AACtC,MAAM,oCAAoC;AAC1C,MAAM,2BAA2B;AACjC,MAAM,+BAA+B;AAErC,SAAS,wBAAwB,OAAoC;AACpE,MAAI,OAAO,UAAU,UAAU;AAC9B,WAAO;AAAA,EACR;AACA,QAAM,UAAU,MAAM,KAAK;AAC3B,SAAO,WAAW;AACnB;AAEA,SAAS,4BAA4B,WAA2B;AAC/D,QAAM,sBAAsB,wBAAwB,SAAS,KAAK;AAClE,QAAM,iBAAiB,GAAG,6BAA6B;AACvD,SAAO,oBAAoB,YAAY,EAAE,WAAW,cAAc,IAC/D,sBACA,GAAG,6BAA6B,IAAI,mBAAmB;AAC3D;AAEO,MAAM,cAAmC;AAAA,EAAzC;AACN,uBAAoC;AAAA,MACnC,aAAa;AAAA,MACb,MAAM;AAAA,MACN,MAAM,EAAE,OAAO,oBAAoB,MAAM,wBAAwB;AAAA,MACjE,OAAO,CAAC,WAAW;AAAA,MACnB,SAAS,CAAC,CAAC;AAAA,MACX,aAAa;AAAA,MACb,UAAU;AAAA,QACT,MAAM;AAAA,MACP;AAAA,MACA,OAAO;AAAA,QACN,YAAY,CAAC,IAAI;AAAA,QACjB,eAAe;AAAA,UACd,IAAI,CAAC,mBAAmB,YAAY;AAAA,UACpC,mBAAmB,CAAC,2BAA2B;AAAA,QAChD;AAAA,QACA,WAAW;AAAA,UACV,sBAAsB;AAAA,YACrB;AAAA,cACC,KAAK;AAAA,YACN;AAAA,UACD;AAAA,QACD;AAAA,MACD;AAAA,MAEA,QAAQ,CAAC;AAAA,MAET,SAAS,CAAC,wCAAoB,eAAe;AAAA,MAC7C,aAAa,CAAC,OAAO;AAAA,MACrB,aAAa;AAAA,QACZ;AAAA,UACC,MAAM;AAAA,UACN,UAAU;AAAA,QACX;AAAA,MACD;AAAA,MACA,iBAAiB;AAAA,QAChB,wBAAwB;AAAA,QACxB,SAAS;AAAA,MACV;AAAA,MACA,YAAY;AAAA,YACX,kDAA6B,CAAC,wCAAoB,SAAS,wCAAoB,OAAO,CAAC;AAAA,QACvF;AAAA,UACC,aACC;AAAA,UACD,MAAM;AAAA,UACN,MAAM;AAAA,UACN,SAAS;AAAA,UACT,gBAAgB;AAAA,YACf,MAAM;AAAA,cACL,2BAA2B,CAAC,aAAa;AAAA,YAC1C;AAAA,UACD;AAAA,QACD;AAAA,QACA;AAAA,UACC,aAAa;AAAA,UACb,MAAM;AAAA,UACN,MAAM;AAAA,UACN,aACC;AAAA,UACD,aAAa;AAAA,YACZ,aAAa;AAAA,cACZ,SAAS;AAAA,gBACR,SAAS;AAAA,kBACR,QAAQ;AAAA,kBACR,KAAK;AAAA,gBACN;AAAA,gBACA,QAAQ;AAAA,kBACP,aAAa;AAAA,oBACZ;AAAA,sBACC,MAAM;AAAA,sBACN,YAAY;AAAA,wBACX,UAAU;AAAA,sBACX;AAAA,oBACD;AAAA,oBACA;AAAA,sBACC,MAAM;AAAA,sBACN,YAAY;AAAA,wBACX,MAAM;AAAA,wBACN,OAAO;AAAA,sBACR;AAAA,oBACD;AAAA,oBACA;AAAA,sBACC,MAAM;AAAA,sBACN,YAAY;AAAA,wBACX,KAAK;AAAA,sBACN;AAAA,oBACD;AAAA,kBACD;AAAA,gBACD;AAAA,cACD;AAAA,YACD;AAAA,UACD;AAAA,UACA,SAAS;AAAA,YACR,MAAM;AAAA,cACL,MAAM;AAAA,cACN,UAAU;AAAA,
YACX;AAAA,UACD;AAAA,UACA,SAAS;AAAA,QACV;AAAA,QACA;AAAA,UACC,aAAa;AAAA,UACb,MAAM;AAAA,UACN,aAAa;AAAA,UACb,aAAa;AAAA,UACb,MAAM;AAAA,UACN,SAAS,CAAC;AAAA,UACV,SAAS;AAAA,YACR;AAAA,cACC,aAAa;AAAA,cACb,MAAM;AAAA,cACN,SAAS;AAAA,cACT,aAAa,EAAE,UAAU,GAAG,UAAU,IAAI,iBAAiB,EAAE;AAAA,cAC7D,aACC;AAAA,cACD,MAAM;AAAA,YACP;AAAA,YACA;AAAA,cACC,aAAa;AAAA,cACb,MAAM;AAAA,cACN,SAAS;AAAA,cACT,aACC;AAAA,cACD,MAAM;AAAA,cACN,aAAa;AAAA,gBACZ,UAAU;AAAA,cACX;AAAA,YACD;AAAA,YACA;AAAA,cACC,aAAa;AAAA,cACb,MAAM;AAAA,cACN,SAAS;AAAA,cACT,MAAM;AAAA,cACN,SAAS;AAAA,gBACR;AAAA,kBACC,MAAM;AAAA,kBACN,OAAO;AAAA,kBACP,aAAa;AAAA,gBACd;AAAA,gBACA;AAAA,kBACC,MAAM;AAAA,kBACN,OAAO;AAAA,kBACP,aACC;AAAA,gBACF;AAAA,cACD;AAAA,YACD;AAAA,YACA;AAAA,cACC,aAAa;AAAA,cACb,MAAM;AAAA,cACN,SAAS;AAAA,cACT,aAAa,EAAE,UAAU,GAAG,UAAU,IAAI,iBAAiB,EAAE;AAAA,cAC7D,aACC;AAAA,cACD,MAAM;AAAA,YACP;AAAA,YACA;AAAA,cACC,aAAa;AAAA,cACb,MAAM;AAAA,cACN,SAAS;AAAA,cACT,aAAa,EAAE,UAAU,GAAG,UAAU,GAAG,iBAAiB,EAAE;AAAA,cAC5D,aACC;AAAA,cACD,MAAM;AAAA,YACP;AAAA,YACA;AAAA,cACC,aAAa;AAAA,cACb,MAAM;AAAA,cACN,SAAS;AAAA,cACT,aAAa;AAAA,cACb,MAAM;AAAA,YACP;AAAA,YACA;AAAA,cACC,aAAa;AAAA,cACb,MAAM;AAAA,cACN,SAAS;AAAA,cACT,aAAa;AAAA,cACb,MAAM;AAAA,YACP;AAAA,YACA;AAAA,cACC,aAAa;AAAA,cACb,MAAM;AAAA,cACN,SAAS;AAAA,cACT,aAAa,EAAE,UAAU,GAAG,UAAU,GAAG,iBAAiB,EAAE;AAAA,cAC5D,aACC;AAAA,cACD,MAAM;AAAA,YACP;AAAA,UACD;AAAA,QACD;AAAA,MACD;AAAA,IACD;AAAA;AAAA,EAEA,MAAM,WAAuC,WAAwC;AACpF,UAAM,cAAc,MAAM,KAAK,eAA2C,eAAe;AAEzF,UAAM,YAAY,KAAK,iBAAiB,SAAS,SAAS;AAC1D,UAAM,UAAU,wBAAwB,YAAY,GAAG,KAAK;AAC5D,UAAM,YAAY,wBAAwB,YAAY,MAAM,MAAM;AAElE,UAAM,UAAU,KAAK,iBAAiB,WAAW,WAAW,CAAC,CAAC;AAW9D,UAAM,gBAA+B;AAAA,MACpC;AAAA,MACA,cAAc;AAAA,QACb,gBAAY,qCAAc,OAAO;AAAA,MAClC;AAAA,IACD;AAEA,UAAM,QAAQ,IAAI,yBAAW;AAAA,MAC5B,QAAQ,YAAY,UAAU;AAAA,MAC9B,OAAO;AAAA,MACP,GAAG;AAAA,MACH,SAAS,QAAQ,WAAW;AAAA,MAC5B,YAAY,QAAQ,cAAc;AAAA,MAClC;AAAA,MACA,WAAW,CAAC,IAAI,mCAAc,IAAI,CAAC;AAAA,MACnC,aAAa,QAAQ,iBAClB;AAAA,QACA,iBAAiB,EAAE,MAAM,QAAQ,eAAe;AAAA,MACjD,IACC;AAAA,MACH,qBAAiB,kEAA+B,MAAM,gDAA0B;AAAA,IACjF,
CAAC;AAED,UAAM,sBAAmC;AAAA,MACxC,SAAS,4BAA4B,SAAS;AAAA,MAC9C,aAAa;AAAA,MACb,OAAO;AAAA,QACN,SAAS;AAAA,QACT,KAAK;AAAA,QACL;AAAA,MACD;AAAA,IACD;AAEA,WAAO,OAAO,OAAO,mBAAmB;AAExC,YAAQ,IAAI,0DAA0D;AAAA,MACrE;AAAA,MACA;AAAA,MACA,iBAAiB,oBAAoB;AAAA,MACrC,aAAa,oBAAoB;AAAA,MACjC,SAAS;AAAA,MACT,KAAK;AAAA,MACL;AAAA,IACD,CAAC;AAED,WAAO;AAAA,MACN,UAAU;AAAA,IACX;AAAA,EACD;AACD;","names":[]}
@@ -0,0 +1,456 @@
1
+ "use strict";
2
+ var __defProp = Object.defineProperty;
3
+ var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
4
+ var __getOwnPropNames = Object.getOwnPropertyNames;
5
+ var __hasOwnProp = Object.prototype.hasOwnProperty;
6
+ var __export = (target, all) => {
7
+ for (var name in all)
8
+ __defProp(target, name, { get: all[name], enumerable: true });
9
+ };
10
+ var __copyProps = (to, from, except, desc) => {
11
+ if (from && typeof from === "object" || typeof from === "function") {
12
+ for (let key of __getOwnPropNames(from))
13
+ if (!__hasOwnProp.call(to, key) && key !== except)
14
+ __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
15
+ }
16
+ return to;
17
+ };
18
+ var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
19
+ var LmChatCodexCli_node_exports = {};
20
+ __export(LmChatCodexCli_node_exports, {
21
+ LmChatCodexCli: () => LmChatCodexCli
22
+ });
23
+ module.exports = __toCommonJS(LmChatCodexCli_node_exports);
24
+ var import_chat_models = require("@langchain/core/language_models/chat_models");
25
+ var import_messages = require("@langchain/core/messages");
26
+ var import_n8n_workflow = require("n8n-workflow");
27
+ var import_sharedFields = require("../../../utils/sharedFields");
28
+ var import_N8nLlmTracing = require("../N8nLlmTracing");
29
+ var import_child_process = require("child_process");
30
+ var import_fs = require("fs");
31
+ const TOOL_CALL_SYSTEM_PROMPT = `You have access to the following tools. When you need to call a tool, respond ONLY with a JSON block in this exact format (no other text before or after):
32
+
33
+ \`\`\`tool_calls
34
+ [{"id": "call_1", "name": "tool_name", "args": {"param": "value"}}]
35
+ \`\`\`
36
+
37
+ When you do NOT need to call a tool, respond normally with text. Never mix tool calls and text in the same response.
38
+
39
+ Available tools:
40
+ `;
41
+ class ChatCodexCLI extends import_chat_models.BaseChatModel {
42
+ constructor(fields) {
43
+ super({});
44
+ this.boundTools = [];
45
+ this.model = fields.model;
46
+ this.binaryPath = fields.binaryPath;
47
+ this.workingDirectory = fields.workingDirectory;
48
+ this.sandboxMode = fields.sandboxMode;
49
+ }
50
+ _llmType() {
51
+ return "codex-cli";
52
+ }
53
+ bindTools(tools, kwargs) {
54
+ const clone = new ChatCodexCLI({
55
+ model: this.model,
56
+ binaryPath: this.binaryPath,
57
+ workingDirectory: this.workingDirectory,
58
+ sandboxMode: this.sandboxMode
59
+ });
60
+ clone.boundTools = tools;
61
+ clone.callbacks = this.callbacks;
62
+ if (kwargs) {
63
+ return clone.bind(kwargs);
64
+ }
65
+ return clone;
66
+ }
67
+ async _generate(messages, _options, _runManager) {
68
+ console.log("[LmChatCodexCli] _generate called", {
69
+ messageCount: messages.length,
70
+ boundToolCount: this.boundTools.length,
71
+ model: this.model
72
+ });
73
+ const processedMessages = [...messages];
74
+ if (this.boundTools.length > 0) {
75
+ const toolDescriptions = this.boundTools.map((tool) => {
76
+ const t = tool;
77
+ const name = t.name ?? "";
78
+ const description = t.description ?? "";
79
+ const schema = t.parameters ?? t.schema ?? {};
80
+ return `- ${name}: ${description}
81
+ Parameters: ${JSON.stringify(schema)}`;
82
+ }).join("\n\n");
83
+ const systemPrompt = TOOL_CALL_SYSTEM_PROMPT + toolDescriptions;
84
+ processedMessages.unshift(new import_messages.SystemMessage(systemPrompt));
85
+ }
86
+ const prompt = processedMessages.map((m) => {
87
+ const content = typeof m.content === "string" ? m.content : JSON.stringify(m.content);
88
+ if (m instanceof import_messages.SystemMessage) return `[system]: ${content}`;
89
+ if (m instanceof import_messages.HumanMessage) return `[user]: ${content}`;
90
+ if (m instanceof import_messages.AIMessage) return `[assistant]: ${content}`;
91
+ return `[${m._getType()}]: ${content}`;
92
+ }).join("\n\n");
93
+ console.log("[LmChatCodexCli] prompt built, length:", prompt.length);
94
+ const rawResponse = await this.executeCodexCli(prompt);
95
+ console.log("[LmChatCodexCli] raw response received, length:", rawResponse.length);
96
+ if (this.boundTools.length > 0) {
97
+ const toolCalls = this.extractToolCalls(rawResponse);
98
+ if (toolCalls.length > 0) {
99
+ console.log("[LmChatCodexCli] extracted tool calls:", toolCalls.length);
100
+ const aiMessage2 = new import_messages.AIMessage({
101
+ content: "",
102
+ tool_calls: toolCalls.map((tc) => ({
103
+ id: tc.id,
104
+ name: tc.name,
105
+ args: tc.args,
106
+ type: "tool_call"
107
+ }))
108
+ });
109
+ return {
110
+ generations: [{ message: aiMessage2, text: "" }]
111
+ };
112
+ }
113
+ }
114
+ console.log("[LmChatCodexCli] returning text response");
115
+ const aiMessage = new import_messages.AIMessage({ content: rawResponse });
116
+ return {
117
+ generations: [{ message: aiMessage, text: rawResponse }]
118
+ };
119
+ }
120
+ extractToolCalls(text) {
121
+ const toolCallRegex = /```tool_calls\s*\n([\s\S]*?)\n```/;
122
+ const match = toolCallRegex.exec(text);
123
+ if (!match) return [];
124
+ try {
125
+ const parsed = JSON.parse(match[1]);
126
+ if (!Array.isArray(parsed)) return [];
127
+ return parsed.map((tc, i) => ({
128
+ id: tc.id ?? `call_${i}`,
129
+ name: tc.name,
130
+ args: tc.args ?? {}
131
+ }));
132
+ } catch {
133
+ return [];
134
+ }
135
+ }
136
/**
 * Run the Codex CLI (`codex exec --json`) as a child process, feeding the
 * prompt on stdin, and resolve with the assistant text parsed from the JSONL
 * output. Rejects when the binary cannot be spawned, when the CLI emits an
 * error event, when the process exits non-zero, or when no assistant message
 * can be parsed from stdout.
 */
async executeCodexCli(prompt) {
  const args = ["exec", "--json", "--skip-git-repo-check", "--full-auto"];
  if (this.sandboxMode) {
    args.push("--sandbox", this.sandboxMode);
  }
  // "auto" means "let the CLI pick its default"; only forward explicit models.
  if (this.model && this.model !== "auto") {
    args.push("--model", this.model);
  }
  // Empty/whitespace-only working directory collapses to undefined so the
  // `--cd` flag is omitted entirely.
  const cwd = this.workingDirectory?.trim() || void 0;
  if (cwd) {
    args.push("--cd", cwd);
  }
  // Trailing "-" makes codex read the prompt from stdin instead of argv.
  args.push("-");
  console.log("[LmChatCodexCli] spawning codex exec", {
    binaryPath: this.binaryPath,
    args,
    model: this.model,
    cwd,
    sandboxMode: this.sandboxMode
  });
  return await new Promise((resolve, reject) => {
    const child = (0, import_child_process.spawn)(this.binaryPath, args, {
      // Codex uses --cd for working directory, so we don't set cwd on spawn
      stdio: ["pipe", "pipe", "pipe"],
      env: { ...process.env }
    });
    // Accumulate full stdout/stderr; output is only parsed once on "close".
    let stdout = "";
    let stderr = "";
    child.stdout.on("data", (data) => {
      stdout += data.toString();
    });
    child.stderr.on("data", (data) => {
      stderr += data.toString();
    });
    // "error" fires when the binary itself cannot be started (e.g. not on
    // PATH). A later duplicate settle from "close" is a no-op on the Promise.
    child.on("error", (err) => {
      console.error("[LmChatCodexCli] spawn error:", err.message);
      reject(
        new Error(
          `Failed to spawn codex: ${err.message}. Make sure the Codex CLI is installed (npm install -g @openai/codex) and accessible.`
        )
      );
    });
    child.on("close", (code) => {
      console.log("[LmChatCodexCli] codex exec exited", {
        code,
        stdoutLength: stdout.length,
        stderrLength: stderr.length
      });
      // Settlement priority: parsed assistant text wins (even on a non-zero
      // exit), then a CLI-reported error, then the raw exit code / stderr.
      const parseResult = this.parseJsonlOutput(stdout);
      if (parseResult.assistantText) {
        console.log(
          "[LmChatCodexCli] parsed assistant content, length:",
          parseResult.assistantText.length
        );
        resolve(parseResult.assistantText);
        return;
      }
      if (parseResult.errorMessage) {
        console.error("[LmChatCodexCli] codex returned error:", parseResult.errorMessage);
        reject(new Error(`Codex CLI error: ${parseResult.errorMessage}`));
        return;
      }
      if (code !== 0) {
        const stderrMsg = stderr.trim();
        const errorMsg = stderrMsg || `codex exec exited with code ${code}`;
        console.error("[LmChatCodexCli] codex exec failed with code", code, ":", errorMsg);
        reject(new Error(errorMsg));
        return;
      }
      // Exit 0 but nothing usable in the JSONL stream — surface a preview of
      // stdout to aid debugging, then fail.
      console.error(
        "[LmChatCodexCli] no assistant response parsed from output, stdout preview:",
        stdout.substring(0, 500)
      );
      reject(new Error("No assistant response received from codex exec"));
    });
    // Write the prompt and close stdin so codex knows input is complete.
    if (child.stdin) {
      child.stdin.write(prompt);
      child.stdin.end();
    }
  });
}
217
+ /**
218
+ * Parse JSONL output from `codex exec --json`.
219
+ *
220
+ * Actual event types from codex exec --json (verified empirically):
221
+ * - {"type":"thread.started","thread_id":"..."}
222
+ * - {"type":"turn.started"}
223
+ * - {"type":"item.started","item":{"type":"agent_message",...}}
224
+ * - {"type":"item.completed","item":{"type":"agent_message","text":"..."}}
225
+ * - {"type":"turn.completed"}
226
+ * - {"type":"error","message":"..."}
227
+ * - {"type":"turn.failed","error":{"message":"..."}}
228
+ *
229
+ * Returns both assistant text and any error messages found.
230
+ */
231
+ parseJsonlOutput(output) {
232
+ const lines = output.split("\n").filter((line) => line.trim());
233
+ const assistantParts = [];
234
+ const errorParts = [];
235
+ console.log("[LmChatCodexCli] parsing JSONL output, line count:", lines.length);
236
+ for (const line of lines) {
237
+ try {
238
+ const parsed = JSON.parse(line);
239
+ const eventType = parsed.type;
240
+ console.log(
241
+ "[LmChatCodexCli] JSONL event:",
242
+ eventType,
243
+ "| keys:",
244
+ Object.keys(parsed).join(",")
245
+ );
246
+ if (eventType === "item.completed") {
247
+ const item = parsed.item;
248
+ if (item?.type === "agent_message" && typeof item.text === "string") {
249
+ console.log(
250
+ "[LmChatCodexCli] found agent_message text, length:",
251
+ item.text.length
252
+ );
253
+ assistantParts.push(item.text);
254
+ }
255
+ }
256
+ if (eventType === "message" && parsed.role === "assistant") {
257
+ const content = parsed.content;
258
+ if (Array.isArray(content)) {
259
+ for (const c of content) {
260
+ if (c.type === "text" && typeof c.text === "string") {
261
+ assistantParts.push(c.text);
262
+ }
263
+ }
264
+ } else if (typeof content === "string") {
265
+ assistantParts.push(content);
266
+ }
267
+ }
268
+ if (eventType === "assistant") {
269
+ const message = parsed.message;
270
+ if (message?.content) {
271
+ if (Array.isArray(message.content)) {
272
+ for (const c of message.content) {
273
+ if (c.type === "text" && typeof c.text === "string") {
274
+ assistantParts.push(c.text);
275
+ }
276
+ }
277
+ } else if (typeof message.content === "string") {
278
+ assistantParts.push(message.content);
279
+ }
280
+ }
281
+ }
282
+ if (eventType === "error" && typeof parsed.message === "string") {
283
+ errorParts.push(parsed.message);
284
+ }
285
+ if (eventType === "turn.failed") {
286
+ const error = parsed.error;
287
+ if (error && typeof error.message === "string") {
288
+ errorParts.push(error.message);
289
+ }
290
+ }
291
+ } catch {
292
+ }
293
+ }
294
+ return {
295
+ assistantText: assistantParts.join(""),
296
+ errorMessage: errorParts.join("; ")
297
+ };
298
+ }
299
+ }
300
/**
 * n8n node that exposes the local OpenAI Codex CLI as an AI language model.
 * Produces a `ChatCodexCLI` instance on its AiLanguageModel output so chains
 * and agents can use the locally installed `codex` binary as a chat model.
 */
class LmChatCodexCli {
  constructor() {
    // Static n8n node description: UI metadata, connection types, and the
    // user-configurable parameters (model choice + optional CLI options).
    this.description = {
      displayName: "Codex CLI Chat Model",
      name: "lmChatCodexCli",
      icon: "file:codexCli.svg",
      group: ["transform"],
      version: [1],
      description: "Chat model powered by the OpenAI Codex CLI. Requires codex to be installed locally (npm install -g @openai/codex).",
      defaults: {
        name: "Codex CLI Chat Model"
      },
      // NOTE: "codex" here is n8n's node-catalog metadata key, unrelated to
      // the Codex CLI this node wraps.
      codex: {
        categories: ["AI"],
        subcategories: {
          AI: ["Language Models", "Root Nodes"],
          "Language Models": ["Chat Models (Recommended)"]
        },
        resources: {}
      },
      // No data inputs; the node only supplies a model on its single output.
      inputs: [],
      outputs: [import_n8n_workflow.NodeConnectionTypes.AiLanguageModel],
      outputNames: ["Model"],
      properties: [
        // Hint shown in the UI telling users to connect this to a chain/agent.
        (0, import_sharedFields.getConnectionHintNoticeField)([import_n8n_workflow.NodeConnectionTypes.AiChain, import_n8n_workflow.NodeConnectionTypes.AiAgent]),
        {
          displayName: "Model",
          name: "model",
          type: "options",
          description: "The model to use via codex CLI",
          // eslint-disable-next-line n8n-nodes-base/node-param-options-type-unsorted-items
          options: [
            { name: "Auto (Default)", value: "auto" },
            // GPT-5 series (supported by Codex CLI)
            { name: "GPT-5.5", value: "gpt-5.5" },
            { name: "GPT-5.5 Fast", value: "gpt-5.5-fast" },
            { name: "GPT-5.4", value: "gpt-5.4" },
            { name: "GPT-5.4 Fast", value: "gpt-5.4-fast" },
            { name: "GPT-5.4 Mini", value: "gpt-5.4-mini" },
            { name: "GPT-5.3 Codex", value: "gpt-5.3-codex" },
            { name: "GPT-5.3 Codex Spark", value: "gpt-5.3-codex-spark" },
            { name: "GPT-5.2", value: "gpt-5.2" }
          ],
          default: "auto"
        },
        {
          displayName: "Options",
          name: "options",
          placeholder: "Add Option",
          description: "Additional options to configure",
          type: "collection",
          default: {},
          options: [
            {
              displayName: "Binary Path",
              name: "binaryPath",
              default: "codex",
              description: 'Path to the codex binary. Defaults to "codex" (must be in PATH).',
              type: "string"
            },
            {
              displayName: "Working Directory",
              name: "workingDirectory",
              default: "",
              description: "Working directory for the codex process. Leave empty to use the default.",
              type: "string"
            },
            {
              displayName: "Sandbox Mode",
              name: "sandboxMode",
              type: "options",
              default: "read-only",
              description: "Sandbox policy for executing model-generated shell commands",
              options: [
                {
                  name: "Read Only",
                  value: "read-only",
                  description: "Only allow read operations (safest)"
                },
                {
                  name: "Workspace Write",
                  value: "workspace-write",
                  description: "Allow writes within the workspace directory"
                },
                {
                  name: "Full Access (Dangerous)",
                  value: "danger-full-access",
                  description: "Full filesystem access \u2014 use with extreme caution"
                }
              ]
            }
          ]
        }
      ]
    };
  }
  /**
   * Resolve node parameters, validate the working directory, and return a
   * configured ChatCodexCLI instance as this node's supplied data.
   * Throws ApplicationError when the working directory expression resolved to
   * an empty value, still contains unresolved expression syntax, or does not
   * point at an existing directory.
   */
  async supplyData(itemIndex) {
    const modelName = this.getNodeParameter("model", itemIndex);
    const binaryPath = this.getNodeParameter("options.binaryPath", itemIndex, "codex");
    // Fetch the working directory twice: once unevaluated (rawExpressions:
    // true — presumably returns the literal "={{...}}" source; TODO confirm
    // against n8n's getNodeParameter contract) and once evaluated.
    const rawWorkingDirectory = this.getNodeParameter("options.workingDirectory", itemIndex, "", {
      rawExpressions: true
    });
    const workingDirectory = this.getNodeParameter("options.workingDirectory", itemIndex, "");
    const normalizedWorkingDirectory = (workingDirectory ?? "").trim();
    const rawWorkingDirectoryValue = rawWorkingDirectory ?? "";
    // Heuristic: the raw value looks like an n8n expression if it starts with
    // "=" or contains "{{" / "$workspace".
    const isWorkingDirectoryExpression = rawWorkingDirectoryValue.startsWith("=") || rawWorkingDirectoryValue.includes("{{") || rawWorkingDirectoryValue.includes("$workspace");
    const sandboxMode = this.getNodeParameter(
      "options.sandboxMode",
      itemIndex,
      "read-only"
    );
    console.log("[LmChatCodexCli] resolved Codex CLI options", {
      itemIndex,
      modelName,
      binaryPath,
      rawWorkingDirectory,
      workingDirectory: normalizedWorkingDirectory,
      sandboxMode
    });
    // An expression that evaluated to nothing is treated as a configuration
    // error rather than silently falling back to the default directory.
    if (isWorkingDirectoryExpression && !normalizedWorkingDirectory) {
      throw new import_n8n_workflow.ApplicationError(
        `Codex CLI working directory expression resolved to an empty value: ${rawWorkingDirectoryValue}`
      );
    }
    // Leftover expression syntax in the *evaluated* value means n8n never
    // resolved it; refuse to pass it to the CLI as a literal path.
    if (normalizedWorkingDirectory.includes("{{") || normalizedWorkingDirectory.includes("$workspace")) {
      throw new import_n8n_workflow.ApplicationError(
        `Codex CLI working directory was not resolved before execution: ${normalizedWorkingDirectory}`
      );
    }
    // Fail fast if the directory is missing, before spawning the CLI.
    if (normalizedWorkingDirectory && (!(0, import_fs.existsSync)(normalizedWorkingDirectory) || !(0, import_fs.statSync)(normalizedWorkingDirectory).isDirectory())) {
      throw new import_n8n_workflow.ApplicationError(
        `Codex CLI working directory does not exist or is not a directory: ${normalizedWorkingDirectory}`
      );
    }
    console.log("[LmChatCodexCli] creating ChatCodexCLI instance", {
      model: modelName,
      binaryPath,
      workingDirectory: normalizedWorkingDirectory,
      sandboxMode
    });
    const model = new ChatCodexCLI({
      model: modelName,
      binaryPath,
      workingDirectory: normalizedWorkingDirectory,
      sandboxMode
    });
    // Attach n8n's LLM tracing callback so executions show up in the UI log.
    model.callbacks = [new import_N8nLlmTracing.N8nLlmTracing(this)];
    return {
      response: model
    };
  }
}
452
// Annotate the CommonJS export names for ESM import in node:
// This `0 && (...)` expression is dead code emitted by the bundler on
// purpose: Node's CJS named-export detection scans the file statically, so
// listing `LmChatCodexCli` here lets ESM consumers do a named import even
// though the expression never executes. Do not "clean it up".
0 && (module.exports = {
  LmChatCodexCli
});
//# sourceMappingURL=LmChatCodexCli.node.js.map