n8n-workflow-builder-mcp 0.1.4 → 0.1.6

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (3) hide show
  1. package/README.md +6 -6
  2. package/dist/index.js +1269 -0
  3. package/package.json +1 -1
package/README.md CHANGED
@@ -146,12 +146,12 @@ The server provides the following tools for working with n8n workflows:
146
146
  |-----------|-------------|----------------|
147
147
  | **create_workflow** | Create a new n8n workflow | `workflow_name`, `workspace_dir` |
148
148
  | **list_workflows** | List all existing workflows | (no parameters) |
149
- | **get_workflow_details** | Get detailed information about a specific workflow | `workflow_name` |
150
- | **add_node** | Add a new node to a workflow | `workflow_name`, `node_type`, `position`, `parameters`, `node_name`, `typeVersion` |
151
- | **edit_node** | Edit an existing node in a workflow | `workflow_name`, `node_id`, `node_type`, `node_name`, `position`, `parameters` |
152
- | **delete_node** | Delete a node from a workflow | `workflow_name`, `node_id` |
153
- | **add_connection** | Add a connection between nodes | `workflow_name`, `source_node_id`, `source_node_output_name`, `target_node_id`, `target_node_input_name` |
154
- | **add_ai_connections** | Add AI connections for LangChain nodes | `workflow_name`, `agent_node_id`, `model_node_id`, `tool_node_ids` |
149
+ | **get_workflow_details** | Get detailed information about a specific workflow | `workflow_name`, `workflow_path` (optional) |
150
+ | **add_node** | Add a new node to a workflow | `workflow_name`, `node_type`, `position`, `parameters`, `node_name`, `typeVersion`, `webhookId`, `workflow_path` (optional) |
151
+ | **edit_node** | Edit an existing node in a workflow | `workflow_name`, `node_id`, `node_type`, `node_name`, `position`, `parameters`, `typeVersion`, `webhookId`, `workflow_path` (optional) |
152
+ | **delete_node** | Delete a node from a workflow | `workflow_name`, `node_id`, `workflow_path` (optional) |
153
+ | **add_connection** | Add a connection between nodes | `workflow_name`, `source_node_id`, `source_node_output_name`, `target_node_id`, `target_node_input_name`, `target_node_input_index` |
154
+ | **add_ai_connections** | Add AI connections for LangChain nodes | `workflow_name`, `agent_node_id`, `model_node_id`, `tool_node_ids`, `memory_node_id` |
155
155
  | **list_available_nodes** | List available node types with optional filtering | `search_term` (optional) |
156
156
 
157
157
  ## Troubleshooting Cursor Integration
package/dist/index.js ADDED
@@ -0,0 +1,1269 @@
1
#!/usr/bin/env node
"use strict";
// N8N Workflow Builder MCP Server
// Using the official MCP SDK as required.
// NOTE: this is TypeScript-compiled CommonJS output; __importDefault is the
// standard tsc interop helper for default-importing CJS modules.
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const mcp_js_1 = require("@modelcontextprotocol/sdk/server/mcp.js");
const stdio_js_1 = require("@modelcontextprotocol/sdk/server/stdio.js");
const zod_1 = require("zod");
const promises_1 = __importDefault(require("fs/promises"));
const path_1 = __importDefault(require("path"));
// Global workspace configuration.
// WORKSPACE_DIR defaults to the process cwd and is reassigned by the
// create_workflow tool; resolvePath() anchors all workflow files here.
let WORKSPACE_DIR = process.cwd();
console.error(`[DEBUG] Default workspace directory: ${WORKSPACE_DIR}`);
// Cache of node-type info keyed by lowercased type name (and lowercased base
// name for "n8n-nodes-base."-prefixed types); values hold the correctly cased
// official type and its version. Populated by loadKnownNodeBaseTypes().
let nodeInfoCache = new Map();
18
/**
 * Normalize LLM node parameters from the various shapes callers may send.
 *
 * - `modelName` is renamed to `model`.
 * - A plain string `model` is wrapped into n8n's resource-locator object.
 * - Credentials hints found under `options.credentials` or as a root-level
 *   `credentialsType` are converted into a node-level `credentials` entry
 *   (only the OpenAI provider is recognized; others are dropped, matching
 *   previous behavior).
 *
 * @param {Record<string, any>} params - Raw parameters; not mutated.
 * @returns {Record<string, any>} A normalized copy of the parameters.
 */
function normalizeLLMParameters(params) {
    const normalized = { ...params };
    // Bug fix: `{ ...params }` is a shallow copy, so deleting keys from
    // `normalized.options` would mutate the caller's nested object. Copy
    // `options` one level deep before touching its keys.
    if (normalized.options && typeof normalized.options === 'object') {
        normalized.options = { ...normalized.options };
    }
    // Handle model/modelName variation
    if (normalized.modelName && !normalized.model) {
        console.error(`[DEBUG] Normalizing 'modelName' to 'model'`);
        normalized.model = normalized.modelName;
        delete normalized.modelName;
    }
    // Convert model string to the resource-locator format n8n expects
    if (normalized.model && typeof normalized.model === 'string') {
        console.error(`[DEBUG] Converting model string to required format: ${normalized.model}`);
        const modelValue = normalized.model;
        normalized.model = {
            "__rl": true,
            "value": modelValue,
            "mode": "list",
            "cachedResultName": modelValue
        };
    }
    // Handle credentials supplied at the options level
    if (normalized.options?.credentials?.providerType) {
        const credType = normalized.options.credentials.providerType;
        console.error(`[DEBUG] Found credentials in options with type: ${credType}`);
        delete normalized.options.credentials;
        // Set credentials properly based on provider type
        if (credType === 'openAi' || credType === 'openAiApi') {
            normalized.credentials = {
                "openAiApi": {
                    "id": generateN8nId(),
                    "name": "OpenAi account"
                }
            };
        }
    }
    // Handle a `credentialsType` hint at the root level
    if (normalized.credentialsType && !normalized.credentials) {
        const credType = normalized.credentialsType;
        console.error(`[DEBUG] Found credentialsType at root level: ${credType}`);
        // Set credentials properly based on provider type
        if (credType === 'openAi' || credType === 'openAiApi') {
            normalized.credentials = {
                "openAiApi": {
                    "id": generateN8nId(),
                    "name": "OpenAi account"
                }
            };
        }
        // Remove the root-level hint so it does not leak into node parameters
        delete normalized.credentialsType;
    }
    return normalized;
}
72
/**
 * Populate nodeInfoCache from the JSON node definitions shipped with the
 * server (the `workflow_nodes` directory next to `dist/`).
 *
 * Each definition's `nodeType` is cached under its lowercased full name and,
 * for "n8n-nodes-base."-prefixed types, under the lowercased base name too,
 * so lookups are case- and prefix-insensitive. Failures are non-fatal: a bad
 * file is skipped, and a missing directory leaves the cache empty.
 */
async function loadKnownNodeBaseTypes() {
    // Path is relative to this script's location, assuming workflow_nodes is at project root
    const workflowNodesDir = path_1.default.resolve(__dirname, '../workflow_nodes');
    try {
        console.error(`[DEBUG] Attempting to load known node types from server resource path: ${workflowNodesDir}`);
        const files = await promises_1.default.readdir(workflowNodesDir);
        const suffix = ".json";
        // Drop any previously cached entries before re-scanning.
        nodeInfoCache.clear();
        for (const file of files) {
            if (file.endsWith(suffix)) {
                try {
                    // Read the JSON definition to get the nodeType property with correct casing.
                    const filePath = path_1.default.join(workflowNodesDir, file);
                    const fileContent = await promises_1.default.readFile(filePath, 'utf8');
                    const nodeDefinition = JSON.parse(fileContent);
                    if (nodeDefinition.nodeType) {
                        const officialType = nodeDefinition.nodeType;
                        const version = nodeDefinition.version || 1; // Default to 1 if version is missing/falsy
                        // Map by lowercase full official type.
                        nodeInfoCache.set(officialType.toLowerCase(), { officialType, version });
                        console.error(`[DEBUG] Cached node info for key '${officialType.toLowerCase()}': { officialType: '${officialType}', version: ${JSON.stringify(version)} }`);
                        // If it's a prefixed type (n8n-nodes-base), also map by its lowercase base name.
                        const prefix = "n8n-nodes-base.";
                        if (officialType.startsWith(prefix)) {
                            const baseName = officialType.substring(prefix.length);
                            if (baseName) { // Ensure baseName is not empty
                                nodeInfoCache.set(baseName.toLowerCase(), { officialType, version });
                                console.error(`[DEBUG] Cached node info for base key '${baseName.toLowerCase()}': { officialType: '${officialType}', version: ${JSON.stringify(version)} }`);
                            }
                        }
                    }
                }
                catch (parseError) {
                    // One bad definition must not abort the scan; warn and continue.
                    console.warn(`[WARN] Error parsing node definition in ${file}:`, parseError);
                }
            }
        }
        console.error(`[DEBUG] Loaded ${nodeInfoCache.size} cache entries for node types.`);
        if (nodeInfoCache.size === 0) {
            console.warn("[WARN] No node type information loaded into cache. Check 'workflow_nodes' directory and naming convention.");
        }
    }
    catch (error) {
        // Missing/unreadable directory: degrade gracefully with an empty cache;
        // type normalization will then fall back to defaults.
        console.warn(`[WARN] Could not load known node types from ${workflowNodesDir}: ${error.message}. Node type normalization might rely on defaults.`);
        nodeInfoCache = new Map(); // Ensure map is empty if loading fails
    }
}
123
/**
 * Resolve a user-supplied node type (any casing, with or without the
 * "n8n-nodes-base." prefix) to its official cased type name and a concrete
 * numeric typeVersion.
 *
 * Version precedence: an explicit numeric `inputVersion` wins; otherwise the
 * cached version is used (if the cache holds an array of versions, the
 * highest numeric entry); otherwise 1.
 *
 * @param {string} inputType - Node type as supplied by the caller.
 * @param {number|undefined} inputVersion - Optional explicit type version.
 * @returns {{finalNodeType: string, finalTypeVersion: number}}
 */
function normalizeNodeTypeAndVersion(inputType, inputVersion) {
    // Cache may be empty if the server started before a workspace was chosen.
    // A synchronous reload is not possible here (this function is sync), so we
    // only warn; callers like add_node reload the cache themselves beforehand.
    if (nodeInfoCache.size === 0 && WORKSPACE_DIR !== process.cwd()) { // Check if cache is empty and workspace might have changed
        console.warn("[WARN] nodeInfoCache is empty in normalizeNodeTypeAndVersion. Attempting to reload based on current WORKSPACE_DIR.");
    }
    const lowerInputType = inputType.toLowerCase();
    const prefix = "n8n-nodes-base.";
    const cacheEntry = nodeInfoCache.get(lowerInputType);
    let finalNodeType;
    let versionSource = 1; // Default version if not found in cache
    if (cacheEntry) {
        finalNodeType = cacheEntry.officialType; // This is the correctly cased, full type name
        versionSource = cacheEntry.version;
    }
    else {
        // Not in cache. Determine type based on structure.
        if (inputType.includes('/') && !lowerInputType.startsWith(prefix)) {
            // Likely a namespaced type not in cache (e.g. user typed it, or it's new)
            finalNodeType = inputType; // Use user's casing
            console.warn(`[WARN] Namespaced node type ${inputType} not in cache. Using as-is with default version.`);
        }
        else {
            // Assumed to be a base type needing a prefix, or a prefixed type not in cache.
            // Use inputType for casing if it already seems prefixed, otherwise prefix the original inputType
            finalNodeType = lowerInputType.startsWith(prefix) ? inputType : `${prefix}${inputType}`;
            console.warn(`[WARN] Node type ${inputType} (assumed base/prefixed) not in cache. Result: ${finalNodeType} with default version.`);
        }
        // versionSource remains 1 (default)
    }
    let finalTypeVersion;
    if (inputVersion !== undefined && !isNaN(Number(inputVersion))) {
        // Explicit numeric version supplied by the caller wins.
        finalTypeVersion = Number(inputVersion);
    }
    else { // inputVersion was not provided or was NaN, use versionSource
        if (inputVersion !== undefined && isNaN(Number(inputVersion))) {
            console.warn(`[WARN] Provided inputVersion '${inputVersion}' is NaN for node ${finalNodeType}. Determining from cache/default.`);
        }
        if (Array.isArray(versionSource)) {
            // Cache may store several supported versions; pick the highest numeric one.
            if (versionSource.length > 0) {
                const numericVersions = versionSource.map(v => Number(v)).filter(v => !isNaN(v));
                finalTypeVersion = numericVersions.length > 0 ? Math.max(...numericVersions) : 1;
            }
            else {
                finalTypeVersion = 1; // Empty array in cache
            }
        }
        else { // It's a number or was defaulted to 1
            finalTypeVersion = Number(versionSource);
        }
        if (isNaN(finalTypeVersion))
            finalTypeVersion = 1; // Fallback for bad data from cache
    }
    if (isNaN(finalTypeVersion))
        finalTypeVersion = 1; // Final check, e.g. if inputVersion was NaN and versionSource also led to NaN.
    console.error(`[DEBUG] normalizeNodeTypeAndVersion: input='${inputType}', inputVersion=${inputVersion} -> finalNodeType='${finalNodeType}', finalTypeVersion=${finalTypeVersion}`);
    return { finalNodeType, finalTypeVersion };
}
187
/**
 * Resolve a path against the active workspace directory.
 * Leading path separators are stripped first, so the input is always treated
 * as relative to WORKSPACE_DIR and can never escape to an absolute location.
 *
 * @param {string} filepath - Path to resolve (leading '/' or '\' ignored).
 * @returns {string} Absolute path inside WORKSPACE_DIR.
 */
function resolvePath(filepath) {
    let trimmed = filepath;
    while (trimmed.startsWith('/') || trimmed.startsWith('\\')) {
        trimmed = trimmed.slice(1);
    }
    return path_1.default.join(WORKSPACE_DIR, trimmed);
}
194
/**
 * Determine the on-disk location of a workflow file.
 *
 * When `workflowPath` is given it takes precedence: an absolute path is used
 * verbatim, a relative one is anchored at the process working directory (not
 * WORKSPACE_DIR). Otherwise the standard layout is used:
 * `<WORKSPACE_DIR>/workflow_data/<sanitized workflow name>.json`.
 *
 * @param {string} workflowName - Workflow name (sanitized for the filename).
 * @param {string} [workflowPath] - Optional explicit file path.
 * @returns {string} Resolved path to the workflow JSON file.
 */
function resolveWorkflowPath(workflowName, workflowPath) {
    if (workflowPath) {
        return path_1.default.isAbsolute(workflowPath)
            ? workflowPath
            : path_1.default.resolve(process.cwd(), workflowPath);
    }
    // Standard workflow directory approach: non [a-z0-9_.-] chars become '_'.
    const sanitizedName = workflowName.replace(/[^a-z0-9_.-]/gi, '_');
    return resolvePath(path_1.default.join(WORKFLOW_DATA_DIR_NAME, `${sanitizedName}.json`));
}
211
/**
 * Create the parent directory of a workflow file if it does not exist yet.
 * Failures are logged and re-thrown so the calling tool can report them.
 *
 * @param {string} filePath - Workflow file whose parent directory is ensured.
 */
async function ensureWorkflowParentDir(filePath) {
    try {
        const dir = path_1.default.dirname(filePath);
        console.error("[DEBUG] Ensuring parent directory exists:", dir);
        // recursive:true makes this a no-op when the directory already exists.
        await promises_1.default.mkdir(dir, { recursive: true });
    }
    catch (error) {
        console.error('[ERROR] Failed to ensure parent directory:', error);
        throw error;
    }
}
223
// ID Generation Helpers
/**
 * Generate a random alphanumeric identifier in the style n8n uses for
 * workflow/credential ids (default 16 chars drawn from [A-Za-z0-9]).
 * Uses Math.random, so it is NOT cryptographically secure — fine for ids.
 *
 * @param {number} [length=16] - Number of characters to produce.
 * @returns {string} Random identifier.
 */
function generateN8nId(length = 16) {
    const alphabet = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789';
    return Array.from({ length }, () => alphabet[Math.floor(Math.random() * alphabet.length)]).join('');
}
232
/**
 * Generate a random RFC 4122 version-4 style UUID string.
 * Built from Math.random (not cryptographically secure): the version nibble
 * is fixed to '4' and the variant nibble is forced into [89ab].
 *
 * @returns {string} UUID string, e.g. "xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx".
 */
function generateUUID() {
    const template = 'xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx';
    let out = '';
    for (const c of template) {
        if (c === 'x' || c === 'y') {
            const r = Math.floor(Math.random() * 16);
            // 'y' positions carry the variant bits: (r & 0x3) | 0x8 -> 8..b.
            out += (c === 'x' ? r : ((r & 0x3) | 0x8)).toString(16);
        }
        else {
            out += c; // literal '-' and the fixed version digit '4'
        }
    }
    return out;
}
238
/**
 * Generate a random lowercase-hex instance id (default 64 chars), matching
 * the shape of n8n's meta.instanceId. Not cryptographically secure.
 *
 * @param {number} [length=64] - Number of hex characters to produce.
 * @returns {string} Random hex string.
 */
function generateInstanceId(length = 64) {
    const hexChars = 'abcdef0123456789';
    const parts = [];
    for (let i = 0; i < length; i++) {
        parts.push(hexChars[Math.floor(Math.random() * hexChars.length)]);
    }
    return parts.join('');
}
246
// Constants
// Directory (under WORKSPACE_DIR) where per-workflow JSON files live.
const WORKFLOW_DATA_DIR_NAME = 'workflow_data';
// Legacy aggregated file used by the old storage format (loadWorkflows /
// saveWorkflows). Kept for now, but create_workflow won't use it.
const WORKFLOWS_FILE_NAME = 'workflows.json';
249
// Helper functions
/**
 * Make sure the workflow_data directory exists inside the current workspace.
 * Individual workflow files are stored here, one JSON file per workflow
 * (workflows.json is no longer created). Errors are logged and re-thrown.
 */
async function ensureWorkflowDir() {
    try {
        const dir = resolvePath(WORKFLOW_DATA_DIR_NAME);
        console.error("[DEBUG] Ensuring workflow directory at:", dir);
        await promises_1.default.mkdir(dir, { recursive: true });
    }
    catch (error) {
        console.error('[ERROR] Failed to ensure workflow directory:', error);
        throw error;
    }
}
262
/**
 * Legacy loader for the aggregated workflows.json file (old storage format).
 * If the file is missing an empty one is created and [] is returned; any
 * other failure is logged and re-thrown.
 *
 * @returns {Promise<any[]>} Parsed workflow array from workflows.json.
 */
async function loadWorkflows() {
    const workflowsFile = resolvePath(path_1.default.join(WORKFLOW_DATA_DIR_NAME, WORKFLOWS_FILE_NAME));
    try {
        // Guarantees the directory exists; does not create workflows.json itself.
        await ensureWorkflowDir();
        const raw = await promises_1.default.readFile(workflowsFile, 'utf8');
        console.error("[DEBUG] Loaded workflows (old format):", raw);
        return JSON.parse(raw);
    }
    catch (error) {
        if (error.code !== 'ENOENT') {
            console.error('[ERROR] Failed to load workflows (old format):', error);
            throw error;
        }
        console.error("[DEBUG] No workflows.json file found (old format), returning empty array");
        // Recreate the legacy file so later reads under the old logic succeed.
        await promises_1.default.writeFile(workflowsFile, JSON.stringify([], null, 2));
        return [];
    }
}
285
/**
 * Legacy saver for the aggregated workflows.json file (old storage format):
 * serializes the given array and writes it to workflows.json in the
 * workspace's workflow_data directory. Errors are logged and re-thrown.
 *
 * @param {any[]} workflows - Workflow array to persist.
 */
async function saveWorkflows(workflows) {
    try {
        await ensureWorkflowDir();
        const targetFile = resolvePath(path_1.default.join(WORKFLOW_DATA_DIR_NAME_ALIAS(), WORKFLOWS_FILE_NAME));
        const serialized = JSON.stringify(workflows, null, 2);
        console.error("[DEBUG] Saving workflows (old format):", serialized);
        await promises_1.default.writeFile(targetFile, serialized);
    }
    catch (error) {
        console.error('[ERROR] Failed to save workflows (old format):', error);
        throw error;
    }
}
function WORKFLOW_DATA_DIR_NAME_ALIAS() {
    // Tiny indirection kept local to this block; returns the shared constant.
    return WORKFLOW_DATA_DIR_NAME;
}
298
// Create the MCP server
// All tools below are registered on this instance; it is later connected to
// a stdio transport (stdio_js_1) so an MCP client can invoke the tools.
const server = new mcp_js_1.McpServer({
    name: "n8n-workflow-builder",
    version: "1.0.0"
});
303
// Tool definitions
// Create Workflow
const createWorkflowParamsSchema = zod_1.z.object({
    workflow_name: zod_1.z.string().describe("The name for the new workflow"),
    workspace_dir: zod_1.z.string().describe("Absolute path to the project root directory where workflow_data will be stored")
});
/**
 * create_workflow: validates workspace_dir (must be an existing directory and
 * not the filesystem root), makes it the active WORKSPACE_DIR, ensures the
 * workflow_data directory exists, and writes a fresh empty n8n workflow JSON
 * file named after the (sanitized) workflow name.
 * Returns a JSON text payload: { success, workflow | error, ... }.
 */
server.tool("create_workflow", createWorkflowParamsSchema.shape, async (params, _extra) => {
    console.error("[DEBUG] create_workflow called with params:", params);
    const workflowName = params.workflow_name;
    const workspaceDir = params.workspace_dir;
    if (!workflowName || workflowName.trim() === "") {
        return { content: [{ type: "text", text: JSON.stringify({ success: false, error: "Parameter 'workflow_name' is required." }) }] };
    }
    if (!workspaceDir || workspaceDir.trim() === "") {
        return { content: [{ type: "text", text: JSON.stringify({ success: false, error: "Parameter 'workspace_dir' is required." }) }] };
    }
    try {
        const stat = await promises_1.default.stat(workspaceDir);
        if (!stat.isDirectory()) {
            return { content: [{ type: "text", text: JSON.stringify({ success: false, error: "Provided 'workspace_dir' is not a directory." }) }] };
        }
        // Refuse the filesystem root as a workspace to avoid scattering files.
        if (path_1.default.resolve(workspaceDir) === path_1.default.resolve('/')) {
            console.error("[ERROR] 'workspace_dir' cannot be the root directory ('/'). Please specify a valid project subdirectory.");
            return { content: [{ type: "text", text: JSON.stringify({ success: false, error: "'workspace_dir' cannot be the root directory. Please specify a project subdirectory." }) }] };
        }
        WORKSPACE_DIR = workspaceDir; // Set current workspace for resolvePath
        await ensureWorkflowDir(); // Ensures WORKFLOW_DATA_DIR_NAME exists
        // Skeleton matching n8n's workflow export shape.
        const newN8nWorkflow = {
            name: workflowName,
            id: generateN8nId(), // e.g., "Y6sBMxxyJQtgCCBQ"
            nodes: [], // Initialize with empty nodes array
            connections: {}, // Initialize with empty connections object
            active: false,
            pinData: {},
            settings: {
                executionOrder: "v1"
            },
            versionId: generateUUID(),
            meta: {
                instanceId: generateInstanceId()
            },
            tags: []
        };
        // Sanitize workflowName for the filename (same rule as resolveWorkflowPath).
        const filename = `${workflowName.replace(/[^a-z0-9_.-]/gi, '_')}.json`;
        const filePath = resolvePath(path_1.default.join(WORKFLOW_DATA_DIR_NAME, filename));
        await promises_1.default.writeFile(filePath, JSON.stringify(newN8nWorkflow, null, 2));
        console.error("[DEBUG] Workflow created and saved to:", filePath);
        // Bug fix: the recommendation string was truncated mid-sentence
        // ("...and 'ai"); completed and cleaned up the message.
        return { content: [{ type: "text", text: JSON.stringify({ success: true, workflow: newN8nWorkflow, recommended_next_step: "YOU NEED TO CALL 'list_available_nodes' TOOL BEFORE starting adding nodes. SEARCH BY SPECIFIC TOPIC USING 'search_term' parameter. To search AI nodes you can use 'langchain' or 'ai' as the search term." }) }] };
    }
    catch (error) {
        console.error("[ERROR] Failed to create workflow:", error);
        return { content: [{ type: "text", text: JSON.stringify({ success: false, error: "Failed to create workflow: " + error.message }) }] };
    }
});
360
// List Workflows
// Scans the workflow_data directory for individual *.json workflow files
// (the legacy aggregated workflows.json is excluded) and returns them all.
server.tool("list_workflows", {}, async (_params, _extra) => {
    console.error("[DEBUG] list_workflows called - (current impl uses old format and might be broken)");
    try {
        await ensureWorkflowDir(); // Ensures directory exists
        const workflowDataDir = resolvePath(WORKFLOW_DATA_DIR_NAME);
        const files = await promises_1.default.readdir(workflowDataDir);
        // One workflow per .json file; skip the legacy aggregate file.
        const workflowFiles = files.filter(file => file.endsWith('.json') && file !== WORKFLOWS_FILE_NAME);
        const workflows = [];
        for (const file of workflowFiles) {
            try {
                const data = await promises_1.default.readFile(path_1.default.join(workflowDataDir, file), 'utf8');
                workflows.push(JSON.parse(data));
            }
            catch (err) {
                // An unreadable/corrupt file is skipped so the rest still list.
                console.error(`[ERROR] Failed to read or parse workflow file ${file}:`, err);
            }
        }
        console.error(`[DEBUG] Retrieved ${workflows.length} workflows from individual files.`);
        return { content: [{ type: "text", text: JSON.stringify({ success: true, workflows }) }] };
    }
    catch (error) {
        console.error("[ERROR] Failed to list workflows:", error);
        return { content: [{ type: "text", text: JSON.stringify({ success: false, error: "Failed to list workflows: " + error.message }) }] };
    }
});
392
// Get Workflow Details
// Looks a workflow up by name (which determines its filename), or by an
// explicit workflow_path, reads the JSON file and returns its full content.
const getWorkflowDetailsParamsSchema = zod_1.z.object({
    workflow_name: zod_1.z.string().describe("The Name of the workflow to get details for"),
    workflow_path: zod_1.z.string().optional().describe("Optional direct path to the workflow file (absolute or relative to current working directory). If not provided, uses standard workflow_data directory approach.")
});
server.tool("get_workflow_details", getWorkflowDetailsParamsSchema.shape, async (params, _extra) => {
    const workflowName = params.workflow_name;
    console.error("[DEBUG] get_workflow_details called with name:", workflowName);
    try {
        const filePath = resolveWorkflowPath(workflowName, params.workflow_path);
        // Only ensure the default workflow directory when using the standard
        // layout; for an explicit path, ensure that file's parent instead.
        if (!params.workflow_path) {
            await ensureWorkflowDir();
        }
        else {
            await ensureWorkflowParentDir(filePath);
        }
        try {
            const data = await promises_1.default.readFile(filePath, 'utf8');
            const workflow = JSON.parse(data);
            console.error("[DEBUG] Found workflow by name in file:", filePath);
            return { content: [{ type: "text", text: JSON.stringify({ success: true, workflow }) }] };
        }
        catch (error) {
            // Missing file -> "not found" result; other read errors bubble up.
            if (error.code === 'ENOENT') {
                console.warn(`[DEBUG] Workflow file ${filePath} not found using name: ${workflowName}.`);
                return { content: [{ type: "text", text: JSON.stringify({ success: false, error: `Workflow with name ${workflowName} not found` }) }] };
            }
            else {
                throw error; // Re-throw other read errors
            }
        }
    }
    catch (error) {
        console.error("[ERROR] Failed to get workflow details:", error);
        return { content: [{ type: "text", text: JSON.stringify({ success: false, error: "Failed to get workflow details: " + error.message }) }] };
    }
});
434
// Add Node
// Loads the target workflow file, builds a new node (type/version resolved
// through the node-info cache, parameters normalized for LLM/OpenAI cases),
// appends it to the workflow's nodes array and saves the file.
const addNodeParamsSchema = zod_1.z.object({
    workflow_name: zod_1.z.string().describe("The Name of the workflow to add the node to"),
    node_type: zod_1.z.string().describe("The type of node to add (e.g., 'gmail', 'slack', 'openAi'). You can specify with or without the 'n8n-nodes-base.' prefix. The system will handle proper casing (e.g., 'openai' will be converted to 'openAi' if that's the correct casing)."),
    position: zod_1.z.object({
        x: zod_1.z.number(),
        y: zod_1.z.number()
    }).optional().describe("The position of the node {x,y} - will be converted to [x,y] for N8nWorkflowNode"),
    parameters: zod_1.z.record(zod_1.z.string(), zod_1.z.any()).optional().describe("The parameters for the node"),
    node_name: zod_1.z.string().optional().describe("The name for the new node (e.g., 'My Gmail Node')"),
    typeVersion: zod_1.z.number().optional().describe("The type version for the node (e.g., 1, 1.1). Defaults to 1 if not specified."),
    webhookId: zod_1.z.string().optional().describe("Optional webhook ID for certain node types like triggers."),
    workflow_path: zod_1.z.string().optional().describe("Optional direct path to the workflow file (absolute or relative to current working directory). If not provided, uses standard workflow_data directory approach.")
});
server.tool("add_node", addNodeParamsSchema.shape, async (params, _extra) => {
    console.error("[DEBUG] add_node called with:", params);
    const workflowName = params.workflow_name;
    try {
        // Reload the node-type cache if it is empty but a workspace was set by
        // a previous call (e.g. create_workflow) after server startup.
        if (nodeInfoCache.size === 0 && WORKSPACE_DIR !== process.cwd()) {
            console.warn("[WARN] nodeInfoCache is empty in add_node. Attempting to reload based on current WORKSPACE_DIR.");
            await loadKnownNodeBaseTypes();
        }
        const filePath = resolveWorkflowPath(workflowName, params.workflow_path);
        // Standard layout -> ensure workflow_data; explicit path -> ensure its parent.
        if (!params.workflow_path) {
            await ensureWorkflowDir();
        }
        else {
            await ensureWorkflowParentDir(filePath);
        }
        let workflow;
        try {
            const data = await promises_1.default.readFile(filePath, 'utf8');
            workflow = JSON.parse(data);
        }
        catch (readError) {
            if (readError.code === 'ENOENT') {
                return { content: [{ type: "text", text: JSON.stringify({ success: false, error: `Workflow with name ${workflowName} not found at ${filePath}` }) }] };
            }
            throw readError;
        }
        // Ensure workflow.nodes exists before appending.
        if (!Array.isArray(workflow.nodes)) {
            workflow.nodes = [];
        }
        // Fall back to a random canvas position when none was provided.
        const defaultPos = params.position || { x: Math.floor(Math.random() * 500), y: Math.floor(Math.random() * 500) };
        const { finalNodeType, finalTypeVersion } = normalizeNodeTypeAndVersion(params.node_type, params.typeVersion);
        // Process parameters for LangChain LLM nodes.
        let nodeParameters = params.parameters || {};
        // Heuristic: LangChain LLM nodes carry the langchain package name plus
        // an 'lmChat'/'llm' marker in the type name.
        const isLangChainLLM = finalNodeType.includes('@n8n/n8n-nodes-langchain') &&
            (finalNodeType.includes('lmChat') || finalNodeType.includes('llm'));
        if (isLangChainLLM) {
            console.error(`[DEBUG] Applying parameter normalization for LangChain LLM node`);
            nodeParameters = normalizeLLMParameters(nodeParameters);
        }
        else {
            // Handle OpenAI credentials specifically for non-LangChain nodes:
            // move options.credentials to a node-level credentials entry.
            if (params.parameters?.options?.credentials?.providerType === 'openAi') {
                console.error(`[DEBUG] Setting up proper OpenAI credentials format for standard node`);
                if (nodeParameters.options?.credentials) {
                    const credentialsType = nodeParameters.options.credentials.providerType; // NOTE(review): read but currently unused
                    delete nodeParameters.options.credentials;
                    // Set a placeholder for credentials that would be filled in the n8n UI
                    if (!nodeParameters.credentials) {
                        nodeParameters.credentials = {};
                    }
                    // Add credentials in the proper format for OpenAI
                    nodeParameters.credentials = {
                        "openAiApi": {
                            "id": generateN8nId(),
                            "name": "OpenAi account"
                        }
                    };
                }
            }
        }
        const newNode = {
            id: generateUUID(),
            type: finalNodeType,
            typeVersion: finalTypeVersion, // Use version from normalizeNodeTypeAndVersion
            position: [defaultPos.x, defaultPos.y],
            parameters: nodeParameters,
            name: params.node_name || `${finalNodeType} Node`, // Use finalNodeType for default name
            ...(params.webhookId && { webhookId: params.webhookId }) // Add webhookId if provided
        };
        workflow.nodes.push(newNode);
        await promises_1.default.writeFile(filePath, JSON.stringify(workflow, null, 2));
        console.error(`[DEBUG] Added node ${newNode.id} to workflow ${workflowName} in file ${filePath}`);
        return { content: [{ type: "text", text: JSON.stringify({ success: true, node: newNode, workflowId: workflow.id }) }] };
    }
    catch (error) {
        console.error("[ERROR] Failed to add node:", error);
        return { content: [{ type: "text", text: JSON.stringify({ success: false, error: "Failed to add node: " + error.message }) }] };
    }
});
538
// Edit Node
// Parameter schema for the edit_node tool (handler registered below): all
// node fields are optional; only the supplied ones are applied to the node
// identified by node_id in the named workflow file.
const editNodeParamsSchema = zod_1.z.object({
    workflow_name: zod_1.z.string().describe("The Name of the workflow containing the node"),
    node_id: zod_1.z.string().describe("The ID of the node to edit"),
    node_type: zod_1.z.string().optional().describe("The new type for the node (e.g., 'gmail', 'slack', 'openAi'). You can specify with or without the 'n8n-nodes-base.' prefix. The system will handle proper casing (e.g., 'openai' will be converted to 'openAi' if that's the correct casing)."),
    node_name: zod_1.z.string().optional().describe("The new name for the node"),
    position: zod_1.z.object({
        x: zod_1.z.number(),
        y: zod_1.z.number()
    }).optional().describe("The new position {x,y} - will be converted to [x,y]"),
    parameters: zod_1.z.record(zod_1.z.string(), zod_1.z.any()).optional().describe("The new parameters"),
    typeVersion: zod_1.z.number().optional().describe("The new type version for the node"),
    webhookId: zod_1.z.string().optional().describe("Optional new webhook ID for the node."),
    workflow_path: zod_1.z.string().optional().describe("Optional workflow path to the workflow file")
});
554
+ server.tool("edit_node", editNodeParamsSchema.shape, async (params, _extra) => {
555
+ console.error("[DEBUG] edit_node called with:", params);
556
+ const workflowName = params.workflow_name;
557
+ try {
558
+ // Similar cache reload logic as in add_node
559
+ if (nodeInfoCache.size === 0 && WORKSPACE_DIR !== process.cwd()) {
560
+ console.warn("[WARN] nodeInfoCache is empty in edit_node. Attempting to reload based on current WORKSPACE_DIR.");
561
+ await loadKnownNodeBaseTypes();
562
+ }
563
+ const filePath = resolveWorkflowPath(workflowName, params.workflow_path);
564
+ // Only ensure the default workflow directory if using standard approach
565
+ if (!params.workflow_path) {
566
+ await ensureWorkflowDir();
567
+ }
568
+ else {
569
+ // Ensure the parent directory of the custom workflow file exists
570
+ await ensureWorkflowParentDir(filePath);
571
+ }
572
+ let workflow;
573
+ try {
574
+ const data = await promises_1.default.readFile(filePath, 'utf8');
575
+ workflow = JSON.parse(data);
576
+ }
577
+ catch (readError) {
578
+ if (readError.code === 'ENOENT') {
579
+ return { content: [{ type: "text", text: JSON.stringify({ success: false, error: `Workflow with name ${workflowName} not found at ${filePath}` }) }] };
580
+ }
581
+ throw readError;
582
+ }
583
+ const nodeIndex = workflow.nodes.findIndex(n => n.id === params.node_id);
584
+ if (nodeIndex === -1) {
585
+ return { content: [{ type: "text", text: JSON.stringify({ success: false, error: `Node with id ${params.node_id} not found in workflow ${workflowName}` }) }] };
586
+ }
587
+ const nodeToEdit = workflow.nodes[nodeIndex];
588
+ let newType = nodeToEdit.type;
589
+ let newTypeVersion = nodeToEdit.typeVersion;
590
+ if (params.node_type) {
591
+ // If node_type is changing, typeVersion should be re-evaluated based on the new type,
592
+ // unless a specific params.typeVersion is also given for this edit.
593
+ const { finalNodeType, finalTypeVersion: determinedVersionForNewType } = normalizeNodeTypeAndVersion(params.node_type, params.typeVersion);
594
+ newType = finalNodeType;
595
+ newTypeVersion = determinedVersionForNewType; // This uses params.typeVersion if valid, else default for new type.
596
+ }
597
+ else if (params.typeVersion !== undefined && !isNaN(Number(params.typeVersion))) {
598
+ // Only typeVersion is being changed, node_type remains the same.
599
+ newTypeVersion = Number(params.typeVersion);
600
+ }
601
+ else if (params.typeVersion !== undefined && isNaN(Number(params.typeVersion))) {
602
+ console.warn(`[WARN] Provided typeVersion '${params.typeVersion}' for editing node ${nodeToEdit.id} is NaN. typeVersion will not be changed.`);
603
+ }
604
+ nodeToEdit.type = newType;
605
+ nodeToEdit.typeVersion = newTypeVersion;
606
+ if (params.node_name)
607
+ nodeToEdit.name = params.node_name;
608
+ if (params.position)
609
+ nodeToEdit.position = [params.position.x, params.position.y];
610
+ // Process new parameters if provided
611
+ if (params.parameters) {
612
+ let newParameters = params.parameters;
613
+ // Check if this is a LangChain LLM node
614
+ const isLangChainLLM = newType.includes('@n8n/n8n-nodes-langchain') &&
615
+ (newType.includes('lmChat') || newType.includes('llm'));
616
+ // Apply normalization for LangChain LLM nodes
617
+ if (isLangChainLLM) {
618
+ console.error(`[DEBUG] Applying parameter normalization for LangChain LLM node during edit`);
619
+ newParameters = normalizeLLMParameters(newParameters);
620
+ }
621
+ else {
622
+ // Handle OpenAI credentials specifically for non-LangChain nodes
623
+ if (newParameters.options?.credentials?.providerType === 'openAi') {
624
+ console.error(`[DEBUG] Setting up proper OpenAI credentials format for standard node during edit`);
625
+ // Remove credentials from options and set at node level
626
+ if (newParameters.options?.credentials) {
627
+ const credentialsType = newParameters.options.credentials.providerType;
628
+ delete newParameters.options.credentials;
629
+ // Set a placeholder for credentials that would be filled in the n8n UI
630
+ if (!newParameters.credentials) {
631
+ newParameters.credentials = {};
632
+ }
633
+ // Add credentials in the proper format for OpenAI
634
+ newParameters.credentials = {
635
+ "openAiApi": {
636
+ "id": generateN8nId(),
637
+ "name": "OpenAi account"
638
+ }
639
+ };
640
+ }
641
+ }
642
+ }
643
+ nodeToEdit.parameters = newParameters;
644
+ }
645
+ if (params.webhookId !== undefined) { // Allow setting or unsetting webhookId
646
+ if (params.webhookId === null || params.webhookId === "") { // Check for explicit clear
647
+ delete nodeToEdit.webhookId;
648
+ }
649
+ else {
650
+ nodeToEdit.webhookId = params.webhookId;
651
+ }
652
+ }
653
+ workflow.nodes[nodeIndex] = nodeToEdit;
654
+ await promises_1.default.writeFile(filePath, JSON.stringify(workflow, null, 2));
655
+ console.error(`[DEBUG] Edited node ${params.node_id} in workflow ${workflowName} in file ${filePath}`);
656
+ return { content: [{ type: "text", text: JSON.stringify({ success: true, node: nodeToEdit }) }] };
657
+ }
658
+ catch (error) {
659
+ console.error("[ERROR] Failed to edit node:", error);
660
+ return { content: [{ type: "text", text: JSON.stringify({ success: false, error: "Failed to edit node: " + error.message }) }] };
661
+ }
662
+ });
663
// Delete Node
// NOTE: This tool also needs updates for single-file workflow management.
const deleteNodeParamsSchema = zod_1.z.object({
    workflow_name: zod_1.z.string().describe("The Name of the workflow containing the node"),
    node_id: zod_1.z.string().describe("The ID of the node to delete"),
    workflow_path: zod_1.z.string().optional().describe("Optional direct path to the workflow file (absolute or relative to current working directory). If not provided, uses standard workflow_data directory approach.")
});
/**
 * delete_node: removes a node from the workflow file and prunes every
 * connection that references it (connections are keyed by node name).
 */
server.tool("delete_node", deleteNodeParamsSchema.shape, async (params, _extra) => {
    console.error("[DEBUG] delete_node called with:", params);
    const workflowName = params.workflow_name;
    try {
        const filePath = resolveWorkflowPath(workflowName, params.workflow_path);
        // Only ensure the default workflow directory if using standard approach
        if (!params.workflow_path) {
            await ensureWorkflowDir();
        }
        else {
            // Ensure the parent directory of the custom workflow file exists
            await ensureWorkflowParentDir(filePath);
        }
        let workflow;
        try {
            workflow = JSON.parse(await promises_1.default.readFile(filePath, 'utf8'));
        }
        catch (readError) {
            if (readError.code === 'ENOENT') {
                return { content: [{ type: "text", text: JSON.stringify({ success: false, error: `Workflow with name ${workflowName} not found at ${filePath}` }) }] };
            }
            throw readError;
        }
        const nodeIndex = workflow.nodes.findIndex(n => n.id === params.node_id);
        if (nodeIndex === -1) {
            return { content: [{ type: "text", text: JSON.stringify({ success: false, error: `Node with id ${params.node_id} not found in workflow ${workflowName}` }) }] };
        }
        const removedName = workflow.nodes[nodeIndex].name;
        workflow.nodes.splice(nodeIndex, 1);
        // Rebuild the connections map without the deleted node.
        // This is a simplified connection removal. n8n's logic might be more complex.
        const prunedConnections = {};
        for (const [sourceName, outputs] of Object.entries(workflow.connections || {})) {
            // Drop every connection originating FROM the deleted node.
            if (sourceName === removedName) {
                continue;
            }
            const keptOutputs = {};
            for (const [outputKey, chains] of Object.entries(outputs)) {
                // Strip connection details targeting the deleted node; discard
                // chains that become empty as a result.
                const keptChains = chains
                    .map(chain => chain.filter(detail => detail.node !== removedName))
                    .filter(chain => chain.length > 0);
                if (keptChains.length > 0) {
                    keptOutputs[outputKey] = keptChains;
                }
            }
            if (Object.keys(keptOutputs).length > 0) {
                prunedConnections[sourceName] = keptOutputs;
            }
        }
        workflow.connections = prunedConnections;
        await promises_1.default.writeFile(filePath, JSON.stringify(workflow, null, 2));
        console.error(`[DEBUG] Deleted node ${params.node_id} from workflow ${workflowName} in file ${filePath}`);
        return { content: [{ type: "text", text: JSON.stringify({ success: true, message: `Node ${params.node_id} deleted successfully from workflow ${workflowName}` }) }] };
    }
    catch (error) {
        console.error("[ERROR] Failed to delete node:", error);
        return { content: [{ type: "text", text: JSON.stringify({ success: false, error: "Failed to delete node: " + error.message }) }] };
    }
});
735
// Add Connection
const addConnectionParamsSchema = zod_1.z.object({
    workflow_name: zod_1.z.string().describe("The Name of the workflow to add the connection to"),
    source_node_id: zod_1.z.string().describe("The ID of the source node for the connection"),
    source_node_output_name: zod_1.z.string().describe("The name of the output handle on the source node (e.g., 'main')"),
    target_node_id: zod_1.z.string().describe("The ID of the target node for the connection"),
    target_node_input_name: zod_1.z.string().describe("The name of the input handle on the target node (e.g., 'main')"),
    target_node_input_index: zod_1.z.number().optional().default(0).describe("The index for the target node's input handle (default: 0)"),
    workflow_path: zod_1.z.string().optional().describe("Optional direct path to the workflow file (absolute or relative to current working directory). If not provided, uses standard workflow_data directory approach.")
});
/**
 * add_connection: wires source_node -> target_node in the workflow file.
 * Connections are keyed by node *name* on the source side; each entry under
 * an output handle is an array of connection chains.
 *
 * Consistency fix: now accepts the optional workflow_path parameter and
 * resolves the file via resolveWorkflowPath, like edit_node/delete_node,
 * instead of duplicating the sanitize-and-join logic inline. Default
 * behavior (no workflow_path) is unchanged. Also removed an unused local
 * (connectionDirection) that was assigned but never read.
 */
server.tool("add_connection", addConnectionParamsSchema.shape, async (params, _extra) => {
    console.error("[DEBUG] add_connection called with:", params);
    const { workflow_name, source_node_id, source_node_output_name, target_node_id, target_node_input_name, target_node_input_index } = params;
    try {
        const filePath = resolveWorkflowPath(workflow_name, params.workflow_path);
        // Only ensure the default workflow directory if using standard approach
        if (!params.workflow_path) {
            await ensureWorkflowDir();
        }
        else {
            // Ensure the parent directory of the custom workflow file exists
            await ensureWorkflowParentDir(filePath);
        }
        let workflow;
        try {
            const data = await promises_1.default.readFile(filePath, 'utf8');
            workflow = JSON.parse(data);
        }
        catch (readError) {
            if (readError.code === 'ENOENT') {
                return { content: [{ type: "text", text: JSON.stringify({ success: false, error: `Workflow with name ${workflow_name} not found at ${filePath}` }) }] };
            }
            throw readError;
        }
        const sourceNode = workflow.nodes.find(node => node.id === source_node_id);
        const targetNode = workflow.nodes.find(node => node.id === target_node_id);
        if (!sourceNode) {
            return { content: [{ type: "text", text: JSON.stringify({ success: false, error: `Source node with ID ${source_node_id} not found in workflow ${workflow_name}` }) }] };
        }
        if (!targetNode) {
            return { content: [{ type: "text", text: JSON.stringify({ success: false, error: `Target node with ID ${target_node_id} not found in workflow ${workflow_name}` }) }] };
        }
        const sourceNodeNameKey = sourceNode.name; // n8n connections are keyed by node *name*
        const targetNodeNameValue = targetNode.name;
        // Detect if we're working with LangChain AI nodes that require special connection handling
        const isLangChainSource = sourceNode.type.includes('@n8n/n8n-nodes-langchain');
        const isLangChainTarget = targetNode.type.includes('@n8n/n8n-nodes-langchain');
        const isAIConnection = source_node_output_name.startsWith('ai_') || target_node_input_name.startsWith('ai_');
        // Warn (but do not block) when the caller may have the connection
        // direction backwards: in n8n, models/tools/memory connect TO the
        // agent, not the other way around.
        if ((isLangChainSource || isLangChainTarget) && isAIConnection) {
            if (
            // If it's a LLM, Tool, or Memory node pointing to an agent
            (sourceNode.type.includes('lmChat') ||
                sourceNode.type.includes('tool') ||
                sourceNode.type.toLowerCase().includes('request') ||
                sourceNode.type.includes('memory'))
                && targetNode.type.includes('agent')) {
                console.warn("[WARN] LangChain AI connection detected. N8n often expects models, tools, and memory to connect TO agents rather than agents connecting to them.");
                console.warn("[WARN] Connections will be created as specified, but if they don't appear correctly in n8n UI, try reversing the source and target.");
                // Special hint for memory connections
                if (sourceNode.type.includes('memory')) {
                    if (source_node_output_name !== 'ai_memory') {
                        console.warn("[WARN] Memory nodes should usually connect to agents using 'ai_memory' output, not '" + source_node_output_name + "'.");
                    }
                    if (target_node_input_name !== 'ai_memory') {
                        console.warn("[WARN] Agents should receive memory connections on 'ai_memory' input, not '" + target_node_input_name + "'.");
                    }
                }
            }
        }
        const newConnectionObject = {
            node: targetNodeNameValue,
            type: target_node_input_name,
            index: target_node_input_index
        };
        if (!workflow.connections) {
            workflow.connections = {};
        }
        if (!workflow.connections[sourceNodeNameKey]) {
            workflow.connections[sourceNodeNameKey] = {};
        }
        if (!workflow.connections[sourceNodeNameKey][source_node_output_name]) {
            workflow.connections[sourceNodeNameKey][source_node_output_name] = [];
        }
        // n8n expects an array of connection arrays for each output handle.
        // Each inner array represents a set of connections originating from the same output point if it splits.
        // For a simple new connection, we add it as a new chain: [newConnectionObject]
        workflow.connections[sourceNodeNameKey][source_node_output_name].push([newConnectionObject]);
        await promises_1.default.writeFile(filePath, JSON.stringify(workflow, null, 2));
        console.error(`[DEBUG] Added connection from ${sourceNodeNameKey}:${source_node_output_name} to ${targetNodeNameValue}:${target_node_input_name} in workflow ${workflow_name}`);
        // Add a special note for AI connections
        let message = "Connection added successfully";
        if ((isLangChainSource || isLangChainTarget) && isAIConnection) {
            message += ". Note: For LangChain nodes, connections might need specific output/input names and connection direction. If connections don't appear in n8n UI, check that:";
            message += "\n- Models connect TO the agent using 'ai_languageModel' ports";
            message += "\n- Tools connect TO the agent using 'ai_tool' ports";
            message += "\n- Memory nodes connect TO the agent using 'ai_memory' ports";
        }
        return { content: [{ type: "text", text: JSON.stringify({ success: true, message, workflow }) }] };
    }
    catch (error) {
        console.error("[ERROR] Failed to add connection:", error);
        return { content: [{ type: "text", text: JSON.stringify({ success: false, error: "Failed to add connection: " + error.message }) }] };
    }
});
841
// Add AI Connections (special case for LangChain nodes)
const addAIConnectionsParamsSchema = zod_1.z.object({
    workflow_name: zod_1.z.string().describe("The Name of the workflow to add the AI connections to"),
    agent_node_id: zod_1.z.string().describe("The ID of the agent node that will use the model and tools"),
    model_node_id: zod_1.z.string().optional().describe("The ID of the language model node (optional)"),
    tool_node_ids: zod_1.z.array(zod_1.z.string()).optional().describe("Array of tool node IDs to connect to the agent (optional)"),
    memory_node_id: zod_1.z.string().optional().describe("The ID of the memory node (optional)")
});
/**
 * add_ai_connections: wires LangChain helper nodes TO an agent node using
 * n8n's AI ports — model via 'ai_languageModel', memory via 'ai_memory',
 * tools via 'ai_tool'. Idempotent: an already-existing link is not
 * duplicated. The three previously copy-pasted wiring blocks are now one
 * shared helper (same behavior, same log output).
 */
server.tool("add_ai_connections", addAIConnectionsParamsSchema.shape, async (params, _extra) => {
    console.error("[DEBUG] add_ai_connections called with:", params);
    const { workflow_name, agent_node_id, model_node_id, tool_node_ids, memory_node_id } = params;
    if (!model_node_id && (!tool_node_ids || tool_node_ids.length === 0) && !memory_node_id) {
        return {
            content: [{
                    type: "text",
                    text: JSON.stringify({
                        success: false,
                        error: "At least one of model_node_id, memory_node_id, or tool_node_ids must be provided"
                    })
                }]
        };
    }
    try {
        await ensureWorkflowDir();
        const sanitizedWorkflowName = workflow_name.replace(/[^a-z0-9_.-]/gi, '_');
        const filePath = resolvePath(path_1.default.join(WORKFLOW_DATA_DIR_NAME, `${sanitizedWorkflowName}.json`));
        let workflow;
        try {
            const data = await promises_1.default.readFile(filePath, 'utf8');
            workflow = JSON.parse(data);
        }
        catch (readError) {
            if (readError.code === 'ENOENT') {
                return { content: [{ type: "text", text: JSON.stringify({ success: false, error: `Workflow with name ${workflow_name} not found at ${filePath}` }) }] };
            }
            throw readError;
        }
        // First verify all nodes exist
        const agentNode = workflow.nodes.find(node => node.id === agent_node_id);
        if (!agentNode) {
            return { content: [{ type: "text", text: JSON.stringify({ success: false, error: `Agent node with ID ${agent_node_id} not found in workflow ${workflow_name}` }) }] };
        }
        let modelNode = null;
        if (model_node_id) {
            modelNode = workflow.nodes.find(node => node.id === model_node_id);
            if (!modelNode) {
                return { content: [{ type: "text", text: JSON.stringify({ success: false, error: `Model node with ID ${model_node_id} not found in workflow ${workflow_name}` }) }] };
            }
        }
        let memoryNode = null;
        if (memory_node_id) {
            memoryNode = workflow.nodes.find(node => node.id === memory_node_id);
            if (!memoryNode) {
                return { content: [{ type: "text", text: JSON.stringify({ success: false, error: `Memory node with ID ${memory_node_id} not found in workflow ${workflow_name}` }) }] };
            }
        }
        const toolNodes = [];
        if (tool_node_ids && tool_node_ids.length > 0) {
            for (const toolId of tool_node_ids) {
                const toolNode = workflow.nodes.find(node => node.id === toolId);
                if (!toolNode) {
                    return { content: [{ type: "text", text: JSON.stringify({ success: false, error: `Tool node with ID ${toolId} not found in workflow ${workflow_name}` }) }] };
                }
                toolNodes.push(toolNode);
            }
        }
        if (!workflow.connections) {
            workflow.connections = {};
        }
        // For AI nodes in n8n, the helper node connects TO the agent:
        //   model  --ai_languageModel--> agent
        //   memory --ai_memory-------->  agent
        //   tool   --ai_tool---------->  agent
        // ensureAgentLink adds one such connection unless an identical one
        // already exists; `label` only affects the debug log text.
        const ensureAgentLink = (fromNode, portType, label) => {
            const fromName = fromNode.name;
            if (!workflow.connections[fromName]) {
                workflow.connections[fromName] = {};
            }
            if (!workflow.connections[fromName][portType]) {
                workflow.connections[fromName][portType] = [];
            }
            const alreadyLinked = workflow.connections[fromName][portType].some(conn => conn.some(detail => detail.node === agentNode.name && detail.type === portType));
            if (!alreadyLinked) {
                workflow.connections[fromName][portType].push([{
                        node: agentNode.name,
                        type: portType,
                        index: 0
                    }]);
                console.error(`[DEBUG] Added ${label} connection from ${fromName} to ${agentNode.name}`);
            }
            else {
                console.error(`[DEBUG] ${label} connection from ${fromName} to ${agentNode.name} already exists`);
            }
        };
        if (modelNode) {
            ensureAgentLink(modelNode, "ai_languageModel", "AI language model");
        }
        if (memoryNode) {
            ensureAgentLink(memoryNode, "ai_memory", "AI memory");
        }
        for (const toolNode of toolNodes) {
            ensureAgentLink(toolNode, "ai_tool", "AI tool");
        }
        // Save the updated workflow
        await promises_1.default.writeFile(filePath, JSON.stringify(workflow, null, 2));
        return {
            content: [{
                    type: "text",
                    text: JSON.stringify({
                        success: true,
                        message: "AI connections added successfully",
                        workflow
                    })
                }]
        };
    }
    catch (error) {
        console.error("[ERROR] Failed to add AI connections:", error);
        return {
            content: [{
                    type: "text",
                    text: JSON.stringify({
                        success: false,
                        error: "Failed to add AI connections: " + error.message
                    })
                }]
        };
    }
});
1023
// List Available Nodes
const listAvailableNodesParamsSchema = zod_1.z.object({
    search_term: zod_1.z.string().optional().describe("An optional search term to filter nodes by their name, type, or description.")
});
/**
 * list_available_nodes: reads every node-definition JSON file from the
 * server-local workflow_nodes directory, optionally filters by search_term
 * (matched against display name, node type, description, simple name,
 * property names/display names, and categories), and returns a compact
 * summary plus usage guidance.
 *
 * FIX: the filter-result debug line used console.log. This server speaks
 * MCP over stdio, so stdout carries the JSON-RPC stream; diagnostics must
 * go to stderr (console.error), as every other log line in this file does.
 */
server.tool("list_available_nodes", listAvailableNodesParamsSchema.shape, async (params, _extra) => {
    console.error("[DEBUG] list_available_nodes called with params:", params);
    let availableNodes = [];
    // Corrected path: relative to this script's location
    const workflowNodesDir = path_1.default.resolve(__dirname, '../workflow_nodes');
    try {
        // knownNodeBaseCasings should ideally be populated at startup by loadKnownNodeBaseTypes.
        // If it's empty here, it means initial load failed or directory wasn't found then.
        if (nodeInfoCache.size === 0 && WORKSPACE_DIR !== process.cwd()) {
            console.warn("[WARN] nodeInfoCache is empty in list_available_nodes. Attempting to reload node type information.");
            // For now, if cache is empty, it means startup failed to load them.
            // The function will proceed and likely return an empty list or whatever it finds if workflowNodesDir is accessible now.
        }
        console.error(`[DEBUG] Reading node definitions from server resource path: ${workflowNodesDir}`);
        const files = await promises_1.default.readdir(workflowNodesDir);
        const suffix = ".json";
        const allParsedNodes = []; // Temporary array to hold all nodes before filtering
        for (const file of files) {
            if (file.endsWith(suffix) && file !== WORKFLOWS_FILE_NAME /* ignore old combined file */) {
                const filePath = path_1.default.join(workflowNodesDir, file);
                try {
                    const fileContent = await promises_1.default.readFile(filePath, 'utf8');
                    const nodeDefinition = JSON.parse(fileContent);
                    if (nodeDefinition.nodeType && nodeDefinition.displayName && nodeDefinition.properties) {
                        allParsedNodes.push({
                            nodeType: nodeDefinition.nodeType,
                            displayName: nodeDefinition.displayName,
                            description: nodeDefinition.description || "",
                            version: nodeDefinition.version || 1,
                            properties: nodeDefinition.properties,
                            credentialsConfig: nodeDefinition.credentialsConfig || [],
                            categories: nodeDefinition.categories || [],
                            // Also add simplified versions of the node type for reference
                            simpleName: nodeDefinition.nodeType.includes('n8n-nodes-base.')
                                ? nodeDefinition.nodeType.split('n8n-nodes-base.')[1]
                                : nodeDefinition.nodeType
                        });
                    }
                    else {
                        console.warn(`[WARN] File ${file} does not seem to be a valid node definition. Skipping.`);
                    }
                }
                catch (parseError) {
                    console.warn(`[WARN] Failed to parse ${file}: ${parseError.message}. Skipping.`);
                }
            }
        }
        if (params.search_term && params.search_term.trim() !== "") {
            const searchTermLower = params.search_term.toLowerCase();
            // Early-return predicate: a node matches if any searchable field
            // contains the lowercased search term.
            availableNodes = allParsedNodes.filter(node => {
                if (node.displayName && node.displayName.toLowerCase().includes(searchTermLower)) {
                    return true;
                }
                if (node.nodeType && node.nodeType.toLowerCase().includes(searchTermLower)) {
                    return true;
                }
                if (node.description && node.description.toLowerCase().includes(searchTermLower)) {
                    return true;
                }
                if (node.simpleName && node.simpleName.toLowerCase().includes(searchTermLower)) {
                    return true;
                }
                if (Array.isArray(node.properties)) {
                    for (const prop of node.properties) {
                        if (prop.name && prop.name.toLowerCase().includes(searchTermLower)) {
                            return true;
                        }
                        if (prop.displayName && prop.displayName.toLowerCase().includes(searchTermLower)) {
                            return true;
                        }
                    }
                }
                if (Array.isArray(node.categories)) {
                    for (const category of node.categories) {
                        if (typeof category === 'string' && category.toLowerCase().includes(searchTermLower)) {
                            return true;
                        }
                    }
                }
                return false;
            });
            // Was console.log — must be stderr so stdout stays a clean MCP stream.
            console.error(`[DEBUG] Filtered nodes by '${params.search_term}'. Found ${availableNodes.length} of ${allParsedNodes.length}.`);
        }
        else {
            availableNodes = allParsedNodes; // No search term, return all nodes
        }
        if (availableNodes.length === 0 && allParsedNodes.length > 0 && params.search_term) {
            console.warn(`[WARN] No nodes matched the search term: '${params.search_term}'.`);
        }
        else if (allParsedNodes.length === 0) {
            console.warn("[WARN] No node definitions found in workflow_nodes. Ensure the directory is populated with JSON files from the scraper.");
        }
        // Format the results to be more user-friendly and informative
        const formattedNodes = availableNodes.map(node => {
            return {
                // Keep only the most relevant information
                nodeType: node.nodeType, // Full node type with correct casing
                displayName: node.displayName,
                description: node.description,
                simpleName: node.simpleName, // The part after n8n-nodes-base
                categories: node.categories || [],
                version: node.version,
                // Count parameters but don't include details to keep response size manageable
                parameterCount: node.properties ? node.properties.length : 0
            };
        });
        // Include usage guidance in the response
        const usageGuidance = {
            title: "Node Type Usage Guide",
            description: "When using the add_node or replace_node tools, you can specify the node type in any of these formats:",
            formats: [
                `Full Type (with correct casing): "${formattedNodes.length > 0 ? formattedNodes[0].nodeType : 'n8n-nodes-base.nodeTypeName'}"`,
                `Simple Name (with correct casing): "${formattedNodes.length > 0 ? formattedNodes[0].simpleName : 'nodeTypeName'}"`,
                `Simple Name (lowercase): "${formattedNodes.length > 0 ? formattedNodes[0].simpleName.toLowerCase() : 'nodetypename'}"`
            ],
            note: "The system will automatically handle proper casing and prefixing for you based on the official node definitions."
        };
        // Return the formatted response
        return {
            content: [{
                    type: "text", text: JSON.stringify({
                        success: true,
                        nodes: formattedNodes,
                        total: formattedNodes.length,
                        usageGuidance: usageGuidance
                    })
                }]
        };
    }
    catch (error) {
        console.error("[ERROR] Failed to list available nodes:", error);
        if (error.code === 'ENOENT') {
            console.warn("[WARN] workflow_nodes directory not found. Cannot list available nodes.");
            return { content: [{ type: "text", text: JSON.stringify({ success: true, nodes: [], message: "workflow_nodes directory not found." }) }] };
        }
        return { content: [{ type: "text", text: JSON.stringify({ success: false, error: "Failed to list available nodes: " + error.message }) }] };
    }
});
1175
// Create and configure the transport.
// Stdio transport: the MCP JSON-RPC stream runs over stdin/stdout, which is
// why all diagnostic logging elsewhere in this file goes to stderr.
const transport = new stdio_js_1.StdioServerTransport();
1177
+ // Start the server
1178
/**
 * Start the MCP server over stdio, dump a debug reconstruction of the
 * tools/list payload, and keep the process alive until SIGINT/SIGTERM
 * triggers a graceful shutdown.
 *
 * @returns {Promise<void>} resolves only after a shutdown signal is handled.
 *   On startup failure the process exits with code 1 instead of returning.
 */
async function main() {
    try {
        // Note: loadKnownNodeBaseTypes uses resolvePath, which depends on WORKSPACE_DIR.
        // WORKSPACE_DIR is typically set by create_workflow. If called before
        // create_workflow, it might use process.cwd() or fail if workflow_nodes
        // isn't there. Known limitation for now; ideally WORKSPACE_DIR is
        // configured at MCP server init more globally.
        await loadKnownNodeBaseTypes(); // Attempt to load node types at startup
        await server.connect(transport);
        console.error("[DEBUG] N8N Workflow Builder MCP Server started (TypeScript version)");
        // Debugging tool schemas might need update if params changed significantly for other tools.
        const toolSchemasForDebug = {
            create_workflow: createWorkflowParamsSchema,
            list_workflows: zod_1.z.object({}), // no parameters
            get_workflow_details: getWorkflowDetailsParamsSchema,
            add_node: addNodeParamsSchema,
            edit_node: editNodeParamsSchema,
            delete_node: deleteNodeParamsSchema,
            add_connection: addConnectionParamsSchema
        };
        // Descriptions shown in the debug dump. The descriptions the MCP client
        // actually sees are the ones passed to server.tool(); keep these in sync.
        const toolDescriptions = {
            create_workflow: "Create a new n8n workflow",
            list_workflows: "List all n8n workflows",
            get_workflow_details: "Get details of a specific n8n workflow",
            add_node: "Add a new node to a workflow",
            edit_node: "Edit an existing node in a workflow",
            delete_node: "Delete a node from a workflow",
            add_connection: "Add a new connection between nodes in a workflow"
        };
        // Simplified mock of how zod-to-json-schema might convert each schema.
        // The SDK's actual conversion is more complex; this is for local
        // debugging of what the SDK *might* send in tools/list.
        const manuallyConstructedToolList = Object.entries(toolSchemasForDebug).map(([name, schema]) => {
            const toolDefinition = {
                name,
                description: toolDescriptions[name] ?? `Description for ${name}`
            };
            const properties = {};
            const required = [];
            const shape = schema?.shape ?? {};
            for (const key in shape) {
                const field = shape[key];
                // Unwrap .optional()/.default() wrappers so we report the inner
                // type ("string" rather than "optional") and still find any
                // .describe() text attached to the wrapped schema.
                let inner = field;
                while (inner?._def?.innerType) {
                    inner = inner._def.innerType;
                }
                properties[key] = {
                    type: (inner?._def?.typeName ?? 'ZodUnknown').replace('Zod', '').toLowerCase(),
                    description: field.description ?? inner.description
                };
                if (!field.isOptional()) {
                    required.push(key);
                }
            }
            toolDefinition.inputSchema = { type: "object", properties, required };
            return toolDefinition;
        });
        console.error("[DEBUG] Server's expected 'tools' array for tools/list response (with detailed inputSchemas):");
        console.error(JSON.stringify(manuallyConstructedToolList, null, 2));
        // Keep the process alive until a termination signal arrives, then close
        // the server and settle the promise so main() can finish cleanly.
        return new Promise((resolve, reject) => {
            const shutdown = (signal) => {
                console.error(`[DEBUG] Received ${signal}, shutting down...`);
                server.close().then(resolve).catch(reject);
            };
            process.on('SIGINT', () => shutdown('SIGINT'));
            process.on('SIGTERM', () => shutdown('SIGTERM'));
        });
    }
    catch (error) {
        console.error("[ERROR] Failed to start server:", error);
        process.exit(1);
    }
}
1258
// Kick off the server; any error that escapes main() is fatal.
main().catch((err) => {
    console.error("[ERROR] Unhandled error in main:", err);
    process.exit(1);
});
1262
// Last-resort diagnostics: log fatal conditions rather than dying silently.
// TODO(review): consider whether to exit or attempt a graceful shutdown here.
process.on('uncaughtException', (err) => {
    console.error("[ERROR] Uncaught exception:", err);
});
process.on('unhandledRejection', (why, pending) => {
    console.error("[ERROR] Unhandled promise rejection at:", pending, "reason:", why);
});
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "n8n-workflow-builder-mcp",
3
- "version": "0.1.4",
3
+ "version": "0.1.6",
4
4
  "description": "MCP server for building n8n workflows",
5
5
  "main": "dist/index.js",
6
6
  "bin": {