n8n-workflow-builder-mcp 0.1.3 → 0.1.5

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (81) hide show
  1. package/dist/index.js +1212 -0
  2. package/package.json +7 -1
  3. package/.cursor/rules/cursor_rules.mdc +0 -53
  4. package/.cursor/rules/dev_workflow.mdc +0 -219
  5. package/.cursor/rules/mcp.mdc +0 -430
  6. package/.cursor/rules/self_improve.mdc +0 -72
  7. package/.cursor/rules/taskmaster.mdc +0 -382
  8. package/.cursorignore +0 -1
  9. package/.cursorrules +0 -4
  10. package/.env.example +0 -23
  11. package/.eslintrc.json +0 -38
  12. package/.github/workflows/npm-publish-github-packages.yml +0 -55
  13. package/.prettierrc +0 -9
  14. package/.roo/rules/dev_workflow.md +0 -219
  15. package/.roo/rules/mcp.md +0 -430
  16. package/.roo/rules/roo_rules.md +0 -53
  17. package/.roo/rules/self_improve.md +0 -72
  18. package/.roo/rules/taskmaster.md +0 -382
  19. package/.roo/rules-architect/architect-rules +0 -93
  20. package/.roo/rules-ask/ask-rules +0 -89
  21. package/.roo/rules-boomerang/boomerang-rules +0 -181
  22. package/.roo/rules-code/code-rules +0 -61
  23. package/.roo/rules-debug/debug-rules +0 -68
  24. package/.roo/rules-test/test-rules +0 -61
  25. package/.roomodes +0 -63
  26. package/.taskmasterconfig +0 -31
  27. package/.windsurfrules +0 -2382
  28. package/config/credentials/credentials.json +0 -1
  29. package/config/default.js +0 -41
  30. package/scripts/demo-n8n-integration.js +0 -161
  31. package/scripts/demo-workflow-generator.js +0 -102
  32. package/scripts/init.sh +0 -36
  33. package/scripts/prd.txt +0 -197
  34. package/src/index.ts +0 -1440
  35. package/src/middleware/auth.js +0 -273
  36. package/src/middleware/authorize.js +0 -183
  37. package/src/middleware/logging.js +0 -64
  38. package/src/middleware/mcp.js +0 -187
  39. package/src/middleware/rateLimiter.js +0 -82
  40. package/src/middleware/validation.js +0 -241
  41. package/src/models/credential.js +0 -359
  42. package/src/models/llmService.js +0 -236
  43. package/src/models/n8nIntegration.js +0 -542
  44. package/src/models/storage.js +0 -196
  45. package/src/models/tool.js +0 -148
  46. package/src/models/user.js +0 -164
  47. package/src/models/workflow.js +0 -229
  48. package/src/routes/toolDefinitions.js +0 -62
  49. package/src/routes/toolExecution.js +0 -79
  50. package/src/tools/__index.js +0 -242
  51. package/src/tools/connectionManagement.js +0 -500
  52. package/src/tools/n8nIntegration.js +0 -370
  53. package/src/tools/nodeDiscovery.js +0 -488
  54. package/src/tools/nodeManagement.js +0 -674
  55. package/src/tools/toolDefinitions.js +0 -660
  56. package/src/tools/workflowCreation.js +0 -100
  57. package/src/tools/workflowGenerator.js +0 -152
  58. package/src/tools/workflowStorage.js +0 -113
  59. package/src/tools/workflowTesting.js +0 -285
  60. package/src/utils/encryption.js +0 -164
  61. package/src/utils/logger.js +0 -84
  62. package/src/utils/mcp.js +0 -85
  63. package/src/utils/securityLogger.js +0 -109
  64. package/tests/auth.test.js +0 -402
  65. package/tests/authorize.test.js +0 -208
  66. package/tests/run-memory-tests.js +0 -55
  67. package/tests/run-tests.js +0 -55
  68. package/tests/server.test.js +0 -203
  69. package/tests/unit/add-ai-connections.test.js +0 -385
  70. package/tests/unit/connectionManagement.test.js +0 -309
  71. package/tests/unit/langchain-llm-format.test.js +0 -259
  72. package/tests/unit/memory-connection.test.js +0 -140
  73. package/tests/unit/memory-integration.test.js +0 -253
  74. package/tests/unit/n8nIntegration.test.js +0 -291
  75. package/tests/unit/nodeDiscovery.test.js +0 -270
  76. package/tests/unit/nodeManagement.test.js +0 -522
  77. package/tests/unit/utils/mcp-test-utils.js +0 -94
  78. package/tests/unit/workflowCreation.test.js +0 -110
  79. package/tests/unit/workflowTesting.test.js +0 -269
  80. package/tests/user.test.js +0 -181
  81. package/tsconfig.json +0 -20
package/dist/index.js ADDED
@@ -0,0 +1,1212 @@
1
+ #!/usr/bin/env node
2
+ "use strict";
3
+ // N8N Workflow Builder MCP Server
4
+ // Using the official MCP SDK as required
5
+ var __importDefault = (this && this.__importDefault) || function (mod) {
6
+ return (mod && mod.__esModule) ? mod : { "default": mod };
7
+ };
8
+ Object.defineProperty(exports, "__esModule", { value: true });
9
+ const mcp_js_1 = require("@modelcontextprotocol/sdk/server/mcp.js");
10
+ const stdio_js_1 = require("@modelcontextprotocol/sdk/server/stdio.js");
11
+ const zod_1 = require("zod");
12
+ const promises_1 = __importDefault(require("fs/promises"));
13
+ const path_1 = __importDefault(require("path"));
14
+ // Global workspace configuration
15
+ let WORKSPACE_DIR = process.cwd();
16
+ console.error(`[DEBUG] Default workspace directory: ${WORKSPACE_DIR}`);
17
+ let nodeInfoCache = new Map();
18
/**
 * Normalize LLM node parameters coming from various possible caller shapes.
 *
 * Handles three variations seen in practice:
 *  - `modelName` supplied instead of `model` (renamed to `model`);
 *  - `model` supplied as a plain string (wrapped into n8n's resource-locator
 *    shape: { __rl, value, mode, cachedResultName });
 *  - credentials supplied either under `options.credentials.providerType` or
 *    as a root-level `credentialsType`; for OpenAI these are rewritten into
 *    the node-level `credentials.openAiApi` shape.
 *
 * Fix: the original shallow spread shared `options` with the caller, so the
 * `delete` below mutated the caller's object; `options` is now copied first.
 *
 * @param {Record<string, any>} params - Raw node parameters (not mutated).
 * @returns {Record<string, any>} Normalized copy of the parameters.
 */
function normalizeLLMParameters(params) {
    const normalized = { ...params };
    // Copy `options` as well so deletions below never reach the caller's
    // nested object (object spread is shallow).
    if (normalized.options && typeof normalized.options === 'object') {
        normalized.options = { ...normalized.options };
    }
    // Handle model/modelName variation
    if (normalized.modelName && !normalized.model) {
        console.error(`[DEBUG] Normalizing 'modelName' to 'model'`);
        normalized.model = normalized.modelName;
        delete normalized.modelName;
    }
    // Convert model string to required format
    if (normalized.model && typeof normalized.model === 'string') {
        console.error(`[DEBUG] Converting model string to required format: ${normalized.model}`);
        const modelValue = normalized.model;
        normalized.model = {
            "__rl": true,
            "value": modelValue,
            "mode": "list",
            "cachedResultName": modelValue
        };
    }
    // Handle credentials formatting
    // Handle at options level
    if (normalized.options?.credentials?.providerType) {
        const credType = normalized.options.credentials.providerType;
        console.error(`[DEBUG] Found credentials in options with type: ${credType}`);
        delete normalized.options.credentials;
        // Set credentials properly based on provider type
        if (credType === 'openAi' || credType === 'openAiApi') {
            normalized.credentials = {
                "openAiApi": {
                    "id": generateN8nId(),
                    "name": "OpenAi account"
                }
            };
        }
    }
    // Handle at root level
    if (normalized.credentialsType && !normalized.credentials) {
        const credType = normalized.credentialsType;
        console.error(`[DEBUG] Found credentialsType at root level: ${credType}`);
        // Set credentials properly based on provider type
        if (credType === 'openAi' || credType === 'openAiApi') {
            normalized.credentials = {
                "openAiApi": {
                    "id": generateN8nId(),
                    "name": "OpenAi account"
                }
            };
        }
        // Remove root level parameter
        delete normalized.credentialsType;
    }
    return normalized;
}
/**
 * Scan the bundled `workflow_nodes` directory (resolved relative to this
 * script) and populate `nodeInfoCache` with { officialType, version }
 * entries, keyed by the lowercased full node type and — for
 * `n8n-nodes-base.` prefixed types — also by the lowercased base name.
 * On any top-level failure the cache is reset to an empty Map.
 */
async function loadKnownNodeBaseTypes() {
    // workflow_nodes is expected to sit at the package root, next to dist/.
    const workflowNodesDir = path_1.default.resolve(__dirname, '../workflow_nodes');
    try {
        console.error(`[DEBUG] Attempting to load known node types from server resource path: ${workflowNodesDir}`);
        const entries = await promises_1.default.readdir(workflowNodesDir);
        const suffix = ".json";
        nodeInfoCache.clear();
        for (const entry of entries) {
            if (!entry.endsWith(suffix)) {
                continue;
            }
            try {
                // Read the definition file to obtain the correctly-cased nodeType.
                const definitionPath = path_1.default.join(workflowNodesDir, entry);
                const rawDefinition = await promises_1.default.readFile(definitionPath, 'utf8');
                const nodeDefinition = JSON.parse(rawDefinition);
                if (!nodeDefinition.nodeType) {
                    continue;
                }
                const officialType = nodeDefinition.nodeType;
                const version = nodeDefinition.version || 1; // default when missing/falsy
                nodeInfoCache.set(officialType.toLowerCase(), { officialType, version });
                console.error(`[DEBUG] Cached node info for key '${officialType.toLowerCase()}': { officialType: '${officialType}', version: ${JSON.stringify(version)} }`);
                // Prefixed types also get a short alias keyed by the base name.
                const prefix = "n8n-nodes-base.";
                if (officialType.startsWith(prefix)) {
                    const baseName = officialType.substring(prefix.length);
                    if (baseName) {
                        nodeInfoCache.set(baseName.toLowerCase(), { officialType, version });
                        console.error(`[DEBUG] Cached node info for base key '${baseName.toLowerCase()}': { officialType: '${officialType}', version: ${JSON.stringify(version)} }`);
                    }
                }
            }
            catch (parseError) {
                // One bad definition should not stop the rest from loading.
                console.warn(`[WARN] Error parsing node definition in ${entry}:`, parseError);
            }
        }
        console.error(`[DEBUG] Loaded ${nodeInfoCache.size} cache entries for node types.`);
        if (nodeInfoCache.size === 0) {
            console.warn("[WARN] No node type information loaded into cache. Check 'workflow_nodes' directory and naming convention.");
        }
    }
    catch (error) {
        console.warn(`[WARN] Could not load known node types from ${workflowNodesDir}: ${error.message}. Node type normalization might rely on defaults.`);
        nodeInfoCache = new Map();
    }
}
// Replaces the old normalizeNodeType(): resolves both type name and version.
/**
 * Resolve a user-supplied node type (any casing, with or without the
 * `n8n-nodes-base.` prefix) to its official type name plus a numeric
 * typeVersion, consulting `nodeInfoCache` when an entry exists.
 *
 * @param {string} inputType - Node type as supplied by the caller.
 * @param {number|string|undefined} inputVersion - Optional explicit version.
 * @returns {{ finalNodeType: string, finalTypeVersion: number }}
 */
function normalizeNodeTypeAndVersion(inputType, inputVersion) {
    if (nodeInfoCache.size === 0 && WORKSPACE_DIR !== process.cwd()) {
        // An async reload cannot happen from this sync function; callers such
        // as add_node reload the cache themselves before invoking this.
        console.warn("[WARN] nodeInfoCache is empty in normalizeNodeTypeAndVersion. Attempting to reload based on current WORKSPACE_DIR.");
    }
    const lowerInputType = inputType.toLowerCase();
    const prefix = "n8n-nodes-base.";
    const cacheEntry = nodeInfoCache.get(lowerInputType);
    let finalNodeType;
    let versionSource = 1; // fallback when the type is not cached
    if (cacheEntry) {
        // Cache hit: use the correctly cased official type and its version.
        ({ officialType: finalNodeType, version: versionSource } = cacheEntry);
    }
    else if (inputType.includes('/') && !lowerInputType.startsWith(prefix)) {
        // Namespaced type (e.g. "@n8n/...") unknown to the cache: keep as-is.
        finalNodeType = inputType;
        console.warn(`[WARN] Namespaced node type ${inputType} not in cache. Using as-is with default version.`);
    }
    else {
        // Either already prefixed (keep caller's casing) or a bare base name
        // that gets the standard prefix attached.
        finalNodeType = lowerInputType.startsWith(prefix) ? inputType : `${prefix}${inputType}`;
        console.warn(`[WARN] Node type ${inputType} (assumed base/prefixed) not in cache. Result: ${finalNodeType} with default version.`);
    }
    const explicitVersion = Number(inputVersion);
    let finalTypeVersion;
    if (inputVersion !== undefined && !Number.isNaN(explicitVersion)) {
        // Caller supplied a usable numeric version: it wins.
        finalTypeVersion = explicitVersion;
    }
    else {
        if (inputVersion !== undefined && Number.isNaN(explicitVersion)) {
            console.warn(`[WARN] Provided inputVersion '${inputVersion}' is NaN for node ${finalNodeType}. Determining from cache/default.`);
        }
        if (Array.isArray(versionSource)) {
            // Cache may store a list of supported versions: take the highest.
            const numericVersions = versionSource.map((v) => Number(v)).filter((v) => !Number.isNaN(v));
            finalTypeVersion = numericVersions.length > 0 ? Math.max(...numericVersions) : 1;
        }
        else {
            finalTypeVersion = Number(versionSource);
        }
        if (Number.isNaN(finalTypeVersion)) {
            finalTypeVersion = 1; // guard against bad cached data
        }
    }
    if (Number.isNaN(finalTypeVersion)) {
        finalTypeVersion = 1; // final safety net
    }
    console.error(`[DEBUG] normalizeNodeTypeAndVersion: input='${inputType}', inputVersion=${inputVersion} -> finalNodeType='${finalNodeType}', finalTypeVersion=${finalTypeVersion}`);
    return { finalNodeType, finalTypeVersion };
}
// Helper function to resolve paths against workspace.
/**
 * Resolve a caller-supplied path to a location inside WORKSPACE_DIR.
 *
 * Leading path separators are stripped so absolute-looking inputs are
 * treated as workspace-relative. The result is additionally verified to
 * stay inside WORKSPACE_DIR so `..` segments cannot escape the workspace
 * (path-traversal hardening — the original only stripped leading slashes).
 *
 * @param {string} filepath - Workspace-relative path.
 * @returns {string} Absolute path inside WORKSPACE_DIR.
 * @throws {Error} If the path would resolve outside WORKSPACE_DIR
 *                 (all callers wrap this in try/catch and report the error).
 */
function resolvePath(filepath) {
    // Remove any leading path separators to prevent absolute path resolution
    const relativePath = filepath.replace(/^[\\/]+/, '');
    const workspaceRoot = path_1.default.resolve(WORKSPACE_DIR);
    const resolved = path_1.default.resolve(workspaceRoot, relativePath);
    // Reject '..' traversal: result must be the workspace itself or below it.
    if (resolved !== workspaceRoot && !resolved.startsWith(workspaceRoot + path_1.default.sep)) {
        throw new Error(`Refusing to resolve path outside workspace: ${filepath}`);
    }
    return resolved;
}
// ID Generation Helpers
/**
 * Generate a random alphanumeric identifier in the style of n8n workflow/
 * credential ids (default 16 characters from [A-Za-z0-9]).
 *
 * @param {number} [length=16] - Number of characters to generate.
 * @returns {string} Random identifier.
 */
function generateN8nId(length = 16) {
    const alphabet = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789';
    return Array.from({ length }, () => alphabet[Math.floor(Math.random() * alphabet.length)]).join('');
}
/**
 * Generate a random UUID in v4 format (8-4-4-4-12 lowercase hex, version
 * nibble '4', variant nibble in [8, 9, a, b]). Uses Math.random, matching
 * the rest of this file's id helpers — not cryptographically secure.
 *
 * @returns {string} A v4-formatted UUID string.
 */
function generateUUID() {
    const template = 'xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx';
    let out = '';
    for (const ch of template) {
        if (ch === 'x') {
            out += (Math.random() * 16 | 0).toString(16);
        }
        else if (ch === 'y') {
            // Variant bits: force the 10xx pattern (hex 8..b).
            out += ((Math.random() * 16 | 0) & 0x3 | 0x8).toString(16);
        }
        else {
            out += ch;
        }
    }
    return out;
}
/**
 * Generate a random lowercase-hex instance identifier (default 64 chars),
 * mimicking n8n's `meta.instanceId` format.
 *
 * @param {number} [length=64] - Number of hex characters to generate.
 * @returns {string} Random hex string.
 */
function generateInstanceId(length = 64) {
    const hexChars = 'abcdef0123456789';
    let id = '';
    while (id.length < length) {
        id += hexChars[Math.floor(Math.random() * hexChars.length)];
    }
    return id;
}
// Constants
// Directory (relative to WORKSPACE_DIR) where per-workflow .json files live.
const WORKFLOW_DATA_DIR_NAME = 'workflow_data';
// Legacy aggregate file for the old storage format.
const WORKFLOWS_FILE_NAME = 'workflows.json'; // Kept for now, but create_workflow won't use it.
// Helper functions
/**
 * Ensure the workflow data directory exists under the current workspace,
 * creating it (recursively) when missing. Logs and rethrows any failure.
 * No longer creates workflows.json: each workflow lives in its own file.
 */
async function ensureWorkflowDir() {
    try {
        const workflowDir = resolvePath(WORKFLOW_DATA_DIR_NAME);
        console.error("[DEBUG] Ensuring workflow directory at:", workflowDir);
        await promises_1.default.mkdir(workflowDir, { recursive: true });
    }
    catch (mkdirError) {
        console.error('[ERROR] Failed to ensure workflow directory:', mkdirError);
        throw mkdirError;
    }
}
/**
 * Load the legacy aggregated workflows.json (old storage format).
 * Returns the parsed array; when the file does not exist, seeds an empty
 * workflows.json and returns []. Other read failures are logged and
 * rethrown. Kept for backward compatibility only — new code stores one
 * file per workflow.
 *
 * @returns {Promise<any[]>} Parsed workflow list (old format).
 */
async function loadWorkflows() {
    const workflowsFile = resolvePath(path_1.default.join(WORKFLOW_DATA_DIR_NAME, WORKFLOWS_FILE_NAME));
    try {
        await ensureWorkflowDir(); // directory only; file creation happens below
        const raw = await promises_1.default.readFile(workflowsFile, 'utf8');
        console.error("[DEBUG] Loaded workflows (old format):", raw);
        return JSON.parse(raw);
    }
    catch (error) {
        if (error.code !== 'ENOENT') {
            console.error('[ERROR] Failed to load workflows (old format):', error);
            throw error;
        }
        console.error("[DEBUG] No workflows.json file found (old format), returning empty array");
        // Seed an empty file so subsequent old-format reads succeed.
        await promises_1.default.writeFile(workflowsFile, JSON.stringify([], null, 2));
        return [];
    }
}
/**
 * Persist an array of workflows to the legacy workflows.json (old format).
 * Logs and rethrows any failure.
 *
 * @param {any[]} workflows - Workflow list to serialize.
 */
async function saveWorkflows(workflows) {
    try {
        await ensureWorkflowDir();
        const workflowsFile = resolvePath(path_1.default.join(WORKFLOW_DATA_DIR_NAME, WORKFLOWS_FILE_NAME));
        const serialized = JSON.stringify(workflows, null, 2);
        console.error("[DEBUG] Saving workflows (old format):", serialized);
        await promises_1.default.writeFile(workflowsFile, serialized);
    }
    catch (error) {
        console.error('[ERROR] Failed to save workflows (old format):', error);
        throw error;
    }
}
// Create the MCP server
// All tools registered below operate on workflow JSON files under WORKSPACE_DIR.
const server = new mcp_js_1.McpServer({
    name: "n8n-workflow-builder",
    version: "1.0.0"
});
// Tool definitions
// Create Workflow
// Zod schema for the create_workflow tool's parameters.
const createWorkflowParamsSchema = zod_1.z.object({
    workflow_name: zod_1.z.string().describe("The name for the new workflow"),
    workspace_dir: zod_1.z.string().describe("Absolute path to the project root directory where workflow_data will be stored")
});
// create_workflow: creates a new, empty workflow in n8n's native export
// format at <workspace_dir>/workflow_data/<sanitized name>.json, and sets
// the global WORKSPACE_DIR so subsequent tool calls resolve against it.
server.tool("create_workflow", createWorkflowParamsSchema.shape, async (params, _extra) => {
    console.error("[DEBUG] create_workflow called with params:", params);
    const workflowName = params.workflow_name;
    const workspaceDir = params.workspace_dir;
    // Manual blank-checks on top of the zod schema.
    if (!workflowName || workflowName.trim() === "") {
        return { content: [{ type: "text", text: JSON.stringify({ success: false, error: "Parameter 'workflow_name' is required." }) }] };
    }
    if (!workspaceDir || workspaceDir.trim() === "") {
        return { content: [{ type: "text", text: JSON.stringify({ success: false, error: "Parameter 'workspace_dir' is required." }) }] };
    }
    try {
        // stat() both validates existence and lets us reject non-directories.
        const stat = await promises_1.default.stat(workspaceDir);
        if (!stat.isDirectory()) {
            return { content: [{ type: "text", text: JSON.stringify({ success: false, error: "Provided 'workspace_dir' is not a directory." }) }] };
        }
        // Check if the workspaceDir is the root directory
        if (path_1.default.resolve(workspaceDir) === path_1.default.resolve('/')) {
            console.error("[ERROR] 'workspace_dir' cannot be the root directory ('/'). Please specify a valid project subdirectory.");
            return { content: [{ type: "text", text: JSON.stringify({ success: false, error: "'workspace_dir' cannot be the root directory. Please specify a project subdirectory." }) }] };
        }
        WORKSPACE_DIR = workspaceDir; // Set current workspace for resolvePath
        await ensureWorkflowDir(); // Ensures WORKFLOW_DATA_DIR_NAME exists
        // Skeleton workflow matching n8n's export shape.
        const newN8nWorkflow = {
            name: workflowName,
            id: generateN8nId(), // e.g., "Y6sBMxxyJQtgCCBQ"
            nodes: [], // Initialize with empty nodes array
            connections: {}, // Initialize with empty connections object
            active: false,
            pinData: {},
            settings: {
                executionOrder: "v1"
            },
            versionId: generateUUID(),
            meta: {
                instanceId: generateInstanceId()
            },
            tags: []
        };
        // Sanitize workflowName for filename or ensure it's safe.
        // For now, using directly. Consider a sanitization function for production.
        const filename = `${workflowName.replace(/[^a-z0-9_.-]/gi, '_')}.json`;
        const filePath = resolvePath(path_1.default.join(WORKFLOW_DATA_DIR_NAME, filename));
        await promises_1.default.writeFile(filePath, JSON.stringify(newN8nWorkflow, null, 2));
        console.error("[DEBUG] Workflow created and saved to:", filePath);
        // NOTE(review): the recommended_next_step text appears truncated
        // mid-sentence in the shipped build — confirm intended wording upstream.
        return { content: [{ type: "text", text: JSON.stringify({ success: true, workflow: newN8nWorkflow, recommended_next_step: "YOU NEED TO CALL 'list_available_nodes' TOOL BEFORE starting adding nodes. SEARCH BY SPECIFIC TOPIC USING 'search_term' parameter'. To search AI nodes you can use 'langchain' as the search term and 'ai" }) }] };
    }
    catch (error) {
        console.error("[ERROR] Failed to create workflow:", error);
        return { content: [{ type: "text", text: JSON.stringify({ success: false, error: "Failed to create workflow: " + error.message }) }] };
    }
});
// List Workflows
// NOTE: This tool will need to be updated to read individual .json files
// from the workflow_data directory and parse them into N8nWorkflow[]
// Current behavior: scans workflow_data for *.json files (skipping the
// legacy workflows.json) and returns each parsed file as one workflow.
server.tool("list_workflows", {}, async (_params, _extra) => {
    console.error("[DEBUG] list_workflows called - (current impl uses old format and might be broken)");
    try {
        // This implementation needs to change to scan directory for .json files
        // and aggregate them. For now, it will likely fail or return empty
        // if workflows.json doesn't exist or is empty.
        await ensureWorkflowDir(); // Ensures directory exists
        const workflowDataDir = resolvePath(WORKFLOW_DATA_DIR_NAME);
        const files = await promises_1.default.readdir(workflowDataDir);
        // Each remaining .json file is assumed to be one workflow document.
        const workflowFiles = files.filter(file => file.endsWith('.json') && file !== WORKFLOWS_FILE_NAME);
        const workflows = [];
        for (const file of workflowFiles) {
            try {
                const data = await promises_1.default.readFile(path_1.default.join(workflowDataDir, file), 'utf8');
                workflows.push(JSON.parse(data));
            }
            catch (err) {
                // Unreadable/corrupt files are logged and skipped, not fatal.
                console.error(`[ERROR] Failed to read or parse workflow file ${file}:`, err);
                // Decide how to handle: skip, error out, etc.
            }
        }
        console.error(`[DEBUG] Retrieved ${workflows.length} workflows from individual files.`);
        return { content: [{ type: "text", text: JSON.stringify({ success: true, workflows }) }] };
    }
    catch (error) {
        console.error("[ERROR] Failed to list workflows:", error);
        return { content: [{ type: "text", text: JSON.stringify({ success: false, error: "Failed to list workflows: " + error.message }) }] };
    }
});
// Get Workflow Details
// NOTE: This tool will need to be updated. It currently assumes workflow_id is
// an ID found in the old workflows.json structure. It should now probably
// expect workflow_id to be the workflow name (to form the filename) or the new N8n ID.
// Zod schema for the get_workflow_details tool's parameters.
const getWorkflowDetailsParamsSchema = zod_1.z.object({
    workflow_name: zod_1.z.string().describe("The Name of the workflow to get details for")
});
// get_workflow_details: loads <workspace>/workflow_data/<sanitized name>.json
// and returns the parsed workflow; ENOENT maps to a not-found response.
server.tool("get_workflow_details", getWorkflowDetailsParamsSchema.shape, async (params, _extra) => {
    const workflowName = params.workflow_name;
    console.error("[DEBUG] get_workflow_details called with name:", workflowName);
    try {
        await ensureWorkflowDir();
        // Same filename sanitization as create_workflow, so lookups match.
        const sanitizedName = workflowName.replace(/[^a-z0-9_.-]/gi, '_');
        const filePath = resolvePath(path_1.default.join(WORKFLOW_DATA_DIR_NAME, `${sanitizedName}.json`));
        try {
            const data = await promises_1.default.readFile(filePath, 'utf8');
            const workflow = JSON.parse(data);
            console.error("[DEBUG] Found workflow by name in file:", filePath);
            return { content: [{ type: "text", text: JSON.stringify({ success: true, workflow }) }] };
        }
        catch (error) {
            if (error.code === 'ENOENT') {
                console.warn(`[DEBUG] Workflow file ${filePath} not found using name: ${workflowName}.`);
                return { content: [{ type: "text", text: JSON.stringify({ success: false, error: `Workflow with name ${workflowName} not found` }) }] };
            }
            else {
                throw error; // Re-throw other read errors
            }
        }
    }
    catch (error) {
        console.error("[ERROR] Failed to get workflow details:", error);
        return { content: [{ type: "text", text: JSON.stringify({ success: false, error: "Failed to get workflow details: " + error.message }) }] };
    }
});
// Add Node
// NOTE: This tool will need significant updates to load the specific workflow file,
// add the node to its 'nodes' array, and save the file.
// Zod schema for the add_node tool's parameters.
const addNodeParamsSchema = zod_1.z.object({
    workflow_name: zod_1.z.string().describe("The Name of the workflow to add the node to"),
    node_type: zod_1.z.string().describe("The type of node to add (e.g., 'gmail', 'slack', 'openAi'). You can specify with or without the 'n8n-nodes-base.' prefix. The system will handle proper casing (e.g., 'openai' will be converted to 'openAi' if that's the correct casing)."),
    position: zod_1.z.object({
        x: zod_1.z.number(),
        y: zod_1.z.number()
    }).optional().describe("The position of the node {x,y} - will be converted to [x,y] for N8nWorkflowNode"),
    parameters: zod_1.z.record(zod_1.z.string(), zod_1.z.any()).optional().describe("The parameters for the node"),
    node_name: zod_1.z.string().optional().describe("The name for the new node (e.g., 'My Gmail Node')"),
    typeVersion: zod_1.z.number().optional().describe("The type version for the node (e.g., 1, 1.1). Defaults to 1 if not specified."),
    webhookId: zod_1.z.string().optional().describe("Optional webhook ID for certain node types like triggers.")
});
// add_node: appends a new node to an existing workflow file. The node type
// and typeVersion are normalized via normalizeNodeTypeAndVersion; LangChain
// LLM nodes additionally get parameter normalization (model shape,
// credentials placement) via normalizeLLMParameters.
server.tool("add_node", addNodeParamsSchema.shape, async (params, _extra) => {
    console.error("[DEBUG] add_node called with:", params);
    const workflowName = params.workflow_name;
    try {
        // Attempt to reload node types if cache is empty and WORKSPACE_DIR is set by a previous call (e.g. create_workflow)
        // This helps if server started with default WORKSPACE_DIR and cache was empty.
        if (nodeInfoCache.size === 0 && WORKSPACE_DIR !== process.cwd()) {
            console.warn("[WARN] nodeInfoCache is empty in add_node. Attempting to reload based on current WORKSPACE_DIR.");
            await loadKnownNodeBaseTypes();
        }
        await ensureWorkflowDir();
        // Same filename sanitization as create_workflow, so lookups match.
        const sanitizedName = workflowName.replace(/[^a-z0-9_.-]/gi, '_');
        const filePath = resolvePath(path_1.default.join(WORKFLOW_DATA_DIR_NAME, `${sanitizedName}.json`));
        let workflow;
        try {
            const data = await promises_1.default.readFile(filePath, 'utf8');
            workflow = JSON.parse(data);
        }
        catch (readError) {
            if (readError.code === 'ENOENT') {
                return { content: [{ type: "text", text: JSON.stringify({ success: false, error: `Workflow with name ${workflowName} not found at ${filePath}` }) }] };
            }
            throw readError;
        }
        // Ensure workflow.nodes exists
        if (!Array.isArray(workflow.nodes)) {
            workflow.nodes = [];
        }
        // Random position when none given, so nodes don't all stack at the origin.
        const defaultPos = params.position || { x: Math.floor(Math.random() * 500), y: Math.floor(Math.random() * 500) };
        const { finalNodeType, finalTypeVersion } = normalizeNodeTypeAndVersion(params.node_type, params.typeVersion);
        // console.error(`[DEBUG] Node type normalized: "${params.node_type}" -> "${finalNodeType}"`); // Covered by normalizeNodeTypeAndVersion's own log
        // Process parameters for LangChain LLM nodes
        let nodeParameters = params.parameters || {};
        // Check if this is a LangChain LLM node
        const isLangChainLLM = finalNodeType.includes('@n8n/n8n-nodes-langchain') &&
            (finalNodeType.includes('lmChat') || finalNodeType.includes('llm'));
        // Apply normalization for LangChain LLM nodes
        if (isLangChainLLM) {
            console.error(`[DEBUG] Applying parameter normalization for LangChain LLM node`);
            nodeParameters = normalizeLLMParameters(nodeParameters);
        }
        else {
            // Handle OpenAI credentials specifically for non-LangChain nodes
            if (params.parameters?.options?.credentials?.providerType === 'openAi') {
                console.error(`[DEBUG] Setting up proper OpenAI credentials format for standard node`);
                // Remove credentials from options and set at node level
                if (nodeParameters.options?.credentials) {
                    const credentialsType = nodeParameters.options.credentials.providerType;
                    delete nodeParameters.options.credentials;
                    // Set a placeholder for credentials that would be filled in the n8n UI
                    if (!nodeParameters.credentials) {
                        nodeParameters.credentials = {};
                    }
                    // Add credentials in the proper format for OpenAI
                    nodeParameters.credentials = {
                        "openAiApi": {
                            "id": generateN8nId(),
                            "name": "OpenAi account"
                        }
                    };
                }
            }
        }
        // Assemble the node in n8n's node shape ({x,y} becomes [x,y]).
        const newNode = {
            id: generateUUID(),
            type: finalNodeType,
            typeVersion: finalTypeVersion, // Use version from normalizeNodeTypeAndVersion
            position: [defaultPos.x, defaultPos.y],
            parameters: nodeParameters,
            name: params.node_name || `${finalNodeType} Node`, // Use finalNodeType for default name
            ...(params.webhookId && { webhookId: params.webhookId }) // Add webhookId if provided
        };
        workflow.nodes.push(newNode);
        await promises_1.default.writeFile(filePath, JSON.stringify(workflow, null, 2));
        console.error(`[DEBUG] Added node ${newNode.id} to workflow ${workflowName} in file ${filePath}`);
        return { content: [{ type: "text", text: JSON.stringify({ success: true, node: newNode, workflowId: workflow.id }) }] };
    }
    catch (error) {
        console.error("[ERROR] Failed to add node:", error);
        return { content: [{ type: "text", text: JSON.stringify({ success: false, error: "Failed to add node: " + error.message }) }] };
    }
});
// Edit Node
// NOTE: This tool also needs updates for single-file workflow management.
// Zod schema for the edit_node tool's parameters; everything except
// workflow_name and node_id is an optional partial update.
const editNodeParamsSchema = zod_1.z.object({
    workflow_name: zod_1.z.string().describe("The Name of the workflow containing the node"),
    node_id: zod_1.z.string().describe("The ID of the node to edit"),
    node_type: zod_1.z.string().optional().describe("The new type for the node (e.g., 'gmail', 'slack', 'openAi'). You can specify with or without the 'n8n-nodes-base.' prefix. The system will handle proper casing (e.g., 'openai' will be converted to 'openAi' if that's the correct casing)."),
    node_name: zod_1.z.string().optional().describe("The new name for the node"),
    position: zod_1.z.object({
        x: zod_1.z.number(),
        y: zod_1.z.number()
    }).optional().describe("The new position {x,y} - will be converted to [x,y]"),
    parameters: zod_1.z.record(zod_1.z.string(), zod_1.z.any()).optional().describe("The new parameters"),
    typeVersion: zod_1.z.number().optional().describe("The new type version for the node"),
    webhookId: zod_1.z.string().optional().describe("Optional new webhook ID for the node.")
});
// MCP tool: edit an existing node in a workflow file.
// Applies only the fields supplied in params; unspecified fields are left untouched.
// Persists the updated workflow back to its JSON file on success.
server.tool("edit_node", editNodeParamsSchema.shape, async (params, _extra) => {
    console.error("[DEBUG] edit_node called with:", params);
    const workflowName = params.workflow_name;
    try {
        // Similar cache reload logic as in add_node: the node-type cache may be
        // empty if startup ran before WORKSPACE_DIR was configured.
        if (nodeInfoCache.size === 0 && WORKSPACE_DIR !== process.cwd()) {
            console.warn("[WARN] nodeInfoCache is empty in edit_node. Attempting to reload based on current WORKSPACE_DIR.");
            await loadKnownNodeBaseTypes();
        }
        await ensureWorkflowDir();
        const sanitizedName = workflowName.replace(/[^a-z0-9_.-]/gi, '_');
        const filePath = resolvePath(path_1.default.join(WORKFLOW_DATA_DIR_NAME, `${sanitizedName}.json`));
        let workflow;
        try {
            const data = await promises_1.default.readFile(filePath, 'utf8');
            workflow = JSON.parse(data);
        }
        catch (readError) {
            if (readError.code === 'ENOENT') {
                return { content: [{ type: "text", text: JSON.stringify({ success: false, error: `Workflow with name ${workflowName} not found at ${filePath}` }) }] };
            }
            throw readError;
        }
        const nodeIndex = workflow.nodes.findIndex(n => n.id === params.node_id);
        if (nodeIndex === -1) {
            return { content: [{ type: "text", text: JSON.stringify({ success: false, error: `Node with id ${params.node_id} not found in workflow ${workflowName}` }) }] };
        }
        const nodeToEdit = workflow.nodes[nodeIndex];
        let newType = nodeToEdit.type;
        let newTypeVersion = nodeToEdit.typeVersion;
        if (params.node_type) {
            // If node_type is changing, typeVersion should be re-evaluated based on the new type,
            // unless a specific params.typeVersion is also given for this edit.
            const { finalNodeType, finalTypeVersion: determinedVersionForNewType } = normalizeNodeTypeAndVersion(params.node_type, params.typeVersion);
            newType = finalNodeType;
            newTypeVersion = determinedVersionForNewType; // This uses params.typeVersion if valid, else default for new type.
        }
        else if (params.typeVersion !== undefined && !Number.isNaN(Number(params.typeVersion))) {
            // Only typeVersion is being changed, node_type remains the same.
            newTypeVersion = Number(params.typeVersion);
        }
        else if (params.typeVersion !== undefined && Number.isNaN(Number(params.typeVersion))) {
            console.warn(`[WARN] Provided typeVersion '${params.typeVersion}' for editing node ${nodeToEdit.id} is NaN. typeVersion will not be changed.`);
        }
        nodeToEdit.type = newType;
        nodeToEdit.typeVersion = newTypeVersion;
        if (params.node_name && params.node_name !== nodeToEdit.name) {
            // BUG FIX: n8n keys the connections map by node *name* (see add_connection).
            // Renaming a node without rewriting workflow.connections orphans every
            // connection to or from it. Rename the source key and all target references.
            const oldName = nodeToEdit.name;
            const newName = params.node_name;
            nodeToEdit.name = newName;
            if (workflow.connections) {
                if (Object.prototype.hasOwnProperty.call(workflow.connections, oldName)) {
                    workflow.connections[newName] = workflow.connections[oldName];
                    delete workflow.connections[oldName];
                }
                for (const outputs of Object.values(workflow.connections)) {
                    for (const chains of Object.values(outputs)) {
                        for (const chain of chains) {
                            for (const connDetail of chain) {
                                if (connDetail.node === oldName) {
                                    connDetail.node = newName;
                                }
                            }
                        }
                    }
                }
            }
        }
        if (params.position)
            nodeToEdit.position = [params.position.x, params.position.y];
        // Process new parameters if provided
        if (params.parameters) {
            let newParameters = params.parameters;
            // Check if this is a LangChain LLM node
            const isLangChainLLM = newType.includes('@n8n/n8n-nodes-langchain') &&
                (newType.includes('lmChat') || newType.includes('llm'));
            // Apply normalization for LangChain LLM nodes
            if (isLangChainLLM) {
                console.error(`[DEBUG] Applying parameter normalization for LangChain LLM node during edit`);
                newParameters = normalizeLLMParameters(newParameters);
            }
            else {
                // Handle OpenAI credentials specifically for non-LangChain nodes
                if (newParameters.options?.credentials?.providerType === 'openAi') {
                    console.error(`[DEBUG] Setting up proper OpenAI credentials format for standard node during edit`);
                    // Remove credentials from options and set at node level
                    if (newParameters.options?.credentials) {
                        const credentialsType = newParameters.options.credentials.providerType;
                        delete newParameters.options.credentials;
                        // Set a placeholder for credentials that would be filled in the n8n UI
                        if (!newParameters.credentials) {
                            newParameters.credentials = {};
                        }
                        // Add credentials in the proper format for OpenAI
                        newParameters.credentials = {
                            "openAiApi": {
                                "id": generateN8nId(),
                                "name": "OpenAi account"
                            }
                        };
                    }
                }
            }
            nodeToEdit.parameters = newParameters;
        }
        if (params.webhookId !== undefined) { // Allow setting or unsetting webhookId
            if (params.webhookId === null || params.webhookId === "") { // Check for explicit clear
                delete nodeToEdit.webhookId;
            }
            else {
                nodeToEdit.webhookId = params.webhookId;
            }
        }
        workflow.nodes[nodeIndex] = nodeToEdit;
        await promises_1.default.writeFile(filePath, JSON.stringify(workflow, null, 2));
        console.error(`[DEBUG] Edited node ${params.node_id} in workflow ${workflowName} in file ${filePath}`);
        return { content: [{ type: "text", text: JSON.stringify({ success: true, node: nodeToEdit }) }] };
    }
    catch (error) {
        console.error("[ERROR] Failed to edit node:", error);
        return { content: [{ type: "text", text: JSON.stringify({ success: false, error: "Failed to edit node: " + error.message }) }] };
    }
});
613
// Delete Node
// NOTE: This tool also needs updates for single-file workflow management.
// Zod schema for the `delete_node` MCP tool: identifies the workflow file and
// the node (by ID) to remove.
const deleteNodeParamsSchema = zod_1.z.object({
    workflow_name: zod_1.z.string().describe("The Name of the workflow containing the node"),
    node_id: zod_1.z.string().describe("The ID of the node to delete")
});
619
// MCP tool: remove a node from a workflow file and prune any connections that
// reference it (either as source or as target), then persist the workflow.
server.tool("delete_node", deleteNodeParamsSchema.shape, async (params, _extra) => {
    console.error("[DEBUG] delete_node called with:", params);
    const workflowName = params.workflow_name;
    try {
        await ensureWorkflowDir();
        const safeName = workflowName.replace(/[^a-z0-9_.-]/gi, '_');
        const filePath = resolvePath(path_1.default.join(WORKFLOW_DATA_DIR_NAME, `${safeName}.json`));
        let workflow;
        try {
            workflow = JSON.parse(await promises_1.default.readFile(filePath, 'utf8'));
        }
        catch (readError) {
            if (readError.code === 'ENOENT') {
                return { content: [{ type: "text", text: JSON.stringify({ success: false, error: `Workflow with name ${workflowName} not found at ${filePath}` }) }] };
            }
            throw readError;
        }
        const targetIndex = workflow.nodes.findIndex(n => n.id === params.node_id);
        if (targetIndex === -1) {
            return { content: [{ type: "text", text: JSON.stringify({ success: false, error: `Node with id ${params.node_id} not found in workflow ${workflowName}` }) }] };
        }
        const removedName = workflow.nodes[targetIndex].name;
        workflow.nodes.splice(targetIndex, 1);
        // Rebuild the connections map without the removed node.
        // This is a simplified connection removal. n8n's logic might be more complex.
        const prunedConnections = {};
        for (const [sourceName, outputs] of Object.entries(workflow.connections ?? {})) {
            // Drop every connection originating FROM the removed node.
            if (sourceName === removedName) {
                continue;
            }
            const keptOutputs = {};
            for (const [outputKey, chains] of Object.entries(outputs)) {
                // Strip the removed node out of each chain, then drop chains left empty.
                const keptChains = chains
                    .map(chain => chain.filter(detail => detail.node !== removedName))
                    .filter(chain => chain.length > 0);
                if (keptChains.length > 0) {
                    keptOutputs[outputKey] = keptChains;
                }
            }
            if (Object.keys(keptOutputs).length > 0) {
                prunedConnections[sourceName] = keptOutputs;
            }
        }
        workflow.connections = prunedConnections;
        await promises_1.default.writeFile(filePath, JSON.stringify(workflow, null, 2));
        console.error(`[DEBUG] Deleted node ${params.node_id} from workflow ${workflowName} in file ${filePath}`);
        return { content: [{ type: "text", text: JSON.stringify({ success: true, message: `Node ${params.node_id} deleted successfully from workflow ${workflowName}` }) }] };
    }
    catch (error) {
        console.error("[ERROR] Failed to delete node:", error);
        return { content: [{ type: "text", text: JSON.stringify({ success: false, error: "Failed to delete node: " + error.message }) }] };
    }
});
678
// Add Connection
// Zod schema for the `add_connection` MCP tool. Nodes are addressed by ID here,
// although the handler translates them to node *names* when writing the
// connections map (n8n keys connections by name).
const addConnectionParamsSchema = zod_1.z.object({
    workflow_name: zod_1.z.string().describe("The Name of the workflow to add the connection to"),
    source_node_id: zod_1.z.string().describe("The ID of the source node for the connection"),
    source_node_output_name: zod_1.z.string().describe("The name of the output handle on the source node (e.g., 'main')"),
    target_node_id: zod_1.z.string().describe("The ID of the target node for the connection"),
    target_node_input_name: zod_1.z.string().describe("The name of the input handle on the target node (e.g., 'main')"),
    target_node_input_index: zod_1.z.number().optional().default(0).describe("The index for the target node's input handle (default: 0)")
});
687
// MCP tool: add a connection between two nodes in a workflow file.
// Looks nodes up by ID, writes into workflow.connections (keyed by node name),
// and warns about LangChain AI wiring conventions where direction matters.
server.tool("add_connection", addConnectionParamsSchema.shape, async (params, _extra) => {
    console.error("[DEBUG] add_connection called with:", params);
    const { workflow_name, source_node_id, source_node_output_name, target_node_id, target_node_input_name, target_node_input_index } = params;
    try {
        await ensureWorkflowDir();
        const sanitizedWorkflowName = workflow_name.replace(/[^a-z0-9_.-]/gi, '_');
        const filePath = resolvePath(path_1.default.join(WORKFLOW_DATA_DIR_NAME, `${sanitizedWorkflowName}.json`));
        let workflow;
        try {
            const data = await promises_1.default.readFile(filePath, 'utf8');
            workflow = JSON.parse(data);
        }
        catch (readError) {
            if (readError.code === 'ENOENT') {
                return { content: [{ type: "text", text: JSON.stringify({ success: false, error: `Workflow with name ${workflow_name} not found at ${filePath}` }) }] };
            }
            throw readError;
        }
        const sourceNode = workflow.nodes.find(node => node.id === source_node_id);
        const targetNode = workflow.nodes.find(node => node.id === target_node_id);
        if (!sourceNode) {
            return { content: [{ type: "text", text: JSON.stringify({ success: false, error: `Source node with ID ${source_node_id} not found in workflow ${workflow_name}` }) }] };
        }
        if (!targetNode) {
            return { content: [{ type: "text", text: JSON.stringify({ success: false, error: `Target node with ID ${target_node_id} not found in workflow ${workflow_name}` }) }] };
        }
        const sourceNodeNameKey = sourceNode.name; // n8n connections are keyed by node *name*
        const targetNodeNameValue = targetNode.name;
        // Detect if we're working with LangChain AI nodes that require special connection handling
        const isLangChainSource = sourceNode.type.includes('@n8n/n8n-nodes-langchain');
        const isLangChainTarget = targetNode.type.includes('@n8n/n8n-nodes-langchain');
        const isAIConnection = source_node_output_name.startsWith('ai_') || target_node_input_name.startsWith('ai_');
        // Check if we need to warn about connection direction for AI nodes.
        // This handles the special case for LangChain nodes where tools and models
        // connect TO the agent rather than the agent connecting to them.
        if ((isLangChainSource || isLangChainTarget) && isAIConnection) {
            // Cases:
            // - Models/Tools point TO Agent (reversed)
            // - Agent points to regular nodes (forward)
            // - Triggers point to any node (forward)
            // - Memory nodes point TO Agent (reversed)
            if (
            // If it's a LLM, Tool, or Memory node pointing to an agent
            (sourceNode.type.includes('lmChat') ||
                sourceNode.type.includes('tool') ||
                sourceNode.type.toLowerCase().includes('request') ||
                sourceNode.type.includes('memory'))
                && targetNode.type.includes('agent')) {
                console.warn("[WARN] LangChain AI connection detected. N8n often expects models, tools, and memory to connect TO agents rather than agents connecting to them.");
                console.warn("[WARN] Connections will be created as specified, but if they don't appear correctly in n8n UI, try reversing the source and target.");
                // Special hint for memory connections
                if (sourceNode.type.includes('memory')) {
                    if (source_node_output_name !== 'ai_memory') {
                        console.warn("[WARN] Memory nodes should usually connect to agents using 'ai_memory' output, not '" + source_node_output_name + "'.");
                    }
                    if (target_node_input_name !== 'ai_memory') {
                        console.warn("[WARN] Agents should receive memory connections on 'ai_memory' input, not '" + target_node_input_name + "'.");
                    }
                }
            }
        }
        const newConnectionObject = {
            node: targetNodeNameValue,
            type: target_node_input_name,
            index: target_node_input_index
        };
        if (!workflow.connections) {
            workflow.connections = {};
        }
        if (!workflow.connections[sourceNodeNameKey]) {
            workflow.connections[sourceNodeNameKey] = {};
        }
        if (!workflow.connections[sourceNodeNameKey][source_node_output_name]) {
            workflow.connections[sourceNodeNameKey][source_node_output_name] = [];
        }
        // n8n expects an array of connection arrays for each output handle.
        // Each inner array represents a set of connections originating from the same output point if it splits.
        // For a simple new connection, we add it as a new chain: [newConnectionObject]
        const existingChains = workflow.connections[sourceNodeNameKey][source_node_output_name];
        // BUG FIX: guard against duplicate connections, consistent with
        // add_ai_connections. Previously repeated calls appended identical chains.
        const alreadyConnected = existingChains.some(chain => chain.some(detail => detail.node === targetNodeNameValue &&
            detail.type === target_node_input_name &&
            detail.index === target_node_input_index));
        if (!alreadyConnected) {
            existingChains.push([newConnectionObject]);
        }
        else {
            console.warn(`[WARN] Connection from ${sourceNodeNameKey}:${source_node_output_name} to ${targetNodeNameValue}:${target_node_input_name} already exists; skipping duplicate.`);
        }
        await promises_1.default.writeFile(filePath, JSON.stringify(workflow, null, 2));
        console.error(`[DEBUG] Added connection from ${sourceNodeNameKey}:${source_node_output_name} to ${targetNodeNameValue}:${target_node_input_name} in workflow ${workflow_name}`);
        // Add a special note for AI connections
        let message = "Connection added successfully";
        if ((isLangChainSource || isLangChainTarget) && isAIConnection) {
            message += ". Note: For LangChain nodes, connections might need specific output/input names and connection direction. If connections don't appear in n8n UI, check that:";
            message += "\n- Models connect TO the agent using 'ai_languageModel' ports";
            message += "\n- Tools connect TO the agent using 'ai_tool' ports";
            message += "\n- Memory nodes connect TO the agent using 'ai_memory' ports";
        }
        return { content: [{ type: "text", text: JSON.stringify({ success: true, message, workflow }) }] };
    }
    catch (error) {
        console.error("[ERROR] Failed to add connection:", error);
        return { content: [{ type: "text", text: JSON.stringify({ success: false, error: "Failed to add connection: " + error.message }) }] };
    }
});
784
// Add AI Connections (special case for LangChain nodes)
// Zod schema for the `add_ai_connections` MCP tool. At least one of
// model_node_id, tool_node_ids, or memory_node_id must be provided (enforced in
// the handler, not the schema).
const addAIConnectionsParamsSchema = zod_1.z.object({
    workflow_name: zod_1.z.string().describe("The Name of the workflow to add the AI connections to"),
    agent_node_id: zod_1.z.string().describe("The ID of the agent node that will use the model and tools"),
    model_node_id: zod_1.z.string().optional().describe("The ID of the language model node (optional)"),
    tool_node_ids: zod_1.z.array(zod_1.z.string()).optional().describe("Array of tool node IDs to connect to the agent (optional)"),
    memory_node_id: zod_1.z.string().optional().describe("The ID of the memory node (optional)")
});
792
// MCP tool: wire LangChain AI helper nodes (model / memory / tools) TO an agent.
// For AI nodes in n8n:
//   1. Language model connects TO the agent using ai_languageModel ports
//   2. Tools connect TO the agent using ai_tool ports
//   3. Memory nodes connect TO the agent using ai_memory ports
// The triplicated wiring logic from the original is factored into one helper.
server.tool("add_ai_connections", addAIConnectionsParamsSchema.shape, async (params, _extra) => {
    console.error("[DEBUG] add_ai_connections called with:", params);
    const { workflow_name, agent_node_id, model_node_id, tool_node_ids, memory_node_id } = params;
    if (!model_node_id && (!tool_node_ids || tool_node_ids.length === 0) && !memory_node_id) {
        return {
            content: [{
                    type: "text",
                    text: JSON.stringify({
                        success: false,
                        error: "At least one of model_node_id, memory_node_id, or tool_node_ids must be provided"
                    })
                }]
        };
    }
    try {
        await ensureWorkflowDir();
        const sanitizedWorkflowName = workflow_name.replace(/[^a-z0-9_.-]/gi, '_');
        const filePath = resolvePath(path_1.default.join(WORKFLOW_DATA_DIR_NAME, `${sanitizedWorkflowName}.json`));
        let workflow;
        try {
            const data = await promises_1.default.readFile(filePath, 'utf8');
            workflow = JSON.parse(data);
        }
        catch (readError) {
            if (readError.code === 'ENOENT') {
                return { content: [{ type: "text", text: JSON.stringify({ success: false, error: `Workflow with name ${workflow_name} not found at ${filePath}` }) }] };
            }
            throw readError;
        }
        // First verify all nodes exist
        const agentNode = workflow.nodes.find(node => node.id === agent_node_id);
        if (!agentNode) {
            return { content: [{ type: "text", text: JSON.stringify({ success: false, error: `Agent node with ID ${agent_node_id} not found in workflow ${workflow_name}` }) }] };
        }
        let modelNode = null;
        if (model_node_id) {
            modelNode = workflow.nodes.find(node => node.id === model_node_id);
            if (!modelNode) {
                return { content: [{ type: "text", text: JSON.stringify({ success: false, error: `Model node with ID ${model_node_id} not found in workflow ${workflow_name}` }) }] };
            }
        }
        let memoryNode = null;
        if (memory_node_id) {
            memoryNode = workflow.nodes.find(node => node.id === memory_node_id);
            if (!memoryNode) {
                return { content: [{ type: "text", text: JSON.stringify({ success: false, error: `Memory node with ID ${memory_node_id} not found in workflow ${workflow_name}` }) }] };
            }
        }
        const toolNodes = [];
        if (tool_node_ids && tool_node_ids.length > 0) {
            for (const toolId of tool_node_ids) {
                const toolNode = workflow.nodes.find(node => node.id === toolId);
                if (!toolNode) {
                    return { content: [{ type: "text", text: JSON.stringify({ success: false, error: `Tool node with ID ${toolId} not found in workflow ${workflow_name}` }) }] };
                }
                toolNodes.push(toolNode);
            }
        }
        if (!workflow.connections) {
            workflow.connections = {};
        }
        // Helper: ensure `sourceNode` has a chain on `portType` pointing TO the
        // agent, skipping (and logging) if an identical connection already exists.
        // `label` is only used in log messages (kept identical to the originals).
        const connectToAgent = (sourceNode, portType, label) => {
            const sourceName = sourceNode.name;
            if (!workflow.connections[sourceName]) {
                workflow.connections[sourceName] = {};
            }
            if (!workflow.connections[sourceName][portType]) {
                workflow.connections[sourceName][portType] = [];
            }
            const alreadyLinked = workflow.connections[sourceName][portType].some(conn => conn.some(detail => detail.node === agentNode.name && detail.type === portType));
            if (alreadyLinked) {
                console.error(`[DEBUG] ${label} connection from ${sourceName} to ${agentNode.name} already exists`);
                return;
            }
            workflow.connections[sourceName][portType].push([{
                    node: agentNode.name,
                    type: portType,
                    index: 0
                }]);
            console.error(`[DEBUG] Added ${label} connection from ${sourceName} to ${agentNode.name}`);
        };
        if (modelNode) {
            connectToAgent(modelNode, "ai_languageModel", "AI language model");
        }
        if (memoryNode) {
            connectToAgent(memoryNode, "ai_memory", "AI memory");
        }
        for (const toolNode of toolNodes) {
            connectToAgent(toolNode, "ai_tool", "AI tool");
        }
        // Save the updated workflow
        await promises_1.default.writeFile(filePath, JSON.stringify(workflow, null, 2));
        return {
            content: [{
                    type: "text",
                    text: JSON.stringify({
                        success: true,
                        message: "AI connections added successfully",
                        workflow
                    })
                }]
        };
    }
    catch (error) {
        console.error("[ERROR] Failed to add AI connections:", error);
        return {
            content: [{
                    type: "text",
                    text: JSON.stringify({
                        success: false,
                        error: "Failed to add AI connections: " + error.message
                    })
                }]
        };
    }
});
966
// List Available Nodes
// Zod schema for the `list_available_nodes` MCP tool: a single optional
// free-text search term used to filter node definitions.
const listAvailableNodesParamsSchema = zod_1.z.object({
    search_term: zod_1.z.string().optional().describe("An optional search term to filter nodes by their name, type, or description.")
});
970
// MCP tool: enumerate the node definitions bundled with the server (JSON files
// in ../workflow_nodes), optionally filtered by a search term, and return a
// trimmed summary plus usage guidance.
server.tool("list_available_nodes", listAvailableNodesParamsSchema.shape, async (params, _extra) => {
    console.error("[DEBUG] list_available_nodes called with params:", params);
    let availableNodes = [];
    // Corrected path: relative to this script's location
    const workflowNodesDir = path_1.default.resolve(__dirname, '../workflow_nodes');
    try {
        // knownNodeBaseCasings should ideally be populated at startup by loadKnownNodeBaseTypes.
        // If it's empty here, it means initial load failed or directory wasn't found then.
        // We might not need to reload it here if startup handles it, but a check doesn't hurt.
        if (nodeInfoCache.size === 0 && WORKSPACE_DIR !== process.cwd()) {
            console.warn("[WARN] nodeInfoCache is empty in list_available_nodes. Attempting to reload node type information.");
            // For now, if cache is empty, it means startup failed to load them.
            // The function will proceed and likely return an empty list or whatever it finds if workflowNodesDir is accessible now.
        }
        console.error(`[DEBUG] Reading node definitions from server resource path: ${workflowNodesDir}`);
        const files = await promises_1.default.readdir(workflowNodesDir);
        const suffix = ".json";
        const allParsedNodes = []; // Temporary array to hold all nodes before filtering
        for (const file of files) {
            if (file.endsWith(suffix) && file !== WORKFLOWS_FILE_NAME /* ignore old combined file */) {
                const filePath = path_1.default.join(workflowNodesDir, file);
                try {
                    const fileContent = await promises_1.default.readFile(filePath, 'utf8');
                    const nodeDefinition = JSON.parse(fileContent);
                    if (nodeDefinition.nodeType && nodeDefinition.displayName && nodeDefinition.properties) {
                        allParsedNodes.push({
                            nodeType: nodeDefinition.nodeType,
                            displayName: nodeDefinition.displayName,
                            description: nodeDefinition.description || "",
                            version: nodeDefinition.version || 1,
                            properties: nodeDefinition.properties,
                            credentialsConfig: nodeDefinition.credentialsConfig || [],
                            categories: nodeDefinition.categories || [],
                            // Also add simplified versions of the node type for reference
                            simpleName: nodeDefinition.nodeType.includes('n8n-nodes-base.')
                                ? nodeDefinition.nodeType.split('n8n-nodes-base.')[1]
                                : nodeDefinition.nodeType
                        });
                    }
                    else {
                        console.warn(`[WARN] File ${file} does not seem to be a valid node definition. Skipping.`);
                    }
                }
                catch (parseError) {
                    console.warn(`[WARN] Failed to parse ${file}: ${parseError.message}. Skipping.`);
                }
            }
        }
        if (params.search_term && params.search_term.trim() !== "") {
            const searchTermLower = params.search_term.toLowerCase();
            // Case-insensitive match against a candidate string field.
            const textMatches = (value) => typeof value === 'string' && value.toLowerCase().includes(searchTermLower);
            availableNodes = allParsedNodes.filter(node => {
                if (textMatches(node.displayName) || textMatches(node.nodeType) ||
                    textMatches(node.description) || textMatches(node.simpleName)) {
                    return true;
                }
                // Search parameter names/display names as well.
                if (Array.isArray(node.properties) &&
                    node.properties.some(prop => textMatches(prop.name) || textMatches(prop.displayName))) {
                    return true;
                }
                // And finally the category labels.
                return Array.isArray(node.categories) && node.categories.some(textMatches);
            });
            // BUG FIX: was console.log — stdout carries the MCP JSON-RPC stream for
            // a stdio transport, so diagnostics must go to stderr like everywhere else.
            console.error(`[DEBUG] Filtered nodes by '${params.search_term}'. Found ${availableNodes.length} of ${allParsedNodes.length}.`);
        }
        else {
            availableNodes = allParsedNodes; // No search term, return all nodes
        }
        if (availableNodes.length === 0 && allParsedNodes.length > 0 && params.search_term) {
            console.warn(`[WARN] No nodes matched the search term: '${params.search_term}'.`);
        }
        else if (allParsedNodes.length === 0) {
            console.warn("[WARN] No node definitions found in workflow_nodes. Ensure the directory is populated with JSON files from the scraper.");
        }
        // Format the results to be more user-friendly and informative
        const formattedNodes = availableNodes.map(node => {
            return {
                // Keep only the most relevant information
                nodeType: node.nodeType, // Full node type with correct casing
                displayName: node.displayName,
                description: node.description,
                simpleName: node.simpleName, // The part after n8n-nodes-base
                categories: node.categories || [],
                version: node.version,
                // Count parameters but don't include details to keep response size manageable
                parameterCount: node.properties ? node.properties.length : 0
            };
        });
        // Include usage guidance in the response
        const usageGuidance = {
            title: "Node Type Usage Guide",
            description: "When using the add_node or replace_node tools, you can specify the node type in any of these formats:",
            formats: [
                `Full Type (with correct casing): "${formattedNodes.length > 0 ? formattedNodes[0].nodeType : 'n8n-nodes-base.nodeTypeName'}"`,
                `Simple Name (with correct casing): "${formattedNodes.length > 0 ? formattedNodes[0].simpleName : 'nodeTypeName'}"`,
                `Simple Name (lowercase): "${formattedNodes.length > 0 ? formattedNodes[0].simpleName.toLowerCase() : 'nodetypename'}"`
            ],
            note: "The system will automatically handle proper casing and prefixing for you based on the official node definitions."
        };
        // Return the formatted response
        return {
            content: [{
                    type: "text", text: JSON.stringify({
                        success: true,
                        nodes: formattedNodes,
                        total: formattedNodes.length,
                        usageGuidance: usageGuidance
                    })
                }]
        };
    }
    catch (error) {
        console.error("[ERROR] Failed to list available nodes:", error);
        if (error.code === 'ENOENT') {
            console.warn("[WARN] workflow_nodes directory not found. Cannot list available nodes.");
            return { content: [{ type: "text", text: JSON.stringify({ success: true, nodes: [], message: "workflow_nodes directory not found." }) }] };
        }
        return { content: [{ type: "text", text: JSON.stringify({ success: false, error: "Failed to list available nodes: " + error.message }) }] };
    }
});
1118
// Create and configure the transport.
// Stdio transport: the MCP protocol stream rides on stdout/stdin, which is why
// all diagnostics in this file are written to stderr.
const transport = new stdio_js_1.StdioServerTransport();
1120
+ // Start the server
1121
/**
 * Zod wraps optional/defaulted/nullable fields (ZodOptional, ZodDefault,
 * ZodNullable) around the real type; the wrapper's `_def.typeName` is e.g.
 * "ZodOptional" and its `description` is usually attached to the inner type.
 * Unwrap so the debug dump reports the actual field type and description.
 */
function unwrapZodField(field) {
    let inner = field;
    while (inner && inner._def && inner._def.innerType) {
        inner = inner._def.innerType;
    }
    return inner;
}

/**
 * Build one entry of the mock tools/list payload used for local debugging.
 * This is a simplified approximation of what zod-to-json-schema would emit;
 * the SDK's actual conversion may be more detailed. The description passed
 * to server.tool() is what MCP clients actually see — this table mirrors it.
 *
 * @param {string} name - Tool name as registered with server.tool().
 * @param {object} schema - Zod object schema for the tool's params (may be falsy).
 * @returns {object} { name, description, inputSchema } debug definition.
 */
function buildDebugToolDefinition(name, schema) {
    const descriptions = {
        create_workflow: "Create a new n8n workflow",
        list_workflows: "List all n8n workflows",
        get_workflow_details: "Get details of a specific n8n workflow",
        add_node: "Add a new node to a workflow",
        edit_node: "Edit an existing node in a workflow",
        delete_node: "Delete a node from a workflow",
        add_connection: "Add a new connection between nodes in a workflow"
    };
    const toolDefinition = {
        name,
        description: descriptions[name] || `Description for ${name}`
    };
    if (schema) {
        const properties = {};
        const required = [];
        const shape = schema.shape;
        for (const key of Object.keys(shape)) {
            const field = shape[key];
            // Unwrap optional/default wrappers so we report the inner type
            // (e.g. "string") rather than "optional", and don't lose the
            // description attached to the wrapped type.
            const inner = unwrapZodField(field);
            properties[key] = {
                type: inner._def.typeName.replace('Zod', '').toLowerCase(),
                description: field.description || inner.description
            };
            if (!field.isOptional()) {
                required.push(key);
            }
        }
        toolDefinition.inputSchema = { type: "object", properties, required };
    }
    else {
        toolDefinition.inputSchema = { type: "object", properties: {}, required: [] };
    }
    return toolDefinition;
}

/**
 * Start the MCP server: preload node type data, connect the stdio transport,
 * dump a debug approximation of the tools/list response, then block until
 * SIGINT/SIGTERM triggers a graceful shutdown.
 *
 * Exits the process with code 1 if startup fails.
 */
async function main() {
    try {
        // Note: loadKnownNodeBaseTypes uses resolvePath, which depends on WORKSPACE_DIR.
        // WORKSPACE_DIR is typically set by create_workflow.
        // If called before create_workflow, it might use process.cwd() or fail if workflow_nodes isn't there.
        // This is a known limitation for now; ideally, WORKSPACE_DIR is configured at MCP server init more globally.
        await loadKnownNodeBaseTypes(); // Attempt to load node types at startup
        await server.connect(transport);
        console.error("[DEBUG] N8N Workflow Builder MCP Server started (TypeScript version)");
        // Debugging tool schemas might need update if params changed significantly for other tools
        const toolSchemasForDebug = {
            create_workflow: createWorkflowParamsSchema,
            list_workflows: zod_1.z.object({}), // Updated to reflect empty params
            get_workflow_details: getWorkflowDetailsParamsSchema,
            add_node: addNodeParamsSchema,
            edit_node: editNodeParamsSchema,
            delete_node: deleteNodeParamsSchema,
            add_connection: addConnectionParamsSchema
        };
        const manuallyConstructedToolList = Object.entries(toolSchemasForDebug)
            .map(([name, schema]) => buildDebugToolDefinition(name, schema));
        console.error("[DEBUG] Server's expected 'tools' array for tools/list response (with detailed inputSchemas):");
        console.error(JSON.stringify(manuallyConstructedToolList, null, 2));
        // Keep the process alive until a termination signal arrives; both
        // signals share one shutdown path so the close logic isn't duplicated.
        return new Promise((resolve, reject) => {
            const shutdown = (signal) => {
                console.error(`[DEBUG] Received ${signal}, shutting down...`);
                server.close().then(resolve).catch(reject);
            };
            process.on('SIGINT', () => shutdown('SIGINT'));
            process.on('SIGTERM', () => shutdown('SIGTERM'));
        });
    }
    catch (error) {
        console.error("[ERROR] Failed to start server:", error);
        process.exit(1);
    }
}
1201
// Kick off the server; any error that escapes main() is fatal.
const startup = main();
startup.catch((error) => {
    console.error("[ERROR] Unhandled error in main:", error);
    process.exit(1);
});
1205
/**
 * Last-resort diagnostics for errors that escaped every other handler.
 * These intentionally only log and do not exit — whether to terminate or
 * attempt a graceful shutdown here is still an open decision.
 */
function logUncaughtException(error) {
    console.error("[ERROR] Uncaught exception:", error);
}
function logUnhandledRejection(reason, promise) {
    console.error("[ERROR] Unhandled promise rejection at:", promise, "reason:", reason);
}
process.on('uncaughtException', logUncaughtException);
process.on('unhandledRejection', logUnhandledRejection);