@prompd/cli 0.3.4 → 0.4.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/lib/index.d.ts +8 -1
- package/dist/lib/index.d.ts.map +1 -1
- package/dist/lib/index.js +19 -2
- package/dist/lib/index.js.map +1 -1
- package/dist/lib/memoryBackend.d.ts +41 -0
- package/dist/lib/memoryBackend.d.ts.map +1 -0
- package/dist/lib/memoryBackend.js +79 -0
- package/dist/lib/memoryBackend.js.map +1 -0
- package/dist/lib/workflowExecutor.d.ts +240 -0
- package/dist/lib/workflowExecutor.d.ts.map +1 -0
- package/dist/lib/workflowExecutor.js +5517 -0
- package/dist/lib/workflowExecutor.js.map +1 -0
- package/dist/lib/workflowParser.d.ts +80 -0
- package/dist/lib/workflowParser.d.ts.map +1 -0
- package/dist/lib/workflowParser.js +1148 -0
- package/dist/lib/workflowParser.js.map +1 -0
- package/dist/lib/workflowTypes.d.ts +1605 -0
- package/dist/lib/workflowTypes.d.ts.map +1 -0
- package/dist/lib/workflowTypes.js +72 -0
- package/dist/lib/workflowTypes.js.map +1 -0
- package/dist/lib/workflowValidator.d.ts +21 -0
- package/dist/lib/workflowValidator.d.ts.map +1 -0
- package/dist/lib/workflowValidator.js +388 -0
- package/dist/lib/workflowValidator.js.map +1 -0
- package/package.json +1 -1
|
@@ -0,0 +1,1148 @@
|
|
|
1
|
+
"use strict";
/**
 * Workflow Parser - Parse and validate .pdflow JSON files
 * Converts between .pdflow format and React Flow format
 */
Object.defineProperty(exports, "__esModule", { value: true });
// Public API. Function declarations are hoisted, so these assignments are
// safe even though the definitions appear later in the file.
exports.parseWorkflow = parseWorkflow;
exports.serializeWorkflow = serializeWorkflow;
exports.createEmptyWorkflow = createEmptyWorkflow;
exports.createWorkflowNode = createWorkflowNode;
exports.getExecutionOrder = getExecutionOrder;
const workflowValidator_1 = require("./workflowValidator");
// Valid node types
// NOTE(review): this list must stay in sync with createWorkflowNode() and
// getDefaultLabel() below, and with the nodeTypes registry in nodes/index.ts.
const VALID_NODE_TYPES = [
    'trigger',
    'prompt',
    'provider',
    'condition',
    'loop',
    'parallel',
    'merge',
    'transformer',
    'api',
    'tool',
    'tool-call-parser',
    'tool-call-router',
    'agent',
    'chat-agent', // Composite chat agent with guardrail
    'guardrail', // Input validation node
    'callback',
    'checkpoint', // Alias for callback
    'user-input',
    'error-handler',
    'command', // Phase E: Shell command execution
    'code', // Phase E: Custom code execution
    'claude-code', // Phase E: Claude Code agent with SSH
    'workflow', // Phase E: Sub-workflow invocation
    'mcp-tool', // Phase E: External MCP tool execution
    'memory', // Memory/storage operations
    'output',
];
// Internal handles that indicate container-internal edges
// (used by shouldEdgeBeAnimated; cycle detection has its own copy).
const INTERNAL_HANDLES = ['loop-start', 'loop-end', 'parallel-start', 'parallel-end'];
|
|
44
|
+
/**
 * Decide whether an edge should render animated, based on its source handle.
 * Animated: container-internal edges (loop/parallel start/end) and condition
 * branch edges (condition-* or the default branch). Plain output edges and
 * edges with no source handle are static.
 */
function shouldEdgeBeAnimated(sourceHandle) {
    if (!sourceHandle) {
        return false;
    }
    const containerHandles = ['loop-start', 'loop-end', 'parallel-start', 'parallel-end'];
    const isContainerInternal = containerHandles.includes(sourceHandle);
    const isConditionBranch = sourceHandle === 'default' || sourceHandle.startsWith('condition-');
    return isContainerInternal || isConditionBranch;
}
|
|
61
|
+
/**
 * Parse a .pdflow JSON string into a ParsedWorkflow.
 * Returns the parsed file plus React Flow nodes/edges together with all
 * validation errors and warnings collected along the way. A JSON parse
 * failure short-circuits to an empty workflow with one INVALID_JSON error.
 */
function parseWorkflow(json) {
    const errors = [];
    const warnings = [];
    let file;
    try {
        file = JSON.parse(json);
    }
    catch (e) {
        const reason = e instanceof Error ? e.message : 'Parse error';
        return {
            file: createEmptyWorkflow(),
            nodes: [],
            edges: [],
            errors: [{ message: `Invalid JSON: ${reason}`, code: 'INVALID_JSON' }],
            warnings: [],
        };
    }
    // Structural checks first, then conversion to React Flow format
    // (edges are already stored in the standard React Flow shape).
    validateWorkflowStructure(file, errors, warnings);
    const nodes = convertNodesToReactFlow(file.nodes || [], errors);
    const edges = normalizeEdges(file.edges || [], errors);
    // Data-flow checks plus the comprehensive validator
    // (covers node-specific rules such as empty containers).
    validateDataFlow(file, errors, warnings);
    const validation = (0, workflowValidator_1.validateWorkflow)(file);
    errors.push(...validation.errors);
    warnings.push(...validation.warnings);
    return { file, nodes, edges, errors, warnings };
}
|
|
103
|
+
/**
 * Serialize a workflow back to a pretty-printed JSON string.
 * Positions, data, parent/extent relations and container dimensions are
 * synced from the React Flow nodes; edges (already React Flow shaped) are
 * mapped straight through. Docked nodes (data.dockedTo set) are persisted
 * with their saved pre-dock position/dimensions rather than the off-canvas
 * zero-size state they hold while docked.
 */
function serializeWorkflow(file, nodes, edges) {
    const rfById = new Map(nodes.map(n => [n.id, n]));
    // Merge a stored node with its live React Flow counterpart, if any.
    const syncNode = (node) => {
        const rfNode = rfById.get(node.id);
        if (!rfNode) {
            return node;
        }
        const nodeData = rfNode.data;
        const docked = nodeData.dockedTo;
        // Docked nodes sit off-canvas; persist the pre-dock position instead.
        const position = docked && nodeData._preDockPosition
            ? nodeData._preDockPosition
            : rfNode.position;
        // Data is taken wholesale (includes _savedWidth, collapsed, etc.).
        const updated = { ...node, position, data: nodeData };
        if (rfNode.parentId) {
            updated.parentId = rfNode.parentId;
            updated.extent = rfNode.extent || 'parent';
        }
        else {
            // Node was removed from its container.
            delete updated.parentId;
            delete updated.extent;
        }
        // Container dimensions: live values normally, saved values when docked
        // (docked nodes carry zero dimensions on canvas).
        if (rfNode.width && !docked) {
            updated.width = rfNode.width;
        }
        else if (docked && nodeData._preDockWidth) {
            updated.width = nodeData._preDockWidth;
        }
        if (rfNode.height && !docked) {
            updated.height = rfNode.height;
        }
        else if (docked && nodeData._preDockHeight) {
            updated.height = nodeData._preDockHeight;
        }
        return updated;
    };
    const workflowEdges = edges.map(edge => ({
        id: edge.id,
        source: edge.source,
        target: edge.target,
        sourceHandle: edge.sourceHandle ?? undefined,
        targetHandle: edge.targetHandle ?? undefined,
        animated: edge.animated,
        label: typeof edge.label === 'string' ? edge.label : undefined,
    }));
    const updatedFile = { ...file, nodes: file.nodes.map(syncNode), edges: workflowEdges };
    return JSON.stringify(updatedFile, null, 2);
}
|
|
169
|
+
/**
 * Build a fresh, empty workflow file with a timestamp-based ID,
 * a default name, and empty parameter/node/edge collections.
 */
function createEmptyWorkflow() {
    const metadata = {
        id: `workflow-${Date.now()}`,
        name: 'New Workflow',
        description: '',
    };
    return {
        version: '1.0',
        metadata,
        parameters: [],
        nodes: [],
        edges: [],
    };
}
|
|
185
|
+
/**
 * Create a new workflow node
 *
 * IMPORTANT: When adding a new node type, you MUST update THREE places:
 * 1. The preset table below (createWorkflowNode) - add default data
 * 2. getDefaultLabel() below - add the default label for the node type
 * 3. nodes/index.ts - add the component to nodeTypes registry
 *
 * If you only add to nodes/index.ts without adding an entry here, the node
 * will render as "UNKNOWN: <type>" because createWorkflowNode falls back
 * to a generic tool node for unhandled types.
 */
function createWorkflowNode(type, position, id) {
    const nodeId = id || `${type}-${Date.now()}`;
    const baseData = { label: getDefaultLabel(type) };
    // 'checkpoint' is an alias for 'callback': both share this preset, which
    // also remaps the stored node type to 'callback'.
    const callbackPreset = {
        node: { type: 'callback' },
        data: {
            mode: 'report',
            checkpointName: '',
            includePreviousOutput: true,
            includeNextNodeInfo: true,
            waitForAck: false,
        },
    };
    // Per-type presets. `node` carries top-level extras (container width/height,
    // type remapping); `data` carries type-specific defaults merged over baseData.
    const presets = {
        trigger: { data: { triggerType: 'manual' } },
        prompt: { data: { source: '', provider: 'openai', model: 'gpt-4o', parameters: {} } },
        provider: { data: { providerId: 'openai', model: 'gpt-4o' } },
        condition: { data: { conditions: [], default: undefined } },
        loop: {
            node: { width: 300, height: 200 },
            data: { loopType: 'while', condition: '', maxIterations: 10, body: [] },
        },
        parallel: {
            node: { width: 350, height: 200 },
            data: { mode: 'broadcast', forkCount: 2, branches: [], waitFor: 'all', mergeStrategy: 'object' },
        },
        merge: { data: { inputs: [], mergeAs: 'object' } },
        api: { data: { method: 'GET', url: '' } },
        callback: callbackPreset,
        checkpoint: callbackPreset,
        'user-input': {
            data: { prompt: 'Enter your input:', inputType: 'text', required: true, showContext: true },
        },
        tool: { data: { toolType: 'function', toolName: '', parameters: {} } },
        'tool-call-parser': {
            data: { format: 'auto', noToolCallBehavior: 'passthrough', allowedTools: [] },
        },
        agent: {
            data: {
                systemPrompt: 'You are a helpful AI assistant with access to tools. Use the available tools to complete the user\'s request.',
                userPrompt: '{{ input }}',
                tools: [],
                maxIterations: 10,
                toolCallFormat: 'auto',
                outputMode: 'final-response',
                includeHistory: true,
            },
        },
        'chat-agent': {
            node: { width: 400, height: 320 },
            data: {
                // Agent configuration
                agentSystemPrompt: 'You are a helpful AI assistant.',
                agentUserPrompt: '{{ input }}',
                maxIterations: 10,
                toolCallFormat: 'auto',
                outputMode: 'final-response',
                // Guardrail configuration (disabled by default)
                guardrailEnabled: false,
                // User input configuration (enabled by default)
                userInputEnabled: true,
                userInputPrompt: 'Enter your message:',
                userInputType: 'textarea',
                // Container state; starts collapsed with saved expanded dimensions.
                collapsed: true,
                _savedWidth: 400,
                _savedHeight: 320,
                tools: [],
            },
        },
        guardrail: {
            data: {
                systemPrompt: 'Validate the input. Respond with "PASS" if the input is appropriate and safe, or "REJECT" with a reason if it violates guidelines.',
                scoreThreshold: 0.5,
            },
        },
        'tool-call-router': {
            node: { width: 320, height: 180 },
            data: { routingMode: 'name-match', onNoMatch: 'error', collapsed: false },
        },
        'error-handler': {
            data: {
                strategy: 'retry',
                retry: { maxAttempts: 3, backoffMs: 1000, backoffMultiplier: 2 },
            },
        },
        output: { data: {} },
        command: {
            data: {
                command: '',
                args: [],
                cwd: '',
                env: {},
                timeoutMs: 30000,
                outputFormat: 'text',
                requiresApproval: true,
            },
        },
        'claude-code': {
            data: {
                connection: { type: 'local' },
                task: { prompt: '', workingDirectory: '' },
                constraints: {
                    maxTurns: 50,
                    allowedTools: ['read', 'write', 'execute', 'web'],
                    requireApprovalForWrites: false,
                },
                output: { format: 'final-response', includeMetadata: false },
            },
        },
        workflow: {
            data: { source: '', parameters: {}, outputMapping: {}, inheritVariables: false },
        },
        'mcp-tool': {
            data: { toolName: '', parameters: {}, timeoutMs: 30000, includeInContext: false },
        },
        code: {
            data: { language: 'typescript', code: '', inputVariable: 'input', executionContext: 'isolated' },
        },
        transformer: {
            data: { mode: 'template', template: '', inputVariable: 'input', passthroughOnError: false },
        },
        memory: {
            data: { mode: 'kv', operations: ['get'], scope: 'execution' }, // Multi-action array (replaces single 'operation')
        },
    };
    // Own-property check so inherited keys ('constructor', ...) still fall through.
    const preset = Object.hasOwn(presets, type) ? presets[type] : undefined;
    if (!preset) {
        // IMPORTANT: If you see this error, add an entry to the preset table above.
        // This fallback should NEVER be hit in production - all node types must be listed.
        console.error(`[createWorkflowNode] MISSING CASE for node type: "${type}". ` +
            `Add a case for '${type}' in workflowParser.ts createWorkflowNode() switch statement!`);
        // Return a generic tool node as fallback (more visible than callback)
        return {
            id: nodeId,
            type: 'tool',
            position,
            data: {
                ...baseData,
                label: `UNKNOWN: ${type}`, // Make it obvious this is a fallback
                toolType: 'function',
                toolName: '',
                parameters: {},
            },
        };
    }
    return {
        id: nodeId,
        type,
        position,
        ...preset.node,
        data: { ...baseData, ...preset.data },
    };
}
|
|
572
|
+
/**
 * Get the default display label for a node type.
 *
 * Uses a Map rather than a plain-object lookup: the previous
 * `labels[type] || 'Node'` read went through the prototype chain, so a
 * type string such as 'constructor' or 'toString' would return an
 * inherited Object.prototype member instead of the 'Node' fallback.
 *
 * @param {string} type - Workflow node type.
 * @returns {string} Human-readable label, or 'Node' for unknown types.
 */
function getDefaultLabel(type) {
    const labels = new Map([
        ['trigger', 'Start'],
        ['prompt', 'Prompt'],
        ['provider', 'Provider'],
        ['condition', 'Condition'],
        ['loop', 'Loop'],
        ['parallel', 'Parallel'],
        ['merge', 'Merge'],
        ['transformer', 'Transform'],
        ['api', 'API Call'],
        ['tool', 'Tool'],
        ['tool-call-parser', 'Tool Parser'],
        ['tool-call-router', 'Tool Router'],
        ['agent', 'AI Agent'],
        ['chat-agent', 'Chat Agent'],
        ['guardrail', 'Guardrail'],
        ['callback', 'Checkpoint'],
        ['checkpoint', 'Checkpoint'], // Alias for callback
        ['user-input', 'User Input'],
        ['error-handler', 'Error Handler'],
        ['command', 'Command'],
        ['code', 'Code'],
        ['claude-code', 'Claude Code'],
        ['workflow', 'Sub-Workflow'],
        ['mcp-tool', 'MCP Tool'],
        ['memory', 'Memory'],
        ['output', 'Output'],
    ]);
    return labels.get(type) || 'Node';
}
|
|
606
|
+
// ============================================================================
// Validation Functions
// ============================================================================
/**
 * Structural validation of a parsed .pdflow file: version, metadata,
 * node array (duplicate IDs, valid types, positions, per-type data),
 * and the edge array shape. Findings are appended to errors/warnings.
 */
function validateWorkflowStructure(file, errors, warnings) {
    if (!file.version) {
        warnings.push({
            message: 'Missing version field, defaulting to 1.0',
            code: 'MISSING_VERSION',
        });
    }
    if (!file.metadata) {
        errors.push({
            message: 'Missing metadata section',
            code: 'MISSING_METADATA',
        });
    }
    else {
        if (!file.metadata.id) {
            errors.push({
                field: 'metadata.id',
                message: 'Missing workflow ID',
                code: 'MISSING_ID',
            });
        }
        if (!file.metadata.name) {
            warnings.push({
                message: 'Missing workflow name',
                code: 'MISSING_NAME',
            });
        }
    }
    if (!Array.isArray(file.nodes)) {
        errors.push({
            message: 'Missing or invalid nodes array',
            code: 'INVALID_NODES',
        });
    }
    else {
        const seenIds = new Set();
        for (const node of file.nodes) {
            if (seenIds.has(node.id)) {
                errors.push({
                    nodeId: node.id,
                    message: `Duplicate node ID: ${node.id}`,
                    code: 'DUPLICATE_NODE_ID',
                });
            }
            seenIds.add(node.id);
            if (!VALID_NODE_TYPES.includes(node.type)) {
                errors.push({
                    nodeId: node.id,
                    message: `Invalid node type: ${node.type}`,
                    code: 'INVALID_NODE_TYPE',
                });
            }
            const pos = node.position;
            if (!pos || typeof pos.x !== 'number' || typeof pos.y !== 'number') {
                errors.push({
                    nodeId: node.id,
                    message: 'Invalid or missing position',
                    code: 'INVALID_POSITION',
                });
            }
            validateNodeData(node, errors, warnings);
        }
    }
    if (file.edges && !Array.isArray(file.edges)) {
        errors.push({
            message: 'Invalid edges array',
            code: 'INVALID_EDGES',
        });
    }
}
|
|
686
|
+
/**
 * Per-type validation of a node's data payload. Only types with required
 * fields are checked; other types pass through untouched.
 */
function validateNodeData(node, errors, warnings) {
    if (!node.data) {
        errors.push({
            nodeId: node.id,
            message: 'Missing node data',
            code: 'MISSING_NODE_DATA',
        });
        return;
    }
    const data = node.data;
    const missing = (field) => !(field in data);
    switch (node.type) {
        case 'prompt': {
            if (missing('source')) {
                errors.push({
                    nodeId: node.id,
                    field: 'source',
                    message: 'Prompt node missing source',
                    code: 'MISSING_PROMPT_SOURCE',
                });
            }
            break;
        }
        case 'condition': {
            if (missing('conditions') || !Array.isArray(data.conditions)) {
                errors.push({
                    nodeId: node.id,
                    field: 'conditions',
                    message: 'Condition node missing conditions array',
                    code: 'MISSING_CONDITIONS',
                });
            }
            break;
        }
        case 'loop': {
            if (missing('loopType')) {
                errors.push({
                    nodeId: node.id,
                    field: 'loopType',
                    message: 'Loop node missing loopType',
                    code: 'MISSING_LOOP_TYPE',
                });
            }
            if (missing('maxIterations') || typeof data.maxIterations !== 'number') {
                warnings.push({
                    nodeId: node.id,
                    message: 'Loop node missing maxIterations, defaulting to 10',
                    code: 'MISSING_MAX_ITERATIONS',
                });
            }
            break;
        }
        case 'parallel': {
            if (missing('branches') || !Array.isArray(data.branches)) {
                errors.push({
                    nodeId: node.id,
                    field: 'branches',
                    message: 'Parallel node missing branches array',
                    code: 'MISSING_BRANCHES',
                });
            }
            break;
        }
        case 'api': {
            if (missing('url') || !data.url) {
                errors.push({
                    nodeId: node.id,
                    field: 'url',
                    message: 'API node missing URL',
                    code: 'MISSING_API_URL',
                });
            }
            if (missing('method')) {
                warnings.push({
                    nodeId: node.id,
                    message: 'API node missing method, defaulting to GET',
                    code: 'MISSING_API_METHOD',
                });
            }
            break;
        }
    }
}
|
|
762
|
+
/**
 * Data-flow validation: edge endpoints must refer to declared nodes, the
 * graph must be acyclic (ignoring intentional container back-edges), and
 * isolated nodes produce warnings when the workflow has more than one node.
 */
function validateDataFlow(file, errors, warnings) {
    if (!file.nodes || !file.edges) {
        return;
    }
    const knownIds = new Set(file.nodes.map(n => n.id));
    for (const edge of file.edges) {
        if (!knownIds.has(edge.source)) {
            errors.push({
                connectionId: edge.id,
                message: `Edge references non-existent source node: ${edge.source}`,
                code: 'INVALID_SOURCE_NODE',
            });
        }
        if (!knownIds.has(edge.target)) {
            errors.push({
                connectionId: edge.id,
                message: `Edge references non-existent target node: ${edge.target}`,
                code: 'INVALID_TARGET_NODE',
            });
        }
    }
    // Basic cycle detection (container-internal back-edges excluded).
    if (detectCycles(file.nodes, file.edges)) {
        errors.push({
            message: 'Workflow contains cycles which may cause infinite loops',
            code: 'CYCLE_DETECTED',
        });
    }
    // Warn about nodes with no edges at all (only meaningful with 2+ nodes).
    const touched = new Set();
    for (const { source, target } of file.edges) {
        touched.add(source);
        touched.add(target);
    }
    if (file.nodes.length > 1) {
        for (const node of file.nodes) {
            if (!touched.has(node.id)) {
                warnings.push({
                    nodeId: node.id,
                    message: `Node "${node.data.label || node.id}" is not connected to any other nodes`,
                    code: 'UNREACHABLE_NODE',
                });
            }
        }
    }
}
|
|
807
|
+
/**
 * Check whether an edge is part of a container's internal wiring (loop or
 * parallel start/end handles, fork-N branch handles) or an intentional
 * feedback/event edge (tool results back to the agent, checkpoint events).
 * Such edges are deliberate back-edges and are excluded from cycle detection.
 */
function isInternalContainerEdge(edge) {
    const containerHandles = new Set([
        'loop-start',
        'loop-end',
        'parallel-start',
        'parallel-end',
    ]);
    const src = edge.sourceHandle;
    const dst = edge.targetHandle;
    const isContainerHandle = (handle) => Boolean(handle) && (containerHandles.has(handle) || handle.startsWith('fork-'));
    if (isContainerHandle(src) || isContainerHandle(dst)) {
        return true;
    }
    // ToolCallRouter -> Agent toolResult loop is intentional feedback.
    if (src === 'toolResult' && dst === 'toolResult') {
        return true;
    }
    // onCheckpoint edges are event streams, not data-flow cycles.
    return src === 'onCheckpoint';
}
|
|
844
|
+
/**
 * Detect cycles in the workflow graph via depth-first search.
 * Internal container edges (loop/parallel wiring, fork handles, feedback
 * edges) are intentional back-edges and are left out of the graph.
 */
function detectCycles(nodes, edges) {
    // Adjacency list without the intentional back-edges.
    const graph = new Map(nodes.map(node => [node.id, []]));
    for (const edge of edges) {
        if (isInternalContainerEdge(edge)) {
            continue;
        }
        const out = graph.get(edge.source);
        if (out) {
            out.push(edge.target);
        }
        else {
            graph.set(edge.source, [edge.target]);
        }
    }
    const finished = new Set();
    const inProgress = new Set();
    const visit = (id) => {
        finished.add(id);
        inProgress.add(id);
        for (const next of graph.get(id) || []) {
            if (inProgress.has(next)) {
                return true; // back-edge -> cycle
            }
            if (!finished.has(next) && visit(next)) {
                return true;
            }
        }
        inProgress.delete(id);
        return false;
    };
    return nodes.some(node => !finished.has(node.id) && visit(node.id));
}
|
|
889
|
+
// ============================================================================
// Conversion Functions
// ============================================================================
/**
 * Convert stored workflow nodes into React Flow nodes: migrates legacy
 * memory-node data in place, restores docked-node hidden state, and
 * carries over parent/extent relations and container dimensions.
 * The result is ordered so parents precede their children.
 */
function convertNodesToReactFlow(nodes, errors) {
    const rfNodes = nodes.map(node => {
        const nodeData = node.data;
        // Migration: legacy MemoryNode files stored a single 'operation'
        // string; fold it into the newer 'operations' array in place.
        if (node.type === 'memory' && nodeData.operation && !nodeData.operations) {
            nodeData.operations = [nodeData.operation];
            delete nodeData.operation;
        }
        const docked = Boolean(nodeData.dockedTo);
        const rfNode = {
            id: node.id,
            type: node.type,
            position: node.position || { x: 0, y: 0 },
            data: nodeData,
        };
        if (docked) {
            // Docked nodes are hidden and parked far off-canvas with zero size.
            rfNode.hidden = true;
            rfNode.width = 0;
            rfNode.height = 0;
            rfNode.position = { x: -9999, y: -9999 };
        }
        if (node.parentId) {
            rfNode.parentId = node.parentId;
            // Only the string 'parent' extent is used for children
            // (React Flow's CoordinateExtent has a different format).
            rfNode.extent = 'parent';
        }
        // Container dimensions (loop/parallel) - skipped for docked nodes,
        // which keep their zero dimensions from above.
        if (!docked) {
            if (node.width) {
                rfNode.width = node.width;
            }
            if (node.height) {
                rfNode.height = node.height;
            }
        }
        return rfNode;
    });
    // CRITICAL: React Flow requires parents to appear BEFORE their children.
    return sortNodesForReactFlow(rfNodes);
}
|
|
936
|
+
/**
 * Sort nodes so parent nodes appear before their children.
 * React Flow requires this ordering for proper parent-child rendering.
 *
 * Handles nested containers by repeatedly sweeping the unplaced children
 * and appending any whose parent has already been placed. A safety limit
 * prevents an infinite loop when a child references a missing parent; such
 * orphans are appended at the end with a console warning.
 *
 * @param {Array} nodes - React Flow nodes (children carry `parentId`).
 * @returns {Array} New array: roots first, each child after its parent.
 */
function sortNodesForReactFlow(nodes) {
    // Separate root nodes (no parent) from child nodes.
    const rootNodes = [];
    const childNodes = [];
    for (const node of nodes) {
        if (node.parentId) {
            childNodes.push(node);
        }
        else {
            rootNodes.push(node);
        }
    }
    // Result array - start with root nodes.
    const result = [...rootNodes];
    const placedIds = new Set(rootNodes.map(n => n.id));
    // Add child nodes after their parents. Nested containers mean a child's
    // parent may itself be an unplaced child, so iterate until settled.
    const remaining = [...childNodes];
    // Safety limit to prevent infinite loops on missing parent references.
    let iterations = 0;
    const maxIterations = remaining.length * 2;
    while (remaining.length > 0 && iterations < maxIterations) {
        iterations++;
        // Iterate backwards so splice() doesn't disturb unvisited indices.
        for (let i = remaining.length - 1; i >= 0; i--) {
            const child = remaining[i];
            // If parent is already placed, we can place this child.
            if (child.parentId && placedIds.has(child.parentId)) {
                result.push(child);
                placedIds.add(child.id);
                remaining.splice(i, 1);
            }
        }
    }
    // Orphaned children (missing parents) are appended at the end so no
    // node is silently dropped.
    if (remaining.length > 0) {
        console.warn('[workflowParser] Some nodes have missing parent references:', remaining.map(n => n.id));
        result.push(...remaining);
    }
    return result;
}
|
|
985
|
+
/**
 * Normalize edges - already in React Flow format, just ensure defaults.
 * Applies consistent animation rules: internal container edges and condition
 * branch edges are animated, regular output edges are not.
 */
function normalizeEdges(edges, errors) {
    const normalizeOne = (edge) => {
        const sourceHandle = edge.sourceHandle || 'output';
        const targetHandle = edge.targetHandle || 'input';
        // Honor an explicit animated flag; otherwise derive it from the
        // handles (only computed when the flag is null/undefined).
        let animated = edge.animated;
        if (animated == null) {
            animated = shouldEdgeBeAnimated(sourceHandle) || shouldEdgeBeAnimated(targetHandle);
        }
        return {
            id: edge.id,
            source: edge.source,
            target: edge.target,
            sourceHandle,
            targetHandle,
            animated,
            label: edge.label,
        };
    };
    const normalized = [];
    for (const edge of edges) {
        normalized.push(normalizeOne(edge));
    }
    return normalized;
}
|
|
1007
|
+
/**
 * Check if an edge is used for the main execution flow (should be included in topological sort).
 * Excludes internal container edges that would create back-edges (loop-end, parallel-end).
 * Excludes event-based edges that are triggered by events, not data flow (onError, onCheckpoint, onProgress, toolResult).
 */
function isExecutionFlowEdge(edge) {
    const { sourceHandle, targetHandle } = edge;
    // Back-edges leaving container end handles (loop-end -> container,
    // parallel-end -> container) would create cycles in the sort.
    if (sourceHandle === 'loop-end' || sourceHandle === 'parallel-end') {
        return false;
    }
    // Edges targeting internal fork handles of parallel nodes are internal.
    if (targetHandle && targetHandle.startsWith('fork-')) {
        return false;
    }
    // Event-based edges fire on events, not on the main data flow:
    //   onError      - error handler invocation
    //   onCheckpoint - agent checkpoint events
    //   onProgress   - node progress events (e.g. ClaudeCode)
    //   toolResult   - tool returning a result to an agent
    const eventHandles = new Set(['onError', 'onCheckpoint', 'onProgress', 'toolResult']);
    if (sourceHandle && eventHandles.has(sourceHandle)) {
        return false;
    }
    // Everything else participates in the execution flow: regular
    // output -> input edges, condition branches, and container entry
    // points (loop-start, parallel-start).
    return true;
}
|
|
1038
|
+
/**
 * Build execution order using topological sort (Kahn's algorithm).
 *
 * Important: Child nodes (nodes with parentId) are EXCLUDED from the main execution order.
 * They are executed by their parent container node (loop, parallel), not by the main executor.
 *
 * Excludes:
 * - Internal back-edges that would create cycles (e.g., loop-end -> container)
 * - Event-based edges (see isExecutionFlowEdge)
 * - Child nodes that belong to container nodes (they have parentId set)
 *
 * Zero in-degree nodes are seeded in priority order:
 *   1. Global config nodes (provider, error-handler, connection)
 *   2. Trigger nodes
 *   3. Other source nodes (supports workflows without explicit triggers)
 * Event-driven nodes (callback, checkpoint) are NEVER seeded - they only run
 * when triggered by events (onCheckpoint, onError, ...).
 *
 * @param {{ file: { nodes?: Array, edges?: Array } }} workflow - Parsed workflow.
 * @returns {string[]} Root-level node IDs in execution order.
 */
function getExecutionOrder(workflow) {
    const { file } = workflow;
    if (!file.nodes || !file.edges)
        return [];
    // Identify child nodes (they belong to containers, not the main flow).
    const childNodeIds = new Set();
    for (const node of file.nodes) {
        if (node.parentId) {
            childNodeIds.add(node.id);
        }
    }
    // Only include root-level nodes in the main execution order.
    const rootNodes = file.nodes.filter(n => !n.parentId);
    // Build in-degree map and adjacency list for ROOT NODES ONLY.
    const inDegree = new Map();
    const adjacencyList = new Map();
    for (const node of rootNodes) {
        inDegree.set(node.id, 0);
        adjacencyList.set(node.id, []);
    }
    // Build graph from execution flow edges (only between root nodes).
    for (const edge of file.edges) {
        // Skip edges involving child nodes - handled by container execution.
        if (childNodeIds.has(edge.source) || childNodeIds.has(edge.target)) {
            continue;
        }
        // Skip internal back-edges and event edges that would create cycles.
        if (!isExecutionFlowEdge(edge)) {
            continue;
        }
        const targets = adjacencyList.get(edge.source) || [];
        targets.push(edge.target);
        adjacencyList.set(edge.source, targets);
        inDegree.set(edge.target, (inDegree.get(edge.target) || 0) + 1);
    }
    // Categorize zero in-degree nodes. O(1) id->type lookup instead of
    // rootNodes.find() inside the loop (which was O(n^2)).
    const globalNodeTypes = new Set(['provider', 'error-handler', 'connection']);
    const startNodeTypes = new Set(['trigger']);
    // Callback/Checkpoint nodes are event-driven - they ONLY execute when
    // triggered by events and must NEVER auto-start.
    const eventDrivenNodeTypes = new Set(['callback', 'checkpoint']);
    const typeById = new Map();
    for (const node of rootNodes) {
        typeById.set(node.id, node.type);
    }
    const globalQueue = [];
    const startQueue = [];
    const otherQueue = [];
    for (const [nodeId, degree] of inDegree) {
        if (degree === 0) {
            const nodeType = typeById.get(nodeId) || '';
            // NEVER include event-driven nodes in the initial queue.
            if (eventDrivenNodeTypes.has(nodeType)) {
                continue;
            }
            if (globalNodeTypes.has(nodeType)) {
                globalQueue.push(nodeId);
            }
            else if (startNodeTypes.has(nodeType)) {
                startQueue.push(nodeId);
            }
            else {
                otherQueue.push(nodeId);
            }
        }
    }
    // Initial queue: global nodes first, then triggers, then other sources.
    const queue = [...globalQueue, ...startQueue, ...otherQueue];
    const order = [];
    // Kahn's algorithm. A moving head index replaces Array.shift(), which is
    // O(n) per call and made the loop accidentally O(n^2) on large workflows.
    let head = 0;
    while (head < queue.length) {
        const current = queue[head++];
        order.push(current);
        const neighbors = adjacencyList.get(current) || [];
        for (const neighbor of neighbors) {
            const newDegree = (inDegree.get(neighbor) || 0) - 1;
            inDegree.set(neighbor, newDegree);
            if (newDegree === 0)
                queue.push(neighbor);
        }
    }
    // Log disconnected nodes for debugging (but don't add them to execution
    // order). Event-driven nodes are intentionally excluded - no warning.
    const visitedSet = new Set(order);
    for (const node of rootNodes) {
        if (!visitedSet.has(node.id)) {
            if (!eventDrivenNodeTypes.has(node.type || '')) {
                console.log(`[getExecutionOrder] Node not in execution flow (disconnected or event-driven): ${node.id} (type: ${node.type})`);
            }
        }
    }
    return order;
}
|
|
1148
|
+
//# sourceMappingURL=workflowParser.js.map
|