@synergenius/flow-weaver 0.2.1 → 0.4.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (83)
  1. package/README.md +261 -200
  2. package/dist/annotation-generator.js +36 -0
  3. package/dist/api/generate-in-place.js +39 -0
  4. package/dist/api/generate.js +11 -1
  5. package/dist/api/manipulation/nodes.js +22 -0
  6. package/dist/ast/types.d.ts +27 -1
  7. package/dist/built-in-nodes/index.d.ts +1 -0
  8. package/dist/built-in-nodes/index.js +1 -0
  9. package/dist/built-in-nodes/invoke-workflow.js +12 -1
  10. package/dist/built-in-nodes/mock-types.d.ts +2 -0
  11. package/dist/built-in-nodes/wait-for-agent.d.ts +13 -0
  12. package/dist/built-in-nodes/wait-for-agent.js +26 -0
  13. package/dist/chevrotain-parser/fan-parser.d.ts +38 -0
  14. package/dist/chevrotain-parser/fan-parser.js +149 -0
  15. package/dist/chevrotain-parser/grammar-diagrams.d.ts +1 -0
  16. package/dist/chevrotain-parser/grammar-diagrams.js +3 -0
  17. package/dist/chevrotain-parser/index.d.ts +3 -1
  18. package/dist/chevrotain-parser/index.js +3 -1
  19. package/dist/chevrotain-parser/tokens.d.ts +2 -0
  20. package/dist/chevrotain-parser/tokens.js +10 -0
  21. package/dist/cli/commands/diagram.d.ts +2 -1
  22. package/dist/cli/commands/diagram.js +9 -6
  23. package/dist/cli/commands/docs.d.ts +11 -0
  24. package/dist/cli/commands/docs.js +77 -0
  25. package/dist/cli/commands/run.js +59 -1
  26. package/dist/cli/flow-weaver.mjs +2447 -594
  27. package/dist/cli/index.js +40 -2
  28. package/dist/diagram/geometry.d.ts +9 -4
  29. package/dist/diagram/geometry.js +262 -31
  30. package/dist/diagram/html-viewer.d.ts +12 -0
  31. package/dist/diagram/html-viewer.js +399 -0
  32. package/dist/diagram/index.d.ts +12 -0
  33. package/dist/diagram/index.js +22 -0
  34. package/dist/diagram/renderer.js +137 -116
  35. package/dist/diagram/types.d.ts +1 -0
  36. package/dist/doc-metadata/extractors/annotations.js +282 -1
  37. package/dist/doc-metadata/types.d.ts +6 -0
  38. package/dist/docs/index.d.ts +54 -0
  39. package/dist/docs/index.js +256 -0
  40. package/dist/generator/control-flow.d.ts +13 -0
  41. package/dist/generator/control-flow.js +74 -0
  42. package/dist/generator/inngest.js +23 -0
  43. package/dist/generator/unified.js +122 -2
  44. package/dist/jsdoc-parser.d.ts +24 -0
  45. package/dist/jsdoc-parser.js +41 -1
  46. package/dist/mcp/agent-channel.d.ts +35 -0
  47. package/dist/mcp/agent-channel.js +61 -0
  48. package/dist/mcp/run-registry.d.ts +29 -0
  49. package/dist/mcp/run-registry.js +24 -0
  50. package/dist/mcp/server.js +2 -0
  51. package/dist/mcp/tools-diagram.d.ts +1 -1
  52. package/dist/mcp/tools-diagram.js +15 -7
  53. package/dist/mcp/tools-docs.d.ts +3 -0
  54. package/dist/mcp/tools-docs.js +62 -0
  55. package/dist/mcp/tools-editor.js +77 -3
  56. package/dist/mcp/tools-query.js +3 -1
  57. package/dist/mcp/workflow-executor.d.ts +28 -0
  58. package/dist/mcp/workflow-executor.js +66 -3
  59. package/dist/parser.d.ts +8 -0
  60. package/dist/parser.js +100 -0
  61. package/dist/runtime/ExecutionContext.d.ts +2 -0
  62. package/dist/runtime/ExecutionContext.js +2 -0
  63. package/dist/runtime/events.d.ts +1 -1
  64. package/dist/sugar-optimizer.js +28 -3
  65. package/dist/validator.d.ts +8 -0
  66. package/dist/validator.js +92 -0
  67. package/docs/reference/advanced-annotations.md +431 -0
  68. package/docs/reference/built-in-nodes.md +225 -0
  69. package/docs/reference/cli-reference.md +882 -0
  70. package/docs/reference/compilation.md +351 -0
  71. package/docs/reference/concepts.md +400 -0
  72. package/docs/reference/debugging.md +255 -0
  73. package/docs/reference/deployment.md +207 -0
  74. package/docs/reference/error-codes.md +686 -0
  75. package/docs/reference/export-interface.md +229 -0
  76. package/docs/reference/iterative-development.md +186 -0
  77. package/docs/reference/jsdoc-grammar.md +471 -0
  78. package/docs/reference/marketplace.md +205 -0
  79. package/docs/reference/node-conversion.md +308 -0
  80. package/docs/reference/patterns.md +161 -0
  81. package/docs/reference/scaffold.md +160 -0
  82. package/docs/reference/tutorial.md +519 -0
  83. package/package.json +10 -4
@@ -0,0 +1,256 @@
1
+ import * as fs from 'fs';
2
+ import * as path from 'path';
3
+ import { fileURLToPath } from 'url';
4
+ // ---------------------------------------------------------------------------
5
+ // Internals
6
+ // ---------------------------------------------------------------------------
7
+ function getDocsDir() {
8
+ // Resolve docs/reference relative to the package root.
9
+ // In development: src/docs/index.ts -> ../../docs/reference
10
+ // In dist: dist/docs/index.js -> ../../docs/reference
11
+ const thisFile = fileURLToPath(import.meta.url);
12
+ const packageRoot = path.resolve(path.dirname(thisFile), '..', '..');
13
+ return path.join(packageRoot, 'docs', 'reference');
14
+ }
15
+ function parseFrontmatter(raw) {
16
+ const fmMatch = raw.match(/^---\n([\s\S]*?)\n---\n([\s\S]*)$/);
17
+ if (!fmMatch) {
18
+ return {
19
+ frontmatter: { name: '', description: '', keywords: [] },
20
+ body: raw,
21
+ };
22
+ }
23
+ const fmBlock = fmMatch[1];
24
+ const body = fmMatch[2];
25
+ let name = '';
26
+ let description = '';
27
+ let keywords = [];
28
+ for (const line of fmBlock.split('\n')) {
29
+ const nameMatch = line.match(/^name:\s*(.+)$/);
30
+ if (nameMatch) {
31
+ name = nameMatch[1].trim();
32
+ continue;
33
+ }
34
+ const descMatch = line.match(/^description:\s*(.+)$/);
35
+ if (descMatch) {
36
+ description = descMatch[1].trim();
37
+ continue;
38
+ }
39
+ const kwMatch = line.match(/^keywords:\s*\[(.+)\]$/);
40
+ if (kwMatch) {
41
+ keywords = kwMatch[1].split(',').map((k) => k.trim().replace(/^['"]|['"]$/g, ''));
42
+ continue;
43
+ }
44
+ }
45
+ return { frontmatter: { name, description, keywords }, body };
46
+ }
47
+ function splitSections(body) {
48
+ const lines = body.split('\n');
49
+ const sections = [];
50
+ let currentHeading = '';
51
+ let currentLevel = 0;
52
+ let currentLines = [];
53
+ function flush() {
54
+ if (currentHeading || currentLines.length > 0) {
55
+ const content = currentLines.join('\n').trim();
56
+ const codeBlocks = [];
57
+ const codeRe = /```[\s\S]*?```/g;
58
+ let m;
59
+ while ((m = codeRe.exec(content)) !== null) {
60
+ codeBlocks.push(m[0]);
61
+ }
62
+ sections.push({
63
+ heading: currentHeading,
64
+ level: currentLevel,
65
+ content,
66
+ codeBlocks,
67
+ });
68
+ }
69
+ }
70
+ for (const line of lines) {
71
+ const headingMatch = line.match(/^(#{1,6})\s+(.+)$/);
72
+ if (headingMatch) {
73
+ flush();
74
+ currentLevel = headingMatch[1].length;
75
+ currentHeading = headingMatch[2];
76
+ currentLines = [];
77
+ }
78
+ else {
79
+ currentLines.push(line);
80
+ }
81
+ }
82
+ flush();
83
+ return sections;
84
+ }
85
+ // ---------------------------------------------------------------------------
86
+ // Public API
87
+ // ---------------------------------------------------------------------------
88
+ /**
89
+ * List all available documentation topics.
90
+ */
91
+ export function listTopics() {
92
+ const docsDir = getDocsDir();
93
+ if (!fs.existsSync(docsDir))
94
+ return [];
95
+ const files = fs.readdirSync(docsDir).filter((f) => f.endsWith('.md')).sort();
96
+ return files.map((file) => {
97
+ const raw = fs.readFileSync(path.join(docsDir, file), 'utf-8');
98
+ const { frontmatter } = parseFrontmatter(raw);
99
+ return {
100
+ slug: file.replace(/\.md$/, ''),
101
+ name: frontmatter.name,
102
+ description: frontmatter.description,
103
+ keywords: frontmatter.keywords,
104
+ };
105
+ });
106
+ }
107
+ /**
108
+ * Read a single documentation topic.
109
+ * @param slug - Topic slug (filename without .md)
110
+ * @param compact - If true, return a compact LLM-friendly version
111
+ */
112
+ export function readTopic(slug, compact) {
113
+ const docsDir = getDocsDir();
114
+ const filePath = path.join(docsDir, `${slug}.md`);
115
+ if (!fs.existsSync(filePath))
116
+ return null;
117
+ const raw = fs.readFileSync(filePath, 'utf-8');
118
+ const { frontmatter, body } = parseFrontmatter(raw);
119
+ const content = compact ? buildCompactContent(frontmatter, body) : body.trim();
120
+ return {
121
+ slug,
122
+ name: frontmatter.name,
123
+ description: frontmatter.description,
124
+ keywords: frontmatter.keywords,
125
+ content,
126
+ };
127
+ }
128
+ /**
129
+ * Read a topic and return structured sections (for JSON output).
130
+ */
131
+ export function readTopicStructured(slug) {
132
+ const docsDir = getDocsDir();
133
+ const filePath = path.join(docsDir, `${slug}.md`);
134
+ if (!fs.existsSync(filePath))
135
+ return null;
136
+ const raw = fs.readFileSync(filePath, 'utf-8');
137
+ const { frontmatter, body } = parseFrontmatter(raw);
138
+ const sections = splitSections(body);
139
+ return {
140
+ slug,
141
+ name: frontmatter.name,
142
+ description: frontmatter.description,
143
+ keywords: frontmatter.keywords,
144
+ sections,
145
+ };
146
+ }
147
+ /**
148
+ * Search across all documentation topics.
149
+ * Returns matching sections with context.
150
+ */
151
+ export function searchDocs(query) {
152
+ const topics = listTopics();
153
+ const docsDir = getDocsDir();
154
+ const queryLower = query.toLowerCase();
155
+ const queryTerms = queryLower.split(/\s+/).filter(Boolean);
156
+ const results = [];
157
+ for (const topic of topics) {
158
+ // Check keywords match
159
+ const keywordMatch = topic.keywords.some((kw) => queryTerms.some((term) => kw.toLowerCase().includes(term)));
160
+ const filePath = path.join(docsDir, `${topic.slug}.md`);
161
+ const raw = fs.readFileSync(filePath, 'utf-8');
162
+ const { body } = parseFrontmatter(raw);
163
+ const sections = splitSections(body);
164
+ for (const section of sections) {
165
+ const sectionLower = section.content.toLowerCase();
166
+ const headingLower = section.heading.toLowerCase();
167
+ // Calculate relevance
168
+ let relevance = 0;
169
+ // Exact phrase match in content
170
+ if (sectionLower.includes(queryLower)) {
171
+ relevance += 10;
172
+ }
173
+ // Individual term matches
174
+ for (const term of queryTerms) {
175
+ if (headingLower.includes(term))
176
+ relevance += 5;
177
+ if (sectionLower.includes(term))
178
+ relevance += 2;
179
+ }
180
+ // Keyword bonus
181
+ if (keywordMatch)
182
+ relevance += 3;
183
+ if (relevance > 0) {
184
+ // Build excerpt: find matching lines
185
+ const lines = section.content.split('\n');
186
+ const matchingLines = [];
187
+ for (const line of lines) {
188
+ if (queryTerms.some((term) => line.toLowerCase().includes(term))) {
189
+ matchingLines.push(line.trim());
190
+ if (matchingLines.length >= 3)
191
+ break;
192
+ }
193
+ }
194
+ results.push({
195
+ topic: topic.name,
196
+ slug: topic.slug,
197
+ section: section.heading,
198
+ heading: section.heading,
199
+ excerpt: matchingLines.join('\n') || section.content.slice(0, 200),
200
+ relevance,
201
+ });
202
+ }
203
+ }
204
+ }
205
+ // Sort by relevance descending
206
+ results.sort((a, b) => b.relevance - a.relevance);
207
+ return results;
208
+ }
209
+ // ---------------------------------------------------------------------------
210
+ // Compact mode builder
211
+ // ---------------------------------------------------------------------------
212
+ function buildCompactContent(frontmatter, body) {
213
+ const lines = body.split('\n');
214
+ const output = [];
215
+ // Header
216
+ output.push(`# ${frontmatter.name}`);
217
+ output.push(frontmatter.description);
218
+ output.push('');
219
+ let inCodeBlock = false;
220
+ let inTable = false;
221
+ for (const line of lines) {
222
+ // Track code blocks - always include them
223
+ if (line.trimStart().startsWith('```')) {
224
+ inCodeBlock = !inCodeBlock;
225
+ output.push(line);
226
+ continue;
227
+ }
228
+ if (inCodeBlock) {
229
+ output.push(line);
230
+ continue;
231
+ }
232
+ // Include headings
233
+ if (line.match(/^#{1,6}\s/)) {
234
+ output.push('');
235
+ output.push(line);
236
+ continue;
237
+ }
238
+ // Include table content
239
+ if (line.trim().startsWith('|')) {
240
+ inTable = true;
241
+ output.push(line);
242
+ continue;
243
+ }
244
+ if (inTable && !line.trim().startsWith('|')) {
245
+ inTable = false;
246
+ }
247
+ // Skip prose paragraphs (non-empty lines that aren't headings, code, or tables)
248
+ // But keep list items and blockquotes
249
+ if (line.trim().startsWith('- ') || line.trim().startsWith('* ') || line.trim().startsWith('> ')) {
250
+ output.push(line);
251
+ continue;
252
+ }
253
+ }
254
+ return output.join('\n').replace(/\n{3,}/g, '\n\n').trim();
255
+ }
256
+ //# sourceMappingURL=index.js.map
@@ -50,5 +50,18 @@ export declare function detectBranchingChains(branchingNodes: Set<string>, branc
50
50
  successNodes: Set<string>;
51
51
  failureNodes: Set<string>;
52
52
  }>): Map<string, string[]>;
53
+ /**
54
+ * Compute parallel levels from a control flow graph.
55
+ *
56
+ * Uses a modified Kahn's algorithm that tracks the "wave" (level) each node
57
+ * belongs to. Nodes at the same level with no direct data edges between them
58
+ * can execute in parallel.
59
+ *
60
+ * @param cfg - Control flow graph with nodes and edges
61
+ * @param branchingNodes - Set of branching node IDs to exclude from parallel groups
62
+ * @param scopedChildren - Set of per-port scoped child IDs to exclude from parallel groups
63
+ * @returns Array of groups — each inner array contains node IDs that can run in parallel
64
+ */
65
+ export declare function computeParallelLevels(cfg: ControlFlowGraph, branchingNodes: Set<string>, scopedChildren: Set<string>): string[][];
53
66
  export declare function determineExecutionOrder(workflow: TWorkflowAST, nodes: TNodeTypeAST[]): string[];
54
67
  //# sourceMappingURL=control-flow.d.ts.map
@@ -257,6 +257,80 @@ export function detectBranchingChains(branchingNodes, branchRegions) {
257
257
  }
258
258
  return chains;
259
259
  }
260
+ /**
261
+ * Compute parallel levels from a control flow graph.
262
+ *
263
+ * Uses a modified Kahn's algorithm that tracks the "wave" (level) each node
264
+ * belongs to. Nodes at the same level with no direct data edges between them
265
+ * can execute in parallel.
266
+ *
267
+ * @param cfg - Control flow graph with nodes and edges
268
+ * @param branchingNodes - Set of branching node IDs to exclude from parallel groups
269
+ * @param scopedChildren - Set of per-port scoped child IDs to exclude from parallel groups
270
+ * @returns Array of groups — each inner array contains node IDs that can run in parallel
271
+ */
272
+ export function computeParallelLevels(cfg, branchingNodes, scopedChildren) {
273
+ const inDegree = new Map(cfg.inDegree);
274
+ const queue = [];
275
+ const nodeLevel = new Map();
276
+ // Start with nodes at in-degree 0
277
+ inDegree.forEach((degree, node) => {
278
+ if (degree === 0) {
279
+ queue.push(node);
280
+ nodeLevel.set(node, 0);
281
+ }
282
+ });
283
+ // BFS assigning levels
284
+ while (queue.length > 0) {
285
+ const node = queue.shift();
286
+ const level = nodeLevel.get(node);
287
+ const successors = cfg.graph.get(node) || [];
288
+ for (const succ of successors) {
289
+ const newDeg = (inDegree.get(succ) || 0) - 1;
290
+ inDegree.set(succ, newDeg);
291
+ // Successor's level = max of all predecessor levels + 1
292
+ const currentLevel = nodeLevel.get(succ) ?? 0;
293
+ nodeLevel.set(succ, Math.max(currentLevel, level + 1));
294
+ if (newDeg === 0) {
295
+ queue.push(succ);
296
+ }
297
+ }
298
+ }
299
+ // Group nodes by level
300
+ const levelGroups = new Map();
301
+ nodeLevel.forEach((level, node) => {
302
+ if (isStartNode(node) || isExitNode(node))
303
+ return;
304
+ if (!levelGroups.has(level)) {
305
+ levelGroups.set(level, []);
306
+ }
307
+ levelGroups.get(level).push(node);
308
+ });
309
+ // Sort levels and split groups: branching/scoped nodes become their own single-node groups
310
+ const sortedLevels = Array.from(levelGroups.keys()).sort((a, b) => a - b);
311
+ const result = [];
312
+ for (const level of sortedLevels) {
313
+ const nodes = levelGroups.get(level);
314
+ const parallelGroup = [];
315
+ for (const node of nodes) {
316
+ if (branchingNodes.has(node) || scopedChildren.has(node)) {
317
+ // Branching and scoped nodes must run sequentially
318
+ if (parallelGroup.length > 0) {
319
+ result.push([...parallelGroup]);
320
+ parallelGroup.length = 0;
321
+ }
322
+ result.push([node]);
323
+ }
324
+ else {
325
+ parallelGroup.push(node);
326
+ }
327
+ }
328
+ if (parallelGroup.length > 0) {
329
+ result.push(parallelGroup);
330
+ }
331
+ }
332
+ return result;
333
+ }
260
334
  export function determineExecutionOrder(workflow, nodes) {
261
335
  const nodeMap = new Map();
262
336
  nodes.forEach((node) => nodeMap.set(node.functionName, node));
@@ -24,6 +24,7 @@ const BUILTIN_IMPORT_PREFIX = '@synergenius/flow-weaver/built-in-nodes';
24
24
  const BUILT_IN_HANDLERS = {
25
25
  delay: 'delay',
26
26
  waitForEvent: 'waitForEvent',
27
+ waitForAgent: 'waitForAgent',
27
28
  invokeWorkflow: 'invokeWorkflow',
28
29
  };
29
30
  /**
@@ -54,6 +55,8 @@ function verifyBuiltInSignature(nodeType) {
54
55
  return inputNames.includes('eventName');
55
56
  case 'invokeWorkflow':
56
57
  return inputNames.includes('functionId') && inputNames.includes('payload');
58
+ case 'waitForAgent':
59
+ return inputNames.includes('agentId') && inputNames.includes('context');
57
60
  default:
58
61
  return false;
59
62
  }
@@ -354,6 +357,21 @@ function emitNodeCall(nodeId, nodeType, workflow, nodeTypes, indent, lines) {
354
357
  lines.push('');
355
358
  return;
356
359
  }
360
+ if (builtIn === 'waitForAgent') {
361
+ const safeId = toValidIdentifier(nodeId);
362
+ const args = buildNodeArgs(nodeId, nodeType, workflow, nodeTypes);
363
+ const agentIdArg = args[1]; // execute=args[0], agentId=args[1]
364
+ // Map waitForAgent to step.waitForEvent with agent-scoped event name
365
+ lines.push(`${indent}const ${safeId}_raw = await step.waitForEvent('${nodeId}', {`);
366
+ lines.push(`${indent} event: \`agent/\${${agentIdArg}}\`,`);
367
+ lines.push(`${indent} timeout: '7d',`);
368
+ lines.push(`${indent}});`);
369
+ lines.push(`${indent}${safeId}_result = ${safeId}_raw`);
370
+ lines.push(`${indent} ? { onSuccess: true, onFailure: false, agentResult: ${safeId}_raw.data ?? {} }`);
371
+ lines.push(`${indent} : { onSuccess: false, onFailure: true, agentResult: {} };`);
372
+ lines.push('');
373
+ return;
374
+ }
357
375
  if (builtIn === 'invokeWorkflow') {
358
376
  const safeId = toValidIdentifier(nodeId);
359
377
  const args = buildNodeArgs(nodeId, nodeType, workflow, nodeTypes);
@@ -502,6 +520,11 @@ function emitPromiseAll(nodeIds, workflow, nodeTypes, indent, lines, generatedNo
502
520
  waitCall += ` })`;
503
521
  stepCalls.push(waitCall);
504
522
  }
523
+ else if (builtIn === 'waitForAgent') {
524
+ const args = buildNodeArgs(nodeId, nt, workflow, nodeTypes);
525
+ const agentIdArg = args[1];
526
+ stepCalls.push(`${indent} step.waitForEvent('${nodeId}', { event: \`agent/\${${agentIdArg}}\`, timeout: '7d' })`);
527
+ }
505
528
  else if (builtIn === 'invokeWorkflow') {
506
529
  const args = buildNodeArgs(nodeId, nt, workflow, nodeTypes);
507
530
  const functionIdArg = args[1];
@@ -1,7 +1,7 @@
1
1
  import { extractStartPorts } from '../ast/workflow-utils.js';
2
2
  import { mapToTypeScript } from '../type-mappings.js';
3
3
  import { buildNodeArgumentsWithContext, toValidIdentifier } from './code-utils.js';
4
- import { buildControlFlowGraph, detectBranchingChains, findAllBranchingNodes, findNodesInBranch, performKahnsTopologicalSort, isPerPortScopedChild, } from './control-flow.js';
4
+ import { buildControlFlowGraph, computeParallelLevels, detectBranchingChains, findAllBranchingNodes, findNodesInBranch, performKahnsTopologicalSort, isPerPortScopedChild, } from './control-flow.js';
5
5
  import { RESERVED_NODE_NAMES, RESERVED_PORT_NAMES, EXECUTION_STRATEGIES, isStartNode, isExitNode, isExecutePort, isSuccessPort, isFailurePort, } from '../constants.js';
6
6
  /**
7
7
  * Helper: Determine if an instance has pull execution enabled
@@ -246,6 +246,59 @@ export function generateControlFlowWithExecutionContext(workflow, nodeTypes, isA
246
246
  chainMembers.add(chain[i]);
247
247
  }
248
248
  });
249
+ // Compute parallel levels for async workflows
250
+ const perPortScopedChildrenSet = new Set();
251
+ workflow.instances.forEach((instance) => {
252
+ if (isPerPortScopedChild(instance, workflow, nodeTypes)) {
253
+ perPortScopedChildrenSet.add(instance.id);
254
+ }
255
+ });
256
+ const parallelGroupOf = new Map();
257
+ if (isAsync) {
258
+ const parallelLevels = computeParallelLevels(cfg, branchingNodes, perPortScopedChildrenSet);
259
+ for (const group of parallelLevels) {
260
+ if (group.length < 2)
261
+ continue;
262
+ // Filter out nodes that can't be parallelized
263
+ const eligible = group.filter((id) => {
264
+ if (nodesInBranches.has(id))
265
+ return false;
266
+ if (pullExecutionNodes.has(id))
267
+ return false;
268
+ if (nodeLevelScopedChildren.has(id))
269
+ return false;
270
+ if (nodesPromotedFromBranches.has(id))
271
+ return false;
272
+ if (chainMembers.has(id))
273
+ return false;
274
+ if (branchingNodes.has(id))
275
+ return false;
276
+ return true;
277
+ });
278
+ if (eligible.length >= 2) {
279
+ for (const nodeId of eligible) {
280
+ parallelGroupOf.set(nodeId, eligible);
281
+ }
282
+ }
283
+ }
284
+ }
285
+ // Pre-declare execution indices for parallel group nodes
286
+ if (parallelGroupOf.size > 0) {
287
+ const declared = new Set();
288
+ parallelGroupOf.forEach((_, instanceId) => {
289
+ if (declared.has(instanceId))
290
+ return;
291
+ declared.add(instanceId);
292
+ // Only declare if not already declared by earlier let declarations
293
+ if (!nodesInBranches.has(instanceId) &&
294
+ !branchingNodes.has(instanceId) &&
295
+ !pullExecutionNodes.has(instanceId) &&
296
+ !nodeLevelScopedChildren.has(instanceId)) {
297
+ lines.push(` let ${toValidIdentifier(instanceId)}Idx: number | undefined;`);
298
+ }
299
+ });
300
+ lines.push('');
301
+ }
249
302
  const generatedNodes = new Set();
250
303
  const availableVars = new Map();
251
304
  Object.keys(extractStartPorts(workflow)).forEach((portName) => {
@@ -272,6 +325,26 @@ export function generateControlFlowWithExecutionContext(workflow, nodeTypes, isA
272
325
  lines.push(` // Node '${instance.id}' skipped: type '${instance.nodeType}' not found`);
273
326
  return;
274
327
  }
328
+ // Handle parallel groups: emit Promise.all when hitting first node of a group
329
+ if (parallelGroupOf.has(instanceId)) {
330
+ const group = parallelGroupOf.get(instanceId);
331
+ const ungeneratedGroup = group.filter((id) => !generatedNodes.has(id));
332
+ if (ungeneratedGroup.length >= 2) {
333
+ generateParallelGroupWithContext(ungeneratedGroup, workflow, nodeTypes, availableVars, lines, generatedNodes, ' ', isAsync, 'ctx', bundleMode, branchingNodes);
334
+ // Generate scoped children for each parallel node
335
+ for (const parallelNodeId of ungeneratedGroup) {
336
+ const inst = workflow.instances.find((i) => i.id === parallelNodeId);
337
+ if (!inst)
338
+ continue;
339
+ const nt = nodeTypes.find((n) => n.name === inst.nodeType || n.functionName === inst.nodeType);
340
+ if (!nt)
341
+ continue;
342
+ generateScopedChildrenExecution(inst, nt, workflow, nodeTypes, generatedNodes, availableVars, lines, ' ', branchingNodes, branchRegions, isAsync, bundleMode);
343
+ }
344
+ return;
345
+ }
346
+ // else: degenerated to 1 or 0, fall through to sequential handling
347
+ }
275
348
  if (branchingNodes.has(instanceId)) {
276
349
  // Chain members are generated by their chain head — skip
277
350
  if (chainMembers.has(instanceId)) {
@@ -342,7 +415,8 @@ export function generateControlFlowWithExecutionContext(workflow, nodeTypes, isA
342
415
  const nodeUseConst = !nodesInBranches.has(instanceId) &&
343
416
  !branchingNodes.has(instanceId) &&
344
417
  !pullExecutionNodes.has(instanceId) &&
345
- !nodeLevelScopedChildren.has(instanceId);
418
+ !nodeLevelScopedChildren.has(instanceId) &&
419
+ !parallelGroupOf.has(instanceId);
346
420
  generateNodeCallWithContext(instance, nodeType, workflow, availableVars, lines, nodeTypes, ' ', isAsync, nodeUseConst, undefined, // instanceParent
347
421
  'ctx', // ctxVar
348
422
  bundleMode, false, // skipExecuteGuard
@@ -566,6 +640,52 @@ function generateScopedChildrenExecution(parentInstance, parentNodeType, workflo
566
640
  lines.push(`${indent}ctx.mergeScope(${parentInstance.id}_scopedCtx);`);
567
641
  lines.push(``);
568
642
  }
643
+ /**
644
+ * Generate a Promise.all block for 2+ parallel nodes in the unified generator.
645
+ *
646
+ * Each node's execution code is wrapped in an async IIFE inside Promise.all.
647
+ * The outer `let` variables for execution indices are assigned inside the IIFEs.
648
+ */
649
+ function generateParallelGroupWithContext(nodeIds, workflow, nodeTypes, availableVars, lines, generatedNodes, indent, isAsync, ctxVar, bundleMode, branchingNodes) {
650
+ // Collect code buffers for each node
651
+ const nodeBuffers = [];
652
+ for (const nodeId of nodeIds) {
653
+ const instance = workflow.instances.find((i) => i.id === nodeId);
654
+ if (!instance)
655
+ continue;
656
+ const nodeType = nodeTypes.find((nt) => nt.name === instance.nodeType || nt.functionName === instance.nodeType);
657
+ if (!nodeType)
658
+ continue;
659
+ const nodeLines = [];
660
+ generateNodeCallWithContext(instance, nodeType, workflow, availableVars, nodeLines, nodeTypes, `${indent} `, // indent for inside the async IIFE
661
+ isAsync, false, // useConst = false — outer let declarations
662
+ undefined, ctxVar, bundleMode, false, branchingNodes);
663
+ nodeBuffers.push({ id: nodeId, lines: nodeLines });
664
+ }
665
+ // Fallback: if only 0-1 nodes remain, emit directly without Promise.all
666
+ if (nodeBuffers.length < 2) {
667
+ for (const buf of nodeBuffers) {
668
+ for (const line of buf.lines) {
669
+ lines.push(line);
670
+ }
671
+ generatedNodes.add(buf.id);
672
+ }
673
+ return;
674
+ }
675
+ lines.push(`${indent}await Promise.all([`);
676
+ for (let i = 0; i < nodeBuffers.length; i++) {
677
+ const buf = nodeBuffers[i];
678
+ const comma = i < nodeBuffers.length - 1 ? ',' : '';
679
+ lines.push(`${indent} (async () => {`);
680
+ for (const line of buf.lines) {
681
+ lines.push(line);
682
+ }
683
+ lines.push(`${indent} })()${comma}`);
684
+ generatedNodes.add(buf.id);
685
+ }
686
+ lines.push(`${indent}]);`);
687
+ lines.push('');
688
+ }
569
689
  /**
570
690
  * Sort branch nodes topologically based on their dependencies
571
691
  *
@@ -135,6 +135,28 @@ export interface JSDocWorkflowConfig {
135
135
  route?: 'ok' | 'fail';
136
136
  }>;
137
137
  }>;
138
+ /** @fanOut macros that expand to 1-to-N connections */
139
+ fanOuts?: Array<{
140
+ source: {
141
+ node: string;
142
+ port: string;
143
+ };
144
+ targets: Array<{
145
+ node: string;
146
+ port?: string;
147
+ }>;
148
+ }>;
149
+ /** @fanIn macros that expand to N-to-1 connections */
150
+ fanIns?: Array<{
151
+ sources: Array<{
152
+ node: string;
153
+ port?: string;
154
+ }>;
155
+ target: {
156
+ node: string;
157
+ port: string;
158
+ };
159
+ }>;
138
160
  /** @trigger annotation — event name and/or cron schedule */
139
161
  trigger?: {
140
162
  event?: string;
@@ -287,6 +309,8 @@ export declare class JSDocParser {
287
309
  * Format: @path Start -> validator:ok -> classifier -> urgencyRouter:fail -> escalate -> Exit
288
310
  */
289
311
  private parsePathTag;
312
+ private parseFanOutTag;
313
+ private parseFanInTag;
290
314
  /**
291
315
  * Parse @trigger tag using Chevrotain parser.
292
316
  */
@@ -5,7 +5,7 @@
5
5
  */
6
6
  import { isExecutePort, isSuccessPort, isFailurePort, isScopedMandatoryPort } from './constants.js';
7
7
  import { inferDataTypeFromTS } from './type-mappings.js';
8
- import { parsePortLine, parseNodeLine, parseConnectLine, parsePositionLine, parseScopeLine, parseMapLine, parsePathLine, parseTriggerLine, parseCancelOnLine, parseThrottleLine, } from './chevrotain-parser/index.js';
8
+ import { parsePortLine, parseNodeLine, parseConnectLine, parsePositionLine, parseScopeLine, parseMapLine, parsePathLine, parseFanOutLine, parseFanInLine, parseTriggerLine, parseCancelOnLine, parseThrottleLine, } from './chevrotain-parser/index.js';
9
9
  /**
10
10
  * Extract the type of a field from a callback's return type using ts-morph Type API.
11
11
  *
@@ -260,6 +260,12 @@ export class JSDocParser {
260
260
  case 'path':
261
261
  this.parsePathTag(tag, config, warnings);
262
262
  break;
263
+ case 'fanOut':
264
+ this.parseFanOutTag(tag, config, warnings);
265
+ break;
266
+ case 'fanIn':
267
+ this.parseFanInTag(tag, config, warnings);
268
+ break;
263
269
  case 'trigger':
264
270
  this.parseTriggerTag(tag, config, warnings);
265
271
  break;
@@ -864,6 +870,40 @@ export class JSDocParser {
864
870
  steps: result.steps,
865
871
  });
866
872
  }
873
+ parseFanOutTag(tag, config, warnings) {
874
+ const comment = tag.getCommentText() || '';
875
+ const result = parseFanOutLine(`@fanOut ${comment}`, warnings);
876
+ if (!result) {
877
+ warnings.push(`Invalid @fanOut tag format: ${comment}`);
878
+ return;
879
+ }
880
+ if (!result.source.port) {
881
+ warnings.push(`@fanOut source must specify a port: ${comment}`);
882
+ return;
883
+ }
884
+ config.fanOuts = config.fanOuts || [];
885
+ config.fanOuts.push({
886
+ source: { node: result.source.node, port: result.source.port },
887
+ targets: result.targets,
888
+ });
889
+ }
890
+ parseFanInTag(tag, config, warnings) {
891
+ const comment = tag.getCommentText() || '';
892
+ const result = parseFanInLine(`@fanIn ${comment}`, warnings);
893
+ if (!result) {
894
+ warnings.push(`Invalid @fanIn tag format: ${comment}`);
895
+ return;
896
+ }
897
+ if (!result.target.port) {
898
+ warnings.push(`@fanIn target must specify a port: ${comment}`);
899
+ return;
900
+ }
901
+ config.fanIns = config.fanIns || [];
902
+ config.fanIns.push({
903
+ sources: result.sources,
904
+ target: { node: result.target.node, port: result.target.port },
905
+ });
906
+ }
867
907
  /**
868
908
  * Parse @trigger tag using Chevrotain parser.
869
909
  */