claude-autopm 1.17.0 → 1.20.0
This diff compares publicly available package versions as published to a supported registry. It is provided for informational purposes only and reflects the changes between versions as they appear in the public registry.
- package/README.md +159 -0
- package/autopm/.claude/agents/core/mcp-manager.md +1 -1
- package/autopm/.claude/commands/pm/context.md +11 -0
- package/autopm/.claude/commands/pm/epic-decompose.md +25 -2
- package/autopm/.claude/commands/pm/epic-oneshot.md +13 -0
- package/autopm/.claude/commands/pm/epic-start.md +19 -0
- package/autopm/.claude/commands/pm/epic-sync-modular.md +10 -10
- package/autopm/.claude/commands/pm/epic-sync.md +14 -14
- package/autopm/.claude/commands/pm/issue-start.md +50 -5
- package/autopm/.claude/commands/pm/issue-sync.md +15 -15
- package/autopm/.claude/commands/pm/what-next.md +11 -0
- package/autopm/.claude/mcp/MCP-REGISTRY.md +1 -1
- package/autopm/.claude/scripts/azure/active-work.js +2 -2
- package/autopm/.claude/scripts/azure/blocked.js +13 -13
- package/autopm/.claude/scripts/azure/daily.js +1 -1
- package/autopm/.claude/scripts/azure/dashboard.js +1 -1
- package/autopm/.claude/scripts/azure/feature-list.js +2 -2
- package/autopm/.claude/scripts/azure/feature-status.js +1 -1
- package/autopm/.claude/scripts/azure/next-task.js +1 -1
- package/autopm/.claude/scripts/azure/search.js +1 -1
- package/autopm/.claude/scripts/azure/setup.js +15 -15
- package/autopm/.claude/scripts/azure/sprint-report.js +2 -2
- package/autopm/.claude/scripts/azure/sync.js +1 -1
- package/autopm/.claude/scripts/azure/us-list.js +1 -1
- package/autopm/.claude/scripts/azure/us-status.js +1 -1
- package/autopm/.claude/scripts/azure/validate.js +13 -13
- package/autopm/.claude/scripts/lib/frontmatter-utils.sh +42 -7
- package/autopm/.claude/scripts/lib/logging-utils.sh +20 -16
- package/autopm/.claude/scripts/lib/validation-utils.sh +1 -1
- package/autopm/.claude/scripts/pm/context.js +338 -0
- package/autopm/.claude/scripts/pm/issue-sync/format-comment.sh +3 -3
- package/autopm/.claude/scripts/pm/lib/README.md +85 -0
- package/autopm/.claude/scripts/pm/lib/logger.js +78 -0
- package/autopm/.claude/scripts/pm/next.js +25 -1
- package/autopm/.claude/scripts/pm/what-next.js +660 -0
- package/bin/autopm.js +25 -0
- package/bin/commands/team.js +86 -0
- package/package.json +1 -1
- package/lib/agentExecutor.js.deprecated +0 -101
- package/lib/azure/cache.js +0 -80
- package/lib/azure/client.js +0 -77
- package/lib/azure/formatter.js +0 -177
- package/lib/commandHelpers.js +0 -177
- package/lib/context/manager.js +0 -290
- package/lib/documentation/manager.js +0 -528
- package/lib/github/workflow-manager.js +0 -546
- package/lib/helpers/azure-batch-api.js +0 -133
- package/lib/helpers/azure-cache-manager.js +0 -287
- package/lib/helpers/azure-parallel-processor.js +0 -158
- package/lib/helpers/azure-work-item-create.js +0 -278
- package/lib/helpers/gh-issue-create.js +0 -250
- package/lib/helpers/interactive-prompt.js +0 -336
- package/lib/helpers/output-manager.js +0 -335
- package/lib/helpers/progress-indicator.js +0 -258
- package/lib/performance/benchmarker.js +0 -429
- package/lib/pm/epic-decomposer.js +0 -273
- package/lib/pm/epic-syncer.js +0 -221
- package/lib/prdMetadata.js +0 -270
- package/lib/providers/azure/index.js +0 -234
- package/lib/providers/factory.js +0 -87
- package/lib/providers/github/index.js +0 -204
- package/lib/providers/interface.js +0 -73
- package/lib/python/scaffold-manager.js +0 -576
- package/lib/react/scaffold-manager.js +0 -745
- package/lib/regression/analyzer.js +0 -578
- package/lib/release/manager.js +0 -324
- package/lib/tailwind/manager.js +0 -486
- package/lib/traefik/manager.js +0 -484
- package/lib/utils/colors.js +0 -126
- package/lib/utils/config.js +0 -317
- package/lib/utils/filesystem.js +0 -316
- package/lib/utils/logger.js +0 -135
- package/lib/utils/prompts.js +0 -294
- package/lib/utils/shell.js +0 -237
- package/lib/validators/email-validator.js +0 -337
- package/lib/workflow/manager.js +0 -449
package/lib/workflow/manager.js
DELETED
@@ -1,449 +0,0 @@
-/**
- * Workflow Manager for LangGraph
- * Centralized workflow management functionality
- */
-
-const fs = require('fs').promises;
-const path = require('path');
-
-/**
- * Configuration constants
- */
-const CONFIG = {
-  directories: {
-    workflows: '.claude/workflows',
-    state: '.claude/workflows/state',
-    templates: '.claude/workflows/templates'
-  },
-  validation: {
-    requiredFields: ['name', 'nodes', 'edges'],
-    nodeTypes: ['input', 'output', 'llm', 'condition', 'transform', 'state', 'process', 'retriever', 'embedder', 'vectordb'],
-    edgeRequiredFields: ['from', 'to']
-  }
-};
-
-/**
- * Built-in workflow templates
- */
-const TEMPLATES = {
-  'qa-chain': {
-    name: 'qa-chain',
-    description: 'Question-answering chain template',
-    nodes: [
-      { id: 'input', type: 'input', prompt: 'Enter question' },
-      { id: 'retriever', type: 'retriever', source: 'knowledge-base' },
-      { id: 'llm', type: 'llm', model: 'gpt-3.5-turbo' },
-      { id: 'output', type: 'output' }
-    ],
-    edges: [
-      { from: 'input', to: 'retriever' },
-      { from: 'retriever', to: 'llm' },
-      { from: 'llm', to: 'output' }
-    ]
-  },
-  'rag-pipeline': {
-    name: 'rag-pipeline',
-    description: 'Retrieval-augmented generation pipeline',
-    nodes: [
-      { id: 'input', type: 'input' },
-      { id: 'embedder', type: 'embedder', model: 'text-embedding-ada-002' },
-      { id: 'vectordb', type: 'vectordb', database: 'pinecone' },
-      { id: 'retriever', type: 'retriever' },
-      { id: 'llm', type: 'llm', model: 'gpt-4' },
-      { id: 'output', type: 'output' }
-    ],
-    edges: [
-      { from: 'input', to: 'embedder' },
-      { from: 'embedder', to: 'vectordb' },
-      { from: 'vectordb', to: 'retriever' },
-      { from: 'retriever', to: 'llm' },
-      { from: 'llm', to: 'output' }
-    ]
-  },
-  'agent-loop': {
-    name: 'agent-loop',
-    description: 'Agent with tool-use loop',
-    nodes: [
-      { id: 'input', type: 'input' },
-      { id: 'agent', type: 'llm', model: 'gpt-4' },
-      { id: 'tool-check', type: 'condition', expression: 'needs_tool' },
-      { id: 'tool-use', type: 'process', tool: 'execute' },
-      { id: 'output', type: 'output' }
-    ],
-    edges: [
-      { from: 'input', to: 'agent' },
-      { from: 'agent', to: 'tool-check' },
-      { from: 'tool-check', to: 'tool-use', condition: true },
-      { from: 'tool-check', to: 'output', condition: false },
-      { from: 'tool-use', to: 'agent' }
-    ]
-  }
-};
-
-class WorkflowManager {
-  constructor(projectRoot = process.cwd()) {
-    this.projectRoot = projectRoot;
-    this.workflowsDir = path.join(projectRoot, CONFIG.directories.workflows);
-    this.stateDir = path.join(projectRoot, CONFIG.directories.state);
-  }
-
-  /**
-   * Validates workflow structure
-   * @param {object} workflow - Workflow to validate
-   * @throws {Error} - If validation fails
-   */
-  validateWorkflow(workflow) {
-    // Check required fields
-    for (const field of CONFIG.validation.requiredFields) {
-      if (!workflow[field]) {
-        throw new Error(`Invalid workflow: missing required field '${field}'`);
-      }
-    }
-
-    // Validate nodes
-    if (!Array.isArray(workflow.nodes) || workflow.nodes.length === 0) {
-      throw new Error('Invalid workflow: nodes must be a non-empty array');
-    }
-
-    for (const node of workflow.nodes) {
-      if (!node.id || !node.type) {
-        throw new Error('Invalid workflow: each node must have id and type');
-      }
-    }
-
-    // Validate edges
-    if (!Array.isArray(workflow.edges)) {
-      throw new Error('Invalid workflow: edges must be an array');
-    }
-
-    for (const edge of workflow.edges) {
-      if (!edge.from || !edge.to) {
-        throw new Error('Invalid workflow: each edge must have from and to');
-      }
-    }
-
-    // Additional validation
-    const nodeIds = new Set(workflow.nodes.map(n => n.id));
-    for (const edge of workflow.edges) {
-      if (!nodeIds.has(edge.from)) {
-        throw new Error(`Invalid workflow: edge references unknown node '${edge.from}'`);
-      }
-      if (!nodeIds.has(edge.to)) {
-        throw new Error(`Invalid workflow: edge references unknown node '${edge.to}'`);
-      }
-    }
-  }
-
-  /**
-   * Creates a workflow from definition
-   * @param {string|object} definition - Path to file or workflow object
-   * @returns {Promise<object>} - Created workflow
-   */
-  async createWorkflow(definition) {
-    let workflow;
-
-    // Load from file if string path provided
-    if (typeof definition === 'string') {
-      try {
-        const content = await fs.readFile(definition, 'utf8');
-        workflow = JSON.parse(content);
-      } catch (error) {
-        if (error.code === 'ENOENT') {
-          throw new Error(`Definition file not found: ${definition}`);
-        }
-        if (error instanceof SyntaxError) {
-          throw new Error(`Invalid JSON in definition file: ${error.message}`);
-        }
-        throw error;
-      }
-    } else {
-      workflow = definition;
-    }
-
-    // Validate workflow
-    this.validateWorkflow(workflow);
-
-    // Save workflow
-    await fs.mkdir(this.workflowsDir, { recursive: true });
-    const workflowPath = path.join(this.workflowsDir, `${workflow.name}.json`);
-    await fs.writeFile(workflowPath, JSON.stringify(workflow, null, 2));
-
-    return { workflow, path: workflowPath };
-  }
-
-  /**
-   * Lists available workflows
-   * @returns {Promise<array>} - List of workflows
-   */
-  async listWorkflows() {
-    try {
-      const files = await fs.readdir(this.workflowsDir);
-      const workflows = [];
-
-      for (const file of files) {
-        if (file.endsWith('.json') && !file.startsWith('.')) {
-          const workflowPath = path.join(this.workflowsDir, file);
-          try {
-            const content = await fs.readFile(workflowPath, 'utf8');
-            const workflow = JSON.parse(content);
-            workflows.push({
-              name: workflow.name || path.basename(file, '.json'),
-              description: workflow.description || 'No description',
-              file: file,
-              nodes: workflow.nodes ? workflow.nodes.length : 0,
-              edges: workflow.edges ? workflow.edges.length : 0
-            });
-          } catch (error) {
-            // Skip invalid workflow files
-            continue;
-          }
-        }
-      }
-
-      return workflows;
-    } catch (error) {
-      if (error.code === 'ENOENT') {
-        return [];
-      }
-      throw error;
-    }
-  }
-
-  /**
-   * Loads a workflow by name
-   * @param {string} workflowName - Name of workflow
-   * @returns {Promise<object>} - Loaded workflow
-   */
-  async loadWorkflow(workflowName) {
-    const workflowPath = path.join(this.workflowsDir, `${workflowName}.json`);
-
-    try {
-      const content = await fs.readFile(workflowPath, 'utf8');
-      return JSON.parse(content);
-    } catch (error) {
-      if (error.code === 'ENOENT') {
-        throw new Error(`Workflow not found: ${workflowName}`);
-      }
-      throw error;
-    }
-  }
-
-  /**
-   * Saves workflow state
-   * @param {string} workflowName - Workflow name
-   * @param {object} state - State to save
-   * @returns {Promise<void>}
-   */
-  async saveState(workflowName, state) {
-    await fs.mkdir(this.stateDir, { recursive: true });
-    const statePath = path.join(this.stateDir, `${workflowName}.json`);
-    await fs.writeFile(statePath, JSON.stringify(state, null, 2));
-  }
-
-  /**
-   * Loads workflow state
-   * @param {string} workflowName - Workflow name
-   * @returns {Promise<object>} - Loaded state
-   */
-  async loadState(workflowName) {
-    const statePath = path.join(this.stateDir, `${workflowName}.json`);
-
-    try {
-      const content = await fs.readFile(statePath, 'utf8');
-      return JSON.parse(content);
-    } catch (error) {
-      if (error.code === 'ENOENT') {
-        throw new Error(`No saved state for workflow: ${workflowName}`);
-      }
-      throw error;
-    }
-  }
-
-  /**
-   * Creates workflow from template
-   * @param {string} templateName - Template to use
-   * @param {string} workflowName - Name for new workflow
-   * @returns {Promise<object>} - Created workflow
-   */
-  async createFromTemplate(templateName, workflowName) {
-    const template = TEMPLATES[templateName];
-
-    if (!template) {
-      throw new Error(`Template not found: ${templateName}`);
-    }
-
-    // Create workflow from template
-    const workflow = {
-      ...template,
-      name: workflowName,
-      createdFrom: templateName,
-      createdAt: new Date().toISOString()
-    };
-
-    // Use createWorkflow to save it
-    return this.createWorkflow(workflow);
-  }
-
-  /**
-   * Gets available templates
-   * @returns {array} - List of templates
-   */
-  getTemplates() {
-    return Object.entries(TEMPLATES).map(([name, template]) => ({
-      name: name,
-      description: template.description || 'No description',
-      nodes: template.nodes.length,
-      edges: template.edges.length
-    }));
-  }
-
-  /**
-   * Exports workflow as diagram
-   * @param {string} workflowName - Workflow to export
-   * @param {string} format - Export format
-   * @returns {Promise<string>} - Exported content
-   */
-  async exportWorkflow(workflowName, format = 'dot') {
-    const workflow = await this.loadWorkflow(workflowName);
-
-    switch (format) {
-      case 'dot':
-        return this.exportAsDot(workflow);
-      case 'mermaid':
-        return this.exportAsMermaid(workflow);
-      case 'json':
-        return JSON.stringify(workflow, null, 2);
-      default:
-        throw new Error(`Unsupported export format: ${format}`);
-    }
-  }
-
-  /**
-   * Exports workflow as Graphviz DOT
-   * @private
-   */
-  exportAsDot(workflow) {
-    let dot = 'digraph workflow {\n';
-    dot += '  rankdir=LR;\n';
-    dot += '  node [shape=box];\n\n';
-
-    // Add nodes
-    for (const node of workflow.nodes) {
-      dot += `  "${node.id}" [label="${node.id}\\n(${node.type})"];\n`;
-    }
-
-    dot += '\n';
-
-    // Add edges
-    for (const edge of workflow.edges) {
-      const label = edge.condition !== undefined ? ` [label="${edge.condition}"]` : '';
-      dot += `  "${edge.from}" -> "${edge.to}"${label};\n`;
-    }
-
-    dot += '}\n';
-    return dot;
-  }
-
-  /**
-   * Exports workflow as Mermaid diagram
-   * @private
-   */
-  exportAsMermaid(workflow) {
-    let mermaid = 'graph LR\n';
-
-    // Add nodes
-    for (const node of workflow.nodes) {
-      const shape = node.type === 'condition' ? '{{' + node.id + '}}' : '[' + node.id + ']';
-      mermaid += `  ${node.id}${shape}\n`;
-    }
-
-    // Add edges
-    for (const edge of workflow.edges) {
-      const label = edge.condition !== undefined ? `|${edge.condition}|` : '';
-      mermaid += `  ${edge.from} -->${label} ${edge.to}\n`;
-    }
-
-    return mermaid;
-  }
-
-  /**
-   * Simulates workflow execution
-   * @param {string} workflowName - Workflow to run
-   * @param {object} options - Execution options
-   * @returns {Promise<object>} - Execution result
-   */
-  async runWorkflow(workflowName, options = {}) {
-    const workflow = await this.loadWorkflow(workflowName);
-
-    // In dry-run mode, just simulate
-    if (options.dryRun) {
-      const result = {
-        dryRun: true,
-        workflow: workflow.name,
-        nodes: workflow.nodes.length,
-        edges: workflow.edges.length
-      };
-
-      // Simulate state saving for persistent nodes
-      if (workflow.nodes.some(n => n.persistent)) {
-        await this.saveState(workflowName, {
-          workflowId: workflowName,
-          currentNode: 'state',
-          data: { value: 'test' }
-        });
-      }
-
-      // Simulate uppercase transformation
-      if (workflow.nodes.some(n => n.operation === 'uppercase')) {
-        result.simulatedOutput = 'HELLO';
-      }
-
-      return result;
-    }
-
-    // Real execution would go here
-    // For now, just save state if needed
-    if (workflow.nodes.some(n => n.persistent)) {
-      await this.saveState(workflowName, {
-        workflowId: workflowName,
-        currentNode: 'complete',
-        data: { executed: true }
-      });
-    }
-
-    return {
-      executed: true,
-      workflow: workflow.name,
-      timestamp: new Date().toISOString()
-    };
-  }
-
-  /**
-   * Resumes workflow from saved state
-   * @param {string} workflowName - Workflow to resume
-   * @param {object} options - Resume options
-   * @returns {Promise<object>} - Resume result
-   */
-  async resumeWorkflow(workflowName, options = {}) {
-    const state = await this.loadState(workflowName);
-
-    if (options.dryRun) {
-      return {
-        resumed: true,
-        dryRun: true,
-        workflowId: state.workflowId,
-        currentNode: state.currentNode,
-        data: state.data
-      };
-    }
-
-    // Real resume would go here
-    return {
-      resumed: true,
-      state: state,
-      timestamp: new Date().toISOString()
-    };
-  }
-}
-
-module.exports = WorkflowManager;