lsh-framework 1.2.0 → 1.3.0

This diff shows the changes between publicly available package versions as released to a supported registry. It is provided for informational purposes only and reflects the package contents as they appear in their respective public registries.
Files changed (74)
  1. package/README.md +40 -3
  2. package/dist/cli.js +104 -486
  3. package/dist/commands/doctor.js +427 -0
  4. package/dist/commands/init.js +371 -0
  5. package/dist/constants/api.js +94 -0
  6. package/dist/constants/commands.js +64 -0
  7. package/dist/constants/config.js +56 -0
  8. package/dist/constants/database.js +21 -0
  9. package/dist/constants/errors.js +79 -0
  10. package/dist/constants/index.js +28 -0
  11. package/dist/constants/paths.js +28 -0
  12. package/dist/constants/ui.js +73 -0
  13. package/dist/constants/validation.js +124 -0
  14. package/dist/daemon/lshd.js +11 -32
  15. package/dist/lib/daemon-client-helper.js +7 -4
  16. package/dist/lib/daemon-client.js +9 -2
  17. package/dist/lib/format-utils.js +163 -0
  18. package/dist/lib/fuzzy-match.js +123 -0
  19. package/dist/lib/job-manager.js +2 -1
  20. package/dist/lib/platform-utils.js +211 -0
  21. package/dist/lib/secrets-manager.js +11 -1
  22. package/dist/lib/string-utils.js +128 -0
  23. package/dist/services/daemon/daemon-registrar.js +3 -2
  24. package/dist/services/secrets/secrets.js +119 -59
  25. package/package.json +10 -74
  26. package/dist/app.js +0 -33
  27. package/dist/cicd/analytics.js +0 -261
  28. package/dist/cicd/auth.js +0 -269
  29. package/dist/cicd/cache-manager.js +0 -172
  30. package/dist/cicd/data-retention.js +0 -305
  31. package/dist/cicd/performance-monitor.js +0 -224
  32. package/dist/cicd/webhook-receiver.js +0 -640
  33. package/dist/commands/api.js +0 -346
  34. package/dist/commands/theme.js +0 -261
  35. package/dist/commands/zsh-import.js +0 -240
  36. package/dist/components/App.js +0 -1
  37. package/dist/components/Divider.js +0 -29
  38. package/dist/components/REPL.js +0 -43
  39. package/dist/components/Terminal.js +0 -232
  40. package/dist/components/UserInput.js +0 -30
  41. package/dist/daemon/api-server.js +0 -316
  42. package/dist/daemon/monitoring-api.js +0 -220
  43. package/dist/lib/api-error-handler.js +0 -185
  44. package/dist/lib/associative-arrays.js +0 -285
  45. package/dist/lib/base-api-server.js +0 -290
  46. package/dist/lib/brace-expansion.js +0 -160
  47. package/dist/lib/builtin-commands.js +0 -439
  48. package/dist/lib/executors/builtin-executor.js +0 -52
  49. package/dist/lib/extended-globbing.js +0 -411
  50. package/dist/lib/extended-parameter-expansion.js +0 -227
  51. package/dist/lib/interactive-shell.js +0 -460
  52. package/dist/lib/job-builtins.js +0 -582
  53. package/dist/lib/pathname-expansion.js +0 -216
  54. package/dist/lib/script-runner.js +0 -226
  55. package/dist/lib/shell-executor.js +0 -2504
  56. package/dist/lib/shell-parser.js +0 -958
  57. package/dist/lib/shell-types.js +0 -6
  58. package/dist/lib/shell.lib.js +0 -40
  59. package/dist/lib/theme-manager.js +0 -476
  60. package/dist/lib/variable-expansion.js +0 -385
  61. package/dist/lib/zsh-compatibility.js +0 -659
  62. package/dist/lib/zsh-import-manager.js +0 -707
  63. package/dist/lib/zsh-options.js +0 -328
  64. package/dist/pipeline/job-tracker.js +0 -491
  65. package/dist/pipeline/mcli-bridge.js +0 -309
  66. package/dist/pipeline/pipeline-service.js +0 -1119
  67. package/dist/pipeline/workflow-engine.js +0 -870
  68. package/dist/services/api/api.js +0 -58
  69. package/dist/services/api/auth.js +0 -35
  70. package/dist/services/api/config.js +0 -7
  71. package/dist/services/api/file.js +0 -22
  72. package/dist/services/shell/shell.js +0 -28
  73. package/dist/services/zapier.js +0 -16
  74. package/dist/simple-api-server.js +0 -148
package/dist/pipeline/workflow-engine.js (removed)
@@ -1,870 +0,0 @@
1
- import { EventEmitter } from 'events';
2
- import { v4 as uuidv4 } from 'uuid';
3
- import { JobStatus, JobPriority } from './job-tracker.js';
4
- export var WorkflowStatus;
5
- (function (WorkflowStatus) {
6
- WorkflowStatus["PENDING"] = "pending";
7
- WorkflowStatus["RUNNING"] = "running";
8
- WorkflowStatus["COMPLETED"] = "completed";
9
- WorkflowStatus["FAILED"] = "failed";
10
- WorkflowStatus["CANCELLED"] = "cancelled";
11
- WorkflowStatus["PAUSED"] = "paused";
12
- })(WorkflowStatus || (WorkflowStatus = {}));
13
- export var NodeStatus;
14
- (function (NodeStatus) {
15
- NodeStatus["PENDING"] = "pending";
16
- NodeStatus["WAITING"] = "waiting";
17
- NodeStatus["READY"] = "ready";
18
- NodeStatus["RUNNING"] = "running";
19
- NodeStatus["COMPLETED"] = "completed";
20
- NodeStatus["FAILED"] = "failed";
21
- NodeStatus["SKIPPED"] = "skipped";
22
- NodeStatus["CANCELLED"] = "cancelled";
23
- })(NodeStatus || (NodeStatus = {}));
24
- export class WorkflowEngine extends EventEmitter {
25
- pool;
26
- jobTracker;
27
- runningExecutions = new Map();
28
- pollInterval = null;
29
- constructor(pool, jobTracker) {
30
- super();
31
- this.pool = pool;
32
- this.jobTracker = jobTracker;
33
- // Listen to job completion events
34
- this.setupJobEventListeners();
35
- }
36
- setupJobEventListeners() {
37
- this.jobTracker.on('execution:completed', async (event) => {
38
- await this.handleJobCompletion(event.jobId, event.executionId, 'completed', event.data);
39
- });
40
- this.jobTracker.on('execution:failed', async (event) => {
41
- await this.handleJobCompletion(event.jobId, event.executionId, 'failed', event.data);
42
- });
43
- }
44
- // Workflow Definition Management
45
- async createWorkflow(definition) {
46
- const id = definition.id || uuidv4();
47
- // Validate workflow
48
- this.validateWorkflow(definition);
49
- const query = `
50
- INSERT INTO pipeline_workflows (
51
- id, name, description, version, definition, schedule_cron,
52
- config, default_parameters, tags, owner, team
53
- ) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11)
54
- RETURNING *
55
- `;
56
- const values = [
57
- id,
58
- definition.name,
59
- definition.description,
60
- definition.version,
61
- JSON.stringify(definition),
62
- definition.schedule?.cron,
63
- JSON.stringify({ timeout: definition.timeout, maxConcurrentRuns: definition.maxConcurrentRuns }),
64
- JSON.stringify(definition.parameters || {}),
65
- definition.tags,
66
- definition.owner,
67
- definition.team
68
- ];
69
- const _result = await this.pool.query(query, values);
70
- this.emit('workflow:created', {
71
- type: 'workflow:created',
72
- workflowId: id,
73
- data: definition,
74
- timestamp: new Date()
75
- });
76
- return { ...definition, id };
77
- }
78
- async getWorkflow(workflowId) {
79
- const query = 'SELECT * FROM pipeline_workflows WHERE id = $1';
80
- const result = await this.pool.query(query, [workflowId]);
81
- if (result.rows.length === 0) {
82
- return null;
83
- }
84
- const row = result.rows[0];
85
- return JSON.parse(row.definition);
86
- }
87
- async updateWorkflow(workflowId, definition) {
88
- this.validateWorkflow(definition);
89
- const query = `
90
- UPDATE pipeline_workflows
91
- SET definition = $2, version = $3, updated_at = CURRENT_TIMESTAMP
92
- WHERE id = $1
93
- `;
94
- await this.pool.query(query, [workflowId, JSON.stringify(definition), definition.version]);
95
- this.emit('workflow:updated', {
96
- type: 'workflow:updated',
97
- workflowId,
98
- data: definition,
99
- timestamp: new Date()
100
- });
101
- }
102
- async deleteWorkflow(workflowId) {
103
- // Check for running executions
104
- const runningCount = await this.getRunningExecutionsCount(workflowId);
105
- if (runningCount > 0) {
106
- throw new Error(`Cannot delete workflow with ${runningCount} running executions`);
107
- }
108
- const query = 'UPDATE pipeline_workflows SET is_active = false WHERE id = $1';
109
- await this.pool.query(query, [workflowId]);
110
- this.emit('workflow:deleted', {
111
- type: 'workflow:deleted',
112
- workflowId,
113
- timestamp: new Date()
114
- });
115
- }
116
- // Workflow Execution
117
- async executeWorkflow(workflowId, triggeredBy, triggerType, parameters = {}) {
118
- const workflow = await this.getWorkflow(workflowId);
119
- if (!workflow) {
120
- throw new Error(`Workflow ${workflowId} not found`);
121
- }
122
- // Check concurrency limits
123
- const runningCount = await this.getRunningExecutionsCount(workflowId);
124
- const maxConcurrent = workflow.maxConcurrentRuns || 1;
125
- if (runningCount >= maxConcurrent) {
126
- throw new Error(`Workflow ${workflowId} has reached max concurrent runs (${maxConcurrent})`);
127
- }
128
- // Create execution
129
- const execution = {
130
- id: uuidv4(),
131
- workflowId,
132
- runId: this.generateRunId(workflow.name),
133
- status: WorkflowStatus.PENDING,
134
- triggeredBy,
135
- triggerType,
136
- parameters: { ...workflow.parameters, ...parameters },
137
- startedAt: new Date(),
138
- completedStages: [],
139
- failedStages: [],
140
- nodeStates: {}
141
- };
142
- // Initialize node states
143
- for (const node of workflow.nodes) {
144
- execution.nodeStates[node.id] = {
145
- nodeId: node.id,
146
- status: NodeStatus.PENDING,
147
- retryCount: 0
148
- };
149
- }
150
- // Store execution
151
- await this.storeExecution(execution);
152
- // Add to running executions
153
- this.runningExecutions.set(execution.id, execution);
154
- // Start execution
155
- await this.startExecution(execution);
156
- this.emit('workflow:started', {
157
- type: 'workflow:started',
158
- workflowId,
159
- executionId: execution.id,
160
- data: execution,
161
- timestamp: new Date()
162
- });
163
- return execution;
164
- }
165
- async startExecution(execution) {
166
- execution.status = WorkflowStatus.RUNNING;
167
- await this.updateExecution(execution);
168
- // Find ready nodes (no dependencies)
169
- const readyNodes = await this.findReadyNodes(execution);
170
- // Start ready nodes
171
- for (const nodeId of readyNodes) {
172
- await this.executeNode(execution, nodeId);
173
- }
174
- }
175
- async executeNode(execution, nodeId) {
176
- const workflow = await this.getWorkflow(execution.workflowId);
177
- if (!workflow)
178
- return;
179
- const node = workflow.nodes.find(n => n.id === nodeId);
180
- if (!node)
181
- return;
182
- const nodeState = execution.nodeStates[nodeId];
183
- nodeState.status = NodeStatus.RUNNING;
184
- nodeState.startedAt = new Date();
185
- await this.updateExecution(execution);
186
- try {
187
- switch (node.type) {
188
- case 'job':
189
- await this.executeJobNode(execution, node);
190
- break;
191
- case 'condition':
192
- await this.executeConditionNode(execution, node);
193
- break;
194
- case 'parallel':
195
- await this.executeParallelNode(execution, node);
196
- break;
197
- case 'wait':
198
- await this.executeWaitNode(execution, node);
199
- break;
200
- default:
201
- throw new Error(`Unknown node type: ${node.type}`);
202
- }
203
- }
204
- catch (error) {
205
- console.error('Error executing node:', error);
206
- await this.handleNodeFailure(execution, node, error);
207
- }
208
- }
209
- async executeJobNode(execution, node) {
210
- // Create job from node configuration
211
- const config = node.config;
212
- const jobConfig = {
213
- name: `${execution.runId}-${node.name}`,
214
- type: config.type || 'workflow_job',
215
- sourceSystem: 'workflow',
216
- targetSystem: config.targetSystem || 'mcli',
217
- status: JobStatus.PENDING,
218
- priority: config.priority || JobPriority.NORMAL,
219
- config: {
220
- ...config,
221
- workflowExecutionId: execution.id,
222
- workflowNodeId: node.id,
223
- workflowRunId: execution.runId
224
- },
225
- parameters: {
226
- ...execution.parameters,
227
- ...(config.parameters || {})
228
- },
229
- owner: execution.triggeredBy,
230
- tags: [`workflow:${execution.workflowId}`, `run:${execution.runId}`]
231
- };
232
- // Submit job
233
- const job = await this.jobTracker.createJob(jobConfig);
234
- // Update node state
235
- const nodeState = execution.nodeStates[node.id];
236
- nodeState.jobId = job.id;
237
- await this.updateExecution(execution);
238
- // Job completion will be handled by event listeners
239
- }
240
- async executeConditionNode(execution, node) {
241
- // Evaluate condition
242
- const result = this.evaluateCondition(node.condition || 'true', execution.parameters);
243
- const nodeState = execution.nodeStates[node.id];
244
- nodeState.status = result ? NodeStatus.COMPLETED : NodeStatus.SKIPPED;
245
- nodeState.completedAt = new Date();
246
- nodeState.result = { conditionResult: result };
247
- if (nodeState.startedAt) {
248
- nodeState.durationMs = nodeState.completedAt.getTime() - nodeState.startedAt.getTime();
249
- }
250
- await this.updateExecution(execution);
251
- // Continue with downstream nodes
252
- await this.checkAndContinueExecution(execution);
253
- }
254
- async executeParallelNode(execution, node) {
255
- // Parallel nodes are just markers - their completion is determined by their dependencies
256
- const nodeState = execution.nodeStates[node.id];
257
- nodeState.status = NodeStatus.COMPLETED;
258
- nodeState.completedAt = new Date();
259
- if (nodeState.startedAt) {
260
- nodeState.durationMs = nodeState.completedAt.getTime() - nodeState.startedAt.getTime();
261
- }
262
- await this.updateExecution(execution);
263
- await this.checkAndContinueExecution(execution);
264
- }
265
- async executeWaitNode(execution, node) {
266
- const config = node.config;
267
- const waitMs = config.waitMs || 1000;
268
- setTimeout(async () => {
269
- const nodeState = execution.nodeStates[node.id];
270
- nodeState.status = NodeStatus.COMPLETED;
271
- nodeState.completedAt = new Date();
272
- if (nodeState.startedAt) {
273
- nodeState.durationMs = nodeState.completedAt.getTime() - nodeState.startedAt.getTime();
274
- }
275
- await this.updateExecution(execution);
276
- await this.checkAndContinueExecution(execution);
277
- }, waitMs);
278
- }
279
- async handleJobCompletion(jobId, executionId, status, data) {
280
- // Find workflow execution by job ID
281
- let targetExecution = null;
282
- let targetNodeId = null;
283
- for (const [_execId, execution] of this.runningExecutions) {
284
- for (const [nodeId, nodeState] of Object.entries(execution.nodeStates)) {
285
- if (nodeState.jobId === jobId) {
286
- targetExecution = execution;
287
- targetNodeId = nodeId;
288
- break;
289
- }
290
- }
291
- if (targetExecution)
292
- break;
293
- }
294
- if (!targetExecution || !targetNodeId) {
295
- return; // Job not part of workflow
296
- }
297
- const nodeState = targetExecution.nodeStates[targetNodeId];
298
- nodeState.status = status === 'completed' ? NodeStatus.COMPLETED : NodeStatus.FAILED;
299
- nodeState.completedAt = new Date();
300
- nodeState.result = data;
301
- if (nodeState.startedAt) {
302
- nodeState.durationMs = nodeState.completedAt.getTime() - nodeState.startedAt.getTime();
303
- }
304
- if (status === 'failed') {
305
- const errorData = data;
306
- nodeState.error = errorData.errorMessage || 'Job failed';
307
- // Check retry policy
308
- const workflow = await this.getWorkflow(targetExecution.workflowId);
309
- const node = workflow?.nodes.find(n => n.id === targetNodeId);
310
- if (node?.retryPolicy && nodeState.retryCount < node.retryPolicy.maxRetries) {
311
- await this.scheduleNodeRetry(targetExecution, targetNodeId, node.retryPolicy);
312
- return;
313
- }
314
- targetExecution.failedStages.push(targetNodeId);
315
- await this.handleWorkflowFailure(targetExecution, `Node ${targetNodeId} failed: ${nodeState.error}`);
316
- }
317
- else {
318
- targetExecution.completedStages.push(targetNodeId);
319
- await this.checkAndContinueExecution(targetExecution);
320
- }
321
- await this.updateExecution(targetExecution);
322
- }
323
- async scheduleNodeRetry(execution, nodeId, retryPolicy) {
324
- const nodeState = execution.nodeStates[nodeId];
325
- nodeState.retryCount++;
326
- const backoffMs = retryPolicy.backoffMs * Math.pow(retryPolicy.backoffMultiplier, nodeState.retryCount - 1);
327
- nodeState.nextRetryAt = new Date(Date.now() + backoffMs);
328
- nodeState.status = NodeStatus.WAITING;
329
- await this.updateExecution(execution);
330
- // Schedule retry
331
- setTimeout(async () => {
332
- if (execution.status === WorkflowStatus.RUNNING) {
333
- await this.executeNode(execution, nodeId);
334
- }
335
- }, backoffMs);
336
- }
337
- async checkAndContinueExecution(execution) {
338
- const workflow = await this.getWorkflow(execution.workflowId);
339
- if (!workflow)
340
- return;
341
- // Check if workflow is complete
342
- const allNodes = workflow.nodes.map(n => n.id);
343
- const completedNodes = allNodes.filter(id => execution.nodeStates[id].status === NodeStatus.COMPLETED ||
344
- execution.nodeStates[id].status === NodeStatus.SKIPPED);
345
- if (completedNodes.length === allNodes.length) {
346
- await this.completeWorkflow(execution);
347
- return;
348
- }
349
- // Find newly ready nodes
350
- const readyNodes = await this.findReadyNodes(execution);
351
- // Start ready nodes
352
- for (const nodeId of readyNodes) {
353
- if (execution.nodeStates[nodeId].status === NodeStatus.PENDING) {
354
- await this.executeNode(execution, nodeId);
355
- }
356
- }
357
- }
358
- async findReadyNodes(execution) {
359
- const workflow = await this.getWorkflow(execution.workflowId);
360
- if (!workflow)
361
- return [];
362
- const readyNodes = [];
363
- for (const node of workflow.nodes) {
364
- const nodeState = execution.nodeStates[node.id];
365
- // Skip if not pending
366
- if (nodeState.status !== NodeStatus.PENDING) {
367
- continue;
368
- }
369
- // Check dependencies
370
- const dependenciesSatisfied = node.dependencies.every(depId => {
371
- const depState = execution.nodeStates[depId];
372
- return depState?.status === NodeStatus.COMPLETED || depState?.status === NodeStatus.SKIPPED;
373
- });
374
- if (dependenciesSatisfied) {
375
- readyNodes.push(node.id);
376
- }
377
- }
378
- return readyNodes;
379
- }
380
- async completeWorkflow(execution) {
381
- execution.status = WorkflowStatus.COMPLETED;
382
- execution.completedAt = new Date();
383
- execution.durationMs = execution.completedAt.getTime() - execution.startedAt.getTime();
384
- await this.updateExecution(execution);
385
- this.runningExecutions.delete(execution.id);
386
- this.emit('workflow:completed', {
387
- type: 'workflow:completed',
388
- workflowId: execution.workflowId,
389
- executionId: execution.id,
390
- data: execution,
391
- timestamp: new Date()
392
- });
393
- }
394
- async handleWorkflowFailure(execution, errorMessage) {
395
- execution.status = WorkflowStatus.FAILED;
396
- execution.completedAt = new Date();
397
- execution.durationMs = execution.completedAt.getTime() - execution.startedAt.getTime();
398
- execution.errorMessage = errorMessage;
399
- // Cancel running nodes
400
- for (const [_nodeId, nodeState] of Object.entries(execution.nodeStates)) {
401
- if (nodeState.status === NodeStatus.RUNNING || nodeState.status === NodeStatus.WAITING) {
402
- nodeState.status = NodeStatus.CANCELLED;
403
- // Cancel job if exists
404
- if (nodeState.jobId) {
405
- try {
406
- await this.jobTracker.updateJobStatus(nodeState.jobId, JobStatus.CANCELLED);
407
- }
408
- catch (error) {
409
- console.warn(`Failed to cancel job ${nodeState.jobId}:`, error);
410
- }
411
- }
412
- }
413
- }
414
- await this.updateExecution(execution);
415
- this.runningExecutions.delete(execution.id);
416
- this.emit('workflow:failed', {
417
- type: 'workflow:failed',
418
- workflowId: execution.workflowId,
419
- executionId: execution.id,
420
- data: execution,
421
- timestamp: new Date()
422
- });
423
- }
424
- async cancelWorkflow(executionId) {
425
- const execution = this.runningExecutions.get(executionId);
426
- if (!execution) {
427
- throw new Error(`Workflow execution ${executionId} not found or not running`);
428
- }
429
- execution.status = WorkflowStatus.CANCELLED;
430
- execution.completedAt = new Date();
431
- execution.durationMs = execution.completedAt.getTime() - execution.startedAt.getTime();
432
- // Cancel all running and waiting nodes
433
- for (const [_nodeId, nodeState] of Object.entries(execution.nodeStates)) {
434
- if (nodeState.status === NodeStatus.RUNNING || nodeState.status === NodeStatus.WAITING) {
435
- nodeState.status = NodeStatus.CANCELLED;
436
- if (nodeState.jobId) {
437
- try {
438
- await this.jobTracker.updateJobStatus(nodeState.jobId, JobStatus.CANCELLED);
439
- }
440
- catch (error) {
441
- console.warn(`Failed to cancel job ${nodeState.jobId}:`, error);
442
- }
443
- }
444
- }
445
- }
446
- await this.updateExecution(execution);
447
- this.runningExecutions.delete(executionId);
448
- this.emit('workflow:cancelled', {
449
- type: 'workflow:cancelled',
450
- workflowId: execution.workflowId,
451
- executionId: execution.id,
452
- data: execution,
453
- timestamp: new Date()
454
- });
455
- }
456
- // Helper methods
457
- validateWorkflow(definition) {
458
- if (!definition.name || !definition.version) {
459
- throw new Error('Workflow name and version are required');
460
- }
461
- if (!definition.nodes || definition.nodes.length === 0) {
462
- throw new Error('Workflow must have at least one node');
463
- }
464
- // Check for cycles in dependencies
465
- this.detectCycles(definition.nodes);
466
- // Validate nodes
467
- for (const node of definition.nodes) {
468
- if (!node.id || !node.name || !node.type) {
469
- throw new Error('Node must have id, name, and type');
470
- }
471
- // Validate dependencies exist
472
- for (const depId of node.dependencies) {
473
- if (!definition.nodes.find(n => n.id === depId)) {
474
- throw new Error(`Node ${node.id} depends on non-existent node ${depId}`);
475
- }
476
- }
477
- }
478
- }
479
- detectCycles(nodes) {
480
- const visited = new Set();
481
- const recursionStack = new Set();
482
- const hasCycle = (nodeId) => {
483
- if (recursionStack.has(nodeId)) {
484
- return true; // Found cycle
485
- }
486
- if (visited.has(nodeId)) {
487
- return false; // Already processed
488
- }
489
- visited.add(nodeId);
490
- recursionStack.add(nodeId);
491
- const node = nodes.find(n => n.id === nodeId);
492
- if (node) {
493
- for (const depId of node.dependencies) {
494
- if (hasCycle(depId)) {
495
- return true;
496
- }
497
- }
498
- }
499
- recursionStack.delete(nodeId);
500
- return false;
501
- };
502
- for (const node of nodes) {
503
- if (hasCycle(node.id)) {
504
- throw new Error('Workflow contains cycles in dependencies');
505
- }
506
- }
507
- }
508
- evaluateCondition(condition, parameters) {
509
- try {
510
- // Simple condition evaluation - in production, use a safer evaluator
511
- const func = new Function('params', `return ${condition}`);
512
- return !!func(parameters);
513
- }
514
- catch (error) {
515
- console.warn('Condition evaluation failed, defaulting to false:', error);
516
- return false;
517
- }
518
- }
519
- generateRunId(workflowName) {
520
- const timestamp = new Date().toISOString().replace(/[:.]/g, '-');
521
- const shortId = Math.random().toString(36).substr(2, 4);
522
- return `${workflowName}-${timestamp}-${shortId}`;
523
- }
524
- async getRunningExecutionsCount(workflowId) {
525
- const query = `
526
- SELECT COUNT(*) as count
527
- FROM workflow_executions
528
- WHERE workflow_id = $1 AND status = 'running'
529
- `;
530
- const result = await this.pool.query(query, [workflowId]);
531
- return parseInt(result.rows[0].count);
532
- }
533
- async storeExecution(execution) {
534
- const query = `
535
- INSERT INTO workflow_executions (
536
- id, workflow_id, run_id, status, triggered_by, trigger_type,
537
- parameters, started_at, current_stage, completed_stages, failed_stages
538
- ) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11)
539
- `;
540
- const values = [
541
- execution.id,
542
- execution.workflowId,
543
- execution.runId,
544
- execution.status,
545
- execution.triggeredBy,
546
- execution.triggerType,
547
- JSON.stringify(execution.parameters),
548
- execution.startedAt,
549
- execution.currentStage,
550
- JSON.stringify(execution.completedStages),
551
- JSON.stringify(execution.failedStages)
552
- ];
553
- await this.pool.query(query, values);
554
- }
555
- async updateExecution(execution) {
556
- const query = `
557
- UPDATE workflow_executions
558
- SET
559
- status = $2,
560
- completed_at = $3,
561
- duration_ms = $4,
562
- current_stage = $5,
563
- completed_stages = $6,
564
- failed_stages = $7,
565
- result = $8,
566
- error_message = $9
567
- WHERE id = $1
568
- `;
569
- const values = [
570
- execution.id,
571
- execution.status,
572
- execution.completedAt,
573
- execution.durationMs,
574
- execution.currentStage,
575
- JSON.stringify(execution.completedStages),
576
- JSON.stringify(execution.failedStages),
577
- execution.result ? JSON.stringify(execution.result) : null,
578
- execution.errorMessage
579
- ];
580
- await this.pool.query(query, values);
581
- // Also store node states (this would need a separate table in production)
582
- // For now, we'll store them in the result field or create a separate storage
583
- }
584
- // Public query methods
585
- async getExecution(executionId) {
586
- // First check running executions
587
- const running = this.runningExecutions.get(executionId);
588
- if (running) {
589
- return running;
590
- }
591
- // Then check database
592
- const query = 'SELECT * FROM workflow_executions WHERE id = $1';
593
- const result = await this.pool.query(query, [executionId]);
594
- if (result.rows.length === 0) {
595
- return null;
596
- }
597
- return this.parseExecutionRow(result.rows[0]);
598
- }
599
- async listExecutions(workflowId, limit = 50) {
600
- let query = 'SELECT * FROM workflow_executions';
601
- const values = [];
602
- if (workflowId) {
603
- query += ' WHERE workflow_id = $1';
604
- values.push(workflowId);
605
- }
606
- query += ` ORDER BY started_at DESC LIMIT ${limit}`;
607
- const result = await this.pool.query(query, values);
608
- return result.rows.map(row => this.parseExecutionRow(row));
609
- }
610
- parseExecutionRow(row) {
611
- return {
612
- id: row.id,
613
- workflowId: row.workflow_id,
614
- runId: row.run_id,
615
- status: row.status,
616
- triggeredBy: row.triggered_by,
617
- triggerType: row.trigger_type,
618
- parameters: row.parameters || {},
619
- startedAt: row.started_at,
620
- completedAt: row.completed_at,
621
- durationMs: row.duration_ms,
622
- currentStage: row.current_stage,
623
- completedStages: row.completed_stages || [],
624
- failedStages: row.failed_stages || [],
625
- result: row.result,
626
- errorMessage: row.error_message,
627
- nodeStates: {} // Would need to be loaded from separate storage
628
- };
629
- }
630
- async handleNodeFailure(execution, node, error) {
631
- const nodeState = execution.nodeStates[node.id];
632
- nodeState.status = NodeStatus.FAILED;
633
- nodeState.completedAt = new Date();
634
- nodeState.error = error.message;
635
- execution.status = WorkflowStatus.FAILED;
636
- execution.completedAt = new Date();
637
- execution.errorMessage = `Node ${node.id} failed: ${error.message}`;
638
- await this.updateExecution(execution);
639
- this.runningExecutions.delete(execution.id);
640
- this.emit('node:failed', {
641
- executionId: execution.id,
642
- workflowId: execution.workflowId,
643
- nodeId: node.id,
644
- error: error.message,
645
- timestamp: new Date()
646
- });
647
- this.emit('execution:failed', {
648
- executionId: execution.id,
649
- workflowId: execution.workflowId,
650
- error: error.message,
651
- timestamp: new Date()
652
- });
653
- }
654
- async listWorkflows(filters = {}) {
655
- const conditions = [];
656
- const params = [];
657
- let paramCount = 1;
658
- if (filters.status) {
659
- conditions.push(`status = $${paramCount++}`);
660
- params.push(filters.status);
661
- }
662
- const whereClause = conditions.length > 0 ? `WHERE ${conditions.join(' AND ')}` : '';
663
- const limit = filters.limit || 50;
664
- const offset = filters.offset || 0;
665
- const query = `
666
- SELECT * FROM workflow_definitions
667
- ${whereClause}
668
- ORDER BY created_at DESC
669
- LIMIT $${paramCount++} OFFSET $${paramCount++}
670
- `;
671
- params.push(limit, offset);
672
- const result = await this.pool.query(query, params);
673
- return result.rows.map(this.parseWorkflowRow.bind(this));
674
- }
675
- async getWorkflowExecutions(workflowId, filters = {}) {
676
- const limit = filters.limit || 50;
677
- const offset = filters.offset || 0;
678
- const query = `
679
- SELECT * FROM workflow_executions
680
- WHERE workflow_id = $1
681
- ORDER BY started_at DESC
682
- LIMIT $2 OFFSET $3
683
- `;
684
- const result = await this.pool.query(query, [workflowId, limit, offset]);
685
- return result.rows.map(this.parseExecutionRow.bind(this));
686
- }
687
- async cancelExecution(executionId) {
688
- const execution = this.runningExecutions.get(executionId);
689
- if (execution) {
690
- execution.status = WorkflowStatus.CANCELLED;
691
- execution.completedAt = new Date();
692
- // Cancel any running jobs
693
- for (const nodeId of Object.keys(execution.nodeStates)) {
694
- const nodeState = execution.nodeStates[nodeId];
695
- if (nodeState.status === NodeStatus.RUNNING && nodeState.jobId) {
696
- try {
697
- await this.jobTracker.updateJobStatus(nodeState.jobId, JobStatus.CANCELLED);
698
- }
699
- catch (error) {
700
- console.warn(`Failed to cancel job ${nodeState.jobId}:`, error);
701
- }
702
- }
703
- }
704
- this.runningExecutions.delete(executionId);
705
- await this.updateExecution(execution);
706
- this.emit('execution:cancelled', {
707
- executionId: execution.id,
708
- workflowId: execution.workflowId,
709
- timestamp: new Date()
710
- });
711
- }
712
- }
713
- async validateWorkflowById(workflowId) {
714
- try {
715
- const workflow = await this.getWorkflow(workflowId);
716
- if (!workflow) {
717
- return {
718
- isValid: false,
719
- errors: ['Workflow not found'],
720
- warnings: []
721
- };
722
- }
723
- const errors = [];
724
- const warnings = [];
725
- // Validate basic structure
726
- if (!workflow.name) {
727
- errors.push('Workflow name is required');
728
- }
729
- if (!workflow.nodes || workflow.nodes.length === 0) {
730
- errors.push('Workflow must have at least one node');
731
- }
732
- if (workflow.nodes) {
733
- // Check for duplicate node IDs
734
- const nodeIds = workflow.nodes.map(n => n.id);
735
- const duplicates = nodeIds.filter((id, index) => nodeIds.indexOf(id) !== index);
736
- if (duplicates.length > 0) {
737
- errors.push(`Duplicate node IDs found: ${duplicates.join(', ')}`);
738
- }
739
- // Check for invalid dependencies
740
- for (const node of workflow.nodes) {
741
- if (node.dependencies) {
742
- for (const depId of node.dependencies) {
743
- if (!nodeIds.includes(depId)) {
744
- errors.push(`Node ${node.id} depends on non-existent node ${depId}`);
745
- }
746
- }
747
- }
748
- }
749
- // Check for cycles
750
- try {
751
- this.detectCycles(workflow.nodes);
752
- }
753
- catch (error) {
754
- errors.push(error.message);
755
- }
756
- }
757
- return {
758
- isValid: errors.length === 0,
759
- errors,
760
- warnings
761
- };
762
- }
763
- catch (error) {
764
- return {
765
- isValid: false,
766
- errors: [`Validation failed: ${error.message}`],
767
- warnings: []
768
- };
769
- }
770
- }
771
- async getWorkflowDependencies(workflowId) {
772
- const workflow = await this.getWorkflow(workflowId);
773
- if (!workflow) {
774
- throw new Error(`Workflow ${workflowId} not found`);
775
- }
776
- const nodes = workflow.nodes.map(node => {
777
- const dependencies = node.dependencies || [];
778
- const dependents = workflow.nodes
779
- .filter(n => n.dependencies?.includes(node.id))
780
- .map(n => n.id);
781
- return {
782
- id: node.id,
783
- dependencies,
784
- dependents
785
- };
786
- });
787
- const graph = {};
788
- workflow.nodes.forEach(node => {
789
- graph[node.id] = node.dependencies || [];
790
- });
791
- return { nodes, graph };
792
- }
793
- async start() {
794
- // Start periodic execution polling
795
- this.pollInterval = setInterval(async () => {
796
- try {
797
- await this.checkScheduledExecutions();
798
- await this.cleanupCompletedExecutions();
799
- }
800
- catch (error) {
801
- console.error('Error in workflow engine polling:', error);
802
- }
803
- }, 10000); // Poll every 10 seconds
804
- console.log('WorkflowEngine started');
805
- }
806
- async stop() {
807
- if (this.pollInterval) {
808
- clearInterval(this.pollInterval);
809
- this.pollInterval = null;
810
- }
811
- // Cancel all running executions
812
- const runningExecutions = Array.from(this.runningExecutions.keys());
813
- await Promise.all(runningExecutions.map(id => this.cancelExecution(id)));
814
- console.log('WorkflowEngine stopped');
815
- }
816
- async checkScheduledExecutions() {
817
- // Check for workflows scheduled to run
818
- const query = `
819
- SELECT * FROM workflow_executions
820
- WHERE status = 'scheduled' AND started_at <= NOW()
821
- ORDER BY started_at ASC
822
- LIMIT 10
823
- `;
824
- const result = await this.pool.query(query);
825
- for (const row of result.rows) {
826
- const execution = this.parseExecutionRow(row);
827
- if (!this.runningExecutions.has(execution.id)) {
828
- await this.checkAndContinueExecution(execution);
829
- }
830
- }
831
- }
832
- async cleanupCompletedExecutions() {
833
- // Remove completed executions from memory after 1 hour
834
- const oneHourAgo = new Date(Date.now() - 60 * 60 * 1000);
835
- for (const [id, execution] of this.runningExecutions) {
836
- if ([WorkflowStatus.COMPLETED, WorkflowStatus.FAILED, WorkflowStatus.CANCELLED].includes(execution.status) &&
837
- execution.completedAt && execution.completedAt < oneHourAgo) {
838
- this.runningExecutions.delete(id);
839
- }
840
- }
841
- }
842
- parseWorkflowRow(row) {
843
- return {
844
- id: row.id,
845
- name: row.name,
846
- description: row.description,
847
- version: row.version,
848
- nodes: JSON.parse(row.nodes || '[]'),
849
- parameters: JSON.parse(row.parameters || '{}'),
850
- schedule: row.schedule ? JSON.parse(row.schedule) : undefined,
851
- timeout: row.timeout,
852
- maxConcurrentRuns: row.max_concurrent_runs,
853
- tags: JSON.parse(row.tags || '[]'),
854
- owner: row.owner,
855
- team: row.team
856
- };
857
- }
858
- // Cleanup
859
- async cleanup() {
860
- if (this.pollInterval) {
861
- clearInterval(this.pollInterval);
862
- this.pollInterval = null;
863
- }
864
- // Cancel all running executions
865
- for (const execution of this.runningExecutions.values()) {
866
- await this.cancelWorkflow(execution.id);
867
- }
868
- this.removeAllListeners();
869
- }
870
- }