lsh-framework 0.5.4

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (90)
  1. package/.env.example +51 -0
  2. package/README.md +399 -0
  3. package/dist/app.js +33 -0
  4. package/dist/cicd/analytics.js +261 -0
  5. package/dist/cicd/auth.js +269 -0
  6. package/dist/cicd/cache-manager.js +172 -0
  7. package/dist/cicd/data-retention.js +305 -0
  8. package/dist/cicd/performance-monitor.js +224 -0
  9. package/dist/cicd/webhook-receiver.js +634 -0
  10. package/dist/cli.js +500 -0
  11. package/dist/commands/api.js +343 -0
  12. package/dist/commands/self.js +318 -0
  13. package/dist/commands/theme.js +257 -0
  14. package/dist/commands/zsh-import.js +240 -0
  15. package/dist/components/App.js +1 -0
  16. package/dist/components/Divider.js +29 -0
  17. package/dist/components/REPL.js +43 -0
  18. package/dist/components/Terminal.js +232 -0
  19. package/dist/components/UserInput.js +30 -0
  20. package/dist/daemon/api-server.js +315 -0
  21. package/dist/daemon/job-registry.js +554 -0
  22. package/dist/daemon/lshd.js +822 -0
  23. package/dist/daemon/monitoring-api.js +220 -0
  24. package/dist/examples/supabase-integration.js +106 -0
  25. package/dist/lib/api-error-handler.js +183 -0
  26. package/dist/lib/associative-arrays.js +285 -0
  27. package/dist/lib/base-api-server.js +290 -0
  28. package/dist/lib/base-command-registrar.js +286 -0
  29. package/dist/lib/base-job-manager.js +293 -0
  30. package/dist/lib/brace-expansion.js +160 -0
  31. package/dist/lib/builtin-commands.js +439 -0
  32. package/dist/lib/cloud-config-manager.js +347 -0
  33. package/dist/lib/command-validator.js +190 -0
  34. package/dist/lib/completion-system.js +344 -0
  35. package/dist/lib/cron-job-manager.js +364 -0
  36. package/dist/lib/daemon-client-helper.js +141 -0
  37. package/dist/lib/daemon-client.js +501 -0
  38. package/dist/lib/database-persistence.js +638 -0
  39. package/dist/lib/database-schema.js +259 -0
  40. package/dist/lib/enhanced-history-system.js +246 -0
  41. package/dist/lib/env-validator.js +265 -0
  42. package/dist/lib/executors/builtin-executor.js +52 -0
  43. package/dist/lib/extended-globbing.js +411 -0
  44. package/dist/lib/extended-parameter-expansion.js +227 -0
  45. package/dist/lib/floating-point-arithmetic.js +256 -0
  46. package/dist/lib/history-system.js +245 -0
  47. package/dist/lib/interactive-shell.js +460 -0
  48. package/dist/lib/job-builtins.js +580 -0
  49. package/dist/lib/job-manager.js +386 -0
  50. package/dist/lib/job-storage-database.js +156 -0
  51. package/dist/lib/job-storage-memory.js +73 -0
  52. package/dist/lib/logger.js +274 -0
  53. package/dist/lib/lshrc-init.js +177 -0
  54. package/dist/lib/pathname-expansion.js +216 -0
  55. package/dist/lib/prompt-system.js +328 -0
  56. package/dist/lib/script-runner.js +226 -0
  57. package/dist/lib/secrets-manager.js +193 -0
  58. package/dist/lib/shell-executor.js +2504 -0
  59. package/dist/lib/shell-parser.js +958 -0
  60. package/dist/lib/shell-types.js +6 -0
  61. package/dist/lib/shell.lib.js +40 -0
  62. package/dist/lib/supabase-client.js +58 -0
  63. package/dist/lib/theme-manager.js +476 -0
  64. package/dist/lib/variable-expansion.js +385 -0
  65. package/dist/lib/zsh-compatibility.js +658 -0
  66. package/dist/lib/zsh-import-manager.js +699 -0
  67. package/dist/lib/zsh-options.js +328 -0
  68. package/dist/pipeline/job-tracker.js +491 -0
  69. package/dist/pipeline/mcli-bridge.js +302 -0
  70. package/dist/pipeline/pipeline-service.js +1116 -0
  71. package/dist/pipeline/workflow-engine.js +867 -0
  72. package/dist/services/api/api.js +58 -0
  73. package/dist/services/api/auth.js +35 -0
  74. package/dist/services/api/config.js +7 -0
  75. package/dist/services/api/file.js +22 -0
  76. package/dist/services/cron/cron-registrar.js +235 -0
  77. package/dist/services/cron/cron.js +9 -0
  78. package/dist/services/daemon/daemon-registrar.js +565 -0
  79. package/dist/services/daemon/daemon.js +9 -0
  80. package/dist/services/lib/lib.js +86 -0
  81. package/dist/services/log-file-extractor.js +170 -0
  82. package/dist/services/secrets/secrets.js +94 -0
  83. package/dist/services/shell/shell.js +28 -0
  84. package/dist/services/supabase/supabase-registrar.js +367 -0
  85. package/dist/services/supabase/supabase.js +9 -0
  86. package/dist/services/zapier.js +16 -0
  87. package/dist/simple-api-server.js +148 -0
  88. package/dist/store/store.js +31 -0
  89. package/dist/util/lib.util.js +11 -0
  90. package/package.json +144 -0
package/dist/pipeline/workflow-engine.js
@@ -0,0 +1,867 @@
+ import { EventEmitter } from 'events';
+ import { v4 as uuidv4 } from 'uuid';
+ import { JobStatus, JobPriority } from './job-tracker.js';
+ export var WorkflowStatus;
+ (function (WorkflowStatus) {
+     WorkflowStatus["PENDING"] = "pending";
+     WorkflowStatus["RUNNING"] = "running";
+     WorkflowStatus["COMPLETED"] = "completed";
+     WorkflowStatus["FAILED"] = "failed";
+     WorkflowStatus["CANCELLED"] = "cancelled";
+     WorkflowStatus["PAUSED"] = "paused";
+ })(WorkflowStatus || (WorkflowStatus = {}));
+ export var NodeStatus;
+ (function (NodeStatus) {
+     NodeStatus["PENDING"] = "pending";
+     NodeStatus["WAITING"] = "waiting";
+     NodeStatus["READY"] = "ready";
+     NodeStatus["RUNNING"] = "running";
+     NodeStatus["COMPLETED"] = "completed";
+     NodeStatus["FAILED"] = "failed";
+     NodeStatus["SKIPPED"] = "skipped";
+     NodeStatus["CANCELLED"] = "cancelled";
+ })(NodeStatus || (NodeStatus = {}));
+ export class WorkflowEngine extends EventEmitter {
+     pool;
+     jobTracker;
+     runningExecutions = new Map();
+     pollInterval = null;
+     constructor(pool, jobTracker) {
+         super();
+         this.pool = pool;
+         this.jobTracker = jobTracker;
+         // Listen to job completion events
+         this.setupJobEventListeners();
+     }
+     setupJobEventListeners() {
+         this.jobTracker.on('execution:completed', async (event) => {
+             await this.handleJobCompletion(event.jobId, event.executionId, 'completed', event.data);
+         });
+         this.jobTracker.on('execution:failed', async (event) => {
+             await this.handleJobCompletion(event.jobId, event.executionId, 'failed', event.data);
+         });
+     }
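// Note: the engine assumes the injected jobTracker is an EventEmitter that emits
// 'execution:completed' and 'execution:failed' events whose payload carries
// { jobId, executionId, data }; handleJobCompletion() below relies on exactly those fields.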
+     // Workflow Definition Management
+     async createWorkflow(definition) {
+         const id = definition.id || uuidv4();
+         // Validate workflow
+         this.validateWorkflow(definition);
+         const query = `
+ INSERT INTO pipeline_workflows (
+ id, name, description, version, definition, schedule_cron,
+ config, default_parameters, tags, owner, team
+ ) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11)
+ RETURNING *
+ `;
+         const values = [
+             id,
+             definition.name,
+             definition.description,
+             definition.version,
+             JSON.stringify(definition),
+             definition.schedule?.cron,
+             JSON.stringify({ timeout: definition.timeout, maxConcurrentRuns: definition.maxConcurrentRuns }),
+             JSON.stringify(definition.parameters || {}),
+             definition.tags,
+             definition.owner,
+             definition.team
+         ];
+         const _result = await this.pool.query(query, values);
+         this.emit('workflow:created', {
+             type: 'workflow:created',
+             workflowId: id,
+             data: definition,
+             timestamp: new Date()
+         });
+         return { ...definition, id };
+     }
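// Illustrative only: a minimal definition accepted by createWorkflow()/validateWorkflow().
// Field names come from the validation and execution code in this file; the values are made up.
// {
//   name: 'nightly-etl', version: '1.0.0', owner: 'ops', tags: ['etl'],
//   nodes: [
//     { id: 'extract', name: 'Extract', type: 'job', dependencies: [], config: {} },
//     { id: 'load',    name: 'Load',    type: 'job', dependencies: ['extract'], config: {} }
//   ]
// }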
+     async getWorkflow(workflowId) {
+         const query = 'SELECT * FROM pipeline_workflows WHERE id = $1';
+         const result = await this.pool.query(query, [workflowId]);
+         if (result.rows.length === 0) {
+             return null;
+         }
+         const row = result.rows[0];
+         return JSON.parse(row.definition);
+     }
+     async updateWorkflow(workflowId, definition) {
+         this.validateWorkflow(definition);
+         const query = `
+ UPDATE pipeline_workflows
+ SET definition = $2, version = $3, updated_at = CURRENT_TIMESTAMP
+ WHERE id = $1
+ `;
+         await this.pool.query(query, [workflowId, JSON.stringify(definition), definition.version]);
+         this.emit('workflow:updated', {
+             type: 'workflow:updated',
+             workflowId,
+             data: definition,
+             timestamp: new Date()
+         });
+     }
+     async deleteWorkflow(workflowId) {
+         // Check for running executions
+         const runningCount = await this.getRunningExecutionsCount(workflowId);
+         if (runningCount > 0) {
+             throw new Error(`Cannot delete workflow with ${runningCount} running executions`);
+         }
+         const query = 'UPDATE pipeline_workflows SET is_active = false WHERE id = $1';
+         await this.pool.query(query, [workflowId]);
+         this.emit('workflow:deleted', {
+             type: 'workflow:deleted',
+             workflowId,
+             timestamp: new Date()
+         });
+     }
+     // Workflow Execution
+     async executeWorkflow(workflowId, triggeredBy, triggerType, parameters = {}) {
+         const workflow = await this.getWorkflow(workflowId);
+         if (!workflow) {
+             throw new Error(`Workflow ${workflowId} not found`);
+         }
+         // Check concurrency limits
+         const runningCount = await this.getRunningExecutionsCount(workflowId);
+         const maxConcurrent = workflow.maxConcurrentRuns || 1;
+         if (runningCount >= maxConcurrent) {
+             throw new Error(`Workflow ${workflowId} has reached max concurrent runs (${maxConcurrent})`);
+         }
+         // Create execution
+         const execution = {
+             id: uuidv4(),
+             workflowId,
+             runId: this.generateRunId(workflow.name),
+             status: WorkflowStatus.PENDING,
+             triggeredBy,
+             triggerType,
+             parameters: { ...workflow.parameters, ...parameters },
+             startedAt: new Date(),
+             completedStages: [],
+             failedStages: [],
+             nodeStates: {}
+         };
+         // Initialize node states
+         for (const node of workflow.nodes) {
+             execution.nodeStates[node.id] = {
+                 nodeId: node.id,
+                 status: NodeStatus.PENDING,
+                 retryCount: 0
+             };
+         }
+         // Store execution
+         await this.storeExecution(execution);
+         // Add to running executions
+         this.runningExecutions.set(execution.id, execution);
+         // Start execution
+         await this.startExecution(execution);
+         this.emit('workflow:started', {
+             type: 'workflow:started',
+             workflowId,
+             executionId: execution.id,
+             data: execution,
+             timestamp: new Date()
+         });
+         return execution;
+     }
+     async startExecution(execution) {
+         execution.status = WorkflowStatus.RUNNING;
+         await this.updateExecution(execution);
+         // Find ready nodes (no dependencies)
+         const readyNodes = await this.findReadyNodes(execution);
+         // Start ready nodes
+         for (const nodeId of readyNodes) {
+             await this.executeNode(execution, nodeId);
+         }
+     }
+     async executeNode(execution, nodeId) {
+         const workflow = await this.getWorkflow(execution.workflowId);
+         if (!workflow)
+             return;
+         const node = workflow.nodes.find(n => n.id === nodeId);
+         if (!node)
+             return;
+         const nodeState = execution.nodeStates[nodeId];
+         nodeState.status = NodeStatus.RUNNING;
+         nodeState.startedAt = new Date();
+         await this.updateExecution(execution);
+         try {
+             switch (node.type) {
+                 case 'job':
+                     await this.executeJobNode(execution, node);
+                     break;
+                 case 'condition':
+                     await this.executeConditionNode(execution, node);
+                     break;
+                 case 'parallel':
+                     await this.executeParallelNode(execution, node);
+                     break;
+                 case 'wait':
+                     await this.executeWaitNode(execution, node);
+                     break;
+                 default:
+                     throw new Error(`Unknown node type: ${node.type}`);
+             }
+         }
+         catch (error) {
+             console.error('Error executing node:', error);
+             await this.handleNodeFailure(execution, node, error);
+         }
+     }
+     async executeJobNode(execution, node) {
+         // Create job from node configuration
+         const jobConfig = {
+             name: `${execution.runId}-${node.name}`,
+             type: node.config.type || 'workflow_job',
+             sourceSystem: 'workflow',
+             targetSystem: node.config.targetSystem || 'mcli',
+             status: JobStatus.PENDING,
+             priority: node.config.priority || JobPriority.NORMAL,
+             config: {
+                 ...node.config,
+                 workflowExecutionId: execution.id,
+                 workflowNodeId: node.id,
+                 workflowRunId: execution.runId
+             },
+             parameters: {
+                 ...execution.parameters,
+                 ...node.config.parameters
+             },
+             owner: execution.triggeredBy,
+             tags: [`workflow:${execution.workflowId}`, `run:${execution.runId}`]
+         };
+         // Submit job
+         const job = await this.jobTracker.createJob(jobConfig);
+         // Update node state
+         const nodeState = execution.nodeStates[node.id];
+         nodeState.jobId = job.id;
+         await this.updateExecution(execution);
+         // Job completion will be handled by event listeners
+     }
+     async executeConditionNode(execution, node) {
+         // Evaluate condition
+         const result = this.evaluateCondition(node.condition || 'true', execution.parameters);
+         const nodeState = execution.nodeStates[node.id];
+         nodeState.status = result ? NodeStatus.COMPLETED : NodeStatus.SKIPPED;
+         nodeState.completedAt = new Date();
+         nodeState.result = { conditionResult: result };
+         if (nodeState.startedAt) {
+             nodeState.durationMs = nodeState.completedAt.getTime() - nodeState.startedAt.getTime();
+         }
+         await this.updateExecution(execution);
+         // Continue with downstream nodes
+         await this.checkAndContinueExecution(execution);
+     }
+     async executeParallelNode(execution, node) {
+         // Parallel nodes are just markers - their completion is determined by their dependencies
+         const nodeState = execution.nodeStates[node.id];
+         nodeState.status = NodeStatus.COMPLETED;
+         nodeState.completedAt = new Date();
+         if (nodeState.startedAt) {
+             nodeState.durationMs = nodeState.completedAt.getTime() - nodeState.startedAt.getTime();
+         }
+         await this.updateExecution(execution);
+         await this.checkAndContinueExecution(execution);
+     }
+     async executeWaitNode(execution, node) {
+         const waitMs = node.config.waitMs || 1000;
+         setTimeout(async () => {
+             const nodeState = execution.nodeStates[node.id];
+             nodeState.status = NodeStatus.COMPLETED;
+             nodeState.completedAt = new Date();
+             if (nodeState.startedAt) {
+                 nodeState.durationMs = nodeState.completedAt.getTime() - nodeState.startedAt.getTime();
+             }
+             await this.updateExecution(execution);
+             await this.checkAndContinueExecution(execution);
+         }, waitMs);
+     }
+     async handleJobCompletion(jobId, executionId, status, data) {
+         // Find workflow execution by job ID
+         let targetExecution = null;
+         let targetNodeId = null;
+         for (const [_execId, execution] of this.runningExecutions) {
+             for (const [nodeId, nodeState] of Object.entries(execution.nodeStates)) {
+                 if (nodeState.jobId === jobId) {
+                     targetExecution = execution;
+                     targetNodeId = nodeId;
+                     break;
+                 }
+             }
+             if (targetExecution)
+                 break;
+         }
+         if (!targetExecution || !targetNodeId) {
+             return; // Job not part of workflow
+         }
+         const nodeState = targetExecution.nodeStates[targetNodeId];
+         nodeState.status = status === 'completed' ? NodeStatus.COMPLETED : NodeStatus.FAILED;
+         nodeState.completedAt = new Date();
+         nodeState.result = data;
+         if (nodeState.startedAt) {
+             nodeState.durationMs = nodeState.completedAt.getTime() - nodeState.startedAt.getTime();
+         }
+         if (status === 'failed') {
+             nodeState.error = data.errorMessage || 'Job failed';
+             // Check retry policy
+             const workflow = await this.getWorkflow(targetExecution.workflowId);
+             const node = workflow?.nodes.find(n => n.id === targetNodeId);
+             if (node?.retryPolicy && nodeState.retryCount < node.retryPolicy.maxRetries) {
+                 await this.scheduleNodeRetry(targetExecution, targetNodeId, node.retryPolicy);
+                 return;
+             }
+             targetExecution.failedStages.push(targetNodeId);
+             await this.handleWorkflowFailure(targetExecution, `Node ${targetNodeId} failed: ${nodeState.error}`);
+         }
+         else {
+             targetExecution.completedStages.push(targetNodeId);
+             await this.checkAndContinueExecution(targetExecution);
+         }
+         await this.updateExecution(targetExecution);
+     }
+     async scheduleNodeRetry(execution, nodeId, retryPolicy) {
+         const nodeState = execution.nodeStates[nodeId];
+         nodeState.retryCount++;
+         const backoffMs = retryPolicy.backoffMs * Math.pow(retryPolicy.backoffMultiplier, nodeState.retryCount - 1);
+         nodeState.nextRetryAt = new Date(Date.now() + backoffMs);
+         nodeState.status = NodeStatus.WAITING;
+         await this.updateExecution(execution);
+         // Schedule retry
+         setTimeout(async () => {
+             if (execution.status === WorkflowStatus.RUNNING) {
+                 await this.executeNode(execution, nodeId);
+             }
+         }, backoffMs);
+     }
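// Retry backoff is exponential: delay = backoffMs * backoffMultiplier^(retryCount - 1).
// For example, a retryPolicy of { maxRetries: 3, backoffMs: 1000, backoffMultiplier: 2 }
// (illustrative values) would retry after roughly 1s, 2s and 4s. The timer lives in process memory only.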
+     async checkAndContinueExecution(execution) {
+         const workflow = await this.getWorkflow(execution.workflowId);
+         if (!workflow)
+             return;
+         // Check if workflow is complete
+         const allNodes = workflow.nodes.map(n => n.id);
+         const completedNodes = allNodes.filter(id => execution.nodeStates[id].status === NodeStatus.COMPLETED ||
+             execution.nodeStates[id].status === NodeStatus.SKIPPED);
+         if (completedNodes.length === allNodes.length) {
+             await this.completeWorkflow(execution);
+             return;
+         }
+         // Find newly ready nodes
+         const readyNodes = await this.findReadyNodes(execution);
+         // Start ready nodes
+         for (const nodeId of readyNodes) {
+             if (execution.nodeStates[nodeId].status === NodeStatus.PENDING) {
+                 await this.executeNode(execution, nodeId);
+             }
+         }
+     }
+     async findReadyNodes(execution) {
+         const workflow = await this.getWorkflow(execution.workflowId);
+         if (!workflow)
+             return [];
+         const readyNodes = [];
+         for (const node of workflow.nodes) {
+             const nodeState = execution.nodeStates[node.id];
+             // Skip if not pending
+             if (nodeState.status !== NodeStatus.PENDING) {
+                 continue;
+             }
+             // Check dependencies
+             const dependenciesSatisfied = node.dependencies.every(depId => {
+                 const depState = execution.nodeStates[depId];
+                 return depState?.status === NodeStatus.COMPLETED || depState?.status === NodeStatus.SKIPPED;
+             });
+             if (dependenciesSatisfied) {
+                 readyNodes.push(node.id);
+             }
+         }
+         return readyNodes;
+     }
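// Dependency resolution: a node becomes ready only when every id in its dependencies array
// is COMPLETED or SKIPPED. E.g. with extract -> transform -> load, 'transform' is started by
// checkAndContinueExecution() only after 'extract' finishes.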
+     async completeWorkflow(execution) {
+         execution.status = WorkflowStatus.COMPLETED;
+         execution.completedAt = new Date();
+         execution.durationMs = execution.completedAt.getTime() - execution.startedAt.getTime();
+         await this.updateExecution(execution);
+         this.runningExecutions.delete(execution.id);
+         this.emit('workflow:completed', {
+             type: 'workflow:completed',
+             workflowId: execution.workflowId,
+             executionId: execution.id,
+             data: execution,
+             timestamp: new Date()
+         });
+     }
+     async handleWorkflowFailure(execution, errorMessage) {
+         execution.status = WorkflowStatus.FAILED;
+         execution.completedAt = new Date();
+         execution.durationMs = execution.completedAt.getTime() - execution.startedAt.getTime();
+         execution.errorMessage = errorMessage;
+         // Cancel running nodes
+         for (const [_nodeId, nodeState] of Object.entries(execution.nodeStates)) {
+             if (nodeState.status === NodeStatus.RUNNING || nodeState.status === NodeStatus.WAITING) {
+                 nodeState.status = NodeStatus.CANCELLED;
+                 // Cancel job if exists
+                 if (nodeState.jobId) {
+                     try {
+                         await this.jobTracker.updateJobStatus(nodeState.jobId, JobStatus.CANCELLED);
+                     }
+                     catch (error) {
+                         console.warn(`Failed to cancel job ${nodeState.jobId}:`, error);
+                     }
+                 }
+             }
+         }
+         await this.updateExecution(execution);
+         this.runningExecutions.delete(execution.id);
+         this.emit('workflow:failed', {
+             type: 'workflow:failed',
+             workflowId: execution.workflowId,
+             executionId: execution.id,
+             data: execution,
+             timestamp: new Date()
+         });
+     }
+     async cancelWorkflow(executionId) {
+         const execution = this.runningExecutions.get(executionId);
+         if (!execution) {
+             throw new Error(`Workflow execution ${executionId} not found or not running`);
+         }
+         execution.status = WorkflowStatus.CANCELLED;
+         execution.completedAt = new Date();
+         execution.durationMs = execution.completedAt.getTime() - execution.startedAt.getTime();
+         // Cancel all running and waiting nodes
+         for (const [_nodeId, nodeState] of Object.entries(execution.nodeStates)) {
+             if (nodeState.status === NodeStatus.RUNNING || nodeState.status === NodeStatus.WAITING) {
+                 nodeState.status = NodeStatus.CANCELLED;
+                 if (nodeState.jobId) {
+                     try {
+                         await this.jobTracker.updateJobStatus(nodeState.jobId, JobStatus.CANCELLED);
+                     }
+                     catch (error) {
+                         console.warn(`Failed to cancel job ${nodeState.jobId}:`, error);
+                     }
+                 }
+             }
+         }
+         await this.updateExecution(execution);
+         this.runningExecutions.delete(executionId);
+         this.emit('workflow:cancelled', {
+             type: 'workflow:cancelled',
+             workflowId: execution.workflowId,
+             executionId: execution.id,
+             data: execution,
+             timestamp: new Date()
+         });
+     }
+     // Helper methods
+     validateWorkflow(definition) {
+         if (!definition.name || !definition.version) {
+             throw new Error('Workflow name and version are required');
+         }
+         if (!definition.nodes || definition.nodes.length === 0) {
+             throw new Error('Workflow must have at least one node');
+         }
+         // Check for cycles in dependencies
+         this.detectCycles(definition.nodes);
+         // Validate nodes
+         for (const node of definition.nodes) {
+             if (!node.id || !node.name || !node.type) {
+                 throw new Error('Node must have id, name, and type');
+             }
+             // Validate dependencies exist
+             for (const depId of node.dependencies) {
+                 if (!definition.nodes.find(n => n.id === depId)) {
+                     throw new Error(`Node ${node.id} depends on non-existent node ${depId}`);
+                 }
+             }
+         }
+     }
+     detectCycles(nodes) {
+         const visited = new Set();
+         const recursionStack = new Set();
+         const hasCycle = (nodeId) => {
+             if (recursionStack.has(nodeId)) {
+                 return true; // Found cycle
+             }
+             if (visited.has(nodeId)) {
+                 return false; // Already processed
+             }
+             visited.add(nodeId);
+             recursionStack.add(nodeId);
+             const node = nodes.find(n => n.id === nodeId);
+             if (node) {
+                 for (const depId of node.dependencies) {
+                     if (hasCycle(depId)) {
+                         return true;
+                     }
+                 }
+             }
+             recursionStack.delete(nodeId);
+             return false;
+         };
+         for (const node of nodes) {
+             if (hasCycle(node.id)) {
+                 throw new Error('Workflow contains cycles in dependencies');
+             }
+         }
+     }
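// detectCycles() is a depth-first search over the dependency edges using an explicit recursion
// stack; a definition such as a -> b -> a is rejected at createWorkflow() time with
// "Workflow contains cycles in dependencies".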
+     evaluateCondition(condition, parameters) {
+         try {
+             // Simple condition evaluation - in production, use a safer evaluator
+             const func = new Function('params', `return ${condition}`);
+             return !!func(parameters);
+         }
+         catch (error) {
+             console.warn('Condition evaluation failed, defaulting to false:', error);
+             return false;
+         }
+     }
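// Caution: condition strings are evaluated with new Function(), so they can run arbitrary code;
// only trusted workflow definitions should reach this point. A condition would look something
// like (illustrative): params.environment === 'production'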
+     generateRunId(workflowName) {
+         const timestamp = new Date().toISOString().replace(/[:.]/g, '-');
+         const shortId = Math.random().toString(36).substr(2, 4);
+         return `${workflowName}-${timestamp}-${shortId}`;
+     }
+     async getRunningExecutionsCount(workflowId) {
+         const query = `
+ SELECT COUNT(*) as count
+ FROM workflow_executions
+ WHERE workflow_id = $1 AND status = 'running'
+ `;
+         const result = await this.pool.query(query, [workflowId]);
+         return parseInt(result.rows[0].count);
+     }
+     async storeExecution(execution) {
+         const query = `
+ INSERT INTO workflow_executions (
+ id, workflow_id, run_id, status, triggered_by, trigger_type,
+ parameters, started_at, current_stage, completed_stages, failed_stages
+ ) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11)
+ `;
+         const values = [
+             execution.id,
+             execution.workflowId,
+             execution.runId,
+             execution.status,
+             execution.triggeredBy,
+             execution.triggerType,
+             JSON.stringify(execution.parameters),
+             execution.startedAt,
+             execution.currentStage,
+             JSON.stringify(execution.completedStages),
+             JSON.stringify(execution.failedStages)
+         ];
+         await this.pool.query(query, values);
+     }
+     async updateExecution(execution) {
+         const query = `
+ UPDATE workflow_executions
+ SET
+ status = $2,
+ completed_at = $3,
+ duration_ms = $4,
+ current_stage = $5,
+ completed_stages = $6,
+ failed_stages = $7,
+ result = $8,
+ error_message = $9
+ WHERE id = $1
+ `;
+         const values = [
+             execution.id,
+             execution.status,
+             execution.completedAt,
+             execution.durationMs,
+             execution.currentStage,
+             JSON.stringify(execution.completedStages),
+             JSON.stringify(execution.failedStages),
+             execution.result ? JSON.stringify(execution.result) : null,
+             execution.errorMessage
+         ];
+         await this.pool.query(query, values);
+         // Also store node states (this would need a separate table in production)
+         // For now, we'll store them in the result field or create a separate storage
+     }
+     // Public query methods
+     async getExecution(executionId) {
+         // First check running executions
+         const running = this.runningExecutions.get(executionId);
+         if (running) {
+             return running;
+         }
+         // Then check database
+         const query = 'SELECT * FROM workflow_executions WHERE id = $1';
+         const result = await this.pool.query(query, [executionId]);
+         if (result.rows.length === 0) {
+             return null;
+         }
+         return this.parseExecutionRow(result.rows[0]);
+     }
+     async listExecutions(workflowId, limit = 50) {
+         let query = 'SELECT * FROM workflow_executions';
+         const values = [];
+         if (workflowId) {
+             query += ' WHERE workflow_id = $1';
+             values.push(workflowId);
+         }
+         query += ` ORDER BY started_at DESC LIMIT ${limit}`;
+         const result = await this.pool.query(query, values);
+         return result.rows.map(row => this.parseExecutionRow(row));
+     }
+     parseExecutionRow(row) {
+         return {
+             id: row.id,
+             workflowId: row.workflow_id,
+             runId: row.run_id,
+             status: row.status,
+             triggeredBy: row.triggered_by,
+             triggerType: row.trigger_type,
+             parameters: row.parameters || {},
+             startedAt: row.started_at,
+             completedAt: row.completed_at,
+             durationMs: row.duration_ms,
+             currentStage: row.current_stage,
+             completedStages: row.completed_stages || [],
+             failedStages: row.failed_stages || [],
+             result: row.result,
+             errorMessage: row.error_message,
+             nodeStates: {} // Would need to be loaded from separate storage
+         };
+     }
+     async handleNodeFailure(execution, node, error) {
+         const nodeState = execution.nodeStates[node.id];
+         nodeState.status = NodeStatus.FAILED;
+         nodeState.completedAt = new Date();
+         nodeState.error = error.message;
+         execution.status = WorkflowStatus.FAILED;
+         execution.completedAt = new Date();
+         execution.errorMessage = `Node ${node.id} failed: ${error.message}`;
+         await this.updateExecution(execution);
+         this.runningExecutions.delete(execution.id);
+         this.emit('node:failed', {
+             executionId: execution.id,
+             workflowId: execution.workflowId,
+             nodeId: node.id,
+             error: error.message,
+             timestamp: new Date()
+         });
+         this.emit('execution:failed', {
+             executionId: execution.id,
+             workflowId: execution.workflowId,
+             error: error.message,
+             timestamp: new Date()
+         });
+     }
+     async listWorkflows(filters = {}) {
+         const conditions = [];
+         const params = [];
+         let paramCount = 1;
+         if (filters.status) {
+             conditions.push(`status = $${paramCount++}`);
+             params.push(filters.status);
+         }
+         const whereClause = conditions.length > 0 ? `WHERE ${conditions.join(' AND ')}` : '';
+         const limit = filters.limit || 50;
+         const offset = filters.offset || 0;
+         const query = `
+ SELECT * FROM workflow_definitions
+ ${whereClause}
+ ORDER BY created_at DESC
+ LIMIT $${paramCount++} OFFSET $${paramCount++}
+ `;
+         params.push(limit, offset);
+         const result = await this.pool.query(query, params);
+         return result.rows.map(this.parseWorkflowRow.bind(this));
+     }
+     async getWorkflowExecutions(workflowId, filters = {}) {
+         const limit = filters.limit || 50;
+         const offset = filters.offset || 0;
+         const query = `
+ SELECT * FROM workflow_executions
+ WHERE workflow_id = $1
+ ORDER BY started_at DESC
+ LIMIT $2 OFFSET $3
+ `;
+         const result = await this.pool.query(query, [workflowId, limit, offset]);
+         return result.rows.map(this.parseExecutionRow.bind(this));
+     }
+     async cancelExecution(executionId) {
+         const execution = this.runningExecutions.get(executionId);
+         if (execution) {
+             execution.status = WorkflowStatus.CANCELLED;
+             execution.completedAt = new Date();
+             // Cancel any running jobs
+             for (const nodeId of Object.keys(execution.nodeStates)) {
+                 const nodeState = execution.nodeStates[nodeId];
+                 if (nodeState.status === NodeStatus.RUNNING && nodeState.jobId) {
+                     try {
+                         await this.jobTracker.updateJobStatus(nodeState.jobId, JobStatus.CANCELLED);
+                     }
+                     catch (error) {
+                         console.warn(`Failed to cancel job ${nodeState.jobId}:`, error);
+                     }
+                 }
+             }
+             this.runningExecutions.delete(executionId);
+             await this.updateExecution(execution);
+             this.emit('execution:cancelled', {
+                 executionId: execution.id,
+                 workflowId: execution.workflowId,
+                 timestamp: new Date()
+             });
+         }
+     }
+     async validateWorkflowById(workflowId) {
+         try {
+             const workflow = await this.getWorkflow(workflowId);
+             if (!workflow) {
+                 return {
+                     isValid: false,
+                     errors: ['Workflow not found'],
+                     warnings: []
+                 };
+             }
+             const errors = [];
+             const warnings = [];
+             // Validate basic structure
+             if (!workflow.name) {
+                 errors.push('Workflow name is required');
+             }
+             if (!workflow.nodes || workflow.nodes.length === 0) {
+                 errors.push('Workflow must have at least one node');
+             }
+             if (workflow.nodes) {
+                 // Check for duplicate node IDs
+                 const nodeIds = workflow.nodes.map(n => n.id);
+                 const duplicates = nodeIds.filter((id, index) => nodeIds.indexOf(id) !== index);
+                 if (duplicates.length > 0) {
+                     errors.push(`Duplicate node IDs found: ${duplicates.join(', ')}`);
+                 }
+                 // Check for invalid dependencies
+                 for (const node of workflow.nodes) {
+                     if (node.dependencies) {
+                         for (const depId of node.dependencies) {
+                             if (!nodeIds.includes(depId)) {
+                                 errors.push(`Node ${node.id} depends on non-existent node ${depId}`);
+                             }
+                         }
+                     }
+                 }
+                 // Check for cycles
+                 try {
+                     this.detectCycles(workflow.nodes);
+                 }
+                 catch (error) {
+                     errors.push(error.message);
+                 }
+             }
+             return {
+                 isValid: errors.length === 0,
+                 errors,
+                 warnings
+             };
+         }
+         catch (error) {
+             return {
+                 isValid: false,
+                 errors: [`Validation failed: ${error.message}`],
+                 warnings: []
+             };
+         }
+     }
+     async getWorkflowDependencies(workflowId) {
+         const workflow = await this.getWorkflow(workflowId);
+         if (!workflow) {
+             throw new Error(`Workflow ${workflowId} not found`);
+         }
+         const nodes = workflow.nodes.map(node => {
+             const dependencies = node.dependencies || [];
+             const dependents = workflow.nodes
+                 .filter(n => n.dependencies?.includes(node.id))
+                 .map(n => n.id);
+             return {
+                 id: node.id,
+                 dependencies,
+                 dependents
+             };
+         });
+         const graph = {};
+         workflow.nodes.forEach(node => {
+             graph[node.id] = node.dependencies || [];
+         });
+         return { nodes, graph };
+     }
+     async start() {
+         // Start periodic execution polling
+         this.pollInterval = setInterval(async () => {
+             try {
+                 await this.checkScheduledExecutions();
+                 await this.cleanupCompletedExecutions();
+             }
+             catch (error) {
+                 console.error('Error in workflow engine polling:', error);
+             }
+         }, 10000); // Poll every 10 seconds
+         console.log('WorkflowEngine started');
+     }
+     async stop() {
+         if (this.pollInterval) {
+             clearInterval(this.pollInterval);
+             this.pollInterval = null;
+         }
+         // Cancel all running executions
+         const runningExecutions = Array.from(this.runningExecutions.keys());
+         await Promise.all(runningExecutions.map(id => this.cancelExecution(id)));
+         console.log('WorkflowEngine stopped');
+     }
+     async checkScheduledExecutions() {
+         // Check for workflows scheduled to run
+         const query = `
+ SELECT * FROM workflow_executions
+ WHERE status = 'scheduled' AND started_at <= NOW()
+ ORDER BY started_at ASC
+ LIMIT 10
+ `;
+         const result = await this.pool.query(query);
+         for (const row of result.rows) {
+             const execution = this.parseExecutionRow(row);
+             if (!this.runningExecutions.has(execution.id)) {
+                 await this.checkAndContinueExecution(execution);
+             }
+         }
+     }
+     async cleanupCompletedExecutions() {
+         // Remove completed executions from memory after 1 hour
+         const oneHourAgo = new Date(Date.now() - 60 * 60 * 1000);
+         for (const [id, execution] of this.runningExecutions) {
+             if ([WorkflowStatus.COMPLETED, WorkflowStatus.FAILED, WorkflowStatus.CANCELLED].includes(execution.status) &&
+                 execution.completedAt && execution.completedAt < oneHourAgo) {
+                 this.runningExecutions.delete(id);
+             }
+         }
+     }
+     parseWorkflowRow(row) {
+         return {
+             id: row.id,
+             name: row.name,
+             description: row.description,
+             version: row.version,
+             nodes: JSON.parse(row.nodes || '[]'),
+             parameters: JSON.parse(row.parameters || '{}'),
+             schedule: row.schedule ? JSON.parse(row.schedule) : undefined,
+             timeout: row.timeout,
+             maxConcurrentRuns: row.max_concurrent_runs,
+             tags: JSON.parse(row.tags || '[]'),
+             owner: row.owner,
+             team: row.team
+         };
+     }
+     // Cleanup
+     async cleanup() {
+         if (this.pollInterval) {
+             clearInterval(this.pollInterval);
+             this.pollInterval = null;
+         }
+         // Cancel all running executions
+         for (const execution of this.runningExecutions.values()) {
+             await this.cancelWorkflow(execution.id);
+         }
+         this.removeAllListeners();
+     }
+ }
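
How the workflow engine is meant to be driven is easiest to see from a small usage sketch. The snippet below is illustrative only and is not taken from the package: it assumes a pg-style Pool (the class only ever calls pool.query) and a JobTracker instance from ./job-tracker.js whose constructor arguments are not shown in this diff; the method calls (createWorkflow, executeWorkflow, start) and the 'workflow:completed' / 'workflow:failed' events are the ones defined above.

    import { Pool } from 'pg';
    import { JobTracker } from './job-tracker.js';   // assumption: exported here; constructor args not shown in this diff
    import { WorkflowEngine } from './workflow-engine.js';

    const pool = new Pool({ connectionString: process.env.DATABASE_URL });
    const jobTracker = new JobTracker(pool);          // assumption: takes the same pool
    const engine = new WorkflowEngine(pool, jobTracker);

    // Register a two-node DAG: 'load' runs only after 'extract' completes.
    const workflow = await engine.createWorkflow({
        name: 'nightly-etl',
        version: '1.0.0',
        nodes: [
            { id: 'extract', name: 'Extract', type: 'job', dependencies: [], config: {} },
            { id: 'load', name: 'Load', type: 'job', dependencies: ['extract'], config: {} }
        ]
    });

    engine.on('workflow:completed', (event) => console.log('run finished', event.executionId));
    engine.on('workflow:failed', (event) => console.error('run failed', event.data.errorMessage));

    await engine.start();                             // begins the 10-second poll loop
    await engine.executeWorkflow(workflow.id, 'cli-user', 'manual', { environment: 'production' });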