lsh-framework 0.8.3 → 0.9.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +75 -1
- package/dist/cli.js +14 -10
- package/dist/daemon/lshd.js +23 -13
- package/dist/lib/api-error-handler.js +16 -14
- package/dist/lib/base-command-registrar.js +6 -5
- package/dist/lib/daemon-client.js +13 -9
- package/dist/lib/database-persistence.js +8 -8
- package/dist/lib/env-validator.js +0 -3
- package/dist/lib/logger.js +0 -1
- package/dist/lib/secrets-manager.js +105 -18
- package/dist/lib/zsh-import-manager.js +17 -9
- package/dist/pipeline/job-tracker.js +1 -1
- package/dist/pipeline/mcli-bridge.js +11 -5
- package/dist/pipeline/workflow-engine.js +10 -7
- package/dist/services/cron/cron-registrar.js +27 -22
- package/dist/services/daemon/daemon-registrar.js +27 -13
- package/dist/services/secrets/secrets.js +37 -28
- package/dist/services/supabase/supabase-registrar.js +40 -33
- package/package.json +1 -1
package/dist/lib/secrets-manager.js

@@ -66,15 +66,16 @@ export class SecretsManager {
             return decrypted;
         }
         catch (error) {
-            if (error.message.includes('bad decrypt') || error.message.includes('wrong final block length')) {
+            const err = error;
+            if (err.message.includes('bad decrypt') || err.message.includes('wrong final block length')) {
                 throw new Error('Decryption failed. This usually means:\n' +
                     ' 1. You need to set LSH_SECRETS_KEY environment variable\n' +
                     ' 2. The key must match the one used during encryption\n' +
                     ' 3. Generate a shared key with: lsh secrets key\n' +
                     ' 4. Add it to your .env: LSH_SECRETS_KEY=<key>\n' +
-                    '\nOriginal error: ' + error.message);
+                    '\nOriginal error: ' + err.message);
             }
-            throw error;
+            throw err;
         }
     }
     /**
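A note on the recurring pattern: nearly every hunk in this release introduces a local alias (`const err = error`, and later `const opts = options`) before reading properties off the value. In the TypeScript source this is presumably where a catch-bound `unknown` gets narrowed; in the compiled JS it survives as a plain alias. A minimal sketch of the catch-block shape (the thrown message and `riskyOperation` are illustrative, not from the package):

```js
// Stand-in for decrypt()/readFileSync()/etc.
function riskyOperation() {
    throw new Error('bad decrypt');
}

try {
    riskyOperation();
}
catch (error) {
    const err = error;            // alias once, then read .message off the alias
    console.error(err.message);   // -> 'bad decrypt'
}
```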
@@ -116,10 +117,48 @@ export class SecretsManager {
         })
             .join('\n') + '\n';
     }
+    /**
+     * Detect destructive changes (filled secrets becoming empty)
+     */
+    detectDestructiveChanges(cloudSecrets, localSecrets) {
+        const destructive = [];
+        for (const [key, cloudValue] of Object.entries(cloudSecrets)) {
+            // Only check if key exists in local AND cloud has a non-empty value
+            if (key in localSecrets && cloudValue.trim() !== '') {
+                const localValue = localSecrets[key];
+                // If cloud had value but local is now empty/whitespace - this is destructive
+                if (localValue.trim() === '') {
+                    destructive.push({ key, cloudValue, localValue });
+                }
+            }
+        }
+        return destructive;
+    }
+    /**
+     * Format error message for destructive changes
+     */
+    formatDestructiveChangesError(destructive) {
+        const count = destructive.length;
+        const plural = count === 1 ? 'secret' : 'secrets';
+        let message = `⚠️ Destructive change detected!\n\n`;
+        message += `${count} ${plural} would go from filled → empty:\n\n`;
+        for (const { key, cloudValue } of destructive) {
+            // Mask the value for security (show first 4-5 chars)
+            const preview = cloudValue.length > 5
+                ? cloudValue.substring(0, 5) + '****'
+                : '****';
+            message += ` • ${key}: "${preview}" → "" (empty)\n`;
+        }
+        message += `\nThis is likely unintentional and could break your application.\n\n`;
+        message += `To proceed anyway, use the --force flag:\n`;
+        message += ` lsh lib secrets push --force\n`;
+        message += ` lsh lib secrets sync --force\n`;
+        return message;
+    }
     /**
      * Push local .env to Supabase
      */
-    async push(envFilePath = '.env', environment = 'dev') {
+    async push(envFilePath = '.env', environment = 'dev', force = false) {
         if (!fs.existsSync(envFilePath)) {
             throw new Error(`File not found: ${envFilePath}`);
         }
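The two new helpers drive the safety check that `push()` gains in the next hunk. A standalone restatement of the detection logic with illustrative inputs (the real methods live on `SecretsManager`, exactly as shown above):

```js
// Restatement of detectDestructiveChanges; inputs are illustrative.
function detectDestructiveChanges(cloudSecrets, localSecrets) {
    const destructive = [];
    for (const [key, cloudValue] of Object.entries(cloudSecrets)) {
        if (key in localSecrets && cloudValue.trim() !== '') {
            const localValue = localSecrets[key];
            if (localValue.trim() === '') {
                destructive.push({ key, cloudValue, localValue });
            }
        }
    }
    return destructive;
}

console.log(detectDestructiveChanges(
    { API_KEY: 'sk-12345', DEBUG: '' },
    { API_KEY: '   ', DEBUG: 'true' }
));
// -> [ { key: 'API_KEY', cloudValue: 'sk-12345', localValue: '   ' } ]
// DEBUG is not flagged: its cloud value was already empty.
```

Note that keys deleted from the local file entirely are not flagged (`key in localSecrets` must hold); only keys that are still present but blanked trigger the error.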
@@ -138,6 +177,51 @@ export class SecretsManager {
         logger.info(`Pushing ${envFilePath} to Supabase (${environment})...`);
         const content = fs.readFileSync(envFilePath, 'utf8');
         const env = this.parseEnvFile(content);
+        // Check for destructive changes unless force is true
+        if (!force) {
+            try {
+                const jobs = await this.persistence.getActiveJobs();
+                const safeFilename = filename.replace(/[^a-zA-Z0-9._-]/g, '_');
+                const secretsJobs = jobs
+                    .filter(j => {
+                    return j.command === 'secrets_sync' &&
+                        j.job_id.includes(environment) &&
+                        j.job_id.includes(safeFilename);
+                })
+                    .sort((a, b) => new Date(b.started_at).getTime() - new Date(a.started_at).getTime());
+                if (secretsJobs.length > 0) {
+                    const latestSecret = secretsJobs[0];
+                    if (latestSecret.output) {
+                        try {
+                            const decrypted = this.decrypt(latestSecret.output);
+                            const cloudEnv = this.parseEnvFile(decrypted);
+                            const destructive = this.detectDestructiveChanges(cloudEnv, env);
+                            if (destructive.length > 0) {
+                                throw new Error(this.formatDestructiveChangesError(destructive));
+                            }
+                        }
+                        catch (error) {
+                            const err = error;
+                            // If decryption fails, it's a key mismatch - let it proceed
+                            // (will fail later with proper error)
+                            if (!err.message.includes('Destructive change')) {
+                                // Only ignore decryption errors, re-throw destructive change errors
+                                throw err;
+                            }
+                            throw err;
+                        }
+                    }
+                }
+            }
+            catch (error) {
+                const err = error;
+                // Re-throw any errors (including destructive change errors)
+                if (err.message.includes('Destructive change') || err.message.includes('Decryption failed')) {
+                    throw err;
+                }
+                // Ignore other errors (like connection issues) and proceed
+            }
+        }
         // Encrypt entire .env content
         const encrypted = this.encrypt(content);
         // Include filename in job_id for tracking multiple .env files
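Worth noting in the guard above: the outer catch deliberately rethrows only destructive-change and decryption errors, so an unreachable database does not block a push. A one-function restatement of that policy (the function name is mine, not the package's):

```js
// Rethrow policy of the outer catch in push(): anything else
// (e.g. no database connection) is swallowed so the push can proceed.
function shouldRethrow(err) {
    return err.message.includes('Destructive change')
        || err.message.includes('Decryption failed');
}

console.log(shouldRethrow(new Error('Destructive change detected')));  // true
console.log(shouldRethrow(new Error('connect ECONNREFUSED')));         // false
```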
@@ -227,7 +311,7 @@ export class SecretsManager {
         let filename = '.env';
         if (parts.length >= 4) {
             // New format with filename
-            const timestamp = parts[parts.length - 1];
+            const _timestamp = parts[parts.length - 1];
             // Reconstruct filename from middle parts
             const filenameParts = parts.slice(2, -1);
             if (filenameParts.length > 0) {
@@ -322,13 +406,13 @@ export class SecretsManager {
                     status.cloudKeys = Object.keys(env).length;
                     status.keyMatches = true;
                 }
-                catch (error) {
+                catch (_error) {
                     status.keyMatches = false;
                 }
             }
         }
     }
-    catch (error) {
+    catch (_error) {
         // Cloud check failed, likely no connection
     }
     return status;
@@ -378,6 +462,7 @@ export class SecretsManager {
             return true;
         }
         catch (error) {
+            const err = error;
             logger.error(`Failed to save encryption key: ${error.message}`);
             logger.info('Please set it manually:');
             logger.info(`export LSH_SECRETS_KEY=${key}`);
@@ -414,6 +499,7 @@ LSH_SECRETS_KEY=${this.encryptionKey}
             return true;
         }
         catch (error) {
+            const err = error;
             logger.error(`Failed to create ${envFilePath}: ${error.message}`);
             return false;
         }
@@ -431,6 +517,7 @@ LSH_SECRETS_KEY=${this.encryptionKey}
             return true;
         }
         catch (error) {
+            const err = error;
             logger.error(`Failed to create ${envFilePath}: ${error.message}`);
             return false;
         }
@@ -475,7 +562,7 @@ LSH_SECRETS_KEY=${this.encryptionKey}
      * Smart sync command - automatically set up and synchronize secrets
      * This is the new enhanced sync that does everything automatically
      */
-    async smartSync(envFilePath = '.env', environment = 'dev', autoExecute = true, loadMode = false) {
+    async smartSync(envFilePath = '.env', environment = 'dev', autoExecute = true, loadMode = false, force = false) {
         // In load mode, suppress all logger output to prevent zsh glob interpretation
         // Save original level and restore at the end
         const originalLogLevel = loadMode ? logger['config'].level : undefined;
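The new trailing `force` parameter is threaded from `smartSync` into each of the internal `push()` calls rewritten in the hunks that follow. A hypothetical call site (assuming `manager` is a constructed `SecretsManager`; argument positions follow the signature above):

```js
// Sketch: thread --force through smartSync.
async function syncWithForce(manager) {
    //                       file    env    auto  load   force
    await manager.smartSync('.env', 'dev', true, false, true);
}
```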
@@ -519,9 +606,9 @@ LSH_SECRETS_KEY=${this.encryptionKey}
         }
         out();
         // Step 4: Determine action and execute if auto mode
-        let action = 'in-sync';
+        let _action = 'in-sync';
         if (status.cloudExists && status.keyMatches === false) {
-            action = 'key-mismatch';
+            _action = 'key-mismatch';
             out('⚠️ Encryption key mismatch!');
             out('   The local key does not match the cloud storage.');
             out('   Please use the original key or push new secrets with:');
@@ -530,13 +617,13 @@ LSH_SECRETS_KEY=${this.encryptionKey}
             return;
         }
         if (!status.localExists && !status.cloudExists) {
-            action = 'create-and-push';
+            _action = 'create-and-push';
             out('🆕 No secrets found locally or in cloud');
             out('   Creating new .env file...');
             if (autoExecute) {
                 await this.createEnvFromExample(envFilePath);
                 out('   Pushing to cloud...');
-                await this.push(envFilePath, effectiveEnv);
+                await this.push(envFilePath, effectiveEnv, force);
                 out();
                 out('✅ Setup complete! Edit your .env and run sync again to update.');
             }
@@ -551,11 +638,11 @@ LSH_SECRETS_KEY=${this.encryptionKey}
             return;
         }
         if (status.localExists && !status.cloudExists) {
-            action = 'push';
+            _action = 'push';
             out('⬆️ Local .env exists but not in cloud');
             if (autoExecute) {
                 out('   Pushing to cloud...');
-                await this.push(envFilePath, effectiveEnv);
+                await this.push(envFilePath, effectiveEnv, force);
                 out('✅ Secrets pushed to cloud!');
             }
             else {
@@ -569,7 +656,7 @@ LSH_SECRETS_KEY=${this.encryptionKey}
             return;
         }
         if (!status.localExists && status.cloudExists && status.keyMatches) {
-            action = 'pull';
+            _action = 'pull';
             out('⬇️ Cloud secrets available but no local file');
             if (autoExecute) {
                 out('   Pulling from cloud...');
@@ -604,13 +691,13 @@ LSH_SECRETS_KEY=${this.encryptionKey}
             return;
         }
         if (localNewer) {
-            action = 'push';
+            _action = 'push';
             out('⬆️ Local file is newer than cloud');
             out(`   Local: ${status.localModified.toLocaleString()}`);
             out(`   Cloud: ${status.cloudModified.toLocaleString()}`);
             if (autoExecute) {
                 out('   Pushing to cloud...');
-                await this.push(envFilePath, effectiveEnv);
+                await this.push(envFilePath, effectiveEnv, force);
                 out('✅ Secrets synced to cloud!');
             }
             else {
@@ -618,7 +705,7 @@ LSH_SECRETS_KEY=${this.encryptionKey}
             }
         }
         else {
-            action = 'pull';
+            _action = 'pull';
             out('⬇️ Cloud is newer than local file');
             out(`   Local: ${status.localModified.toLocaleString()}`);
             out(`   Cloud: ${status.cloudModified.toLocaleString()}`);
package/dist/lib/zsh-import-manager.js

@@ -124,15 +124,16 @@ export class ZshImportManager {
             return result;
         }
         catch (error) {
+            const err = error;
             this.log({
                 type: 'error',
                 name: 'IMPORT_ERROR',
                 status: 'failed',
-                reason: error.message
+                reason: err.message
             });
             const result = {
                 success: false,
-                message: `Import failed: ${error.message}`,
+                message: `Import failed: ${err.message}`,
                 diagnostics: this.diagnostics,
                 stats: { total: 0, succeeded: 0, failed: 1, skipped: 0, conflicts: 0 },
             };
@@ -280,7 +281,7 @@ export class ZshImportManager {
      */
     async loadExistingItems() {
         // Get existing aliases from context
-        const context = this.executor.
+        const context = this.executor.getContext();
         if (context && context.variables) {
             for (const key in context.variables) {
                 if (key.startsWith('alias_')) {
@@ -353,11 +354,12 @@ export class ZshImportManager {
                 stats.succeeded++;
             }
             catch (error) {
+                const err = error;
                 this.log({
                     type: 'alias',
                     name: alias.name,
                     status: 'failed',
-                    reason: error.message,
+                    reason: err.message,
                     source: `line ${alias.line}`,
                 });
                 stats.failed++;
@@ -415,11 +417,12 @@ export class ZshImportManager {
                 stats.succeeded++;
             }
             catch (error) {
+                const err = error;
                 this.log({
                     type: 'export',
                     name: export_.name,
                     status: 'failed',
-                    reason: error.message,
+                    reason: err.message,
                     source: `line ${export_.line}`,
                 });
                 stats.failed++;
@@ -490,11 +493,12 @@ export class ZshImportManager {
                 stats.succeeded++;
             }
             catch (error) {
+                const err = error;
                 this.log({
                     type: 'function',
                     name: func.name,
                     status: 'disabled',
-                    reason: `Parse error: ${error.message}`,
+                    reason: `Parse error: ${err.message}`,
                     source: `line ${func.line}`,
                 });
                 stats.failed++;
@@ -521,11 +525,12 @@ export class ZshImportManager {
                 stats.succeeded++;
             }
             catch (error) {
+                const err = error;
                 this.log({
                     type: 'setopt',
                     name: setopt.option,
                     status: 'disabled',
-                    reason: error.message,
+                    reason: err.message,
                     source: `line ${setopt.line}`,
                 });
                 stats.failed++;
@@ -635,7 +640,8 @@ export class ZshImportManager {
             fs.appendFileSync(this.options.diagnosticLog, logContent + '\n\n', 'utf8');
         }
         catch (error) {
-
+            const err = error;
+            console.error(`Failed to write diagnostic log: ${err.message}`);
         }
     }
     /**
@@ -684,7 +690,7 @@ export class ZshImportManager {
         if (diagnostic.status === 'conflict')
             stats.conflicts++;
         if (!stats.byType[diagnostic.type]) {
-            stats.byType[diagnostic.type] = { total: 0, succeeded: 0, failed: 0, skipped: 0 };
+            stats.byType[diagnostic.type] = { total: 0, succeeded: 0, failed: 0, skipped: 0, conflicts: 0 };
         }
         stats.byType[diagnostic.type].total++;
         if (diagnostic.status === 'success')
@@ -693,6 +699,8 @@ export class ZshImportManager {
             stats.byType[diagnostic.type].failed++;
         if (diagnostic.status === 'skipped')
             stats.byType[diagnostic.type].skipped++;
+        if (diagnostic.status === 'conflict')
+            stats.byType[diagnostic.type].conflicts++;
     }
     return stats;
 }
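Together, these last two hunks complete the `conflicts` counter: previously it was tallied globally but the per-type bucket neither initialized nor incremented it, so `stats.byType[...].conflicts` came back `undefined`. A runnable restatement of the corrected tally (the diagnostic objects are illustrative):

```js
// Restatement of the corrected per-type tally.
function tally(diagnostics) {
    const stats = { conflicts: 0, byType: {} };
    for (const d of diagnostics) {
        if (!stats.byType[d.type]) {
            stats.byType[d.type] = { total: 0, succeeded: 0, failed: 0, skipped: 0, conflicts: 0 };
        }
        stats.byType[d.type].total++;
        if (d.status === 'conflict') {
            stats.conflicts++;
            stats.byType[d.type].conflicts++;  // new in 0.9.0
        }
    }
    return stats;
}

console.log(tally([{ type: 'alias', status: 'conflict' }]).byType.alias);
// -> { total: 1, succeeded: 0, failed: 0, skipped: 0, conflicts: 1 }
```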
package/dist/pipeline/job-tracker.js

@@ -443,7 +443,7 @@ export class JobTracker extends EventEmitter {
             targetSystem: row.target_system,
             status: row.status,
             priority: row.priority,
-            config: row.config,
+            config: row.config || {},
             parameters: row.parameters,
             cpuRequest: row.cpu_request ? parseFloat(row.cpu_request) : undefined,
             memoryRequest: row.memory_request,
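A one-line change, but it closes a real null-safety hole: `pg` returns `null` for a NULL column, and downstream code that indexes or enumerates `job.config` would then throw. A sketch of the failure mode the fallback prevents (the row shape is illustrative):

```js
const row = { config: null };               // NULL column as returned by pg

// Before: job.config === null, so enumerating it throws:
// Object.keys(null)  ->  TypeError

const job = { config: row.config || {} };   // 0.9.0 fallback
console.log(Object.keys(job.config));       // [] — safe
```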
package/dist/pipeline/mcli-bridge.js

@@ -140,12 +140,14 @@ export class MCLIBridge extends EventEmitter {
     // Webhook handler for MCLI callbacks
     async handleWebhook(payload) {
         const { job_id, status, result, error, metrics, artifacts } = payload;
+        const jobIdStr = job_id;
         // Get pipeline job ID
-        let pipelineJobId = this.jobMapping.get(job_id);
-        if (!pipelineJobId && payload.metadata?.pipeline_job_id) {
-            pipelineJobId = payload.metadata.pipeline_job_id;
+        let pipelineJobId = this.jobMapping.get(jobIdStr);
+        const metadata = payload.metadata;
+        if (!pipelineJobId && metadata?.pipeline_job_id) {
+            pipelineJobId = metadata.pipeline_job_id;
             if (pipelineJobId) {
-                this.jobMapping.set(job_id, pipelineJobId);
+                this.jobMapping.set(jobIdStr, pipelineJobId);
             }
         }
         if (!pipelineJobId) {
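The webhook handler can thus resolve the pipeline job even when the in-memory `jobMapping` has no entry for `job_id`, by falling back to `metadata.pipeline_job_id` from the payload and caching the result. A standalone restatement of the lookup order (the payload is illustrative):

```js
function resolvePipelineJobId(jobMapping, payload) {
    let id = jobMapping.get(payload.job_id);
    if (!id && payload.metadata?.pipeline_job_id) {
        id = payload.metadata.pipeline_job_id;
        jobMapping.set(payload.job_id, id);  // cache for later callbacks
    }
    return id;
}

const mapping = new Map();
const id = resolvePipelineJobId(mapping, {
    job_id: 'mcli-123',
    metadata: { pipeline_job_id: 'pl-456' },
});
console.log(id, mapping.get('mcli-123'));  // 'pl-456' 'pl-456'
```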
@@ -169,7 +171,8 @@ export class MCLIBridge extends EventEmitter {
                 break;
             case 'failed':
             case 'error':
-                await this.jobTracker.failExecution(execution.id, error?.message || 'Job failed in MCLI', error);
+                const errorObj = error;
+                await this.jobTracker.failExecution(execution.id, errorObj?.message || 'Job failed in MCLI', error);
                 break;
             case 'cancelled':
                 await this.jobTracker.updateJobStatus(pipelineJobId, JobStatus.CANCELLED);
@@ -258,11 +261,13 @@ export class MCLIBridge extends EventEmitter {
     // Helper methods
     async updateJobExternalId(jobId, externalId) {
         // This would be implemented in JobTracker, but for now we'll use raw SQL
+        // eslint-disable-next-line @typescript-eslint/no-explicit-any
         const pool = this.jobTracker.pool;
         await pool.query('UPDATE pipeline_jobs SET external_id = $1 WHERE id = $2', [externalId, jobId]);
     }
     async getLatestExecution(jobId) {
         // This would be implemented in JobTracker
+        // eslint-disable-next-line @typescript-eslint/no-explicit-any
         const pool = this.jobTracker.pool;
         const result = await pool.query(`SELECT * FROM job_executions
       WHERE job_id = $1
@@ -271,6 +276,7 @@ export class MCLIBridge extends EventEmitter {
         if (result.rows.length === 0) {
             return null;
         }
+        // eslint-disable-next-line @typescript-eslint/no-explicit-any
         return this.jobTracker.parseExecutionRow(result.rows[0]);
     }
     // Health check
package/dist/pipeline/workflow-engine.js

@@ -208,22 +208,23 @@ export class WorkflowEngine extends EventEmitter {
     }
     async executeJobNode(execution, node) {
         // Create job from node configuration
+        const config = node.config;
         const jobConfig = {
             name: `${execution.runId}-${node.name}`,
-            type: node.config.type || 'workflow_job',
+            type: config.type || 'workflow_job',
             sourceSystem: 'workflow',
-            targetSystem: node.config.targetSystem || 'mcli',
+            targetSystem: config.targetSystem || 'mcli',
             status: JobStatus.PENDING,
-            priority: node.config.priority || JobPriority.NORMAL,
+            priority: config.priority || JobPriority.NORMAL,
             config: {
-                ...node.config,
+                ...config,
                 workflowExecutionId: execution.id,
                 workflowNodeId: node.id,
                 workflowRunId: execution.runId
             },
             parameters: {
                 ...execution.parameters,
-                ...(node.config.parameters || {})
+                ...(config.parameters || {})
             },
             owner: execution.triggeredBy,
             tags: [`workflow:${execution.workflowId}`, `run:${execution.runId}`]
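Behaviorally, the rewrite pins down the defaults a workflow node gets when its `config` omits fields. A runnable restatement of the defaulting (with a stand-in value for the package's `JobPriority` enum):

```js
// Stand-in; the package imports JobPriority from its own module.
const JobPriority = { NORMAL: 5 };

function jobDefaults(node) {
    const config = node.config;
    return {
        type: config.type || 'workflow_job',
        targetSystem: config.targetSystem || 'mcli',
        priority: config.priority || JobPriority.NORMAL,
    };
}

console.log(jobDefaults({ id: 'n1', name: 'train', config: {} }));
// -> { type: 'workflow_job', targetSystem: 'mcli', priority: 5 }
```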
@@ -262,7 +263,8 @@ export class WorkflowEngine extends EventEmitter {
         await this.checkAndContinueExecution(execution);
     }
     async executeWaitNode(execution, node) {
-        const waitMs = node.config.waitMs || 1000;
+        const config = node.config;
+        const waitMs = config.waitMs || 1000;
         setTimeout(async () => {
             const nodeState = execution.nodeStates[node.id];
             nodeState.status = NodeStatus.COMPLETED;
@@ -300,7 +302,8 @@ export class WorkflowEngine extends EventEmitter {
             nodeState.durationMs = nodeState.completedAt.getTime() - nodeState.startedAt.getTime();
         }
         if (status === 'failed') {
-            nodeState.error = data.errorMessage || 'Job failed';
+            const errorData = data;
+            nodeState.error = errorData.errorMessage || 'Job failed';
             // Check retry policy
             const workflow = await this.getWorkflow(targetExecution.workflowId);
             const node = workflow?.nodes.find(n => n.id === targetNodeId);
package/dist/services/cron/cron-registrar.js

@@ -48,22 +48,23 @@ export class CronCommandRegistrar extends BaseCommandRegistrar {
                 { flags: '-p, --priority <priority>', description: 'Priority (0-10)', defaultValue: '5' }
             ],
             action: async (templateId, options) => {
+                const opts = options;
                 const result = await this.withCronManager(async (manager) => {
                     const customizations = {};
-                    if (options.name)
-                        customizations.name = options.name;
-                    if (options.command)
-                        customizations.command = options.command;
-                    if (options.schedule)
-                        customizations.schedule = { cron: options.schedule };
-                    if (options.workingDir)
-                        customizations.workingDirectory = options.workingDir;
-                    if (options.env)
-                        customizations.environment = this.parseJSON(options.env, 'environment variables');
-                    if (options.tags)
-                        customizations.tags = this.parseTags(options.tags);
-                    if (options.priority)
-                        customizations.priority = parseInt(options.priority);
+                    if (opts.name)
+                        customizations.name = opts.name;
+                    if (opts.command)
+                        customizations.command = opts.command;
+                    if (opts.schedule)
+                        customizations.schedule = { cron: opts.schedule };
+                    if (opts.workingDir)
+                        customizations.workingDirectory = opts.workingDir;
+                    if (opts.env)
+                        customizations.environment = this.parseJSON(opts.env, 'environment variables');
+                    if (opts.tags)
+                        customizations.tags = this.parseTags(opts.tags);
+                    if (opts.priority)
+                        customizations.priority = parseInt(opts.priority);
                     return await manager.createJobFromTemplate(templateId, customizations);
                 });
                 this.logSuccess('Job created from template:');
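This is the same aliasing pattern as the catch blocks, applied to commander's untyped `options` object: bind it once as `opts`, then read every flag off the alias. A runnable sketch with illustrative option values:

```js
// Illustrative options object, shaped like what commander hands to the action.
const options = { schedule: '0 2 * * *', priority: '5' };

const opts = options;                       // single alias, as in the diff
const customizations = {};
if (opts.schedule)
    customizations.schedule = { cron: opts.schedule };
if (opts.priority)
    customizations.priority = parseInt(opts.priority);

console.log(customizations);
// -> { schedule: { cron: '0 2 * * *' }, priority: 5 }
```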
@@ -84,8 +85,9 @@ export class CronCommandRegistrar extends BaseCommandRegistrar {
                 { flags: '-f, --filter <filter>', description: 'Filter by status' }
             ],
             action: async (options) => {
+                const opts = options;
                 const jobs = await this.withCronManager(async (manager) => {
-                    return await manager.listJobs(options.filter ? { status: options.filter } : undefined);
+                    return await manager.listJobs(opts.filter ? { status: opts.filter } : undefined);
                 });
                 this.logInfo(`Cron Jobs (${jobs.length} total):`);
                 jobs.forEach(job => {
@@ -146,10 +148,11 @@ export class CronCommandRegistrar extends BaseCommandRegistrar {
                 { flags: '-s, --signal <signal>', description: 'Signal to send', defaultValue: 'SIGTERM' }
             ],
             action: async (jobId, options) => {
+                const opts = options;
                 await this.withCronManager(async (manager) => {
-                    await manager.stopJob(jobId, options.signal);
+                    await manager.stopJob(jobId, opts.signal);
                 });
-                this.logSuccess(`Job ${jobId} stopped with signal ${options.signal}`);
+                this.logSuccess(`Job ${jobId} stopped with signal ${opts.signal}`);
             }
         });
         // Remove job
@@ -161,8 +164,9 @@ export class CronCommandRegistrar extends BaseCommandRegistrar {
                 { flags: '-f, --force', description: 'Force removal', defaultValue: false }
             ],
             action: async (jobId, options) => {
+                const opts = options;
                 await this.withCronManager(async (manager) => {
-                    await manager.removeJob(jobId, options.force);
+                    await manager.removeJob(jobId, opts.force);
                 });
                 this.logSuccess(`Job ${jobId} removed`);
             }
@@ -218,13 +222,14 @@ export class CronCommandRegistrar extends BaseCommandRegistrar {
                 { flags: '-o, --output <file>', description: 'Output file path' }
             ],
             action: async (options) => {
+                const opts = options;
                 const data = await this.withCronManager(async (manager) => {
-                    return await manager.exportJobData(options.format);
+                    return await manager.exportJobData(opts.format);
                 });
-                if (options.output) {
+                if (opts.output) {
                     const fs = await import('fs');
-                    fs.writeFileSync(options.output, data);
-                    this.logSuccess(`Data exported to ${options.output}`);
+                    fs.writeFileSync(opts.output, data);
+                    this.logSuccess(`Data exported to ${opts.output}`);
                 }
                 else {
                     this.logInfo(data);