lsh-framework 1.2.0 → 1.3.0
This diff covers publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in the public registry.
- package/README.md +40 -3
- package/dist/cli.js +104 -486
- package/dist/commands/doctor.js +427 -0
- package/dist/commands/init.js +371 -0
- package/dist/constants/api.js +94 -0
- package/dist/constants/commands.js +64 -0
- package/dist/constants/config.js +56 -0
- package/dist/constants/database.js +21 -0
- package/dist/constants/errors.js +79 -0
- package/dist/constants/index.js +28 -0
- package/dist/constants/paths.js +28 -0
- package/dist/constants/ui.js +73 -0
- package/dist/constants/validation.js +124 -0
- package/dist/daemon/lshd.js +11 -32
- package/dist/lib/daemon-client-helper.js +7 -4
- package/dist/lib/daemon-client.js +9 -2
- package/dist/lib/format-utils.js +163 -0
- package/dist/lib/fuzzy-match.js +123 -0
- package/dist/lib/job-manager.js +2 -1
- package/dist/lib/platform-utils.js +211 -0
- package/dist/lib/secrets-manager.js +11 -1
- package/dist/lib/string-utils.js +128 -0
- package/dist/services/daemon/daemon-registrar.js +3 -2
- package/dist/services/secrets/secrets.js +119 -59
- package/package.json +10 -74
- package/dist/app.js +0 -33
- package/dist/cicd/analytics.js +0 -261
- package/dist/cicd/auth.js +0 -269
- package/dist/cicd/cache-manager.js +0 -172
- package/dist/cicd/data-retention.js +0 -305
- package/dist/cicd/performance-monitor.js +0 -224
- package/dist/cicd/webhook-receiver.js +0 -640
- package/dist/commands/api.js +0 -346
- package/dist/commands/theme.js +0 -261
- package/dist/commands/zsh-import.js +0 -240
- package/dist/components/App.js +0 -1
- package/dist/components/Divider.js +0 -29
- package/dist/components/REPL.js +0 -43
- package/dist/components/Terminal.js +0 -232
- package/dist/components/UserInput.js +0 -30
- package/dist/daemon/api-server.js +0 -316
- package/dist/daemon/monitoring-api.js +0 -220
- package/dist/lib/api-error-handler.js +0 -185
- package/dist/lib/associative-arrays.js +0 -285
- package/dist/lib/base-api-server.js +0 -290
- package/dist/lib/brace-expansion.js +0 -160
- package/dist/lib/builtin-commands.js +0 -439
- package/dist/lib/executors/builtin-executor.js +0 -52
- package/dist/lib/extended-globbing.js +0 -411
- package/dist/lib/extended-parameter-expansion.js +0 -227
- package/dist/lib/interactive-shell.js +0 -460
- package/dist/lib/job-builtins.js +0 -582
- package/dist/lib/pathname-expansion.js +0 -216
- package/dist/lib/script-runner.js +0 -226
- package/dist/lib/shell-executor.js +0 -2504
- package/dist/lib/shell-parser.js +0 -958
- package/dist/lib/shell-types.js +0 -6
- package/dist/lib/shell.lib.js +0 -40
- package/dist/lib/theme-manager.js +0 -476
- package/dist/lib/variable-expansion.js +0 -385
- package/dist/lib/zsh-compatibility.js +0 -659
- package/dist/lib/zsh-import-manager.js +0 -707
- package/dist/lib/zsh-options.js +0 -328
- package/dist/pipeline/job-tracker.js +0 -491
- package/dist/pipeline/mcli-bridge.js +0 -309
- package/dist/pipeline/pipeline-service.js +0 -1119
- package/dist/pipeline/workflow-engine.js +0 -870
- package/dist/services/api/api.js +0 -58
- package/dist/services/api/auth.js +0 -35
- package/dist/services/api/config.js +0 -7
- package/dist/services/api/file.js +0 -22
- package/dist/services/shell/shell.js +0 -28
- package/dist/services/zapier.js +0 -16
- package/dist/simple-api-server.js +0 -148
package/dist/pipeline/job-tracker.js
@@ -1,491 +0,0 @@
-import { EventEmitter } from 'events';
-import { v4 as uuidv4 } from 'uuid';
-export var JobStatus;
-(function (JobStatus) {
-    JobStatus["PENDING"] = "pending";
-    JobStatus["QUEUED"] = "queued";
-    JobStatus["RUNNING"] = "running";
-    JobStatus["COMPLETED"] = "completed";
-    JobStatus["FAILED"] = "failed";
-    JobStatus["CANCELLED"] = "cancelled";
-    JobStatus["RETRYING"] = "retrying";
-})(JobStatus || (JobStatus = {}));
-export var JobPriority;
-(function (JobPriority) {
-    JobPriority["LOW"] = "low";
-    JobPriority["NORMAL"] = "normal";
-    JobPriority["HIGH"] = "high";
-    JobPriority["CRITICAL"] = "critical";
-})(JobPriority || (JobPriority = {}));
-export class JobTracker extends EventEmitter {
-    pool;
-    pollingInterval = null;
-    constructor(pool) {
-        super();
-        this.pool = pool;
-    }
-    // Job Management
-    async createJob(job) {
-        const id = job.id || uuidv4();
-        const query = `
-            INSERT INTO pipeline_jobs (
-                id, external_id, name, type, source_system, target_system,
-                status, priority, config, parameters, cpu_request, memory_request,
-                gpu_request, scheduled_at, tags, labels, owner, team, created_by
-            ) VALUES (
-                $1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13, $14, $15, $16, $17, $18, $19
-            ) RETURNING *
-        `;
-        const values = [
-            id,
-            job.externalId,
-            job.name,
-            job.type,
-            job.sourceSystem || 'lsh',
-            job.targetSystem || 'mcli',
-            job.status || JobStatus.PENDING,
-            job.priority || JobPriority.NORMAL,
-            JSON.stringify(job.config),
-            job.parameters ? JSON.stringify(job.parameters) : null,
-            job.cpuRequest,
-            job.memoryRequest,
-            job.gpuRequest || 0,
-            job.scheduledAt,
-            job.tags,
-            job.labels ? JSON.stringify(job.labels) : null,
-            job.owner,
-            job.team,
-            job.createdBy
-        ];
-        const result = await this.pool.query(query, values);
-        const createdJob = this.parseJobRow(result.rows[0]);
-        // Emit job created event
-        this.emit('job:created', {
-            type: 'job:created',
-            jobId: createdJob.id,
-            data: createdJob,
-            timestamp: new Date()
-        });
-        // Create initial execution
-        await this.createExecution(createdJob.id);
-        return createdJob;
-    }
-    async getJob(jobId) {
-        const query = 'SELECT * FROM pipeline_jobs WHERE id = $1';
-        const result = await this.pool.query(query, [jobId]);
-        if (result.rows.length === 0) {
-            return null;
-        }
-        return this.parseJobRow(result.rows[0]);
-    }
-    async updateJobStatus(jobId, status, errorMessage) {
-        const query = `
-            UPDATE pipeline_jobs
-            SET status = $2, updated_at = CURRENT_TIMESTAMP
-            WHERE id = $1
-        `;
-        await this.pool.query(query, [jobId, status]);
-        // Update latest execution
-        const execQuery = `
-            UPDATE job_executions
-            SET status = $2, error_message = $3
-            WHERE job_id = $1 AND execution_number = (
-                SELECT MAX(execution_number) FROM job_executions WHERE job_id = $1
-            )
-        `;
-        await this.pool.query(execQuery, [jobId, status, errorMessage]);
-        // Emit status change event
-        this.emit('job:status_changed', {
-            type: 'job:status_changed',
-            jobId,
-            data: { status, errorMessage },
-            timestamp: new Date()
-        });
-    }
-    async listJobs(filters = {}) {
-        let whereClause = 'WHERE 1=1';
-        const values = [];
-        let paramCount = 0;
-        if (filters.status) {
-            whereClause += ` AND status = $${++paramCount}`;
-            values.push(filters.status);
-        }
-        if (filters.sourceSystem) {
-            whereClause += ` AND source_system = $${++paramCount}`;
-            values.push(filters.sourceSystem);
-        }
-        if (filters.targetSystem) {
-            whereClause += ` AND target_system = $${++paramCount}`;
-            values.push(filters.targetSystem);
-        }
-        if (filters.owner) {
-            whereClause += ` AND owner = $${++paramCount}`;
-            values.push(filters.owner);
-        }
-        if (filters.team) {
-            whereClause += ` AND team = $${++paramCount}`;
-            values.push(filters.team);
-        }
-        // Get total count
-        const countQuery = `SELECT COUNT(*) FROM pipeline_jobs ${whereClause}`;
-        const countResult = await this.pool.query(countQuery, values);
-        const total = parseInt(countResult.rows[0].count);
-        // Get jobs with pagination
-        const limit = filters.limit || 50;
-        const offset = filters.offset || 0;
-        const jobsQuery = `
-            SELECT * FROM pipeline_jobs
-            ${whereClause}
-            ORDER BY created_at DESC
-            LIMIT $${++paramCount} OFFSET $${++paramCount}
-        `;
-        values.push(limit, offset);
-        const jobsResult = await this.pool.query(jobsQuery, values);
-        const jobs = jobsResult.rows.map(row => this.parseJobRow(row));
-        return { jobs, total };
-    }
-    // Execution Management
-    async createExecution(jobId) {
-        const executionNumber = await this.getNextExecutionNumber(jobId);
-        const query = `
-            INSERT INTO job_executions (
-                id, job_id, execution_number, status
-            ) VALUES (
-                $1, $2, $3, $4
-            ) RETURNING *
-        `;
-        const id = uuidv4();
-        const values = [id, jobId, executionNumber, JobStatus.PENDING];
-        const result = await this.pool.query(query, values);
-        const execution = this.parseExecutionRow(result.rows[0]);
-        // Emit execution created event
-        this.emit('execution:created', {
-            type: 'execution:created',
-            jobId,
-            executionId: execution.id,
-            data: execution,
-            timestamp: new Date()
-        });
-        return execution;
-    }
-    async startExecution(executionId, executor, workerNode) {
-        const query = `
-            UPDATE job_executions
-            SET status = $2, started_at = CURRENT_TIMESTAMP, executor = $3, worker_node = $4
-            WHERE id = $1
-        `;
-        await this.pool.query(query, [executionId, JobStatus.RUNNING, executor, workerNode]);
-        // Get job ID for event
-        const execResult = await this.pool.query('SELECT job_id FROM job_executions WHERE id = $1', [executionId]);
-        const jobId = execResult.rows[0]?.job_id;
-        if (jobId) {
-            // Update job status
-            await this.updateJobStatus(jobId, JobStatus.RUNNING);
-            // Emit execution started event
-            this.emit('execution:started', {
-                type: 'execution:started',
-                jobId,
-                executionId,
-                data: { executor, workerNode },
-                timestamp: new Date()
-            });
-        }
-    }
-    async completeExecution(executionId, result, metrics, outputDatasets) {
-        const query = `
-            UPDATE job_executions
-            SET
-                status = $2,
-                completed_at = CURRENT_TIMESTAMP,
-                duration_ms = EXTRACT(EPOCH FROM (CURRENT_TIMESTAMP - started_at)) * 1000,
-                result = $3,
-                metrics = $4,
-                output_datasets = $5
-            WHERE id = $1
-            RETURNING job_id, duration_ms
-        `;
-        const values = [
-            executionId,
-            JobStatus.COMPLETED,
-            JSON.stringify(result),
-            metrics ? JSON.stringify(metrics) : null,
-            outputDatasets ? JSON.stringify(outputDatasets) : null
-        ];
-        const execResult = await this.pool.query(query, values);
-        const jobId = execResult.rows[0]?.job_id;
-        const durationMs = execResult.rows[0]?.duration_ms;
-        if (jobId) {
-            // Update job status
-            await this.updateJobStatus(jobId, JobStatus.COMPLETED);
-            // Record metrics
-            if (durationMs) {
-                await this.recordMetric(jobId, executionId, 'execution_duration', durationMs, 'ms');
-            }
-            // Emit execution completed event
-            this.emit('execution:completed', {
-                type: 'execution:completed',
-                jobId,
-                executionId,
-                data: { result, metrics, durationMs },
-                timestamp: new Date()
-            });
-        }
-    }
-    async failExecution(executionId, errorMessage, errorDetails) {
-        const query = `
-            UPDATE job_executions
-            SET
-                status = $2,
-                completed_at = CURRENT_TIMESTAMP,
-                duration_ms = EXTRACT(EPOCH FROM (CURRENT_TIMESTAMP - started_at)) * 1000,
-                error_message = $3,
-                error_details = $4
-            WHERE id = $1
-            RETURNING job_id, retry_count
-        `;
-        const values = [
-            executionId,
-            JobStatus.FAILED,
-            errorMessage,
-            errorDetails ? JSON.stringify(errorDetails) : null
-        ];
-        const execResult = await this.pool.query(query, values);
-        const jobId = execResult.rows[0]?.job_id;
-        const retryCount = execResult.rows[0]?.retry_count || 0;
-        if (jobId) {
-            // Check if we should retry
-            const shouldRetry = retryCount < 3; // Max 3 retries
-            if (shouldRetry) {
-                await this.scheduleRetry(executionId, retryCount + 1);
-                await this.updateJobStatus(jobId, JobStatus.RETRYING);
-            }
-            else {
-                await this.updateJobStatus(jobId, JobStatus.FAILED, errorMessage);
-            }
-            // Emit execution failed event
-            this.emit('execution:failed', {
-                type: 'execution:failed',
-                jobId,
-                executionId,
-                data: { errorMessage, errorDetails, retryCount, willRetry: shouldRetry },
-                timestamp: new Date()
-            });
-        }
-    }
-    async scheduleRetry(executionId, retryCount) {
-        // Exponential backoff: 1min, 2min, 4min
-        const delayMinutes = Math.pow(2, retryCount - 1);
-        const retryAfter = new Date(Date.now() + delayMinutes * 60 * 1000);
-        const query = `
-            UPDATE job_executions
-            SET retry_count = $2, retry_after = $3, status = $4
-            WHERE id = $1
-        `;
-        await this.pool.query(query, [executionId, retryCount, retryAfter, JobStatus.RETRYING]);
-    }
-    // Metrics
-    async recordMetric(jobId, executionId, metricName, metricValue, metricUnit, tags) {
-        const query = `
-            INSERT INTO pipeline_metrics (
-                id, job_id, execution_id, metric_name, metric_value, metric_unit, tags
-            ) VALUES (
-                $1, $2, $3, $4, $5, $6, $7
-            )
-        `;
-        const values = [
-            uuidv4(),
-            jobId,
-            executionId,
-            metricName,
-            metricValue,
-            metricUnit,
-            tags ? JSON.stringify(tags) : null
-        ];
-        await this.pool.query(query, values);
-    }
-    async getJobMetrics(jobId, metricName) {
-        let query = 'SELECT * FROM pipeline_metrics WHERE job_id = $1';
-        const values = [jobId];
-        if (metricName) {
-            query += ' AND metric_name = $2';
-            values.push(metricName);
-        }
-        query += ' ORDER BY recorded_at DESC LIMIT 100';
-        const result = await this.pool.query(query, values);
-        return result.rows;
-    }
-    // Event Recording
-    async recordEvent(eventType, eventSource, eventData, jobId, executionId) {
-        const query = `
-            INSERT INTO pipeline_events (
-                id, event_type, event_source, event_data, job_id, execution_id
-            ) VALUES (
-                $1, $2, $3, $4, $5, $6
-            )
-        `;
-        const values = [
-            uuidv4(),
-            eventType,
-            eventSource,
-            JSON.stringify(eventData),
-            jobId,
-            executionId
-        ];
-        await this.pool.query(query, values);
-    }
-    // Monitoring
-    async getActiveJobs() {
-        const query = `
-            SELECT * FROM active_jobs
-            ORDER BY created_at DESC
-        `;
-        const result = await this.pool.query(query);
-        return result.rows.map(row => this.parseJobRow(row));
-    }
-    async getJobSuccessRates() {
-        const query = 'SELECT * FROM job_success_rates';
-        const result = await this.pool.query(query);
-        return result.rows;
-    }
-    // Polling for updates
-    startPolling(intervalMs = 5000) {
-        if (this.pollingInterval) {
-            return;
-        }
-        this.pollingInterval = setInterval(async () => {
-            try {
-                // Check for retries that are due
-                await this.processRetries();
-                // Check for scheduled jobs
-                await this.processScheduledJobs();
-                // Check for stuck jobs
-                await this.checkStuckJobs();
-            }
-            catch (error) {
-                console.error('Polling error:', error);
-            }
-        }, intervalMs);
-    }
-    stopPolling() {
-        if (this.pollingInterval) {
-            clearInterval(this.pollingInterval);
-            this.pollingInterval = null;
-        }
-    }
-    async processRetries() {
-        const query = `
-            SELECT id, job_id FROM job_executions
-            WHERE status = 'retrying' AND retry_after <= CURRENT_TIMESTAMP
-        `;
-        const result = await this.pool.query(query);
-        for (const row of result.rows) {
-            // Create new execution for retry
-            const newExecution = await this.createExecution(row.job_id);
-            // Emit retry event
-            this.emit('job:retry', {
-                type: 'job:retry',
-                jobId: row.job_id,
-                executionId: newExecution.id,
-                data: { originalExecutionId: row.id },
-                timestamp: new Date()
-            });
-        }
-    }
-    async processScheduledJobs() {
-        const query = `
-            UPDATE pipeline_jobs
-            SET status = 'queued'
-            WHERE status = 'pending' AND scheduled_at <= CURRENT_TIMESTAMP
-            RETURNING id
-        `;
-        const result = await this.pool.query(query);
-        for (const row of result.rows) {
-            this.emit('job:queued', {
-                type: 'job:queued',
-                jobId: row.id,
-                timestamp: new Date()
-            });
-        }
-    }
-    async checkStuckJobs() {
-        // Jobs running for more than 1 hour without updates
-        const query = `
-            SELECT j.id, e.id as execution_id
-            FROM pipeline_jobs j
-            JOIN job_executions e ON j.id = e.job_id
-            WHERE j.status = 'running'
-            AND e.started_at < CURRENT_TIMESTAMP - INTERVAL '1 hour'
-            AND e.status = 'running'
-        `;
-        const result = await this.pool.query(query);
-        for (const row of result.rows) {
-            this.emit('job:stuck', {
-                type: 'job:stuck',
-                jobId: row.id,
-                executionId: row.execution_id,
-                timestamp: new Date()
-            });
-        }
-    }
-    // Helper methods
-    async getNextExecutionNumber(jobId) {
-        const query = 'SELECT get_next_execution_number($1) as num';
-        const result = await this.pool.query(query, [jobId]);
-        return result.rows[0].num;
-    }
-    parseJobRow(row) {
-        return {
-            id: row.id,
-            externalId: row.external_id,
-            name: row.name,
-            type: row.type,
-            sourceSystem: row.source_system,
-            targetSystem: row.target_system,
-            status: row.status,
-            priority: row.priority,
-            config: row.config || {},
-            parameters: row.parameters,
-            cpuRequest: row.cpu_request ? parseFloat(row.cpu_request) : undefined,
-            memoryRequest: row.memory_request,
-            gpuRequest: row.gpu_request,
-            scheduledAt: row.scheduled_at,
-            tags: row.tags,
-            labels: row.labels,
-            owner: row.owner,
-            team: row.team,
-            createdBy: row.created_by
-        };
-    }
-    parseExecutionRow(row) {
-        return {
-            id: row.id,
-            jobId: row.job_id,
-            executionNumber: row.execution_number,
-            status: row.status,
-            startedAt: row.started_at,
-            completedAt: row.completed_at,
-            durationMs: row.duration_ms,
-            cpuUsed: row.cpu_used ? parseFloat(row.cpu_used) : undefined,
-            memoryUsed: row.memory_used,
-            gpuUsed: row.gpu_used,
-            executor: row.executor,
-            workerNode: row.worker_node,
-            containerId: row.container_id,
-            inputDatasets: row.input_datasets,
-            outputDatasets: row.output_datasets,
-            artifacts: row.artifacts,
-            result: row.result,
-            errorMessage: row.error_message,
-            errorDetails: row.error_details,
-            retryCount: row.retry_count,
-            retryAfter: row.retry_after,
-            logUrl: row.log_url,
-            metrics: row.metrics
-        };
-    }
-    // Cleanup
-    async cleanup() {
-        this.stopPolling();
-        this.removeAllListeners();
-    }
-}