lsh-framework 1.2.0 → 1.3.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +40 -3
- package/dist/cli.js +104 -486
- package/dist/commands/doctor.js +427 -0
- package/dist/commands/init.js +371 -0
- package/dist/constants/api.js +94 -0
- package/dist/constants/commands.js +64 -0
- package/dist/constants/config.js +56 -0
- package/dist/constants/database.js +21 -0
- package/dist/constants/errors.js +79 -0
- package/dist/constants/index.js +28 -0
- package/dist/constants/paths.js +28 -0
- package/dist/constants/ui.js +73 -0
- package/dist/constants/validation.js +124 -0
- package/dist/daemon/lshd.js +11 -32
- package/dist/lib/daemon-client-helper.js +7 -4
- package/dist/lib/daemon-client.js +9 -2
- package/dist/lib/format-utils.js +163 -0
- package/dist/lib/fuzzy-match.js +123 -0
- package/dist/lib/job-manager.js +2 -1
- package/dist/lib/platform-utils.js +211 -0
- package/dist/lib/secrets-manager.js +11 -1
- package/dist/lib/string-utils.js +128 -0
- package/dist/services/daemon/daemon-registrar.js +3 -2
- package/dist/services/secrets/secrets.js +119 -59
- package/package.json +10 -74
- package/dist/app.js +0 -33
- package/dist/cicd/analytics.js +0 -261
- package/dist/cicd/auth.js +0 -269
- package/dist/cicd/cache-manager.js +0 -172
- package/dist/cicd/data-retention.js +0 -305
- package/dist/cicd/performance-monitor.js +0 -224
- package/dist/cicd/webhook-receiver.js +0 -640
- package/dist/commands/api.js +0 -346
- package/dist/commands/theme.js +0 -261
- package/dist/commands/zsh-import.js +0 -240
- package/dist/components/App.js +0 -1
- package/dist/components/Divider.js +0 -29
- package/dist/components/REPL.js +0 -43
- package/dist/components/Terminal.js +0 -232
- package/dist/components/UserInput.js +0 -30
- package/dist/daemon/api-server.js +0 -316
- package/dist/daemon/monitoring-api.js +0 -220
- package/dist/lib/api-error-handler.js +0 -185
- package/dist/lib/associative-arrays.js +0 -285
- package/dist/lib/base-api-server.js +0 -290
- package/dist/lib/brace-expansion.js +0 -160
- package/dist/lib/builtin-commands.js +0 -439
- package/dist/lib/executors/builtin-executor.js +0 -52
- package/dist/lib/extended-globbing.js +0 -411
- package/dist/lib/extended-parameter-expansion.js +0 -227
- package/dist/lib/interactive-shell.js +0 -460
- package/dist/lib/job-builtins.js +0 -582
- package/dist/lib/pathname-expansion.js +0 -216
- package/dist/lib/script-runner.js +0 -226
- package/dist/lib/shell-executor.js +0 -2504
- package/dist/lib/shell-parser.js +0 -958
- package/dist/lib/shell-types.js +0 -6
- package/dist/lib/shell.lib.js +0 -40
- package/dist/lib/theme-manager.js +0 -476
- package/dist/lib/variable-expansion.js +0 -385
- package/dist/lib/zsh-compatibility.js +0 -659
- package/dist/lib/zsh-import-manager.js +0 -707
- package/dist/lib/zsh-options.js +0 -328
- package/dist/pipeline/job-tracker.js +0 -491
- package/dist/pipeline/mcli-bridge.js +0 -309
- package/dist/pipeline/pipeline-service.js +0 -1119
- package/dist/pipeline/workflow-engine.js +0 -870
- package/dist/services/api/api.js +0 -58
- package/dist/services/api/auth.js +0 -35
- package/dist/services/api/config.js +0 -7
- package/dist/services/api/file.js +0 -22
- package/dist/services/shell/shell.js +0 -28
- package/dist/services/zapier.js +0 -16
- package/dist/simple-api-server.js +0 -148
|
@@ -1,1119 +0,0 @@
|
|
|
1
|
-
import express, { Router } from 'express';
|
|
2
|
-
import { Pool } from 'pg';
|
|
3
|
-
import { JobTracker, JobStatus } from './job-tracker.js';
|
|
4
|
-
import { MCLIBridge } from './mcli-bridge.js';
|
|
5
|
-
import { WorkflowEngine } from './workflow-engine.js';
|
|
6
|
-
import { Server } from 'socket.io';
|
|
7
|
-
import { createServer } from 'http';
|
|
8
|
-
import * as path from 'path';
|
|
9
|
-
import { fileURLToPath } from 'url';
|
|
10
|
-
import * as fs from 'fs';
|
|
11
|
-
import { execSync, spawn } from 'child_process';
|
|
12
|
-
import { createProxyMiddleware } from 'http-proxy-middleware';
|
|
13
|
-
// Compatibility function for ES modules and CommonJS
|
|
14
|
-
function getCurrentDirname() {
|
|
15
|
-
// Use eval to avoid TypeScript compilation issues in CommonJS mode
|
|
16
|
-
try {
|
|
17
|
-
const importMeta = eval('import.meta');
|
|
18
|
-
return path.dirname(fileURLToPath(importMeta.url));
|
|
19
|
-
}
|
|
20
|
-
catch {
|
|
21
|
-
// Use src/pipeline directory as fallback for testing
|
|
22
|
-
return path.join(process.cwd(), 'src', 'pipeline');
|
|
23
|
-
}
|
|
24
|
-
}
|
|
25
|
-
const currentDir = getCurrentDirname();
|
|
26
|
-
export class PipelineService {
|
|
27
|
-
app;
|
|
28
|
-
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
|
29
|
-
server;
|
|
30
|
-
io;
|
|
31
|
-
pool;
|
|
32
|
-
jobTracker;
|
|
33
|
-
mcliBridge;
|
|
34
|
-
workflowEngine;
|
|
35
|
-
config;
|
|
36
|
-
isDemoMode = false;
|
|
37
|
-
streamlitProcess = null;
|
|
38
|
-
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
|
39
|
-
getSystemJobs() {
|
|
40
|
-
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
|
41
|
-
const jobs = [];
|
|
42
|
-
const monitoringJobs = [
|
|
43
|
-
{ script: 'db-health-monitor', name: 'Database Health Monitor', type: 'monitoring', owner: 'ops-team', schedule: '*/5 * * * *' },
|
|
44
|
-
{ script: 'politician-trading-monitor', name: 'Politician Trading Monitor', type: 'data-ingestion', owner: 'data-team', schedule: '*/30 * * * *' },
|
|
45
|
-
{ script: 'shell-analytics', name: 'Shell Analytics', type: 'analytics', owner: 'analytics-team', schedule: '0 * * * *' },
|
|
46
|
-
{ script: 'data-consistency-check', name: 'Data Consistency Check', type: 'validation', owner: 'data-team', schedule: '0 */6 * * *' },
|
|
47
|
-
{ script: 'performance-monitor', name: 'Performance Monitor', type: 'monitoring', owner: 'ops-team', schedule: '*/15 * * * *' },
|
|
48
|
-
{ script: 'alert-monitor', name: 'Alert Monitor', type: 'alerting', owner: 'ops-team', schedule: '*/2 * * * *' },
|
|
49
|
-
{ script: 'daily-summary', name: 'Daily Summary Report', type: 'reporting', owner: 'management', schedule: '0 9 * * *' },
|
|
50
|
-
{ script: 'log-cleanup', name: 'Log Cleanup', type: 'maintenance', owner: 'ops-team', schedule: '0 2 * * *' }
|
|
51
|
-
];
|
|
52
|
-
monitoringJobs.forEach((job, _index) => {
|
|
53
|
-
const logPath = `/Users/lefv/repos/lsh/logs/${job.script}.log`;
|
|
54
|
-
let status = 'unknown';
|
|
55
|
-
let lastRun = new Date(Date.now() - Math.random() * 86400000).toISOString();
|
|
56
|
-
let progress = 0;
|
|
57
|
-
// Try to read actual log file for status
|
|
58
|
-
try {
|
|
59
|
-
if (fs.existsSync(logPath)) {
|
|
60
|
-
const stats = fs.statSync(logPath);
|
|
61
|
-
lastRun = stats.mtime.toISOString();
|
|
62
|
-
// Check if job is currently running based on schedule
|
|
63
|
-
const _now = new Date();
|
|
64
|
-
const schedulePattern = job.schedule;
|
|
65
|
-
if (schedulePattern.includes('*/2')) {
|
|
66
|
-
status = 'running';
|
|
67
|
-
progress = Math.floor(Math.random() * 100);
|
|
68
|
-
}
|
|
69
|
-
else if (schedulePattern.includes('*/5') || schedulePattern.includes('*/15')) {
|
|
70
|
-
status = Math.random() > 0.5 ? 'running' : 'completed';
|
|
71
|
-
progress = status === 'running' ? Math.floor(Math.random() * 100) : 100;
|
|
72
|
-
}
|
|
73
|
-
else {
|
|
74
|
-
status = 'completed';
|
|
75
|
-
progress = 100;
|
|
76
|
-
}
|
|
77
|
-
}
|
|
78
|
-
}
|
|
79
|
-
catch (_error) {
|
|
80
|
-
// If can't read log, use defaults
|
|
81
|
-
}
|
|
82
|
-
jobs.push({
|
|
83
|
-
id: `job-${job.script}`,
|
|
84
|
-
name: job.name,
|
|
85
|
-
type: job.type,
|
|
86
|
-
owner: job.owner,
|
|
87
|
-
status,
|
|
88
|
-
sourceSystem: 'lsh-cron',
|
|
89
|
-
targetSystem: job.type === 'data-ingestion' ? 'database' : 'monitoring',
|
|
90
|
-
schedule: job.schedule,
|
|
91
|
-
createdAt: new Date(Date.now() - 7 * 86400000).toISOString(),
|
|
92
|
-
updatedAt: lastRun,
|
|
93
|
-
progress
|
|
94
|
-
});
|
|
95
|
-
});
|
|
96
|
-
return jobs;
|
|
97
|
-
}
|
|
98
|
-
constructor(config = {}) {
|
|
99
|
-
const port = config.port || parseInt(process.env.PORT || '3034', 10);
|
|
100
|
-
this.config = {
|
|
101
|
-
port,
|
|
102
|
-
databaseUrl: config.databaseUrl || process.env.DATABASE_URL || 'postgresql://localhost:5432/pipeline',
|
|
103
|
-
mcliUrl: config.mcliUrl || process.env.MCLI_URL || 'http://localhost:8000',
|
|
104
|
-
mcliApiKey: config.mcliApiKey || process.env.MCLI_API_KEY,
|
|
105
|
-
webhookBaseUrl: config.webhookBaseUrl || `http://localhost:${port}`
|
|
106
|
-
};
|
|
107
|
-
// Initialize database pool
|
|
108
|
-
this.pool = new Pool({
|
|
109
|
-
connectionString: this.config.databaseUrl
|
|
110
|
-
});
|
|
111
|
-
// Initialize services
|
|
112
|
-
this.jobTracker = new JobTracker(this.pool);
|
|
113
|
-
this.mcliBridge = new MCLIBridge({
|
|
114
|
-
baseUrl: this.config.mcliUrl,
|
|
115
|
-
apiKey: this.config.mcliApiKey,
|
|
116
|
-
webhookUrl: this.config.webhookBaseUrl
|
|
117
|
-
}, this.jobTracker);
|
|
118
|
-
this.workflowEngine = new WorkflowEngine(this.pool, this.jobTracker);
|
|
119
|
-
// Initialize Express app
|
|
120
|
-
this.app = express();
|
|
121
|
-
this.server = createServer(this.app);
|
|
122
|
-
this.io = new Server(this.server, {
|
|
123
|
-
cors: {
|
|
124
|
-
origin: "*",
|
|
125
|
-
methods: ["GET", "POST"]
|
|
126
|
-
}
|
|
127
|
-
});
|
|
128
|
-
this.setupMiddleware();
|
|
129
|
-
this.setupRoutes();
|
|
130
|
-
this.setupWebSocket();
|
|
131
|
-
this.setupEventListeners();
|
|
132
|
-
this.startStreamlit();
|
|
133
|
-
}
|
|
134
|
-
async startStreamlit() {
|
|
135
|
-
try {
|
|
136
|
-
// Check if Streamlit is already running on port 8501
|
|
137
|
-
const checkCmd = 'lsof -i :8501';
|
|
138
|
-
try {
|
|
139
|
-
execSync(checkCmd, { stdio: 'ignore' });
|
|
140
|
-
console.log('✅ Streamlit ML Dashboard is already running on port 8501');
|
|
141
|
-
return;
|
|
142
|
-
}
|
|
143
|
-
catch {
|
|
144
|
-
// Port is free, continue to start Streamlit
|
|
145
|
-
}
|
|
146
|
-
console.log('🚀 Starting Streamlit ML Dashboard...');
|
|
147
|
-
// Path to MCLI repo and Streamlit app
|
|
148
|
-
const mcliPath = '/Users/lefv/repos/mcli';
|
|
149
|
-
const streamlitAppPath = 'src/mcli/ml/dashboard/app_supabase.py';
|
|
150
|
-
// Start Streamlit process
|
|
151
|
-
this.streamlitProcess = spawn('uv', [
|
|
152
|
-
'run', 'streamlit', 'run', streamlitAppPath,
|
|
153
|
-
'--server.port', '8501',
|
|
154
|
-
'--server.address', 'localhost',
|
|
155
|
-
'--browser.gatherUsageStats', 'false',
|
|
156
|
-
'--server.headless', 'true'
|
|
157
|
-
], {
|
|
158
|
-
cwd: mcliPath,
|
|
159
|
-
stdio: ['pipe', 'pipe', 'pipe']
|
|
160
|
-
});
|
|
161
|
-
// Handle process output
|
|
162
|
-
this.streamlitProcess.stdout?.on('data', (data) => {
|
|
163
|
-
const output = data.toString();
|
|
164
|
-
if (output.includes('You can now view your Streamlit app')) {
|
|
165
|
-
console.log('✅ Streamlit ML Dashboard started successfully at http://localhost:8501');
|
|
166
|
-
}
|
|
167
|
-
});
|
|
168
|
-
this.streamlitProcess.stderr?.on('data', (data) => {
|
|
169
|
-
const error = data.toString();
|
|
170
|
-
if (!error.includes('WARNING') && !error.includes('INFO')) {
|
|
171
|
-
console.error('❌ Streamlit Error:', error);
|
|
172
|
-
}
|
|
173
|
-
});
|
|
174
|
-
this.streamlitProcess.on('exit', (code) => {
|
|
175
|
-
if (code !== 0 && code !== null) {
|
|
176
|
-
console.error(`❌ Streamlit process exited with code ${code}`);
|
|
177
|
-
}
|
|
178
|
-
this.streamlitProcess = null;
|
|
179
|
-
});
|
|
180
|
-
// Wait a moment for Streamlit to start
|
|
181
|
-
await new Promise(resolve => setTimeout(resolve, 3000));
|
|
182
|
-
}
|
|
183
|
-
catch (error) {
|
|
184
|
-
console.error('❌ Failed to start Streamlit ML Dashboard:', error);
|
|
185
|
-
}
|
|
186
|
-
}
|
|
187
|
-
setupMiddleware() {
|
|
188
|
-
this.app.use(express.json({ limit: '10mb' }));
|
|
189
|
-
this.app.use(express.urlencoded({ extended: true }));
|
|
190
|
-
// Serve dashboard from src directory
|
|
191
|
-
const dashboardPath = path.join(currentDir, '..', '..', 'src', 'pipeline', 'dashboard');
|
|
192
|
-
console.log(`Serving dashboard from: ${dashboardPath}`);
|
|
193
|
-
this.app.use('/dashboard', express.static(dashboardPath));
|
|
194
|
-
// CORS
|
|
195
|
-
this.app.use((req, res, next) => {
|
|
196
|
-
res.header('Access-Control-Allow-Origin', '*');
|
|
197
|
-
res.header('Access-Control-Allow-Methods', 'GET, POST, PUT, DELETE, OPTIONS');
|
|
198
|
-
res.header('Access-Control-Allow-Headers', 'Content-Type, Authorization');
|
|
199
|
-
if (req.method === 'OPTIONS') {
|
|
200
|
-
return res.sendStatus(200);
|
|
201
|
-
}
|
|
202
|
-
next();
|
|
203
|
-
});
|
|
204
|
-
// Request logging
|
|
205
|
-
this.app.use((req, res, next) => {
|
|
206
|
-
console.log(`[${new Date().toISOString()}] ${req.method} ${req.path}`);
|
|
207
|
-
next();
|
|
208
|
-
});
|
|
209
|
-
}
|
|
210
|
-
setupRoutes() {
|
|
211
|
-
const router = Router();
|
|
212
|
-
// Root route - redirect to dashboard
|
|
213
|
-
router.get('/', (req, res) => {
|
|
214
|
-
res.redirect('/dashboard/');
|
|
215
|
-
});
|
|
216
|
-
// Dashboard routes
|
|
217
|
-
router.get('/dashboard/', (req, res) => {
|
|
218
|
-
const dashboardPath = path.join(currentDir, '..', '..', 'src', 'pipeline', 'dashboard', 'index.html');
|
|
219
|
-
res.sendFile(dashboardPath);
|
|
220
|
-
});
|
|
221
|
-
// Hub route - central dashboard hub
|
|
222
|
-
router.get('/hub', (req, res) => {
|
|
223
|
-
const hubPath = path.join(currentDir, '..', '..', 'src', 'pipeline', 'dashboard', 'hub.html');
|
|
224
|
-
res.sendFile(hubPath);
|
|
225
|
-
});
|
|
226
|
-
// === CONSOLIDATED ENDPOINTS FOR ALL SERVICES ===
|
|
227
|
-
// ML Dashboard endpoints (replaces port 8501 Streamlit)
|
|
228
|
-
router.get('/ml', (req, res) => {
|
|
229
|
-
res.redirect('/ml/dashboard');
|
|
230
|
-
});
|
|
231
|
-
// ML Dashboard proxy to Streamlit
|
|
232
|
-
const mlDashboardProxy = createProxyMiddleware({
|
|
233
|
-
target: 'http://localhost:8501',
|
|
234
|
-
changeOrigin: true,
|
|
235
|
-
ws: true,
|
|
236
|
-
pathRewrite: {
|
|
237
|
-
'^/ml/dashboard': '',
|
|
238
|
-
},
|
|
239
|
-
});
|
|
240
|
-
router.use('/ml/dashboard', mlDashboardProxy);
|
|
241
|
-
// CI/CD Dashboard endpoints (replaces port 3033)
|
|
242
|
-
router.get('/cicd', (req, res) => {
|
|
243
|
-
res.redirect('/cicd/dashboard');
|
|
244
|
-
});
|
|
245
|
-
router.get('/cicd/dashboard', (req, res) => {
|
|
246
|
-
// Serve CI/CD dashboard
|
|
247
|
-
const cicdPath = path.join(currentDir, '..', '..', 'src', 'cicd', 'dashboard', 'index.html');
|
|
248
|
-
if (fs.existsSync(cicdPath)) {
|
|
249
|
-
res.sendFile(cicdPath);
|
|
250
|
-
}
|
|
251
|
-
else {
|
|
252
|
-
// Serve a demo CI/CD dashboard
|
|
253
|
-
res.send(`
|
|
254
|
-
<!DOCTYPE html>
|
|
255
|
-
<html>
|
|
256
|
-
<head>
|
|
257
|
-
<title>CI/CD Dashboard</title>
|
|
258
|
-
<link href="https://cdn.jsdelivr.net/npm/bootstrap@5.3.0/dist/css/bootstrap.min.css" rel="stylesheet">
|
|
259
|
-
</head>
|
|
260
|
-
<body class="bg-light">
|
|
261
|
-
<div class="container py-5">
|
|
262
|
-
<h1>🚀 CI/CD Dashboard</h1>
|
|
263
|
-
<p class="lead">Continuous Integration & Deployment Pipeline</p>
|
|
264
|
-
<div class="row mt-4">
|
|
265
|
-
<div class="col-md-4">
|
|
266
|
-
<div class="card">
|
|
267
|
-
<div class="card-body">
|
|
268
|
-
<h5 class="card-title">Build Status</h5>
|
|
269
|
-
<span class="badge bg-success">Passing</span>
|
|
270
|
-
</div>
|
|
271
|
-
</div>
|
|
272
|
-
</div>
|
|
273
|
-
<div class="col-md-4">
|
|
274
|
-
<div class="card">
|
|
275
|
-
<div class="card-body">
|
|
276
|
-
<h5 class="card-title">Test Coverage</h5>
|
|
277
|
-
<div class="progress">
|
|
278
|
-
<div class="progress-bar bg-success" style="width: 87%">87%</div>
|
|
279
|
-
</div>
|
|
280
|
-
</div>
|
|
281
|
-
</div>
|
|
282
|
-
</div>
|
|
283
|
-
<div class="col-md-4">
|
|
284
|
-
<div class="card">
|
|
285
|
-
<div class="card-body">
|
|
286
|
-
<h5 class="card-title">Deployments</h5>
|
|
287
|
-
<p class="text-muted">Last: 2 hours ago</p>
|
|
288
|
-
</div>
|
|
289
|
-
</div>
|
|
290
|
-
</div>
|
|
291
|
-
</div>
|
|
292
|
-
</div>
|
|
293
|
-
</body>
|
|
294
|
-
</html>
|
|
295
|
-
`);
|
|
296
|
-
}
|
|
297
|
-
});
|
|
298
|
-
router.get('/cicd/health', (req, res) => {
|
|
299
|
-
res.json({ status: 'healthy', service: 'CI/CD Dashboard', timestamp: new Date().toISOString() });
|
|
300
|
-
});
|
|
301
|
-
// CI/CD API endpoints
|
|
302
|
-
router.get('/api/metrics', (req, res) => {
|
|
303
|
-
// Return demo CI/CD metrics
|
|
304
|
-
const today = new Date();
|
|
305
|
-
const totalBuilds = Math.floor(Math.random() * 50) + 20;
|
|
306
|
-
const successfulBuilds = Math.floor(totalBuilds * (0.8 + Math.random() * 0.15));
|
|
307
|
-
const failedBuilds = totalBuilds - successfulBuilds;
|
|
308
|
-
const avgDurationMs = (5 + Math.random() * 15) * 60 * 1000; // 5-20 minutes
|
|
309
|
-
const activePipelines = Math.floor(Math.random() * 5);
|
|
310
|
-
res.json({
|
|
311
|
-
totalBuilds,
|
|
312
|
-
successfulBuilds,
|
|
313
|
-
failedBuilds,
|
|
314
|
-
successRate: totalBuilds > 0 ? (successfulBuilds / totalBuilds) * 100 : 0,
|
|
315
|
-
avgDurationMs,
|
|
316
|
-
activePipelines,
|
|
317
|
-
lastUpdated: today.toISOString()
|
|
318
|
-
});
|
|
319
|
-
});
|
|
320
|
-
router.get('/api/pipelines', (req, res) => {
|
|
321
|
-
// Return demo CI/CD pipeline data
|
|
322
|
-
const limit = parseInt(req.query.limit) || 20;
|
|
323
|
-
const platforms = ['github', 'gitlab', 'jenkins'];
|
|
324
|
-
const repositories = ['lsh', 'mcli', 'data-pipeline', 'monitoring', 'frontend'];
|
|
325
|
-
const statuses = ['completed', 'in_progress', 'failed', 'queued'];
|
|
326
|
-
const actors = ['alice', 'bob', 'charlie', 'diana', 'eve'];
|
|
327
|
-
const workflows = ['CI', 'Deploy', 'Test', 'Release', 'Hotfix'];
|
|
328
|
-
const pipelines = Array.from({ length: limit }, (_, i) => {
|
|
329
|
-
const platform = platforms[Math.floor(Math.random() * platforms.length)];
|
|
330
|
-
const repository = repositories[Math.floor(Math.random() * repositories.length)];
|
|
331
|
-
const status = statuses[Math.floor(Math.random() * statuses.length)];
|
|
332
|
-
const actor = actors[Math.floor(Math.random() * actors.length)];
|
|
333
|
-
const workflow = workflows[Math.floor(Math.random() * workflows.length)];
|
|
334
|
-
const startedAt = new Date(Date.now() - Math.random() * 24 * 60 * 60 * 1000);
|
|
335
|
-
const duration = status === 'completed' ? Math.random() * 1800000 : null; // up to 30 minutes
|
|
336
|
-
const conclusion = status === 'completed' ? (Math.random() > 0.2 ? 'success' : 'failure') : null;
|
|
337
|
-
return {
|
|
338
|
-
id: `pipeline_${i + 1}`,
|
|
339
|
-
workflow_name: workflow,
|
|
340
|
-
repository,
|
|
341
|
-
branch: Math.random() > 0.3 ? 'main' : 'develop',
|
|
342
|
-
platform,
|
|
343
|
-
status,
|
|
344
|
-
conclusion,
|
|
345
|
-
actor,
|
|
346
|
-
started_at: startedAt.toISOString(),
|
|
347
|
-
duration_ms: duration,
|
|
348
|
-
created_at: startedAt.toISOString(),
|
|
349
|
-
updated_at: new Date(startedAt.getTime() + (duration || 0)).toISOString()
|
|
350
|
-
};
|
|
351
|
-
});
|
|
352
|
-
res.json(pipelines);
|
|
353
|
-
});
|
|
354
|
-
// Monitoring API endpoints (replaces port 3035)
|
|
355
|
-
router.get('/monitoring/api/health', (req, res) => {
|
|
356
|
-
res.json({
|
|
357
|
-
status: 'healthy',
|
|
358
|
-
service: 'Monitoring API',
|
|
359
|
-
uptime: process.uptime(),
|
|
360
|
-
timestamp: new Date().toISOString()
|
|
361
|
-
});
|
|
362
|
-
});
|
|
363
|
-
router.get('/monitoring/api/metrics', async (req, res) => {
|
|
364
|
-
// Return system metrics
|
|
365
|
-
const metrics = {
|
|
366
|
-
jobs_total: this.getSystemJobs().length,
|
|
367
|
-
jobs_running: this.getSystemJobs().filter(j => j.status === 'running').length,
|
|
368
|
-
jobs_failed: this.getSystemJobs().filter(j => j.status === 'failed').length,
|
|
369
|
-
system_uptime: process.uptime(),
|
|
370
|
-
memory_usage: process.memoryUsage(),
|
|
371
|
-
timestamp: new Date().toISOString()
|
|
372
|
-
};
|
|
373
|
-
res.json(metrics);
|
|
374
|
-
});
|
|
375
|
-
router.get('/monitoring/api/jobs', (req, res) => {
|
|
376
|
-
// Return monitoring jobs
|
|
377
|
-
const jobs = this.getSystemJobs();
|
|
378
|
-
res.json({ jobs, total: jobs.length });
|
|
379
|
-
});
|
|
380
|
-
router.get('/monitoring/api/alerts', (req, res) => {
|
|
381
|
-
// Return system alerts
|
|
382
|
-
const alerts = [
|
|
383
|
-
{ id: 1, level: 'info', message: 'System operating normally', timestamp: new Date().toISOString() }
|
|
384
|
-
];
|
|
385
|
-
res.json({ alerts, total: alerts.length });
|
|
386
|
-
});
|
|
387
|
-
// Unified health check endpoint for all services
|
|
388
|
-
router.get('/health/all', (req, res) => {
|
|
389
|
-
res.json({
|
|
390
|
-
status: 'healthy',
|
|
391
|
-
services: {
|
|
392
|
-
pipeline: 'running',
|
|
393
|
-
cicd: 'running',
|
|
394
|
-
monitoring: 'running',
|
|
395
|
-
ml: 'requires separate streamlit instance'
|
|
396
|
-
},
|
|
397
|
-
timestamp: new Date().toISOString()
|
|
398
|
-
});
|
|
399
|
-
});
|
|
400
|
-
// Health check
|
|
401
|
-
router.get('/health', (req, res) => {
|
|
402
|
-
res.json({
|
|
403
|
-
status: 'healthy',
|
|
404
|
-
timestamp: new Date().toISOString(),
|
|
405
|
-
services: {
|
|
406
|
-
database: this.pool ? 'connected' : 'disconnected',
|
|
407
|
-
mcli: 'configured',
|
|
408
|
-
jobTracker: 'active'
|
|
409
|
-
}
|
|
410
|
-
});
|
|
411
|
-
});
|
|
412
|
-
// Job Management Routes
|
|
413
|
-
router.post('/api/pipeline/jobs', async (req, res) => {
|
|
414
|
-
if (this.isDemoMode) {
|
|
415
|
-
const demoJob = {
|
|
416
|
-
id: `job-${Date.now()}`,
|
|
417
|
-
...req.body,
|
|
418
|
-
status: 'queued',
|
|
419
|
-
createdAt: new Date().toISOString(),
|
|
420
|
-
updatedAt: new Date().toISOString()
|
|
421
|
-
};
|
|
422
|
-
return res.status(201).json(demoJob);
|
|
423
|
-
}
|
|
424
|
-
try {
|
|
425
|
-
const job = req.body;
|
|
426
|
-
const createdJob = await this.jobTracker.createJob(job);
|
|
427
|
-
res.status(201).json(createdJob);
|
|
428
|
-
}
|
|
429
|
-
catch (error) {
|
|
430
|
-
console.error('Error creating job:', error);
|
|
431
|
-
res.status(500).json({ error: 'Failed to create job' });
|
|
432
|
-
}
|
|
433
|
-
});
|
|
434
|
-
router.get('/api/pipeline/jobs', async (req, res) => {
|
|
435
|
-
if (this.isDemoMode) {
|
|
436
|
-
// Return jobs based on actual system monitoring jobs
|
|
437
|
-
const jobs = this.getSystemJobs();
|
|
438
|
-
return res.json({ jobs, total: jobs.length });
|
|
439
|
-
}
|
|
440
|
-
try {
|
|
441
|
-
const filters = {
|
|
442
|
-
status: req.query.status,
|
|
443
|
-
sourceSystem: req.query.sourceSystem,
|
|
444
|
-
targetSystem: req.query.targetSystem,
|
|
445
|
-
owner: req.query.owner,
|
|
446
|
-
team: req.query.team,
|
|
447
|
-
limit: parseInt(req.query.limit) || 50,
|
|
448
|
-
offset: parseInt(req.query.offset) || 0
|
|
449
|
-
};
|
|
450
|
-
const result = await this.jobTracker.listJobs(filters);
|
|
451
|
-
res.json(result);
|
|
452
|
-
}
|
|
453
|
-
catch (error) {
|
|
454
|
-
console.error('Error listing jobs:', error);
|
|
455
|
-
res.status(500).json({ error: 'Failed to list jobs' });
|
|
456
|
-
}
|
|
457
|
-
});
|
|
458
|
-
// Job logs endpoint
|
|
459
|
-
router.get('/api/pipeline/jobs/:id/logs', async (req, res) => {
|
|
460
|
-
if (this.isDemoMode) {
|
|
461
|
-
const jobId = req.params.id;
|
|
462
|
-
const scriptName = jobId.replace('job-', '');
|
|
463
|
-
const logPath = `/Users/lefv/repos/lsh/logs/${scriptName}.log`;
|
|
464
|
-
try {
|
|
465
|
-
if (fs.existsSync(logPath)) {
|
|
466
|
-
const logContent = fs.readFileSync(logPath, 'utf-8');
|
|
467
|
-
const lines = logContent.split('\n').slice(-100); // Last 100 lines
|
|
468
|
-
const logs = lines.filter(line => line.trim()).map(line => {
|
|
469
|
-
let level = 'info';
|
|
470
|
-
if (line.includes('ERROR') || line.includes('error'))
|
|
471
|
-
level = 'error';
|
|
472
|
-
else if (line.includes('WARNING') || line.includes('warning'))
|
|
473
|
-
level = 'warning';
|
|
474
|
-
else if (line.includes('SUCCESS') || line.includes('✅'))
|
|
475
|
-
level = 'success';
|
|
476
|
-
return {
|
|
477
|
-
timestamp: new Date().toISOString(),
|
|
478
|
-
level,
|
|
479
|
-
message: line
|
|
480
|
-
};
|
|
481
|
-
});
|
|
482
|
-
return res.json({ logs });
|
|
483
|
-
}
|
|
484
|
-
}
|
|
485
|
-
catch (error) {
|
|
486
|
-
console.error('Error reading log file:', error);
|
|
487
|
-
}
|
|
488
|
-
// Return demo logs if file doesn't exist
|
|
489
|
-
return res.json({
|
|
490
|
-
logs: [
|
|
491
|
-
{ timestamp: new Date().toISOString(), level: 'info', message: 'Job started' },
|
|
492
|
-
{ timestamp: new Date().toISOString(), level: 'info', message: 'Processing data...' },
|
|
493
|
-
{ timestamp: new Date().toISOString(), level: 'success', message: 'Job completed successfully' }
|
|
494
|
-
]
|
|
495
|
-
});
|
|
496
|
-
}
|
|
497
|
-
// Real implementation would fetch from database
|
|
498
|
-
res.json({ logs: [] });
|
|
499
|
-
});
|
|
500
|
-
router.get('/api/pipeline/jobs/:id', async (req, res) => {
|
|
501
|
-
if (this.isDemoMode) {
|
|
502
|
-
// Find the actual job from the system jobs
|
|
503
|
-
const allJobs = this.getSystemJobs();
|
|
504
|
-
const job = allJobs.find(j => j.id === req.params.id);
|
|
505
|
-
if (job) {
|
|
506
|
-
return res.json(job);
|
|
507
|
-
}
|
|
508
|
-
// Fallback to demo job if not found
|
|
509
|
-
const demoJob = {
|
|
510
|
-
id: req.params.id,
|
|
511
|
-
name: 'Demo Job',
|
|
512
|
-
type: 'batch',
|
|
513
|
-
owner: 'demo-user',
|
|
514
|
-
status: 'running',
|
|
515
|
-
sourceSystem: 'lsh',
|
|
516
|
-
targetSystem: 'mcli',
|
|
517
|
-
createdAt: new Date(Date.now() - 1800000).toISOString(),
|
|
518
|
-
updatedAt: new Date().toISOString(),
|
|
519
|
-
progress: 75,
|
|
520
|
-
schedule: '*/5 * * * *'
|
|
521
|
-
};
|
|
522
|
-
return res.json(demoJob);
|
|
523
|
-
}
|
|
524
|
-
try {
|
|
525
|
-
const job = await this.jobTracker.getJob(req.params.id);
|
|
526
|
-
if (!job) {
|
|
527
|
-
return res.status(404).json({ error: 'Job not found' });
|
|
528
|
-
}
|
|
529
|
-
res.json(job);
|
|
530
|
-
}
|
|
531
|
-
catch (error) {
|
|
532
|
-
console.error('Error getting job:', error);
|
|
533
|
-
res.status(500).json({ error: 'Failed to get job' });
|
|
534
|
-
}
|
|
535
|
-
});
|
|
536
|
-
router.put('/api/pipeline/jobs/:id/cancel', async (req, res) => {
|
|
537
|
-
try {
|
|
538
|
-
const job = await this.jobTracker.getJob(req.params.id);
|
|
539
|
-
if (!job) {
|
|
540
|
-
return res.status(404).json({ error: 'Job not found' });
|
|
541
|
-
}
|
|
542
|
-
await this.jobTracker.updateJobStatus(req.params.id, JobStatus.CANCELLED);
|
|
543
|
-
// Cancel in MCLI if applicable
|
|
544
|
-
if (job.externalId && job.targetSystem === 'mcli') {
|
|
545
|
-
await this.mcliBridge.cancelJob(job.externalId);
|
|
546
|
-
}
|
|
547
|
-
res.json({ message: 'Job cancelled successfully' });
|
|
548
|
-
}
|
|
549
|
-
catch (error) {
|
|
550
|
-
console.error('Error cancelling job:', error);
|
|
551
|
-
res.status(500).json({ error: 'Failed to cancel job' });
|
|
552
|
-
}
|
|
553
|
-
});
|
|
554
|
-
router.put('/api/pipeline/jobs/:id/retry', async (req, res) => {
|
|
555
|
-
try {
|
|
556
|
-
const job = await this.jobTracker.getJob(req.params.id);
|
|
557
|
-
if (!job) {
|
|
558
|
-
return res.status(404).json({ error: 'Job not found' });
|
|
559
|
-
}
|
|
560
|
-
// Create new execution for retry
|
|
561
|
-
const execution = await this.jobTracker.createExecution(req.params.id);
|
|
562
|
-
// If MCLI job, resubmit
|
|
563
|
-
if (job.targetSystem === 'mcli') {
|
|
564
|
-
await this.mcliBridge.submitJobToMCLI(job);
|
|
565
|
-
}
|
|
566
|
-
res.json({ message: 'Job retry initiated', executionId: execution.id });
|
|
567
|
-
}
|
|
568
|
-
catch (error) {
|
|
569
|
-
console.error('Error retrying job:', error);
|
|
570
|
-
res.status(500).json({ error: 'Failed to retry job' });
|
|
571
|
-
}
|
|
572
|
-
});
|
|
573
|
-
// Job Metrics
|
|
574
|
-
router.get('/api/pipeline/jobs/:id/metrics', async (req, res) => {
|
|
575
|
-
try {
|
|
576
|
-
const metrics = await this.jobTracker.getJobMetrics(req.params.id, req.query.metricName);
|
|
577
|
-
res.json(metrics);
|
|
578
|
-
}
|
|
579
|
-
catch (error) {
|
|
580
|
-
console.error('Error getting job metrics:', error);
|
|
581
|
-
res.status(500).json({ error: 'Failed to get job metrics' });
|
|
582
|
-
}
|
|
583
|
-
});
|
|
584
|
-
// Active Jobs
|
|
585
|
-
router.get('/api/pipeline/jobs/active', async (req, res) => {
|
|
586
|
-
if (this.isDemoMode) {
|
|
587
|
-
const allJobs = this.getSystemJobs();
|
|
588
|
-
const activeJobs = allJobs.filter(job => job.status === 'running');
|
|
589
|
-
return res.json(activeJobs);
|
|
590
|
-
}
|
|
591
|
-
try {
|
|
592
|
-
const jobs = await this.jobTracker.getActiveJobs();
|
|
593
|
-
res.json(jobs);
|
|
594
|
-
}
|
|
595
|
-
catch (error) {
|
|
596
|
-
console.error('Error getting active jobs:', error);
|
|
597
|
-
res.status(500).json({ error: 'Failed to get active jobs' });
|
|
598
|
-
}
|
|
599
|
-
});
|
|
600
|
-
// Success Rates
|
|
601
|
-
router.get('/api/pipeline/metrics/success-rates', async (req, res) => {
|
|
602
|
-
if (this.isDemoMode) {
|
|
603
|
-
const successRates = {
|
|
604
|
-
overall: 0.95,
|
|
605
|
-
bySystem: {
|
|
606
|
-
lsh: 0.97,
|
|
607
|
-
mcli: 0.94,
|
|
608
|
-
monitoring: 0.93
|
|
609
|
-
},
|
|
610
|
-
last24h: 0.96,
|
|
611
|
-
last7d: 0.95
|
|
612
|
-
};
|
|
613
|
-
return res.json(successRates);
|
|
614
|
-
}
|
|
615
|
-
try {
|
|
616
|
-
const rates = await this.jobTracker.getJobSuccessRates();
|
|
617
|
-
res.json(rates);
|
|
618
|
-
}
|
|
619
|
-
catch (error) {
|
|
620
|
-
console.error('Error getting success rates:', error);
|
|
621
|
-
res.status(500).json({ error: 'Failed to get success rates' });
|
|
622
|
-
}
|
|
623
|
-
});
|
|
624
|
-
// MCLI Webhook endpoint
|
|
625
|
-
router.post('/webhook/mcli', async (req, res) => {
|
|
626
|
-
try {
|
|
627
|
-
await this.mcliBridge.handleWebhook(req.body);
|
|
628
|
-
res.json({ success: true });
|
|
629
|
-
}
|
|
630
|
-
catch (error) {
|
|
631
|
-
console.error('Error handling MCLI webhook:', error);
|
|
632
|
-
res.status(500).json({ error: 'Failed to handle webhook' });
|
|
633
|
-
}
|
|
634
|
-
});
|
|
635
|
-
// MCLI Status Sync
|
|
636
|
-
router.post('/api/pipeline/sync/mcli/:jobId', async (req, res) => {
|
|
637
|
-
try {
|
|
638
|
-
await this.mcliBridge.syncJobStatus(req.params.jobId);
|
|
639
|
-
res.json({ message: 'Job synced successfully' });
|
|
640
|
-
}
|
|
641
|
-
catch (error) {
|
|
642
|
-
console.error('Error syncing job:', error);
|
|
643
|
-
res.status(500).json({ error: 'Failed to sync job' });
|
|
644
|
-
}
|
|
645
|
-
});
|
|
646
|
-
// MCLI Health Check
|
|
647
|
-
router.get('/api/pipeline/mcli/health', async (req, res) => {
|
|
648
|
-
try {
|
|
649
|
-
const isHealthy = await this.mcliBridge.healthCheck();
|
|
650
|
-
res.json({ healthy: isHealthy });
|
|
651
|
-
}
|
|
652
|
-
catch (error) {
|
|
653
|
-
console.error('Error checking MCLI health:', error);
|
|
654
|
-
res.status(500).json({ error: 'Failed to check MCLI health' });
|
|
655
|
-
}
|
|
656
|
-
});
|
|
657
|
-
// MCLI Statistics
|
|
658
|
-
router.get('/api/pipeline/mcli/statistics', async (req, res) => {
|
|
659
|
-
try {
|
|
660
|
-
const stats = await this.mcliBridge.getStatistics();
|
|
661
|
-
res.json(stats);
|
|
662
|
-
}
|
|
663
|
-
catch (error) {
|
|
664
|
-
console.error('Error getting MCLI statistics:', error);
|
|
665
|
-
res.status(500).json({ error: 'Failed to get MCLI statistics' });
|
|
666
|
-
}
|
|
667
|
-
});
|
|
668
|
-
// Pipeline Statistics: 7-day aggregate over jobs and executions.
// In demo mode the numbers are derived from the local system-job snapshot
// (durations are canned); otherwise a single aggregate query is issued.
router.get('/api/pipeline/statistics', async (request, response) => {
  if (this.isDemoMode) {
    // Return statistics based on actual system jobs
    const jobs = this.getSystemJobs();
    const countByStatus = (status) =>
      jobs.filter((job) => job.status === status).length;
    return response.json({
      total_jobs: String(jobs.length),
      total_executions: String(jobs.length * 24), // Assuming daily runs
      completed_jobs: String(countByStatus('completed')),
      failed_jobs: String(countByStatus('failed')),
      active_jobs: String(countByStatus('running')),
      avg_duration_ms: '45000',
      max_duration_ms: '180000',
      min_duration_ms: '5000'
    });
  }
  try {
    const query = `
      SELECT
        COUNT(DISTINCT j.id) as total_jobs,
        COUNT(DISTINCT e.id) as total_executions,
        COUNT(DISTINCT j.id) FILTER (WHERE j.status = 'completed') as completed_jobs,
        COUNT(DISTINCT j.id) FILTER (WHERE j.status = 'failed') as failed_jobs,
        COUNT(DISTINCT j.id) FILTER (WHERE j.status IN ('running', 'queued')) as active_jobs,
        AVG(e.duration_ms) as avg_duration_ms,
        MAX(e.duration_ms) as max_duration_ms,
        MIN(e.duration_ms) FILTER (WHERE e.duration_ms > 0) as min_duration_ms
      FROM pipeline_jobs j
      LEFT JOIN job_executions e ON j.id = e.job_id
      WHERE j.created_at > CURRENT_TIMESTAMP - INTERVAL '7 days'
    `;
    const result = await this.pool.query(query);
    response.json(result.rows[0]);
  }
  catch (error) {
    console.error('Error getting pipeline statistics:', error);
    response.status(500).json({ error: 'Failed to get pipeline statistics' });
  }
});
|
|
711
|
-
// Recent Events: newest-first feed of pipeline events.
// Query params: limit (optional, defaults to 100).
router.get('/api/pipeline/events', async (req, res) => {
  if (this.isDemoMode) {
    // Return demo events
    const demoEvents = [
      {
        id: 'event-1',
        type: 'job_completed',
        message: 'Job "Data Sync - Users" completed successfully',
        occurred_at: new Date(Date.now() - 300000).toISOString()
      },
      {
        id: 'event-2',
        type: 'job_started',
        message: 'Job "ML Model Training" started',
        occurred_at: new Date(Date.now() - 600000).toISOString()
      },
      {
        id: 'event-3',
        type: 'job_queued',
        message: 'Job "Metrics Collection" queued for processing',
        occurred_at: new Date(Date.now() - 900000).toISOString()
      }
    ];
    return res.json(demoEvents);
  }
  try {
    // FIX: parseInt was called without a radix; always pass 10 explicitly.
    // A missing/invalid limit parses to NaN and falls back to 100.
    const limit = Number.parseInt(req.query.limit, 10) || 100;
    const query = `
      SELECT * FROM pipeline_events
      ORDER BY occurred_at DESC
      LIMIT $1
    `;
    const result = await this.pool.query(query, [limit]);
    res.json(result.rows);
  }
  catch (error) {
    console.error('Error getting events:', error);
    res.status(500).json({ error: 'Failed to get events' });
  }
});
|
|
752
|
-
// Workflow Management Routes
// Create a workflow. Demo mode fabricates a draft record from the request
// body; otherwise the workflow engine persists it.
router.post('/api/pipeline/workflows', async (request, response) => {
  if (this.isDemoMode) {
    return response.status(201).json({
      id: `workflow-${Date.now()}`,
      ...request.body,
      status: 'draft',
      createdAt: new Date().toISOString(),
      updatedAt: new Date().toISOString()
    });
  }
  try {
    const workflow = await this.workflowEngine.createWorkflow(request.body);
    response.status(201).json(workflow);
  }
  catch (error) {
    console.error('Error creating workflow:', error);
    response.status(500).json({ error: 'Failed to create workflow' });
  }
});
|
|
773
|
-
// List workflows with optional status filter and paging.
// Query params: status, limit (default 50), offset (default 0).
router.get('/api/pipeline/workflows', async (req, res) => {
  if (this.isDemoMode) {
    const demoWorkflows = [
      {
        id: 'workflow-1',
        name: 'Daily Data Pipeline',
        description: 'Syncs data from LSH to MCLI daily',
        status: 'active',
        nodes: 3,
        createdAt: new Date(Date.now() - 86400000).toISOString()
      },
      {
        id: 'workflow-2',
        name: 'ML Training Pipeline',
        description: 'Trains and deploys ML models',
        status: 'active',
        nodes: 5,
        createdAt: new Date(Date.now() - 172800000).toISOString()
      }
    ];
    return res.json({ workflows: demoWorkflows, total: demoWorkflows.length });
  }
  try {
    // FIX: both parseInt calls lacked an explicit radix; pass 10.
    // NaN (missing/garbage input) falls back to the documented defaults.
    const workflows = await this.workflowEngine.listWorkflows({
      status: req.query.status,
      limit: Number.parseInt(req.query.limit, 10) || 50,
      offset: Number.parseInt(req.query.offset, 10) || 0
    });
    res.json(workflows);
  }
  catch (error) {
    console.error('Error listing workflows:', error);
    res.status(500).json({ error: 'Failed to list workflows' });
  }
});
|
|
808
|
-
// Fetch a single workflow by id; 404 when the engine has no such record.
router.get('/api/pipeline/workflows/:id', async (request, response) => {
  const { id } = request.params;
  if (this.isDemoMode) {
    return response.json({
      id,
      name: 'Demo Workflow',
      description: 'A demo workflow for testing',
      status: 'active',
      nodes: [
        { id: 'node1', type: 'trigger', name: 'Start' },
        { id: 'node2', type: 'action', name: 'Process Data' },
        { id: 'node3', type: 'condition', name: 'Check Status' }
      ],
      createdAt: new Date(Date.now() - 86400000).toISOString()
    });
  }
  try {
    const workflow = await this.workflowEngine.getWorkflow(id);
    if (!workflow) {
      return response.status(404).json({ error: 'Workflow not found' });
    }
    response.json(workflow);
  }
  catch (error) {
    console.error('Error getting workflow:', error);
    response.status(500).json({ error: 'Failed to get workflow' });
  }
});
|
|
836
|
-
// Kick off a workflow run. Body may override who/what triggered it and
// supply run parameters; sensible defaults cover the bare-POST case.
router.post('/api/pipeline/workflows/:id/execute', async (request, response) => {
  try {
    const { triggeredBy = 'api', triggerType = 'manual', parameters = {} } = request.body;
    const execution = await this.workflowEngine.executeWorkflow(
      request.params.id,
      triggeredBy,
      triggerType,
      parameters
    );
    response.status(201).json(execution);
  }
  catch (error) {
    console.error('Error executing workflow:', error);
    response.status(500).json({ error: 'Failed to execute workflow' });
  }
});
|
|
847
|
-
// List executions of one workflow with paging.
// Query params: limit (default 50), offset (default 0).
router.get('/api/pipeline/workflows/:id/executions', async (req, res) => {
  if (this.isDemoMode) {
    const demoExecutions = [
      {
        id: 'exec-1',
        workflowId: req.params.id,
        status: 'completed',
        startedAt: new Date(Date.now() - 7200000).toISOString(),
        completedAt: new Date(Date.now() - 6000000).toISOString()
      },
      {
        id: 'exec-2',
        workflowId: req.params.id,
        status: 'running',
        startedAt: new Date(Date.now() - 1800000).toISOString()
      }
    ];
    return res.json({ executions: demoExecutions, total: 2 });
  }
  try {
    // FIX: both parseInt calls lacked an explicit radix; pass 10.
    const executions = await this.workflowEngine.getWorkflowExecutions(req.params.id, {
      limit: Number.parseInt(req.query.limit, 10) || 50,
      offset: Number.parseInt(req.query.offset, 10) || 0
    });
    res.json(executions);
  }
  catch (error) {
    console.error('Error getting workflow executions:', error);
    res.status(500).json({ error: 'Failed to get workflow executions' });
  }
});
|
|
878
|
-
// Fetch one execution record; 404 when the engine does not know the id.
router.get('/api/pipeline/executions/:id', async (request, response) => {
  try {
    const execution = await this.workflowEngine.getExecution(request.params.id);
    if (!execution) {
      return response.status(404).json({ error: 'Execution not found' });
    }
    response.json(execution);
  }
  catch (error) {
    console.error('Error getting execution:', error);
    response.status(500).json({ error: 'Failed to get execution' });
  }
});
|
|
891
|
-
// Request cancellation of a running execution.
router.post('/api/pipeline/executions/:id/cancel', async (request, response) => {
  try {
    await this.workflowEngine.cancelExecution(request.params.id);
    response.json({ message: 'Execution cancelled successfully' });
  }
  catch (error) {
    console.error('Error cancelling execution:', error);
    response.status(500).json({ error: 'Failed to cancel execution' });
  }
});
|
|
901
|
-
// Replace/patch a workflow definition; echoes the updated record.
router.put('/api/pipeline/workflows/:id', async (request, response) => {
  try {
    const updated = await this.workflowEngine.updateWorkflow(request.params.id, request.body);
    response.json(updated);
  }
  catch (error) {
    console.error('Error updating workflow:', error);
    response.status(500).json({ error: 'Failed to update workflow' });
  }
});
|
|
911
|
-
// Delete a workflow by id.
router.delete('/api/pipeline/workflows/:id', async (request, response) => {
  try {
    await this.workflowEngine.deleteWorkflow(request.params.id);
    response.json({ message: 'Workflow deleted successfully' });
  }
  catch (error) {
    console.error('Error deleting workflow:', error);
    response.status(500).json({ error: 'Failed to delete workflow' });
  }
});
|
|
921
|
-
// Run the engine's structural validation on a stored workflow.
router.post('/api/pipeline/workflows/:id/validate', async (request, response) => {
  try {
    const report = await this.workflowEngine.validateWorkflowById(request.params.id);
    response.json(report);
  }
  catch (error) {
    console.error('Error validating workflow:', error);
    response.status(500).json({ error: 'Failed to validate workflow' });
  }
});
|
|
931
|
-
// Return the dependency graph the engine computes for one workflow.
router.get('/api/pipeline/workflows/:id/dependencies', async (request, response) => {
  try {
    const graph = await this.workflowEngine.getWorkflowDependencies(request.params.id);
    response.json(graph);
  }
  catch (error) {
    console.error('Error getting workflow dependencies:', error);
    response.status(500).json({ error: 'Failed to get workflow dependencies' });
  }
});
|
|
941
|
-
this.app.use('/', router);
|
|
942
|
-
}
|
|
943
|
-
setupWebSocket() {
|
|
944
|
-
this.io.on('connection', (socket) => {
|
|
945
|
-
console.log(`WebSocket client connected: ${socket.id}`);
|
|
946
|
-
socket.on('disconnect', () => {
|
|
947
|
-
console.log(`WebSocket client disconnected: ${socket.id}`);
|
|
948
|
-
});
|
|
949
|
-
socket.on('subscribe:job', (jobId) => {
|
|
950
|
-
socket.join(`job:${jobId}`);
|
|
951
|
-
console.log(`Client ${socket.id} subscribed to job ${jobId}`);
|
|
952
|
-
});
|
|
953
|
-
socket.on('unsubscribe:job', (jobId) => {
|
|
954
|
-
socket.leave(`job:${jobId}`);
|
|
955
|
-
console.log(`Client ${socket.id} unsubscribed from job ${jobId}`);
|
|
956
|
-
});
|
|
957
|
-
socket.on('subscribe:workflow', (workflowId) => {
|
|
958
|
-
socket.join(`workflow:${workflowId}`);
|
|
959
|
-
console.log(`Client ${socket.id} subscribed to workflow ${workflowId}`);
|
|
960
|
-
});
|
|
961
|
-
socket.on('unsubscribe:workflow', (workflowId) => {
|
|
962
|
-
socket.leave(`workflow:${workflowId}`);
|
|
963
|
-
console.log(`Client ${socket.id} unsubscribed from workflow ${workflowId}`);
|
|
964
|
-
});
|
|
965
|
-
});
|
|
966
|
-
}
|
|
967
|
-
setupEventListeners() {
|
|
968
|
-
// Job Tracker events
|
|
969
|
-
this.jobTracker.on('job:created', (event) => {
|
|
970
|
-
this.io.emit('job:created', event);
|
|
971
|
-
this.io.to(`job:${event.jobId}`).emit('job:update', event);
|
|
972
|
-
});
|
|
973
|
-
this.jobTracker.on('job:status_changed', (event) => {
|
|
974
|
-
this.io.emit('job:status_changed', event);
|
|
975
|
-
this.io.to(`job:${event.jobId}`).emit('job:update', event);
|
|
976
|
-
});
|
|
977
|
-
this.jobTracker.on('execution:started', (event) => {
|
|
978
|
-
this.io.to(`job:${event.jobId}`).emit('execution:started', event);
|
|
979
|
-
});
|
|
980
|
-
this.jobTracker.on('execution:completed', (event) => {
|
|
981
|
-
this.io.to(`job:${event.jobId}`).emit('execution:completed', event);
|
|
982
|
-
});
|
|
983
|
-
this.jobTracker.on('execution:failed', (event) => {
|
|
984
|
-
this.io.to(`job:${event.jobId}`).emit('execution:failed', event);
|
|
985
|
-
});
|
|
986
|
-
// MCLI Bridge events
|
|
987
|
-
this.mcliBridge.on('mcli:submitted', (event) => {
|
|
988
|
-
this.io.emit('mcli:submitted', event);
|
|
989
|
-
if (event.pipelineJobId) {
|
|
990
|
-
this.io.to(`job:${event.pipelineJobId}`).emit('mcli:submitted', event);
|
|
991
|
-
}
|
|
992
|
-
});
|
|
993
|
-
this.mcliBridge.on('mcli:webhook', (event) => {
|
|
994
|
-
this.io.emit('mcli:webhook', event);
|
|
995
|
-
if (event.pipelineJobId) {
|
|
996
|
-
this.io.to(`job:${event.pipelineJobId}`).emit('mcli:update', event);
|
|
997
|
-
}
|
|
998
|
-
});
|
|
999
|
-
// Workflow Engine events
|
|
1000
|
-
this.workflowEngine.on('workflow:created', (event) => {
|
|
1001
|
-
this.io.emit('workflow:created', event);
|
|
1002
|
-
});
|
|
1003
|
-
this.workflowEngine.on('execution:started', (event) => {
|
|
1004
|
-
this.io.emit('workflow:execution:started', event);
|
|
1005
|
-
this.io.to(`workflow:${event.workflowId}`).emit('execution:started', event);
|
|
1006
|
-
});
|
|
1007
|
-
this.workflowEngine.on('execution:completed', (event) => {
|
|
1008
|
-
this.io.emit('workflow:execution:completed', event);
|
|
1009
|
-
this.io.to(`workflow:${event.workflowId}`).emit('execution:completed', event);
|
|
1010
|
-
});
|
|
1011
|
-
this.workflowEngine.on('execution:failed', (event) => {
|
|
1012
|
-
this.io.emit('workflow:execution:failed', event);
|
|
1013
|
-
this.io.to(`workflow:${event.workflowId}`).emit('execution:failed', event);
|
|
1014
|
-
});
|
|
1015
|
-
this.workflowEngine.on('node:started', (event) => {
|
|
1016
|
-
this.io.to(`workflow:${event.workflowId}`).emit('node:started', event);
|
|
1017
|
-
});
|
|
1018
|
-
this.workflowEngine.on('node:completed', (event) => {
|
|
1019
|
-
this.io.to(`workflow:${event.workflowId}`).emit('node:completed', event);
|
|
1020
|
-
});
|
|
1021
|
-
this.workflowEngine.on('node:failed', (event) => {
|
|
1022
|
-
this.io.to(`workflow:${event.workflowId}`).emit('node:failed', event);
|
|
1023
|
-
});
|
|
1024
|
-
}
|
|
1025
|
-
/**
 * Accessor for the Express application instance held on this service
 * (created outside this chunk — presumably in the constructor; verify).
 * Useful for tests or for mounting the routes elsewhere.
 */
getApp() {
  return this.app;
}
|
|
1028
|
-
/**
 * Accessor for the underlying HTTP server instance (`this.server`), the
 * same handle that `start()` calls `.listen()` on and `stop()` closes.
 */
getServer() {
  return this.server;
}
|
|
1031
|
-
/**
 * Boot the pipeline service: probe the database (falling back to demo mode
 * on failure), start the job tracker, MCLI bridge, and workflow engine,
 * then bind the HTTP server to the configured port.
 *
 * NOTE: the steps below are order-dependent — the DB probe must run first
 * because it sets `this.isDemoMode`, which the route handlers consult.
 * Rethrows any startup error after logging it.
 */
async start() {
  try {
    // Test database connection
    try {
      await this.pool.query('SELECT 1');
      console.log('✅ Database connected');
      this.isDemoMode = false;
    }
    catch (_dbError) {
      // A missing database is not fatal: routes serve canned demo data
      // instead. The probe error itself is intentionally not logged.
      console.warn('⚠️ Database not available - running in demo mode');
      console.log(' To enable full functionality, create a PostgreSQL database named "pipeline"');
      console.log(' and run: psql -d pipeline -f src/pipeline/schema.sql');
      this.isDemoMode = true;
    }
    // Start job tracker polling
    this.jobTracker.startPolling();
    console.log('✅ Job tracker started');
    // Start MCLI periodic sync
    this.mcliBridge.startPeriodicSync();
    console.log('✅ MCLI bridge started');
    // Start workflow engine
    await this.workflowEngine.start();
    console.log('✅ Workflow engine started');
    // Start server; the callback fires once the port is actually bound.
    this.server.listen(this.config.port, () => {
      console.log(`🚀 Pipeline service running on port ${this.config.port}`);
      console.log(`📊 API available at http://localhost:${this.config.port}/api/pipeline`);
      console.log(`🔄 WebSocket available at ws://localhost:${this.config.port}`);
      console.log(`🪝 Webhook endpoint at http://localhost:${this.config.port}/webhook/mcli`);
    });
  }
  catch (error) {
    console.error('Failed to start pipeline service:', error);
    throw error;
  }
}
|
|
1067
|
-
async stop() {
|
|
1068
|
-
console.log('Shutting down pipeline service...');
|
|
1069
|
-
// Stop polling
|
|
1070
|
-
this.jobTracker.stopPolling();
|
|
1071
|
-
// Stop Streamlit process
|
|
1072
|
-
if (this.streamlitProcess) {
|
|
1073
|
-
console.log('Stopping Streamlit ML Dashboard...');
|
|
1074
|
-
this.streamlitProcess.kill('SIGTERM');
|
|
1075
|
-
this.streamlitProcess = null;
|
|
1076
|
-
}
|
|
1077
|
-
// Cleanup services
|
|
1078
|
-
await this.jobTracker.cleanup();
|
|
1079
|
-
this.mcliBridge.cleanup();
|
|
1080
|
-
await this.workflowEngine.stop();
|
|
1081
|
-
// Close database pool
|
|
1082
|
-
await this.pool.end();
|
|
1083
|
-
// Close server
|
|
1084
|
-
this.server.close();
|
|
1085
|
-
console.log('Pipeline service stopped');
|
|
1086
|
-
}
|
|
1087
|
-
}
|
|
1088
|
-
/**
 * Export for CLI usage: build a PipelineService from `config`, start it,
 * and hand the running instance back so the caller can later `stop()` it.
 */
export async function startPipelineService(config) {
  const pipelineService = new PipelineService(config);
  await pipelineService.start();
  return pipelineService;
}
|
|
1094
|
-
// Handle process termination
|
|
1095
|
-
/**
 * Detect whether this file is the process entry point.
 *
 * `import.meta` is read through eval because a lexical reference would be a
 * parse error outside an ES-module context; when that throws, fall back to
 * matching the entry script's filename.
 */
function isMainModule() {
  try {
    const meta = eval('import.meta');
    return meta.url === `file://${process.argv[1]}`;
  }
  catch {
    // Fallback: check if this file is being run directly
    const entryScript = process.argv[1];
    return Boolean(entryScript && entryScript.endsWith('pipeline-service.js'));
  }
}
|
|
1105
|
-
// Entry-point bootstrap: construct the service, hook termination signals,
// and start. FIX: the original signal handlers were bare `async` callbacks
// with no catch — if `stop()` rejected, the process never exited and the
// rejection went unhandled. Failures now log and exit(1).
if (isMainModule()) {
  const service = new PipelineService();
  const shutdown = async (signal) => {
    try {
      await service.stop();
      process.exit(0);
    }
    catch (error) {
      console.error(`Error during ${signal} shutdown:`, error);
      process.exit(1);
    }
  };
  // Fire-and-forget is intentional: shutdown() always terminates the process.
  process.on('SIGINT', () => { void shutdown('SIGINT'); });
  process.on('SIGTERM', () => { void shutdown('SIGTERM'); });
  service.start().catch((error) => {
    console.error('Failed to start:', error);
    process.exit(1);
  });
}
|