lsh-framework 0.5.4

This diff shows the content of publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (90)
  1. package/.env.example +51 -0
  2. package/README.md +399 -0
  3. package/dist/app.js +33 -0
  4. package/dist/cicd/analytics.js +261 -0
  5. package/dist/cicd/auth.js +269 -0
  6. package/dist/cicd/cache-manager.js +172 -0
  7. package/dist/cicd/data-retention.js +305 -0
  8. package/dist/cicd/performance-monitor.js +224 -0
  9. package/dist/cicd/webhook-receiver.js +634 -0
  10. package/dist/cli.js +500 -0
  11. package/dist/commands/api.js +343 -0
  12. package/dist/commands/self.js +318 -0
  13. package/dist/commands/theme.js +257 -0
  14. package/dist/commands/zsh-import.js +240 -0
  15. package/dist/components/App.js +1 -0
  16. package/dist/components/Divider.js +29 -0
  17. package/dist/components/REPL.js +43 -0
  18. package/dist/components/Terminal.js +232 -0
  19. package/dist/components/UserInput.js +30 -0
  20. package/dist/daemon/api-server.js +315 -0
  21. package/dist/daemon/job-registry.js +554 -0
  22. package/dist/daemon/lshd.js +822 -0
  23. package/dist/daemon/monitoring-api.js +220 -0
  24. package/dist/examples/supabase-integration.js +106 -0
  25. package/dist/lib/api-error-handler.js +183 -0
  26. package/dist/lib/associative-arrays.js +285 -0
  27. package/dist/lib/base-api-server.js +290 -0
  28. package/dist/lib/base-command-registrar.js +286 -0
  29. package/dist/lib/base-job-manager.js +293 -0
  30. package/dist/lib/brace-expansion.js +160 -0
  31. package/dist/lib/builtin-commands.js +439 -0
  32. package/dist/lib/cloud-config-manager.js +347 -0
  33. package/dist/lib/command-validator.js +190 -0
  34. package/dist/lib/completion-system.js +344 -0
  35. package/dist/lib/cron-job-manager.js +364 -0
  36. package/dist/lib/daemon-client-helper.js +141 -0
  37. package/dist/lib/daemon-client.js +501 -0
  38. package/dist/lib/database-persistence.js +638 -0
  39. package/dist/lib/database-schema.js +259 -0
  40. package/dist/lib/enhanced-history-system.js +246 -0
  41. package/dist/lib/env-validator.js +265 -0
  42. package/dist/lib/executors/builtin-executor.js +52 -0
  43. package/dist/lib/extended-globbing.js +411 -0
  44. package/dist/lib/extended-parameter-expansion.js +227 -0
  45. package/dist/lib/floating-point-arithmetic.js +256 -0
  46. package/dist/lib/history-system.js +245 -0
  47. package/dist/lib/interactive-shell.js +460 -0
  48. package/dist/lib/job-builtins.js +580 -0
  49. package/dist/lib/job-manager.js +386 -0
  50. package/dist/lib/job-storage-database.js +156 -0
  51. package/dist/lib/job-storage-memory.js +73 -0
  52. package/dist/lib/logger.js +274 -0
  53. package/dist/lib/lshrc-init.js +177 -0
  54. package/dist/lib/pathname-expansion.js +216 -0
  55. package/dist/lib/prompt-system.js +328 -0
  56. package/dist/lib/script-runner.js +226 -0
  57. package/dist/lib/secrets-manager.js +193 -0
  58. package/dist/lib/shell-executor.js +2504 -0
  59. package/dist/lib/shell-parser.js +958 -0
  60. package/dist/lib/shell-types.js +6 -0
  61. package/dist/lib/shell.lib.js +40 -0
  62. package/dist/lib/supabase-client.js +58 -0
  63. package/dist/lib/theme-manager.js +476 -0
  64. package/dist/lib/variable-expansion.js +385 -0
  65. package/dist/lib/zsh-compatibility.js +658 -0
  66. package/dist/lib/zsh-import-manager.js +699 -0
  67. package/dist/lib/zsh-options.js +328 -0
  68. package/dist/pipeline/job-tracker.js +491 -0
  69. package/dist/pipeline/mcli-bridge.js +302 -0
  70. package/dist/pipeline/pipeline-service.js +1116 -0
  71. package/dist/pipeline/workflow-engine.js +867 -0
  72. package/dist/services/api/api.js +58 -0
  73. package/dist/services/api/auth.js +35 -0
  74. package/dist/services/api/config.js +7 -0
  75. package/dist/services/api/file.js +22 -0
  76. package/dist/services/cron/cron-registrar.js +235 -0
  77. package/dist/services/cron/cron.js +9 -0
  78. package/dist/services/daemon/daemon-registrar.js +565 -0
  79. package/dist/services/daemon/daemon.js +9 -0
  80. package/dist/services/lib/lib.js +86 -0
  81. package/dist/services/log-file-extractor.js +170 -0
  82. package/dist/services/secrets/secrets.js +94 -0
  83. package/dist/services/shell/shell.js +28 -0
  84. package/dist/services/supabase/supabase-registrar.js +367 -0
  85. package/dist/services/supabase/supabase.js +9 -0
  86. package/dist/services/zapier.js +16 -0
  87. package/dist/simple-api-server.js +148 -0
  88. package/dist/store/store.js +31 -0
  89. package/dist/util/lib.util.js +11 -0
  90. package/package.json +144 -0
package/dist/cicd/webhook-receiver.js
@@ -0,0 +1,634 @@
+ import express from 'express';
+ import { createServer } from 'http';
+ import { Server } from 'socket.io';
+ import crypto from 'crypto';
+ import { createClient } from '@supabase/supabase-js';
+ import Redis from 'ioredis';
+ import { Pool } from 'pg';
+ import * as path from 'path';
+ import { fileURLToPath } from 'url';
+ import { generateAnalyticsReport, generateTrendAnalysis, detectBuildAnomalies, generateInsights, predictNextPeriod, calculateCostAnalysis, detectBottlenecks } from './analytics.js';
+ import { CacheManager } from './cache-manager.js';
+ import { AuthService, authenticate, authorize, rateLimit } from './auth.js';
+ import { performanceMonitor } from './performance-monitor.js';
+ import { DataRetentionService } from './data-retention.js';
+ const __filename = fileURLToPath(import.meta.url);
+ const __dirname = path.dirname(__filename);
+ const app = express();
+ const server = createServer(app);
+ const io = new Server(server, {
+     cors: {
+         origin: "*",
+         methods: ["GET", "POST"]
+     }
+ });
+ app.use(express.json({ limit: '10mb' }));
+ app.use(express.raw({ type: 'application/json', limit: '10mb' }));
+ // Serve static dashboard files
+ app.use('/dashboard', express.static(path.join(__dirname, 'dashboard')));
+ const GITHUB_WEBHOOK_SECRET = process.env.GITHUB_WEBHOOK_SECRET;
+ const GITLAB_WEBHOOK_SECRET = process.env.GITLAB_WEBHOOK_SECRET;
+ const JENKINS_WEBHOOK_SECRET = process.env.JENKINS_WEBHOOK_SECRET;
+ const SUPABASE_URL = process.env.SUPABASE_URL;
+ const SUPABASE_ANON_KEY = process.env.SUPABASE_ANON_KEY;
+ const REDIS_URL = process.env.REDIS_URL || 'redis://localhost:6379';
+ const supabase = SUPABASE_URL && SUPABASE_ANON_KEY ?
+     createClient(SUPABASE_URL, SUPABASE_ANON_KEY) : null;
+ const redis = new Redis(REDIS_URL);
+ // Initialize PostgreSQL pool
+ const pool = new Pool({
+     connectionString: process.env.DATABASE_URL || 'postgresql://localhost:5432/cicd'
+ });
+ // Initialize services
+ const cacheManager = new CacheManager(REDIS_URL);
+ const authService = new AuthService(pool);
+ const retentionService = new DataRetentionService(pool, redis);
+ // Apply performance monitoring middleware
+ app.use(performanceMonitor.middleware());
+ function verifyGitHubSignature(payload, signature) {
+     if (!GITHUB_WEBHOOK_SECRET) {
+         if (process.env.NODE_ENV === 'production') {
+             console.error('GITHUB_WEBHOOK_SECRET not set in production');
+             return false;
+         }
+         console.warn('WARNING: GITHUB_WEBHOOK_SECRET not set - skipping verification (dev mode)');
+         return true;
+     }
+     if (!signature) {
+         console.error('No signature provided in webhook request');
+         return false;
+     }
+     const hmac = crypto.createHmac('sha256', GITHUB_WEBHOOK_SECRET);
+     const digest = hmac.update(payload, 'utf8').digest('hex');
+     const checksum = `sha256=${digest}`;
+     return crypto.timingSafeEqual(Buffer.from(signature, 'utf8'), Buffer.from(checksum, 'utf8'));
+ }
+ function parseGitHubWorkflowEvent(body) {
+     const { workflow_run, workflow_job, action } = body;
+     if (!workflow_run && !workflow_job)
+         return null;
+     const run = workflow_run || workflow_job.workflow_run;
+     const job = workflow_job;
+     const event = {
+         id: job ? `${run.id}-${job.id}` : run.id.toString(),
+         platform: 'github',
+         repository: run.repository.full_name,
+         branch: run.head_branch,
+         commit_sha: run.head_sha,
+         status: mapGitHubStatus(job?.status || run.status),
+         conclusion: job?.conclusion || run.conclusion,
+         workflow_name: run.workflow?.name || run.name,
+         job_name: job?.name,
+         started_at: job?.started_at || run.created_at,
+         completed_at: job?.completed_at || run.updated_at,
+         duration_ms: calculateDuration(job?.started_at || run.created_at, job?.completed_at || run.updated_at),
+         actor: run.actor.login,
+         event_type: action,
+         workflow_url: run.html_url,
+         logs_url: job?.html_url,
+         metadata: {
+             run_number: run.run_number,
+             attempt: run.run_attempt,
+             workflow_id: run.workflow_id,
+             job_id: job?.id,
+             runner_id: job?.runner_id,
+             runner_name: job?.runner_name
+         }
+     };
+     return event;
+ }
+ function mapGitHubStatus(status) {
+     switch (status) {
+         case 'queued': return 'queued';
+         case 'in_progress': return 'in_progress';
+         case 'completed': return 'completed';
+         default: return 'failed';
+     }
+ }
+ function calculateDuration(startTime, endTime) {
+     if (!endTime)
+         return undefined;
+     return new Date(endTime).getTime() - new Date(startTime).getTime();
+ }
+ function verifyGitLabSignature(payload, signature) {
+     if (!GITLAB_WEBHOOK_SECRET) {
+         if (process.env.NODE_ENV === 'production') {
+             console.error('GITLAB_WEBHOOK_SECRET not set in production');
+             return false;
+         }
+         console.warn('WARNING: GITLAB_WEBHOOK_SECRET not set - skipping verification (dev mode)');
+         return true;
+     }
+     if (!signature) {
+         console.error('No signature provided in GitLab webhook request');
+         return false;
+     }
+     const hmac = crypto.createHmac('sha256', GITLAB_WEBHOOK_SECRET);
+     const digest = hmac.update(payload, 'utf8').digest('hex');
+     return crypto.timingSafeEqual(Buffer.from(signature, 'utf8'), Buffer.from(digest, 'utf8'));
+ }
+ function parseGitLabPipelineEvent(body) {
+     const { object_kind, object_attributes, project, user } = body;
+     if (object_kind !== 'pipeline' && object_kind !== 'job')
+         return null;
+     const pipeline = object_attributes;
+     const isJob = object_kind === 'job';
+     if (!pipeline)
+         return null;
+     const event = {
+         id: isJob ? `${pipeline.pipeline_id}-${pipeline.id}` : pipeline.id.toString(),
+         platform: 'gitlab',
+         repository: project.path_with_namespace,
+         branch: pipeline.ref,
+         commit_sha: pipeline.sha || pipeline.commit?.id || 'unknown',
+         status: mapGitLabStatus(pipeline.status),
+         conclusion: mapGitLabConclusion(pipeline.status),
+         workflow_name: isJob ? pipeline.stage : `Pipeline ${pipeline.id}`,
+         job_name: isJob ? pipeline.name : undefined,
+         started_at: pipeline.started_at || pipeline.created_at,
+         completed_at: pipeline.finished_at,
+         duration_ms: pipeline.duration ? pipeline.duration * 1000 : calculateDuration(pipeline.started_at || pipeline.created_at, pipeline.finished_at),
+         actor: user?.username || user?.name || 'unknown',
+         event_type: object_kind,
+         workflow_url: project.web_url + '/-/pipelines/' + (isJob ? pipeline.pipeline_id : pipeline.id),
+         logs_url: isJob ? project.web_url + '/-/jobs/' + pipeline.id : undefined,
+         metadata: {
+             pipeline_id: isJob ? pipeline.pipeline_id : pipeline.id,
+             job_id: isJob ? pipeline.id : undefined,
+             stage: pipeline.stage,
+             runner_id: pipeline.runner?.id,
+             runner_description: pipeline.runner?.description,
+             tag_list: pipeline.tag_list || [],
+             variables: pipeline.variables || []
+         }
+     };
+     return event;
+ }
+ function mapGitLabStatus(status) {
+     switch (status) {
+         case 'created':
+         case 'waiting_for_resource':
+         case 'preparing':
+             return 'queued';
+         case 'pending':
+         case 'running':
+             return 'in_progress';
+         case 'success':
+             return 'completed';
+         case 'failed':
+         case 'canceled':
+         case 'skipped':
+             return 'failed';
+         default:
+             return 'failed';
+     }
+ }
+ function mapGitLabConclusion(status) {
+     switch (status) {
+         case 'success': return 'success';
+         case 'failed': return 'failure';
+         case 'canceled': return 'cancelled';
+         case 'skipped': return 'skipped';
+         default: return 'failure';
+     }
+ }
+ function parseJenkinsEvent(body) {
+     const { name, url, build, timestamp } = body;
+     if (!build)
+         return null;
+     const buildUrl = url + build.number + '/';
+     const duration = build.duration || (Date.now() - timestamp);
+     const event = {
+         id: `jenkins-${name}-${build.number}`,
+         platform: 'jenkins',
+         repository: name, // Jenkins job name as repository
+         branch: build.parameters?.BRANCH_NAME || build.parameters?.GIT_BRANCH || 'main',
+         commit_sha: build.parameters?.GIT_COMMIT || build.scm?.SHA1 || 'unknown',
+         status: mapJenkinsStatus(build.phase || build.status),
+         conclusion: mapJenkinsConclusion(build.result),
+         workflow_name: name,
+         job_name: build.fullDisplayName,
+         started_at: new Date(timestamp).toISOString(),
+         completed_at: build.result ? new Date(timestamp + duration).toISOString() : undefined,
+         duration_ms: build.result ? duration : undefined,
+         actor: build.parameters?.TRIGGERED_BY || 'jenkins',
+         event_type: build.phase || 'build',
+         workflow_url: buildUrl,
+         logs_url: buildUrl + 'console',
+         metadata: {
+             job_name: name,
+             build_number: build.number,
+             queue_id: build.queue_id,
+             executor: build.executor,
+             node: build.builtOn,
+             parameters: build.parameters || {},
+             causes: build.causes || []
+         }
+     };
+     return event;
+ }
+ function mapJenkinsStatus(phase) {
+     switch (phase?.toLowerCase()) {
+         case 'queued': return 'queued';
+         case 'started':
+         case 'running': return 'in_progress';
+         case 'completed':
+         case 'finished': return 'completed';
+         default: return 'failed';
+     }
+ }
+ function mapJenkinsConclusion(result) {
+     switch (result?.toUpperCase()) {
+         case 'SUCCESS': return 'success';
+         case 'FAILURE': return 'failure';
+         case 'ABORTED': return 'cancelled';
+         case 'UNSTABLE': return 'failure';
+         default: return undefined;
+     }
+ }
+ async function storePipelineEvent(event) {
+     try {
+         // Store in PostgreSQL via Supabase
+         if (supabase) {
+             const { error } = await supabase
+                 .from('pipeline_events')
+                 .upsert(event);
+             if (error) {
+                 console.error('Error storing to Supabase:', error);
+             }
+         }
+         // Cache in Redis for real-time access
+         await redis.setex(`pipeline:${event.id}`, 3600, JSON.stringify(event));
+         // Update metrics in Redis
+         await updateMetrics(event);
+         // Emit real-time update to all connected clients
+         io.emit('pipeline_event', {
+             type: 'pipeline_update',
+             event: event,
+             timestamp: new Date().toISOString()
+         });
+         // Emit updated metrics
+         const updatedMetrics = await getLatestMetricsFromRedis();
+         io.emit('metrics_update', {
+             type: 'metrics_update',
+             metrics: updatedMetrics,
+             timestamp: new Date().toISOString()
+         });
+         console.log(`Stored pipeline event: ${event.id} (${event.status})`);
+     }
+     catch (error) {
+         console.error('Error storing pipeline event:', error);
+     }
+ }
+ async function updateMetrics(event) {
+     const today = new Date().toISOString().split('T')[0];
+     const key = `metrics:${today}`;
+     // Update daily metrics
+     await redis.hincrby(key, 'total_builds', 1);
+     if (event.status === 'completed') {
+         if (event.conclusion === 'success') {
+             await redis.hincrby(key, 'successful_builds', 1);
+         }
+         else {
+             await redis.hincrby(key, 'failed_builds', 1);
+         }
+     }
+     if (event.duration_ms) {
+         await redis.lpush(`durations:${today}`, event.duration_ms);
+         await redis.ltrim(`durations:${today}`, 0, 999); // Keep last 1000 durations
+     }
+     // Set expiry for daily metrics (30 days)
+     await redis.expire(key, 30 * 24 * 60 * 60);
+ }
+ async function getLatestMetricsFromRedis() {
+     const today = new Date().toISOString().split('T')[0];
+     const key = `metrics:${today}`;
+     const metrics = await redis.hgetall(key);
+     const durations = await redis.lrange(`durations:${today}`, 0, -1);
+     const totalBuilds = parseInt(metrics.total_builds || '0');
+     const successfulBuilds = parseInt(metrics.successful_builds || '0');
+     const failedBuilds = parseInt(metrics.failed_builds || '0');
+     const avgDuration = durations.length > 0
+         ? durations.reduce((sum, d) => sum + parseInt(d), 0) / durations.length
+         : 0;
+     return {
+         totalBuilds,
+         successfulBuilds,
+         failedBuilds,
+         successRate: totalBuilds > 0 ? (successfulBuilds / totalBuilds) * 100 : 0,
+         avgDurationMs: Math.round(avgDuration),
+         activePipelines: await redis.keys('pipeline:*').then(keys => keys.length),
+         timestamp: new Date().toISOString()
+     };
+ }
+ // WebSocket connection handling
+ io.on('connection', (socket) => {
+     console.log(`Client connected: ${socket.id}`);
+     // Send current metrics on connection
+     getLatestMetricsFromRedis().then(metrics => {
+         socket.emit('metrics_update', {
+             type: 'metrics_update',
+             metrics: metrics,
+             timestamp: new Date().toISOString()
+         });
+     });
+     socket.on('disconnect', () => {
+         console.log(`Client disconnected: ${socket.id}`);
+     });
+     socket.on('subscribe_logs', (pipelineId) => {
+         socket.join(`logs:${pipelineId}`);
+         console.log(`Client ${socket.id} subscribed to logs for pipeline ${pipelineId}`);
+     });
+     socket.on('unsubscribe_logs', (pipelineId) => {
+         socket.leave(`logs:${pipelineId}`);
+         console.log(`Client ${socket.id} unsubscribed from logs for pipeline ${pipelineId}`);
+     });
+ });
+ // GitHub webhook endpoint
+ app.post('/webhook/github', async (req, res) => {
+     try {
+         const signature = req.get('x-hub-signature-256') || '';
+         const payload = JSON.stringify(req.body);
+         if (!verifyGitHubSignature(payload, signature)) {
+             return res.status(401).json({ error: 'Invalid signature' });
+         }
+         const event = parseGitHubWorkflowEvent(req.body);
+         if (event) {
+             await storePipelineEvent(event);
+         }
+         res.status(200).json({ success: true });
+     }
+     catch (error) {
+         console.error('GitHub webhook error:', error);
+         res.status(500).json({ error: 'Internal server error' });
+     }
+ });
+ // GitLab webhook endpoint
+ app.post('/webhook/gitlab', async (req, res) => {
+     try {
+         const signature = req.get('x-gitlab-token') || '';
+         const payload = JSON.stringify(req.body);
+         if (!verifyGitLabSignature(payload, signature)) {
+             return res.status(401).json({ error: 'Invalid token' });
+         }
+         const event = parseGitLabPipelineEvent(req.body);
+         if (event) {
+             await storePipelineEvent(event);
+         }
+         res.status(200).json({ success: true });
+     }
+     catch (error) {
+         console.error('GitLab webhook error:', error);
+         res.status(500).json({ error: 'Internal server error' });
+     }
+ });
+ // Jenkins webhook endpoint
+ app.post('/webhook/jenkins', async (req, res) => {
+     try {
+         // Jenkins doesn't use HMAC signatures by default, but we can check for a token
+         const token = req.get('authorization') || req.get('x-jenkins-token') || '';
+         if (JENKINS_WEBHOOK_SECRET && token !== `Bearer ${JENKINS_WEBHOOK_SECRET}`) {
+             return res.status(401).json({ error: 'Invalid authorization' });
+         }
+         const event = parseJenkinsEvent(req.body);
+         if (event) {
+             await storePipelineEvent(event);
+         }
+         res.status(200).json({ success: true });
+     }
+     catch (error) {
+         console.error('Jenkins webhook error:', error);
+         res.status(500).json({ error: 'Internal server error' });
+     }
+ });
+ // Root route - redirect to main dashboard
+ app.get('/', (req, res) => {
+     res.redirect('/dashboard/');
+ });
+ // Dashboard routes
+ app.get('/dashboard/', (req, res) => {
+     res.sendFile(path.join(__dirname, 'dashboard', 'index.html'));
+ });
+ app.get('/dashboard/analytics', (req, res) => {
+     res.sendFile(path.join(__dirname, 'dashboard', 'analytics.html'));
+ });
+ app.get('/dashboard/admin', (req, res) => {
+     res.sendFile(path.join(__dirname, 'dashboard', 'admin.html'));
+ });
+ // Health check endpoint
+ app.get('/health', (req, res) => {
+     const health = performanceMonitor.getHealthStatus();
+     res.json({
+         status: health.status,
+         timestamp: new Date().toISOString(),
+         services: {
+             supabase: !!supabase,
+             redis: redis.status === 'ready',
+             postgres: pool ? 'connected' : 'disconnected'
+         },
+         performance: health.details
+     });
+ });
+ // Get recent pipeline events
+ app.get('/api/pipelines', async (req, res) => {
+     try {
+         const { limit = 50, status, repository, platform } = req.query;
+         let query = supabase?.from('pipeline_events').select('*');
+         if (status) {
+             query = query?.eq('status', status);
+         }
+         if (repository) {
+             query = query?.eq('repository', repository);
+         }
+         if (platform) {
+             query = query?.eq('platform', platform);
+         }
+         const { data, error } = await query
+             ?.order('started_at', { ascending: false })
+             ?.limit(Number(limit)) || { data: null, error: null };
+         if (error) {
+             throw error;
+         }
+         res.json(data || []);
+     }
+     catch (error) {
+         console.error('Error fetching pipelines:', error);
+         res.status(500).json({ error: 'Failed to fetch pipelines' });
+     }
+ });
+ // Get pipeline metrics
+ app.get('/api/metrics', async (req, res) => {
+     try {
+         const today = new Date().toISOString().split('T')[0];
+         const key = `metrics:${today}`;
+         const metrics = await redis.hgetall(key);
+         const durations = await redis.lrange(`durations:${today}`, 0, -1);
+         const totalBuilds = parseInt(metrics.total_builds || '0');
+         const successfulBuilds = parseInt(metrics.successful_builds || '0');
+         const failedBuilds = parseInt(metrics.failed_builds || '0');
+         const avgDuration = durations.length > 0
+             ? durations.reduce((sum, d) => sum + parseInt(d), 0) / durations.length
+             : 0;
+         res.json({
+             totalBuilds,
+             successfulBuilds,
+             failedBuilds,
+             successRate: totalBuilds > 0 ? (successfulBuilds / totalBuilds) * 100 : 0,
+             avgDurationMs: Math.round(avgDuration),
+             activePipelines: await redis.keys('pipeline:*').then(keys => keys.length),
+             timestamp: new Date().toISOString()
+         });
+     }
+     catch (error) {
+         console.error('Error fetching metrics:', error);
+         res.status(500).json({ error: 'Failed to fetch metrics' });
+     }
+ });
+ // Analytics endpoints
+ app.get('/api/analytics/report', async (req, res) => {
+     try {
+         const { period = 'weekly' } = req.query;
+         const report = await generateAnalyticsReport(period);
+         res.json(report);
+     }
+     catch (error) {
+         console.error('Error generating analytics report:', error);
+         res.status(500).json({ error: 'Failed to generate report' });
+     }
+ });
+ app.get('/api/analytics/trends', async (req, res) => {
+     try {
+         const { days = 30 } = req.query;
+         const trends = await generateTrendAnalysis(Number(days));
+         res.json(trends);
+     }
+     catch (error) {
+         console.error('Error fetching trends:', error);
+         res.status(500).json({ error: 'Failed to fetch trends' });
+     }
+ });
+ app.get('/api/analytics/anomalies', async (req, res) => {
+     try {
+         const { days = 30 } = req.query;
+         const trends = await generateTrendAnalysis(Number(days));
+         const anomalies = await detectBuildAnomalies(trends);
+         res.json(anomalies);
+     }
+     catch (error) {
+         console.error('Error detecting anomalies:', error);
+         res.status(500).json({ error: 'Failed to detect anomalies' });
+     }
+ });
+ app.get('/api/analytics/insights', async (req, res) => {
+     try {
+         const { days = 30 } = req.query;
+         const trends = await generateTrendAnalysis(Number(days));
+         const insights = await generateInsights(trends);
+         res.json(insights);
+     }
+     catch (error) {
+         console.error('Error generating insights:', error);
+         res.status(500).json({ error: 'Failed to generate insights' });
+     }
+ });
+ app.get('/api/analytics/predictions', async (req, res) => {
+     try {
+         const { days = 30 } = req.query;
+         const trends = await generateTrendAnalysis(Number(days));
+         const predictions = await predictNextPeriod(trends);
+         res.json(predictions);
+     }
+     catch (error) {
+         console.error('Error generating predictions:', error);
+         res.status(500).json({ error: 'Failed to generate predictions' });
+     }
+ });
+ app.get('/api/analytics/costs', async (req, res) => {
+     try {
+         const { days = 30 } = req.query;
+         const trends = await generateTrendAnalysis(Number(days));
+         const costAnalysis = await calculateCostAnalysis(trends);
+         res.json(costAnalysis);
+     }
+     catch (error) {
+         console.error('Error calculating costs:', error);
+         res.status(500).json({ error: 'Failed to calculate costs' });
+     }
+ });
+ app.get('/api/analytics/bottlenecks', async (req, res) => {
+     try {
+         const bottlenecks = await detectBottlenecks();
+         res.json(bottlenecks);
+     }
+     catch (error) {
+         console.error('Error detecting bottlenecks:', error);
+         res.status(500).json({ error: 'Failed to detect bottlenecks' });
+     }
+ });
+ // Authentication endpoints
+ app.post('/auth/register', async (req, res) => {
+     try {
+         const { email, password, name, role } = req.body;
+         const user = await authService.register(email, password, name, role);
+         const token = authService.generateToken(user);
+         res.json({ user, token });
+     }
+     catch (error) {
+         res.status(400).json({ error: error.message });
+     }
+ });
+ app.post('/auth/login', async (req, res) => {
+     try {
+         const { email, password } = req.body;
+         const result = await authService.login(email, password);
+         res.json(result);
+     }
+     catch (error) {
+         res.status(401).json({ error: error.message });
+     }
+ });
+ app.post('/auth/api-key', authenticate(authService), authorize('admin', 'developer'), async (req, res) => {
+     try {
+         const { name, permissions } = req.body;
+         const apiKey = await authService.generateApiKey(req.user.userId, name, permissions);
+         res.json({ apiKey });
+     }
+     catch (error) {
+         res.status(400).json({ error: error.message });
+     }
+ });
+ // Protected admin endpoints
+ app.get('/api/admin/performance', authenticate(authService), authorize('admin'), (req, res) => {
+     const metrics = performanceMonitor.getMetrics();
+     res.json(metrics);
+ });
+ app.get('/api/admin/cache/stats', authenticate(authService), authorize('admin'), async (req, res) => {
+     const stats = await cacheManager.getStats();
+     res.json(stats);
+ });
+ app.post('/api/admin/cache/clear', authenticate(authService), authorize('admin'), async (req, res) => {
+     await cacheManager.clear();
+     res.json({ message: 'Cache cleared successfully' });
+ });
+ app.get('/api/admin/retention/stats', authenticate(authService), authorize('admin'), async (req, res) => {
+     const stats = await retentionService.getRetentionStats();
+     res.json(stats);
+ });
+ app.post('/api/admin/retention/cleanup', authenticate(authService), authorize('admin'), async (req, res) => {
+     const { tableName } = req.body;
+     const result = await retentionService.triggerCleanup(tableName);
+     res.json(result);
+ });
+ // Apply rate limiting to API endpoints
+ const apiRateLimit = rateLimit({
+     windowMs: 15 * 60 * 1000, // 15 minutes
+     max: 100 // limit each user to 100 requests per windowMs
+ });
+ app.use('/api/', apiRateLimit);
+ const PORT = process.env.WEBHOOK_PORT || 3033;
+ server.listen(PORT, () => {
+     console.log(`🚀 CI/CD Webhook receiver running on port ${PORT}`);
+     console.log(`📊 Health check available at http://localhost:${PORT}/health`);
+     console.log(`🔄 WebSocket server enabled for real-time updates`);
+     console.log(`📈 Analytics API available at http://localhost:${PORT}/api/analytics/*`);
+ });
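
For local verification, a minimal sketch of a client that exercises the /webhook/github endpoint above, assuming the receiver is running on its default port 3033 and GITHUB_WEBHOOK_SECRET is exported in both processes; the file name, secret fallback, and payload fields below are illustrative stand-ins, not part of the package:

// test-github-webhook.mjs (hypothetical helper, not shipped with lsh-framework)
// Signs a trimmed, made-up workflow_run payload the same way the receiver
// verifies it: hex HMAC-SHA256 of the JSON body, prefixed with "sha256=",
// sent in the x-hub-signature-256 header.
import crypto from 'crypto';

const secret = process.env.GITHUB_WEBHOOK_SECRET || 'dev-secret';
const payload = {
    action: 'completed',
    workflow_run: {
        id: 1, run_number: 1, run_attempt: 1, workflow_id: 1,
        name: 'CI', status: 'completed', conclusion: 'success',
        head_branch: 'main', head_sha: 'deadbeef',
        created_at: new Date(Date.now() - 60000).toISOString(),
        updated_at: new Date().toISOString(),
        html_url: 'https://example.invalid/run/1',
        repository: { full_name: 'acme/demo' },
        actor: { login: 'octocat' }
    }
};
const body = JSON.stringify(payload);
const signature = 'sha256=' + crypto.createHmac('sha256', secret).update(body, 'utf8').digest('hex');

// Node 18+ global fetch; a valid signature should return 200 and { success: true }.
const res = await fetch('http://localhost:3033/webhook/github', {
    method: 'POST',
    headers: { 'content-type': 'application/json', 'x-hub-signature-256': signature },
    body
});
console.log(res.status, await res.json());

Note that the receiver verifies the HMAC over JSON.stringify(req.body) rather than the raw request bytes, so a test client must send compact JSON with the same key order for the digests to match.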