lsh-framework 1.1.0 → 1.2.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (73)
  1. package/README.md +70 -4
  2. package/dist/cli.js +104 -486
  3. package/dist/commands/doctor.js +427 -0
  4. package/dist/commands/init.js +371 -0
  5. package/dist/constants/api.js +94 -0
  6. package/dist/constants/commands.js +64 -0
  7. package/dist/constants/config.js +56 -0
  8. package/dist/constants/database.js +21 -0
  9. package/dist/constants/errors.js +79 -0
  10. package/dist/constants/index.js +28 -0
  11. package/dist/constants/paths.js +28 -0
  12. package/dist/constants/ui.js +73 -0
  13. package/dist/constants/validation.js +124 -0
  14. package/dist/daemon/lshd.js +11 -32
  15. package/dist/lib/daemon-client-helper.js +7 -4
  16. package/dist/lib/daemon-client.js +9 -2
  17. package/dist/lib/format-utils.js +163 -0
  18. package/dist/lib/job-manager.js +2 -1
  19. package/dist/lib/platform-utils.js +211 -0
  20. package/dist/lib/secrets-manager.js +11 -1
  21. package/dist/lib/string-utils.js +128 -0
  22. package/dist/services/daemon/daemon-registrar.js +3 -2
  23. package/dist/services/secrets/secrets.js +154 -30
  24. package/package.json +10 -74
  25. package/dist/app.js +0 -33
  26. package/dist/cicd/analytics.js +0 -261
  27. package/dist/cicd/auth.js +0 -269
  28. package/dist/cicd/cache-manager.js +0 -172
  29. package/dist/cicd/data-retention.js +0 -305
  30. package/dist/cicd/performance-monitor.js +0 -224
  31. package/dist/cicd/webhook-receiver.js +0 -640
  32. package/dist/commands/api.js +0 -346
  33. package/dist/commands/theme.js +0 -261
  34. package/dist/commands/zsh-import.js +0 -240
  35. package/dist/components/App.js +0 -1
  36. package/dist/components/Divider.js +0 -29
  37. package/dist/components/REPL.js +0 -43
  38. package/dist/components/Terminal.js +0 -232
  39. package/dist/components/UserInput.js +0 -30
  40. package/dist/daemon/api-server.js +0 -316
  41. package/dist/daemon/monitoring-api.js +0 -220
  42. package/dist/lib/api-error-handler.js +0 -185
  43. package/dist/lib/associative-arrays.js +0 -285
  44. package/dist/lib/base-api-server.js +0 -290
  45. package/dist/lib/brace-expansion.js +0 -160
  46. package/dist/lib/builtin-commands.js +0 -439
  47. package/dist/lib/executors/builtin-executor.js +0 -52
  48. package/dist/lib/extended-globbing.js +0 -411
  49. package/dist/lib/extended-parameter-expansion.js +0 -227
  50. package/dist/lib/interactive-shell.js +0 -460
  51. package/dist/lib/job-builtins.js +0 -582
  52. package/dist/lib/pathname-expansion.js +0 -216
  53. package/dist/lib/script-runner.js +0 -226
  54. package/dist/lib/shell-executor.js +0 -2504
  55. package/dist/lib/shell-parser.js +0 -958
  56. package/dist/lib/shell-types.js +0 -6
  57. package/dist/lib/shell.lib.js +0 -40
  58. package/dist/lib/theme-manager.js +0 -476
  59. package/dist/lib/variable-expansion.js +0 -385
  60. package/dist/lib/zsh-compatibility.js +0 -659
  61. package/dist/lib/zsh-import-manager.js +0 -707
  62. package/dist/lib/zsh-options.js +0 -328
  63. package/dist/pipeline/job-tracker.js +0 -491
  64. package/dist/pipeline/mcli-bridge.js +0 -309
  65. package/dist/pipeline/pipeline-service.js +0 -1119
  66. package/dist/pipeline/workflow-engine.js +0 -870
  67. package/dist/services/api/api.js +0 -58
  68. package/dist/services/api/auth.js +0 -35
  69. package/dist/services/api/config.js +0 -7
  70. package/dist/services/api/file.js +0 -22
  71. package/dist/services/shell/shell.js +0 -28
  72. package/dist/services/zapier.js +0 -16
  73. package/dist/simple-api-server.js +0 -148
@@ -1,640 +0,0 @@
1
- import express from 'express';
2
- import { createServer } from 'http';
3
- import { Server } from 'socket.io';
4
- import crypto from 'crypto';
5
- import { createClient } from '@supabase/supabase-js';
6
- import Redis from 'ioredis';
7
- import { Pool } from 'pg';
8
- import * as path from 'path';
9
- import { fileURLToPath } from 'url';
10
- import { generateAnalyticsReport, generateTrendAnalysis, detectBuildAnomalies, generateInsights, predictNextPeriod, calculateCostAnalysis, detectBottlenecks } from './analytics.js';
11
- import { CacheManager } from './cache-manager.js';
12
- import { AuthService, authenticate, authorize, rateLimit } from './auth.js';
13
- import { performanceMonitor } from './performance-monitor.js';
14
- import { DataRetentionService } from './data-retention.js';
15
// --- Service bootstrap -----------------------------------------------------
const __filename = fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename);
const app = express();
const server = createServer(app);
// Socket.IO server for pushing real-time pipeline/metrics updates.
// NOTE(review): origin "*" lets any page connect - consider restricting in production.
const io = new Server(server, {
    cors: {
        origin: "*",
        methods: ["GET", "POST"]
    }
});
// Parse JSON bodies, keeping the raw bytes for webhook signature checks.
// BUG FIX: the old `app.use(express.raw({ type: 'application/json' }))` line
// was dead code - express.json() had already consumed the request stream, so
// the raw parser never ran. The `verify` hook captures the exact request
// bytes instead, which HMAC signature verification needs.
app.use(express.json({
    limit: '10mb',
    verify: (req, _res, buf) => { req.rawBody = buf; }
}));
// Serve static dashboard files
app.use('/dashboard', express.static(path.join(__dirname, 'dashboard')));
// Webhook shared secrets and backing-service endpoints, all from environment.
const GITHUB_WEBHOOK_SECRET = process.env.GITHUB_WEBHOOK_SECRET;
const GITLAB_WEBHOOK_SECRET = process.env.GITLAB_WEBHOOK_SECRET;
const JENKINS_WEBHOOK_SECRET = process.env.JENKINS_WEBHOOK_SECRET;
const SUPABASE_URL = process.env.SUPABASE_URL;
const SUPABASE_ANON_KEY = process.env.SUPABASE_ANON_KEY;
const REDIS_URL = process.env.REDIS_URL || 'redis://localhost:6379';
// Supabase is optional: without credentials, event persistence is skipped.
const supabase = SUPABASE_URL && SUPABASE_ANON_KEY ?
    createClient(SUPABASE_URL, SUPABASE_ANON_KEY) : null;
const redis = new Redis(REDIS_URL);
// Initialize PostgreSQL pool
const pool = new Pool({
    connectionString: process.env.DATABASE_URL || 'postgresql://localhost:5432/cicd'
});
// Initialize services
const cacheManager = new CacheManager(REDIS_URL);
const authService = new AuthService(pool);
const retentionService = new DataRetentionService(pool, redis);
// Apply performance monitoring middleware
app.use(performanceMonitor.middleware());
/**
 * Verify a GitHub `x-hub-signature-256` header against the payload.
 * @param {string} payload - exact request body the HMAC was computed over
 * @param {string} signature - header value, expected form `sha256=<hex>`
 * @returns {boolean} true if the signature is valid (or verification is
 *   deliberately skipped because no secret is configured outside production)
 */
function verifyGitHubSignature(payload, signature) {
    // Without a configured secret: hard-fail in production, allow in dev.
    if (!GITHUB_WEBHOOK_SECRET) {
        if (process.env.NODE_ENV === 'production') {
            console.error('GITHUB_WEBHOOK_SECRET not set in production');
            return false;
        }
        console.warn('WARNING: GITHUB_WEBHOOK_SECRET not set - skipping verification (dev mode)');
        return true;
    }
    if (!signature) {
        console.error('No signature provided in webhook request');
        return false;
    }
    const hmac = crypto.createHmac('sha256', GITHUB_WEBHOOK_SECRET);
    const digest = hmac.update(payload, 'utf8').digest('hex');
    const expected = Buffer.from(`sha256=${digest}`, 'utf8');
    const provided = Buffer.from(signature, 'utf8');
    // BUG FIX: crypto.timingSafeEqual throws a RangeError when the buffers
    // differ in length, which previously turned any malformed signature
    // header into a 500 from the route handler instead of a clean rejection.
    if (provided.length !== expected.length) {
        return false;
    }
    return crypto.timingSafeEqual(provided, expected);
}
// eslint-disable-next-line @typescript-eslint/no-explicit-any
/**
 * Normalize a GitHub Actions `workflow_run` / `workflow_job` webhook payload
 * into the internal pipeline-event shape. Returns null for unrelated events.
 */
function parseGitHubWorkflowEvent(body) {
    const { workflow_run, workflow_job, action } = body;
    if (!workflow_run && !workflow_job) {
        return null;
    }
    // Job payloads embed their parent run; run payloads stand alone.
    const run = workflow_run || workflow_job.workflow_run;
    const job = workflow_job;
    const startedAt = job?.started_at || run.created_at;
    const completedAt = job?.completed_at || run.updated_at;
    return {
        // Job events get a compound id so they don't collide with their run.
        id: job ? `${run.id}-${job.id}` : run.id.toString(),
        platform: 'github',
        repository: run.repository.full_name,
        branch: run.head_branch,
        commit_sha: run.head_sha,
        status: mapGitHubStatus(job?.status || run.status),
        conclusion: job?.conclusion || run.conclusion,
        workflow_name: run.workflow?.name || run.name,
        job_name: job?.name,
        started_at: startedAt,
        completed_at: completedAt,
        duration_ms: calculateDuration(startedAt, completedAt),
        actor: run.actor.login,
        event_type: action,
        workflow_url: run.html_url,
        logs_url: job?.html_url,
        metadata: {
            run_number: run.run_number,
            attempt: run.run_attempt,
            workflow_id: run.workflow_id,
            job_id: job?.id,
            runner_id: job?.runner_id,
            runner_name: job?.runner_name
        }
    };
}
// Map a GitHub Actions status onto the internal status vocabulary.
// The three known values pass through; anything else counts as 'failed'.
function mapGitHubStatus(status) {
    const passthrough = new Map([
        ['queued', 'queued'],
        ['in_progress', 'in_progress'],
        ['completed', 'completed'],
    ]);
    return passthrough.get(status) ?? 'failed';
}
// Milliseconds between two ISO timestamps; undefined while still running
// (no end time yet).
function calculateDuration(startTime, endTime) {
    return endTime
        ? new Date(endTime).getTime() - new Date(startTime).getTime()
        : undefined;
}
/**
 * Validate a GitLab webhook's `X-Gitlab-Token` header.
 * @param {string} payload - request body (unused; kept for caller compatibility)
 * @param {string} signature - value of the X-Gitlab-Token header
 * @returns {boolean} true when the token matches the configured secret
 */
function verifyGitLabSignature(payload, signature) {
    // Without a configured secret: hard-fail in production, allow in dev.
    if (!GITLAB_WEBHOOK_SECRET) {
        if (process.env.NODE_ENV === 'production') {
            console.error('GITLAB_WEBHOOK_SECRET not set in production');
            return false;
        }
        console.warn('WARNING: GITLAB_WEBHOOK_SECRET not set - skipping verification (dev mode)');
        return true;
    }
    if (!signature) {
        console.error('No signature provided in GitLab webhook request');
        return false;
    }
    // BUG FIX: GitLab does not HMAC-sign webhook payloads - the
    // X-Gitlab-Token header carries the configured secret verbatim (see
    // GitLab webhook docs). The previous code compared an HMAC digest of the
    // payload against the raw token and therefore rejected every authentic
    // delivery. Compare token to secret with a timing-safe check instead;
    // timingSafeEqual requires equal lengths (it throws otherwise).
    const provided = Buffer.from(signature, 'utf8');
    const expected = Buffer.from(GITLAB_WEBHOOK_SECRET, 'utf8');
    if (provided.length !== expected.length) {
        return false;
    }
    return crypto.timingSafeEqual(provided, expected);
}
// eslint-disable-next-line @typescript-eslint/no-explicit-any
/**
 * Normalize a GitLab `pipeline` or `job` webhook payload into the internal
 * pipeline-event shape. Returns null for any other object_kind or when the
 * payload carries no object_attributes.
 */
function parseGitLabPipelineEvent(body) {
    const { object_kind, object_attributes, project, user } = body;
    if (object_kind !== 'pipeline' && object_kind !== 'job') {
        return null;
    }
    const attrs = object_attributes;
    const isJob = object_kind === 'job';
    if (!attrs) {
        return null;
    }
    const startedAt = attrs.started_at || attrs.created_at;
    // GitLab reports duration in seconds when it has one; otherwise fall
    // back to the timestamp difference.
    const durationMs = attrs.duration
        ? attrs.duration * 1000
        : calculateDuration(startedAt, attrs.finished_at);
    return {
        id: isJob ? `${attrs.pipeline_id}-${attrs.id}` : attrs.id.toString(),
        platform: 'gitlab',
        repository: project.path_with_namespace,
        branch: attrs.ref,
        commit_sha: attrs.sha || attrs.commit?.id || 'unknown',
        status: mapGitLabStatus(attrs.status),
        conclusion: mapGitLabConclusion(attrs.status),
        workflow_name: isJob ? attrs.stage : `Pipeline ${attrs.id}`,
        job_name: isJob ? attrs.name : undefined,
        started_at: startedAt,
        completed_at: attrs.finished_at,
        duration_ms: durationMs,
        actor: user?.username || user?.name || 'unknown',
        event_type: object_kind,
        workflow_url: project.web_url + '/-/pipelines/' + (isJob ? attrs.pipeline_id : attrs.id),
        logs_url: isJob ? project.web_url + '/-/jobs/' + attrs.id : undefined,
        metadata: {
            pipeline_id: isJob ? attrs.pipeline_id : attrs.id,
            job_id: isJob ? attrs.id : undefined,
            stage: attrs.stage,
            runner_id: attrs.runner?.id,
            runner_description: attrs.runner?.description,
            tag_list: attrs.tag_list || [],
            variables: attrs.variables || []
        }
    };
}
// Map a GitLab pipeline/job status onto the internal status vocabulary.
// failed/canceled/skipped and any unknown value all collapse to 'failed'.
function mapGitLabStatus(status) {
    const table = new Map([
        ['created', 'queued'],
        ['waiting_for_resource', 'queued'],
        ['preparing', 'queued'],
        ['pending', 'in_progress'],
        ['running', 'in_progress'],
        ['success', 'completed'],
    ]);
    return table.get(status) ?? 'failed';
}
// Map a terminal GitLab status onto the internal conclusion vocabulary;
// unknown values are reported as 'failure'.
function mapGitLabConclusion(status) {
    const table = new Map([
        ['success', 'success'],
        ['failed', 'failure'],
        ['canceled', 'cancelled'],
        ['skipped', 'skipped'],
    ]);
    return table.get(status) ?? 'failure';
}
// eslint-disable-next-line @typescript-eslint/no-explicit-any
/**
 * Normalize a Jenkins notification payload into the internal pipeline-event
 * shape. Returns null when the payload has no build section.
 */
function parseJenkinsEvent(body) {
    const { name, url, build, timestamp } = body;
    if (!build) {
        return null;
    }
    const buildUrl = `${url}${build.number}/`;
    // Fall back to wall-clock elapsed time while the build is in flight.
    const duration = build.duration || (Date.now() - timestamp);
    const finished = Boolean(build.result);
    return {
        id: `jenkins-${name}-${build.number}`,
        platform: 'jenkins',
        repository: name, // Jenkins job name as repository
        branch: build.parameters?.BRANCH_NAME || build.parameters?.GIT_BRANCH || 'main',
        commit_sha: build.parameters?.GIT_COMMIT || build.scm?.SHA1 || 'unknown',
        status: mapJenkinsStatus(build.phase || build.status),
        conclusion: mapJenkinsConclusion(build.result),
        workflow_name: name,
        job_name: build.fullDisplayName,
        started_at: new Date(timestamp).toISOString(),
        completed_at: finished ? new Date(timestamp + duration).toISOString() : undefined,
        duration_ms: finished ? duration : undefined,
        actor: build.parameters?.TRIGGERED_BY || 'jenkins',
        event_type: build.phase || 'build',
        workflow_url: buildUrl,
        logs_url: `${buildUrl}console`,
        metadata: {
            job_name: name,
            build_number: build.number,
            queue_id: build.queue_id,
            executor: build.executor,
            node: build.builtOn,
            parameters: build.parameters || {},
            causes: build.causes || []
        }
    };
}
// Map a Jenkins notification phase (case-insensitive) onto the internal
// status vocabulary; unknown or missing phases count as 'failed'.
function mapJenkinsStatus(phase) {
    const normalized = phase?.toLowerCase();
    if (normalized === 'queued') {
        return 'queued';
    }
    if (normalized === 'started' || normalized === 'running') {
        return 'in_progress';
    }
    if (normalized === 'completed' || normalized === 'finished') {
        return 'completed';
    }
    return 'failed';
}
// Map a Jenkins build result (case-insensitive) onto the internal
// conclusion vocabulary. Unknown or missing results yield undefined.
function mapJenkinsConclusion(result) {
    const table = new Map([
        ['SUCCESS', 'success'],
        ['FAILURE', 'failure'],
        ['ABORTED', 'cancelled'],
        ['UNSTABLE', 'failure'],
    ]);
    return table.get(result?.toUpperCase());
}
/**
 * Persist a normalized pipeline event and fan it out to dashboard clients:
 * Supabase (when configured), a 1-hour Redis cache entry, the daily metric
 * counters, and two Socket.IO broadcasts. Errors are logged, never thrown.
 */
async function storePipelineEvent(event) {
    try {
        // Store in PostgreSQL via Supabase (optional - may be unconfigured).
        if (supabase) {
            const { error } = await supabase
                .from('pipeline_events')
                .upsert(event);
            if (error) {
                console.error('Error storing to Supabase:', error);
            }
        }
        // Cache in Redis for real-time access (1-hour TTL).
        await redis.setex(`pipeline:${event.id}`, 3600, JSON.stringify(event));
        // Update daily metrics in Redis.
        await updateMetrics(event);
        // Broadcast the event itself, then the refreshed metric snapshot.
        io.emit('pipeline_event', {
            type: 'pipeline_update',
            event,
            timestamp: new Date().toISOString()
        });
        const updatedMetrics = await getLatestMetricsFromRedis();
        io.emit('metrics_update', {
            type: 'metrics_update',
            metrics: updatedMetrics,
            timestamp: new Date().toISOString()
        });
        console.log(`Stored pipeline event: ${event.id} (${event.status})`);
    }
    catch (error) {
        console.error('Error storing pipeline event:', error);
    }
}
/**
 * Bump today's Redis build counters for one event and record its duration
 * (bounded list of the most recent 1000). The daily hash expires after 30 days.
 */
async function updateMetrics(event) {
    const today = new Date().toISOString().split('T')[0];
    const key = `metrics:${today}`;
    await redis.hincrby(key, 'total_builds', 1);
    // Only completed events contribute to the success/failure split.
    if (event.status === 'completed') {
        const bucket = event.conclusion === 'success'
            ? 'successful_builds'
            : 'failed_builds';
        await redis.hincrby(key, bucket, 1);
    }
    if (event.duration_ms) {
        await redis.lpush(`durations:${today}`, event.duration_ms);
        await redis.ltrim(`durations:${today}`, 0, 999); // Keep last 1000 durations
    }
    // Set expiry for daily metrics (30 days)
    await redis.expire(key, 30 * 24 * 60 * 60);
}
/**
 * Build today's metric snapshot from the Redis counters and duration list.
 * @returns {Promise<object>} totals, success rate (percent), average
 *   duration in ms, active pipeline count, and a snapshot timestamp.
 */
async function getLatestMetricsFromRedis() {
    const day = new Date().toISOString().split('T')[0];
    const counters = await redis.hgetall(`metrics:${day}`);
    const durations = await redis.lrange(`durations:${day}`, 0, -1);
    const totalBuilds = parseInt(counters.total_builds || '0');
    const successfulBuilds = parseInt(counters.successful_builds || '0');
    const failedBuilds = parseInt(counters.failed_builds || '0');
    let avgDuration = 0;
    if (durations.length > 0) {
        const sum = durations.reduce((acc, d) => acc + parseInt(d), 0);
        avgDuration = sum / durations.length;
    }
    // NOTE(review): KEYS is O(N) over the whole keyspace and blocks Redis;
    // consider SCAN or a counter if the instance grows large.
    const pipelineKeys = await redis.keys('pipeline:*');
    return {
        totalBuilds,
        successfulBuilds,
        failedBuilds,
        successRate: totalBuilds > 0 ? (successfulBuilds / totalBuilds) * 100 : 0,
        avgDurationMs: Math.round(avgDuration),
        activePipelines: pipelineKeys.length,
        timestamp: new Date().toISOString()
    };
}
// WebSocket connection handling
io.on('connection', (socket) => {
    console.log(`Client connected: ${socket.id}`);
    // Send current metrics on connection.
    // BUG FIX: this promise was floating - any Redis error here surfaced as
    // an unhandled rejection. Route failures to the log instead.
    getLatestMetricsFromRedis()
        .then((metrics) => {
            socket.emit('metrics_update', {
                type: 'metrics_update',
                metrics: metrics,
                timestamp: new Date().toISOString()
            });
        })
        .catch((err) => {
            console.error('Failed to send initial metrics:', err);
        });
    socket.on('disconnect', () => {
        console.log(`Client disconnected: ${socket.id}`);
    });
    // Per-pipeline log rooms (`logs:<id>`); presumably emitted to from code
    // not visible in this file - confirm before removing.
    socket.on('subscribe_logs', (pipelineId) => {
        socket.join(`logs:${pipelineId}`);
        console.log(`Client ${socket.id} subscribed to logs for pipeline ${pipelineId}`);
    });
    socket.on('unsubscribe_logs', (pipelineId) => {
        socket.leave(`logs:${pipelineId}`);
        console.log(`Client ${socket.id} unsubscribed from logs for pipeline ${pipelineId}`);
    });
});
// GitHub webhook endpoint
app.post('/webhook/github', async (req, res) => {
    try {
        const signature = req.get('x-hub-signature-256') || '';
        // BUG FIX: GitHub computes the HMAC over the exact request bytes;
        // JSON.stringify(req.body) is not guaranteed to reproduce them
        // byte-for-byte (key order, whitespace, unicode escapes). Prefer the
        // captured raw body, falling back to re-serialization when absent.
        const payload = req.rawBody
            ? req.rawBody.toString('utf8')
            : JSON.stringify(req.body);
        if (!verifyGitHubSignature(payload, signature)) {
            return res.status(401).json({ error: 'Invalid signature' });
        }
        const event = parseGitHubWorkflowEvent(req.body);
        if (event) {
            await storePipelineEvent(event);
        }
        res.status(200).json({ success: true });
    }
    catch (error) {
        console.error('GitHub webhook error:', error);
        res.status(500).json({ error: 'Internal server error' });
    }
});
// GitLab webhook endpoint
app.post('/webhook/gitlab', async (req, res) => {
    try {
        const signature = req.get('x-gitlab-token') || '';
        // Prefer the exact raw request bytes when the body parser captured
        // them; re-serializing req.body is not byte-accurate. Falls back to
        // JSON.stringify so this route works without the capture hook.
        const payload = req.rawBody
            ? req.rawBody.toString('utf8')
            : JSON.stringify(req.body);
        if (!verifyGitLabSignature(payload, signature)) {
            return res.status(401).json({ error: 'Invalid token' });
        }
        const event = parseGitLabPipelineEvent(req.body);
        if (event) {
            await storePipelineEvent(event);
        }
        res.status(200).json({ success: true });
    }
    catch (error) {
        console.error('GitLab webhook error:', error);
        res.status(500).json({ error: 'Internal server error' });
    }
});
// Jenkins webhook endpoint
app.post('/webhook/jenkins', async (req, res) => {
    try {
        // Jenkins doesn't use HMAC signatures by default, but we can check for a token
        const token = req.get('authorization') || req.get('x-jenkins-token') || '';
        if (JENKINS_WEBHOOK_SECRET) {
            const provided = Buffer.from(token, 'utf8');
            const expected = Buffer.from(`Bearer ${JENKINS_WEBHOOK_SECRET}`, 'utf8');
            // BUG FIX: the previous plain `!==` comparison was not
            // constant-time; compare with timingSafeEqual (equal lengths
            // required - it throws otherwise, hence the length guard).
            if (provided.length !== expected.length || !crypto.timingSafeEqual(provided, expected)) {
                return res.status(401).json({ error: 'Invalid authorization' });
            }
        }
        const event = parseJenkinsEvent(req.body);
        if (event) {
            await storePipelineEvent(event);
        }
        res.status(200).json({ success: true });
    }
    catch (error) {
        console.error('Jenkins webhook error:', error);
        res.status(500).json({ error: 'Internal server error' });
    }
});
// Root route - redirect to main dashboard
app.get('/', (_req, res) => {
    res.redirect('/dashboard/');
});
// Dashboard pages, each served from the bundled dashboard directory.
const dashboardPages = [
    ['/dashboard/', 'index.html'],
    ['/dashboard/analytics', 'analytics.html'],
    ['/dashboard/admin', 'admin.html'],
];
for (const [route, file] of dashboardPages) {
    app.get(route, (_req, res) => {
        res.sendFile(path.join(__dirname, 'dashboard', file));
    });
}
// Health check endpoint: overall status from the performance monitor plus a
// per-backing-service availability summary.
app.get('/health', (_req, res) => {
    const health = performanceMonitor.getHealthStatus();
    const services = {
        supabase: Boolean(supabase),
        redis: redis.status === 'ready',
        postgres: pool ? 'connected' : 'disconnected'
    };
    res.json({
        status: health.status,
        timestamp: new Date().toISOString(),
        services,
        performance: health.details
    });
});
// Recent pipeline events, optionally filtered by status/repository/platform.
// When Supabase is not configured every optional chain short-circuits and
// the handler responds with an empty list.
app.get('/api/pipelines', async (req, res) => {
    try {
        const { limit = 50, status, repository, platform } = req.query;
        let query = supabase?.from('pipeline_events').select('*');
        const filters = [
            ['status', status],
            ['repository', repository],
            ['platform', platform],
        ];
        for (const [column, value] of filters) {
            if (value) {
                query = query?.eq(column, value);
            }
        }
        const { data, error } = await query
            ?.order('started_at', { ascending: false })
            ?.limit(Number(limit)) || { data: null, error: null };
        if (error) {
            throw error;
        }
        res.json(data || []);
    }
    catch (error) {
        console.error('Error fetching pipelines:', error);
        res.status(500).json({ error: 'Failed to fetch pipelines' });
    }
});
// Get pipeline metrics
// CONSISTENCY FIX: this handler previously duplicated
// getLatestMetricsFromRedis() line-for-line (same keys, same math).
// Delegate to it so the REST snapshot and the WebSocket snapshot can
// never drift apart.
app.get('/api/metrics', async (req, res) => {
    try {
        res.json(await getLatestMetricsFromRedis());
    }
    catch (error) {
        console.error('Error fetching metrics:', error);
        res.status(500).json({ error: 'Failed to fetch metrics' });
    }
});
// Analytics endpoints
app.get('/api/analytics/report', async (req, res) => {
    try {
        const { period = 'weekly' } = req.query;
        const report = await generateAnalyticsReport(period);
        res.json(report);
    }
    catch (error) {
        console.error('Error generating analytics report:', error);
        res.status(500).json({ error: 'Failed to generate report' });
    }
});
// CONSISTENCY FIX: the five trend-derived endpoints repeated identical
// boilerplate (parse `days`, build the trend analysis, derive a payload,
// report errors). Register them through one helper so they stay in sync.
// `derive` maps the trend analysis to the response body.
const registerTrendRoute = (route, derive, logPrefix, clientError) => {
    app.get(route, async (req, res) => {
        try {
            const { days = 30 } = req.query;
            const trends = await generateTrendAnalysis(Number(days));
            res.json(await derive(trends));
        }
        catch (error) {
            console.error(logPrefix, error);
            res.status(500).json({ error: clientError });
        }
    });
};
registerTrendRoute('/api/analytics/trends', async (trends) => trends, 'Error fetching trends:', 'Failed to fetch trends');
registerTrendRoute('/api/analytics/anomalies', detectBuildAnomalies, 'Error detecting anomalies:', 'Failed to detect anomalies');
registerTrendRoute('/api/analytics/insights', generateInsights, 'Error generating insights:', 'Failed to generate insights');
registerTrendRoute('/api/analytics/predictions', predictNextPeriod, 'Error generating predictions:', 'Failed to generate predictions');
registerTrendRoute('/api/analytics/costs', calculateCostAnalysis, 'Error calculating costs:', 'Failed to calculate costs');
app.get('/api/analytics/bottlenecks', async (req, res) => {
    try {
        const bottlenecks = await detectBottlenecks();
        res.json(bottlenecks);
    }
    catch (error) {
        console.error('Error detecting bottlenecks:', error);
        res.status(500).json({ error: 'Failed to detect bottlenecks' });
    }
});
// Authentication endpoints
// Registration returns the created user plus a freshly minted JWT.
app.post('/auth/register', async (req, res) => {
    try {
        const { email, password, name, role } = req.body;
        const user = await authService.register(email, password, name, role);
        res.json({ user, token: authService.generateToken(user) });
    }
    catch (error) {
        res.status(400).json({ error: error.message });
    }
});
// Login: credential failures surface as 401 with the service's message.
app.post('/auth/login', async (req, res) => {
    try {
        const { email, password } = req.body;
        res.json(await authService.login(email, password));
    }
    catch (error) {
        res.status(401).json({ error: error.message });
    }
});
// API-key issuance, restricted to admin/developer roles.
app.post('/auth/api-key', authenticate(authService), authorize('admin', 'developer'), async (req, res) => {
    try {
        const { name, permissions } = req.body;
        const apiKey = await authService.generateApiKey(req.user.userId, name, permissions);
        res.json({ apiKey });
    }
    catch (error) {
        res.status(400).json({ error: error.message });
    }
});
// Protected admin endpoints (authenticated, admin role only).
app.get('/api/admin/performance', authenticate(authService), authorize('admin'), (_req, res) => {
    res.json(performanceMonitor.getMetrics());
});
app.get('/api/admin/cache/stats', authenticate(authService), authorize('admin'), async (_req, res) => {
    res.json(await cacheManager.getStats());
});
app.post('/api/admin/cache/clear', authenticate(authService), authorize('admin'), async (_req, res) => {
    await cacheManager.clear();
    res.json({ message: 'Cache cleared successfully' });
});
app.get('/api/admin/retention/stats', authenticate(authService), authorize('admin'), async (_req, res) => {
    res.json(await retentionService.getRetentionStats());
});
// Kick off a retention cleanup for one table (or the service default).
app.post('/api/admin/retention/cleanup', authenticate(authService), authorize('admin'), async (req, res) => {
    const { tableName } = req.body;
    res.json(await retentionService.triggerCleanup(tableName));
});
// Apply rate limiting to API endpoints.
// FIXME(review): this middleware is registered AFTER the /api routes above;
// in Express, requests matched by earlier-registered handlers never reach
// later middleware, so the limiter appears to be inert. Moving this above
// the route definitions would activate it - confirm intent before shipping.
const PORT = process.env.WEBHOOK_PORT || 3033;
const apiRateLimit = rateLimit({
    windowMs: 15 * 60 * 1000, // 15 minutes
    max: 100 // limit each user to 100 requests per windowMs
});
app.use('/api/', apiRateLimit);
server.listen(PORT, () => {
    const banner = [
        `🚀 CI/CD Webhook receiver running on port ${PORT}`,
        `📊 Health check available at http://localhost:${PORT}/health`,
        `🔄 WebSocket server enabled for real-time updates`,
        `📈 Analytics API available at http://localhost:${PORT}/api/analytics/*`,
    ];
    for (const line of banner) {
        console.log(line);
    }
});