lsh-framework 0.5.4

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (90)
  1. package/.env.example +51 -0
  2. package/README.md +399 -0
  3. package/dist/app.js +33 -0
  4. package/dist/cicd/analytics.js +261 -0
  5. package/dist/cicd/auth.js +269 -0
  6. package/dist/cicd/cache-manager.js +172 -0
  7. package/dist/cicd/data-retention.js +305 -0
  8. package/dist/cicd/performance-monitor.js +224 -0
  9. package/dist/cicd/webhook-receiver.js +634 -0
  10. package/dist/cli.js +500 -0
  11. package/dist/commands/api.js +343 -0
  12. package/dist/commands/self.js +318 -0
  13. package/dist/commands/theme.js +257 -0
  14. package/dist/commands/zsh-import.js +240 -0
  15. package/dist/components/App.js +1 -0
  16. package/dist/components/Divider.js +29 -0
  17. package/dist/components/REPL.js +43 -0
  18. package/dist/components/Terminal.js +232 -0
  19. package/dist/components/UserInput.js +30 -0
  20. package/dist/daemon/api-server.js +315 -0
  21. package/dist/daemon/job-registry.js +554 -0
  22. package/dist/daemon/lshd.js +822 -0
  23. package/dist/daemon/monitoring-api.js +220 -0
  24. package/dist/examples/supabase-integration.js +106 -0
  25. package/dist/lib/api-error-handler.js +183 -0
  26. package/dist/lib/associative-arrays.js +285 -0
  27. package/dist/lib/base-api-server.js +290 -0
  28. package/dist/lib/base-command-registrar.js +286 -0
  29. package/dist/lib/base-job-manager.js +293 -0
  30. package/dist/lib/brace-expansion.js +160 -0
  31. package/dist/lib/builtin-commands.js +439 -0
  32. package/dist/lib/cloud-config-manager.js +347 -0
  33. package/dist/lib/command-validator.js +190 -0
  34. package/dist/lib/completion-system.js +344 -0
  35. package/dist/lib/cron-job-manager.js +364 -0
  36. package/dist/lib/daemon-client-helper.js +141 -0
  37. package/dist/lib/daemon-client.js +501 -0
  38. package/dist/lib/database-persistence.js +638 -0
  39. package/dist/lib/database-schema.js +259 -0
  40. package/dist/lib/enhanced-history-system.js +246 -0
  41. package/dist/lib/env-validator.js +265 -0
  42. package/dist/lib/executors/builtin-executor.js +52 -0
  43. package/dist/lib/extended-globbing.js +411 -0
  44. package/dist/lib/extended-parameter-expansion.js +227 -0
  45. package/dist/lib/floating-point-arithmetic.js +256 -0
  46. package/dist/lib/history-system.js +245 -0
  47. package/dist/lib/interactive-shell.js +460 -0
  48. package/dist/lib/job-builtins.js +580 -0
  49. package/dist/lib/job-manager.js +386 -0
  50. package/dist/lib/job-storage-database.js +156 -0
  51. package/dist/lib/job-storage-memory.js +73 -0
  52. package/dist/lib/logger.js +274 -0
  53. package/dist/lib/lshrc-init.js +177 -0
  54. package/dist/lib/pathname-expansion.js +216 -0
  55. package/dist/lib/prompt-system.js +328 -0
  56. package/dist/lib/script-runner.js +226 -0
  57. package/dist/lib/secrets-manager.js +193 -0
  58. package/dist/lib/shell-executor.js +2504 -0
  59. package/dist/lib/shell-parser.js +958 -0
  60. package/dist/lib/shell-types.js +6 -0
  61. package/dist/lib/shell.lib.js +40 -0
  62. package/dist/lib/supabase-client.js +58 -0
  63. package/dist/lib/theme-manager.js +476 -0
  64. package/dist/lib/variable-expansion.js +385 -0
  65. package/dist/lib/zsh-compatibility.js +658 -0
  66. package/dist/lib/zsh-import-manager.js +699 -0
  67. package/dist/lib/zsh-options.js +328 -0
  68. package/dist/pipeline/job-tracker.js +491 -0
  69. package/dist/pipeline/mcli-bridge.js +302 -0
  70. package/dist/pipeline/pipeline-service.js +1116 -0
  71. package/dist/pipeline/workflow-engine.js +867 -0
  72. package/dist/services/api/api.js +58 -0
  73. package/dist/services/api/auth.js +35 -0
  74. package/dist/services/api/config.js +7 -0
  75. package/dist/services/api/file.js +22 -0
  76. package/dist/services/cron/cron-registrar.js +235 -0
  77. package/dist/services/cron/cron.js +9 -0
  78. package/dist/services/daemon/daemon-registrar.js +565 -0
  79. package/dist/services/daemon/daemon.js +9 -0
  80. package/dist/services/lib/lib.js +86 -0
  81. package/dist/services/log-file-extractor.js +170 -0
  82. package/dist/services/secrets/secrets.js +94 -0
  83. package/dist/services/shell/shell.js +28 -0
  84. package/dist/services/supabase/supabase-registrar.js +367 -0
  85. package/dist/services/supabase/supabase.js +9 -0
  86. package/dist/services/zapier.js +16 -0
  87. package/dist/simple-api-server.js +148 -0
  88. package/dist/store/store.js +31 -0
  89. package/dist/util/lib.util.js +11 -0
  90. package/package.json +144 -0
@@ -0,0 +1,261 @@
1
+ import { createClient } from '@supabase/supabase-js';
2
+ import Redis from 'ioredis';
3
+ const SUPABASE_URL = process.env.SUPABASE_URL;
4
+ const SUPABASE_ANON_KEY = process.env.SUPABASE_ANON_KEY;
5
+ const REDIS_URL = process.env.REDIS_URL || 'redis://localhost:6379';
6
+ // Future use for Supabase integration - keeping for planned features
7
+ const _supabase = SUPABASE_URL && SUPABASE_ANON_KEY ?
8
+ createClient(SUPABASE_URL, SUPABASE_ANON_KEY) : null;
9
+ const redis = new Redis(REDIS_URL);
10
+ // Calculate moving average for trend smoothing - utility function for future features
11
+ function _movingAverage(data, window) {
12
+ const result = [];
13
+ for (let i = 0; i < data.length; i++) {
14
+ const start = Math.max(0, i - window + 1);
15
+ const subset = data.slice(start, i + 1);
16
+ const avg = subset.reduce((a, b) => a + b, 0) / subset.length;
17
+ result.push(avg);
18
+ }
19
+ return result;
20
+ }
21
+ // Detect anomalies using Z-score
22
+ function detectAnomalies(data, threshold = 2.5) {
23
+ const mean = data.reduce((a, b) => a + b, 0) / data.length;
24
+ const variance = data.reduce((sum, val) => sum + Math.pow(val - mean, 2), 0) / data.length;
25
+ const stdDev = Math.sqrt(variance);
26
+ return data.map((value, index) => {
27
+ const zScore = Math.abs((value - mean) / stdDev);
28
+ return zScore > threshold ? index : -1;
29
+ }).filter(index => index !== -1);
30
+ }
31
+ // Linear regression for predictions
32
+ function linearRegression(data) {
33
+ const n = data.length;
34
+ const x = Array.from({ length: n }, (_, i) => i);
35
+ const sumX = x.reduce((a, b) => a + b, 0);
36
+ const sumY = data.reduce((a, b) => a + b, 0);
37
+ const sumXY = x.reduce((sum, xi, i) => sum + xi * data[i], 0);
38
+ const sumX2 = x.reduce((sum, xi) => sum + xi * xi, 0);
39
+ const slope = (n * sumXY - sumX * sumY) / (n * sumX2 - sumX * sumX);
40
+ const intercept = (sumY - slope * sumX) / n;
41
+ return { slope, intercept };
42
+ }
43
+ export async function generateTrendAnalysis(days = 30) {
44
+ const trends = [];
45
+ const endDate = new Date();
46
+ const startDate = new Date();
47
+ startDate.setDate(endDate.getDate() - days);
48
+ for (let d = new Date(startDate); d <= endDate; d.setDate(d.getDate() + 1)) {
49
+ const dateStr = d.toISOString().split('T')[0];
50
+ const key = `metrics:${dateStr}`;
51
+ const metrics = await redis.hgetall(key);
52
+ const durations = await redis.lrange(`durations:${dateStr}`, 0, -1);
53
+ const totalBuilds = parseInt(metrics.total_builds || '0');
54
+ const successfulBuilds = parseInt(metrics.successful_builds || '0');
55
+ const failedBuilds = parseInt(metrics.failed_builds || '0');
56
+ const avgDuration = durations.length > 0
57
+ ? durations.reduce((sum, d) => sum + parseInt(d), 0) / durations.length
58
+ : 0;
59
+ trends.push({
60
+ date: dateStr,
61
+ totalBuilds,
62
+ successRate: totalBuilds > 0 ? (successfulBuilds / totalBuilds) * 100 : 0,
63
+ avgDuration: avgDuration / 1000 / 60, // Convert to minutes
64
+ failureRate: totalBuilds > 0 ? (failedBuilds / totalBuilds) * 100 : 0
65
+ });
66
+ }
67
+ return trends;
68
+ }
69
+ export async function detectBuildAnomalies(trends) {
70
+ const anomalies = [];
71
+ // Extract metrics
72
+ const durations = trends.map(t => t.avgDuration);
73
+ const failureRates = trends.map(t => t.failureRate);
74
+ const _buildCounts = trends.map(t => t.totalBuilds);
75
+ // Detect duration anomalies
76
+ const durationAnomalies = detectAnomalies(durations);
77
+ durationAnomalies.forEach(index => {
78
+ const mean = durations.reduce((a, b) => a + b, 0) / durations.length;
79
+ const stdDev = Math.sqrt(durations.reduce((sum, val) => sum + Math.pow(val - mean, 2), 0) / durations.length);
80
+ anomalies.push({
81
+ timestamp: trends[index].date,
82
+ type: 'duration',
83
+ severity: durations[index] > mean + 3 * stdDev ? 'critical' : 'warning',
84
+ description: `Build duration significantly higher than average`,
85
+ value: durations[index],
86
+ expectedRange: {
87
+ min: Math.max(0, mean - 2 * stdDev),
88
+ max: mean + 2 * stdDev
89
+ }
90
+ });
91
+ });
92
+ // Detect failure rate anomalies
93
+ const failureAnomalies = detectAnomalies(failureRates, 2);
94
+ failureAnomalies.forEach(index => {
95
+ if (failureRates[index] > 20) { // Only flag if failure rate > 20%
96
+ anomalies.push({
97
+ timestamp: trends[index].date,
98
+ type: 'failure_rate',
99
+ severity: failureRates[index] > 50 ? 'critical' : 'warning',
100
+ description: `High failure rate detected`,
101
+ value: failureRates[index],
102
+ expectedRange: { min: 0, max: 20 }
103
+ });
104
+ }
105
+ });
106
+ return anomalies;
107
+ }
108
+ export async function generateInsights(trends) {
109
+ const insights = [];
110
+ if (trends.length < 7)
111
+ return insights;
112
+ // Compare last 7 days with previous 7 days
113
+ const recentWeek = trends.slice(-7);
114
+ const previousWeek = trends.slice(-14, -7);
115
+ const recentAvgSuccess = recentWeek.reduce((sum, t) => sum + t.successRate, 0) / 7;
116
+ const prevAvgSuccess = previousWeek.reduce((sum, t) => sum + t.successRate, 0) / 7;
117
+ const successChange = recentAvgSuccess - prevAvgSuccess;
118
+ if (Math.abs(successChange) > 5) {
119
+ insights.push({
120
+ type: successChange > 0 ? 'improvement' : 'degradation',
121
+ title: `Success Rate ${successChange > 0 ? 'Improved' : 'Degraded'}`,
122
+ description: `Success rate changed by ${Math.abs(successChange).toFixed(1)}% compared to previous week`,
123
+ metric: 'success_rate',
124
+ change: successChange,
125
+ impact: Math.abs(successChange) > 15 ? 'high' : Math.abs(successChange) > 10 ? 'medium' : 'low'
126
+ });
127
+ }
128
+ // Analyze build duration trends
129
+ const recentAvgDuration = recentWeek.reduce((sum, t) => sum + t.avgDuration, 0) / 7;
130
+ const prevAvgDuration = previousWeek.reduce((sum, t) => sum + t.avgDuration, 0) / 7;
131
+ const durationChange = ((recentAvgDuration - prevAvgDuration) / prevAvgDuration) * 100;
132
+ if (Math.abs(durationChange) > 10) {
133
+ insights.push({
134
+ type: durationChange < 0 ? 'improvement' : 'degradation',
135
+ title: `Build Duration ${durationChange < 0 ? 'Improved' : 'Increased'}`,
136
+ description: `Average build duration changed by ${Math.abs(durationChange).toFixed(1)}%`,
137
+ metric: 'duration',
138
+ change: durationChange,
139
+ impact: Math.abs(durationChange) > 30 ? 'high' : Math.abs(durationChange) > 20 ? 'medium' : 'low'
140
+ });
141
+ }
142
+ // Identify patterns
143
+ const dailyBuilds = trends.map(t => t.totalBuilds);
144
+ const weekdays = trends.map(t => new Date(t.date).getDay());
145
+ const weekdayAvg = Array(7).fill(0).map((_, day) => {
146
+ const dayBuilds = dailyBuilds.filter((_, i) => weekdays[i] === day);
147
+ return dayBuilds.reduce((a, b) => a + b, 0) / dayBuilds.length;
148
+ });
149
+ const peakDay = weekdayAvg.indexOf(Math.max(...weekdayAvg));
150
+ const lowDay = weekdayAvg.indexOf(Math.min(...weekdayAvg));
151
+ const dayNames = ['Sunday', 'Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday', 'Saturday'];
152
+ insights.push({
153
+ type: 'pattern',
154
+ title: 'Weekly Build Pattern Detected',
155
+ description: `Most builds occur on ${dayNames[peakDay]}, least on ${dayNames[lowDay]}`,
156
+ metric: 'frequency',
157
+ change: 0,
158
+ impact: 'low'
159
+ });
160
+ return insights;
161
+ }
162
+ export async function predictNextPeriod(trends) {
163
+ const predictions = [];
164
+ if (trends.length < 14)
165
+ return predictions;
166
+ // Predict success rate
167
+ const successRates = trends.map(t => t.successRate);
168
+ const successRegression = linearRegression(successRates.slice(-14));
169
+ const predictedSuccess = successRegression.slope * successRates.length + successRegression.intercept;
170
+ predictions.push({
171
+ metric: 'success_rate',
172
+ nextPeriod: 'next_7_days',
173
+ predictedValue: Math.max(0, Math.min(100, predictedSuccess)),
174
+ confidence: 0.75,
175
+ trend: successRegression.slope > 1 ? 'improving' :
176
+ successRegression.slope < -1 ? 'degrading' : 'stable'
177
+ });
178
+ // Predict build volume
179
+ const buildCounts = trends.map(t => t.totalBuilds);
180
+ const volumeRegression = linearRegression(buildCounts.slice(-14));
181
+ const predictedVolume = volumeRegression.slope * buildCounts.length + volumeRegression.intercept;
182
+ predictions.push({
183
+ metric: 'build_volume',
184
+ nextPeriod: 'next_day',
185
+ predictedValue: Math.max(0, Math.round(predictedVolume)),
186
+ confidence: 0.7,
187
+ trend: volumeRegression.slope > 5 ? 'improving' :
188
+ volumeRegression.slope < -5 ? 'degrading' : 'stable'
189
+ });
190
+ return predictions;
191
+ }
192
+ export async function calculateCostAnalysis(trends) {
193
+ // Estimate costs based on build minutes (GitHub Actions pricing model)
194
+ const COST_PER_MINUTE = 0.008; // $0.008 per minute for Linux runners
195
+ const totalMinutes = trends.reduce((sum, t) => sum + (t.totalBuilds * t.avgDuration), 0);
196
+ const totalCost = totalMinutes * COST_PER_MINUTE;
197
+ const totalBuilds = trends.reduce((sum, t) => sum + t.totalBuilds, 0);
198
+ const savingsOpportunities = [];
199
+ // Identify savings opportunities
200
+ const avgDuration = totalMinutes / totalBuilds;
201
+ if (avgDuration > 10) {
202
+ savingsOpportunities.push('Consider optimizing long-running builds (>10 minutes average)');
203
+ }
204
+ const avgFailureRate = trends.reduce((sum, t) => sum + t.failureRate, 0) / trends.length;
205
+ if (avgFailureRate > 15) {
206
+ const wastedCost = (totalCost * avgFailureRate / 100);
207
+ savingsOpportunities.push(`Reduce failure rate to save ~$${wastedCost.toFixed(2)}/month`);
208
+ }
209
+ // Check for off-peak opportunities
210
+ const peakHourBuilds = trends.filter(t => {
211
+ const hour = new Date(t.date).getHours();
212
+ return hour >= 9 && hour <= 17;
213
+ });
214
+ if (peakHourBuilds.length > trends.length * 0.7) {
215
+ savingsOpportunities.push('Schedule non-critical builds during off-peak hours');
216
+ }
217
+ return {
218
+ totalCost,
219
+ costPerBuild: totalBuilds > 0 ? totalCost / totalBuilds : 0,
220
+ costByPlatform: {
221
+ github: totalCost * 0.6, // Estimate based on usage
222
+ gitlab: totalCost * 0.25,
223
+ jenkins: totalCost * 0.15
224
+ },
225
+ savingsOpportunities
226
+ };
227
+ }
228
+ export async function generateAnalyticsReport(period = 'weekly') {
229
+ const days = period === 'daily' ? 1 : period === 'weekly' ? 7 : 30;
230
+ const trends = await generateTrendAnalysis(days);
231
+ const [insights, anomalies, predictions, costAnalysis] = await Promise.all([
232
+ generateInsights(trends),
233
+ detectBuildAnomalies(trends),
234
+ predictNextPeriod(trends),
235
+ calculateCostAnalysis(trends)
236
+ ]);
237
+ return {
238
+ period,
239
+ startDate: trends[0]?.date || new Date().toISOString(),
240
+ endDate: trends[trends.length - 1]?.date || new Date().toISOString(),
241
+ trends,
242
+ insights,
243
+ anomalies,
244
+ predictions,
245
+ costAnalysis
246
+ };
247
+ }
248
+ // Export functions for bottleneck detection
249
+ export async function detectBottlenecks() {
250
+ // Analyze stage durations to find slowest parts
251
+ const stageData = await redis.hgetall('stage_durations');
252
+ const bottlenecks = Object.entries(stageData)
253
+ .map(([stage, duration]) => ({
254
+ stage,
255
+ avgDuration: parseInt(duration),
256
+ impact: 'high'
257
+ }))
258
+ .sort((a, b) => b.avgDuration - a.avgDuration)
259
+ .slice(0, 5);
260
+ return bottlenecks;
261
+ }
@@ -0,0 +1,269 @@
1
import { randomBytes } from 'node:crypto';
import bcrypt from 'bcrypt';
import jwt from 'jsonwebtoken';
3
+ const JWT_SECRET = process.env.JWT_SECRET || 'your-secret-key-change-in-production';
4
+ const TOKEN_EXPIRY = '24h';
5
+ export class AuthService {
6
+ pool;
7
+ constructor(pool) {
8
+ this.pool = pool;
9
+ this.initializeSchema();
10
+ }
11
+ async initializeSchema() {
12
+ const query = `
13
+ CREATE TABLE IF NOT EXISTS users (
14
+ id SERIAL PRIMARY KEY,
15
+ email VARCHAR(255) UNIQUE NOT NULL,
16
+ name VARCHAR(255) NOT NULL,
17
+ password_hash VARCHAR(255) NOT NULL,
18
+ role VARCHAR(50) NOT NULL DEFAULT 'viewer',
19
+ created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
20
+ last_login TIMESTAMP,
21
+ is_active BOOLEAN DEFAULT true
22
+ );
23
+
24
+ CREATE TABLE IF NOT EXISTS api_keys (
25
+ id SERIAL PRIMARY KEY,
26
+ user_id INTEGER REFERENCES users(id) ON DELETE CASCADE,
27
+ key_hash VARCHAR(255) UNIQUE NOT NULL,
28
+ name VARCHAR(255),
29
+ permissions JSONB,
30
+ last_used TIMESTAMP,
31
+ created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
32
+ expires_at TIMESTAMP
33
+ );
34
+
35
+ CREATE TABLE IF NOT EXISTS audit_logs (
36
+ id SERIAL PRIMARY KEY,
37
+ user_id INTEGER REFERENCES users(id),
38
+ action VARCHAR(255) NOT NULL,
39
+ resource VARCHAR(255),
40
+ details JSONB,
41
+ ip_address VARCHAR(45),
42
+ user_agent TEXT,
43
+ timestamp TIMESTAMP DEFAULT CURRENT_TIMESTAMP
44
+ );
45
+
46
+ CREATE INDEX IF NOT EXISTS idx_audit_logs_user ON audit_logs(user_id);
47
+ CREATE INDEX IF NOT EXISTS idx_audit_logs_timestamp ON audit_logs(timestamp);
48
+ `;
49
+ try {
50
+ await this.pool.query(query);
51
+ }
52
+ catch (error) {
53
+ console.error('Error initializing auth schema:', error);
54
+ }
55
+ }
56
+ async register(email, password, name, role = 'viewer') {
57
+ const passwordHash = await bcrypt.hash(password, 10);
58
+ const query = `
59
+ INSERT INTO users (email, name, password_hash, role)
60
+ VALUES ($1, $2, $3, $4)
61
+ RETURNING id, email, name, role, created_at
62
+ `;
63
+ try {
64
+ const result = await this.pool.query(query, [email, name, passwordHash, role]);
65
+ return result.rows[0];
66
+ }
67
+ catch (error) {
68
+ if (error.code === '23505') { // Unique constraint violation
69
+ throw new Error('User with this email already exists');
70
+ }
71
+ throw error;
72
+ }
73
+ }
74
+ async login(email, password) {
75
+ const query = `
76
+ SELECT id, email, name, role, password_hash, created_at
77
+ FROM users
78
+ WHERE email = $1 AND is_active = true
79
+ `;
80
+ const result = await this.pool.query(query, [email]);
81
+ if (result.rows.length === 0) {
82
+ throw new Error('Invalid credentials');
83
+ }
84
+ const user = result.rows[0];
85
+ const isValid = await bcrypt.compare(password, user.password_hash);
86
+ if (!isValid) {
87
+ throw new Error('Invalid credentials');
88
+ }
89
+ // Update last login
90
+ await this.pool.query('UPDATE users SET last_login = CURRENT_TIMESTAMP WHERE id = $1', [user.id]);
91
+ const token = this.generateToken(user);
92
+ delete user.password_hash;
93
+ return { user, token };
94
+ }
95
+ generateToken(user) {
96
+ const payload = {
97
+ userId: user.id,
98
+ email: user.email,
99
+ role: user.role
100
+ };
101
+ return jwt.sign(payload, JWT_SECRET, { expiresIn: TOKEN_EXPIRY });
102
+ }
103
+ verifyToken(token) {
104
+ try {
105
+ return jwt.verify(token, JWT_SECRET);
106
+ }
107
+ catch (_error) {
108
+ throw new Error('Invalid or expired token');
109
+ }
110
+ }
111
+ async generateApiKey(userId, name, permissions) {
112
+ const apiKey = this.generateRandomKey();
113
+ const keyHash = await bcrypt.hash(apiKey, 10);
114
+ const query = `
115
+ INSERT INTO api_keys (user_id, key_hash, name, permissions)
116
+ VALUES ($1, $2, $3, $4)
117
+ RETURNING id
118
+ `;
119
+ await this.pool.query(query, [userId, keyHash, name, permissions || {}]);
120
+ return apiKey;
121
+ }
122
+ generateRandomKey() {
123
+ const chars = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789';
124
+ let key = 'cicd_';
125
+ for (let i = 0; i < 32; i++) {
126
+ key += chars.charAt(Math.floor(Math.random() * chars.length));
127
+ }
128
+ return key;
129
+ }
130
+ async verifyApiKey(apiKey) {
131
+ const query = `
132
+ SELECT ak.*, u.email, u.role
133
+ FROM api_keys ak
134
+ JOIN users u ON ak.user_id = u.id
135
+ WHERE u.is_active = true
136
+ AND (ak.expires_at IS NULL OR ak.expires_at > CURRENT_TIMESTAMP)
137
+ `;
138
+ const result = await this.pool.query(query);
139
+ for (const row of result.rows) {
140
+ const isValid = await bcrypt.compare(apiKey, row.key_hash);
141
+ if (isValid) {
142
+ // Update last used
143
+ await this.pool.query('UPDATE api_keys SET last_used = CURRENT_TIMESTAMP WHERE id = $1', [row.id]);
144
+ return {
145
+ userId: row.user_id,
146
+ email: row.email,
147
+ role: row.role
148
+ };
149
+ }
150
+ }
151
+ return null;
152
+ }
153
+ async logAudit(userId, action, resource, details, req) {
154
+ const query = `
155
+ INSERT INTO audit_logs (user_id, action, resource, details, ip_address, user_agent)
156
+ VALUES ($1, $2, $3, $4, $5, $6)
157
+ `;
158
+ const ipAddress = req?.ip || req?.socket?.remoteAddress;
159
+ const userAgent = req?.headers['user-agent'];
160
+ await this.pool.query(query, [
161
+ userId,
162
+ action,
163
+ resource,
164
+ details || {},
165
+ ipAddress,
166
+ userAgent
167
+ ]);
168
+ }
169
+ }
170
+ // Middleware for authentication
171
+ export function authenticate(authService) {
172
+ return async (req, res, next) => {
173
+ try {
174
+ const authHeader = req.headers.authorization;
175
+ if (!authHeader) {
176
+ return res.status(401).json({ error: 'No authorization header' });
177
+ }
178
+ let user = null;
179
+ if (authHeader.startsWith('Bearer ')) {
180
+ // JWT token authentication
181
+ const token = authHeader.substring(7);
182
+ user = authService.verifyToken(token);
183
+ }
184
+ else if (authHeader.startsWith('ApiKey ')) {
185
+ // API key authentication
186
+ const apiKey = authHeader.substring(7);
187
+ user = await authService.verifyApiKey(apiKey);
188
+ }
189
+ if (!user) {
190
+ return res.status(401).json({ error: 'Invalid authentication credentials' });
191
+ }
192
+ req.user = user;
193
+ next();
194
+ }
195
+ catch (error) {
196
+ const message = error instanceof Error ? error.message : 'Authentication failed';
197
+ return res.status(401).json({ error: message });
198
+ }
199
+ };
200
+ }
201
+ // Role-based access control middleware
202
+ export function authorize(...allowedRoles) {
203
+ return (req, res, next) => {
204
+ if (!req.user) {
205
+ return res.status(401).json({ error: 'Authentication required' });
206
+ }
207
+ if (!allowedRoles.includes(req.user.role)) {
208
+ return res.status(403).json({ error: 'Insufficient permissions' });
209
+ }
210
+ next();
211
+ };
212
+ }
213
+ // Permission-based middleware
214
+ export function requirePermission(permission) {
215
+ return (req, res, next) => {
216
+ if (!req.user) {
217
+ return res.status(401).json({ error: 'Authentication required' });
218
+ }
219
+ // Admin has all permissions
220
+ if (req.user.role === 'admin') {
221
+ return next();
222
+ }
223
+ // Check specific permissions based on role
224
+ const rolePermissions = {
225
+ developer: [
226
+ 'pipelines.view',
227
+ 'pipelines.trigger',
228
+ 'metrics.view',
229
+ 'analytics.view',
230
+ 'alerts.view'
231
+ ],
232
+ viewer: [
233
+ 'pipelines.view',
234
+ 'metrics.view',
235
+ 'analytics.view'
236
+ ]
237
+ };
238
+ const userPermissions = rolePermissions[req.user.role] || [];
239
+ if (!userPermissions.includes(permission)) {
240
+ return res.status(403).json({ error: `Permission '${permission}' required` });
241
+ }
242
+ next();
243
+ };
244
+ }
245
+ export function rateLimit(options) {
246
+ const requests = new Map();
247
+ return (req, res, next) => {
248
+ const key = req.user?.userId?.toString() || req.ip || 'anonymous';
249
+ const now = Date.now();
250
+ const record = requests.get(key);
251
+ if (!record || now > record.resetTime) {
252
+ requests.set(key, {
253
+ count: 1,
254
+ resetTime: now + options.windowMs
255
+ });
256
+ return next();
257
+ }
258
+ if (record.count >= options.max) {
259
+ const retryAfter = Math.ceil((record.resetTime - now) / 1000);
260
+ res.setHeader('Retry-After', retryAfter);
261
+ return res.status(429).json({
262
+ error: 'Too many requests',
263
+ retryAfter
264
+ });
265
+ }
266
+ record.count++;
267
+ next();
268
+ };
269
+ }
@@ -0,0 +1,172 @@
1
+ import Redis from 'ioredis';
2
+ import { createHash } from 'crypto';
3
+ export class CacheManager {
4
+ redis;
5
+ stats;
6
+ defaultTTL;
7
+ keyPrefix;
8
+ constructor(redisUrl = 'redis://localhost:6379', options = {}) {
9
+ this.redis = new Redis(redisUrl);
10
+ this.defaultTTL = options.ttl || 3600; // 1 hour default
11
+ this.keyPrefix = options.prefix || 'cicd:cache:';
12
+ this.stats = {
13
+ hits: 0,
14
+ misses: 0,
15
+ sets: 0,
16
+ deletes: 0,
17
+ size: 0,
18
+ hitRate: 0
19
+ };
20
+ // Set up error handling
21
+ this.redis.on('error', (err) => {
22
+ console.error('Redis cache error:', err);
23
+ });
24
+ }
25
+ generateKey(namespace, identifier) {
26
+ const hash = createHash('md5').update(identifier).digest('hex');
27
+ return `${this.keyPrefix}${namespace}:${hash}`;
28
+ }
29
+ async get(namespace, identifier) {
30
+ const key = this.generateKey(namespace, identifier);
31
+ try {
32
+ const cached = await this.redis.get(key);
33
+ if (cached) {
34
+ this.stats.hits++;
35
+ this.updateHitRate();
36
+ return JSON.parse(cached);
37
+ }
38
+ this.stats.misses++;
39
+ this.updateHitRate();
40
+ return null;
41
+ }
42
+ catch (error) {
43
+ console.error(`Cache get error for ${key}:`, error);
44
+ return null;
45
+ }
46
+ }
47
+ async set(namespace, identifier, value, ttl) {
48
+ const key = this.generateKey(namespace, identifier);
49
+ const ttlSeconds = ttl || this.defaultTTL;
50
+ try {
51
+ const serialized = JSON.stringify(value);
52
+ await this.redis.setex(key, ttlSeconds, serialized);
53
+ this.stats.sets++;
54
+ this.stats.size = await this.getSize();
55
+ }
56
+ catch (error) {
57
+ console.error(`Cache set error for ${key}:`, error);
58
+ }
59
+ }
60
+ async invalidate(namespace, identifier) {
61
+ try {
62
+ if (identifier) {
63
+ // Invalidate specific item
64
+ const key = this.generateKey(namespace, identifier);
65
+ await this.redis.del(key);
66
+ this.stats.deletes++;
67
+ }
68
+ else {
69
+ // Invalidate entire namespace
70
+ const pattern = `${this.keyPrefix}${namespace}:*`;
71
+ const keys = await this.redis.keys(pattern);
72
+ if (keys.length > 0) {
73
+ await this.redis.del(...keys);
74
+ this.stats.deletes += keys.length;
75
+ }
76
+ }
77
+ this.stats.size = await this.getSize();
78
+ }
79
+ catch (error) {
80
+ console.error(`Cache invalidate error:`, error);
81
+ }
82
+ }
83
+ async getOrSet(namespace, identifier, factory, ttl) {
84
+ // Try to get from cache first
85
+ const cached = await this.get(namespace, identifier);
86
+ if (cached !== null) {
87
+ return cached;
88
+ }
89
+ // Generate fresh value
90
+ const value = await factory();
91
+ await this.set(namespace, identifier, value, ttl);
92
+ return value;
93
+ }
94
+ async warmup(namespace, items) {
95
+ const promises = items.map(async (item) => {
96
+ await this.getOrSet(namespace, item.id, item.factory, item.ttl);
97
+ });
98
+ await Promise.all(promises);
99
+ }
100
+ updateHitRate() {
101
+ const total = this.stats.hits + this.stats.misses;
102
+ this.stats.hitRate = total > 0 ? (this.stats.hits / total) * 100 : 0;
103
+ }
104
+ async getSize() {
105
+ try {
106
+ const info = await this.redis.info('memory');
107
+ const match = info.match(/used_memory_human:(.+)/);
108
+ if (match) {
109
+ const size = match[1].trim();
110
+ // Convert to bytes
111
+ if (size.endsWith('K'))
112
+ return parseFloat(size) * 1024;
113
+ if (size.endsWith('M'))
114
+ return parseFloat(size) * 1024 * 1024;
115
+ if (size.endsWith('G'))
116
+ return parseFloat(size) * 1024 * 1024 * 1024;
117
+ return parseFloat(size);
118
+ }
119
+ return 0;
120
+ }
121
+ catch (_error) {
122
+ return 0;
123
+ }
124
+ }
125
+ async getStats() {
126
+ this.stats.size = await this.getSize();
127
+ return { ...this.stats };
128
+ }
129
+ async clear() {
130
+ try {
131
+ const pattern = `${this.keyPrefix}*`;
132
+ const keys = await this.redis.keys(pattern);
133
+ if (keys.length > 0) {
134
+ await this.redis.del(...keys);
135
+ }
136
+ this.resetStats();
137
+ }
138
+ catch (error) {
139
+ console.error('Cache clear error:', error);
140
+ }
141
+ }
142
+ resetStats() {
143
+ this.stats = {
144
+ hits: 0,
145
+ misses: 0,
146
+ sets: 0,
147
+ deletes: 0,
148
+ size: 0,
149
+ hitRate: 0
150
+ };
151
+ }
152
+ async disconnect() {
153
+ await this.redis.quit();
154
+ }
155
+ }
156
+ // Decorator for method-level caching
157
+ export function Cacheable(namespace, ttl) {
158
+ return function (target, propertyKey, descriptor) {
159
+ const originalMethod = descriptor.value;
160
+ descriptor.value = async function (...args) {
161
+ const cacheManager = this.cacheManager;
162
+ if (!cacheManager) {
163
+ return originalMethod.apply(this, args);
164
+ }
165
+ const identifier = `${propertyKey}:${JSON.stringify(args)}`;
166
+ return cacheManager.getOrSet(namespace, identifier, () => originalMethod.apply(this, args), ttl);
167
+ };
168
+ return descriptor;
169
+ };
170
+ }
171
+ // Export singleton instance
172
+ export const cacheManager = new CacheManager();