lsh-framework 1.2.0 → 1.2.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (73)
  1. package/README.md +40 -3
  2. package/dist/cli.js +104 -486
  3. package/dist/commands/doctor.js +427 -0
  4. package/dist/commands/init.js +371 -0
  5. package/dist/constants/api.js +94 -0
  6. package/dist/constants/commands.js +64 -0
  7. package/dist/constants/config.js +56 -0
  8. package/dist/constants/database.js +21 -0
  9. package/dist/constants/errors.js +79 -0
  10. package/dist/constants/index.js +28 -0
  11. package/dist/constants/paths.js +28 -0
  12. package/dist/constants/ui.js +73 -0
  13. package/dist/constants/validation.js +124 -0
  14. package/dist/daemon/lshd.js +11 -32
  15. package/dist/lib/daemon-client-helper.js +7 -4
  16. package/dist/lib/daemon-client.js +9 -2
  17. package/dist/lib/format-utils.js +163 -0
  18. package/dist/lib/job-manager.js +2 -1
  19. package/dist/lib/platform-utils.js +211 -0
  20. package/dist/lib/secrets-manager.js +11 -1
  21. package/dist/lib/string-utils.js +128 -0
  22. package/dist/services/daemon/daemon-registrar.js +3 -2
  23. package/dist/services/secrets/secrets.js +54 -30
  24. package/package.json +10 -74
  25. package/dist/app.js +0 -33
  26. package/dist/cicd/analytics.js +0 -261
  27. package/dist/cicd/auth.js +0 -269
  28. package/dist/cicd/cache-manager.js +0 -172
  29. package/dist/cicd/data-retention.js +0 -305
  30. package/dist/cicd/performance-monitor.js +0 -224
  31. package/dist/cicd/webhook-receiver.js +0 -640
  32. package/dist/commands/api.js +0 -346
  33. package/dist/commands/theme.js +0 -261
  34. package/dist/commands/zsh-import.js +0 -240
  35. package/dist/components/App.js +0 -1
  36. package/dist/components/Divider.js +0 -29
  37. package/dist/components/REPL.js +0 -43
  38. package/dist/components/Terminal.js +0 -232
  39. package/dist/components/UserInput.js +0 -30
  40. package/dist/daemon/api-server.js +0 -316
  41. package/dist/daemon/monitoring-api.js +0 -220
  42. package/dist/lib/api-error-handler.js +0 -185
  43. package/dist/lib/associative-arrays.js +0 -285
  44. package/dist/lib/base-api-server.js +0 -290
  45. package/dist/lib/brace-expansion.js +0 -160
  46. package/dist/lib/builtin-commands.js +0 -439
  47. package/dist/lib/executors/builtin-executor.js +0 -52
  48. package/dist/lib/extended-globbing.js +0 -411
  49. package/dist/lib/extended-parameter-expansion.js +0 -227
  50. package/dist/lib/interactive-shell.js +0 -460
  51. package/dist/lib/job-builtins.js +0 -582
  52. package/dist/lib/pathname-expansion.js +0 -216
  53. package/dist/lib/script-runner.js +0 -226
  54. package/dist/lib/shell-executor.js +0 -2504
  55. package/dist/lib/shell-parser.js +0 -958
  56. package/dist/lib/shell-types.js +0 -6
  57. package/dist/lib/shell.lib.js +0 -40
  58. package/dist/lib/theme-manager.js +0 -476
  59. package/dist/lib/variable-expansion.js +0 -385
  60. package/dist/lib/zsh-compatibility.js +0 -659
  61. package/dist/lib/zsh-import-manager.js +0 -707
  62. package/dist/lib/zsh-options.js +0 -328
  63. package/dist/pipeline/job-tracker.js +0 -491
  64. package/dist/pipeline/mcli-bridge.js +0 -309
  65. package/dist/pipeline/pipeline-service.js +0 -1119
  66. package/dist/pipeline/workflow-engine.js +0 -870
  67. package/dist/services/api/api.js +0 -58
  68. package/dist/services/api/auth.js +0 -35
  69. package/dist/services/api/config.js +0 -7
  70. package/dist/services/api/file.js +0 -22
  71. package/dist/services/shell/shell.js +0 -28
  72. package/dist/services/zapier.js +0 -16
  73. package/dist/simple-api-server.js +0 -148
package/dist/app.js DELETED
@@ -1,33 +0,0 @@
- import { Command } from 'commander';
- import { init_lib } from './services/lib/lib.js';
- import { init_ishell } from './services/shell/shell.js';
- import { init_supabase } from './services/supabase/supabase.js';
- import { init_daemon } from './services/daemon/daemon.js';
- import { init_cron } from './services/cron/cron.js';
- const program = new Command();
- program
- .version('0.0.0')
- .description('lsh | extensible cli client.')
- .name('lsh');
- init_ishell(program);
- init_lib(program);
- init_supabase(program);
- init_daemon(program);
- init_cron(program);
- // Show help without error when no command is provided
- program.configureHelp({
- showGlobalOptions: true,
- });
- // Set exitOverride to prevent Commander from calling process.exit
- program.exitOverride((err) => {
- // If showing help, exit cleanly
- if (err.code === 'commander.helpDisplayed' || err.code === 'commander.help') {
- process.exit(0);
- }
- throw err;
- });
- program.parse(process.argv);
- // If no command was provided, show help and exit cleanly
- if (process.argv.length <= 2) {
- program.help({ error: false });
- }
package/dist/cicd/analytics.js DELETED
@@ -1,261 +0,0 @@
- import { createClient } from '@supabase/supabase-js';
- import Redis from 'ioredis';
- const SUPABASE_URL = process.env.SUPABASE_URL;
- const SUPABASE_ANON_KEY = process.env.SUPABASE_ANON_KEY;
- const REDIS_URL = process.env.REDIS_URL || 'redis://localhost:6379';
- // Future use for Supabase integration - keeping for planned features
- const _supabase = SUPABASE_URL && SUPABASE_ANON_KEY ?
- createClient(SUPABASE_URL, SUPABASE_ANON_KEY) : null;
- const redis = new Redis(REDIS_URL);
- // Calculate moving average for trend smoothing - utility function for future features
- function _movingAverage(data, window) {
- const result = [];
- for (let i = 0; i < data.length; i++) {
- const start = Math.max(0, i - window + 1);
- const subset = data.slice(start, i + 1);
- const avg = subset.reduce((a, b) => a + b, 0) / subset.length;
- result.push(avg);
- }
- return result;
- }
- // Detect anomalies using Z-score
- function detectAnomalies(data, threshold = 2.5) {
- const mean = data.reduce((a, b) => a + b, 0) / data.length;
- const variance = data.reduce((sum, val) => sum + Math.pow(val - mean, 2), 0) / data.length;
- const stdDev = Math.sqrt(variance);
- return data.map((value, index) => {
- const zScore = Math.abs((value - mean) / stdDev);
- return zScore > threshold ? index : -1;
- }).filter(index => index !== -1);
- }
- // Linear regression for predictions
- function linearRegression(data) {
- const n = data.length;
- const x = Array.from({ length: n }, (_, i) => i);
- const sumX = x.reduce((a, b) => a + b, 0);
- const sumY = data.reduce((a, b) => a + b, 0);
- const sumXY = x.reduce((sum, xi, i) => sum + xi * data[i], 0);
- const sumX2 = x.reduce((sum, xi) => sum + xi * xi, 0);
- const slope = (n * sumXY - sumX * sumY) / (n * sumX2 - sumX * sumX);
- const intercept = (sumY - slope * sumX) / n;
- return { slope, intercept };
- }
- export async function generateTrendAnalysis(days = 30) {
- const trends = [];
- const endDate = new Date();
- const startDate = new Date();
- startDate.setDate(endDate.getDate() - days);
- for (let d = new Date(startDate); d <= endDate; d.setDate(d.getDate() + 1)) {
- const dateStr = d.toISOString().split('T')[0];
- const key = `metrics:${dateStr}`;
- const metrics = await redis.hgetall(key);
- const durations = await redis.lrange(`durations:${dateStr}`, 0, -1);
- const totalBuilds = parseInt(metrics.total_builds || '0');
- const successfulBuilds = parseInt(metrics.successful_builds || '0');
- const failedBuilds = parseInt(metrics.failed_builds || '0');
- const avgDuration = durations.length > 0
- ? durations.reduce((sum, d) => sum + parseInt(d), 0) / durations.length
- : 0;
- trends.push({
- date: dateStr,
- totalBuilds,
- successRate: totalBuilds > 0 ? (successfulBuilds / totalBuilds) * 100 : 0,
- avgDuration: avgDuration / 1000 / 60, // Convert to minutes
- failureRate: totalBuilds > 0 ? (failedBuilds / totalBuilds) * 100 : 0
- });
- }
- return trends;
- }
- export async function detectBuildAnomalies(trends) {
- const anomalies = [];
- // Extract metrics
- const durations = trends.map(t => t.avgDuration);
- const failureRates = trends.map(t => t.failureRate);
- const _buildCounts = trends.map(t => t.totalBuilds);
- // Detect duration anomalies
- const durationAnomalies = detectAnomalies(durations);
- durationAnomalies.forEach(index => {
- const mean = durations.reduce((a, b) => a + b, 0) / durations.length;
- const stdDev = Math.sqrt(durations.reduce((sum, val) => sum + Math.pow(val - mean, 2), 0) / durations.length);
- anomalies.push({
- timestamp: trends[index].date,
- type: 'duration',
- severity: durations[index] > mean + 3 * stdDev ? 'critical' : 'warning',
- description: `Build duration significantly higher than average`,
- value: durations[index],
- expectedRange: {
- min: Math.max(0, mean - 2 * stdDev),
- max: mean + 2 * stdDev
- }
- });
- });
- // Detect failure rate anomalies
- const failureAnomalies = detectAnomalies(failureRates, 2);
- failureAnomalies.forEach(index => {
- if (failureRates[index] > 20) { // Only flag if failure rate > 20%
- anomalies.push({
- timestamp: trends[index].date,
- type: 'failure_rate',
- severity: failureRates[index] > 50 ? 'critical' : 'warning',
- description: `High failure rate detected`,
- value: failureRates[index],
- expectedRange: { min: 0, max: 20 }
- });
- }
- });
- return anomalies;
- }
- export async function generateInsights(trends) {
- const insights = [];
- if (trends.length < 7)
- return insights;
- // Compare last 7 days with previous 7 days
- const recentWeek = trends.slice(-7);
- const previousWeek = trends.slice(-14, -7);
- const recentAvgSuccess = recentWeek.reduce((sum, t) => sum + t.successRate, 0) / 7;
- const prevAvgSuccess = previousWeek.reduce((sum, t) => sum + t.successRate, 0) / 7;
- const successChange = recentAvgSuccess - prevAvgSuccess;
- if (Math.abs(successChange) > 5) {
- insights.push({
- type: successChange > 0 ? 'improvement' : 'degradation',
- title: `Success Rate ${successChange > 0 ? 'Improved' : 'Degraded'}`,
- description: `Success rate changed by ${Math.abs(successChange).toFixed(1)}% compared to previous week`,
- metric: 'success_rate',
- change: successChange,
- impact: Math.abs(successChange) > 15 ? 'high' : Math.abs(successChange) > 10 ? 'medium' : 'low'
- });
- }
- // Analyze build duration trends
- const recentAvgDuration = recentWeek.reduce((sum, t) => sum + t.avgDuration, 0) / 7;
- const prevAvgDuration = previousWeek.reduce((sum, t) => sum + t.avgDuration, 0) / 7;
- const durationChange = ((recentAvgDuration - prevAvgDuration) / prevAvgDuration) * 100;
- if (Math.abs(durationChange) > 10) {
- insights.push({
- type: durationChange < 0 ? 'improvement' : 'degradation',
- title: `Build Duration ${durationChange < 0 ? 'Improved' : 'Increased'}`,
- description: `Average build duration changed by ${Math.abs(durationChange).toFixed(1)}%`,
- metric: 'duration',
- change: durationChange,
- impact: Math.abs(durationChange) > 30 ? 'high' : Math.abs(durationChange) > 20 ? 'medium' : 'low'
- });
- }
- // Identify patterns
- const dailyBuilds = trends.map(t => t.totalBuilds);
- const weekdays = trends.map(t => new Date(t.date).getDay());
- const weekdayAvg = Array(7).fill(0).map((_, day) => {
- const dayBuilds = dailyBuilds.filter((_, i) => weekdays[i] === day);
- return dayBuilds.reduce((a, b) => a + b, 0) / dayBuilds.length;
- });
- const peakDay = weekdayAvg.indexOf(Math.max(...weekdayAvg));
- const lowDay = weekdayAvg.indexOf(Math.min(...weekdayAvg));
- const dayNames = ['Sunday', 'Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday', 'Saturday'];
- insights.push({
- type: 'pattern',
- title: 'Weekly Build Pattern Detected',
- description: `Most builds occur on ${dayNames[peakDay]}, least on ${dayNames[lowDay]}`,
- metric: 'frequency',
- change: 0,
- impact: 'low'
- });
- return insights;
- }
- export async function predictNextPeriod(trends) {
- const predictions = [];
- if (trends.length < 14)
- return predictions;
- // Predict success rate
- const successRates = trends.map(t => t.successRate);
- const successRegression = linearRegression(successRates.slice(-14));
- const predictedSuccess = successRegression.slope * successRates.length + successRegression.intercept;
- predictions.push({
- metric: 'success_rate',
- nextPeriod: 'next_7_days',
- predictedValue: Math.max(0, Math.min(100, predictedSuccess)),
- confidence: 0.75,
- trend: successRegression.slope > 1 ? 'improving' :
- successRegression.slope < -1 ? 'degrading' : 'stable'
- });
- // Predict build volume
- const buildCounts = trends.map(t => t.totalBuilds);
- const volumeRegression = linearRegression(buildCounts.slice(-14));
- const predictedVolume = volumeRegression.slope * buildCounts.length + volumeRegression.intercept;
- predictions.push({
- metric: 'build_volume',
- nextPeriod: 'next_day',
- predictedValue: Math.max(0, Math.round(predictedVolume)),
- confidence: 0.7,
- trend: volumeRegression.slope > 5 ? 'improving' :
- volumeRegression.slope < -5 ? 'degrading' : 'stable'
- });
- return predictions;
- }
- export async function calculateCostAnalysis(trends) {
- // Estimate costs based on build minutes (GitHub Actions pricing model)
- const COST_PER_MINUTE = 0.008; // $0.008 per minute for Linux runners
- const totalMinutes = trends.reduce((sum, t) => sum + (t.totalBuilds * t.avgDuration), 0);
- const totalCost = totalMinutes * COST_PER_MINUTE;
- const totalBuilds = trends.reduce((sum, t) => sum + t.totalBuilds, 0);
- const savingsOpportunities = [];
- // Identify savings opportunities
- const avgDuration = totalMinutes / totalBuilds;
- if (avgDuration > 10) {
- savingsOpportunities.push('Consider optimizing long-running builds (>10 minutes average)');
- }
- const avgFailureRate = trends.reduce((sum, t) => sum + t.failureRate, 0) / trends.length;
- if (avgFailureRate > 15) {
- const wastedCost = (totalCost * avgFailureRate / 100);
- savingsOpportunities.push(`Reduce failure rate to save ~$${wastedCost.toFixed(2)}/month`);
- }
- // Check for off-peak opportunities
- const peakHourBuilds = trends.filter(t => {
- const hour = new Date(t.date).getHours();
- return hour >= 9 && hour <= 17;
- });
- if (peakHourBuilds.length > trends.length * 0.7) {
- savingsOpportunities.push('Schedule non-critical builds during off-peak hours');
- }
- return {
- totalCost,
- costPerBuild: totalBuilds > 0 ? totalCost / totalBuilds : 0,
- costByPlatform: {
- github: totalCost * 0.6, // Estimate based on usage
- gitlab: totalCost * 0.25,
- jenkins: totalCost * 0.15
- },
- savingsOpportunities
- };
- }
- export async function generateAnalyticsReport(period = 'weekly') {
- const days = period === 'daily' ? 1 : period === 'weekly' ? 7 : 30;
- const trends = await generateTrendAnalysis(days);
- const [insights, anomalies, predictions, costAnalysis] = await Promise.all([
- generateInsights(trends),
- detectBuildAnomalies(trends),
- predictNextPeriod(trends),
- calculateCostAnalysis(trends)
- ]);
- return {
- period,
- startDate: trends[0]?.date || new Date().toISOString(),
- endDate: trends[trends.length - 1]?.date || new Date().toISOString(),
- trends,
- insights,
- anomalies,
- predictions,
- costAnalysis
- };
- }
- // Export functions for bottleneck detection
- export async function detectBottlenecks() {
- // Analyze stage durations to find slowest parts
- const stageData = await redis.hgetall('stage_durations');
- const bottlenecks = Object.entries(stageData)
- .map(([stage, duration]) => ({
- stage,
- avgDuration: parseInt(duration),
- impact: 'high'
- }))
- .sort((a, b) => b.avgDuration - a.avgDuration)
- .slice(0, 5);
- return bottlenecks;
- }
package/dist/cicd/auth.js DELETED
@@ -1,269 +0,0 @@
- import jwt from 'jsonwebtoken';
- import bcrypt from 'bcrypt';
- const JWT_SECRET = process.env.JWT_SECRET || 'your-secret-key-change-in-production';
- const TOKEN_EXPIRY = '24h';
- export class AuthService {
- pool;
- constructor(pool) {
- this.pool = pool;
- this.initializeSchema();
- }
- async initializeSchema() {
- const query = `
- CREATE TABLE IF NOT EXISTS users (
- id SERIAL PRIMARY KEY,
- email VARCHAR(255) UNIQUE NOT NULL,
- name VARCHAR(255) NOT NULL,
- password_hash VARCHAR(255) NOT NULL,
- role VARCHAR(50) NOT NULL DEFAULT 'viewer',
- created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
- last_login TIMESTAMP,
- is_active BOOLEAN DEFAULT true
- );
-
- CREATE TABLE IF NOT EXISTS api_keys (
- id SERIAL PRIMARY KEY,
- user_id INTEGER REFERENCES users(id) ON DELETE CASCADE,
- key_hash VARCHAR(255) UNIQUE NOT NULL,
- name VARCHAR(255),
- permissions JSONB,
- last_used TIMESTAMP,
- created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
- expires_at TIMESTAMP
- );
-
- CREATE TABLE IF NOT EXISTS audit_logs (
- id SERIAL PRIMARY KEY,
- user_id INTEGER REFERENCES users(id),
- action VARCHAR(255) NOT NULL,
- resource VARCHAR(255),
- details JSONB,
- ip_address VARCHAR(45),
- user_agent TEXT,
- timestamp TIMESTAMP DEFAULT CURRENT_TIMESTAMP
- );
-
- CREATE INDEX IF NOT EXISTS idx_audit_logs_user ON audit_logs(user_id);
- CREATE INDEX IF NOT EXISTS idx_audit_logs_timestamp ON audit_logs(timestamp);
- `;
- try {
- await this.pool.query(query);
- }
- catch (error) {
- console.error('Error initializing auth schema:', error);
- }
- }
- async register(email, password, name, role = 'viewer') {
- const passwordHash = await bcrypt.hash(password, 10);
- const query = `
- INSERT INTO users (email, name, password_hash, role)
- VALUES ($1, $2, $3, $4)
- RETURNING id, email, name, role, created_at
- `;
- try {
- const result = await this.pool.query(query, [email, name, passwordHash, role]);
- return result.rows[0];
- }
- catch (error) {
- if (error.code === '23505') { // Unique constraint violation
- throw new Error('User with this email already exists');
- }
- throw error;
- }
- }
- async login(email, password) {
- const query = `
- SELECT id, email, name, role, password_hash, created_at
- FROM users
- WHERE email = $1 AND is_active = true
- `;
- const result = await this.pool.query(query, [email]);
- if (result.rows.length === 0) {
- throw new Error('Invalid credentials');
- }
- const user = result.rows[0];
- const isValid = await bcrypt.compare(password, user.password_hash);
- if (!isValid) {
- throw new Error('Invalid credentials');
- }
- // Update last login
- await this.pool.query('UPDATE users SET last_login = CURRENT_TIMESTAMP WHERE id = $1', [user.id]);
- const token = this.generateToken(user);
- delete user.password_hash;
- return { user, token };
- }
- generateToken(user) {
- const payload = {
- userId: user.id,
- email: user.email,
- role: user.role
- };
- return jwt.sign(payload, JWT_SECRET, { expiresIn: TOKEN_EXPIRY });
- }
- verifyToken(token) {
- try {
- return jwt.verify(token, JWT_SECRET);
- }
- catch (_error) {
- throw new Error('Invalid or expired token');
- }
- }
- async generateApiKey(userId, name, permissions) {
- const apiKey = this.generateRandomKey();
- const keyHash = await bcrypt.hash(apiKey, 10);
- const query = `
- INSERT INTO api_keys (user_id, key_hash, name, permissions)
- VALUES ($1, $2, $3, $4)
- RETURNING id
- `;
- await this.pool.query(query, [userId, keyHash, name, permissions || {}]);
- return apiKey;
- }
- generateRandomKey() {
- const chars = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789';
- let key = 'cicd_';
- for (let i = 0; i < 32; i++) {
- key += chars.charAt(Math.floor(Math.random() * chars.length));
- }
- return key;
- }
- async verifyApiKey(apiKey) {
- const query = `
- SELECT ak.*, u.email, u.role
- FROM api_keys ak
- JOIN users u ON ak.user_id = u.id
- WHERE u.is_active = true
- AND (ak.expires_at IS NULL OR ak.expires_at > CURRENT_TIMESTAMP)
- `;
- const result = await this.pool.query(query);
- for (const row of result.rows) {
- const isValid = await bcrypt.compare(apiKey, row.key_hash);
- if (isValid) {
- // Update last used
- await this.pool.query('UPDATE api_keys SET last_used = CURRENT_TIMESTAMP WHERE id = $1', [row.id]);
- return {
- userId: row.user_id,
- email: row.email,
- role: row.role
- };
- }
- }
- return null;
- }
- async logAudit(userId, action, resource, details, req) {
- const query = `
- INSERT INTO audit_logs (user_id, action, resource, details, ip_address, user_agent)
- VALUES ($1, $2, $3, $4, $5, $6)
- `;
- const ipAddress = req?.ip || req?.socket?.remoteAddress;
- const userAgent = req?.headers['user-agent'];
- await this.pool.query(query, [
- userId,
- action,
- resource,
- details || {},
- ipAddress,
- userAgent
- ]);
- }
- }
- // Middleware for authentication
- export function authenticate(authService) {
- return async (req, res, next) => {
- try {
- const authHeader = req.headers.authorization;
- if (!authHeader) {
- return res.status(401).json({ error: 'No authorization header' });
- }
- let user = null;
- if (authHeader.startsWith('Bearer ')) {
- // JWT token authentication
- const token = authHeader.substring(7);
- user = authService.verifyToken(token);
- }
- else if (authHeader.startsWith('ApiKey ')) {
- // API key authentication
- const apiKey = authHeader.substring(7);
- user = await authService.verifyApiKey(apiKey);
- }
- if (!user) {
- return res.status(401).json({ error: 'Invalid authentication credentials' });
- }
- req.user = user;
- next();
- }
- catch (error) {
- const message = error instanceof Error ? error.message : 'Authentication failed';
- return res.status(401).json({ error: message });
- }
- };
- }
- // Role-based access control middleware
- export function authorize(...allowedRoles) {
- return (req, res, next) => {
- if (!req.user) {
- return res.status(401).json({ error: 'Authentication required' });
- }
- if (!allowedRoles.includes(req.user.role)) {
- return res.status(403).json({ error: 'Insufficient permissions' });
- }
- next();
- };
- }
- // Permission-based middleware
- export function requirePermission(permission) {
- return (req, res, next) => {
- if (!req.user) {
- return res.status(401).json({ error: 'Authentication required' });
- }
- // Admin has all permissions
- if (req.user.role === 'admin') {
- return next();
- }
- // Check specific permissions based on role
- const rolePermissions = {
- developer: [
- 'pipelines.view',
- 'pipelines.trigger',
- 'metrics.view',
- 'analytics.view',
- 'alerts.view'
- ],
- viewer: [
- 'pipelines.view',
- 'metrics.view',
- 'analytics.view'
- ]
- };
- const userPermissions = rolePermissions[req.user.role] || [];
- if (!userPermissions.includes(permission)) {
- return res.status(403).json({ error: `Permission '${permission}' required` });
- }
- next();
- };
- }
- export function rateLimit(options) {
- const requests = new Map();
- return (req, res, next) => {
- const key = req.user?.userId?.toString() || req.ip || 'anonymous';
- const now = Date.now();
- const record = requests.get(key);
- if (!record || now > record.resetTime) {
- requests.set(key, {
- count: 1,
- resetTime: now + options.windowMs
- });
- return next();
- }
- if (record.count >= options.max) {
- const retryAfter = Math.ceil((record.resetTime - now) / 1000);
- res.setHeader('Retry-After', retryAfter);
- return res.status(429).json({
- error: 'Too many requests',
- retryAfter
- });
- }
- record.count++;
- next();
- };
- }