lsh-framework 1.2.0 → 1.3.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +40 -3
- package/dist/cli.js +104 -486
- package/dist/commands/doctor.js +427 -0
- package/dist/commands/init.js +371 -0
- package/dist/constants/api.js +94 -0
- package/dist/constants/commands.js +64 -0
- package/dist/constants/config.js +56 -0
- package/dist/constants/database.js +21 -0
- package/dist/constants/errors.js +79 -0
- package/dist/constants/index.js +28 -0
- package/dist/constants/paths.js +28 -0
- package/dist/constants/ui.js +73 -0
- package/dist/constants/validation.js +124 -0
- package/dist/daemon/lshd.js +11 -32
- package/dist/lib/daemon-client-helper.js +7 -4
- package/dist/lib/daemon-client.js +9 -2
- package/dist/lib/format-utils.js +163 -0
- package/dist/lib/fuzzy-match.js +123 -0
- package/dist/lib/job-manager.js +2 -1
- package/dist/lib/platform-utils.js +211 -0
- package/dist/lib/secrets-manager.js +11 -1
- package/dist/lib/string-utils.js +128 -0
- package/dist/services/daemon/daemon-registrar.js +3 -2
- package/dist/services/secrets/secrets.js +119 -59
- package/package.json +10 -74
- package/dist/app.js +0 -33
- package/dist/cicd/analytics.js +0 -261
- package/dist/cicd/auth.js +0 -269
- package/dist/cicd/cache-manager.js +0 -172
- package/dist/cicd/data-retention.js +0 -305
- package/dist/cicd/performance-monitor.js +0 -224
- package/dist/cicd/webhook-receiver.js +0 -640
- package/dist/commands/api.js +0 -346
- package/dist/commands/theme.js +0 -261
- package/dist/commands/zsh-import.js +0 -240
- package/dist/components/App.js +0 -1
- package/dist/components/Divider.js +0 -29
- package/dist/components/REPL.js +0 -43
- package/dist/components/Terminal.js +0 -232
- package/dist/components/UserInput.js +0 -30
- package/dist/daemon/api-server.js +0 -316
- package/dist/daemon/monitoring-api.js +0 -220
- package/dist/lib/api-error-handler.js +0 -185
- package/dist/lib/associative-arrays.js +0 -285
- package/dist/lib/base-api-server.js +0 -290
- package/dist/lib/brace-expansion.js +0 -160
- package/dist/lib/builtin-commands.js +0 -439
- package/dist/lib/executors/builtin-executor.js +0 -52
- package/dist/lib/extended-globbing.js +0 -411
- package/dist/lib/extended-parameter-expansion.js +0 -227
- package/dist/lib/interactive-shell.js +0 -460
- package/dist/lib/job-builtins.js +0 -582
- package/dist/lib/pathname-expansion.js +0 -216
- package/dist/lib/script-runner.js +0 -226
- package/dist/lib/shell-executor.js +0 -2504
- package/dist/lib/shell-parser.js +0 -958
- package/dist/lib/shell-types.js +0 -6
- package/dist/lib/shell.lib.js +0 -40
- package/dist/lib/theme-manager.js +0 -476
- package/dist/lib/variable-expansion.js +0 -385
- package/dist/lib/zsh-compatibility.js +0 -659
- package/dist/lib/zsh-import-manager.js +0 -707
- package/dist/lib/zsh-options.js +0 -328
- package/dist/pipeline/job-tracker.js +0 -491
- package/dist/pipeline/mcli-bridge.js +0 -309
- package/dist/pipeline/pipeline-service.js +0 -1119
- package/dist/pipeline/workflow-engine.js +0 -870
- package/dist/services/api/api.js +0 -58
- package/dist/services/api/auth.js +0 -35
- package/dist/services/api/config.js +0 -7
- package/dist/services/api/file.js +0 -22
- package/dist/services/shell/shell.js +0 -28
- package/dist/services/zapier.js +0 -16
- package/dist/simple-api-server.js +0 -148
|
@@ -1,305 +0,0 @@
|
|
|
1
|
-
import * as cron from 'node-cron';
|
|
2
|
-
/**
 * Scheduled data-retention service for the CI/CD datastore.
 *
 * Owns a hard-coded set of per-table retention policies and three cron
 * jobs that enforce them: a daily delete pass, a weekly archive pass, and
 * an hourly Redis key sweep. Rows can optionally be archived (to S3 or the
 * local filesystem, gzip-compressed) before deletion.
 *
 * NOTE(review): table/column names are interpolated directly into SQL.
 * They come only from the hard-coded policy list below — never route
 * user-supplied names through these queries.
 */
export class DataRetentionService {
    pool;          // pg Pool — query() / rowCount used
    redis;         // redis client — keys()/ttl()/expire()/del() used
    policies;      // [{ tableName, retentionDays, dateColumn, archiveBeforeDelete? }]
    archiveConfig; // env-driven archive destination settings
    cronJobs = []; // scheduled node-cron tasks, stopped via stop()

    /**
     * @param {import('pg').Pool} pool - PostgreSQL connection pool.
     * @param {object} redis - Redis client.
     */
    constructor(pool, redis) {
        this.pool = pool;
        this.redis = redis;
        // Define retention policies
        this.policies = [
            { tableName: 'pipeline_events', retentionDays: 90, dateColumn: 'created_at', archiveBeforeDelete: true },
            { tableName: 'build_metrics', retentionDays: 180, dateColumn: 'recorded_at', archiveBeforeDelete: true },
            { tableName: 'audit_logs', retentionDays: 365, dateColumn: 'timestamp', archiveBeforeDelete: true },
            { tableName: 'pipeline_stages', retentionDays: 90, dateColumn: 'started_at' },
            { tableName: 'alerts', retentionDays: 30, dateColumn: 'created_at' },
            { tableName: 'webhook_logs', retentionDays: 7, dateColumn: 'received_at' }
        ];
        this.archiveConfig = {
            enabled: process.env.ENABLE_ARCHIVE === 'true',
            s3Bucket: process.env.ARCHIVE_S3_BUCKET,
            localPath: process.env.ARCHIVE_LOCAL_PATH || '/var/backups/cicd',
            compress: true
        };
        this.setupCronJobs();
    }

    /** Register the recurring cleanup/archive jobs (see class doc). */
    setupCronJobs() {
        // Daily cleanup at 2 AM
        const dailyCleanup = cron.schedule('0 2 * * *', async () => {
            console.warn('Running daily data cleanup...');
            await this.runCleanup();
        });
        // Weekly archive at Sunday 3 AM
        const weeklyArchive = cron.schedule('0 3 * * 0', async () => {
            console.warn('Running weekly data archive...');
            await this.runArchive();
        });
        // Hourly Redis cleanup
        const hourlyRedisCleanup = cron.schedule('0 * * * *', async () => {
            console.warn('Running Redis cleanup...');
            await this.cleanupRedis();
        });
        this.cronJobs.push(dailyCleanup, weeklyArchive, hourlyRedisCleanup);
    }

    /**
     * Run every retention policy once; a failure on one table is recorded
     * and does not abort the remaining tables.
     * @returns {Promise<Array>} per-table result objects.
     */
    async runCleanup() {
        const results = [];
        for (const policy of this.policies) {
            try {
                const result = await this.cleanupTable(policy);
                results.push(result);
            }
            catch (error) {
                console.error(`Error cleaning up ${policy.tableName}:`, error);
                results.push({
                    table: policy.tableName,
                    status: 'error',
                    error: error instanceof Error ? error.message : 'Unknown error'
                });
            }
        }
        // Log cleanup results
        await this.logCleanupResults(results);
        return results;
    }

    /**
     * Delete rows older than the policy's retention window, archiving them
     * first when configured, then VACUUM ANALYZE to reclaim space.
     * @param {object} policy - one entry from this.policies.
     */
    async cleanupTable(policy) {
        const cutoffDate = new Date();
        cutoffDate.setDate(cutoffDate.getDate() - policy.retentionDays);
        // Archive if needed
        if (policy.archiveBeforeDelete && this.archiveConfig.enabled) {
            await this.archiveTableData(policy.tableName, policy.dateColumn, cutoffDate);
        }
        // FIX: the previous query appended `RETURNING COUNT(*)`, which
        // PostgreSQL rejects (aggregate functions are not allowed in
        // RETURNING). rowCount already carries the deleted-row count.
        const deleteQuery = `
            DELETE FROM ${policy.tableName}
            WHERE ${policy.dateColumn} < $1
        `;
        const result = await this.pool.query(deleteQuery, [cutoffDate]);
        const deletedCount = result.rowCount || 0;
        // Vacuum the table to reclaim space
        await this.pool.query(`VACUUM ANALYZE ${policy.tableName}`);
        return {
            table: policy.tableName,
            status: 'success',
            deletedRecords: deletedCount,
            cutoffDate: cutoffDate.toISOString()
        };
    }

    /**
     * Select all rows older than cutoffDate and hand them to the configured
     * archive sink (S3 takes precedence over local). No-op when empty.
     */
    async archiveTableData(tableName, dateColumn, cutoffDate) {
        const selectQuery = `
            SELECT * FROM ${tableName}
            WHERE ${dateColumn} < $1
        `;
        const result = await this.pool.query(selectQuery, [cutoffDate]);
        if (result.rows.length === 0) {
            return;
        }
        const archiveData = {
            tableName,
            archiveDate: new Date().toISOString(),
            recordCount: result.rows.length,
            cutoffDate: cutoffDate.toISOString(),
            data: result.rows
        };
        // Save archive based on configuration
        if (this.archiveConfig.s3Bucket) {
            await this.saveToS3(archiveData);
        }
        else if (this.archiveConfig.localPath) {
            await this.saveToLocal(archiveData);
        }
    }

    /** Placeholder: S3 upload is not implemented yet — only logs intent. */
    async saveToS3(archiveData) {
        // Implementation would use AWS SDK
        console.warn(`Would save ${archiveData.recordCount} records from ${archiveData.tableName} to S3`);
        // Placeholder for S3 upload logic
    }

    /**
     * Write an archive payload to the local archive directory as JSON,
     * gzip-compressed when archiveConfig.compress is set.
     * The filename timestamp comes from archiveDate (cleanup path) or
     * weekOf (weekly archive path), falling back to "now".
     */
    async saveToLocal(archiveData) {
        const fs = await import('fs/promises');
        const path = await import('path');
        const zlib = await import('zlib');
        const { promisify } = await import('util');
        const gzip = promisify(zlib.gzip);
        const timestamp = 'archiveDate' in archiveData && typeof archiveData.archiveDate === 'string'
            ? archiveData.archiveDate.replace(/:/g, '-')
            : 'weekOf' in archiveData && typeof archiveData.weekOf === 'string'
                ? archiveData.weekOf
                : new Date().toISOString().replace(/:/g, '-');
        const fileName = `${archiveData.tableName}_${timestamp}.json`;
        const filePath = path.join(this.archiveConfig.localPath, fileName);
        // Ensure the archive directory exists before writing.
        await fs.mkdir(this.archiveConfig.localPath, { recursive: true });
        const json = JSON.stringify(archiveData);
        if (this.archiveConfig.compress) {
            // FIX: write the raw gzip bytes. The previous code base64-encoded
            // the compressed buffer, producing a `.gz` file that gunzip
            // cannot read back.
            const compressed = await gzip(json);
            const compressedPath = filePath + '.gz';
            await fs.writeFile(compressedPath, compressed);
            console.warn(`Archived ${archiveData.recordCount} records to ${compressedPath}`);
        }
        else {
            await fs.writeFile(filePath, json);
            console.warn(`Archived ${archiveData.recordCount} records to ${filePath}`);
        }
    }

    /**
     * Drop metric/webhook keys whose embedded YYYY-MM-DD date is older than
     * 7 days, and put a 1h TTL on cache keys that have none.
     * NOTE(review): redis.keys() blocks the server on large keyspaces —
     * consider SCAN if these patterns can grow; confirm against deployment.
     */
    async cleanupRedis() {
        const patterns = [
            'metrics:*',
            'durations:*',
            'stage_durations:*',
            'webhook:*'
        ];
        const cutoffTimestamp = Date.now() - (7 * 24 * 60 * 60 * 1000); // 7 days
        for (const pattern of patterns) {
            const keys = await this.redis.keys(pattern);
            for (const key of keys) {
                // Extract date from key if possible
                const dateMatch = key.match(/(\d{4}-\d{2}-\d{2})/);
                if (dateMatch) {
                    const keyDate = new Date(dateMatch[1]);
                    if (keyDate.getTime() < cutoffTimestamp) {
                        await this.redis.del(key);
                    }
                }
            }
        }
        // Clean up expired cache entries
        const cacheKeys = await this.redis.keys('cicd:cache:*');
        for (const key of cacheKeys) {
            const ttl = await this.redis.ttl(key);
            if (ttl === -1) {
                // No expiration set, check age
                await this.redis.expire(key, 3600); // Set 1 hour expiration
            }
        }
        console.warn('Redis cleanup completed');
    }

    /**
     * Archive the last 7 days of the long-retention tables without deleting
     * anything. Returns per-table results; disabled config short-circuits.
     */
    async runArchive() {
        if (!this.archiveConfig.enabled) {
            console.warn('Archiving is disabled');
            return;
        }
        const tables = ['pipeline_events', 'build_metrics', 'audit_logs'];
        const results = [];
        for (const tableName of tables) {
            try {
                // Archive last week's data
                const startDate = new Date();
                startDate.setDate(startDate.getDate() - 7);
                const endDate = new Date();
                const query = `
                    SELECT * FROM ${tableName}
                    WHERE created_at >= $1 AND created_at < $2
                `;
                const result = await this.pool.query(query, [startDate, endDate]);
                if (result.rows.length > 0) {
                    const archiveData = {
                        tableName,
                        weekOf: startDate.toISOString().split('T')[0],
                        recordCount: result.rows.length,
                        data: result.rows
                    };
                    if (this.archiveConfig.s3Bucket) {
                        await this.saveToS3(archiveData);
                    }
                    else if (this.archiveConfig.localPath) {
                        await this.saveToLocal(archiveData);
                    }
                    results.push({
                        table: tableName,
                        status: 'success',
                        recordsArchived: result.rows.length
                    });
                }
            }
            catch (error) {
                console.error(`Error archiving ${tableName}:`, error);
                results.push({
                    table: tableName,
                    status: 'error',
                    error: error instanceof Error ? error.message : 'Unknown error'
                });
            }
        }
        return results;
    }

    /** Persist a summary row of a cleanup run to data_retention_logs. */
    async logCleanupResults(results) {
        const query = `
            INSERT INTO data_retention_logs (
                execution_time, tables_processed, records_deleted, status, details
            ) VALUES ($1, $2, $3, $4, $5)
        `;
        const totalDeleted = results
            .filter(r => r.status === 'success')
            .reduce((sum, r) => sum + (r.deletedRecords || 0), 0);
        const status = results.every(r => r.status === 'success') ? 'success' :
            results.some(r => r.status === 'success') ? 'partial' : 'failed';
        await this.pool.query(query, [
            new Date(),
            results.length,
            totalDeleted,
            status,
            JSON.stringify(results)
        ]);
    }

    /**
     * Report per-policy table stats (row count, oldest/newest, on-disk size,
     * estimated deletions). Tables that error are skipped, not fatal.
     */
    async getRetentionStats() {
        const stats = [];
        for (const policy of this.policies) {
            const countQuery = `
                SELECT
                    COUNT(*) as total_records,
                    MIN(${policy.dateColumn}) as oldest_record,
                    MAX(${policy.dateColumn}) as newest_record,
                    pg_size_pretty(pg_total_relation_size('${policy.tableName}')) as table_size
                FROM ${policy.tableName}
            `;
            try {
                const result = await this.pool.query(countQuery);
                const row = result.rows[0];
                stats.push({
                    table: policy.tableName,
                    retentionDays: policy.retentionDays,
                    totalRecords: Number.parseInt(row.total_records, 10),
                    oldestRecord: row.oldest_record,
                    newestRecord: row.newest_record,
                    tableSize: row.table_size,
                    estimatedDeletions: await this.estimateDeletions(policy)
                });
            }
            catch (error) {
                console.error(`Error getting stats for ${policy.tableName}:`, error);
            }
        }
        return stats;
    }

    /**
     * Count how many rows the next cleanup pass would delete for a policy.
     * Best-effort: returns 0 on any query error.
     */
    async estimateDeletions(policy) {
        const cutoffDate = new Date();
        cutoffDate.setDate(cutoffDate.getDate() - policy.retentionDays);
        const query = `
            SELECT COUNT(*) as count
            FROM ${policy.tableName}
            WHERE ${policy.dateColumn} < $1
        `;
        try {
            const result = await this.pool.query(query, [cutoffDate]);
            return Number.parseInt(result.rows[0].count, 10);
        }
        catch (_error) {
            return 0;
        }
    }

    /**
     * Manual trigger for cleanup — one table (by name) or all policies.
     * @throws {Error} when tableName has no matching policy.
     */
    async triggerCleanup(tableName) {
        if (tableName) {
            const policy = this.policies.find(p => p.tableName === tableName);
            if (policy) {
                return await this.cleanupTable(policy);
            }
            throw new Error(`No retention policy found for table: ${tableName}`);
        }
        return await this.runCleanup();
    }

    /** Stop all scheduled cron jobs. */
    stop() {
        this.cronJobs.forEach(job => job.stop());
    }
}
|
|
@@ -1,224 +0,0 @@
|
|
|
1
|
-
import { performance } from 'perf_hooks';
|
|
2
|
-
import * as os from 'os';
|
|
3
|
-
/**
 * In-process performance monitor: tracks request/DB-query timings, event
 * loop delay, and CPU/memory usage, raising console alerts when configured
 * thresholds are exceeded. Exposes an Express middleware and a health view.
 */
export class PerformanceMonitor {
    requestMetrics = [];   // ring of the last 1000 request samples
    dbQueryTimes = [];     // ring of the last 1000 query durations (ms)
    eventLoopDelay = 0;    // most recent measured loop delay (ms)
    startTime;             // process-local start, for uptime reporting
    alertThresholds;       // { cpuUsage %, memoryUsage %, responseTime ms, errorRate % }
    monitorTimers = [];    // interval handles, cleared in shutdown()
    lastCpuUsage;          // baseline for delta-based CPU sampling
    lastCpuCheck;          // wall-clock time of the last CPU sample

    constructor() {
        this.startTime = Date.now();
        this.alertThresholds = {
            cpuUsage: 80,
            memoryUsage: 85,
            responseTime: 1000,
            errorRate: 5
        };
        // Baseline for the delta-based CPU percentage in getMetrics().
        this.lastCpuUsage = process.cpuUsage();
        this.lastCpuCheck = Date.now();
        this.setupEventLoopMonitoring();
        this.setupMemoryMonitoring();
    }

    /** Sample event-loop delay every 100ms (extra time beyond the interval). */
    setupEventLoopMonitoring() {
        let lastCheck = process.hrtime.bigint();
        const timer = setInterval(() => {
            const now = process.hrtime.bigint();
            const delay = Number(now - lastCheck) / 1e6 - 100; // Convert to ms and subtract interval
            this.eventLoopDelay = Math.max(0, delay);
            lastCheck = now;
        }, 100);
        // FIX: unref so monitoring never keeps the process alive on its own.
        timer.unref();
        this.monitorTimers.push(timer);
    }

    /** Alert when RSS exceeds the memory threshold; checked every 30s. */
    setupMemoryMonitoring() {
        const timer = setInterval(() => {
            const memUsage = process.memoryUsage();
            const totalMem = os.totalmem();
            const percentUsed = (memUsage.rss / totalMem) * 100;
            if (percentUsed > this.alertThresholds.memoryUsage) {
                this.triggerAlert('memory', `Memory usage at ${percentUsed.toFixed(2)}%`);
            }
        }, 30000); // Check every 30 seconds
        timer.unref();
        this.monitorTimers.push(timer);
    }

    /**
     * Record one HTTP request sample and alert on slow responses.
     * @param {{method, path, statusCode, responseTime, timestamp}} metrics
     */
    recordRequest(metrics) {
        this.requestMetrics.push(metrics);
        // Keep only last 1000 requests in memory
        if (this.requestMetrics.length > 1000) {
            this.requestMetrics = this.requestMetrics.slice(-1000);
        }
        // Check for high response time
        if (metrics.responseTime > this.alertThresholds.responseTime) {
            this.triggerAlert('response_time', `Slow request: ${metrics.method} ${metrics.path} took ${metrics.responseTime}ms`);
        }
    }

    /** Record one DB query duration (ms); alerts on queries over 100ms. */
    recordDatabaseQuery(queryTime, query) {
        this.dbQueryTimes.push(queryTime);
        if (this.dbQueryTimes.length > 1000) {
            this.dbQueryTimes = this.dbQueryTimes.slice(-1000);
        }
        // Alert for slow queries (> 100ms)
        if (queryTime > 100) {
            this.triggerAlert('slow_query', `Slow database query: ${queryTime}ms${query ? ` - ${query.substring(0, 100)}` : ''}`);
        }
    }

    /**
     * Build a point-in-time metrics snapshot (cpu, memory, event loop,
     * last-minute requests, DB timings) and fire threshold alerts.
     */
    getMetrics() {
        const now = Date.now();
        const memUsage = process.memoryUsage();
        // FIX: the old code treated cumulative process.cpuUsage() as a
        // percentage ((user+system)/1e6*100), which only grows over the
        // process lifetime. Use the delta since the previous call divided
        // by elapsed wall time instead.
        const cpuDelta = process.cpuUsage(this.lastCpuUsage);
        const elapsedMs = now - this.lastCpuCheck;
        const cpuPercent = elapsedMs > 0
            ? ((cpuDelta.user + cpuDelta.system) / 1000) / elapsedMs * 100
            : 0;
        this.lastCpuUsage = process.cpuUsage();
        this.lastCpuCheck = now;
        // Calculate request metrics
        const recentRequests = this.requestMetrics.filter(r => r.timestamp > now - 60000 // Last minute
        );
        const totalRequests = recentRequests.length;
        const errors = recentRequests.filter(r => r.statusCode >= 500).length;
        const avgResponseTime = totalRequests > 0
            ? recentRequests.reduce((sum, r) => sum + r.responseTime, 0) / totalRequests
            : 0;
        // Calculate database metrics
        const avgQueryTime = this.dbQueryTimes.length > 0
            ? this.dbQueryTimes.reduce((a, b) => a + b, 0) / this.dbQueryTimes.length
            : 0;
        const slowQueries = this.dbQueryTimes.filter(t => t > 100).length;
        const memPercent = (memUsage.rss / os.totalmem()) * 100;
        // Check alert thresholds
        if (cpuPercent > this.alertThresholds.cpuUsage) {
            this.triggerAlert('cpu', `CPU usage at ${cpuPercent.toFixed(2)}%`);
        }
        const errorRate = totalRequests > 0 ? (errors / totalRequests) * 100 : 0;
        if (errorRate > this.alertThresholds.errorRate) {
            this.triggerAlert('error_rate', `Error rate at ${errorRate.toFixed(2)}%`);
        }
        return {
            timestamp: now,
            cpu: {
                usage: cpuPercent,
                loadAverage: os.loadavg()
            },
            memory: {
                heapUsed: memUsage.heapUsed,
                heapTotal: memUsage.heapTotal,
                external: memUsage.external,
                rss: memUsage.rss,
                percentUsed: memPercent
            },
            eventLoop: {
                delay: this.eventLoopDelay,
                utilization: this.eventLoopDelay / 16.67 * 100 // Percentage of 60fps frame time
            },
            requests: {
                total: totalRequests,
                avgResponseTime,
                errors,
                errorRate
            },
            database: {
                activeConnections: 0, // To be updated by connection pool
                avgQueryTime,
                slowQueries
            }
        };
    }

    /**
     * Summarize current health: 'healthy' | 'degraded' | 'unhealthy',
     * with the list of threshold violations that caused the downgrade.
     */
    getHealthStatus() {
        const metrics = this.getMetrics();
        let status = 'healthy';
        const issues = [];
        if (metrics.cpu.usage > this.alertThresholds.cpuUsage) {
            status = 'degraded';
            issues.push(`High CPU usage: ${metrics.cpu.usage.toFixed(2)}%`);
        }
        if (metrics.memory.percentUsed > this.alertThresholds.memoryUsage) {
            status = 'degraded';
            issues.push(`High memory usage: ${metrics.memory.percentUsed.toFixed(2)}%`);
        }
        if (metrics.requests.errorRate > this.alertThresholds.errorRate) {
            status = 'unhealthy';
            issues.push(`High error rate: ${metrics.requests.errorRate.toFixed(2)}%`);
        }
        if (metrics.eventLoop.delay > 50) {
            status = status === 'healthy' ? 'degraded' : status;
            issues.push(`Event loop delay: ${metrics.eventLoop.delay.toFixed(2)}ms`);
        }
        return {
            status,
            details: {
                uptime: Math.floor((Date.now() - this.startTime) / 1000),
                issues,
                metrics: {
                    cpu: `${metrics.cpu.usage.toFixed(2)}%`,
                    memory: `${metrics.memory.percentUsed.toFixed(2)}%`,
                    requests: metrics.requests.total,
                    errors: metrics.requests.errors,
                    avgResponseTime: `${metrics.requests.avgResponseTime.toFixed(2)}ms`
                }
            }
        };
    }

    /** Emit an alert; hook point for external monitoring integrations. */
    triggerAlert(type, message) {
        console.warn(`[PERFORMANCE ALERT] ${type}: ${message}`);
        // Here you could send to monitoring service, trigger notifications, etc.
    }

    /** Express middleware that records every request's response time. */
    middleware() {
        return (req, res, next) => {
            const start = Date.now();
            // Override res.end to capture response time
            const originalEnd = res.end;
            res.end = (...args) => {
                const responseTime = Date.now() - start;
                this.recordRequest({
                    method: req.method,
                    path: req.path,
                    statusCode: res.statusCode,
                    responseTime,
                    timestamp: Date.now()
                });
                originalEnd.apply(res, args);
            };
            next();
        };
    }

    /**
     * Graceful shutdown: stop the monitoring intervals and return (and log)
     * a final metrics snapshot.
     */
    async shutdown() {
        // FIX: the intervals were previously never cleared, so the monitor
        // could keep an otherwise-finished process alive.
        for (const timer of this.monitorTimers) {
            clearInterval(timer);
        }
        this.monitorTimers = [];
        const finalMetrics = this.getMetrics();
        console.log('Final performance metrics:', finalMetrics);
        // Could save to database or send to monitoring service
        return finalMetrics;
    }
}
// Singleton instance
export const performanceMonitor = new PerformanceMonitor();
|
|
187
|
-
// Helper to measure function execution time
|
|
188
|
-
/**
 * Legacy-style method decorator that logs how long the wrapped async
 * method takes, on both success and failure. The descriptor is mutated
 * in place and returned.
 *
 * @param {object} target - decorated class prototype (unused).
 * @param {string} propertyKey - method name, used in the log line.
 * @param {PropertyDescriptor} descriptor - descriptor whose value is wrapped.
 * @returns {PropertyDescriptor} the same descriptor, with value replaced.
 */
export function measurePerformance(target, propertyKey, descriptor) {
    const inner = descriptor.value;
    descriptor.value = async function (...callArgs) {
        const startedAt = performance.now();
        let result;
        try {
            result = await inner.apply(this, callArgs);
        }
        catch (error) {
            const duration = performance.now() - startedAt;
            console.error(`[PERF] ${propertyKey} failed after ${duration.toFixed(2)}ms`);
            throw error;
        }
        const duration = performance.now() - startedAt;
        console.log(`[PERF] ${propertyKey} took ${duration.toFixed(2)}ms`);
        return result;
    };
    return descriptor;
}
|
|
206
|
-
// Helper to profile memory usage
|
|
207
|
-
/**
 * Snapshot current memory usage and return a closure that, when invoked,
 * logs the delta (in MB, two decimals) under the given label.
 *
 * @param {string} label - tag used in the `[MEMORY]` log line.
 * @returns {() => void} call to log usage growth since profileMemory ran.
 */
export function profileMemory(label) {
    const baseline = process.memoryUsage();
    const toMb = (bytes) => bytes / 1024 / 1024;
    return () => {
        const current = process.memoryUsage();
        const delta = {
            rss: toMb(current.rss - baseline.rss),
            heapTotal: toMb(current.heapTotal - baseline.heapTotal),
            heapUsed: toMb(current.heapUsed - baseline.heapUsed),
            external: toMb(current.external - baseline.external)
        };
        console.log(`[MEMORY] ${label}:`, {
            rss: `${delta.rss.toFixed(2)} MB`,
            heapTotal: `${delta.heapTotal.toFixed(2)} MB`,
            heapUsed: `${delta.heapUsed.toFixed(2)} MB`,
            external: `${delta.external.toFixed(2)} MB`
        });
    };
}
|