lsh-framework 1.2.0 → 1.2.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
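For reference, a comparison like this can usually be reproduced locally with the npm CLI (npm 7 or newer), assuming both versions are still published to the registry:

    npm diff --diff=lsh-framework@1.2.0 --diff=lsh-framework@1.2.1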
- package/README.md +40 -3
- package/dist/cli.js +104 -486
- package/dist/commands/doctor.js +427 -0
- package/dist/commands/init.js +371 -0
- package/dist/constants/api.js +94 -0
- package/dist/constants/commands.js +64 -0
- package/dist/constants/config.js +56 -0
- package/dist/constants/database.js +21 -0
- package/dist/constants/errors.js +79 -0
- package/dist/constants/index.js +28 -0
- package/dist/constants/paths.js +28 -0
- package/dist/constants/ui.js +73 -0
- package/dist/constants/validation.js +124 -0
- package/dist/daemon/lshd.js +11 -32
- package/dist/lib/daemon-client-helper.js +7 -4
- package/dist/lib/daemon-client.js +9 -2
- package/dist/lib/format-utils.js +163 -0
- package/dist/lib/job-manager.js +2 -1
- package/dist/lib/platform-utils.js +211 -0
- package/dist/lib/secrets-manager.js +11 -1
- package/dist/lib/string-utils.js +128 -0
- package/dist/services/daemon/daemon-registrar.js +3 -2
- package/dist/services/secrets/secrets.js +54 -30
- package/package.json +10 -74
- package/dist/app.js +0 -33
- package/dist/cicd/analytics.js +0 -261
- package/dist/cicd/auth.js +0 -269
- package/dist/cicd/cache-manager.js +0 -172
- package/dist/cicd/data-retention.js +0 -305
- package/dist/cicd/performance-monitor.js +0 -224
- package/dist/cicd/webhook-receiver.js +0 -640
- package/dist/commands/api.js +0 -346
- package/dist/commands/theme.js +0 -261
- package/dist/commands/zsh-import.js +0 -240
- package/dist/components/App.js +0 -1
- package/dist/components/Divider.js +0 -29
- package/dist/components/REPL.js +0 -43
- package/dist/components/Terminal.js +0 -232
- package/dist/components/UserInput.js +0 -30
- package/dist/daemon/api-server.js +0 -316
- package/dist/daemon/monitoring-api.js +0 -220
- package/dist/lib/api-error-handler.js +0 -185
- package/dist/lib/associative-arrays.js +0 -285
- package/dist/lib/base-api-server.js +0 -290
- package/dist/lib/brace-expansion.js +0 -160
- package/dist/lib/builtin-commands.js +0 -439
- package/dist/lib/executors/builtin-executor.js +0 -52
- package/dist/lib/extended-globbing.js +0 -411
- package/dist/lib/extended-parameter-expansion.js +0 -227
- package/dist/lib/interactive-shell.js +0 -460
- package/dist/lib/job-builtins.js +0 -582
- package/dist/lib/pathname-expansion.js +0 -216
- package/dist/lib/script-runner.js +0 -226
- package/dist/lib/shell-executor.js +0 -2504
- package/dist/lib/shell-parser.js +0 -958
- package/dist/lib/shell-types.js +0 -6
- package/dist/lib/shell.lib.js +0 -40
- package/dist/lib/theme-manager.js +0 -476
- package/dist/lib/variable-expansion.js +0 -385
- package/dist/lib/zsh-compatibility.js +0 -659
- package/dist/lib/zsh-import-manager.js +0 -707
- package/dist/lib/zsh-options.js +0 -328
- package/dist/pipeline/job-tracker.js +0 -491
- package/dist/pipeline/mcli-bridge.js +0 -309
- package/dist/pipeline/pipeline-service.js +0 -1119
- package/dist/pipeline/workflow-engine.js +0 -870
- package/dist/services/api/api.js +0 -58
- package/dist/services/api/auth.js +0 -35
- package/dist/services/api/config.js +0 -7
- package/dist/services/api/file.js +0 -22
- package/dist/services/shell/shell.js +0 -28
- package/dist/services/zapier.js +0 -16
- package/dist/simple-api-server.js +0 -148
package/dist/daemon/api-server.js (removed)
@@ -1,316 +0,0 @@
-/**
- * LSH API Server - RESTful API for daemon control and job management
- */
-import cors from 'cors';
-import jwt from 'jsonwebtoken';
-import crypto from 'crypto';
-import { handleApiOperation } from '../lib/api-error-handler.js';
-import { BaseAPIServer } from '../lib/base-api-server.js';
-export class LSHApiServer extends BaseAPIServer {
-    daemon;
-    apiConfig;
-    clients = new Set(); // SSE clients
-    constructor(daemon, config = {}) {
-        const baseConfig = {
-            port: config.port || 3030,
-            corsOrigins: config.corsOrigins || ['http://localhost:*'],
-            jsonLimit: config.jsonLimit || '10mb',
-            enableHelmet: config.enableHelmet !== false,
-            enableRequestLogging: config.enableRequestLogging !== false,
-        };
-        super(baseConfig, 'LSHApiServer');
-        this.daemon = daemon;
-        this.apiConfig = {
-            ...this.config,
-            apiKey: config.apiKey || process.env.LSH_API_KEY || crypto.randomBytes(32).toString('hex'),
-            jwtSecret: config.jwtSecret || process.env.LSH_JWT_SECRET || crypto.randomBytes(32).toString('hex'),
-            enableWebhooks: config.enableWebhooks || false,
-            webhookEndpoints: config.webhookEndpoints || [],
-            ...config
-        };
-        this.setupEventHandlers();
-    }
-    /**
-     * Override CORS configuration to support wildcard patterns
-     */
-    configureCORS() {
-        const origins = this.config.corsOrigins;
-        if (origins === '*' || !origins) {
-            return cors();
-        }
-        if (Array.isArray(origins)) {
-            return cors({
-                origin: (origin, callback) => {
-                    if (!origin || origins.some(pattern => {
-                        const regex = new RegExp(pattern.replace(/\*/g, '.*'));
-                        return regex.test(origin);
-                    })) {
-                        callback(null, true);
-                    }
-                    else {
-                        callback(new Error('Not allowed by CORS'));
-                    }
-                }
-            });
-        }
-        return cors({ origin: origins });
-    }
-    /**
-     * Helper method to handle API operations with automatic error handling and webhooks
-     */
-    async handleOperation(res, operation, successStatus = 200, webhookEvent) {
-        await handleApiOperation(res, operation, { successStatus, webhookEvent }, this.apiConfig.enableWebhooks ? this.triggerWebhook.bind(this) : undefined);
-    }
-    authenticateRequest(req, res, next) {
-        const apiKey = req.headers['x-api-key'];
-        const authHeader = req.headers['authorization'];
-        // Check API key
-        if (apiKey && apiKey === this.apiConfig.apiKey) {
-            return next();
-        }
-        // Check JWT token
-        if (authHeader && authHeader.startsWith('Bearer ')) {
-            const token = authHeader.substring(7);
-            try {
-                jwt.verify(token, this.apiConfig.jwtSecret);
-                return next();
-            }
-            catch (_err) {
-                return res.status(401).json({ error: 'Invalid token' });
-            }
-        }
-        return res.status(401).json({ error: 'Authentication required' });
-    }
-    setupRoutes() {
-        // Health check (no auth)
-        this.app.get('/health', (req, res) => {
-            res.json({ status: 'healthy', timestamp: new Date().toISOString() });
-        });
-        // Generate JWT token
-        this.app.post('/api/auth', (req, res) => {
-            const { apiKey } = req.body;
-            if (apiKey === this.apiConfig.apiKey) {
-                const token = jwt.sign({ type: 'api-access', created: Date.now() }, this.apiConfig.jwtSecret, { expiresIn: '24h' });
-                res.json({ token });
-            }
-            else {
-                res.status(401).json({ error: 'Invalid API key' });
-            }
-        });
-        // Protected routes
-        this.app.use('/api', this.authenticateRequest.bind(this));
-        // Daemon status
-        this.app.get('/api/status', async (req, res) => {
-            await this.handleOperation(res, async () => this.daemon.getStatus());
-        });
-        // Job management
-        this.app.get('/api/jobs', async (req, res) => {
-            await this.handleOperation(res, async () => {
-                const { filter, limit } = req.query;
-                return this.daemon.listJobs(filter ? JSON.parse(filter) : undefined, limit ? parseInt(limit) : undefined);
-            });
-        });
-        this.app.post('/api/jobs', async (req, res) => {
-            await this.handleOperation(res, async () => this.daemon.addJob(req.body), 201, 'job.created');
-        });
-        this.app.get('/api/jobs/:id', (req, res) => {
-            const job = this.daemon.getJob(req.params.id);
-            if (!job) {
-                return res.status(404).json({ error: 'Job not found' });
-            }
-            res.json(job);
-        });
-        this.app.post('/api/jobs/:id/start', async (req, res) => {
-            await this.handleOperation(res, async () => this.daemon.startJob(req.params.id), 200, 'job.started');
-        });
-        this.app.post('/api/jobs/:id/stop', async (req, res) => {
-            await this.handleOperation(res, async () => {
-                const { signal } = req.body;
-                return this.daemon.stopJob(req.params.id, signal);
-            }, 200, 'job.stopped');
-        });
-        this.app.post('/api/jobs/:id/trigger', async (req, res) => {
-            await this.handleOperation(res, async () => this.daemon.triggerJob(req.params.id));
-        });
-        this.app.delete('/api/jobs/:id', async (req, res) => {
-            await this.handleOperation(res, async () => {
-                const force = req.query.force === 'true';
-                const success = await this.daemon.removeJob(req.params.id, force);
-                if (!success) {
-                    throw new Error('Failed to remove job');
-                }
-                return { id: req.params.id };
-            }, 204, 'job.removed');
-        });
-        // Bulk operations
-        this.app.post('/api/jobs/bulk', async (req, res) => {
-            const { jobs } = req.body;
-            if (!Array.isArray(jobs)) {
-                return res.status(400).json({ error: 'Jobs must be an array' });
-            }
-            const results = [];
-            for (const jobSpec of jobs) {
-                try {
-                    const job = await this.daemon.addJob(jobSpec);
-                    results.push({ success: true, job });
-                }
-                catch (error) {
-                    const err = error;
-                    results.push({ success: false, error: err.message, jobSpec });
-                }
-            }
-            res.json({ results });
-        });
-        // Server-sent events for real-time updates
-        this.app.get('/api/events', (req, res) => {
-            res.writeHead(200, {
-                'Content-Type': 'text/event-stream',
-                'Cache-Control': 'no-cache',
-                'Connection': 'keep-alive',
-                'X-Accel-Buffering': 'no' // Disable nginx buffering
-            });
-            // Send initial ping
-            res.write(`data: ${JSON.stringify({ type: 'connected', timestamp: Date.now() })}\n\n`);
-            // Add client to set
-            this.clients.add(res);
-            // Setup heartbeat
-            const heartbeat = setInterval(() => {
-                res.write(`:ping\n\n`);
-            }, 30000);
-            // Cleanup on disconnect
-            req.on('close', () => {
-                clearInterval(heartbeat);
-                this.clients.delete(res);
-            });
-        });
-        // Webhook management
-        this.app.get('/api/webhooks', (req, res) => {
-            res.json({
-                enabled: this.apiConfig.enableWebhooks,
-                endpoints: this.apiConfig.webhookEndpoints
-            });
-        });
-        this.app.post('/api/webhooks', (req, res) => {
-            const { endpoint } = req.body;
-            if (!endpoint) {
-                return res.status(400).json({ error: 'Endpoint URL required' });
-            }
-            if (!this.apiConfig.webhookEndpoints?.includes(endpoint)) {
-                this.apiConfig.webhookEndpoints?.push(endpoint);
-            }
-            res.json({ success: true, endpoints: this.apiConfig.webhookEndpoints });
-        });
-        // Data export endpoints for integration
-        this.app.get('/api/export/jobs', async (req, res) => {
-            const jobs = await this.daemon.listJobs();
-            const format = req.query.format || 'json';
-            if (format === 'csv') {
-                res.setHeader('Content-Type', 'text/csv');
-                res.setHeader('Content-Disposition', 'attachment; filename="jobs.csv"');
-                const csv = this.convertToCSV(jobs);
-                res.send(csv);
-            }
-            else {
-                res.json(jobs);
-            }
-        });
-        // Supabase integration endpoint
-        this.app.post('/api/supabase/sync', async (req, res) => {
-            await this.handleOperation(res, async () => {
-                // This endpoint can be called by Supabase jobs to sync data
-                const { table, operation, data } = req.body;
-                // Emit event for mcli listener
-                this.emit('supabase:sync', { table, operation, data });
-                // Broadcast to SSE clients
-                this.broadcastToClients({
-                    type: 'supabase:sync',
-                    table,
-                    operation,
-                    data,
-                    timestamp: Date.now()
-                });
-                return { success: true, message: 'Data synced' };
-            });
-        });
-    }
-    setupEventHandlers() {
-        // Listen to daemon events and broadcast to SSE clients
-        const events = ['job:started', 'job:completed', 'job:failed', 'job:stopped'];
-        events.forEach(event => {
-            this.daemon.on(event, (data) => {
-                this.broadcastToClients({
-                    type: event,
-                    data,
-                    timestamp: Date.now()
-                });
-                if (this.apiConfig.enableWebhooks) {
-                    this.triggerWebhook(event, data);
-                }
-            });
-        });
-    }
-    broadcastToClients(data) {
-        const message = `data: ${JSON.stringify(data)}\n\n`;
-        this.clients.forEach(client => {
-            client.write(message);
-        });
-    }
-    async triggerWebhook(event, data) {
-        if (!this.apiConfig.webhookEndpoints?.length)
-            return;
-        const payload = {
-            event,
-            data,
-            timestamp: Date.now(),
-            source: 'lsh-daemon'
-        };
-        for (const endpoint of this.apiConfig.webhookEndpoints) {
-            try {
-                await fetch(endpoint, {
-                    method: 'POST',
-                    headers: {
-                        'Content-Type': 'application/json',
-                        'X-LSH-Event': event
-                    },
-                    body: JSON.stringify(payload)
-                });
-            }
-            catch (error) {
-                this.logger.error(`Webhook failed for ${endpoint}`, error);
-            }
-        }
-    }
-    convertToCSV(data) {
-        if (!data.length)
-            return '';
-        const headers = Object.keys(data[0]);
-        const csv = [
-            headers.join(','),
-            ...data.map(row => headers.map(header => {
-                const value = row[header];
-                return typeof value === 'string' && value.includes(',')
-                    ? `"${value}"`
-                    : value;
-            }).join(','))
-        ];
-        return csv.join('\n');
-    }
-    /**
-     * Override onStop to cleanup SSE connections
-     */
-    onStop() {
-        // Close all SSE connections
-        this.clients.forEach(client => client.end());
-        this.clients.clear();
-    }
-    /**
-     * Override start to log API key
-     */
-    async start() {
-        await super.start();
-        this.logger.info(`API Key: ${this.apiConfig.apiKey}`);
-    }
-    getApiKey() {
-        return this.apiConfig.apiKey;
-    }
-}
package/dist/daemon/monitoring-api.js (removed)
@@ -1,220 +0,0 @@
-/**
- * Monitoring API Server - Real-time system metrics and monitoring dashboard API
- */
-import { createClient } from '@supabase/supabase-js';
-import * as fs from 'fs/promises';
-import * as path from 'path';
-import { fileURLToPath } from 'url';
-import { BaseAPIServer } from '../lib/base-api-server.js';
-const __filename = fileURLToPath(import.meta.url);
-const __dirname = path.dirname(__filename);
-const _CACHE_DIR = '/Users/lefv/.lsh/cache';
-const MONITORING_DIR = '/Users/lefv/.lsh/monitoring';
-export class MonitoringAPIServer extends BaseAPIServer {
-    supabase = null;
-    monitoringDir;
-    constructor(config = {}) {
-        const baseConfig = {
-            port: config.port || parseInt(process.env.MONITORING_API_PORT || '3031'),
-            corsOrigins: config.corsOrigins || '*',
-            enableHelmet: config.enableHelmet !== false,
-            enableRequestLogging: config.enableRequestLogging !== false,
-        };
-        super(baseConfig, 'MonitoringAPI');
-        this.monitoringDir = config.monitoringDir || MONITORING_DIR;
-        // Setup Supabase client if credentials are provided
-        const supabaseUrl = config.supabaseUrl || process.env.SUPABASE_URL || '';
-        const supabaseAnonKey = config.supabaseAnonKey || process.env.SUPABASE_ANON_KEY || '';
-        if (supabaseUrl && supabaseAnonKey) {
-            this.supabase = createClient(supabaseUrl, supabaseAnonKey);
-            this.logger.info('Supabase client configured');
-        }
-        else {
-            this.logger.info('Supabase client not configured');
-        }
-    }
-    setupRoutes() {
-        // Health check
-        this.app.get('/api/health', (req, res) => {
-            res.json({ status: 'ok', timestamp: new Date().toISOString() });
-        });
-        // System metrics
-        this.app.get('/api/metrics', async (req, res) => {
-            try {
-                const metrics = await this.getLatestMetrics();
-                res.json(metrics);
-            }
-            catch (error) {
-                this.logger.error('Failed to get metrics', error);
-                res.status(500).json({ error: 'Failed to get metrics' });
-            }
-        });
-        // Job metrics
-        this.app.get('/api/jobs', async (req, res) => {
-            try {
-                const jobs = await this.getJobMetrics();
-                res.json(jobs);
-            }
-            catch (error) {
-                this.logger.error('Failed to get job metrics', error);
-                res.status(500).json({ error: 'Failed to get job metrics' });
-            }
-        });
-        // Politician trades
-        this.app.get('/api/trades', async (req, res) => {
-            try {
-                const trades = await this.getPoliticianTrades();
-                res.json(trades);
-            }
-            catch (error) {
-                this.logger.error('Failed to get politician trades', error);
-                res.status(500).json({ error: 'Failed to get politician trades' });
-            }
-        });
-        // Alerts
-        this.app.get('/api/alerts', async (req, res) => {
-            try {
-                const alerts = await this.getAlerts();
-                res.json(alerts);
-            }
-            catch (error) {
-                this.logger.error('Failed to get alerts', error);
-                res.status(500).json({ error: 'Failed to get alerts' });
-            }
-        });
-    }
-    async getLatestMetrics() {
-        try {
-            const metricsFile = path.join(this.monitoringDir, 'system_metrics.json');
-            const data = await fs.readFile(metricsFile, 'utf-8');
-            const metrics = JSON.parse(data);
-            return {
-                timestamp: new Date().toISOString(),
-                cpu_usage: metrics.cpu_percent || Math.random() * 100,
-                memory_usage: metrics.memory_percent || Math.random() * 100,
-                disk_usage: metrics.disk_percent || Math.random() * 100,
-                network_io: metrics.network_bytes || Math.random() * 1000000,
-                job_queue_size: metrics.job_queue_size || 0,
-                active_jobs: metrics.active_jobs || 0
-            };
-        }
-        catch (_error) {
-            return {
-                timestamp: new Date().toISOString(),
-                cpu_usage: Math.random() * 100,
-                memory_usage: Math.random() * 100,
-                disk_usage: Math.random() * 100,
-                network_io: Math.random() * 1000000,
-                job_queue_size: Math.floor(Math.random() * 10),
-                active_jobs: Math.floor(Math.random() * 5)
-            };
-        }
-    }
-    async getJobMetrics() {
-        const jobs = [
-            'politician-trading-monitor',
-            'db-health-monitor',
-            'shell-analytics',
-            'system-metrics-collector'
-        ];
-        const metrics = [];
-        for (const job of jobs) {
-            try {
-                const statusFile = path.join(this.monitoringDir, 'jobs', `${job}.status`);
-                const data = await fs.readFile(statusFile, 'utf-8');
-                const status = JSON.parse(data);
-                metrics.push({
-                    job_name: job,
-                    last_run: status.last_run || new Date().toISOString(),
-                    status: status.status || 'success',
-                    duration_ms: status.duration_ms || Math.floor(Math.random() * 5000),
-                    error_message: status.error_message
-                });
-            }
-            catch (_error) {
-                metrics.push({
-                    job_name: job,
-                    last_run: new Date(Date.now() - Math.random() * 3600000).toISOString(),
-                    status: Math.random() > 0.8 ? 'failure' : 'success',
-                    duration_ms: Math.floor(Math.random() * 5000)
-                });
-            }
-        }
-        return metrics;
-    }
-    async getPoliticianTrades() {
-        if (this.supabase) {
-            try {
-                const { data, error } = await this.supabase
-                    .from('politician_trades')
-                    .select('*')
-                    .order('transaction_date', { ascending: false })
-                    .limit(50);
-                if (!error && data) {
-                    return data.map(trade => ({
-                        name: trade.politician_name,
-                        ticker: trade.ticker,
-                        amount: trade.amount,
-                        transaction_type: trade.transaction_type,
-                        transaction_date: trade.transaction_date
-                    }));
-                }
-            }
-            catch (error) {
-                this.logger.error('Error fetching politician trades', error);
-            }
-        }
-        return [
-            { name: 'Nancy Pelosi', ticker: 'NVDA', amount: '$1M - $5M', transaction_type: 'Purchase', transaction_date: '2025-01-20' },
-            { name: 'Dan Crenshaw', ticker: 'TSLA', amount: '$500K - $1M', transaction_type: 'Sale', transaction_date: '2025-01-19' },
-            { name: 'Josh Gottheimer', ticker: 'AAPL', amount: '$100K - $250K', transaction_type: 'Purchase', transaction_date: '2025-01-18' }
-        ];
-    }
-    async getAlerts() {
-        const alerts = [];
-        try {
-            const alertsFile = path.join(this.monitoringDir, 'alerts.json');
-            const data = await fs.readFile(alertsFile, 'utf-8');
-            const fileAlerts = JSON.parse(data);
-            if (Array.isArray(fileAlerts)) {
-                return fileAlerts;
-            }
-        }
-        catch (_error) {
-            // Generate sample alerts
-        }
-        const now = Date.now();
-        if (Math.random() > 0.7) {
-            alerts.push({
-                id: `alert-${Date.now()}`,
-                severity: 'warning',
-                message: 'High memory usage detected (>80%)',
-                timestamp: new Date(now - 300000).toISOString(),
-                resolved: false
-            });
-        }
-        if (Math.random() > 0.9) {
-            alerts.push({
-                id: `alert-${Date.now() + 1}`,
-                severity: 'error',
-                message: 'Job failure: politician-trading-monitor',
-                timestamp: new Date(now - 600000).toISOString(),
-                resolved: false
-            });
-        }
-        return alerts;
-    }
-}
-// For backward compatibility, export a function that creates and starts the server
-export async function startMonitoringAPI(config) {
-    const server = new MonitoringAPIServer(config);
-    await server.start();
-    return server;
-}
-// If run directly, start the server
-if (import.meta.url === `file://${process.argv[1]}`) {
-    startMonitoringAPI().catch((error) => {
-        console.error('Failed to start monitoring API:', error);
-        process.exit(1);
-    });
-}