@fenwave/agent 1.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.claude/settings.local.json +11 -0
- package/Dockerfile +12 -0
- package/LICENSE +29 -0
- package/README.md +434 -0
- package/auth.js +276 -0
- package/cli-commands.js +1185 -0
- package/containerManager.js +385 -0
- package/convert-to-esm.sh +62 -0
- package/docker-actions/apps.js +3256 -0
- package/docker-actions/config-transformer.js +380 -0
- package/docker-actions/containers.js +346 -0
- package/docker-actions/general.js +171 -0
- package/docker-actions/images.js +1128 -0
- package/docker-actions/logs.js +188 -0
- package/docker-actions/metrics.js +270 -0
- package/docker-actions/registry.js +1100 -0
- package/docker-actions/terminal.js +247 -0
- package/docker-actions/volumes.js +696 -0
- package/helper-functions.js +193 -0
- package/index.html +60 -0
- package/index.js +988 -0
- package/package.json +49 -0
- package/setup/setupWizard.js +499 -0
- package/store/agentSessionStore.js +51 -0
- package/store/agentStore.js +113 -0
- package/store/configStore.js +174 -0
- package/store/deviceCredentialStore.js +107 -0
- package/store/npmTokenStore.js +65 -0
- package/store/registryStore.js +329 -0
- package/store/setupState.js +147 -0
- package/utils/deviceInfo.js +98 -0
- package/utils/ecrAuth.js +225 -0
- package/utils/encryption.js +112 -0
- package/utils/envSetup.js +54 -0
- package/utils/errorHandler.js +327 -0
- package/utils/prerequisites.js +323 -0
- package/utils/prompts.js +318 -0
- package/websocket-server.js +364 -0
package/docker-actions/logs.js (new file)
@@ -0,0 +1,188 @@
import { docker } from './containers.js';
import { parseLogLine } from '../helper-functions.js';

async function handleLogAction(ws, clientId, action, payload, activeStreams) {
  switch (action) {
    case 'fetchContainerLogs':
      return await handleFetchContainerLogs(ws, payload);
    case 'streamContainerLogs':
      return handleStreamContainerLogs(ws, clientId, payload, activeStreams);
    case 'stopStreamLogs':
      return handleStopStreamLogs(clientId, payload, activeStreams);
    default:
      throw new Error(`Unknown log action: ${action}`);
  }
}

async function handleFetchContainerLogs(ws, payload) {
  try {
    const { containerId, lines, requestId } = payload;

    const container = docker.getContainer(containerId);

    const logs = await container.logs({
      stdout: true,
      stderr: true,
      tail: lines || 100,
      timestamps: true,
    });

    const logLines = logs
      .toString()
      .split('\n')
      .filter((line, index, array) => {
        // Remove the last empty line if it exists (common in Docker logs)
        if (index === array.length - 1 && line.trim() === '') {
          return false;
        }
        return true;
      })
      .map((line) => parseLogLine(line, containerId));

    ws.send(
      JSON.stringify({
        type: 'containerLogs',
        containerId,
        logs: logLines,
        requestId,
      })
    );
  } catch (error) {
    console.error('Error fetching container logs:', error);
    ws.send(
      JSON.stringify({
        type: 'error',
        error: 'Failed to fetch container logs: ' + error.message,
        requestId: payload.requestId,
      })
    );
  }
}

function handleStreamContainerLogs(ws, clientId, payload, activeStreams) {
  try {
    const { containerId, requestId } = payload;
    const streamId = `${clientId}-logs-${containerId}`;

    // Stop existing stream if any
    if (activeStreams.has(streamId)) {
      const existingStream = activeStreams.get(streamId);
      if (existingStream && existingStream.destroy) {
        existingStream.destroy();
      }
      activeStreams.delete(streamId);
    }

    const container = docker.getContainer(containerId);

    container
      .logs({
        follow: true,
        stdout: true,
        stderr: true,
        timestamps: true,
      })
      .then((logStream) => {
        activeStreams.set(streamId, logStream);

        logStream.on('data', (chunk) => {
          // Handle the chunk as a buffer and convert it properly
          const chunkStr = chunk.toString();
          // Split lines and filter out trailing empty lines that come from chunk boundaries
          const lines = chunkStr.split('\n').filter((line, index, array) => {
            // Keep empty lines that are in the middle, but remove trailing empty line
            if (line.length === 0 && index === array.length - 1) {
              return false; // Remove trailing empty line
            }
            return true;
          });

          lines.forEach((line) => {
            const log = parseLogLine(line, containerId);

            ws.send(
              JSON.stringify({
                type: 'logEntry',
                containerId,
                log,
                requestId,
              })
            );
          });
        });

        logStream.on('end', () => {
          activeStreams.delete(streamId);

          ws.send(
            JSON.stringify({
              type: 'logStreamEnded',
              containerId,
              requestId,
            })
          );
        });

        logStream.on('error', (error) => {
          console.error('Error streaming logs:', error);
          activeStreams.delete(streamId);

          ws.send(
            JSON.stringify({
              type: 'error',
              error: 'Failed to stream logs: ' + error.message,
              requestId,
            })
          );
        });

        ws.send(
          JSON.stringify({
            type: 'logStreamStarted',
            containerId,
            requestId,
          })
        );
      })
      .catch((error) => {
        console.error('Error creating log stream:', error);
        ws.send(
          JSON.stringify({
            type: 'error',
            error: 'Failed to create log stream: ' + error.message,
            requestId,
          })
        );
      });
  } catch (error) {
    console.error('Error streaming container logs:', error);
    ws.send(
      JSON.stringify({
        type: 'error',
        error: 'Failed to stream container logs: ' + error.message,
        requestId: payload.requestId,
      })
    );
  }
}

function handleStopStreamLogs(clientId, payload, activeStreams) {
  try {
    const { containerId } = payload;
    const streamId = `${clientId}-logs-${containerId}`;

    if (activeStreams.has(streamId)) {
      const stream = activeStreams.get(streamId);
      if (stream.destroy) stream.destroy();
      activeStreams.delete(streamId);
    }
  } catch (error) {
    console.error('Error stopping log stream:', error);
  }
}

export default { handleLogAction };

export {
  handleLogAction,
};
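
The handlers above reply with `containerLogs`, `logEntry`, `logStreamStarted`, `logStreamEnded`, and `error` messages. A minimal client sketch follows; it assumes the WebSocket server (websocket-server.js, not shown in this hunk) forwards `{ action, payload }` messages to `handleLogAction`, and the URL, container ID, and request IDs are placeholders.

// Hypothetical client sketch: the { action, payload } envelope and the URL
// are assumptions about the surrounding server, not part of this hunk.
import WebSocket from 'ws';

const socket = new WebSocket('ws://localhost:3000'); // placeholder URL

socket.on('open', () => {
  // Fetch the last 50 log lines of a container, then follow new ones.
  socket.send(JSON.stringify({
    action: 'fetchContainerLogs',
    payload: { containerId: 'abc123', lines: 50, requestId: 'req-1' },
  }));
  socket.send(JSON.stringify({
    action: 'streamContainerLogs',
    payload: { containerId: 'abc123', requestId: 'req-2' },
  }));
});

socket.on('message', (data) => {
  const msg = JSON.parse(data.toString());
  switch (msg.type) {
    case 'containerLogs': // bulk result from fetchContainerLogs
      msg.logs.forEach((log) => console.log(log));
      break;
    case 'logEntry': // one parsed line from the live stream
      console.log(msg.log);
      break;
    case 'logStreamStarted':
    case 'logStreamEnded':
      console.log(msg.type, msg.containerId);
      break;
    case 'error':
      console.error(msg.error);
      break;
  }
});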
package/docker-actions/metrics.js (new file)
@@ -0,0 +1,270 @@
import { docker } from './containers.js';
import { estimateCpuUsage, parseSize } from '../helper-functions.js';
import os from 'os';

// Store metrics history for trends
const metricsHistory = {
  system: [],
  containers: {},
};

// Maximum history entries to keep
const MAX_HISTORY_LENGTH = 20;

async function handleMetricsAction(ws, action, payload) {
  switch (action) {
    case 'fetchSystemMetrics':
      return await handleFetchSystemMetrics(ws, payload);
    case 'fetchContainerMetrics':
      return await handleFetchContainerMetrics(ws, payload);
    default:
      throw new Error(`Unknown metrics action: ${action}`);
  }
}

async function handleFetchSystemMetrics(ws, payload = {}) {
  try {
    const cpuCores = os.cpus().length;
    const totalMemory = os.totalmem();
    const freeMemory = os.freemem();
    const usedMemory = totalMemory - freeMemory;
    const memoryPercentage = (usedMemory / totalMemory) * 100;

    const cpuUsage = estimateCpuUsage();

    const info = await docker.info();

    const diskTotal =
      info.DriverStatus?.find(
        (status) => status[0] === 'Data Space Total'
      )?.[1] || '0 B';
    const diskUsed =
      info.DriverStatus?.find(
        (status) => status[0] === 'Data Space Used'
      )?.[1] || '0 B';

    const diskTotalBytes = parseSize(diskTotal);
    const diskUsedBytes = parseSize(diskUsed);
    const diskPercentage = (diskUsedBytes / diskTotalBytes) * 100 || 0;

    const networkRx = Math.floor(Math.random() * 10000) + 1000;
    const networkTx = Math.floor(Math.random() * 5000) + 500;

    const metrics = {
      cpu: {
        usage: cpuUsage,
        cores: cpuCores,
      },
      memory: {
        used: Math.round(usedMemory / (1024 * 1024)),
        total: Math.round(totalMemory / (1024 * 1024)),
        percentage: Math.round(memoryPercentage * 100) / 100,
      },
      disk: {
        used: diskUsedBytes,
        total: diskTotalBytes,
        percentage: Math.round(diskPercentage * 100) / 100,
      },
      network: {
        rx: networkRx,
        tx: networkTx,
      },
      timestamp: Date.now(),
    };

    metricsHistory.system.push(metrics);

    if (metricsHistory.system.length > MAX_HISTORY_LENGTH) {
      metricsHistory.system.shift();
    }

    ws.send(
      JSON.stringify({
        type: 'systemMetrics',
        metrics,
        history: metricsHistory.system,
        requestId: payload.requestId,
      })
    );
  } catch (error) {
    console.error('Error fetching system metrics:', error);
    ws.send(
      JSON.stringify({
        type: 'error',
        error: 'Failed to fetch system metrics: ' + error.message,
        requestId: payload.requestId,
      })
    );
  }
}

async function handleFetchContainerMetrics(ws, payload = {}) {
  try {
    const containers = await docker.listContainers();

    const metricsPromises = containers.map(async (containerInfo) => {
      try {
        const container = docker.getContainer(containerInfo.Id);
        const stats = await container.stats({ stream: false });

        let cpuPercent = 0;
        try {
          if (
            stats.cpu_stats &&
            stats.precpu_stats &&
            stats.cpu_stats.cpu_usage &&
            stats.precpu_stats.cpu_usage &&
            stats.cpu_stats.system_cpu_usage &&
            stats.precpu_stats.system_cpu_usage
          ) {
            const cpuDelta =
              stats.cpu_stats.cpu_usage.total_usage -
              stats.precpu_stats.cpu_usage.total_usage;
            const systemCpuDelta =
              stats.cpu_stats.system_cpu_usage -
              stats.precpu_stats.system_cpu_usage;
            const cpuCores = stats.cpu_stats.online_cpus || 1;

            if (systemCpuDelta > 0 && cpuDelta >= 0) {
              cpuPercent = (cpuDelta / systemCpuDelta) * cpuCores * 100;
              cpuPercent = Math.min(Math.max(cpuPercent, 0), 100 * cpuCores);
            }
          }
        } catch (err) {
          console.error(
            `Error calculating CPU percentage for container ${containerInfo.Id}:`,
            err.message
          );
        }

        let memoryUsage = 0;
        let memoryLimit = 1;
        let memoryPercent = 0;

        try {
          memoryUsage = stats.memory_stats.usage || 0;
          memoryLimit = stats.memory_stats.limit || 1;
          memoryPercent = (memoryUsage / memoryLimit) * 100;
        } catch (err) {
          console.error(
            `Error calculating memory usage for container ${containerInfo.Id}:`,
            err.message
          );
        }

        let rxBytes = 0;
        let txBytes = 0;

        try {
          if (stats.networks) {
            Object.values(stats.networks).forEach((network) => {
              rxBytes += network.rx_bytes || 0;
              txBytes += network.tx_bytes || 0;
            });
          }
        } catch (err) {
          console.error(
            `Error calculating network usage for container ${containerInfo.Id}:`,
            err.message
          );
        }

        let readBytes = 0;
        let writeBytes = 0;

        try {
          if (
            stats.blkio_stats &&
            stats.blkio_stats.io_service_bytes_recursive
          ) {
            stats.blkio_stats.io_service_bytes_recursive.forEach((io) => {
              if (io.op === 'Read') readBytes += io.value || 0;
              if (io.op === 'Write') writeBytes += io.value || 0;
            });
          }
        } catch (err) {
          console.error(
            `Error calculating disk I/O for container ${containerInfo.Id}:`,
            err.message
          );
        }

        const containerMetrics = {
          id: containerInfo.Id,
          name: containerInfo.Names[0].replace(/^\//, ''),
          cpu: isNaN(cpuPercent) ? 0 : Math.round(cpuPercent * 100) / 100,
          memory: {
            used: Math.round(memoryUsage / (1024 * 1024)),
            percentage: isNaN(memoryPercent)
              ? 0
              : Math.round(memoryPercent * 100) / 100,
          },
          network: {
            rx: Math.round(rxBytes / 1024),
            tx: Math.round(txBytes / 1024),
          },
          io: {
            read: Math.round(readBytes / 1024),
            write: Math.round(writeBytes / 1024),
          },
          timestamp: Date.now(),
        };

        if (!metricsHistory.containers[containerInfo.Id]) {
          metricsHistory.containers[containerInfo.Id] = [];
        }

        metricsHistory.containers[containerInfo.Id].push(containerMetrics);

        if (
          metricsHistory.containers[containerInfo.Id].length >
          MAX_HISTORY_LENGTH
        ) {
          metricsHistory.containers[containerInfo.Id].shift();
        }

        return containerMetrics;
      } catch (error) {
        console.error(
          `Error getting metrics for container ${containerInfo.Id}:`,
          error.message
        );
        return {
          id: containerInfo.Id,
          name: containerInfo.Names[0].replace(/^\//, ''),
          cpu: 0,
          memory: { used: 0, percentage: 0 },
          network: { rx: 0, tx: 0 },
          io: { read: 0, write: 0 },
          timestamp: Date.now(),
        };
      }
    });

    const containerMetrics = await Promise.all(metricsPromises);

    ws.send(
      JSON.stringify({
        type: 'containerMetrics',
        metrics: containerMetrics,
        history: metricsHistory.containers,
        requestId: payload.requestId,
      })
    );
  } catch (error) {
    console.error('Error fetching container metrics:', error);
    ws.send(
      JSON.stringify({
        type: 'error',
        error: 'Failed to fetch container metrics: ' + error.message,
        requestId: payload.requestId,
      })
    );
  }
}

export default { handleMetricsAction };

export {
  handleMetricsAction,
};
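
The per-container CPU figure in `handleFetchContainerMetrics` is the usual Docker stats delta calculation: the container's CPU time delta divided by the host's CPU time delta over the sampling window, scaled by core count and clamped. Below is a standalone sketch of that math with made-up sample numbers; `calcCpuPercent` is a hypothetical helper used only for illustration, not part of the package.

// Isolated sketch of the CPU formula used above; values are nanoseconds,
// as returned by the Docker stats API.
function calcCpuPercent(stats) {
  const cpuDelta =
    stats.cpu_stats.cpu_usage.total_usage -
    stats.precpu_stats.cpu_usage.total_usage;
  const systemCpuDelta =
    stats.cpu_stats.system_cpu_usage - stats.precpu_stats.system_cpu_usage;
  const cores = stats.cpu_stats.online_cpus || 1;
  if (systemCpuDelta <= 0 || cpuDelta < 0) return 0;
  // Container share of host CPU time, scaled by core count and clamped
  // to [0, 100 * cores], matching handleFetchContainerMetrics.
  return Math.min((cpuDelta / systemCpuDelta) * cores * 100, 100 * cores);
}

// Example: the container consumed 0.2s of CPU while the host accumulated
// 4s of CPU time across 4 cores -> (0.2 / 4) * 4 * 100 = 20%.
console.log(
  calcCpuPercent({
    cpu_stats: {
      cpu_usage: { total_usage: 1_200_000_000 },
      system_cpu_usage: 20_000_000_000,
      online_cpus: 4,
    },
    precpu_stats: {
      cpu_usage: { total_usage: 1_000_000_000 },
      system_cpu_usage: 16_000_000_000,
    },
  })
); // 20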