@tspappsen/elamax 1.2.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +308 -0
- package/dist/api/server.js +297 -0
- package/dist/cli.js +105 -0
- package/dist/config.js +96 -0
- package/dist/copilot/classifier.js +72 -0
- package/dist/copilot/client.js +30 -0
- package/dist/copilot/mcp-config.js +22 -0
- package/dist/copilot/orchestrator.js +459 -0
- package/dist/copilot/router.js +147 -0
- package/dist/copilot/skills.js +125 -0
- package/dist/copilot/system-message.js +185 -0
- package/dist/copilot/tools.js +486 -0
- package/dist/copilot/watchdog-tools.js +312 -0
- package/dist/copilot/workspace-instructions.js +100 -0
- package/dist/daemon.js +237 -0
- package/dist/diagnosis.js +79 -0
- package/dist/discord/bot.js +505 -0
- package/dist/discord/formatter.js +29 -0
- package/dist/paths.js +37 -0
- package/dist/setup.js +476 -0
- package/dist/store/db.js +173 -0
- package/dist/telegram/bot.js +344 -0
- package/dist/telegram/formatter.js +96 -0
- package/dist/tui/index.js +1026 -0
- package/dist/update.js +72 -0
- package/dist/utils/parseJSON.js +71 -0
- package/package.json +61 -0
- package/skills/.gitkeep +0 -0
- package/skills/find-skills/SKILL.md +161 -0
- package/skills/find-skills/_meta.json +4 -0
- package/templates/instructions/AGENTS.md +18 -0
- package/templates/instructions/TOOLS.md +12 -0
|
@@ -0,0 +1,312 @@
|
|
|
1
|
+
import { z } from "zod";
|
|
2
|
+
import { defineTool } from "@github/copilot-sdk";
|
|
3
|
+
import { config } from "../config.js";
|
|
4
|
+
import { MAIN_MAX_HOME, MAX_PROFILE } from "../paths.js";
|
|
5
|
+
import { execSync } from "child_process";
|
|
6
|
+
import { existsSync, readFileSync } from "fs";
|
|
7
|
+
import { join } from "path";
|
|
8
|
+
import { hostname, uptime, totalmem, freemem, platform, loadavg } from "os";
|
|
9
|
+
import http from "http";
|
|
10
|
+
/**
 * Collect every pm2 process name that may refer to the watchdog itself:
 * the profile-derived name (`max-<profile>`) plus the process name that
 * pm2 injects into the environment of managed processes.
 */
function getOwnPm2Names() {
  const candidates = [];
  if (MAX_PROFILE) {
    candidates.push(`max-${MAX_PROFILE}`);
  }
  // pm2 exposes the managed process name via process.env.name
  if (process.env.name) {
    candidates.push(process.env.name);
  }
  return [...new Set(candidates)];
}
|
|
21
|
+
/**
 * Run a shell command synchronously with a timeout and capped output.
 *
 * @param {string} cmd shell command to execute
 * @param {number} [timeoutMs=30000] kill the command after this many ms
 * @returns {{stdout: string, stderr: string, exitCode: number|null}}
 *   stdout/stderr are truncated to 4096 chars; exitCode is 0 on success,
 *   the child's status on failure, or null when no status was reported
 *   (e.g. killed by timeout).
 */
function execCommand(cmd, timeoutMs = 30_000) {
  const OUTPUT_CAP = 4096;
  try {
    const output = execSync(cmd, {
      timeout: timeoutMs,
      stdio: ["ignore", "pipe", "pipe"],
      encoding: "utf-8",
      maxBuffer: 1024 * 1024,
    });
    return { stdout: output.slice(0, OUTPUT_CAP), stderr: "", exitCode: 0 };
  } catch (err) {
    // execSync throws on non-zero exit; the partial output rides on the error.
    const stdout = String(err.stdout || "");
    const stderr = String(err.stderr || err.message || "");
    return {
      stdout: stdout.slice(0, OUTPUT_CAP),
      stderr: stderr.slice(0, OUTPUT_CAP),
      exitCode: err.status ?? null,
    };
  }
}
|
|
39
|
+
/**
 * Render a millisecond duration as a compact human string, e.g. "1d 2h 3m"
 * or "5m 12s". Seconds are shown only for durations under one hour; zero,
 * negative, or missing input yields "unknown".
 */
function formatDuration(ms) {
  if (!ms || ms <= 0) {
    return "unknown";
  }
  const totalSeconds = Math.floor(ms / 1000);
  const days = Math.floor(totalSeconds / 86_400);
  const hours = Math.floor(totalSeconds / 3_600) % 24;
  const minutes = Math.floor(totalSeconds / 60) % 60;
  const seconds = totalSeconds % 60;
  const parts = [];
  if (days) parts.push(`${days}d`);
  if (hours) parts.push(`${hours}h`);
  if (minutes) parts.push(`${minutes}m`);
  // Seconds only matter for short durations (< 1 hour).
  if (!days && !hours) parts.push(`${seconds}s`);
  return parts.join(" ") || "0s";
}
|
|
55
|
+
/**
 * Format a byte count as a human-readable string, e.g. "1.5 KB" or "10 MB".
 *
 * Returns "unknown" for anything that is not a finite number so callers can
 * safely pass through possibly-missing values such as `proc.monit?.memory`.
 * (The previous guard only caught undefined/NaN; `null` crashed on
 * `.toFixed` and Infinity produced "Infinity TB".)
 *
 * @param {number|undefined|null} bytes raw byte count
 * @returns {string} formatted size, one decimal below 10 in a non-byte unit
 */
function formatBytes(bytes) {
  if (typeof bytes !== "number" || !Number.isFinite(bytes)) {
    return "unknown";
  }
  const units = ["B", "KB", "MB", "GB", "TB"];
  let value = bytes;
  let unitIndex = 0;
  while (value >= 1024 && unitIndex < units.length - 1) {
    value /= 1024;
    unitIndex += 1;
  }
  // One decimal for small multi-unit values; whole numbers otherwise.
  const digits = value >= 10 || unitIndex === 0 ? 0 : 1;
  return `${value.toFixed(digits)} ${units[unitIndex]}`;
}
|
|
67
|
+
/**
 * Normalize a pm2 `jlist` entry into a compact status record.
 *
 * @param proc pm2 process entry, or undefined when the target wasn't found
 * @returns {{found: boolean, name: string, status: string, uptime: string,
 *            restarts: number, pid: number|null, memory: string}}
 */
function summarizePm2Process(proc) {
  if (!proc) {
    // Placeholder record so callers always get the same shape.
    return {
      found: false,
      name: config.mainMaxPm2Name,
      status: "not found",
      uptime: "unknown",
      restarts: 0,
      pid: null,
      memory: "unknown",
    };
  }
  const env = proc.pm2_env;
  const startedAt = env?.pm_uptime; // pm2 stores the start timestamp, not a duration
  return {
    found: true,
    name: env?.name || config.mainMaxPm2Name,
    status: env?.status || "unknown",
    uptime: formatDuration(startedAt ? Date.now() - startedAt : undefined),
    restarts: env?.restart_time ?? 0,
    pid: proc.pid ?? null,
    memory: formatBytes(proc.monit?.memory),
  };
}
|
|
89
|
+
/**
 * Query `pm2 jlist` and summarize the main Max process entry.
 *
 * @returns {{ok: true, process: object, rawCount: number} |
 *           {ok: false, message: string, details: object}}
 */
function parsePm2Status() {
  const details = execCommand("pm2 jlist");
  const combined = `${details.stdout}\n${details.stderr}`.trim();
  if (details.exitCode !== 0) {
    // Distinguish "pm2 missing" from other failures for a friendlier hint.
    if (/not recognized|not found|enoent/i.test(combined)) {
      return { ok: false, message: "pm2 is not installed. Try: ps aux | grep max", details };
    }
    return { ok: false, message: combined || "Failed to query pm2.", details };
  }
  try {
    const processes = JSON.parse(details.stdout);
    const target = processes.find((entry) => entry.pm2_env?.name === config.mainMaxPm2Name);
    return { ok: true, process: summarizePm2Process(target), rawCount: processes.length };
  } catch (err) {
    // stdout is capped at 4096 chars, so very large process tables may truncate.
    const reason = err instanceof Error ? err.message : String(err);
    return { ok: false, message: `pm2 returned invalid JSON: ${reason}`, details };
  }
}
|
|
115
|
+
/**
 * Probe the main Max HTTP API on localhost. Never rejects — always resolves
 * with { reachable, detail }. Port comes from MAIN_MAX_API_PORT (default 7777).
 */
function pingMainMax() {
  const port = Number.parseInt(process.env.MAIN_MAX_API_PORT || "7777", 10) || 7777;
  return new Promise((resolve) => {
    const request = http.get(
      { host: "127.0.0.1", port, path: "/", timeout: 3_000 },
      (response) => {
        response.resume(); // drain the body so the socket is released
        resolve({
          reachable: true,
          detail: `reachable on http://127.0.0.1:${port}/ (HTTP ${response.statusCode ?? "unknown"})`,
        });
      },
    );
    // A socket timeout doesn't emit "error" on its own — destroy to trigger it.
    request.on("timeout", () => {
      request.destroy(new Error("timeout"));
    });
    request.on("error", (err) => {
      resolve({
        reachable: false,
        detail: `unreachable on http://127.0.0.1:${port}/ (${err.message})`,
      });
    });
  });
}
|
|
141
|
+
/**
 * Heuristic guard: does this shell command try to stop/restart/delete/kill
 * the watchdog's own pm2 process? The verb and the name must appear in the
 * same shell segment (the match never crosses ; | & or a newline).
 */
function isSelfTargetedPm2Command(command) {
  const escapeRegExp = (s) => s.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
  return getOwnPm2Names().some((name) => {
    const pattern = new RegExp(
      `\\bpm2\\b[^\\n\\r;|&]*\\b(stop|restart|delete|kill)\\b[^\\n\\r;|&]*\\b${escapeRegExp(name)}\\b`,
      "i",
    );
    return pattern.test(command);
  });
}
|
|
150
|
+
/**
 * Merge a pm2 process summary and an HTTP ping result into the single
 * status object the watchdog tools return to the agent.
 */
function buildMainStatus(status, httpStatus) {
  const { name, status: pm2State, uptime, restarts, pid, memory } = status;
  return {
    pm2Name: name,
    status: pm2State,
    uptime,
    restarts,
    pid,
    memory,
    http: httpStatus.detail,
  };
}
|
|
161
|
+
/**
 * Build the tool set exposed to the watchdog agent.
 *
 * All mutating tools (restart/update/run_shell) refuse to act on the
 * watchdog's own pm2 process, and every shell invocation goes through
 * execCommand's timeout + 4 KB output cap.
 *
 * @returns {Array} defineTool() definitions consumed by the Copilot SDK.
 */
export function createWatchdogTools() {
    return [
        // Read-only health probe: pm2 process table + HTTP ping of the main API.
        defineTool("check_main_max", {
            description: "Check whether the main Max process is healthy via pm2 status and an HTTP ping.",
            parameters: z.object({}),
            handler: async () => {
                const pm2Status = parsePm2Status();
                const httpStatus = await pingMainMax();
                if (!pm2Status.ok) {
                    return {
                        ok: false,
                        pm2: pm2Status.message,
                        http: httpStatus.detail,
                    };
                }
                return {
                    ok: true,
                    mainMax: buildMainStatus(pm2Status.process, httpStatus),
                };
            },
        }),
        // Restart the main process via pm2, then re-check health.
        defineTool("restart_main_max", {
            description: "Restart the main Max pm2 process, then report its fresh status.",
            parameters: z.object({
                reason: z.string().optional().describe("Optional reason for the restart"),
            }),
            handler: async (args) => {
                // Safety: never restart the pm2 process the watchdog itself runs as.
                const ownNames = getOwnPm2Names();
                if (ownNames.includes(config.mainMaxPm2Name)) {
                    return `Refusing restart: target pm2 process '${config.mainMaxPm2Name}' matches the watchdog's own process name.`;
                }
                console.log(`[watchdog] restart_main_max target=${config.mainMaxPm2Name} reason=${args.reason || "unspecified"}`);
                // JSON.stringify quotes the process name for safe shell interpolation.
                const result = execCommand(`pm2 restart ${JSON.stringify(config.mainMaxPm2Name)}`);
                const httpStatus = await pingMainMax();
                const pm2Status = parsePm2Status();
                if (!pm2Status.ok) {
                    return {
                        ok: result.exitCode === 0,
                        restart: result,
                        pm2: pm2Status.message,
                        http: httpStatus.detail,
                    };
                }
                return {
                    ok: result.exitCode === 0,
                    restart: result,
                    mainMax: buildMainStatus(pm2Status.process, httpStatus),
                };
            },
        }),
        // Tail the main daemon's log file, pointing at the rotated log when useful.
        defineTool("read_main_logs", {
            description: "Read the tail of the main Max daemon log.",
            parameters: z.object({
                lines: z.number().int().min(1).max(500).default(100).describe("How many lines to return (default 100, max 500)"),
            }),
            handler: async (args) => {
                const logPath = join(MAIN_MAX_HOME, "daemon.log");
                const oldLogPath = join(MAIN_MAX_HOME, "daemon.log.old");
                const lineCount = args.lines ?? 100;
                try {
                    const content = readFileSync(logPath, "utf-8");
                    const tail = content.split(/\r?\n/).slice(-lineCount).join("\n");
                    return {
                        path: logPath,
                        lines: lineCount,
                        content: tail,
                        oldLogAvailable: existsSync(oldLogPath) ? oldLogPath : undefined,
                    };
                }
                catch (err) {
                    // Distinguish "log not created yet" (ENOENT) from real read errors.
                    const enoent = typeof err === "object" && err !== null && "code" in err && err.code === "ENOENT";
                    if (enoent) {
                        const fallback = existsSync(oldLogPath)
                            ? ` Fallback log exists at ${oldLogPath}.`
                            : "";
                        return `Main Max log file not found at ${logPath}. Main Max may not have started yet.${fallback}`;
                    }
                    return `Failed to read main Max log: ${err instanceof Error ? err.message : String(err)}`;
                }
            },
        }),
        // Host-level vitals: uptime, memory, load average, root-disk usage.
        defineTool("server_health", {
            description: "Report basic host health for the watchdog server.",
            parameters: z.object({}),
            handler: async () => {
                const total = totalmem();
                const free = freemem();
                // `df -h /` is unavailable on Windows, so the disk probe is skipped there.
                const disk = platform() === "win32"
                    ? "Disk check skipped on Windows."
                    : (() => {
                        const result = execCommand("df -h /");
                        return result.exitCode === 0 ? result.stdout.trim() : (result.stderr || result.stdout || "Disk check failed.").trim();
                    })();
                return {
                    hostname: hostname(),
                    platform: platform(),
                    // os.uptime() reports seconds; formatDuration expects milliseconds.
                    uptime: formatDuration(uptime() * 1000),
                    memory: {
                        total: formatBytes(total),
                        free: formatBytes(free),
                        used: formatBytes(total - free),
                    },
                    loadAverage: loadavg().map((value) => value.toFixed(2)).join(", "),
                    disk,
                };
            },
        }),
        // Arbitrary shell access, guarded against self-targeting pm2 commands.
        defineTool("run_shell", {
            description: "Run a shell command on the server with a short timeout and capped output.",
            parameters: z.object({
                command: z.string().min(1).describe("Shell command to execute"),
            }),
            handler: async (args) => {
                if (isSelfTargetedPm2Command(args.command)) {
                    return `Refusing command: it appears to target the watchdog's own pm2 process (${getOwnPm2Names().join(', ')}).`;
                }
                const result = execCommand(args.command, 30_000);
                console.log(`[watchdog] run_shell command=${JSON.stringify(args.command)} exitCode=${result.exitCode} stdout=${JSON.stringify(result.stdout)} stderr=${JSON.stringify(result.stderr)}`);
                return result;
            },
        }),
        // Stop → npm update -g → start cycle for the main process (2-minute budget).
        defineTool("update_main_max", {
            description: "Stop the main Max process, update the global package, and start it again.",
            parameters: z.object({}),
            handler: async () => {
                // Same self-protection as restart_main_max.
                const ownNames = getOwnPm2Names();
                if (ownNames.includes(config.mainMaxPm2Name)) {
                    return `Refusing update: target pm2 process '${config.mainMaxPm2Name}' matches the watchdog's own process name.`;
                }
                console.log(`[watchdog] update_main_max target=${config.mainMaxPm2Name}`);
                const command = `pm2 stop ${JSON.stringify(config.mainMaxPm2Name)} && npm update -g elamax && pm2 start ${JSON.stringify(config.mainMaxPm2Name)}`;
                const result = execCommand(command, 120_000);
                const httpStatus = await pingMainMax();
                const pm2Status = parsePm2Status();
                if (!pm2Status.ok) {
                    return {
                        ok: result.exitCode === 0,
                        update: result,
                        pm2: pm2Status.message,
                        http: httpStatus.detail,
                    };
                }
                return {
                    ok: result.exitCode === 0,
                    update: result,
                    mainMax: buildMainStatus(pm2Status.process, httpStatus),
                };
            },
        }),
    ];
}
|
|
312
|
+
//# sourceMappingURL=watchdog-tools.js.map
|
|
@@ -0,0 +1,100 @@
|
|
|
1
|
+
import { readdirSync, readFileSync, copyFileSync, watch, existsSync, mkdirSync } from "fs";
|
|
2
|
+
import { join, basename, dirname } from "path";
|
|
3
|
+
import { fileURLToPath } from "url";
|
|
4
|
+
import { INSTRUCTIONS_DIR } from "../paths.js";
|
|
5
|
+
/**
 * Directory containing bundled template files shipped with Max.
 * Resolved relative to this module (two directories up, then
 * templates/instructions) so it works regardless of the process cwd.
 */
const TEMPLATES_DIR = join(dirname(fileURLToPath(import.meta.url)), "..", "..", "templates", "instructions");
|
|
7
|
+
/**
 * Scan ~/.max/instructions/ for .md files and return their concatenated
 * content as a single "Workspace Instructions" block — one "## <filename>"
 * section per file, sorted alphabetically for deterministic ordering.
 * Returns "" when the directory is missing, unreadable, or has no usable files.
 */
export function loadWorkspaceInstructions() {
    if (!existsSync(INSTRUCTIONS_DIR)) {
        return "";
    }
    let files;
    try {
        files = readdirSync(INSTRUCTIONS_DIR)
            .filter((f) => f.endsWith(".md"))
            .sort();
    }
    catch {
        return "";
    }
    const sections = [];
    for (const file of files) {
        let body;
        try {
            body = readFileSync(join(INSTRUCTIONS_DIR, file), "utf-8").trim();
        }
        catch {
            continue; // skip unreadable files
        }
        if (body) {
            sections.push(`## ${basename(file, ".md")}\n${body}`);
        }
    }
    if (sections.length === 0) {
        return "";
    }
    return `\n## Workspace Instructions\nThe following instructions were loaded from ${INSTRUCTIONS_DIR}:\n\n${sections.join("\n\n")}\n`;
}
|
|
43
|
+
/**
 * Copy bundled template .md files into ~/.max/instructions/ so new users get
 * starter AGENTS.md / TOOLS.md files to customize. Existing files are never
 * overwritten; individual copy failures are logged and skipped.
 */
export function seedDefaultInstructions() {
    if (!existsSync(INSTRUCTIONS_DIR)) {
        mkdirSync(INSTRUCTIONS_DIR, { recursive: true });
    }
    if (!existsSync(TEMPLATES_DIR)) {
        return;
    }
    let templateFiles;
    try {
        templateFiles = readdirSync(TEMPLATES_DIR);
    }
    catch {
        return;
    }
    for (const file of templateFiles) {
        if (!file.endsWith(".md")) {
            continue;
        }
        const dest = join(INSTRUCTIONS_DIR, file);
        if (existsSync(dest)) {
            continue; // never clobber a user-customized file
        }
        try {
            copyFileSync(join(TEMPLATES_DIR, file), dest);
            console.log(`[max] Seeded default instruction: ${file}`);
        }
        catch (err) {
            console.log(`[max] Could not seed ${file}: ${err instanceof Error ? err.message : err}`);
        }
    }
}
|
|
73
|
+
/**
 * Watch ~/.max/instructions/ for changes and invoke `onChange` when any .md
 * file is added, removed, or modified. Uses a 500ms debounce to coalesce
 * rapid edits.
 *
 * @param {() => void} onChange called (debounced) after a relevant change
 * @returns {() => void} cleanup function that cancels the pending debounce
 *   and stops the watcher
 */
export function watchInstructions(onChange) {
    if (!existsSync(INSTRUCTIONS_DIR)) {
        mkdirSync(INSTRUCTIONS_DIR, { recursive: true });
    }
    let debounceTimer;
    const watcher = watch(INSTRUCTIONS_DIR, (_event, filename) => {
        // Only react to .md files (filename may be null on some platforms,
        // in which case we conservatively treat the event as relevant).
        if (filename && !filename.endsWith(".md"))
            return;
        if (debounceTimer)
            clearTimeout(debounceTimer);
        debounceTimer = setTimeout(() => {
            // Fix: the log previously contained the broken text "$(unknown)"
            // instead of interpolating the changed filename.
            console.log(`[max] Instructions changed${filename ? ` (${filename})` : ""} — invalidating session`);
            onChange();
        }, 500);
    });
    return () => {
        if (debounceTimer)
            clearTimeout(debounceTimer);
        watcher.close();
    };
}
|
|
100
|
+
//# sourceMappingURL=workspace-instructions.js.map
|
package/dist/daemon.js
ADDED
|
@@ -0,0 +1,237 @@
|
|
|
1
|
+
import { getClient, stopClient } from "./copilot/client.js";
|
|
2
|
+
import { initOrchestrator, setMessageLogger, setProactiveNotify, getWorkers } from "./copilot/orchestrator.js";
|
|
3
|
+
import { startApiServer, broadcastToSSE } from "./api/server.js";
|
|
4
|
+
import { createBot, startBot, stopBot, sendProactiveMessage } from "./telegram/bot.js";
|
|
5
|
+
import { createDiscordBot, startDiscordBot, stopDiscordBot, sendDiscordProactiveMessage } from "./discord/bot.js";
|
|
6
|
+
import { getDb, closeDb } from "./store/db.js";
|
|
7
|
+
import { config } from "./config.js";
|
|
8
|
+
import { spawn } from "child_process";
|
|
9
|
+
import { checkForUpdate } from "./update.js";
|
|
10
|
+
import { DAEMON_LOG_PATH, ensureMaxHome, MAX_HOME, MAX_PROFILE } from "./paths.js";
|
|
11
|
+
import { createWriteStream, statSync, renameSync } from "fs";
|
|
12
|
+
// --- Persistent file logging (survives restarts) ---
const MAX_LOG_SIZE = 5 * 1024 * 1024; // rotate after 5 MB
/**
 * Rotate the daemon log once it exceeds MAX_LOG_SIZE by renaming it to
 * "<log>.old" (replacing any previous rotation). Best effort: a missing
 * log file — or a failed rename — is silently ignored.
 */
function rotateLogIfNeeded() {
    try {
        const { size } = statSync(DAEMON_LOG_PATH);
        if (size > MAX_LOG_SIZE) {
            renameSync(DAEMON_LOG_PATH, DAEMON_LOG_PATH + ".old");
        }
    }
    catch {
        // Log file doesn't exist yet (or can't be rotated) — nothing to do.
    }
}
|
|
25
|
+
// Make sure the Max home directory exists before touching the log, rotate any
// oversized log left by a previous run, then open the daemon log for appending.
ensureMaxHome();
rotateLogIfNeeded();
const logStream = createWriteStream(DAEMON_LOG_PATH, { flags: "a" });
|
|
28
|
+
/**
 * Append one timestamped entry to the daemon log file.
 * Errors render as stack traces (falling back to the message), strings pass
 * through unchanged, and everything else is JSON-serialized.
 */
function writeToLog(level, args) {
    const render = (value) => {
        if (value instanceof Error) {
            return value.stack ?? value.message;
        }
        return typeof value === "string" ? value : JSON.stringify(value);
    };
    const timestamp = new Date().toISOString();
    logStream.write(`${timestamp} [${level}] ${args.map(render).join(" ")}\n`);
}
|
|
33
|
+
// Tee console output: keep writing to stdout/stderr via the original bound
// methods while also persisting every line to the daemon log file.
const origLog = console.log.bind(console);
const origError = console.error.bind(console);
const origWarn = console.warn.bind(console);
console.log = (...args) => { origLog(...args); writeToLog("INFO", args); };
console.error = (...args) => { origError(...args); writeToLog("ERROR", args); };
console.warn = (...args) => { origWarn(...args); writeToLog("WARN", args); };
|
|
39
|
+
/**
 * Collapse text to a single trimmed line, capped at `max` characters.
 * An ellipsis is appended when truncation occurs.
 */
function truncate(text, max = 200) {
    const flattened = text.replaceAll("\n", " ").trim();
    if (flattened.length <= max) {
        return flattened;
    }
    return `${flattened.slice(0, max)}…`;
}
|
|
43
|
+
/**
 * Daemon boot sequence: database → Copilot SDK client → orchestrator session
 * → HTTP API → chat bots (Telegram and Discord, each optional), followed by a
 * non-blocking update check and a "back online" notice after a self-restart.
 */
async function main() {
    console.log("[max] Starting Max daemon...");
    console.log(`[max] Profile: ${MAX_PROFILE || "main"}`);
    console.log(`[max] MAX_HOME: ${MAX_HOME}`);
    if (config.selfEditEnabled) {
        console.log("[max] ⚠ Self-edit mode enabled — Max can modify his own source code");
    }
    // Set up message logging to daemon console
    setMessageLogger((direction, source, text) => {
        const arrow = direction === "in" ? "⟶" : "⟵";
        const tag = source.padEnd(8);
        console.log(`[max] ${tag} ${arrow} ${truncate(text)}`);
    });
    // Initialize SQLite
    getDb();
    console.log("[max] Database initialized");
    // Start Copilot SDK client
    console.log("[max] Starting Copilot SDK client...");
    const client = await getClient();
    console.log("[max] Copilot SDK client ready");
    // Initialize orchestrator session
    console.log("[max] Creating orchestrator session...");
    await initOrchestrator(client);
    console.log("[max] Orchestrator session ready");
    // Wire up proactive notifications — route to the originating channel
    setProactiveNotify((text, channel) => {
        console.log(`[max] bg-notify (${channel ?? "all"}) ⟵ ${truncate(text)}`);
        if (!channel || channel === "telegram") {
            if (config.telegramEnabled)
                sendProactiveMessage(text);
        }
        if (!channel || channel === "tui") {
            broadcastToSSE(text);
        }
    });
    // Start HTTP API for TUI
    await startApiServer();
    // Start Telegram bot (if configured); otherwise explain which setting is missing
    if (config.telegramEnabled) {
        createBot();
        await startBot();
    }
    else if (!config.telegramBotToken && config.authorizedUserId === undefined) {
        console.log("[max] Telegram not configured — skipping bot. Run 'max setup' to configure.");
    }
    else if (!config.telegramBotToken) {
        console.log("[max] Telegram bot token missing — skipping bot. Run 'max setup' and enter your bot token.");
    }
    else {
        console.log("[max] Telegram user ID missing — skipping bot. Run 'max setup' and enter your Telegram user ID (get it from @userinfobot).");
    }
    // Start Discord bot (if configured)
    if (config.discordEnabled) {
        createDiscordBot();
        await startDiscordBot();
    }
    else if (!config.discordBotToken) {
        console.log("[max] Discord not configured — skipping. Run 'max setup' to configure.");
    }
    else {
        console.log("[max] Discord allowed channels missing — skipping. Run 'max setup' and enter channel IDs.");
    }
    console.log("[max] Max is fully operational.");
    // Non-blocking update check (deliberately not awaited)
    checkForUpdate()
        .then(({ updateAvailable, current, latest }) => {
        if (updateAvailable) {
            console.log(`[max] ⬆ Update available: v${current} → v${latest} — run 'max update' to install`);
        }
    })
        .catch(() => { }); // silent — network may be unavailable
    // Notify user if this is a restart (not a fresh start);
    // MAX_RESTARTED is set by restartDaemon() when spawning the replacement.
    if (process.env.MAX_RESTARTED === "1") {
        if (config.telegramEnabled) {
            await sendProactiveMessage("I'm back online 🟢").catch(() => { });
        }
        if (config.discordEnabled) {
            for (const channelId of config.discordAllowedChannelIds) {
                await sendDiscordProactiveMessage("I'm back online 🟢", channelId).catch(() => { });
            }
        }
        delete process.env.MAX_RESTARTED;
    }
}
|
|
127
|
+
// Graceful shutdown
// Tracks signal progression: "idle" → "warned" (active workers, first Ctrl+C)
// → "shutting_down" (a further signal while shutting down forces exit).
let shutdownState = "idle";
/**
 * Shut down in stages: a repeated signal while already shutting down forces
 * exit(1); the first signal with running workers only warns and returns;
 * otherwise stop the bots, destroy worker sessions, close the SDK client and
 * the database, with a 3-second hard deadline.
 */
async function shutdown() {
    if (shutdownState === "shutting_down") {
        console.log("\n[max] Forced exit.");
        process.exit(1);
    }
    // Check for active workers before shutting down
    const workers = getWorkers();
    const running = Array.from(workers.values()).filter(w => w.status === "running");
    if (running.length > 0 && shutdownState === "idle") {
        const names = running.map(w => w.name).join(", ");
        console.log(`\n[max] ⚠ ${running.length} active worker(s) will be destroyed: ${names}`);
        console.log("[max] Press Ctrl+C again to shut down, or wait for workers to finish.");
        shutdownState = "warned";
        return;
    }
    shutdownState = "shutting_down";
    console.log("\n[max] Shutting down... (Ctrl+C again to force)");
    // Force exit after 3 seconds no matter what
    const forceTimer = setTimeout(() => {
        console.log("[max] Shutdown timed out — forcing exit.");
        process.exit(1);
    }, 3000);
    forceTimer.unref(); // the failsafe timer must not itself keep the process alive
    if (config.telegramEnabled) {
        try {
            await stopBot();
        }
        catch { /* best effort */ }
    }
    if (config.discordEnabled) {
        try {
            await stopDiscordBot();
        }
        catch { /* best effort */ }
    }
    // Destroy all active worker sessions to free memory
    await Promise.allSettled(Array.from(workers.values()).map((w) => w.session.destroy()));
    workers.clear();
    try {
        await stopClient();
    }
    catch { /* best effort */ }
    closeDb();
    console.log("[max] Goodbye.");
    process.exit(0);
}
|
|
175
|
+
/**
 * Restart the daemon by spawning a new process and exiting.
 *
 * Tears down bots, worker sessions, the SDK client, and the database first.
 * Under pm2 it simply exits (pm2 restarts it); otherwise it spawns a detached
 * replacement with the same argv/execArgv and MAX_RESTARTED=1 so the new
 * process can announce that it is back online.
 */
export async function restartDaemon() {
    console.log("[max] Restarting...");
    const activeWorkers = getWorkers();
    const runningCount = Array.from(activeWorkers.values()).filter(w => w.status === "running").length;
    if (runningCount > 0) {
        console.log(`[max] ⚠ Destroying ${runningCount} active worker(s) for restart`);
    }
    if (config.telegramEnabled) {
        await sendProactiveMessage("Restarting — back in a sec ⏳").catch(() => { });
        try {
            await stopBot();
        }
        catch { /* best effort */ }
    }
    if (config.discordEnabled) {
        for (const channelId of config.discordAllowedChannelIds) {
            await sendDiscordProactiveMessage("Restarting — back in a sec ⏳", channelId).catch(() => { });
        }
        try {
            await stopDiscordBot();
        }
        catch { /* best effort */ }
    }
    // Destroy all active worker sessions to free memory
    await Promise.allSettled(Array.from(activeWorkers.values()).map((w) => w.session.destroy()));
    activeWorkers.clear();
    try {
        await stopClient();
    }
    catch { /* best effort */ }
    closeDb();
    // When pm2 is managing the process, just exit — pm2 will restart automatically.
    // Spawning here too would result in two running instances.
    const underPm2 = process.env.PM2_HOME !== undefined || process.env.pm_id !== undefined;
    if (underPm2) {
        console.log("[max] Running under pm2 — exiting cleanly so pm2 handles restart.");
        process.exit(0);
    }
    // Spawn a detached replacement process with the same args (include execArgv for tsx/loaders)
    const child = spawn(process.execPath, [...process.execArgv, ...process.argv.slice(1)], {
        detached: true,
        stdio: "inherit",
        env: { ...process.env, MAX_RESTARTED: "1" },
    });
    child.unref();
    console.log("[max] New process spawned. Exiting old process.");
    process.exit(0);
}
|
|
224
|
+
// Route both Ctrl+C (SIGINT) and pm2/systemd stop (SIGTERM) through shutdown().
process.on("SIGINT", shutdown);
process.on("SIGTERM", shutdown);
// Prevent unhandled errors from crashing the daemon
process.on("unhandledRejection", (reason) => {
    console.error("[max] Unhandled rejection (kept alive):", reason);
});
process.on("uncaughtException", (err) => {
    console.error("[max] Uncaught exception (kept alive):", err);
});
// Entry point — any error escaping main() is fatal.
main().catch((err) => {
    console.error("[max] Fatal error:", err);
    process.exit(1);
});
|
|
237
|
+
//# sourceMappingURL=daemon.js.map
|