@tspappsen/elamax 1.2.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/cli.js ADDED
@@ -0,0 +1,105 @@
1
+ #!/usr/bin/env node
2
+ import { readFileSync } from "fs";
3
+ import { join, dirname } from "path";
4
+ import { fileURLToPath } from "url";
5
+ // IMPORTANT: Do not add static imports of paths.ts, config.ts, or daemon.ts.
6
+ // MAX_PROFILE must be set in process.env BEFORE these modules are imported.
7
+ // The dynamic import() calls below ensure correct sequencing.
8
+ const __dirname = dirname(fileURLToPath(import.meta.url));
9
// Read this package's version out of package.json (one directory above
// dist/). Falls back to "0.0.0" when the manifest is missing or unparsable.
function getVersion() {
    let version = "0.0.0";
    try {
        const manifestPath = join(__dirname, "..", "package.json");
        const manifest = JSON.parse(readFileSync(manifestPath, "utf-8"));
        if (manifest.version) {
            version = manifest.version;
        }
    }
    catch {
        // Missing or corrupt package.json — keep the "0.0.0" fallback.
    }
    return version;
}
18
/**
 * Print CLI usage to stdout.
 * Reads the version lazily via getVersion() so `max help` works even when
 * no configuration exists yet.
 * NOTE(review): the column alignment inside this template literal is
 * display-only and could not be recovered exactly from the published diff —
 * verify spacing against the original source before relying on it.
 */
function printHelp() {
    const version = getVersion();
    console.log(`
max v${version} — AI orchestrator powered by Copilot SDK

Usage:
  max <command>

Commands:
  start    Start the Max daemon (Telegram bot + HTTP API)
  tui      Connect to the daemon via terminal UI
  setup    Interactive first-run configuration
  update   Check for updates and install the latest version
  help     Show this help message

Flags:
  --profile <name>   Run as a named profile (e.g. --profile watchdog)
  --self-edit        Allow Max to modify his own source code (off by default)

Examples:
  max start                      Start the daemon
  max start --self-edit          Start with self-edit enabled
  max start --profile watchdog   Start the watchdog instance
  max tui                        Open the terminal client
  max setup                      Configure Telegram token and settings
  max setup --profile watchdog   Configure watchdog instance
`.trim());
}
46
// ---------------------------------------------------------------------------
// Argument parsing + command dispatch.
// --profile is exported into process.env BEFORE any dynamic import() below
// runs, because (per the header comment) paths/config/daemon read MAX_PROFILE
// at module-load time. That ordering is the reason this file uses dynamic
// imports instead of static ones.
// ---------------------------------------------------------------------------
const args = process.argv.slice(2);
const profileIdx = args.indexOf("--profile");
if (profileIdx !== -1 && args[profileIdx + 1]) {
    process.env.MAX_PROFILE = args[profileIdx + 1];
}
// First positional argument selects the command; no argument means "help".
const command = args[0] || "help";
switch (command) {
    case "start": {
        // Parse flags for start command. MAX_SELF_EDIT must also be set
        // before daemon.js is imported.
        const startFlags = args.slice(1);
        if (startFlags.includes("--self-edit")) {
            process.env.MAX_SELF_EDIT = "1";
        }
        // Importing daemon.js starts the daemon as a module side effect.
        await import("./daemon.js");
        break;
    }
    case "tui":
        // Terminal client — runs as a module side effect on import.
        await import("./tui/index.js");
        break;
    case "setup":
        // Interactive first-run configuration wizard.
        await import("./setup.js");
        break;
    case "update": {
        const { checkForUpdate, performUpdate } = await import("./update.js");
        const check = await checkForUpdate();
        // checkSucceeded=false means the registry was unreachable — distinct
        // from "no update available", which is handled below.
        if (!check.checkSucceeded) {
            console.error("⚠ Could not reach the npm registry. Check your network and try again.");
            process.exit(1);
        }
        if (!check.updateAvailable) {
            console.log(`max v${check.current} is already the latest version.`);
            break;
        }
        console.log(`Update available: v${check.current} → v${check.latest}`);
        console.log("Installing...");
        const result = await performUpdate();
        if (result.ok) {
            console.log(`✅ Updated to v${check.latest}`);
        }
        else {
            console.error(`❌ Update failed: ${result.output}`);
            process.exit(1);
        }
        break;
    }
    case "help":
    case "--help":
    case "-h":
        printHelp();
        break;
    case "--version":
    case "-v":
        console.log(getVersion());
        break;
    default:
        // Unknown command: print help and exit non-zero so scripts notice.
        console.error(`Unknown command: ${command}\n`);
        printHelp();
        process.exit(1);
}
105
+ //# sourceMappingURL=cli.js.map
package/dist/config.js ADDED
@@ -0,0 +1,96 @@
1
+ import { config as loadEnv } from "dotenv";
2
+ import { z } from "zod";
3
+ import { readFileSync, writeFileSync } from "fs";
4
+ import { ENV_PATH, ensureMaxHome } from "./paths.js";
5
+ import { IS_WATCHDOG, MAIN_MAX_HOME } from "./paths.js";
6
// Load from ~/.max/.env, fall back to cwd .env for dev.
// NOTE(review): dotenv by default does not overwrite variables that are
// already set, so values from ENV_PATH (loaded first) should win over the
// cwd .env — confirm against the dotenv version in use.
loadEnv({ path: ENV_PATH });
loadEnv(); // also check cwd for backwards compat
// Everything arrives from the environment as strings; numeric fields are
// converted and validated below. All fields are optional at this layer —
// the telegramEnabled/discordEnabled getters gate features on presence.
const configSchema = z.object({
    TELEGRAM_BOT_TOKEN: z.string().min(1).optional(),
    DISCORD_BOT_TOKEN: z.string().min(1).optional(),
    DISCORD_ALLOWED_CHANNEL_IDS: z.string().min(1).optional(),
    AUTHORIZED_USER_ID: z.string().min(1).optional(),
    API_PORT: z.string().optional(),
    MAIN_MAX_PM2_NAME: z.string().optional(),
    COPILOT_MODEL: z.string().optional(),
    WORKER_TIMEOUT: z.string().optional(),
});
const raw = configSchema.parse(process.env);
// AUTHORIZED_USER_ID must be a positive integer (validated after parsing).
const parsedUserId = raw.AUTHORIZED_USER_ID
    ? parseInt(raw.AUTHORIZED_USER_ID, 10)
    : undefined;
// DISCORD_ALLOWED_CHANNEL_IDS is a comma-separated list; blanks are dropped.
const parsedChannelIds = raw.DISCORD_ALLOWED_CHANNEL_IDS
    ? raw.DISCORD_ALLOWED_CHANNEL_IDS.split(",").map((id) => id.trim()).filter(Boolean)
    : [];
// Watchdog instances get their own default port so both can run at once.
const defaultApiPort = IS_WATCHDOG ? "7778" : "7777";
const parsedPort = parseInt(raw.API_PORT || defaultApiPort, 10);
if (parsedUserId !== undefined && (Number.isNaN(parsedUserId) || parsedUserId <= 0)) {
    throw new Error(`AUTHORIZED_USER_ID must be a positive integer, got: "${raw.AUTHORIZED_USER_ID}"`);
}
if (Number.isNaN(parsedPort) || parsedPort < 1 || parsedPort > 65535) {
    throw new Error(`API_PORT must be 1-65535, got: "${raw.API_PORT}"`);
}
const DEFAULT_WORKER_TIMEOUT_MS = 600_000; // 10 minutes
const parsedWorkerTimeout = raw.WORKER_TIMEOUT
    ? Number(raw.WORKER_TIMEOUT)
    : DEFAULT_WORKER_TIMEOUT_MS;
if (!Number.isInteger(parsedWorkerTimeout) || parsedWorkerTimeout <= 0) {
    throw new Error(`WORKER_TIMEOUT must be a positive integer (ms), got: "${raw.WORKER_TIMEOUT}"`);
}
export const DEFAULT_MODEL = "claude-sonnet-4.6";
// Mutable so the running daemon can switch models at runtime; persisted
// separately via persistModel().
let _copilotModel = raw.COPILOT_MODEL || DEFAULT_MODEL;
export const config = {
    telegramBotToken: raw.TELEGRAM_BOT_TOKEN,
    discordBotToken: raw.DISCORD_BOT_TOKEN,
    discordAllowedChannelIds: parsedChannelIds,
    authorizedUserId: parsedUserId,
    apiPort: parsedPort,
    isWatchdog: IS_WATCHDOG,
    mainMaxHome: MAIN_MAX_HOME,
    mainMaxPm2Name: raw.MAIN_MAX_PM2_NAME || "max",
    workerTimeoutMs: parsedWorkerTimeout,
    // Accessor pair so callers always observe the current model choice.
    get copilotModel() {
        return _copilotModel;
    },
    set copilotModel(model) {
        _copilotModel = model;
    },
    // A platform is "enabled" only when its credentials are fully configured.
    get telegramEnabled() {
        return !!this.telegramBotToken && this.authorizedUserId !== undefined;
    },
    get discordEnabled() {
        return !!this.discordBotToken && this.discordAllowedChannelIds.length > 0;
    },
    // Read live from process.env: the CLI sets MAX_SELF_EDIT before import.
    get selfEditEnabled() {
        return process.env.MAX_SELF_EDIT === "1";
    },
};
69
/**
 * Update or append an env var in ~/.max/.env.
 *
 * Rewrites only the line for `key`, preserving every other entry. Creates
 * the file when it does not exist yet.
 *
 * @param {string} key - Env var name (written literally as `KEY=value`).
 * @param {string} value - Raw value; not quoted or escaped.
 */
function persistEnvVar(key, value) {
    ensureMaxHome();
    let content;
    try {
        content = readFileSync(ENV_PATH, "utf-8");
    }
    catch (err) {
        // Only a genuinely missing file may fall through to "create it".
        // The previous bare catch treated ANY read failure (EACCES, EISDIR,
        // transient I/O error, ...) as "file missing" and overwrote the
        // whole .env with a single line, silently erasing every other
        // setting. Surface those errors instead.
        if (err?.code !== "ENOENT") {
            throw err;
        }
        writeFileSync(ENV_PATH, `${key}=${value}\n`);
        return;
    }
    const lines = content.split("\n");
    let found = false;
    const updated = lines.map((line) => {
        if (line.startsWith(`${key}=`)) {
            found = true;
            return `${key}=${value}`;
        }
        return line;
    });
    if (!found)
        updated.push(`${key}=${value}`);
    writeFileSync(ENV_PATH, updated.join("\n"));
}
92
/**
 * Persist the current model choice to ~/.max/.env so it survives restarts.
 * @param {string} model - Model identifier, e.g. "claude-sonnet-4.6".
 */
export function persistModel(model) {
    persistEnvVar("COPILOT_MODEL", model);
}
96
+ //# sourceMappingURL=config.js.map
@@ -0,0 +1,72 @@
1
+ import { approveAll } from "@github/copilot-sdk";
2
// ---------------------------------------------------------------------------
// Persistent GPT-4.1 classifier session
// ---------------------------------------------------------------------------
const CLASSIFIER_MODEL = "gpt-4.1";
// Hard cap per classification call; on timeout classifyWithLLM returns null.
const CLASSIFY_TIMEOUT_MS = 8_000;
const SYSTEM_PROMPT = `You are a message complexity classifier for an AI assistant called Max. Your ONLY job is to classify incoming user messages into one of three tiers. Respond with ONLY the tier name — nothing else.

Tiers:
- FAST: Greetings, thanks, acknowledgments, simple yes/no, trivial factual questions ("what time is it?", "hello", "thanks"), casual chat with no technical depth.
- STANDARD: Coding tasks, file operations, tool usage requests, moderate reasoning, questions about technical topics, requests to create/check/manage things, anything involving code or development workflow.
- PREMIUM: Complex architecture decisions, deep analysis, multi-step reasoning, comparing trade-offs, detailed explanations of complex topics, debugging intricate issues, designing systems, strategic planning.

Rules:
- If unsure, respond STANDARD (it's the safe default).
- Respond with exactly one word: FAST, STANDARD, or PREMIUM.`;
// Lazily-created session reused across calls; recreated by ensureSession
// when the client changes, torn down by stopClassifier.
let classifierSession;
// The client the current session was created from (identity-checked in
// ensureSession to detect client resets).
let sessionClient;
19
/**
 * Return the shared classifier session, creating it on first use.
 * A session created against an older client (e.g. after a reset) is torn
 * down and replaced so requests always go through the live client.
 */
async function ensureSession(client) {
    const reusable = classifierSession !== undefined && sessionClient === client;
    if (reusable) {
        return classifierSession;
    }
    if (classifierSession !== undefined) {
        // Best-effort teardown of the stale session; failures are ignored.
        classifierSession.destroy().catch(() => { });
        classifierSession = undefined;
    }
    const session = await client.createSession({
        model: CLASSIFIER_MODEL,
        streaming: false,
        systemMessage: { content: SYSTEM_PROMPT },
        onPermissionRequest: approveAll,
    });
    classifierSession = session;
    sessionClient = client;
    return session;
}
38
// Maps the model's one-word reply (upper-cased) to the internal tier names
// used by the rest of the app. Unknown replies fall back to "standard" in
// classifyWithLLM.
const TIER_MAP = {
    FAST: "fast",
    STANDARD: "standard",
    PREMIUM: "premium",
};
43
/**
 * Classify a message using GPT-4.1.
 * Returns the tier, or null if the classifier is unavailable / times out.
 */
export async function classifyWithLLM(client, message) {
    try {
        const session = await ensureSession(client);
        const reply = await session.sendAndWait({ prompt: message }, CLASSIFY_TIMEOUT_MS);
        const verdict = (reply?.data?.content || "").trim().toUpperCase();
        // Unrecognized replies map to the safe default tier.
        return TIER_MAP[verdict] ?? "standard";
    }
    catch (err) {
        const detail = err instanceof Error ? err.message : err;
        console.log(`[max] Classifier error (falling back to heuristics): ${detail}`);
        // Drop the (possibly broken) session so the next call rebuilds it.
        if (classifierSession) {
            classifierSession.destroy().catch(() => { });
            classifierSession = undefined;
        }
        return null;
    }
}
64
/** Tear down the classifier session (e.g. on shutdown). */
export function stopClassifier() {
    if (!classifierSession) {
        return;
    }
    // Best-effort destroy; ignore failures since we're shutting down anyway.
    classifierSession.destroy().catch(() => { });
    classifierSession = undefined;
    sessionClient = undefined;
}
72
+ //# sourceMappingURL=classifier.js.map
@@ -0,0 +1,30 @@
1
+ import { CopilotClient } from "@github/copilot-sdk";
2
// Lazily-created singleton CopilotClient shared by the whole process.
let client;
/**
 * Return the shared CopilotClient, creating and starting it on first use.
 * @returns {Promise<CopilotClient>} a started client.
 */
export async function getClient() {
    if (!client) {
        const fresh = new CopilotClient({
            autoStart: true,
            autoRestart: true,
        });
        // Cache the client only AFTER start() succeeds. The previous code
        // assigned first, so a rejected start() left a broken, never-started
        // client cached — every later getClient() call would return it
        // without ever retrying the start.
        await fresh.start();
        client = fresh;
    }
    return client;
}
13
/** Tear down the existing client and create a fresh one. */
export async function resetClient() {
    if (client) {
        try {
            await client.stop();
        }
        catch {
            // Best-effort shutdown: a stop failure must not block the reset.
        }
        client = undefined;
    }
    // Delegate creation of the replacement to the usual factory path.
    return getClient();
}
24
/** Stop and discard the shared client, if one exists. */
export async function stopClient() {
    if (!client) {
        return;
    }
    await client.stop();
    client = undefined;
}
30
+ //# sourceMappingURL=client.js.map
@@ -0,0 +1,22 @@
1
+ import { readFileSync } from "fs";
2
+ import { join } from "path";
3
+ import { homedir } from "os";
4
/**
 * Load MCP server configs from ~/.copilot/mcp-config.json.
 * Returns an empty record if the file doesn't exist or is invalid.
 */
export function loadMcpConfig() {
    const configPath = join(homedir(), ".copilot", "mcp-config.json");
    let parsed;
    try {
        parsed = JSON.parse(readFileSync(configPath, "utf-8"));
    }
    catch {
        // Missing, unreadable, or corrupt file — treat as "no servers".
        return {};
    }
    const servers = parsed?.mcpServers;
    // typeof null === "object", so the truthiness check matters here.
    return servers && typeof servers === "object" ? servers : {};
}
22
+ //# sourceMappingURL=mcp-config.js.map