@tenux/cli 0.0.1 → 0.0.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/chunk-KN4BV53L.js +317 -0
- package/dist/cli.js +7 -13
- package/dist/index.d.ts +1 -35
- package/dist/index.js +1 -19
- package/package.json +1 -1
- package/dist/chunk-P2FMWWKI.js +0 -625
|
@@ -0,0 +1,317 @@
|
|
|
1
|
+
// src/lib/config.ts
|
|
2
|
+
import { existsSync, mkdirSync, readFileSync, writeFileSync, chmodSync } from "fs";
|
|
3
|
+
import { homedir } from "os";
|
|
4
|
+
import { join } from "path";
|
|
5
|
+
var CONFIG_DIR = join(homedir(), ".tenux");
|
|
6
|
+
var CONFIG_FILE = join(CONFIG_DIR, "config.json");
|
|
7
|
+
function getConfigPath() {
|
|
8
|
+
return CONFIG_FILE;
|
|
9
|
+
}
|
|
10
|
+
function configExists() {
|
|
11
|
+
return existsSync(CONFIG_FILE);
|
|
12
|
+
}
|
|
13
|
+
function loadConfig() {
|
|
14
|
+
if (!existsSync(CONFIG_FILE)) {
|
|
15
|
+
throw new Error(
|
|
16
|
+
"Not logged in. Run `tenux login` first."
|
|
17
|
+
);
|
|
18
|
+
}
|
|
19
|
+
const raw = readFileSync(CONFIG_FILE, "utf-8");
|
|
20
|
+
return JSON.parse(raw);
|
|
21
|
+
}
|
|
22
|
+
function saveConfig(config) {
|
|
23
|
+
if (!existsSync(CONFIG_DIR)) {
|
|
24
|
+
mkdirSync(CONFIG_DIR, { recursive: true, mode: 448 });
|
|
25
|
+
}
|
|
26
|
+
writeFileSync(CONFIG_FILE, JSON.stringify(config, null, 2), {
|
|
27
|
+
encoding: "utf-8",
|
|
28
|
+
mode: 384
|
|
29
|
+
// owner read/write only — contains auth tokens
|
|
30
|
+
});
|
|
31
|
+
try {
|
|
32
|
+
chmodSync(CONFIG_FILE, 384);
|
|
33
|
+
} catch {
|
|
34
|
+
}
|
|
35
|
+
}
|
|
36
|
+
function updateConfig(partial) {
|
|
37
|
+
const config = loadConfig();
|
|
38
|
+
const updated = { ...config, ...partial };
|
|
39
|
+
saveConfig(updated);
|
|
40
|
+
return updated;
|
|
41
|
+
}
|
|
42
|
+
|
|
43
|
+
// src/lib/supabase.ts
|
|
44
|
+
import { createClient } from "@supabase/supabase-js";
|
|
45
|
+
var client = null;
|
|
46
|
+
function getSupabase() {
|
|
47
|
+
if (client) return client;
|
|
48
|
+
const config = loadConfig();
|
|
49
|
+
client = createClient(config.supabaseUrl, config.supabaseAnonKey, {
|
|
50
|
+
auth: {
|
|
51
|
+
persistSession: false,
|
|
52
|
+
autoRefreshToken: true
|
|
53
|
+
}
|
|
54
|
+
});
|
|
55
|
+
if (config.accessToken && config.refreshToken) {
|
|
56
|
+
client.auth.setSession({
|
|
57
|
+
access_token: config.accessToken,
|
|
58
|
+
refresh_token: config.refreshToken
|
|
59
|
+
});
|
|
60
|
+
}
|
|
61
|
+
return client;
|
|
62
|
+
}
|
|
63
|
+
function resetSupabase() {
|
|
64
|
+
client = null;
|
|
65
|
+
}
|
|
66
|
+
|
|
67
|
+
// src/lib/relay.ts
|
|
68
|
+
import chalk from "chalk";
|
|
69
|
+
var Relay = class {
|
|
70
|
+
supabase;
|
|
71
|
+
channel = null;
|
|
72
|
+
handlers = /* @__PURE__ */ new Map();
|
|
73
|
+
deviceId;
|
|
74
|
+
constructor(supabase) {
|
|
75
|
+
this.supabase = supabase;
|
|
76
|
+
this.deviceId = loadConfig().deviceId;
|
|
77
|
+
}
|
|
78
|
+
/**
|
|
79
|
+
* Register a handler for a command type (e.g., "git.clone", "claude.query").
|
|
80
|
+
*/
|
|
81
|
+
on(type, handler) {
|
|
82
|
+
this.handlers.set(type, handler);
|
|
83
|
+
return this;
|
|
84
|
+
}
|
|
85
|
+
/**
|
|
86
|
+
* Start listening for commands.
|
|
87
|
+
*/
|
|
88
|
+
async start() {
|
|
89
|
+
await this.processPending();
|
|
90
|
+
this.channel = this.supabase.channel(`commands:${this.deviceId}`).on(
|
|
91
|
+
"postgres_changes",
|
|
92
|
+
{
|
|
93
|
+
event: "INSERT",
|
|
94
|
+
schema: "public",
|
|
95
|
+
table: "commands",
|
|
96
|
+
filter: `device_id=eq.${this.deviceId}`
|
|
97
|
+
},
|
|
98
|
+
(payload) => {
|
|
99
|
+
const command = payload.new;
|
|
100
|
+
this.dispatch(command);
|
|
101
|
+
}
|
|
102
|
+
).subscribe((status) => {
|
|
103
|
+
if (status === "SUBSCRIBED") {
|
|
104
|
+
console.log(chalk.green("\u2713"), "Listening for commands");
|
|
105
|
+
}
|
|
106
|
+
});
|
|
107
|
+
}
|
|
108
|
+
/**
|
|
109
|
+
* Stop listening.
|
|
110
|
+
*/
|
|
111
|
+
async stop() {
|
|
112
|
+
if (this.channel) {
|
|
113
|
+
await this.supabase.removeChannel(this.channel);
|
|
114
|
+
this.channel = null;
|
|
115
|
+
}
|
|
116
|
+
}
|
|
117
|
+
/**
|
|
118
|
+
* Process any commands that arrived while the agent was offline.
|
|
119
|
+
*/
|
|
120
|
+
async processPending() {
|
|
121
|
+
const { data: pending } = await this.supabase.from("commands").select("*").eq("device_id", this.deviceId).eq("status", "pending").order("created_at", { ascending: true });
|
|
122
|
+
if (pending && pending.length > 0) {
|
|
123
|
+
console.log(
|
|
124
|
+
chalk.yellow("\u26A1"),
|
|
125
|
+
`Processing ${pending.length} pending command(s)`
|
|
126
|
+
);
|
|
127
|
+
for (const cmd of pending) {
|
|
128
|
+
await this.dispatch(cmd);
|
|
129
|
+
}
|
|
130
|
+
}
|
|
131
|
+
}
|
|
132
|
+
/**
|
|
133
|
+
* Dispatch a command to the appropriate handler.
|
|
134
|
+
*/
|
|
135
|
+
async dispatch(command) {
|
|
136
|
+
const handler = this.handlers.get(command.type);
|
|
137
|
+
if (!handler) {
|
|
138
|
+
console.log(
|
|
139
|
+
chalk.red("\u2717"),
|
|
140
|
+
`Unknown command type: ${command.type}`
|
|
141
|
+
);
|
|
142
|
+
await this.supabase.from("commands").update({
|
|
143
|
+
status: "error",
|
|
144
|
+
result: { error: `Unknown command type: ${command.type}` },
|
|
145
|
+
completed_at: (/* @__PURE__ */ new Date()).toISOString()
|
|
146
|
+
}).eq("id", command.id);
|
|
147
|
+
return;
|
|
148
|
+
}
|
|
149
|
+
console.log(
|
|
150
|
+
chalk.blue("\u2192"),
|
|
151
|
+
`${command.type}`,
|
|
152
|
+
chalk.dim(command.id.slice(0, 8))
|
|
153
|
+
);
|
|
154
|
+
await this.supabase.from("commands").update({ status: "running" }).eq("id", command.id);
|
|
155
|
+
try {
|
|
156
|
+
await handler(command, this.supabase);
|
|
157
|
+
const { data: current } = await this.supabase.from("commands").select("status").eq("id", command.id).single();
|
|
158
|
+
if (current?.status === "running") {
|
|
159
|
+
await this.supabase.from("commands").update({
|
|
160
|
+
status: "done",
|
|
161
|
+
completed_at: (/* @__PURE__ */ new Date()).toISOString()
|
|
162
|
+
}).eq("id", command.id);
|
|
163
|
+
}
|
|
164
|
+
console.log(
|
|
165
|
+
chalk.green("\u2713"),
|
|
166
|
+
`${command.type}`,
|
|
167
|
+
chalk.dim(command.id.slice(0, 8))
|
|
168
|
+
);
|
|
169
|
+
} catch (err) {
|
|
170
|
+
const message = err instanceof Error ? err.message : String(err);
|
|
171
|
+
console.log(
|
|
172
|
+
chalk.red("\u2717"),
|
|
173
|
+
`${command.type}: ${message}`
|
|
174
|
+
);
|
|
175
|
+
await this.supabase.from("commands").update({
|
|
176
|
+
status: "error",
|
|
177
|
+
result: { error: message },
|
|
178
|
+
completed_at: (/* @__PURE__ */ new Date()).toISOString()
|
|
179
|
+
}).eq("id", command.id);
|
|
180
|
+
}
|
|
181
|
+
}
|
|
182
|
+
};
|
|
183
|
+
|
|
184
|
+
// src/handlers/claude.ts
|
|
185
|
+
import { spawn } from "child_process";
|
|
186
|
+
import { resolve } from "path";
|
|
187
|
+
async function handleClaudeQuery(command, supabase) {
|
|
188
|
+
const {
|
|
189
|
+
prompt,
|
|
190
|
+
project_path,
|
|
191
|
+
model,
|
|
192
|
+
session_id
|
|
193
|
+
} = command.payload;
|
|
194
|
+
if (!prompt) throw new Error("prompt is required");
|
|
195
|
+
const config = loadConfig();
|
|
196
|
+
let cwd = config.projectsDir;
|
|
197
|
+
if (project_path) {
|
|
198
|
+
const resolved = resolve(config.projectsDir, project_path);
|
|
199
|
+
if (resolved.startsWith(resolve(config.projectsDir))) {
|
|
200
|
+
cwd = resolved;
|
|
201
|
+
}
|
|
202
|
+
}
|
|
203
|
+
const args = [
|
|
204
|
+
"--print",
|
|
205
|
+
// non-interactive
|
|
206
|
+
"--output-format",
|
|
207
|
+
"stream-json"
|
|
208
|
+
// structured streaming output
|
|
209
|
+
];
|
|
210
|
+
if (model) args.push("--model", model);
|
|
211
|
+
if (session_id) args.push("--resume", session_id);
|
|
212
|
+
args.push("-p", prompt);
|
|
213
|
+
const claudeBin = process.platform === "win32" ? "claude.cmd" : "claude";
|
|
214
|
+
const proc = spawn(claudeBin, args, {
|
|
215
|
+
cwd,
|
|
216
|
+
shell: false,
|
|
217
|
+
env: { ...process.env },
|
|
218
|
+
// inherits Claude auth from ~/.claude
|
|
219
|
+
stdio: ["pipe", "pipe", "pipe"]
|
|
220
|
+
});
|
|
221
|
+
let seq = 0;
|
|
222
|
+
let capturedSessionId = null;
|
|
223
|
+
let batch = [];
|
|
224
|
+
const batchInterval = 100;
|
|
225
|
+
const flushBatch = async () => {
|
|
226
|
+
if (batch.length === 0) return;
|
|
227
|
+
const toInsert = [...batch];
|
|
228
|
+
batch = [];
|
|
229
|
+
await supabase.from("command_stream").insert(toInsert);
|
|
230
|
+
};
|
|
231
|
+
const batchTimer = setInterval(flushBatch, batchInterval);
|
|
232
|
+
let buffer = "";
|
|
233
|
+
proc.stdout.on("data", (chunk) => {
|
|
234
|
+
buffer += chunk.toString();
|
|
235
|
+
const lines = buffer.split("\n");
|
|
236
|
+
buffer = lines.pop() || "";
|
|
237
|
+
for (const line of lines) {
|
|
238
|
+
if (!line.trim()) continue;
|
|
239
|
+
try {
|
|
240
|
+
const event = JSON.parse(line);
|
|
241
|
+
if (event.type === "system" && event.subtype === "init" && event.session_id) {
|
|
242
|
+
capturedSessionId = event.session_id;
|
|
243
|
+
}
|
|
244
|
+
if (event.type === "result" && event.session_id) {
|
|
245
|
+
capturedSessionId = event.session_id;
|
|
246
|
+
}
|
|
247
|
+
batch.push({
|
|
248
|
+
command_id: command.id,
|
|
249
|
+
seq: seq++,
|
|
250
|
+
type: event.type || "assistant",
|
|
251
|
+
data: event
|
|
252
|
+
});
|
|
253
|
+
} catch {
|
|
254
|
+
batch.push({
|
|
255
|
+
command_id: command.id,
|
|
256
|
+
seq: seq++,
|
|
257
|
+
type: "stdout",
|
|
258
|
+
data: { text: line }
|
|
259
|
+
});
|
|
260
|
+
}
|
|
261
|
+
}
|
|
262
|
+
});
|
|
263
|
+
proc.stderr.on("data", (chunk) => {
|
|
264
|
+
batch.push({
|
|
265
|
+
command_id: command.id,
|
|
266
|
+
seq: seq++,
|
|
267
|
+
type: "stderr",
|
|
268
|
+
data: { text: chunk.toString() }
|
|
269
|
+
});
|
|
270
|
+
});
|
|
271
|
+
return new Promise((promiseResolve, reject) => {
|
|
272
|
+
proc.on("close", async (exitCode) => {
|
|
273
|
+
if (buffer.trim()) {
|
|
274
|
+
batch.push({
|
|
275
|
+
command_id: command.id,
|
|
276
|
+
seq: seq++,
|
|
277
|
+
type: "stdout",
|
|
278
|
+
data: { text: buffer }
|
|
279
|
+
});
|
|
280
|
+
}
|
|
281
|
+
batch.push({
|
|
282
|
+
command_id: command.id,
|
|
283
|
+
seq: seq++,
|
|
284
|
+
type: "done",
|
|
285
|
+
data: { exit_code: exitCode }
|
|
286
|
+
});
|
|
287
|
+
clearInterval(batchTimer);
|
|
288
|
+
await flushBatch();
|
|
289
|
+
await supabase.from("commands").update({
|
|
290
|
+
status: exitCode === 0 ? "done" : "error",
|
|
291
|
+
result: {
|
|
292
|
+
exit_code: exitCode,
|
|
293
|
+
...capturedSessionId ? { session_id: capturedSessionId } : {}
|
|
294
|
+
},
|
|
295
|
+
completed_at: (/* @__PURE__ */ new Date()).toISOString()
|
|
296
|
+
}).eq("id", command.id);
|
|
297
|
+
promiseResolve();
|
|
298
|
+
});
|
|
299
|
+
proc.on("error", async (err) => {
|
|
300
|
+
clearInterval(batchTimer);
|
|
301
|
+
await flushBatch();
|
|
302
|
+
reject(err);
|
|
303
|
+
});
|
|
304
|
+
});
|
|
305
|
+
}
|
|
306
|
+
|
|
307
|
+
export {
|
|
308
|
+
getConfigPath,
|
|
309
|
+
configExists,
|
|
310
|
+
loadConfig,
|
|
311
|
+
saveConfig,
|
|
312
|
+
updateConfig,
|
|
313
|
+
getSupabase,
|
|
314
|
+
resetSupabase,
|
|
315
|
+
Relay,
|
|
316
|
+
handleClaudeQuery
|
|
317
|
+
};
|
package/dist/cli.js
CHANGED
|
@@ -5,19 +5,10 @@ import {
|
|
|
5
5
|
getConfigPath,
|
|
6
6
|
getSupabase,
|
|
7
7
|
handleClaudeQuery,
|
|
8
|
-
handleFsDelete,
|
|
9
|
-
handleFsList,
|
|
10
|
-
handleFsRead,
|
|
11
|
-
handleFsWrite,
|
|
12
|
-
handleGitClone,
|
|
13
|
-
handleGitPull,
|
|
14
|
-
handleGitStatus,
|
|
15
|
-
handleTerminalExec,
|
|
16
|
-
handleTerminalKill,
|
|
17
8
|
loadConfig,
|
|
18
9
|
saveConfig,
|
|
19
10
|
updateConfig
|
|
20
|
-
} from "./chunk-
|
|
11
|
+
} from "./chunk-KN4BV53L.js";
|
|
21
12
|
|
|
22
13
|
// src/cli.ts
|
|
23
14
|
import { Command } from "commander";
|
|
@@ -245,9 +236,6 @@ program.command("start").description("Start the agent and listen for commands").
|
|
|
245
236
|
await supabase.from("devices").update({ last_seen_at: (/* @__PURE__ */ new Date()).toISOString(), is_online: true }).eq("id", config.deviceId);
|
|
246
237
|
}, 3e4);
|
|
247
238
|
const relay = new Relay(supabase);
|
|
248
|
-
relay.on("git.clone", handleGitClone).on("git.status", handleGitStatus).on("git.pull", handleGitPull).on("fs.read", handleFsRead).on("fs.write", handleFsWrite).on("fs.list", handleFsList).on("fs.delete", handleFsDelete).on("terminal.exec", handleTerminalExec).on("terminal.kill", handleTerminalKill).on("claude.query", handleClaudeQuery);
|
|
249
|
-
await relay.start();
|
|
250
|
-
console.log(chalk.green(" \u2713"), "Agent running. Press Ctrl+C to stop.\n");
|
|
251
239
|
const shutdown = async () => {
|
|
252
240
|
console.log(chalk.dim("\n Shutting down..."));
|
|
253
241
|
clearInterval(heartbeat);
|
|
@@ -255,6 +243,12 @@ program.command("start").description("Start the agent and listen for commands").
|
|
|
255
243
|
await supabase.from("devices").update({ is_online: false }).eq("id", config.deviceId);
|
|
256
244
|
process.exit(0);
|
|
257
245
|
};
|
|
246
|
+
relay.on("claude.query", handleClaudeQuery).on("agent.shutdown", async () => {
|
|
247
|
+
console.log(chalk.yellow("\n \u26A1 Remote shutdown requested"));
|
|
248
|
+
await shutdown();
|
|
249
|
+
});
|
|
250
|
+
await relay.start();
|
|
251
|
+
console.log(chalk.green(" \u2713"), "Agent running. Press Ctrl+C to stop.\n");
|
|
258
252
|
process.on("SIGINT", shutdown);
|
|
259
253
|
process.on("SIGTERM", shutdown);
|
|
260
254
|
});
|
package/dist/index.d.ts
CHANGED
|
@@ -87,40 +87,6 @@ declare function loadConfig(): AgentConfig;
|
|
|
87
87
|
declare function saveConfig(config: AgentConfig): void;
|
|
88
88
|
declare function updateConfig(partial: Partial<AgentConfig>): AgentConfig;
|
|
89
89
|
|
|
90
|
-
/**
|
|
91
|
-
* Git command handlers — clone, status, pull, push, branch operations.
|
|
92
|
-
*/
|
|
93
|
-
|
|
94
|
-
/** git.clone — Clone a repository into the projects directory */
|
|
95
|
-
declare function handleGitClone(command: Command, supabase: SupabaseClient): Promise<void>;
|
|
96
|
-
/** git.status — Get status of a project repo */
|
|
97
|
-
declare function handleGitStatus(command: Command, supabase: SupabaseClient): Promise<void>;
|
|
98
|
-
/** git.pull — Pull latest changes */
|
|
99
|
-
declare function handleGitPull(command: Command, supabase: SupabaseClient): Promise<void>;
|
|
100
|
-
|
|
101
|
-
/**
|
|
102
|
-
* Filesystem command handlers — read, write, list, mkdir.
|
|
103
|
-
*/
|
|
104
|
-
|
|
105
|
-
/** fs.read — Read a file's contents */
|
|
106
|
-
declare function handleFsRead(command: Command, supabase: SupabaseClient): Promise<void>;
|
|
107
|
-
/** fs.write — Write content to a file */
|
|
108
|
-
declare function handleFsWrite(command: Command, supabase: SupabaseClient): Promise<void>;
|
|
109
|
-
/** fs.list — List directory contents */
|
|
110
|
-
declare function handleFsList(command: Command, supabase: SupabaseClient): Promise<void>;
|
|
111
|
-
/** fs.delete — Delete a file or directory */
|
|
112
|
-
declare function handleFsDelete(command: Command, supabase: SupabaseClient): Promise<void>;
|
|
113
|
-
|
|
114
|
-
/**
|
|
115
|
-
* Terminal command handler — execute commands and stream output
|
|
116
|
-
* back through Supabase Realtime via command_stream table.
|
|
117
|
-
*/
|
|
118
|
-
|
|
119
|
-
/** terminal.exec — Run a command and stream stdout/stderr */
|
|
120
|
-
declare function handleTerminalExec(command: Command, supabase: SupabaseClient): Promise<void>;
|
|
121
|
-
/** terminal.kill — Kill a running process */
|
|
122
|
-
declare function handleTerminalKill(command: Command, supabase: SupabaseClient): Promise<void>;
|
|
123
|
-
|
|
124
90
|
/**
|
|
125
91
|
* Claude command handler — spawns Claude Code CLI as a child process
|
|
126
92
|
* and streams responses back through command_stream.
|
|
@@ -131,4 +97,4 @@ declare function handleTerminalKill(command: Command, supabase: SupabaseClient):
|
|
|
131
97
|
/** claude.query — Run a prompt through Claude Code CLI */
|
|
132
98
|
declare function handleClaudeQuery(command: Command, supabase: SupabaseClient): Promise<void>;
|
|
133
99
|
|
|
134
|
-
export { type AgentConfig, Relay, configExists, getSupabase, handleClaudeQuery,
|
|
100
|
+
export { type AgentConfig, Relay, configExists, getSupabase, handleClaudeQuery, loadConfig, resetSupabase, saveConfig, updateConfig };
|
package/dist/index.js
CHANGED
|
@@ -3,34 +3,16 @@ import {
|
|
|
3
3
|
configExists,
|
|
4
4
|
getSupabase,
|
|
5
5
|
handleClaudeQuery,
|
|
6
|
-
handleFsDelete,
|
|
7
|
-
handleFsList,
|
|
8
|
-
handleFsRead,
|
|
9
|
-
handleFsWrite,
|
|
10
|
-
handleGitClone,
|
|
11
|
-
handleGitPull,
|
|
12
|
-
handleGitStatus,
|
|
13
|
-
handleTerminalExec,
|
|
14
|
-
handleTerminalKill,
|
|
15
6
|
loadConfig,
|
|
16
7
|
resetSupabase,
|
|
17
8
|
saveConfig,
|
|
18
9
|
updateConfig
|
|
19
|
-
} from "./chunk-
|
|
10
|
+
} from "./chunk-KN4BV53L.js";
|
|
20
11
|
export {
|
|
21
12
|
Relay,
|
|
22
13
|
configExists,
|
|
23
14
|
getSupabase,
|
|
24
15
|
handleClaudeQuery,
|
|
25
|
-
handleFsDelete,
|
|
26
|
-
handleFsList,
|
|
27
|
-
handleFsRead,
|
|
28
|
-
handleFsWrite,
|
|
29
|
-
handleGitClone,
|
|
30
|
-
handleGitPull,
|
|
31
|
-
handleGitStatus,
|
|
32
|
-
handleTerminalExec,
|
|
33
|
-
handleTerminalKill,
|
|
34
16
|
loadConfig,
|
|
35
17
|
resetSupabase,
|
|
36
18
|
saveConfig,
|
package/package.json
CHANGED
package/dist/chunk-P2FMWWKI.js
DELETED
|
@@ -1,625 +0,0 @@
|
|
|
1
|
-
// src/lib/config.ts
|
|
2
|
-
import { existsSync, mkdirSync, readFileSync, writeFileSync, chmodSync } from "fs";
|
|
3
|
-
import { homedir } from "os";
|
|
4
|
-
import { join } from "path";
|
|
5
|
-
var CONFIG_DIR = join(homedir(), ".tenux");
|
|
6
|
-
var CONFIG_FILE = join(CONFIG_DIR, "config.json");
|
|
7
|
-
function getConfigPath() {
|
|
8
|
-
return CONFIG_FILE;
|
|
9
|
-
}
|
|
10
|
-
function configExists() {
|
|
11
|
-
return existsSync(CONFIG_FILE);
|
|
12
|
-
}
|
|
13
|
-
function loadConfig() {
|
|
14
|
-
if (!existsSync(CONFIG_FILE)) {
|
|
15
|
-
throw new Error(
|
|
16
|
-
"Not logged in. Run `tenux login` first."
|
|
17
|
-
);
|
|
18
|
-
}
|
|
19
|
-
const raw = readFileSync(CONFIG_FILE, "utf-8");
|
|
20
|
-
return JSON.parse(raw);
|
|
21
|
-
}
|
|
22
|
-
function saveConfig(config) {
|
|
23
|
-
if (!existsSync(CONFIG_DIR)) {
|
|
24
|
-
mkdirSync(CONFIG_DIR, { recursive: true, mode: 448 });
|
|
25
|
-
}
|
|
26
|
-
writeFileSync(CONFIG_FILE, JSON.stringify(config, null, 2), {
|
|
27
|
-
encoding: "utf-8",
|
|
28
|
-
mode: 384
|
|
29
|
-
// owner read/write only — contains auth tokens
|
|
30
|
-
});
|
|
31
|
-
try {
|
|
32
|
-
chmodSync(CONFIG_FILE, 384);
|
|
33
|
-
} catch {
|
|
34
|
-
}
|
|
35
|
-
}
|
|
36
|
-
function updateConfig(partial) {
|
|
37
|
-
const config = loadConfig();
|
|
38
|
-
const updated = { ...config, ...partial };
|
|
39
|
-
saveConfig(updated);
|
|
40
|
-
return updated;
|
|
41
|
-
}
|
|
42
|
-
|
|
43
|
-
// src/lib/supabase.ts
|
|
44
|
-
import { createClient } from "@supabase/supabase-js";
|
|
45
|
-
var client = null;
|
|
46
|
-
function getSupabase() {
|
|
47
|
-
if (client) return client;
|
|
48
|
-
const config = loadConfig();
|
|
49
|
-
client = createClient(config.supabaseUrl, config.supabaseAnonKey, {
|
|
50
|
-
auth: {
|
|
51
|
-
persistSession: false,
|
|
52
|
-
autoRefreshToken: true
|
|
53
|
-
}
|
|
54
|
-
});
|
|
55
|
-
if (config.accessToken && config.refreshToken) {
|
|
56
|
-
client.auth.setSession({
|
|
57
|
-
access_token: config.accessToken,
|
|
58
|
-
refresh_token: config.refreshToken
|
|
59
|
-
});
|
|
60
|
-
}
|
|
61
|
-
return client;
|
|
62
|
-
}
|
|
63
|
-
function resetSupabase() {
|
|
64
|
-
client = null;
|
|
65
|
-
}
|
|
66
|
-
|
|
67
|
-
// src/lib/relay.ts
|
|
68
|
-
import chalk from "chalk";
|
|
69
|
-
var Relay = class {
|
|
70
|
-
supabase;
|
|
71
|
-
channel = null;
|
|
72
|
-
handlers = /* @__PURE__ */ new Map();
|
|
73
|
-
deviceId;
|
|
74
|
-
constructor(supabase) {
|
|
75
|
-
this.supabase = supabase;
|
|
76
|
-
this.deviceId = loadConfig().deviceId;
|
|
77
|
-
}
|
|
78
|
-
/**
|
|
79
|
-
* Register a handler for a command type (e.g., "git.clone", "claude.query").
|
|
80
|
-
*/
|
|
81
|
-
on(type, handler) {
|
|
82
|
-
this.handlers.set(type, handler);
|
|
83
|
-
return this;
|
|
84
|
-
}
|
|
85
|
-
/**
|
|
86
|
-
* Start listening for commands.
|
|
87
|
-
*/
|
|
88
|
-
async start() {
|
|
89
|
-
await this.processPending();
|
|
90
|
-
this.channel = this.supabase.channel(`commands:${this.deviceId}`).on(
|
|
91
|
-
"postgres_changes",
|
|
92
|
-
{
|
|
93
|
-
event: "INSERT",
|
|
94
|
-
schema: "public",
|
|
95
|
-
table: "commands",
|
|
96
|
-
filter: `device_id=eq.${this.deviceId}`
|
|
97
|
-
},
|
|
98
|
-
(payload) => {
|
|
99
|
-
const command = payload.new;
|
|
100
|
-
this.dispatch(command);
|
|
101
|
-
}
|
|
102
|
-
).subscribe((status) => {
|
|
103
|
-
if (status === "SUBSCRIBED") {
|
|
104
|
-
console.log(chalk.green("\u2713"), "Listening for commands");
|
|
105
|
-
}
|
|
106
|
-
});
|
|
107
|
-
}
|
|
108
|
-
/**
|
|
109
|
-
* Stop listening.
|
|
110
|
-
*/
|
|
111
|
-
async stop() {
|
|
112
|
-
if (this.channel) {
|
|
113
|
-
await this.supabase.removeChannel(this.channel);
|
|
114
|
-
this.channel = null;
|
|
115
|
-
}
|
|
116
|
-
}
|
|
117
|
-
/**
|
|
118
|
-
* Process any commands that arrived while the agent was offline.
|
|
119
|
-
*/
|
|
120
|
-
async processPending() {
|
|
121
|
-
const { data: pending } = await this.supabase.from("commands").select("*").eq("device_id", this.deviceId).eq("status", "pending").order("created_at", { ascending: true });
|
|
122
|
-
if (pending && pending.length > 0) {
|
|
123
|
-
console.log(
|
|
124
|
-
chalk.yellow("\u26A1"),
|
|
125
|
-
`Processing ${pending.length} pending command(s)`
|
|
126
|
-
);
|
|
127
|
-
for (const cmd of pending) {
|
|
128
|
-
await this.dispatch(cmd);
|
|
129
|
-
}
|
|
130
|
-
}
|
|
131
|
-
}
|
|
132
|
-
/**
|
|
133
|
-
* Dispatch a command to the appropriate handler.
|
|
134
|
-
*/
|
|
135
|
-
async dispatch(command) {
|
|
136
|
-
const handler = this.handlers.get(command.type);
|
|
137
|
-
if (!handler) {
|
|
138
|
-
console.log(
|
|
139
|
-
chalk.red("\u2717"),
|
|
140
|
-
`Unknown command type: ${command.type}`
|
|
141
|
-
);
|
|
142
|
-
await this.supabase.from("commands").update({
|
|
143
|
-
status: "error",
|
|
144
|
-
result: { error: `Unknown command type: ${command.type}` },
|
|
145
|
-
completed_at: (/* @__PURE__ */ new Date()).toISOString()
|
|
146
|
-
}).eq("id", command.id);
|
|
147
|
-
return;
|
|
148
|
-
}
|
|
149
|
-
console.log(
|
|
150
|
-
chalk.blue("\u2192"),
|
|
151
|
-
`${command.type}`,
|
|
152
|
-
chalk.dim(command.id.slice(0, 8))
|
|
153
|
-
);
|
|
154
|
-
await this.supabase.from("commands").update({ status: "running" }).eq("id", command.id);
|
|
155
|
-
try {
|
|
156
|
-
await handler(command, this.supabase);
|
|
157
|
-
const { data: current } = await this.supabase.from("commands").select("status").eq("id", command.id).single();
|
|
158
|
-
if (current?.status === "running") {
|
|
159
|
-
await this.supabase.from("commands").update({
|
|
160
|
-
status: "done",
|
|
161
|
-
completed_at: (/* @__PURE__ */ new Date()).toISOString()
|
|
162
|
-
}).eq("id", command.id);
|
|
163
|
-
}
|
|
164
|
-
console.log(
|
|
165
|
-
chalk.green("\u2713"),
|
|
166
|
-
`${command.type}`,
|
|
167
|
-
chalk.dim(command.id.slice(0, 8))
|
|
168
|
-
);
|
|
169
|
-
} catch (err) {
|
|
170
|
-
const message = err instanceof Error ? err.message : String(err);
|
|
171
|
-
console.log(
|
|
172
|
-
chalk.red("\u2717"),
|
|
173
|
-
`${command.type}: ${message}`
|
|
174
|
-
);
|
|
175
|
-
await this.supabase.from("commands").update({
|
|
176
|
-
status: "error",
|
|
177
|
-
result: { error: message },
|
|
178
|
-
completed_at: (/* @__PURE__ */ new Date()).toISOString()
|
|
179
|
-
}).eq("id", command.id);
|
|
180
|
-
}
|
|
181
|
-
}
|
|
182
|
-
};
|
|
183
|
-
|
|
184
|
-
// src/handlers/git.ts
|
|
185
|
-
import { spawn } from "child_process";
|
|
186
|
-
import { existsSync as existsSync2, mkdirSync as mkdirSync2 } from "fs";
|
|
187
|
-
import { join as join2 } from "path";
|
|
188
|
-
function runGit(args, cwd) {
|
|
189
|
-
return new Promise((resolve5, reject) => {
|
|
190
|
-
const child = spawn("git", args, {
|
|
191
|
-
cwd,
|
|
192
|
-
shell: false,
|
|
193
|
-
stdio: ["ignore", "pipe", "pipe"]
|
|
194
|
-
});
|
|
195
|
-
let stdout = "";
|
|
196
|
-
let stderr = "";
|
|
197
|
-
child.stdout.on("data", (d) => stdout += d.toString());
|
|
198
|
-
child.stderr.on("data", (d) => stderr += d.toString());
|
|
199
|
-
child.on("close", (code) => {
|
|
200
|
-
if (code === 0) resolve5({ stdout, stderr });
|
|
201
|
-
else reject(new Error(`git ${args[0]} failed (exit ${code}): ${stderr || stdout}`));
|
|
202
|
-
});
|
|
203
|
-
child.on("error", reject);
|
|
204
|
-
});
|
|
205
|
-
}
|
|
206
|
-
async function handleGitClone(command, supabase) {
|
|
207
|
-
const { url, name, project_id } = command.payload;
|
|
208
|
-
if (!url) throw new Error("url is required");
|
|
209
|
-
const config = loadConfig();
|
|
210
|
-
const projectsDir = config.projectsDir;
|
|
211
|
-
if (!existsSync2(projectsDir)) {
|
|
212
|
-
mkdirSync2(projectsDir, { recursive: true });
|
|
213
|
-
}
|
|
214
|
-
const repoName = name ?? url.replace(/\.git$/, "").split("/").pop() ?? "project";
|
|
215
|
-
const targetDir = join2(projectsDir, repoName);
|
|
216
|
-
if (existsSync2(targetDir)) {
|
|
217
|
-
throw new Error(`Directory already exists: ${targetDir}`);
|
|
218
|
-
}
|
|
219
|
-
if (project_id) {
|
|
220
|
-
await supabase.from("projects").update({ status: "cloning" }).eq("id", project_id);
|
|
221
|
-
}
|
|
222
|
-
await runGit(["clone", url, targetDir]);
|
|
223
|
-
const pm = detectPackageManager(targetDir);
|
|
224
|
-
if (pm && project_id) {
|
|
225
|
-
await supabase.from("projects").update({ status: "installing", package_manager: pm }).eq("id", project_id);
|
|
226
|
-
await installDeps(pm, targetDir);
|
|
227
|
-
}
|
|
228
|
-
if (project_id) {
|
|
229
|
-
await supabase.from("projects").update({
|
|
230
|
-
status: "ready",
|
|
231
|
-
local_path: targetDir,
|
|
232
|
-
package_manager: pm,
|
|
233
|
-
error_message: null
|
|
234
|
-
}).eq("id", project_id);
|
|
235
|
-
}
|
|
236
|
-
await supabase.from("commands").update({
|
|
237
|
-
status: "done",
|
|
238
|
-
result: { path: targetDir, package_manager: pm },
|
|
239
|
-
completed_at: (/* @__PURE__ */ new Date()).toISOString()
|
|
240
|
-
}).eq("id", command.id);
|
|
241
|
-
}
|
|
242
|
-
async function handleGitStatus(command, supabase) {
|
|
243
|
-
const { path } = command.payload;
|
|
244
|
-
if (!path) throw new Error("path is required");
|
|
245
|
-
const { stdout } = await runGit(["status", "--porcelain"], path);
|
|
246
|
-
const { stdout: branch } = await runGit(
|
|
247
|
-
["rev-parse", "--abbrev-ref", "HEAD"],
|
|
248
|
-
path
|
|
249
|
-
);
|
|
250
|
-
await supabase.from("commands").update({
|
|
251
|
-
status: "done",
|
|
252
|
-
result: {
|
|
253
|
-
branch: branch.trim(),
|
|
254
|
-
changes: stdout.trim().split("\n").filter(Boolean),
|
|
255
|
-
clean: stdout.trim() === ""
|
|
256
|
-
},
|
|
257
|
-
completed_at: (/* @__PURE__ */ new Date()).toISOString()
|
|
258
|
-
}).eq("id", command.id);
|
|
259
|
-
}
|
|
260
|
-
async function handleGitPull(command, supabase) {
|
|
261
|
-
const { path } = command.payload;
|
|
262
|
-
if (!path) throw new Error("path is required");
|
|
263
|
-
const { stdout } = await runGit(["pull"], path);
|
|
264
|
-
await supabase.from("commands").update({
|
|
265
|
-
status: "done",
|
|
266
|
-
result: { output: stdout.trim() },
|
|
267
|
-
completed_at: (/* @__PURE__ */ new Date()).toISOString()
|
|
268
|
-
}).eq("id", command.id);
|
|
269
|
-
}
|
|
270
|
-
var LOCKFILE_MAP = {
|
|
271
|
-
"package-lock.json": "npm",
|
|
272
|
-
"yarn.lock": "yarn",
|
|
273
|
-
"pnpm-lock.yaml": "pnpm",
|
|
274
|
-
"bun.lockb": "bun",
|
|
275
|
-
"requirements.txt": "pip",
|
|
276
|
-
"go.mod": "go",
|
|
277
|
-
"Cargo.toml": "cargo"
|
|
278
|
-
};
|
|
279
|
-
function detectPackageManager(projectPath) {
|
|
280
|
-
for (const [file, pm] of Object.entries(LOCKFILE_MAP)) {
|
|
281
|
-
if (existsSync2(join2(projectPath, file))) return pm;
|
|
282
|
-
}
|
|
283
|
-
if (existsSync2(join2(projectPath, "package.json"))) return "npm";
|
|
284
|
-
return null;
|
|
285
|
-
}
|
|
286
|
-
var INSTALL_COMMANDS = {
|
|
287
|
-
npm: ["npm", ["install"]],
|
|
288
|
-
yarn: ["yarn", ["install"]],
|
|
289
|
-
pnpm: ["pnpm", ["install"]],
|
|
290
|
-
bun: ["bun", ["install"]],
|
|
291
|
-
pip: ["pip", ["install", "-r", "requirements.txt"]],
|
|
292
|
-
go: ["go", ["mod", "download"]],
|
|
293
|
-
cargo: ["cargo", ["fetch"]]
|
|
294
|
-
};
|
|
295
|
-
function installDeps(pm, cwd) {
|
|
296
|
-
const [cmd, args] = INSTALL_COMMANDS[pm] ?? [pm, ["install"]];
|
|
297
|
-
return new Promise((resolve5, reject) => {
|
|
298
|
-
const child = spawn(cmd, args, { cwd, shell: true, stdio: "ignore" });
|
|
299
|
-
const timeout = setTimeout(() => {
|
|
300
|
-
child.kill("SIGTERM");
|
|
301
|
-
reject(new Error(`Install timed out after 10 minutes`));
|
|
302
|
-
}, 10 * 60 * 1e3);
|
|
303
|
-
child.on("close", (code) => {
|
|
304
|
-
clearTimeout(timeout);
|
|
305
|
-
if (code === 0) resolve5();
|
|
306
|
-
else reject(new Error(`${pm} install failed (exit ${code})`));
|
|
307
|
-
});
|
|
308
|
-
child.on("error", (err) => {
|
|
309
|
-
clearTimeout(timeout);
|
|
310
|
-
reject(err);
|
|
311
|
-
});
|
|
312
|
-
});
|
|
313
|
-
}
|
|
314
|
-
|
|
315
|
-
// src/handlers/fs.ts
|
|
316
|
-
import { existsSync as existsSync3, mkdirSync as mkdirSync3, readFileSync as readFileSync2, writeFileSync as writeFileSync2, readdirSync, statSync, realpathSync } from "fs";
|
|
317
|
-
import { rm } from "fs/promises";
|
|
318
|
-
import { join as join3, resolve as resolve2, extname } from "path";
|
|
319
|
-
function validatePath(filePath) {
|
|
320
|
-
const config = loadConfig();
|
|
321
|
-
const resolved = resolve2(filePath);
|
|
322
|
-
const projectsDir = resolve2(config.projectsDir);
|
|
323
|
-
if (!resolved.startsWith(projectsDir)) {
|
|
324
|
-
throw new Error(`Access denied: path is outside projects directory`);
|
|
325
|
-
}
|
|
326
|
-
if (existsSync3(resolved)) {
|
|
327
|
-
const realPath = realpathSync(resolved);
|
|
328
|
-
const realProjectsDir = realpathSync(projectsDir);
|
|
329
|
-
if (!realPath.startsWith(realProjectsDir)) {
|
|
330
|
-
throw new Error(`Access denied: symlink points outside projects directory`);
|
|
331
|
-
}
|
|
332
|
-
}
|
|
333
|
-
return resolved;
|
|
334
|
-
}
|
|
335
|
-
async function handleFsRead(command, supabase) {
|
|
336
|
-
const { path, encoding = "utf-8" } = command.payload;
|
|
337
|
-
const resolved = validatePath(path);
|
|
338
|
-
if (!existsSync3(resolved)) {
|
|
339
|
-
throw new Error(`File not found: ${path}`);
|
|
340
|
-
}
|
|
341
|
-
const content = readFileSync2(resolved, encoding);
|
|
342
|
-
await supabase.from("commands").update({
|
|
343
|
-
status: "done",
|
|
344
|
-
result: { content, size: Buffer.byteLength(content) },
|
|
345
|
-
completed_at: (/* @__PURE__ */ new Date()).toISOString()
|
|
346
|
-
}).eq("id", command.id);
|
|
347
|
-
}
|
|
348
|
-
async function handleFsWrite(command, supabase) {
|
|
349
|
-
const { path, content } = command.payload;
|
|
350
|
-
const resolved = validatePath(path);
|
|
351
|
-
const dir = resolve2(resolved, "..");
|
|
352
|
-
if (!existsSync3(dir)) {
|
|
353
|
-
mkdirSync3(dir, { recursive: true });
|
|
354
|
-
}
|
|
355
|
-
writeFileSync2(resolved, content, "utf-8");
|
|
356
|
-
await supabase.from("commands").update({
|
|
357
|
-
status: "done",
|
|
358
|
-
result: { path: resolved, size: Buffer.byteLength(content) },
|
|
359
|
-
completed_at: (/* @__PURE__ */ new Date()).toISOString()
|
|
360
|
-
}).eq("id", command.id);
|
|
361
|
-
}
|
|
362
|
-
// List a directory inside the projects dir (optionally recursive, see
// listDir's depth cap) and report the entries on the command row.
async function handleFsList(command, supabase) {
  const payload = command.payload;
  const recursive = payload.recursive === undefined ? false : payload.recursive;
  const target = validatePath(payload.path);
  if (!existsSync3(target)) {
    throw new Error(`Directory not found: ${payload.path}`);
  }
  const entries = listDir(target, recursive);
  await supabase
    .from("commands")
    .update({
      status: "done",
      result: { entries, count: entries.length },
      completed_at: new Date().toISOString()
    })
    .eq("id", command.id);
}
|
|
375
|
-
// Delete a file or directory tree inside the projects dir and record the
// deleted path on the command row. Throws if the path does not exist.
async function handleFsDelete(command, supabase) {
  const requestedPath = command.payload.path;
  const target = validatePath(requestedPath);
  if (!existsSync3(target)) {
    throw new Error(`Path not found: ${requestedPath}`);
  }
  await rm(target, { recursive: true, force: true });
  await supabase
    .from("commands")
    .update({
      status: "done",
      result: { deleted: target },
      completed_at: new Date().toISOString()
    })
    .eq("id", command.id);
}
|
|
388
|
-
// Enumerate a directory, optionally recursing up to `maxDepth` levels,
// skipping heavyweight folders (node_modules, .git, .next).
// Each entry: { name, path, type: "file"|"directory", size, ext }.
// Errors (permissions, races) are swallowed so a partial listing is
// still returned rather than failing the whole walk.
function listDir(dirPath, recursive, maxDepth = 3, depth = 0) {
  const SKIP = ["node_modules", ".git", ".next"];
  const collected = [];
  let names;
  try {
    names = readdirSync(dirPath);
  } catch {
    return collected;
  }
  for (const name of names) {
    if (SKIP.includes(name)) continue;
    const childPath = join3(dirPath, name);
    let info;
    try {
      info = statSync(childPath);
    } catch {
      continue;
    }
    const isDir = info.isDirectory();
    collected.push({
      name,
      path: childPath,
      type: isDir ? "directory" : "file",
      size: info.size,
      // ext only for regular files; directories/specials get ""
      ext: info.isFile() ? extname(name) : ""
    });
    if (recursive && isDir && depth < maxDepth) {
      collected.push(...listDir(childPath, true, maxDepth, depth + 1));
    }
  }
  return collected;
}
|
|
415
|
-
|
|
416
|
-
// src/handlers/terminal.ts
|
|
417
|
-
import { spawn as spawn2 } from "child_process";
|
|
418
|
-
import { resolve as resolve3 } from "path";
|
|
419
|
-
// command.id -> spawned child process; lets handleTerminalKill stop a
// still-running handleTerminalExec. Entries are removed on close/error.
var activeProcesses = /* @__PURE__ */ new Map();
|
|
420
|
-
// Run a shell command inside the projects directory, streaming stdout and
// stderr rows into `command_stream` in 100ms batches, then record the exit
// status on the `commands` row.
//
// payload: { cmd, args = [], cwd (relative to projectsDir), timeout = 300000 ms }
// Rejects on spawn failure or timeout; resolves after the close-status
// update has been written.
async function handleTerminalExec(command, supabase) {
  const { cmd, args = [], cwd, timeout = 3e5 } = command.payload;
  if (!cmd) throw new Error("cmd is required");
  const config = loadConfig();
  const workDir = cwd ? resolve3(config.projectsDir, cwd) : config.projectsDir;
  let seq = 0;
  const batchInterval = 100;
  let batch = [];
  // Drain accumulated stream rows into the DB in one insert.
  const flushBatch = async () => {
    if (batch.length === 0) return;
    const toInsert = [...batch];
    batch = [];
    await supabase.from("command_stream").insert(toInsert);
  };
  const batchTimer = setInterval(flushBatch, batchInterval);
  return new Promise((resolvePromise, reject) => {
    // NOTE(review): shell:true means cmd/args are shell-interpreted. That is
    // the point of a remote terminal, but it implies payloads are fully
    // trusted — confirm commands can only originate from the authenticated
    // device owner.
    const child = spawn2(cmd, args, {
      cwd: workDir,
      shell: true,
      stdio: ["ignore", "pipe", "pipe"]
    });
    activeProcesses.set(command.id, child);
    const timer = setTimeout(() => {
      // SIGTERM the child and reject; the eventual "close" event still
      // clears the interval and records the (non-zero/null) exit status.
      child.kill("SIGTERM");
      reject(new Error(`Command timed out after ${timeout}ms`));
    }, timeout);
    const record = (type, text) => {
      batch.push({
        command_id: command.id,
        seq: seq++,
        type,
        data: { text }
      });
    };
    child.stdout?.on("data", (data) => record("stdout", data.toString()));
    child.stderr?.on("data", (data) => record("stderr", data.toString()));
    child.on("close", async (code) => {
      clearTimeout(timer);
      clearInterval(batchTimer);
      activeProcesses.delete(command.id);
      batch.push({
        command_id: command.id,
        seq: seq++,
        type: "done",
        data: { exit_code: code }
      });
      await flushBatch();
      await supabase.from("commands").update({
        status: code === 0 ? "done" : "error",
        result: { exit_code: code },
        completed_at: new Date().toISOString()
      }).eq("id", command.id);
      resolvePromise();
    });
    child.on("error", async (err) => {
      clearTimeout(timer);
      clearInterval(batchTimer);
      activeProcesses.delete(command.id);
      // Fix: flush any buffered rows before rejecting so partial output is
      // not silently dropped (matches handleClaudeQuery's error path).
      await flushBatch();
      reject(err);
    });
  });
}
|
|
488
|
-
// Kill a previously started terminal.exec process, identified by its
// command id, and record whether anything was actually running.
async function handleTerminalKill(command, supabase) {
  const targetId = command.payload.command_id;
  const running = activeProcesses.get(targetId);
  const killed = Boolean(running);
  if (killed) {
    running.kill("SIGTERM");
    activeProcesses.delete(targetId);
  }
  await supabase
    .from("commands")
    .update({
      status: "done",
      result: { killed },
      completed_at: new Date().toISOString()
    })
    .eq("id", command.id);
}
|
|
501
|
-
|
|
502
|
-
// src/handlers/claude.ts
|
|
503
|
-
import { spawn as spawn3 } from "child_process";
|
|
504
|
-
import { resolve as resolve4 } from "path";
|
|
505
|
-
// Run a one-shot `claude --print` query in a project directory, parsing its
// stream-json output line-by-line into `command_stream` rows (batched every
// 100ms), then record the exit status on the `commands` row.
// payload: { prompt (required), project_path (relative to projectsDir), model }
// Rejects if the claude process fails to spawn; resolves after the final
// status update is written.
async function handleClaudeQuery(command, supabase) {
  const {
    prompt,
    project_path,
    model
  } = command.payload;
  if (!prompt) throw new Error("prompt is required");
  const config = loadConfig();
  const cwd = project_path ? resolve4(config.projectsDir, project_path) : config.projectsDir;
  const args = [
    "--print",
    // non-interactive
    "--output-format",
    "stream-json"
    // structured streaming output
  ];
  if (model) args.push("--model", model);
  args.push("-p", prompt);
  // NOTE(review): with shell:true the args (including the remote-supplied
  // prompt) are interpreted by the shell — a prompt containing shell
  // metacharacters can inject commands. Confirm prompts are trusted, or
  // drop shell:true / pass the prompt via stdin.
  const proc = spawn3("claude", args, {
    cwd,
    shell: true,
    env: { ...process.env },
    // inherits Claude auth from ~/.claude
    stdio: ["pipe", "pipe", "pipe"]
  });
  let seq = 0;
  let batch = [];
  const batchInterval = 100;
  // Drain accumulated stream rows into the DB in one insert.
  const flushBatch = async () => {
    if (batch.length === 0) return;
    const toInsert = [...batch];
    batch = [];
    await supabase.from("command_stream").insert(toInsert);
  };
  const batchTimer = setInterval(flushBatch, batchInterval);
  // Line-buffer stdout: stream-json emits one JSON object per line, but a
  // data chunk may end mid-line, so the trailing fragment is carried over.
  // NOTE(review): proc.stdin is piped but never end()ed — presumably claude
  // ignores stdin when given -p, but verify it cannot block waiting on it.
  let buffer = "";
  proc.stdout.on("data", (chunk) => {
    buffer += chunk.toString();
    const lines = buffer.split("\n");
    buffer = lines.pop() || "";
    for (const line of lines) {
      if (!line.trim()) continue;
      try {
        const event = JSON.parse(line);
        batch.push({
          command_id: command.id,
          seq: seq++,
          type: event.type || "assistant",
          data: event
        });
      } catch {
        // Not JSON — record the raw line as plain stdout.
        batch.push({
          command_id: command.id,
          seq: seq++,
          type: "stdout",
          data: { text: line }
        });
      }
    }
  });
  proc.stderr.on("data", (chunk) => {
    batch.push({
      command_id: command.id,
      seq: seq++,
      type: "stderr",
      data: { text: chunk.toString() }
    });
  });
  return new Promise((promiseResolve, reject) => {
    proc.on("close", async (exitCode) => {
      // Emit any unterminated final line, then the "done" marker.
      if (buffer.trim()) {
        batch.push({
          command_id: command.id,
          seq: seq++,
          type: "stdout",
          data: { text: buffer }
        });
      }
      batch.push({
        command_id: command.id,
        seq: seq++,
        type: "done",
        data: { exit_code: exitCode }
      });
      clearInterval(batchTimer);
      await flushBatch();
      await supabase.from("commands").update({
        status: exitCode === 0 ? "done" : "error",
        result: { exit_code: exitCode },
        completed_at: (/* @__PURE__ */ new Date()).toISOString()
      }).eq("id", command.id);
      promiseResolve();
    });
    proc.on("error", async (err) => {
      // Spawn failure: stop batching, persist whatever was buffered, reject.
      clearInterval(batchTimer);
      await flushBatch();
      reject(err);
    });
  });
}
|
|
605
|
-
|
|
606
|
-
// Public surface of this bundle chunk: config helpers, supabase client
// accessors, the Relay, and the git / fs / terminal / claude handlers.
export {
  getConfigPath,
  configExists,
  loadConfig,
  saveConfig,
  updateConfig,
  getSupabase,
  resetSupabase,
  Relay,
  handleGitClone,
  handleGitStatus,
  handleGitPull,
  handleFsRead,
  handleFsWrite,
  handleFsList,
  handleFsDelete,
  handleTerminalExec,
  handleTerminalKill,
  handleClaudeQuery
};
|