@upend/cli 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +231 -0
- package/bin/cli.ts +48 -0
- package/package.json +26 -0
- package/src/commands/deploy.ts +67 -0
- package/src/commands/dev.ts +96 -0
- package/src/commands/infra.ts +227 -0
- package/src/commands/init.ts +323 -0
- package/src/commands/migrate.ts +64 -0
- package/src/config.ts +18 -0
- package/src/index.ts +2 -0
- package/src/lib/auth.ts +89 -0
- package/src/lib/db.ts +14 -0
- package/src/lib/exec.ts +38 -0
- package/src/lib/log.ts +16 -0
- package/src/lib/middleware.ts +51 -0
- package/src/services/claude/index.ts +507 -0
- package/src/services/claude/snapshots.ts +142 -0
- package/src/services/claude/worktree.ts +151 -0
- package/src/services/dashboard/public/index.html +888 -0
- package/src/services/gateway/auth-routes.ts +203 -0
- package/src/services/gateway/index.ts +64 -0
|
@@ -0,0 +1,507 @@
|
|
|
1
|
+
import { Hono } from "hono";
|
|
2
|
+
import { logger } from "hono/logger";
|
|
3
|
+
import { cors } from "hono/cors";
|
|
4
|
+
import { sql } from "../../lib/db";
|
|
5
|
+
import { verifyToken } from "../../lib/auth";
|
|
6
|
+
import { requireAuth } from "../../lib/middleware";
|
|
7
|
+
import { snapshot, listSnapshots, restoreSnapshot } from "./snapshots";
|
|
8
|
+
import { generateSessionName, createWorktree, commitWorktree, checkMergeable, mergeToLive, removeWorktree, getWorktreePath } from "./worktree";
|
|
9
|
+
import { existsSync, mkdirSync, readdirSync, writeFileSync, statSync } from "fs";
|
|
10
|
+
import { join, resolve, sep } from "path";
|
|
11
|
+
|
|
12
|
+
const app = new Hono();
|
|
13
|
+
app.use("*", logger());
|
|
14
|
+
app.use("*", cors());
|
|
15
|
+
|
|
16
|
+
const PROJECT_ROOT = process.env.UPEND_PROJECT || process.cwd();
|
|
17
|
+
const APPS_DIR = join(PROJECT_ROOT, "apps");
|
|
18
|
+
|
|
19
|
+
// preview endpoint — public (served in iframes, no auth header)
|
|
20
|
+
app.get("/preview/:session/*", async (c) => {
|
|
21
|
+
const sessionName = c.req.param("session");
|
|
22
|
+
const filePath = c.req.path.replace(`/preview/${sessionName}/`, "");
|
|
23
|
+
const fullPath = join(getWorktreePath(sessionName), filePath);
|
|
24
|
+
|
|
25
|
+
for (const candidate of [fullPath, join(fullPath, "index.html")]) {
|
|
26
|
+
if (existsSync(candidate) && statSync(candidate).isFile()) {
|
|
27
|
+
const file = Bun.file(candidate);
|
|
28
|
+
return new Response(file, {
|
|
29
|
+
headers: { "Content-Type": file.type || "text/html" },
|
|
30
|
+
});
|
|
31
|
+
}
|
|
32
|
+
}
|
|
33
|
+
return c.json({ error: "not found" }, 404);
|
|
34
|
+
});
|
|
35
|
+
|
|
36
|
+
// everything else requires auth
|
|
37
|
+
app.use("*", requireAuth);
|
|
38
|
+
|
|
39
|
+
// ---------- websocket clients ----------
|
|
40
|
+
|
|
41
|
+
const wsClients = new Map<number, Set<any>>(); // sessionId → Set<ws>
|
|
42
|
+
|
|
43
|
+
function broadcast(sessionId: number, msg: any) {
|
|
44
|
+
const clients = wsClients.get(sessionId);
|
|
45
|
+
if (!clients) return;
|
|
46
|
+
const data = JSON.stringify(msg);
|
|
47
|
+
for (const ws of clients) {
|
|
48
|
+
try { ws.send(data); } catch {}
|
|
49
|
+
}
|
|
50
|
+
}
|
|
51
|
+
|
|
52
|
+
// ---------- sessions ----------
|
|
53
|
+
|
|
54
|
+
app.post("/sessions", async (c) => {
|
|
55
|
+
const { prompt, force, title } = await c.req.json();
|
|
56
|
+
if (!prompt) return c.json({ error: "prompt is required" }, 400);
|
|
57
|
+
|
|
58
|
+
const user = c.get("user") as { sub: string; email: string };
|
|
59
|
+
|
|
60
|
+
const activeSessions = await sql`
|
|
61
|
+
SELECT es.*,
|
|
62
|
+
(SELECT sm.content FROM session_messages sm WHERE sm.session_id = es.id ORDER BY sm.created_at DESC LIMIT 1) as last_message
|
|
63
|
+
FROM editing_sessions es WHERE es.status = 'active' ORDER BY es.created_at DESC
|
|
64
|
+
`;
|
|
65
|
+
|
|
66
|
+
if (activeSessions.length > 0 && !force) {
|
|
67
|
+
return c.json({
|
|
68
|
+
error: "active_sessions",
|
|
69
|
+
message: "Active sessions exist. Creating a new one shares the codebase — rollback affects ALL sessions.",
|
|
70
|
+
activeSessions: activeSessions.map((s: any) => ({
|
|
71
|
+
id: s.id, prompt: s.prompt, snapshotName: s.snapshotName, createdAt: s.createdAt, lastMessage: s.lastMessage,
|
|
72
|
+
})),
|
|
73
|
+
options: {
|
|
74
|
+
force: "Send { force: true } to create anyway",
|
|
75
|
+
join: `POST /sessions/${activeSessions[0].id}/messages`,
|
|
76
|
+
},
|
|
77
|
+
}, 409);
|
|
78
|
+
}
|
|
79
|
+
|
|
80
|
+
console.log(`[session] new session for ${user.email}: "${prompt.slice(0, 80)}"`);
|
|
81
|
+
|
|
82
|
+
// create git worktree for isolated editing
|
|
83
|
+
const sessionName = generateSessionName();
|
|
84
|
+
const worktree = await createWorktree(sessionName);
|
|
85
|
+
console.log(`[session] worktree: ${sessionName} at ${worktree.path}`);
|
|
86
|
+
|
|
87
|
+
const claudeSessionId = crypto.randomUUID();
|
|
88
|
+
|
|
89
|
+
const [session] = await sql`
|
|
90
|
+
INSERT INTO editing_sessions (prompt, status, claude_session_id, snapshot_name, title, context)
|
|
91
|
+
VALUES (${prompt}, 'active', ${claudeSessionId}, ${sessionName}, ${title || null}, ${JSON.stringify({ root: worktree.path, worktree: sessionName, branch: worktree.branch })})
|
|
92
|
+
RETURNING *
|
|
93
|
+
`;
|
|
94
|
+
|
|
95
|
+
const [msg] = await sql`
|
|
96
|
+
INSERT INTO session_messages (session_id, role, content, status)
|
|
97
|
+
VALUES (${session.id}, 'user', ${prompt}, 'pending')
|
|
98
|
+
RETURNING *
|
|
99
|
+
`;
|
|
100
|
+
|
|
101
|
+
runMessage(Number(session.id), Number(msg.id), prompt, claudeSessionId, false, user, worktree.path);
|
|
102
|
+
|
|
103
|
+
return c.json({ session, message: msg, worktree: sessionName }, 201);
|
|
104
|
+
});
|
|
105
|
+
|
|
106
|
+
app.post("/sessions/:id/messages", async (c) => {
|
|
107
|
+
const sessionId = c.req.param("id");
|
|
108
|
+
const { prompt } = await c.req.json();
|
|
109
|
+
if (!prompt) return c.json({ error: "prompt is required" }, 400);
|
|
110
|
+
|
|
111
|
+
const [session] = await sql`SELECT * FROM editing_sessions WHERE id = ${sessionId}`;
|
|
112
|
+
if (!session) return c.json({ error: "session not found" }, 404);
|
|
113
|
+
if (session.status !== "active") return c.json({ error: `session is ${session.status}` }, 400);
|
|
114
|
+
|
|
115
|
+
const [running] = await sql`
|
|
116
|
+
SELECT id FROM session_messages WHERE session_id = ${sessionId} AND status = 'running'
|
|
117
|
+
`;
|
|
118
|
+
if (running) return c.json({ error: "a message is already running" }, 409);
|
|
119
|
+
|
|
120
|
+
const [msg] = await sql`
|
|
121
|
+
INSERT INTO session_messages (session_id, role, content, status)
|
|
122
|
+
VALUES (${sessionId}, 'user', ${prompt}, 'pending')
|
|
123
|
+
RETURNING *
|
|
124
|
+
`;
|
|
125
|
+
|
|
126
|
+
const user = c.get("user") as { sub: string; email: string };
|
|
127
|
+
const ctx = typeof session.context === 'string' ? JSON.parse(session.context) : session.context;
|
|
128
|
+
const worktreePath = ctx?.root || PROJECT_ROOT;
|
|
129
|
+
runMessage(Number(sessionId), Number(msg.id), prompt, session.claudeSessionId, true, user, worktreePath);
|
|
130
|
+
|
|
131
|
+
return c.json(msg, 201);
|
|
132
|
+
});
|
|
133
|
+
|
|
134
|
+
app.get("/sessions/:id", async (c) => {
|
|
135
|
+
const id = c.req.param("id");
|
|
136
|
+
const [session] = await sql`SELECT * FROM editing_sessions WHERE id = ${id}`;
|
|
137
|
+
if (!session) return c.json({ error: "not found" }, 404);
|
|
138
|
+
const messages = await sql`SELECT * FROM session_messages WHERE session_id = ${id} ORDER BY created_at`;
|
|
139
|
+
return c.json({ ...session, messages });
|
|
140
|
+
});
|
|
141
|
+
|
|
142
|
+
app.get("/sessions", async (c) => {
|
|
143
|
+
const rows = await sql`SELECT * FROM editing_sessions ORDER BY created_at DESC LIMIT 50`;
|
|
144
|
+
return c.json(rows);
|
|
145
|
+
});
|
|
146
|
+
|
|
147
|
+
app.post("/sessions/:id/end", async (c) => {
|
|
148
|
+
const id = c.req.param("id");
|
|
149
|
+
await sql`UPDATE editing_sessions SET status = 'ended' WHERE id = ${id}`;
|
|
150
|
+
activeProcesses.delete(Number(id));
|
|
151
|
+
return c.json({ ended: true });
|
|
152
|
+
});
|
|
153
|
+
|
|
154
|
+
app.post("/sessions/:id/kill", async (c) => {
|
|
155
|
+
const id = Number(c.req.param("id"));
|
|
156
|
+
const proc = activeProcesses.get(id);
|
|
157
|
+
if (!proc) return c.json({ error: "nothing running" }, 404);
|
|
158
|
+
proc.kill();
|
|
159
|
+
activeProcesses.delete(id);
|
|
160
|
+
await sql`UPDATE session_messages SET status = 'killed' WHERE session_id = ${id} AND status = 'running'`;
|
|
161
|
+
broadcast(id, { type: "status", status: "killed" });
|
|
162
|
+
return c.json({ killed: true });
|
|
163
|
+
});
|
|
164
|
+
|
|
165
|
+
// ---------- session commit (merge to live) ----------
|
|
166
|
+
|
|
167
|
+
// check if a session can merge cleanly
|
|
168
|
+
app.get("/sessions/:id/mergeable", async (c) => {
|
|
169
|
+
const id = c.req.param("id");
|
|
170
|
+
const [session] = await sql`SELECT * FROM editing_sessions WHERE id = ${id}`;
|
|
171
|
+
if (!session) return c.json({ error: "not found" }, 404);
|
|
172
|
+
|
|
173
|
+
const ctx = typeof session.context === 'string' ? JSON.parse(session.context) : session.context;
|
|
174
|
+
if (!ctx?.worktree) return c.json({ error: "session has no worktree" }, 400);
|
|
175
|
+
|
|
176
|
+
try {
|
|
177
|
+
// commit pending changes in worktree first
|
|
178
|
+
await commitWorktree(ctx.worktree, `session ${ctx.worktree}: auto-commit`);
|
|
179
|
+
const result = await checkMergeable(ctx.worktree);
|
|
180
|
+
return c.json(result);
|
|
181
|
+
} catch (err: any) {
|
|
182
|
+
return c.json({ mergeable: false, conflicts: [], error: err.message });
|
|
183
|
+
}
|
|
184
|
+
});
|
|
185
|
+
|
|
186
|
+
// commit session — merge worktree into live
|
|
187
|
+
app.post("/sessions/:id/commit", async (c) => {
|
|
188
|
+
const id = c.req.param("id");
|
|
189
|
+
const user = c.get("user") as { sub: string; email: string };
|
|
190
|
+
const [session] = await sql`SELECT * FROM editing_sessions WHERE id = ${id}`;
|
|
191
|
+
if (!session) return c.json({ error: "not found" }, 404);
|
|
192
|
+
if (session.status !== "active") return c.json({ error: `session is ${session.status}` }, 400);
|
|
193
|
+
|
|
194
|
+
const ctx = typeof session.context === 'string' ? JSON.parse(session.context) : session.context;
|
|
195
|
+
if (!ctx?.worktree) return c.json({ error: "session has no worktree" }, 400);
|
|
196
|
+
|
|
197
|
+
try {
|
|
198
|
+
// commit any pending changes
|
|
199
|
+
await commitWorktree(ctx.worktree, `session ${ctx.worktree}: final changes by ${user.email}`);
|
|
200
|
+
|
|
201
|
+
// merge into live
|
|
202
|
+
const result = await mergeToLive(ctx.worktree, user.email);
|
|
203
|
+
|
|
204
|
+
if (!result.success) {
|
|
205
|
+
return c.json({ error: "merge_conflict", message: result.message }, 409);
|
|
206
|
+
}
|
|
207
|
+
|
|
208
|
+
// mark session as committed
|
|
209
|
+
await sql`UPDATE editing_sessions SET status = 'committed' WHERE id = ${id}`;
|
|
210
|
+
|
|
211
|
+
// restart live services so changes take effect
|
|
212
|
+
restartServices();
|
|
213
|
+
|
|
214
|
+
console.log(`[session] ${ctx.worktree} committed to live by ${user.email}`);
|
|
215
|
+
return c.json({ committed: true, session: ctx.worktree, message: result.message });
|
|
216
|
+
} catch (err: any) {
|
|
217
|
+
console.error(`[session] commit failed:`, err);
|
|
218
|
+
return c.json({ error: err.message }, 500);
|
|
219
|
+
}
|
|
220
|
+
});
|
|
221
|
+
|
|
222
|
+
// ---------- apps ----------
|
|
223
|
+
|
|
224
|
+
// helper: resolve apps dir for live or session worktree
|
|
225
|
+
function resolveAppsDir(c: any): string {
|
|
226
|
+
const sessionName = c.req.query("session");
|
|
227
|
+
if (sessionName) {
|
|
228
|
+
const worktreeApps = join(getWorktreePath(sessionName), "apps");
|
|
229
|
+
if (existsSync(worktreeApps)) return worktreeApps;
|
|
230
|
+
}
|
|
231
|
+
return APPS_DIR;
|
|
232
|
+
}
|
|
233
|
+
|
|
234
|
+
app.get("/apps", async (c) => {
|
|
235
|
+
const appsDir = resolveAppsDir(c);
|
|
236
|
+
mkdirSync(appsDir, { recursive: true });
|
|
237
|
+
const apps = readdirSync(appsDir)
|
|
238
|
+
.filter((f) => statSync(join(appsDir, f)).isDirectory())
|
|
239
|
+
.map((name) => ({ name, url: `/apps/${name}/`, created: statSync(join(appsDir, name)).birthtime }));
|
|
240
|
+
return c.json(apps);
|
|
241
|
+
});
|
|
242
|
+
|
|
243
|
+
app.post("/apps", async (c) => {
|
|
244
|
+
const { name, files } = await c.req.json();
|
|
245
|
+
if (!name) return c.json({ error: "name is required" }, 400);
|
|
246
|
+
if (!files || typeof files !== "object") return c.json({ error: "provide files: { 'index.html': '...' }" }, 400);
|
|
247
|
+
|
|
248
|
+
const appDir = join(APPS_DIR, name);
|
|
249
|
+
mkdirSync(appDir, { recursive: true });
|
|
250
|
+
for (const [filename, content] of Object.entries(files)) {
|
|
251
|
+
const filePath = join(appDir, filename);
|
|
252
|
+
mkdirSync(join(filePath, ".."), { recursive: true });
|
|
253
|
+
writeFileSync(filePath, content as string);
|
|
254
|
+
}
|
|
255
|
+
return c.json({ app: name, url: `/apps/${name}/`, files: Object.keys(files), live: true }, 201);
|
|
256
|
+
});
|
|
257
|
+
|
|
258
|
+
// serve app files from a session worktree (preview before commit)
|
|
259
|
+
app.post("/apps/generate", async (c) => {
|
|
260
|
+
const { name, prompt } = await c.req.json();
|
|
261
|
+
if (!name || !prompt) return c.json({ error: "name and prompt required" }, 400);
|
|
262
|
+
|
|
263
|
+
const appDir = join(APPS_DIR, name);
|
|
264
|
+
mkdirSync(appDir, { recursive: true });
|
|
265
|
+
|
|
266
|
+
const metaPrompt = `Create a self-contained web app in the directory ${appDir}.
|
|
267
|
+
The app will be served as static files at /apps/${name}/ — it needs at minimum an index.html.
|
|
268
|
+
It can talk to the API at /api/ (same origin). Auth tokens are in localStorage as 'upend_token'.
|
|
269
|
+
Use Bearer token in Authorization headers. API endpoints:
|
|
270
|
+
- POST /api/auth/signup, /api/auth/login — { email, password } → { user, token }
|
|
271
|
+
- GET/POST/PATCH/DELETE /api/data/:table(/:id) — CRUD (requires auth)
|
|
272
|
+
Keep it simple. No build step. Vanilla JS unless the prompt asks otherwise.
|
|
273
|
+
User's request: ${prompt}`;
|
|
274
|
+
|
|
275
|
+
Bun.spawn(
|
|
276
|
+
["claude", "-p", metaPrompt, "--output-format", "stream-json", "--verbose", "--dangerously-skip-permissions"],
|
|
277
|
+
{ cwd: PROJECT_ROOT, env: { ...process.env, CLAUDE_CODE_ENTRYPOINT: "upend" }, stdout: "inherit", stderr: "inherit" }
|
|
278
|
+
);
|
|
279
|
+
|
|
280
|
+
return c.json({ app: name, url: `/apps/${name}/`, status: "generating" }, 202);
|
|
281
|
+
});
|
|
282
|
+
|
|
283
|
+
// ---------- snapshots / rollback ----------
|
|
284
|
+
|
|
285
|
+
app.get("/snapshots", async (c) => {
|
|
286
|
+
const snaps = await listSnapshots(PROJECT_ROOT);
|
|
287
|
+
return c.json(snaps);
|
|
288
|
+
});
|
|
289
|
+
|
|
290
|
+
app.post("/rollback", async (c) => {
|
|
291
|
+
const { snapshot: snapName, restoreDb } = await c.req.json();
|
|
292
|
+
if (!snapName) return c.json({ error: "snapshot name required" }, 400);
|
|
293
|
+
const safety = await snapshot(PROJECT_ROOT);
|
|
294
|
+
await restoreSnapshot(PROJECT_ROOT, snapName, { restoreDb: restoreDb !== false });
|
|
295
|
+
restartServices();
|
|
296
|
+
return c.json({ rolledBack: snapName, safetySnapshot: safety });
|
|
297
|
+
});
|
|
298
|
+
|
|
299
|
+
// ---------- claude process management ----------
|
|
300
|
+
|
|
301
|
+
const activeProcesses = new Map<number, ReturnType<typeof Bun.spawn>>();
|
|
302
|
+
|
|
303
|
+
async function runMessage(
|
|
304
|
+
sessionId: number,
|
|
305
|
+
messageId: number,
|
|
306
|
+
prompt: string,
|
|
307
|
+
claudeSessionId: string,
|
|
308
|
+
isResume: boolean,
|
|
309
|
+
user: { sub: string; email: string },
|
|
310
|
+
cwd: string = PROJECT_ROOT
|
|
311
|
+
) {
|
|
312
|
+
try {
|
|
313
|
+
await sql`UPDATE session_messages SET status = 'running' WHERE id = ${messageId}`;
|
|
314
|
+
broadcast(sessionId, { type: "status", status: "running", messageId });
|
|
315
|
+
console.log(`[claude:${sessionId}] message ${messageId} → running (user: ${user.email})`);
|
|
316
|
+
|
|
317
|
+
const userContext = [
|
|
318
|
+
`The current user is ${user.email} with id ${user.sub}.`,
|
|
319
|
+
`When inserting records that have an owner_id column, always set owner_id to '${user.sub}'.`,
|
|
320
|
+
`IMPORTANT: You are working in a SESSION WORKTREE at ${cwd}.`,
|
|
321
|
+
`ALL file changes MUST be made inside ${cwd}. NEVER write to /opt/upend/ directly.`,
|
|
322
|
+
`Apps go in ${cwd}/apps/, migrations in ${cwd}/migrations/, services in ${cwd}/services/.`,
|
|
323
|
+
`Your changes will be previewed and then published to live when the user clicks publish.`,
|
|
324
|
+
].join(' ');
|
|
325
|
+
|
|
326
|
+
const args = [
|
|
327
|
+
"claude", "-p", prompt,
|
|
328
|
+
"--output-format", "stream-json",
|
|
329
|
+
"--verbose",
|
|
330
|
+
"--dangerously-skip-permissions",
|
|
331
|
+
"--append-system-prompt", userContext,
|
|
332
|
+
];
|
|
333
|
+
if (isResume) {
|
|
334
|
+
args.push("--resume", claudeSessionId);
|
|
335
|
+
} else {
|
|
336
|
+
args.push("--session-id", claudeSessionId);
|
|
337
|
+
}
|
|
338
|
+
|
|
339
|
+
console.log(`[claude:${sessionId}] spawning: ${args.join(" ")}`);
|
|
340
|
+
console.log(`[claude:${sessionId}] cwd: ${cwd}`);
|
|
341
|
+
|
|
342
|
+
const proc = Bun.spawn(args, {
|
|
343
|
+
cwd,
|
|
344
|
+
env: { ...process.env, CLAUDE_CODE_ENTRYPOINT: "upend" },
|
|
345
|
+
stdout: "pipe",
|
|
346
|
+
stderr: "pipe",
|
|
347
|
+
});
|
|
348
|
+
|
|
349
|
+
console.log(`[claude:${sessionId}] pid: ${proc.pid}`);
|
|
350
|
+
activeProcesses.set(sessionId, proc);
|
|
351
|
+
|
|
352
|
+
// stderr → console
|
|
353
|
+
const stderrReader = proc.stderr.getReader();
|
|
354
|
+
(async () => {
|
|
355
|
+
while (true) {
|
|
356
|
+
const { done, value } = await stderrReader.read();
|
|
357
|
+
if (done) break;
|
|
358
|
+
console.error(`[claude:${sessionId}:stderr] ${new TextDecoder().decode(value)}`);
|
|
359
|
+
}
|
|
360
|
+
})();
|
|
361
|
+
|
|
362
|
+
// stdout → parse stream-json, store chunks in DB, broadcast to WS
|
|
363
|
+
const reader = proc.stdout.getReader();
|
|
364
|
+
const decoder = new TextDecoder();
|
|
365
|
+
let fullOutput = "";
|
|
366
|
+
let resultText = "";
|
|
367
|
+
|
|
368
|
+
while (true) {
|
|
369
|
+
const { done, value } = await reader.read();
|
|
370
|
+
if (done) break;
|
|
371
|
+
const chunk = decoder.decode(value);
|
|
372
|
+
fullOutput += chunk;
|
|
373
|
+
|
|
374
|
+
for (const line of chunk.split("\n").filter(Boolean)) {
|
|
375
|
+
console.log(`[claude:${sessionId}:out] ${line.slice(0, 200)}`);
|
|
376
|
+
|
|
377
|
+
try {
|
|
378
|
+
const evt = JSON.parse(line);
|
|
379
|
+
|
|
380
|
+
// extract text content from assistant messages
|
|
381
|
+
if (evt.type === "assistant" && evt.message?.content) {
|
|
382
|
+
for (const block of evt.message.content) {
|
|
383
|
+
if (block.type === "text") {
|
|
384
|
+
resultText += block.text;
|
|
385
|
+
// update DB with partial result as it streams
|
|
386
|
+
await sql`UPDATE session_messages SET result = ${resultText} WHERE id = ${messageId}`;
|
|
387
|
+
broadcast(sessionId, { type: "text", text: block.text, messageId });
|
|
388
|
+
} else if (block.type === "tool_use") {
|
|
389
|
+
broadcast(sessionId, { type: "tool_use", name: block.name, input: block.input, messageId });
|
|
390
|
+
}
|
|
391
|
+
}
|
|
392
|
+
}
|
|
393
|
+
|
|
394
|
+
// final result
|
|
395
|
+
if (evt.type === "result") {
|
|
396
|
+
resultText = evt.result || resultText;
|
|
397
|
+
}
|
|
398
|
+
} catch {
|
|
399
|
+
// non-JSON line, ignore
|
|
400
|
+
}
|
|
401
|
+
}
|
|
402
|
+
}
|
|
403
|
+
|
|
404
|
+
const exitCode = await proc.exited;
|
|
405
|
+
activeProcesses.delete(sessionId);
|
|
406
|
+
console.log(`[claude:${sessionId}] exited code ${exitCode}, ${fullOutput.length} bytes`);
|
|
407
|
+
|
|
408
|
+
if (exitCode !== 0) {
|
|
409
|
+
// parse error from stream-json if possible
|
|
410
|
+
let errorDetail = resultText || fullOutput;
|
|
411
|
+
try {
|
|
412
|
+
for (const line of fullOutput.split("\n").filter(Boolean)) {
|
|
413
|
+
const evt = JSON.parse(line);
|
|
414
|
+
if (evt.type === "result" && evt.is_error) {
|
|
415
|
+
errorDetail = evt.error || evt.subtype || `${evt.subtype} (session: ${evt.session_id})`;
|
|
416
|
+
}
|
|
417
|
+
}
|
|
418
|
+
} catch {}
|
|
419
|
+
const errMsg = `claude error: ${errorDetail}`;
|
|
420
|
+
console.error(`[claude:${sessionId}] FULL OUTPUT:\n${fullOutput}`);
|
|
421
|
+
console.error(`[claude:${sessionId}] ERROR: ${errMsg}`);
|
|
422
|
+
await sql`UPDATE session_messages SET status = 'error', result = ${errMsg} WHERE id = ${messageId}`;
|
|
423
|
+
broadcast(sessionId, { type: "status", status: "error", error: errMsg, messageId });
|
|
424
|
+
return;
|
|
425
|
+
}
|
|
426
|
+
|
|
427
|
+
await sql`UPDATE session_messages SET status = 'complete', result = ${resultText} WHERE id = ${messageId}`;
|
|
428
|
+
broadcast(sessionId, { type: "status", status: "complete", messageId });
|
|
429
|
+
console.log(`[claude:${sessionId}] complete: "${resultText.slice(0, 100)}"`);
|
|
430
|
+
|
|
431
|
+
restartServices();
|
|
432
|
+
|
|
433
|
+
} catch (err: any) {
|
|
434
|
+
console.error(`[claude:${sessionId}] EXCEPTION:`, err);
|
|
435
|
+
activeProcesses.delete(sessionId);
|
|
436
|
+
await sql`UPDATE session_messages SET status = 'error', result = ${err.message} WHERE id = ${messageId}`;
|
|
437
|
+
broadcast(sessionId, { type: "status", status: "error", error: err.message, messageId });
|
|
438
|
+
}
|
|
439
|
+
}
|
|
440
|
+
|
|
441
|
+
function restartServices() {
|
|
442
|
+
console.log("[restart] restarting non-claude services...");
|
|
443
|
+
const cliRoot = new URL("../../../", import.meta.url).pathname;
|
|
444
|
+
Bun.spawn(["bash", "-c", `
|
|
445
|
+
pkill -f "gateway/index.ts" 2>/dev/null || true
|
|
446
|
+
sleep 1
|
|
447
|
+
cd ${PROJECT_ROOT}
|
|
448
|
+
nohup bun --watch ${cliRoot}/src/services/gateway/index.ts > /tmp/upend-api.log 2>&1 &
|
|
449
|
+
echo "api restarted"
|
|
450
|
+
`], {
|
|
451
|
+
env: { ...process.env, API_PORT: "3001", UPEND_PROJECT: PROJECT_ROOT },
|
|
452
|
+
stdout: "inherit",
|
|
453
|
+
stderr: "inherit",
|
|
454
|
+
});
|
|
455
|
+
}
|
|
456
|
+
|
|
457
|
+
// ---------- bun server with websocket support ----------
|
|
458
|
+
|
|
459
|
+
const port = Number(process.env.CLAUDE_PORT) || 3002;
|
|
460
|
+
|
|
461
|
+
const server = Bun.serve({
|
|
462
|
+
port,
|
|
463
|
+
fetch: async (req, server) => {
|
|
464
|
+
const url = new URL(req.url);
|
|
465
|
+
|
|
466
|
+
// WebSocket upgrade: /ws/:sessionId?token=xxx
|
|
467
|
+
if (url.pathname.startsWith("/ws/")) {
|
|
468
|
+
const sessionId = Number(url.pathname.split("/")[2]);
|
|
469
|
+
const token = url.searchParams.get("token");
|
|
470
|
+
|
|
471
|
+
if (!token || !sessionId) {
|
|
472
|
+
return new Response("missing token or session id", { status: 401 });
|
|
473
|
+
}
|
|
474
|
+
|
|
475
|
+
try {
|
|
476
|
+
const payload = await verifyToken(token);
|
|
477
|
+
console.log(`[ws] upgrade: session ${sessionId}, user ${payload.email}`);
|
|
478
|
+
const upgraded = server.upgrade(req, { data: { sessionId, email: payload.email } });
|
|
479
|
+
if (upgraded) return undefined as any;
|
|
480
|
+
return new Response("upgrade failed", { status: 500 });
|
|
481
|
+
} catch {
|
|
482
|
+
return new Response("invalid token", { status: 401 });
|
|
483
|
+
}
|
|
484
|
+
}
|
|
485
|
+
|
|
486
|
+
// everything else → hono
|
|
487
|
+
return app.fetch(req, { ip: server.requestIP(req) });
|
|
488
|
+
},
|
|
489
|
+
websocket: {
|
|
490
|
+
open(ws) {
|
|
491
|
+
const { sessionId } = ws.data as { sessionId: number };
|
|
492
|
+
if (!wsClients.has(sessionId)) wsClients.set(sessionId, new Set());
|
|
493
|
+
wsClients.get(sessionId)!.add(ws);
|
|
494
|
+
console.log(`[ws] connected: session ${sessionId} (${wsClients.get(sessionId)!.size} clients)`);
|
|
495
|
+
},
|
|
496
|
+
message(ws, msg) {
|
|
497
|
+
// client can send ping, we don't need anything else
|
|
498
|
+
},
|
|
499
|
+
close(ws) {
|
|
500
|
+
const { sessionId } = ws.data as { sessionId: number };
|
|
501
|
+
wsClients.get(sessionId)?.delete(ws);
|
|
502
|
+
console.log(`[ws] disconnected: session ${sessionId}`);
|
|
503
|
+
},
|
|
504
|
+
},
|
|
505
|
+
});
|
|
506
|
+
|
|
507
|
+
console.log(`[claude] running on :${port} (http + ws)`);
|
|
@@ -0,0 +1,142 @@
|
|
|
1
|
+
import { existsSync, mkdirSync, readdirSync, statSync, writeFileSync } from "fs";
|
|
2
|
+
import { join } from "path";
|
|
3
|
+
|
|
4
|
+
const SNAPSHOTS_DIR = ".snapshots";
|
|
5
|
+
|
|
6
|
+
function snapshotsPath(projectRoot: string) {
|
|
7
|
+
return join(projectRoot, SNAPSHOTS_DIR);
|
|
8
|
+
}
|
|
9
|
+
|
|
10
|
+
// create a timestamped snapshot of files + database
|
|
11
|
+
export async function snapshot(projectRoot: string): Promise<string> {
|
|
12
|
+
const dir = snapshotsPath(projectRoot);
|
|
13
|
+
mkdirSync(dir, { recursive: true });
|
|
14
|
+
|
|
15
|
+
const name = `snap-${new Date().toISOString().replace(/[:.]/g, "-")}`;
|
|
16
|
+
const dest = join(dir, name);
|
|
17
|
+
mkdirSync(dest, { recursive: true });
|
|
18
|
+
|
|
19
|
+
// 1. snapshot files
|
|
20
|
+
const rsync = Bun.spawn(
|
|
21
|
+
[
|
|
22
|
+
"rsync", "-a",
|
|
23
|
+
"--exclude", SNAPSHOTS_DIR,
|
|
24
|
+
"--exclude", "node_modules",
|
|
25
|
+
"--exclude", ".env.keys",
|
|
26
|
+
`${projectRoot}/`,
|
|
27
|
+
`${dest}/files/`,
|
|
28
|
+
],
|
|
29
|
+
{ stdout: "inherit", stderr: "inherit" }
|
|
30
|
+
);
|
|
31
|
+
const rsyncExit = await rsync.exited;
|
|
32
|
+
if (rsyncExit !== 0) throw new Error(`file snapshot failed`);
|
|
33
|
+
|
|
34
|
+
// 2. snapshot database via pg_dump (non-fatal if pg_dump is missing)
|
|
35
|
+
const dbUrl = process.env.DATABASE_URL;
|
|
36
|
+
if (dbUrl) {
|
|
37
|
+
try {
|
|
38
|
+
const dumpFile = join(dest, "db.sql");
|
|
39
|
+
const pgDump = Bun.spawn(
|
|
40
|
+
["pg_dump", "--no-owner", "--no-privileges", dbUrl],
|
|
41
|
+
{ stdout: "pipe", stderr: "pipe" }
|
|
42
|
+
);
|
|
43
|
+
const dumpOutput = await new Response(pgDump.stdout).text();
|
|
44
|
+
const dumpExit = await pgDump.exited;
|
|
45
|
+
|
|
46
|
+
if (dumpExit === 0 && dumpOutput.length > 0) {
|
|
47
|
+
writeFileSync(dumpFile, dumpOutput);
|
|
48
|
+
console.log(`[snapshot] db dump: ${(dumpOutput.length / 1024).toFixed(1)}KB`);
|
|
49
|
+
} else {
|
|
50
|
+
const stderr = await new Response(pgDump.stderr).text();
|
|
51
|
+
console.warn(`[snapshot] pg_dump failed (non-fatal): ${stderr}`);
|
|
52
|
+
}
|
|
53
|
+
} catch (err: any) {
|
|
54
|
+
console.warn(`[snapshot] pg_dump unavailable (non-fatal): ${err.message}`);
|
|
55
|
+
}
|
|
56
|
+
}
|
|
57
|
+
|
|
58
|
+
await pruneSnapshots(projectRoot, 10);
|
|
59
|
+
console.log(`[snapshot] created ${name}`);
|
|
60
|
+
return name;
|
|
61
|
+
}
|
|
62
|
+
|
|
63
|
+
// list available snapshots
|
|
64
|
+
export async function listSnapshots(projectRoot: string) {
|
|
65
|
+
const dir = snapshotsPath(projectRoot);
|
|
66
|
+
if (!existsSync(dir)) return [];
|
|
67
|
+
|
|
68
|
+
return readdirSync(dir)
|
|
69
|
+
.filter((f) => f.startsWith("snap-"))
|
|
70
|
+
.map((name) => {
|
|
71
|
+
const stat = statSync(join(dir, name));
|
|
72
|
+
const hasDb = existsSync(join(dir, name, "db.sql"));
|
|
73
|
+
return { name, created: stat.birthtime, hasDb };
|
|
74
|
+
})
|
|
75
|
+
.sort((a, b) => b.created.getTime() - a.created.getTime());
|
|
76
|
+
}
|
|
77
|
+
|
|
78
|
+
// restore a snapshot — files + optionally database
|
|
79
|
+
export async function restoreSnapshot(
|
|
80
|
+
projectRoot: string,
|
|
81
|
+
name: string,
|
|
82
|
+
opts: { restoreDb?: boolean } = { restoreDb: true }
|
|
83
|
+
) {
|
|
84
|
+
const snapDir = join(snapshotsPath(projectRoot), name);
|
|
85
|
+
if (!existsSync(snapDir)) throw new Error(`snapshot '${name}' not found`);
|
|
86
|
+
|
|
87
|
+
// 1. restore files
|
|
88
|
+
const filesDir = join(snapDir, "files");
|
|
89
|
+
if (existsSync(filesDir)) {
|
|
90
|
+
const proc = Bun.spawn(
|
|
91
|
+
[
|
|
92
|
+
"rsync", "-a", "--delete",
|
|
93
|
+
"--exclude", SNAPSHOTS_DIR,
|
|
94
|
+
"--exclude", "node_modules",
|
|
95
|
+
"--exclude", ".env.keys",
|
|
96
|
+
"--exclude", ".env",
|
|
97
|
+
`${filesDir}/`,
|
|
98
|
+
`${projectRoot}/`,
|
|
99
|
+
],
|
|
100
|
+
{ stdout: "inherit", stderr: "inherit" }
|
|
101
|
+
);
|
|
102
|
+
const exitCode = await proc.exited;
|
|
103
|
+
if (exitCode !== 0) throw new Error(`file restore failed`);
|
|
104
|
+
console.log(`[snapshot] files restored from ${name}`);
|
|
105
|
+
}
|
|
106
|
+
|
|
107
|
+
// 2. restore database
|
|
108
|
+
const dumpFile = join(snapDir, "db.sql");
|
|
109
|
+
if (opts.restoreDb && existsSync(dumpFile)) {
|
|
110
|
+
const dbUrl = process.env.DATABASE_URL;
|
|
111
|
+
if (!dbUrl) throw new Error("DATABASE_URL not set, cannot restore db");
|
|
112
|
+
|
|
113
|
+
// drop and recreate all tables, then restore
|
|
114
|
+
// psql with the dump file
|
|
115
|
+
const proc = Bun.spawn(
|
|
116
|
+
["psql", dbUrl, "-f", dumpFile],
|
|
117
|
+
{
|
|
118
|
+
stdout: "pipe",
|
|
119
|
+
stderr: "pipe",
|
|
120
|
+
env: { ...process.env, PGOPTIONS: "--client-min-messages=warning" },
|
|
121
|
+
}
|
|
122
|
+
);
|
|
123
|
+
const exitCode = await proc.exited;
|
|
124
|
+
if (exitCode !== 0) {
|
|
125
|
+
const stderr = await new Response(proc.stderr).text();
|
|
126
|
+
throw new Error(`db restore failed: ${stderr}`);
|
|
127
|
+
}
|
|
128
|
+
console.log(`[snapshot] db restored from ${name}`);
|
|
129
|
+
}
|
|
130
|
+
}
|
|
131
|
+
|
|
132
|
+
async function pruneSnapshots(projectRoot: string, keep: number) {
|
|
133
|
+
const snaps = await listSnapshots(projectRoot);
|
|
134
|
+
for (const snap of snaps.slice(keep)) {
|
|
135
|
+
const proc = Bun.spawn(
|
|
136
|
+
["rm", "-rf", join(snapshotsPath(projectRoot), snap.name)],
|
|
137
|
+
{ stdout: "inherit", stderr: "inherit" }
|
|
138
|
+
);
|
|
139
|
+
await proc.exited;
|
|
140
|
+
console.log(`[snapshot] pruned ${snap.name}`);
|
|
141
|
+
}
|
|
142
|
+
}
|