claudeboard 2.16.0 → 3.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +89 -93
- package/bin/cli.js +198 -238
- package/bin/init-context.js +22 -0
- package/package.json +22 -43
- package/public/app.js +1411 -0
- package/public/index.html +250 -0
- package/public/style.css +1872 -0
- package/src/context-template.md +20 -0
- package/src/notifier.js +65 -0
- package/src/orchestrator.js +800 -0
- package/src/scanner.js +153 -0
- package/src/server.js +205 -0
- package/src/store.js +182 -0
- package/src/verifier.js +131 -0
- package/agents/architect.js +0 -166
- package/agents/board-client.js +0 -126
- package/agents/claude-api.js +0 -124
- package/agents/claude-resolver.js +0 -167
- package/agents/developer.js +0 -224
- package/agents/expo-health.js +0 -727
- package/agents/orchestrator.js +0 -306
- package/agents/qa.js +0 -336
- package/dashboard/index.html +0 -1980
- package/dashboard/server.js +0 -412
- package/sql/setup.sql +0 -57
- package/tools/filesystem.js +0 -95
- package/tools/screenshot.js +0 -74
- package/tools/supabase-reader.js +0 -74
- package/tools/terminal.js +0 -63
package/dashboard/server.js
DELETED
|
@@ -1,412 +0,0 @@
|
|
|
1
|
-
import express from "express";
|
|
2
|
-
import cors from "cors";
|
|
3
|
-
import { createServer } from "http";
|
|
4
|
-
import { WebSocketServer } from "ws";
|
|
5
|
-
import { createClient } from "@supabase/supabase-js";
|
|
6
|
-
import path from "path";
|
|
7
|
-
import { fileURLToPath } from "url";
|
|
8
|
-
import fs from "fs";
|
|
9
|
-
import { spawn } from "child_process";
|
|
10
|
-
import { createRequire } from "module";
|
|
11
|
-
import { createConnection } from "net";
|
|
12
|
-
|
|
13
|
-
const __dirname = path.dirname(fileURLToPath(import.meta.url));
|
|
14
|
-
const require = createRequire(import.meta.url);
|
|
15
|
-
|
|
16
|
-
const app = express();
|
|
17
|
-
const server = createServer(app);
|
|
18
|
-
|
|
19
|
-
// Two WS servers: one for board events, one for terminal
|
|
20
|
-
const boardWss = new WebSocketServer({ noServer: true });
|
|
21
|
-
const termWss = new WebSocketServer({ noServer: true });
|
|
22
|
-
|
|
23
|
-
// Route upgrade requests
|
|
24
|
-
// Route WebSocket upgrade requests: /terminal goes to the PTY bridge,
// every other path feeds the board event stream.
server.on("upgrade", (req, socket, head) => {
  const target = req.url === "/terminal" ? termWss : boardWss;
  target.handleUpgrade(req, socket, head, (ws) => target.emit("connection", ws, req));
});
|
|
31
|
-
|
|
32
|
-
app.use(cors());
|
|
33
|
-
app.use(express.json());
|
|
34
|
-
|
|
35
|
-
const PORT = process.env.PORT || 3131;
|
|
36
|
-
const PROJECT = process.env.PROJECT_NAME || "default";
|
|
37
|
-
const PROJECT_DIR = process.env.PROJECT_DIR || process.cwd();
|
|
38
|
-
const SUPABASE_URL = process.env.SUPABASE_URL;
|
|
39
|
-
const SUPABASE_KEY = process.env.SUPABASE_KEY;
|
|
40
|
-
|
|
41
|
-
const supabase = createClient(SUPABASE_URL, SUPABASE_KEY);
|
|
42
|
-
|
|
43
|
-
// ── STATE ─────────────────────────────────────────────────────────────────────
|
|
44
|
-
let expoProcess = null;
|
|
45
|
-
let expoStatus = "stopped"; // stopped | installing | starting | running | error
|
|
46
|
-
let expoQR = null;
|
|
47
|
-
let expoUrl = null;
|
|
48
|
-
|
|
49
|
-
// ── BOARD BROADCAST ───────────────────────────────────────────────────────────
|
|
50
|
-
// Fan a board event out to every connected board WebSocket client.
// readyState 1 === OPEN; half-closed sockets are skipped.
function broadcast(event, data) {
  const payload = JSON.stringify({ event, data, ts: Date.now() });
  for (const client of boardWss.clients) {
    if (client.readyState === 1) client.send(payload);
  }
}
|
|
54
|
-
|
|
55
|
-
// Push the current Expo dev-server snapshot (status / QR / URL) to all board clients.
function broadcastExpoStatus() {
  const snapshot = { status: expoStatus, qr: expoQR, url: expoUrl };
  broadcast("expo_status", snapshot);
}
|
|
58
|
-
|
|
59
|
-
// ── SUPABASE REALTIME ─────────────────────────────────────────────────────────
|
|
60
|
-
// Mirror Supabase realtime changes onto the board WebSocket so the UI stays
// live without polling.
supabase
  .channel("cb_changes")
  .on("postgres_changes", { event: "*", schema: "public", table: "cb_tasks" }, (payload) => {
    if (payload.eventType === "INSERT" && payload.new) {
      broadcast("task_added", payload.new);
    } else if (payload.eventType === "UPDATE" && payload.new) {
      broadcast("task_update", payload.new);
    } else if (payload.eventType === "DELETE") {
      broadcast("task_deleted", { id: payload.old?.id });
    }
  })
  .on("postgres_changes", { event: "*", schema: "public", table: "cb_logs" }, (payload) => broadcast("log", payload.new))
  .subscribe();
|
|
69
|
-
|
|
70
|
-
// ── TERMINAL (xterm.js via WebSocket + node-pty) ──────────────────────────────
|
|
71
|
-
// Terminal bridge: each /terminal WebSocket gets its own shell process.
// Preferred path uses the optional node-pty module for a real TTY (colors,
// resize, job control); if that require() throws (module not installed or
// native build missing), we fall back to a plain child_process shell.
termWss.on("connection", (ws) => {
  let pty = null;

  try {
    // Try node-pty for full PTY support
    const nodePty = require("node-pty");
    pty = nodePty.spawn(process.env.SHELL || "bash", [], {
      name: "xterm-256color",
      cols: 120,
      rows: 40,
      cwd: PROJECT_DIR,
      env: {
        ...process.env,
        SUPABASE_URL,
        SUPABASE_ACCESS_TOKEN: process.env.SUPABASE_ACCESS_TOKEN || "",
        TERM: "xterm-256color",
      },
    });

    // Shell output / exit → client, guarded so a closed socket never throws.
    pty.onData((data) => { if (ws.readyState === 1) ws.send(JSON.stringify({ type: "output", data })); });
    pty.onExit(() => { if (ws.readyState === 1) ws.send(JSON.stringify({ type: "exit" })); });

    // Client frames: "input" writes keystrokes, "resize" adjusts the PTY grid.
    ws.on("message", (raw) => {
      try {
        const msg = JSON.parse(raw);
        if (msg.type === "input") pty.write(msg.data);
        if (msg.type === "resize") pty.resize(msg.cols, msg.rows);
      } catch {} // malformed frames are ignored
    });

    ws.on("close", () => { try { pty.kill(); } catch {} });

  } catch {
    // Fallback: simple shell without PTY (no colors but functional)
    const shell = spawn(process.env.SHELL || "bash", [], {
      cwd: PROJECT_DIR,
      env: { ...process.env, SUPABASE_URL, TERM: "dumb" },
    });

    // NOTE(review): unlike the PTY branch, these sends are not guarded by a
    // readyState check — confirm whether late output after close matters.
    shell.stdout.on("data", (d) => ws.send(JSON.stringify({ type: "output", data: d.toString() })));
    shell.stderr.on("data", (d) => ws.send(JSON.stringify({ type: "output", data: d.toString() })));
    shell.on("close", () => ws.send(JSON.stringify({ type: "exit" })));

    // Fallback shell has no PTY, so only "input" is honored (no resize).
    ws.on("message", (raw) => {
      try {
        const msg = JSON.parse(raw);
        if (msg.type === "input") shell.stdin.write(msg.data);
      } catch {}
    });

    ws.on("close", () => shell.kill());

    // Send welcome message (only the fallback branch sends this).
    ws.send(JSON.stringify({
      type: "output",
      data: `\r\n\x1b[33m[ClaudeBoard Terminal]\x1b[0m — Project: ${PROJECT_DIR}\r\n` +
        `\x1b[2mTip: Run 'npx supabase ...' for Supabase CLI commands\x1b[0m\r\n\r\n`,
    }));
  }
});
|
|
131
|
-
|
|
132
|
-
// ── EXPO MANAGEMENT ───────────────────────────────────────────────────────────
|
|
133
|
-
|
|
134
|
-
// On server start, check if Expo is already running on the port
|
|
135
|
-
// (e.g. started by claudeboard run) and mark it as running
|
|
136
|
-
// Called shortly after boot: if an Expo dev server is already listening on
// EXPO_PORT (e.g. launched by `claudeboard run`), attach to it instead of
// leaving the status at "stopped".
async function detectExistingExpo() {
  const port = parseInt(process.env.EXPO_PORT || "8081");
  if (!(await isPortOpen(port))) return;

  expoStatus = "running";
  expoUrl = `exp://localhost:${port}`;
  broadcast("expo_log", { message: `✓ Detected existing Expo on port ${port} — ready to scan` });
  broadcastExpoStatus();
  console.log(` Expo already running on port ${port} — attached`);
}
|
|
147
|
-
|
|
148
|
-
/**
 * Probe whether a TCP server is accepting connections on 127.0.0.1:port.
 * Resolves true on a successful connect, false on error or after a 600 ms
 * timeout. Never rejects.
 *
 * Fix: the error and timeout paths previously left the probe socket alive —
 * a stalled connection attempt lingered and kept the event loop busy. All
 * exit paths now destroy the socket.
 *
 * @param {number} port - TCP port to probe.
 * @returns {Promise<boolean>} true iff something is listening on the port.
 */
function isPortOpen(port) {
  return new Promise((resolve) => {
    const sock = createConnection({ port, host: "127.0.0.1" });
    sock.setTimeout(600);
    // Single exit helper: tear the socket down no matter how we finish.
    const finish = (open) => { sock.destroy(); resolve(open); };
    sock.once("connect", () => finish(true));
    sock.once("error", () => finish(false));
    sock.once("timeout", () => finish(false));
  });
}
|
|
157
|
-
|
|
158
|
-
// GET expo status
|
|
159
|
-
// GET /api/expo/status — current Expo dev-server state snapshot.
app.get("/api/expo/status", (req, res) => {
  const snapshot = { status: expoStatus, qr: expoQR, url: expoUrl };
  res.json(snapshot);
});
|
|
162
|
-
|
|
163
|
-
// POST expo/start — smart start: attach if already running, otherwise install + start
|
|
164
|
-
// POST /api/expo/start — smart start: reuse this server's managed process,
// attach to an Expo already listening on the port, or install deps and boot
// a fresh one.
app.post("/api/expo/start", async (req, res) => {
  const port = parseInt(process.env.EXPO_PORT || "8081");

  // Already managed by this server
  if (expoProcess) {
    return res.json({ ok: true, message: "Already running", status: expoStatus, url: expoUrl });
  }

  // Check if something is already listening on the port (e.g. started by claudeboard run)
  if (await isPortOpen(port)) {
    expoStatus = "running";
    expoUrl = `exp://localhost:${port}`;
    broadcastExpoStatus();
    broadcast("expo_log", { message: `✓ Attached to existing Expo on port ${port}` });
    return res.json({ ok: true, message: "Attached to existing Expo", url: expoUrl });
  }

  // Respond immediately; the install/start pipeline streams progress over WS.
  res.json({ ok: true, message: "Starting Expo..." });

  // Fix: _startExpo is async and was fired without a rejection handler, so an
  // install/spawn failure surfaced as an unhandled promise rejection. Report
  // failures through the normal status/log channels instead.
  _startExpo(PROJECT_DIR).catch((err) => {
    expoStatus = "error";
    broadcastExpoStatus();
    broadcast("expo_log", { message: `Expo start failed: ${err.message}` });
  });
});
|
|
185
|
-
|
|
186
|
-
// POST expo/stop
|
|
187
|
-
// POST /api/expo/stop — terminate the managed Expo process (if any) and
// reset all Expo state back to "stopped".
app.post("/api/expo/stop", (req, res) => {
  if (expoProcess) {
    try {
      expoProcess.kill("SIGTERM");
    } catch {
      // Process may already be gone — nothing to do.
    }
    expoProcess = null;
  }
  expoStatus = "stopped";
  expoQR = null;
  expoUrl = null;
  broadcastExpoStatus();
  res.json({ ok: true });
});
|
|
198
|
-
|
|
199
|
-
// Install project dependencies, then boot `expo start --tunnel` in fully
// non-interactive mode, streaming progress to the board via expo_log events
// and updating expoStatus / expoUrl / expoProcess as milestones are hit.
async function _startExpo(projectDir) {
  const stripAnsi = (s) => s.replace(/\x1b\[[0-9;]*m/g, "");

  // Step 1: npm install. Expo projects almost always have peer-dependency
  // conflicts, so --legacy-peer-deps is expected rather than a workaround.
  expoStatus = "installing";
  broadcastExpoStatus();
  broadcast("expo_log", { message: "Installing dependencies (--legacy-peer-deps)..." });

  await new Promise((done) => {
    const installer = spawn("npm", ["install", "--legacy-peer-deps"], {
      cwd: projectDir,
      stdio: "pipe",
      env: { ...process.env },
    });
    installer.stdout.on("data", (chunk) => {
      const line = chunk.toString().trim();
      // Suppress the (very noisy) npm warning stream.
      if (line && !line.startsWith("npm warn")) broadcast("expo_log", { message: line });
    });
    installer.stderr.on("data", (chunk) => {
      const line = chunk.toString().trim();
      // Only surface real errors, not peer-dep resolution chatter.
      if (line && line.includes("npm error") && !line.includes("ERESOLVE")) {
        broadcast("expo_log", { message: line });
      }
    });
    installer.on("close", done);
  });

  broadcast("expo_log", { message: "✓ Dependencies ready. Starting Expo tunnel..." });

  // Step 2: expo start --tunnel, fully non-interactive.
  expoStatus = "starting";
  broadcastExpoStatus();

  const expo = spawn("npx", ["expo", "start", "--tunnel", "--non-interactive"], {
    cwd: projectDir,
    stdio: "pipe",
    env: {
      ...process.env,
      CI: "1", // Prevents "use port X instead?" prompts
      EXPO_NO_DOTENV: "0",
      EXPO_NO_INTERACTIVE: "1", // Belt + suspenders non-interactive
      TERM: "dumb", // No ANSI color codes in output
    },
  });

  expoProcess = expo;

  expo.stdout.on("data", (chunk) => {
    const text = chunk.toString();
    const clean = stripAnsi(text).trim();
    if (clean) broadcast("expo_log", { message: clean });

    // Watch the log stream for connection URLs.
    const expMatch = text.match(/exp:\/\/[^\s\]]+/);
    if (expMatch) { expoUrl = expMatch[0]; expoStatus = "running"; broadcastExpoStatus(); }

    const tunnelMatch = text.match(/https:\/\/[a-z0-9-]+\.exp\.direct[^\s\]]*/);
    if (tunnelMatch) { expoUrl = tunnelMatch[0]; expoStatus = "running"; broadcastExpoStatus(); }

    if (text.includes("scan") || text.includes("QR")) {
      broadcast("expo_log", { message: "📱 QR ready — open Expo Go and scan" });
    }
  });

  expo.stderr.on("data", (chunk) => {
    const text = stripAnsi(chunk.toString()).trim();
    if (text) broadcast("expo_log", { message: text });
  });

  expo.on("close", (code) => {
    expoProcess = null;
    // A nonzero exit before we ever reached "running" is a startup failure.
    if (code !== 0 && expoStatus !== "running") expoStatus = "error";
    broadcastExpoStatus();
    broadcast("expo_log", { message: `Expo exited (code ${code})` });
  });
}
|
|
275
|
-
|
|
276
|
-
// ── SUPABASE QUERY API ────────────────────────────────────────────────────────
|
|
277
|
-
// POST /api/supabase/query — run arbitrary SQL through the execute_sql RPC.
// NOTE(review): this forwards raw SQL from the request body to the database;
// acceptable only because the dashboard is a trusted local dev tool.
app.post("/api/supabase/query", async (req, res) => {
  const { sql } = req.body;
  if (!sql) return res.status(400).json({ error: "No SQL provided" });

  try {
    const { data, error } = await supabase.rpc("execute_sql", { query: sql });
    if (error) return res.status(400).json({ error: error.message });
    res.json({ data });
  } catch (err) {
    res.status(500).json({ error: err.message });
  }
});
|
|
289
|
-
|
|
290
|
-
// ── BOARD API ROUTES (unchanged) ──────────────────────────────────────────────
|
|
291
|
-
|
|
292
|
-
// GET /api/board — full board snapshot: epics (tasks sorted by priority),
// orphan tasks grouped under a virtual "Manual Tasks" epic, and the 50 most
// recent log entries.
app.get("/api/board", async (req, res) => {
  // The three queries are independent — run them in parallel instead of
  // awaiting each one serially.
  const [{ data: epics }, { data: orphanTasks }, { data: logs }] = await Promise.all([
    supabase
      .from("cb_epics").select("*, cb_tasks(*)")
      .eq("project", PROJECT).order("created_at"),
    // Orphan tasks (manually created, no epic_id)
    supabase
      .from("cb_tasks").select("*")
      .eq("project", PROJECT).is("epic_id", null)
      .order("priority_order", { ascending: true }),
    supabase
      .from("cb_logs").select("*").eq("project", PROJECT)
      .order("created_at", { ascending: false }).limit(50),
  ]);

  // Sort tasks within each epic by priority_order (missing order sorts last).
  const sortedEpics = (epics || []).map((epic) => ({
    ...epic,
    cb_tasks: (epic.cb_tasks || []).sort((a, b) => (a.priority_order ?? 99) - (b.priority_order ?? 99)),
  }));

  // Orphan tasks appear as a virtual "Manual Tasks" epic
  if (orphanTasks?.length) {
    sortedEpics.push({ id: "manual", name: "— Manual Tasks", cb_tasks: orphanTasks });
  }

  res.json({ epics: sortedEpics, logs: logs || [], project: PROJECT });
});
|
|
319
|
-
|
|
320
|
-
// ── Task workflow + CRUD routes ───────────────────────────────────────────────

// GET /api/tasks/next — the highest-priority task still in "todo".
app.get("/api/tasks/next", async (req, res) => {
  const { data } = await supabase.from("cb_tasks").select("*")
    .eq("project", PROJECT).eq("status", "todo")
    .order("priority_order", { ascending: true }).limit(1).single();
  if (!data) return res.json({ task: null, message: "All tasks complete! 🎉" });
  res.json({ task: data });
});

// POST /api/tasks/:id/start — mark in_progress and stamp started_at.
app.post("/api/tasks/:id/start", async (req, res) => {
  const { id } = req.params;
  await supabase.from("cb_tasks")
    .update({ status: "in_progress", started_at: new Date().toISOString() })
    .eq("id", id);
  if (req.body.log) await addLog(id, req.body.log, "start");
  broadcast("task_started", { id });
  res.json({ ok: true });
});

// POST /api/tasks/:id/log — append a progress note to the task's feed.
app.post("/api/tasks/:id/log", async (req, res) => {
  await addLog(req.params.id, req.body.message, "progress");
  res.json({ ok: true });
});

// POST /api/tasks/:id/complete — mark done and stamp completed_at.
app.post("/api/tasks/:id/complete", async (req, res) => {
  const { id } = req.params;
  await supabase.from("cb_tasks")
    .update({ status: "done", completed_at: new Date().toISOString() })
    .eq("id", id);
  if (req.body.log) await addLog(id, req.body.log, "complete");
  broadcast("task_complete", { id });
  res.json({ ok: true });
});

// POST /api/tasks/:id/fail — flag the task as errored.
app.post("/api/tasks/:id/fail", async (req, res) => {
  const { id } = req.params;
  await supabase.from("cb_tasks").update({ status: "error" }).eq("id", id);
  if (req.body.log) await addLog(id, req.body.log, "error");
  broadcast("task_failed", { id });
  res.json({ ok: true });
});

// POST /api/tasks — create a manual task (optionally attached to an epic).
app.post("/api/tasks", async (req, res) => {
  const { title, description, priority, type, epic_id } = req.body;
  const priorityOrder = { high: 1, medium: 2, low: 3 };
  const row = {
    project: PROJECT,
    epic_id: epic_id || null,
    title,
    description,
    priority: priority || "medium",
    priority_order: priorityOrder[priority] || 2,
    type: type || "feature",
    status: "todo",
  };
  const { data, error } = await supabase.from("cb_tasks").insert(row).select().single();
  if (error) return res.status(400).json({ error: error.message });
  broadcast("task_added", data);
  res.json({ task: data });
});

// PATCH /api/tasks/:id — partial update with whatever fields the client sent.
app.patch("/api/tasks/:id", async (req, res) => {
  await supabase.from("cb_tasks").update(req.body).eq("id", req.params.id);
  broadcast("task_update", { id: req.params.id, ...req.body });
  res.json({ ok: true });
});

// DELETE /api/tasks/:id — remove the task and its log entries.
app.delete("/api/tasks/:id", async (req, res) => {
  await supabase.from("cb_logs").delete().eq("task_id", req.params.id);
  await supabase.from("cb_tasks").delete().eq("id", req.params.id);
  broadcast("task_deleted", { id: req.params.id });
  res.json({ ok: true });
});
|
|
383
|
-
|
|
384
|
-
// POST reorder — receives ordered array of task ids for a column, renumbers them
|
|
385
|
-
// POST /api/tasks/reorder — receives the full ordered list of task ids for a
// column and renumbers priority_order to match (1-based).
app.post("/api/tasks/reorder", async (req, res) => {
  const { taskIds } = req.body; // array of ids in new order
  // Robustness fix: a missing/non-array body previously threw inside the
  // handler (unhandled rejection); reject it explicitly.
  if (!Array.isArray(taskIds)) {
    return res.status(400).json({ error: "taskIds must be an array" });
  }
  // Each update targets a distinct row, so issue them in parallel instead of
  // one awaited round-trip per task.
  await Promise.all(taskIds.map((id, i) =>
    supabase.from("cb_tasks")
      .update({ priority_order: i + 1 })
      .eq("id", id)
  ));
  broadcast("task_reordered", { taskIds });
  res.json({ ok: true });
});
|
|
395
|
-
|
|
396
|
-
// GET /api/tasks/:id/logs — full log history for one task, oldest first.
app.get("/api/tasks/:id/logs", async (req, res) => {
  const { data } = await supabase.from("cb_logs").select("*")
    .eq("task_id", req.params.id).order("created_at");
  res.json({ logs: data || [] });
});

// Every non-API path serves the single-page dashboard.
app.get("*", (req, res) => res.sendFile(path.join(__dirname, "index.html")));
|
|
403
|
-
|
|
404
|
-
// Append one entry to the cb_logs activity feed for the given task.
async function addLog(taskId, message, type = "info") {
  const row = { project: PROJECT, task_id: taskId, message, type };
  await supabase.from("cb_logs").insert(row);
}
|
|
407
|
-
|
|
408
|
-
// Boot the combined HTTP + WebSocket server, then (after a short grace
// period) look for an Expo dev server that is already running so we can
// attach to it instead of reporting "stopped".
server.listen(PORT, () => {
  console.log(`READY on port ${PORT}`);
  // Check if Expo is already running (started by claudeboard run)
  setTimeout(detectExistingExpo, 1000);
});
|
package/sql/setup.sql
DELETED
|
@@ -1,57 +0,0 @@
|
|
|
1
|
-
-- ClaudeBoard — Supabase setup script.
-- Paste into the Supabase SQL Editor and run once per project database.

-- Epics: named groupings of tasks.
CREATE TABLE IF NOT EXISTS cb_epics (
  id uuid PRIMARY KEY DEFAULT gen_random_uuid(),
  project text NOT NULL,
  name text NOT NULL,
  created_at timestamptz DEFAULT now()
);

-- Tasks: the kanban cards themselves.
CREATE TABLE IF NOT EXISTS cb_tasks (
  id uuid PRIMARY KEY DEFAULT gen_random_uuid(),
  project text NOT NULL,
  epic_id uuid REFERENCES cb_epics(id) ON DELETE SET NULL,
  title text NOT NULL,
  description text,
  status text NOT NULL DEFAULT 'todo',      -- todo | in_progress | done | error | blocked
  priority text DEFAULT 'medium',           -- high | medium | low
  priority_order int DEFAULT 2,
  type text DEFAULT 'feature',              -- feature | bug | refactor | test | config
  started_at timestamptz,
  completed_at timestamptz,
  created_at timestamptz DEFAULT now()
);

-- Logs: the activity feed attached to tasks.
CREATE TABLE IF NOT EXISTS cb_logs (
  id uuid PRIMARY KEY DEFAULT gen_random_uuid(),
  project text NOT NULL,
  task_id uuid REFERENCES cb_tasks(id) ON DELETE CASCADE,
  message text NOT NULL,
  type text DEFAULT 'info',                 -- start | progress | complete | error | info
  created_at timestamptz DEFAULT now()
);

-- Indexes for the hot query paths: per-project boards, status filters,
-- priority ordering, and per-task log lookups.
CREATE INDEX IF NOT EXISTS idx_tasks_project ON cb_tasks(project);
CREATE INDEX IF NOT EXISTS idx_tasks_status ON cb_tasks(status);
CREATE INDEX IF NOT EXISTS idx_tasks_priority ON cb_tasks(priority_order);
CREATE INDEX IF NOT EXISTS idx_logs_task ON cb_logs(task_id);
CREATE INDEX IF NOT EXISTS idx_logs_project ON cb_logs(project);

-- Realtime: the dashboard subscribes to postgres_changes on these tables.
ALTER PUBLICATION supabase_realtime ADD TABLE cb_tasks;
ALTER PUBLICATION supabase_realtime ADD TABLE cb_logs;
ALTER PUBLICATION supabase_realtime ADD TABLE cb_epics;

-- Row Level Security — intentionally permissive, for local development only.
ALTER TABLE cb_epics ENABLE ROW LEVEL SECURITY;
ALTER TABLE cb_tasks ENABLE ROW LEVEL SECURITY;
ALTER TABLE cb_logs ENABLE ROW LEVEL SECURITY;

CREATE POLICY "allow_all_epics" ON cb_epics FOR ALL USING (true) WITH CHECK (true);
CREATE POLICY "allow_all_tasks" ON cb_tasks FOR ALL USING (true) WITH CHECK (true);
CREATE POLICY "allow_all_logs" ON cb_logs FOR ALL USING (true) WITH CHECK (true);
|
package/tools/filesystem.js
DELETED
|
@@ -1,95 +0,0 @@
|
|
|
1
|
-
import fs from "fs";
|
|
2
|
-
import path from "path";
|
|
3
|
-
|
|
4
|
-
/**
|
|
5
|
-
* Read a file, return content string or null
|
|
6
|
-
*/
|
|
7
|
-
/**
 * Read a file as UTF-8 text.
 * @param {string} filePath - Path to read.
 * @returns {string|null} File contents, or null when the file cannot be read.
 */
export function readFile(filePath) {
  let contents = null;
  try {
    contents = fs.readFileSync(filePath, "utf8");
  } catch {
    // Unreadable (missing, permissions, directory, …) — signalled via null.
  }
  return contents;
}
|
|
14
|
-
|
|
15
|
-
/**
|
|
16
|
-
* Write a file (creates dirs if needed)
|
|
17
|
-
*/
|
|
18
|
-
/**
 * Write UTF-8 text to a file, creating parent directories as needed.
 * @param {string} filePath - Destination path.
 * @param {string} content - Text to write.
 */
export function writeFile(filePath, content) {
  const parent = path.dirname(filePath);
  fs.mkdirSync(parent, { recursive: true });
  fs.writeFileSync(filePath, content, "utf8");
}
|
|
22
|
-
|
|
23
|
-
/**
|
|
24
|
-
* List files in a directory recursively (ignores node_modules, .git, etc)
|
|
25
|
-
*/
|
|
26
|
-
/**
 * Recursively list files under a directory, skipping common build/VCS folders
 * (node_modules, .git, .expo, dist, build, .next, coverage).
 * @param {string} dir - Root directory to scan.
 * @param {string[]|null} extensions - Keep only files whose name ends with
 *   one of these suffixes; null keeps every file.
 * @param {number} maxDepth - Maximum directory depth to descend (root = 0).
 * @returns {string[]} Full paths of matching files, in pre-order scan order.
 */
export function listFiles(dir, extensions = null, maxDepth = 4) {
  const SKIP = new Set(["node_modules", ".git", ".expo", "dist", "build", ".next", "coverage"]);
  const found = [];

  const descend = (folder, depth) => {
    if (depth > maxDepth) return;
    let entries;
    try {
      entries = fs.readdirSync(folder, { withFileTypes: true });
    } catch {
      return; // unreadable directory — skip it silently
    }
    for (const entry of entries) {
      if (SKIP.has(entry.name)) continue;
      const full = path.join(folder, entry.name);
      if (entry.isDirectory()) {
        descend(full, depth + 1);
      } else if (!extensions || extensions.some((ext) => entry.name.endsWith(ext))) {
        found.push(full);
      }
    }
  };

  descend(dir, 0);
  return found;
}
|
|
49
|
-
|
|
50
|
-
/**
|
|
51
|
-
* Get a compact project tree string (for LLM context)
|
|
52
|
-
*/
|
|
53
|
-
/**
 * Render a compact ASCII tree of a project directory (for LLM context),
 * skipping node_modules, .git, .expo, dist, build and .next.
 * @param {string} dir - Root directory.
 * @param {number} maxDepth - Maximum depth to render (root's entries = 0).
 * @returns {string} Tree rendering, one entry per line, rooted at basename(dir).
 */
export function projectTree(dir, maxDepth = 3) {
  const SKIP = new Set(["node_modules", ".git", ".expo", "dist", "build", ".next"]);
  const output = [path.basename(dir) + "/"];

  const render = (folder, depth, prefix) => {
    if (depth > maxDepth) return;
    let entries;
    try {
      entries = fs.readdirSync(folder, { withFileTypes: true });
    } catch {
      return; // unreadable directory — render nothing beneath it
    }
    const visible = entries.filter((e) => !SKIP.has(e.name));
    visible.forEach((entry, idx) => {
      const last = idx === visible.length - 1;
      output.push(prefix + (last ? "└── " : "├── ") + entry.name);
      if (entry.isDirectory()) {
        // Continuation prefix keeps the vertical guides aligned under parents.
        render(path.join(folder, entry.name), depth + 1, prefix + (last ? "    " : "│   "));
      }
    });
  };

  render(dir, 0, "");
  return output.join("\n");
}
|
|
81
|
-
|
|
82
|
-
/**
|
|
83
|
-
* Read multiple files and return as context string
|
|
84
|
-
*/
|
|
85
|
-
/**
 * Read multiple files and concatenate them into a fenced-markdown context
 * string (one "### relpath" section per file, content truncated to 3000
 * characters). Unreadable files are skipped.
 *
 * Fix: files that exist but are empty were previously dropped because the
 * falsy check `!content` also matched "". Only a null read (unreadable file)
 * is skipped now; empty files get an (empty) section.
 *
 * @param {string[]} filePaths - Paths of files to include.
 * @param {string} projectRoot - Root used to compute the displayed relative path.
 * @returns {string} Context sections joined by blank lines.
 */
export function readFilesAsContext(filePaths, projectRoot) {
  return filePaths
    .map((fp) => {
      const content = readFile(fp);
      if (content === null) return null; // unreadable — skip (empty files are kept)
      const rel = path.relative(projectRoot, fp);
      return `### ${rel}\n\`\`\`\n${content.slice(0, 3000)}\n\`\`\``;
    })
    .filter(Boolean)
    .join("\n\n");
}
|
package/tools/screenshot.js
DELETED
|
@@ -1,74 +0,0 @@
|
|
|
1
|
-
import { runCommand, waitForPort } from "./terminal.js";
|
|
2
|
-
import fs from "fs";
|
|
3
|
-
import path from "path";
|
|
4
|
-
|
|
5
|
-
// Lazily-loaded puppeteer module; stays null until the first successful import.
let puppeteer = null;

// Resolve the optional puppeteer dependency on first use. Returns the module,
// or null when puppeteer is not installed.
async function getPuppeteer() {
  if (puppeteer) return puppeteer;
  try {
    ({ default: puppeteer } = await import("puppeteer"));
  } catch {
    return null; // optional dependency absent
  }
  return puppeteer;
}
|
|
18
|
-
|
|
19
|
-
/**
|
|
20
|
-
* Take a screenshot of a running Expo Web app
|
|
21
|
-
* Returns { success, imagePath, base64, error }
|
|
22
|
-
*/
|
|
23
|
-
/**
 * Take a screenshot of a running Expo Web app using a mobile viewport.
 * @param {number} port - Local port the Expo web server listens on.
 * @param {string} outputDir - Directory the PNG is written into.
 * @param {string} pagePath - Route to open (defaults to "/").
 * @returns {Promise<{success: boolean, imagePath?: string, base64?: string,
 *   url?: string, error?: string}>} Result descriptor; never throws.
 */
export async function screenshotExpoWeb(port = 8081, outputDir, pagePath = "/") {
  const pptr = await getPuppeteer();
  if (!pptr) {
    return { success: false, error: "puppeteer not installed — run: npm install puppeteer" };
  }

  const url = `http://localhost:${port}${pagePath}`;
  const filename = `screenshot-${Date.now()}.png`;
  const imagePath = path.join(outputDir, filename);
  fs.mkdirSync(outputDir, { recursive: true });

  let browser;
  try {
    browser = await pptr.launch({
      headless: "new",
      args: ["--no-sandbox", "--disable-setuid-sandbox"],
    });
    const page = await browser.newPage();

    // Mobile viewport — Expo apps are mobile-first
    await page.setViewport({ width: 390, height: 844, deviceScaleFactor: 2 });
    await page.goto(url, { waitUntil: "networkidle2", timeout: 30000 });

    // Give React a moment to render after the network settles.
    await new Promise((r) => setTimeout(r, 2000));

    await page.screenshot({ path: imagePath, fullPage: false });
    const base64 = fs.readFileSync(imagePath).toString("base64");
    return { success: true, imagePath, base64, url };
  } catch (err) {
    return { success: false, error: err.message };
  } finally {
    if (browser) await browser.close();
  }
}
|
|
63
|
-
|
|
64
|
-
/**
|
|
65
|
-
* Take multiple screenshots navigating through routes
|
|
66
|
-
*/
|
|
67
|
-
/**
 * Screenshot several routes of a running Expo Web app, one after another
 * (each capture launches and tears down its own headless browser).
 * @param {number} port - Local Expo web port.
 * @param {string[]} routes - Routes to capture.
 * @param {string} outputDir - Directory PNGs are written into.
 * @returns {Promise<Array>} One result object per route ({ route, ...result }).
 */
export async function screenshotRoutes(port = 8081, routes = ["/"], outputDir) {
  const captures = [];
  for (const route of routes) {
    const shot = await screenshotExpoWeb(port, outputDir, route);
    captures.push({ route, ...shot });
  }
  return captures;
}
|