claudeboard 1.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,197 @@
1
+ import express from "express";
2
+ import cors from "cors";
3
+ import { createServer } from "http";
4
+ import { WebSocketServer } from "ws";
5
+ import { createClient } from "@supabase/supabase-js";
6
+ import path from "path";
7
+ import { fileURLToPath } from "url";
8
+ import fs from "fs";
9
+
10
+ const __dirname = path.dirname(fileURLToPath(import.meta.url));
11
+
12
+ const app = express();
13
+ const server = createServer(app);
14
+ const wss = new WebSocketServer({ server });
15
+
16
+ app.use(cors());
17
+ app.use(express.json());
18
+
19
+ const PORT = process.env.PORT || 3131;
20
+ const PROJECT = process.env.PROJECT_NAME || "default";
21
+
22
+ const supabase = createClient(
23
+ process.env.SUPABASE_URL,
24
+ process.env.SUPABASE_KEY
25
+ );
26
+
27
// Push an event to every connected WebSocket client.
// Payload shape: { event, data, ts } serialized as JSON.
function broadcast(event, data) {
  const payload = JSON.stringify({ event, data, ts: Date.now() });
  for (const socket of wss.clients) {
    // readyState 1 === OPEN; skip sockets that are connecting/closing.
    if (socket.readyState === 1) {
      socket.send(payload);
    }
  }
}
34
+
35
// Subscribe to Supabase realtime: relay Postgres change events for the
// task and log tables to dashboard clients over WebSocket.
const changes = supabase.channel("cb_changes");
changes.on(
  "postgres_changes",
  { event: "*", schema: "public", table: "cb_tasks" },
  (payload) => broadcast("task_update", payload)
);
changes.on(
  "postgres_changes",
  { event: "*", schema: "public", table: "cb_logs" },
  (payload) => broadcast("log", payload.new)
);
changes.subscribe();
45
+
46
+ // ─── API ROUTES ───────────────────────────────────────────────────────────────
47
+
48
// GET the whole board: epics with their nested tasks, plus the 50 most
// recent activity-log entries for this project.
app.get("/api/board", async (req, res) => {
  const epicsQuery = supabase
    .from("cb_epics")
    .select("*, cb_tasks(*)")
    .eq("project", PROJECT)
    .order("created_at");

  const logsQuery = supabase
    .from("cb_logs")
    .select("*")
    .eq("project", PROJECT)
    .order("created_at", { ascending: false })
    .limit(50);

  // The two queries are independent, so run them concurrently.
  const [{ data: epics }, { data: logs }] = await Promise.all([epicsQuery, logsQuery]);

  res.json({ epics: epics ?? [], logs: logs ?? [], project: PROJECT });
});
65
+
66
// GET the next pending task: lowest priority_order among "todo" rows.
app.get("/api/tasks/next", async (req, res) => {
  const { data: task } = await supabase
    .from("cb_tasks")
    .select("*")
    .eq("project", PROJECT)
    .eq("status", "todo")
    .order("priority_order", { ascending: true })
    .limit(1)
    .single();

  if (task) {
    res.json({ task });
  } else {
    // .single() yields no data when the todo queue is empty.
    res.json({ task: null, message: "All tasks complete! 🎉" });
  }
});
80
+
81
// POST start task: mark it in_progress, optionally append a log entry,
// and announce the transition over WebSocket.
// Body: { log?: string }
app.post("/api/tasks/:id/start", async (req, res) => {
  const { id } = req.params;
  const { log } = req.body;

  const { error } = await supabase
    .from("cb_tasks")
    .update({ status: "in_progress", started_at: new Date().toISOString() })
    .eq("id", id);

  // Previously the update error was discarded and the route reported
  // ok:true even when the row was never touched — surface it instead.
  if (error) return res.status(500).json({ error: error.message });

  if (log) await addLog(id, log, "start");
  broadcast("task_started", { id });
  res.json({ ok: true });
});
95
+
96
// POST log progress for a task. Body: { message: string } — required,
// since cb_logs.message is NOT NULL; previously a missing message was
// inserted as NULL and the constraint failure was silently swallowed.
app.post("/api/tasks/:id/log", async (req, res) => {
  const { id } = req.params;
  const { message } = req.body;
  if (!message) return res.status(400).json({ error: "message is required" });
  await addLog(id, message, "progress");
  res.json({ ok: true });
});
103
+
104
// POST complete task: mark it done, optionally append a log entry, and
// announce the transition over WebSocket.
// Body: { log?: string }
app.post("/api/tasks/:id/complete", async (req, res) => {
  const { id } = req.params;
  const { log } = req.body;

  const { error } = await supabase
    .from("cb_tasks")
    .update({ status: "done", completed_at: new Date().toISOString() })
    .eq("id", id);

  // Report DB failures instead of claiming success unconditionally.
  if (error) return res.status(500).json({ error: error.message });

  if (log) await addLog(id, log, "complete");
  broadcast("task_complete", { id });
  res.json({ ok: true });
});
118
+
119
// POST fail task: mark it as errored, optionally append a log entry,
// and announce the transition over WebSocket.
// Body: { log?: string }
app.post("/api/tasks/:id/fail", async (req, res) => {
  const { id } = req.params;
  const { log } = req.body;

  const { error } = await supabase
    .from("cb_tasks")
    .update({ status: "error" })
    .eq("id", id);

  // Report DB failures instead of claiming success unconditionally.
  if (error) return res.status(500).json({ error: error.message });

  if (log) await addLog(id, log, "error");
  broadcast("task_failed", { id });
  res.json({ ok: true });
});
133
+
134
// POST add new task manually (from dashboard or agent).
// Body: { title, description?, priority?, type?, epic_id? }
app.post("/api/tasks", async (req, res) => {
  const { title, description, priority, type, epic_id } = req.body;

  // Label -> sort key used by GET /api/tasks/next (unknown labels -> 2).
  const PRIORITY_ORDER = { high: 1, medium: 2, low: 3 };

  const row = {
    project: PROJECT,
    epic_id: epic_id || null,
    title,
    description,
    priority: priority || "medium",
    priority_order: PRIORITY_ORDER[priority] || 2,
    type: type || "feature",
    status: "todo",
  };

  const { data, error } = await supabase
    .from("cb_tasks")
    .insert(row)
    .select()
    .single();

  if (error) return res.status(400).json({ error: error.message });
  broadcast("task_added", data);
  res.json({ task: data });
});
159
+
160
// PATCH update task fields manually. Only whitelisted columns are
// applied — the previous version passed req.body straight to .update(),
// letting any client overwrite arbitrary columns (mass assignment) and
// discarding the update error.
app.patch("/api/tasks/:id", async (req, res) => {
  const ALLOWED = [
    "title",
    "description",
    "status",
    "priority",
    "priority_order",
    "type",
    "epic_id",
    "started_at",
    "completed_at",
  ];
  const { id } = req.params;
  const updates = Object.fromEntries(
    Object.entries(req.body).filter(([key]) => ALLOWED.includes(key))
  );

  const { error } = await supabase.from("cb_tasks").update(updates).eq("id", id);
  if (error) return res.status(500).json({ error: error.message });

  broadcast("task_update", { id, ...updates });
  res.json({ ok: true });
});
169
+
170
// GET the full activity log for one task, oldest first.
app.get("/api/tasks/:id/logs", async (req, res) => {
  const taskId = req.params.id;
  const { data } = await supabase
    .from("cb_logs")
    .select("*")
    .eq("task_id", taskId)
    .order("created_at");
  res.json({ logs: data ?? [] });
});
179
+
180
// Serve the dashboard single-page app for every non-API route.
app.get("*", (req, res) => {
  const indexPage = path.join(__dirname, "index.html");
  res.sendFile(indexPage);
});
184
+
185
+ // ─── HELPERS ─────────────────────────────────────────────────────────────────
186
// Insert an activity-feed entry for a task. Insert failures are written
// to the console (previously they were silently swallowed) but never
// crash the calling request handler.
async function addLog(taskId, message, type = "info") {
  const { error } = await supabase.from("cb_logs").insert({
    project: PROJECT,
    task_id: taskId,
    message,
    type,
  });
  if (error) console.error(`addLog failed for task ${taskId}: ${error.message}`);
}
194
+
195
// Boot the shared HTTP + WebSocket server (wss is attached to `server`).
server.listen(PORT, () => {
  console.log(`READY on port ${PORT}`);
});
package/package.json ADDED
@@ -0,0 +1,55 @@
1
+ {
2
+ "name": "claudeboard",
3
+ "version": "1.0.0",
4
+ "description": "AI engineering team — from PRD to working mobile app, autonomously",
5
+ "type": "module",
6
+ "bin": {
7
+ "claudeboard": "./bin/cli.js"
8
+ },
9
+ "files": [
10
+ "bin/",
11
+ "dashboard/",
12
+ "agents/",
13
+ "tools/",
14
+ "sql/",
15
+ "README.md"
16
+ ],
17
+ "dependencies": {
18
+ "@supabase/supabase-js": "^2.43.1",
19
+ "chalk": "^5.3.0",
20
+ "commander": "^12.0.0",
21
+ "cors": "^2.8.5",
22
+ "dotenv": "^16.4.5",
23
+ "enquirer": "^2.4.1",
24
+ "express": "^4.18.3",
25
+ "open": "^10.1.0",
26
+ "ora": "^8.0.1",
27
+ "puppeteer": "^22.8.0",
28
+ "ws": "^8.17.0"
29
+ },
30
+ "engines": {
31
+ "node": ">=18.0.0"
32
+ },
33
+ "publishConfig": {
34
+ "access": "public"
35
+ },
36
+ "repository": {
37
+ "type": "git",
38
+ "url": "https://github.com/highvaluedigital/claudeboard"
39
+ },
40
+ "author": "highvaluedigital",
41
+ "license": "MIT",
42
+ "keywords": [
43
+ "claude",
44
+ "ai",
45
+ "agent",
46
+ "coding",
47
+ "automation",
48
+ "dashboard",
49
+ "mobile",
50
+ "expo",
51
+ "react-native",
52
+ "prd",
53
+ "engineering"
54
+ ]
55
+ }
package/sql/setup.sql ADDED
@@ -0,0 +1,57 @@
1
-- ClaudeBoard — Supabase Setup
-- Run this in your Supabase SQL Editor

-- Epics (grouping of tasks)
create table if not exists cb_epics (
  id uuid primary key default gen_random_uuid(),
  project text not null,                -- rows are scoped per project name
  name text not null,
  created_at timestamptz default now()
);

-- Tasks
create table if not exists cb_tasks (
  id uuid primary key default gen_random_uuid(),
  project text not null,
  epic_id uuid references cb_epics(id) on delete set null,  -- orphaned tasks survive epic deletion
  title text not null,
  description text,
  status text not null default 'todo',   -- todo | in_progress | done | error | blocked
  priority text default 'medium',        -- high | medium | low
  priority_order int default 2,          -- numeric sort key: high=1, medium=2, low=3
  type text default 'feature',           -- feature | bug | refactor | test | config
  started_at timestamptz,
  completed_at timestamptz,
  created_at timestamptz default now()
);

-- Logs (activity feed)
create table if not exists cb_logs (
  id uuid primary key default gen_random_uuid(),
  project text not null,
  task_id uuid references cb_tasks(id) on delete cascade,   -- logs die with their task
  message text not null,
  type text default 'info',              -- start | progress | complete | error | info
  created_at timestamptz default now()
);

-- Indexes (match the filters/sorts used by the dashboard API)
create index if not exists idx_tasks_project on cb_tasks(project);
create index if not exists idx_tasks_status on cb_tasks(status);
create index if not exists idx_tasks_priority on cb_tasks(priority_order);
create index if not exists idx_logs_task on cb_logs(task_id);
create index if not exists idx_logs_project on cb_logs(project);

-- Enable Realtime (the dashboard server subscribes to these tables)
alter publication supabase_realtime add table cb_tasks;
alter publication supabase_realtime add table cb_logs;
alter publication supabase_realtime add table cb_epics;

-- RLS (Row Level Security) — permissive for local dev
-- WARNING: these allow-all policies give anyone with the anon key full
-- read/write access; tighten them before any shared/production use.
alter table cb_epics enable row level security;
alter table cb_tasks enable row level security;
alter table cb_logs enable row level security;

create policy "allow_all_epics" on cb_epics for all using (true) with check (true);
create policy "allow_all_tasks" on cb_tasks for all using (true) with check (true);
create policy "allow_all_logs" on cb_logs for all using (true) with check (true);
@@ -0,0 +1,95 @@
1
+ import fs from "fs";
2
+ import path from "path";
3
+
4
/**
 * Read a file as UTF-8.
 * @param {string} filePath - Path to read.
 * @returns {string|null} File contents, or null when unreadable/missing.
 */
export function readFile(filePath) {
  try {
    const contents = fs.readFileSync(filePath, "utf8");
    return contents;
  } catch {
    // Missing file / permission error -> caller handles the null.
    return null;
  }
}
14
+
15
/**
 * Write UTF-8 content to a file, creating parent directories as needed.
 * @param {string} filePath - Destination path.
 * @param {string} content - Text to write.
 */
export function writeFile(filePath, content) {
  const parent = path.dirname(filePath);
  fs.mkdirSync(parent, { recursive: true });
  fs.writeFileSync(filePath, content, "utf8");
}
22
+
23
/**
 * Recursively list files under `dir`, skipping common build/VCS folders.
 * @param {string} dir - Root directory.
 * @param {string[]|null} extensions - Keep only files ending with one of
 *   these suffixes; null keeps everything.
 * @param {number} maxDepth - Maximum recursion depth.
 * @returns {string[]} Paths (joined onto `dir`) in traversal order.
 */
export function listFiles(dir, extensions = null, maxDepth = 4) {
  const SKIP = new Set(["node_modules", ".git", ".expo", "dist", "build", ".next", "coverage"]);
  const found = [];

  const walk = (current, depth) => {
    if (depth > maxDepth) return;
    let entries;
    try {
      entries = fs.readdirSync(current, { withFileTypes: true });
    } catch {
      // Unreadable directory: skip silently, matching best-effort intent.
      return;
    }

    for (const entry of entries) {
      if (SKIP.has(entry.name)) continue;
      const full = path.join(current, entry.name);
      if (entry.isDirectory()) {
        walk(full, depth + 1);
      } else if (!extensions || extensions.some((ext) => entry.name.endsWith(ext))) {
        found.push(full);
      }
    }
  };

  walk(dir, 0);
  return found;
}
49
+
50
/**
 * Render a compact ASCII tree of a directory (for LLM context).
 * First line is the root's basename + "/", then one line per entry with
 * box-drawing connectors.
 * @param {string} dir - Root directory.
 * @param {number} maxDepth - Maximum recursion depth.
 * @returns {string} Newline-joined tree.
 */
export function projectTree(dir, maxDepth = 3) {
  const SKIP = new Set(["node_modules", ".git", ".expo", "dist", "build", ".next"]);
  const out = [];

  function render(current, depth, prefix) {
    if (depth > maxDepth) return;
    let entries;
    try {
      entries = fs.readdirSync(current, { withFileTypes: true });
    } catch {
      return;
    }

    const visible = entries.filter((e) => !SKIP.has(e.name));
    visible.forEach((entry, idx) => {
      const last = idx === visible.length - 1;
      out.push(prefix + (last ? "└── " : "├── ") + entry.name);
      if (entry.isDirectory()) {
        const continuation = prefix + (last ? " " : "│ ");
        render(path.join(current, entry.name), depth + 1, continuation);
      }
    });
  }

  out.push(path.basename(dir) + "/");
  render(dir, 0, "");
  return out.join("\n");
}
81
+
82
/**
 * Read multiple files and format them as one markdown-ish context
 * string: a "### relative/path" heading followed by a fenced snippet
 * (first 3000 chars) per readable file. Unreadable files are skipped.
 * @param {string[]} filePaths - Absolute paths to include.
 * @param {string} projectRoot - Base used to relativize headings.
 * @returns {string} Joined context blocks separated by blank lines.
 */
export function readFilesAsContext(filePaths, projectRoot) {
  const blocks = [];
  for (const fp of filePaths) {
    const content = readFile(fp);
    if (!content) continue;
    const rel = path.relative(projectRoot, fp);
    blocks.push(`### ${rel}\n\`\`\`\n${content.slice(0, 3000)}\n\`\`\``);
  }
  return blocks.join("\n\n");
}
@@ -0,0 +1,74 @@
1
+ import { runCommand, waitForPort } from "./terminal.js";
2
+ import fs from "fs";
3
+ import path from "path";
4
+
5
// Cached puppeteer module; null until the first successful dynamic import.
let puppeteer = null;

// Import puppeteer lazily (it is an optional, heavyweight dependency).
// Returns the cached module, or null when puppeteer is not installed.
async function getPuppeteer() {
  if (puppeteer) return puppeteer;
  try {
    puppeteer = (await import("puppeteer")).default;
  } catch {
    return null;
  }
  return puppeteer;
}
18
+
19
/**
 * Take a screenshot of a running Expo Web app at
 * http://localhost:<port><pagePath> using headless Chromium.
 * @param {number} port - Local port the Expo web server listens on.
 * @param {string} outputDir - Directory for the PNG (created if missing).
 * @param {string} pagePath - URL path to capture (default "/").
 * @returns {Promise<{success: boolean, imagePath?: string, base64?: string, url?: string, error?: string}>}
 *   Never rejects: failures come back as { success: false, error }.
 */
export async function screenshotExpoWeb(port = 8081, outputDir, pagePath = "/") {
  const p = await getPuppeteer();
  if (!p) {
    // puppeteer is optional; without it we fail soft with instructions.
    return { success: false, error: "puppeteer not installed — run: npm install puppeteer" };
  }

  const url = `http://localhost:${port}${pagePath}`;
  // Timestamped filename keeps successive captures from clobbering each other.
  const filename = `screenshot-${Date.now()}.png`;
  const imagePath = path.join(outputDir, filename);

  fs.mkdirSync(outputDir, { recursive: true });

  let browser;
  try {
    browser = await p.launch({
      headless: "new",
      // Sandbox flags allow running inside containers/CI.
      args: ["--no-sandbox", "--disable-setuid-sandbox"],
    });

    const page = await browser.newPage();

    // Mobile viewport — Expo apps are mobile-first (iPhone-ish 390x844 @2x).
    await page.setViewport({ width: 390, height: 844, deviceScaleFactor: 2 });

    await page.goto(url, { waitUntil: "networkidle2", timeout: 30000 });

    // Wait a bit for React to render after the network settles.
    await new Promise((r) => setTimeout(r, 2000));

    await page.screenshot({ path: imagePath, fullPage: false });

    // base64 copy lets callers inline the image (e.g. into an LLM prompt).
    const base64 = fs.readFileSync(imagePath).toString("base64");

    return { success: true, imagePath, base64, url };
  } catch (err) {
    return { success: false, error: err.message };
  } finally {
    // Always release the browser, even on navigation/screenshot failure.
    if (browser) await browser.close();
  }
}
63
+
64
/**
 * Capture screenshots for several routes, one after another (sequential
 * on purpose — a single headless browser session at a time).
 * @param {number} port - Local Expo web port.
 * @param {string[]} routes - URL paths to capture.
 * @param {string} outputDir - Directory for the PNG files.
 * @returns {Promise<Array<{route: string} & object>>} Per-route results.
 */
export async function screenshotRoutes(port = 8081, routes = ["/"], outputDir) {
  const shots = [];
  for (let i = 0; i < routes.length; i += 1) {
    const route = routes[i];
    const shot = await screenshotExpoWeb(port, outputDir, route);
    shots.push({ route, ...shot });
  }
  return shots;
}
@@ -0,0 +1,74 @@
1
+ import { createClient } from "@supabase/supabase-js";
2
+
3
// Module-level Supabase client; stays null until initSupabaseReader()
// is called, and every reader below returns [] while it is null.
let client = null;

/**
 * Initialise the shared Supabase client used by the reader helpers.
 * @param {string} url - Supabase project URL.
 * @param {string} key - Supabase API key.
 */
export function initSupabaseReader(url, key) {
  client = createClient(url, key);
}
8
+
9
/**
 * Fetch the most recent rows from a logs table, newest first.
 * Returns [] when the client is uninitialised; query or transport
 * failures come back as a single-element array: [{ error: message }].
 * @param {string} table - Table name (default "logs").
 * @param {number} limit - Max rows to fetch.
 * @returns {Promise<object[]>}
 */
export async function readRecentLogs(table = "logs", limit = 50) {
  if (!client) return [];
  try {
    const query = client
      .from(table)
      .select("*")
      .order("created_at", { ascending: false })
      .limit(limit);
    const { data, error } = await query;
    if (error) return [{ error: error.message }];
    return data ?? [];
  } catch (err) {
    return [{ error: err.message }];
  }
}
27
+
28
/**
 * Fetch recent error-level rows from a logs table, newest first.
 * Matches level=error/Error or type=error; returns [] on any failure
 * (best-effort — errors are deliberately swallowed here).
 * @param {string} table - Table name (default "logs").
 * @param {number} limit - Max rows to fetch.
 * @returns {Promise<object[]>}
 */
export async function readErrorLogs(table = "logs", limit = 20) {
  if (!client) return [];
  try {
    const query = client
      .from(table)
      .select("*")
      .or("level.eq.error,level.eq.Error,type.eq.error")
      .order("created_at", { ascending: false })
      .limit(limit);
    const { data } = await query;
    return data ?? [];
  } catch {
    return [];
  }
}
45
+
46
/**
 * List all tables in the project's Supabase public schema.
 * Returns [] when the client is uninitialised or the query fails.
 *
 * NOTE(review): supabase-js routes .from() through PostgREST, which by
 * default only exposes tables in the configured (public) schema —
 * querying "information_schema.tables" this way is expected to error,
 * so this likely always returns []. Verify against a live project; an
 * RPC to a SQL function may be needed instead.
 */
export async function listTables() {
  if (!client) return [];
  try {
    const { data } = await client
      .from("information_schema.tables")
      .select("table_name")
      .eq("table_schema", "public");
    return data?.map((r) => r.table_name) || [];
  } catch {
    return [];
  }
}
61
+
62
/**
 * Fetch a small sample of rows from a table (for QA context).
 * Returns [] when the client is uninitialised; failures come back as a
 * single-element array: [{ error: message }].
 * @param {string} tableName - Table to sample.
 * @param {number} limit - Max rows (default 5).
 * @returns {Promise<object[]>}
 */
export async function sampleTable(tableName, limit = 5) {
  if (!client) return [];
  try {
    const { data, error } = await client.from(tableName).select("*").limit(limit);
    if (error) return [{ error: error.message }];
    return data ?? [];
  } catch (err) {
    return [{ error: err.message }];
  }
}
@@ -0,0 +1,58 @@
1
import { exec, spawn } from "child_process";
import net from "net";
import { promisify } from "util";
3
+
4
const execAsync = promisify(exec);

/**
 * Run a shell command in `cwd` and capture its output.
 * Never throws: failures are reported through a non-zero exitCode.
 * @param {string} cmd - Shell command line.
 * @param {string} cwd - Working directory.
 * @param {number} timeoutMs - Kill the command after this long.
 * @returns {Promise<{stdout: string, stderr: string, exitCode: number}>}
 */
export async function runCommand(cmd, cwd, timeoutMs = 60000) {
  const options = {
    cwd,
    timeout: timeoutMs,
    // CI/FORCE_COLOR keep tool output non-interactive and colour-free.
    env: { ...process.env, CI: "true", FORCE_COLOR: "0" },
  };
  try {
    const { stdout, stderr } = await execAsync(cmd, options);
    return { stdout: stdout.trim(), stderr: stderr.trim(), exitCode: 0 };
  } catch (err) {
    const stdout = err.stdout?.trim() || "";
    const stderr = err.stderr?.trim() || err.message;
    return { stdout, stderr, exitCode: err.code || 1 };
  }
}
25
+
26
/**
 * Start a long-running child process (e.g. `expo start`).
 * Both stdout and stderr are streamed line-buffers into `onLog`.
 * @param {string} cmd - Executable.
 * @param {string[]} args - Arguments.
 * @param {string} cwd - Working directory.
 * @param {(chunk: string) => void} onLog - Receives output chunks.
 * @returns {{kill: () => void, pid: number}} Handle for the process.
 */
export function startProcess(cmd, args, cwd, onLog) {
  const child = spawn(cmd, args, {
    cwd,
    env: { ...process.env, FORCE_COLOR: "0" },
    // stdin is unused; capture stdout/stderr as pipes.
    stdio: ["ignore", "pipe", "pipe"],
  });

  const forward = (chunk) => onLog(chunk.toString());
  child.stdout.on("data", forward);
  child.stderr.on("data", forward);

  return {
    kill: () => child.kill("SIGTERM"),
    pid: child.pid,
  };
}
44
+
45
/**
 * Wait until a local TCP port accepts connections.
 * Previously this shelled out to `curl`, which fails on systems where
 * curl is not installed; a raw TCP probe via node's `net` module is
 * portable and needs no subprocess.
 * Polls roughly once per second, capped by the remaining time budget.
 * @param {number} port - Local port to probe.
 * @param {number} timeoutMs - Give up after this long.
 * @returns {Promise<boolean>} true once the port accepts a connection.
 */
export async function waitForPort(port, timeoutMs = 30000) {
  // Single connection attempt; resolves true iff the connect succeeds.
  const probe = () =>
    new Promise((resolve) => {
      const socket = net.connect({ port, host: "127.0.0.1" });
      const finish = (up) => {
        socket.destroy();
        resolve(up);
      };
      socket.once("connect", () => finish(true));
      socket.once("error", () => finish(false));
      // Treat a hung connect as "not up yet".
      socket.setTimeout(1000, () => finish(false));
    });

  const deadline = Date.now() + timeoutMs;
  while (Date.now() < deadline) {
    if (await probe()) return true;
    const remaining = deadline - Date.now();
    if (remaining <= 0) break;
    // Never sleep past the deadline.
    await new Promise((r) => setTimeout(r, Math.min(1000, remaining)));
  }
  return false;
}