openclaw-node-harness 2.0.4 → 2.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +646 -3
- package/bin/hyperagent.mjs +419 -0
- package/bin/mesh-agent.js +401 -12
- package/bin/mesh-bridge.js +66 -1
- package/bin/mesh-task-daemon.js +816 -26
- package/bin/mesh.js +403 -1
- package/config/claude-settings.json +95 -0
- package/config/daemon.json.template +2 -1
- package/config/git-hooks/pre-commit +13 -0
- package/config/git-hooks/pre-push +12 -0
- package/config/harness-rules.json +174 -0
- package/config/plan-templates/team-bugfix.yaml +52 -0
- package/config/plan-templates/team-deploy.yaml +50 -0
- package/config/plan-templates/team-feature.yaml +71 -0
- package/config/roles/qa-engineer.yaml +36 -0
- package/config/roles/solidity-dev.yaml +51 -0
- package/config/roles/tech-architect.yaml +36 -0
- package/config/rules/framework/solidity.md +22 -0
- package/config/rules/framework/typescript.md +21 -0
- package/config/rules/framework/unity.md +21 -0
- package/config/rules/universal/design-docs.md +18 -0
- package/config/rules/universal/git-hygiene.md +18 -0
- package/config/rules/universal/security.md +19 -0
- package/config/rules/universal/test-standards.md +19 -0
- package/identity/DELEGATION.md +6 -6
- package/install.sh +293 -8
- package/lib/circling-parser.js +119 -0
- package/lib/hyperagent-store.mjs +652 -0
- package/lib/kanban-io.js +9 -0
- package/lib/mcp-knowledge/bench.mjs +118 -0
- package/lib/mcp-knowledge/core.mjs +528 -0
- package/lib/mcp-knowledge/package.json +25 -0
- package/lib/mcp-knowledge/server.mjs +245 -0
- package/lib/mcp-knowledge/test.mjs +802 -0
- package/lib/memory-budget.mjs +261 -0
- package/lib/mesh-collab.js +301 -1
- package/lib/mesh-harness.js +427 -0
- package/lib/mesh-plans.js +13 -5
- package/lib/mesh-tasks.js +67 -0
- package/lib/plan-templates.js +226 -0
- package/lib/pre-compression-flush.mjs +320 -0
- package/lib/role-loader.js +292 -0
- package/lib/rule-loader.js +358 -0
- package/lib/session-store.mjs +458 -0
- package/lib/transcript-parser.mjs +292 -0
- package/mission-control/drizzle/soul_schema_update.sql +29 -0
- package/mission-control/drizzle.config.ts +1 -4
- package/mission-control/package-lock.json +1571 -83
- package/mission-control/package.json +6 -2
- package/mission-control/scripts/gen-chronology.js +3 -3
- package/mission-control/scripts/import-pipeline-v2.js +0 -16
- package/mission-control/scripts/import-pipeline.js +0 -15
- package/mission-control/src/app/api/cowork/clusters/[id]/members/route.ts +117 -0
- package/mission-control/src/app/api/cowork/clusters/[id]/route.ts +84 -0
- package/mission-control/src/app/api/cowork/clusters/route.ts +141 -0
- package/mission-control/src/app/api/cowork/dispatch/route.ts +128 -0
- package/mission-control/src/app/api/cowork/events/route.ts +65 -0
- package/mission-control/src/app/api/cowork/intervene/route.ts +259 -0
- package/mission-control/src/app/api/cowork/sessions/[id]/route.ts +37 -0
- package/mission-control/src/app/api/cowork/sessions/route.ts +64 -0
- package/mission-control/src/app/api/diagnostics/route.ts +97 -0
- package/mission-control/src/app/api/diagnostics/test-runner/route.ts +990 -0
- package/mission-control/src/app/api/mesh/events/route.ts +95 -19
- package/mission-control/src/app/api/mesh/identity/route.ts +11 -0
- package/mission-control/src/app/api/mesh/tasks/[id]/route.ts +92 -0
- package/mission-control/src/app/api/mesh/tasks/route.ts +91 -0
- package/mission-control/src/app/api/tasks/[id]/handoff/route.ts +1 -1
- package/mission-control/src/app/api/tasks/[id]/route.ts +90 -4
- package/mission-control/src/app/api/tasks/route.ts +21 -30
- package/mission-control/src/app/cowork/page.tsx +261 -0
- package/mission-control/src/app/diagnostics/page.tsx +385 -0
- package/mission-control/src/app/graph/page.tsx +26 -0
- package/mission-control/src/app/memory/page.tsx +1 -1
- package/mission-control/src/app/obsidian/page.tsx +36 -6
- package/mission-control/src/app/roadmap/page.tsx +24 -0
- package/mission-control/src/app/souls/page.tsx +2 -2
- package/mission-control/src/components/board/execution-config.tsx +431 -0
- package/mission-control/src/components/board/kanban-board.tsx +75 -9
- package/mission-control/src/components/board/kanban-column.tsx +135 -19
- package/mission-control/src/components/board/task-card.tsx +55 -2
- package/mission-control/src/components/board/unified-task-dialog.tsx +82 -4
- package/mission-control/src/components/cowork/cluster-card.tsx +176 -0
- package/mission-control/src/components/cowork/create-cluster-dialog.tsx +251 -0
- package/mission-control/src/components/cowork/dispatch-form.tsx +423 -0
- package/mission-control/src/components/cowork/role-picker.tsx +102 -0
- package/mission-control/src/components/cowork/session-card.tsx +284 -0
- package/mission-control/src/components/layout/sidebar.tsx +39 -2
- package/mission-control/src/lib/__tests__/daily-log.test.ts +82 -0
- package/mission-control/src/lib/__tests__/memory-md.test.ts +87 -0
- package/mission-control/src/lib/__tests__/mesh-kv-sync.test.ts +465 -0
- package/mission-control/src/lib/__tests__/mocks/mock-kv.ts +131 -0
- package/mission-control/src/lib/__tests__/status-kanban.test.ts +46 -0
- package/mission-control/src/lib/__tests__/task-markdown.test.ts +188 -0
- package/mission-control/src/lib/__tests__/wikilinks.test.ts +175 -0
- package/mission-control/src/lib/config.ts +58 -0
- package/mission-control/src/lib/db/index.ts +69 -0
- package/mission-control/src/lib/db/schema.ts +61 -3
- package/mission-control/src/lib/hooks.ts +309 -0
- package/mission-control/src/lib/memory/entities.ts +3 -2
- package/mission-control/src/lib/nats.ts +66 -1
- package/mission-control/src/lib/parsers/task-markdown.ts +52 -2
- package/mission-control/src/lib/parsers/transcript.ts +4 -4
- package/mission-control/src/lib/scheduler.ts +12 -11
- package/mission-control/src/lib/sync/mesh-kv.ts +279 -0
- package/mission-control/src/lib/sync/tasks.ts +23 -1
- package/mission-control/src/lib/task-id.ts +32 -0
- package/mission-control/src/lib/tts/index.ts +33 -9
- package/mission-control/tsconfig.json +2 -1
- package/mission-control/vitest.config.ts +14 -0
- package/package.json +15 -2
- package/services/service-manifest.json +1 -1
- package/skills/cc-godmode/references/agents.md +8 -8
- package/workspace-bin/memory-daemon.mjs +199 -5
- package/workspace-bin/session-search.mjs +204 -0
- package/workspace-bin/web-fetch.mjs +65 -0
|
@@ -0,0 +1,990 @@
|
|
|
1
|
+
import { NextResponse } from "next/server";
|
|
2
|
+
import { getDb, getRawDb } from "@/lib/db";
|
|
3
|
+
import { tasks, clusters, clusterMembers, dependencies, memoryDocs, memoryEntities, memoryRelations } from "@/lib/db/schema";
|
|
4
|
+
import { eq, or } from "drizzle-orm";
|
|
5
|
+
import { statusToKanban, kanbanToStatus, parseTasksMarkdown, serializeTasksMarkdown } from "@/lib/parsers/task-markdown";
|
|
6
|
+
import { syncTasksToMarkdown, syncTasksFromMarkdown } from "@/lib/sync/tasks";
|
|
7
|
+
import { schedulerTick, computeWaves } from "@/lib/scheduler";
|
|
8
|
+
import { getNats } from "@/lib/nats";
|
|
9
|
+
import fs from "fs";
|
|
10
|
+
import { ACTIVE_TASKS_MD, WORKSPACE_ROOT } from "@/lib/config";
|
|
11
|
+
|
|
12
|
+
// Next.js route segment config: always execute per-request — this endpoint
// mutates live DB state and must never be statically optimized or cached.
export const dynamic = "force-dynamic";
// Cap route execution at 30s: the suite touches DB, filesystem, and NATS.
export const maxDuration = 30;
|
|
14
|
+
|
|
15
|
+
/** Outcome of a single diagnostic test case, as reported to the client. */
interface TestResult {
  // Logical grouping, e.g. "Status Mapping" or "Task CRUD".
  suite: string;
  // Human-readable test case name.
  name: string;
  // "skip" is part of the contract; note runTest below only emits pass/fail.
  status: "pass" | "fail" | "skip";
  // Optional diagnostic message: failure reason or extra context on success.
  detail?: string;
  // Wall-clock duration of the test body in milliseconds.
  durationMs: number;
}
|
|
22
|
+
|
|
23
|
+
// A test body: resolves with ok (pass/fail verdict) and an optional detail message.
type TestFn = () => Promise<{ ok: boolean; detail?: string }>;
|
|
24
|
+
|
|
25
|
+
async function runTest(suite: string, name: string, fn: TestFn): Promise<TestResult> {
|
|
26
|
+
const start = Date.now();
|
|
27
|
+
try {
|
|
28
|
+
const { ok, detail } = await fn();
|
|
29
|
+
return { suite, name, status: ok ? "pass" : "fail", detail, durationMs: Date.now() - start };
|
|
30
|
+
} catch (err) {
|
|
31
|
+
return { suite, name, status: "fail", detail: (err as Error).message, durationMs: Date.now() - start };
|
|
32
|
+
}
|
|
33
|
+
}
|
|
34
|
+
|
|
35
|
+
/**
|
|
36
|
+
* POST /api/diagnostics/test-runner
|
|
37
|
+
*
|
|
38
|
+
* Runs a comprehensive integration test suite against the live MC system.
|
|
39
|
+
* Creates test data, verifies behavior, and cleans up after itself.
|
|
40
|
+
*
|
|
41
|
+
* All test tasks use the prefix __TEST__ so they can be safely cleaned up.
|
|
42
|
+
*/
|
|
43
|
+
export async function POST() {
|
|
44
|
+
const results: TestResult[] = [];
|
|
45
|
+
const db = getDb();
|
|
46
|
+
const raw = getRawDb();
|
|
47
|
+
const TEST_PREFIX = "__TEST__";
|
|
48
|
+
const testTaskId = `${TEST_PREFIX}${Date.now()}`;
|
|
49
|
+
|
|
50
|
+
// ═══════════════════════════════════════════════════════════
|
|
51
|
+
// SUITE 1: Status <-> Kanban Mapping
|
|
52
|
+
// ═══════════════════════════════════════════════════════════
|
|
53
|
+
|
|
54
|
+
results.push(await runTest("Status Mapping", "queued -> backlog", async () => {
|
|
55
|
+
return { ok: statusToKanban("queued") === "backlog" };
|
|
56
|
+
}));
|
|
57
|
+
|
|
58
|
+
results.push(await runTest("Status Mapping", "running -> in_progress", async () => {
|
|
59
|
+
return { ok: statusToKanban("running") === "in_progress" };
|
|
60
|
+
}));
|
|
61
|
+
|
|
62
|
+
results.push(await runTest("Status Mapping", "waiting-user -> review", async () => {
|
|
63
|
+
return { ok: statusToKanban("waiting-user") === "review" };
|
|
64
|
+
}));
|
|
65
|
+
|
|
66
|
+
results.push(await runTest("Status Mapping", "done -> done", async () => {
|
|
67
|
+
return { ok: statusToKanban("done") === "done" };
|
|
68
|
+
}));
|
|
69
|
+
|
|
70
|
+
results.push(await runTest("Status Mapping", "kanban reverse: in_progress -> running", async () => {
|
|
71
|
+
return { ok: kanbanToStatus("in_progress") === "running" };
|
|
72
|
+
}));
|
|
73
|
+
|
|
74
|
+
results.push(await runTest("Status Mapping", "unknown status falls back to backlog", async () => {
|
|
75
|
+
return { ok: statusToKanban("banana") === "backlog" };
|
|
76
|
+
}));
|
|
77
|
+
|
|
78
|
+
// ═══════════════════════════════════════════════════════════
|
|
79
|
+
// SUITE 2: Task CRUD
|
|
80
|
+
// ═══════════════════════════════════════════════════════════
|
|
81
|
+
|
|
82
|
+
results.push(await runTest("Task CRUD", "Create task in DB", async () => {
|
|
83
|
+
const now = new Date().toISOString();
|
|
84
|
+
db.insert(tasks).values({
|
|
85
|
+
id: testTaskId,
|
|
86
|
+
title: "Test Task - Integration",
|
|
87
|
+
status: "queued",
|
|
88
|
+
kanbanColumn: "backlog",
|
|
89
|
+
owner: "test-runner",
|
|
90
|
+
updatedAt: now,
|
|
91
|
+
createdAt: now,
|
|
92
|
+
}).run();
|
|
93
|
+
const row = db.select().from(tasks).where(eq(tasks.id, testTaskId)).get();
|
|
94
|
+
return {
|
|
95
|
+
ok: !!row && row.title === "Test Task - Integration" && row.kanbanColumn === "backlog",
|
|
96
|
+
detail: row ? `Created: ${row.id}` : "Insert failed",
|
|
97
|
+
};
|
|
98
|
+
}));
|
|
99
|
+
|
|
100
|
+
results.push(await runTest("Task CRUD", "Update task status", async () => {
|
|
101
|
+
db.update(tasks).set({ status: "running", kanbanColumn: "in_progress", updatedAt: new Date().toISOString() }).where(eq(tasks.id, testTaskId)).run();
|
|
102
|
+
const row = db.select().from(tasks).where(eq(tasks.id, testTaskId)).get();
|
|
103
|
+
return {
|
|
104
|
+
ok: !!row && row.status === "running" && row.kanbanColumn === "in_progress",
|
|
105
|
+
detail: `status=${row?.status}, kanban=${row?.kanbanColumn}`,
|
|
106
|
+
};
|
|
107
|
+
}));
|
|
108
|
+
|
|
109
|
+
results.push(await runTest("Task CRUD", "Delete task", async () => {
|
|
110
|
+
db.delete(tasks).where(eq(tasks.id, testTaskId)).run();
|
|
111
|
+
const row = db.select().from(tasks).where(eq(tasks.id, testTaskId)).get();
|
|
112
|
+
return { ok: !row, detail: row ? "Still exists!" : "Deleted OK" };
|
|
113
|
+
}));
|
|
114
|
+
|
|
115
|
+
// ═══════════════════════════════════════════════════════════
|
|
116
|
+
// SUITE 3: Done-Gate Enforcement
|
|
117
|
+
// ═══════════════════════════════════════════════════════════
|
|
118
|
+
|
|
119
|
+
const doneGateTaskId = `${TEST_PREFIX}donegate_${Date.now()}`;
|
|
120
|
+
|
|
121
|
+
results.push(await runTest("Done-Gate", "Done without force_done -> redirects to review", async () => {
|
|
122
|
+
const now = new Date().toISOString();
|
|
123
|
+
db.insert(tasks).values({
|
|
124
|
+
id: doneGateTaskId,
|
|
125
|
+
title: "Done Gate Test",
|
|
126
|
+
status: "running",
|
|
127
|
+
kanbanColumn: "in_progress",
|
|
128
|
+
needsApproval: 1,
|
|
129
|
+
updatedAt: now,
|
|
130
|
+
createdAt: now,
|
|
131
|
+
}).run();
|
|
132
|
+
|
|
133
|
+
// Simulate what the PATCH API does: setting done without force_done
|
|
134
|
+
const targetStatus = "done";
|
|
135
|
+
const redirected = targetStatus === "done"; // would be redirected
|
|
136
|
+
const effectiveStatus = redirected ? "waiting-user" : "done";
|
|
137
|
+
const effectiveColumn = redirected ? "review" : "done";
|
|
138
|
+
|
|
139
|
+
db.update(tasks).set({
|
|
140
|
+
status: effectiveStatus,
|
|
141
|
+
kanbanColumn: effectiveColumn,
|
|
142
|
+
updatedAt: new Date().toISOString(),
|
|
143
|
+
}).where(eq(tasks.id, doneGateTaskId)).run();
|
|
144
|
+
|
|
145
|
+
const row = db.select().from(tasks).where(eq(tasks.id, doneGateTaskId)).get();
|
|
146
|
+
const ok = !!row && row.status === "waiting-user" && row.kanbanColumn === "review";
|
|
147
|
+
|
|
148
|
+
// Cleanup
|
|
149
|
+
db.delete(tasks).where(eq(tasks.id, doneGateTaskId)).run();
|
|
150
|
+
return { ok, detail: `status=${row?.status}, kanban=${row?.kanbanColumn}` };
|
|
151
|
+
}));
|
|
152
|
+
|
|
153
|
+
results.push(await runTest("Done-Gate", "Done with force_done -> actually done", async () => {
|
|
154
|
+
const now = new Date().toISOString();
|
|
155
|
+
db.insert(tasks).values({
|
|
156
|
+
id: doneGateTaskId + "_force",
|
|
157
|
+
title: "Force Done Test",
|
|
158
|
+
status: "running",
|
|
159
|
+
kanbanColumn: "in_progress",
|
|
160
|
+
needsApproval: 1,
|
|
161
|
+
updatedAt: now,
|
|
162
|
+
createdAt: now,
|
|
163
|
+
}).run();
|
|
164
|
+
|
|
165
|
+
// With force_done, no redirect
|
|
166
|
+
db.update(tasks).set({
|
|
167
|
+
status: "done",
|
|
168
|
+
kanbanColumn: "done",
|
|
169
|
+
updatedAt: new Date().toISOString(),
|
|
170
|
+
}).where(eq(tasks.id, doneGateTaskId + "_force")).run();
|
|
171
|
+
|
|
172
|
+
const row = db.select().from(tasks).where(eq(tasks.id, doneGateTaskId + "_force")).get();
|
|
173
|
+
const ok = !!row && row.status === "done" && row.kanbanColumn === "done";
|
|
174
|
+
|
|
175
|
+
db.delete(tasks).where(eq(tasks.id, doneGateTaskId + "_force")).run();
|
|
176
|
+
return { ok, detail: `status=${row?.status}, kanban=${row?.kanbanColumn}` };
|
|
177
|
+
}));
|
|
178
|
+
|
|
179
|
+
// ═══════════════════════════════════════════════════════════
|
|
180
|
+
// SUITE 4: Markdown Parser Round-Trip
|
|
181
|
+
// ═══════════════════════════════════════════════════════════
|
|
182
|
+
|
|
183
|
+
results.push(await runTest("Parser", "Round-trip minimal task", async () => {
|
|
184
|
+
const md = `## Live Tasks\n\n- task_id: TEST-001\n title: Test\n status: queued\n owner: main\n success_criteria:\n artifacts:\n next_action: do it\n updated_at: 2026-01-01T00:00:00Z\n`;
|
|
185
|
+
const parsed = parseTasksMarkdown(md);
|
|
186
|
+
const serialized = serializeTasksMarkdown(parsed);
|
|
187
|
+
const reparsed = parseTasksMarkdown(serialized);
|
|
188
|
+
const ok = reparsed.length === 1 && reparsed[0].id === "TEST-001" && reparsed[0].title === "Test";
|
|
189
|
+
return { ok, detail: `parsed=${parsed.length}, reparsed=${reparsed.length}` };
|
|
190
|
+
}));
|
|
191
|
+
|
|
192
|
+
results.push(await runTest("Parser", "Round-trip mesh+collab fields", async () => {
|
|
193
|
+
const md = `## Live Tasks\n\n- task_id: MESH-TEST\n title: Mesh Test\n status: running\n owner: daedalus\n success_criteria:\n artifacts:\n next_action: n/a\n execution: mesh\n mesh_task_id: NATS-001\n mesh_node: node-a\n metric: tests pass\n budget_minutes: 45\n scope:\n - src/\n collaboration: {"mode":"parallel","min_nodes":2}\n preferred_nodes:\n - node-a\n - node-b\n cluster_id: dev-team\n updated_at: 2026-01-01T00:00:00Z\n`;
|
|
194
|
+
const parsed = parseTasksMarkdown(md);
|
|
195
|
+
const serialized = serializeTasksMarkdown(parsed);
|
|
196
|
+
const reparsed = parseTasksMarkdown(serialized);
|
|
197
|
+
const t = reparsed[0];
|
|
198
|
+
const ok = !!t &&
|
|
199
|
+
t.execution === "mesh" &&
|
|
200
|
+
t.meshTaskId === "NATS-001" &&
|
|
201
|
+
t.scope?.length === 1 &&
|
|
202
|
+
t.preferredNodes?.length === 2 &&
|
|
203
|
+
t.clusterId === "dev-team";
|
|
204
|
+
return { ok, detail: `execution=${t?.execution}, scope=${t?.scope?.length}, nodes=${t?.preferredNodes?.length}` };
|
|
205
|
+
}));
|
|
206
|
+
|
|
207
|
+
results.push(await runTest("Parser", "Round-trip scheduling fields", async () => {
|
|
208
|
+
const md = `## Live Tasks\n\n- task_id: SCHED-001\n title: Scheduled\n status: queued\n owner: main\n success_criteria:\n artifacts:\n next_action: n/a\n needs_approval: false\n trigger_kind: cron\n trigger_cron: 0 10 * * 1\n trigger_tz: America/New_York\n is_recurring: true\n capacity_class: heavy\n auto_priority: 5\n updated_at: 2026-01-01T00:00:00Z\n`;
|
|
209
|
+
const parsed = parseTasksMarkdown(md);
|
|
210
|
+
const serialized = serializeTasksMarkdown(parsed);
|
|
211
|
+
const reparsed = parseTasksMarkdown(serialized);
|
|
212
|
+
const t = reparsed[0];
|
|
213
|
+
const ok = !!t &&
|
|
214
|
+
t.needsApproval === false &&
|
|
215
|
+
t.triggerKind === "cron" &&
|
|
216
|
+
t.triggerCron === "0 10 * * 1" &&
|
|
217
|
+
t.isRecurring === true &&
|
|
218
|
+
t.capacityClass === "heavy" &&
|
|
219
|
+
t.autoPriority === 5;
|
|
220
|
+
return { ok, detail: `approval=${t?.needsApproval}, trigger=${t?.triggerKind}, recurring=${t?.isRecurring}` };
|
|
221
|
+
}));
|
|
222
|
+
|
|
223
|
+
// ═══════════════════════════════════════════════════════════
|
|
224
|
+
// SUITE 5: Markdown <-> DB Sync
|
|
225
|
+
// ═══════════════════════════════════════════════════════════
|
|
226
|
+
|
|
227
|
+
results.push(await runTest("Sync", "active-tasks.md exists", async () => {
|
|
228
|
+
const exists = fs.existsSync(ACTIVE_TASKS_MD);
|
|
229
|
+
return { ok: exists, detail: exists ? ACTIVE_TASKS_MD : "File not found" };
|
|
230
|
+
}));
|
|
231
|
+
|
|
232
|
+
results.push(await runTest("Sync", "DB -> Markdown -> DB round-trip preserves tasks", async () => {
|
|
233
|
+
// Count tasks before
|
|
234
|
+
const before = (raw.prepare("SELECT COUNT(*) as c FROM tasks WHERE id NOT LIKE '__TEST__%' AND id != '__LIVE_SESSION__'").get() as { c: number }).c;
|
|
235
|
+
// Force a sync cycle
|
|
236
|
+
syncTasksToMarkdown(db);
|
|
237
|
+
syncTasksFromMarkdown(db);
|
|
238
|
+
const after = (raw.prepare("SELECT COUNT(*) as c FROM tasks WHERE id NOT LIKE '__TEST__%' AND id != '__LIVE_SESSION__'").get() as { c: number }).c;
|
|
239
|
+
// Should be same count (or close — live session may change)
|
|
240
|
+
const diff = Math.abs(after - before);
|
|
241
|
+
return { ok: diff <= 2, detail: `before=${before}, after=${after}, diff=${diff}` };
|
|
242
|
+
}));
|
|
243
|
+
|
|
244
|
+
// ═══════════════════════════════════════════════════════════
|
|
245
|
+
// SUITE 6: Cowork / Clusters
|
|
246
|
+
// ═══════════════════════════════════════════════════════════
|
|
247
|
+
|
|
248
|
+
const testClusterId = `${TEST_PREFIX}cluster_${Date.now()}`;
|
|
249
|
+
|
|
250
|
+
results.push(await runTest("Cowork", "Create cluster", async () => {
|
|
251
|
+
const now = new Date().toISOString();
|
|
252
|
+
db.insert(clusters).values({
|
|
253
|
+
id: testClusterId,
|
|
254
|
+
name: "Test Cluster",
|
|
255
|
+
description: "Integration test cluster",
|
|
256
|
+
defaultMode: "parallel",
|
|
257
|
+
defaultConvergence: "unanimous",
|
|
258
|
+
status: "active",
|
|
259
|
+
updatedAt: now,
|
|
260
|
+
createdAt: now,
|
|
261
|
+
}).run();
|
|
262
|
+
const row = db.select().from(clusters).where(eq(clusters.id, testClusterId)).get();
|
|
263
|
+
return { ok: !!row && row.name === "Test Cluster", detail: `id=${row?.id}` };
|
|
264
|
+
}));
|
|
265
|
+
|
|
266
|
+
results.push(await runTest("Cowork", "Add member to cluster", async () => {
|
|
267
|
+
db.insert(clusterMembers).values({
|
|
268
|
+
clusterId: testClusterId,
|
|
269
|
+
nodeId: "test-node-alpha",
|
|
270
|
+
role: "worker",
|
|
271
|
+
}).run();
|
|
272
|
+
const members = db.select().from(clusterMembers).where(eq(clusterMembers.clusterId, testClusterId)).all();
|
|
273
|
+
return { ok: members.length === 1 && members[0].nodeId === "test-node-alpha" };
|
|
274
|
+
}));
|
|
275
|
+
|
|
276
|
+
results.push(await runTest("Cowork", "Cleanup cluster", async () => {
|
|
277
|
+
db.delete(clusterMembers).where(eq(clusterMembers.clusterId, testClusterId)).run();
|
|
278
|
+
db.delete(clusters).where(eq(clusters.id, testClusterId)).run();
|
|
279
|
+
const row = db.select().from(clusters).where(eq(clusters.id, testClusterId)).get();
|
|
280
|
+
return { ok: !row };
|
|
281
|
+
}));
|
|
282
|
+
|
|
283
|
+
// ═══════════════════════════════════════════════════════════
|
|
284
|
+
// SUITE 7: Memory / Knowledge Graph
|
|
285
|
+
// ═══════════════════════════════════════════════════════════
|
|
286
|
+
|
|
287
|
+
results.push(await runTest("Memory", "memory_docs table accessible", async () => {
|
|
288
|
+
const count = (raw.prepare("SELECT COUNT(*) as c FROM memory_docs").get() as { c: number }).c;
|
|
289
|
+
return { ok: true, detail: `${count} docs indexed` };
|
|
290
|
+
}));
|
|
291
|
+
|
|
292
|
+
results.push(await runTest("Memory", "memory_items table accessible", async () => {
|
|
293
|
+
const count = (raw.prepare("SELECT COUNT(*) as c FROM memory_items").get() as { c: number }).c;
|
|
294
|
+
return { ok: true, detail: `${count} items total` };
|
|
295
|
+
}));
|
|
296
|
+
|
|
297
|
+
results.push(await runTest("Knowledge Graph", "Entities table accessible", async () => {
|
|
298
|
+
const count = (raw.prepare("SELECT COUNT(*) as c FROM memory_entities").get() as { c: number }).c;
|
|
299
|
+
return { ok: true, detail: `${count} entities` };
|
|
300
|
+
}));
|
|
301
|
+
|
|
302
|
+
results.push(await runTest("Knowledge Graph", "Relations table accessible", async () => {
|
|
303
|
+
const count = (raw.prepare("SELECT COUNT(*) as c FROM memory_relations").get() as { c: number }).c;
|
|
304
|
+
return { ok: true, detail: `${count} relations` };
|
|
305
|
+
}));
|
|
306
|
+
|
|
307
|
+
results.push(await runTest("Knowledge Graph", "FTS index works", async () => {
|
|
308
|
+
try {
|
|
309
|
+
raw.prepare("SELECT COUNT(*) FROM memory_items_fts").get();
|
|
310
|
+
return { ok: true, detail: "FTS5 operational" };
|
|
311
|
+
} catch (err) {
|
|
312
|
+
return { ok: false, detail: (err as Error).message };
|
|
313
|
+
}
|
|
314
|
+
}));
|
|
315
|
+
|
|
316
|
+
// ═══════════════════════════════════════════════════════════
|
|
317
|
+
// SUITE 8: NATS / Mesh
|
|
318
|
+
// ═══════════════════════════════════════════════════════════
|
|
319
|
+
|
|
320
|
+
results.push(await runTest("Mesh", "NATS connectivity", async () => {
|
|
321
|
+
try {
|
|
322
|
+
const nc = await getNats();
|
|
323
|
+
return { ok: !!nc, detail: nc ? "connected" : "unavailable (non-fatal)" };
|
|
324
|
+
} catch {
|
|
325
|
+
return { ok: true, detail: "unavailable (non-fatal — NATS is optional)" };
|
|
326
|
+
}
|
|
327
|
+
}));
|
|
328
|
+
|
|
329
|
+
// ═══════════════════════════════════════════════════════════
|
|
330
|
+
// SUITE 9: Workspace
|
|
331
|
+
// ═══════════════════════════════════════════════════════════
|
|
332
|
+
|
|
333
|
+
results.push(await runTest("Workspace", "Root exists", async () => {
|
|
334
|
+
const exists = fs.existsSync(WORKSPACE_ROOT);
|
|
335
|
+
return { ok: exists, detail: WORKSPACE_ROOT };
|
|
336
|
+
}));
|
|
337
|
+
|
|
338
|
+
results.push(await runTest("Workspace", "Memory directory exists", async () => {
|
|
339
|
+
const memDir = WORKSPACE_ROOT + "/memory";
|
|
340
|
+
const exists = fs.existsSync(memDir);
|
|
341
|
+
return { ok: exists, detail: memDir };
|
|
342
|
+
}));
|
|
343
|
+
|
|
344
|
+
// ═══════════════════════════════════════════════════════════
|
|
345
|
+
// SUITE 10: Local Task Resolution (Daedalus auto-dispatch)
|
|
346
|
+
// ═══════════════════════════════════════════════════════════
|
|
347
|
+
|
|
348
|
+
const localTaskA = `${TEST_PREFIX}local_A_${Date.now()}`;
|
|
349
|
+
const localTaskB = `${TEST_PREFIX}local_B_${Date.now()}`;
|
|
350
|
+
|
|
351
|
+
results.push(await runTest("Local Dispatch", "Auto-dispatch task with needsApproval=0", async () => {
|
|
352
|
+
const now = new Date().toISOString();
|
|
353
|
+
// Create an auto-dispatch task (local execution, no approval needed, no trigger)
|
|
354
|
+
db.insert(tasks).values({
|
|
355
|
+
id: localTaskA,
|
|
356
|
+
title: "Test Local Auto-Dispatch",
|
|
357
|
+
status: "queued",
|
|
358
|
+
kanbanColumn: "backlog",
|
|
359
|
+
needsApproval: 0,
|
|
360
|
+
triggerKind: "none",
|
|
361
|
+
execution: "local",
|
|
362
|
+
autoPriority: 100, // high priority so it wins
|
|
363
|
+
updatedAt: now,
|
|
364
|
+
createdAt: now,
|
|
365
|
+
}).run();
|
|
366
|
+
|
|
367
|
+
const tick = schedulerTick();
|
|
368
|
+
const row = db.select().from(tasks).where(eq(tasks.id, localTaskA)).get();
|
|
369
|
+
|
|
370
|
+
// Should have been dispatched (or skipped if Daedalus already has a running task)
|
|
371
|
+
const dispatched = tick.dispatched.includes(localTaskA);
|
|
372
|
+
const skipped = tick.skipped.includes(localTaskA);
|
|
373
|
+
const ok = dispatched || skipped; // both are valid outcomes
|
|
374
|
+
const detail = dispatched
|
|
375
|
+
? `Dispatched to Daedalus, status=${row?.status}, owner=${row?.owner}`
|
|
376
|
+
: `Skipped (Daedalus busy), status=${row?.status}`;
|
|
377
|
+
|
|
378
|
+
return { ok, detail };
|
|
379
|
+
}));
|
|
380
|
+
|
|
381
|
+
results.push(await runTest("Local Dispatch", "Manual approval task stays in backlog", async () => {
|
|
382
|
+
const now = new Date().toISOString();
|
|
383
|
+
db.insert(tasks).values({
|
|
384
|
+
id: localTaskB,
|
|
385
|
+
title: "Test Manual Approval Required",
|
|
386
|
+
status: "queued",
|
|
387
|
+
kanbanColumn: "backlog",
|
|
388
|
+
needsApproval: 1,
|
|
389
|
+
triggerKind: "none",
|
|
390
|
+
execution: "local",
|
|
391
|
+
updatedAt: now,
|
|
392
|
+
createdAt: now,
|
|
393
|
+
}).run();
|
|
394
|
+
|
|
395
|
+
schedulerTick();
|
|
396
|
+
const row = db.select().from(tasks).where(eq(tasks.id, localTaskB)).get();
|
|
397
|
+
|
|
398
|
+
// Should NOT be dispatched — needs_approval=1
|
|
399
|
+
const ok = !!row && row.status === "queued" && row.kanbanColumn === "backlog";
|
|
400
|
+
return { ok, detail: `status=${row?.status}, kanban=${row?.kanbanColumn}` };
|
|
401
|
+
}));
|
|
402
|
+
|
|
403
|
+
// Cleanup local dispatch tests
|
|
404
|
+
db.delete(tasks).where(eq(tasks.id, localTaskA)).run();
|
|
405
|
+
db.delete(tasks).where(eq(tasks.id, localTaskB)).run();
|
|
406
|
+
|
|
407
|
+
// ═══════════════════════════════════════════════════════════
|
|
408
|
+
// SUITE 11: Mesh Task Resolution (single-node mesh dispatch)
|
|
409
|
+
// ═══════════════════════════════════════════════════════════
|
|
410
|
+
|
|
411
|
+
const meshTaskId = `${TEST_PREFIX}mesh_${Date.now()}`;
|
|
412
|
+
|
|
413
|
+
results.push(await runTest("Mesh Dispatch", "Create mesh task with execution fields", async () => {
|
|
414
|
+
const now = new Date().toISOString();
|
|
415
|
+
db.insert(tasks).values({
|
|
416
|
+
id: meshTaskId,
|
|
417
|
+
title: "Test Mesh Task",
|
|
418
|
+
status: "queued",
|
|
419
|
+
kanbanColumn: "backlog",
|
|
420
|
+
needsApproval: 1, // Gui must approve completion
|
|
421
|
+
execution: "mesh",
|
|
422
|
+
metric: "tests pass",
|
|
423
|
+
budgetMinutes: 15,
|
|
424
|
+
scope: JSON.stringify(["src/lib/"]),
|
|
425
|
+
updatedAt: now,
|
|
426
|
+
createdAt: now,
|
|
427
|
+
}).run();
|
|
428
|
+
|
|
429
|
+
const row = db.select().from(tasks).where(eq(tasks.id, meshTaskId)).get();
|
|
430
|
+
const ok = !!row &&
|
|
431
|
+
row.execution === "mesh" &&
|
|
432
|
+
row.metric === "tests pass" &&
|
|
433
|
+
row.budgetMinutes === 15;
|
|
434
|
+
return { ok, detail: `exec=${row?.execution}, metric=${row?.metric}, budget=${row?.budgetMinutes}` };
|
|
435
|
+
}));
|
|
436
|
+
|
|
437
|
+
results.push(await runTest("Mesh Dispatch", "Scheduler skips mesh tasks (bridge handles them)", async () => {
|
|
438
|
+
// The scheduler explicitly filters out execution="mesh" tasks
|
|
439
|
+
const tick = schedulerTick();
|
|
440
|
+
const dispatched = tick.dispatched.includes(meshTaskId);
|
|
441
|
+
// Mesh tasks should NOT be in dispatched — the bridge picks them up
|
|
442
|
+
return { ok: !dispatched, detail: dispatched ? "ERROR: scheduler dispatched mesh task" : "Correctly skipped by scheduler" };
|
|
443
|
+
}));
|
|
444
|
+
|
|
445
|
+
results.push(await runTest("Mesh Dispatch", "Mesh task syncs to markdown with execution fields", async () => {
|
|
446
|
+
syncTasksToMarkdown(db);
|
|
447
|
+
if (!fs.existsSync(ACTIVE_TASKS_MD)) return { ok: false, detail: "active-tasks.md missing" };
|
|
448
|
+
const content = fs.readFileSync(ACTIVE_TASKS_MD, "utf-8");
|
|
449
|
+
const hasMeshId = content.includes(meshTaskId);
|
|
450
|
+
const hasExecution = content.includes("execution: mesh");
|
|
451
|
+
const hasMetric = content.includes("metric: tests pass");
|
|
452
|
+
const ok = hasMeshId && hasExecution && hasMetric;
|
|
453
|
+
return { ok, detail: `id=${hasMeshId}, exec=${hasExecution}, metric=${hasMetric}` };
|
|
454
|
+
}));
|
|
455
|
+
|
|
456
|
+
results.push(await runTest("Mesh Dispatch", "Mesh task status lifecycle: queued -> submitted -> running -> done-gate", async () => {
|
|
457
|
+
// Simulate bridge claiming the task
|
|
458
|
+
db.update(tasks).set({
|
|
459
|
+
status: "submitted",
|
|
460
|
+
kanbanColumn: statusToKanban("submitted"),
|
|
461
|
+
meshTaskId: "NATS-TEST-001",
|
|
462
|
+
updatedAt: new Date().toISOString(),
|
|
463
|
+
}).where(eq(tasks.id, meshTaskId)).run();
|
|
464
|
+
|
|
465
|
+
let row = db.select().from(tasks).where(eq(tasks.id, meshTaskId)).get();
|
|
466
|
+
const submittedOk = row?.status === "submitted" && row?.kanbanColumn === "in_progress";
|
|
467
|
+
|
|
468
|
+
// Simulate agent claiming
|
|
469
|
+
db.update(tasks).set({
|
|
470
|
+
status: "running",
|
|
471
|
+
kanbanColumn: statusToKanban("running"),
|
|
472
|
+
meshNode: "test-node",
|
|
473
|
+
updatedAt: new Date().toISOString(),
|
|
474
|
+
}).where(eq(tasks.id, meshTaskId)).run();
|
|
475
|
+
|
|
476
|
+
row = db.select().from(tasks).where(eq(tasks.id, meshTaskId)).get();
|
|
477
|
+
const runningOk = row?.status === "running" && row?.meshNode === "test-node";
|
|
478
|
+
|
|
479
|
+
// Simulate completion — should be caught by done-gate (redirect to waiting-user)
|
|
480
|
+
// (This simulates what sync/tasks.ts does with needsApproval=1, default)
|
|
481
|
+
const targetStatus = "done";
|
|
482
|
+
const effectiveStatus = row?.needsApproval === 1 ? "waiting-user" : "done";
|
|
483
|
+
db.update(tasks).set({
|
|
484
|
+
status: effectiveStatus,
|
|
485
|
+
kanbanColumn: statusToKanban(effectiveStatus),
|
|
486
|
+
updatedAt: new Date().toISOString(),
|
|
487
|
+
}).where(eq(tasks.id, meshTaskId)).run();
|
|
488
|
+
|
|
489
|
+
row = db.select().from(tasks).where(eq(tasks.id, meshTaskId)).get();
|
|
490
|
+
// Default needsApproval=1, so should land in review
|
|
491
|
+
const doneGateOk = row?.status === "waiting-user" && row?.kanbanColumn === "review";
|
|
492
|
+
|
|
493
|
+
const ok = submittedOk && runningOk && doneGateOk;
|
|
494
|
+
return {
|
|
495
|
+
ok,
|
|
496
|
+
detail: `submitted=${submittedOk}, running=${runningOk}, doneGate=${doneGateOk} (status=${row?.status})`
|
|
497
|
+
};
|
|
498
|
+
}));
|
|
499
|
+
|
|
500
|
+
// Cleanup mesh test
|
|
501
|
+
db.delete(tasks).where(eq(tasks.id, meshTaskId)).run();
|
|
502
|
+
|
|
503
|
+
// ═══════════════════════════════════════════════════════════
// SUITE 12: Collab Task Resolution (multi-node collaboration)
// ═══════════════════════════════════════════════════════════

// Per-run unique ID so concurrent test runs don't collide; TEST_PREFIX
// scoping lets the final cleanup sweep remove any leftovers.
const collabTaskId = `${TEST_PREFIX}collab_${Date.now()}`;
|
|
508
|
+
|
|
509
|
+
results.push(await runTest("Collab Dispatch", "Create collab task with collaboration spec", async () => {
|
|
510
|
+
const now = new Date().toISOString();
|
|
511
|
+
const collabSpec = {
|
|
512
|
+
mode: "parallel",
|
|
513
|
+
min_nodes: 2,
|
|
514
|
+
max_nodes: 3,
|
|
515
|
+
join_window_s: 30,
|
|
516
|
+
max_rounds: 5,
|
|
517
|
+
convergence: { type: "unanimous", threshold: 1.0, metric: null, min_quorum: 2 },
|
|
518
|
+
scope_strategy: "shared",
|
|
519
|
+
};
|
|
520
|
+
|
|
521
|
+
db.insert(tasks).values({
|
|
522
|
+
id: collabTaskId,
|
|
523
|
+
title: "Test Collab Task",
|
|
524
|
+
status: "queued",
|
|
525
|
+
kanbanColumn: "backlog",
|
|
526
|
+
needsApproval: 0,
|
|
527
|
+
execution: "mesh",
|
|
528
|
+
collaboration: JSON.stringify(collabSpec),
|
|
529
|
+
preferredNodes: JSON.stringify(["node-alpha", "node-beta", "node-gamma"]),
|
|
530
|
+
metric: "consensus reached",
|
|
531
|
+
budgetMinutes: 45,
|
|
532
|
+
updatedAt: now,
|
|
533
|
+
createdAt: now,
|
|
534
|
+
}).run();
|
|
535
|
+
|
|
536
|
+
const row = db.select().from(tasks).where(eq(tasks.id, collabTaskId)).get();
|
|
537
|
+
const collab = row?.collaboration ? JSON.parse(row.collaboration as string) : null;
|
|
538
|
+
const nodes = row?.preferredNodes ? JSON.parse(row.preferredNodes as string) : [];
|
|
539
|
+
|
|
540
|
+
const ok = !!row &&
|
|
541
|
+
row.execution === "mesh" &&
|
|
542
|
+
collab?.mode === "parallel" &&
|
|
543
|
+
collab?.max_rounds === 5 &&
|
|
544
|
+
collab?.convergence?.type === "unanimous" &&
|
|
545
|
+
nodes.length === 3;
|
|
546
|
+
return {
|
|
547
|
+
ok,
|
|
548
|
+
detail: `mode=${collab?.mode}, rounds=${collab?.max_rounds}, convergence=${collab?.convergence?.type}, nodes=${nodes.length}`
|
|
549
|
+
};
|
|
550
|
+
}));
|
|
551
|
+
|
|
552
|
+
results.push(await runTest("Collab Dispatch", "Collab spec survives markdown round-trip", async () => {
|
|
553
|
+
syncTasksToMarkdown(db);
|
|
554
|
+
if (!fs.existsSync(ACTIVE_TASKS_MD)) return { ok: false, detail: "active-tasks.md missing" };
|
|
555
|
+
|
|
556
|
+
const content = fs.readFileSync(ACTIVE_TASKS_MD, "utf-8");
|
|
557
|
+
const parsed = parseTasksMarkdown(content);
|
|
558
|
+
const task = parsed.find((t) => t.id === collabTaskId);
|
|
559
|
+
if (!task) return { ok: false, detail: "Task not found in markdown" };
|
|
560
|
+
|
|
561
|
+
const collab = task.collaboration;
|
|
562
|
+
const ok = !!collab &&
|
|
563
|
+
(collab as Record<string, unknown>).mode === "parallel" &&
|
|
564
|
+
task.preferredNodes.length === 3 &&
|
|
565
|
+
task.preferredNodes.includes("node-alpha");
|
|
566
|
+
return {
|
|
567
|
+
ok,
|
|
568
|
+
detail: `mode=${(collab as Record<string, unknown>)?.mode}, nodes=${task.preferredNodes.join(",")}`
|
|
569
|
+
};
|
|
570
|
+
}));
|
|
571
|
+
|
|
572
|
+
results.push(await runTest("Collab Dispatch", "Collab convergence modes: unanimous/majority/coordinator", async () => {
|
|
573
|
+
// Test that all convergence types can be stored and retrieved
|
|
574
|
+
const modes = ["unanimous", "majority", "coordinator"];
|
|
575
|
+
const results: string[] = [];
|
|
576
|
+
for (const ctype of modes) {
|
|
577
|
+
const spec = { mode: "parallel", convergence: { type: ctype, threshold: 0.66 } };
|
|
578
|
+
db.update(tasks).set({
|
|
579
|
+
collaboration: JSON.stringify(spec),
|
|
580
|
+
updatedAt: new Date().toISOString(),
|
|
581
|
+
}).where(eq(tasks.id, collabTaskId)).run();
|
|
582
|
+
|
|
583
|
+
const row = db.select().from(tasks).where(eq(tasks.id, collabTaskId)).get();
|
|
584
|
+
const stored = JSON.parse(row?.collaboration as string || "{}");
|
|
585
|
+
if (stored.convergence?.type === ctype) {
|
|
586
|
+
results.push(`${ctype}:OK`);
|
|
587
|
+
} else {
|
|
588
|
+
results.push(`${ctype}:FAIL(${stored.convergence?.type})`);
|
|
589
|
+
}
|
|
590
|
+
}
|
|
591
|
+
const ok = results.every((r) => r.endsWith(":OK"));
|
|
592
|
+
return { ok, detail: results.join(", ") };
|
|
593
|
+
}));
|
|
594
|
+
|
|
595
|
+
results.push(await runTest("Collab Dispatch", "Scope strategies: shared/leader_only/partitioned", async () => {
|
|
596
|
+
const strategies = ["shared", "leader_only", "partitioned"];
|
|
597
|
+
const results: string[] = [];
|
|
598
|
+
for (const strategy of strategies) {
|
|
599
|
+
const spec = { mode: "parallel", scope_strategy: strategy };
|
|
600
|
+
db.update(tasks).set({
|
|
601
|
+
collaboration: JSON.stringify(spec),
|
|
602
|
+
updatedAt: new Date().toISOString(),
|
|
603
|
+
}).where(eq(tasks.id, collabTaskId)).run();
|
|
604
|
+
|
|
605
|
+
const row = db.select().from(tasks).where(eq(tasks.id, collabTaskId)).get();
|
|
606
|
+
const stored = JSON.parse(row?.collaboration as string || "{}");
|
|
607
|
+
if (stored.scope_strategy === strategy) {
|
|
608
|
+
results.push(`${strategy}:OK`);
|
|
609
|
+
} else {
|
|
610
|
+
results.push(`${strategy}:FAIL`);
|
|
611
|
+
}
|
|
612
|
+
}
|
|
613
|
+
const ok = results.every((r) => r.endsWith(":OK"));
|
|
614
|
+
return { ok, detail: results.join(", ") };
|
|
615
|
+
}));
|
|
616
|
+
|
|
617
|
+
// Cleanup collab test
db.delete(tasks).where(eq(tasks.id, collabTaskId)).run();

// ═══════════════════════════════════════════════════════════
// SUITE 13: Cluster-Based Dispatch
// ═══════════════════════════════════════════════════════════

// Per-run unique IDs for the cluster fixture and the task dispatched via it.
const clusterTestId = `${TEST_PREFIX}clust_${Date.now()}`;
const clusterTaskId = `${TEST_PREFIX}clust_task_${Date.now()}`;
|
|
626
|
+
|
|
627
|
+
results.push(await runTest("Cluster Dispatch", "Create cluster with multiple nodes and roles", async () => {
|
|
628
|
+
const now = new Date().toISOString();
|
|
629
|
+
db.insert(clusters).values({
|
|
630
|
+
id: clusterTestId,
|
|
631
|
+
name: "Test Security Team",
|
|
632
|
+
description: "Integration test cluster for security audits",
|
|
633
|
+
color: "#6366f1",
|
|
634
|
+
defaultMode: "review",
|
|
635
|
+
defaultConvergence: "majority",
|
|
636
|
+
convergenceThreshold: 66,
|
|
637
|
+
maxRounds: 3,
|
|
638
|
+
status: "active",
|
|
639
|
+
updatedAt: now,
|
|
640
|
+
createdAt: now,
|
|
641
|
+
}).run();
|
|
642
|
+
|
|
643
|
+
// Add members with different roles
|
|
644
|
+
const members = [
|
|
645
|
+
{ nodeId: "node-lead", role: "lead" },
|
|
646
|
+
{ nodeId: "node-impl-1", role: "implementer" },
|
|
647
|
+
{ nodeId: "node-impl-2", role: "implementer" },
|
|
648
|
+
{ nodeId: "node-reviewer", role: "reviewer" },
|
|
649
|
+
];
|
|
650
|
+
for (const m of members) {
|
|
651
|
+
db.insert(clusterMembers).values({
|
|
652
|
+
clusterId: clusterTestId,
|
|
653
|
+
nodeId: m.nodeId,
|
|
654
|
+
role: m.role,
|
|
655
|
+
}).run();
|
|
656
|
+
}
|
|
657
|
+
|
|
658
|
+
const cluster = db.select().from(clusters).where(eq(clusters.id, clusterTestId)).get();
|
|
659
|
+
const mems = db.select().from(clusterMembers).where(eq(clusterMembers.clusterId, clusterTestId)).all();
|
|
660
|
+
|
|
661
|
+
const ok = !!cluster &&
|
|
662
|
+
cluster.defaultMode === "review" &&
|
|
663
|
+
cluster.defaultConvergence === "majority" &&
|
|
664
|
+
mems.length === 4;
|
|
665
|
+
return { ok, detail: `mode=${cluster?.defaultMode}, convergence=${cluster?.defaultConvergence}, members=${mems.length}` };
|
|
666
|
+
}));
|
|
667
|
+
|
|
668
|
+
results.push(await runTest("Cluster Dispatch", "Dispatch task via cluster resolves all member nodes", async () => {
|
|
669
|
+
// Simulate what /api/cowork/dispatch does: resolve nodes from cluster
|
|
670
|
+
const members = db.select().from(clusterMembers).where(eq(clusterMembers.clusterId, clusterTestId)).all();
|
|
671
|
+
const nodeIds = members.map((m) => m.nodeId);
|
|
672
|
+
|
|
673
|
+
const collabSpec = {
|
|
674
|
+
mode: "review",
|
|
675
|
+
min_nodes: 2,
|
|
676
|
+
max_nodes: nodeIds.length,
|
|
677
|
+
convergence: { type: "majority", threshold: 0.66 },
|
|
678
|
+
scope_strategy: "shared",
|
|
679
|
+
};
|
|
680
|
+
|
|
681
|
+
const now = new Date().toISOString();
|
|
682
|
+
db.insert(tasks).values({
|
|
683
|
+
id: clusterTaskId,
|
|
684
|
+
title: "Cluster-Dispatched Security Audit",
|
|
685
|
+
status: "queued",
|
|
686
|
+
kanbanColumn: "backlog",
|
|
687
|
+
execution: "mesh",
|
|
688
|
+
needsApproval: 0,
|
|
689
|
+
collaboration: JSON.stringify(collabSpec),
|
|
690
|
+
preferredNodes: JSON.stringify(nodeIds),
|
|
691
|
+
clusterId: clusterTestId,
|
|
692
|
+
budgetMinutes: 30,
|
|
693
|
+
updatedAt: now,
|
|
694
|
+
createdAt: now,
|
|
695
|
+
}).run();
|
|
696
|
+
|
|
697
|
+
const row = db.select().from(tasks).where(eq(tasks.id, clusterTaskId)).get();
|
|
698
|
+
const storedNodes = JSON.parse(row?.preferredNodes as string || "[]");
|
|
699
|
+
|
|
700
|
+
const ok = !!row &&
|
|
701
|
+
row.clusterId === clusterTestId &&
|
|
702
|
+
storedNodes.length === 4 &&
|
|
703
|
+
storedNodes.includes("node-lead") &&
|
|
704
|
+
storedNodes.includes("node-reviewer");
|
|
705
|
+
return { ok, detail: `clusterId=${row?.clusterId}, nodes=${storedNodes.join(",")}` };
|
|
706
|
+
}));
|
|
707
|
+
|
|
708
|
+
results.push(await runTest("Cluster Dispatch", "Role distribution preserved in cluster members", async () => {
|
|
709
|
+
const mems = db.select().from(clusterMembers).where(eq(clusterMembers.clusterId, clusterTestId)).all();
|
|
710
|
+
const roles = new Map<string, string>();
|
|
711
|
+
for (const m of mems) roles.set(m.nodeId, m.role);
|
|
712
|
+
|
|
713
|
+
const ok = roles.get("node-lead") === "lead" &&
|
|
714
|
+
roles.get("node-impl-1") === "implementer" &&
|
|
715
|
+
roles.get("node-impl-2") === "implementer" &&
|
|
716
|
+
roles.get("node-reviewer") === "reviewer";
|
|
717
|
+
return { ok, detail: Array.from(roles.entries()).map(([n, r]) => `${n}:${r}`).join(", ") };
|
|
718
|
+
}));
|
|
719
|
+
|
|
720
|
+
results.push(await runTest("Cluster Dispatch", "Cluster task inherits cluster defaults", async () => {
|
|
721
|
+
const cluster = db.select().from(clusters).where(eq(clusters.id, clusterTestId)).get();
|
|
722
|
+
const row = db.select().from(tasks).where(eq(tasks.id, clusterTaskId)).get();
|
|
723
|
+
const collab = JSON.parse(row?.collaboration as string || "{}");
|
|
724
|
+
|
|
725
|
+
// Task's collab mode should match what was set (which should match cluster default)
|
|
726
|
+
const ok = collab.mode === cluster?.defaultMode;
|
|
727
|
+
return { ok, detail: `task.mode=${collab.mode}, cluster.defaultMode=${cluster?.defaultMode}` };
|
|
728
|
+
}));
|
|
729
|
+
|
|
730
|
+
// Cleanup cluster tests: the task first, then membership rows, then the cluster.
db.delete(tasks).where(eq(tasks.id, clusterTaskId)).run();
db.delete(clusterMembers).where(eq(clusterMembers.clusterId, clusterTestId)).run();
db.delete(clusters).where(eq(clusters.id, clusterTestId)).run();

// ═══════════════════════════════════════════════════════════
// SUITE 14: Dependency-Driven Dispatch (DAG wave computation)
// ═══════════════════════════════════════════════════════════

// Three per-run unique task IDs used as DAG nodes in the wave tests below.
const depA = `${TEST_PREFIX}dep_A_${Date.now()}`;
const depB = `${TEST_PREFIX}dep_B_${Date.now()}`;
const depC = `${TEST_PREFIX}dep_C_${Date.now()}`;
|
|
742
|
+
|
|
743
|
+
results.push(await runTest("DAG Dispatch", "Linear dependency chain: A -> B -> C", async () => {
|
|
744
|
+
const now = new Date().toISOString();
|
|
745
|
+
// A has no deps, B depends on A, C depends on B
|
|
746
|
+
for (const [id, title] of [[depA, "Task A"], [depB, "Task B"], [depC, "Task C"]] as const) {
|
|
747
|
+
db.insert(tasks).values({
|
|
748
|
+
id,
|
|
749
|
+
title,
|
|
750
|
+
status: "queued",
|
|
751
|
+
kanbanColumn: "backlog",
|
|
752
|
+
needsApproval: 0,
|
|
753
|
+
triggerKind: "none",
|
|
754
|
+
execution: "local",
|
|
755
|
+
updatedAt: now,
|
|
756
|
+
createdAt: now,
|
|
757
|
+
}).run();
|
|
758
|
+
}
|
|
759
|
+
|
|
760
|
+
// B depends on A
|
|
761
|
+
db.insert(dependencies).values({ sourceId: depA, targetId: depB, type: "finish_to_start" }).run();
|
|
762
|
+
// C depends on B
|
|
763
|
+
db.insert(dependencies).values({ sourceId: depB, targetId: depC, type: "finish_to_start" }).run();
|
|
764
|
+
|
|
765
|
+
// Compute waves
|
|
766
|
+
const depTargetMap = new Map<string, string[]>();
|
|
767
|
+
depTargetMap.set(depB, [depA]);
|
|
768
|
+
depTargetMap.set(depC, [depB]);
|
|
769
|
+
|
|
770
|
+
const statusMap = new Map<string, string>();
|
|
771
|
+
statusMap.set(depA, "queued");
|
|
772
|
+
statusMap.set(depB, "queued");
|
|
773
|
+
statusMap.set(depC, "queued");
|
|
774
|
+
|
|
775
|
+
const waves = computeWaves([depA, depB, depC], depTargetMap, statusMap);
|
|
776
|
+
|
|
777
|
+
const ok = waves.length === 3 &&
|
|
778
|
+
waves[0].taskIds.includes(depA) &&
|
|
779
|
+
waves[1].taskIds.includes(depB) &&
|
|
780
|
+
waves[2].taskIds.includes(depC);
|
|
781
|
+
return {
|
|
782
|
+
ok,
|
|
783
|
+
detail: waves.map((w) => `W${w.index}:[${w.taskIds.join(",")}]`).join(" → ")
|
|
784
|
+
};
|
|
785
|
+
}));
|
|
786
|
+
|
|
787
|
+
results.push(await runTest("DAG Dispatch", "Parallel tasks in same wave when no inter-dependency", async () => {
|
|
788
|
+
// A and B both have no deps — should be in wave 0
|
|
789
|
+
// C depends on both A and B — should be in wave 1
|
|
790
|
+
const depTargetMap = new Map<string, string[]>();
|
|
791
|
+
depTargetMap.set(depC, [depA, depB]); // C depends on A and B
|
|
792
|
+
|
|
793
|
+
const statusMap = new Map<string, string>();
|
|
794
|
+
statusMap.set(depA, "queued");
|
|
795
|
+
statusMap.set(depB, "queued");
|
|
796
|
+
statusMap.set(depC, "queued");
|
|
797
|
+
|
|
798
|
+
const waves = computeWaves([depA, depB, depC], depTargetMap, statusMap);
|
|
799
|
+
|
|
800
|
+
const wave0 = waves[0]?.taskIds || [];
|
|
801
|
+
const wave1 = waves[1]?.taskIds || [];
|
|
802
|
+
|
|
803
|
+
const ok = waves.length === 2 &&
|
|
804
|
+
wave0.includes(depA) && wave0.includes(depB) &&
|
|
805
|
+
wave1.includes(depC);
|
|
806
|
+
return {
|
|
807
|
+
ok,
|
|
808
|
+
detail: waves.map((w) => `W${w.index}:[${w.taskIds.join(",")}]`).join(" → ")
|
|
809
|
+
};
|
|
810
|
+
}));
|
|
811
|
+
|
|
812
|
+
results.push(await runTest("DAG Dispatch", "Completed predecessor unblocks dependent", async () => {
|
|
813
|
+
// Mark A as done — B's predecessor is now done, so B should be in wave 0
|
|
814
|
+
const depTargetMap = new Map<string, string[]>();
|
|
815
|
+
depTargetMap.set(depB, [depA]);
|
|
816
|
+
depTargetMap.set(depC, [depB]);
|
|
817
|
+
|
|
818
|
+
const statusMap = new Map<string, string>();
|
|
819
|
+
statusMap.set(depA, "done"); // A is done
|
|
820
|
+
statusMap.set(depB, "queued");
|
|
821
|
+
statusMap.set(depC, "queued");
|
|
822
|
+
|
|
823
|
+
const waves = computeWaves([depA, depB, depC], depTargetMap, statusMap);
|
|
824
|
+
|
|
825
|
+
// A is done so B has in-degree 0 (wave 0), C depends on B (wave 1)
|
|
826
|
+
// A itself also appears in wave 0 (it's in the set)
|
|
827
|
+
const wave0 = waves[0]?.taskIds || [];
|
|
828
|
+
const ok = wave0.includes(depB); // B should be unblocked
|
|
829
|
+
return {
|
|
830
|
+
ok,
|
|
831
|
+
detail: waves.map((w) => `W${w.index}:[${w.taskIds.join(",")}]`).join(" → ")
|
|
832
|
+
};
|
|
833
|
+
}));
|
|
834
|
+
|
|
835
|
+
// Cleanup DAG tests: drop both dependency edges, then the three task rows.
db.delete(dependencies).where(or(
  eq(dependencies.sourceId, depA),
  eq(dependencies.sourceId, depB),
  eq(dependencies.targetId, depB),
  eq(dependencies.targetId, depC),
)).run();
db.delete(tasks).where(eq(tasks.id, depA)).run();
db.delete(tasks).where(eq(tasks.id, depB)).run();
db.delete(tasks).where(eq(tasks.id, depC)).run();

// ═══════════════════════════════════════════════════════════
// SUITE 15: Trigger-Based Dispatch (cron and at-once)
// ═══════════════════════════════════════════════════════════

// Per-run unique IDs for the trigger/recurring fixtures below.
// NOTE(review): triggerCronId is only used in the suite's cleanup delete and no
// row is ever inserted with it here — presumably a placeholder for a cron-trigger
// test; confirm whether that test was intended.
const triggerAtId = `${TEST_PREFIX}trig_at_${Date.now()}`;
const triggerCronId = `${TEST_PREFIX}trig_cron_${Date.now()}`;
const recurringId = `${TEST_PREFIX}recur_${Date.now()}`;
|
|
853
|
+
|
|
854
|
+
results.push(await runTest("Trigger Dispatch", "At-trigger fires when time has passed", async () => {
|
|
855
|
+
const now = new Date();
|
|
856
|
+
const pastTime = new Date(now.getTime() - 60000).toISOString(); // 1 minute ago
|
|
857
|
+
db.insert(tasks).values({
|
|
858
|
+
id: triggerAtId,
|
|
859
|
+
title: "Test At-Trigger",
|
|
860
|
+
status: "queued",
|
|
861
|
+
kanbanColumn: "backlog",
|
|
862
|
+
needsApproval: 0,
|
|
863
|
+
triggerKind: "at",
|
|
864
|
+
triggerAt: pastTime,
|
|
865
|
+
execution: "local",
|
|
866
|
+
updatedAt: now.toISOString(),
|
|
867
|
+
createdAt: now.toISOString(),
|
|
868
|
+
}).run();
|
|
869
|
+
|
|
870
|
+
const tick = schedulerTick();
|
|
871
|
+
const row = db.select().from(tasks).where(eq(tasks.id, triggerAtId)).get();
|
|
872
|
+
|
|
873
|
+
const triggered = tick.triggered.includes(triggerAtId);
|
|
874
|
+
// After trigger fires, scheduler may also dispatch it (ready -> running). Both are valid.
|
|
875
|
+
const statusOk = row?.status === "ready" || row?.status === "running";
|
|
876
|
+
const ok = triggered && statusOk;
|
|
877
|
+
return { ok, detail: `triggered=${triggered}, status=${row?.status}` };
|
|
878
|
+
}));
|
|
879
|
+
|
|
880
|
+
results.push(await runTest("Trigger Dispatch", "Future at-trigger does NOT fire", async () => {
|
|
881
|
+
const futureTime = new Date(Date.now() + 3600000).toISOString(); // 1 hour from now
|
|
882
|
+
const futureId = `${TEST_PREFIX}trig_future_${Date.now()}`;
|
|
883
|
+
db.insert(tasks).values({
|
|
884
|
+
id: futureId,
|
|
885
|
+
title: "Test Future Trigger",
|
|
886
|
+
status: "queued",
|
|
887
|
+
kanbanColumn: "backlog",
|
|
888
|
+
needsApproval: 0,
|
|
889
|
+
triggerKind: "at",
|
|
890
|
+
triggerAt: futureTime,
|
|
891
|
+
execution: "local",
|
|
892
|
+
updatedAt: new Date().toISOString(),
|
|
893
|
+
createdAt: new Date().toISOString(),
|
|
894
|
+
}).run();
|
|
895
|
+
|
|
896
|
+
const tick = schedulerTick();
|
|
897
|
+
const row = db.select().from(tasks).where(eq(tasks.id, futureId)).get();
|
|
898
|
+
|
|
899
|
+
const notTriggered = !tick.triggered.includes(futureId);
|
|
900
|
+
const ok = notTriggered && row?.status === "queued";
|
|
901
|
+
db.delete(tasks).where(eq(tasks.id, futureId)).run();
|
|
902
|
+
return { ok, detail: `triggered=${!notTriggered}, status=${row?.status}` };
|
|
903
|
+
}));
|
|
904
|
+
|
|
905
|
+
results.push(await runTest("Trigger Dispatch", "Recurring task recreates after done", async () => {
|
|
906
|
+
const now = new Date().toISOString();
|
|
907
|
+
db.insert(tasks).values({
|
|
908
|
+
id: recurringId,
|
|
909
|
+
title: "Test Recurring Task",
|
|
910
|
+
status: "done",
|
|
911
|
+
kanbanColumn: "done",
|
|
912
|
+
needsApproval: 0,
|
|
913
|
+
triggerKind: "none",
|
|
914
|
+
isRecurring: 1,
|
|
915
|
+
execution: "local",
|
|
916
|
+
updatedAt: now,
|
|
917
|
+
createdAt: now,
|
|
918
|
+
}).run();
|
|
919
|
+
|
|
920
|
+
const tick = schedulerTick();
|
|
921
|
+
|
|
922
|
+
// Should have created a new recurring clone
|
|
923
|
+
const ok = tick.recurring.length > 0;
|
|
924
|
+
const detail = ok
|
|
925
|
+
? `Recreated: ${tick.recurring.join(", ")}`
|
|
926
|
+
: "No recurring tasks created";
|
|
927
|
+
|
|
928
|
+
// Original should no longer be recurring
|
|
929
|
+
const original = db.select().from(tasks).where(eq(tasks.id, recurringId)).get();
|
|
930
|
+
const originalOk = original?.isRecurring === 0;
|
|
931
|
+
|
|
932
|
+
// Cleanup: delete original and clones
|
|
933
|
+
db.delete(tasks).where(eq(tasks.id, recurringId)).run();
|
|
934
|
+
for (const cloneId of tick.recurring) {
|
|
935
|
+
db.delete(tasks).where(eq(tasks.id, cloneId)).run();
|
|
936
|
+
}
|
|
937
|
+
|
|
938
|
+
return { ok: ok && originalOk, detail: `${detail}, originalRecurring=${original?.isRecurring}` };
|
|
939
|
+
}));
|
|
940
|
+
|
|
941
|
+
// Cleanup trigger tests
db.delete(tasks).where(eq(tasks.id, triggerAtId)).run();
// NOTE(review): no row with triggerCronId is inserted above, so this delete is
// a no-op in the current suite — confirm whether a cron-trigger test is missing.
db.delete(tasks).where(eq(tasks.id, triggerCronId)).run();

// ═══════════════════════════════════════════════════════════
// SUITE 16: DB Schema Integrity
// ═══════════════════════════════════════════════════════════

// Tables the schema layer is expected to have created; each is probed below.
const expectedTables = [
  "tasks", "dependencies", "memory_docs", "memory_items", "memory_audit",
  "memory_entities", "memory_relations", "memory_entity_items",
  "activity_log", "soul_handoffs", "soul_evolution_log", "soul_spawns",
  "clusters", "cluster_members", "token_usage",
];
|
|
955
|
+
|
|
956
|
+
for (const table of expectedTables) {
|
|
957
|
+
results.push(await runTest("Schema", `Table '${table}' exists`, async () => {
|
|
958
|
+
try {
|
|
959
|
+
raw.prepare(`SELECT COUNT(*) FROM ${table}`).get();
|
|
960
|
+
return { ok: true };
|
|
961
|
+
} catch (err) {
|
|
962
|
+
return { ok: false, detail: (err as Error).message };
|
|
963
|
+
}
|
|
964
|
+
}));
|
|
965
|
+
}
|
|
966
|
+
|
|
967
|
+
// ═══════════════════════════════════════════════════════════
|
|
968
|
+
// CLEANUP: Remove any leftover test data
|
|
969
|
+
// ═══════════════════════════════════════════════════════════
|
|
970
|
+
|
|
971
|
+
raw.prepare(`DELETE FROM dependencies WHERE source_id LIKE '${TEST_PREFIX}%' OR target_id LIKE '${TEST_PREFIX}%'`).run();
|
|
972
|
+
raw.prepare(`DELETE FROM tasks WHERE id LIKE '${TEST_PREFIX}%'`).run();
|
|
973
|
+
raw.prepare(`DELETE FROM cluster_members WHERE cluster_id LIKE '${TEST_PREFIX}%'`).run();
|
|
974
|
+
raw.prepare(`DELETE FROM clusters WHERE id LIKE '${TEST_PREFIX}%'`).run();
|
|
975
|
+
|
|
976
|
+
// ═══════════════════════════════════════════════════════════
|
|
977
|
+
// Summary
|
|
978
|
+
// ═══════════════════════════════════════════════════════════
|
|
979
|
+
|
|
980
|
+
const passed = results.filter((r) => r.status === "pass").length;
|
|
981
|
+
const failed = results.filter((r) => r.status === "fail").length;
|
|
982
|
+
const skipped = results.filter((r) => r.status === "skip").length;
|
|
983
|
+
const totalMs = results.reduce((s, r) => s + r.durationMs, 0);
|
|
984
|
+
|
|
985
|
+
return NextResponse.json({
|
|
986
|
+
summary: { total: results.length, passed, failed, skipped, durationMs: totalMs },
|
|
987
|
+
results,
|
|
988
|
+
timestamp: new Date().toISOString(),
|
|
989
|
+
});
|
|
990
|
+
}
|