@codexa/cli 9.0.2 → 9.0.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/commands/architect.test.ts +531 -0
- package/commands/architect.ts +75 -17
- package/commands/check.ts +7 -17
- package/commands/clear.ts +40 -1
- package/commands/decide.ts +37 -49
- package/commands/discover.ts +136 -28
- package/commands/knowledge.test.ts +160 -0
- package/commands/knowledge.ts +192 -102
- package/commands/patterns.test.ts +169 -0
- package/commands/patterns.ts +6 -13
- package/commands/plan.test.ts +73 -0
- package/commands/plan.ts +18 -66
- package/commands/product.ts +8 -17
- package/commands/research.ts +4 -3
- package/commands/review.ts +190 -28
- package/commands/spec-resolver.test.ts +119 -0
- package/commands/spec-resolver.ts +90 -0
- package/commands/standards.ts +7 -15
- package/commands/sync.ts +89 -0
- package/commands/task.ts +72 -167
- package/commands/utils.test.ts +100 -0
- package/commands/utils.ts +78 -706
- package/db/schema.test.ts +760 -0
- package/db/schema.ts +284 -130
- package/gates/validator.test.ts +675 -0
- package/gates/validator.ts +112 -27
- package/package.json +3 -1
- package/protocol/process-return.ts +25 -93
- package/protocol/subagent-protocol.test.ts +936 -0
- package/protocol/subagent-protocol.ts +19 -1
- package/workflow.ts +176 -67
|
@@ -0,0 +1,760 @@
|
|
|
1
|
+
import { describe, it, expect, beforeEach, afterEach } from "bun:test";
|
|
2
|
+
import { Database } from "bun:sqlite";
|
|
3
|
+
import { mkdirSync, rmSync, existsSync } from "fs";
|
|
4
|
+
import { join } from "path";
|
|
5
|
+
|
|
6
|
+
// Setup: use a temporary directory for test databases
const TEST_DIR = join(import.meta.dir, "..", ".test-db");
const TEST_DB_PATH = join(TEST_DIR, "test.db");

// We need to override the DB path before importing schema functions.
// The connection module uses process.cwd() so we mock via direct DB manipulation.
// NOTE(review): TEST_DIR / TEST_DB_PATH are not referenced by any visible test
// (every suite below uses an in-memory database) — confirm they are used later
// in this file before removing them.
|
|
12
|
+
|
|
13
|
+
describe("Migration System", () => {
|
|
14
|
+
// Shared handle to the in-memory database; recreated before every test so
// each test starts from a pristine schema.
let db: Database;

beforeEach(() => {
  // Create in-memory database with same schema setup
  db = new Database(":memory:");
  // Mirror the production connection pragmas: WAL journaling and
  // enforced foreign-key constraints.
  db.exec("PRAGMA journal_mode = WAL");
  db.exec("PRAGMA foreign_keys = ON");
});

afterEach(() => {
  // Release the in-memory database after each test.
  db.close();
});
|
|
26
|
+
|
|
27
|
+
/**
 * Builds the minimal subset of the production schema these tests run
 * against: specs, project, tasks, decisions, plus the schema_migrations
 * bookkeeping table. Every statement uses IF NOT EXISTS, so repeated
 * calls against the same database are harmless.
 */
function createBaseTables(db: Database) {
  // Minimal table creation to test migrations against
  db.exec(`
    CREATE TABLE IF NOT EXISTS specs (
      id TEXT PRIMARY KEY,
      name TEXT NOT NULL,
      phase TEXT NOT NULL DEFAULT 'planning',
      approved_at TEXT,
      created_at TEXT DEFAULT CURRENT_TIMESTAMP,
      updated_at TEXT
    );

    CREATE TABLE IF NOT EXISTS project (
      id TEXT PRIMARY KEY DEFAULT 'default',
      name TEXT,
      stack TEXT NOT NULL,
      discovered_at TEXT,
      updated_at TEXT
    );

    CREATE TABLE IF NOT EXISTS tasks (
      id INTEGER PRIMARY KEY AUTOINCREMENT,
      spec_id TEXT NOT NULL REFERENCES specs(id),
      number INTEGER NOT NULL,
      name TEXT NOT NULL,
      depends_on TEXT,
      can_parallel INTEGER DEFAULT 1,
      agent TEXT,
      files TEXT,
      status TEXT DEFAULT 'pending',
      checkpoint TEXT,
      completed_at TEXT,
      UNIQUE(spec_id, number)
    );

    CREATE TABLE IF NOT EXISTS decisions (
      id TEXT PRIMARY KEY,
      spec_id TEXT NOT NULL REFERENCES specs(id),
      task_ref INTEGER,
      title TEXT NOT NULL,
      decision TEXT NOT NULL,
      rationale TEXT,
      status TEXT DEFAULT 'active',
      created_at TEXT DEFAULT CURRENT_TIMESTAMP
    );

    CREATE TABLE IF NOT EXISTS schema_migrations (
      version TEXT PRIMARY KEY,
      description TEXT NOT NULL,
      applied_at TEXT DEFAULT CURRENT_TIMESTAMP
    );
  `);
}
|
|
80
|
+
|
|
81
|
+
// ═══════════════════════════════════════════════════════════════
|
|
82
|
+
// runMigrations() tests
|
|
83
|
+
// ═══════════════════════════════════════════════════════════════
|
|
84
|
+
|
|
85
|
+
describe("runMigrations()", () => {
|
|
86
|
+
it("should apply all migrations on a fresh database", () => {
|
|
87
|
+
createBaseTables(db);
|
|
88
|
+
|
|
89
|
+
// Define test migrations
|
|
90
|
+
const migrations = [
|
|
91
|
+
{
|
|
92
|
+
version: "8.4.0",
|
|
93
|
+
description: "Add analysis_id to specs",
|
|
94
|
+
up: (d: Database) => d.exec("ALTER TABLE specs ADD COLUMN analysis_id TEXT"),
|
|
95
|
+
},
|
|
96
|
+
{
|
|
97
|
+
version: "8.7.0",
|
|
98
|
+
description: "Add cli_version to project",
|
|
99
|
+
up: (d: Database) => d.exec("ALTER TABLE project ADD COLUMN cli_version TEXT"),
|
|
100
|
+
},
|
|
101
|
+
];
|
|
102
|
+
|
|
103
|
+
// Run migrations manually (simulating runMigrations logic)
|
|
104
|
+
for (const migration of migrations) {
|
|
105
|
+
const existing = db.query("SELECT version FROM schema_migrations WHERE version = ?").get(migration.version);
|
|
106
|
+
if (existing) continue;
|
|
107
|
+
|
|
108
|
+
migration.up(db);
|
|
109
|
+
db.run(
|
|
110
|
+
"INSERT INTO schema_migrations (version, description) VALUES (?, ?)",
|
|
111
|
+
[migration.version, migration.description]
|
|
112
|
+
);
|
|
113
|
+
}
|
|
114
|
+
|
|
115
|
+
// Verify all migrations were recorded
|
|
116
|
+
const applied = db.query("SELECT * FROM schema_migrations ORDER BY version").all() as any[];
|
|
117
|
+
expect(applied.length).toBe(2);
|
|
118
|
+
expect(applied[0].version).toBe("8.4.0");
|
|
119
|
+
expect(applied[1].version).toBe("8.7.0");
|
|
120
|
+
|
|
121
|
+
// Verify columns were added
|
|
122
|
+
const columns = db.query("PRAGMA table_info(specs)").all() as any[];
|
|
123
|
+
const colNames = columns.map((c: any) => c.name);
|
|
124
|
+
expect(colNames).toContain("analysis_id");
|
|
125
|
+
});
|
|
126
|
+
|
|
127
|
+
it("should be idempotent — calling twice applies each migration only once", () => {
|
|
128
|
+
createBaseTables(db);
|
|
129
|
+
|
|
130
|
+
const migration = {
|
|
131
|
+
version: "8.4.0",
|
|
132
|
+
description: "Add analysis_id to specs",
|
|
133
|
+
up: (d: Database) => d.exec("ALTER TABLE specs ADD COLUMN analysis_id TEXT"),
|
|
134
|
+
};
|
|
135
|
+
|
|
136
|
+
// First run
|
|
137
|
+
migration.up(db);
|
|
138
|
+
db.run(
|
|
139
|
+
"INSERT INTO schema_migrations (version, description) VALUES (?, ?)",
|
|
140
|
+
[migration.version, migration.description]
|
|
141
|
+
);
|
|
142
|
+
|
|
143
|
+
// Second run — should skip because already applied
|
|
144
|
+
const existing = db.query("SELECT version FROM schema_migrations WHERE version = ?").get(migration.version);
|
|
145
|
+
expect(existing).not.toBeNull();
|
|
146
|
+
|
|
147
|
+
// Verify only 1 record
|
|
148
|
+
const applied = db.query("SELECT COUNT(*) as c FROM schema_migrations").get() as any;
|
|
149
|
+
expect(applied.c).toBe(1);
|
|
150
|
+
});
|
|
151
|
+
|
|
152
|
+
it("should absorb 'duplicate column name' errors from pre-migration databases", () => {
|
|
153
|
+
createBaseTables(db);
|
|
154
|
+
|
|
155
|
+
// Manually add the column first (simulating pre-migration DB)
|
|
156
|
+
db.exec("ALTER TABLE specs ADD COLUMN analysis_id TEXT");
|
|
157
|
+
|
|
158
|
+
// Now try to run the migration — should NOT throw
|
|
159
|
+
const migration = {
|
|
160
|
+
version: "8.4.0",
|
|
161
|
+
description: "Add analysis_id to specs",
|
|
162
|
+
up: (d: Database) => d.exec("ALTER TABLE specs ADD COLUMN analysis_id TEXT"),
|
|
163
|
+
};
|
|
164
|
+
|
|
165
|
+
let absorbed = false;
|
|
166
|
+
try {
|
|
167
|
+
migration.up(db);
|
|
168
|
+
} catch (e: any) {
|
|
169
|
+
if (e.message.includes("duplicate column name")) {
|
|
170
|
+
absorbed = true;
|
|
171
|
+
} else {
|
|
172
|
+
throw e;
|
|
173
|
+
}
|
|
174
|
+
}
|
|
175
|
+
|
|
176
|
+
expect(absorbed).toBe(true);
|
|
177
|
+
|
|
178
|
+
// Record it anyway
|
|
179
|
+
db.run(
|
|
180
|
+
"INSERT INTO schema_migrations (version, description) VALUES (?, ?)",
|
|
181
|
+
[migration.version, migration.description]
|
|
182
|
+
);
|
|
183
|
+
|
|
184
|
+
const applied = db.query("SELECT COUNT(*) as c FROM schema_migrations").get() as any;
|
|
185
|
+
expect(applied.c).toBe(1);
|
|
186
|
+
});
|
|
187
|
+
|
|
188
|
+
it("should propagate real errors (not duplicate column)", () => {
|
|
189
|
+
createBaseTables(db);
|
|
190
|
+
|
|
191
|
+
const badMigration = {
|
|
192
|
+
version: "99.0.0",
|
|
193
|
+
description: "Bad migration",
|
|
194
|
+
up: (d: Database) => d.exec("ALTER TABLE nonexistent_table ADD COLUMN foo TEXT"),
|
|
195
|
+
};
|
|
196
|
+
|
|
197
|
+
expect(() => {
|
|
198
|
+
badMigration.up(db);
|
|
199
|
+
}).toThrow();
|
|
200
|
+
|
|
201
|
+
// Migration should NOT be recorded
|
|
202
|
+
const applied = db.query("SELECT COUNT(*) as c FROM schema_migrations").get() as any;
|
|
203
|
+
expect(applied.c).toBe(0);
|
|
204
|
+
});
|
|
205
|
+
});
|
|
206
|
+
|
|
207
|
+
// ═══════════════════════════════════════════════════════════════
|
|
208
|
+
// getNextDecisionId() tests
|
|
209
|
+
// ═══════════════════════════════════════════════════════════════
|
|
210
|
+
|
|
211
|
+
describe("getNextDecisionId()", () => {
|
|
212
|
+
// Reimplemented: uses timestamp+random instead of sequential count
|
|
213
|
+
function getNextDecisionId(specId: string): string {
|
|
214
|
+
const slug = specId.split("-").slice(1, 3).join("-");
|
|
215
|
+
const ts = Date.now().toString(36);
|
|
216
|
+
const rand = Math.random().toString(36).substring(2, 6);
|
|
217
|
+
return `DEC-${slug}-${ts}-${rand}`;
|
|
218
|
+
}
|
|
219
|
+
|
|
220
|
+
it("should start with DEC- prefix and contain spec slug", () => {
|
|
221
|
+
const id = getNextDecisionId("SPEC-001");
|
|
222
|
+
expect(id.startsWith("DEC-001-")).toBe(true);
|
|
223
|
+
});
|
|
224
|
+
|
|
225
|
+
it("should generate unique IDs on consecutive calls", () => {
|
|
226
|
+
const id1 = getNextDecisionId("SPEC-001");
|
|
227
|
+
const id2 = getNextDecisionId("SPEC-001");
|
|
228
|
+
expect(id1).not.toBe(id2);
|
|
229
|
+
});
|
|
230
|
+
|
|
231
|
+
it("should include slug from multi-part spec IDs", () => {
|
|
232
|
+
const id = getNextDecisionId("2026-02-11-feature-name");
|
|
233
|
+
expect(id.startsWith("DEC-02-11-")).toBe(true);
|
|
234
|
+
});
|
|
235
|
+
|
|
236
|
+
it("should not require DB access (no race condition)", () => {
|
|
237
|
+
// Generate 100 IDs rapidly — all should be unique
|
|
238
|
+
const ids = new Set<string>();
|
|
239
|
+
for (let i = 0; i < 100; i++) {
|
|
240
|
+
ids.add(getNextDecisionId("SPEC-001"));
|
|
241
|
+
}
|
|
242
|
+
expect(ids.size).toBe(100);
|
|
243
|
+
});
|
|
244
|
+
});
|
|
245
|
+
|
|
246
|
+
// ═══════════════════════════════════════════════════════════════
|
|
247
|
+
// claimTask() tests
|
|
248
|
+
// ═══════════════════════════════════════════════════════════════
|
|
249
|
+
|
|
250
|
+
describe("claimTask()", () => {
|
|
251
|
+
function claimTask(taskId: number): boolean {
|
|
252
|
+
const result = db.run(
|
|
253
|
+
"UPDATE tasks SET status = 'running' WHERE id = ? AND status = 'pending'",
|
|
254
|
+
[taskId]
|
|
255
|
+
);
|
|
256
|
+
return result.changes > 0;
|
|
257
|
+
}
|
|
258
|
+
|
|
259
|
+
beforeEach(() => {
|
|
260
|
+
createBaseTables(db);
|
|
261
|
+
db.run("INSERT INTO specs (id, name, phase) VALUES ('SPEC-001', 'test', 'implementing')");
|
|
262
|
+
db.run(
|
|
263
|
+
"INSERT INTO tasks (spec_id, number, name, status) VALUES (?, ?, ?, ?)",
|
|
264
|
+
["SPEC-001", 1, "Test task", "pending"]
|
|
265
|
+
);
|
|
266
|
+
});
|
|
267
|
+
|
|
268
|
+
it("should return true and set status to running for a pending task", () => {
|
|
269
|
+
const taskId = (db.query("SELECT id FROM tasks WHERE number = 1").get() as any).id;
|
|
270
|
+
expect(claimTask(taskId)).toBe(true);
|
|
271
|
+
|
|
272
|
+
const task = db.query("SELECT status FROM tasks WHERE id = ?").get(taskId) as any;
|
|
273
|
+
expect(task.status).toBe("running");
|
|
274
|
+
});
|
|
275
|
+
|
|
276
|
+
it("should return false for a task already in running status", () => {
|
|
277
|
+
const taskId = (db.query("SELECT id FROM tasks WHERE number = 1").get() as any).id;
|
|
278
|
+
|
|
279
|
+
// First claim succeeds
|
|
280
|
+
expect(claimTask(taskId)).toBe(true);
|
|
281
|
+
// Second claim fails
|
|
282
|
+
expect(claimTask(taskId)).toBe(false);
|
|
283
|
+
});
|
|
284
|
+
|
|
285
|
+
it("should return false for a task in done status", () => {
|
|
286
|
+
const taskId = (db.query("SELECT id FROM tasks WHERE number = 1").get() as any).id;
|
|
287
|
+
db.run("UPDATE tasks SET status = 'done' WHERE id = ?", [taskId]);
|
|
288
|
+
|
|
289
|
+
expect(claimTask(taskId)).toBe(false);
|
|
290
|
+
});
|
|
291
|
+
|
|
292
|
+
it("should return false for a non-existent task ID", () => {
|
|
293
|
+
expect(claimTask(99999)).toBe(false);
|
|
294
|
+
});
|
|
295
|
+
|
|
296
|
+
it("should prevent double-claim (only first caller wins)", () => {
|
|
297
|
+
const taskId = (db.query("SELECT id FROM tasks WHERE number = 1").get() as any).id;
|
|
298
|
+
|
|
299
|
+
// Simulate two concurrent claims
|
|
300
|
+
const claim1 = claimTask(taskId);
|
|
301
|
+
const claim2 = claimTask(taskId);
|
|
302
|
+
|
|
303
|
+
expect(claim1).toBe(true);
|
|
304
|
+
expect(claim2).toBe(false);
|
|
305
|
+
});
|
|
306
|
+
});
|
|
307
|
+
|
|
308
|
+
// ═══════════════════════════════════════════════════════════════
|
|
309
|
+
// v9.3: Agent Performance (P3.2)
|
|
310
|
+
// ═══════════════════════════════════════════════════════════════
|
|
311
|
+
|
|
312
|
+
describe("agent_performance", () => {
  /**
   * Base schema plus the two v9.3 (P3.2) performance tables mirrored from
   * migration 9.3.0: agent_performance (per-task metrics) and
   * gate_bypasses (individual bypass events).
   */
  function createPerformanceTables(db: Database) {
    createBaseTables(db);
    db.exec(`
      CREATE TABLE IF NOT EXISTS agent_performance (
        id INTEGER PRIMARY KEY AUTOINCREMENT,
        agent_type TEXT NOT NULL,
        spec_id TEXT NOT NULL,
        task_id INTEGER NOT NULL,
        gates_passed_first_try INTEGER DEFAULT 0,
        gates_total INTEGER DEFAULT 0,
        bypasses_used INTEGER DEFAULT 0,
        files_created INTEGER DEFAULT 0,
        files_modified INTEGER DEFAULT 0,
        context_size_bytes INTEGER DEFAULT 0,
        execution_duration_ms INTEGER DEFAULT 0,
        created_at TEXT DEFAULT CURRENT_TIMESTAMP
      )
    `);
    db.exec(`CREATE INDEX IF NOT EXISTS idx_agent_perf_type ON agent_performance(agent_type)`);

    db.exec(`
      CREATE TABLE IF NOT EXISTS gate_bypasses (
        id INTEGER PRIMARY KEY AUTOINCREMENT,
        spec_id TEXT,
        task_id INTEGER,
        gate_name TEXT NOT NULL,
        reason TEXT,
        created_at TEXT DEFAULT CURRENT_TIMESTAMP
      )
    `);
  }

  it("migration 9.3.0 should create agent_performance table", () => {
    createPerformanceTables(db);

    // Assert the expected column set via PRAGMA table_info.
    const columns = db.query("PRAGMA table_info(agent_performance)").all() as any[];
    const colNames = columns.map((c: any) => c.name);
    expect(colNames).toContain("agent_type");
    expect(colNames).toContain("spec_id");
    expect(colNames).toContain("task_id");
    expect(colNames).toContain("gates_passed_first_try");
    expect(colNames).toContain("gates_total");
    expect(colNames).toContain("bypasses_used");
    expect(colNames).toContain("execution_duration_ms");
  });

  it("should insert and retrieve performance data", () => {
    createPerformanceTables(db);
    db.run("INSERT INTO specs (id, name, phase) VALUES ('SPEC-001', 'test', 'implementing')");
    db.run(
      "INSERT INTO tasks (spec_id, number, name, agent, status) VALUES (?, ?, ?, ?, ?)",
      ["SPEC-001", 1, "Test task", "frontend-next", "done"]
    );

    // Round-trip a single fully-populated performance record.
    const now = new Date().toISOString();
    db.run(
      `INSERT INTO agent_performance
      (agent_type, spec_id, task_id, gates_passed_first_try, gates_total, bypasses_used, files_created, files_modified, context_size_bytes, execution_duration_ms, created_at)
      VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
      ["frontend-next", "SPEC-001", 1, 7, 7, 0, 3, 1, 4096, 15000, now]
    );

    const rows = db.query("SELECT * FROM agent_performance WHERE agent_type = ?").all("frontend-next") as any[];
    expect(rows).toHaveLength(1);
    expect(rows[0].gates_passed_first_try).toBe(7);
    expect(rows[0].bypasses_used).toBe(0);
    expect(rows[0].execution_duration_ms).toBe(15000);
  });

  it("should compute hints: no data returns empty", () => {
    createPerformanceTables(db);

    // An agent with no recorded runs yields no rows to compute hints from.
    const recent = db.query(
      "SELECT * FROM agent_performance WHERE agent_type = ? ORDER BY created_at DESC LIMIT 5"
    ).all("nonexistent") as any[];
    expect(recent).toHaveLength(0);
  });

  it("should detect high bypass rate from performance data", () => {
    createPerformanceTables(db);
    db.run("INSERT INTO specs (id, name, phase) VALUES ('SPEC-001', 'test', 'implementing')");

    const now = new Date().toISOString();
    // Insert 3 records with bypasses
    for (let i = 1; i <= 3; i++) {
      db.run(
        `INSERT INTO agent_performance
        (agent_type, spec_id, task_id, gates_passed_first_try, gates_total, bypasses_used, created_at)
        VALUES (?, ?, ?, ?, ?, ?, ?)`,
        ["backend-go", "SPEC-001", i, 5, 7, 2, now]
      );
    }

    const recent = db.query(
      "SELECT * FROM agent_performance WHERE agent_type = ? ORDER BY created_at DESC LIMIT 5"
    ).all("backend-go") as any[];

    // Average bypasses per run; 2 per record → well above the 0.5 hint threshold.
    const avgBypass = recent.reduce((sum: number, r: any) => sum + r.bypasses_used, 0) / recent.length;
    expect(avgBypass).toBe(2);
    expect(avgBypass > 0.5).toBe(true);

    // Average first-try pass rate; a run with gates_total = 0 counts as perfect (1).
    const avgGateRate = recent.reduce((sum: number, r: any) => {
      return sum + (r.gates_total > 0 ? r.gates_passed_first_try / r.gates_total : 1);
    }, 0) / recent.length;
    expect(avgGateRate).toBeCloseTo(5 / 7, 2);
    expect(avgGateRate < 0.7).toBe(false); // 5/7 ≈ 0.71, just above threshold
  });

  it("should detect low gate pass rate", () => {
    createPerformanceTables(db);
    db.run("INSERT INTO specs (id, name, phase) VALUES ('SPEC-001', 'test', 'implementing')");

    // Three runs each passing only 3/7 gates first try.
    const now = new Date().toISOString();
    for (let i = 1; i <= 3; i++) {
      db.run(
        `INSERT INTO agent_performance
        (agent_type, spec_id, task_id, gates_passed_first_try, gates_total, bypasses_used, created_at)
        VALUES (?, ?, ?, ?, ?, ?, ?)`,
        ["backend-csharp", "SPEC-001", i, 3, 7, 4, now]
      );
    }

    const recent = db.query(
      "SELECT * FROM agent_performance WHERE agent_type = ? ORDER BY created_at DESC LIMIT 5"
    ).all("backend-csharp") as any[];

    const avgGateRate = recent.reduce((sum: number, r: any) => {
      return sum + (r.gates_total > 0 ? r.gates_passed_first_try / r.gates_total : 1);
    }, 0) / recent.length;
    expect(avgGateRate).toBeCloseTo(3 / 7, 2);
    expect(avgGateRate < 0.7).toBe(true); // 3/7 ≈ 0.43 — should trigger hint
  });

  it("should track frequent gate bypass types", () => {
    createPerformanceTables(db);
    db.run("INSERT INTO specs (id, name, phase) VALUES ('SPEC-001', 'test', 'implementing')");
    db.run(
      "INSERT INTO tasks (spec_id, number, name, agent, status) VALUES (?, ?, ?, ?, ?)",
      ["SPEC-001", 1, "Task 1", "frontend-next", "done"]
    );
    db.run(
      "INSERT INTO tasks (spec_id, number, name, agent, status) VALUES (?, ?, ?, ?, ?)",
      ["SPEC-001", 2, "Task 2", "frontend-next", "done"]
    );

    const taskId1 = (db.query("SELECT id FROM tasks WHERE number = 1").get() as any).id;
    const taskId2 = (db.query("SELECT id FROM tasks WHERE number = 2").get() as any).id;

    // Add bypasses for same gate
    db.run("INSERT INTO gate_bypasses (spec_id, task_id, gate_name, reason) VALUES (?, ?, ?, ?)",
      ["SPEC-001", taskId1, "standards-follow", "test"]);
    db.run("INSERT INTO gate_bypasses (spec_id, task_id, gate_name, reason) VALUES (?, ?, ?, ?)",
      ["SPEC-001", taskId2, "standards-follow", "test"]);

    // Aggregate bypass counts per gate for one agent, most frequent first.
    const bypassTypes = db.query(
      `SELECT gb.gate_name, COUNT(*) as cnt FROM gate_bypasses gb
      JOIN tasks t ON gb.task_id = t.id
      WHERE t.agent = ?
      GROUP BY gb.gate_name
      ORDER BY cnt DESC LIMIT 3`
    ).all("frontend-next") as any[];

    expect(bypassTypes).toHaveLength(1);
    expect(bypassTypes[0].gate_name).toBe("standards-follow");
    expect(bypassTypes[0].cnt).toBe(2);
  });
});
|
|
480
|
+
|
|
481
|
+
// ═══════════════════════════════════════════════════════════════
|
|
482
|
+
// v9.4: Knowledge Acknowledgments (P1-5)
|
|
483
|
+
// ═══════════════════════════════════════════════════════════════
|
|
484
|
+
|
|
485
|
+
describe("knowledge_acknowledgments", () => {
  /**
   * Base schema plus the knowledge table (including the legacy JSON
   * acknowledged_by column) and the v9.4 (P1-5) normalized
   * knowledge_acknowledgments join table, mirrored from migration 9.4.0.
   */
  function createKnowledgeTables(db: Database) {
    createBaseTables(db);
    db.exec(`
      CREATE TABLE IF NOT EXISTS knowledge (
        id INTEGER PRIMARY KEY AUTOINCREMENT,
        spec_id TEXT NOT NULL,
        task_origin INTEGER NOT NULL,
        category TEXT NOT NULL,
        content TEXT NOT NULL,
        severity TEXT DEFAULT 'info',
        broadcast_to TEXT DEFAULT 'all',
        acknowledged_by TEXT,
        created_at TEXT DEFAULT CURRENT_TIMESTAMP
      )
    `);
    db.exec(`
      CREATE TABLE IF NOT EXISTS knowledge_acknowledgments (
        knowledge_id INTEGER NOT NULL REFERENCES knowledge(id) ON DELETE CASCADE,
        task_id INTEGER NOT NULL,
        acknowledged_at TEXT DEFAULT CURRENT_TIMESTAMP,
        PRIMARY KEY (knowledge_id, task_id)
      )
    `);
    db.exec(`CREATE INDEX IF NOT EXISTS idx_ka_task ON knowledge_acknowledgments(task_id)`);
  }

  it("migration 9.4.0 should create knowledge_acknowledgments table", () => {
    createKnowledgeTables(db);

    const columns = db.query("PRAGMA table_info(knowledge_acknowledgments)").all() as any[];
    const colNames = columns.map((c: any) => c.name);
    expect(colNames).toContain("knowledge_id");
    expect(colNames).toContain("task_id");
    expect(colNames).toContain("acknowledged_at");
  });

  it("should insert and query acknowledgment", () => {
    createKnowledgeTables(db);
    db.run("INSERT INTO specs (id, name, phase) VALUES ('SPEC-001', 'test', 'implementing')");
    db.run(
      "INSERT INTO knowledge (spec_id, task_origin, category, content, severity) VALUES (?, ?, ?, ?, ?)",
      ["SPEC-001", 1, "discovery", "Test knowledge", "critical"]
    );

    const kid = (db.query("SELECT id FROM knowledge LIMIT 1").get() as any).id;

    // Not acknowledged yet
    const before = db.query(
      "SELECT 1 FROM knowledge_acknowledgments WHERE knowledge_id = ? AND task_id = ?"
    ).get(kid, 2);
    expect(before).toBeNull();

    // Acknowledge
    db.run(
      "INSERT OR IGNORE INTO knowledge_acknowledgments (knowledge_id, task_id) VALUES (?, ?)",
      [kid, 2]
    );

    // Now acknowledged
    const after = db.query(
      "SELECT 1 FROM knowledge_acknowledgments WHERE knowledge_id = ? AND task_id = ?"
    ).get(kid, 2);
    expect(after).not.toBeNull();
  });

  it("should be idempotent (INSERT OR IGNORE)", () => {
    createKnowledgeTables(db);
    db.run("INSERT INTO specs (id, name, phase) VALUES ('SPEC-001', 'test', 'implementing')");
    db.run(
      "INSERT INTO knowledge (spec_id, task_origin, category, content) VALUES (?, ?, ?, ?)",
      ["SPEC-001", 1, "discovery", "Test"]
    );

    const kid = (db.query("SELECT id FROM knowledge LIMIT 1").get() as any).id;

    // Insert twice — no error; the composite primary key plus OR IGNORE
    // makes the second insert a no-op.
    db.run("INSERT OR IGNORE INTO knowledge_acknowledgments (knowledge_id, task_id) VALUES (?, ?)", [kid, 2]);
    db.run("INSERT OR IGNORE INTO knowledge_acknowledgments (knowledge_id, task_id) VALUES (?, ?)", [kid, 2]);

    const count = (db.query(
      "SELECT COUNT(*) as c FROM knowledge_acknowledgments WHERE knowledge_id = ?"
    ).get(kid) as any).c;
    expect(count).toBe(1);
  });

  it("should migrate data from JSON acknowledged_by", () => {
    createKnowledgeTables(db);
    db.run("INSERT INTO specs (id, name, phase) VALUES ('SPEC-001', 'test', 'implementing')");

    // Insert with old JSON format
    db.run(
      "INSERT INTO knowledge (spec_id, task_origin, category, content, acknowledged_by) VALUES (?, ?, ?, ?, ?)",
      ["SPEC-001", 1, "discovery", "Test", JSON.stringify([2, 3, 5])]
    );

    const kid = (db.query("SELECT id FROM knowledge LIMIT 1").get() as any).id;

    // Simulate migration: explode each JSON array of task ids into
    // individual rows of the join table.
    const rows = db.query("SELECT id, acknowledged_by FROM knowledge WHERE acknowledged_by IS NOT NULL").all() as any[];
    const insert = db.prepare("INSERT OR IGNORE INTO knowledge_acknowledgments (knowledge_id, task_id) VALUES (?, ?)");
    for (const row of rows) {
      const taskIds = JSON.parse(row.acknowledged_by) as number[];
      for (const taskId of taskIds) {
        insert.run(row.id, taskId);
      }
    }

    // Verify migrated data
    const acks = db.query(
      "SELECT task_id FROM knowledge_acknowledgments WHERE knowledge_id = ? ORDER BY task_id"
    ).all(kid) as any[];
    expect(acks.map((a: any) => a.task_id)).toEqual([2, 3, 5]);
  });

  it("should find unacknowledged critical knowledge via NOT EXISTS", () => {
    createKnowledgeTables(db);
    db.run("INSERT INTO specs (id, name, phase) VALUES ('SPEC-001', 'test', 'implementing')");

    // Insert critical knowledge from task 1
    db.run(
      "INSERT INTO knowledge (spec_id, task_origin, category, content, severity) VALUES (?, ?, ?, ?, ?)",
      ["SPEC-001", 1, "discovery", "Critical A", "critical"]
    );
    db.run(
      "INSERT INTO knowledge (spec_id, task_origin, category, content, severity) VALUES (?, ?, ?, ?, ?)",
      ["SPEC-001", 1, "discovery", "Critical B", "critical"]
    );

    const kids = (db.query("SELECT id FROM knowledge ORDER BY id").all() as any[]).map((r: any) => r.id);

    // Acknowledge only the first one by task 2
    db.run("INSERT INTO knowledge_acknowledgments (knowledge_id, task_id) VALUES (?, ?)", [kids[0], 2]);

    // Query unacknowledged for task 2: critical items from other tasks
    // with no matching acknowledgment row.
    const unacked = db.query(`
      SELECT k.* FROM knowledge k
      WHERE k.spec_id = ?
      AND k.severity = 'critical'
      AND k.task_origin != ?
      AND NOT EXISTS (
        SELECT 1 FROM knowledge_acknowledgments ka
        WHERE ka.knowledge_id = k.id AND ka.task_id = ?
      )
    `).all("SPEC-001", 2, 2) as any[];

    expect(unacked).toHaveLength(1);
    expect(unacked[0].content).toBe("Critical B");
  });
});
|
|
635
|
+
|
|
636
|
+
// ═══════════════════════════════════════════════════════════════
|
|
637
|
+
// P1-2: Review Score
|
|
638
|
+
// ═══════════════════════════════════════════════════════════════
|
|
639
|
+
|
|
640
|
+
describe("calculateReviewScore()", () => {
|
|
641
|
+
/**
 * Base schema plus the three review-score inputs: artifacts (files the
 * agents delivered), gate_bypasses (bypass events), and the review table
 * itself (P1-2).
 */
function createReviewTables(db: Database) {
  createBaseTables(db);
  db.exec(`
    CREATE TABLE IF NOT EXISTS artifacts (
      id INTEGER PRIMARY KEY AUTOINCREMENT,
      spec_id TEXT NOT NULL,
      task_ref INTEGER NOT NULL,
      path TEXT NOT NULL,
      action TEXT NOT NULL,
      created_at TEXT DEFAULT CURRENT_TIMESTAMP,
      UNIQUE(spec_id, path)
    );
    CREATE TABLE IF NOT EXISTS gate_bypasses (
      id INTEGER PRIMARY KEY AUTOINCREMENT,
      spec_id TEXT NOT NULL,
      task_id INTEGER NOT NULL,
      gate_name TEXT NOT NULL,
      reason TEXT,
      created_at TEXT DEFAULT CURRENT_TIMESTAMP
    );
    CREATE TABLE IF NOT EXISTS review (
      id INTEGER PRIMARY KEY AUTOINCREMENT,
      spec_id TEXT NOT NULL,
      planned_vs_done TEXT,
      deviations TEXT,
      pattern_violations TEXT,
      status TEXT DEFAULT 'pending',
      resolution TEXT,
      created_at TEXT DEFAULT CURRENT_TIMESTAMP
    );
  `);
}
|
|
673
|
+
|
|
674
|
+
/**
 * Reimplements the review-score computation under test: four equally
 * weighted components of 25 points each — tasks completed, gate events
 * without bypass, planned files delivered, and standards gates not
 * bypassed — summing to a 0–100 total.
 */
function calcScore(db: Database, specId: string) {
  // Component 1: fraction of this spec's tasks in 'done' status.
  // A spec with no tasks scores the full 25.
  const totalTasks = (db.query("SELECT COUNT(*) as c FROM tasks WHERE spec_id = ?").get(specId) as any).c;
  const completedTasks = (db.query("SELECT COUNT(*) as c FROM tasks WHERE spec_id = ? AND status = 'done'").get(specId) as any).c;
  const tasksCompleted = totalTasks > 0 ? Math.round((completedTasks / totalTasks) * 25) : 25;

  // Component 2: gate events not covered by a recorded bypass.
  // NOTE(review): assumes exactly 7 gate events per task — confirm this
  // matches gates/validator before relying on the constant.
  const totalGateEvents = totalTasks * 7;
  const bypassCount = (db.query("SELECT COUNT(*) as c FROM gate_bypasses WHERE spec_id = ?").get(specId) as any).c;
  const cleanGateEvents = Math.max(0, totalGateEvents - bypassCount);
  const gatesPassedClean = totalGateEvents > 0 ? Math.round((cleanGateEvents / totalGateEvents) * 25) : 25;

  // Component 3: planned files (tasks.files JSON arrays) found in artifacts.
  const plannedFiles = db.query("SELECT files FROM tasks WHERE spec_id = ? AND files IS NOT NULL").all(specId) as any[];
  const allPlannedFiles = new Set<string>();
  for (const t of plannedFiles) {
    // Malformed JSON in tasks.files is deliberately ignored (best-effort parse).
    try { for (const f of JSON.parse(t.files) as string[]) allPlannedFiles.add(f); } catch {}
  }
  const deliveredFiles = new Set((db.query("SELECT DISTINCT path FROM artifacts WHERE spec_id = ?").all(specId) as any[]).map(a => a.path));
  let filesDelivered: number;
  // NOTE(review): with no planned files, any delivered artifact earns the
  // full 25 and zero artifacts earns 0 — confirm this mirrors production.
  if (allPlannedFiles.size === 0) { filesDelivered = deliveredFiles.size > 0 ? 25 : 0; }
  else { let m = 0; for (const f of allPlannedFiles) { if (deliveredFiles.has(f)) m++; } filesDelivered = Math.round((m / allPlannedFiles.size) * 25); }

  // Component 4: tasks without a 'standards-follow' gate bypass.
  const standardsBypasses = (db.query("SELECT COUNT(*) as c FROM gate_bypasses WHERE spec_id = ? AND gate_name = 'standards-follow'").get(specId) as any).c;
  const standardsFollowed = totalTasks > 0 ? Math.round(((totalTasks - standardsBypasses) / totalTasks) * 25) : 25;

  return { total: tasksCompleted + gatesPassedClean + filesDelivered + standardsFollowed, breakdown: { tasksCompleted, gatesPassedClean, filesDelivered, standardsFollowed } };
}
|
|
699
|
+
|
|
700
|
+
it("should return perfect score for clean implementation", () => {
|
|
701
|
+
createReviewTables(db);
|
|
702
|
+
db.run("INSERT INTO specs (id, name, phase) VALUES ('SPEC-001', 'test', 'reviewing')");
|
|
703
|
+
db.run("INSERT INTO tasks (spec_id, number, name, status, files) VALUES (?, 1, 'Task 1', 'done', ?)", ["SPEC-001", '["src/a.ts"]']);
|
|
704
|
+
db.run("INSERT INTO tasks (spec_id, number, name, status, files) VALUES (?, 2, 'Task 2', 'done', ?)", ["SPEC-001", '["src/b.ts"]']);
|
|
705
|
+
db.run("INSERT INTO artifacts (spec_id, task_ref, path, action) VALUES (?, 1, 'src/a.ts', 'created')", ["SPEC-001"]);
|
|
706
|
+
db.run("INSERT INTO artifacts (spec_id, task_ref, path, action) VALUES (?, 2, 'src/b.ts', 'created')", ["SPEC-001"]);
|
|
707
|
+
|
|
708
|
+
const score = calcScore(db, "SPEC-001");
|
|
709
|
+
expect(score.total).toBe(100);
|
|
710
|
+
expect(score.breakdown.tasksCompleted).toBe(25);
|
|
711
|
+
expect(score.breakdown.gatesPassedClean).toBe(25);
|
|
712
|
+
expect(score.breakdown.filesDelivered).toBe(25);
|
|
713
|
+
expect(score.breakdown.standardsFollowed).toBe(25);
|
|
714
|
+
});
|
|
715
|
+
|
|
716
|
+
it("should penalize incomplete tasks", () => {
|
|
717
|
+
createReviewTables(db);
|
|
718
|
+
db.run("INSERT INTO specs (id, name, phase) VALUES ('SPEC-001', 'test', 'reviewing')");
|
|
719
|
+
db.run("INSERT INTO tasks (spec_id, number, name, status) VALUES (?, 1, 'Task 1', 'done')", ["SPEC-001"]);
|
|
720
|
+
db.run("INSERT INTO tasks (spec_id, number, name, status) VALUES (?, 2, 'Task 2', 'pending')", ["SPEC-001"]);
|
|
721
|
+
|
|
722
|
+
const score = calcScore(db, "SPEC-001");
|
|
723
|
+
expect(score.breakdown.tasksCompleted).toBe(13); // round(1/2 * 25) = 13
|
|
724
|
+
});
|
|
725
|
+
|
|
726
|
+
it("should penalize gate bypasses", () => {
|
|
727
|
+
createReviewTables(db);
|
|
728
|
+
db.run("INSERT INTO specs (id, name, phase) VALUES ('SPEC-001', 'test', 'reviewing')");
|
|
729
|
+
db.run("INSERT INTO tasks (spec_id, number, name, status) VALUES (?, 1, 'Task 1', 'done')", ["SPEC-001"]);
|
|
730
|
+
db.run("INSERT INTO gate_bypasses (spec_id, task_id, gate_name, reason) VALUES (?, 1, 'standards-follow', 'test')", ["SPEC-001"]);
|
|
731
|
+
db.run("INSERT INTO gate_bypasses (spec_id, task_id, gate_name, reason) VALUES (?, 1, 'dry-check', 'test')", ["SPEC-001"]);
|
|
732
|
+
|
|
733
|
+
const score = calcScore(db, "SPEC-001");
|
|
734
|
+
// 1 task * 7 gates = 7 events, 2 bypasses = 5/7 clean
|
|
735
|
+
expect(score.breakdown.gatesPassedClean).toBe(Math.round(5 / 7 * 25));
|
|
736
|
+
});
|
|
737
|
+
|
|
738
|
+
it("should penalize missing files", () => {
|
|
739
|
+
createReviewTables(db);
|
|
740
|
+
db.run("INSERT INTO specs (id, name, phase) VALUES ('SPEC-001', 'test', 'reviewing')");
|
|
741
|
+
db.run("INSERT INTO tasks (spec_id, number, name, status, files) VALUES (?, 1, 'Task 1', 'done', ?)", ["SPEC-001", '["src/a.ts","src/b.ts"]']);
|
|
742
|
+
db.run("INSERT INTO artifacts (spec_id, task_ref, path, action) VALUES (?, 1, 'src/a.ts', 'created')", ["SPEC-001"]);
|
|
743
|
+
// src/b.ts missing
|
|
744
|
+
|
|
745
|
+
const score = calcScore(db, "SPEC-001");
|
|
746
|
+
expect(score.breakdown.filesDelivered).toBe(13); // round(1/2 * 25) = 13
|
|
747
|
+
});
|
|
748
|
+
|
|
749
|
+
it("should penalize standards bypasses specifically", () => {
|
|
750
|
+
createReviewTables(db);
|
|
751
|
+
db.run("INSERT INTO specs (id, name, phase) VALUES ('SPEC-001', 'test', 'reviewing')");
|
|
752
|
+
db.run("INSERT INTO tasks (spec_id, number, name, status) VALUES (?, 1, 'Task 1', 'done')", ["SPEC-001"]);
|
|
753
|
+
db.run("INSERT INTO tasks (spec_id, number, name, status) VALUES (?, 2, 'Task 2', 'done')", ["SPEC-001"]);
|
|
754
|
+
db.run("INSERT INTO gate_bypasses (spec_id, task_id, gate_name, reason) VALUES (?, 1, 'standards-follow', 'test')", ["SPEC-001"]);
|
|
755
|
+
|
|
756
|
+
const score = calcScore(db, "SPEC-001");
|
|
757
|
+
expect(score.breakdown.standardsFollowed).toBe(13); // round(1/2 * 25) = 13
|
|
758
|
+
});
|
|
759
|
+
});
|
|
760
|
+
});
|