@kodrunhq/opencode-autopilot 1.15.1 → 1.16.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (64)
  1. package/README.md +14 -0
  2. package/bin/cli.ts +5 -0
  3. package/bin/inspect.ts +337 -0
  4. package/package.json +1 -1
  5. package/src/agents/autopilot.ts +7 -15
  6. package/src/agents/index.ts +54 -21
  7. package/src/health/checks.ts +108 -4
  8. package/src/health/runner.ts +3 -0
  9. package/src/index.ts +105 -12
  10. package/src/inspect/formatters.ts +225 -0
  11. package/src/inspect/repository.ts +882 -0
  12. package/src/kernel/database.ts +45 -0
  13. package/src/kernel/migrations.ts +62 -0
  14. package/src/kernel/repository.ts +571 -0
  15. package/src/kernel/schema.ts +122 -0
  16. package/src/kernel/types.ts +66 -0
  17. package/src/memory/capture.ts +221 -25
  18. package/src/memory/database.ts +74 -12
  19. package/src/memory/index.ts +17 -1
  20. package/src/memory/project-key.ts +6 -0
  21. package/src/memory/repository.ts +833 -42
  22. package/src/memory/retrieval.ts +83 -169
  23. package/src/memory/schemas.ts +39 -7
  24. package/src/memory/types.ts +4 -0
  25. package/src/observability/event-handlers.ts +28 -17
  26. package/src/observability/event-store.ts +29 -1
  27. package/src/observability/forensic-log.ts +159 -0
  28. package/src/observability/forensic-schemas.ts +69 -0
  29. package/src/observability/forensic-types.ts +10 -0
  30. package/src/observability/index.ts +21 -27
  31. package/src/observability/log-reader.ts +142 -111
  32. package/src/observability/log-writer.ts +41 -83
  33. package/src/observability/retention.ts +2 -2
  34. package/src/observability/session-logger.ts +36 -57
  35. package/src/observability/summary-generator.ts +31 -19
  36. package/src/observability/types.ts +12 -24
  37. package/src/orchestrator/contracts/invariants.ts +14 -0
  38. package/src/orchestrator/contracts/legacy-result-adapter.ts +8 -20
  39. package/src/orchestrator/fallback/event-handler.ts +47 -3
  40. package/src/orchestrator/handlers/architect.ts +2 -1
  41. package/src/orchestrator/handlers/build.ts +55 -97
  42. package/src/orchestrator/handlers/retrospective.ts +2 -1
  43. package/src/orchestrator/handlers/types.ts +0 -1
  44. package/src/orchestrator/lesson-memory.ts +29 -9
  45. package/src/orchestrator/orchestration-logger.ts +37 -23
  46. package/src/orchestrator/phase.ts +8 -4
  47. package/src/orchestrator/state.ts +79 -17
  48. package/src/projects/database.ts +47 -0
  49. package/src/projects/repository.ts +264 -0
  50. package/src/projects/resolve.ts +301 -0
  51. package/src/projects/schemas.ts +30 -0
  52. package/src/projects/types.ts +12 -0
  53. package/src/review/memory.ts +29 -9
  54. package/src/tools/doctor.ts +40 -5
  55. package/src/tools/forensics.ts +7 -12
  56. package/src/tools/logs.ts +6 -5
  57. package/src/tools/memory-preferences.ts +157 -0
  58. package/src/tools/memory-status.ts +17 -96
  59. package/src/tools/orchestrate.ts +97 -81
  60. package/src/tools/pipeline-report.ts +3 -2
  61. package/src/tools/quick.ts +2 -2
  62. package/src/tools/review.ts +39 -6
  63. package/src/tools/session-stats.ts +3 -2
  64. package/src/utils/paths.ts +20 -1
@@ -0,0 +1,45 @@
1
+ import { Database } from "bun:sqlite";
2
+ import { existsSync, mkdirSync } from "node:fs";
3
+ import { dirname, join } from "node:path";
4
+ import { getAutopilotDbPath, isProjectArtifactDir } from "../utils/paths";
5
+ import { runKernelMigrations } from "./migrations";
6
+
7
+ export const KERNEL_DB_FILE = "kernel.db";
8
+
9
+ export function getKernelDbPath(artifactDirOrProjectRoot?: string): string {
10
+ if (
11
+ typeof artifactDirOrProjectRoot === "string" &&
12
+ artifactDirOrProjectRoot.length > 0 &&
13
+ !isProjectArtifactDir(artifactDirOrProjectRoot)
14
+ ) {
15
+ return join(artifactDirOrProjectRoot, KERNEL_DB_FILE);
16
+ }
17
+
18
+ return getAutopilotDbPath();
19
+ }
20
+
21
+ export function kernelDbExists(artifactDirOrProjectRoot?: string): boolean {
22
+ return existsSync(getKernelDbPath(artifactDirOrProjectRoot));
23
+ }
24
+
25
+ export function openKernelDb(
26
+ artifactDirOrProjectRoot?: string,
27
+ options?: { readonly?: boolean },
28
+ ): Database {
29
+ const dbPath = getKernelDbPath(artifactDirOrProjectRoot);
30
+ if (!options?.readonly) {
31
+ mkdirSync(dirname(dbPath), { recursive: true });
32
+ }
33
+
34
+ const database = new Database(dbPath, options?.readonly ? { readonly: true } : undefined);
35
+
36
+ database.run("PRAGMA foreign_keys=ON");
37
+ database.run("PRAGMA busy_timeout=5000");
38
+
39
+ if (!options?.readonly) {
40
+ database.run("PRAGMA journal_mode=WAL");
41
+ runKernelMigrations(database);
42
+ }
43
+
44
+ return database;
45
+ }
@@ -0,0 +1,62 @@
1
+ import type { Database } from "bun:sqlite";
2
+ import { runProjectRegistryMigrations } from "../projects/database";
3
+ import { KERNEL_SCHEMA_STATEMENTS, KERNEL_SCHEMA_VERSION } from "./schema";
4
+
5
+ function columnExists(database: Database, tableName: string, columnName: string): boolean {
6
+ const columns = database.query(`PRAGMA table_info(${tableName})`).all() as Array<{
7
+ name?: string;
8
+ }>;
9
+ return columns.some((column) => column.name === columnName);
10
+ }
11
+
12
+ function backfillProjectAwareColumns(database: Database): void {
13
+ if (!columnExists(database, "pipeline_runs", "project_id")) {
14
+ database.run("ALTER TABLE pipeline_runs ADD COLUMN project_id TEXT");
15
+ database.run("UPDATE pipeline_runs SET project_id = 'legacy-project' WHERE project_id IS NULL");
16
+ }
17
+
18
+ if (!columnExists(database, "active_review_state", "project_id")) {
19
+ database.run("ALTER TABLE active_review_state ADD COLUMN project_id TEXT");
20
+ database.run(
21
+ "UPDATE active_review_state SET project_id = 'legacy-project' WHERE project_id IS NULL",
22
+ );
23
+ }
24
+
25
+ if (!columnExists(database, "project_review_memory", "project_id")) {
26
+ database.run("ALTER TABLE project_review_memory ADD COLUMN project_id TEXT");
27
+ database.run(
28
+ "UPDATE project_review_memory SET project_id = 'legacy-project' WHERE project_id IS NULL",
29
+ );
30
+ }
31
+
32
+ if (!columnExists(database, "project_lesson_memory", "project_id")) {
33
+ database.run("ALTER TABLE project_lesson_memory ADD COLUMN project_id TEXT");
34
+ database.run(
35
+ "UPDATE project_lesson_memory SET project_id = 'legacy-project' WHERE project_id IS NULL",
36
+ );
37
+ }
38
+
39
+ if (!columnExists(database, "forensic_events", "project_id")) {
40
+ database.run("ALTER TABLE forensic_events ADD COLUMN project_id TEXT");
41
+ database.run(
42
+ "UPDATE forensic_events SET project_id = 'legacy-project' WHERE project_id IS NULL",
43
+ );
44
+ }
45
+ }
46
+
47
+ export function runKernelMigrations(database: Database): void {
48
+ const row = database.query("PRAGMA user_version").get() as { user_version?: number } | null;
49
+ const currentVersion = row?.user_version ?? 0;
50
+
51
+ runProjectRegistryMigrations(database);
52
+
53
+ for (const statement of KERNEL_SCHEMA_STATEMENTS) {
54
+ database.run(statement);
55
+ }
56
+
57
+ backfillProjectAwareColumns(database);
58
+
59
+ if (currentVersion < KERNEL_SCHEMA_VERSION) {
60
+ database.run(`PRAGMA user_version = ${KERNEL_SCHEMA_VERSION}`);
61
+ }
62
+ }
@@ -0,0 +1,571 @@
1
+ import type { Database } from "bun:sqlite";
2
+ import { forensicEventSchema } from "../observability/forensic-schemas";
3
+ import type { ForensicEvent } from "../observability/forensic-types";
4
+ import { lessonMemorySchema } from "../orchestrator/lesson-schemas";
5
+ import type { LessonMemory } from "../orchestrator/lesson-types";
6
+ import { pipelineStateSchema } from "../orchestrator/schemas";
7
+ import type { PipelineState } from "../orchestrator/types";
8
+ import { resolveProjectIdentitySync } from "../projects/resolve";
9
+ import { reviewMemorySchema, reviewStateSchema } from "../review/schemas";
10
+ import type { ReviewMemory, ReviewState } from "../review/types";
11
+ import { getProjectRootFromArtifactDir } from "../utils/paths";
12
+ import { kernelDbExists, openKernelDb } from "./database";
13
+ import type {
14
+ ActiveReviewStateRow,
15
+ ForensicEventRow,
16
+ PipelineRunRow,
17
+ ProjectLessonMemoryRow,
18
+ ProjectReviewMemoryRow,
19
+ } from "./types";
20
+ import { KERNEL_STATE_CONFLICT_CODE } from "./types";
21
+
22
+ function getProjectRoot(path: string): string {
23
+ return getProjectRootFromArtifactDir(path);
24
+ }
25
+
26
+ function resolveProjectId(path: string, db: Database, options?: { readonly?: boolean }): string {
27
+ const projectRoot = getProjectRoot(path);
28
+ return resolveProjectIdentitySync(projectRoot, {
29
+ db,
30
+ allowCreate: options?.readonly !== true,
31
+ }).id;
32
+ }
33
+
34
+ function parsePipelineStateRow(row: PipelineRunRow | null): PipelineState | null {
35
+ if (row === null) {
36
+ return null;
37
+ }
38
+ return pipelineStateSchema.parse(JSON.parse(row.state_json));
39
+ }
40
+
41
+ function parseReviewStateRow(row: ActiveReviewStateRow | null): ReviewState | null {
42
+ if (row === null) {
43
+ return null;
44
+ }
45
+ return reviewStateSchema.parse(JSON.parse(row.state_json));
46
+ }
47
+
48
+ function parseReviewMemoryRow(row: ProjectReviewMemoryRow | null): ReviewMemory | null {
49
+ if (row === null) {
50
+ return null;
51
+ }
52
+ return reviewMemorySchema.parse(JSON.parse(row.state_json));
53
+ }
54
+
55
+ function parseLessonMemoryRow(row: ProjectLessonMemoryRow | null): LessonMemory | null {
56
+ if (row === null) {
57
+ return null;
58
+ }
59
+ return lessonMemorySchema.parse(JSON.parse(row.state_json));
60
+ }
61
+
62
// Raw row shape returned by SELECTs against the project_lessons table.
// The domain / source_phase columns reuse the entry types from
// LessonMemory so parsed rows round-trip through lessonMemorySchema.
interface ProjectLessonRow {
  readonly project_id: string;
  readonly content: string;
  readonly domain: LessonMemory["lessons"][number]["domain"];
  readonly extracted_at: string;
  readonly source_phase: LessonMemory["lessons"][number]["sourcePhase"];
  // Document-level timestamp duplicated onto each row (see the writer).
  readonly last_updated_at: string | null;
}
70
+
71
+ function parseLessonRows(rows: readonly ProjectLessonRow[]): LessonMemory {
72
+ if (rows.length === 0) {
73
+ return lessonMemorySchema.parse({
74
+ schemaVersion: 1,
75
+ lessons: [],
76
+ lastUpdatedAt: null,
77
+ });
78
+ }
79
+
80
+ return lessonMemorySchema.parse({
81
+ schemaVersion: 1,
82
+ lessons: rows.map((row) => ({
83
+ content: row.content,
84
+ domain: row.domain,
85
+ extractedAt: row.extracted_at,
86
+ sourcePhase: row.source_phase,
87
+ })),
88
+ lastUpdatedAt: rows[0]?.last_updated_at ?? null,
89
+ });
90
+ }
91
+
92
/**
 * Map a snake_case forensic_events row back into a validated camelCase
 * ForensicEvent. payload_json is parsed from its stored JSON text;
 * forensicEventSchema.parse performs the actual validation.
 */
function parseForensicEventRow(row: ForensicEventRow): ForensicEvent {
  return forensicEventSchema.parse({
    schemaVersion: row.schema_version,
    timestamp: row.timestamp,
    projectRoot: row.project_root,
    domain: row.domain,
    runId: row.run_id,
    sessionId: row.session_id,
    parentSessionId: row.parent_session_id,
    phase: row.phase,
    dispatchId: row.dispatch_id,
    taskId: row.task_id,
    agent: row.agent,
    type: row.type,
    code: row.code,
    message: row.message,
    // Cast narrows the parsed JSON; correctness is enforced by the schema parse above.
    payload: JSON.parse(row.payload_json) as ForensicEvent["payload"],
  });
}
111
+
112
+ function getLatestPipelineRow(db: Database, projectId: string): PipelineRunRow | null {
113
+ return db
114
+ .query(
115
+ `SELECT *
116
+ FROM pipeline_runs
117
+ WHERE project_id = ?
118
+ ORDER BY last_updated_at DESC, run_id DESC
119
+ LIMIT 1`,
120
+ )
121
+ .get(projectId) as PipelineRunRow | null;
122
+ }
123
+
124
/**
 * Run `callback` inside a BEGIN IMMEDIATE transaction.
 *
 * BEGIN IMMEDIATE acquires the write lock up front, so concurrent
 * writers fail fast (subject to the connection's busy_timeout) rather
 * than mid-transaction. COMMIT is inside the try block on purpose: a
 * commit failure also triggers the rollback path. Rollback failures are
 * swallowed so the original error is the one that propagates.
 */
function withWriteTransaction<T>(db: Database, callback: () => T): T {
  db.run("BEGIN IMMEDIATE");
  try {
    const result = callback();
    db.run("COMMIT");
    return result;
  } catch (error: unknown) {
    try {
      db.run("ROLLBACK");
    } catch {
      // Ignore rollback errors so the original failure wins.
    }
    throw error;
  }
}
139
+
140
+ function tableExists(db: Database, tableName: string): boolean {
141
+ const row = db
142
+ .query("SELECT name FROM sqlite_master WHERE type = 'table' AND name = ?")
143
+ .get(tableName) as { name?: string } | null;
144
+ return row?.name === tableName;
145
+ }
146
+
147
+ export function loadLatestPipelineStateFromKernel(artifactDir: string): PipelineState | null {
148
+ if (!kernelDbExists(artifactDir)) {
149
+ return null;
150
+ }
151
+
152
+ const db = openKernelDb(artifactDir, { readonly: true });
153
+ try {
154
+ const projectId = resolveProjectId(artifactDir, db, { readonly: true });
155
+ return parsePipelineStateRow(getLatestPipelineRow(db, projectId));
156
+ } finally {
157
+ db.close();
158
+ }
159
+ }
160
+
161
/**
 * Persist a full pipeline state snapshot into the kernel database.
 *
 * The JSON blob in pipeline_runs.state_json is the source of truth; the
 * run_phases / run_tasks / run_pending_dispatches / run_processed_results
 * tables are derived projections and are rebuilt (delete + re-insert)
 * on every save. All writes happen inside one write transaction so
 * readers never observe a partially rewritten run.
 *
 * @param expectedRevision optimistic-concurrency guard: when provided,
 *   the save aborts with a KERNEL_STATE_CONFLICT_CODE error unless the
 *   project's latest stored state_revision matches (-1 means "no row yet").
 *   NOTE(review): the check compares against the project's most recent
 *   run, not specifically the row for state.runId — confirm that is the
 *   intended conflict semantics.
 */
export function savePipelineStateToKernel(
  artifactDir: string,
  state: PipelineState,
  expectedRevision?: number,
): void {
  // Validate before opening the database so bad input cannot start a transaction.
  const validated = pipelineStateSchema.parse(state);
  const db = openKernelDb(artifactDir);
  try {
    const projectId = resolveProjectId(artifactDir, db);
    withWriteTransaction(db, () => {
      // Concurrency check is done inside the transaction so it is
      // consistent with the writes that follow.
      const current = getLatestPipelineRow(db, projectId);
      const currentRevision = current?.state_revision ?? -1;
      if (typeof expectedRevision === "number" && currentRevision !== expectedRevision) {
        throw new Error(
          `${KERNEL_STATE_CONFLICT_CODE}: expected stateRevision ${expectedRevision}, found ${currentRevision}`,
        );
      }

      // Upsert the run header plus the full JSON snapshot, keyed by run_id.
      db.run(
        `INSERT INTO pipeline_runs (
        project_id,
        run_id,
        schema_version,
        status,
        current_phase,
        idea,
        state_revision,
        started_at,
        last_updated_at,
        failure_phase,
        failure_agent,
        failure_message,
        last_successful_phase,
        state_json
      ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
      ON CONFLICT(run_id) DO UPDATE SET
        project_id = excluded.project_id,
        schema_version = excluded.schema_version,
        status = excluded.status,
        current_phase = excluded.current_phase,
        idea = excluded.idea,
        state_revision = excluded.state_revision,
        started_at = excluded.started_at,
        last_updated_at = excluded.last_updated_at,
        failure_phase = excluded.failure_phase,
        failure_agent = excluded.failure_agent,
        failure_message = excluded.failure_message,
        last_successful_phase = excluded.last_successful_phase,
        state_json = excluded.state_json`,
        [
          projectId,
          validated.runId,
          validated.schemaVersion,
          validated.status,
          validated.currentPhase,
          validated.idea,
          validated.stateRevision,
          validated.startedAt,
          validated.lastUpdatedAt,
          // Failure columns are flattened from the optional failureContext.
          validated.failureContext?.failedPhase ?? null,
          validated.failureContext?.failedAgent ?? null,
          validated.failureContext?.errorMessage ?? null,
          validated.failureContext?.lastSuccessfulPhase ?? null,
          JSON.stringify(validated),
        ],
      );

      // Rebuild the per-phase projection for this run.
      db.run("DELETE FROM run_phases WHERE run_id = ?", [validated.runId]);
      for (const phase of validated.phases) {
        db.run(
          `INSERT INTO run_phases (run_id, phase_name, status, completed_at, confidence)
         VALUES (?, ?, ?, ?, ?)`,
          [validated.runId, phase.name, phase.status, phase.completedAt, phase.confidence],
        );
      }

      // Rebuild the per-task projection for this run.
      db.run("DELETE FROM run_tasks WHERE run_id = ?", [validated.runId]);
      for (const task of validated.tasks) {
        db.run(
          `INSERT INTO run_tasks (
          run_id,
          task_id,
          title,
          status,
          wave,
          depends_on_json,
          attempt,
          strike
        ) VALUES (?, ?, ?, ?, ?, ?, ?, ?)`,
          [
            validated.runId,
            task.id,
            task.title,
            task.status,
            task.wave,
            // Dependency list is stored as JSON text.
            JSON.stringify(task.depends_on),
            task.attempt,
            task.strike,
          ],
        );
      }

      // Rebuild the pending-dispatch projection for this run.
      db.run("DELETE FROM run_pending_dispatches WHERE run_id = ?", [validated.runId]);
      for (const pending of validated.pendingDispatches) {
        db.run(
          `INSERT INTO run_pending_dispatches (
          run_id,
          dispatch_id,
          phase,
          agent,
          issued_at,
          result_kind,
          task_id
        ) VALUES (?, ?, ?, ?, ?, ?, ?)`,
          [
            validated.runId,
            pending.dispatchId,
            pending.phase,
            pending.agent,
            pending.issuedAt,
            pending.resultKind,
            pending.taskId,
          ],
        );
      }

      // Rebuild the processed-result id set for this run.
      db.run("DELETE FROM run_processed_results WHERE run_id = ?", [validated.runId]);
      for (const resultId of validated.processedResultIds) {
        db.run(`INSERT INTO run_processed_results (run_id, result_id) VALUES (?, ?)`, [
          validated.runId,
          resultId,
        ]);
      }
    });
  } finally {
    db.close();
  }
}
299
+
300
+ export function loadActiveReviewStateFromKernel(artifactDir: string): ReviewState | null {
301
+ if (!kernelDbExists(artifactDir)) {
302
+ return null;
303
+ }
304
+
305
+ const db = openKernelDb(artifactDir, { readonly: true });
306
+ try {
307
+ const projectId = resolveProjectId(artifactDir, db, { readonly: true });
308
+ const row = db
309
+ .query("SELECT * FROM active_review_state WHERE project_id = ?")
310
+ .get(projectId) as ActiveReviewStateRow | null;
311
+ return parseReviewStateRow(row);
312
+ } finally {
313
+ db.close();
314
+ }
315
+ }
316
+
317
+ export function saveActiveReviewStateToKernel(artifactDir: string, state: ReviewState): void {
318
+ const validated = reviewStateSchema.parse(state);
319
+ const db = openKernelDb(artifactDir);
320
+ try {
321
+ const projectId = resolveProjectId(artifactDir, db);
322
+ db.run(
323
+ `INSERT INTO active_review_state (project_id, stage, scope, started_at, saved_at, state_json)
324
+ VALUES (?, ?, ?, ?, ?, ?)
325
+ ON CONFLICT(project_id) DO UPDATE SET
326
+ stage = excluded.stage,
327
+ scope = excluded.scope,
328
+ started_at = excluded.started_at,
329
+ saved_at = excluded.saved_at,
330
+ state_json = excluded.state_json`,
331
+ [
332
+ projectId,
333
+ validated.stage,
334
+ validated.scope,
335
+ validated.startedAt,
336
+ new Date().toISOString(),
337
+ JSON.stringify(validated),
338
+ ],
339
+ );
340
+ } finally {
341
+ db.close();
342
+ }
343
+ }
344
+
345
+ export function clearActiveReviewStateInKernel(artifactDir: string): void {
346
+ if (!kernelDbExists(artifactDir)) {
347
+ return;
348
+ }
349
+
350
+ const db = openKernelDb(artifactDir);
351
+ try {
352
+ const projectId = resolveProjectId(artifactDir, db);
353
+ db.run("DELETE FROM active_review_state WHERE project_id = ?", [projectId]);
354
+ } finally {
355
+ db.close();
356
+ }
357
+ }
358
+
359
+ export function loadReviewMemoryFromKernel(artifactDir: string): ReviewMemory | null {
360
+ if (!kernelDbExists(artifactDir)) {
361
+ return null;
362
+ }
363
+
364
+ const db = openKernelDb(artifactDir, { readonly: true });
365
+ try {
366
+ const projectId = resolveProjectId(artifactDir, db, { readonly: true });
367
+ const row = db
368
+ .query("SELECT * FROM project_review_memory WHERE project_id = ?")
369
+ .get(projectId) as ProjectReviewMemoryRow | null;
370
+ return parseReviewMemoryRow(row);
371
+ } finally {
372
+ db.close();
373
+ }
374
+ }
375
+
376
+ export function saveReviewMemoryToKernel(artifactDir: string, memory: ReviewMemory): void {
377
+ const validated = reviewMemorySchema.parse(memory);
378
+ const db = openKernelDb(artifactDir);
379
+ try {
380
+ const projectId = resolveProjectId(artifactDir, db);
381
+ db.run(
382
+ `INSERT INTO project_review_memory (project_id, schema_version, last_reviewed_at, state_json)
383
+ VALUES (?, ?, ?, ?)
384
+ ON CONFLICT(project_id) DO UPDATE SET
385
+ schema_version = excluded.schema_version,
386
+ last_reviewed_at = excluded.last_reviewed_at,
387
+ state_json = excluded.state_json`,
388
+ [projectId, validated.schemaVersion, validated.lastReviewedAt, JSON.stringify(validated)],
389
+ );
390
+ } finally {
391
+ db.close();
392
+ }
393
+ }
394
+
395
/**
 * Load lesson memory for the project.
 *
 * Prefers the normalized project_lessons rows (newest first); when that
 * table is absent or empty, falls back to the project_lesson_memory
 * JSON blob. Returns null when neither source has data.
 */
export function loadLessonMemoryFromKernel(artifactDir: string): LessonMemory | null {
  if (!kernelDbExists(artifactDir)) {
    return null;
  }

  const db = openKernelDb(artifactDir, { readonly: true });
  try {
    const projectId = resolveProjectId(artifactDir, db, { readonly: true });
    // Read-only connections skip migrations, so either table may be
    // missing in an older database — guard with tableExists.
    if (tableExists(db, "project_lessons")) {
      const lessonRows = db
        .query(
          `SELECT project_id, content, domain, extracted_at, source_phase, last_updated_at
       FROM project_lessons
       WHERE project_id = ?
       ORDER BY extracted_at DESC, lesson_id DESC`,
        )
        .all(projectId) as ProjectLessonRow[];
      if (lessonRows.length > 0) {
        return parseLessonRows(lessonRows);
      }
    }

    if (!tableExists(db, "project_lesson_memory")) {
      return null;
    }

    const row = db
      .query("SELECT * FROM project_lesson_memory WHERE project_id = ?")
      .get(projectId) as ProjectLessonMemoryRow | null;
    return parseLessonMemoryRow(row);
  } finally {
    db.close();
  }
}
429
+
430
/**
 * Persist lesson memory for the project.
 *
 * Upserts the JSON snapshot into project_lesson_memory and rebuilds the
 * normalized project_lessons rows (delete + re-insert), all inside one
 * write transaction so readers never observe a partial rewrite.
 */
export function saveLessonMemoryToKernel(artifactDir: string, memory: LessonMemory): void {
  const validated = lessonMemorySchema.parse(memory);
  const db = openKernelDb(artifactDir);
  try {
    const projectId = resolveProjectId(artifactDir, db);
    withWriteTransaction(db, () => {
      db.run(
        `INSERT INTO project_lesson_memory (project_id, schema_version, last_updated_at, state_json)
       VALUES (?, ?, ?, ?)
       ON CONFLICT(project_id) DO UPDATE SET
         schema_version = excluded.schema_version,
         last_updated_at = excluded.last_updated_at,
         state_json = excluded.state_json`,
        [projectId, validated.schemaVersion, validated.lastUpdatedAt, JSON.stringify(validated)],
      );

      // Replace the normalized rows wholesale; the document-level
      // lastUpdatedAt is duplicated onto every row so reads can recover it.
      db.run("DELETE FROM project_lessons WHERE project_id = ?", [projectId]);
      for (const lesson of validated.lessons) {
        db.run(
          `INSERT INTO project_lessons (
          project_id,
          content,
          domain,
          extracted_at,
          source_phase,
          last_updated_at
        ) VALUES (?, ?, ?, ?, ?, ?)`,
          [
            projectId,
            lesson.content,
            lesson.domain,
            lesson.extractedAt,
            lesson.sourcePhase,
            validated.lastUpdatedAt,
          ],
        );
      }
    });
  } finally {
    db.close();
  }
}
472
+
473
+ export function countForensicEventsInKernel(artifactDir: string): number {
474
+ if (!kernelDbExists(artifactDir)) {
475
+ return 0;
476
+ }
477
+
478
+ const db = openKernelDb(artifactDir, { readonly: true });
479
+ try {
480
+ const projectId = resolveProjectId(artifactDir, db, { readonly: true });
481
+ const row = db
482
+ .query("SELECT COUNT(*) as count FROM forensic_events WHERE project_id = ?")
483
+ .get(projectId) as {
484
+ count?: number;
485
+ } | null;
486
+ return row?.count ?? 0;
487
+ } finally {
488
+ db.close();
489
+ }
490
+ }
491
+
492
/**
 * Append a batch of forensic events for the project.
 *
 * Every event is validated up front, then the batch is inserted inside
 * one write transaction so it lands atomically. A zero-length batch is
 * a no-op and never opens the database.
 */
export function appendForensicEventsToKernel(
  artifactDir: string,
  events: readonly ForensicEvent[],
): void {
  if (events.length === 0) {
    return;
  }

  // Validate the whole batch before touching the database.
  const validated = events.map((event) => forensicEventSchema.parse(event));
  const db = openKernelDb(artifactDir);
  try {
    const projectId = resolveProjectId(artifactDir, db);
    withWriteTransaction(db, () => {
      for (const event of validated) {
        db.run(
          `INSERT INTO forensic_events (
          project_id,
          schema_version,
          timestamp,
          project_root,
          domain,
          run_id,
          session_id,
          parent_session_id,
          phase,
          dispatch_id,
          task_id,
          agent,
          type,
          code,
          message,
          payload_json
        ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
          [
            projectId,
            event.schemaVersion,
            event.timestamp,
            event.projectRoot,
            event.domain,
            event.runId,
            event.sessionId,
            event.parentSessionId,
            event.phase,
            event.dispatchId,
            event.taskId,
            event.agent,
            event.type,
            event.code,
            event.message,
            // Payload is serialized to JSON text for storage.
            JSON.stringify(event.payload),
          ],
        );
      }
    });
  } finally {
    db.close();
  }
}
550
+
551
+ export function loadForensicEventsFromKernel(artifactDir: string): readonly ForensicEvent[] {
552
+ if (!kernelDbExists(artifactDir)) {
553
+ return Object.freeze([]);
554
+ }
555
+
556
+ const db = openKernelDb(artifactDir, { readonly: true });
557
+ try {
558
+ const projectId = resolveProjectId(artifactDir, db, { readonly: true });
559
+ const rows = db
560
+ .query(
561
+ `SELECT *
562
+ FROM forensic_events
563
+ WHERE project_id = ?
564
+ ORDER BY timestamp ASC, event_id ASC`,
565
+ )
566
+ .all(projectId) as ForensicEventRow[];
567
+ return Object.freeze(rows.map(parseForensicEventRow));
568
+ } finally {
569
+ db.close();
570
+ }
571
+ }