panopticon-cli 0.6.0 → 0.6.2

Files changed (57)
  1. package/dist/cli/index.js +36 -27
  2. package/dist/cli/index.js.map +1 -1
  3. package/dist/dashboard/{event-store-BtuZCLHu.js → event-store-D7kLBd07.js} +1 -1
  4. package/dist/dashboard/{event-store-OS5jH3Eu.js → event-store-O9q0Gweh.js} +2 -2
  5. package/dist/dashboard/{event-store-OS5jH3Eu.js.map → event-store-O9q0Gweh.js.map} +1 -1
  6. package/dist/dashboard/{inspect-agent-CwT4mrvV.js → inspect-agent-B57kGDUV.js} +3 -3
  7. package/dist/dashboard/{inspect-agent-CwT4mrvV.js.map → inspect-agent-B57kGDUV.js.map} +1 -1
  8. package/dist/dashboard/{issue-service-singleton-z78bbRiO.js → issue-service-singleton-DQK42EqH.js} +1 -1
  9. package/dist/dashboard/{issue-service-singleton-0n9hcF71.js → issue-service-singleton-sb2HkB9f.js} +2 -2
  10. package/dist/dashboard/{issue-service-singleton-0n9hcF71.js.map → issue-service-singleton-sb2HkB9f.js.map} +1 -1
  11. package/dist/dashboard/{lifecycle-B6d3AE3n.js → lifecycle-ZTYdrr2O.js} +1 -1
  12. package/dist/dashboard/{merge-agent-DaIEvGJG.js → merge-agent-GLtMEsTu.js} +1 -1
  13. package/dist/dashboard/{merge-agent-CmqR1MFf.js → merge-agent-twroFuAh.js} +2 -2
  14. package/dist/dashboard/{merge-agent-CmqR1MFf.js.map → merge-agent-twroFuAh.js.map} +1 -1
  15. package/dist/dashboard/{projection-cache-Bkzs_90o.js → projection-cache-DQ9zegkK.js} +10 -10
  16. package/dist/dashboard/projection-cache-DQ9zegkK.js.map +1 -0
  17. package/dist/dashboard/public/assets/{dist-D-q87oB4.js → dist-C2sRcZJv.js} +1 -1
  18. package/dist/dashboard/public/assets/{index--G6_upSx.js → index-BCLmEMRf.js} +41 -41
  19. package/dist/dashboard/public/assets/index-BEdq7CFf.css +1 -0
  20. package/dist/dashboard/public/index.html +2 -2
  21. package/dist/dashboard/{review-status-DqJZDthU.js → review-status-CK3eBGyb.js} +1 -1
  22. package/dist/dashboard/{review-status-LQATWF6L.js → review-status-CV55Tl-n.js} +2 -2
  23. package/dist/dashboard/{review-status-LQATWF6L.js.map → review-status-CV55Tl-n.js.map} +1 -1
  24. package/dist/dashboard/server.js +85 -85
  25. package/dist/dashboard/server.js.map +1 -1
  26. package/dist/dashboard/{specialist-context-IX8ZZBxy.js → specialist-context-ColzlmGE.js} +2 -2
  27. package/dist/dashboard/{specialist-context-IX8ZZBxy.js.map → specialist-context-ColzlmGE.js.map} +1 -1
  28. package/dist/dashboard/{specialist-logs-BvOQ3XPt.js → specialist-logs-BhmDpFIq.js} +1 -1
  29. package/dist/dashboard/{specialists-C7Fyhq_j.js → specialists-C6s3U6tX.js} +21 -7
  30. package/dist/dashboard/specialists-C6s3U6tX.js.map +1 -0
  31. package/dist/dashboard/{specialists-B4aDa5xP.js → specialists-Cny632-T.js} +1 -1
  32. package/dist/dashboard/{test-agent-queue-C0WrVdrJ.js → test-agent-queue-tqI4VDsu.js} +3 -3
  33. package/dist/dashboard/{test-agent-queue-C0WrVdrJ.js.map → test-agent-queue-tqI4VDsu.js.map} +1 -1
  34. package/dist/dashboard/workflows-B2ARUpOa.js +2 -0
  35. package/dist/dashboard/{workflows-Cj6tzch6.js → workflows-N1UTipYl.js} +3 -3
  36. package/dist/dashboard/{workflows-Cj6tzch6.js.map → workflows-N1UTipYl.js.map} +1 -1
  37. package/dist/{merge-agent-BCPyotWG.js → merge-agent-VQH9z9t8.js} +2 -2
  38. package/dist/{merge-agent-BCPyotWG.js.map → merge-agent-VQH9z9t8.js.map} +1 -1
  39. package/dist/{review-status-p_HOugvo.js → review-status-2TdtHNcs.js} +1 -1
  40. package/dist/{review-status-BbY22dtx.js → review-status-Bm1bWNEa.js} +2 -2
  41. package/dist/{review-status-BbY22dtx.js.map → review-status-Bm1bWNEa.js.map} +1 -1
  42. package/dist/{specialist-context-CRBBW-z5.js → specialist-context-BdNFsfMG.js} +2 -2
  43. package/dist/{specialist-context-CRBBW-z5.js.map → specialist-context-BdNFsfMG.js.map} +1 -1
  44. package/dist/{specialist-logs-m0UvPm3F.js → specialist-logs-CLztE_bE.js} +1 -1
  45. package/dist/{specialists-ldNesMhg.js → specialists-DEKqgkxp.js} +21 -7
  46. package/dist/specialists-DEKqgkxp.js.map +1 -0
  47. package/dist/{specialists-DXDDLqoY.js → specialists-aUoUVWsN.js} +1 -1
  48. package/package.json +1 -1
  49. package/scripts/record-cost-event.js +15 -0
  50. package/scripts/record-cost-event.js.map +1 -1
  51. package/scripts/record-cost-event.ts +2 -0
  52. package/scripts/work-agent-stop-hook +26 -0
  53. package/dist/dashboard/projection-cache-Bkzs_90o.js.map +0 -1
  54. package/dist/dashboard/public/assets/index-CjpnhB4Q.css +0 -1
  55. package/dist/dashboard/specialists-C7Fyhq_j.js.map +0 -1
  56. package/dist/dashboard/workflows-BsUDQntr.js +0 -2
  57. package/dist/specialists-ldNesMhg.js.map +0 -1
@@ -540,7 +540,7 @@ function setReviewStatus(issueId, update, filePath = DEFAULT_STATUS_FILE) {
  timestamp: now
  });
  while (history.length > 10) history.shift();
- const readyForMerge = update.readyForMerge !== void 0 ? update.readyForMerge : merged.reviewStatus === "passed" && merged.testStatus === "passed" && merged.mergeStatus !== "merged" && (merged.uatStatus === void 0 || merged.uatStatus === "passed");
+ const readyForMerge = update.readyForMerge !== void 0 ? update.readyForMerge : merged.reviewStatus === "passed" && merged.testStatus === "passed" && merged.verificationStatus !== "failed" && merged.mergeStatus !== "merged" && (merged.uatStatus === void 0 || merged.uatStatus === "passed");
  const updated = {
  ...merged,
  issueId,
@@ -611,4 +611,4 @@ var init_review_status = __esmMin((() => {
  //#endregion
  export { saveReviewStatuses as a, getDatabase as c, loadReviewStatuses as i, init_database as l, getReviewStatus as n, setReviewStatus as o, init_review_status as r, closeDatabase as s, clearReviewStatus as t };

- //# sourceMappingURL=review-status-BbY22dtx.js.map
+ //# sourceMappingURL=review-status-Bm1bWNEa.js.map
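
The one behavioral change in this hunk is the added merged.verificationStatus !== "failed" clause in the readyForMerge computation: as of 0.6.2, an issue whose verification gate has failed is no longer reported as ready for merge, even when review and tests have passed. A minimal sketch of the new gate as a standalone predicate follows; the field names come from the ReviewStatus interface embedded in the sourcemap below, but the helper function itself is our illustration, not part of the package.

// Sketch of the 0.6.2 readyForMerge gate. Field names match the
// ReviewStatus interface in src/lib/review-status.ts; the standalone
// helper is hypothetical, extracted here for readability.
interface GateInput {
  reviewStatus: string;       // 'pending' | 'reviewing' | 'passed' | 'failed' | 'blocked'
  testStatus: string;         // 'pending' | 'testing' | 'passed' | 'failed' | ...
  verificationStatus?: string; // 'pending' | 'running' | 'passed' | 'failed' | 'skipped'
  mergeStatus?: string;       // 'pending' | 'merging' | 'merged' | 'failed'
  uatStatus?: string;         // 'pending' | 'testing' | 'passed' | 'failed'
}

function isReadyForMerge(merged: GateInput): boolean {
  return (
    merged.reviewStatus === 'passed' &&
    merged.testStatus === 'passed' &&
    // New in 0.6.2: a failed verification run blocks merge readiness.
    // An absent, pending, or skipped verificationStatus still passes.
    merged.verificationStatus !== 'failed' &&
    merged.mergeStatus !== 'merged' &&
    // If UAT has been initiated, it must also pass.
    (merged.uatStatus === undefined || merged.uatStatus === 'passed')
  );
}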
@@ -1 +1 @@
- {"version":3,"file":"review-status-BbY22dtx.js","names":[],"sources":["../src/lib/database/schema.ts","../src/lib/database/index.ts","../src/lib/database/review-status-db.ts","../src/lib/review-status.ts"],"sourcesContent":["/**\n * Panopticon Database Schema\n *\n * Defines the unified schema for panopticon.db.\n * All persistent application state lives here.\n */\n\nimport type Database from 'better-sqlite3';\n\n// Schema version — increment when making breaking schema changes\nexport const SCHEMA_VERSION = 13;\n\n/**\n * Initialize the complete database schema.\n * Idempotent — uses CREATE TABLE IF NOT EXISTS throughout.\n */\nexport function initSchema(db: Database.Database): void {\n db.exec(`\n -- ===== Cost Events =====\n CREATE TABLE IF NOT EXISTS cost_events (\n id INTEGER PRIMARY KEY AUTOINCREMENT,\n ts TEXT NOT NULL,\n agent_id TEXT NOT NULL,\n issue_id TEXT NOT NULL,\n session_type TEXT NOT NULL DEFAULT 'unknown',\n provider TEXT NOT NULL DEFAULT 'anthropic',\n model TEXT NOT NULL,\n input INTEGER NOT NULL DEFAULT 0,\n output INTEGER NOT NULL DEFAULT 0,\n cache_read INTEGER NOT NULL DEFAULT 0,\n cache_write INTEGER NOT NULL DEFAULT 0,\n cost REAL NOT NULL DEFAULT 0,\n request_id TEXT,\n session_id TEXT, -- Claude Code session UUID (for reconciler offset tracking)\n -- TLDR metrics\n tldr_interceptions INTEGER,\n tldr_bypasses INTEGER,\n tldr_tokens_saved INTEGER,\n tldr_bypass_reasons TEXT, -- JSON string\n -- WAL source tracking\n source_file TEXT -- path of WAL file this came from (for imports)\n );\n\n CREATE UNIQUE INDEX IF NOT EXISTS idx_cost_request_id\n ON cost_events(request_id) WHERE request_id IS NOT NULL;\n\n CREATE INDEX IF NOT EXISTS idx_cost_issue_id\n ON cost_events(issue_id, ts);\n\n CREATE INDEX IF NOT EXISTS idx_cost_agent_id\n ON cost_events(agent_id, ts);\n\n CREATE INDEX IF NOT EXISTS idx_cost_ts\n ON cost_events(ts);\n\n CREATE INDEX IF NOT EXISTS idx_cost_session_id\n ON cost_events(session_id) WHERE session_id IS NOT NULL;\n\n -- ===== Review Status =====\n CREATE TABLE IF NOT EXISTS review_status (\n issue_id TEXT PRIMARY KEY,\n review_status TEXT NOT NULL DEFAULT 'pending',\n test_status TEXT NOT NULL DEFAULT 'pending',\n merge_status TEXT,\n verification_status TEXT,\n verification_notes TEXT,\n verification_cycle_count INTEGER DEFAULT 0,\n verification_max_cycles INTEGER,\n review_notes TEXT,\n test_notes TEXT,\n merge_notes TEXT,\n updated_at TEXT NOT NULL,\n ready_for_merge INTEGER NOT NULL DEFAULT 0,\n auto_requeue_count INTEGER DEFAULT 0,\n pr_url TEXT\n );\n\n CREATE INDEX IF NOT EXISTS idx_review_status_updated\n ON review_status(updated_at);\n\n -- ===== Status History =====\n CREATE TABLE IF NOT EXISTS status_history (\n id INTEGER PRIMARY KEY AUTOINCREMENT,\n issue_id TEXT NOT NULL,\n type TEXT NOT NULL, -- 'review', 'test', 'merge'\n status TEXT NOT NULL,\n timestamp TEXT NOT NULL,\n notes TEXT,\n FOREIGN KEY (issue_id) REFERENCES review_status(issue_id) ON DELETE CASCADE\n );\n\n CREATE INDEX IF NOT EXISTS idx_status_history_issue\n ON status_history(issue_id, timestamp);\n\n -- UNIQUE constraint enables INSERT OR IGNORE deduplication in upsertReviewStatus\n CREATE UNIQUE INDEX IF NOT EXISTS idx_status_history_unique\n ON status_history(issue_id, type, status, timestamp);\n\n -- ===== Health Events =====\n CREATE TABLE IF NOT EXISTS health_events (\n id INTEGER PRIMARY KEY AUTOINCREMENT,\n agent_id TEXT NOT NULL,\n timestamp TEXT NOT NULL,\n state TEXT NOT NULL,\n previous_state TEXT,\n source TEXT,\n metadata TEXT -- JSON string\n 
);\n\n CREATE INDEX IF NOT EXISTS idx_health_agent_timestamp\n ON health_events(agent_id, timestamp);\n\n CREATE INDEX IF NOT EXISTS idx_health_timestamp\n ON health_events(timestamp);\n\n -- ===== Processed Sessions (for reconciler offset tracking) =====\n CREATE TABLE IF NOT EXISTS processed_sessions (\n session_id TEXT PRIMARY KEY,\n agent_id TEXT,\n issue_id TEXT,\n transcript_path TEXT, -- full path to the .jsonl file\n byte_offset INTEGER NOT NULL DEFAULT 0, -- bytes consumed so far\n processed_at TEXT NOT NULL,\n event_count INTEGER NOT NULL DEFAULT 0\n );\n\n -- ===== API Cache =====\n CREATE TABLE IF NOT EXISTS api_cache (\n key TEXT PRIMARY KEY,\n value TEXT NOT NULL, -- JSON string\n expires_at TEXT,\n created_at TEXT NOT NULL\n );\n\n -- ===== Rate Limits =====\n CREATE TABLE IF NOT EXISTS rate_limits (\n service TEXT PRIMARY KEY,\n requests INTEGER NOT NULL DEFAULT 0,\n window_start TEXT NOT NULL,\n limit_per_window INTEGER NOT NULL DEFAULT 1000\n );\n\n -- ===== Domain Events (PAN-428: push-first architecture) =====\n CREATE TABLE IF NOT EXISTS events (\n sequence INTEGER PRIMARY KEY AUTOINCREMENT,\n type TEXT NOT NULL,\n timestamp TEXT NOT NULL,\n payload TEXT NOT NULL -- JSON\n );\n\n CREATE INDEX IF NOT EXISTS idx_events_type\n ON events(type);\n\n CREATE INDEX IF NOT EXISTS idx_events_timestamp\n ON events(timestamp);\n\n -- ===== Projection Cache (PAN-437: instant dashboard startup) =====\n CREATE TABLE IF NOT EXISTS projection_cache (\n key TEXT PRIMARY KEY,\n data TEXT NOT NULL, -- JSON-serialized DashboardSnapshot\n sequence INTEGER NOT NULL, -- Last event sequence applied\n updated_at TEXT NOT NULL -- ISO timestamp\n );\n\n -- ===== Conversations (PAN-416: Mission Control conversation launcher) =====\n CREATE TABLE IF NOT EXISTS conversations (\n id INTEGER PRIMARY KEY AUTOINCREMENT,\n name TEXT NOT NULL UNIQUE,\n tmux_session TEXT NOT NULL,\n status TEXT NOT NULL DEFAULT 'active', -- 'active', 'ended'\n cwd TEXT NOT NULL,\n issue_id TEXT, -- optional cost attribution\n created_at TEXT NOT NULL,\n ended_at TEXT,\n last_attached_at TEXT,\n session_file TEXT, -- path to Claude Code JSONL session file (PAN-451)\n title TEXT, -- human-readable title, auto-set from first message\n title_source TEXT, -- 'auto', 'ai', or 'manual'\n title_seed TEXT, -- original auto-generated title for replacement check\n total_cost REAL DEFAULT 0, -- cached total cost in USD\n archived_at TEXT, -- ISO timestamp when archived, null = active\n model TEXT, -- model used to spawn conversation (e.g. 'minimax-m2.7-highspeed')\n effort TEXT -- effort level (e.g. 
'low', 'medium', 'high')\n );\n\n CREATE INDEX IF NOT EXISTS idx_conversations_status\n ON conversations(status);\n\n CREATE INDEX IF NOT EXISTS idx_conversations_created_at\n ON conversations(created_at);\n `);\n\n // Record schema version\n db.pragma(`user_version = ${SCHEMA_VERSION}`);\n}\n\n/**\n * Run schema migrations if the database version is older than SCHEMA_VERSION.\n * This function handles upgrading from older schema versions.\n */\nexport function runMigrations(db: Database.Database): void {\n const currentVersion = db.pragma('user_version', { simple: true }) as number;\n\n if (currentVersion === SCHEMA_VERSION) {\n return; // Already at latest version\n }\n\n if (currentVersion === 0) {\n // Fresh database — just initialize the full schema\n initSchema(db);\n return;\n }\n\n // v1 → v2: add UNIQUE index on status_history for INSERT OR IGNORE dedup\n if (currentVersion < 2) {\n // Remove duplicate rows before adding the unique index (keep lowest id per unique key)\n db.exec(`\n DELETE FROM status_history\n WHERE id NOT IN (\n SELECT MIN(id)\n FROM status_history\n GROUP BY issue_id, type, status, timestamp\n );\n CREATE UNIQUE INDEX IF NOT EXISTS idx_status_history_unique\n ON status_history(issue_id, type, status, timestamp);\n `);\n }\n\n // v2 → v3: add session_id to cost_events, extend processed_sessions for reconciler\n if (currentVersion < 3) {\n // Add session_id column to cost_events (nullable, no data loss)\n try {\n db.exec(`ALTER TABLE cost_events ADD COLUMN session_id TEXT`);\n } catch {\n // Column may already exist if schema was manually applied\n }\n\n // Add index on session_id\n db.exec(`\n CREATE INDEX IF NOT EXISTS idx_cost_session_id\n ON cost_events(session_id) WHERE session_id IS NOT NULL;\n `);\n\n // Extend processed_sessions with new columns for reconciler\n try {\n db.exec(`ALTER TABLE processed_sessions ADD COLUMN agent_id TEXT`);\n } catch { /* already exists */ }\n try {\n db.exec(`ALTER TABLE processed_sessions ADD COLUMN issue_id TEXT`);\n } catch { /* already exists */ }\n try {\n db.exec(`ALTER TABLE processed_sessions ADD COLUMN transcript_path TEXT`);\n } catch { /* already exists */ }\n try {\n db.exec(`ALTER TABLE processed_sessions ADD COLUMN byte_offset INTEGER NOT NULL DEFAULT 0`);\n } catch { /* already exists */ }\n }\n\n // v3 → v4: add events table for push-first architecture (PAN-428)\n if (currentVersion < 4) {\n db.exec(`\n CREATE TABLE IF NOT EXISTS events (\n sequence INTEGER PRIMARY KEY AUTOINCREMENT,\n type TEXT NOT NULL,\n timestamp TEXT NOT NULL,\n payload TEXT NOT NULL -- JSON\n );\n\n CREATE INDEX IF NOT EXISTS idx_events_type\n ON events(type);\n\n CREATE INDEX IF NOT EXISTS idx_events_timestamp\n ON events(timestamp);\n `);\n }\n\n // v4 → v5: add projection_cache table (PAN-437: instant dashboard startup)\n if (currentVersion < 5) {\n db.exec(`\n CREATE TABLE IF NOT EXISTS projection_cache (\n key TEXT PRIMARY KEY,\n data TEXT NOT NULL,\n sequence INTEGER NOT NULL,\n updated_at TEXT NOT NULL\n );\n `);\n }\n\n // v5 → v6: add conversations table (PAN-416)\n if (currentVersion < 6) {\n db.exec(`\n CREATE TABLE IF NOT EXISTS conversations (\n id INTEGER PRIMARY KEY AUTOINCREMENT,\n name TEXT NOT NULL UNIQUE,\n tmux_session TEXT NOT NULL,\n status TEXT NOT NULL DEFAULT 'active',\n cwd TEXT NOT NULL,\n issue_id TEXT,\n created_at TEXT NOT NULL,\n ended_at TEXT,\n last_attached_at TEXT\n );\n\n CREATE INDEX IF NOT EXISTS idx_conversations_status\n ON conversations(status);\n\n CREATE INDEX IF NOT EXISTS 
idx_conversations_created_at\n ON conversations(created_at);\n `);\n }\n\n // v6 → v7: add session_file column to conversations (PAN-451)\n if (currentVersion < 7) {\n try {\n db.exec(`ALTER TABLE conversations ADD COLUMN session_file TEXT`);\n } catch { /* already exists */ }\n }\n\n // v7 → v8: add title column to conversations (auto-set from first message)\n if (currentVersion < 8) {\n try {\n db.exec(`ALTER TABLE conversations ADD COLUMN title TEXT`);\n } catch { /* already exists */ }\n }\n\n // v8 → v9: add title_source and title_seed columns to conversations\n // title_source tracks how the title was set: 'auto' (truncated first message),\n // 'ai' (Claude-generated), or 'manual' (user renamed). Used for T3Code-style\n // canReplaceThreadTitle logic — only auto-generated titles get AI replacement.\n // title_seed stores the original truncated message for replacement eligibility.\n if (currentVersion < 9) {\n try {\n db.exec(`ALTER TABLE conversations ADD COLUMN title_source TEXT`);\n } catch { /* already exists */ }\n try {\n db.exec(`ALTER TABLE conversations ADD COLUMN title_seed TEXT`);\n } catch { /* already exists */ }\n }\n\n // v9 → v10: add total_cost column to conversations (cached cost in USD)\n if (currentVersion < 10) {\n try {\n db.exec(`ALTER TABLE conversations ADD COLUMN total_cost REAL DEFAULT 0`);\n } catch { /* already exists */ }\n }\n\n // v10 → v11: expression index for UPPER(issue_id) on cost_events\n // The N+1 queries in getCostsByIssueFromDb use UPPER(issue_id) which defeats\n // the existing idx_cost_issue_id index. This expression index fixes that.\n if (currentVersion < 11) {\n try {\n db.exec(`CREATE INDEX IF NOT EXISTS idx_cost_issue_upper ON cost_events(UPPER(issue_id))`);\n } catch { /* already exists */ }\n }\n\n // v11 → v12: archived_at column + index for conversations (T3Code pattern)\n if (currentVersion < 12) {\n try {\n db.exec(`ALTER TABLE conversations ADD COLUMN archived_at TEXT`);\n } catch { /* already exists */ }\n try {\n db.exec(`CREATE INDEX IF NOT EXISTS idx_conversations_archived ON conversations(archived_at)`);\n } catch { /* already exists */ }\n }\n\n // v12 → v13: add model + effort columns to conversations (preserve model on resume)\n if (currentVersion < 13) {\n try {\n db.exec(`ALTER TABLE conversations ADD COLUMN model TEXT`);\n } catch { /* already exists */ }\n try {\n db.exec(`ALTER TABLE conversations ADD COLUMN effort TEXT`);\n } catch { /* already exists */ }\n }\n\n // After all migrations, set the version\n db.pragma(`user_version = ${SCHEMA_VERSION}`);\n}\n","/**\n * Panopticon Unified Database\n *\n * Single panopticon.db at ~/.panopticon/panopticon.db.\n * Singleton pattern — one connection shared across the process.\n *\n * IMPORTANT: This module is safe to import in both server and CLI contexts.\n * Never use execSync here — this is synchronous SQLite, not a subprocess.\n *\n * Dual-runtime (PAN-428):\n * - Bun: uses bun:sqlite (better-sqlite3 is a native addon — ERR_DLOPEN_FAILED in Bun)\n * - Node: uses better-sqlite3\n * In both cases the external API is identical: pragma(), exec(), prepare(), close().\n */\n\nimport type Database from 'better-sqlite3';\nimport { createRequire } from 'module';\nimport { join } from 'path';\nimport { existsSync, mkdirSync } from 'fs';\nimport { getPanopticonHome } from '../paths.js';\nimport { runMigrations } from './schema.js';\n\ndeclare const Bun: unknown;\n\nfunction isBunRuntime(): boolean {\n return typeof Bun !== 'undefined';\n}\n\n// createRequire allows synchronous 
require() in ESM — works in both Bun and Node\nconst _require = createRequire(import.meta.url);\n\nlet _db: Database.Database | null = null;\n\n/**\n * Get the path to panopticon.db (dynamic, respects PANOPTICON_HOME override for tests)\n */\nexport function getDatabasePath(): string {\n return join(getPanopticonHome(), 'panopticon.db');\n}\n\n/**\n * Initialize and return the singleton database connection.\n * Safe to call multiple times — returns the existing connection after first call.\n */\nexport function getDatabase(): Database.Database {\n if (_db) {\n return _db;\n }\n\n const home = getPanopticonHome();\n if (!existsSync(home)) {\n mkdirSync(home, { recursive: true });\n }\n\n const dbPath = getDatabasePath();\n\n if (isBunRuntime()) {\n // better-sqlite3 is a native Node.js addon that fails in Bun with ERR_DLOPEN_FAILED.\n // Use bun:sqlite instead, with a pragma() shim for API compatibility.\n const { Database: BunDatabase } = _require('bun:sqlite') as { Database: new (path: string) => any };\n const bunDb = new BunDatabase(dbPath);\n\n // bun:sqlite has no pragma() method — shim it using exec() and query().get()\n bunDb.pragma = function (sql: string, options?: { simple?: boolean }): any {\n if (options?.simple) {\n // Read-only: return the scalar value directly (e.g. db.pragma('user_version', { simple: true }))\n const key = sql.trim();\n const row = bunDb.query(`PRAGMA ${key}`).get() as Record<string, unknown> | null;\n return row?.[key] ?? null;\n }\n // Set or no-return pragma (e.g. 'journal_mode = WAL', 'foreign_keys = ON')\n bunDb.exec(`PRAGMA ${sql}`);\n return undefined;\n };\n\n _db = bunDb as Database.Database;\n } else {\n // Node.js path: load better-sqlite3 lazily (avoids import-time native addon load)\n const BetterSqlite3 = _require('better-sqlite3');\n _db = new BetterSqlite3(dbPath) as Database.Database;\n }\n\n // Enable WAL mode for concurrent readers + single writer\n _db.pragma('journal_mode = WAL');\n // Enforce foreign keys\n _db.pragma('foreign_keys = ON');\n // Write-ahead log synchronization — NORMAL is safe and fast\n _db.pragma('synchronous = NORMAL');\n\n // Initialize or migrate schema\n runMigrations(_db);\n\n return _db;\n}\n\n/**\n * Close the database connection and release the singleton.\n * Primarily used in tests to get a fresh connection.\n */\nexport function closeDatabase(): void {\n if (_db) {\n _db.close();\n _db = null;\n }\n}\n\n/**\n * Force re-initialization of the database connection.\n * Used in tests after PANOPTICON_HOME changes.\n */\nexport function resetDatabase(): void {\n closeDatabase();\n}\n","/**\n * Review Status SQLite Storage\n *\n * Provides SQLite-backed CRUD for ReviewStatus, matching the interface in\n * src/lib/review-status.ts. 
Atomic single-transaction writes eliminate the\n * TOCTOU race in the JSON-backed implementation.\n */\n\nimport { getDatabase } from './index.js';\nimport type { ReviewStatus, StatusHistoryEntry } from '../review-status.js';\n\n// ============== Write operations ==============\n\n/**\n * Upsert a review status record atomically.\n * Replaces the JSON read-modify-write cycle with a single transaction.\n */\nexport function upsertReviewStatus(status: ReviewStatus): void {\n const db = getDatabase();\n\n const upsert = db.transaction((s: ReviewStatus) => {\n // Upsert main record\n db.prepare(`\n INSERT INTO review_status (\n issue_id, review_status, test_status, merge_status,\n verification_status, verification_notes,\n verification_cycle_count, verification_max_cycles,\n review_notes, test_notes, merge_notes,\n updated_at, ready_for_merge, auto_requeue_count, pr_url\n ) VALUES (\n ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?\n )\n ON CONFLICT(issue_id) DO UPDATE SET\n review_status = excluded.review_status,\n test_status = excluded.test_status,\n merge_status = excluded.merge_status,\n verification_status = excluded.verification_status,\n verification_notes = excluded.verification_notes,\n verification_cycle_count = excluded.verification_cycle_count,\n verification_max_cycles = excluded.verification_max_cycles,\n review_notes = excluded.review_notes,\n test_notes = excluded.test_notes,\n merge_notes = excluded.merge_notes,\n updated_at = excluded.updated_at,\n ready_for_merge = excluded.ready_for_merge,\n auto_requeue_count = excluded.auto_requeue_count,\n pr_url = excluded.pr_url\n `).run(\n s.issueId,\n s.reviewStatus,\n s.testStatus,\n s.mergeStatus ?? null,\n s.verificationStatus ?? null,\n s.verificationNotes ?? null,\n s.verificationCycleCount ?? null,\n s.verificationMaxCycles ?? null,\n s.reviewNotes ?? null,\n s.testNotes ?? null,\n s.mergeNotes ?? null,\n s.updatedAt,\n s.readyForMerge ? 1 : 0,\n s.autoRequeueCount ?? null,\n s.prUrl ?? null,\n );\n\n // Append new history entries (deduplicate by timestamp to avoid re-inserting)\n if (s.history && s.history.length > 0) {\n const insertHistory = db.prepare(`\n INSERT OR IGNORE INTO status_history (issue_id, type, status, timestamp, notes)\n VALUES (?, ?, ?, ?, ?)\n `);\n for (const entry of s.history) {\n insertHistory.run(s.issueId, entry.type, entry.status, entry.timestamp, entry.notes ?? 
null);\n }\n }\n });\n\n upsert(status);\n}\n\n/**\n * Delete a review status record and its history.\n */\nexport function deleteReviewStatus(issueId: string): void {\n const db = getDatabase();\n db.prepare('DELETE FROM review_status WHERE issue_id = ?').run(issueId);\n}\n\n// ============== Read operations ==============\n\n/**\n * Get a single review status by issue ID.\n */\nexport function getReviewStatusFromDb(issueId: string): ReviewStatus | null {\n const db = getDatabase();\n\n const row = db.prepare(`\n SELECT * FROM review_status WHERE issue_id = ?\n `).get(issueId) as DbReviewStatusRow | undefined;\n\n if (!row) return null;\n\n const history = getHistoryFromDb(issueId);\n return rowToReviewStatus(row, history);\n}\n\n/**\n * Get all review statuses.\n */\nexport function getAllReviewStatusesFromDb(): Record<string, ReviewStatus> {\n const db = getDatabase();\n\n const rows = db.prepare('SELECT * FROM review_status ORDER BY updated_at DESC').all() as DbReviewStatusRow[];\n const result: Record<string, ReviewStatus> = {};\n\n for (const row of rows) {\n const history = getHistoryFromDb(row.issue_id);\n result[row.issue_id] = rowToReviewStatus(row, history);\n }\n\n return result;\n}\n\n/**\n * Get history entries for an issue.\n */\nfunction getHistoryFromDb(issueId: string): StatusHistoryEntry[] {\n const db = getDatabase();\n const rows = db.prepare(`\n SELECT type, status, timestamp, notes\n FROM status_history\n WHERE issue_id = ?\n ORDER BY timestamp ASC\n `).all(issueId) as Array<{ type: string; status: string; timestamp: string; notes: string | null }>;\n\n return rows.map(r => ({\n type: r.type as 'review' | 'test' | 'merge',\n status: r.status,\n timestamp: r.timestamp,\n ...(r.notes ? { notes: r.notes } : {}),\n }));\n}\n\n// ============== Row mapping ==============\n\ninterface DbReviewStatusRow {\n issue_id: string;\n review_status: string;\n test_status: string;\n merge_status: string | null;\n verification_status: string | null;\n verification_notes: string | null;\n verification_cycle_count: number | null;\n verification_max_cycles: number | null;\n review_notes: string | null;\n test_notes: string | null;\n merge_notes: string | null;\n updated_at: string;\n ready_for_merge: number;\n auto_requeue_count: number | null;\n pr_url: string | null;\n}\n\nfunction rowToReviewStatus(row: DbReviewStatusRow, history: StatusHistoryEntry[]): ReviewStatus {\n return {\n issueId: row.issue_id,\n reviewStatus: row.review_status as ReviewStatus['reviewStatus'],\n testStatus: row.test_status as ReviewStatus['testStatus'],\n mergeStatus: row.merge_status as ReviewStatus['mergeStatus'] ?? undefined,\n verificationStatus: row.verification_status as ReviewStatus['verificationStatus'] ?? undefined,\n verificationNotes: row.verification_notes ?? undefined,\n verificationCycleCount: row.verification_cycle_count ?? undefined,\n verificationMaxCycles: row.verification_max_cycles ?? undefined,\n reviewNotes: row.review_notes ?? undefined,\n testNotes: row.test_notes ?? undefined,\n mergeNotes: row.merge_notes ?? undefined,\n updatedAt: row.updated_at,\n readyForMerge: row.ready_for_merge === 1,\n autoRequeueCount: row.auto_requeue_count ?? undefined,\n prUrl: row.pr_url ?? undefined,\n history: history.length > 0 ? 
history : undefined,\n };\n}\n","import { existsSync, readFileSync, writeFileSync, mkdirSync } from 'fs';\nimport { join, dirname } from 'path';\nimport { homedir } from 'os';\nimport { notifyPipeline } from './pipeline-notifier.js';\nimport {\n upsertReviewStatus as dbUpsert,\n deleteReviewStatus as dbDelete,\n getReviewStatusFromDb,\n getAllReviewStatusesFromDb,\n} from './database/review-status-db.js';\n\nexport interface StatusHistoryEntry {\n type: 'review' | 'test' | 'merge' | 'inspect' | 'uat';\n status: string;\n timestamp: string;\n notes?: string;\n}\n\nexport interface ReviewStatus {\n issueId: string;\n reviewStatus: 'pending' | 'reviewing' | 'passed' | 'failed' | 'blocked';\n testStatus: 'pending' | 'testing' | 'passed' | 'failed' | 'skipped' | 'dispatch_failed';\n mergeStatus?: 'pending' | 'merging' | 'merged' | 'failed';\n inspectStatus?: 'pending' | 'inspecting' | 'passed' | 'failed';\n inspectNotes?: string;\n uatStatus?: 'pending' | 'testing' | 'passed' | 'failed';\n uatNotes?: string;\n verificationStatus?: 'pending' | 'running' | 'passed' | 'failed' | 'skipped';\n verificationNotes?: string;\n verificationCycleCount?: number;\n verificationMaxCycles?: number;\n reviewNotes?: string;\n testNotes?: string;\n mergeNotes?: string;\n updatedAt: string;\n readyForMerge: boolean;\n autoRequeueCount?: number;\n prUrl?: string;\n history?: StatusHistoryEntry[];\n /** HEAD commit SHA at the time review passed — used to detect new commits after review */\n reviewedAtCommit?: string;\n}\n\nconst DEFAULT_STATUS_FILE = join(homedir(), '.panopticon', 'review-status.json');\n\nexport function loadReviewStatuses(filePath = DEFAULT_STATUS_FILE): Record<string, ReviewStatus> {\n // Prefer SQLite when using the default path\n if (filePath === DEFAULT_STATUS_FILE) {\n try {\n return getAllReviewStatusesFromDb();\n } catch {\n // Fall through to JSON on DB error\n }\n }\n\n try {\n if (existsSync(filePath)) {\n return JSON.parse(readFileSync(filePath, 'utf-8'));\n }\n } catch (err) {\n console.error('Failed to load review statuses:', err);\n }\n return {};\n}\n\nexport function saveReviewStatuses(statuses: Record<string, ReviewStatus>, filePath = DEFAULT_STATUS_FILE): void {\n try {\n const dir = dirname(filePath);\n if (!existsSync(dir)) {\n mkdirSync(dir, { recursive: true });\n }\n writeFileSync(filePath, JSON.stringify(statuses, null, 2));\n } catch (err) {\n console.error('Failed to save review statuses:', err);\n }\n}\n\nexport function setReviewStatus(\n issueId: string,\n update: Partial<ReviewStatus>,\n filePath = DEFAULT_STATUS_FILE,\n): ReviewStatus {\n const statuses = loadReviewStatuses(filePath);\n const existing = statuses[issueId] || {\n issueId,\n reviewStatus: 'pending' as const,\n testStatus: 'pending' as const,\n updatedAt: new Date().toISOString(),\n readyForMerge: false,\n };\n\n // Guard: reject reviewStatus regression from 'passed' to 'reviewing' unless the caller\n // is explicitly resetting the merge lifecycle (update includes mergeStatus).\n // This is belt-and-suspenders — endpoint-level guards should catch this first.\n if (update.reviewStatus === 'reviewing' && existing.reviewStatus === 'passed' && update.mergeStatus === undefined) {\n console.warn(`[review-status] Rejecting reviewStatus regression from 'passed' to 'reviewing' for ${issueId} (mergeStatus not being reset)`);\n return existing as ReviewStatus;\n }\n\n const merged = { ...existing, ...update };\n\n // Track status transitions in history (last 10 entries)\n const history = [...(existing.history || 
[])];\n const now = new Date().toISOString();\n if (update.reviewStatus && update.reviewStatus !== existing.reviewStatus) {\n history.push({ type: 'review', status: update.reviewStatus, timestamp: now, notes: update.reviewNotes });\n }\n if (update.testStatus && update.testStatus !== existing.testStatus) {\n history.push({ type: 'test', status: update.testStatus, timestamp: now, notes: update.testNotes });\n }\n if (update.uatStatus && update.uatStatus !== existing.uatStatus) {\n history.push({ type: 'uat', status: update.uatStatus, timestamp: now, notes: update.uatNotes });\n }\n if (update.mergeStatus && update.mergeStatus !== existing.mergeStatus) {\n history.push({ type: 'merge', status: update.mergeStatus, timestamp: now });\n }\n while (history.length > 10) history.shift();\n\n // readyForMerge is true when all required gates pass.\n // If uatStatus exists (UAT specialist has been involved), it must also be 'passed'.\n const readyForMerge = update.readyForMerge !== undefined\n ? update.readyForMerge\n : (\n merged.reviewStatus === 'passed' &&\n merged.testStatus === 'passed' &&\n merged.mergeStatus !== 'merged' &&\n // If UAT has been initiated, it must pass too\n (merged.uatStatus === undefined || merged.uatStatus === 'passed')\n );\n\n const updated: ReviewStatus = {\n ...merged,\n issueId,\n updatedAt: now,\n readyForMerge,\n history,\n };\n\n // Report commit statuses to GitHub when readyForMerge transitions to true (PAN-536)\n if (readyForMerge && !existing.readyForMerge && updated.prUrl) {\n (async () => {\n try {\n const { isGitHubAppConfigured, reportCommitStatus } = await import('./github-app.js');\n if (!isGitHubAppConfigured()) return;\n const prMatch = updated.prUrl!.match(/github\\.com\\/([^/]+)\\/([^/]+)\\/pull/);\n if (!prMatch) return;\n const [, owner, repo] = prMatch;\n // Get HEAD SHA of the PR branch\n const { exec } = await import('child_process');\n const { promisify } = await import('util');\n const execAsync = promisify(exec);\n const { stdout } = await execAsync(\n `gh pr view ${updated.prUrl!.match(/\\/pull\\/(\\d+)/)?.[1]} --json headRefOid --jq .headRefOid`,\n { encoding: 'utf-8', timeout: 10000 }\n );\n const sha = stdout.trim();\n if (sha) {\n await reportCommitStatus(owner, repo, sha, 'success', 'panopticon/review', 'Review passed');\n await reportCommitStatus(owner, repo, sha, 'success', 'panopticon/test', 'Tests passed');\n console.log(`[review-status] Reported commit statuses for ${issueId} (${sha.slice(0, 8)})`);\n }\n } catch (err: any) {\n console.warn(`[review-status] Failed to report commit status: ${err.message}`);\n }\n })();\n }\n\n // SQLite first — it is the authoritative store (reads prefer SQLite)\n if (filePath === DEFAULT_STATUS_FILE) {\n try {\n dbUpsert(updated);\n } catch (err) {\n console.error('[review-status] SQLite write failed (continuing with JSON):', err);\n }\n }\n\n // JSON second — legacy fallback for tools that read review-status.json directly\n statuses[issueId] = updated;\n saveReviewStatuses(statuses, filePath);\n\n notifyPipeline({ type: 'status_changed', issueId, status: updated });\n\n return updated;\n}\n\nexport function getReviewStatus(issueId: string, filePath = DEFAULT_STATUS_FILE): ReviewStatus | null {\n // Prefer SQLite when using the default path\n if (filePath === DEFAULT_STATUS_FILE) {\n try {\n const fromDb = getReviewStatusFromDb(issueId);\n if (fromDb) return fromDb;\n } catch {\n // Fall through to JSON on DB error\n }\n }\n const statuses = loadReviewStatuses(filePath);\n return statuses[issueId] || 
null;\n}\n\n/**\n * On server startup, clear any mergeStatus stuck at 'merging'.\n * Pending merge operations are in-memory only — they don't survive a restart.\n * Any 'merging' status after boot is definitionally stuck (PAN-490).\n */\nexport function clearStuckMergeStatuses(): void {\n const statuses = loadReviewStatuses();\n const stuck = Object.values(statuses).filter(s => s.mergeStatus === 'merging');\n if (stuck.length === 0) return;\n console.log(`[review-status] Clearing ${stuck.length} stuck 'merging' status(es) on startup`);\n for (const s of stuck) {\n setReviewStatus(s.issueId, { mergeStatus: 'pending' });\n }\n}\n\nexport function clearReviewStatus(issueId: string, filePath = DEFAULT_STATUS_FILE): void {\n const statuses = loadReviewStatuses(filePath);\n delete statuses[issueId];\n saveReviewStatuses(statuses, filePath);\n\n // Dual-delete from SQLite when using the default path\n if (filePath === DEFAULT_STATUS_FILE) {\n try {\n dbDelete(issueId);\n } catch (err) {\n console.error('[review-status] SQLite delete failed (continuing with JSON):', err);\n }\n }\n}\n"],"mappings":";;;;;;;;;;;;AAgBA,SAAgB,WAAW,IAA6B;AACtD,IAAG,KAAK;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;IA6KN;AAGF,IAAG,OAAO,oBAAmC;;;;;;AAO/C,SAAgB,cAAc,IAA6B;CACzD,MAAM,iBAAiB,GAAG,OAAO,gBAAgB,EAAE,QAAQ,MAAM,CAAC;AAElE,KAAI,mBAAA,GACF;AAGF,KAAI,mBAAmB,GAAG;AAExB,aAAW,GAAG;AACd;;AAIF,KAAI,iBAAiB,EAEnB,IAAG,KAAK;;;;;;;;;MASN;AAIJ,KAAI,iBAAiB,GAAG;AAEtB,MAAI;AACF,MAAG,KAAK,qDAAqD;UACvD;AAKR,KAAG,KAAK;;;MAGN;AAGF,MAAI;AACF,MAAG,KAAK,0DAA0D;UAC5D;AACR,MAAI;AACF,MAAG,KAAK,0DAA0D;UAC5D;AACR,MAAI;AACF,MAAG,KAAK,iEAAiE;UACnE;AACR,MAAI;AACF,MAAG,KAAK,mFAAmF;UACrF;;AAIV,KAAI,iBAAiB,EACnB,IAAG,KAAK;;;;;;;;;;;;;MAaN;AAIJ,KAAI,iBAAiB,EACnB,IAAG,KAAK;;;;;;;MAON;AAIJ,KAAI,iBAAiB,EACnB,IAAG,KAAK;;;;;;;;;;;;;;;;;;MAkBN;AAIJ,KAAI,iBAAiB,EACnB,KAAI;AACF,KAAG,KAAK,yDAAyD;SAC3D;AAIV,KAAI,iBAAiB,EACnB,KAAI;AACF,KAAG,KAAK,kDAAkD;SACpD;AAQV,KAAI,iBAAiB,GAAG;AACtB,MAAI;AACF,MAAG,KAAK,yDAAyD;UAC3D;AACR,MAAI;AACF,MAAG,KAAK,uDAAuD;UACzD;;AAIV,KAAI,iBAAiB,GACnB,KAAI;AACF,KAAG,KAAK,iEAAiE;SACnE;AAMV,KAAI,iBAAiB,GACnB,KAAI;AACF,KAAG,KAAK,kFAAkF;SACpF;AAIV,KAAI,iBAAiB,IAAI;AACvB,MAAI;AACF,MAAG,KAAK,wDAAwD;UAC1D;AACR,MAAI;AACF,MAAG,KAAK,sFAAsF;UACxF;;AAIV,KAAI,iBAAiB,IAAI;AACvB,MAAI;AACF,MAAG,KAAK,kDAAkD;UACpD;AACR,MAAI;AACF,MAAG,KAAK,mDAAmD;UACrD;;AAIV,IAAG,OAAO,oBAAmC;;;;;AChW/C,SAAS,eAAwB;AAC/B,QAAO,OAAO,QAAQ;;;;;AAWxB,SAAgB,kBAA0B;AACxC,QAAO,KAAK,mBAAmB,EAAE,gBAAgB;;;;;;AAOnD,SAAgB,cAAiC;AAC/C,KAAI,IACF,QAAO;CAGT,MAAM,OAAO,mBAAmB;AAChC,KAAI,CAAC,WAAW,KAAK,CACnB,WAAU,MAAM,EAAE,WAAW,MAAM,CAAC;CAGtC,MAAM,SAAS,iBAAiB;AAEhC,KAAI,cAAc,EAAE;EAGlB,MAAM,EAAE,UAAU,gBAAgB,SAAS,aAAa;EACxD,MAAM,QAAQ,IAAI,YAAY,OAAO;AAGrC,QAAM,SAAS,SAAU,KAAa,SAAqC;AACzE,OAAI,SAAS,QAAQ;IAEnB,MAAM,MAAM,IAAI,MAAM;AAEtB,WADY,MAAM,MAAM,UAAU,MAAM,CAAC,KAAK,GACjC,QAAQ;;AAGvB,SAAM,KAAK,UAAU,MAAM;;AAI7B,QAAM;OAIN,OAAM,KADgB,SAAS,iBAAiB,EACxB,OAAO;AAIjC,KAAI,OAAO,qBAAqB;AAEhC,KAAI,OAAO,oBAAoB;AAE/B,KAAI,OAAO,uBAAuB;AAGlC,eAAc,IAAI;AAElB,QAAO;;;;;;AAOT,SAAgB,gBAAsB;AACpC,KAAI,KAAK;AACP,MAAI,OAAO;AACX,QAAM;;;;;aAnFsC;cACJ;AAStC,YAAW,cAAc,OAAO,KAAK,IAAI;AAE3C,OAAgC;;;;;;;;ACdpC,SAAgB,mBAAmB,QAA4B;CAC7D,MAAM,KAAK,aAAa;AAET,IAAG,aAAa,MAAoB;AAEjD,KAAG,QAAQ;;;;;;;;;;;;;;;;;;;;;;;;;MAyBT,CAAC,IACD,EAAE,SACF,EAAE,cACF,EAAE,YACF,EAAE,eAAe,MACjB,EAAE,sBAAsB,MACxB,EAAE,qBAAqB,MACvB,EAAE,0BAA0B,MAC5B,EAAE,yBAAyB,MAC3B,EAAE,eAAe,MACjB,EAAE,aAAa,MACf,EAAE,cAAc,MAChB,EAAE
,WACF,EAAE,gBAAgB,IAAI,GACtB,EAAE,oBAAoB,MACtB,EAAE,SAAS,KACZ;AAGD,MAAI,EAAE,WAAW,EAAE,QAAQ,SAAS,GAAG;GACrC,MAAM,gBAAgB,GAAG,QAAQ;;;QAG/B;AACF,QAAK,MAAM,SAAS,EAAE,QACpB,eAAc,IAAI,EAAE,SAAS,MAAM,MAAM,MAAM,QAAQ,MAAM,WAAW,MAAM,SAAS,KAAK;;GAGhG,CAEK,OAAO;;;;;AAMhB,SAAgB,mBAAmB,SAAuB;AAC7C,cAAa,CACrB,QAAQ,+CAA+C,CAAC,IAAI,QAAQ;;;;;AAQzE,SAAgB,sBAAsB,SAAsC;CAG1E,MAAM,MAFK,aAAa,CAET,QAAQ;;IAErB,CAAC,IAAI,QAAQ;AAEf,KAAI,CAAC,IAAK,QAAO;AAGjB,QAAO,kBAAkB,KADT,iBAAiB,QAAQ,CACH;;;;;AAMxC,SAAgB,6BAA2D;CAGzE,MAAM,OAFK,aAAa,CAER,QAAQ,uDAAuD,CAAC,KAAK;CACrF,MAAM,SAAuC,EAAE;AAE/C,MAAK,MAAM,OAAO,MAAM;EACtB,MAAM,UAAU,iBAAiB,IAAI,SAAS;AAC9C,SAAO,IAAI,YAAY,kBAAkB,KAAK,QAAQ;;AAGxD,QAAO;;;;;AAMT,SAAS,iBAAiB,SAAuC;AAS/D,QARW,aAAa,CACR,QAAQ;;;;;IAKtB,CAAC,IAAI,QAAQ,CAEH,KAAI,OAAM;EACpB,MAAM,EAAE;EACR,QAAQ,EAAE;EACV,WAAW,EAAE;EACb,GAAI,EAAE,QAAQ,EAAE,OAAO,EAAE,OAAO,GAAG,EAAE;EACtC,EAAE;;AAuBL,SAAS,kBAAkB,KAAwB,SAA6C;AAC9F,QAAO;EACL,SAAS,IAAI;EACb,cAAc,IAAI;EAClB,YAAY,IAAI;EAChB,aAAa,IAAI,gBAA+C,KAAA;EAChE,oBAAoB,IAAI,uBAA6D,KAAA;EACrF,mBAAmB,IAAI,sBAAsB,KAAA;EAC7C,wBAAwB,IAAI,4BAA4B,KAAA;EACxD,uBAAuB,IAAI,2BAA2B,KAAA;EACtD,aAAa,IAAI,gBAAgB,KAAA;EACjC,WAAW,IAAI,cAAc,KAAA;EAC7B,YAAY,IAAI,eAAe,KAAA;EAC/B,WAAW,IAAI;EACf,eAAe,IAAI,oBAAoB;EACvC,kBAAkB,IAAI,sBAAsB,KAAA;EAC5C,OAAO,IAAI,UAAU,KAAA;EACrB,SAAS,QAAQ,SAAS,IAAI,UAAU,KAAA;EACzC;;;gBA7KsC;;;;ACqCzC,SAAgB,mBAAmB,WAAW,qBAAmD;AAE/F,KAAI,aAAa,oBACf,KAAI;AACF,SAAO,4BAA4B;SAC7B;AAKV,KAAI;AACF,MAAI,WAAW,SAAS,CACtB,QAAO,KAAK,MAAM,aAAa,UAAU,QAAQ,CAAC;UAE7C,KAAK;AACZ,UAAQ,MAAM,mCAAmC,IAAI;;AAEvD,QAAO,EAAE;;AAGX,SAAgB,mBAAmB,UAAwC,WAAW,qBAA2B;AAC/G,KAAI;EACF,MAAM,MAAM,QAAQ,SAAS;AAC7B,MAAI,CAAC,WAAW,IAAI,CAClB,WAAU,KAAK,EAAE,WAAW,MAAM,CAAC;AAErC,gBAAc,UAAU,KAAK,UAAU,UAAU,MAAM,EAAE,CAAC;UACnD,KAAK;AACZ,UAAQ,MAAM,mCAAmC,IAAI;;;AAIzD,SAAgB,gBACd,SACA,QACA,WAAW,qBACG;CACd,MAAM,WAAW,mBAAmB,SAAS;CAC7C,MAAM,WAAW,SAAS,YAAY;EACpC;EACA,cAAc;EACd,YAAY;EACZ,4BAAW,IAAI,MAAM,EAAC,aAAa;EACnC,eAAe;EAChB;AAKD,KAAI,OAAO,iBAAiB,eAAe,SAAS,iBAAiB,YAAY,OAAO,gBAAgB,KAAA,GAAW;AACjH,UAAQ,KAAK,sFAAsF,QAAQ,gCAAgC;AAC3I,SAAO;;CAGT,MAAM,SAAS;EAAE,GAAG;EAAU,GAAG;EAAQ;CAGzC,MAAM,UAAU,CAAC,GAAI,SAAS,WAAW,EAAE,CAAE;CAC7C,MAAM,uBAAM,IAAI,MAAM,EAAC,aAAa;AACpC,KAAI,OAAO,gBAAgB,OAAO,iBAAiB,SAAS,aAC1D,SAAQ,KAAK;EAAE,MAAM;EAAU,QAAQ,OAAO;EAAc,WAAW;EAAK,OAAO,OAAO;EAAa,CAAC;AAE1G,KAAI,OAAO,cAAc,OAAO,eAAe,SAAS,WACtD,SAAQ,KAAK;EAAE,MAAM;EAAQ,QAAQ,OAAO;EAAY,WAAW;EAAK,OAAO,OAAO;EAAW,CAAC;AAEpG,KAAI,OAAO,aAAa,OAAO,cAAc,SAAS,UACpD,SAAQ,KAAK;EAAE,MAAM;EAAO,QAAQ,OAAO;EAAW,WAAW;EAAK,OAAO,OAAO;EAAU,CAAC;AAEjG,KAAI,OAAO,eAAe,OAAO,gBAAgB,SAAS,YACxD,SAAQ,KAAK;EAAE,MAAM;EAAS,QAAQ,OAAO;EAAa,WAAW;EAAK,CAAC;AAE7E,QAAO,QAAQ,SAAS,GAAI,SAAQ,OAAO;CAI3C,MAAM,gBAAgB,OAAO,kBAAkB,KAAA,IAC3C,OAAO,gBAEL,OAAO,iBAAiB,YACxB,OAAO,eAAe,YACtB,OAAO,gBAAgB,aAEtB,OAAO,cAAc,KAAA,KAAa,OAAO,cAAc;CAG9D,MAAM,UAAwB;EAC5B,GAAG;EACH;EACA,WAAW;EACX;EACA;EACD;AAGD,KAAI,iBAAiB,CAAC,SAAS,iBAAiB,QAAQ,MACtD,EAAC,YAAY;AACX,MAAI;GACF,MAAM,EAAE,uBAAuB,uBAAuB,MAAM,OAAO;AACnE,OAAI,CAAC,uBAAuB,CAAE;GAC9B,MAAM,UAAU,QAAQ,MAAO,MAAM,sCAAsC;AAC3E,OAAI,CAAC,QAAS;GACd,MAAM,GAAG,OAAO,QAAQ;GAExB,MAAM,EAAE,SAAS,MAAM,OAAO;GAC9B,MAAM,EAAE,cAAc,MAAM,OAAO;GAEnC,MAAM,EAAE,WAAW,MADD,UAAU,KAAK,CAE/B,cAAc,QAAQ,MAAO,MAAM,gBAAgB,GAAG,GAAG,sCACzD;IAAE,UAAU;IAAS,SAAS;IAAO,CACtC;GACD,MAAM,MAAM,OAAO,MAAM;AACzB,OAAI,KAAK;AACP,UAAM,mBAAmB,OAAO,MAAM,KAAK,WAAW,qBAAqB,gBAAgB;AAC3F,UAAM,mBAAmB,OAAO,MAAM,KAAK,WAAW,mBAAmB,eAAe;AACxF,YAAQ,IAAI,gDAAgD,QAAQ,IAAI,IAAI,MAAM,GAAG,EAAE,CAAC,GAAG;;WAEtF,KAAU;AACjB,WAAQ,KAAK,mDAAmD,IAAI,UAAU;;KAE9E;AAIN,KAAI,aAAa,oBACf,KAAI;AACF,qBAAS,QAAQ;UACV,KAAK;AACZ,UAAQ,MAAM
,+DAA+D,IAAI;;AAKrF,UAAS,WAAW;AACpB,oBAAmB,UAAU,SAAS;AAEtC,gBAAe;EAAE,MAAM;EAAkB;EAAS,QAAQ;EAAS,CAAC;AAEpE,QAAO;;AAGT,SAAgB,gBAAgB,SAAiB,WAAW,qBAA0C;AAEpG,KAAI,aAAa,oBACf,KAAI;EACF,MAAM,SAAS,sBAAsB,QAAQ;AAC7C,MAAI,OAAQ,QAAO;SACb;AAKV,QADiB,mBAAmB,SAAS,CAC7B,YAAY;;AAkB9B,SAAgB,kBAAkB,SAAiB,WAAW,qBAA2B;CACvF,MAAM,WAAW,mBAAmB,SAAS;AAC7C,QAAO,SAAS;AAChB,oBAAmB,UAAU,SAAS;AAGtC,KAAI,aAAa,oBACf,KAAI;AACF,qBAAS,QAAQ;UACV,KAAK;AACZ,UAAQ,MAAM,gEAAgE,IAAI;;;;;yBA7NhC;wBAMhB;AAkClC,uBAAsB,KAAK,SAAS,EAAE,eAAe,qBAAqB"}
+ {"version":3,"file":"review-status-Bm1bWNEa.js","names":[],"sources":["../src/lib/database/schema.ts","../src/lib/database/index.ts","../src/lib/database/review-status-db.ts","../src/lib/review-status.ts"],"sourcesContent":["/**\n * Panopticon Database Schema\n *\n * Defines the unified schema for panopticon.db.\n * All persistent application state lives here.\n */\n\nimport type Database from 'better-sqlite3';\n\n// Schema version — increment when making breaking schema changes\nexport const SCHEMA_VERSION = 13;\n\n/**\n * Initialize the complete database schema.\n * Idempotent — uses CREATE TABLE IF NOT EXISTS throughout.\n */\nexport function initSchema(db: Database.Database): void {\n db.exec(`\n -- ===== Cost Events =====\n CREATE TABLE IF NOT EXISTS cost_events (\n id INTEGER PRIMARY KEY AUTOINCREMENT,\n ts TEXT NOT NULL,\n agent_id TEXT NOT NULL,\n issue_id TEXT NOT NULL,\n session_type TEXT NOT NULL DEFAULT 'unknown',\n provider TEXT NOT NULL DEFAULT 'anthropic',\n model TEXT NOT NULL,\n input INTEGER NOT NULL DEFAULT 0,\n output INTEGER NOT NULL DEFAULT 0,\n cache_read INTEGER NOT NULL DEFAULT 0,\n cache_write INTEGER NOT NULL DEFAULT 0,\n cost REAL NOT NULL DEFAULT 0,\n request_id TEXT,\n session_id TEXT, -- Claude Code session UUID (for reconciler offset tracking)\n -- TLDR metrics\n tldr_interceptions INTEGER,\n tldr_bypasses INTEGER,\n tldr_tokens_saved INTEGER,\n tldr_bypass_reasons TEXT, -- JSON string\n -- WAL source tracking\n source_file TEXT -- path of WAL file this came from (for imports)\n );\n\n CREATE UNIQUE INDEX IF NOT EXISTS idx_cost_request_id\n ON cost_events(request_id) WHERE request_id IS NOT NULL;\n\n CREATE INDEX IF NOT EXISTS idx_cost_issue_id\n ON cost_events(issue_id, ts);\n\n CREATE INDEX IF NOT EXISTS idx_cost_agent_id\n ON cost_events(agent_id, ts);\n\n CREATE INDEX IF NOT EXISTS idx_cost_ts\n ON cost_events(ts);\n\n CREATE INDEX IF NOT EXISTS idx_cost_session_id\n ON cost_events(session_id) WHERE session_id IS NOT NULL;\n\n -- ===== Review Status =====\n CREATE TABLE IF NOT EXISTS review_status (\n issue_id TEXT PRIMARY KEY,\n review_status TEXT NOT NULL DEFAULT 'pending',\n test_status TEXT NOT NULL DEFAULT 'pending',\n merge_status TEXT,\n verification_status TEXT,\n verification_notes TEXT,\n verification_cycle_count INTEGER DEFAULT 0,\n verification_max_cycles INTEGER,\n review_notes TEXT,\n test_notes TEXT,\n merge_notes TEXT,\n updated_at TEXT NOT NULL,\n ready_for_merge INTEGER NOT NULL DEFAULT 0,\n auto_requeue_count INTEGER DEFAULT 0,\n pr_url TEXT\n );\n\n CREATE INDEX IF NOT EXISTS idx_review_status_updated\n ON review_status(updated_at);\n\n -- ===== Status History =====\n CREATE TABLE IF NOT EXISTS status_history (\n id INTEGER PRIMARY KEY AUTOINCREMENT,\n issue_id TEXT NOT NULL,\n type TEXT NOT NULL, -- 'review', 'test', 'merge'\n status TEXT NOT NULL,\n timestamp TEXT NOT NULL,\n notes TEXT,\n FOREIGN KEY (issue_id) REFERENCES review_status(issue_id) ON DELETE CASCADE\n );\n\n CREATE INDEX IF NOT EXISTS idx_status_history_issue\n ON status_history(issue_id, timestamp);\n\n -- UNIQUE constraint enables INSERT OR IGNORE deduplication in upsertReviewStatus\n CREATE UNIQUE INDEX IF NOT EXISTS idx_status_history_unique\n ON status_history(issue_id, type, status, timestamp);\n\n -- ===== Health Events =====\n CREATE TABLE IF NOT EXISTS health_events (\n id INTEGER PRIMARY KEY AUTOINCREMENT,\n agent_id TEXT NOT NULL,\n timestamp TEXT NOT NULL,\n state TEXT NOT NULL,\n previous_state TEXT,\n source TEXT,\n metadata TEXT -- JSON string\n 
);\n\n CREATE INDEX IF NOT EXISTS idx_health_agent_timestamp\n ON health_events(agent_id, timestamp);\n\n CREATE INDEX IF NOT EXISTS idx_health_timestamp\n ON health_events(timestamp);\n\n -- ===== Processed Sessions (for reconciler offset tracking) =====\n CREATE TABLE IF NOT EXISTS processed_sessions (\n session_id TEXT PRIMARY KEY,\n agent_id TEXT,\n issue_id TEXT,\n transcript_path TEXT, -- full path to the .jsonl file\n byte_offset INTEGER NOT NULL DEFAULT 0, -- bytes consumed so far\n processed_at TEXT NOT NULL,\n event_count INTEGER NOT NULL DEFAULT 0\n );\n\n -- ===== API Cache =====\n CREATE TABLE IF NOT EXISTS api_cache (\n key TEXT PRIMARY KEY,\n value TEXT NOT NULL, -- JSON string\n expires_at TEXT,\n created_at TEXT NOT NULL\n );\n\n -- ===== Rate Limits =====\n CREATE TABLE IF NOT EXISTS rate_limits (\n service TEXT PRIMARY KEY,\n requests INTEGER NOT NULL DEFAULT 0,\n window_start TEXT NOT NULL,\n limit_per_window INTEGER NOT NULL DEFAULT 1000\n );\n\n -- ===== Domain Events (PAN-428: push-first architecture) =====\n CREATE TABLE IF NOT EXISTS events (\n sequence INTEGER PRIMARY KEY AUTOINCREMENT,\n type TEXT NOT NULL,\n timestamp TEXT NOT NULL,\n payload TEXT NOT NULL -- JSON\n );\n\n CREATE INDEX IF NOT EXISTS idx_events_type\n ON events(type);\n\n CREATE INDEX IF NOT EXISTS idx_events_timestamp\n ON events(timestamp);\n\n -- ===== Projection Cache (PAN-437: instant dashboard startup) =====\n CREATE TABLE IF NOT EXISTS projection_cache (\n key TEXT PRIMARY KEY,\n data TEXT NOT NULL, -- JSON-serialized DashboardSnapshot\n sequence INTEGER NOT NULL, -- Last event sequence applied\n updated_at TEXT NOT NULL -- ISO timestamp\n );\n\n -- ===== Conversations (PAN-416: Mission Control conversation launcher) =====\n CREATE TABLE IF NOT EXISTS conversations (\n id INTEGER PRIMARY KEY AUTOINCREMENT,\n name TEXT NOT NULL UNIQUE,\n tmux_session TEXT NOT NULL,\n status TEXT NOT NULL DEFAULT 'active', -- 'active', 'ended'\n cwd TEXT NOT NULL,\n issue_id TEXT, -- optional cost attribution\n created_at TEXT NOT NULL,\n ended_at TEXT,\n last_attached_at TEXT,\n session_file TEXT, -- path to Claude Code JSONL session file (PAN-451)\n title TEXT, -- human-readable title, auto-set from first message\n title_source TEXT, -- 'auto', 'ai', or 'manual'\n title_seed TEXT, -- original auto-generated title for replacement check\n total_cost REAL DEFAULT 0, -- cached total cost in USD\n archived_at TEXT, -- ISO timestamp when archived, null = active\n model TEXT, -- model used to spawn conversation (e.g. 'minimax-m2.7-highspeed')\n effort TEXT -- effort level (e.g. 
'low', 'medium', 'high')\n );\n\n CREATE INDEX IF NOT EXISTS idx_conversations_status\n ON conversations(status);\n\n CREATE INDEX IF NOT EXISTS idx_conversations_created_at\n ON conversations(created_at);\n `);\n\n // Record schema version\n db.pragma(`user_version = ${SCHEMA_VERSION}`);\n}\n\n/**\n * Run schema migrations if the database version is older than SCHEMA_VERSION.\n * This function handles upgrading from older schema versions.\n */\nexport function runMigrations(db: Database.Database): void {\n const currentVersion = db.pragma('user_version', { simple: true }) as number;\n\n if (currentVersion === SCHEMA_VERSION) {\n return; // Already at latest version\n }\n\n if (currentVersion === 0) {\n // Fresh database — just initialize the full schema\n initSchema(db);\n return;\n }\n\n // v1 → v2: add UNIQUE index on status_history for INSERT OR IGNORE dedup\n if (currentVersion < 2) {\n // Remove duplicate rows before adding the unique index (keep lowest id per unique key)\n db.exec(`\n DELETE FROM status_history\n WHERE id NOT IN (\n SELECT MIN(id)\n FROM status_history\n GROUP BY issue_id, type, status, timestamp\n );\n CREATE UNIQUE INDEX IF NOT EXISTS idx_status_history_unique\n ON status_history(issue_id, type, status, timestamp);\n `);\n }\n\n // v2 → v3: add session_id to cost_events, extend processed_sessions for reconciler\n if (currentVersion < 3) {\n // Add session_id column to cost_events (nullable, no data loss)\n try {\n db.exec(`ALTER TABLE cost_events ADD COLUMN session_id TEXT`);\n } catch {\n // Column may already exist if schema was manually applied\n }\n\n // Add index on session_id\n db.exec(`\n CREATE INDEX IF NOT EXISTS idx_cost_session_id\n ON cost_events(session_id) WHERE session_id IS NOT NULL;\n `);\n\n // Extend processed_sessions with new columns for reconciler\n try {\n db.exec(`ALTER TABLE processed_sessions ADD COLUMN agent_id TEXT`);\n } catch { /* already exists */ }\n try {\n db.exec(`ALTER TABLE processed_sessions ADD COLUMN issue_id TEXT`);\n } catch { /* already exists */ }\n try {\n db.exec(`ALTER TABLE processed_sessions ADD COLUMN transcript_path TEXT`);\n } catch { /* already exists */ }\n try {\n db.exec(`ALTER TABLE processed_sessions ADD COLUMN byte_offset INTEGER NOT NULL DEFAULT 0`);\n } catch { /* already exists */ }\n }\n\n // v3 → v4: add events table for push-first architecture (PAN-428)\n if (currentVersion < 4) {\n db.exec(`\n CREATE TABLE IF NOT EXISTS events (\n sequence INTEGER PRIMARY KEY AUTOINCREMENT,\n type TEXT NOT NULL,\n timestamp TEXT NOT NULL,\n payload TEXT NOT NULL -- JSON\n );\n\n CREATE INDEX IF NOT EXISTS idx_events_type\n ON events(type);\n\n CREATE INDEX IF NOT EXISTS idx_events_timestamp\n ON events(timestamp);\n `);\n }\n\n // v4 → v5: add projection_cache table (PAN-437: instant dashboard startup)\n if (currentVersion < 5) {\n db.exec(`\n CREATE TABLE IF NOT EXISTS projection_cache (\n key TEXT PRIMARY KEY,\n data TEXT NOT NULL,\n sequence INTEGER NOT NULL,\n updated_at TEXT NOT NULL\n );\n `);\n }\n\n // v5 → v6: add conversations table (PAN-416)\n if (currentVersion < 6) {\n db.exec(`\n CREATE TABLE IF NOT EXISTS conversations (\n id INTEGER PRIMARY KEY AUTOINCREMENT,\n name TEXT NOT NULL UNIQUE,\n tmux_session TEXT NOT NULL,\n status TEXT NOT NULL DEFAULT 'active',\n cwd TEXT NOT NULL,\n issue_id TEXT,\n created_at TEXT NOT NULL,\n ended_at TEXT,\n last_attached_at TEXT\n );\n\n CREATE INDEX IF NOT EXISTS idx_conversations_status\n ON conversations(status);\n\n CREATE INDEX IF NOT EXISTS 
idx_conversations_created_at\n ON conversations(created_at);\n `);\n }\n\n // v6 → v7: add session_file column to conversations (PAN-451)\n if (currentVersion < 7) {\n try {\n db.exec(`ALTER TABLE conversations ADD COLUMN session_file TEXT`);\n } catch { /* already exists */ }\n }\n\n // v7 → v8: add title column to conversations (auto-set from first message)\n if (currentVersion < 8) {\n try {\n db.exec(`ALTER TABLE conversations ADD COLUMN title TEXT`);\n } catch { /* already exists */ }\n }\n\n // v8 → v9: add title_source and title_seed columns to conversations\n // title_source tracks how the title was set: 'auto' (truncated first message),\n // 'ai' (Claude-generated), or 'manual' (user renamed). Used for T3Code-style\n // canReplaceThreadTitle logic — only auto-generated titles get AI replacement.\n // title_seed stores the original truncated message for replacement eligibility.\n if (currentVersion < 9) {\n try {\n db.exec(`ALTER TABLE conversations ADD COLUMN title_source TEXT`);\n } catch { /* already exists */ }\n try {\n db.exec(`ALTER TABLE conversations ADD COLUMN title_seed TEXT`);\n } catch { /* already exists */ }\n }\n\n // v9 → v10: add total_cost column to conversations (cached cost in USD)\n if (currentVersion < 10) {\n try {\n db.exec(`ALTER TABLE conversations ADD COLUMN total_cost REAL DEFAULT 0`);\n } catch { /* already exists */ }\n }\n\n // v10 → v11: expression index for UPPER(issue_id) on cost_events\n // The N+1 queries in getCostsByIssueFromDb use UPPER(issue_id) which defeats\n // the existing idx_cost_issue_id index. This expression index fixes that.\n if (currentVersion < 11) {\n try {\n db.exec(`CREATE INDEX IF NOT EXISTS idx_cost_issue_upper ON cost_events(UPPER(issue_id))`);\n } catch { /* already exists */ }\n }\n\n // v11 → v12: archived_at column + index for conversations (T3Code pattern)\n if (currentVersion < 12) {\n try {\n db.exec(`ALTER TABLE conversations ADD COLUMN archived_at TEXT`);\n } catch { /* already exists */ }\n try {\n db.exec(`CREATE INDEX IF NOT EXISTS idx_conversations_archived ON conversations(archived_at)`);\n } catch { /* already exists */ }\n }\n\n // v12 → v13: add model + effort columns to conversations (preserve model on resume)\n if (currentVersion < 13) {\n try {\n db.exec(`ALTER TABLE conversations ADD COLUMN model TEXT`);\n } catch { /* already exists */ }\n try {\n db.exec(`ALTER TABLE conversations ADD COLUMN effort TEXT`);\n } catch { /* already exists */ }\n }\n\n // After all migrations, set the version\n db.pragma(`user_version = ${SCHEMA_VERSION}`);\n}\n","/**\n * Panopticon Unified Database\n *\n * Single panopticon.db at ~/.panopticon/panopticon.db.\n * Singleton pattern — one connection shared across the process.\n *\n * IMPORTANT: This module is safe to import in both server and CLI contexts.\n * Never use execSync here — this is synchronous SQLite, not a subprocess.\n *\n * Dual-runtime (PAN-428):\n * - Bun: uses bun:sqlite (better-sqlite3 is a native addon — ERR_DLOPEN_FAILED in Bun)\n * - Node: uses better-sqlite3\n * In both cases the external API is identical: pragma(), exec(), prepare(), close().\n */\n\nimport type Database from 'better-sqlite3';\nimport { createRequire } from 'module';\nimport { join } from 'path';\nimport { existsSync, mkdirSync } from 'fs';\nimport { getPanopticonHome } from '../paths.js';\nimport { runMigrations } from './schema.js';\n\ndeclare const Bun: unknown;\n\nfunction isBunRuntime(): boolean {\n return typeof Bun !== 'undefined';\n}\n\n// createRequire allows synchronous 
require() in ESM — works in both Bun and Node\nconst _require = createRequire(import.meta.url);\n\nlet _db: Database.Database | null = null;\n\n/**\n * Get the path to panopticon.db (dynamic, respects PANOPTICON_HOME override for tests)\n */\nexport function getDatabasePath(): string {\n return join(getPanopticonHome(), 'panopticon.db');\n}\n\n/**\n * Initialize and return the singleton database connection.\n * Safe to call multiple times — returns the existing connection after first call.\n */\nexport function getDatabase(): Database.Database {\n if (_db) {\n return _db;\n }\n\n const home = getPanopticonHome();\n if (!existsSync(home)) {\n mkdirSync(home, { recursive: true });\n }\n\n const dbPath = getDatabasePath();\n\n if (isBunRuntime()) {\n // better-sqlite3 is a native Node.js addon that fails in Bun with ERR_DLOPEN_FAILED.\n // Use bun:sqlite instead, with a pragma() shim for API compatibility.\n const { Database: BunDatabase } = _require('bun:sqlite') as { Database: new (path: string) => any };\n const bunDb = new BunDatabase(dbPath);\n\n // bun:sqlite has no pragma() method — shim it using exec() and query().get()\n bunDb.pragma = function (sql: string, options?: { simple?: boolean }): any {\n if (options?.simple) {\n // Read-only: return the scalar value directly (e.g. db.pragma('user_version', { simple: true }))\n const key = sql.trim();\n const row = bunDb.query(`PRAGMA ${key}`).get() as Record<string, unknown> | null;\n return row?.[key] ?? null;\n }\n // Set or no-return pragma (e.g. 'journal_mode = WAL', 'foreign_keys = ON')\n bunDb.exec(`PRAGMA ${sql}`);\n return undefined;\n };\n\n _db = bunDb as Database.Database;\n } else {\n // Node.js path: load better-sqlite3 lazily (avoids import-time native addon load)\n const BetterSqlite3 = _require('better-sqlite3');\n _db = new BetterSqlite3(dbPath) as Database.Database;\n }\n\n // Enable WAL mode for concurrent readers + single writer\n _db.pragma('journal_mode = WAL');\n // Enforce foreign keys\n _db.pragma('foreign_keys = ON');\n // Write-ahead log synchronization — NORMAL is safe and fast\n _db.pragma('synchronous = NORMAL');\n\n // Initialize or migrate schema\n runMigrations(_db);\n\n return _db;\n}\n\n/**\n * Close the database connection and release the singleton.\n * Primarily used in tests to get a fresh connection.\n */\nexport function closeDatabase(): void {\n if (_db) {\n _db.close();\n _db = null;\n }\n}\n\n/**\n * Force re-initialization of the database connection.\n * Used in tests after PANOPTICON_HOME changes.\n */\nexport function resetDatabase(): void {\n closeDatabase();\n}\n","/**\n * Review Status SQLite Storage\n *\n * Provides SQLite-backed CRUD for ReviewStatus, matching the interface in\n * src/lib/review-status.ts. 
Atomic single-transaction writes eliminate the\n * TOCTOU race in the JSON-backed implementation.\n */\n\nimport { getDatabase } from './index.js';\nimport type { ReviewStatus, StatusHistoryEntry } from '../review-status.js';\n\n// ============== Write operations ==============\n\n/**\n * Upsert a review status record atomically.\n * Replaces the JSON read-modify-write cycle with a single transaction.\n */\nexport function upsertReviewStatus(status: ReviewStatus): void {\n const db = getDatabase();\n\n const upsert = db.transaction((s: ReviewStatus) => {\n // Upsert main record\n db.prepare(`\n INSERT INTO review_status (\n issue_id, review_status, test_status, merge_status,\n verification_status, verification_notes,\n verification_cycle_count, verification_max_cycles,\n review_notes, test_notes, merge_notes,\n updated_at, ready_for_merge, auto_requeue_count, pr_url\n ) VALUES (\n ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?\n )\n ON CONFLICT(issue_id) DO UPDATE SET\n review_status = excluded.review_status,\n test_status = excluded.test_status,\n merge_status = excluded.merge_status,\n verification_status = excluded.verification_status,\n verification_notes = excluded.verification_notes,\n verification_cycle_count = excluded.verification_cycle_count,\n verification_max_cycles = excluded.verification_max_cycles,\n review_notes = excluded.review_notes,\n test_notes = excluded.test_notes,\n merge_notes = excluded.merge_notes,\n updated_at = excluded.updated_at,\n ready_for_merge = excluded.ready_for_merge,\n auto_requeue_count = excluded.auto_requeue_count,\n pr_url = excluded.pr_url\n `).run(\n s.issueId,\n s.reviewStatus,\n s.testStatus,\n s.mergeStatus ?? null,\n s.verificationStatus ?? null,\n s.verificationNotes ?? null,\n s.verificationCycleCount ?? null,\n s.verificationMaxCycles ?? null,\n s.reviewNotes ?? null,\n s.testNotes ?? null,\n s.mergeNotes ?? null,\n s.updatedAt,\n s.readyForMerge ? 1 : 0,\n s.autoRequeueCount ?? null,\n s.prUrl ?? null,\n );\n\n // Append new history entries (deduplicate by timestamp to avoid re-inserting)\n if (s.history && s.history.length > 0) {\n const insertHistory = db.prepare(`\n INSERT OR IGNORE INTO status_history (issue_id, type, status, timestamp, notes)\n VALUES (?, ?, ?, ?, ?)\n `);\n for (const entry of s.history) {\n insertHistory.run(s.issueId, entry.type, entry.status, entry.timestamp, entry.notes ?? 
null);\n }\n }\n });\n\n upsert(status);\n}\n\n/**\n * Delete a review status record and its history.\n */\nexport function deleteReviewStatus(issueId: string): void {\n const db = getDatabase();\n db.prepare('DELETE FROM review_status WHERE issue_id = ?').run(issueId);\n}\n\n// ============== Read operations ==============\n\n/**\n * Get a single review status by issue ID.\n */\nexport function getReviewStatusFromDb(issueId: string): ReviewStatus | null {\n const db = getDatabase();\n\n const row = db.prepare(`\n SELECT * FROM review_status WHERE issue_id = ?\n `).get(issueId) as DbReviewStatusRow | undefined;\n\n if (!row) return null;\n\n const history = getHistoryFromDb(issueId);\n return rowToReviewStatus(row, history);\n}\n\n/**\n * Get all review statuses.\n */\nexport function getAllReviewStatusesFromDb(): Record<string, ReviewStatus> {\n const db = getDatabase();\n\n const rows = db.prepare('SELECT * FROM review_status ORDER BY updated_at DESC').all() as DbReviewStatusRow[];\n const result: Record<string, ReviewStatus> = {};\n\n for (const row of rows) {\n const history = getHistoryFromDb(row.issue_id);\n result[row.issue_id] = rowToReviewStatus(row, history);\n }\n\n return result;\n}\n\n/**\n * Get history entries for an issue.\n */\nfunction getHistoryFromDb(issueId: string): StatusHistoryEntry[] {\n const db = getDatabase();\n const rows = db.prepare(`\n SELECT type, status, timestamp, notes\n FROM status_history\n WHERE issue_id = ?\n ORDER BY timestamp ASC\n `).all(issueId) as Array<{ type: string; status: string; timestamp: string; notes: string | null }>;\n\n return rows.map(r => ({\n type: r.type as 'review' | 'test' | 'merge',\n status: r.status,\n timestamp: r.timestamp,\n ...(r.notes ? { notes: r.notes } : {}),\n }));\n}\n\n// ============== Row mapping ==============\n\ninterface DbReviewStatusRow {\n issue_id: string;\n review_status: string;\n test_status: string;\n merge_status: string | null;\n verification_status: string | null;\n verification_notes: string | null;\n verification_cycle_count: number | null;\n verification_max_cycles: number | null;\n review_notes: string | null;\n test_notes: string | null;\n merge_notes: string | null;\n updated_at: string;\n ready_for_merge: number;\n auto_requeue_count: number | null;\n pr_url: string | null;\n}\n\nfunction rowToReviewStatus(row: DbReviewStatusRow, history: StatusHistoryEntry[]): ReviewStatus {\n return {\n issueId: row.issue_id,\n reviewStatus: row.review_status as ReviewStatus['reviewStatus'],\n testStatus: row.test_status as ReviewStatus['testStatus'],\n mergeStatus: row.merge_status as ReviewStatus['mergeStatus'] ?? undefined,\n verificationStatus: row.verification_status as ReviewStatus['verificationStatus'] ?? undefined,\n verificationNotes: row.verification_notes ?? undefined,\n verificationCycleCount: row.verification_cycle_count ?? undefined,\n verificationMaxCycles: row.verification_max_cycles ?? undefined,\n reviewNotes: row.review_notes ?? undefined,\n testNotes: row.test_notes ?? undefined,\n mergeNotes: row.merge_notes ?? undefined,\n updatedAt: row.updated_at,\n readyForMerge: row.ready_for_merge === 1,\n autoRequeueCount: row.auto_requeue_count ?? undefined,\n prUrl: row.pr_url ?? undefined,\n history: history.length > 0 ? 
history : undefined,\n };\n}\n","import { existsSync, readFileSync, writeFileSync, mkdirSync } from 'fs';\nimport { join, dirname } from 'path';\nimport { homedir } from 'os';\nimport { notifyPipeline } from './pipeline-notifier.js';\nimport {\n upsertReviewStatus as dbUpsert,\n deleteReviewStatus as dbDelete,\n getReviewStatusFromDb,\n getAllReviewStatusesFromDb,\n} from './database/review-status-db.js';\n\nexport interface StatusHistoryEntry {\n type: 'review' | 'test' | 'merge' | 'inspect' | 'uat';\n status: string;\n timestamp: string;\n notes?: string;\n}\n\nexport interface ReviewStatus {\n issueId: string;\n reviewStatus: 'pending' | 'reviewing' | 'passed' | 'failed' | 'blocked';\n testStatus: 'pending' | 'testing' | 'passed' | 'failed' | 'skipped' | 'dispatch_failed';\n mergeStatus?: 'pending' | 'merging' | 'merged' | 'failed';\n inspectStatus?: 'pending' | 'inspecting' | 'passed' | 'failed';\n inspectNotes?: string;\n uatStatus?: 'pending' | 'testing' | 'passed' | 'failed';\n uatNotes?: string;\n verificationStatus?: 'pending' | 'running' | 'passed' | 'failed' | 'skipped';\n verificationNotes?: string;\n verificationCycleCount?: number;\n verificationMaxCycles?: number;\n reviewNotes?: string;\n testNotes?: string;\n mergeNotes?: string;\n updatedAt: string;\n readyForMerge: boolean;\n autoRequeueCount?: number;\n prUrl?: string;\n history?: StatusHistoryEntry[];\n /** HEAD commit SHA at the time review passed — used to detect new commits after review */\n reviewedAtCommit?: string;\n}\n\nconst DEFAULT_STATUS_FILE = join(homedir(), '.panopticon', 'review-status.json');\n\nexport function loadReviewStatuses(filePath = DEFAULT_STATUS_FILE): Record<string, ReviewStatus> {\n // Prefer SQLite when using the default path\n if (filePath === DEFAULT_STATUS_FILE) {\n try {\n return getAllReviewStatusesFromDb();\n } catch {\n // Fall through to JSON on DB error\n }\n }\n\n try {\n if (existsSync(filePath)) {\n return JSON.parse(readFileSync(filePath, 'utf-8'));\n }\n } catch (err) {\n console.error('Failed to load review statuses:', err);\n }\n return {};\n}\n\nexport function saveReviewStatuses(statuses: Record<string, ReviewStatus>, filePath = DEFAULT_STATUS_FILE): void {\n try {\n const dir = dirname(filePath);\n if (!existsSync(dir)) {\n mkdirSync(dir, { recursive: true });\n }\n writeFileSync(filePath, JSON.stringify(statuses, null, 2));\n } catch (err) {\n console.error('Failed to save review statuses:', err);\n }\n}\n\nexport function setReviewStatus(\n issueId: string,\n update: Partial<ReviewStatus>,\n filePath = DEFAULT_STATUS_FILE,\n): ReviewStatus {\n const statuses = loadReviewStatuses(filePath);\n const existing = statuses[issueId] || {\n issueId,\n reviewStatus: 'pending' as const,\n testStatus: 'pending' as const,\n updatedAt: new Date().toISOString(),\n readyForMerge: false,\n };\n\n // Guard: reject reviewStatus regression from 'passed' to 'reviewing' unless the caller\n // is explicitly resetting the merge lifecycle (update includes mergeStatus).\n // This is belt-and-suspenders — endpoint-level guards should catch this first.\n if (update.reviewStatus === 'reviewing' && existing.reviewStatus === 'passed' && update.mergeStatus === undefined) {\n console.warn(`[review-status] Rejecting reviewStatus regression from 'passed' to 'reviewing' for ${issueId} (mergeStatus not being reset)`);\n return existing as ReviewStatus;\n }\n\n const merged = { ...existing, ...update };\n\n // Track status transitions in history (last 10 entries)\n const history = [...(existing.history || 
[])];\n const now = new Date().toISOString();\n if (update.reviewStatus && update.reviewStatus !== existing.reviewStatus) {\n history.push({ type: 'review', status: update.reviewStatus, timestamp: now, notes: update.reviewNotes });\n }\n if (update.testStatus && update.testStatus !== existing.testStatus) {\n history.push({ type: 'test', status: update.testStatus, timestamp: now, notes: update.testNotes });\n }\n if (update.uatStatus && update.uatStatus !== existing.uatStatus) {\n history.push({ type: 'uat', status: update.uatStatus, timestamp: now, notes: update.uatNotes });\n }\n if (update.mergeStatus && update.mergeStatus !== existing.mergeStatus) {\n history.push({ type: 'merge', status: update.mergeStatus, timestamp: now });\n }\n while (history.length > 10) history.shift();\n\n // readyForMerge is true when all required gates pass.\n // If uatStatus exists (UAT specialist has been involved), it must also be 'passed'.\n // verificationStatus must not be 'failed' — verification catches pre-existing test breakage\n // that scoped test runs (e2e/dashboard) may miss.\n const readyForMerge = update.readyForMerge !== undefined\n ? update.readyForMerge\n : (\n merged.reviewStatus === 'passed' &&\n merged.testStatus === 'passed' &&\n merged.verificationStatus !== 'failed' &&\n merged.mergeStatus !== 'merged' &&\n // If UAT has been initiated, it must pass too\n (merged.uatStatus === undefined || merged.uatStatus === 'passed')\n );\n\n const updated: ReviewStatus = {\n ...merged,\n issueId,\n updatedAt: now,\n readyForMerge,\n history,\n };\n\n // Report commit statuses to GitHub when readyForMerge transitions to true (PAN-536)\n if (readyForMerge && !existing.readyForMerge && updated.prUrl) {\n (async () => {\n try {\n const { isGitHubAppConfigured, reportCommitStatus } = await import('./github-app.js');\n if (!isGitHubAppConfigured()) return;\n const prMatch = updated.prUrl!.match(/github\\.com\\/([^/]+)\\/([^/]+)\\/pull/);\n if (!prMatch) return;\n const [, owner, repo] = prMatch;\n // Get HEAD SHA of the PR branch\n const { exec } = await import('child_process');\n const { promisify } = await import('util');\n const execAsync = promisify(exec);\n const { stdout } = await execAsync(\n `gh pr view ${updated.prUrl!.match(/\\/pull\\/(\\d+)/)?.[1]} --json headRefOid --jq .headRefOid`,\n { encoding: 'utf-8', timeout: 10000 }\n );\n const sha = stdout.trim();\n if (sha) {\n await reportCommitStatus(owner, repo, sha, 'success', 'panopticon/review', 'Review passed');\n await reportCommitStatus(owner, repo, sha, 'success', 'panopticon/test', 'Tests passed');\n console.log(`[review-status] Reported commit statuses for ${issueId} (${sha.slice(0, 8)})`);\n }\n } catch (err: any) {\n console.warn(`[review-status] Failed to report commit status: ${err.message}`);\n }\n })();\n }\n\n // SQLite first — it is the authoritative store (reads prefer SQLite)\n if (filePath === DEFAULT_STATUS_FILE) {\n try {\n dbUpsert(updated);\n } catch (err) {\n console.error('[review-status] SQLite write failed (continuing with JSON):', err);\n }\n }\n\n // JSON second — legacy fallback for tools that read review-status.json directly\n statuses[issueId] = updated;\n saveReviewStatuses(statuses, filePath);\n\n notifyPipeline({ type: 'status_changed', issueId, status: updated });\n\n return updated;\n}\n\nexport function getReviewStatus(issueId: string, filePath = DEFAULT_STATUS_FILE): ReviewStatus | null {\n // Prefer SQLite when using the default path\n if (filePath === DEFAULT_STATUS_FILE) {\n try {\n const fromDb = 
getReviewStatusFromDb(issueId);\n if (fromDb) return fromDb;\n } catch {\n // Fall through to JSON on DB error\n }\n }\n const statuses = loadReviewStatuses(filePath);\n return statuses[issueId] || null;\n}\n\n/**\n * On server startup, clear any mergeStatus stuck at 'merging'.\n * Pending merge operations are in-memory only — they don't survive a restart.\n * Any 'merging' status after boot is definitionally stuck (PAN-490).\n */\nexport function clearStuckMergeStatuses(): void {\n const statuses = loadReviewStatuses();\n const stuck = Object.values(statuses).filter(s => s.mergeStatus === 'merging');\n if (stuck.length === 0) return;\n console.log(`[review-status] Clearing ${stuck.length} stuck 'merging' status(es) on startup`);\n for (const s of stuck) {\n setReviewStatus(s.issueId, { mergeStatus: 'pending' });\n }\n}\n\nexport function clearReviewStatus(issueId: string, filePath = DEFAULT_STATUS_FILE): void {\n const statuses = loadReviewStatuses(filePath);\n delete statuses[issueId];\n saveReviewStatuses(statuses, filePath);\n\n // Dual-delete from SQLite when using the default path\n if (filePath === DEFAULT_STATUS_FILE) {\n try {\n dbDelete(issueId);\n } catch (err) {\n console.error('[review-status] SQLite delete failed (continuing with JSON):', err);\n }\n }\n}\n"],"mappings":";;;;;;;;;;;;AAgBA,SAAgB,WAAW,IAA6B;AACtD,IAAG,KAAK;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;IA6KN;AAGF,IAAG,OAAO,oBAAmC;;;;;;AAO/C,SAAgB,cAAc,IAA6B;CACzD,MAAM,iBAAiB,GAAG,OAAO,gBAAgB,EAAE,QAAQ,MAAM,CAAC;AAElE,KAAI,mBAAA,GACF;AAGF,KAAI,mBAAmB,GAAG;AAExB,aAAW,GAAG;AACd;;AAIF,KAAI,iBAAiB,EAEnB,IAAG,KAAK;;;;;;;;;MASN;AAIJ,KAAI,iBAAiB,GAAG;AAEtB,MAAI;AACF,MAAG,KAAK,qDAAqD;UACvD;AAKR,KAAG,KAAK;;;MAGN;AAGF,MAAI;AACF,MAAG,KAAK,0DAA0D;UAC5D;AACR,MAAI;AACF,MAAG,KAAK,0DAA0D;UAC5D;AACR,MAAI;AACF,MAAG,KAAK,iEAAiE;UACnE;AACR,MAAI;AACF,MAAG,KAAK,mFAAmF;UACrF;;AAIV,KAAI,iBAAiB,EACnB,IAAG,KAAK;;;;;;;;;;;;;MAaN;AAIJ,KAAI,iBAAiB,EACnB,IAAG,KAAK;;;;;;;MAON;AAIJ,KAAI,iBAAiB,EACnB,IAAG,KAAK;;;;;;;;;;;;;;;;;;MAkBN;AAIJ,KAAI,iBAAiB,EACnB,KAAI;AACF,KAAG,KAAK,yDAAyD;SAC3D;AAIV,KAAI,iBAAiB,EACnB,KAAI;AACF,KAAG,KAAK,kDAAkD;SACpD;AAQV,KAAI,iBAAiB,GAAG;AACtB,MAAI;AACF,MAAG,KAAK,yDAAyD;UAC3D;AACR,MAAI;AACF,MAAG,KAAK,uDAAuD;UACzD;;AAIV,KAAI,iBAAiB,GACnB,KAAI;AACF,KAAG,KAAK,iEAAiE;SACnE;AAMV,KAAI,iBAAiB,GACnB,KAAI;AACF,KAAG,KAAK,kFAAkF;SACpF;AAIV,KAAI,iBAAiB,IAAI;AACvB,MAAI;AACF,MAAG,KAAK,wDAAwD;UAC1D;AACR,MAAI;AACF,MAAG,KAAK,sFAAsF;UACxF;;AAIV,KAAI,iBAAiB,IAAI;AACvB,MAAI;AACF,MAAG,KAAK,kDAAkD;UACpD;AACR,MAAI;AACF,MAAG,KAAK,mDAAmD;UACrD;;AAIV,IAAG,OAAO,oBAAmC;;;;;AChW/C,SAAS,eAAwB;AAC/B,QAAO,OAAO,QAAQ;;;;;AAWxB,SAAgB,kBAA0B;AACxC,QAAO,KAAK,mBAAmB,EAAE,gBAAgB;;;;;;AAOnD,SAAgB,cAAiC;AAC/C,KAAI,IACF,QAAO;CAGT,MAAM,OAAO,mBAAmB;AAChC,KAAI,CAAC,WAAW,KAAK,CACnB,WAAU,MAAM,EAAE,WAAW,MAAM,CAAC;CAGtC,MAAM,SAAS,iBAAiB;AAEhC,KAAI,cAAc,EAAE;EAGlB,MAAM,EAAE,UAAU,gBAAgB,SAAS,aAAa;EACxD,MAAM,QAAQ,IAAI,YAAY,OAAO;AAGrC,QAAM,SAAS,SAAU,KAAa,SAAqC;AACzE,OAAI,SAAS,QAAQ;IAEnB,MAAM,MAAM,IAAI,MAAM;AAEtB,WADY,MAAM,MAAM,UAAU,MAAM,CAAC,KAAK,GACjC,QAAQ;;AAGvB,SAAM,KAAK,UAAU,MAAM;;AAI7B,QAAM;OAIN,OAAM,KADgB,SAAS,iBAAiB,EACxB,OAAO;AAIjC,KAAI,OAAO,qBAAqB;AAEhC,KAAI,OAAO,oBAAoB;AAE/B,KAAI,OAAO,uBAAuB;AAGlC,eAAc,IAAI;AAElB,QAAO;;;;;;AAOT,SAAgB,gBAAsB;AACpC,KAAI,KAAK;AACP,MAAI,OAAO;AACX,QAAM;;;;;aAnFsC;cACJ;AAStC,YAAW,cAAc,OAAO,KAAK,IAAI;AAE3C,OAAgC;;;;;;;;ACdpC,SAAgB,mBAAmB,QAA4B;CAC7D,MAAM,KAAK,aAAa;AAET,IAAG,aAAa,MAAoB;AAEjD,KAAG,QAAQ;;;;;;;;;;;;
;;;;;;;;;;;;;MAyBT,CAAC,IACD,EAAE,SACF,EAAE,cACF,EAAE,YACF,EAAE,eAAe,MACjB,EAAE,sBAAsB,MACxB,EAAE,qBAAqB,MACvB,EAAE,0BAA0B,MAC5B,EAAE,yBAAyB,MAC3B,EAAE,eAAe,MACjB,EAAE,aAAa,MACf,EAAE,cAAc,MAChB,EAAE,WACF,EAAE,gBAAgB,IAAI,GACtB,EAAE,oBAAoB,MACtB,EAAE,SAAS,KACZ;AAGD,MAAI,EAAE,WAAW,EAAE,QAAQ,SAAS,GAAG;GACrC,MAAM,gBAAgB,GAAG,QAAQ;;;QAG/B;AACF,QAAK,MAAM,SAAS,EAAE,QACpB,eAAc,IAAI,EAAE,SAAS,MAAM,MAAM,MAAM,QAAQ,MAAM,WAAW,MAAM,SAAS,KAAK;;GAGhG,CAEK,OAAO;;;;;AAMhB,SAAgB,mBAAmB,SAAuB;AAC7C,cAAa,CACrB,QAAQ,+CAA+C,CAAC,IAAI,QAAQ;;;;;AAQzE,SAAgB,sBAAsB,SAAsC;CAG1E,MAAM,MAFK,aAAa,CAET,QAAQ;;IAErB,CAAC,IAAI,QAAQ;AAEf,KAAI,CAAC,IAAK,QAAO;AAGjB,QAAO,kBAAkB,KADT,iBAAiB,QAAQ,CACH;;;;;AAMxC,SAAgB,6BAA2D;CAGzE,MAAM,OAFK,aAAa,CAER,QAAQ,uDAAuD,CAAC,KAAK;CACrF,MAAM,SAAuC,EAAE;AAE/C,MAAK,MAAM,OAAO,MAAM;EACtB,MAAM,UAAU,iBAAiB,IAAI,SAAS;AAC9C,SAAO,IAAI,YAAY,kBAAkB,KAAK,QAAQ;;AAGxD,QAAO;;;;;AAMT,SAAS,iBAAiB,SAAuC;AAS/D,QARW,aAAa,CACR,QAAQ;;;;;IAKtB,CAAC,IAAI,QAAQ,CAEH,KAAI,OAAM;EACpB,MAAM,EAAE;EACR,QAAQ,EAAE;EACV,WAAW,EAAE;EACb,GAAI,EAAE,QAAQ,EAAE,OAAO,EAAE,OAAO,GAAG,EAAE;EACtC,EAAE;;AAuBL,SAAS,kBAAkB,KAAwB,SAA6C;AAC9F,QAAO;EACL,SAAS,IAAI;EACb,cAAc,IAAI;EAClB,YAAY,IAAI;EAChB,aAAa,IAAI,gBAA+C,KAAA;EAChE,oBAAoB,IAAI,uBAA6D,KAAA;EACrF,mBAAmB,IAAI,sBAAsB,KAAA;EAC7C,wBAAwB,IAAI,4BAA4B,KAAA;EACxD,uBAAuB,IAAI,2BAA2B,KAAA;EACtD,aAAa,IAAI,gBAAgB,KAAA;EACjC,WAAW,IAAI,cAAc,KAAA;EAC7B,YAAY,IAAI,eAAe,KAAA;EAC/B,WAAW,IAAI;EACf,eAAe,IAAI,oBAAoB;EACvC,kBAAkB,IAAI,sBAAsB,KAAA;EAC5C,OAAO,IAAI,UAAU,KAAA;EACrB,SAAS,QAAQ,SAAS,IAAI,UAAU,KAAA;EACzC;;;gBA7KsC;;;;ACqCzC,SAAgB,mBAAmB,WAAW,qBAAmD;AAE/F,KAAI,aAAa,oBACf,KAAI;AACF,SAAO,4BAA4B;SAC7B;AAKV,KAAI;AACF,MAAI,WAAW,SAAS,CACtB,QAAO,KAAK,MAAM,aAAa,UAAU,QAAQ,CAAC;UAE7C,KAAK;AACZ,UAAQ,MAAM,mCAAmC,IAAI;;AAEvD,QAAO,EAAE;;AAGX,SAAgB,mBAAmB,UAAwC,WAAW,qBAA2B;AAC/G,KAAI;EACF,MAAM,MAAM,QAAQ,SAAS;AAC7B,MAAI,CAAC,WAAW,IAAI,CAClB,WAAU,KAAK,EAAE,WAAW,MAAM,CAAC;AAErC,gBAAc,UAAU,KAAK,UAAU,UAAU,MAAM,EAAE,CAAC;UACnD,KAAK;AACZ,UAAQ,MAAM,mCAAmC,IAAI;;;AAIzD,SAAgB,gBACd,SACA,QACA,WAAW,qBACG;CACd,MAAM,WAAW,mBAAmB,SAAS;CAC7C,MAAM,WAAW,SAAS,YAAY;EACpC;EACA,cAAc;EACd,YAAY;EACZ,4BAAW,IAAI,MAAM,EAAC,aAAa;EACnC,eAAe;EAChB;AAKD,KAAI,OAAO,iBAAiB,eAAe,SAAS,iBAAiB,YAAY,OAAO,gBAAgB,KAAA,GAAW;AACjH,UAAQ,KAAK,sFAAsF,QAAQ,gCAAgC;AAC3I,SAAO;;CAGT,MAAM,SAAS;EAAE,GAAG;EAAU,GAAG;EAAQ;CAGzC,MAAM,UAAU,CAAC,GAAI,SAAS,WAAW,EAAE,CAAE;CAC7C,MAAM,uBAAM,IAAI,MAAM,EAAC,aAAa;AACpC,KAAI,OAAO,gBAAgB,OAAO,iBAAiB,SAAS,aAC1D,SAAQ,KAAK;EAAE,MAAM;EAAU,QAAQ,OAAO;EAAc,WAAW;EAAK,OAAO,OAAO;EAAa,CAAC;AAE1G,KAAI,OAAO,cAAc,OAAO,eAAe,SAAS,WACtD,SAAQ,KAAK;EAAE,MAAM;EAAQ,QAAQ,OAAO;EAAY,WAAW;EAAK,OAAO,OAAO;EAAW,CAAC;AAEpG,KAAI,OAAO,aAAa,OAAO,cAAc,SAAS,UACpD,SAAQ,KAAK;EAAE,MAAM;EAAO,QAAQ,OAAO;EAAW,WAAW;EAAK,OAAO,OAAO;EAAU,CAAC;AAEjG,KAAI,OAAO,eAAe,OAAO,gBAAgB,SAAS,YACxD,SAAQ,KAAK;EAAE,MAAM;EAAS,QAAQ,OAAO;EAAa,WAAW;EAAK,CAAC;AAE7E,QAAO,QAAQ,SAAS,GAAI,SAAQ,OAAO;CAM3C,MAAM,gBAAgB,OAAO,kBAAkB,KAAA,IAC3C,OAAO,gBAEL,OAAO,iBAAiB,YACxB,OAAO,eAAe,YACtB,OAAO,uBAAuB,YAC9B,OAAO,gBAAgB,aAEtB,OAAO,cAAc,KAAA,KAAa,OAAO,cAAc;CAG9D,MAAM,UAAwB;EAC5B,GAAG;EACH;EACA,WAAW;EACX;EACA;EACD;AAGD,KAAI,iBAAiB,CAAC,SAAS,iBAAiB,QAAQ,MACtD,EAAC,YAAY;AACX,MAAI;GACF,MAAM,EAAE,uBAAuB,uBAAuB,MAAM,OAAO;AACnE,OAAI,CAAC,uBAAuB,CAAE;GAC9B,MAAM,UAAU,QAAQ,MAAO,MAAM,sCAAsC;AAC3E,OAAI,CAAC,QAAS;GACd,MAAM,GAAG,OAAO,QAAQ;GAExB,MAAM,EAAE,SAAS,MAAM,OAAO;GAC9B,MAAM,EAAE,cAAc,MAAM,OAAO;GAEnC,MAAM,EAAE,WAAW,MADD,UAAU,KAAK,CAE/B,cAAc,QAAQ,MAAO,MAAM,gBAAgB,GAAG,GAAG,sCACzD;IAAE,UAAU;IAAS,SAAS;IAAO,CACtC;GACD,MAAM,MAAM,OAAO,MAAM;AACzB,OAAI,KAAK;AACP,UAAM,mBAAmB,OAAO,MAAM,KAAK,WAAW,qBAAqB,gBAAgB;AAC3F,UAAM,mBAA
mB,OAAO,MAAM,KAAK,WAAW,mBAAmB,eAAe;AACxF,YAAQ,IAAI,gDAAgD,QAAQ,IAAI,IAAI,MAAM,GAAG,EAAE,CAAC,GAAG;;WAEtF,KAAU;AACjB,WAAQ,KAAK,mDAAmD,IAAI,UAAU;;KAE9E;AAIN,KAAI,aAAa,oBACf,KAAI;AACF,qBAAS,QAAQ;UACV,KAAK;AACZ,UAAQ,MAAM,+DAA+D,IAAI;;AAKrF,UAAS,WAAW;AACpB,oBAAmB,UAAU,SAAS;AAEtC,gBAAe;EAAE,MAAM;EAAkB;EAAS,QAAQ;EAAS,CAAC;AAEpE,QAAO;;AAGT,SAAgB,gBAAgB,SAAiB,WAAW,qBAA0C;AAEpG,KAAI,aAAa,oBACf,KAAI;EACF,MAAM,SAAS,sBAAsB,QAAQ;AAC7C,MAAI,OAAQ,QAAO;SACb;AAKV,QADiB,mBAAmB,SAAS,CAC7B,YAAY;;AAkB9B,SAAgB,kBAAkB,SAAiB,WAAW,qBAA2B;CACvF,MAAM,WAAW,mBAAmB,SAAS;AAC7C,QAAO,SAAS;AAChB,oBAAmB,UAAU,SAAS;AAGtC,KAAI,aAAa,oBACf,KAAI;AACF,qBAAS,QAAQ;UACV,KAAK;AACZ,UAAQ,MAAM,gEAAgE,IAAI;;;;;yBAhOhC;wBAMhB;AAkClC,uBAAsB,KAAK,SAAS,EAAE,eAAe,qBAAqB"}
@@ -2,7 +2,7 @@ import { t as __esmMin } from "./chunk-ruWRV7i3.js";
  import { U as getPanopticonHome, W as init_paths } from "./paths-lMaxrYtT.js";
  import { c as getProject, p as init_projects } from "./projects-CvLepaxC.js";
  import { n as init_work_type_router, t as getModelId } from "./work-type-router-CS2BB1vS.js";
- import { mt as getRecentRunLogs, yt as init_specialist_logs } from "./specialists-ldNesMhg.js";
+ import { mt as getRecentRunLogs, yt as init_specialist_logs } from "./specialists-DEKqgkxp.js";
  import { existsSync, mkdirSync, readFileSync, unlinkSync, writeFileSync } from "fs";
  import { join } from "path";
  import { exec } from "child_process";
@@ -232,4 +232,4 @@ __esmMin((() => {
  }))();
  export { loadContextDigest, scheduleDigestGeneration };

- //# sourceMappingURL=specialist-context-CRBBW-z5.js.map
+ //# sourceMappingURL=specialist-context-BdNFsfMG.js.map
@@ -1 +1 @@
- {"version":3,"file":"specialist-context-CRBBW-z5.js","names":[],"sources":["../src/lib/cloister/specialist-context.ts"],"sourcesContent":["/**\n * Specialist Context Management\n *\n * Generates and manages AI-powered context digests from recent specialist runs.\n * These digests seed new specialist sessions with learned patterns and expertise.\n *\n * Directory structure:\n * ~/.panopticon/specialists/{projectKey}/{specialistType}/context/latest-digest.md\n */\n\nimport { existsSync, mkdirSync, readFileSync, writeFileSync, unlinkSync } from 'fs';\nimport { join } from 'path';\nimport { exec } from 'child_process';\nimport { promisify } from 'util';\nimport { getPanopticonHome } from '../paths.js';\nimport { getRecentRunLogs, type RunLogEntry } from './specialist-logs.js';\nimport { getProject } from '../projects.js';\nimport { getModelId } from '../work-type-router.js';\n\nconst execAsync = promisify(exec);\n\n/** Get specialists directory (lazy to support test env overrides) */\nfunction getSpecialistsDir(): string {\n return join(getPanopticonHome(), 'specialists');\n}\n\n/**\n * Get the context directory for a project's specialist\n */\nexport function getContextDirectory(projectKey: string, specialistType: string): string {\n return join(getSpecialistsDir(), projectKey, specialistType, 'context');\n}\n\n/**\n * Get the path to the latest context digest file\n */\nexport function getContextDigestPath(projectKey: string, specialistType: string): string {\n const contextDir = getContextDirectory(projectKey, specialistType);\n return join(contextDir, 'latest-digest.md');\n}\n\n/**\n * Ensure context directory exists for a project's specialist\n */\nfunction ensureContextDirectory(projectKey: string, specialistType: string): void {\n const contextDir = getContextDirectory(projectKey, specialistType);\n if (!existsSync(contextDir)) {\n mkdirSync(contextDir, { recursive: true });\n }\n}\n\n/**\n * Load the context digest for a specialist\n *\n * @param projectKey - Project identifier\n * @param specialistType - Specialist type\n * @returns Context digest content or null if not found\n */\nexport function loadContextDigest(projectKey: string, specialistType: string): string | null {\n const digestPath = getContextDigestPath(projectKey, specialistType);\n\n if (!existsSync(digestPath)) {\n return null;\n }\n\n try {\n return readFileSync(digestPath, 'utf-8');\n } catch (error) {\n console.error(`[specialist-context] Failed to load digest for ${projectKey}/${specialistType}:`, error);\n return null;\n }\n}\n\n/**\n * Get the number of recent runs to include in context\n *\n * Reads from project config or uses default.\n *\n * @param projectKey - Project identifier\n * @returns Number of runs to include (default: 5)\n */\nfunction getContextRunsCount(projectKey: string): number {\n const project = getProject(projectKey);\n return project?.specialists?.context_runs ?? 
5;\n}\n\n/**\n * Get the model to use for digest generation\n *\n * Reads from project config or uses the same model as the specialist.\n *\n * @param projectKey - Project identifier\n * @param specialistType - Specialist type\n * @returns Model ID to use\n */\nfunction getDigestModel(projectKey: string, specialistType: string): string {\n const project = getProject(projectKey);\n\n // Check for explicit digest model in project config\n if (project?.specialists?.digest_model) {\n return project.specialists.digest_model;\n }\n\n // Fall back to specialist's model\n try {\n const workTypeId = `specialist-${specialistType}` as any;\n return getModelId(workTypeId);\n } catch (error) {\n // Default to Sonnet if can't resolve\n return 'claude-sonnet-4-6';\n }\n}\n\n/**\n * Generate a context digest from recent runs using AI\n *\n * Creates an AI-generated summary of recent specialist runs to provide\n * context for the next run. This includes patterns, learnings, and common issues.\n *\n * @param projectKey - Project identifier\n * @param specialistType - Specialist type\n * @param options - Generation options\n * @returns Generated digest or null if generation failed\n */\nexport async function generateContextDigest(\n projectKey: string,\n specialistType: string,\n options: {\n runCount?: number;\n model?: string;\n force?: boolean; // Generate even if no recent runs\n } = {}\n): Promise<string | null> {\n ensureContextDirectory(projectKey, specialistType);\n\n // Get recent runs\n const runCount = options.runCount ?? getContextRunsCount(projectKey);\n const recentRuns = getRecentRunLogs(projectKey, specialistType, runCount);\n\n if (recentRuns.length === 0 && !options.force) {\n console.log(`[specialist-context] No recent runs for ${projectKey}/${specialistType}, skipping digest generation`);\n return null;\n }\n\n // Build prompt for digest generation\n const prompt = buildDigestPrompt(projectKey, specialistType, recentRuns);\n const model = options.model ?? getDigestModel(projectKey, specialistType);\n\n try {\n console.log(`[specialist-context] Generating digest for ${projectKey}/${specialistType} using ${model}...`);\n\n // Use Claude Code CLI to generate digest\n // Write prompt to temp file to avoid shell escaping issues\n const tempDir = join(getPanopticonHome(), 'tmp');\n if (!existsSync(tempDir)) {\n mkdirSync(tempDir, { recursive: true });\n }\n\n const promptFile = join(tempDir, `digest-prompt-${Date.now()}.md`);\n writeFileSync(promptFile, prompt, 'utf-8');\n\n // Run Claude Code with the prompt (include provider env vars for non-Anthropic models)\n const { getProviderEnvForModel } = await import('../agents.js');\n const providerEnv = getProviderEnvForModel(model);\n const envPrefix = Object.entries(providerEnv).map(([k, v]) => `${k}=\"${v}\"`).join(' ');\n const { stdout, stderr } = await execAsync(\n `${envPrefix ? 
envPrefix + ' ' : ''}claude --dangerously-skip-permissions --model ${model} \"$(cat '${promptFile}')\"`,\n {\n encoding: 'utf-8',\n maxBuffer: 10 * 1024 * 1024, // 10MB buffer\n timeout: 60000, // 60 second timeout\n }\n );\n\n // Clean up temp file\n try {\n unlinkSync(promptFile);\n } catch {\n // Ignore cleanup errors\n }\n\n if (stderr && !stderr.includes('warning')) {\n console.error(`[specialist-context] Claude stderr:`, stderr);\n }\n\n const digest = stdout.trim();\n\n if (!digest) {\n console.error(`[specialist-context] Empty digest generated`);\n return null;\n }\n\n // Save digest\n const digestPath = getContextDigestPath(projectKey, specialistType);\n writeFileSync(digestPath, digest, 'utf-8');\n\n console.log(`[specialist-context] Generated digest (${digest.length} chars)`);\n return digest;\n } catch (error: any) {\n console.error(`[specialist-context] Failed to generate digest:`, error.message);\n // Degrade gracefully - return null so specialist can continue without context\n return null;\n }\n}\n\n/**\n * Build the prompt for digest generation\n *\n * @param projectKey - Project identifier\n * @param specialistType - Specialist type\n * @param recentRuns - Recent run logs\n * @returns Prompt for Claude\n */\nfunction buildDigestPrompt(\n projectKey: string,\n specialistType: string,\n recentRuns: RunLogEntry[]\n): string {\n const project = getProject(projectKey);\n const projectName = project?.name || projectKey;\n\n let prompt = `You are analyzing the recent history of a ${specialistType} specialist for the ${projectName} project.\n\nYour task is to generate a concise context digest that will be provided to the specialist at the start of their next run. This digest should help them understand:\n- Common patterns and practices observed in recent runs\n- Recurring issues or failure modes\n- Successful approaches and best practices\n- Any project-specific context that would be helpful\n\nGenerate a digest in markdown format. Keep it focused and actionable - aim for 200-400 words total.\n\n## Recent Runs\n\n`;\n\n if (recentRuns.length === 0) {\n prompt += `No recent runs available yet. This is the specialist's first run.\\n\\n`;\n prompt += `Generate a brief introduction for the specialist explaining their role and what to expect.\\n`;\n } else {\n recentRuns.forEach((run, index) => {\n prompt += `### Run ${index + 1}: ${run.metadata.issueId} (${run.metadata.status || 'unknown'})\\n`;\n prompt += `Started: ${run.metadata.startedAt}\\n`;\n if (run.metadata.finishedAt) {\n prompt += `Finished: ${run.metadata.finishedAt}\\n`;\n }\n if (run.metadata.duration) {\n const durationSec = Math.floor(run.metadata.duration / 1000);\n const minutes = Math.floor(durationSec / 60);\n const seconds = durationSec % 60;\n prompt += `Duration: ${minutes}m ${seconds}s\\n`;\n }\n if (run.metadata.notes) {\n prompt += `Notes: ${run.metadata.notes}\\n`;\n }\n\n // Include snippets from the log if available\n try {\n const logContent = readFileSync(run.filePath, 'utf-8');\n // Extract key sections (limit to avoid overwhelming the prompt)\n const maxChars = 500;\n const transcriptMatch = logContent.match(/## Session Transcript\\n([\\s\\S]+?)(?=\\n## |$)/);\n if (transcriptMatch) {\n let transcript = transcriptMatch[1].trim();\n if (transcript.length > maxChars) {\n transcript = transcript.substring(0, maxChars) + '... 
[truncated]';\n }\n prompt += `\\nTranscript excerpt:\\n${transcript}\\n`;\n }\n } catch (error) {\n // If we can't read the log, skip the excerpt\n }\n\n prompt += `\\n`;\n });\n }\n\n prompt += `\\n## Your Task\n\nGenerate a context digest that summarizes the key insights from these runs. Format it as:\n\n# Recent ${specialistType} History for ${projectName}\n\n## Summary\n[2-3 sentence overview of patterns and trends]\n\n## Common Patterns\n[Bulleted list of observed patterns]\n\n## Recent Notable Runs\n[Brief highlights of 2-3 most interesting runs]\n\n## Recommendations\n[Specific guidance for the next run based on this history]\n\nKeep it concise, actionable, and focused on helping the specialist be more effective.`;\n\n return prompt;\n}\n\n/**\n * Regenerate the context digest\n *\n * Forces regeneration even if a digest already exists.\n *\n * @param projectKey - Project identifier\n * @param specialistType - Specialist type\n * @returns Generated digest or null if generation failed\n */\nexport async function regenerateContextDigest(\n projectKey: string,\n specialistType: string\n): Promise<string | null> {\n return generateContextDigest(projectKey, specialistType, { force: true });\n}\n\n/**\n * Generate digest after a run completes (async, fire-and-forget)\n *\n * This is called after a specialist finishes a run to update the context\n * for the next run. It runs asynchronously and failures are logged but not thrown.\n *\n * @param projectKey - Project identifier\n * @param specialistType - Specialist type\n */\nexport function scheduleDigestGeneration(projectKey: string, specialistType: string): void {\n // Run async without awaiting\n generateContextDigest(projectKey, specialistType).catch((error) => {\n console.error(\n `[specialist-context] Background digest generation failed for ${projectKey}/${specialistType}:`,\n error\n );\n });\n}\n\n/**\n * Check if a context digest exists\n *\n * @param projectKey - Project identifier\n * @param specialistType - Specialist type\n * @returns True if digest file exists\n */\nexport function hasContextDigest(projectKey: string, specialistType: string): boolean {\n const digestPath = getContextDigestPath(projectKey, specialistType);\n return existsSync(digestPath);\n}\n\n/**\n * Delete the context digest\n *\n * Useful for forcing a fresh start or clearing stale context.\n *\n * @param projectKey - Project identifier\n * @param specialistType - Specialist type\n * @returns True if digest was deleted, false if it didn't exist\n */\nexport function deleteContextDigest(projectKey: string, specialistType: string): boolean {\n const digestPath = getContextDigestPath(projectKey, specialistType);\n\n if (!existsSync(digestPath)) {\n return false;\n }\n\n try {\n unlinkSync(digestPath);\n return true;\n } catch (error) {\n console.error(`[specialist-context] Failed to delete digest:`, error);\n return false;\n 
}\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;AAsBA,SAAS,oBAA4B;AACnC,QAAO,KAAK,mBAAmB,EAAE,cAAc;;;;;AAMjD,SAAgB,oBAAoB,YAAoB,gBAAgC;AACtF,QAAO,KAAK,mBAAmB,EAAE,YAAY,gBAAgB,UAAU;;;;;AAMzE,SAAgB,qBAAqB,YAAoB,gBAAgC;AAEvF,QAAO,KADY,oBAAoB,YAAY,eAAe,EAC1C,mBAAmB;;;;;AAM7C,SAAS,uBAAuB,YAAoB,gBAA8B;CAChF,MAAM,aAAa,oBAAoB,YAAY,eAAe;AAClE,KAAI,CAAC,WAAW,WAAW,CACzB,WAAU,YAAY,EAAE,WAAW,MAAM,CAAC;;;;;;;;;AAW9C,SAAgB,kBAAkB,YAAoB,gBAAuC;CAC3F,MAAM,aAAa,qBAAqB,YAAY,eAAe;AAEnE,KAAI,CAAC,WAAW,WAAW,CACzB,QAAO;AAGT,KAAI;AACF,SAAO,aAAa,YAAY,QAAQ;UACjC,OAAO;AACd,UAAQ,MAAM,kDAAkD,WAAW,GAAG,eAAe,IAAI,MAAM;AACvG,SAAO;;;;;;;;;;;AAYX,SAAS,oBAAoB,YAA4B;AAEvD,QADgB,WAAW,WAAW,EACtB,aAAa,gBAAgB;;;;;;;;;;;AAY/C,SAAS,eAAe,YAAoB,gBAAgC;CAC1E,MAAM,UAAU,WAAW,WAAW;AAGtC,KAAI,SAAS,aAAa,aACxB,QAAO,QAAQ,YAAY;AAI7B,KAAI;AAEF,SAAO,WADY,cAAc,iBACJ;UACtB,OAAO;AAEd,SAAO;;;;;;;;;;;;;;AAeX,eAAsB,sBACpB,YACA,gBACA,UAII,EAAE,EACkB;AACxB,wBAAuB,YAAY,eAAe;CAIlD,MAAM,aAAa,iBAAiB,YAAY,gBAD/B,QAAQ,YAAY,oBAAoB,WAAW,CACK;AAEzE,KAAI,WAAW,WAAW,KAAK,CAAC,QAAQ,OAAO;AAC7C,UAAQ,IAAI,2CAA2C,WAAW,GAAG,eAAe,8BAA8B;AAClH,SAAO;;CAIT,MAAM,SAAS,kBAAkB,YAAY,gBAAgB,WAAW;CACxE,MAAM,QAAQ,QAAQ,SAAS,eAAe,YAAY,eAAe;AAEzE,KAAI;AACF,UAAQ,IAAI,8CAA8C,WAAW,GAAG,eAAe,SAAS,MAAM,KAAK;EAI3G,MAAM,UAAU,KAAK,mBAAmB,EAAE,MAAM;AAChD,MAAI,CAAC,WAAW,QAAQ,CACtB,WAAU,SAAS,EAAE,WAAW,MAAM,CAAC;EAGzC,MAAM,aAAa,KAAK,SAAS,iBAAiB,KAAK,KAAK,CAAC,KAAK;AAClE,gBAAc,YAAY,QAAQ,QAAQ;EAG1C,MAAM,EAAE,2BAA2B,MAAM,OAAO;EAChD,MAAM,cAAc,uBAAuB,MAAM;EACjD,MAAM,YAAY,OAAO,QAAQ,YAAY,CAAC,KAAK,CAAC,GAAG,OAAO,GAAG,EAAE,IAAI,EAAE,GAAG,CAAC,KAAK,IAAI;EACtF,MAAM,EAAE,QAAQ,WAAW,MAAM,UAC/B,GAAG,YAAY,YAAY,MAAM,GAAG,gDAAgD,MAAM,WAAW,WAAW,MAChH;GACE,UAAU;GACV,WAAW,KAAK,OAAO;GACvB,SAAS;GACV,CACF;AAGD,MAAI;AACF,cAAW,WAAW;UAChB;AAIR,MAAI,UAAU,CAAC,OAAO,SAAS,UAAU,CACvC,SAAQ,MAAM,uCAAuC,OAAO;EAG9D,MAAM,SAAS,OAAO,MAAM;AAE5B,MAAI,CAAC,QAAQ;AACX,WAAQ,MAAM,8CAA8C;AAC5D,UAAO;;AAKT,gBADmB,qBAAqB,YAAY,eAAe,EACzC,QAAQ,QAAQ;AAE1C,UAAQ,IAAI,0CAA0C,OAAO,OAAO,SAAS;AAC7E,SAAO;UACA,OAAY;AACnB,UAAQ,MAAM,mDAAmD,MAAM,QAAQ;AAE/E,SAAO;;;;;;;;;;;AAYX,SAAS,kBACP,YACA,gBACA,YACQ;CAER,MAAM,cADU,WAAW,WAAW,EACT,QAAQ;CAErC,IAAI,SAAS,6CAA6C,eAAe,sBAAsB,YAAY;;;;;;;;;;;;;AAc3G,KAAI,WAAW,WAAW,GAAG;AAC3B,YAAU;AACV,YAAU;OAEV,YAAW,SAAS,KAAK,UAAU;AACjC,YAAU,WAAW,QAAQ,EAAE,IAAI,IAAI,SAAS,QAAQ,IAAI,IAAI,SAAS,UAAU,UAAU;AAC7F,YAAU,YAAY,IAAI,SAAS,UAAU;AAC7C,MAAI,IAAI,SAAS,WACf,WAAU,aAAa,IAAI,SAAS,WAAW;AAEjD,MAAI,IAAI,SAAS,UAAU;GACzB,MAAM,cAAc,KAAK,MAAM,IAAI,SAAS,WAAW,IAAK;GAC5D,MAAM,UAAU,KAAK,MAAM,cAAc,GAAG;GAC5C,MAAM,UAAU,cAAc;AAC9B,aAAU,aAAa,QAAQ,IAAI,QAAQ;;AAE7C,MAAI,IAAI,SAAS,MACf,WAAU,UAAU,IAAI,SAAS,MAAM;AAIzC,MAAI;GACF,MAAM,aAAa,aAAa,IAAI,UAAU,QAAQ;GAEtD,MAAM,WAAW;GACjB,MAAM,kBAAkB,WAAW,MAAM,+CAA+C;AACxF,OAAI,iBAAiB;IACnB,IAAI,aAAa,gBAAgB,GAAG,MAAM;AAC1C,QAAI,WAAW,SAAS,SACtB,cAAa,WAAW,UAAU,GAAG,SAAS,GAAG;AAEnD,cAAU,0BAA0B,WAAW;;WAE1C,OAAO;AAIhB,YAAU;GACV;AAGJ,WAAU;;;;WAID,eAAe,eAAe,YAAY;;;;;;;;;;;;;;;AAgBnD,QAAO;;;;;;;;;;;AA4BT,SAAgB,yBAAyB,YAAoB,gBAA8B;AAEzF,uBAAsB,YAAY,eAAe,CAAC,OAAO,UAAU;AACjE,UAAQ,MACN,gEAAgE,WAAW,GAAG,eAAe,IAC7F,MACD;GACD;;;;;aA7T4C;uBAC0B;gBAC9B;wBACQ;AAE9C,aAAY,UAAU,KAAK"}
+ {"version":3,"file":"specialist-context-BdNFsfMG.js","names":[],"sources":["../src/lib/cloister/specialist-context.ts"],"sourcesContent":["/**\n * Specialist Context Management\n *\n * Generates and manages AI-powered context digests from recent specialist runs.\n * These digests seed new specialist sessions with learned patterns and expertise.\n *\n * Directory structure:\n * ~/.panopticon/specialists/{projectKey}/{specialistType}/context/latest-digest.md\n */\n\nimport { existsSync, mkdirSync, readFileSync, writeFileSync, unlinkSync } from 'fs';\nimport { join } from 'path';\nimport { exec } from 'child_process';\nimport { promisify } from 'util';\nimport { getPanopticonHome } from '../paths.js';\nimport { getRecentRunLogs, type RunLogEntry } from './specialist-logs.js';\nimport { getProject } from '../projects.js';\nimport { getModelId } from '../work-type-router.js';\n\nconst execAsync = promisify(exec);\n\n/** Get specialists directory (lazy to support test env overrides) */\nfunction getSpecialistsDir(): string {\n return join(getPanopticonHome(), 'specialists');\n}\n\n/**\n * Get the context directory for a project's specialist\n */\nexport function getContextDirectory(projectKey: string, specialistType: string): string {\n return join(getSpecialistsDir(), projectKey, specialistType, 'context');\n}\n\n/**\n * Get the path to the latest context digest file\n */\nexport function getContextDigestPath(projectKey: string, specialistType: string): string {\n const contextDir = getContextDirectory(projectKey, specialistType);\n return join(contextDir, 'latest-digest.md');\n}\n\n/**\n * Ensure context directory exists for a project's specialist\n */\nfunction ensureContextDirectory(projectKey: string, specialistType: string): void {\n const contextDir = getContextDirectory(projectKey, specialistType);\n if (!existsSync(contextDir)) {\n mkdirSync(contextDir, { recursive: true });\n }\n}\n\n/**\n * Load the context digest for a specialist\n *\n * @param projectKey - Project identifier\n * @param specialistType - Specialist type\n * @returns Context digest content or null if not found\n */\nexport function loadContextDigest(projectKey: string, specialistType: string): string | null {\n const digestPath = getContextDigestPath(projectKey, specialistType);\n\n if (!existsSync(digestPath)) {\n return null;\n }\n\n try {\n return readFileSync(digestPath, 'utf-8');\n } catch (error) {\n console.error(`[specialist-context] Failed to load digest for ${projectKey}/${specialistType}:`, error);\n return null;\n }\n}\n\n/**\n * Get the number of recent runs to include in context\n *\n * Reads from project config or uses default.\n *\n * @param projectKey - Project identifier\n * @returns Number of runs to include (default: 5)\n */\nfunction getContextRunsCount(projectKey: string): number {\n const project = getProject(projectKey);\n return project?.specialists?.context_runs ?? 
5;\n}\n\n/**\n * Get the model to use for digest generation\n *\n * Reads from project config or uses the same model as the specialist.\n *\n * @param projectKey - Project identifier\n * @param specialistType - Specialist type\n * @returns Model ID to use\n */\nfunction getDigestModel(projectKey: string, specialistType: string): string {\n const project = getProject(projectKey);\n\n // Check for explicit digest model in project config\n if (project?.specialists?.digest_model) {\n return project.specialists.digest_model;\n }\n\n // Fall back to specialist's model\n try {\n const workTypeId = `specialist-${specialistType}` as any;\n return getModelId(workTypeId);\n } catch (error) {\n // Default to Sonnet if can't resolve\n return 'claude-sonnet-4-6';\n }\n}\n\n/**\n * Generate a context digest from recent runs using AI\n *\n * Creates an AI-generated summary of recent specialist runs to provide\n * context for the next run. This includes patterns, learnings, and common issues.\n *\n * @param projectKey - Project identifier\n * @param specialistType - Specialist type\n * @param options - Generation options\n * @returns Generated digest or null if generation failed\n */\nexport async function generateContextDigest(\n projectKey: string,\n specialistType: string,\n options: {\n runCount?: number;\n model?: string;\n force?: boolean; // Generate even if no recent runs\n } = {}\n): Promise<string | null> {\n ensureContextDirectory(projectKey, specialistType);\n\n // Get recent runs\n const runCount = options.runCount ?? getContextRunsCount(projectKey);\n const recentRuns = getRecentRunLogs(projectKey, specialistType, runCount);\n\n if (recentRuns.length === 0 && !options.force) {\n console.log(`[specialist-context] No recent runs for ${projectKey}/${specialistType}, skipping digest generation`);\n return null;\n }\n\n // Build prompt for digest generation\n const prompt = buildDigestPrompt(projectKey, specialistType, recentRuns);\n const model = options.model ?? getDigestModel(projectKey, specialistType);\n\n try {\n console.log(`[specialist-context] Generating digest for ${projectKey}/${specialistType} using ${model}...`);\n\n // Use Claude Code CLI to generate digest\n // Write prompt to temp file to avoid shell escaping issues\n const tempDir = join(getPanopticonHome(), 'tmp');\n if (!existsSync(tempDir)) {\n mkdirSync(tempDir, { recursive: true });\n }\n\n const promptFile = join(tempDir, `digest-prompt-${Date.now()}.md`);\n writeFileSync(promptFile, prompt, 'utf-8');\n\n // Run Claude Code with the prompt (include provider env vars for non-Anthropic models)\n const { getProviderEnvForModel } = await import('../agents.js');\n const providerEnv = getProviderEnvForModel(model);\n const envPrefix = Object.entries(providerEnv).map(([k, v]) => `${k}=\"${v}\"`).join(' ');\n const { stdout, stderr } = await execAsync(\n `${envPrefix ? 
envPrefix + ' ' : ''}claude --dangerously-skip-permissions --model ${model} \"$(cat '${promptFile}')\"`,\n {\n encoding: 'utf-8',\n maxBuffer: 10 * 1024 * 1024, // 10MB buffer\n timeout: 60000, // 60 second timeout\n }\n );\n\n // Clean up temp file\n try {\n unlinkSync(promptFile);\n } catch {\n // Ignore cleanup errors\n }\n\n if (stderr && !stderr.includes('warning')) {\n console.error(`[specialist-context] Claude stderr:`, stderr);\n }\n\n const digest = stdout.trim();\n\n if (!digest) {\n console.error(`[specialist-context] Empty digest generated`);\n return null;\n }\n\n // Save digest\n const digestPath = getContextDigestPath(projectKey, specialistType);\n writeFileSync(digestPath, digest, 'utf-8');\n\n console.log(`[specialist-context] Generated digest (${digest.length} chars)`);\n return digest;\n } catch (error: any) {\n console.error(`[specialist-context] Failed to generate digest:`, error.message);\n // Degrade gracefully - return null so specialist can continue without context\n return null;\n }\n}\n\n/**\n * Build the prompt for digest generation\n *\n * @param projectKey - Project identifier\n * @param specialistType - Specialist type\n * @param recentRuns - Recent run logs\n * @returns Prompt for Claude\n */\nfunction buildDigestPrompt(\n projectKey: string,\n specialistType: string,\n recentRuns: RunLogEntry[]\n): string {\n const project = getProject(projectKey);\n const projectName = project?.name || projectKey;\n\n let prompt = `You are analyzing the recent history of a ${specialistType} specialist for the ${projectName} project.\n\nYour task is to generate a concise context digest that will be provided to the specialist at the start of their next run. This digest should help them understand:\n- Common patterns and practices observed in recent runs\n- Recurring issues or failure modes\n- Successful approaches and best practices\n- Any project-specific context that would be helpful\n\nGenerate a digest in markdown format. Keep it focused and actionable - aim for 200-400 words total.\n\n## Recent Runs\n\n`;\n\n if (recentRuns.length === 0) {\n prompt += `No recent runs available yet. This is the specialist's first run.\\n\\n`;\n prompt += `Generate a brief introduction for the specialist explaining their role and what to expect.\\n`;\n } else {\n recentRuns.forEach((run, index) => {\n prompt += `### Run ${index + 1}: ${run.metadata.issueId} (${run.metadata.status || 'unknown'})\\n`;\n prompt += `Started: ${run.metadata.startedAt}\\n`;\n if (run.metadata.finishedAt) {\n prompt += `Finished: ${run.metadata.finishedAt}\\n`;\n }\n if (run.metadata.duration) {\n const durationSec = Math.floor(run.metadata.duration / 1000);\n const minutes = Math.floor(durationSec / 60);\n const seconds = durationSec % 60;\n prompt += `Duration: ${minutes}m ${seconds}s\\n`;\n }\n if (run.metadata.notes) {\n prompt += `Notes: ${run.metadata.notes}\\n`;\n }\n\n // Include snippets from the log if available\n try {\n const logContent = readFileSync(run.filePath, 'utf-8');\n // Extract key sections (limit to avoid overwhelming the prompt)\n const maxChars = 500;\n const transcriptMatch = logContent.match(/## Session Transcript\\n([\\s\\S]+?)(?=\\n## |$)/);\n if (transcriptMatch) {\n let transcript = transcriptMatch[1].trim();\n if (transcript.length > maxChars) {\n transcript = transcript.substring(0, maxChars) + '... 
[truncated]';\n }\n prompt += `\\nTranscript excerpt:\\n${transcript}\\n`;\n }\n } catch (error) {\n // If we can't read the log, skip the excerpt\n }\n\n prompt += `\\n`;\n });\n }\n\n prompt += `\\n## Your Task\n\nGenerate a context digest that summarizes the key insights from these runs. Format it as:\n\n# Recent ${specialistType} History for ${projectName}\n\n## Summary\n[2-3 sentence overview of patterns and trends]\n\n## Common Patterns\n[Bulleted list of observed patterns]\n\n## Recent Notable Runs\n[Brief highlights of 2-3 most interesting runs]\n\n## Recommendations\n[Specific guidance for the next run based on this history]\n\nKeep it concise, actionable, and focused on helping the specialist be more effective.`;\n\n return prompt;\n}\n\n/**\n * Regenerate the context digest\n *\n * Forces regeneration even if a digest already exists.\n *\n * @param projectKey - Project identifier\n * @param specialistType - Specialist type\n * @returns Generated digest or null if generation failed\n */\nexport async function regenerateContextDigest(\n projectKey: string,\n specialistType: string\n): Promise<string | null> {\n return generateContextDigest(projectKey, specialistType, { force: true });\n}\n\n/**\n * Generate digest after a run completes (async, fire-and-forget)\n *\n * This is called after a specialist finishes a run to update the context\n * for the next run. It runs asynchronously and failures are logged but not thrown.\n *\n * @param projectKey - Project identifier\n * @param specialistType - Specialist type\n */\nexport function scheduleDigestGeneration(projectKey: string, specialistType: string): void {\n // Run async without awaiting\n generateContextDigest(projectKey, specialistType).catch((error) => {\n console.error(\n `[specialist-context] Background digest generation failed for ${projectKey}/${specialistType}:`,\n error\n );\n });\n}\n\n/**\n * Check if a context digest exists\n *\n * @param projectKey - Project identifier\n * @param specialistType - Specialist type\n * @returns True if digest file exists\n */\nexport function hasContextDigest(projectKey: string, specialistType: string): boolean {\n const digestPath = getContextDigestPath(projectKey, specialistType);\n return existsSync(digestPath);\n}\n\n/**\n * Delete the context digest\n *\n * Useful for forcing a fresh start or clearing stale context.\n *\n * @param projectKey - Project identifier\n * @param specialistType - Specialist type\n * @returns True if digest was deleted, false if it didn't exist\n */\nexport function deleteContextDigest(projectKey: string, specialistType: string): boolean {\n const digestPath = getContextDigestPath(projectKey, specialistType);\n\n if (!existsSync(digestPath)) {\n return false;\n }\n\n try {\n unlinkSync(digestPath);\n return true;\n } catch (error) {\n console.error(`[specialist-context] Failed to delete digest:`, error);\n return false;\n 
}\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;AAsBA,SAAS,oBAA4B;AACnC,QAAO,KAAK,mBAAmB,EAAE,cAAc;;;;;AAMjD,SAAgB,oBAAoB,YAAoB,gBAAgC;AACtF,QAAO,KAAK,mBAAmB,EAAE,YAAY,gBAAgB,UAAU;;;;;AAMzE,SAAgB,qBAAqB,YAAoB,gBAAgC;AAEvF,QAAO,KADY,oBAAoB,YAAY,eAAe,EAC1C,mBAAmB;;;;;AAM7C,SAAS,uBAAuB,YAAoB,gBAA8B;CAChF,MAAM,aAAa,oBAAoB,YAAY,eAAe;AAClE,KAAI,CAAC,WAAW,WAAW,CACzB,WAAU,YAAY,EAAE,WAAW,MAAM,CAAC;;;;;;;;;AAW9C,SAAgB,kBAAkB,YAAoB,gBAAuC;CAC3F,MAAM,aAAa,qBAAqB,YAAY,eAAe;AAEnE,KAAI,CAAC,WAAW,WAAW,CACzB,QAAO;AAGT,KAAI;AACF,SAAO,aAAa,YAAY,QAAQ;UACjC,OAAO;AACd,UAAQ,MAAM,kDAAkD,WAAW,GAAG,eAAe,IAAI,MAAM;AACvG,SAAO;;;;;;;;;;;AAYX,SAAS,oBAAoB,YAA4B;AAEvD,QADgB,WAAW,WAAW,EACtB,aAAa,gBAAgB;;;;;;;;;;;AAY/C,SAAS,eAAe,YAAoB,gBAAgC;CAC1E,MAAM,UAAU,WAAW,WAAW;AAGtC,KAAI,SAAS,aAAa,aACxB,QAAO,QAAQ,YAAY;AAI7B,KAAI;AAEF,SAAO,WADY,cAAc,iBACJ;UACtB,OAAO;AAEd,SAAO;;;;;;;;;;;;;;AAeX,eAAsB,sBACpB,YACA,gBACA,UAII,EAAE,EACkB;AACxB,wBAAuB,YAAY,eAAe;CAIlD,MAAM,aAAa,iBAAiB,YAAY,gBAD/B,QAAQ,YAAY,oBAAoB,WAAW,CACK;AAEzE,KAAI,WAAW,WAAW,KAAK,CAAC,QAAQ,OAAO;AAC7C,UAAQ,IAAI,2CAA2C,WAAW,GAAG,eAAe,8BAA8B;AAClH,SAAO;;CAIT,MAAM,SAAS,kBAAkB,YAAY,gBAAgB,WAAW;CACxE,MAAM,QAAQ,QAAQ,SAAS,eAAe,YAAY,eAAe;AAEzE,KAAI;AACF,UAAQ,IAAI,8CAA8C,WAAW,GAAG,eAAe,SAAS,MAAM,KAAK;EAI3G,MAAM,UAAU,KAAK,mBAAmB,EAAE,MAAM;AAChD,MAAI,CAAC,WAAW,QAAQ,CACtB,WAAU,SAAS,EAAE,WAAW,MAAM,CAAC;EAGzC,MAAM,aAAa,KAAK,SAAS,iBAAiB,KAAK,KAAK,CAAC,KAAK;AAClE,gBAAc,YAAY,QAAQ,QAAQ;EAG1C,MAAM,EAAE,2BAA2B,MAAM,OAAO;EAChD,MAAM,cAAc,uBAAuB,MAAM;EACjD,MAAM,YAAY,OAAO,QAAQ,YAAY,CAAC,KAAK,CAAC,GAAG,OAAO,GAAG,EAAE,IAAI,EAAE,GAAG,CAAC,KAAK,IAAI;EACtF,MAAM,EAAE,QAAQ,WAAW,MAAM,UAC/B,GAAG,YAAY,YAAY,MAAM,GAAG,gDAAgD,MAAM,WAAW,WAAW,MAChH;GACE,UAAU;GACV,WAAW,KAAK,OAAO;GACvB,SAAS;GACV,CACF;AAGD,MAAI;AACF,cAAW,WAAW;UAChB;AAIR,MAAI,UAAU,CAAC,OAAO,SAAS,UAAU,CACvC,SAAQ,MAAM,uCAAuC,OAAO;EAG9D,MAAM,SAAS,OAAO,MAAM;AAE5B,MAAI,CAAC,QAAQ;AACX,WAAQ,MAAM,8CAA8C;AAC5D,UAAO;;AAKT,gBADmB,qBAAqB,YAAY,eAAe,EACzC,QAAQ,QAAQ;AAE1C,UAAQ,IAAI,0CAA0C,OAAO,OAAO,SAAS;AAC7E,SAAO;UACA,OAAY;AACnB,UAAQ,MAAM,mDAAmD,MAAM,QAAQ;AAE/E,SAAO;;;;;;;;;;;AAYX,SAAS,kBACP,YACA,gBACA,YACQ;CAER,MAAM,cADU,WAAW,WAAW,EACT,QAAQ;CAErC,IAAI,SAAS,6CAA6C,eAAe,sBAAsB,YAAY;;;;;;;;;;;;;AAc3G,KAAI,WAAW,WAAW,GAAG;AAC3B,YAAU;AACV,YAAU;OAEV,YAAW,SAAS,KAAK,UAAU;AACjC,YAAU,WAAW,QAAQ,EAAE,IAAI,IAAI,SAAS,QAAQ,IAAI,IAAI,SAAS,UAAU,UAAU;AAC7F,YAAU,YAAY,IAAI,SAAS,UAAU;AAC7C,MAAI,IAAI,SAAS,WACf,WAAU,aAAa,IAAI,SAAS,WAAW;AAEjD,MAAI,IAAI,SAAS,UAAU;GACzB,MAAM,cAAc,KAAK,MAAM,IAAI,SAAS,WAAW,IAAK;GAC5D,MAAM,UAAU,KAAK,MAAM,cAAc,GAAG;GAC5C,MAAM,UAAU,cAAc;AAC9B,aAAU,aAAa,QAAQ,IAAI,QAAQ;;AAE7C,MAAI,IAAI,SAAS,MACf,WAAU,UAAU,IAAI,SAAS,MAAM;AAIzC,MAAI;GACF,MAAM,aAAa,aAAa,IAAI,UAAU,QAAQ;GAEtD,MAAM,WAAW;GACjB,MAAM,kBAAkB,WAAW,MAAM,+CAA+C;AACxF,OAAI,iBAAiB;IACnB,IAAI,aAAa,gBAAgB,GAAG,MAAM;AAC1C,QAAI,WAAW,SAAS,SACtB,cAAa,WAAW,UAAU,GAAG,SAAS,GAAG;AAEnD,cAAU,0BAA0B,WAAW;;WAE1C,OAAO;AAIhB,YAAU;GACV;AAGJ,WAAU;;;;WAID,eAAe,eAAe,YAAY;;;;;;;;;;;;;;;AAgBnD,QAAO;;;;;;;;;;;AA4BT,SAAgB,yBAAyB,YAAoB,gBAA8B;AAEzF,uBAAsB,YAAY,eAAe,CAAC,OAAO,UAAU;AACjE,UAAQ,MACN,gEAAgE,WAAW,GAAG,eAAe,IAC7F,MACD;GACD;;;;;aA7T4C;uBAC0B;gBAC9B;wBACQ;AAE9C,aAAY,UAAU,KAAK"}
@@ -1,3 +1,3 @@
- import { St as parseLogMetadata, dt as createRunLog, ft as finalizeRunLog, gt as getRunLogPath, ht as getRunLog, lt as cleanupAllLogs, ut as cleanupOldLogs, xt as listRunLogs, yt as init_specialist_logs } from "./specialists-ldNesMhg.js";
+ import { St as parseLogMetadata, dt as createRunLog, ft as finalizeRunLog, gt as getRunLogPath, ht as getRunLog, lt as cleanupAllLogs, ut as cleanupOldLogs, xt as listRunLogs, yt as init_specialist_logs } from "./specialists-DEKqgkxp.js";
  init_specialist_logs();
  export { cleanupAllLogs, cleanupOldLogs, createRunLog, finalizeRunLog, getRunLog, getRunLogPath, listRunLogs, parseLogMetadata };
@@ -491,6 +491,20 @@ var init_cost = __esmMin((() => {
  inputPer1k: 3e-4,
  outputPer1k: .0012,
  currency: "USD"
+ },
+ {
+ provider: "custom",
+ model: "MiniMax-M2.7",
+ inputPer1k: 3e-4,
+ outputPer1k: .0012,
+ currency: "USD"
+ },
+ {
+ provider: "custom",
+ model: "MiniMax-M2.7-highspeed",
+ inputPer1k: 3e-4,
+ outputPer1k: .0012,
+ currency: "USD"
  }
  ];
  BUDGETS_FILE = join(COSTS_DIR, "budgets.json");
@@ -1538,9 +1552,9 @@ function recordWake(name, sessionId) {
  */
  async function spawnEphemeralSpecialist(projectKey, specialistType, task) {
  ensureProjectSpecialistDir(projectKey, specialistType);
- const { loadContextDigest } = await import("./specialist-context-CRBBW-z5.js");
+ const { loadContextDigest } = await import("./specialist-context-BdNFsfMG.js");
  const contextDigest = loadContextDigest(projectKey, specialistType);
- const { createRunLog } = await import("./specialist-logs-m0UvPm3F.js");
+ const { createRunLog } = await import("./specialist-logs-CLztE_bE.js");
  const { runId, filePath: logFilePath } = createRunLog(projectKey, specialistType, task.issueId, contextDigest || void 0);
  setCurrentRun(projectKey, specialistType, runId);
  incrementProjectRunCount(projectKey, specialistType);
@@ -1961,7 +1975,7 @@ async function terminateSpecialist(projectKey, specialistType) {
  console.error(`[specialist] Failed to kill tmux session ${tmuxSession}:`, error);
  }
  if (metadata.currentRun) {
- const { finalizeRunLog } = await import("./specialist-logs-m0UvPm3F.js");
+ const { finalizeRunLog } = await import("./specialist-logs-CLztE_bE.js");
  try {
  finalizeRunLog(projectKey, specialistType, metadata.currentRun, {
  status: metadata.lastRunStatus || "incomplete",
@@ -1979,7 +1993,7 @@ async function terminateSpecialist(projectKey, specialistType) {
  state: "suspended",
  lastActivity: (/* @__PURE__ */ new Date()).toISOString()
  });
- const { scheduleDigestGeneration } = await import("./specialist-context-CRBBW-z5.js");
+ const { scheduleDigestGeneration } = await import("./specialist-context-BdNFsfMG.js");
  scheduleDigestGeneration(projectKey, specialistType);
  scheduleLogCleanup(projectKey, specialistType);
  }
@@ -1992,7 +2006,7 @@ async function terminateSpecialist(projectKey, specialistType) {
  function scheduleLogCleanup(projectKey, specialistType) {
  Promise.resolve().then(async () => {
  try {
- const { cleanupOldLogs } = await import("./specialist-logs-m0UvPm3F.js");
+ const { cleanupOldLogs } = await import("./specialist-logs-CLztE_bE.js");
  const { getSpecialistRetention } = await import("./projects-DMWmPeIU.js");
  const retention = getSpecialistRetention(projectKey);
  const deleted = cleanupOldLogs(projectKey, specialistType, {
@@ -2626,7 +2640,7 @@ CRITICAL: Do NOT delete the feature branch.`;
  }
  if (totalChangedFiles === 0) {
  console.log(`[specialist] review-agent: stale branch detected for ${task.issueId} — 0 files changed vs main`);
- const { setReviewStatus } = await import("./review-status-p_HOugvo.js");
+ const { setReviewStatus } = await import("./review-status-2TdtHNcs.js");
  setReviewStatus(task.issueId.toUpperCase(), {
  reviewStatus: "passed",
  reviewNotes: "No changes to review — branch identical to main (already merged or stale)"
@@ -3233,4 +3247,4 @@ var init_specialists = __esmMin((() => {
  //#endregion
  export { updateContextTokens as $, initSpecialistsDirectory as A, formatCost as At, loadRegistry as B, init_io as Bt, getSessionGeneration as C, getProjectDirs as Ct, getSpecialistStatus as D, checkBudget as Dt, getSpecialistState as E, parseClaudeSession as Et, isInitialized as F, getWeeklySummary as Ft, sendFeedbackToAgent as G, recordWake as H, updateItemStatus as Ht, isRunning as I, init_cost as It, signalSpecialistCompletion as J, setCurrentRun as K, listProjectsWithSpecialists as L, readIssueCosts as Lt, initializeEnabledSpecialists as M, getAllBudgets as Mt, initializeSpecialist as N, getDailySummary as Nt, getTmuxSessionName as O, createBudget as Ot, isEnabled as P, getMonthlySummary as Pt, terminateSpecialist as Q, listSessionFiles as R, readTodayCosts as Rt, getSessionFilePath as S, parseLogMetadata as St, getSpecialistMetadata as T, init_jsonl_parser as Tt, resumeGracePeriod as U, updateSubItemStatus as Ut, pauseGracePeriod as V, readWorkspacePlan as Vt, saveRegistry as W, startGracePeriod as X, spawnEphemeralSpecialist as Y, submitToSpecialistQueue as Z, getGracePeriodState as _, getRunLogSize as _t, completeSpecialistTask as a, wakeSpecialistWithTask as at, getProjectSpecialistDir as b, isRunLogActive as bt, enableSpecialist as c, checkLogSizeLimit as ct, findSessionFile as d, createRunLog as dt, updateProjectSpecialistMetadata as et, getAllProjectSpecialistStatuses as f, finalizeRunLog as ft, getFeedbackStats as g, getRunLogPath as gt, getEnabledSpecialists as h, getRunLog as ht, clearSessionId as i, wakeSpecialistOrQueue as it, init_specialists as j, generateReport as jt, incrementProjectRunCount as k, deleteBudget as kt, ensureProjectSpecialistDir as l, cleanupAllLogs as lt, getAllSpecialists as m, getRecentRunLogs as mt, bumpSessionGeneration as n, updateSpecialistMetadata as nt, countContextTokens as o, MAX_LOG_SIZE as ot, getAllSpecialistStatus as p, generateRunId as pt, setSessionId as q, checkSpecialistQueue as r, wakeSpecialist as rt, disableSpecialist as s, appendToRunLog as st, buildTestAgentPromptContent as t, updateRunStatus as tt, exitGracePeriod as u, cleanupOldLogs as ut, getNextSpecialistTask as v, getRunsDirectory as vt, getSessionId as w, getSessionFiles as wt, getProjectSpecialistMetadata as x, listRunLogs as xt, getPendingFeedback as y, init_specialist_logs as yt, listSpecialistsForProject as z, summarizeCosts as zt };
 
- //# sourceMappingURL=specialists-ldNesMhg.js.map
+ //# sourceMappingURL=specialists-DEKqgkxp.js.map