panopticon-cli 0.5.8 → 0.5.9

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (61) hide show
  1. package/README.md +29 -83
  2. package/dist/{agents-I6RAEGL5.js → agents-M2ZOZL3P.js} +8 -6
  3. package/dist/{chunk-UKSGE6RH.js → chunk-3KYTNMSE.js} +1 -2
  4. package/dist/{chunk-UKSGE6RH.js.map → chunk-3KYTNMSE.js.map} +1 -1
  5. package/dist/{chunk-M6ZVVKZ3.js → chunk-3WDSD2VK.js} +52 -38
  6. package/dist/chunk-3WDSD2VK.js.map +1 -0
  7. package/dist/{chunk-ZN5RHWGR.js → chunk-4R6ATXYI.js} +5 -5
  8. package/dist/{chunk-ZMJFEHGF.js → chunk-7ZB5D46Y.js} +2 -2
  9. package/dist/{chunk-ZMJFEHGF.js.map → chunk-7ZB5D46Y.js.map} +1 -1
  10. package/dist/{chunk-SUM2WVPF.js → chunk-GM22HPYS.js} +10 -10
  11. package/dist/{chunk-BYWVPPAZ.js → chunk-KPGVCGST.js} +25 -2
  12. package/dist/{chunk-BYWVPPAZ.js.map → chunk-KPGVCGST.js.map} +1 -1
  13. package/dist/{chunk-NYOGHGIW.js → chunk-QQ27EVBD.js} +10 -9
  14. package/dist/chunk-QQ27EVBD.js.map +1 -0
  15. package/dist/{chunk-IZIXJYXZ.js → chunk-TA5X4QYQ.js} +6 -2
  16. package/dist/{chunk-IZIXJYXZ.js.map → chunk-TA5X4QYQ.js.map} +1 -1
  17. package/dist/{chunk-43F4LDZ4.js → chunk-VVTAPQOI.js} +2 -2
  18. package/dist/{chunk-YAAT66RT.js → chunk-WP6ZLWU3.js} +28 -3
  19. package/dist/chunk-WP6ZLWU3.js.map +1 -0
  20. package/dist/cli/index.js +667 -279
  21. package/dist/cli/index.js.map +1 -1
  22. package/dist/dashboard/prompts/inspect-agent.md +157 -0
  23. package/dist/dashboard/prompts/uat-agent.md +215 -0
  24. package/dist/dashboard/prompts/work-agent.md +45 -5
  25. package/dist/dashboard/public/assets/{index-C7hJ5-o1.js → index-DqPey4Of.js} +62 -62
  26. package/dist/dashboard/public/index.html +2 -5
  27. package/dist/dashboard/server.js +2031 -1375
  28. package/dist/factory-KKT7324R.js +20 -0
  29. package/dist/{feedback-writer-T2WCT6EZ.js → feedback-writer-IPPIUPDX.js} +2 -2
  30. package/dist/feedback-writer-IPPIUPDX.js.map +1 -0
  31. package/dist/index.js +17 -17
  32. package/dist/{merge-agent-ZITLVF2B.js → merge-agent-756U4NPX.js} +10 -10
  33. package/dist/{projects-3CRF57ZU.js → projects-BPGM6IFB.js} +2 -2
  34. package/dist/{remote-workspace-M4IULGFZ.js → remote-workspace-LKRDGYEB.js} +2 -2
  35. package/dist/{review-status-J2YJGL3E.js → review-status-E77PZZWG.js} +2 -2
  36. package/dist/{specialist-context-W25PPWM4.js → specialist-context-UBVUUFJV.js} +5 -5
  37. package/dist/{specialist-logs-KPC45SZN.js → specialist-logs-FQRI3AIS.js} +5 -5
  38. package/dist/{specialists-H4LGYR7R.js → specialists-CXRGSJY3.js} +5 -5
  39. package/dist/{traefik-QXLZ4PO2.js → traefik-X2IWTUHO.js} +3 -3
  40. package/dist/{workspace-manager-G6TTBPC3.js → workspace-manager-OWHLR5BL.js} +2 -2
  41. package/dist/workspace-manager-OWHLR5BL.js.map +1 -0
  42. package/package.json +1 -1
  43. package/scripts/inspect-on-bead-close +73 -0
  44. package/scripts/stop-hook +17 -0
  45. package/dist/chunk-M6ZVVKZ3.js.map +0 -1
  46. package/dist/chunk-NYOGHGIW.js.map +0 -1
  47. package/dist/chunk-YAAT66RT.js.map +0 -1
  48. package/dist/feedback-writer-T2WCT6EZ.js.map +0 -1
  49. /package/dist/{agents-I6RAEGL5.js.map → agents-M2ZOZL3P.js.map} +0 -0
  50. /package/dist/{chunk-ZN5RHWGR.js.map → chunk-4R6ATXYI.js.map} +0 -0
  51. /package/dist/{chunk-SUM2WVPF.js.map → chunk-GM22HPYS.js.map} +0 -0
  52. /package/dist/{chunk-43F4LDZ4.js.map → chunk-VVTAPQOI.js.map} +0 -0
  53. /package/dist/{projects-3CRF57ZU.js.map → factory-KKT7324R.js.map} +0 -0
  54. /package/dist/{merge-agent-ZITLVF2B.js.map → merge-agent-756U4NPX.js.map} +0 -0
  55. /package/dist/{review-status-J2YJGL3E.js.map → projects-BPGM6IFB.js.map} +0 -0
  56. /package/dist/{remote-workspace-M4IULGFZ.js.map → remote-workspace-LKRDGYEB.js.map} +0 -0
  57. /package/dist/{specialist-logs-KPC45SZN.js.map → review-status-E77PZZWG.js.map} +0 -0
  58. /package/dist/{specialist-context-W25PPWM4.js.map → specialist-context-UBVUUFJV.js.map} +0 -0
  59. /package/dist/{specialists-H4LGYR7R.js.map → specialist-logs-FQRI3AIS.js.map} +0 -0
  60. /package/dist/{traefik-QXLZ4PO2.js.map → specialists-CXRGSJY3.js.map} +0 -0
  61. /package/dist/{workspace-manager-G6TTBPC3.js.map → traefik-X2IWTUHO.js.map} +0 -0
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../src/lib/cv.ts","../src/lib/cloister/config.ts","../src/lib/agents.ts"],"sourcesContent":["/**\n * Agent CV (Work History) System\n *\n * Tracks agent performance over time to enable capability-based routing.\n */\n\nimport { existsSync, mkdirSync, readFileSync, writeFileSync, readdirSync } from 'fs';\nimport { join } from 'path';\nimport { AGENTS_DIR } from './paths.js';\n\nexport interface WorkEntry {\n issueId: string;\n startedAt: string;\n completedAt?: string;\n outcome: 'success' | 'failed' | 'abandoned' | 'in_progress';\n duration?: number; // minutes\n skills?: string[];\n failureReason?: string;\n commits?: number;\n linesChanged?: number;\n}\n\nexport interface AgentCV {\n agentId: string;\n createdAt: string;\n lastActive: string;\n runtime: string;\n model: string;\n stats: {\n totalIssues: number;\n successCount: number;\n failureCount: number;\n abandonedCount: number;\n avgDuration: number; // minutes\n successRate: number; // 0-1\n };\n skillsUsed: string[];\n recentWork: WorkEntry[];\n}\n\nfunction getCVFile(agentId: string): string {\n return join(AGENTS_DIR, agentId, 'cv.json');\n}\n\n/**\n * Get or create an agent's CV\n */\nexport function getAgentCV(agentId: string): AgentCV {\n const cvFile = getCVFile(agentId);\n\n if (existsSync(cvFile)) {\n try {\n return JSON.parse(readFileSync(cvFile, 'utf-8'));\n } catch {}\n }\n\n // Create new CV\n const cv: AgentCV = {\n agentId,\n createdAt: new Date().toISOString(),\n lastActive: new Date().toISOString(),\n runtime: 'claude',\n model: 'sonnet',\n stats: {\n totalIssues: 0,\n successCount: 0,\n failureCount: 0,\n abandonedCount: 0,\n avgDuration: 0,\n successRate: 0,\n },\n skillsUsed: [],\n recentWork: [],\n };\n\n saveAgentCV(cv);\n return cv;\n}\n\n/**\n * Save an agent's CV\n */\nexport function saveAgentCV(cv: AgentCV): void {\n const dir = join(AGENTS_DIR, cv.agentId);\n mkdirSync(dir, { recursive: true });\n writeFileSync(getCVFile(cv.agentId), JSON.stringify(cv, 
null, 2));\n}\n\n/**\n * Start tracking work for an agent\n */\nexport function startWork(agentId: string, issueId: string, skills?: string[]): void {\n const cv = getAgentCV(agentId);\n\n const entry: WorkEntry = {\n issueId,\n startedAt: new Date().toISOString(),\n outcome: 'in_progress',\n skills,\n };\n\n cv.recentWork.unshift(entry);\n cv.stats.totalIssues++;\n cv.lastActive = new Date().toISOString();\n\n // Track skills\n if (skills) {\n for (const skill of skills) {\n if (!cv.skillsUsed.includes(skill)) {\n cv.skillsUsed.push(skill);\n }\n }\n }\n\n // Keep only last 50 entries\n if (cv.recentWork.length > 50) {\n cv.recentWork = cv.recentWork.slice(0, 50);\n }\n\n saveAgentCV(cv);\n}\n\n/**\n * Complete work for an agent\n */\nexport function completeWork(\n agentId: string,\n issueId: string,\n outcome: 'success' | 'failed' | 'abandoned',\n details?: { commits?: number; linesChanged?: number; failureReason?: string }\n): void {\n const cv = getAgentCV(agentId);\n\n // Find the work entry\n const entry = cv.recentWork.find(\n (w) => w.issueId === issueId && w.outcome === 'in_progress'\n );\n\n if (entry) {\n entry.outcome = outcome;\n entry.completedAt = new Date().toISOString();\n entry.duration = Math.round(\n (new Date().getTime() - new Date(entry.startedAt).getTime()) / (1000 * 60)\n );\n if (details?.commits) entry.commits = details.commits;\n if (details?.linesChanged) entry.linesChanged = details.linesChanged;\n if (details?.failureReason) entry.failureReason = details.failureReason;\n }\n\n // Update stats\n if (outcome === 'success') {\n cv.stats.successCount++;\n } else if (outcome === 'failed') {\n cv.stats.failureCount++;\n } else if (outcome === 'abandoned') {\n cv.stats.abandonedCount++;\n }\n\n // Calculate success rate\n const completed = cv.stats.successCount + cv.stats.failureCount + cv.stats.abandonedCount;\n cv.stats.successRate = completed > 0 ? 
cv.stats.successCount / completed : 0;\n\n // Calculate average duration (only from completed work)\n const completedEntries = cv.recentWork.filter(\n (w) => w.duration !== undefined && w.outcome !== 'in_progress'\n );\n if (completedEntries.length > 0) {\n const totalDuration = completedEntries.reduce((sum, w) => sum + (w.duration || 0), 0);\n cv.stats.avgDuration = Math.round(totalDuration / completedEntries.length);\n }\n\n cv.lastActive = new Date().toISOString();\n saveAgentCV(cv);\n}\n\n/**\n * Get agent rankings by success rate\n */\nexport function getAgentRankings(): Array<{\n agentId: string;\n successRate: number;\n totalIssues: number;\n avgDuration: number;\n}> {\n const rankings: Array<{\n agentId: string;\n successRate: number;\n totalIssues: number;\n avgDuration: number;\n }> = [];\n\n if (!existsSync(AGENTS_DIR)) return rankings;\n\n const dirs = readdirSync(AGENTS_DIR, { withFileTypes: true }).filter(\n (d) => d.isDirectory()\n );\n\n for (const dir of dirs) {\n const cv = getAgentCV(dir.name);\n if (cv.stats.totalIssues > 0) {\n rankings.push({\n agentId: dir.name,\n successRate: cv.stats.successRate,\n totalIssues: cv.stats.totalIssues,\n avgDuration: cv.stats.avgDuration,\n });\n }\n }\n\n // Sort by success rate, then by total issues\n rankings.sort((a, b) => {\n if (b.successRate !== a.successRate) {\n return b.successRate - a.successRate;\n }\n return b.totalIssues - a.totalIssues;\n });\n\n return rankings;\n}\n\n/**\n * Format CV for display\n */\nexport function formatCV(cv: AgentCV): string {\n const lines: string[] = [\n `# Agent CV: ${cv.agentId}`,\n '',\n `Runtime: ${cv.runtime} (${cv.model})`,\n `Created: ${cv.createdAt}`,\n `Last Active: ${cv.lastActive}`,\n '',\n '## Statistics',\n '',\n `- Total Issues: ${cv.stats.totalIssues}`,\n `- Success Rate: ${(cv.stats.successRate * 100).toFixed(1)}%`,\n `- Successes: ${cv.stats.successCount}`,\n `- Failures: ${cv.stats.failureCount}`,\n `- Abandoned: ${cv.stats.abandonedCount}`,\n `- Avg 
Duration: ${cv.stats.avgDuration} minutes`,\n '',\n ];\n\n if (cv.skillsUsed.length > 0) {\n lines.push('## Skills Used');\n lines.push('');\n lines.push(cv.skillsUsed.join(', '));\n lines.push('');\n }\n\n if (cv.recentWork.length > 0) {\n lines.push('## Recent Work');\n lines.push('');\n\n for (const work of cv.recentWork.slice(0, 10)) {\n const statusIcon = {\n success: '✓',\n failed: '✗',\n abandoned: '⊘',\n in_progress: '●',\n }[work.outcome];\n\n const duration = work.duration ? ` (${work.duration}m)` : '';\n lines.push(`${statusIcon} ${work.issueId}${duration}`);\n\n if (work.failureReason) {\n lines.push(` Reason: ${work.failureReason}`);\n }\n }\n lines.push('');\n }\n\n return lines.join('\\n');\n}\n","/**\n * Cloister Configuration\n *\n * Loads and manages Cloister configuration from ~/.panopticon/cloister.toml\n */\n\nimport { readFileSync, writeFileSync, existsSync, mkdirSync } from 'fs';\nimport { parse, stringify } from '@iarna/toml';\nimport { join } from 'path';\nimport { PANOPTICON_HOME } from '../paths.js';\n\nconst CLOISTER_CONFIG_FILE = join(PANOPTICON_HOME, 'cloister.toml');\n\n/**\n * Health threshold configuration (in minutes)\n */\nexport interface HealthThresholds {\n stale: number;\n warning: number;\n stuck: number;\n}\n\n/**\n * Automatic action configuration\n */\nexport interface AutoActions {\n poke_on_warning: boolean;\n kill_on_stuck: boolean;\n restart_on_kill: boolean;\n}\n\n/**\n * Monitoring configuration\n */\nexport interface MonitoringConfig {\n check_interval: number; // seconds between health checks\n heartbeat_sources: ('jsonl_mtime' | 'tmux_activity' | 'git_activity' | 'active_heartbeat')[];\n}\n\n/**\n * Startup configuration\n */\nexport interface StartupConfig {\n auto_start: boolean; // Start Cloister when dashboard starts\n}\n\n/**\n * Notification configuration (future feature)\n */\nexport interface NotificationConfig {\n slack_webhook?: string;\n email?: string;\n}\n\n/**\n * Specialist agent configuration\n 
*/\nexport interface SpecialistConfig {\n enabled: boolean;\n auto_wake: boolean;\n}\n\n/**\n * Test agent specific configuration\n */\nexport interface TestAgentConfig extends SpecialistConfig {\n test_command?: string; // Optional test command override (e.g., \"npm test\", \"pytest\", etc.)\n}\n\n/**\n * All specialist agents configuration\n */\nexport interface SpecialistsConfig {\n merge_agent?: SpecialistConfig;\n review_agent?: SpecialistConfig;\n test_agent?: TestAgentConfig;\n}\n\n/**\n * Model selection configuration\n */\nexport interface ModelSelectionConfig {\n default_model: 'opus' | 'sonnet' | 'haiku';\n complexity_routing: {\n trivial: 'opus' | 'sonnet' | 'haiku';\n simple: 'opus' | 'sonnet' | 'haiku';\n medium: 'opus' | 'sonnet' | 'haiku';\n complex: 'opus' | 'sonnet' | 'haiku';\n expert: 'opus' | 'sonnet' | 'haiku';\n };\n specialist_models: {\n merge_agent: 'opus' | 'sonnet' | 'haiku';\n review_agent: 'opus' | 'sonnet' | 'haiku';\n test_agent: 'opus' | 'sonnet' | 'haiku';\n };\n}\n\n/**\n * Handoff trigger configuration\n */\nexport interface HandoffTriggersConfig {\n stuck_escalation?: {\n enabled: boolean;\n haiku_to_sonnet_minutes: number;\n sonnet_to_opus_minutes: number;\n };\n test_failure?: {\n enabled: boolean;\n from_model: 'opus' | 'sonnet' | 'haiku';\n to_model: 'opus' | 'sonnet' | 'haiku';\n trigger_on: 'any_failure' | '2_consecutive';\n };\n implementation_complete?: {\n enabled: boolean;\n to_specialist: string; // e.g., 'test-agent'\n };\n}\n\n/**\n * Handoff configuration\n */\nexport interface HandoffConfig {\n auto_triggers: HandoffTriggersConfig;\n}\n\n/**\n * Cost tracking configuration\n */\nexport interface CostTrackingConfig {\n display_enabled: boolean;\n log_to_jsonl: boolean;\n}\n\n/**\n * Auto-restart configuration\n */\nexport interface AutoRestartConfig {\n enabled: boolean;\n max_retries: number;\n backoff_seconds: number[]; // Array of backoff delays (e.g., [30, 60, 120])\n}\n\n/**\n * Cost limits configuration\n 
*/\nexport interface CostLimitsConfig {\n per_agent_usd: number;\n per_issue_usd: number;\n daily_total_usd: number;\n alert_threshold: number; // Fraction (0.0-1.0) at which to start alerting\n}\n\n/**\n * Retention policy configuration\n */\nexport interface RetentionConfig {\n agent_state_days: number; // Days to keep agent state dirs (default: 30)\n health_staleness_hours: number; // Hours before hiding stale agents in health API (default: 24)\n}\n\n/**\n * Complete Cloister configuration\n */\nexport interface CloisterConfig {\n startup: StartupConfig;\n thresholds: HealthThresholds;\n auto_actions: AutoActions;\n monitoring: MonitoringConfig;\n notifications?: NotificationConfig;\n specialists?: SpecialistsConfig;\n model_selection?: ModelSelectionConfig;\n handoffs?: HandoffConfig;\n cost_tracking?: CostTrackingConfig;\n auto_restart?: AutoRestartConfig;\n cost_limits?: CostLimitsConfig;\n retention?: RetentionConfig;\n}\n\n/**\n * Default Cloister configuration\n */\nexport const DEFAULT_CLOISTER_CONFIG: CloisterConfig = {\n startup: {\n auto_start: true,\n },\n thresholds: {\n stale: 5,\n warning: 15,\n stuck: 30,\n },\n auto_actions: {\n poke_on_warning: true,\n kill_on_stuck: false, // Manual by default for safety\n restart_on_kill: false,\n },\n monitoring: {\n check_interval: 60, // 1 minute\n heartbeat_sources: ['jsonl_mtime', 'tmux_activity', 'git_activity'],\n },\n notifications: {\n slack_webhook: undefined,\n email: undefined,\n },\n specialists: {\n merge_agent: {\n enabled: true,\n auto_wake: false, // Only wake on explicit \"Approve & Merge\" click\n },\n review_agent: {\n enabled: true,\n auto_wake: false, // Only wake on explicit request\n },\n test_agent: {\n enabled: false, // Not yet implemented\n auto_wake: false,\n },\n },\n model_selection: {\n default_model: 'sonnet',\n complexity_routing: {\n trivial: 'haiku',\n simple: 'haiku',\n medium: 'sonnet',\n complex: 'sonnet',\n expert: 'opus',\n },\n specialist_models: {\n merge_agent: 
'sonnet',\n review_agent: 'sonnet',\n test_agent: 'haiku',\n },\n },\n handoffs: {\n auto_triggers: {\n stuck_escalation: {\n enabled: true,\n haiku_to_sonnet_minutes: 10,\n sonnet_to_opus_minutes: 20,\n },\n test_failure: {\n enabled: true,\n from_model: 'haiku',\n to_model: 'sonnet',\n trigger_on: 'any_failure',\n },\n implementation_complete: {\n enabled: true, // Auto-handoff to test-agent when implementation done\n to_specialist: 'test-agent',\n },\n },\n },\n cost_tracking: {\n display_enabled: true,\n log_to_jsonl: true,\n },\n auto_restart: {\n enabled: true,\n max_retries: 3,\n backoff_seconds: [30, 60, 120], // 30s, 1m, 2m\n },\n cost_limits: {\n per_agent_usd: 10.0,\n per_issue_usd: 25.0,\n daily_total_usd: 100.0,\n alert_threshold: 0.8, // Alert at 80%\n },\n retention: {\n agent_state_days: 30,\n health_staleness_hours: 24,\n },\n};\n\n/**\n * Deep merge utility that recursively merges objects.\n * - Recursively merges nested objects\n * - Arrays in overrides replace defaults (not concatenated)\n * - User values take precedence over defaults\n */\nfunction deepMerge<T extends object>(defaults: T, overrides: Partial<T>): T {\n const result = { ...defaults };\n\n for (const key of Object.keys(overrides) as (keyof T)[]) {\n const defaultVal = defaults[key];\n const overrideVal = overrides[key];\n\n // Skip undefined values in overrides\n if (overrideVal === undefined) continue;\n\n // Deep merge if both values are non-array objects\n if (\n typeof defaultVal === 'object' &&\n defaultVal !== null &&\n !Array.isArray(defaultVal) &&\n typeof overrideVal === 'object' &&\n overrideVal !== null &&\n !Array.isArray(overrideVal)\n ) {\n result[key] = deepMerge(defaultVal as any, overrideVal as any);\n } else {\n // Direct override for primitives and arrays\n result[key] = overrideVal as T[keyof T];\n }\n }\n\n return result;\n}\n\n/**\n * Load Cloister configuration\n *\n * Reads from ~/.panopticon/cloister.toml and merges with defaults.\n * Creates default 
config file if it doesn't exist.\n */\nexport function loadCloisterConfig(): CloisterConfig {\n // Ensure panopticon home exists\n if (!existsSync(PANOPTICON_HOME)) {\n mkdirSync(PANOPTICON_HOME, { recursive: true });\n }\n\n // If config file doesn't exist, create it with defaults\n if (!existsSync(CLOISTER_CONFIG_FILE)) {\n saveCloisterConfig(DEFAULT_CLOISTER_CONFIG);\n return DEFAULT_CLOISTER_CONFIG;\n }\n\n try {\n const content = readFileSync(CLOISTER_CONFIG_FILE, 'utf-8');\n const parsed = parse(content) as unknown as Partial<CloisterConfig>;\n\n // Deep merge with defaults\n return deepMerge(DEFAULT_CLOISTER_CONFIG, parsed);\n } catch (error) {\n console.error('Failed to load Cloister config:', error);\n console.error('Using default configuration');\n return DEFAULT_CLOISTER_CONFIG;\n }\n}\n\n/**\n * Save Cloister configuration\n *\n * Writes configuration to ~/.panopticon/cloister.toml\n */\nexport function saveCloisterConfig(config: CloisterConfig): void {\n // Ensure panopticon home exists\n if (!existsSync(PANOPTICON_HOME)) {\n mkdirSync(PANOPTICON_HOME, { recursive: true });\n }\n\n try {\n const content = stringify(config as any);\n writeFileSync(CLOISTER_CONFIG_FILE, content, 'utf-8');\n } catch (error) {\n console.error('Failed to save Cloister config:', error);\n throw error;\n }\n}\n\n/**\n * Update Cloister configuration\n *\n * Merges partial config updates with existing config.\n */\nexport function updateCloisterConfig(updates: Partial<CloisterConfig>): CloisterConfig {\n const current = loadCloisterConfig();\n const updated = deepMerge(current, updates);\n saveCloisterConfig(updated);\n return updated;\n}\n\n/**\n * Get the path to the Cloister config file\n */\nexport function getCloisterConfigPath(): string {\n return CLOISTER_CONFIG_FILE;\n}\n\n/**\n * Check if Cloister should auto-start\n */\nexport function shouldAutoStart(): boolean {\n const config = loadCloisterConfig();\n return config.startup.auto_start;\n}\n\n/**\n * Get health 
thresholds in milliseconds\n */\nexport function getHealthThresholdsMs(): {\n stale: number;\n warning: number;\n stuck: number;\n} {\n const config = loadCloisterConfig();\n return {\n stale: config.thresholds.stale * 60 * 1000,\n warning: config.thresholds.warning * 60 * 1000,\n stuck: config.thresholds.stuck * 60 * 1000,\n };\n}\n","import { existsSync, mkdirSync, writeFileSync, readFileSync, readdirSync, appendFileSync, unlinkSync, statSync } from 'fs';\nimport { join, resolve } from 'path';\nimport { homedir } from 'os';\nimport { exec } from 'child_process';\nimport { promisify } from 'util';\nimport { AGENTS_DIR } from './paths.js';\nimport { createSession, killSession, sendKeys, sendKeysAsync, sessionExists, getAgentSessions, capturePane } from './tmux.js';\nimport { initHook, checkHook, generateFixedPointPrompt } from './hooks.js';\nimport { startWork, completeWork, getAgentCV } from './cv.js';\nimport type { ComplexityLevel } from './cloister/complexity.js';\nimport { loadCloisterConfig } from './cloister/config.js';\nimport { loadSettings, type ModelId } from './settings.js';\nimport { getModelId, WorkTypeId } from './work-type-router.js';\nimport { getProviderForModel, getProviderEnv, setupCredentialFileAuth, clearCredentialFileAuth, requiresRouter } from './providers.js';\nimport { loadConfig } from './config.js';\nimport { createTrackerFromConfig, createTracker } from './tracker/factory.js';\nimport type { IssueState } from './tracker/interface.js';\nimport { findProjectByPath } from './projects.js';\n\nconst execAsync = promisify(exec);\n\n/** Known agent ID prefixes — IDs with these prefixes are already normalized */\nconst AGENT_PREFIXES = ['agent-', 'planning-'];\n\n/** Normalize agent ID: preserve known prefixes, add 'agent-' for bare issue IDs */\nfunction normalizeAgentId(agentId: string): string {\n if (AGENT_PREFIXES.some(p => agentId.startsWith(p))) {\n return agentId;\n }\n return `agent-${agentId.toLowerCase()}`;\n}\n\n/**\n * Get 
provider-specific env vars (BASE_URL, AUTH_TOKEN) for a model.\n * Reads the current API key from settings so resumed/recovered agents\n * always use the latest key.\n */\nfunction getProviderEnvForModel(model: string): Record<string, string> {\n const provider = getProviderForModel(model as ModelId);\n if (provider.name === 'anthropic') return {};\n\n const settings = loadSettings();\n const apiKey = settings.api_keys?.[provider.name as keyof typeof settings.api_keys];\n if (apiKey) {\n return getProviderEnv(provider, apiKey);\n }\n console.warn(`Warning: No API key configured for ${provider.displayName}, falling back to Anthropic`);\n return {};\n}\n\n// ============================================================================\n// Ready Signal Management (PAN-87)\n// ============================================================================\n\n/**\n * Get path to agent's ready signal file (written by SessionStart hook)\n */\nfunction getReadySignalPath(agentId: string): string {\n return join(getAgentDir(agentId), 'ready.json');\n}\n\n/**\n * Clear ready signal before spawning (clean slate)\n */\nfunction clearReadySignal(agentId: string): void {\n const readyPath = getReadySignalPath(agentId);\n if (existsSync(readyPath)) {\n try {\n unlinkSync(readyPath);\n } catch {\n // Ignore errors - non-critical\n }\n }\n}\n\n/**\n * Wait for SessionStart hook to signal ready (async - non-blocking)\n * Returns true if ready signal received, false if timeout\n */\nasync function waitForReadySignal(agentId: string, timeoutSeconds = 30): Promise<boolean> {\n const readyPath = getReadySignalPath(agentId);\n\n for (let i = 0; i < timeoutSeconds; i++) {\n await new Promise(resolve => setTimeout(resolve, 1000)); // Non-blocking sleep\n\n if (existsSync(readyPath)) {\n try {\n const content = readFileSync(readyPath, 'utf-8');\n const signal = JSON.parse(content);\n if (signal.ready === true) {\n return true;\n }\n } catch {\n // File exists but invalid - keep waiting\n }\n 
}\n }\n\n return false;\n}\n\nexport interface AgentState {\n id: string;\n issueId: string;\n workspace: string;\n runtime: string;\n model: string;\n status: 'starting' | 'running' | 'stopped' | 'error';\n startedAt: string;\n lastActivity?: string;\n branch?: string; // Git branch name for this agent\n\n // Model routing & handoffs (Phase 4)\n complexity?: ComplexityLevel;\n handoffCount?: number;\n costSoFar?: number;\n sessionId?: string; // For resuming sessions after handoff\n\n // Work type system (PAN-118)\n phase?: 'exploration' | 'implementation' | 'testing' | 'documentation' | 'review-response';\n workType?: WorkTypeId; // Current work type ID\n\n // SageOx session tracking (PAN-278)\n sageoxSessionPath?: string; // Path to SageOx session folder for parent linking\n}\n\nexport function getAgentDir(agentId: string): string {\n return join(AGENTS_DIR, agentId);\n}\n\nexport function getAgentState(agentId: string): AgentState | null {\n const stateFile = join(getAgentDir(agentId), 'state.json');\n if (!existsSync(stateFile)) return null;\n\n const content = readFileSync(stateFile, 'utf8');\n return JSON.parse(content);\n}\n\nexport function saveAgentState(state: AgentState): void {\n const dir = getAgentDir(state.id);\n mkdirSync(dir, { recursive: true });\n\n writeFileSync(\n join(dir, 'state.json'),\n JSON.stringify(state, null, 2)\n );\n}\n\n// ============================================================================\n// Hook-based State Management (PAN-80)\n// ============================================================================\n\n/**\n * Agent runtime state (hook-based tracking)\n */\nexport type AgentResolution = 'working' | 'done' | 'needs_input' | 'stuck' | 'completed' | 'unclear';\n\nexport interface AgentRuntimeState {\n state: 'active' | 'idle' | 'suspended' | 'stopped' | 'uninitialized';\n lastActivity: string;\n currentTool?: string;\n sessionId?: string;\n suspendedAt?: string;\n resumedAt?: string;\n currentIssue?: string; // 
Issue ID the agent is currently working on\n resolution?: AgentResolution; // Lifecycle completion signal (PAN-309)\n resolutionCount?: number; // How many times this resolution was set\n resolutionUpdatedAt?: string; // When resolution was last updated\n}\n\n/**\n * Activity log entry\n */\nexport interface ActivityEntry {\n ts: string;\n tool: string;\n action?: string;\n state?: 'active' | 'idle';\n}\n\n/**\n * Get the path to an agent's runtime state file (separate from config state)\n */\nexport function getAgentRuntimeFile(agentId: string): string {\n return join(getAgentDir(agentId), 'runtime.json');\n}\n\n/**\n * Get agent runtime state (from hooks)\n *\n * Reads from runtime.json (new) with fallback to state.json (legacy migration).\n * This separation prevents bash hooks from corrupting AgentState config.\n */\nexport function getAgentRuntimeState(agentId: string): AgentRuntimeState | null {\n const runtimeFile = getAgentRuntimeFile(agentId);\n const stateFile = join(getAgentDir(agentId), 'state.json');\n\n // Try runtime.json first (new location)\n if (existsSync(runtimeFile)) {\n try {\n const content = readFileSync(runtimeFile, 'utf8');\n return JSON.parse(content) as AgentRuntimeState;\n } catch {\n // Fall through to legacy\n }\n }\n\n // Fallback to state.json (legacy — runtime fields were mixed in)\n if (existsSync(stateFile)) {\n try {\n const content = readFileSync(stateFile, 'utf8');\n const parsed = JSON.parse(content);\n // Only use if it has runtime-specific fields\n if (parsed.state && parsed.lastActivity) {\n return parsed as AgentRuntimeState;\n }\n } catch {\n // Ignore\n }\n }\n\n // No state at all — uninitialized\n if (!existsSync(stateFile) && !existsSync(runtimeFile)) {\n return {\n state: 'uninitialized',\n lastActivity: new Date().toISOString(),\n };\n }\n\n return null;\n}\n\n/**\n * Save agent runtime state to runtime.json (separate from AgentState config)\n *\n * This writes ONLY to runtime.json, never touching state.json.\n * 
This separation is critical: bash hooks write runtime.json on every tool call,\n * while AgentState in state.json is only written at lifecycle events (spawn/stop/handoff).\n */\nexport function saveAgentRuntimeState(agentId: string, state: Partial<AgentRuntimeState>): void {\n const dir = getAgentDir(agentId);\n mkdirSync(dir, { recursive: true });\n\n const runtimeFile = getAgentRuntimeFile(agentId);\n\n // Merge with existing runtime state (read from runtime.json only, not state.json)\n let existing: AgentRuntimeState | null = null;\n if (existsSync(runtimeFile)) {\n try {\n existing = JSON.parse(readFileSync(runtimeFile, 'utf8'));\n } catch {\n // Ignore corrupt file\n }\n }\n\n const merged: AgentRuntimeState = {\n ...(existing || { state: 'uninitialized', lastActivity: new Date().toISOString() }),\n ...state,\n };\n\n writeFileSync(runtimeFile, JSON.stringify(merged, null, 2));\n}\n\n/**\n * Append to activity log with automatic pruning to 100 entries\n */\nexport function appendActivity(agentId: string, entry: ActivityEntry): void {\n const dir = getAgentDir(agentId);\n mkdirSync(dir, { recursive: true });\n\n const activityFile = join(dir, 'activity.jsonl');\n\n // Append entry\n appendFileSync(activityFile, JSON.stringify(entry) + '\\n');\n\n // Prune to last 100 entries\n if (existsSync(activityFile)) {\n try {\n const lines = readFileSync(activityFile, 'utf8').trim().split('\\n');\n if (lines.length > 100) {\n const trimmed = lines.slice(-100);\n writeFileSync(activityFile, trimmed.join('\\n') + '\\n');\n }\n } catch (error) {\n // Ignore pruning errors - activity log is non-critical\n }\n }\n}\n\n/**\n * Read activity log (last N entries)\n */\nexport function getActivity(agentId: string, limit = 100): ActivityEntry[] {\n const activityFile = join(getAgentDir(agentId), 'activity.jsonl');\n\n if (!existsSync(activityFile)) {\n return [];\n }\n\n try {\n const lines = readFileSync(activityFile, 'utf8').trim().split('\\n');\n const entries = lines\n 
.filter(line => line.trim())\n .map(line => JSON.parse(line) as ActivityEntry)\n .slice(-limit);\n\n return entries;\n } catch {\n return [];\n }\n}\n\n/**\n * Save Claude session ID for later resume\n */\nexport function saveSessionId(agentId: string, sessionId: string): void {\n const dir = getAgentDir(agentId);\n mkdirSync(dir, { recursive: true });\n\n writeFileSync(join(dir, 'session.id'), sessionId);\n}\n\n/**\n * Get saved Claude session ID\n */\nexport function getSessionId(agentId: string): string | null {\n const sessionFile = join(getAgentDir(agentId), 'session.id');\n\n if (!existsSync(sessionFile)) {\n return null;\n }\n\n try {\n return readFileSync(sessionFile, 'utf8').trim();\n } catch {\n return null;\n }\n}\n\nexport interface SpawnOptions {\n issueId: string;\n workspace: string;\n runtime?: string;\n model?: string;\n prompt?: string;\n difficulty?: ComplexityLevel;\n agentType?: 'review-agent' | 'test-agent' | 'merge-agent' | 'work-agent';\n\n // Work type system (PAN-118)\n phase?: 'exploration' | 'implementation' | 'testing' | 'documentation' | 'review-response';\n workType?: WorkTypeId; // Explicit work type ID (overrides phase-based detection)\n}\n\n/**\n * Determine which model to use for an agent based on configuration\n *\n * New Priority (PAN-118):\n * 1. Explicitly provided model (options.model)\n * 2. Explicit work type ID (options.workType)\n * 3. Work type from phase (options.phase → issue-agent:{phase})\n * 4. Specialist work type (options.agentType → specialist-{type})\n * 5. Complexity-based routing (LEGACY - deprecated)\n * 6. 
Default fallback (claude-sonnet-4-6)\n */\nfunction determineModel(options: SpawnOptions): string {\n console.log(`[DEBUG] determineModel called with:`, { model: options.model, workType: options.workType, phase: options.phase, agentType: options.agentType, difficulty: options.difficulty });\n\n // Explicit model always wins\n if (options.model) {\n console.log(`[DEBUG] Using explicit model: ${options.model}`);\n return options.model;\n }\n\n try {\n // Use work type router if work type or phase specified\n if (options.workType) {\n return getModelId(options.workType);\n }\n\n // Map phase to work type ID\n if (options.phase) {\n const workType: WorkTypeId = `issue-agent:${options.phase}` as WorkTypeId;\n return getModelId(workType);\n }\n\n // Map specialist agent type to work type ID\n if (options.agentType && options.agentType !== 'work-agent') {\n // Specialists: review-agent, test-agent, merge-agent\n const workType: WorkTypeId = `specialist-${options.agentType}` as WorkTypeId;\n return getModelId(workType);\n }\n\n // LEGACY: Complexity-based routing (deprecated but kept for backward compat)\n if (options.difficulty) {\n const settings = loadSettings();\n if (settings.models.complexity[options.difficulty]) {\n console.warn(`Using legacy complexity-based routing for ${options.difficulty}. 
Consider migrating to work types.`);\n return settings.models.complexity[options.difficulty];\n }\n }\n\n // Fall back to default model from Cloister config or claude-sonnet-4-6\n try {\n const cloisterConfig = loadCloisterConfig();\n const defaultModel = cloisterConfig.model_selection?.default_model || 'sonnet';\n const modelMap: Record<string, string> = {\n 'opus': 'claude-opus-4-6',\n 'sonnet': 'claude-sonnet-4-6',\n 'haiku': 'claude-haiku-4-5',\n };\n return modelMap[defaultModel] || 'claude-sonnet-4-6';\n } catch {\n return 'claude-sonnet-4-6';\n }\n } catch (error) {\n // If work type router fails, fall back to default\n console.warn('Warning: Could not resolve model using work type router, using default');\n return options.model || 'claude-sonnet-4-6';\n }\n}\n\n/**\n * Shared tracker resolution logic for issue state transitions.\n *\n * Resolution order:\n * 1. Primary tracker from global config (e.g. Linear)\n * 2. Secondary tracker from global config (if configured)\n * 3. Project-specific tracker derived from the workspace path:\n * looks up the project in projects.yaml and uses its github_repo or gitlab_repo\n *\n * This means projects that only have a github_repo (no linear_team) will\n * still get their issues transitioned correctly without any extra config.\n */\nasync function transitionIssueState(issueId: string, state: IssueState, workspacePath?: string): Promise<void> {\n // Resolve the project from workspacePath — its configured tracker is authoritative.\n // Every issue MUST belong to a registered project with a tracker configured.\n const projectConfig = workspacePath ? findProjectByPath(workspacePath) : null;\n if (!projectConfig) {\n throw new Error(`Cannot transition ${issueId}: no project config found for workspace ${workspacePath || '(none)'}. 
Register the project in projects.yaml.`);\n }\n\n // Project has a Linear team — use Linear tracker\n if (projectConfig.linear_team) {\n const config = loadConfig();\n const trackersConfig = config.trackers;\n if (!trackersConfig?.linear) {\n throw new Error(`Project ${projectConfig.name} uses Linear (team: ${projectConfig.linear_team}) but no Linear tracker is configured in config.yaml`);\n }\n const tracker = createTrackerFromConfig(trackersConfig, 'linear');\n await tracker.transitionIssue(issueId, state);\n console.log(`[agents] Transitioned ${issueId} to ${state} via Linear (team: ${projectConfig.linear_team})`);\n return;\n }\n\n // Project has a GitHub repo — use project-specific GitHub tracker\n if (projectConfig.github_repo) {\n const [owner, repo] = projectConfig.github_repo.split('/');\n const tracker = createTracker({ type: 'github', owner, repo });\n await tracker.transitionIssue(issueId, state);\n console.log(`[agents] Transitioned ${issueId} to ${state} via GitHub (${projectConfig.github_repo})`);\n return;\n }\n\n // Project has a Rally project — use Rally tracker\n if (projectConfig.rally_project) {\n const config = loadConfig();\n const trackersConfig = config.trackers;\n if (!trackersConfig?.rally) {\n throw new Error(`Project ${projectConfig.name} uses Rally (project: ${projectConfig.rally_project}) but no Rally tracker is configured in config.yaml`);\n }\n const tracker = createTrackerFromConfig(trackersConfig, 'rally');\n await tracker.transitionIssue(issueId, state);\n console.log(`[agents] Transitioned ${issueId} to ${state} via Rally (project: ${projectConfig.rally_project})`);\n return;\n }\n\n if (projectConfig.gitlab_repo) {\n console.warn(`[agents] GitLab project detected (${projectConfig.gitlab_repo}) but GitLab does not support ${state} label transitions`);\n return;\n }\n\n throw new Error(`Project ${projectConfig.name} has no tracker configured (need linear_team, github_repo, or rally_project in projects.yaml)`);\n}\n\nexport async 
function transitionIssueToInProgress(issueId: string, workspacePath?: string): Promise<void> {\n return transitionIssueState(issueId, 'in_progress', workspacePath);\n}\n\n/**\n * Transitions an issue to \"in_review\" state in the configured issue tracker.\n * Fire-and-forget — logs warnings on failure but never blocks the pipeline.\n */\nexport async function transitionIssueToInReview(issueId: string, workspacePath?: string): Promise<void> {\n return transitionIssueState(issueId, 'in_review', workspacePath);\n}\n\nexport async function spawnAgent(options: SpawnOptions): Promise<AgentState> {\n const agentId = `agent-${options.issueId.toLowerCase()}`;\n\n // Check if already running\n if (sessionExists(agentId)) {\n throw new Error(`Agent ${agentId} already running. Use 'pan work tell' to message it.`);\n }\n\n // Initialize hook for this agent (FPP support)\n initHook(agentId);\n\n // Determine model based on configuration\n const selectedModel = determineModel(options);\n console.log(`[DEBUG] Selected model: ${selectedModel}`);\n\n // Create state\n const state: AgentState = {\n id: agentId,\n issueId: options.issueId,\n workspace: options.workspace,\n runtime: options.runtime || 'claude',\n model: selectedModel,\n status: 'starting',\n startedAt: new Date().toISOString(),\n // Initialize Phase 4 fields (legacy)\n complexity: options.difficulty,\n handoffCount: 0,\n costSoFar: 0,\n // Work type system (PAN-118)\n phase: options.phase,\n workType: options.workType,\n };\n\n saveAgentState(state);\n\n // Build prompt with FPP work if available\n let prompt = options.prompt || '';\n\n // FPP: Check for pending work on hook\n const { hasWork, items } = checkHook(agentId);\n if (hasWork) {\n const fixedPointPrompt = generateFixedPointPrompt(agentId);\n if (fixedPointPrompt) {\n prompt = fixedPointPrompt + '\\n\\n---\\n\\n' + prompt;\n }\n }\n\n // Write prompt to file for complex prompts (avoids shell escaping issues)\n const promptFile = join(getAgentDir(agentId), 
'initial-prompt.md');\n if (prompt) {\n writeFileSync(promptFile, prompt);\n }\n\n // Auto-setup hooks if not configured\n checkAndSetupHooks();\n\n // Ensure TLDR daemon is running for the workspace (non-blocking, non-fatal)\n try {\n const venvPath = join(options.workspace, '.venv');\n if (existsSync(venvPath)) {\n const { getTldrDaemonService } = await import('./tldr-daemon.js');\n const tldrService = getTldrDaemonService(options.workspace, venvPath);\n const status = await tldrService.getStatus();\n if (!status.running) {\n await tldrService.start(true);\n console.log(`[${agentId}] Started TLDR daemon for workspace`);\n }\n }\n } catch {\n // Non-fatal — agents degrade to direct file reads if TLDR unavailable\n }\n\n // Write initial task cache for heartbeat hook\n writeTaskCache(agentId, options.issueId);\n\n // Clear ready signal before spawning (clean slate for PAN-87 fix)\n clearReadySignal(agentId);\n\n // Get provider-specific environment variables (BASE_URL, AUTH_TOKEN)\n const providerEnv = getProviderEnvForModel(selectedModel);\n\n // For credential-file providers (e.g. Kimi Code Plan), configure apiKeyHelper\n // so Claude Code can refresh short-lived tokens dynamically.\n // For all other providers, CLEAR any stale apiKeyHelper from previous runs\n // (e.g. 
switching from Kimi to Anthropic plan-based auth).\n const provider = getProviderForModel(selectedModel as ModelId);\n if (provider.authType === 'credential-file') {\n setupCredentialFileAuth(provider, options.workspace);\n } else {\n clearCredentialFileAuth(options.workspace);\n }\n\n // Create tmux session and start claude\n // For prompts with special shell characters, use a launcher script to safely pass the prompt\n // The script reads the file into a variable, which bash then safely expands\n let claudeCmd: string;\n if (prompt) {\n const launcherScript = join(getAgentDir(agentId), 'launcher.sh');\n const launcherContent = `#!/bin/bash\nprompt=$(cat \"${promptFile}\")\nexec claude --dangerously-skip-permissions --model ${state.model} \"\\$prompt\"\n`;\n writeFileSync(launcherScript, launcherContent, { mode: 0o755 });\n claudeCmd = `bash \"${launcherScript}\"`;\n } else {\n claudeCmd = `claude --dangerously-skip-permissions --model ${state.model}`;\n }\n\n // Pre-trust workspace directory in Claude Code to avoid the trust prompt\n try {\n const { preTrustDirectory } = await import('./workspace-manager.js') as { preTrustDirectory: (dir: string) => void };\n preTrustDirectory(options.workspace);\n } catch { /* non-fatal */ }\n\n // Build SageOx environment variables for session linking (only if project is SageOx-initialized)\n // Derive project root from workspace path: <project-root>/workspaces/<branch>\n const projectRoot = resolve(options.workspace, '..', '..');\n const sageoxEnabled = existsSync(join(projectRoot, '.sageox'));\n const sageoxEnv: Record<string, string> = {};\n\n if (sageoxEnabled) {\n sageoxEnv.OX_PROJECT_ROOT = projectRoot;\n\n // Add issue tracking for multi-agent pipelines\n if (options.issueId) {\n sageoxEnv.PAN_ISSUE_ID = options.issueId;\n }\n if (options.phase) {\n sageoxEnv.PAN_PHASE = options.phase;\n }\n\n // For non-planner agents, find the planner's session path for parent linking\n if (options.phase && (options.phase as string) 
!== 'planning') {\n const plannerAgentId = `agent-${options.issueId.toLowerCase()}`;\n const plannerState = getAgentState(plannerAgentId);\n if (plannerState?.sageoxSessionPath) {\n sageoxEnv.PAN_PARENT_SESSION = plannerState.sageoxSessionPath;\n }\n }\n }\n\n createSession(agentId, options.workspace, claudeCmd, {\n env: {\n PANOPTICON_AGENT_ID: agentId,\n PANOPTICON_ISSUE_ID: options.issueId,\n PANOPTICON_SESSION_TYPE: options.phase || 'implementation',\n CLAUDE_CODE_ENABLE_PROMPT_SUGGESTION: 'false', // Disable suggested prompts for autonomous agents (PAN-251)\n ...providerEnv, // Add provider-specific env vars (BASE_URL, AUTH_TOKEN, etc.)\n ...sageoxEnv // Add SageOx environment variables\n }\n });\n\n // Update status\n state.status = 'running';\n saveAgentState(state);\n\n // Track work in CV\n startWork(agentId, options.issueId);\n\n // Transition issue tracker to \"in progress\" (best-effort, don't block agent spawn)\n // Only for work agents, not planning/specialist agents\n if (!options.agentType || options.agentType === 'work-agent') {\n transitionIssueToInProgress(options.issueId, options.workspace).catch((err) => {\n console.warn(`[agents] Could not transition ${options.issueId} to in_progress: ${err.message}`);\n });\n }\n\n // For planner agents, capture SageOx session path after it becomes available\n if (sageoxEnabled && (options.phase as string) === 'planning') {\n captureSageoxSessionPath(agentId, projectRoot).catch((err) => {\n console.warn(`[agents] Could not capture SageOx session path: ${err.message}`);\n });\n }\n\n return state;\n}\n\nexport function listRunningAgents(): (AgentState & { tmuxActive: boolean })[] {\n const tmuxSessions = getAgentSessions();\n const tmuxNames = new Set(tmuxSessions.map(s => s.name));\n\n const agents: (AgentState & { tmuxActive: boolean })[] = [];\n\n // Read all agent states\n if (!existsSync(AGENTS_DIR)) return agents;\n\n const dirs = readdirSync(AGENTS_DIR, { withFileTypes: true })\n .filter(d => 
d.isDirectory());

  for (const dir of dirs) {
    const state = getAgentState(dir.name);
    if (state) {
      agents.push({
        ...state,
        tmuxActive: tmuxNames.has(state.id),
      });
    }
  }

  return agents;
}

/**
 * Stop an agent: capture its tmux output for post-mortem viewing, kill the
 * session, and mark both state.json and runtime.json as stopped.
 */
export function stopAgent(agentId: string): void {
  const normalizedId = normalizeAgentId(agentId);

  if (sessionExists(normalizedId)) {
    // Capture tmux output before killing so logs remain viewable after stop
    try {
      const output = capturePane(normalizedId, 5000);
      if (output) {
        const agentDir = getAgentDir(normalizedId);
        mkdirSync(agentDir, { recursive: true });
        writeFileSync(join(agentDir, 'output.log'), output);
      }
    } catch {
      // Non-fatal — best effort log capture
    }

    killSession(normalizedId);
  }

  const state = getAgentState(normalizedId);
  if (state) {
    // Ensure id is set — runtime state files may lack it (PAN-150)
    if (!state.id) state.id = normalizedId;

    state.status = 'stopped';
    saveAgentState(state);
  }

  // Also mark runtime.json as stopped so Cloister/Deacon won't auto-restart.
  // state.json and runtime.json are separate files — both must agree the agent
  // was intentionally stopped to prevent race conditions with health check polls.
  saveAgentRuntimeState(normalizedId, { state: 'stopped' });
}

/**
 * Deliver a message to an agent, reviving it if necessary.
 *
 * Order of handling: suspended agents are auto-resumed (message delivered
 * during resume); stopped agents are auto-restarted with a feedback prompt;
 * remote agents are forwarded over the VM channel; otherwise the message is
 * typed into the live tmux session. In every path the message is also
 * persisted to the agent's mail/ queue.
 *
 * @throws Error when the agent is neither suspended, stopped, remote, nor running.
 */
export async function messageAgent(agentId: string, message: string): Promise<void> {
  const normalizedId = normalizeAgentId(agentId);

  // Check if agent is suspended - auto-resume if so (PAN-80)
  const runtimeState = getAgentRuntimeState(normalizedId);
  if (runtimeState?.state === 'suspended') {
    console.log(`[agents] Auto-resuming suspended agent ${normalizedId} to deliver message`);
    const result = await resumeAgent(normalizedId, message);
    if (!result.success) {
      throw new Error(`Failed to auto-resume agent: ${result.error}`);
    }
    // Message already sent during resume
    return;
  }

  // Check if agent is stopped — auto-restart to deliver feedback (PAN-367)
  const agentState = getAgentState(normalizedId);
  if (agentState && agentState.status === 'stopped' && !sessionExists(normalizedId)) {
    console.log(`[agents] Auto-restarting stopped agent ${normalizedId} to deliver feedback`);

    const providerEnv = agentState.model ? getProviderEnvForModel(agentState.model) : {};
    if (agentState.model) {
      const provider = getProviderForModel(agentState.model as ModelId);
      if (provider.authType === 'credential-file') {
        setupCredentialFileAuth(provider, agentState.workspace);
      } else {
        clearCredentialFileAuth(agentState.workspace);
      }
    }

    clearReadySignal(normalizedId);
    const claudeCmd = `claude --dangerously-skip-permissions --model ${agentState.model || 'claude-sonnet-4-6'} "You are resuming work on ${agentState.issueId}. Check .planning/feedback/ for specialist feedback that arrived while you were stopped, then continue working."`;
    createSession(normalizedId, agentState.workspace, claudeCmd, {
      env: {
        PANOPTICON_AGENT_ID: normalizedId,
        PANOPTICON_ISSUE_ID: agentState.issueId || '',
        PANOPTICON_SESSION_TYPE: agentState.phase || 'implementation',
        CLAUDE_CODE_ENABLE_PROMPT_SUGGESTION: 'false',
        ...providerEnv
      }
    });

    agentState.status = 'running';
    agentState.lastActivity = new Date().toISOString();
    saveAgentState(agentState);

    // Wait for ready, then deliver the message
    const ready = await waitForReadySignal(normalizedId, 30);
    if (ready) {
      await sendKeysAsync(normalizedId, message);
      console.log(`[agents] Restarted ${normalizedId} and delivered feedback`);
    } else {
      console.warn(`[agents] Restarted ${normalizedId} but ready signal not detected — feedback in mail queue`);
    }

    // Save to mail queue regardless
    const mailDir = join(getAgentDir(normalizedId), 'mail');
    mkdirSync(mailDir, { recursive: true });
    const timestamp = new Date().toISOString().replace(/[:.]/g, '-');
    writeFileSync(
      join(mailDir, `${timestamp}.md`),
      `# Message\n\n${message}\n`
    );
    return;
  }

  // Check if this is a remote agent
  const { loadRemoteAgentState, sendToRemoteAgent } = await import('./remote/remote-agents.js');
  const remoteState = loadRemoteAgentState(normalizedId);
  if (remoteState && remoteState.vmName) {
    console.log(`[agents] Sending message to remote agent ${normalizedId} on ${remoteState.vmName}`);
    await sendToRemoteAgent(normalizedId, remoteState.vmName, message);

    // Also save to mail queue for persistence
    const mailDir = join(getAgentDir(normalizedId), 'mail');
    mkdirSync(mailDir, { recursive: true });
    const timestamp = new Date().toISOString().replace(/[:.]/g, '-');
    writeFileSync(
      join(mailDir, `${timestamp}.md`),
      `# Message\n\n${message}\n`
    );
    return;
  }

  if (!sessionExists(normalizedId)) {
    throw new Error(`Agent ${normalizedId} not running`);
  }

  await sendKeysAsync(normalizedId, message);

  // Also save to mail queue
  // NOTE(review): this mail-queue write is duplicated three times in this
  // function — a candidate for extraction into a small private helper.
  const mailDir = join(getAgentDir(normalizedId), 'mail');
  mkdirSync(mailDir, { recursive: true });

  const timestamp = new Date().toISOString().replace(/[:.]/g, '-');
  writeFileSync(
    join(mailDir, `${timestamp}.md`),
    `# Message\n\n${message}\n`
  );
}

/**
 * Resume a suspended agent (PAN-80)
 *
 * Reads saved session ID and creates new tmux session with --resume flag.
 * Optionally sends a message after resuming.
 *
 * Auto-resume triggers:
 * - Specialists: When queued work arrives
 * - Work agents: When message is sent via /work-tell
 */
export async function resumeAgent(agentId: string, message?: string): Promise<{ success: boolean; error?: string }> {
  const normalizedId = normalizeAgentId(agentId);

  // Check runtime state — only 'suspended' agents are resumable
  const runtimeState = getAgentRuntimeState(normalizedId);
  if (!runtimeState || runtimeState.state !== 'suspended') {
    return {
      success: false,
      error: `Cannot resume agent in state: ${runtimeState?.state || 'unknown'}`
    };
  }

  // Get saved session ID
  const sessionId = getSessionId(normalizedId);
  if (!sessionId) {
    return {
      success: false,
      error: 'No saved session ID found'
    };
  }

  // Get agent state for workspace info
  const agentState = getAgentState(normalizedId);
  if (!agentState) {
    return {
      success: false,
      error: 'Agent state not found'
    };
  }

  // Check if session already exists (shouldn't happen for suspended agents)
  if (sessionExists(normalizedId)) {
    return {
      success: false,
      error: 'Agent session already exists'
    };
  }

  try {
    // Clear ready signal before resuming (clean slate for PAN-87 fix)
    clearReadySignal(normalizedId);

    // Get provider env for the agent's model (reads latest API key from settings)
    const providerEnv = agentState.model ? getProviderEnvForModel(agentState.model) : {};

    // For credential-file providers, ensure apiKeyHelper is configured.
    // For all other providers, clear stale apiKeyHelper from previous runs.
    if (agentState.model) {
      const provider = getProviderForModel(agentState.model as ModelId);
      if (provider.authType === 'credential-file') {
        setupCredentialFileAuth(provider, agentState.workspace);
      } else {
        clearCredentialFileAuth(agentState.workspace);
      }
    }

    // Create new tmux session with resume command
    const claudeCmd = `claude --resume "${sessionId}" --dangerously-skip-permissions`;
    createSession(normalizedId, agentState.workspace, claudeCmd, {
      env: {
        PANOPTICON_AGENT_ID: normalizedId,
        PANOPTICON_ISSUE_ID: agentState.issueId || '',
        PANOPTICON_SESSION_TYPE: agentState.phase || 'implementation',
        CLAUDE_CODE_ENABLE_PROMPT_SUGGESTION: 'false',
        ...providerEnv
      }
    });

    // If there's a message, wait for ready signal then send
    if (message) {
      // Wait for SessionStart hook to signal ready (PAN-87: reliable message delivery)
      const ready = await waitForReadySignal(normalizedId, 30);

      if (ready) {
        // Send message
        await sendKeysAsync(normalizedId, message);
      } else {
        console.error('Claude SessionStart hook did not fire during resume, message not sent');
      }
    }

    // Update runtime state
    saveAgentRuntimeState(normalizedId, {
      state: 'active',
      resumedAt: new Date().toISOString(),
    });

    // Update agent state
    if (agentState) {
      agentState.status = 'running';
      agentState.lastActivity = new Date().toISOString();
      saveAgentState(agentState);
    }

    return { success: true };
  } catch (error: unknown) {
    const msg = error instanceof Error ? error.message : String(error);
    return {
      success: false,
      error: `Failed to resume agent: ${msg}`
    };
  }
}

/**
 * Detect crashed agents (state shows running but tmux session is gone)
 */
export function detectCrashedAgents(): AgentState[] {
  const agents = listRunningAgents();
  return agents.filter(
    (agent) => agent.status === 'running' && !agent.tmuxActive
  );
}

/**
 * Recover a crashed agent by restarting it with context
 */
export function recoverAgent(agentId: string): AgentState | null {
  const normalizedId = normalizeAgentId(agentId);
  const state = getAgentState(normalizedId);

  if (!state) {
    return null;
  }

  // Runtime state files may lack required fields (PAN-150)
  if (!state.id) state.id = normalizedId;
  if (!state.workspace || !state.model) {
    console.error(`[agents] Cannot recover ${normalizedId}: state.json missing workspace or model`);
    return null;
  }

  // Check if already running
  if (sessionExists(normalizedId)) {
    return state;
  }

  // Update crash count in health file
  const healthFile = join(getAgentDir(normalizedId), 'health.json');
  let health = { consecutiveFailures: 0, killCount: 0, recoveryCount: 0 };
  if (existsSync(healthFile)) {
    try {
      health = { ...health, ...JSON.parse(readFileSync(healthFile, 'utf-8')) };
    } catch {}
  }
  health.recoveryCount = (health.recoveryCount || 0) + 1;
  writeFileSync(healthFile, JSON.stringify(health, null, 2));

  // Build recovery prompt
  const recoveryPrompt = generateRecoveryPrompt(state);

  // Get provider env for the agent's model (reads latest API key 
from settings)\n const providerEnv = state.model ? getProviderEnvForModel(state.model) : {};\n\n // For credential-file providers, ensure apiKeyHelper is configured.\n // For all other providers, clear stale apiKeyHelper from previous runs.\n if (state.model) {\n const provider = getProviderForModel(state.model as ModelId);\n if (provider.authType === 'credential-file') {\n setupCredentialFileAuth(provider, state.workspace);\n } else {\n clearCredentialFileAuth(state.workspace);\n }\n }\n\n // Restart the agent with recovery context (YOLO mode - skip permissions)\n const claudeCmd = `claude --dangerously-skip-permissions --model ${state.model} \"${recoveryPrompt.replace(/\"/g, '\\\\\"').replace(/\\n/g, '\\\\n')}\"`;\n createSession(normalizedId, state.workspace, claudeCmd, {\n env: {\n PANOPTICON_AGENT_ID: normalizedId,\n PANOPTICON_ISSUE_ID: state.issueId || '',\n PANOPTICON_SESSION_TYPE: state.phase || 'implementation',\n CLAUDE_CODE_ENABLE_PROMPT_SUGGESTION: 'false',\n ...providerEnv\n }\n });\n\n // Update state\n state.status = 'running';\n state.lastActivity = new Date().toISOString();\n saveAgentState(state);\n\n return state;\n}\n\n/**\n * Generate a recovery prompt for a crashed agent\n */\nfunction generateRecoveryPrompt(state: AgentState): string {\n const lines: string[] = [\n '# Agent Recovery',\n '',\n '⚠️ This agent session was recovered after a crash.',\n '',\n '## Previous Context',\n `- Issue: ${state.issueId}`,\n `- Workspace: ${state.workspace}`,\n `- Started: ${state.startedAt}`,\n '',\n '## Recovery Steps',\n '1. Check beads for context: `bd show ' + state.issueId + '`',\n '2. Review recent git commits: `git log --oneline -10`',\n '3. Check hook for pending work: `pan work hook check`',\n '4. 
Resume from last known state',\n '',\n '## FPP Reminder',\n '> \"Any runnable action is a fixed point and must resolve before the system can rest.\"',\n '',\n ];\n\n // Add FPP work if available\n const { hasWork } = checkHook(state.id);\n if (hasWork) {\n const fixedPointPrompt = generateFixedPointPrompt(state.id);\n if (fixedPointPrompt) {\n lines.push('---');\n lines.push('');\n lines.push(fixedPointPrompt);\n }\n }\n\n return lines.join('\\n');\n}\n\n/**\n * Auto-recover all crashed agents\n */\nexport function autoRecoverAgents(): { recovered: string[]; failed: string[] } {\n const crashed = detectCrashedAgents();\n const recovered: string[] = [];\n const failed: string[] = [];\n\n for (const agent of crashed) {\n try {\n const result = recoverAgent(agent.id);\n if (result) {\n recovered.push(agent.id);\n } else {\n failed.push(agent.id);\n }\n } catch (error) {\n failed.push(agent.id);\n }\n }\n\n return { recovered, failed };\n}\n\n/**\n * Check if Panopticon hooks are configured, and auto-setup if not\n */\nfunction checkAndSetupHooks(): void {\n const settingsPath = join(homedir(), '.claude', 'settings.json');\n const hookPath = join(homedir(), '.panopticon', 'bin', 'heartbeat-hook');\n\n // Check if settings.json exists and has heartbeat hook configured\n if (existsSync(settingsPath)) {\n try {\n const settingsContent = readFileSync(settingsPath, 'utf-8');\n const settings = JSON.parse(settingsContent);\n const postToolUse = settings?.hooks?.PostToolUse || [];\n\n const hookConfigured = postToolUse.some((hookConfig: any) =>\n hookConfig.hooks?.some((hook: any) =>\n hook.command === hookPath ||\n hook.command?.includes('panopticon') ||\n hook.command?.includes('heartbeat-hook')\n )\n );\n\n if (hookConfigured) {\n return; // Already configured\n }\n } catch {\n // Ignore errors, will attempt setup\n }\n }\n\n // Hooks not configured - run setup silently\n try {\n console.log('Configuring Panopticon heartbeat hooks...');\n // Note: This runs during spawn 
which is now async, so we can use execAsync\n // But this is called from a sync context in checkAndSetupHooks, so we use fire-and-forget\n exec('pan setup hooks', (error: Error | null) => {\n if (error) {\n console.warn('⚠ Failed to auto-configure hooks. Run `pan setup hooks` manually.');\n } else {\n console.log('✓ Heartbeat hooks configured');\n }\n });\n } catch (error) {\n console.warn('⚠ Failed to auto-configure hooks. Run `pan setup hooks` manually.');\n }\n}\n\n/**\n * Write task cache for heartbeat hook to use\n */\nfunction writeTaskCache(agentId: string, issueId: string): void {\n const cacheDir = join(getAgentDir(agentId));\n mkdirSync(cacheDir, { recursive: true });\n\n const cacheFile = join(cacheDir, 'current-task.json');\n writeFileSync(\n cacheFile,\n JSON.stringify({\n id: issueId,\n title: `Working on ${issueId}`,\n updated_at: new Date().toISOString()\n }, null, 2)\n );\n}\n\n/**\n * Capture SageOx session path for a planner agent.\n * This is used for parent-child session linking in multi-agent pipelines.\n * Subsequent agents (worker, reviewer, tester, merger) will use this path\n * as their PAN_PARENT_SESSION to link their sessions to the planner's session.\n */\nasync function captureSageoxSessionPath(agentId: string, projectRoot: string): Promise<void> {\n // Wait for SageOx session to be created by the hook (up to 10 seconds)\n const sessionsDir = join(projectRoot, '.sageox', 'sessions');\n let attempts = 0;\n const maxAttempts = 20;\n const delayMs = 500;\n\n while (attempts < maxAttempts) {\n // Check if sessions directory exists\n if (existsSync(sessionsDir)) {\n // Find the most recent session directory for this agent\n const sessions = readdirSync(sessionsDir, { withFileTypes: true })\n .filter(d => d.isDirectory())\n .map(d => ({\n name: d.name,\n path: join(sessionsDir, d.name),\n mtime: existsSync(join(sessionsDir, d.name, '.recording.json'))\n ? 
readFileSync(join(sessionsDir, d.name, '.recording.json'), 'utf-8')\n : null\n }))\n .filter(s => {\n // Check if this session belongs to our agent\n if (!s.mtime) return false;\n try {\n const state = JSON.parse(s.mtime);\n return state.agent_id === agentId || state.AgentID === agentId;\n } catch {\n return false;\n }\n })\n .sort((a, b) => {\n // Sort by modification time (newest first)\n const aTime = existsSync(join(a.path, '.recording.json'))\n ? (statSync(join(a.path, '.recording.json')).mtimeMs || 0)\n : 0;\n const bTime = existsSync(join(b.path, '.recording.json'))\n ? (statSync(join(b.path, '.recording.json')).mtimeMs || 0)\n : 0;\n return bTime - aTime;\n });\n\n if (sessions.length > 0) {\n // Update agent state with SageOx session path\n const state = getAgentState(agentId);\n if (state) {\n state.sageoxSessionPath = sessions[0].path;\n saveAgentState(state);\n console.log(`[agents] Captured SageOx session path for ${agentId}: ${sessions[0].path}`);\n return;\n }\n }\n }\n\n // Wait before retrying\n await new Promise(resolve => setTimeout(resolve, delayMs));\n attempts++;\n }\n\n throw new Error(`Could not find SageOx session for ${agentId} after ${maxAttempts * 
delayMs}ms`);\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAMA,SAAS,YAAY,WAAW,cAAc,eAAe,mBAAmB;AAChF,SAAS,YAAY;AAiCrB,SAAS,UAAU,SAAyB;AAC1C,SAAO,KAAK,YAAY,SAAS,SAAS;AAC5C;AAKO,SAAS,WAAW,SAA0B;AACnD,QAAM,SAAS,UAAU,OAAO;AAEhC,MAAI,WAAW,MAAM,GAAG;AACtB,QAAI;AACF,aAAO,KAAK,MAAM,aAAa,QAAQ,OAAO,CAAC;AAAA,IACjD,QAAQ;AAAA,IAAC;AAAA,EACX;AAGA,QAAM,KAAc;AAAA,IAClB;AAAA,IACA,YAAW,oBAAI,KAAK,GAAE,YAAY;AAAA,IAClC,aAAY,oBAAI,KAAK,GAAE,YAAY;AAAA,IACnC,SAAS;AAAA,IACT,OAAO;AAAA,IACP,OAAO;AAAA,MACL,aAAa;AAAA,MACb,cAAc;AAAA,MACd,cAAc;AAAA,MACd,gBAAgB;AAAA,MAChB,aAAa;AAAA,MACb,aAAa;AAAA,IACf;AAAA,IACA,YAAY,CAAC;AAAA,IACb,YAAY,CAAC;AAAA,EACf;AAEA,cAAY,EAAE;AACd,SAAO;AACT;AAKO,SAAS,YAAY,IAAmB;AAC7C,QAAM,MAAM,KAAK,YAAY,GAAG,OAAO;AACvC,YAAU,KAAK,EAAE,WAAW,KAAK,CAAC;AAClC,gBAAc,UAAU,GAAG,OAAO,GAAG,KAAK,UAAU,IAAI,MAAM,CAAC,CAAC;AAClE;AAKO,SAAS,UAAU,SAAiB,SAAiB,QAAyB;AACnF,QAAM,KAAK,WAAW,OAAO;AAE7B,QAAM,QAAmB;AAAA,IACvB;AAAA,IACA,YAAW,oBAAI,KAAK,GAAE,YAAY;AAAA,IAClC,SAAS;AAAA,IACT;AAAA,EACF;AAEA,KAAG,WAAW,QAAQ,KAAK;AAC3B,KAAG,MAAM;AACT,KAAG,cAAa,oBAAI,KAAK,GAAE,YAAY;AAGvC,MAAI,QAAQ;AACV,eAAW,SAAS,QAAQ;AAC1B,UAAI,CAAC,GAAG,WAAW,SAAS,KAAK,GAAG;AAClC,WAAG,WAAW,KAAK,KAAK;AAAA,MAC1B;AAAA,IACF;AAAA,EACF;AAGA,MAAI,GAAG,WAAW,SAAS,IAAI;AAC7B,OAAG,aAAa,GAAG,WAAW,MAAM,GAAG,EAAE;AAAA,EAC3C;AAEA,cAAY,EAAE;AAChB;AA0DO,SAAS,mBAKb;AACD,QAAM,WAKD,CAAC;AAEN,MAAI,CAAC,WAAW,UAAU,EAAG,QAAO;AAEpC,QAAM,OAAO,YAAY,YAAY,EAAE,eAAe,KAAK,CAAC,EAAE;AAAA,IAC5D,CAAC,MAAM,EAAE,YAAY;AAAA,EACvB;AAEA,aAAW,OAAO,MAAM;AACtB,UAAM,KAAK,WAAW,IAAI,IAAI;AAC9B,QAAI,GAAG,MAAM,cAAc,GAAG;AAC5B,eAAS,KAAK;AAAA,QACZ,SAAS,IAAI;AAAA,QACb,aAAa,GAAG,MAAM;AAAA,QACtB,aAAa,GAAG,MAAM;AAAA,QACtB,aAAa,GAAG,MAAM;AAAA,MACxB,CAAC;AAAA,IACH;AAAA,EACF;AAGA,WAAS,KAAK,CAAC,GAAG,MAAM;AACtB,QAAI,EAAE,gBAAgB,EAAE,aAAa;AACnC,aAAO,EAAE,cAAc,EAAE;AAAA,IAC3B;AACA,WAAO,EAAE,cAAc,EAAE;AAAA,EAC3B,CAAC;AAED,SAAO;AACT;AAKO,SAAS,SAAS,IAAqB;AAC5C,QAAM,QAAkB;AAAA,IACtB,eAAe,GAAG,OAAO;AAAA,IACzB;AAAA,IACA,YAAY,GAAG,OAAO,KAAK,GAAG,KAAK;AAAA,IACnC,YAAY
,GAAG,SAAS;AAAA,IACxB,gBAAgB,GAAG,UAAU;AAAA,IAC7B;AAAA,IACA;AAAA,IACA;AAAA,IACA,mBAAmB,GAAG,MAAM,WAAW;AAAA,IACvC,oBAAoB,GAAG,MAAM,cAAc,KAAK,QAAQ,CAAC,CAAC;AAAA,IAC1D,gBAAgB,GAAG,MAAM,YAAY;AAAA,IACrC,eAAe,GAAG,MAAM,YAAY;AAAA,IACpC,gBAAgB,GAAG,MAAM,cAAc;AAAA,IACvC,mBAAmB,GAAG,MAAM,WAAW;AAAA,IACvC;AAAA,EACF;AAEA,MAAI,GAAG,WAAW,SAAS,GAAG;AAC5B,UAAM,KAAK,gBAAgB;AAC3B,UAAM,KAAK,EAAE;AACb,UAAM,KAAK,GAAG,WAAW,KAAK,IAAI,CAAC;AACnC,UAAM,KAAK,EAAE;AAAA,EACf;AAEA,MAAI,GAAG,WAAW,SAAS,GAAG;AAC5B,UAAM,KAAK,gBAAgB;AAC3B,UAAM,KAAK,EAAE;AAEb,eAAW,QAAQ,GAAG,WAAW,MAAM,GAAG,EAAE,GAAG;AAC7C,YAAM,aAAa;AAAA,QACjB,SAAS;AAAA,QACT,QAAQ;AAAA,QACR,WAAW;AAAA,QACX,aAAa;AAAA,MACf,EAAE,KAAK,OAAO;AAEd,YAAM,WAAW,KAAK,WAAW,KAAK,KAAK,QAAQ,OAAO;AAC1D,YAAM,KAAK,GAAG,UAAU,IAAI,KAAK,OAAO,GAAG,QAAQ,EAAE;AAErD,UAAI,KAAK,eAAe;AACtB,cAAM,KAAK,aAAa,KAAK,aAAa,EAAE;AAAA,MAC9C;AAAA,IACF;AACA,UAAM,KAAK,EAAE;AAAA,EACf;AAEA,SAAO,MAAM,KAAK,IAAI;AACxB;AAhRA;AAAA;AAAA;AAAA;AAQA;AAAA;AAAA;;;ACFA,SAAS,gBAAAA,eAAc,iBAAAC,gBAAe,cAAAC,aAAY,aAAAC,kBAAiB;AACnE,SAAS,OAAO,iBAAiB;AACjC,SAAS,QAAAC,aAAY;AA8QrB,SAAS,UAA4B,UAAa,WAA0B;AAC1E,QAAM,SAAS,EAAE,GAAG,SAAS;AAE7B,aAAW,OAAO,OAAO,KAAK,SAAS,GAAkB;AACvD,UAAM,aAAa,SAAS,GAAG;AAC/B,UAAM,cAAc,UAAU,GAAG;AAGjC,QAAI,gBAAgB,OAAW;AAG/B,QACE,OAAO,eAAe,YACtB,eAAe,QACf,CAAC,MAAM,QAAQ,UAAU,KACzB,OAAO,gBAAgB,YACvB,gBAAgB,QAChB,CAAC,MAAM,QAAQ,WAAW,GAC1B;AACA,aAAO,GAAG,IAAI,UAAU,YAAmB,WAAkB;AAAA,IAC/D,OAAO;AAEL,aAAO,GAAG,IAAI;AAAA,IAChB;AAAA,EACF;AAEA,SAAO;AACT;AAQO,SAAS,qBAAqC;AAEnD,MAAI,CAACF,YAAW,eAAe,GAAG;AAChC,IAAAC,WAAU,iBAAiB,EAAE,WAAW,KAAK,CAAC;AAAA,EAChD;AAGA,MAAI,CAACD,YAAW,oBAAoB,GAAG;AACrC,uBAAmB,uBAAuB;AAC1C,WAAO;AAAA,EACT;AAEA,MAAI;AACF,UAAM,UAAUF,cAAa,sBAAsB,OAAO;AAC1D,UAAM,SAAS,MAAM,OAAO;AAG5B,WAAO,UAAU,yBAAyB,MAAM;AAAA,EAClD,SAAS,OAAO;AACd,YAAQ,MAAM,mCAAmC,KAAK;AACtD,YAAQ,MAAM,6BAA6B;AAC3C,WAAO;AAAA,EACT;AACF;AAOO,SAAS,mBAAmB,QAA8B;AAE/D,MAAI,CAACE,YAAW,eAAe,GAAG;AAChC,IAAAC,WAAU,iBAAiB,EAAE,WAAW,KAAK,CAAC;AAAA,EAChD;AAEA,MAAI;AACF,UAAM,UAAU,UAAU,MAAa;AACvC,IAAAF,eAAc,sBAAsB,SAAS
,OAAO;AAAA,EACtD,SAAS,OAAO;AACd,YAAQ,MAAM,mCAAmC,KAAK;AACtD,UAAM;AAAA,EACR;AACF;AAgCO,SAAS,wBAId;AACA,QAAM,SAAS,mBAAmB;AAClC,SAAO;AAAA,IACL,OAAO,OAAO,WAAW,QAAQ,KAAK;AAAA,IACtC,SAAS,OAAO,WAAW,UAAU,KAAK;AAAA,IAC1C,OAAO,OAAO,WAAW,QAAQ,KAAK;AAAA,EACxC;AACF;AA/YA,IAWM,sBA0KO;AArLb,IAAAI,eAAA;AAAA;AAAA;AAAA;AASA;AAEA,IAAM,uBAAuBD,MAAK,iBAAiB,eAAe;AA0K3D,IAAM,0BAA0C;AAAA,MACrD,SAAS;AAAA,QACP,YAAY;AAAA,MACd;AAAA,MACA,YAAY;AAAA,QACV,OAAO;AAAA,QACP,SAAS;AAAA,QACT,OAAO;AAAA,MACT;AAAA,MACA,cAAc;AAAA,QACZ,iBAAiB;AAAA,QACjB,eAAe;AAAA;AAAA,QACf,iBAAiB;AAAA,MACnB;AAAA,MACA,YAAY;AAAA,QACV,gBAAgB;AAAA;AAAA,QAChB,mBAAmB,CAAC,eAAe,iBAAiB,cAAc;AAAA,MACpE;AAAA,MACA,eAAe;AAAA,QACb,eAAe;AAAA,QACf,OAAO;AAAA,MACT;AAAA,MACA,aAAa;AAAA,QACX,aAAa;AAAA,UACX,SAAS;AAAA,UACT,WAAW;AAAA;AAAA,QACb;AAAA,QACA,cAAc;AAAA,UACZ,SAAS;AAAA,UACT,WAAW;AAAA;AAAA,QACb;AAAA,QACA,YAAY;AAAA,UACV,SAAS;AAAA;AAAA,UACT,WAAW;AAAA,QACb;AAAA,MACF;AAAA,MACA,iBAAiB;AAAA,QACf,eAAe;AAAA,QACf,oBAAoB;AAAA,UAClB,SAAS;AAAA,UACT,QAAQ;AAAA,UACR,QAAQ;AAAA,UACR,SAAS;AAAA,UACT,QAAQ;AAAA,QACV;AAAA,QACA,mBAAmB;AAAA,UACjB,aAAa;AAAA,UACb,cAAc;AAAA,UACd,YAAY;AAAA,QACd;AAAA,MACF;AAAA,MACA,UAAU;AAAA,QACR,eAAe;AAAA,UACb,kBAAkB;AAAA,YAChB,SAAS;AAAA,YACT,yBAAyB;AAAA,YACzB,wBAAwB;AAAA,UAC1B;AAAA,UACA,cAAc;AAAA,YACZ,SAAS;AAAA,YACT,YAAY;AAAA,YACZ,UAAU;AAAA,YACV,YAAY;AAAA,UACd;AAAA,UACA,yBAAyB;AAAA,YACvB,SAAS;AAAA;AAAA,YACT,eAAe;AAAA,UACjB;AAAA,QACF;AAAA,MACF;AAAA,MACA,eAAe;AAAA,QACb,iBAAiB;AAAA,QACjB,cAAc;AAAA,MAChB;AAAA,MACA,cAAc;AAAA,QACZ,SAAS;AAAA,QACT,aAAa;AAAA,QACb,iBAAiB,CAAC,IAAI,IAAI,GAAG;AAAA;AAAA,MAC/B;AAAA,MACA,aAAa;AAAA,QACX,eAAe;AAAA,QACf,eAAe;AAAA,QACf,iBAAiB;AAAA,QACjB,iBAAiB;AAAA;AAAA,MACnB;AAAA,MACA,WAAW;AAAA,QACT,kBAAkB;AAAA,QAClB,wBAAwB;AAAA,MAC1B;AAAA,IACF;AAAA;AAAA;;;AC9QA,SAAS,cAAAE,aAAY,aAAAC,YAAW,iBAAAC,gBAAe,gBAAAC,eAAc,eAAAC,cAAa,gBAAgB,YAAY,gBAAgB;AACtH,SAAS,QAAAC,OAAM,eAAe;AAC9B,SAAS,eAAe;AACxB,SAAS,YAAY;AACrB,SAAS,iBAAiB;AAqB1B,SAAS,iBAAiB,SAAyB;AACjD,MAAI,eAAe,KAAK,OAAK,QAAQ,WAAW,CAAC,CAAC,GAAG;AACnD,WAAO;AAAA,EACT;AACA
,SAAO,SAAS,QAAQ,YAAY,CAAC;AACvC;AAOA,SAAS,uBAAuB,OAAuC;AACrE,QAAM,WAAW,oBAAoB,KAAgB;AACrD,MAAI,SAAS,SAAS,YAAa,QAAO,CAAC;AAE3C,QAAM,WAAW,aAAa;AAC9B,QAAM,SAAS,SAAS,WAAW,SAAS,IAAsC;AAClF,MAAI,QAAQ;AACV,WAAO,eAAe,UAAU,MAAM;AAAA,EACxC;AACA,UAAQ,KAAK,sCAAsC,SAAS,WAAW,6BAA6B;AACpG,SAAO,CAAC;AACV;AASA,SAAS,mBAAmB,SAAyB;AACnD,SAAOA,MAAK,YAAY,OAAO,GAAG,YAAY;AAChD;AAKA,SAAS,iBAAiB,SAAuB;AAC/C,QAAM,YAAY,mBAAmB,OAAO;AAC5C,MAAIL,YAAW,SAAS,GAAG;AACzB,QAAI;AACF,iBAAW,SAAS;AAAA,IACtB,QAAQ;AAAA,IAER;AAAA,EACF;AACF;AAMA,eAAe,mBAAmB,SAAiB,iBAAiB,IAAsB;AACxF,QAAM,YAAY,mBAAmB,OAAO;AAE5C,WAAS,IAAI,GAAG,IAAI,gBAAgB,KAAK;AACvC,UAAM,IAAI,QAAQ,CAAAM,aAAW,WAAWA,UAAS,GAAI,CAAC;AAEtD,QAAIN,YAAW,SAAS,GAAG;AACzB,UAAI;AACF,cAAM,UAAUG,cAAa,WAAW,OAAO;AAC/C,cAAM,SAAS,KAAK,MAAM,OAAO;AACjC,YAAI,OAAO,UAAU,MAAM;AACzB,iBAAO;AAAA,QACT;AAAA,MACF,QAAQ;AAAA,MAER;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AACT;AA2BO,SAAS,YAAY,SAAyB;AACnD,SAAOE,MAAK,YAAY,OAAO;AACjC;AAEO,SAAS,cAAc,SAAoC;AAChE,QAAM,YAAYA,MAAK,YAAY,OAAO,GAAG,YAAY;AACzD,MAAI,CAACL,YAAW,SAAS,EAAG,QAAO;AAEnC,QAAM,UAAUG,cAAa,WAAW,MAAM;AAC9C,SAAO,KAAK,MAAM,OAAO;AAC3B;AAEO,SAAS,eAAe,OAAyB;AACtD,QAAM,MAAM,YAAY,MAAM,EAAE;AAChC,EAAAF,WAAU,KAAK,EAAE,WAAW,KAAK,CAAC;AAElC,EAAAC;AAAA,IACEG,MAAK,KAAK,YAAY;AAAA,IACtB,KAAK,UAAU,OAAO,MAAM,CAAC;AAAA,EAC/B;AACF;AAqCO,SAAS,oBAAoB,SAAyB;AAC3D,SAAOA,MAAK,YAAY,OAAO,GAAG,cAAc;AAClD;AAQO,SAAS,qBAAqB,SAA2C;AAC9E,QAAM,cAAc,oBAAoB,OAAO;AAC/C,QAAM,YAAYA,MAAK,YAAY,OAAO,GAAG,YAAY;AAGzD,MAAIL,YAAW,WAAW,GAAG;AAC3B,QAAI;AACF,YAAM,UAAUG,cAAa,aAAa,MAAM;AAChD,aAAO,KAAK,MAAM,OAAO;AAAA,IAC3B,QAAQ;AAAA,IAER;AAAA,EACF;AAGA,MAAIH,YAAW,SAAS,GAAG;AACzB,QAAI;AACF,YAAM,UAAUG,cAAa,WAAW,MAAM;AAC9C,YAAM,SAAS,KAAK,MAAM,OAAO;AAEjC,UAAI,OAAO,SAAS,OAAO,cAAc;AACvC,eAAO;AAAA,MACT;AAAA,IACF,QAAQ;AAAA,IAER;AAAA,EACF;AAGA,MAAI,CAACH,YAAW,SAAS,KAAK,CAACA,YAAW,WAAW,GAAG;AACtD,WAAO;AAAA,MACL,OAAO;AAAA,MACP,eAAc,oBAAI,KAAK,GAAE,YAAY;AAAA,IACvC;AAAA,EACF;AAEA,SAAO;AACT;AASO,SAAS,sBAAsB,SAAiB,OAAyC;AAC9F,QAAM,MAAM,YAAY,OAAO;AAC/B,EAAAC,WAAU,KAAK,EAAE,WAAW,KAAK,CAAC;AAElC,QA
AM,cAAc,oBAAoB,OAAO;AAG/C,MAAI,WAAqC;AACzC,MAAID,YAAW,WAAW,GAAG;AAC3B,QAAI;AACF,iBAAW,KAAK,MAAMG,cAAa,aAAa,MAAM,CAAC;AAAA,IACzD,QAAQ;AAAA,IAER;AAAA,EACF;AAEA,QAAM,SAA4B;AAAA,IAChC,GAAI,YAAY,EAAE,OAAO,iBAAiB,eAAc,oBAAI,KAAK,GAAE,YAAY,EAAE;AAAA,IACjF,GAAG;AAAA,EACL;AAEA,EAAAD,eAAc,aAAa,KAAK,UAAU,QAAQ,MAAM,CAAC,CAAC;AAC5D;AAKO,SAAS,eAAe,SAAiB,OAA4B;AAC1E,QAAM,MAAM,YAAY,OAAO;AAC/B,EAAAD,WAAU,KAAK,EAAE,WAAW,KAAK,CAAC;AAElC,QAAM,eAAeI,MAAK,KAAK,gBAAgB;AAG/C,iBAAe,cAAc,KAAK,UAAU,KAAK,IAAI,IAAI;AAGzD,MAAIL,YAAW,YAAY,GAAG;AAC5B,QAAI;AACF,YAAM,QAAQG,cAAa,cAAc,MAAM,EAAE,KAAK,EAAE,MAAM,IAAI;AAClE,UAAI,MAAM,SAAS,KAAK;AACtB,cAAM,UAAU,MAAM,MAAM,IAAI;AAChC,QAAAD,eAAc,cAAc,QAAQ,KAAK,IAAI,IAAI,IAAI;AAAA,MACvD;AAAA,IACF,SAAS,OAAO;AAAA,IAEhB;AAAA,EACF;AACF;AAKO,SAAS,YAAY,SAAiB,QAAQ,KAAsB;AACzE,QAAM,eAAeG,MAAK,YAAY,OAAO,GAAG,gBAAgB;AAEhE,MAAI,CAACL,YAAW,YAAY,GAAG;AAC7B,WAAO,CAAC;AAAA,EACV;AAEA,MAAI;AACF,UAAM,QAAQG,cAAa,cAAc,MAAM,EAAE,KAAK,EAAE,MAAM,IAAI;AAClE,UAAM,UAAU,MACb,OAAO,UAAQ,KAAK,KAAK,CAAC,EAC1B,IAAI,UAAQ,KAAK,MAAM,IAAI,CAAkB,EAC7C,MAAM,CAAC,KAAK;AAEf,WAAO;AAAA,EACT,QAAQ;AACN,WAAO,CAAC;AAAA,EACV;AACF;AAKO,SAAS,cAAc,SAAiB,WAAyB;AACtE,QAAM,MAAM,YAAY,OAAO;AAC/B,EAAAF,WAAU,KAAK,EAAE,WAAW,KAAK,CAAC;AAElC,EAAAC,eAAcG,MAAK,KAAK,YAAY,GAAG,SAAS;AAClD;AAKO,SAAS,aAAa,SAAgC;AAC3D,QAAM,cAAcA,MAAK,YAAY,OAAO,GAAG,YAAY;AAE3D,MAAI,CAACL,YAAW,WAAW,GAAG;AAC5B,WAAO;AAAA,EACT;AAEA,MAAI;AACF,WAAOG,cAAa,aAAa,MAAM,EAAE,KAAK;AAAA,EAChD,QAAQ;AACN,WAAO;AAAA,EACT;AACF;AA2BA,SAAS,eAAe,SAA+B;AACrD,UAAQ,IAAI,uCAAuC,EAAE,OAAO,QAAQ,OAAO,UAAU,QAAQ,UAAU,OAAO,QAAQ,OAAO,WAAW,QAAQ,WAAW,YAAY,QAAQ,WAAW,CAAC;AAG3L,MAAI,QAAQ,OAAO;AACjB,YAAQ,IAAI,iCAAiC,QAAQ,KAAK,EAAE;AAC5D,WAAO,QAAQ;AAAA,EACjB;AAEA,MAAI;AAEF,QAAI,QAAQ,UAAU;AACpB,aAAO,WAAW,QAAQ,QAAQ;AAAA,IACpC;AAGA,QAAI,QAAQ,OAAO;AACjB,YAAM,WAAuB,eAAe,QAAQ,KAAK;AACzD,aAAO,WAAW,QAAQ;AAAA,IAC5B;AAGA,QAAI,QAAQ,aAAa,QAAQ,cAAc,cAAc;AAE3D,YAAM,WAAuB,cAAc,QAAQ,SAAS;AAC5D,aAAO,WAAW,QAAQ;AAAA,IAC5B;AAGA,QAAI,QAAQ,YAAY;AACtB,YAAM,WAAW,aAAa;AAC9B,UAAI,SAAS,OAAO,WAAW,QAAQ,UAAU,GAAG
;AAClD,gBAAQ,KAAK,6CAA6C,QAAQ,UAAU,qCAAqC;AACjH,eAAO,SAAS,OAAO,WAAW,QAAQ,UAAU;AAAA,MACtD;AAAA,IACF;AAGA,QAAI;AACF,YAAM,iBAAiB,mBAAmB;AAC1C,YAAM,eAAe,eAAe,iBAAiB,iBAAiB;AACtE,YAAM,WAAmC;AAAA,QACvC,QAAQ;AAAA,QACR,UAAU;AAAA,QACV,SAAS;AAAA,MACX;AACA,aAAO,SAAS,YAAY,KAAK;AAAA,IACnC,QAAQ;AACN,aAAO;AAAA,IACT;AAAA,EACF,SAAS,OAAO;AAEd,YAAQ,KAAK,wEAAwE;AACrF,WAAO,QAAQ,SAAS;AAAA,EAC1B;AACF;AAcA,eAAe,qBAAqB,SAAiB,OAAmB,eAAuC;AAG7G,QAAM,gBAAgB,gBAAgB,kBAAkB,aAAa,IAAI;AACzE,MAAI,CAAC,eAAe;AAClB,UAAM,IAAI,MAAM,qBAAqB,OAAO,2CAA2C,iBAAiB,QAAQ,0CAA0C;AAAA,EAC5J;AAGA,MAAI,cAAc,aAAa;AAC7B,UAAM,SAAS,WAAW;AAC1B,UAAM,iBAAiB,OAAO;AAC9B,QAAI,CAAC,gBAAgB,QAAQ;AAC3B,YAAM,IAAI,MAAM,WAAW,cAAc,IAAI,uBAAuB,cAAc,WAAW,sDAAsD;AAAA,IACrJ;AACA,UAAM,UAAU,wBAAwB,gBAAgB,QAAQ;AAChE,UAAM,QAAQ,gBAAgB,SAAS,KAAK;AAC5C,YAAQ,IAAI,yBAAyB,OAAO,OAAO,KAAK,sBAAsB,cAAc,WAAW,GAAG;AAC1G;AAAA,EACF;AAGA,MAAI,cAAc,aAAa;AAC7B,UAAM,CAAC,OAAO,IAAI,IAAI,cAAc,YAAY,MAAM,GAAG;AACzD,UAAM,UAAU,cAAc,EAAE,MAAM,UAAU,OAAO,KAAK,CAAC;AAC7D,UAAM,QAAQ,gBAAgB,SAAS,KAAK;AAC5C,YAAQ,IAAI,yBAAyB,OAAO,OAAO,KAAK,gBAAgB,cAAc,WAAW,GAAG;AACpG;AAAA,EACF;AAGA,MAAI,cAAc,eAAe;AAC/B,UAAM,SAAS,WAAW;AAC1B,UAAM,iBAAiB,OAAO;AAC9B,QAAI,CAAC,gBAAgB,OAAO;AAC1B,YAAM,IAAI,MAAM,WAAW,cAAc,IAAI,yBAAyB,cAAc,aAAa,qDAAqD;AAAA,IACxJ;AACA,UAAM,UAAU,wBAAwB,gBAAgB,OAAO;AAC/D,UAAM,QAAQ,gBAAgB,SAAS,KAAK;AAC5C,YAAQ,IAAI,yBAAyB,OAAO,OAAO,KAAK,wBAAwB,cAAc,aAAa,GAAG;AAC9G;AAAA,EACF;AAEA,MAAI,cAAc,aAAa;AAC7B,YAAQ,KAAK,qCAAqC,cAAc,WAAW,iCAAiC,KAAK,oBAAoB;AACrI;AAAA,EACF;AAEA,QAAM,IAAI,MAAM,WAAW,cAAc,IAAI,+FAA+F;AAC9I;AAEA,eAAsB,4BAA4B,SAAiB,eAAuC;AACxG,SAAO,qBAAqB,SAAS,eAAe,aAAa;AACnE;AAMA,eAAsB,0BAA0B,SAAiB,eAAuC;AACtG,SAAO,qBAAqB,SAAS,aAAa,aAAa;AACjE;AAEA,eAAsB,WAAW,SAA4C;AAC3E,QAAM,UAAU,SAAS,QAAQ,QAAQ,YAAY,CAAC;AAGtD,MAAI,cAAc,OAAO,GAAG;AAC1B,UAAM,IAAI,MAAM,SAAS,OAAO,sDAAsD;AAAA,EACxF;AAGA,WAAS,OAAO;AAGhB,QAAM,gBAAgB,eAAe,OAAO;AAC5C,UAAQ,IAAI,2BAA2B,aAAa,EAAE;AAGtD,QAAM,QAAoB;AAAA,IACxB,IAAI;AAAA,IACJ,SAAS,QAAQ;AAAA,IACjB,WAAW,QAAQ;AAAA,IACnB,SAAS,QAAQ,WAAW;AAAA,IAC5B,OAAO;AAAA
,IACP,QAAQ;AAAA,IACR,YAAW,oBAAI,KAAK,GAAE,YAAY;AAAA;AAAA,IAElC,YAAY,QAAQ;AAAA,IACpB,cAAc;AAAA,IACd,WAAW;AAAA;AAAA,IAEX,OAAO,QAAQ;AAAA,IACf,UAAU,QAAQ;AAAA,EACpB;AAEA,iBAAe,KAAK;AAGpB,MAAI,SAAS,QAAQ,UAAU;AAG/B,QAAM,EAAE,SAAS,MAAM,IAAI,UAAU,OAAO;AAC5C,MAAI,SAAS;AACX,UAAM,mBAAmB,yBAAyB,OAAO;AACzD,QAAI,kBAAkB;AACpB,eAAS,mBAAmB,gBAAgB;AAAA,IAC9C;AAAA,EACF;AAGA,QAAM,aAAaE,MAAK,YAAY,OAAO,GAAG,mBAAmB;AACjE,MAAI,QAAQ;AACV,IAAAH,eAAc,YAAY,MAAM;AAAA,EAClC;AAGA,qBAAmB;AAGnB,MAAI;AACF,UAAM,WAAWG,MAAK,QAAQ,WAAW,OAAO;AAChD,QAAIL,YAAW,QAAQ,GAAG;AACxB,YAAM,EAAE,qBAAqB,IAAI,MAAM,OAAO,2BAAkB;AAChE,YAAM,cAAc,qBAAqB,QAAQ,WAAW,QAAQ;AACpE,YAAM,SAAS,MAAM,YAAY,UAAU;AAC3C,UAAI,CAAC,OAAO,SAAS;AACnB,cAAM,YAAY,MAAM,IAAI;AAC5B,gBAAQ,IAAI,IAAI,OAAO,qCAAqC;AAAA,MAC9D;AAAA,IACF;AAAA,EACF,QAAQ;AAAA,EAER;AAGA,iBAAe,SAAS,QAAQ,OAAO;AAGvC,mBAAiB,OAAO;AAGxB,QAAM,cAAc,uBAAuB,aAAa;AAMxD,QAAM,WAAW,oBAAoB,aAAwB;AAC7D,MAAI,SAAS,aAAa,mBAAmB;AAC3C,4BAAwB,UAAU,QAAQ,SAAS;AAAA,EACrD,OAAO;AACL,4BAAwB,QAAQ,SAAS;AAAA,EAC3C;AAKA,MAAI;AACJ,MAAI,QAAQ;AACV,UAAM,iBAAiBK,MAAK,YAAY,OAAO,GAAG,aAAa;AAC/D,UAAM,kBAAkB;AAAA,gBACZ,UAAU;AAAA,qDAC2B,MAAM,KAAK;AAAA;AAE5D,IAAAH,eAAc,gBAAgB,iBAAiB,EAAE,MAAM,IAAM,CAAC;AAC9D,gBAAY,SAAS,cAAc;AAAA,EACrC,OAAO;AACL,gBAAY,iDAAiD,MAAM,KAAK;AAAA,EAC1E;AAGA,MAAI;AACF,UAAM,EAAE,kBAAkB,IAAI,MAAM,OAAO,iCAAwB;AACnE,sBAAkB,QAAQ,SAAS;AAAA,EACrC,QAAQ;AAAA,EAAkB;AAI1B,QAAM,cAAc,QAAQ,QAAQ,WAAW,MAAM,IAAI;AACzD,QAAM,gBAAgBF,YAAWK,MAAK,aAAa,SAAS,CAAC;AAC7D,QAAM,YAAoC,CAAC;AAE3C,MAAI,eAAe;AACjB,cAAU,kBAAkB;AAG5B,QAAI,QAAQ,SAAS;AACnB,gBAAU,eAAe,QAAQ;AAAA,IACnC;AACA,QAAI,QAAQ,OAAO;AACjB,gBAAU,YAAY,QAAQ;AAAA,IAChC;AAGA,QAAI,QAAQ,SAAU,QAAQ,UAAqB,YAAY;AAC7D,YAAM,iBAAiB,SAAS,QAAQ,QAAQ,YAAY,CAAC;AAC7D,YAAM,eAAe,cAAc,cAAc;AACjD,UAAI,cAAc,mBAAmB;AACnC,kBAAU,qBAAqB,aAAa;AAAA,MAC9C;AAAA,IACF;AAAA,EACF;AAEA,gBAAc,SAAS,QAAQ,WAAW,WAAW;AAAA,IACnD,KAAK;AAAA,MACH,qBAAqB;AAAA,MACrB,qBAAqB,QAAQ;AAAA,MAC7B,yBAAyB,QAAQ,SAAS;AAAA,MAC1C,sCAAsC;AAAA;AAAA,MACtC,GAAG;AAAA;AAAA,MACH,GAAG;AAAA;AAAA,IACL;AAAA,EACF,CAAC;AAGD,QAAM,SAAS;AA
Cf,iBAAe,KAAK;AAGpB,YAAU,SAAS,QAAQ,OAAO;AAIlC,MAAI,CAAC,QAAQ,aAAa,QAAQ,cAAc,cAAc;AAC5D,gCAA4B,QAAQ,SAAS,QAAQ,SAAS,EAAE,MAAM,CAAC,QAAQ;AAC7E,cAAQ,KAAK,iCAAiC,QAAQ,OAAO,oBAAoB,IAAI,OAAO,EAAE;AAAA,IAChG,CAAC;AAAA,EACH;AAGA,MAAI,iBAAkB,QAAQ,UAAqB,YAAY;AAC7D,6BAAyB,SAAS,WAAW,EAAE,MAAM,CAAC,QAAQ;AAC5D,cAAQ,KAAK,mDAAmD,IAAI,OAAO,EAAE;AAAA,IAC/E,CAAC;AAAA,EACH;AAEA,SAAO;AACT;AAEO,SAAS,oBAA8D;AAC5E,QAAM,eAAe,iBAAiB;AACtC,QAAM,YAAY,IAAI,IAAI,aAAa,IAAI,OAAK,EAAE,IAAI,CAAC;AAEvD,QAAM,SAAmD,CAAC;AAG1D,MAAI,CAACL,YAAW,UAAU,EAAG,QAAO;AAEpC,QAAM,OAAOI,aAAY,YAAY,EAAE,eAAe,KAAK,CAAC,EACzD,OAAO,OAAK,EAAE,YAAY,CAAC;AAE9B,aAAW,OAAO,MAAM;AACtB,UAAM,QAAQ,cAAc,IAAI,IAAI;AACpC,QAAI,OAAO;AACT,aAAO,KAAK;AAAA,QACV,GAAG;AAAA,QACH,YAAY,UAAU,IAAI,MAAM,EAAE;AAAA,MACpC,CAAC;AAAA,IACH;AAAA,EACF;AAEA,SAAO;AACT;AAEO,SAAS,UAAU,SAAuB;AAC/C,QAAM,eAAe,iBAAiB,OAAO;AAE7C,MAAI,cAAc,YAAY,GAAG;AAE/B,QAAI;AACF,YAAM,SAAS,YAAY,cAAc,GAAI;AAC7C,UAAI,QAAQ;AACV,cAAM,WAAW,YAAY,YAAY;AACzC,QAAAH,WAAU,UAAU,EAAE,WAAW,KAAK,CAAC;AACvC,QAAAC,eAAcG,MAAK,UAAU,YAAY,GAAG,MAAM;AAAA,MACpD;AAAA,IACF,QAAQ;AAAA,IAER;AAEA,gBAAY,YAAY;AAAA,EAC1B;AAEA,QAAM,QAAQ,cAAc,YAAY;AACxC,MAAI,OAAO;AAET,QAAI,CAAC,MAAM,GAAI,OAAM,KAAK;AAE1B,UAAM,SAAS;AACf,mBAAe,KAAK;AAAA,EACtB;AAKA,wBAAsB,cAAc,EAAE,OAAO,UAAU,CAAC;AAC1D;AAEA,eAAsB,aAAa,SAAiB,SAAgC;AAClF,QAAM,eAAe,iBAAiB,OAAO;AAG7C,QAAM,eAAe,qBAAqB,YAAY;AACtD,MAAI,cAAc,UAAU,aAAa;AACvC,YAAQ,IAAI,0CAA0C,YAAY,qBAAqB;AACvF,UAAM,SAAS,MAAM,YAAY,cAAc,OAAO;AACtD,QAAI,CAAC,OAAO,SAAS;AACnB,YAAM,IAAI,MAAM,gCAAgC,OAAO,KAAK,EAAE;AAAA,IAChE;AAEA;AAAA,EACF;AAGA,QAAM,aAAa,cAAc,YAAY;AAC7C,MAAI,cAAc,WAAW,WAAW,aAAa,CAAC,cAAc,YAAY,GAAG;AACjF,YAAQ,IAAI,0CAA0C,YAAY,sBAAsB;AAExF,UAAM,cAAc,WAAW,QAAQ,uBAAuB,WAAW,KAAK,IAAI,CAAC;AACnF,QAAI,WAAW,OAAO;AACpB,YAAM,WAAW,oBAAoB,WAAW,KAAgB;AAChE,UAAI,SAAS,aAAa,mBAAmB;AAC3C,gCAAwB,UAAU,WAAW,SAAS;AAAA,MACxD,OAAO;AACL,gCAAwB,WAAW,SAAS;AAAA,MAC9C;AAAA,IACF;AAEA,qBAAiB,YAAY;AAC7B,UAAM,YAAY,iDAAiD,WAAW,SAAS,mBAAmB,8BAA8B,WAAW,OAAO;AAC1J,kBAAc,cAAc,WAAW,WAAW,WAAW;AAAA,MAC3D,KAAK;AAAA,QACH,qBAA
qB;AAAA,QACrB,qBAAqB,WAAW,WAAW;AAAA,QAC3C,yBAAyB,WAAW,SAAS;AAAA,QAC7C,sCAAsC;AAAA,QACtC,GAAG;AAAA,MACL;AAAA,IACF,CAAC;AAED,eAAW,SAAS;AACpB,eAAW,gBAAe,oBAAI,KAAK,GAAE,YAAY;AACjD,mBAAe,UAAU;AAGzB,UAAM,QAAQ,MAAM,mBAAmB,cAAc,EAAE;AACvD,QAAI,OAAO;AACT,YAAM,cAAc,cAAc,OAAO;AACzC,cAAQ,IAAI,sBAAsB,YAAY,yBAAyB;AAAA,IACzE,OAAO;AACL,cAAQ,KAAK,sBAAsB,YAAY,8DAAyD;AAAA,IAC1G;AAGA,UAAME,WAAUF,MAAK,YAAY,YAAY,GAAG,MAAM;AACtD,IAAAJ,WAAUM,UAAS,EAAE,WAAW,KAAK,CAAC;AACtC,UAAMC,cAAY,oBAAI,KAAK,GAAE,YAAY,EAAE,QAAQ,SAAS,GAAG;AAC/D,IAAAN;AAAA,MACEG,MAAKE,UAAS,GAAGC,UAAS,KAAK;AAAA,MAC/B;AAAA;AAAA,EAAgB,OAAO;AAAA;AAAA,IACzB;AACA;AAAA,EACF;AAGA,QAAM,EAAE,sBAAsB,kBAAkB,IAAI,MAAM,OAAO,6BAA2B;AAC5F,QAAM,cAAc,qBAAqB,YAAY;AACrD,MAAI,eAAe,YAAY,QAAQ;AACrC,YAAQ,IAAI,4CAA4C,YAAY,OAAO,YAAY,MAAM,EAAE;AAC/F,UAAM,kBAAkB,cAAc,YAAY,QAAQ,OAAO;AAGjE,UAAMD,WAAUF,MAAK,YAAY,YAAY,GAAG,MAAM;AACtD,IAAAJ,WAAUM,UAAS,EAAE,WAAW,KAAK,CAAC;AACtC,UAAMC,cAAY,oBAAI,KAAK,GAAE,YAAY,EAAE,QAAQ,SAAS,GAAG;AAC/D,IAAAN;AAAA,MACEG,MAAKE,UAAS,GAAGC,UAAS,KAAK;AAAA,MAC/B;AAAA;AAAA,EAAgB,OAAO;AAAA;AAAA,IACzB;AACA;AAAA,EACF;AAEA,MAAI,CAAC,cAAc,YAAY,GAAG;AAChC,UAAM,IAAI,MAAM,SAAS,YAAY,cAAc;AAAA,EACrD;AAEA,QAAM,cAAc,cAAc,OAAO;AAGzC,QAAM,UAAUH,MAAK,YAAY,YAAY,GAAG,MAAM;AACtD,EAAAJ,WAAU,SAAS,EAAE,WAAW,KAAK,CAAC;AAEtC,QAAM,aAAY,oBAAI,KAAK,GAAE,YAAY,EAAE,QAAQ,SAAS,GAAG;AAC/D,EAAAC;AAAA,IACEG,MAAK,SAAS,GAAG,SAAS,KAAK;AAAA,IAC/B;AAAA;AAAA,EAAgB,OAAO;AAAA;AAAA,EACzB;AACF;AAYA,eAAsB,YAAY,SAAiB,SAAiE;AAClH,QAAM,eAAe,iBAAiB,OAAO;AAG7C,QAAM,eAAe,qBAAqB,YAAY;AACtD,MAAI,CAAC,gBAAgB,aAAa,UAAU,aAAa;AACvD,WAAO;AAAA,MACL,SAAS;AAAA,MACT,OAAO,iCAAiC,cAAc,SAAS,SAAS;AAAA,IAC1E;AAAA,EACF;AAGA,QAAM,YAAY,aAAa,YAAY;AAC3C,MAAI,CAAC,WAAW;AACd,WAAO;AAAA,MACL,SAAS;AAAA,MACT,OAAO;AAAA,IACT;AAAA,EACF;AAGA,QAAM,aAAa,cAAc,YAAY;AAC7C,MAAI,CAAC,YAAY;AACf,WAAO;AAAA,MACL,SAAS;AAAA,MACT,OAAO;AAAA,IACT;AAAA,EACF;AAGA,MAAI,cAAc,YAAY,GAAG;AAC/B,WAAO;AAAA,MACL,SAAS;AAAA,MACT,OAAO;AAAA,IACT;AAAA,EACF;AAEA,MAAI;AAEF,qBAAiB,YAAY;AAG7B,UAAM,cAAc,WAAW,QAAQ,uBAAuB,WAAW,KAAK,IAAI,CAAC;AAInF
,QAAI,WAAW,OAAO;AACpB,YAAM,WAAW,oBAAoB,WAAW,KAAgB;AAChE,UAAI,SAAS,aAAa,mBAAmB;AAC3C,gCAAwB,UAAU,WAAW,SAAS;AAAA,MACxD,OAAO;AACL,gCAAwB,WAAW,SAAS;AAAA,MAC9C;AAAA,IACF;AAGA,UAAM,YAAY,oBAAoB,SAAS;AAC/C,kBAAc,cAAc,WAAW,WAAW,WAAW;AAAA,MAC3D,KAAK;AAAA,QACH,qBAAqB;AAAA,QACrB,qBAAqB,WAAW,WAAW;AAAA,QAC3C,yBAAyB,WAAW,SAAS;AAAA,QAC7C,sCAAsC;AAAA,QACtC,GAAG;AAAA,MACL;AAAA,IACF,CAAC;AAGD,QAAI,SAAS;AAEX,YAAM,QAAQ,MAAM,mBAAmB,cAAc,EAAE;AAEvD,UAAI,OAAO;AAET,cAAM,cAAc,cAAc,OAAO;AAAA,MAC3C,OAAO;AACL,gBAAQ,MAAM,uEAAuE;AAAA,MACvF;AAAA,IACF;AAGA,0BAAsB,cAAc;AAAA,MAClC,OAAO;AAAA,MACP,YAAW,oBAAI,KAAK,GAAE,YAAY;AAAA,IACpC,CAAC;AAGD,QAAI,YAAY;AACd,iBAAW,SAAS;AACpB,iBAAW,gBAAe,oBAAI,KAAK,GAAE,YAAY;AACjD,qBAAe,UAAU;AAAA,IAC3B;AAEA,WAAO,EAAE,SAAS,KAAK;AAAA,EACzB,SAAS,OAAgB;AACvB,UAAM,MAAM,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;AACjE,WAAO;AAAA,MACL,SAAS;AAAA,MACT,OAAO,2BAA2B,GAAG;AAAA,IACvC;AAAA,EACF;AACF;AAKO,SAAS,sBAAoC;AAClD,QAAM,SAAS,kBAAkB;AACjC,SAAO,OAAO;AAAA,IACZ,CAAC,UAAU,MAAM,WAAW,aAAa,CAAC,MAAM;AAAA,EAClD;AACF;AAKO,SAAS,aAAa,SAAoC;AAC/D,QAAM,eAAe,iBAAiB,OAAO;AAC7C,QAAM,QAAQ,cAAc,YAAY;AAExC,MAAI,CAAC,OAAO;AACV,WAAO;AAAA,EACT;AAGA,MAAI,CAAC,MAAM,GAAI,OAAM,KAAK;AAC1B,MAAI,CAAC,MAAM,aAAa,CAAC,MAAM,OAAO;AACpC,YAAQ,MAAM,2BAA2B,YAAY,yCAAyC;AAC9F,WAAO;AAAA,EACT;AAGA,MAAI,cAAc,YAAY,GAAG;AAC/B,WAAO;AAAA,EACT;AAGA,QAAM,aAAaA,MAAK,YAAY,YAAY,GAAG,aAAa;AAChE,MAAI,SAAS,EAAE,qBAAqB,GAAG,WAAW,GAAG,eAAe,EAAE;AACtE,MAAIL,YAAW,UAAU,GAAG;AAC1B,QAAI;AACF,eAAS,EAAE,GAAG,QAAQ,GAAG,KAAK,MAAMG,cAAa,YAAY,OAAO,CAAC,EAAE;AAAA,IACzE,QAAQ;AAAA,IAAC;AAAA,EACX;AACA,SAAO,iBAAiB,OAAO,iBAAiB,KAAK;AACrD,EAAAD,eAAc,YAAY,KAAK,UAAU,QAAQ,MAAM,CAAC,CAAC;AAGzD,QAAM,iBAAiB,uBAAuB,KAAK;AAGnD,QAAM,cAAc,MAAM,QAAQ,uBAAuB,MAAM,KAAK,IAAI,CAAC;AAIzE,MAAI,MAAM,OAAO;AACf,UAAM,WAAW,oBAAoB,MAAM,KAAgB;AAC3D,QAAI,SAAS,aAAa,mBAAmB;AAC3C,8BAAwB,UAAU,MAAM,SAAS;AAAA,IACnD,OAAO;AACL,8BAAwB,MAAM,SAAS;AAAA,IACzC;AAAA,EACF;AAGA,QAAM,YAAY,iDAAiD,MAAM,KAAK,KAAK,eAAe,QAAQ,MAAM,KAAK,EAAE,QAAQ,OAAO,KAAK,CAAC;AAC5I,gBAAc,cAAc,MAAM,WAAW,WAAW;AAAA,IACtD,KAAK;AAAA,
MACH,qBAAqB;AAAA,MACrB,qBAAqB,MAAM,WAAW;AAAA,MACtC,yBAAyB,MAAM,SAAS;AAAA,MACxC,sCAAsC;AAAA,MACtC,GAAG;AAAA,IACL;AAAA,EACF,CAAC;AAGD,QAAM,SAAS;AACf,QAAM,gBAAe,oBAAI,KAAK,GAAE,YAAY;AAC5C,iBAAe,KAAK;AAEpB,SAAO;AACT;AAKA,SAAS,uBAAuB,OAA2B;AACzD,QAAM,QAAkB;AAAA,IACtB;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA,YAAY,MAAM,OAAO;AAAA,IACzB,gBAAgB,MAAM,SAAS;AAAA,IAC/B,cAAc,MAAM,SAAS;AAAA,IAC7B;AAAA,IACA;AAAA,IACA,0CAA0C,MAAM,UAAU;AAAA,IAC1D;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF;AAGA,QAAM,EAAE,QAAQ,IAAI,UAAU,MAAM,EAAE;AACtC,MAAI,SAAS;AACX,UAAM,mBAAmB,yBAAyB,MAAM,EAAE;AAC1D,QAAI,kBAAkB;AACpB,YAAM,KAAK,KAAK;AAChB,YAAM,KAAK,EAAE;AACb,YAAM,KAAK,gBAAgB;AAAA,IAC7B;AAAA,EACF;AAEA,SAAO,MAAM,KAAK,IAAI;AACxB;AAKO,SAAS,oBAA+D;AAC7E,QAAM,UAAU,oBAAoB;AACpC,QAAM,YAAsB,CAAC;AAC7B,QAAM,SAAmB,CAAC;AAE1B,aAAW,SAAS,SAAS;AAC3B,QAAI;AACF,YAAM,SAAS,aAAa,MAAM,EAAE;AACpC,UAAI,QAAQ;AACV,kBAAU,KAAK,MAAM,EAAE;AAAA,MACzB,OAAO;AACL,eAAO,KAAK,MAAM,EAAE;AAAA,MACtB;AAAA,IACF,SAAS,OAAO;AACd,aAAO,KAAK,MAAM,EAAE;AAAA,IACtB;AAAA,EACF;AAEA,SAAO,EAAE,WAAW,OAAO;AAC7B;AAKA,SAAS,qBAA2B;AAClC,QAAM,eAAeG,MAAK,QAAQ,GAAG,WAAW,eAAe;AAC/D,QAAM,WAAWA,MAAK,QAAQ,GAAG,eAAe,OAAO,gBAAgB;AAGvE,MAAIL,YAAW,YAAY,GAAG;AAC5B,QAAI;AACF,YAAM,kBAAkBG,cAAa,cAAc,OAAO;AAC1D,YAAM,WAAW,KAAK,MAAM,eAAe;AAC3C,YAAM,cAAc,UAAU,OAAO,eAAe,CAAC;AAErD,YAAM,iBAAiB,YAAY;AAAA,QAAK,CAAC,eACvC,WAAW,OAAO;AAAA,UAAK,CAAC,SACtB,KAAK,YAAY,YACjB,KAAK,SAAS,SAAS,YAAY,KACnC,KAAK,SAAS,SAAS,gBAAgB;AAAA,QACzC;AAAA,MACF;AAEA,UAAI,gBAAgB;AAClB;AAAA,MACF;AAAA,IACF,QAAQ;AAAA,IAER;AAAA,EACF;AAGA,MAAI;AACF,YAAQ,IAAI,2CAA2C;AAGvD,SAAK,mBAAmB,CAAC,UAAwB;AAC/C,UAAI,OAAO;AACT,gBAAQ,KAAK,wEAAmE;AAAA,MAClF,OAAO;AACL,gBAAQ,IAAI,mCAA8B;AAAA,MAC5C;AAAA,IACF,CAAC;AAAA,EACH,SAAS,OAAO;AACd,YAAQ,KAAK,wEAAmE;AAAA,EAClF;AACF;AAKA,SAAS,eAAe,SAAiB,SAAuB;AAC9D,QAAM,WAAWE,MAAK,YAAY,OAAO,CAAC;AAC1C,EAAAJ,WAAU,UAAU,EAAE,WAAW,KAAK,CAAC;AAEvC,QAAM,YAAYI,MAAK,UAAU,mBAAmB;AACpD,EAAAH;AAAA,IACE;AAAA,IACA,KAAK,UAAU;AAAA,MACb,IAAI;AAAA,MACJ,OAAO,cAAc,OAAO;AAAA,MAC5
B,aAAY,oBAAI,KAAK,GAAE,YAAY;AAAA,IACrC,GAAG,MAAM,CAAC;AAAA,EACZ;AACF;AAQA,eAAe,yBAAyB,SAAiB,aAAoC;AAE3F,QAAM,cAAcG,MAAK,aAAa,WAAW,UAAU;AAC3D,MAAI,WAAW;AACf,QAAM,cAAc;AACpB,QAAM,UAAU;AAEhB,SAAO,WAAW,aAAa;AAE7B,QAAIL,YAAW,WAAW,GAAG;AAE3B,YAAM,WAAWI,aAAY,aAAa,EAAE,eAAe,KAAK,CAAC,EAC9D,OAAO,OAAK,EAAE,YAAY,CAAC,EAC3B,IAAI,QAAM;AAAA,QACT,MAAM,EAAE;AAAA,QACR,MAAMC,MAAK,aAAa,EAAE,IAAI;AAAA,QAC9B,OAAOL,YAAWK,MAAK,aAAa,EAAE,MAAM,iBAAiB,CAAC,IAC1DF,cAAaE,MAAK,aAAa,EAAE,MAAM,iBAAiB,GAAG,OAAO,IAClE;AAAA,MACN,EAAE,EACD,OAAO,OAAK;AAEX,YAAI,CAAC,EAAE,MAAO,QAAO;AACrB,YAAI;AACF,gBAAM,QAAQ,KAAK,MAAM,EAAE,KAAK;AAChC,iBAAO,MAAM,aAAa,WAAW,MAAM,YAAY;AAAA,QACzD,QAAQ;AACN,iBAAO;AAAA,QACT;AAAA,MACF,CAAC,EACA,KAAK,CAAC,GAAG,MAAM;AAEd,cAAM,QAAQL,YAAWK,MAAK,EAAE,MAAM,iBAAiB,CAAC,IACnD,SAASA,MAAK,EAAE,MAAM,iBAAiB,CAAC,EAAE,WAAW,IACtD;AACJ,cAAM,QAAQL,YAAWK,MAAK,EAAE,MAAM,iBAAiB,CAAC,IACnD,SAASA,MAAK,EAAE,MAAM,iBAAiB,CAAC,EAAE,WAAW,IACtD;AACJ,eAAO,QAAQ;AAAA,MACjB,CAAC;AAEH,UAAI,SAAS,SAAS,GAAG;AAEvB,cAAM,QAAQ,cAAc,OAAO;AACnC,YAAI,OAAO;AACT,gBAAM,oBAAoB,SAAS,CAAC,EAAE;AACtC,yBAAe,KAAK;AACpB,kBAAQ,IAAI,6CAA6C,OAAO,KAAK,SAAS,CAAC,EAAE,IAAI,EAAE;AACvF;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAGA,UAAM,IAAI,QAAQ,CAAAC,aAAW,WAAWA,UAAS,OAAO,CAAC;AACzD;AAAA,EACF;AAEA,QAAM,IAAI,MAAM,qCAAqC,OAAO,UAAU,cAAc,OAAO,IAAI;AACjG;AArsCA,IAmBM,WAGA;AAtBN;AAAA;AAAA;AAKA;AACA;AACA;AACA;AAEA,IAAAG;AACA;AACA;AACA;AACA;AACA;AAEA;AAEA,IAAM,YAAY,UAAU,IAAI;AAGhC,IAAM,iBAAiB,CAAC,UAAU,WAAW;AAAA;AAAA;","names":["readFileSync","writeFileSync","existsSync","mkdirSync","join","init_config","existsSync","mkdirSync","writeFileSync","readFileSync","readdirSync","join","resolve","mailDir","timestamp","init_config"]}
@@ -415,11 +415,15 @@ function setReviewStatus(issueId, update, filePath = DEFAULT_STATUS_FILE) {
415
415
  if (update.testStatus && update.testStatus !== existing.testStatus) {
416
416
  history.push({ type: "test", status: update.testStatus, timestamp: now, notes: update.testNotes });
417
417
  }
418
+ if (update.uatStatus && update.uatStatus !== existing.uatStatus) {
419
+ history.push({ type: "uat", status: update.uatStatus, timestamp: now, notes: update.uatNotes });
420
+ }
418
421
  if (update.mergeStatus && update.mergeStatus !== existing.mergeStatus) {
419
422
  history.push({ type: "merge", status: update.mergeStatus, timestamp: now });
420
423
  }
421
424
  while (history.length > 10) history.shift();
422
- const readyForMerge = update.readyForMerge !== void 0 ? update.readyForMerge : merged.reviewStatus === "passed" && merged.testStatus === "passed" && merged.mergeStatus !== "merged";
425
+ const readyForMerge = update.readyForMerge !== void 0 ? update.readyForMerge : merged.reviewStatus === "passed" && merged.testStatus === "passed" && merged.mergeStatus !== "merged" && // If UAT has been initiated, it must pass too
426
+ (merged.uatStatus === void 0 || merged.uatStatus === "passed");
423
427
  const updated = {
424
428
  ...merged,
425
429
  issueId,
@@ -483,4 +487,4 @@ export {
483
487
  clearReviewStatus,
484
488
  init_review_status
485
489
  };
486
- //# sourceMappingURL=chunk-IZIXJYXZ.js.map
490
+ //# sourceMappingURL=chunk-TA5X4QYQ.js.map
@@ -1 +1 @@
1
- {"version":3,"sources":["../src/lib/database/schema.ts","../src/lib/database/index.ts","../src/lib/database/review-status-db.ts","../src/lib/review-status.ts"],"sourcesContent":["/**\n * Panopticon Database Schema\n *\n * Defines the unified schema for panopticon.db.\n * All persistent application state lives here.\n */\n\nimport type Database from 'better-sqlite3';\n\n// Schema version — increment when making breaking schema changes\nexport const SCHEMA_VERSION = 3;\n\n/**\n * Initialize the complete database schema.\n * Idempotent — uses CREATE TABLE IF NOT EXISTS throughout.\n */\nexport function initSchema(db: Database.Database): void {\n db.exec(`\n -- ===== Cost Events =====\n CREATE TABLE IF NOT EXISTS cost_events (\n id INTEGER PRIMARY KEY AUTOINCREMENT,\n ts TEXT NOT NULL,\n agent_id TEXT NOT NULL,\n issue_id TEXT NOT NULL,\n session_type TEXT NOT NULL DEFAULT 'unknown',\n provider TEXT NOT NULL DEFAULT 'anthropic',\n model TEXT NOT NULL,\n input INTEGER NOT NULL DEFAULT 0,\n output INTEGER NOT NULL DEFAULT 0,\n cache_read INTEGER NOT NULL DEFAULT 0,\n cache_write INTEGER NOT NULL DEFAULT 0,\n cost REAL NOT NULL DEFAULT 0,\n request_id TEXT,\n session_id TEXT, -- Claude Code session UUID (for reconciler offset tracking)\n -- TLDR metrics\n tldr_interceptions INTEGER,\n tldr_bypasses INTEGER,\n tldr_tokens_saved INTEGER,\n tldr_bypass_reasons TEXT, -- JSON string\n -- WAL source tracking\n source_file TEXT -- path of WAL file this came from (for imports)\n );\n\n CREATE UNIQUE INDEX IF NOT EXISTS idx_cost_request_id\n ON cost_events(request_id) WHERE request_id IS NOT NULL;\n\n CREATE INDEX IF NOT EXISTS idx_cost_issue_id\n ON cost_events(issue_id, ts);\n\n CREATE INDEX IF NOT EXISTS idx_cost_agent_id\n ON cost_events(agent_id, ts);\n\n CREATE INDEX IF NOT EXISTS idx_cost_ts\n ON cost_events(ts);\n\n CREATE INDEX IF NOT EXISTS idx_cost_session_id\n ON cost_events(session_id) WHERE session_id IS NOT NULL;\n\n -- ===== Review Status =====\n CREATE TABLE IF 
NOT EXISTS review_status (\n issue_id TEXT PRIMARY KEY,\n review_status TEXT NOT NULL DEFAULT 'pending',\n test_status TEXT NOT NULL DEFAULT 'pending',\n merge_status TEXT,\n verification_status TEXT,\n verification_notes TEXT,\n verification_cycle_count INTEGER DEFAULT 0,\n verification_max_cycles INTEGER,\n review_notes TEXT,\n test_notes TEXT,\n merge_notes TEXT,\n updated_at TEXT NOT NULL,\n ready_for_merge INTEGER NOT NULL DEFAULT 0,\n auto_requeue_count INTEGER DEFAULT 0,\n pr_url TEXT\n );\n\n CREATE INDEX IF NOT EXISTS idx_review_status_updated\n ON review_status(updated_at);\n\n -- ===== Status History =====\n CREATE TABLE IF NOT EXISTS status_history (\n id INTEGER PRIMARY KEY AUTOINCREMENT,\n issue_id TEXT NOT NULL,\n type TEXT NOT NULL, -- 'review', 'test', 'merge'\n status TEXT NOT NULL,\n timestamp TEXT NOT NULL,\n notes TEXT,\n FOREIGN KEY (issue_id) REFERENCES review_status(issue_id) ON DELETE CASCADE\n );\n\n CREATE INDEX IF NOT EXISTS idx_status_history_issue\n ON status_history(issue_id, timestamp);\n\n -- UNIQUE constraint enables INSERT OR IGNORE deduplication in upsertReviewStatus\n CREATE UNIQUE INDEX IF NOT EXISTS idx_status_history_unique\n ON status_history(issue_id, type, status, timestamp);\n\n -- ===== Health Events =====\n CREATE TABLE IF NOT EXISTS health_events (\n id INTEGER PRIMARY KEY AUTOINCREMENT,\n agent_id TEXT NOT NULL,\n timestamp TEXT NOT NULL,\n state TEXT NOT NULL,\n previous_state TEXT,\n source TEXT,\n metadata TEXT -- JSON string\n );\n\n CREATE INDEX IF NOT EXISTS idx_health_agent_timestamp\n ON health_events(agent_id, timestamp);\n\n CREATE INDEX IF NOT EXISTS idx_health_timestamp\n ON health_events(timestamp);\n\n -- ===== Processed Sessions (for reconciler offset tracking) =====\n CREATE TABLE IF NOT EXISTS processed_sessions (\n session_id TEXT PRIMARY KEY,\n agent_id TEXT,\n issue_id TEXT,\n transcript_path TEXT, -- full path to the .jsonl file\n byte_offset INTEGER NOT NULL DEFAULT 0, -- bytes consumed so far\n 
processed_at TEXT NOT NULL,\n event_count INTEGER NOT NULL DEFAULT 0\n );\n\n -- ===== API Cache =====\n CREATE TABLE IF NOT EXISTS api_cache (\n key TEXT PRIMARY KEY,\n value TEXT NOT NULL, -- JSON string\n expires_at TEXT,\n created_at TEXT NOT NULL\n );\n\n -- ===== Rate Limits =====\n CREATE TABLE IF NOT EXISTS rate_limits (\n service TEXT PRIMARY KEY,\n requests INTEGER NOT NULL DEFAULT 0,\n window_start TEXT NOT NULL,\n limit_per_window INTEGER NOT NULL DEFAULT 1000\n );\n `);\n\n // Record schema version\n db.pragma(`user_version = ${SCHEMA_VERSION}`);\n}\n\n/**\n * Run schema migrations if the database version is older than SCHEMA_VERSION.\n * This function handles upgrading from older schema versions.\n */\nexport function runMigrations(db: Database.Database): void {\n const currentVersion = db.pragma('user_version', { simple: true }) as number;\n\n if (currentVersion === SCHEMA_VERSION) {\n return; // Already at latest version\n }\n\n if (currentVersion === 0) {\n // Fresh database — just initialize the full schema\n initSchema(db);\n return;\n }\n\n // v1 → v2: add UNIQUE index on status_history for INSERT OR IGNORE dedup\n if (currentVersion < 2) {\n // Remove duplicate rows before adding the unique index (keep lowest id per unique key)\n db.exec(`\n DELETE FROM status_history\n WHERE id NOT IN (\n SELECT MIN(id)\n FROM status_history\n GROUP BY issue_id, type, status, timestamp\n );\n CREATE UNIQUE INDEX IF NOT EXISTS idx_status_history_unique\n ON status_history(issue_id, type, status, timestamp);\n `);\n }\n\n // v2 → v3: add session_id to cost_events, extend processed_sessions for reconciler\n if (currentVersion < 3) {\n // Add session_id column to cost_events (nullable, no data loss)\n try {\n db.exec(`ALTER TABLE cost_events ADD COLUMN session_id TEXT`);\n } catch {\n // Column may already exist if schema was manually applied\n }\n\n // Add index on session_id\n db.exec(`\n CREATE INDEX IF NOT EXISTS idx_cost_session_id\n ON 
cost_events(session_id) WHERE session_id IS NOT NULL;\n `);\n\n // Extend processed_sessions with new columns for reconciler\n try {\n db.exec(`ALTER TABLE processed_sessions ADD COLUMN agent_id TEXT`);\n } catch { /* already exists */ }\n try {\n db.exec(`ALTER TABLE processed_sessions ADD COLUMN issue_id TEXT`);\n } catch { /* already exists */ }\n try {\n db.exec(`ALTER TABLE processed_sessions ADD COLUMN transcript_path TEXT`);\n } catch { /* already exists */ }\n try {\n db.exec(`ALTER TABLE processed_sessions ADD COLUMN byte_offset INTEGER NOT NULL DEFAULT 0`);\n } catch { /* already exists */ }\n }\n\n // After all migrations, set the version\n db.pragma(`user_version = ${SCHEMA_VERSION}`);\n}\n","/**\n * Panopticon Unified Database\n *\n * Single panopticon.db at ~/.panopticon/panopticon.db.\n * Singleton pattern — one connection shared across the process.\n *\n * IMPORTANT: This module is safe to import in both server and CLI contexts.\n * Never use execSync here — this is synchronous SQLite, not a subprocess.\n */\n\nimport Database from 'better-sqlite3';\nimport { join } from 'path';\nimport { existsSync, mkdirSync } from 'fs';\nimport { getPanopticonHome } from '../paths.js';\nimport { runMigrations } from './schema.js';\n\nlet _db: Database.Database | null = null;\n\n/**\n * Get the path to panopticon.db (dynamic, respects PANOPTICON_HOME override for tests)\n */\nexport function getDatabasePath(): string {\n return join(getPanopticonHome(), 'panopticon.db');\n}\n\n/**\n * Initialize and return the singleton database connection.\n * Safe to call multiple times — returns the existing connection after first call.\n */\nexport function getDatabase(): Database.Database {\n if (_db) {\n return _db;\n }\n\n const home = getPanopticonHome();\n if (!existsSync(home)) {\n mkdirSync(home, { recursive: true });\n }\n\n const dbPath = getDatabasePath();\n _db = new Database(dbPath);\n\n // Enable WAL mode for concurrent readers + single writer\n 
_db.pragma('journal_mode = WAL');\n // Enforce foreign keys\n _db.pragma('foreign_keys = ON');\n // Write-ahead log synchronization — NORMAL is safe and fast\n _db.pragma('synchronous = NORMAL');\n\n // Initialize or migrate schema\n runMigrations(_db);\n\n return _db;\n}\n\n/**\n * Close the database connection and release the singleton.\n * Primarily used in tests to get a fresh connection.\n */\nexport function closeDatabase(): void {\n if (_db) {\n _db.close();\n _db = null;\n }\n}\n\n/**\n * Force re-initialization of the database connection.\n * Used in tests after PANOPTICON_HOME changes.\n */\nexport function resetDatabase(): void {\n closeDatabase();\n}\n","/**\n * Review Status SQLite Storage\n *\n * Provides SQLite-backed CRUD for ReviewStatus, matching the interface in\n * src/lib/review-status.ts. Atomic single-transaction writes eliminate the\n * TOCTOU race in the JSON-backed implementation.\n */\n\nimport { getDatabase } from './index.js';\nimport type { ReviewStatus, StatusHistoryEntry } from '../review-status.js';\n\n// ============== Write operations ==============\n\n/**\n * Upsert a review status record atomically.\n * Replaces the JSON read-modify-write cycle with a single transaction.\n */\nexport function upsertReviewStatus(status: ReviewStatus): void {\n const db = getDatabase();\n\n const upsert = db.transaction((s: ReviewStatus) => {\n // Upsert main record\n db.prepare(`\n INSERT INTO review_status (\n issue_id, review_status, test_status, merge_status,\n verification_status, verification_notes,\n verification_cycle_count, verification_max_cycles,\n review_notes, test_notes, merge_notes,\n updated_at, ready_for_merge, auto_requeue_count, pr_url\n ) VALUES (\n ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?\n )\n ON CONFLICT(issue_id) DO UPDATE SET\n review_status = excluded.review_status,\n test_status = excluded.test_status,\n merge_status = excluded.merge_status,\n verification_status = excluded.verification_status,\n verification_notes = 
excluded.verification_notes,\n verification_cycle_count = excluded.verification_cycle_count,\n verification_max_cycles = excluded.verification_max_cycles,\n review_notes = excluded.review_notes,\n test_notes = excluded.test_notes,\n merge_notes = excluded.merge_notes,\n updated_at = excluded.updated_at,\n ready_for_merge = excluded.ready_for_merge,\n auto_requeue_count = excluded.auto_requeue_count,\n pr_url = excluded.pr_url\n `).run(\n s.issueId,\n s.reviewStatus,\n s.testStatus,\n s.mergeStatus ?? null,\n s.verificationStatus ?? null,\n s.verificationNotes ?? null,\n s.verificationCycleCount ?? null,\n s.verificationMaxCycles ?? null,\n s.reviewNotes ?? null,\n s.testNotes ?? null,\n s.mergeNotes ?? null,\n s.updatedAt,\n s.readyForMerge ? 1 : 0,\n s.autoRequeueCount ?? null,\n s.prUrl ?? null,\n );\n\n // Append new history entries (deduplicate by timestamp to avoid re-inserting)\n if (s.history && s.history.length > 0) {\n const insertHistory = db.prepare(`\n INSERT OR IGNORE INTO status_history (issue_id, type, status, timestamp, notes)\n VALUES (?, ?, ?, ?, ?)\n `);\n for (const entry of s.history) {\n insertHistory.run(s.issueId, entry.type, entry.status, entry.timestamp, entry.notes ?? 
null);\n }\n }\n });\n\n upsert(status);\n}\n\n/**\n * Delete a review status record and its history.\n */\nexport function deleteReviewStatus(issueId: string): void {\n const db = getDatabase();\n db.prepare('DELETE FROM review_status WHERE issue_id = ?').run(issueId);\n}\n\n// ============== Read operations ==============\n\n/**\n * Get a single review status by issue ID.\n */\nexport function getReviewStatusFromDb(issueId: string): ReviewStatus | null {\n const db = getDatabase();\n\n const row = db.prepare(`\n SELECT * FROM review_status WHERE issue_id = ?\n `).get(issueId) as DbReviewStatusRow | undefined;\n\n if (!row) return null;\n\n const history = getHistoryFromDb(issueId);\n return rowToReviewStatus(row, history);\n}\n\n/**\n * Get all review statuses.\n */\nexport function getAllReviewStatusesFromDb(): Record<string, ReviewStatus> {\n const db = getDatabase();\n\n const rows = db.prepare('SELECT * FROM review_status ORDER BY updated_at DESC').all() as DbReviewStatusRow[];\n const result: Record<string, ReviewStatus> = {};\n\n for (const row of rows) {\n const history = getHistoryFromDb(row.issue_id);\n result[row.issue_id] = rowToReviewStatus(row, history);\n }\n\n return result;\n}\n\n/**\n * Get history entries for an issue.\n */\nfunction getHistoryFromDb(issueId: string): StatusHistoryEntry[] {\n const db = getDatabase();\n const rows = db.prepare(`\n SELECT type, status, timestamp, notes\n FROM status_history\n WHERE issue_id = ?\n ORDER BY timestamp ASC\n `).all(issueId) as Array<{ type: string; status: string; timestamp: string; notes: string | null }>;\n\n return rows.map(r => ({\n type: r.type as 'review' | 'test' | 'merge',\n status: r.status,\n timestamp: r.timestamp,\n ...(r.notes ? 
{ notes: r.notes } : {}),\n }));\n}\n\n// ============== Row mapping ==============\n\ninterface DbReviewStatusRow {\n issue_id: string;\n review_status: string;\n test_status: string;\n merge_status: string | null;\n verification_status: string | null;\n verification_notes: string | null;\n verification_cycle_count: number | null;\n verification_max_cycles: number | null;\n review_notes: string | null;\n test_notes: string | null;\n merge_notes: string | null;\n updated_at: string;\n ready_for_merge: number;\n auto_requeue_count: number | null;\n pr_url: string | null;\n}\n\nfunction rowToReviewStatus(row: DbReviewStatusRow, history: StatusHistoryEntry[]): ReviewStatus {\n return {\n issueId: row.issue_id,\n reviewStatus: row.review_status as ReviewStatus['reviewStatus'],\n testStatus: row.test_status as ReviewStatus['testStatus'],\n mergeStatus: row.merge_status as ReviewStatus['mergeStatus'] ?? undefined,\n verificationStatus: row.verification_status as ReviewStatus['verificationStatus'] ?? undefined,\n verificationNotes: row.verification_notes ?? undefined,\n verificationCycleCount: row.verification_cycle_count ?? undefined,\n verificationMaxCycles: row.verification_max_cycles ?? undefined,\n reviewNotes: row.review_notes ?? undefined,\n testNotes: row.test_notes ?? undefined,\n mergeNotes: row.merge_notes ?? undefined,\n updatedAt: row.updated_at,\n readyForMerge: row.ready_for_merge === 1,\n autoRequeueCount: row.auto_requeue_count ?? undefined,\n prUrl: row.pr_url ?? undefined,\n history: history.length > 0 ? 
history : undefined,\n };\n}\n","import { existsSync, readFileSync, writeFileSync, mkdirSync } from 'fs';\nimport { join, dirname } from 'path';\nimport { homedir } from 'os';\nimport { notifyPipeline } from './pipeline-notifier.js';\nimport {\n upsertReviewStatus as dbUpsert,\n deleteReviewStatus as dbDelete,\n getReviewStatusFromDb,\n getAllReviewStatusesFromDb,\n} from './database/review-status-db.js';\n\nexport interface StatusHistoryEntry {\n type: 'review' | 'test' | 'merge';\n status: string;\n timestamp: string;\n notes?: string;\n}\n\nexport interface ReviewStatus {\n issueId: string;\n reviewStatus: 'pending' | 'reviewing' | 'passed' | 'failed' | 'blocked';\n testStatus: 'pending' | 'testing' | 'passed' | 'failed' | 'skipped' | 'dispatch_failed';\n mergeStatus?: 'pending' | 'merging' | 'merged' | 'failed';\n verificationStatus?: 'pending' | 'running' | 'passed' | 'failed' | 'skipped';\n verificationNotes?: string;\n verificationCycleCount?: number;\n verificationMaxCycles?: number;\n reviewNotes?: string;\n testNotes?: string;\n mergeNotes?: string;\n updatedAt: string;\n readyForMerge: boolean;\n autoRequeueCount?: number;\n prUrl?: string;\n history?: StatusHistoryEntry[];\n}\n\nconst DEFAULT_STATUS_FILE = join(homedir(), '.panopticon', 'review-status.json');\n\nexport function loadReviewStatuses(filePath = DEFAULT_STATUS_FILE): Record<string, ReviewStatus> {\n // Prefer SQLite when using the default path\n if (filePath === DEFAULT_STATUS_FILE) {\n try {\n return getAllReviewStatusesFromDb();\n } catch {\n // Fall through to JSON on DB error\n }\n }\n\n try {\n if (existsSync(filePath)) {\n return JSON.parse(readFileSync(filePath, 'utf-8'));\n }\n } catch (err) {\n console.error('Failed to load review statuses:', err);\n }\n return {};\n}\n\nexport function saveReviewStatuses(statuses: Record<string, ReviewStatus>, filePath = DEFAULT_STATUS_FILE): void {\n try {\n const dir = dirname(filePath);\n if (!existsSync(dir)) {\n mkdirSync(dir, { recursive: 
true });\n }\n writeFileSync(filePath, JSON.stringify(statuses, null, 2));\n } catch (err) {\n console.error('Failed to save review statuses:', err);\n }\n}\n\nexport function setReviewStatus(\n issueId: string,\n update: Partial<ReviewStatus>,\n filePath = DEFAULT_STATUS_FILE,\n): ReviewStatus {\n const statuses = loadReviewStatuses(filePath);\n const existing = statuses[issueId] || {\n issueId,\n reviewStatus: 'pending' as const,\n testStatus: 'pending' as const,\n updatedAt: new Date().toISOString(),\n readyForMerge: false,\n };\n\n // Guard: reject reviewStatus regression from 'passed' to 'reviewing' unless the caller\n // is explicitly resetting the merge lifecycle (update includes mergeStatus).\n // This is belt-and-suspenders — endpoint-level guards should catch this first.\n if (update.reviewStatus === 'reviewing' && existing.reviewStatus === 'passed' && update.mergeStatus === undefined) {\n console.warn(`[review-status] Rejecting reviewStatus regression from 'passed' to 'reviewing' for ${issueId} (mergeStatus not being reset)`);\n return existing as ReviewStatus;\n }\n\n const merged = { ...existing, ...update };\n\n // Track status transitions in history (last 10 entries)\n const history = [...(existing.history || [])];\n const now = new Date().toISOString();\n if (update.reviewStatus && update.reviewStatus !== existing.reviewStatus) {\n history.push({ type: 'review', status: update.reviewStatus, timestamp: now, notes: update.reviewNotes });\n }\n if (update.testStatus && update.testStatus !== existing.testStatus) {\n history.push({ type: 'test', status: update.testStatus, timestamp: now, notes: update.testNotes });\n }\n if (update.mergeStatus && update.mergeStatus !== existing.mergeStatus) {\n history.push({ type: 'merge', status: update.mergeStatus, timestamp: now });\n }\n while (history.length > 10) history.shift();\n\n const readyForMerge = update.readyForMerge !== undefined\n ? 
update.readyForMerge\n : (merged.reviewStatus === 'passed' && merged.testStatus === 'passed' && merged.mergeStatus !== 'merged');\n\n const updated: ReviewStatus = {\n ...merged,\n issueId,\n updatedAt: now,\n readyForMerge,\n history,\n };\n\n // SQLite first — it is the authoritative store (reads prefer SQLite)\n if (filePath === DEFAULT_STATUS_FILE) {\n try {\n dbUpsert(updated);\n } catch (err) {\n console.error('[review-status] SQLite write failed (continuing with JSON):', err);\n }\n }\n\n // JSON second — legacy fallback for tools that read review-status.json directly\n statuses[issueId] = updated;\n saveReviewStatuses(statuses, filePath);\n\n notifyPipeline({ type: 'status_changed', issueId, status: updated });\n\n return updated;\n}\n\nexport function getReviewStatus(issueId: string, filePath = DEFAULT_STATUS_FILE): ReviewStatus | null {\n // Prefer SQLite when using the default path\n if (filePath === DEFAULT_STATUS_FILE) {\n try {\n const fromDb = getReviewStatusFromDb(issueId);\n if (fromDb) return fromDb;\n } catch {\n // Fall through to JSON on DB error\n }\n }\n const statuses = loadReviewStatuses(filePath);\n return statuses[issueId] || null;\n}\n\nexport function clearReviewStatus(issueId: string, filePath = DEFAULT_STATUS_FILE): void {\n const statuses = loadReviewStatuses(filePath);\n delete statuses[issueId];\n saveReviewStatuses(statuses, filePath);\n\n // Dual-delete from SQLite when using the default path\n if (filePath === DEFAULT_STATUS_FILE) {\n try {\n dbDelete(issueId);\n } catch (err) {\n console.error('[review-status] SQLite delete failed (continuing with JSON):', err);\n }\n 
}\n}\n"],"mappings":";;;;;;;;;;;;;;AAgBO,SAAS,WAAW,IAA6B;AACtD,KAAG,KAAK;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,GA4HP;AAGD,KAAG,OAAO,kBAAkB,cAAc,EAAE;AAC9C;AAMO,SAAS,cAAc,IAA6B;AACzD,QAAM,iBAAiB,GAAG,OAAO,gBAAgB,EAAE,QAAQ,KAAK,CAAC;AAEjE,MAAI,mBAAmB,gBAAgB;AACrC;AAAA,EACF;AAEA,MAAI,mBAAmB,GAAG;AAExB,eAAW,EAAE;AACb;AAAA,EACF;AAGA,MAAI,iBAAiB,GAAG;AAEtB,OAAG,KAAK;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,KASP;AAAA,EACH;AAGA,MAAI,iBAAiB,GAAG;AAEtB,QAAI;AACF,SAAG,KAAK,oDAAoD;AAAA,IAC9D,QAAQ;AAAA,IAER;AAGA,OAAG,KAAK;AAAA;AAAA;AAAA,KAGP;AAGD,QAAI;AACF,SAAG,KAAK,yDAAyD;AAAA,IACnE,QAAQ;AAAA,IAAuB;AAC/B,QAAI;AACF,SAAG,KAAK,yDAAyD;AAAA,IACnE,QAAQ;AAAA,IAAuB;AAC/B,QAAI;AACF,SAAG,KAAK,gEAAgE;AAAA,IAC1E,QAAQ;AAAA,IAAuB;AAC/B,QAAI;AACF,SAAG,KAAK,kFAAkF;AAAA,IAC5F,QAAQ;AAAA,IAAuB;AAAA,EACjC;AAGA,KAAG,OAAO,kBAAkB,cAAc,EAAE;AAC9C;AAnNA,IAUa;AAVb;AAAA;AAAA;AAAA;AAUO,IAAM,iBAAiB;AAAA;AAAA;;;ACA9B,OAAO,cAAc;AACrB,SAAS,YAAY;AACrB,SAAS,YAAY,iBAAiB;AAS/B,SAAS,kBAA0B;AACxC,SAAO,KAAK,kBAAkB,GAAG,eAAe;AAClD;AAMO,SAAS,cAAiC;AAC/C,MAAI,KAAK;AACP,WAAO;AAAA,EACT;AAEA,QAAM,OAAO,kBAAkB;AAC/B,MAAI,CAAC,WAAW,IAAI,GAAG;AACrB,cAAU,MAAM,EAAE,WAAW,KAAK,CAAC;AAAA,EACrC;AAEA,QAAM,SAAS,gBAAgB;AAC/B,QAAM,IAAI,SAAS,MAAM;AAGzB,MAAI,OAAO,oBAAoB;AAE/B,MAAI,OAAO,mBAAmB;AAE9B,MAAI,OAAO,sBAAsB;AAGjC,gBAAc,GAAG;AAEjB,SAAO;AACT;AAMO,SAAS,gBAAsB;AACpC,MAAI,KAAK;AACP,QAAI,MAAM;AACV,UAAM;AAAA,EACR;AACF;AAhEA,IAgBI;AAhBJ;AAAA;AAAA;AAAA;AAaA;AACA;A
AEA,IAAI,MAAgC;AAAA;AAAA;;;ACC7B,SAAS,mBAAmB,QAA4B;AAC7D,QAAM,KAAK,YAAY;AAEvB,QAAM,SAAS,GAAG,YAAY,CAAC,MAAoB;AAEjD,OAAG,QAAQ;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,KAyBV,EAAE;AAAA,MACD,EAAE;AAAA,MACF,EAAE;AAAA,MACF,EAAE;AAAA,MACF,EAAE,eAAe;AAAA,MACjB,EAAE,sBAAsB;AAAA,MACxB,EAAE,qBAAqB;AAAA,MACvB,EAAE,0BAA0B;AAAA,MAC5B,EAAE,yBAAyB;AAAA,MAC3B,EAAE,eAAe;AAAA,MACjB,EAAE,aAAa;AAAA,MACf,EAAE,cAAc;AAAA,MAChB,EAAE;AAAA,MACF,EAAE,gBAAgB,IAAI;AAAA,MACtB,EAAE,oBAAoB;AAAA,MACtB,EAAE,SAAS;AAAA,IACb;AAGA,QAAI,EAAE,WAAW,EAAE,QAAQ,SAAS,GAAG;AACrC,YAAM,gBAAgB,GAAG,QAAQ;AAAA;AAAA;AAAA,OAGhC;AACD,iBAAW,SAAS,EAAE,SAAS;AAC7B,sBAAc,IAAI,EAAE,SAAS,MAAM,MAAM,MAAM,QAAQ,MAAM,WAAW,MAAM,SAAS,IAAI;AAAA,MAC7F;AAAA,IACF;AAAA,EACF,CAAC;AAED,SAAO,MAAM;AACf;AAKO,SAAS,mBAAmB,SAAuB;AACxD,QAAM,KAAK,YAAY;AACvB,KAAG,QAAQ,8CAA8C,EAAE,IAAI,OAAO;AACxE;AAOO,SAAS,sBAAsB,SAAsC;AAC1E,QAAM,KAAK,YAAY;AAEvB,QAAM,MAAM,GAAG,QAAQ;AAAA;AAAA,GAEtB,EAAE,IAAI,OAAO;AAEd,MAAI,CAAC,IAAK,QAAO;AAEjB,QAAM,UAAU,iBAAiB,OAAO;AACxC,SAAO,kBAAkB,KAAK,OAAO;AACvC;AAKO,SAAS,6BAA2D;AACzE,QAAM,KAAK,YAAY;AAEvB,QAAM,OAAO,GAAG,QAAQ,sDAAsD,EAAE,IAAI;AACpF,QAAM,SAAuC,CAAC;AAE9C,aAAW,OAAO,MAAM;AACtB,UAAM,UAAU,iBAAiB,IAAI,QAAQ;AAC7C,WAAO,IAAI,QAAQ,IAAI,kBAAkB,KAAK,OAAO;AAAA,EACvD;AAEA,SAAO;AACT;AAKA,SAAS,iBAAiB,SAAuC;AAC/D,QAAM,KAAK,YAAY;AACvB,QAAM,OAAO,GAAG,QAAQ;AAAA;AAAA;AAAA;AAAA;AAAA,GAKvB,EAAE,IAAI,OAAO;AAEd,SAAO,KAAK,IAAI,QAAM;AAAA,IACpB,MAAM,EAAE;AAAA,IACR,QAAQ,EAAE;AAAA,IACV,WAAW,EAAE;AAAA,IACb,GAAI,EAAE,QAAQ,EAAE,OAAO,EAAE,MAAM,IAAI,CAAC;AAAA,EACtC,EAAE;AACJ;AAsBA,SAAS,kBAAkB,KAAwB,SAA6C;AAC9F,SAAO;AAAA,IACL,SAAS,IAAI;AAAA,IACb,cAAc,IAAI;AAAA,IAClB,YAAY,IAAI;AAAA,IAChB,aAAa,IAAI,gBAA+C;AAAA,IAChE,oBAAoB,IAAI,uBAA6D;AAAA,IACrF,mBAAmB,IAAI,sBAAsB;AAAA,IAC7C,wBAAwB,IAAI,4BAA4B;AAAA,IACxD,uBAAuB,IAAI,2BAA2B;AAAA,IACtD,aAAa,IAAI,gBAAgB;AAAA,IACjC,WAAW,IAAI,cAAc;AAAA,IAC7B,YAAY,IAAI,eAAe;AAAA,IAC/B,WAAW,IAAI;AAAA,IACf,eAAe,IAAI,oBAAoB;AAAA,IACvC,kBAAkB,IAAI
,sBAAsB;AAAA,IAC5C,OAAO,IAAI,UAAU;AAAA,IACrB,SAAS,QAAQ,SAAS,IAAI,UAAU;AAAA,EAC1C;AACF;AAtLA;AAAA;AAAA;AAAA;AAQA;AAAA;AAAA;;;ACRA,SAAS,cAAAA,aAAY,cAAc,eAAe,aAAAC,kBAAiB;AACnE,SAAS,QAAAC,OAAM,eAAe;AAC9B,SAAS,eAAe;AAqCjB,SAAS,mBAAmB,WAAW,qBAAmD;AAE/F,MAAI,aAAa,qBAAqB;AACpC,QAAI;AACF,aAAO,2BAA2B;AAAA,IACpC,QAAQ;AAAA,IAER;AAAA,EACF;AAEA,MAAI;AACF,QAAIF,YAAW,QAAQ,GAAG;AACxB,aAAO,KAAK,MAAM,aAAa,UAAU,OAAO,CAAC;AAAA,IACnD;AAAA,EACF,SAAS,KAAK;AACZ,YAAQ,MAAM,mCAAmC,GAAG;AAAA,EACtD;AACA,SAAO,CAAC;AACV;AAEO,SAAS,mBAAmB,UAAwC,WAAW,qBAA2B;AAC/G,MAAI;AACF,UAAM,MAAM,QAAQ,QAAQ;AAC5B,QAAI,CAACA,YAAW,GAAG,GAAG;AACpB,MAAAC,WAAU,KAAK,EAAE,WAAW,KAAK,CAAC;AAAA,IACpC;AACA,kBAAc,UAAU,KAAK,UAAU,UAAU,MAAM,CAAC,CAAC;AAAA,EAC3D,SAAS,KAAK;AACZ,YAAQ,MAAM,mCAAmC,GAAG;AAAA,EACtD;AACF;AAEO,SAAS,gBACd,SACA,QACA,WAAW,qBACG;AACd,QAAM,WAAW,mBAAmB,QAAQ;AAC5C,QAAM,WAAW,SAAS,OAAO,KAAK;AAAA,IACpC;AAAA,IACA,cAAc;AAAA,IACd,YAAY;AAAA,IACZ,YAAW,oBAAI,KAAK,GAAE,YAAY;AAAA,IAClC,eAAe;AAAA,EACjB;AAKA,MAAI,OAAO,iBAAiB,eAAe,SAAS,iBAAiB,YAAY,OAAO,gBAAgB,QAAW;AACjH,YAAQ,KAAK,sFAAsF,OAAO,gCAAgC;AAC1I,WAAO;AAAA,EACT;AAEA,QAAM,SAAS,EAAE,GAAG,UAAU,GAAG,OAAO;AAGxC,QAAM,UAAU,CAAC,GAAI,SAAS,WAAW,CAAC,CAAE;AAC5C,QAAM,OAAM,oBAAI,KAAK,GAAE,YAAY;AACnC,MAAI,OAAO,gBAAgB,OAAO,iBAAiB,SAAS,cAAc;AACxE,YAAQ,KAAK,EAAE,MAAM,UAAU,QAAQ,OAAO,cAAc,WAAW,KAAK,OAAO,OAAO,YAAY,CAAC;AAAA,EACzG;AACA,MAAI,OAAO,cAAc,OAAO,eAAe,SAAS,YAAY;AAClE,YAAQ,KAAK,EAAE,MAAM,QAAQ,QAAQ,OAAO,YAAY,WAAW,KAAK,OAAO,OAAO,UAAU,CAAC;AAAA,EACnG;AACA,MAAI,OAAO,eAAe,OAAO,gBAAgB,SAAS,aAAa;AACrE,YAAQ,KAAK,EAAE,MAAM,SAAS,QAAQ,OAAO,aAAa,WAAW,IAAI,CAAC;AAAA,EAC5E;AACA,SAAO,QAAQ,SAAS,GAAI,SAAQ,MAAM;AAE1C,QAAM,gBAAgB,OAAO,kBAAkB,SAC3C,OAAO,gBACN,OAAO,iBAAiB,YAAY,OAAO,eAAe,YAAY,OAAO,gBAAgB;AAElG,QAAM,UAAwB;AAAA,IAC5B,GAAG;AAAA,IACH;AAAA,IACA,WAAW;AAAA,IACX;AAAA,IACA;AAAA,EACF;AAGA,MAAI,aAAa,qBAAqB;AACpC,QAAI;AACF,yBAAS,OAAO;AAAA,IAClB,SAAS,KAAK;AACZ,cAAQ,MAAM,+DAA+D,GAAG;AAAA,IAClF;AAAA,EACF;AAGA,WAAS,OAAO,IAAI;AACpB,qBAAmB,UAAU,QAAQ;AAErC,iBAAe,EAAE,MAAM,kBAAkB,SAAS,QAAQ,QAAQ,CAAC;AAE
nE,SAAO;AACT;AAEO,SAAS,gBAAgB,SAAiB,WAAW,qBAA0C;AAEpG,MAAI,aAAa,qBAAqB;AACpC,QAAI;AACF,YAAM,SAAS,sBAAsB,OAAO;AAC5C,UAAI,OAAQ,QAAO;AAAA,IACrB,QAAQ;AAAA,IAER;AAAA,EACF;AACA,QAAM,WAAW,mBAAmB,QAAQ;AAC5C,SAAO,SAAS,OAAO,KAAK;AAC9B;AAEO,SAAS,kBAAkB,SAAiB,WAAW,qBAA2B;AACvF,QAAM,WAAW,mBAAmB,QAAQ;AAC5C,SAAO,SAAS,OAAO;AACvB,qBAAmB,UAAU,QAAQ;AAGrC,MAAI,aAAa,qBAAqB;AACpC,QAAI;AACF,yBAAS,OAAO;AAAA,IAClB,SAAS,KAAK;AACZ,cAAQ,MAAM,gEAAgE,GAAG;AAAA,IACnF;AAAA,EACF;AACF;AAtKA,IAqCM;AArCN;AAAA;AAAA;AAGA;AACA;AAiCA,IAAM,sBAAsBC,MAAK,QAAQ,GAAG,eAAe,oBAAoB;AAAA;AAAA;","names":["existsSync","mkdirSync","join"]}
1
+ {"version":3,"sources":["../src/lib/database/schema.ts","../src/lib/database/index.ts","../src/lib/database/review-status-db.ts","../src/lib/review-status.ts"],"sourcesContent":["/**\n * Panopticon Database Schema\n *\n * Defines the unified schema for panopticon.db.\n * All persistent application state lives here.\n */\n\nimport type Database from 'better-sqlite3';\n\n// Schema version — increment when making breaking schema changes\nexport const SCHEMA_VERSION = 3;\n\n/**\n * Initialize the complete database schema.\n * Idempotent — uses CREATE TABLE IF NOT EXISTS throughout.\n */\nexport function initSchema(db: Database.Database): void {\n db.exec(`\n -- ===== Cost Events =====\n CREATE TABLE IF NOT EXISTS cost_events (\n id INTEGER PRIMARY KEY AUTOINCREMENT,\n ts TEXT NOT NULL,\n agent_id TEXT NOT NULL,\n issue_id TEXT NOT NULL,\n session_type TEXT NOT NULL DEFAULT 'unknown',\n provider TEXT NOT NULL DEFAULT 'anthropic',\n model TEXT NOT NULL,\n input INTEGER NOT NULL DEFAULT 0,\n output INTEGER NOT NULL DEFAULT 0,\n cache_read INTEGER NOT NULL DEFAULT 0,\n cache_write INTEGER NOT NULL DEFAULT 0,\n cost REAL NOT NULL DEFAULT 0,\n request_id TEXT,\n session_id TEXT, -- Claude Code session UUID (for reconciler offset tracking)\n -- TLDR metrics\n tldr_interceptions INTEGER,\n tldr_bypasses INTEGER,\n tldr_tokens_saved INTEGER,\n tldr_bypass_reasons TEXT, -- JSON string\n -- WAL source tracking\n source_file TEXT -- path of WAL file this came from (for imports)\n );\n\n CREATE UNIQUE INDEX IF NOT EXISTS idx_cost_request_id\n ON cost_events(request_id) WHERE request_id IS NOT NULL;\n\n CREATE INDEX IF NOT EXISTS idx_cost_issue_id\n ON cost_events(issue_id, ts);\n\n CREATE INDEX IF NOT EXISTS idx_cost_agent_id\n ON cost_events(agent_id, ts);\n\n CREATE INDEX IF NOT EXISTS idx_cost_ts\n ON cost_events(ts);\n\n CREATE INDEX IF NOT EXISTS idx_cost_session_id\n ON cost_events(session_id) WHERE session_id IS NOT NULL;\n\n -- ===== Review Status =====\n CREATE TABLE IF 
NOT EXISTS review_status (\n issue_id TEXT PRIMARY KEY,\n review_status TEXT NOT NULL DEFAULT 'pending',\n test_status TEXT NOT NULL DEFAULT 'pending',\n merge_status TEXT,\n verification_status TEXT,\n verification_notes TEXT,\n verification_cycle_count INTEGER DEFAULT 0,\n verification_max_cycles INTEGER,\n review_notes TEXT,\n test_notes TEXT,\n merge_notes TEXT,\n updated_at TEXT NOT NULL,\n ready_for_merge INTEGER NOT NULL DEFAULT 0,\n auto_requeue_count INTEGER DEFAULT 0,\n pr_url TEXT\n );\n\n CREATE INDEX IF NOT EXISTS idx_review_status_updated\n ON review_status(updated_at);\n\n -- ===== Status History =====\n CREATE TABLE IF NOT EXISTS status_history (\n id INTEGER PRIMARY KEY AUTOINCREMENT,\n issue_id TEXT NOT NULL,\n type TEXT NOT NULL, -- 'review', 'test', 'merge'\n status TEXT NOT NULL,\n timestamp TEXT NOT NULL,\n notes TEXT,\n FOREIGN KEY (issue_id) REFERENCES review_status(issue_id) ON DELETE CASCADE\n );\n\n CREATE INDEX IF NOT EXISTS idx_status_history_issue\n ON status_history(issue_id, timestamp);\n\n -- UNIQUE constraint enables INSERT OR IGNORE deduplication in upsertReviewStatus\n CREATE UNIQUE INDEX IF NOT EXISTS idx_status_history_unique\n ON status_history(issue_id, type, status, timestamp);\n\n -- ===== Health Events =====\n CREATE TABLE IF NOT EXISTS health_events (\n id INTEGER PRIMARY KEY AUTOINCREMENT,\n agent_id TEXT NOT NULL,\n timestamp TEXT NOT NULL,\n state TEXT NOT NULL,\n previous_state TEXT,\n source TEXT,\n metadata TEXT -- JSON string\n );\n\n CREATE INDEX IF NOT EXISTS idx_health_agent_timestamp\n ON health_events(agent_id, timestamp);\n\n CREATE INDEX IF NOT EXISTS idx_health_timestamp\n ON health_events(timestamp);\n\n -- ===== Processed Sessions (for reconciler offset tracking) =====\n CREATE TABLE IF NOT EXISTS processed_sessions (\n session_id TEXT PRIMARY KEY,\n agent_id TEXT,\n issue_id TEXT,\n transcript_path TEXT, -- full path to the .jsonl file\n byte_offset INTEGER NOT NULL DEFAULT 0, -- bytes consumed so far\n 
processed_at TEXT NOT NULL,\n event_count INTEGER NOT NULL DEFAULT 0\n );\n\n -- ===== API Cache =====\n CREATE TABLE IF NOT EXISTS api_cache (\n key TEXT PRIMARY KEY,\n value TEXT NOT NULL, -- JSON string\n expires_at TEXT,\n created_at TEXT NOT NULL\n );\n\n -- ===== Rate Limits =====\n CREATE TABLE IF NOT EXISTS rate_limits (\n service TEXT PRIMARY KEY,\n requests INTEGER NOT NULL DEFAULT 0,\n window_start TEXT NOT NULL,\n limit_per_window INTEGER NOT NULL DEFAULT 1000\n );\n `);\n\n // Record schema version\n db.pragma(`user_version = ${SCHEMA_VERSION}`);\n}\n\n/**\n * Run schema migrations if the database version is older than SCHEMA_VERSION.\n * This function handles upgrading from older schema versions.\n */\nexport function runMigrations(db: Database.Database): void {\n const currentVersion = db.pragma('user_version', { simple: true }) as number;\n\n if (currentVersion === SCHEMA_VERSION) {\n return; // Already at latest version\n }\n\n if (currentVersion === 0) {\n // Fresh database — just initialize the full schema\n initSchema(db);\n return;\n }\n\n // v1 → v2: add UNIQUE index on status_history for INSERT OR IGNORE dedup\n if (currentVersion < 2) {\n // Remove duplicate rows before adding the unique index (keep lowest id per unique key)\n db.exec(`\n DELETE FROM status_history\n WHERE id NOT IN (\n SELECT MIN(id)\n FROM status_history\n GROUP BY issue_id, type, status, timestamp\n );\n CREATE UNIQUE INDEX IF NOT EXISTS idx_status_history_unique\n ON status_history(issue_id, type, status, timestamp);\n `);\n }\n\n // v2 → v3: add session_id to cost_events, extend processed_sessions for reconciler\n if (currentVersion < 3) {\n // Add session_id column to cost_events (nullable, no data loss)\n try {\n db.exec(`ALTER TABLE cost_events ADD COLUMN session_id TEXT`);\n } catch {\n // Column may already exist if schema was manually applied\n }\n\n // Add index on session_id\n db.exec(`\n CREATE INDEX IF NOT EXISTS idx_cost_session_id\n ON 
cost_events(session_id) WHERE session_id IS NOT NULL;\n `);\n\n // Extend processed_sessions with new columns for reconciler\n try {\n db.exec(`ALTER TABLE processed_sessions ADD COLUMN agent_id TEXT`);\n } catch { /* already exists */ }\n try {\n db.exec(`ALTER TABLE processed_sessions ADD COLUMN issue_id TEXT`);\n } catch { /* already exists */ }\n try {\n db.exec(`ALTER TABLE processed_sessions ADD COLUMN transcript_path TEXT`);\n } catch { /* already exists */ }\n try {\n db.exec(`ALTER TABLE processed_sessions ADD COLUMN byte_offset INTEGER NOT NULL DEFAULT 0`);\n } catch { /* already exists */ }\n }\n\n // After all migrations, set the version\n db.pragma(`user_version = ${SCHEMA_VERSION}`);\n}\n","/**\n * Panopticon Unified Database\n *\n * Single panopticon.db at ~/.panopticon/panopticon.db.\n * Singleton pattern — one connection shared across the process.\n *\n * IMPORTANT: This module is safe to import in both server and CLI contexts.\n * Never use execSync here — this is synchronous SQLite, not a subprocess.\n */\n\nimport Database from 'better-sqlite3';\nimport { join } from 'path';\nimport { existsSync, mkdirSync } from 'fs';\nimport { getPanopticonHome } from '../paths.js';\nimport { runMigrations } from './schema.js';\n\nlet _db: Database.Database | null = null;\n\n/**\n * Get the path to panopticon.db (dynamic, respects PANOPTICON_HOME override for tests)\n */\nexport function getDatabasePath(): string {\n return join(getPanopticonHome(), 'panopticon.db');\n}\n\n/**\n * Initialize and return the singleton database connection.\n * Safe to call multiple times — returns the existing connection after first call.\n */\nexport function getDatabase(): Database.Database {\n if (_db) {\n return _db;\n }\n\n const home = getPanopticonHome();\n if (!existsSync(home)) {\n mkdirSync(home, { recursive: true });\n }\n\n const dbPath = getDatabasePath();\n _db = new Database(dbPath);\n\n // Enable WAL mode for concurrent readers + single writer\n 
_db.pragma('journal_mode = WAL');\n // Enforce foreign keys\n _db.pragma('foreign_keys = ON');\n // Write-ahead log synchronization — NORMAL is safe and fast\n _db.pragma('synchronous = NORMAL');\n\n // Initialize or migrate schema\n runMigrations(_db);\n\n return _db;\n}\n\n/**\n * Close the database connection and release the singleton.\n * Primarily used in tests to get a fresh connection.\n */\nexport function closeDatabase(): void {\n if (_db) {\n _db.close();\n _db = null;\n }\n}\n\n/**\n * Force re-initialization of the database connection.\n * Used in tests after PANOPTICON_HOME changes.\n */\nexport function resetDatabase(): void {\n closeDatabase();\n}\n","/**\n * Review Status SQLite Storage\n *\n * Provides SQLite-backed CRUD for ReviewStatus, matching the interface in\n * src/lib/review-status.ts. Atomic single-transaction writes eliminate the\n * TOCTOU race in the JSON-backed implementation.\n */\n\nimport { getDatabase } from './index.js';\nimport type { ReviewStatus, StatusHistoryEntry } from '../review-status.js';\n\n// ============== Write operations ==============\n\n/**\n * Upsert a review status record atomically.\n * Replaces the JSON read-modify-write cycle with a single transaction.\n */\nexport function upsertReviewStatus(status: ReviewStatus): void {\n const db = getDatabase();\n\n const upsert = db.transaction((s: ReviewStatus) => {\n // Upsert main record\n db.prepare(`\n INSERT INTO review_status (\n issue_id, review_status, test_status, merge_status,\n verification_status, verification_notes,\n verification_cycle_count, verification_max_cycles,\n review_notes, test_notes, merge_notes,\n updated_at, ready_for_merge, auto_requeue_count, pr_url\n ) VALUES (\n ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?\n )\n ON CONFLICT(issue_id) DO UPDATE SET\n review_status = excluded.review_status,\n test_status = excluded.test_status,\n merge_status = excluded.merge_status,\n verification_status = excluded.verification_status,\n verification_notes = 
excluded.verification_notes,\n verification_cycle_count = excluded.verification_cycle_count,\n verification_max_cycles = excluded.verification_max_cycles,\n review_notes = excluded.review_notes,\n test_notes = excluded.test_notes,\n merge_notes = excluded.merge_notes,\n updated_at = excluded.updated_at,\n ready_for_merge = excluded.ready_for_merge,\n auto_requeue_count = excluded.auto_requeue_count,\n pr_url = excluded.pr_url\n `).run(\n s.issueId,\n s.reviewStatus,\n s.testStatus,\n s.mergeStatus ?? null,\n s.verificationStatus ?? null,\n s.verificationNotes ?? null,\n s.verificationCycleCount ?? null,\n s.verificationMaxCycles ?? null,\n s.reviewNotes ?? null,\n s.testNotes ?? null,\n s.mergeNotes ?? null,\n s.updatedAt,\n s.readyForMerge ? 1 : 0,\n s.autoRequeueCount ?? null,\n s.prUrl ?? null,\n );\n\n // Append new history entries (deduplicate by timestamp to avoid re-inserting)\n if (s.history && s.history.length > 0) {\n const insertHistory = db.prepare(`\n INSERT OR IGNORE INTO status_history (issue_id, type, status, timestamp, notes)\n VALUES (?, ?, ?, ?, ?)\n `);\n for (const entry of s.history) {\n insertHistory.run(s.issueId, entry.type, entry.status, entry.timestamp, entry.notes ?? 
null);\n }\n }\n });\n\n upsert(status);\n}\n\n/**\n * Delete a review status record and its history.\n */\nexport function deleteReviewStatus(issueId: string): void {\n const db = getDatabase();\n db.prepare('DELETE FROM review_status WHERE issue_id = ?').run(issueId);\n}\n\n// ============== Read operations ==============\n\n/**\n * Get a single review status by issue ID.\n */\nexport function getReviewStatusFromDb(issueId: string): ReviewStatus | null {\n const db = getDatabase();\n\n const row = db.prepare(`\n SELECT * FROM review_status WHERE issue_id = ?\n `).get(issueId) as DbReviewStatusRow | undefined;\n\n if (!row) return null;\n\n const history = getHistoryFromDb(issueId);\n return rowToReviewStatus(row, history);\n}\n\n/**\n * Get all review statuses.\n */\nexport function getAllReviewStatusesFromDb(): Record<string, ReviewStatus> {\n const db = getDatabase();\n\n const rows = db.prepare('SELECT * FROM review_status ORDER BY updated_at DESC').all() as DbReviewStatusRow[];\n const result: Record<string, ReviewStatus> = {};\n\n for (const row of rows) {\n const history = getHistoryFromDb(row.issue_id);\n result[row.issue_id] = rowToReviewStatus(row, history);\n }\n\n return result;\n}\n\n/**\n * Get history entries for an issue.\n */\nfunction getHistoryFromDb(issueId: string): StatusHistoryEntry[] {\n const db = getDatabase();\n const rows = db.prepare(`\n SELECT type, status, timestamp, notes\n FROM status_history\n WHERE issue_id = ?\n ORDER BY timestamp ASC\n `).all(issueId) as Array<{ type: string; status: string; timestamp: string; notes: string | null }>;\n\n return rows.map(r => ({\n type: r.type as 'review' | 'test' | 'merge',\n status: r.status,\n timestamp: r.timestamp,\n ...(r.notes ? 
{ notes: r.notes } : {}),\n }));\n}\n\n// ============== Row mapping ==============\n\ninterface DbReviewStatusRow {\n issue_id: string;\n review_status: string;\n test_status: string;\n merge_status: string | null;\n verification_status: string | null;\n verification_notes: string | null;\n verification_cycle_count: number | null;\n verification_max_cycles: number | null;\n review_notes: string | null;\n test_notes: string | null;\n merge_notes: string | null;\n updated_at: string;\n ready_for_merge: number;\n auto_requeue_count: number | null;\n pr_url: string | null;\n}\n\nfunction rowToReviewStatus(row: DbReviewStatusRow, history: StatusHistoryEntry[]): ReviewStatus {\n return {\n issueId: row.issue_id,\n reviewStatus: row.review_status as ReviewStatus['reviewStatus'],\n testStatus: row.test_status as ReviewStatus['testStatus'],\n mergeStatus: row.merge_status as ReviewStatus['mergeStatus'] ?? undefined,\n verificationStatus: row.verification_status as ReviewStatus['verificationStatus'] ?? undefined,\n verificationNotes: row.verification_notes ?? undefined,\n verificationCycleCount: row.verification_cycle_count ?? undefined,\n verificationMaxCycles: row.verification_max_cycles ?? undefined,\n reviewNotes: row.review_notes ?? undefined,\n testNotes: row.test_notes ?? undefined,\n mergeNotes: row.merge_notes ?? undefined,\n updatedAt: row.updated_at,\n readyForMerge: row.ready_for_merge === 1,\n autoRequeueCount: row.auto_requeue_count ?? undefined,\n prUrl: row.pr_url ?? undefined,\n history: history.length > 0 ? 
history : undefined,\n };\n}\n","import { existsSync, readFileSync, writeFileSync, mkdirSync } from 'fs';\nimport { join, dirname } from 'path';\nimport { homedir } from 'os';\nimport { notifyPipeline } from './pipeline-notifier.js';\nimport {\n upsertReviewStatus as dbUpsert,\n deleteReviewStatus as dbDelete,\n getReviewStatusFromDb,\n getAllReviewStatusesFromDb,\n} from './database/review-status-db.js';\n\nexport interface StatusHistoryEntry {\n type: 'review' | 'test' | 'merge' | 'inspect' | 'uat';\n status: string;\n timestamp: string;\n notes?: string;\n}\n\nexport interface ReviewStatus {\n issueId: string;\n reviewStatus: 'pending' | 'reviewing' | 'passed' | 'failed' | 'blocked';\n testStatus: 'pending' | 'testing' | 'passed' | 'failed' | 'skipped' | 'dispatch_failed';\n mergeStatus?: 'pending' | 'merging' | 'merged' | 'failed';\n inspectStatus?: 'pending' | 'inspecting' | 'passed' | 'failed';\n inspectNotes?: string;\n uatStatus?: 'pending' | 'testing' | 'passed' | 'failed';\n uatNotes?: string;\n verificationStatus?: 'pending' | 'running' | 'passed' | 'failed' | 'skipped';\n verificationNotes?: string;\n verificationCycleCount?: number;\n verificationMaxCycles?: number;\n reviewNotes?: string;\n testNotes?: string;\n mergeNotes?: string;\n updatedAt: string;\n readyForMerge: boolean;\n autoRequeueCount?: number;\n prUrl?: string;\n history?: StatusHistoryEntry[];\n}\n\nconst DEFAULT_STATUS_FILE = join(homedir(), '.panopticon', 'review-status.json');\n\nexport function loadReviewStatuses(filePath = DEFAULT_STATUS_FILE): Record<string, ReviewStatus> {\n // Prefer SQLite when using the default path\n if (filePath === DEFAULT_STATUS_FILE) {\n try {\n return getAllReviewStatusesFromDb();\n } catch {\n // Fall through to JSON on DB error\n }\n }\n\n try {\n if (existsSync(filePath)) {\n return JSON.parse(readFileSync(filePath, 'utf-8'));\n }\n } catch (err) {\n console.error('Failed to load review statuses:', err);\n }\n return {};\n}\n\nexport function 
saveReviewStatuses(statuses: Record<string, ReviewStatus>, filePath = DEFAULT_STATUS_FILE): void {\n try {\n const dir = dirname(filePath);\n if (!existsSync(dir)) {\n mkdirSync(dir, { recursive: true });\n }\n writeFileSync(filePath, JSON.stringify(statuses, null, 2));\n } catch (err) {\n console.error('Failed to save review statuses:', err);\n }\n}\n\nexport function setReviewStatus(\n issueId: string,\n update: Partial<ReviewStatus>,\n filePath = DEFAULT_STATUS_FILE,\n): ReviewStatus {\n const statuses = loadReviewStatuses(filePath);\n const existing = statuses[issueId] || {\n issueId,\n reviewStatus: 'pending' as const,\n testStatus: 'pending' as const,\n updatedAt: new Date().toISOString(),\n readyForMerge: false,\n };\n\n // Guard: reject reviewStatus regression from 'passed' to 'reviewing' unless the caller\n // is explicitly resetting the merge lifecycle (update includes mergeStatus).\n // This is belt-and-suspenders — endpoint-level guards should catch this first.\n if (update.reviewStatus === 'reviewing' && existing.reviewStatus === 'passed' && update.mergeStatus === undefined) {\n console.warn(`[review-status] Rejecting reviewStatus regression from 'passed' to 'reviewing' for ${issueId} (mergeStatus not being reset)`);\n return existing as ReviewStatus;\n }\n\n const merged = { ...existing, ...update };\n\n // Track status transitions in history (last 10 entries)\n const history = [...(existing.history || [])];\n const now = new Date().toISOString();\n if (update.reviewStatus && update.reviewStatus !== existing.reviewStatus) {\n history.push({ type: 'review', status: update.reviewStatus, timestamp: now, notes: update.reviewNotes });\n }\n if (update.testStatus && update.testStatus !== existing.testStatus) {\n history.push({ type: 'test', status: update.testStatus, timestamp: now, notes: update.testNotes });\n }\n if (update.uatStatus && update.uatStatus !== existing.uatStatus) {\n history.push({ type: 'uat', status: update.uatStatus, timestamp: now, 
notes: update.uatNotes });\n }\n if (update.mergeStatus && update.mergeStatus !== existing.mergeStatus) {\n history.push({ type: 'merge', status: update.mergeStatus, timestamp: now });\n }\n while (history.length > 10) history.shift();\n\n // readyForMerge is true when all required gates pass.\n // If uatStatus exists (UAT specialist has been involved), it must also be 'passed'.\n const readyForMerge = update.readyForMerge !== undefined\n ? update.readyForMerge\n : (\n merged.reviewStatus === 'passed' &&\n merged.testStatus === 'passed' &&\n merged.mergeStatus !== 'merged' &&\n // If UAT has been initiated, it must pass too\n (merged.uatStatus === undefined || merged.uatStatus === 'passed')\n );\n\n const updated: ReviewStatus = {\n ...merged,\n issueId,\n updatedAt: now,\n readyForMerge,\n history,\n };\n\n // SQLite first — it is the authoritative store (reads prefer SQLite)\n if (filePath === DEFAULT_STATUS_FILE) {\n try {\n dbUpsert(updated);\n } catch (err) {\n console.error('[review-status] SQLite write failed (continuing with JSON):', err);\n }\n }\n\n // JSON second — legacy fallback for tools that read review-status.json directly\n statuses[issueId] = updated;\n saveReviewStatuses(statuses, filePath);\n\n notifyPipeline({ type: 'status_changed', issueId, status: updated });\n\n return updated;\n}\n\nexport function getReviewStatus(issueId: string, filePath = DEFAULT_STATUS_FILE): ReviewStatus | null {\n // Prefer SQLite when using the default path\n if (filePath === DEFAULT_STATUS_FILE) {\n try {\n const fromDb = getReviewStatusFromDb(issueId);\n if (fromDb) return fromDb;\n } catch {\n // Fall through to JSON on DB error\n }\n }\n const statuses = loadReviewStatuses(filePath);\n return statuses[issueId] || null;\n}\n\nexport function clearReviewStatus(issueId: string, filePath = DEFAULT_STATUS_FILE): void {\n const statuses = loadReviewStatuses(filePath);\n delete statuses[issueId];\n saveReviewStatuses(statuses, filePath);\n\n // Dual-delete from SQLite 
when using the default path\n if (filePath === DEFAULT_STATUS_FILE) {\n try {\n dbDelete(issueId);\n } catch (err) {\n console.error('[review-status] SQLite delete failed (continuing with JSON):', err);\n }\n }\n}\n"],"mappings":";;;;;;;;;;;;;;AAgBO,SAAS,WAAW,IAA6B;AACtD,KAAG,KAAK;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,GA4HP;AAGD,KAAG,OAAO,kBAAkB,cAAc,EAAE;AAC9C;AAMO,SAAS,cAAc,IAA6B;AACzD,QAAM,iBAAiB,GAAG,OAAO,gBAAgB,EAAE,QAAQ,KAAK,CAAC;AAEjE,MAAI,mBAAmB,gBAAgB;AACrC;AAAA,EACF;AAEA,MAAI,mBAAmB,GAAG;AAExB,eAAW,EAAE;AACb;AAAA,EACF;AAGA,MAAI,iBAAiB,GAAG;AAEtB,OAAG,KAAK;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,KASP;AAAA,EACH;AAGA,MAAI,iBAAiB,GAAG;AAEtB,QAAI;AACF,SAAG,KAAK,oDAAoD;AAAA,IAC9D,QAAQ;AAAA,IAER;AAGA,OAAG,KAAK;AAAA;AAAA;AAAA,KAGP;AAGD,QAAI;AACF,SAAG,KAAK,yDAAyD;AAAA,IACnE,QAAQ;AAAA,IAAuB;AAC/B,QAAI;AACF,SAAG,KAAK,yDAAyD;AAAA,IACnE,QAAQ;AAAA,IAAuB;AAC/B,QAAI;AACF,SAAG,KAAK,gEAAgE;AAAA,IAC1E,QAAQ;AAAA,IAAuB;AAC/B,QAAI;AACF,SAAG,KAAK,kFAAkF;AAAA,IAC5F,QAAQ;AAAA,IAAuB;AAAA,EACjC;AAGA,KAAG,OAAO,kBAAkB,cAAc,EAAE;AAC9C;AAnNA,IAUa;AAVb;AAAA;AAAA;AAAA;AAUO,IAAM,iBAAiB;AAAA;AAAA;;;ACA9B,OAAO,cAAc;AACrB,SAAS,YAAY;AACrB,SAAS,YAAY,iBAAiB;AAS/B,SAAS,kBAA0B;AACxC,SAAO,KAAK,kBAAkB,GAAG,eAAe;AAClD;AAMO,SAAS,cAAiC;AAC/C,MAAI,KAAK;AACP,WAAO;AAAA,EACT;AAEA,QAAM,OAAO,kBAAkB;AAC/B,MAAI,CAAC,WAAW,IAAI,GAAG;AACrB,cAAU,MAAM,EAAE,WAAW,KAAK,CAAC;AAAA,EACrC;AAEA,QAAM,SAAS,gBAAgB;AAC/B,QAAM,IAAI,SAAS,MAAM;AAGzB,MAAI
,OAAO,oBAAoB;AAE/B,MAAI,OAAO,mBAAmB;AAE9B,MAAI,OAAO,sBAAsB;AAGjC,gBAAc,GAAG;AAEjB,SAAO;AACT;AAMO,SAAS,gBAAsB;AACpC,MAAI,KAAK;AACP,QAAI,MAAM;AACV,UAAM;AAAA,EACR;AACF;AAhEA,IAgBI;AAhBJ;AAAA;AAAA;AAAA;AAaA;AACA;AAEA,IAAI,MAAgC;AAAA;AAAA;;;ACC7B,SAAS,mBAAmB,QAA4B;AAC7D,QAAM,KAAK,YAAY;AAEvB,QAAM,SAAS,GAAG,YAAY,CAAC,MAAoB;AAEjD,OAAG,QAAQ;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,KAyBV,EAAE;AAAA,MACD,EAAE;AAAA,MACF,EAAE;AAAA,MACF,EAAE;AAAA,MACF,EAAE,eAAe;AAAA,MACjB,EAAE,sBAAsB;AAAA,MACxB,EAAE,qBAAqB;AAAA,MACvB,EAAE,0BAA0B;AAAA,MAC5B,EAAE,yBAAyB;AAAA,MAC3B,EAAE,eAAe;AAAA,MACjB,EAAE,aAAa;AAAA,MACf,EAAE,cAAc;AAAA,MAChB,EAAE;AAAA,MACF,EAAE,gBAAgB,IAAI;AAAA,MACtB,EAAE,oBAAoB;AAAA,MACtB,EAAE,SAAS;AAAA,IACb;AAGA,QAAI,EAAE,WAAW,EAAE,QAAQ,SAAS,GAAG;AACrC,YAAM,gBAAgB,GAAG,QAAQ;AAAA;AAAA;AAAA,OAGhC;AACD,iBAAW,SAAS,EAAE,SAAS;AAC7B,sBAAc,IAAI,EAAE,SAAS,MAAM,MAAM,MAAM,QAAQ,MAAM,WAAW,MAAM,SAAS,IAAI;AAAA,MAC7F;AAAA,IACF;AAAA,EACF,CAAC;AAED,SAAO,MAAM;AACf;AAKO,SAAS,mBAAmB,SAAuB;AACxD,QAAM,KAAK,YAAY;AACvB,KAAG,QAAQ,8CAA8C,EAAE,IAAI,OAAO;AACxE;AAOO,SAAS,sBAAsB,SAAsC;AAC1E,QAAM,KAAK,YAAY;AAEvB,QAAM,MAAM,GAAG,QAAQ;AAAA;AAAA,GAEtB,EAAE,IAAI,OAAO;AAEd,MAAI,CAAC,IAAK,QAAO;AAEjB,QAAM,UAAU,iBAAiB,OAAO;AACxC,SAAO,kBAAkB,KAAK,OAAO;AACvC;AAKO,SAAS,6BAA2D;AACzE,QAAM,KAAK,YAAY;AAEvB,QAAM,OAAO,GAAG,QAAQ,sDAAsD,EAAE,IAAI;AACpF,QAAM,SAAuC,CAAC;AAE9C,aAAW,OAAO,MAAM;AACtB,UAAM,UAAU,iBAAiB,IAAI,QAAQ;AAC7C,WAAO,IAAI,QAAQ,IAAI,kBAAkB,KAAK,OAAO;AAAA,EACvD;AAEA,SAAO;AACT;AAKA,SAAS,iBAAiB,SAAuC;AAC/D,QAAM,KAAK,YAAY;AACvB,QAAM,OAAO,GAAG,QAAQ;AAAA;AAAA;AAAA;AAAA;AAAA,GAKvB,EAAE,IAAI,OAAO;AAEd,SAAO,KAAK,IAAI,QAAM;AAAA,IACpB,MAAM,EAAE;AAAA,IACR,QAAQ,EAAE;AAAA,IACV,WAAW,EAAE;AAAA,IACb,GAAI,EAAE,QAAQ,EAAE,OAAO,EAAE,MAAM,IAAI,CAAC;AAAA,EACtC,EAAE;AACJ;AAsBA,SAAS,kBAAkB,KAAwB,SAA6C;AAC9F,SAAO;AAAA,IACL,SAAS,IAAI;AAAA,IACb,cAAc,IAAI;AAAA,IAClB,YAAY,IAAI;AAAA,IAChB,aAAa,IAAI,gBAA+C;AAAA,IAChE,oBAAoB,IAAI,uBAA6D;AAAA,IACrF,mBAAmB,IAAI,sBAAsB;A
AAA,IAC7C,wBAAwB,IAAI,4BAA4B;AAAA,IACxD,uBAAuB,IAAI,2BAA2B;AAAA,IACtD,aAAa,IAAI,gBAAgB;AAAA,IACjC,WAAW,IAAI,cAAc;AAAA,IAC7B,YAAY,IAAI,eAAe;AAAA,IAC/B,WAAW,IAAI;AAAA,IACf,eAAe,IAAI,oBAAoB;AAAA,IACvC,kBAAkB,IAAI,sBAAsB;AAAA,IAC5C,OAAO,IAAI,UAAU;AAAA,IACrB,SAAS,QAAQ,SAAS,IAAI,UAAU;AAAA,EAC1C;AACF;AAtLA;AAAA;AAAA;AAAA;AAQA;AAAA;AAAA;;;ACRA,SAAS,cAAAA,aAAY,cAAc,eAAe,aAAAC,kBAAiB;AACnE,SAAS,QAAAC,OAAM,eAAe;AAC9B,SAAS,eAAe;AAyCjB,SAAS,mBAAmB,WAAW,qBAAmD;AAE/F,MAAI,aAAa,qBAAqB;AACpC,QAAI;AACF,aAAO,2BAA2B;AAAA,IACpC,QAAQ;AAAA,IAER;AAAA,EACF;AAEA,MAAI;AACF,QAAIF,YAAW,QAAQ,GAAG;AACxB,aAAO,KAAK,MAAM,aAAa,UAAU,OAAO,CAAC;AAAA,IACnD;AAAA,EACF,SAAS,KAAK;AACZ,YAAQ,MAAM,mCAAmC,GAAG;AAAA,EACtD;AACA,SAAO,CAAC;AACV;AAEO,SAAS,mBAAmB,UAAwC,WAAW,qBAA2B;AAC/G,MAAI;AACF,UAAM,MAAM,QAAQ,QAAQ;AAC5B,QAAI,CAACA,YAAW,GAAG,GAAG;AACpB,MAAAC,WAAU,KAAK,EAAE,WAAW,KAAK,CAAC;AAAA,IACpC;AACA,kBAAc,UAAU,KAAK,UAAU,UAAU,MAAM,CAAC,CAAC;AAAA,EAC3D,SAAS,KAAK;AACZ,YAAQ,MAAM,mCAAmC,GAAG;AAAA,EACtD;AACF;AAEO,SAAS,gBACd,SACA,QACA,WAAW,qBACG;AACd,QAAM,WAAW,mBAAmB,QAAQ;AAC5C,QAAM,WAAW,SAAS,OAAO,KAAK;AAAA,IACpC;AAAA,IACA,cAAc;AAAA,IACd,YAAY;AAAA,IACZ,YAAW,oBAAI,KAAK,GAAE,YAAY;AAAA,IAClC,eAAe;AAAA,EACjB;AAKA,MAAI,OAAO,iBAAiB,eAAe,SAAS,iBAAiB,YAAY,OAAO,gBAAgB,QAAW;AACjH,YAAQ,KAAK,sFAAsF,OAAO,gCAAgC;AAC1I,WAAO;AAAA,EACT;AAEA,QAAM,SAAS,EAAE,GAAG,UAAU,GAAG,OAAO;AAGxC,QAAM,UAAU,CAAC,GAAI,SAAS,WAAW,CAAC,CAAE;AAC5C,QAAM,OAAM,oBAAI,KAAK,GAAE,YAAY;AACnC,MAAI,OAAO,gBAAgB,OAAO,iBAAiB,SAAS,cAAc;AACxE,YAAQ,KAAK,EAAE,MAAM,UAAU,QAAQ,OAAO,cAAc,WAAW,KAAK,OAAO,OAAO,YAAY,CAAC;AAAA,EACzG;AACA,MAAI,OAAO,cAAc,OAAO,eAAe,SAAS,YAAY;AAClE,YAAQ,KAAK,EAAE,MAAM,QAAQ,QAAQ,OAAO,YAAY,WAAW,KAAK,OAAO,OAAO,UAAU,CAAC;AAAA,EACnG;AACA,MAAI,OAAO,aAAa,OAAO,cAAc,SAAS,WAAW;AAC/D,YAAQ,KAAK,EAAE,MAAM,OAAO,QAAQ,OAAO,WAAW,WAAW,KAAK,OAAO,OAAO,SAAS,CAAC;AAAA,EAChG;AACA,MAAI,OAAO,eAAe,OAAO,gBAAgB,SAAS,aAAa;AACrE,YAAQ,KAAK,EAAE,MAAM,SAAS,QAAQ,OAAO,aAAa,WAAW,IAAI,CAAC;AAAA,EAC5E;AACA,SAAO,QAAQ,SAAS,GAAI,SAAQ,MAAM;AAI1C,QAAM,gBAAgB,OAAO,kBAAkB,SAC3C,OAAO,gBAEL,OAAO,iBAA
iB,YACxB,OAAO,eAAe,YACtB,OAAO,gBAAgB;AAAA,GAEtB,OAAO,cAAc,UAAa,OAAO,cAAc;AAG9D,QAAM,UAAwB;AAAA,IAC5B,GAAG;AAAA,IACH;AAAA,IACA,WAAW;AAAA,IACX;AAAA,IACA;AAAA,EACF;AAGA,MAAI,aAAa,qBAAqB;AACpC,QAAI;AACF,yBAAS,OAAO;AAAA,IAClB,SAAS,KAAK;AACZ,cAAQ,MAAM,+DAA+D,GAAG;AAAA,IAClF;AAAA,EACF;AAGA,WAAS,OAAO,IAAI;AACpB,qBAAmB,UAAU,QAAQ;AAErC,iBAAe,EAAE,MAAM,kBAAkB,SAAS,QAAQ,QAAQ,CAAC;AAEnE,SAAO;AACT;AAEO,SAAS,gBAAgB,SAAiB,WAAW,qBAA0C;AAEpG,MAAI,aAAa,qBAAqB;AACpC,QAAI;AACF,YAAM,SAAS,sBAAsB,OAAO;AAC5C,UAAI,OAAQ,QAAO;AAAA,IACrB,QAAQ;AAAA,IAER;AAAA,EACF;AACA,QAAM,WAAW,mBAAmB,QAAQ;AAC5C,SAAO,SAAS,OAAO,KAAK;AAC9B;AAEO,SAAS,kBAAkB,SAAiB,WAAW,qBAA2B;AACvF,QAAM,WAAW,mBAAmB,QAAQ;AAC5C,SAAO,SAAS,OAAO;AACvB,qBAAmB,UAAU,QAAQ;AAGrC,MAAI,aAAa,qBAAqB;AACpC,QAAI;AACF,yBAAS,OAAO;AAAA,IAClB,SAAS,KAAK;AACZ,cAAQ,MAAM,gEAAgE,GAAG;AAAA,IACnF;AAAA,EACF;AACF;AArLA,IAyCM;AAzCN;AAAA;AAAA;AAGA;AACA;AAqCA,IAAM,sBAAsBC,MAAK,QAAQ,GAAG,eAAe,oBAAoB;AAAA;AAAA;","names":["existsSync","mkdirSync","join"]}
@@ -5,7 +5,7 @@ import {
5
5
  import {
6
6
  init_projects,
7
7
  loadProjectsConfig
8
- } from "./chunk-ZMJFEHGF.js";
8
+ } from "./chunk-7ZB5D46Y.js";
9
9
  import {
10
10
  SOURCE_TRAEFIK_TEMPLATES,
11
11
  TRAEFIK_CERTS_DIR,
@@ -151,4 +151,4 @@ export {
151
151
  ensureProjectCerts,
152
152
  cleanupStaleTlsSections
153
153
  };
154
- //# sourceMappingURL=chunk-43F4LDZ4.js.map
154
+ //# sourceMappingURL=chunk-VVTAPQOI.js.map
@@ -1,7 +1,7 @@
1
1
  import {
2
2
  init_projects,
3
3
  loadProjectsConfig
4
- } from "./chunk-ZMJFEHGF.js";
4
+ } from "./chunk-7ZB5D46Y.js";
5
5
  import {
6
6
  init_esm_shims
7
7
  } from "./chunk-ZHC57RCV.js";
@@ -62,9 +62,34 @@ function resolveGitHubIssue(issueId) {
62
62
  function isGitHubIssue(issueId) {
63
63
  return resolveGitHubIssue(issueId).isGitHub;
64
64
  }
65
+ function resolveTrackerType(issueId) {
66
+ if (resolveGitHubIssue(issueId).isGitHub) {
67
+ return "github";
68
+ }
69
+ const prefix = extractIssuePrefix(issueId);
70
+ try {
71
+ const { projects } = loadProjectsConfig();
72
+ for (const [key, project] of Object.entries(projects)) {
73
+ if (project.linear_team?.toUpperCase() === prefix) {
74
+ return "linear";
75
+ }
76
+ if (!project.linear_team) {
77
+ const derivedPrefix = key.toUpperCase().replace(/-/g, "");
78
+ if (derivedPrefix === prefix) {
79
+ if (project.rally_project && !project.github_repo) {
80
+ return "rally";
81
+ }
82
+ }
83
+ }
84
+ }
85
+ } catch {
86
+ }
87
+ return "linear";
88
+ }
65
89
 
66
90
  export {
67
91
  resolveGitHubIssue,
68
- isGitHubIssue
92
+ isGitHubIssue,
93
+ resolveTrackerType
69
94
  };
70
- //# sourceMappingURL=chunk-YAAT66RT.js.map
95
+ //# sourceMappingURL=chunk-WP6ZLWU3.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../src/lib/tracker-utils.ts"],"sourcesContent":["/**\n * Shared tracker utilities for resolving issue IDs to their tracker type\n * (GitHub or Linear) based on GITHUB_REPOS configuration.\n *\n * Eliminates hardcoded prefix checks like `issueId.startsWith('PAN-')`.\n */\n\nimport { readFileSync, existsSync } from 'fs';\nimport { join } from 'path';\nimport { homedir } from 'os';\nimport { loadProjectsConfig } from './projects.js';\n\nexport interface GitHubRepoConfig {\n owner: string;\n repo: string;\n prefix: string;\n}\n\nexport interface GitHubIssueResolution {\n isGitHub: true;\n owner: string;\n repo: string;\n prefix: string;\n number: number;\n}\n\nexport interface NonGitHubResolution {\n isGitHub: false;\n}\n\nexport type IssueResolution = GitHubIssueResolution | NonGitHubResolution;\n\n/**\n * Parse GitHub repos from GITHUB_REPOS env var and projects.yaml.\n * Priority: GITHUB_REPOS env var first, then auto-derive from projects.yaml.\n * Format for env var: \"owner/repo:PREFIX,owner2/repo2:PREFIX2\"\n */\nexport function parseGitHubRepos(): GitHubRepoConfig[] {\n const repos: GitHubRepoConfig[] = [];\n\n // 1. Check GITHUB_REPOS env var\n const envFile = join(homedir(), '.panopticon.env');\n if (existsSync(envFile)) {\n const content = readFileSync(envFile, 'utf-8');\n const reposMatch = content.match(/GITHUB_REPOS=(.+)/);\n if (reposMatch) {\n repos.push(...reposMatch[1].trim().split(',').map(r => {\n const [repoPath, prefix] = r.trim().split(':');\n const [owner, repo] = (repoPath || '').split('/');\n return { owner: owner || '', repo: repo || '', prefix: (prefix || '').toUpperCase() };\n }).filter(r => r.owner && r.repo && r.prefix));\n }\n }\n\n // 2. 
Auto-derive from projects.yaml (if no explicit GITHUB_REPOS)\n if (repos.length === 0) {\n try {\n const { projects } = loadProjectsConfig();\n for (const [key, project] of Object.entries(projects)) {\n if (project.github_repo) {\n const [owner, repo] = project.github_repo.split('/');\n // Derive prefix: linear_team if set, otherwise uppercase project key\n const prefix = project.linear_team || key.toUpperCase().replace(/-/g, '');\n if (owner && repo && prefix) {\n repos.push({ owner, repo, prefix: prefix.toUpperCase() });\n }\n }\n }\n } catch { /* ignore */ }\n }\n\n return repos;\n}\n\n/**\n * Extract the prefix from an issue ID (e.g., \"CLI\" from \"CLI-1\", \"PAN\" from \"PAN-42\").\n */\nexport function extractIssuePrefix(issueId: string): string {\n return issueId.split('-')[0].toUpperCase();\n}\n\n/**\n * Resolve an issue ID to its GitHub repo config, or determine it's not a GitHub issue.\n *\n * Checks the issue prefix against all prefixes configured in GITHUB_REPOS.\n * Returns the matching repo config with parsed issue number, or { isGitHub: false }.\n */\nexport function resolveGitHubIssue(issueId: string): IssueResolution {\n const prefix = extractIssuePrefix(issueId);\n const repos = parseGitHubRepos();\n\n for (const repoConfig of repos) {\n if (repoConfig.prefix === prefix) {\n const number = parseInt(issueId.split('-')[1], 10);\n if (!isNaN(number)) {\n return { isGitHub: true, ...repoConfig, number };\n }\n }\n }\n\n return { isGitHub: false };\n}\n\n/**\n * Check if an issue ID belongs to a GitHub-tracked project.\n */\nexport function isGitHubIssue(issueId: string): boolean {\n return resolveGitHubIssue(issueId).isGitHub;\n}\n\nexport type TrackerTypeResolution = 'github' | 'rally' | 'linear';\n\n/**\n * Resolve the tracker type for an issue ID by checking projects.yaml configuration.\n *\n * Resolution order:\n * 1. GitHub — prefix matches a configured github_repo project\n * 2. 
Rally — prefix matches a project with rally_project but no linear_team / github_repo\n * 3. Linear — fallback (matches linear_team or unknown prefix)\n */\nexport function resolveTrackerType(issueId: string): TrackerTypeResolution {\n // Check GitHub first (existing logic)\n if (resolveGitHubIssue(issueId).isGitHub) {\n return 'github';\n }\n\n // Check if the issue prefix matches a Rally-only project\n const prefix = extractIssuePrefix(issueId);\n try {\n const { projects } = loadProjectsConfig();\n for (const [key, project] of Object.entries(projects)) {\n // Match by linear_team first (even Rally projects may have linear_team for routing)\n if (project.linear_team?.toUpperCase() === prefix) {\n // Project has linear_team matching this prefix — it's a Linear project\n // (even if it also has rally_project for cross-tracking)\n return 'linear';\n }\n\n // For projects without linear_team, derive prefix from project key\n if (!project.linear_team) {\n const derivedPrefix = key.toUpperCase().replace(/-/g, '');\n if (derivedPrefix === prefix) {\n // Prefix matches — determine tracker by what's configured\n if (project.rally_project && !project.github_repo) {\n return 'rally';\n }\n // github_repo projects are already caught by resolveGitHubIssue above\n }\n }\n }\n } catch { /* ignore config errors */ }\n\n // Default to Linear for unknown prefixes\n return 
'linear';\n}\n"],"mappings":";;;;;;;;;AAAA;AAUA;AAHA,SAAS,cAAc,kBAAkB;AACzC,SAAS,YAAY;AACrB,SAAS,eAAe;AA4BjB,SAAS,mBAAuC;AACrD,QAAM,QAA4B,CAAC;AAGnC,QAAM,UAAU,KAAK,QAAQ,GAAG,iBAAiB;AACjD,MAAI,WAAW,OAAO,GAAG;AACvB,UAAM,UAAU,aAAa,SAAS,OAAO;AAC7C,UAAM,aAAa,QAAQ,MAAM,mBAAmB;AACpD,QAAI,YAAY;AACd,YAAM,KAAK,GAAG,WAAW,CAAC,EAAE,KAAK,EAAE,MAAM,GAAG,EAAE,IAAI,OAAK;AACrD,cAAM,CAAC,UAAU,MAAM,IAAI,EAAE,KAAK,EAAE,MAAM,GAAG;AAC7C,cAAM,CAAC,OAAO,IAAI,KAAK,YAAY,IAAI,MAAM,GAAG;AAChD,eAAO,EAAE,OAAO,SAAS,IAAI,MAAM,QAAQ,IAAI,SAAS,UAAU,IAAI,YAAY,EAAE;AAAA,MACtF,CAAC,EAAE,OAAO,OAAK,EAAE,SAAS,EAAE,QAAQ,EAAE,MAAM,CAAC;AAAA,IAC/C;AAAA,EACF;AAGA,MAAI,MAAM,WAAW,GAAG;AACtB,QAAI;AACF,YAAM,EAAE,SAAS,IAAI,mBAAmB;AACxC,iBAAW,CAAC,KAAK,OAAO,KAAK,OAAO,QAAQ,QAAQ,GAAG;AACrD,YAAI,QAAQ,aAAa;AACvB,gBAAM,CAAC,OAAO,IAAI,IAAI,QAAQ,YAAY,MAAM,GAAG;AAEnD,gBAAM,SAAS,QAAQ,eAAe,IAAI,YAAY,EAAE,QAAQ,MAAM,EAAE;AACxE,cAAI,SAAS,QAAQ,QAAQ;AAC3B,kBAAM,KAAK,EAAE,OAAO,MAAM,QAAQ,OAAO,YAAY,EAAE,CAAC;AAAA,UAC1D;AAAA,QACF;AAAA,MACF;AAAA,IACF,QAAQ;AAAA,IAAe;AAAA,EACzB;AAEA,SAAO;AACT;AAKO,SAAS,mBAAmB,SAAyB;AAC1D,SAAO,QAAQ,MAAM,GAAG,EAAE,CAAC,EAAE,YAAY;AAC3C;AAQO,SAAS,mBAAmB,SAAkC;AACnE,QAAM,SAAS,mBAAmB,OAAO;AACzC,QAAM,QAAQ,iBAAiB;AAE/B,aAAW,cAAc,OAAO;AAC9B,QAAI,WAAW,WAAW,QAAQ;AAChC,YAAM,SAAS,SAAS,QAAQ,MAAM,GAAG,EAAE,CAAC,GAAG,EAAE;AACjD,UAAI,CAAC,MAAM,MAAM,GAAG;AAClB,eAAO,EAAE,UAAU,MAAM,GAAG,YAAY,OAAO;AAAA,MACjD;AAAA,IACF;AAAA,EACF;AAEA,SAAO,EAAE,UAAU,MAAM;AAC3B;AAKO,SAAS,cAAc,SAA0B;AACtD,SAAO,mBAAmB,OAAO,EAAE;AACrC;AAYO,SAAS,mBAAmB,SAAwC;AAEzE,MAAI,mBAAmB,OAAO,EAAE,UAAU;AACxC,WAAO;AAAA,EACT;AAGA,QAAM,SAAS,mBAAmB,OAAO;AACzC,MAAI;AACF,UAAM,EAAE,SAAS,IAAI,mBAAmB;AACxC,eAAW,CAAC,KAAK,OAAO,KAAK,OAAO,QAAQ,QAAQ,GAAG;AAErD,UAAI,QAAQ,aAAa,YAAY,MAAM,QAAQ;AAGjD,eAAO;AAAA,MACT;AAGA,UAAI,CAAC,QAAQ,aAAa;AACxB,cAAM,gBAAgB,IAAI,YAAY,EAAE,QAAQ,MAAM,EAAE;AACxD,YAAI,kBAAkB,QAAQ;AAE5B,cAAI,QAAQ,iBAAiB,CAAC,QAAQ,aAAa;AACjD,mBAAO;AAAA,UACT;AAAA,QAEF;AAAA,MACF;AAAA,IACF;AAAA,EACF,QAAQ;AAAA,EAA6B;AAGrC,SAAO;AACT;","names":[]}