panopticon-cli 0.5.3 → 0.5.6
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/{agents-DMPT32H7.js → agents-5HWTDR4S.js} +12 -9
- package/dist/archive-planning-U3AZAKWI.js +16 -0
- package/dist/{chunk-KBHRXV5T.js → chunk-43F4LDZ4.js} +3 -3
- package/dist/chunk-6OYUJ4AJ.js +146 -0
- package/dist/chunk-6OYUJ4AJ.js.map +1 -0
- package/dist/{chunk-MOPGR3CL.js → chunk-AAP4G6U7.js} +1 -1
- package/dist/chunk-AAP4G6U7.js.map +1 -0
- package/dist/{chunk-4HST45MO.js → chunk-BYWVPPAZ.js} +19 -12
- package/dist/chunk-BYWVPPAZ.js.map +1 -0
- package/dist/{chunk-CFCUOV3Q.js → chunk-DMRTN432.js} +4 -1
- package/dist/chunk-DMRTN432.js.map +1 -0
- package/dist/{chunk-HOGYHJ2G.js → chunk-DW3PKGIS.js} +2 -2
- package/dist/{chunk-D67AQTHF.js → chunk-FUUP55PE.js} +108 -46
- package/dist/chunk-FUUP55PE.js.map +1 -0
- package/dist/chunk-GUV2EPBG.js +692 -0
- package/dist/chunk-GUV2EPBG.js.map +1 -0
- package/dist/{chunk-44EOY2ZL.js → chunk-HHL3AWXA.js} +46 -2
- package/dist/chunk-HHL3AWXA.js.map +1 -0
- package/dist/{chunk-6N2KBSJA.js → chunk-IZIXJYXZ.js} +40 -6
- package/dist/chunk-IZIXJYXZ.js.map +1 -0
- package/dist/chunk-MJXYTGK5.js +64 -0
- package/dist/chunk-MJXYTGK5.js.map +1 -0
- package/dist/chunk-OJF4QS3S.js +269 -0
- package/dist/chunk-OJF4QS3S.js.map +1 -0
- package/dist/{chunk-FQ66DECN.js → chunk-QAJAJBFW.js} +1 -1
- package/dist/chunk-QAJAJBFW.js.map +1 -0
- package/dist/chunk-R4KPLLRB.js +36 -0
- package/dist/chunk-R4KPLLRB.js.map +1 -0
- package/dist/{chunk-DFNVHK3N.js → chunk-SUM2WVPF.js} +4 -4
- package/dist/{chunk-T7BBPDEJ.js → chunk-UKSGE6RH.js} +45 -15
- package/dist/chunk-UKSGE6RH.js.map +1 -0
- package/dist/chunk-W2OTF6OS.js +201 -0
- package/dist/chunk-W2OTF6OS.js.map +1 -0
- package/dist/chunk-WEQW3EAT.js +78 -0
- package/dist/chunk-WEQW3EAT.js.map +1 -0
- package/dist/{chunk-2V2DQ3IX.js → chunk-WJJ3ZIQ6.js} +112 -45
- package/dist/chunk-WJJ3ZIQ6.js.map +1 -0
- package/dist/chunk-YAAT66RT.js +70 -0
- package/dist/chunk-YAAT66RT.js.map +1 -0
- package/dist/{chunk-RLZQB7HS.js → chunk-ZMJFEHGF.js} +13 -1
- package/dist/chunk-ZMJFEHGF.js.map +1 -0
- package/dist/{chunk-HRU7S4TA.js → chunk-ZN5RHWGR.js} +18 -208
- package/dist/{chunk-HRU7S4TA.js.map → chunk-ZN5RHWGR.js.map} +1 -1
- package/dist/{chunk-ZTYHZMEC.js → chunk-ZWZNEA26.js} +2 -2
- package/dist/clean-planning-7Z5YY64X.js +9 -0
- package/dist/cli/index.js +1314 -2146
- package/dist/cli/index.js.map +1 -1
- package/dist/close-issue-CTZK777I.js +9 -0
- package/dist/compact-beads-72SHALOL.js +9 -0
- package/dist/{config-4CJNUE3O.js → config-FFTMBVHM.js} +2 -2
- package/dist/dashboard/public/assets/{index-BJKEp64j.css → index-Bx4NCn9A.css} +1 -1
- package/dist/dashboard/public/assets/index-Db9NOz4z.js +756 -0
- package/dist/dashboard/public/index.html +3 -2
- package/dist/dashboard/server.js +34785 -34330
- package/dist/{feedback-writer-T43PI5S2.js → feedback-writer-T2WCT6EZ.js} +2 -2
- package/dist/{hume-CKJJ3OUU.js → hume-GVTB5BKW.js} +3 -3
- package/dist/index.d.ts +24 -16
- package/dist/index.js +4 -4
- package/dist/label-cleanup-4HJVX6NP.js +103 -0
- package/dist/label-cleanup-4HJVX6NP.js.map +1 -0
- package/dist/merge-agent-WM7ZKUET.js +1725 -0
- package/dist/merge-agent-WM7ZKUET.js.map +1 -0
- package/dist/{projects-KVM3MN3Y.js → projects-3CRF57ZU.js} +2 -2
- package/dist/{rally-RKFSWC7E.js → rally-LBY24P4C.js} +2 -2
- package/dist/{remote-agents-ULPD6C5U.js → remote-agents-3NZPSHYG.js} +2 -3
- package/dist/{remote-workspace-XX6ARE6I.js → remote-workspace-M4IULGFZ.js} +24 -49
- package/dist/remote-workspace-M4IULGFZ.js.map +1 -0
- package/dist/{review-status-XKUKZF6J.js → review-status-J2YJGL3E.js} +2 -2
- package/dist/{specialist-context-53AWO6AE.js → specialist-context-74RQF5SR.js} +7 -5
- package/dist/{specialist-context-53AWO6AE.js.map → specialist-context-74RQF5SR.js.map} +1 -1
- package/dist/{specialist-logs-QREUJ4HN.js → specialist-logs-T5GW7CSU.js} +6 -4
- package/dist/{specialists-2DBBXRCK.js → specialists-HTYYFXHQ.js} +6 -4
- package/dist/specialists-HTYYFXHQ.js.map +1 -0
- package/dist/tmux-X2I5SAIJ.js +31 -0
- package/dist/tmux-X2I5SAIJ.js.map +1 -0
- package/dist/{traefik-5GL3Q7DJ.js → traefik-QXLZ4PO2.js} +4 -4
- package/dist/traefik-QXLZ4PO2.js.map +1 -0
- package/dist/{tunnel-BKC7KLBX.js → tunnel-7IOSRZVH.js} +3 -3
- package/dist/tunnel-7IOSRZVH.js.map +1 -0
- package/dist/{workspace-manager-ALBR62AS.js → workspace-manager-G6TTBPC3.js} +6 -6
- package/dist/workspace-manager-G6TTBPC3.js.map +1 -0
- package/package.json +2 -2
- package/scripts/build-cost-script.mjs +17 -0
- package/scripts/heartbeat-hook +28 -8
- package/scripts/record-cost-event.js +46 -7
- package/scripts/record-cost-event.ts +2 -1
- package/scripts/recover-costs-deep.mjs +209 -0
- package/scripts/recover-costs-proportional.mjs +206 -0
- package/scripts/recover-costs.mjs +169 -0
- package/dist/chunk-2V2DQ3IX.js.map +0 -1
- package/dist/chunk-44EOY2ZL.js.map +0 -1
- package/dist/chunk-4HST45MO.js.map +0 -1
- package/dist/chunk-565HZ6VV.js +0 -159
- package/dist/chunk-565HZ6VV.js.map +0 -1
- package/dist/chunk-6N2KBSJA.js.map +0 -1
- package/dist/chunk-CFCUOV3Q.js.map +0 -1
- package/dist/chunk-D67AQTHF.js.map +0 -1
- package/dist/chunk-FQ66DECN.js.map +0 -1
- package/dist/chunk-MOPGR3CL.js.map +0 -1
- package/dist/chunk-RLZQB7HS.js.map +0 -1
- package/dist/chunk-T7BBPDEJ.js.map +0 -1
- package/dist/chunk-ZDNQFWR5.js +0 -650
- package/dist/chunk-ZDNQFWR5.js.map +0 -1
- package/dist/dashboard/public/assets/index-CgJjqjAV.js +0 -767
- package/dist/remote-workspace-XX6ARE6I.js.map +0 -1
- /package/dist/{agents-DMPT32H7.js.map → agents-5HWTDR4S.js.map} +0 -0
- /package/dist/{config-4CJNUE3O.js.map → archive-planning-U3AZAKWI.js.map} +0 -0
- /package/dist/{chunk-KBHRXV5T.js.map → chunk-43F4LDZ4.js.map} +0 -0
- /package/dist/{chunk-HOGYHJ2G.js.map → chunk-DW3PKGIS.js.map} +0 -0
- /package/dist/{chunk-DFNVHK3N.js.map → chunk-SUM2WVPF.js.map} +0 -0
- /package/dist/{chunk-ZTYHZMEC.js.map → chunk-ZWZNEA26.js.map} +0 -0
- /package/dist/{hume-CKJJ3OUU.js.map → clean-planning-7Z5YY64X.js.map} +0 -0
- /package/dist/{projects-KVM3MN3Y.js.map → close-issue-CTZK777I.js.map} +0 -0
- /package/dist/{rally-RKFSWC7E.js.map → compact-beads-72SHALOL.js.map} +0 -0
- /package/dist/{remote-agents-ULPD6C5U.js.map → config-FFTMBVHM.js.map} +0 -0
- /package/dist/{feedback-writer-T43PI5S2.js.map → feedback-writer-T2WCT6EZ.js.map} +0 -0
- /package/dist/{review-status-XKUKZF6J.js.map → hume-GVTB5BKW.js.map} +0 -0
- /package/dist/{specialist-logs-QREUJ4HN.js.map → projects-3CRF57ZU.js.map} +0 -0
- /package/dist/{specialists-2DBBXRCK.js.map → rally-LBY24P4C.js.map} +0 -0
- /package/dist/{traefik-5GL3Q7DJ.js.map → remote-agents-3NZPSHYG.js.map} +0 -0
- /package/dist/{tunnel-BKC7KLBX.js.map → review-status-J2YJGL3E.js.map} +0 -0
- /package/dist/{workspace-manager-ALBR62AS.js.map → specialist-logs-T5GW7CSU.js.map} +0 -0
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../src/lib/remote/fly-api.ts","../src/lib/remote/fly-provider.ts","../src/lib/remote/remote-agents.ts"],"sourcesContent":["/**\n * Fly Machines REST API Client\n *\n * Wraps the Fly Machines API (flaps) for machine lifecycle management.\n * Base URL: https://api.machines.dev/v1\n * Auth: FLY_API_TOKEN environment variable\n */\n\nexport interface FlyMachineConfig {\n image: string;\n env?: Record<string, string>;\n size?: string; // e.g. \"shared-cpu-2x\"\n memory?: number; // MB\n region?: string; // e.g. \"iad\"\n auto_destroy?: boolean;\n restart?: { policy: 'no' | 'always' | 'on-failure' };\n metadata?: Record<string, string>;\n}\n\nexport interface FlyMachine {\n id: string;\n name: string;\n state: string; // 'started', 'stopped', 'created', 'destroying', etc.\n region: string;\n image_ref?: { registry: string; repository: string; tag: string };\n instance_id?: string;\n private_ip?: string;\n created_at?: string;\n config?: {\n image: string;\n env?: Record<string, string>;\n guest?: { cpu_kind: string; cpus: number; memory_mb: number };\n };\n}\n\nexport interface FlyExecResult {\n stdout: string;\n stderr: string;\n exit_code: number;\n}\n\nexport class FlyApiError extends Error {\n constructor(\n message: string,\n public readonly statusCode: number,\n public readonly body: string\n ) {\n super(message);\n this.name = 'FlyApiError';\n }\n}\n\nconst BASE_URL = 'https://api.machines.dev/v1';\n\nexport class FlyApiClient {\n private readonly token: string;\n\n constructor(token: string) {\n this.token = token;\n }\n\n private async request<T>(\n method: string,\n path: string,\n body?: unknown\n ): Promise<T> {\n const url = `${BASE_URL}${path}`;\n const headers: Record<string, string> = {\n Authorization: `Bearer ${this.token}`,\n 'Content-Type': 'application/json',\n };\n\n const response = await fetch(url, {\n method,\n headers,\n body: body !== undefined ? 
JSON.stringify(body) : undefined,\n });\n\n const text = await response.text();\n\n if (!response.ok) {\n throw new FlyApiError(\n `Fly API error ${response.status} for ${method} ${path}: ${text}`,\n response.status,\n text\n );\n }\n\n if (!text) return undefined as T;\n\n try {\n return JSON.parse(text) as T;\n } catch {\n return text as unknown as T;\n }\n }\n\n /** Create a machine in an app */\n async createMachine(\n appName: string,\n name: string,\n config: FlyMachineConfig\n ): Promise<FlyMachine> {\n return this.request<FlyMachine>('POST', `/apps/${appName}/machines`, {\n name,\n config: {\n image: config.image,\n env: config.env,\n guest: config.size\n ? { cpu_kind: 'shared', cpus: 2, memory_mb: config.memory ?? 1024 }\n : undefined,\n restart: config.restart ?? { policy: 'no' },\n auto_destroy: config.auto_destroy,\n metadata: config.metadata,\n },\n region: config.region,\n });\n }\n\n /** Destroy a machine (force=true for immediate) */\n async destroyMachine(appName: string, machineId: string): Promise<void> {\n await this.request<void>(\n 'DELETE',\n `/apps/${appName}/machines/${machineId}?force=true`\n );\n }\n\n /** Start a stopped machine */\n async startMachine(appName: string, machineId: string): Promise<void> {\n await this.request<void>(\n 'POST',\n `/apps/${appName}/machines/${machineId}/start`\n );\n }\n\n /** Stop a running machine */\n async stopMachine(\n appName: string,\n machineId: string,\n signal?: string,\n timeout?: number\n ): Promise<void> {\n await this.request<void>(\n 'POST',\n `/apps/${appName}/machines/${machineId}/stop`,\n signal || timeout ? 
{ signal, timeout } : undefined\n );\n }\n\n /** Get a machine by ID */\n async getMachine(appName: string, machineId: string): Promise<FlyMachine> {\n return this.request<FlyMachine>(\n 'GET',\n `/apps/${appName}/machines/${machineId}`\n );\n }\n\n /** List all machines in an app */\n async listMachines(appName: string): Promise<FlyMachine[]> {\n const result = await this.request<FlyMachine[] | null>(\n 'GET',\n `/apps/${appName}/machines`\n );\n return result ?? [];\n }\n\n /** Execute a command inside a running machine */\n async execCommand(\n appName: string,\n machineId: string,\n command: string[],\n timeout: number = 30\n ): Promise<FlyExecResult> {\n return this.request<FlyExecResult>(\n 'POST',\n `/apps/${appName}/machines/${machineId}/exec`,\n { command, timeout }\n );\n }\n\n /** Wait for a machine to reach a target state */\n async waitForState(\n appName: string,\n machineId: string,\n state: string,\n timeout: number = 60\n ): Promise<void> {\n await this.request<void>(\n 'GET',\n `/apps/${appName}/machines/${machineId}/wait?state=${state}&timeout=${timeout}`\n );\n }\n\n /** Create a Fly app if it doesn't exist */\n async ensureApp(appName: string, orgSlug: string): Promise<void> {\n try {\n await this.request<unknown>('GET', `/apps/${appName}`);\n } catch (err) {\n if (err instanceof FlyApiError && err.statusCode === 404) {\n await this.request<unknown>('POST', '/apps', {\n app_name: appName,\n org_slug: orgSlug,\n network: 'default',\n });\n } else {\n throw err;\n }\n }\n }\n}\n\n/** Create a FlyApiClient from env or explicit token */\nexport function createFlyApiClient(token?: string): FlyApiClient {\n const tok = token ?? process.env.FLY_API_TOKEN;\n if (!tok) {\n throw new Error(\n 'Fly API token not found. 
Set FLY_API_TOKEN environment variable or run: fly auth login'\n );\n }\n return new FlyApiClient(tok);\n}\n","/**\n * Fly.io Remote Provider\n *\n * Implements the RemoteProvider interface using Fly Machines API and Fly CLI.\n * VM lifecycle is managed via REST API; exec/SSH via Fly CLI.\n */\n\nimport { exec, spawn } from 'child_process';\nimport { promisify } from 'util';\nimport { existsSync, readFileSync, readdirSync } from 'fs';\nimport { join } from 'path';\nimport { homedir } from 'os';\nimport { parse } from 'yaml';\nimport { FlyApiClient, createFlyApiClient, FlyApiError } from './fly-api.js';\nimport type { RemoteProvider, VmInfo, VmStatus, ExecResult } from './interface.js';\n\nconst execAsync = promisify(exec);\n\nexport interface FlyProviderConfig {\n /** Fly.io app name for workspace machines (default: pan-workspaces) */\n app?: string;\n /** Fly.io org slug */\n org?: string;\n /** Default region (default: iad) */\n region?: string;\n /** Machine size (default: shared-cpu-2x) */\n vmSize?: string;\n /** Memory in MB (default: 1024) */\n vmMemory?: number;\n /** Docker image for workspace machines */\n image?: string;\n /** API token (falls back to FLY_API_TOKEN env var) */\n apiToken?: string;\n}\n\nfunction mapFlyStateToVmStatus(state: string): VmStatus {\n switch (state) {\n case 'started':\n return 'running';\n case 'stopped':\n case 'suspended':\n return 'stopped';\n case 'created':\n case 'replacing':\n return 'creating';\n case 'destroying':\n case 'destroyed':\n return 'deleting';\n default:\n return 'unknown';\n }\n}\n\nexport class FlyProvider implements RemoteProvider {\n readonly name = 'fly';\n\n private readonly config: Required<FlyProviderConfig>;\n private api: FlyApiClient | null = null;\n\n constructor(config: FlyProviderConfig = {}) {\n this.config = {\n app: config.app ?? 'pan-workspaces',\n org: config.org ?? 'personal',\n region: config.region ?? 'iad',\n vmSize: config.vmSize ?? 'shared-cpu-2x',\n vmMemory: config.vmMemory ?? 
1024,\n image: config.image ?? 'registry.fly.io/pan-workspace:latest',\n apiToken: config.apiToken ?? process.env.FLY_API_TOKEN ?? '',\n };\n }\n\n private getApi(): FlyApiClient {\n if (!this.api) {\n this.api = createFlyApiClient(this.config.apiToken || undefined);\n }\n return this.api;\n }\n\n async isAuthenticated(): Promise<boolean> {\n // Check API token first\n if (this.config.apiToken || process.env.FLY_API_TOKEN) {\n try {\n await this.getApi().listMachines(this.config.app);\n return true;\n } catch {\n // Fall through to CLI check\n }\n }\n\n // Check fly CLI auth\n try {\n const result = await execAsync('fly auth whoami', { timeout: 10000 });\n return !result.stdout.includes('not logged in');\n } catch {\n return false;\n }\n }\n\n /**\n * Resolve vmName to {appName, machineId} by scanning workspace metadata.\n * Falls back to listing machines in the app.\n */\n async resolveVm(vmName: string): Promise<{ appName: string; machineId: string }> {\n const workspacesDir = join(homedir(), '.panopticon', 'workspaces');\n if (existsSync(workspacesDir)) {\n for (const file of readdirSync(workspacesDir)) {\n if (!file.endsWith('.yaml')) continue;\n try {\n const content = readFileSync(join(workspacesDir, file), 'utf-8');\n const metadata = parse(content) as { vmName?: string; machineId?: string; appName?: string };\n if (metadata.vmName === vmName && metadata.machineId && metadata.appName) {\n return { appName: metadata.appName, machineId: metadata.machineId };\n }\n } catch {\n // Skip invalid files\n }\n }\n }\n\n // Fallback: search by machine name via API\n const machines = await this.getApi().listMachines(this.config.app);\n const machine = machines.find(m => m.name === vmName);\n if (!machine) {\n throw new Error(`No Fly machine found for VM name: ${vmName}`);\n }\n return { appName: this.config.app, machineId: machine.id };\n }\n\n async createVm(name: string): Promise<VmInfo> {\n const api = this.getApi();\n\n // Ensure app exists\n await 
api.ensureApp(this.config.app, this.config.org);\n\n // Create machine\n const machine = await api.createMachine(this.config.app, name, {\n image: this.config.image,\n size: this.config.vmSize,\n memory: this.config.vmMemory,\n region: this.config.region,\n restart: { policy: 'no' },\n auto_destroy: false,\n });\n\n // Wait for machine to start\n try {\n await api.waitForState(this.config.app, machine.id, 'started', 120);\n } catch {\n // Non-fatal: machine may still be starting\n }\n\n return {\n name,\n status: mapFlyStateToVmStatus(machine.state),\n machineId: machine.id,\n ipAddress: machine.private_ip,\n created: machine.created_at ? new Date(machine.created_at) : undefined,\n };\n }\n\n async deleteVm(name: string): Promise<void> {\n const { appName, machineId } = await this.resolveVm(name);\n await this.getApi().destroyMachine(appName, machineId);\n }\n\n async listVms(): Promise<VmInfo[]> {\n const machines = await this.getApi().listMachines(this.config.app);\n return machines.map(m => ({\n name: m.name,\n status: mapFlyStateToVmStatus(m.state),\n machineId: m.id,\n ipAddress: m.private_ip,\n created: m.created_at ? new Date(m.created_at) : undefined,\n }));\n }\n\n async getStatus(name: string): Promise<VmStatus> {\n try {\n const { appName, machineId } = await this.resolveVm(name);\n const machine = await this.getApi().getMachine(appName, machineId);\n return mapFlyStateToVmStatus(machine.state);\n } catch {\n return 'unknown';\n }\n }\n\n async getVmInfo(name: string): Promise<VmInfo | null> {\n try {\n const { appName, machineId } = await this.resolveVm(name);\n const machine = await this.getApi().getMachine(appName, machineId);\n return {\n name,\n status: mapFlyStateToVmStatus(machine.state),\n machineId: machine.id,\n ipAddress: machine.private_ip,\n created: machine.created_at ? 
new Date(machine.created_at) : undefined,\n memoryTotal: machine.config?.guest?.memory_mb,\n };\n } catch {\n return null;\n }\n }\n\n async startVm(name: string): Promise<void> {\n const { appName, machineId } = await this.resolveVm(name);\n await this.getApi().startMachine(appName, machineId);\n await this.getApi().waitForState(appName, machineId, 'started', 60);\n }\n\n async stopVm(name: string): Promise<void> {\n const { appName, machineId } = await this.resolveVm(name);\n await this.getApi().stopMachine(appName, machineId);\n }\n\n /** Execute a command on the VM via Fly Machines exec API */\n async ssh(vm: string, command: string): Promise<ExecResult> {\n const { appName, machineId } = await this.resolveVm(vm);\n try {\n const result = await this.getApi().execCommand(\n appName,\n machineId,\n ['/bin/sh', '-c', command],\n 60\n );\n return {\n stdout: result.stdout ?? '',\n stderr: result.stderr ?? '',\n exitCode: result.exit_code ?? 0,\n };\n } catch (err) {\n if (err instanceof FlyApiError) {\n return { stdout: '', stderr: err.message, exitCode: 1 };\n }\n throw err;\n }\n }\n\n /** Stream command output via fly SSH console */\n async *sshStream(vm: string, command: string): AsyncIterable<string> {\n const { appName } = await this.resolveVm(vm);\n const child = spawn('fly', ['ssh', 'console', '-a', appName, '-C', command], {\n env: { ...process.env },\n });\n\n for await (const chunk of child.stdout) {\n yield chunk.toString();\n }\n for await (const chunk of child.stderr) {\n yield chunk.toString();\n }\n\n await new Promise<void>((resolve, reject) => {\n child.on('close', resolve);\n child.on('error', reject);\n });\n }\n\n /** Copy a local file to VM using base64 encoding */\n async copyToVm(vm: string, localPath: string, remotePath: string): Promise<void> {\n const content = readFileSync(localPath);\n const b64 = content.toString('base64');\n const dirPath = remotePath.substring(0, remotePath.lastIndexOf('/'));\n if (dirPath) {\n await this.ssh(vm, 
`mkdir -p ${JSON.stringify(dirPath)}`);\n }\n await this.ssh(vm, `echo '${b64}' | base64 -d > ${JSON.stringify(remotePath)}`);\n }\n\n /** Copy a file from VM to local path */\n async copyFromVm(vm: string, remotePath: string, localPath: string): Promise<void> {\n const { appName } = await this.resolveVm(vm);\n await execAsync(\n `fly ssh sftp get -a ${JSON.stringify(appName)} ${JSON.stringify(remotePath)} ${JSON.stringify(localPath)}`,\n { timeout: 60000 }\n );\n }\n\n /** Expose a port — not supported by Fly.io provider */\n async exposePort(_vm: string, _port: number): Promise<string> {\n throw new Error(\n 'exposePort is not supported by the Fly.io provider. ' +\n 'Configure services in fly.toml or via the Fly Machines API config.'\n );\n }\n\n /** Create a fly proxy tunnel to the machine */\n async tunnel(\n vm: string,\n remotePort: number,\n localPort: number\n ): Promise<{ close: () => void }> {\n const { appName } = await this.resolveVm(vm);\n const child = spawn('fly', ['proxy', `${localPort}:${remotePort}`, '-a', appName], {\n env: { ...process.env },\n });\n\n return {\n close: () => {\n child.kill();\n },\n };\n }\n\n // ============================================================================\n // Credential Sync & Configuration (ported from ExeProvider)\n // ============================================================================\n\n /** Sync Claude Code credentials from local macOS Keychain to remote VM */\n async syncClaudeCredentials(vmName: string): Promise<boolean> {\n try {\n const { stdout: credentials } = await execAsync(\n 'security find-generic-password -s \"Claude Code-credentials\" -w 2>/dev/null',\n { encoding: 'utf-8', timeout: 10000 }\n );\n if (!credentials?.trim()) return false;\n\n const b64 = Buffer.from(credentials.trim()).toString('base64');\n await this.ssh(vmName, `mkdir -p ~/.claude && echo '${b64}' | base64 -d > ~/.claude/.credentials.json`);\n return true;\n } catch {\n return false;\n }\n }\n\n /** Sync GitHub CLI 
authentication to the remote VM */\n async syncGitHubAuth(vmName: string): Promise<boolean> {\n const ghConfigPath = join(homedir(), '.config', 'gh', 'hosts.yml');\n if (!existsSync(ghConfigPath)) return false;\n\n try {\n const content = readFileSync(ghConfigPath, 'utf-8');\n const b64 = Buffer.from(content).toString('base64');\n await this.ssh(vmName, `mkdir -p ~/.config/gh && echo '${b64}' | base64 -d > ~/.config/gh/hosts.yml`);\n return true;\n } catch {\n return false;\n }\n }\n\n /** Sync GitLab CLI (glab) authentication to the remote VM */\n async syncGitLabAuth(vmName: string): Promise<boolean> {\n const glabConfigPath = join(homedir(), '.config', 'glab-cli', 'config.yml');\n if (!existsSync(glabConfigPath)) return false;\n\n try {\n const content = readFileSync(glabConfigPath, 'utf-8');\n const b64 = Buffer.from(content).toString('base64');\n await this.ssh(vmName, `mkdir -p ~/.config/glab-cli && echo '${b64}' | base64 -d > ~/.config/glab-cli/config.yml`);\n return true;\n } catch {\n return false;\n }\n }\n\n /** Sync all credentials needed for remote workspace operation */\n async syncAllCredentials(vmName: string): Promise<{ claude: boolean; github: boolean }> {\n const [claude, github] = await Promise.all([\n this.syncClaudeCredentials(vmName),\n this.syncGitHubAuth(vmName),\n ]);\n return { claude, github };\n }\n\n /** Install beads CLI (bd) on a remote VM */\n async installBeads(vmName: string): Promise<boolean> {\n // Check if already installed\n const check = await this.ssh(vmName, 'which bd 2>/dev/null');\n if (check.exitCode === 0 && check.stdout.trim()) return true;\n\n // Install via npm\n const result = await this.ssh(vmName, 'npm install -g @beads-dev/beads 2>&1');\n if (result.exitCode !== 0) {\n // Try alternative install\n const alt = await this.ssh(\n vmName,\n 'curl -fsSL https://raw.githubusercontent.com/beads-dev/beads/main/install.sh | bash 2>&1'\n );\n return alt.exitCode === 0;\n }\n return true;\n }\n\n /** Initialize beads in a 
workspace on a remote VM */\n async initBeads(vmName: string, workspacePath: string = '/workspace'): Promise<boolean> {\n const result = await this.ssh(\n vmName,\n `cd ${workspacePath} && bd init --prefix PAN 2>&1 || bd init 2>&1`\n );\n return result.exitCode === 0;\n }\n\n /** Configure Claude Code on a VM for autonomous operation */\n async configureClaudeCode(vmName: string): Promise<void> {\n await this.ssh(vmName, 'mkdir -p ~/.claude');\n\n // Set onboarding complete\n const onboardingScript = `\nimport json, os\npath = os.path.expanduser(\"~/.claude.json\")\ndata = {}\nif os.path.exists(path):\n with open(path) as f:\n data = json.load(f)\ndata[\"hasCompletedOnboarding\"] = True\ndata[\"lastOnboardingVersion\"] = \"2.0.50\"\nwith open(path, \"w\") as f:\n json.dump(data, f, indent=2)\n`;\n const scriptB64 = Buffer.from(onboardingScript).toString('base64');\n await this.ssh(vmName, `echo '${scriptB64}' | base64 -d | python3`);\n\n // Write settings.json with bypass permissions\n const settings = JSON.stringify({\n theme: 'dark',\n permissions: { defaultMode: 'bypassPermissions' },\n });\n const settingsB64 = Buffer.from(settings).toString('base64');\n await this.ssh(vmName, `echo '${settingsB64}' | base64 -d > ~/.claude/settings.json`);\n }\n\n /** Copy essential skills from local ~/.panopticon/skills/ to remote VM */\n async copySkillsToVm(vmName: string): Promise<void> {\n const skillsDir = join(homedir(), '.panopticon', 'skills');\n if (!existsSync(skillsDir)) return;\n\n await this.ssh(vmName, 'mkdir -p ~/.claude/skills');\n\n try {\n const entries = readdirSync(skillsDir, { withFileTypes: true });\n for (const entry of entries) {\n if (!entry.isFile() || !entry.name.endsWith('.md')) continue;\n const localPath = join(skillsDir, entry.name);\n const content = readFileSync(localPath, 'utf-8');\n const b64 = Buffer.from(content).toString('base64');\n await this.ssh(vmName, `echo '${b64}' | base64 -d > ~/.claude/skills/${entry.name}`);\n }\n } catch {\n // 
Non-fatal: skills are optional\n }\n }\n\n /** Sync beads from remote VM to git: exports JSONL, commits, and pushes */\n async syncBeadsToGit(\n vmName: string,\n workspacePath: string = '/workspace',\n commitMessage?: string\n ): Promise<boolean> {\n const msg = commitMessage ?? 'chore: sync beads from remote';\n\n // Export beads to JSONL\n const exportResult = await this.ssh(\n vmName,\n `cd ${workspacePath} && bd export --output .beads/issues.jsonl 2>&1`\n );\n if (exportResult.exitCode !== 0) {\n return false;\n }\n\n // Commit and push\n const gitResult = await this.ssh(\n vmName,\n `cd ${workspacePath} && git add .beads/ && git diff --cached --quiet || (git commit -m ${JSON.stringify(msg)} && git push origin HEAD) 2>&1`\n );\n return gitResult.exitCode === 0;\n }\n\n /** Query beads on a remote VM via bd search */\n async queryBeads(\n vmName: string,\n searchTerm: string,\n workspacePath: string = '/workspace'\n ): Promise<unknown[]> {\n const result = await this.ssh(\n vmName,\n `cd ${workspacePath} && bd search ${JSON.stringify(searchTerm)} --json 2>/dev/null || echo '[]'`\n );\n try {\n return JSON.parse(result.stdout.trim() || '[]');\n } catch {\n return [];\n }\n }\n\n /** Get the configured app name */\n getAppName(): string {\n return this.config.app;\n }\n}\n\nexport function createFlyProvider(config?: FlyProviderConfig): FlyProvider {\n return new FlyProvider(config);\n}\n","/**\n * Remote Agent Management\n *\n * Spawn and manage Claude agents on remote Fly.io machines.\n * Agents run in tmux sessions for persistence and monitoring.\n */\n\nimport { createFlyProvider } from './fly-provider.js';\nimport type { RemoteProvider, RemoteWorkspaceMetadata } from './interface.js';\nimport { join } from 'path';\nimport { existsSync, readFileSync, writeFileSync, mkdirSync } from 'fs';\nimport { homedir } from 'os';\n\nconst AGENTS_DIR = join(homedir(), '.panopticon', 'agents');\n\nexport interface RemoteAgentState {\n id: string;\n issueId: string;\n 
vmName: string;\n model: string;\n status: 'starting' | 'running' | 'stopped' | 'error';\n startedAt: string;\n lastActivity?: string;\n location: 'remote';\n}\n\n/**\n * Get agent state file path\n */\nfunction getRemoteAgentStateFile(agentId: string): string {\n return join(AGENTS_DIR, agentId, 'remote-state.json');\n}\n\n/**\n * Save remote agent state\n */\nfunction saveRemoteAgentState(state: RemoteAgentState): void {\n const dir = join(AGENTS_DIR, state.id);\n if (!existsSync(dir)) {\n mkdirSync(dir, { recursive: true });\n }\n writeFileSync(getRemoteAgentStateFile(state.id), JSON.stringify(state, null, 2));\n}\n\n/**\n * Load remote agent state\n */\nexport function loadRemoteAgentState(agentId: string): RemoteAgentState | null {\n const file = getRemoteAgentStateFile(agentId);\n if (!existsSync(file)) return null;\n\n try {\n return JSON.parse(readFileSync(file, 'utf-8'));\n } catch {\n return null;\n }\n}\n\n/**\n * Check if remote agent session exists\n */\nasync function remoteSessionExists(\n provider: RemoteProvider,\n vmName: string,\n sessionName: string\n): Promise<boolean> {\n const result = await provider.ssh(vmName, `tmux has-session -t ${sessionName} 2>/dev/null && echo exists || echo not-found`);\n return result.stdout.trim() === 'exists';\n}\n\nexport interface SpawnRemoteAgentOptions {\n issueId: string;\n workspace: RemoteWorkspaceMetadata;\n model?: string;\n prompt?: string;\n phase?: string;\n}\n\n/**\n * Spawn a Claude agent on a remote VM\n */\nexport async function spawnRemoteAgent(options: SpawnRemoteAgentOptions): Promise<RemoteAgentState> {\n const { issueId, workspace, model = 'claude-sonnet-4-6', prompt } = options;\n\n const agentId = `agent-${issueId.toLowerCase()}`;\n const vmName = workspace.vmName;\n\n const fly = createFlyProvider();\n\n // Check if VM is running\n const vmStatus = await fly.getStatus(vmName);\n if (vmStatus !== 'running') {\n throw new Error(`VM ${vmName} is not running. 
Start it with: pan workspace start ${issueId}`);\n }\n\n // Check if agent already exists\n if (await remoteSessionExists(fly, vmName, agentId)) {\n throw new Error(`Agent ${agentId} already running on ${vmName}. Use 'pan work tell' to message it.`);\n }\n\n // Create agent state\n const state: RemoteAgentState = {\n id: agentId,\n issueId,\n vmName,\n model,\n status: 'starting',\n startedAt: new Date().toISOString(),\n location: 'remote',\n };\n\n saveRemoteAgentState(state);\n\n // Write prompt to file on remote VM if provided\n let claudeCmd: string;\n\n if (prompt) {\n // Write prompt to file on VM using base64 to avoid escaping issues\n const promptFile = `/workspace/.panopticon/prompts/${agentId}.md`;\n await fly.ssh(vmName, `mkdir -p /workspace/.panopticon/prompts`);\n const promptBase64 = Buffer.from(prompt).toString('base64');\n await fly.ssh(vmName, `echo '${promptBase64}' | base64 -d > ${promptFile}`);\n\n // Create launcher script using base64 to avoid shell interpretation\n const launcherScript = `/workspace/.panopticon/prompts/${agentId}-launcher.sh`;\n const launcherContent = `#!/bin/bash\nexport PATH=\"/usr/local/bin:\\$PATH\"\nprompt=\\$(cat \"${promptFile}\")\nexec claude --dangerously-skip-permissions --model ${model} \"\\$prompt\"\n`;\n const launcherBase64 = Buffer.from(launcherContent).toString('base64');\n await fly.ssh(vmName, `echo '${launcherBase64}' | base64 -d > ${launcherScript} && chmod +x ${launcherScript}`);\n\n claudeCmd = `bash ${launcherScript}`;\n } else {\n claudeCmd = `claude --dangerously-skip-permissions --model ${model}`;\n }\n\n // Create tmux session on remote VM\n const tmuxCmd = `tmux new-session -d -s ${agentId} -c /workspace '${claudeCmd}'`;\n const result = await fly.ssh(vmName, tmuxCmd);\n\n if (result.exitCode !== 0) {\n state.status = 'error';\n saveRemoteAgentState(state);\n throw new Error(`Failed to start agent: ${result.stderr}`);\n }\n\n // Update status\n state.status = 'running';\n 
saveRemoteAgentState(state);\n\n return state;\n}\n\n/**\n * Get remote agent output from tmux session\n */\nexport async function getRemoteAgentOutput(\n agentId: string,\n vmName: string,\n lines: number = 100\n): Promise<string> {\n const fly = createFlyProvider();\n\n const result = await fly.ssh(vmName, `tmux capture-pane -t ${agentId} -p -S -${lines}`);\n return result.stdout;\n}\n\n/**\n * Send message to remote agent\n */\nexport async function sendToRemoteAgent(\n agentId: string,\n vmName: string,\n message: string\n): Promise<void> {\n const fly = createFlyProvider();\n\n // Escape message for shell\n const escapedMessage = message.replace(/'/g, \"'\\\\''\");\n\n // Send keys to tmux session (send message then Enter)\n await fly.ssh(vmName, `tmux send-keys -t ${agentId} '${escapedMessage}'`);\n await fly.ssh(vmName, `tmux send-keys -t ${agentId} C-m`);\n}\n\n/**\n * Check if remote agent is still running\n */\nexport async function isRemoteAgentRunning(\n agentId: string,\n vmName: string\n): Promise<boolean> {\n const fly = createFlyProvider();\n return remoteSessionExists(fly, vmName, agentId);\n}\n\n/**\n * Kill remote agent session\n */\nexport async function killRemoteAgent(\n agentId: string,\n vmName: string\n): Promise<void> {\n const fly = createFlyProvider();\n await fly.ssh(vmName, `tmux kill-session -t ${agentId} 2>/dev/null || true`);\n\n // Update state\n const state = loadRemoteAgentState(agentId);\n if (state) {\n state.status = 'stopped';\n saveRemoteAgentState(state);\n }\n}\n\n/**\n * Get list of running remote agents on a VM\n */\nexport async function listRemoteAgents(vmName: string): Promise<string[]> {\n const fly = createFlyProvider();\n\n const result = await fly.ssh(vmName, `tmux list-sessions -F \"#{session_name}\" 2>/dev/null | grep \"^agent-\" || true`);\n if (!result.stdout.trim()) {\n return [];\n }\n\n return result.stdout.trim().split('\\n').filter(Boolean);\n}\n\n/**\n * Poll remote agent for status updates\n * Returns 
parsed events from the agent output\n */\nexport async function pollRemoteAgentStatus(\n agentId: string,\n vmName: string\n): Promise<{\n isRunning: boolean;\n lastOutput: string;\n toolUses: string[];\n}> {\n const fly = createFlyProvider();\n\n // Check if session exists\n const isRunning = await remoteSessionExists(fly, vmName, agentId);\n\n if (!isRunning) {\n return { isRunning: false, lastOutput: '', toolUses: [] };\n }\n\n // Get recent output\n const output = await getRemoteAgentOutput(agentId, vmName, 50);\n\n // Parse tool uses from output (simple pattern matching)\n const toolUses: string[] = [];\n const toolPattern = /(?:Using|Calling|Running)\\s+(\\w+)\\s+tool/gi;\n let match;\n while ((match = toolPattern.exec(output)) !== null) {\n toolUses.push(match[1]);\n }\n\n return {\n isRunning,\n lastOutput: output,\n toolUses,\n };\n}\n"],"mappings":";;;;;;AAoNO,SAAS,mBAAmB,OAA8B;AAC/D,QAAM,MAAM,SAAS,QAAQ,IAAI;AACjC,MAAI,CAAC,KAAK;AACR,UAAM,IAAI;AAAA,MACR;AAAA,IACF;AAAA,EACF;AACA,SAAO,IAAI,aAAa,GAAG;AAC7B;AA5NA,IAyCa,aAWP,UAEO;AAtDb;AAAA;AAAA;AAAA;AAyCO,IAAM,cAAN,cAA0B,MAAM;AAAA,MACrC,YACE,SACgB,YACA,MAChB;AACA,cAAM,OAAO;AAHG;AACA;AAGhB,aAAK,OAAO;AAAA,MACd;AAAA,IACF;AAEA,IAAM,WAAW;AAEV,IAAM,eAAN,MAAmB;AAAA,MACP;AAAA,MAEjB,YAAY,OAAe;AACzB,aAAK,QAAQ;AAAA,MACf;AAAA,MAEA,MAAc,QACZ,QACA,MACA,MACY;AACZ,cAAM,MAAM,GAAG,QAAQ,GAAG,IAAI;AAC9B,cAAM,UAAkC;AAAA,UACtC,eAAe,UAAU,KAAK,KAAK;AAAA,UACnC,gBAAgB;AAAA,QAClB;AAEA,cAAM,WAAW,MAAM,MAAM,KAAK;AAAA,UAChC;AAAA,UACA;AAAA,UACA,MAAM,SAAS,SAAY,KAAK,UAAU,IAAI,IAAI;AAAA,QACpD,CAAC;AAED,cAAM,OAAO,MAAM,SAAS,KAAK;AAEjC,YAAI,CAAC,SAAS,IAAI;AAChB,gBAAM,IAAI;AAAA,YACR,iBAAiB,SAAS,MAAM,QAAQ,MAAM,IAAI,IAAI,KAAK,IAAI;AAAA,YAC/D,SAAS;AAAA,YACT;AAAA,UACF;AAAA,QACF;AAEA,YAAI,CAAC,KAAM,QAAO;AAElB,YAAI;AACF,iBAAO,KAAK,MAAM,IAAI;AAAA,QACxB,QAAQ;AACN,iBAAO;AAAA,QACT;AAAA,MACF;AAAA;AAAA,MAGA,MAAM,cACJ,SACA,MACA,QACqB;AACrB,eAAO,KAAK,QAAoB,QAAQ,SAAS,OAAO,aAAa;AAAA,UACnE;AAAA,UACA,QAAQ;AAAA,YACN,OAAO,OAAO;AAAA,YACd,KAAK,OAAO;AAAA,YACZ,OAAO,OAAO,O
ACV,EAAE,UAAU,UAAU,MAAM,GAAG,WAAW,OAAO,UAAU,KAAK,IAChE;AAAA,YACJ,SAAS,OAAO,WAAW,EAAE,QAAQ,KAAK;AAAA,YAC1C,cAAc,OAAO;AAAA,YACrB,UAAU,OAAO;AAAA,UACnB;AAAA,UACA,QAAQ,OAAO;AAAA,QACjB,CAAC;AAAA,MACH;AAAA;AAAA,MAGA,MAAM,eAAe,SAAiB,WAAkC;AACtE,cAAM,KAAK;AAAA,UACT;AAAA,UACA,SAAS,OAAO,aAAa,SAAS;AAAA,QACxC;AAAA,MACF;AAAA;AAAA,MAGA,MAAM,aAAa,SAAiB,WAAkC;AACpE,cAAM,KAAK;AAAA,UACT;AAAA,UACA,SAAS,OAAO,aAAa,SAAS;AAAA,QACxC;AAAA,MACF;AAAA;AAAA,MAGA,MAAM,YACJ,SACA,WACA,QACA,SACe;AACf,cAAM,KAAK;AAAA,UACT;AAAA,UACA,SAAS,OAAO,aAAa,SAAS;AAAA,UACtC,UAAU,UAAU,EAAE,QAAQ,QAAQ,IAAI;AAAA,QAC5C;AAAA,MACF;AAAA;AAAA,MAGA,MAAM,WAAW,SAAiB,WAAwC;AACxE,eAAO,KAAK;AAAA,UACV;AAAA,UACA,SAAS,OAAO,aAAa,SAAS;AAAA,QACxC;AAAA,MACF;AAAA;AAAA,MAGA,MAAM,aAAa,SAAwC;AACzD,cAAM,SAAS,MAAM,KAAK;AAAA,UACxB;AAAA,UACA,SAAS,OAAO;AAAA,QAClB;AACA,eAAO,UAAU,CAAC;AAAA,MACpB;AAAA;AAAA,MAGA,MAAM,YACJ,SACA,WACA,SACA,UAAkB,IACM;AACxB,eAAO,KAAK;AAAA,UACV;AAAA,UACA,SAAS,OAAO,aAAa,SAAS;AAAA,UACtC,EAAE,SAAS,QAAQ;AAAA,QACrB;AAAA,MACF;AAAA;AAAA,MAGA,MAAM,aACJ,SACA,WACA,OACA,UAAkB,IACH;AACf,cAAM,KAAK;AAAA,UACT;AAAA,UACA,SAAS,OAAO,aAAa,SAAS,eAAe,KAAK,YAAY,OAAO;AAAA,QAC/E;AAAA,MACF;AAAA;AAAA,MAGA,MAAM,UAAU,SAAiB,SAAgC;AAC/D,YAAI;AACF,gBAAM,KAAK,QAAiB,OAAO,SAAS,OAAO,EAAE;AAAA,QACvD,SAAS,KAAK;AACZ,cAAI,eAAe,eAAe,IAAI,eAAe,KAAK;AACxD,kBAAM,KAAK,QAAiB,QAAQ,SAAS;AAAA,cAC3C,UAAU;AAAA,cACV,UAAU;AAAA,cACV,SAAS;AAAA,YACX,CAAC;AAAA,UACH,OAAO;AACL,kBAAM;AAAA,UACR;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAAA;AAAA;;;AC1MA,SAAS,MAAM,aAAa;AAC5B,SAAS,iBAAiB;AAC1B,SAAS,YAAY,cAAc,mBAAmB;AACtD,SAAS,YAAY;AACrB,SAAS,eAAe;AACxB,SAAS,aAAa;AAuBtB,SAAS,sBAAsB,OAAyB;AACtD,UAAQ,OAAO;AAAA,IACb,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AAAA,IACL,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AAAA,IACL,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AAAA,IACL,KAAK;AACH,aAAO;AAAA,IACT;AACE,aAAO;AAAA,EACX;AACF;AAsbO,SAAS,kBAAkB,QAAyC;AACzE,SAAO,IAAI,YAAY,MAAM;AAC/B;AA3eA,IAgBM,WAqCO;AArDb;AAAA;AAAA;AAAA;AAaA;AAGA,IAAM,YAAY,UAAU,IAAI;AAqCzB,IAAM,cAAN,MAA4C;AAAA,MACxC,OAAO;AAAA,MAEC;AAAA,MACT,MAA2B;AAAA,MAEnC,YAAY,SAA4B,CAAC,
GAAG;AAC1C,aAAK,SAAS;AAAA,UACZ,KAAK,OAAO,OAAO;AAAA,UACnB,KAAK,OAAO,OAAO;AAAA,UACnB,QAAQ,OAAO,UAAU;AAAA,UACzB,QAAQ,OAAO,UAAU;AAAA,UACzB,UAAU,OAAO,YAAY;AAAA,UAC7B,OAAO,OAAO,SAAS;AAAA,UACvB,UAAU,OAAO,YAAY,QAAQ,IAAI,iBAAiB;AAAA,QAC5D;AAAA,MACF;AAAA,MAEQ,SAAuB;AAC7B,YAAI,CAAC,KAAK,KAAK;AACb,eAAK,MAAM,mBAAmB,KAAK,OAAO,YAAY,MAAS;AAAA,QACjE;AACA,eAAO,KAAK;AAAA,MACd;AAAA,MAEA,MAAM,kBAAoC;AAExC,YAAI,KAAK,OAAO,YAAY,QAAQ,IAAI,eAAe;AACrD,cAAI;AACF,kBAAM,KAAK,OAAO,EAAE,aAAa,KAAK,OAAO,GAAG;AAChD,mBAAO;AAAA,UACT,QAAQ;AAAA,UAER;AAAA,QACF;AAGA,YAAI;AACF,gBAAM,SAAS,MAAM,UAAU,mBAAmB,EAAE,SAAS,IAAM,CAAC;AACpE,iBAAO,CAAC,OAAO,OAAO,SAAS,eAAe;AAAA,QAChD,QAAQ;AACN,iBAAO;AAAA,QACT;AAAA,MACF;AAAA;AAAA;AAAA;AAAA;AAAA,MAMA,MAAM,UAAU,QAAiE;AAC/E,cAAM,gBAAgB,KAAK,QAAQ,GAAG,eAAe,YAAY;AACjE,YAAI,WAAW,aAAa,GAAG;AAC7B,qBAAW,QAAQ,YAAY,aAAa,GAAG;AAC7C,gBAAI,CAAC,KAAK,SAAS,OAAO,EAAG;AAC7B,gBAAI;AACF,oBAAM,UAAU,aAAa,KAAK,eAAe,IAAI,GAAG,OAAO;AAC/D,oBAAM,WAAW,MAAM,OAAO;AAC9B,kBAAI,SAAS,WAAW,UAAU,SAAS,aAAa,SAAS,SAAS;AACxE,uBAAO,EAAE,SAAS,SAAS,SAAS,WAAW,SAAS,UAAU;AAAA,cACpE;AAAA,YACF,QAAQ;AAAA,YAER;AAAA,UACF;AAAA,QACF;AAGA,cAAM,WAAW,MAAM,KAAK,OAAO,EAAE,aAAa,KAAK,OAAO,GAAG;AACjE,cAAM,UAAU,SAAS,KAAK,OAAK,EAAE,SAAS,MAAM;AACpD,YAAI,CAAC,SAAS;AACZ,gBAAM,IAAI,MAAM,qCAAqC,MAAM,EAAE;AAAA,QAC/D;AACA,eAAO,EAAE,SAAS,KAAK,OAAO,KAAK,WAAW,QAAQ,GAAG;AAAA,MAC3D;AAAA,MAEA,MAAM,SAAS,MAA+B;AAC5C,cAAM,MAAM,KAAK,OAAO;AAGxB,cAAM,IAAI,UAAU,KAAK,OAAO,KAAK,KAAK,OAAO,GAAG;AAGpD,cAAM,UAAU,MAAM,IAAI,cAAc,KAAK,OAAO,KAAK,MAAM;AAAA,UAC7D,OAAO,KAAK,OAAO;AAAA,UACnB,MAAM,KAAK,OAAO;AAAA,UAClB,QAAQ,KAAK,OAAO;AAAA,UACpB,QAAQ,KAAK,OAAO;AAAA,UACpB,SAAS,EAAE,QAAQ,KAAK;AAAA,UACxB,cAAc;AAAA,QAChB,CAAC;AAGD,YAAI;AACF,gBAAM,IAAI,aAAa,KAAK,OAAO,KAAK,QAAQ,IAAI,WAAW,GAAG;AAAA,QACpE,QAAQ;AAAA,QAER;AAEA,eAAO;AAAA,UACL;AAAA,UACA,QAAQ,sBAAsB,QAAQ,KAAK;AAAA,UAC3C,WAAW,QAAQ;AAAA,UACnB,WAAW,QAAQ;AAAA,UACnB,SAAS,QAAQ,aAAa,IAAI,KAAK,QAAQ,UAAU,IAAI;AAAA,QAC/D;AAAA,MACF;AAAA,MAEA,MAAM,SAAS,MAA6B;AAC1C,cAAM,EAAE,SAAS,UAAU,IAAI,MAAM,KAAK,UAAU,IAAI;AACxD,cAAM,KAAK,OAA
O,EAAE,eAAe,SAAS,SAAS;AAAA,MACvD;AAAA,MAEA,MAAM,UAA6B;AACjC,cAAM,WAAW,MAAM,KAAK,OAAO,EAAE,aAAa,KAAK,OAAO,GAAG;AACjE,eAAO,SAAS,IAAI,QAAM;AAAA,UACxB,MAAM,EAAE;AAAA,UACR,QAAQ,sBAAsB,EAAE,KAAK;AAAA,UACrC,WAAW,EAAE;AAAA,UACb,WAAW,EAAE;AAAA,UACb,SAAS,EAAE,aAAa,IAAI,KAAK,EAAE,UAAU,IAAI;AAAA,QACnD,EAAE;AAAA,MACJ;AAAA,MAEA,MAAM,UAAU,MAAiC;AAC/C,YAAI;AACF,gBAAM,EAAE,SAAS,UAAU,IAAI,MAAM,KAAK,UAAU,IAAI;AACxD,gBAAM,UAAU,MAAM,KAAK,OAAO,EAAE,WAAW,SAAS,SAAS;AACjE,iBAAO,sBAAsB,QAAQ,KAAK;AAAA,QAC5C,QAAQ;AACN,iBAAO;AAAA,QACT;AAAA,MACF;AAAA,MAEA,MAAM,UAAU,MAAsC;AACpD,YAAI;AACF,gBAAM,EAAE,SAAS,UAAU,IAAI,MAAM,KAAK,UAAU,IAAI;AACxD,gBAAM,UAAU,MAAM,KAAK,OAAO,EAAE,WAAW,SAAS,SAAS;AACjE,iBAAO;AAAA,YACL;AAAA,YACA,QAAQ,sBAAsB,QAAQ,KAAK;AAAA,YAC3C,WAAW,QAAQ;AAAA,YACnB,WAAW,QAAQ;AAAA,YACnB,SAAS,QAAQ,aAAa,IAAI,KAAK,QAAQ,UAAU,IAAI;AAAA,YAC7D,aAAa,QAAQ,QAAQ,OAAO;AAAA,UACtC;AAAA,QACF,QAAQ;AACN,iBAAO;AAAA,QACT;AAAA,MACF;AAAA,MAEA,MAAM,QAAQ,MAA6B;AACzC,cAAM,EAAE,SAAS,UAAU,IAAI,MAAM,KAAK,UAAU,IAAI;AACxD,cAAM,KAAK,OAAO,EAAE,aAAa,SAAS,SAAS;AACnD,cAAM,KAAK,OAAO,EAAE,aAAa,SAAS,WAAW,WAAW,EAAE;AAAA,MACpE;AAAA,MAEA,MAAM,OAAO,MAA6B;AACxC,cAAM,EAAE,SAAS,UAAU,IAAI,MAAM,KAAK,UAAU,IAAI;AACxD,cAAM,KAAK,OAAO,EAAE,YAAY,SAAS,SAAS;AAAA,MACpD;AAAA;AAAA,MAGA,MAAM,IAAI,IAAY,SAAsC;AAC1D,cAAM,EAAE,SAAS,UAAU,IAAI,MAAM,KAAK,UAAU,EAAE;AACtD,YAAI;AACF,gBAAM,SAAS,MAAM,KAAK,OAAO,EAAE;AAAA,YACjC;AAAA,YACA;AAAA,YACA,CAAC,WAAW,MAAM,OAAO;AAAA,YACzB;AAAA,UACF;AACA,iBAAO;AAAA,YACL,QAAQ,OAAO,UAAU;AAAA,YACzB,QAAQ,OAAO,UAAU;AAAA,YACzB,UAAU,OAAO,aAAa;AAAA,UAChC;AAAA,QACF,SAAS,KAAK;AACZ,cAAI,eAAe,aAAa;AAC9B,mBAAO,EAAE,QAAQ,IAAI,QAAQ,IAAI,SAAS,UAAU,EAAE;AAAA,UACxD;AACA,gBAAM;AAAA,QACR;AAAA,MACF;AAAA;AAAA,MAGA,OAAO,UAAU,IAAY,SAAwC;AACnE,cAAM,EAAE,QAAQ,IAAI,MAAM,KAAK,UAAU,EAAE;AAC3C,cAAM,QAAQ,MAAM,OAAO,CAAC,OAAO,WAAW,MAAM,SAAS,MAAM,OAAO,GAAG;AAAA,UAC3E,KAAK,EAAE,GAAG,QAAQ,IAAI;AAAA,QACxB,CAAC;AAED,yBAAiB,SAAS,MAAM,QAAQ;AACtC,gBAAM,MAAM,SAAS;AAAA,QACvB;AACA,yBAAiB,SAAS,MAAM,QAAQ;AACtC,gBAAM,MAAM,SAAS;AAAA,QACvB;AAEA,cAAM,IAAI,QAAc,CAAC,SAAS,WAAW;
AAC3C,gBAAM,GAAG,SAAS,OAAO;AACzB,gBAAM,GAAG,SAAS,MAAM;AAAA,QAC1B,CAAC;AAAA,MACH;AAAA;AAAA,MAGA,MAAM,SAAS,IAAY,WAAmB,YAAmC;AAC/E,cAAM,UAAU,aAAa,SAAS;AACtC,cAAM,MAAM,QAAQ,SAAS,QAAQ;AACrC,cAAM,UAAU,WAAW,UAAU,GAAG,WAAW,YAAY,GAAG,CAAC;AACnE,YAAI,SAAS;AACX,gBAAM,KAAK,IAAI,IAAI,YAAY,KAAK,UAAU,OAAO,CAAC,EAAE;AAAA,QAC1D;AACA,cAAM,KAAK,IAAI,IAAI,SAAS,GAAG,mBAAmB,KAAK,UAAU,UAAU,CAAC,EAAE;AAAA,MAChF;AAAA;AAAA,MAGA,MAAM,WAAW,IAAY,YAAoB,WAAkC;AACjF,cAAM,EAAE,QAAQ,IAAI,MAAM,KAAK,UAAU,EAAE;AAC3C,cAAM;AAAA,UACJ,uBAAuB,KAAK,UAAU,OAAO,CAAC,IAAI,KAAK,UAAU,UAAU,CAAC,IAAI,KAAK,UAAU,SAAS,CAAC;AAAA,UACzG,EAAE,SAAS,IAAM;AAAA,QACnB;AAAA,MACF;AAAA;AAAA,MAGA,MAAM,WAAW,KAAa,OAAgC;AAC5D,cAAM,IAAI;AAAA,UACR;AAAA,QAEF;AAAA,MACF;AAAA;AAAA,MAGA,MAAM,OACJ,IACA,YACA,WACgC;AAChC,cAAM,EAAE,QAAQ,IAAI,MAAM,KAAK,UAAU,EAAE;AAC3C,cAAM,QAAQ,MAAM,OAAO,CAAC,SAAS,GAAG,SAAS,IAAI,UAAU,IAAI,MAAM,OAAO,GAAG;AAAA,UACjF,KAAK,EAAE,GAAG,QAAQ,IAAI;AAAA,QACxB,CAAC;AAED,eAAO;AAAA,UACL,OAAO,MAAM;AACX,kBAAM,KAAK;AAAA,UACb;AAAA,QACF;AAAA,MACF;AAAA;AAAA;AAAA;AAAA;AAAA,MAOA,MAAM,sBAAsB,QAAkC;AAC5D,YAAI;AACF,gBAAM,EAAE,QAAQ,YAAY,IAAI,MAAM;AAAA,YACpC;AAAA,YACA,EAAE,UAAU,SAAS,SAAS,IAAM;AAAA,UACtC;AACA,cAAI,CAAC,aAAa,KAAK,EAAG,QAAO;AAEjC,gBAAM,MAAM,OAAO,KAAK,YAAY,KAAK,CAAC,EAAE,SAAS,QAAQ;AAC7D,gBAAM,KAAK,IAAI,QAAQ,+BAA+B,GAAG,6CAA6C;AACtG,iBAAO;AAAA,QACT,QAAQ;AACN,iBAAO;AAAA,QACT;AAAA,MACF;AAAA;AAAA,MAGA,MAAM,eAAe,QAAkC;AACrD,cAAM,eAAe,KAAK,QAAQ,GAAG,WAAW,MAAM,WAAW;AACjE,YAAI,CAAC,WAAW,YAAY,EAAG,QAAO;AAEtC,YAAI;AACF,gBAAM,UAAU,aAAa,cAAc,OAAO;AAClD,gBAAM,MAAM,OAAO,KAAK,OAAO,EAAE,SAAS,QAAQ;AAClD,gBAAM,KAAK,IAAI,QAAQ,kCAAkC,GAAG,wCAAwC;AACpG,iBAAO;AAAA,QACT,QAAQ;AACN,iBAAO;AAAA,QACT;AAAA,MACF;AAAA;AAAA,MAGA,MAAM,eAAe,QAAkC;AACrD,cAAM,iBAAiB,KAAK,QAAQ,GAAG,WAAW,YAAY,YAAY;AAC1E,YAAI,CAAC,WAAW,cAAc,EAAG,QAAO;AAExC,YAAI;AACF,gBAAM,UAAU,aAAa,gBAAgB,OAAO;AACpD,gBAAM,MAAM,OAAO,KAAK,OAAO,EAAE,SAAS,QAAQ;AAClD,gBAAM,KAAK,IAAI,QAAQ,wCAAwC,GAAG,+CAA+C;AACjH,iBAAO;AAAA,QACT,QAAQ;AACN,iBAAO;AAAA,QACT;AAAA,MACF;AAAA;AAAA,MAGA,MAAM,mBAAmB,QAA+D;AACtF,
cAAM,CAAC,QAAQ,MAAM,IAAI,MAAM,QAAQ,IAAI;AAAA,UACzC,KAAK,sBAAsB,MAAM;AAAA,UACjC,KAAK,eAAe,MAAM;AAAA,QAC5B,CAAC;AACD,eAAO,EAAE,QAAQ,OAAO;AAAA,MAC1B;AAAA;AAAA,MAGA,MAAM,aAAa,QAAkC;AAEnD,cAAM,QAAQ,MAAM,KAAK,IAAI,QAAQ,sBAAsB;AAC3D,YAAI,MAAM,aAAa,KAAK,MAAM,OAAO,KAAK,EAAG,QAAO;AAGxD,cAAM,SAAS,MAAM,KAAK,IAAI,QAAQ,sCAAsC;AAC5E,YAAI,OAAO,aAAa,GAAG;AAEzB,gBAAM,MAAM,MAAM,KAAK;AAAA,YACrB;AAAA,YACA;AAAA,UACF;AACA,iBAAO,IAAI,aAAa;AAAA,QAC1B;AACA,eAAO;AAAA,MACT;AAAA;AAAA,MAGA,MAAM,UAAU,QAAgB,gBAAwB,cAAgC;AACtF,cAAM,SAAS,MAAM,KAAK;AAAA,UACxB;AAAA,UACA,MAAM,aAAa;AAAA,QACrB;AACA,eAAO,OAAO,aAAa;AAAA,MAC7B;AAAA;AAAA,MAGA,MAAM,oBAAoB,QAA+B;AACvD,cAAM,KAAK,IAAI,QAAQ,oBAAoB;AAG3C,cAAM,mBAAmB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAYzB,cAAM,YAAY,OAAO,KAAK,gBAAgB,EAAE,SAAS,QAAQ;AACjE,cAAM,KAAK,IAAI,QAAQ,SAAS,SAAS,yBAAyB;AAGlE,cAAM,WAAW,KAAK,UAAU;AAAA,UAC9B,OAAO;AAAA,UACP,aAAa,EAAE,aAAa,oBAAoB;AAAA,QAClD,CAAC;AACD,cAAM,cAAc,OAAO,KAAK,QAAQ,EAAE,SAAS,QAAQ;AAC3D,cAAM,KAAK,IAAI,QAAQ,SAAS,WAAW,yCAAyC;AAAA,MACtF;AAAA;AAAA,MAGA,MAAM,eAAe,QAA+B;AAClD,cAAM,YAAY,KAAK,QAAQ,GAAG,eAAe,QAAQ;AACzD,YAAI,CAAC,WAAW,SAAS,EAAG;AAE5B,cAAM,KAAK,IAAI,QAAQ,2BAA2B;AAElD,YAAI;AACF,gBAAM,UAAU,YAAY,WAAW,EAAE,eAAe,KAAK,CAAC;AAC9D,qBAAW,SAAS,SAAS;AAC3B,gBAAI,CAAC,MAAM,OAAO,KAAK,CAAC,MAAM,KAAK,SAAS,KAAK,EAAG;AACpD,kBAAM,YAAY,KAAK,WAAW,MAAM,IAAI;AAC5C,kBAAM,UAAU,aAAa,WAAW,OAAO;AAC/C,kBAAM,MAAM,OAAO,KAAK,OAAO,EAAE,SAAS,QAAQ;AAClD,kBAAM,KAAK,IAAI,QAAQ,SAAS,GAAG,oCAAoC,MAAM,IAAI,EAAE;AAAA,UACrF;AAAA,QACF,QAAQ;AAAA,QAER;AAAA,MACF;AAAA;AAAA,MAGA,MAAM,eACJ,QACA,gBAAwB,cACxB,eACkB;AAClB,cAAM,MAAM,iBAAiB;AAG7B,cAAM,eAAe,MAAM,KAAK;AAAA,UAC9B;AAAA,UACA,MAAM,aAAa;AAAA,QACrB;AACA,YAAI,aAAa,aAAa,GAAG;AAC/B,iBAAO;AAAA,QACT;AAGA,cAAM,YAAY,MAAM,KAAK;AAAA,UAC3B;AAAA,UACA,MAAM,aAAa,sEAAsE,KAAK,UAAU,GAAG,CAAC;AAAA,QAC9G;AACA,eAAO,UAAU,aAAa;AAAA,MAChC;AAAA;AAAA,MAGA,MAAM,WACJ,QACA,YACA,gBAAwB,cACJ;AACpB,cAAM,SAAS,MAAM,KAAK;AAAA,UACxB;AAAA,UACA,MAAM,aAAa,iBAAiB,KAAK,UAAU,UAAU,CAAC;AAAA,QAChE;AACA,YAAI;AACF,iBAAO,KAAK,MAAM,OAAO,
OAAO,KAAK,KAAK,IAAI;AAAA,QAChD,QAAQ;AACN,iBAAO,CAAC;AAAA,QACV;AAAA,MACF;AAAA;AAAA,MAGA,aAAqB;AACnB,eAAO,KAAK,OAAO;AAAA,MACrB;AAAA,IACF;AAAA;AAAA;;;AC9dA,SAAS,QAAAA,aAAY;AACrB,SAAS,cAAAC,aAAY,gBAAAC,eAAc,eAAe,iBAAiB;AACnE,SAAS,WAAAC,gBAAe;AAkBxB,SAAS,wBAAwB,SAAyB;AACxD,SAAOH,MAAK,YAAY,SAAS,mBAAmB;AACtD;AAKA,SAAS,qBAAqB,OAA+B;AAC3D,QAAM,MAAMA,MAAK,YAAY,MAAM,EAAE;AACrC,MAAI,CAACC,YAAW,GAAG,GAAG;AACpB,cAAU,KAAK,EAAE,WAAW,KAAK,CAAC;AAAA,EACpC;AACA,gBAAc,wBAAwB,MAAM,EAAE,GAAG,KAAK,UAAU,OAAO,MAAM,CAAC,CAAC;AACjF;AAKO,SAAS,qBAAqB,SAA0C;AAC7E,QAAM,OAAO,wBAAwB,OAAO;AAC5C,MAAI,CAACA,YAAW,IAAI,EAAG,QAAO;AAE9B,MAAI;AACF,WAAO,KAAK,MAAMC,cAAa,MAAM,OAAO,CAAC;AAAA,EAC/C,QAAQ;AACN,WAAO;AAAA,EACT;AACF;AAKA,eAAe,oBACb,UACA,QACA,aACkB;AAClB,QAAM,SAAS,MAAM,SAAS,IAAI,QAAQ,uBAAuB,WAAW,+CAA+C;AAC3H,SAAO,OAAO,OAAO,KAAK,MAAM;AAClC;AAaA,eAAsB,iBAAiB,SAA6D;AAClG,QAAM,EAAE,SAAS,WAAW,QAAQ,qBAAqB,OAAO,IAAI;AAEpE,QAAM,UAAU,SAAS,QAAQ,YAAY,CAAC;AAC9C,QAAM,SAAS,UAAU;AAEzB,QAAM,MAAM,kBAAkB;AAG9B,QAAM,WAAW,MAAM,IAAI,UAAU,MAAM;AAC3C,MAAI,aAAa,WAAW;AAC1B,UAAM,IAAI,MAAM,MAAM,MAAM,uDAAuD,OAAO,EAAE;AAAA,EAC9F;AAGA,MAAI,MAAM,oBAAoB,KAAK,QAAQ,OAAO,GAAG;AACnD,UAAM,IAAI,MAAM,SAAS,OAAO,uBAAuB,MAAM,sCAAsC;AAAA,EACrG;AAGA,QAAM,QAA0B;AAAA,IAC9B,IAAI;AAAA,IACJ;AAAA,IACA;AAAA,IACA;AAAA,IACA,QAAQ;AAAA,IACR,YAAW,oBAAI,KAAK,GAAE,YAAY;AAAA,IAClC,UAAU;AAAA,EACZ;AAEA,uBAAqB,KAAK;AAG1B,MAAI;AAEJ,MAAI,QAAQ;AAEV,UAAM,aAAa,kCAAkC,OAAO;AAC5D,UAAM,IAAI,IAAI,QAAQ,yCAAyC;AAC/D,UAAM,eAAe,OAAO,KAAK,MAAM,EAAE,SAAS,QAAQ;AAC1D,UAAM,IAAI,IAAI,QAAQ,SAAS,YAAY,mBAAmB,UAAU,EAAE;AAG1E,UAAM,iBAAiB,kCAAkC,OAAO;AAChE,UAAM,kBAAkB;AAAA;AAAA,gBAEX,UAAU;AAAA,qDAC0B,KAAK;AAAA;AAEtD,UAAM,iBAAiB,OAAO,KAAK,eAAe,EAAE,SAAS,QAAQ;AACrE,UAAM,IAAI,IAAI,QAAQ,SAAS,cAAc,mBAAmB,cAAc,gBAAgB,cAAc,EAAE;AAE9G,gBAAY,QAAQ,cAAc;AAAA,EACpC,OAAO;AACL,gBAAY,iDAAiD,KAAK;AAAA,EACpE;AAGA,QAAM,UAAU,0BAA0B,OAAO,mBAAmB,SAAS;AAC7E,QAAM,SAAS,MAAM,IAAI,IAAI,QAAQ,OAAO;AAE5C,MAAI,OAAO,aAAa,GAAG;AACzB,UAAM,SAAS;AACf,yBAAqB,KAAK;AAC1B,UAAM,IAAI,MAAM,0BAA0B,OAAO,MAAM,EAAE;AAAA,EAC3D;AAG
A,QAAM,SAAS;AACf,uBAAqB,KAAK;AAE1B,SAAO;AACT;AAKA,eAAsB,qBACpB,SACA,QACA,QAAgB,KACC;AACjB,QAAM,MAAM,kBAAkB;AAE9B,QAAM,SAAS,MAAM,IAAI,IAAI,QAAQ,wBAAwB,OAAO,WAAW,KAAK,EAAE;AACtF,SAAO,OAAO;AAChB;AAKA,eAAsB,kBACpB,SACA,QACA,SACe;AACf,QAAM,MAAM,kBAAkB;AAG9B,QAAM,iBAAiB,QAAQ,QAAQ,MAAM,OAAO;AAGpD,QAAM,IAAI,IAAI,QAAQ,qBAAqB,OAAO,KAAK,cAAc,GAAG;AACxE,QAAM,IAAI,IAAI,QAAQ,qBAAqB,OAAO,MAAM;AAC1D;AAKA,eAAsB,qBACpB,SACA,QACkB;AAClB,QAAM,MAAM,kBAAkB;AAC9B,SAAO,oBAAoB,KAAK,QAAQ,OAAO;AACjD;AAKA,eAAsB,gBACpB,SACA,QACe;AACf,QAAM,MAAM,kBAAkB;AAC9B,QAAM,IAAI,IAAI,QAAQ,wBAAwB,OAAO,sBAAsB;AAG3E,QAAM,QAAQ,qBAAqB,OAAO;AAC1C,MAAI,OAAO;AACT,UAAM,SAAS;AACf,yBAAqB,KAAK;AAAA,EAC5B;AACF;AAKA,eAAsB,iBAAiB,QAAmC;AACxE,QAAM,MAAM,kBAAkB;AAE9B,QAAM,SAAS,MAAM,IAAI,IAAI,QAAQ,8EAA8E;AACnH,MAAI,CAAC,OAAO,OAAO,KAAK,GAAG;AACzB,WAAO,CAAC;AAAA,EACV;AAEA,SAAO,OAAO,OAAO,KAAK,EAAE,MAAM,IAAI,EAAE,OAAO,OAAO;AACxD;AAMA,eAAsB,sBACpB,SACA,QAKC;AACD,QAAM,MAAM,kBAAkB;AAG9B,QAAM,YAAY,MAAM,oBAAoB,KAAK,QAAQ,OAAO;AAEhE,MAAI,CAAC,WAAW;AACd,WAAO,EAAE,WAAW,OAAO,YAAY,IAAI,UAAU,CAAC,EAAE;AAAA,EAC1D;AAGA,QAAM,SAAS,MAAM,qBAAqB,SAAS,QAAQ,EAAE;AAG7D,QAAM,WAAqB,CAAC;AAC5B,QAAM,cAAc;AACpB,MAAI;AACJ,UAAQ,QAAQ,YAAY,KAAK,MAAM,OAAO,MAAM;AAClD,aAAS,KAAK,MAAM,CAAC,CAAC;AAAA,EACxB;AAEA,SAAO;AAAA,IACL;AAAA,IACA,YAAY;AAAA,IACZ;AAAA,EACF;AACF;AA3QA,IAaM;AAbN;AAAA;AAAA;AAOA;AAMA,IAAM,aAAaF,MAAKG,SAAQ,GAAG,eAAe,QAAQ;AAAA;AAAA;","names":["join","existsSync","readFileSync","homedir"]}
|
|
@@ -1,3 +1,8 @@
|
|
|
1
|
+
import {
|
|
2
|
+
createFlyProvider,
|
|
3
|
+
init_fly_provider,
|
|
4
|
+
init_remote_agents
|
|
5
|
+
} from "./chunk-GUV2EPBG.js";
|
|
1
6
|
import {
|
|
2
7
|
__require,
|
|
3
8
|
init_esm_shims
|
|
@@ -48,11 +53,50 @@ function deleteWorkspaceMetadata(issueId) {
|
|
|
48
53
|
}
|
|
49
54
|
}
|
|
50
55
|
|
|
56
|
+
// src/lib/remote/index.ts
|
|
57
|
+
init_esm_shims();
|
|
58
|
+
init_fly_provider();
|
|
59
|
+
init_remote_agents();
|
|
60
|
+
init_fly_provider();
|
|
61
|
+
async function isRemoteAvailable() {
|
|
62
|
+
const fly = createFlyProvider();
|
|
63
|
+
try {
|
|
64
|
+
const isAuth = await fly.isAuthenticated();
|
|
65
|
+
if (!isAuth) {
|
|
66
|
+
return {
|
|
67
|
+
available: false,
|
|
68
|
+
reason: "Not authenticated with Fly.io. Set FLY_API_TOKEN or run: fly auth login"
|
|
69
|
+
};
|
|
70
|
+
}
|
|
71
|
+
return { available: true };
|
|
72
|
+
} catch (error) {
|
|
73
|
+
return {
|
|
74
|
+
available: false,
|
|
75
|
+
reason: `Fly.io not available: ${error.message}`
|
|
76
|
+
};
|
|
77
|
+
}
|
|
78
|
+
}
|
|
79
|
+
function createFlyProviderFromConfig(remoteConfig) {
|
|
80
|
+
const fly = remoteConfig?.fly;
|
|
81
|
+
const tokenEnv = fly?.api_token_env ?? "FLY_API_TOKEN";
|
|
82
|
+
return createFlyProvider({
|
|
83
|
+
app: fly?.app,
|
|
84
|
+
org: fly?.org,
|
|
85
|
+
region: fly?.region,
|
|
86
|
+
vmSize: fly?.vm_size,
|
|
87
|
+
vmMemory: fly?.vm_memory,
|
|
88
|
+
image: fly?.image,
|
|
89
|
+
apiToken: process.env[tokenEnv]
|
|
90
|
+
});
|
|
91
|
+
}
|
|
92
|
+
|
|
51
93
|
export {
|
|
52
94
|
WORKSPACES_DIR,
|
|
53
95
|
saveWorkspaceMetadata,
|
|
54
96
|
loadWorkspaceMetadata,
|
|
55
97
|
findRemoteWorkspaceMetadata,
|
|
56
|
-
deleteWorkspaceMetadata
|
|
98
|
+
deleteWorkspaceMetadata,
|
|
99
|
+
isRemoteAvailable,
|
|
100
|
+
createFlyProviderFromConfig
|
|
57
101
|
};
|
|
58
|
-
//# sourceMappingURL=chunk-
|
|
102
|
+
//# sourceMappingURL=chunk-HHL3AWXA.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../src/lib/remote/workspace-metadata.ts","../src/lib/remote/index.ts"],"sourcesContent":["/**\n * Workspace Metadata Management\n *\n * Shared module for loading, saving, and listing workspace metadata.\n * Used by both workspace.ts and work/issue.ts for remote workspace support.\n */\n\nimport { existsSync, mkdirSync, writeFileSync, readdirSync, readFileSync } from 'fs';\nimport { join } from 'path';\nimport { homedir } from 'os';\nimport { parse, stringify } from 'yaml';\nimport type { RemoteWorkspaceMetadata } from './interface.js';\n\n// Path for workspace metadata\nexport const WORKSPACES_DIR = join(homedir(), '.panopticon', 'workspaces');\n\n/**\n * Save workspace metadata to ~/.panopticon/workspaces/{issueId}.yaml\n */\nexport function saveWorkspaceMetadata(metadata: RemoteWorkspaceMetadata): void {\n if (!existsSync(WORKSPACES_DIR)) {\n mkdirSync(WORKSPACES_DIR, { recursive: true });\n }\n\n const filename = join(WORKSPACES_DIR, `${metadata.id}.yaml`);\n writeFileSync(filename, stringify(metadata), 'utf-8');\n}\n\n/**\n * Load workspace metadata from ~/.panopticon/workspaces/{issueId}.yaml\n */\nexport function loadWorkspaceMetadata(issueId: string): RemoteWorkspaceMetadata | null {\n const normalizedId = issueId.toLowerCase().replace(/[^a-z0-9-]/g, '-');\n const filename = join(WORKSPACES_DIR, `${normalizedId}.yaml`);\n\n if (!existsSync(filename)) {\n return null;\n }\n\n try {\n const content = readFileSync(filename, 'utf-8');\n return parse(content) as RemoteWorkspaceMetadata;\n } catch {\n return null;\n }\n}\n\n/**\n * List all workspace metadata files\n */\nexport function listWorkspaceMetadata(): RemoteWorkspaceMetadata[] {\n if (!existsSync(WORKSPACES_DIR)) {\n return [];\n }\n\n const files = readdirSync(WORKSPACES_DIR).filter(f => f.endsWith('.yaml'));\n const workspaces: RemoteWorkspaceMetadata[] = [];\n\n for (const file of files) {\n try {\n const content = readFileSync(join(WORKSPACES_DIR, file), 'utf-8');\n 
workspaces.push(parse(content) as RemoteWorkspaceMetadata);\n } catch {\n // Skip invalid files\n }\n }\n\n return workspaces;\n}\n\n/**\n * Check if a workspace exists (local or remote)\n * Returns metadata if remote workspace exists, null otherwise\n */\nexport function findRemoteWorkspaceMetadata(issueId: string): RemoteWorkspaceMetadata | null {\n return loadWorkspaceMetadata(issueId);\n}\n\n/**\n * Delete workspace metadata\n */\nexport function deleteWorkspaceMetadata(issueId: string): boolean {\n const normalizedId = issueId.toLowerCase().replace(/[^a-z0-9-]/g, '-');\n const filename = join(WORKSPACES_DIR, `${normalizedId}.yaml`);\n\n if (!existsSync(filename)) {\n return false;\n }\n\n try {\n const { unlinkSync } = require('fs');\n unlinkSync(filename);\n return true;\n } catch {\n return false;\n }\n}\n","/**\n * Remote Workspace Module\n *\n * Provides support for running workspaces on remote Fly.io Machines\n * to offload Claude agents from the local machine.\n */\n\nexport type {\n RemoteProvider,\n VmInfo,\n VmStatus,\n ExecResult,\n RemoteProviderConfig,\n RemoteWorkspaceMetadata,\n} from './interface.js';\n\nexport { FlyProvider, createFlyProvider } from './fly-provider.js';\nexport type { FlyProviderConfig } from './fly-provider.js';\n\n// Remote agent management\nexport {\n spawnRemoteAgent,\n getRemoteAgentOutput,\n sendToRemoteAgent,\n isRemoteAgentRunning,\n killRemoteAgent,\n listRemoteAgents,\n pollRemoteAgentStatus,\n loadRemoteAgentState,\n} from './remote-agents.js';\nexport type { RemoteAgentState, SpawnRemoteAgentOptions } from './remote-agents.js';\n\n// Workspace metadata management\nexport {\n saveWorkspaceMetadata,\n loadWorkspaceMetadata,\n listWorkspaceMetadata,\n deleteWorkspaceMetadata,\n findRemoteWorkspaceMetadata,\n WORKSPACES_DIR,\n} from './workspace-metadata.js';\n\nimport { FlyProvider, createFlyProvider } from './fly-provider.js';\nimport type { RemoteProvider, RemoteProviderConfig } from './interface.js';\n\nexport type 
ProviderType = 'fly';\n\n/**\n * Get a remote provider by type\n */\nexport function getRemoteProvider(\n type: ProviderType,\n config?: RemoteProviderConfig\n): RemoteProvider {\n switch (type) {\n case 'fly':\n return createFlyProvider();\n default:\n throw new Error(`Unknown remote provider type: ${type}`);\n }\n}\n\n/**\n * Check if remote providers are available\n */\nexport async function isRemoteAvailable(): Promise<{ available: boolean; reason?: string }> {\n const fly = createFlyProvider();\n\n try {\n const isAuth = await fly.isAuthenticated();\n if (!isAuth) {\n return {\n available: false,\n reason: 'Not authenticated with Fly.io. Set FLY_API_TOKEN or run: fly auth login',\n };\n }\n return { available: true };\n } catch (error: any) {\n return {\n available: false,\n reason: `Fly.io not available: ${error.message}`,\n };\n }\n}\n\n/**\n * Create a FlyProvider from config settings\n */\nexport function createFlyProviderFromConfig(remoteConfig?: {\n fly?: {\n app?: string;\n org?: string;\n region?: string;\n vm_size?: string;\n vm_memory?: number;\n image?: string;\n api_token_env?: string;\n };\n}): FlyProvider {\n const fly = remoteConfig?.fly;\n const tokenEnv = fly?.api_token_env ?? 
'FLY_API_TOKEN';\n return createFlyProvider({\n app: fly?.app,\n org: fly?.org,\n region: fly?.region,\n vmSize: fly?.vm_size,\n vmMemory: fly?.vm_memory,\n image: fly?.image,\n apiToken: process.env[tokenEnv],\n });\n}\n"],"mappings":";;;;;;;;;;;AAAA;AAOA,SAAS,YAAY,WAAW,eAAe,aAAa,oBAAoB;AAChF,SAAS,YAAY;AACrB,SAAS,eAAe;AACxB,SAAS,OAAO,iBAAiB;AAI1B,IAAM,iBAAiB,KAAK,QAAQ,GAAG,eAAe,YAAY;AAKlE,SAAS,sBAAsB,UAAyC;AAC7E,MAAI,CAAC,WAAW,cAAc,GAAG;AAC/B,cAAU,gBAAgB,EAAE,WAAW,KAAK,CAAC;AAAA,EAC/C;AAEA,QAAM,WAAW,KAAK,gBAAgB,GAAG,SAAS,EAAE,OAAO;AAC3D,gBAAc,UAAU,UAAU,QAAQ,GAAG,OAAO;AACtD;AAKO,SAAS,sBAAsB,SAAiD;AACrF,QAAM,eAAe,QAAQ,YAAY,EAAE,QAAQ,eAAe,GAAG;AACrE,QAAM,WAAW,KAAK,gBAAgB,GAAG,YAAY,OAAO;AAE5D,MAAI,CAAC,WAAW,QAAQ,GAAG;AACzB,WAAO;AAAA,EACT;AAEA,MAAI;AACF,UAAM,UAAU,aAAa,UAAU,OAAO;AAC9C,WAAO,MAAM,OAAO;AAAA,EACtB,QAAQ;AACN,WAAO;AAAA,EACT;AACF;AA6BO,SAAS,4BAA4B,SAAiD;AAC3F,SAAO,sBAAsB,OAAO;AACtC;AAKO,SAAS,wBAAwB,SAA0B;AAChE,QAAM,eAAe,QAAQ,YAAY,EAAE,QAAQ,eAAe,GAAG;AACrE,QAAM,WAAW,KAAK,gBAAgB,GAAG,YAAY,OAAO;AAE5D,MAAI,CAAC,WAAW,QAAQ,GAAG;AACzB,WAAO;AAAA,EACT;AAEA,MAAI;AACF,UAAM,EAAE,WAAW,IAAI,UAAQ,IAAI;AACnC,eAAW,QAAQ;AACnB,WAAO;AAAA,EACT,QAAQ;AACN,WAAO;AAAA,EACT;AACF;;;AChGA;AAgBA;AAIA;AAsBA;AAuBA,eAAsB,oBAAsE;AAC1F,QAAM,MAAM,kBAAkB;AAE9B,MAAI;AACF,UAAM,SAAS,MAAM,IAAI,gBAAgB;AACzC,QAAI,CAAC,QAAQ;AACX,aAAO;AAAA,QACL,WAAW;AAAA,QACX,QAAQ;AAAA,MACV;AAAA,IACF;AACA,WAAO,EAAE,WAAW,KAAK;AAAA,EAC3B,SAAS,OAAY;AACnB,WAAO;AAAA,MACL,WAAW;AAAA,MACX,QAAQ,yBAAyB,MAAM,OAAO;AAAA,IAChD;AAAA,EACF;AACF;AAKO,SAAS,4BAA4B,cAU5B;AACd,QAAM,MAAM,cAAc;AAC1B,QAAM,WAAW,KAAK,iBAAiB;AACvC,SAAO,kBAAkB;AAAA,IACvB,KAAK,KAAK;AAAA,IACV,KAAK,KAAK;AAAA,IACV,QAAQ,KAAK;AAAA,IACb,QAAQ,KAAK;AAAA,IACb,UAAU,KAAK;AAAA,IACf,OAAO,KAAK;AAAA,IACZ,UAAU,QAAQ,IAAI,QAAQ;AAAA,EAChC,CAAC;AACH;","names":[]}
|
|
@@ -29,6 +29,7 @@ function initSchema(db) {
|
|
|
29
29
|
cache_write INTEGER NOT NULL DEFAULT 0,
|
|
30
30
|
cost REAL NOT NULL DEFAULT 0,
|
|
31
31
|
request_id TEXT,
|
|
32
|
+
session_id TEXT, -- Claude Code session UUID (for reconciler offset tracking)
|
|
32
33
|
-- TLDR metrics
|
|
33
34
|
tldr_interceptions INTEGER,
|
|
34
35
|
tldr_bypasses INTEGER,
|
|
@@ -50,6 +51,9 @@ function initSchema(db) {
|
|
|
50
51
|
CREATE INDEX IF NOT EXISTS idx_cost_ts
|
|
51
52
|
ON cost_events(ts);
|
|
52
53
|
|
|
54
|
+
CREATE INDEX IF NOT EXISTS idx_cost_session_id
|
|
55
|
+
ON cost_events(session_id) WHERE session_id IS NOT NULL;
|
|
56
|
+
|
|
53
57
|
-- ===== Review Status =====
|
|
54
58
|
CREATE TABLE IF NOT EXISTS review_status (
|
|
55
59
|
issue_id TEXT PRIMARY KEY,
|
|
@@ -107,11 +111,15 @@ function initSchema(db) {
|
|
|
107
111
|
CREATE INDEX IF NOT EXISTS idx_health_timestamp
|
|
108
112
|
ON health_events(timestamp);
|
|
109
113
|
|
|
110
|
-
-- ===== Processed Sessions (for
|
|
114
|
+
-- ===== Processed Sessions (for reconciler offset tracking) =====
|
|
111
115
|
CREATE TABLE IF NOT EXISTS processed_sessions (
|
|
112
|
-
session_id
|
|
113
|
-
|
|
114
|
-
|
|
116
|
+
session_id TEXT PRIMARY KEY,
|
|
117
|
+
agent_id TEXT,
|
|
118
|
+
issue_id TEXT,
|
|
119
|
+
transcript_path TEXT, -- full path to the .jsonl file
|
|
120
|
+
byte_offset INTEGER NOT NULL DEFAULT 0, -- bytes consumed so far
|
|
121
|
+
processed_at TEXT NOT NULL,
|
|
122
|
+
event_count INTEGER NOT NULL DEFAULT 0
|
|
115
123
|
);
|
|
116
124
|
|
|
117
125
|
-- ===== API Cache =====
|
|
@@ -153,6 +161,32 @@ function runMigrations(db) {
|
|
|
153
161
|
ON status_history(issue_id, type, status, timestamp);
|
|
154
162
|
`);
|
|
155
163
|
}
|
|
164
|
+
if (currentVersion < 3) {
|
|
165
|
+
try {
|
|
166
|
+
db.exec(`ALTER TABLE cost_events ADD COLUMN session_id TEXT`);
|
|
167
|
+
} catch {
|
|
168
|
+
}
|
|
169
|
+
db.exec(`
|
|
170
|
+
CREATE INDEX IF NOT EXISTS idx_cost_session_id
|
|
171
|
+
ON cost_events(session_id) WHERE session_id IS NOT NULL;
|
|
172
|
+
`);
|
|
173
|
+
try {
|
|
174
|
+
db.exec(`ALTER TABLE processed_sessions ADD COLUMN agent_id TEXT`);
|
|
175
|
+
} catch {
|
|
176
|
+
}
|
|
177
|
+
try {
|
|
178
|
+
db.exec(`ALTER TABLE processed_sessions ADD COLUMN issue_id TEXT`);
|
|
179
|
+
} catch {
|
|
180
|
+
}
|
|
181
|
+
try {
|
|
182
|
+
db.exec(`ALTER TABLE processed_sessions ADD COLUMN transcript_path TEXT`);
|
|
183
|
+
} catch {
|
|
184
|
+
}
|
|
185
|
+
try {
|
|
186
|
+
db.exec(`ALTER TABLE processed_sessions ADD COLUMN byte_offset INTEGER NOT NULL DEFAULT 0`);
|
|
187
|
+
} catch {
|
|
188
|
+
}
|
|
189
|
+
}
|
|
156
190
|
db.pragma(`user_version = ${SCHEMA_VERSION}`);
|
|
157
191
|
}
|
|
158
192
|
var SCHEMA_VERSION;
|
|
@@ -160,7 +194,7 @@ var init_schema = __esm({
|
|
|
160
194
|
"src/lib/database/schema.ts"() {
|
|
161
195
|
"use strict";
|
|
162
196
|
init_esm_shims();
|
|
163
|
-
SCHEMA_VERSION =
|
|
197
|
+
SCHEMA_VERSION = 3;
|
|
164
198
|
}
|
|
165
199
|
});
|
|
166
200
|
|
|
@@ -449,4 +483,4 @@ export {
|
|
|
449
483
|
clearReviewStatus,
|
|
450
484
|
init_review_status
|
|
451
485
|
};
|
|
452
|
-
//# sourceMappingURL=chunk-
|
|
486
|
+
//# sourceMappingURL=chunk-IZIXJYXZ.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../src/lib/database/schema.ts","../src/lib/database/index.ts","../src/lib/database/review-status-db.ts","../src/lib/review-status.ts"],"sourcesContent":["/**\n * Panopticon Database Schema\n *\n * Defines the unified schema for panopticon.db.\n * All persistent application state lives here.\n */\n\nimport type Database from 'better-sqlite3';\n\n// Schema version — increment when making breaking schema changes\nexport const SCHEMA_VERSION = 3;\n\n/**\n * Initialize the complete database schema.\n * Idempotent — uses CREATE TABLE IF NOT EXISTS throughout.\n */\nexport function initSchema(db: Database.Database): void {\n db.exec(`\n -- ===== Cost Events =====\n CREATE TABLE IF NOT EXISTS cost_events (\n id INTEGER PRIMARY KEY AUTOINCREMENT,\n ts TEXT NOT NULL,\n agent_id TEXT NOT NULL,\n issue_id TEXT NOT NULL,\n session_type TEXT NOT NULL DEFAULT 'unknown',\n provider TEXT NOT NULL DEFAULT 'anthropic',\n model TEXT NOT NULL,\n input INTEGER NOT NULL DEFAULT 0,\n output INTEGER NOT NULL DEFAULT 0,\n cache_read INTEGER NOT NULL DEFAULT 0,\n cache_write INTEGER NOT NULL DEFAULT 0,\n cost REAL NOT NULL DEFAULT 0,\n request_id TEXT,\n session_id TEXT, -- Claude Code session UUID (for reconciler offset tracking)\n -- TLDR metrics\n tldr_interceptions INTEGER,\n tldr_bypasses INTEGER,\n tldr_tokens_saved INTEGER,\n tldr_bypass_reasons TEXT, -- JSON string\n -- WAL source tracking\n source_file TEXT -- path of WAL file this came from (for imports)\n );\n\n CREATE UNIQUE INDEX IF NOT EXISTS idx_cost_request_id\n ON cost_events(request_id) WHERE request_id IS NOT NULL;\n\n CREATE INDEX IF NOT EXISTS idx_cost_issue_id\n ON cost_events(issue_id, ts);\n\n CREATE INDEX IF NOT EXISTS idx_cost_agent_id\n ON cost_events(agent_id, ts);\n\n CREATE INDEX IF NOT EXISTS idx_cost_ts\n ON cost_events(ts);\n\n CREATE INDEX IF NOT EXISTS idx_cost_session_id\n ON cost_events(session_id) WHERE session_id IS NOT NULL;\n\n -- ===== Review Status =====\n CREATE TABLE IF 
NOT EXISTS review_status (\n issue_id TEXT PRIMARY KEY,\n review_status TEXT NOT NULL DEFAULT 'pending',\n test_status TEXT NOT NULL DEFAULT 'pending',\n merge_status TEXT,\n verification_status TEXT,\n verification_notes TEXT,\n verification_cycle_count INTEGER DEFAULT 0,\n verification_max_cycles INTEGER,\n review_notes TEXT,\n test_notes TEXT,\n merge_notes TEXT,\n updated_at TEXT NOT NULL,\n ready_for_merge INTEGER NOT NULL DEFAULT 0,\n auto_requeue_count INTEGER DEFAULT 0,\n pr_url TEXT\n );\n\n CREATE INDEX IF NOT EXISTS idx_review_status_updated\n ON review_status(updated_at);\n\n -- ===== Status History =====\n CREATE TABLE IF NOT EXISTS status_history (\n id INTEGER PRIMARY KEY AUTOINCREMENT,\n issue_id TEXT NOT NULL,\n type TEXT NOT NULL, -- 'review', 'test', 'merge'\n status TEXT NOT NULL,\n timestamp TEXT NOT NULL,\n notes TEXT,\n FOREIGN KEY (issue_id) REFERENCES review_status(issue_id) ON DELETE CASCADE\n );\n\n CREATE INDEX IF NOT EXISTS idx_status_history_issue\n ON status_history(issue_id, timestamp);\n\n -- UNIQUE constraint enables INSERT OR IGNORE deduplication in upsertReviewStatus\n CREATE UNIQUE INDEX IF NOT EXISTS idx_status_history_unique\n ON status_history(issue_id, type, status, timestamp);\n\n -- ===== Health Events =====\n CREATE TABLE IF NOT EXISTS health_events (\n id INTEGER PRIMARY KEY AUTOINCREMENT,\n agent_id TEXT NOT NULL,\n timestamp TEXT NOT NULL,\n state TEXT NOT NULL,\n previous_state TEXT,\n source TEXT,\n metadata TEXT -- JSON string\n );\n\n CREATE INDEX IF NOT EXISTS idx_health_agent_timestamp\n ON health_events(agent_id, timestamp);\n\n CREATE INDEX IF NOT EXISTS idx_health_timestamp\n ON health_events(timestamp);\n\n -- ===== Processed Sessions (for reconciler offset tracking) =====\n CREATE TABLE IF NOT EXISTS processed_sessions (\n session_id TEXT PRIMARY KEY,\n agent_id TEXT,\n issue_id TEXT,\n transcript_path TEXT, -- full path to the .jsonl file\n byte_offset INTEGER NOT NULL DEFAULT 0, -- bytes consumed so far\n 
processed_at TEXT NOT NULL,\n event_count INTEGER NOT NULL DEFAULT 0\n );\n\n -- ===== API Cache =====\n CREATE TABLE IF NOT EXISTS api_cache (\n key TEXT PRIMARY KEY,\n value TEXT NOT NULL, -- JSON string\n expires_at TEXT,\n created_at TEXT NOT NULL\n );\n\n -- ===== Rate Limits =====\n CREATE TABLE IF NOT EXISTS rate_limits (\n service TEXT PRIMARY KEY,\n requests INTEGER NOT NULL DEFAULT 0,\n window_start TEXT NOT NULL,\n limit_per_window INTEGER NOT NULL DEFAULT 1000\n );\n `);\n\n // Record schema version\n db.pragma(`user_version = ${SCHEMA_VERSION}`);\n}\n\n/**\n * Run schema migrations if the database version is older than SCHEMA_VERSION.\n * This function handles upgrading from older schema versions.\n */\nexport function runMigrations(db: Database.Database): void {\n const currentVersion = db.pragma('user_version', { simple: true }) as number;\n\n if (currentVersion === SCHEMA_VERSION) {\n return; // Already at latest version\n }\n\n if (currentVersion === 0) {\n // Fresh database — just initialize the full schema\n initSchema(db);\n return;\n }\n\n // v1 → v2: add UNIQUE index on status_history for INSERT OR IGNORE dedup\n if (currentVersion < 2) {\n // Remove duplicate rows before adding the unique index (keep lowest id per unique key)\n db.exec(`\n DELETE FROM status_history\n WHERE id NOT IN (\n SELECT MIN(id)\n FROM status_history\n GROUP BY issue_id, type, status, timestamp\n );\n CREATE UNIQUE INDEX IF NOT EXISTS idx_status_history_unique\n ON status_history(issue_id, type, status, timestamp);\n `);\n }\n\n // v2 → v3: add session_id to cost_events, extend processed_sessions for reconciler\n if (currentVersion < 3) {\n // Add session_id column to cost_events (nullable, no data loss)\n try {\n db.exec(`ALTER TABLE cost_events ADD COLUMN session_id TEXT`);\n } catch {\n // Column may already exist if schema was manually applied\n }\n\n // Add index on session_id\n db.exec(`\n CREATE INDEX IF NOT EXISTS idx_cost_session_id\n ON 
cost_events(session_id) WHERE session_id IS NOT NULL;\n `);\n\n // Extend processed_sessions with new columns for reconciler\n try {\n db.exec(`ALTER TABLE processed_sessions ADD COLUMN agent_id TEXT`);\n } catch { /* already exists */ }\n try {\n db.exec(`ALTER TABLE processed_sessions ADD COLUMN issue_id TEXT`);\n } catch { /* already exists */ }\n try {\n db.exec(`ALTER TABLE processed_sessions ADD COLUMN transcript_path TEXT`);\n } catch { /* already exists */ }\n try {\n db.exec(`ALTER TABLE processed_sessions ADD COLUMN byte_offset INTEGER NOT NULL DEFAULT 0`);\n } catch { /* already exists */ }\n }\n\n // After all migrations, set the version\n db.pragma(`user_version = ${SCHEMA_VERSION}`);\n}\n","/**\n * Panopticon Unified Database\n *\n * Single panopticon.db at ~/.panopticon/panopticon.db.\n * Singleton pattern — one connection shared across the process.\n *\n * IMPORTANT: This module is safe to import in both server and CLI contexts.\n * Never use execSync here — this is synchronous SQLite, not a subprocess.\n */\n\nimport Database from 'better-sqlite3';\nimport { join } from 'path';\nimport { existsSync, mkdirSync } from 'fs';\nimport { getPanopticonHome } from '../paths.js';\nimport { runMigrations } from './schema.js';\n\nlet _db: Database.Database | null = null;\n\n/**\n * Get the path to panopticon.db (dynamic, respects PANOPTICON_HOME override for tests)\n */\nexport function getDatabasePath(): string {\n return join(getPanopticonHome(), 'panopticon.db');\n}\n\n/**\n * Initialize and return the singleton database connection.\n * Safe to call multiple times — returns the existing connection after first call.\n */\nexport function getDatabase(): Database.Database {\n if (_db) {\n return _db;\n }\n\n const home = getPanopticonHome();\n if (!existsSync(home)) {\n mkdirSync(home, { recursive: true });\n }\n\n const dbPath = getDatabasePath();\n _db = new Database(dbPath);\n\n // Enable WAL mode for concurrent readers + single writer\n 
_db.pragma('journal_mode = WAL');\n // Enforce foreign keys\n _db.pragma('foreign_keys = ON');\n // Write-ahead log synchronization — NORMAL is safe and fast\n _db.pragma('synchronous = NORMAL');\n\n // Initialize or migrate schema\n runMigrations(_db);\n\n return _db;\n}\n\n/**\n * Close the database connection and release the singleton.\n * Primarily used in tests to get a fresh connection.\n */\nexport function closeDatabase(): void {\n if (_db) {\n _db.close();\n _db = null;\n }\n}\n\n/**\n * Force re-initialization of the database connection.\n * Used in tests after PANOPTICON_HOME changes.\n */\nexport function resetDatabase(): void {\n closeDatabase();\n}\n","/**\n * Review Status SQLite Storage\n *\n * Provides SQLite-backed CRUD for ReviewStatus, matching the interface in\n * src/lib/review-status.ts. Atomic single-transaction writes eliminate the\n * TOCTOU race in the JSON-backed implementation.\n */\n\nimport { getDatabase } from './index.js';\nimport type { ReviewStatus, StatusHistoryEntry } from '../review-status.js';\n\n// ============== Write operations ==============\n\n/**\n * Upsert a review status record atomically.\n * Replaces the JSON read-modify-write cycle with a single transaction.\n */\nexport function upsertReviewStatus(status: ReviewStatus): void {\n const db = getDatabase();\n\n const upsert = db.transaction((s: ReviewStatus) => {\n // Upsert main record\n db.prepare(`\n INSERT INTO review_status (\n issue_id, review_status, test_status, merge_status,\n verification_status, verification_notes,\n verification_cycle_count, verification_max_cycles,\n review_notes, test_notes, merge_notes,\n updated_at, ready_for_merge, auto_requeue_count, pr_url\n ) VALUES (\n ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?\n )\n ON CONFLICT(issue_id) DO UPDATE SET\n review_status = excluded.review_status,\n test_status = excluded.test_status,\n merge_status = excluded.merge_status,\n verification_status = excluded.verification_status,\n verification_notes = 
excluded.verification_notes,\n verification_cycle_count = excluded.verification_cycle_count,\n verification_max_cycles = excluded.verification_max_cycles,\n review_notes = excluded.review_notes,\n test_notes = excluded.test_notes,\n merge_notes = excluded.merge_notes,\n updated_at = excluded.updated_at,\n ready_for_merge = excluded.ready_for_merge,\n auto_requeue_count = excluded.auto_requeue_count,\n pr_url = excluded.pr_url\n `).run(\n s.issueId,\n s.reviewStatus,\n s.testStatus,\n s.mergeStatus ?? null,\n s.verificationStatus ?? null,\n s.verificationNotes ?? null,\n s.verificationCycleCount ?? null,\n s.verificationMaxCycles ?? null,\n s.reviewNotes ?? null,\n s.testNotes ?? null,\n s.mergeNotes ?? null,\n s.updatedAt,\n s.readyForMerge ? 1 : 0,\n s.autoRequeueCount ?? null,\n s.prUrl ?? null,\n );\n\n // Append new history entries (deduplicate by timestamp to avoid re-inserting)\n if (s.history && s.history.length > 0) {\n const insertHistory = db.prepare(`\n INSERT OR IGNORE INTO status_history (issue_id, type, status, timestamp, notes)\n VALUES (?, ?, ?, ?, ?)\n `);\n for (const entry of s.history) {\n insertHistory.run(s.issueId, entry.type, entry.status, entry.timestamp, entry.notes ?? 
null);\n }\n }\n });\n\n upsert(status);\n}\n\n/**\n * Delete a review status record and its history.\n */\nexport function deleteReviewStatus(issueId: string): void {\n const db = getDatabase();\n db.prepare('DELETE FROM review_status WHERE issue_id = ?').run(issueId);\n}\n\n// ============== Read operations ==============\n\n/**\n * Get a single review status by issue ID.\n */\nexport function getReviewStatusFromDb(issueId: string): ReviewStatus | null {\n const db = getDatabase();\n\n const row = db.prepare(`\n SELECT * FROM review_status WHERE issue_id = ?\n `).get(issueId) as DbReviewStatusRow | undefined;\n\n if (!row) return null;\n\n const history = getHistoryFromDb(issueId);\n return rowToReviewStatus(row, history);\n}\n\n/**\n * Get all review statuses.\n */\nexport function getAllReviewStatusesFromDb(): Record<string, ReviewStatus> {\n const db = getDatabase();\n\n const rows = db.prepare('SELECT * FROM review_status ORDER BY updated_at DESC').all() as DbReviewStatusRow[];\n const result: Record<string, ReviewStatus> = {};\n\n for (const row of rows) {\n const history = getHistoryFromDb(row.issue_id);\n result[row.issue_id] = rowToReviewStatus(row, history);\n }\n\n return result;\n}\n\n/**\n * Get history entries for an issue.\n */\nfunction getHistoryFromDb(issueId: string): StatusHistoryEntry[] {\n const db = getDatabase();\n const rows = db.prepare(`\n SELECT type, status, timestamp, notes\n FROM status_history\n WHERE issue_id = ?\n ORDER BY timestamp ASC\n `).all(issueId) as Array<{ type: string; status: string; timestamp: string; notes: string | null }>;\n\n return rows.map(r => ({\n type: r.type as 'review' | 'test' | 'merge',\n status: r.status,\n timestamp: r.timestamp,\n ...(r.notes ? 
{ notes: r.notes } : {}),\n }));\n}\n\n// ============== Row mapping ==============\n\ninterface DbReviewStatusRow {\n issue_id: string;\n review_status: string;\n test_status: string;\n merge_status: string | null;\n verification_status: string | null;\n verification_notes: string | null;\n verification_cycle_count: number | null;\n verification_max_cycles: number | null;\n review_notes: string | null;\n test_notes: string | null;\n merge_notes: string | null;\n updated_at: string;\n ready_for_merge: number;\n auto_requeue_count: number | null;\n pr_url: string | null;\n}\n\nfunction rowToReviewStatus(row: DbReviewStatusRow, history: StatusHistoryEntry[]): ReviewStatus {\n return {\n issueId: row.issue_id,\n reviewStatus: row.review_status as ReviewStatus['reviewStatus'],\n testStatus: row.test_status as ReviewStatus['testStatus'],\n mergeStatus: row.merge_status as ReviewStatus['mergeStatus'] ?? undefined,\n verificationStatus: row.verification_status as ReviewStatus['verificationStatus'] ?? undefined,\n verificationNotes: row.verification_notes ?? undefined,\n verificationCycleCount: row.verification_cycle_count ?? undefined,\n verificationMaxCycles: row.verification_max_cycles ?? undefined,\n reviewNotes: row.review_notes ?? undefined,\n testNotes: row.test_notes ?? undefined,\n mergeNotes: row.merge_notes ?? undefined,\n updatedAt: row.updated_at,\n readyForMerge: row.ready_for_merge === 1,\n autoRequeueCount: row.auto_requeue_count ?? undefined,\n prUrl: row.pr_url ?? undefined,\n history: history.length > 0 ? 
history : undefined,\n };\n}\n","import { existsSync, readFileSync, writeFileSync, mkdirSync } from 'fs';\nimport { join, dirname } from 'path';\nimport { homedir } from 'os';\nimport { notifyPipeline } from './pipeline-notifier.js';\nimport {\n upsertReviewStatus as dbUpsert,\n deleteReviewStatus as dbDelete,\n getReviewStatusFromDb,\n getAllReviewStatusesFromDb,\n} from './database/review-status-db.js';\n\nexport interface StatusHistoryEntry {\n type: 'review' | 'test' | 'merge';\n status: string;\n timestamp: string;\n notes?: string;\n}\n\nexport interface ReviewStatus {\n issueId: string;\n reviewStatus: 'pending' | 'reviewing' | 'passed' | 'failed' | 'blocked';\n testStatus: 'pending' | 'testing' | 'passed' | 'failed' | 'skipped' | 'dispatch_failed';\n mergeStatus?: 'pending' | 'merging' | 'merged' | 'failed';\n verificationStatus?: 'pending' | 'running' | 'passed' | 'failed' | 'skipped';\n verificationNotes?: string;\n verificationCycleCount?: number;\n verificationMaxCycles?: number;\n reviewNotes?: string;\n testNotes?: string;\n mergeNotes?: string;\n updatedAt: string;\n readyForMerge: boolean;\n autoRequeueCount?: number;\n prUrl?: string;\n history?: StatusHistoryEntry[];\n}\n\nconst DEFAULT_STATUS_FILE = join(homedir(), '.panopticon', 'review-status.json');\n\nexport function loadReviewStatuses(filePath = DEFAULT_STATUS_FILE): Record<string, ReviewStatus> {\n // Prefer SQLite when using the default path\n if (filePath === DEFAULT_STATUS_FILE) {\n try {\n return getAllReviewStatusesFromDb();\n } catch {\n // Fall through to JSON on DB error\n }\n }\n\n try {\n if (existsSync(filePath)) {\n return JSON.parse(readFileSync(filePath, 'utf-8'));\n }\n } catch (err) {\n console.error('Failed to load review statuses:', err);\n }\n return {};\n}\n\nexport function saveReviewStatuses(statuses: Record<string, ReviewStatus>, filePath = DEFAULT_STATUS_FILE): void {\n try {\n const dir = dirname(filePath);\n if (!existsSync(dir)) {\n mkdirSync(dir, { recursive: 
true });\n }\n writeFileSync(filePath, JSON.stringify(statuses, null, 2));\n } catch (err) {\n console.error('Failed to save review statuses:', err);\n }\n}\n\nexport function setReviewStatus(\n issueId: string,\n update: Partial<ReviewStatus>,\n filePath = DEFAULT_STATUS_FILE,\n): ReviewStatus {\n const statuses = loadReviewStatuses(filePath);\n const existing = statuses[issueId] || {\n issueId,\n reviewStatus: 'pending' as const,\n testStatus: 'pending' as const,\n updatedAt: new Date().toISOString(),\n readyForMerge: false,\n };\n\n // Guard: reject reviewStatus regression from 'passed' to 'reviewing' unless the caller\n // is explicitly resetting the merge lifecycle (update includes mergeStatus).\n // This is belt-and-suspenders — endpoint-level guards should catch this first.\n if (update.reviewStatus === 'reviewing' && existing.reviewStatus === 'passed' && update.mergeStatus === undefined) {\n console.warn(`[review-status] Rejecting reviewStatus regression from 'passed' to 'reviewing' for ${issueId} (mergeStatus not being reset)`);\n return existing as ReviewStatus;\n }\n\n const merged = { ...existing, ...update };\n\n // Track status transitions in history (last 10 entries)\n const history = [...(existing.history || [])];\n const now = new Date().toISOString();\n if (update.reviewStatus && update.reviewStatus !== existing.reviewStatus) {\n history.push({ type: 'review', status: update.reviewStatus, timestamp: now, notes: update.reviewNotes });\n }\n if (update.testStatus && update.testStatus !== existing.testStatus) {\n history.push({ type: 'test', status: update.testStatus, timestamp: now, notes: update.testNotes });\n }\n if (update.mergeStatus && update.mergeStatus !== existing.mergeStatus) {\n history.push({ type: 'merge', status: update.mergeStatus, timestamp: now });\n }\n while (history.length > 10) history.shift();\n\n const readyForMerge = update.readyForMerge !== undefined\n ? 
update.readyForMerge\n : (merged.reviewStatus === 'passed' && merged.testStatus === 'passed' && merged.mergeStatus !== 'merged');\n\n const updated: ReviewStatus = {\n ...merged,\n issueId,\n updatedAt: now,\n readyForMerge,\n history,\n };\n\n // SQLite first — it is the authoritative store (reads prefer SQLite)\n if (filePath === DEFAULT_STATUS_FILE) {\n try {\n dbUpsert(updated);\n } catch (err) {\n console.error('[review-status] SQLite write failed (continuing with JSON):', err);\n }\n }\n\n // JSON second — legacy fallback for tools that read review-status.json directly\n statuses[issueId] = updated;\n saveReviewStatuses(statuses, filePath);\n\n notifyPipeline({ type: 'status_changed', issueId, status: updated });\n\n return updated;\n}\n\nexport function getReviewStatus(issueId: string, filePath = DEFAULT_STATUS_FILE): ReviewStatus | null {\n // Prefer SQLite when using the default path\n if (filePath === DEFAULT_STATUS_FILE) {\n try {\n const fromDb = getReviewStatusFromDb(issueId);\n if (fromDb) return fromDb;\n } catch {\n // Fall through to JSON on DB error\n }\n }\n const statuses = loadReviewStatuses(filePath);\n return statuses[issueId] || null;\n}\n\nexport function clearReviewStatus(issueId: string, filePath = DEFAULT_STATUS_FILE): void {\n const statuses = loadReviewStatuses(filePath);\n delete statuses[issueId];\n saveReviewStatuses(statuses, filePath);\n\n // Dual-delete from SQLite when using the default path\n if (filePath === DEFAULT_STATUS_FILE) {\n try {\n dbDelete(issueId);\n } catch (err) {\n console.error('[review-status] SQLite delete failed (continuing with JSON):', err);\n }\n 
}\n}\n"],"mappings":";;;;;;;;;;;;;;AAgBO,SAAS,WAAW,IAA6B;AACtD,KAAG,KAAK;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,GA4HP;AAGD,KAAG,OAAO,kBAAkB,cAAc,EAAE;AAC9C;AAMO,SAAS,cAAc,IAA6B;AACzD,QAAM,iBAAiB,GAAG,OAAO,gBAAgB,EAAE,QAAQ,KAAK,CAAC;AAEjE,MAAI,mBAAmB,gBAAgB;AACrC;AAAA,EACF;AAEA,MAAI,mBAAmB,GAAG;AAExB,eAAW,EAAE;AACb;AAAA,EACF;AAGA,MAAI,iBAAiB,GAAG;AAEtB,OAAG,KAAK;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,KASP;AAAA,EACH;AAGA,MAAI,iBAAiB,GAAG;AAEtB,QAAI;AACF,SAAG,KAAK,oDAAoD;AAAA,IAC9D,QAAQ;AAAA,IAER;AAGA,OAAG,KAAK;AAAA;AAAA;AAAA,KAGP;AAGD,QAAI;AACF,SAAG,KAAK,yDAAyD;AAAA,IACnE,QAAQ;AAAA,IAAuB;AAC/B,QAAI;AACF,SAAG,KAAK,yDAAyD;AAAA,IACnE,QAAQ;AAAA,IAAuB;AAC/B,QAAI;AACF,SAAG,KAAK,gEAAgE;AAAA,IAC1E,QAAQ;AAAA,IAAuB;AAC/B,QAAI;AACF,SAAG,KAAK,kFAAkF;AAAA,IAC5F,QAAQ;AAAA,IAAuB;AAAA,EACjC;AAGA,KAAG,OAAO,kBAAkB,cAAc,EAAE;AAC9C;AAnNA,IAUa;AAVb;AAAA;AAAA;AAAA;AAUO,IAAM,iBAAiB;AAAA;AAAA;;;ACA9B,OAAO,cAAc;AACrB,SAAS,YAAY;AACrB,SAAS,YAAY,iBAAiB;AAS/B,SAAS,kBAA0B;AACxC,SAAO,KAAK,kBAAkB,GAAG,eAAe;AAClD;AAMO,SAAS,cAAiC;AAC/C,MAAI,KAAK;AACP,WAAO;AAAA,EACT;AAEA,QAAM,OAAO,kBAAkB;AAC/B,MAAI,CAAC,WAAW,IAAI,GAAG;AACrB,cAAU,MAAM,EAAE,WAAW,KAAK,CAAC;AAAA,EACrC;AAEA,QAAM,SAAS,gBAAgB;AAC/B,QAAM,IAAI,SAAS,MAAM;AAGzB,MAAI,OAAO,oBAAoB;AAE/B,MAAI,OAAO,mBAAmB;AAE9B,MAAI,OAAO,sBAAsB;AAGjC,gBAAc,GAAG;AAEjB,SAAO;AACT;AAMO,SAAS,gBAAsB;AACpC,MAAI,KAAK;AACP,QAAI,MAAM;AACV,UAAM;AAAA,EACR;AACF;AAhEA,IAgBI;AAhBJ;AAAA;AAAA;AAAA;AAaA;AACA;A
AEA,IAAI,MAAgC;AAAA;AAAA;;;ACC7B,SAAS,mBAAmB,QAA4B;AAC7D,QAAM,KAAK,YAAY;AAEvB,QAAM,SAAS,GAAG,YAAY,CAAC,MAAoB;AAEjD,OAAG,QAAQ;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,KAyBV,EAAE;AAAA,MACD,EAAE;AAAA,MACF,EAAE;AAAA,MACF,EAAE;AAAA,MACF,EAAE,eAAe;AAAA,MACjB,EAAE,sBAAsB;AAAA,MACxB,EAAE,qBAAqB;AAAA,MACvB,EAAE,0BAA0B;AAAA,MAC5B,EAAE,yBAAyB;AAAA,MAC3B,EAAE,eAAe;AAAA,MACjB,EAAE,aAAa;AAAA,MACf,EAAE,cAAc;AAAA,MAChB,EAAE;AAAA,MACF,EAAE,gBAAgB,IAAI;AAAA,MACtB,EAAE,oBAAoB;AAAA,MACtB,EAAE,SAAS;AAAA,IACb;AAGA,QAAI,EAAE,WAAW,EAAE,QAAQ,SAAS,GAAG;AACrC,YAAM,gBAAgB,GAAG,QAAQ;AAAA;AAAA;AAAA,OAGhC;AACD,iBAAW,SAAS,EAAE,SAAS;AAC7B,sBAAc,IAAI,EAAE,SAAS,MAAM,MAAM,MAAM,QAAQ,MAAM,WAAW,MAAM,SAAS,IAAI;AAAA,MAC7F;AAAA,IACF;AAAA,EACF,CAAC;AAED,SAAO,MAAM;AACf;AAKO,SAAS,mBAAmB,SAAuB;AACxD,QAAM,KAAK,YAAY;AACvB,KAAG,QAAQ,8CAA8C,EAAE,IAAI,OAAO;AACxE;AAOO,SAAS,sBAAsB,SAAsC;AAC1E,QAAM,KAAK,YAAY;AAEvB,QAAM,MAAM,GAAG,QAAQ;AAAA;AAAA,GAEtB,EAAE,IAAI,OAAO;AAEd,MAAI,CAAC,IAAK,QAAO;AAEjB,QAAM,UAAU,iBAAiB,OAAO;AACxC,SAAO,kBAAkB,KAAK,OAAO;AACvC;AAKO,SAAS,6BAA2D;AACzE,QAAM,KAAK,YAAY;AAEvB,QAAM,OAAO,GAAG,QAAQ,sDAAsD,EAAE,IAAI;AACpF,QAAM,SAAuC,CAAC;AAE9C,aAAW,OAAO,MAAM;AACtB,UAAM,UAAU,iBAAiB,IAAI,QAAQ;AAC7C,WAAO,IAAI,QAAQ,IAAI,kBAAkB,KAAK,OAAO;AAAA,EACvD;AAEA,SAAO;AACT;AAKA,SAAS,iBAAiB,SAAuC;AAC/D,QAAM,KAAK,YAAY;AACvB,QAAM,OAAO,GAAG,QAAQ;AAAA;AAAA;AAAA;AAAA;AAAA,GAKvB,EAAE,IAAI,OAAO;AAEd,SAAO,KAAK,IAAI,QAAM;AAAA,IACpB,MAAM,EAAE;AAAA,IACR,QAAQ,EAAE;AAAA,IACV,WAAW,EAAE;AAAA,IACb,GAAI,EAAE,QAAQ,EAAE,OAAO,EAAE,MAAM,IAAI,CAAC;AAAA,EACtC,EAAE;AACJ;AAsBA,SAAS,kBAAkB,KAAwB,SAA6C;AAC9F,SAAO;AAAA,IACL,SAAS,IAAI;AAAA,IACb,cAAc,IAAI;AAAA,IAClB,YAAY,IAAI;AAAA,IAChB,aAAa,IAAI,gBAA+C;AAAA,IAChE,oBAAoB,IAAI,uBAA6D;AAAA,IACrF,mBAAmB,IAAI,sBAAsB;AAAA,IAC7C,wBAAwB,IAAI,4BAA4B;AAAA,IACxD,uBAAuB,IAAI,2BAA2B;AAAA,IACtD,aAAa,IAAI,gBAAgB;AAAA,IACjC,WAAW,IAAI,cAAc;AAAA,IAC7B,YAAY,IAAI,eAAe;AAAA,IAC/B,WAAW,IAAI;AAAA,IACf,eAAe,IAAI,oBAAoB;AAAA,IACvC,kBAAkB,IAAI
,sBAAsB;AAAA,IAC5C,OAAO,IAAI,UAAU;AAAA,IACrB,SAAS,QAAQ,SAAS,IAAI,UAAU;AAAA,EAC1C;AACF;AAtLA;AAAA;AAAA;AAAA;AAQA;AAAA;AAAA;;;ACRA,SAAS,cAAAA,aAAY,cAAc,eAAe,aAAAC,kBAAiB;AACnE,SAAS,QAAAC,OAAM,eAAe;AAC9B,SAAS,eAAe;AAqCjB,SAAS,mBAAmB,WAAW,qBAAmD;AAE/F,MAAI,aAAa,qBAAqB;AACpC,QAAI;AACF,aAAO,2BAA2B;AAAA,IACpC,QAAQ;AAAA,IAER;AAAA,EACF;AAEA,MAAI;AACF,QAAIF,YAAW,QAAQ,GAAG;AACxB,aAAO,KAAK,MAAM,aAAa,UAAU,OAAO,CAAC;AAAA,IACnD;AAAA,EACF,SAAS,KAAK;AACZ,YAAQ,MAAM,mCAAmC,GAAG;AAAA,EACtD;AACA,SAAO,CAAC;AACV;AAEO,SAAS,mBAAmB,UAAwC,WAAW,qBAA2B;AAC/G,MAAI;AACF,UAAM,MAAM,QAAQ,QAAQ;AAC5B,QAAI,CAACA,YAAW,GAAG,GAAG;AACpB,MAAAC,WAAU,KAAK,EAAE,WAAW,KAAK,CAAC;AAAA,IACpC;AACA,kBAAc,UAAU,KAAK,UAAU,UAAU,MAAM,CAAC,CAAC;AAAA,EAC3D,SAAS,KAAK;AACZ,YAAQ,MAAM,mCAAmC,GAAG;AAAA,EACtD;AACF;AAEO,SAAS,gBACd,SACA,QACA,WAAW,qBACG;AACd,QAAM,WAAW,mBAAmB,QAAQ;AAC5C,QAAM,WAAW,SAAS,OAAO,KAAK;AAAA,IACpC;AAAA,IACA,cAAc;AAAA,IACd,YAAY;AAAA,IACZ,YAAW,oBAAI,KAAK,GAAE,YAAY;AAAA,IAClC,eAAe;AAAA,EACjB;AAKA,MAAI,OAAO,iBAAiB,eAAe,SAAS,iBAAiB,YAAY,OAAO,gBAAgB,QAAW;AACjH,YAAQ,KAAK,sFAAsF,OAAO,gCAAgC;AAC1I,WAAO;AAAA,EACT;AAEA,QAAM,SAAS,EAAE,GAAG,UAAU,GAAG,OAAO;AAGxC,QAAM,UAAU,CAAC,GAAI,SAAS,WAAW,CAAC,CAAE;AAC5C,QAAM,OAAM,oBAAI,KAAK,GAAE,YAAY;AACnC,MAAI,OAAO,gBAAgB,OAAO,iBAAiB,SAAS,cAAc;AACxE,YAAQ,KAAK,EAAE,MAAM,UAAU,QAAQ,OAAO,cAAc,WAAW,KAAK,OAAO,OAAO,YAAY,CAAC;AAAA,EACzG;AACA,MAAI,OAAO,cAAc,OAAO,eAAe,SAAS,YAAY;AAClE,YAAQ,KAAK,EAAE,MAAM,QAAQ,QAAQ,OAAO,YAAY,WAAW,KAAK,OAAO,OAAO,UAAU,CAAC;AAAA,EACnG;AACA,MAAI,OAAO,eAAe,OAAO,gBAAgB,SAAS,aAAa;AACrE,YAAQ,KAAK,EAAE,MAAM,SAAS,QAAQ,OAAO,aAAa,WAAW,IAAI,CAAC;AAAA,EAC5E;AACA,SAAO,QAAQ,SAAS,GAAI,SAAQ,MAAM;AAE1C,QAAM,gBAAgB,OAAO,kBAAkB,SAC3C,OAAO,gBACN,OAAO,iBAAiB,YAAY,OAAO,eAAe,YAAY,OAAO,gBAAgB;AAElG,QAAM,UAAwB;AAAA,IAC5B,GAAG;AAAA,IACH;AAAA,IACA,WAAW;AAAA,IACX;AAAA,IACA;AAAA,EACF;AAGA,MAAI,aAAa,qBAAqB;AACpC,QAAI;AACF,yBAAS,OAAO;AAAA,IAClB,SAAS,KAAK;AACZ,cAAQ,MAAM,+DAA+D,GAAG;AAAA,IAClF;AAAA,EACF;AAGA,WAAS,OAAO,IAAI;AACpB,qBAAmB,UAAU,QAAQ;AAErC,iBAAe,EAAE,MAAM,kBAAkB,SAAS,QAAQ,QAAQ,CAAC;AAE
nE,SAAO;AACT;AAEO,SAAS,gBAAgB,SAAiB,WAAW,qBAA0C;AAEpG,MAAI,aAAa,qBAAqB;AACpC,QAAI;AACF,YAAM,SAAS,sBAAsB,OAAO;AAC5C,UAAI,OAAQ,QAAO;AAAA,IACrB,QAAQ;AAAA,IAER;AAAA,EACF;AACA,QAAM,WAAW,mBAAmB,QAAQ;AAC5C,SAAO,SAAS,OAAO,KAAK;AAC9B;AAEO,SAAS,kBAAkB,SAAiB,WAAW,qBAA2B;AACvF,QAAM,WAAW,mBAAmB,QAAQ;AAC5C,SAAO,SAAS,OAAO;AACvB,qBAAmB,UAAU,QAAQ;AAGrC,MAAI,aAAa,qBAAqB;AACpC,QAAI;AACF,yBAAS,OAAO;AAAA,IAClB,SAAS,KAAK;AACZ,cAAQ,MAAM,gEAAgE,GAAG;AAAA,IACnF;AAAA,EACF;AACF;AAtKA,IAqCM;AArCN;AAAA;AAAA;AAGA;AACA;AAiCA,IAAM,sBAAsBC,MAAK,QAAQ,GAAG,eAAe,oBAAoB;AAAA;AAAA;","names":["existsSync","mkdirSync","join"]}
|
|
@@ -0,0 +1,64 @@
|
|
|
1
|
+
import {
|
|
2
|
+
stepFailed,
|
|
3
|
+
stepOk,
|
|
4
|
+
stepSkipped
|
|
5
|
+
} from "./chunk-R4KPLLRB.js";
|
|
6
|
+
import {
|
|
7
|
+
init_esm_shims
|
|
8
|
+
} from "./chunk-ZHC57RCV.js";
|
|
9
|
+
|
|
10
|
+
// src/lib/lifecycle/compact-beads.ts
init_esm_shims();
import { existsSync } from "fs";
import { join } from "path";
import { exec } from "child_process";
import { promisify } from "util";
var execAsync = promisify(exec);

/**
 * Compact closed beads older than `opts.days` days and commit the result.
 *
 * Steps:
 *   1. Skip if the `bd` CLI is not on PATH (checked via `which`; this means
 *      the step also skips on Windows, where `which` is unavailable —
 *      presumably acceptable, but TODO confirm).
 *   2. Skip if the project has no `.beads/` directory.
 *   3. Count closed beads older than the cutoff via `bd list` piped through
 *      `jq`; skip when the count is zero.
 *   4. Run `bd admin compact`, stage `.beads/`, and commit only when the
 *      stage actually changed something (detected via
 *      `git diff --cached --quiet`, which exits non-zero when there ARE
 *      staged changes — hence the commit lives in the catch branch).
 *   5. Optionally `git push` (default true).
 *
 * @param ctx  lifecycle context; only `ctx.projectPath` is read here.
 * @param opts `{ days?, pushToRemote? }` — defaults: 30 days, push enabled.
 * @returns a StepResult from stepOk/stepSkipped/stepFailed.
 */
async function compactBeads(ctx, opts = {}) {
  const { days = 30, pushToRemote = true } = opts;
  const step = "compact-beads";

  // `days` is interpolated into shell command strings below, so it must be
  // reduced to a plain non-negative integer first. A raw caller-supplied
  // value would otherwise be a command-injection vector (and a non-numeric
  // value would silently break the jq expression). Invalid input falls back
  // to the documented default of 30.
  const parsedDays = Math.floor(Number(days));
  const safeDays = Number.isFinite(parsedDays) && parsedDays >= 0 ? parsedDays : 30;

  // Step 1: bd CLI availability.
  try {
    await execAsync("which bd", { encoding: "utf-8" });
  } catch {
    return stepSkipped(step, ["bd CLI not available"]);
  }

  // Step 2: project must actually use beads.
  const beadsDir = join(ctx.projectPath, ".beads");
  if (!existsSync(beadsDir)) {
    return stepSkipped(step, ["No .beads directory in project"]);
  }

  try {
    // Step 3: count closed beads older than the cutoff. Errors in the
    // bd/jq pipeline degrade to "0" via the trailing `|| echo "0"`.
    const { stdout: countOutput } = await execAsync(
      `bd list --status closed --json 2>/dev/null | jq '[.[] | select(.closed_at != null) | select((now - (.closed_at | fromdateiso8601)) > (${safeDays} * 24 * 60 * 60))] | length' 2>/dev/null || echo "0"`,
      { cwd: ctx.projectPath, encoding: "utf-8" }
    );
    const count = parseInt(countOutput.trim(), 10) || 0;
    if (count === 0) {
      return stepSkipped(step, ["No closed beads older than " + safeDays + " days"]);
    }

    // Step 4: compact and stage.
    await execAsync(`bd admin compact --days ${safeDays}`, {
      cwd: ctx.projectPath,
      encoding: "utf-8"
    });
    await execAsync("git add .beads/", { cwd: ctx.projectPath, encoding: "utf-8" });

    // `git diff --cached --quiet` exits 0 when the index is clean (nothing
    // to commit) and non-zero when staged changes exist, so the success
    // path returns early and the commit happens in the catch branch.
    try {
      await execAsync("git diff --cached --quiet", { cwd: ctx.projectPath, encoding: "utf-8" });
      return stepOk(step, [`Compacted ${count} beads (no git changes)`]);
    } catch {
      await execAsync(
        `git commit -m "chore: compact beads (remove closed issues > ${safeDays} days)"`,
        { cwd: ctx.projectPath, encoding: "utf-8" }
      );
      if (pushToRemote) {
        await execAsync("git push", { cwd: ctx.projectPath, encoding: "utf-8" });
      }
      return stepOk(step, [`Compacted ${count} closed beads and committed`]);
    }
  } catch (err) {
    return stepFailed(step, `Beads compaction failed: ${err.message}`);
  }
}

export {
  compactBeads
};
|
|
64
|
+
//# sourceMappingURL=chunk-MJXYTGK5.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../src/lib/lifecycle/compact-beads.ts"],"sourcesContent":["/**\n * compact-beads — Beads compaction + git commit/push.\n *\n * Extracted from merge-agent.ts conditionalBeadsCompaction().\n * Compacts closed beads older than 30 days and commits the result.\n */\n\nimport { existsSync } from 'fs';\nimport { join } from 'path';\nimport { exec } from 'child_process';\nimport { promisify } from 'util';\nimport type { LifecycleContext, StepResult } from './types.js';\nimport { stepOk, stepSkipped, stepFailed } from './types.js';\n\nconst execAsync = promisify(exec);\n\n/** Options for beads compaction */\nexport interface CompactBeadsOptions {\n /** Number of days to keep closed beads. Default: 30 */\n days?: number;\n /** Push commits to remote. Default: true */\n pushToRemote?: boolean;\n}\n\n/**\n * Compact closed beads older than N days.\n * Idempotent — returns skip if no beads to compact.\n */\nexport async function compactBeads(\n ctx: LifecycleContext,\n opts: CompactBeadsOptions = {},\n): Promise<StepResult> {\n const { days = 30, pushToRemote = true } = opts;\n const step = 'compact-beads';\n\n // Check if bd CLI is available\n try {\n await execAsync('which bd', { encoding: 'utf-8' });\n } catch {\n return stepSkipped(step, ['bd CLI not available']);\n }\n\n // Check if .beads directory exists\n const beadsDir = join(ctx.projectPath, '.beads');\n if (!existsSync(beadsDir)) {\n return stepSkipped(step, ['No .beads directory in project']);\n }\n\n // Count old closed beads\n try {\n const { stdout: countOutput } = await execAsync(\n `bd list --status closed --json 2>/dev/null | jq '[.[] | select(.closed_at != null) | select((now - (.closed_at | fromdateiso8601)) > (${days} * 24 * 60 * 60))] | length' 2>/dev/null || echo \"0\"`,\n { cwd: ctx.projectPath, encoding: 'utf-8' },\n );\n\n const count = parseInt(countOutput.trim(), 10) || 0;\n if (count === 0) {\n return stepSkipped(step, ['No closed beads older than ' + days + ' days']);\n 
}\n\n // Run compaction\n await execAsync(`bd admin compact --days ${days}`, {\n cwd: ctx.projectPath,\n encoding: 'utf-8',\n });\n\n // Stage changes\n await execAsync('git add .beads/', { cwd: ctx.projectPath, encoding: 'utf-8' });\n\n // Check if there are changes to commit\n try {\n await execAsync('git diff --cached --quiet', { cwd: ctx.projectPath, encoding: 'utf-8' });\n // No changes after compaction\n return stepOk(step, [`Compacted ${count} beads (no git changes)`]);\n } catch {\n // There are staged changes — commit them\n await execAsync(\n `git commit -m \"chore: compact beads (remove closed issues > ${days} days)\"`,\n { cwd: ctx.projectPath, encoding: 'utf-8' },\n );\n if (pushToRemote) {\n await execAsync('git push', { cwd: ctx.projectPath, encoding: 'utf-8' });\n }\n return stepOk(step, [`Compacted ${count} closed beads and committed`]);\n }\n } catch (err) {\n return stepFailed(step, `Beads compaction failed: ${(err as Error).message}`);\n }\n}\n"],"mappings":";;;;;;;;;;AAAA;AAOA,SAAS,kBAAkB;AAC3B,SAAS,YAAY;AACrB,SAAS,YAAY;AACrB,SAAS,iBAAiB;AAI1B,IAAM,YAAY,UAAU,IAAI;AAchC,eAAsB,aACpB,KACA,OAA4B,CAAC,GACR;AACrB,QAAM,EAAE,OAAO,IAAI,eAAe,KAAK,IAAI;AAC3C,QAAM,OAAO;AAGb,MAAI;AACF,UAAM,UAAU,YAAY,EAAE,UAAU,QAAQ,CAAC;AAAA,EACnD,QAAQ;AACN,WAAO,YAAY,MAAM,CAAC,sBAAsB,CAAC;AAAA,EACnD;AAGA,QAAM,WAAW,KAAK,IAAI,aAAa,QAAQ;AAC/C,MAAI,CAAC,WAAW,QAAQ,GAAG;AACzB,WAAO,YAAY,MAAM,CAAC,gCAAgC,CAAC;AAAA,EAC7D;AAGA,MAAI;AACF,UAAM,EAAE,QAAQ,YAAY,IAAI,MAAM;AAAA,MACpC,yIAAyI,IAAI;AAAA,MAC7I,EAAE,KAAK,IAAI,aAAa,UAAU,QAAQ;AAAA,IAC5C;AAEA,UAAM,QAAQ,SAAS,YAAY,KAAK,GAAG,EAAE,KAAK;AAClD,QAAI,UAAU,GAAG;AACf,aAAO,YAAY,MAAM,CAAC,gCAAgC,OAAO,OAAO,CAAC;AAAA,IAC3E;AAGA,UAAM,UAAU,2BAA2B,IAAI,IAAI;AAAA,MACjD,KAAK,IAAI;AAAA,MACT,UAAU;AAAA,IACZ,CAAC;AAGD,UAAM,UAAU,mBAAmB,EAAE,KAAK,IAAI,aAAa,UAAU,QAAQ,CAAC;AAG9E,QAAI;AACF,YAAM,UAAU,6BAA6B,EAAE,KAAK,IAAI,aAAa,UAAU,QAAQ,CAAC;AAExF,aAAO,OAAO,MAAM,CAAC,aAAa,KAAK,yBAAyB,CAAC;AAAA,IACnE,QAAQ;AAEN,YAAM;AAAA,QACJ,+DAA+D,IAAI;AAAA,QACnE,EAAE,KAAK,IAAI,
aAAa,UAAU,QAAQ;AAAA,MAC5C;AACA,UAAI,cAAc;AAChB,cAAM,UAAU,YAAY,EAAE,KAAK,IAAI,aAAa,UAAU,QAAQ,CAAC;AAAA,MACzE;AACA,aAAO,OAAO,MAAM,CAAC,aAAa,KAAK,6BAA6B,CAAC;AAAA,IACvE;AAAA,EACF,SAAS,KAAK;AACZ,WAAO,WAAW,MAAM,4BAA6B,IAAc,OAAO,EAAE;AAAA,EAC9E;AACF;","names":[]}
|