@goondocks/myco 0.20.0 → 0.20.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (117) hide show
  1. package/dist/{agent-run-4HUXVRHW.js → agent-run-X25Q2A6T.js} +6 -6
  2. package/dist/{agent-tasks-JF45ELB6.js → agent-tasks-7B6OFERB.js} +6 -6
  3. package/dist/{chunk-POR75WM6.js → chunk-2PDWCDKY.js} +2 -2
  4. package/dist/{chunk-57O67XVF.js → chunk-6X2ERTQV.js} +2 -2
  5. package/dist/{chunk-4M7EWPIA.js → chunk-CCRGY3QW.js} +4 -3
  6. package/dist/{chunk-4M7EWPIA.js.map → chunk-CCRGY3QW.js.map} +1 -1
  7. package/dist/{chunk-P3DN5EWW.js → chunk-JZGN33AY.js} +5 -5
  8. package/dist/chunk-JZGN33AY.js.map +1 -0
  9. package/dist/{chunk-YSNIAJ5D.js → chunk-KESLPBKV.js} +3 -3
  10. package/dist/{chunk-SRXTSI25.js → chunk-OD4AA7PV.js} +2 -2
  11. package/dist/chunk-OD4AA7PV.js.map +1 -0
  12. package/dist/{chunk-4LCIKVDM.js → chunk-Q36VMZST.js} +4 -4
  13. package/dist/chunk-Q36VMZST.js.map +1 -0
  14. package/dist/{chunk-L6XFAJIF.js → chunk-UYMFCYBF.js} +57 -7
  15. package/dist/chunk-UYMFCYBF.js.map +1 -0
  16. package/dist/{chunk-UOQQENDW.js → chunk-VVNL26WX.js} +2 -2
  17. package/dist/{chunk-QS5TWZBL.js → chunk-XATDZX7U.js} +2 -2
  18. package/dist/{chunk-ACQ2AIEM.js → chunk-XG5RRUYF.js} +2 -2
  19. package/dist/{cli-AHTINAHY.js → cli-GGPWH4UO.js} +38 -38
  20. package/dist/{client-LHENCAV3.js → client-YXQUTXVZ.js} +3 -3
  21. package/dist/{config-XPV5GDE4.js → config-OMCYHG2S.js} +2 -2
  22. package/dist/{doctor-XPCF5HV5.js → doctor-5JXJ36KA.js} +9 -9
  23. package/dist/doctor-5JXJ36KA.js.map +1 -0
  24. package/dist/{executor-ACDHGTRH.js → executor-HWW2QNZQ.js} +10 -10
  25. package/dist/{init-V3KCC36O.js → init-LMYOVZAV.js} +12 -16
  26. package/dist/init-LMYOVZAV.js.map +1 -0
  27. package/dist/{installer-ZNK4JSQA.js → installer-FS257JRZ.js} +3 -3
  28. package/dist/{loader-H7OFASVC.js → loader-CQYTFHEW.js} +2 -2
  29. package/dist/{loader-TSB5M7FD.js → loader-NOMBJUPW.js} +2 -2
  30. package/dist/{main-5S4MDCIO.js → main-YTBVRTBI.js} +234 -115
  31. package/dist/main-YTBVRTBI.js.map +1 -0
  32. package/dist/{open-AB5ULZIB.js → open-HG2DX6RN.js} +6 -6
  33. package/dist/{post-compact-P2B7C7FE.js → post-compact-JSECI44W.js} +4 -4
  34. package/dist/{post-tool-use-LXL6NXDS.js → post-tool-use-POGPTJBA.js} +3 -3
  35. package/dist/{post-tool-use-failure-WAYVVKGR.js → post-tool-use-failure-OT7BFWQW.js} +4 -4
  36. package/dist/{pre-compact-BCXUCF4V.js → pre-compact-OXVODKH4.js} +4 -4
  37. package/dist/{registry-MGJSJBAS.js → registry-U4CHXK6R.js} +3 -3
  38. package/dist/{remove-KAPX5NT2.js → remove-N7ZPELFU.js} +6 -6
  39. package/dist/{restart-HQO36FTG.js → restart-ADG5GBTB.js} +7 -7
  40. package/dist/{search-YOMOKAAI.js → search-AHZEUNRR.js} +6 -6
  41. package/dist/{server-2N23P6F2.js → server-AGVYZVP5.js} +3 -3
  42. package/dist/{session-WW2JLHPX.js → session-6IU4AXYP.js} +6 -6
  43. package/dist/{session-end-4WRTIBVQ.js → session-end-FT27DWYZ.js} +3 -3
  44. package/dist/{session-start-HRWTZXQR.js → session-start-LAFICHII.js} +6 -6
  45. package/dist/{session-start-HRWTZXQR.js.map → session-start-LAFICHII.js.map} +1 -1
  46. package/dist/{setup-llm-HFWSBUAF.js → setup-llm-77MP4I2G.js} +6 -6
  47. package/dist/src/agent/definitions/tasks/full-intelligence.yaml +1 -1
  48. package/dist/src/agent/definitions/tasks/skill-evolve.yaml +1 -0
  49. package/dist/src/agent/definitions/tasks/skill-generate.yaml +1 -0
  50. package/dist/src/agent/definitions/tasks/skill-survey.yaml +23 -7
  51. package/dist/src/cli.js +1 -1
  52. package/dist/src/daemon/main.js +1 -1
  53. package/dist/src/hooks/post-tool-use.js +1 -1
  54. package/dist/src/hooks/session-end.js +1 -1
  55. package/dist/src/hooks/session-start.js +1 -1
  56. package/dist/src/hooks/stop.js +1 -1
  57. package/dist/src/hooks/user-prompt-submit.js +1 -1
  58. package/dist/src/mcp/server.js +1 -1
  59. package/dist/{stats-7A4CJ4MS.js → stats-NVPWOYTE.js} +7 -7
  60. package/dist/{stop-R2GDHMRA.js → stop-ZPIKVLH4.js} +3 -3
  61. package/dist/{stop-failure-773KR4VZ.js → stop-failure-2PX67YJC.js} +4 -4
  62. package/dist/{subagent-start-IDECNBHW.js → subagent-start-UUE6EHQD.js} +4 -4
  63. package/dist/{subagent-stop-3JH7DR2S.js → subagent-stop-KQWWWPE6.js} +4 -4
  64. package/dist/{task-completed-AYVHPHDR.js → task-completed-WMHOFQ7B.js} +4 -4
  65. package/dist/{team-3JKF7VAD.js → team-LRZ6GTQK.js} +3 -3
  66. package/dist/{update-YWYW55JM.js → update-O6V4RC4W.js} +6 -6
  67. package/dist/{user-prompt-submit-YELSR6XI.js → user-prompt-submit-N36KUPHI.js} +3 -3
  68. package/dist/{verify-JS44DVKJ.js → verify-LXPV7NYG.js} +4 -4
  69. package/dist/verify-LXPV7NYG.js.map +1 -0
  70. package/dist/{version-K5NETYIL.js → version-XMPPJQHR.js} +2 -2
  71. package/package.json +1 -1
  72. package/dist/chunk-4LCIKVDM.js.map +0 -1
  73. package/dist/chunk-L6XFAJIF.js.map +0 -1
  74. package/dist/chunk-P3DN5EWW.js.map +0 -1
  75. package/dist/chunk-SRXTSI25.js.map +0 -1
  76. package/dist/doctor-XPCF5HV5.js.map +0 -1
  77. package/dist/init-V3KCC36O.js.map +0 -1
  78. package/dist/main-5S4MDCIO.js.map +0 -1
  79. package/dist/verify-JS44DVKJ.js.map +0 -1
  80. /package/dist/{agent-run-4HUXVRHW.js.map → agent-run-X25Q2A6T.js.map} +0 -0
  81. /package/dist/{agent-tasks-JF45ELB6.js.map → agent-tasks-7B6OFERB.js.map} +0 -0
  82. /package/dist/{chunk-POR75WM6.js.map → chunk-2PDWCDKY.js.map} +0 -0
  83. /package/dist/{chunk-57O67XVF.js.map → chunk-6X2ERTQV.js.map} +0 -0
  84. /package/dist/{chunk-YSNIAJ5D.js.map → chunk-KESLPBKV.js.map} +0 -0
  85. /package/dist/{chunk-UOQQENDW.js.map → chunk-VVNL26WX.js.map} +0 -0
  86. /package/dist/{chunk-QS5TWZBL.js.map → chunk-XATDZX7U.js.map} +0 -0
  87. /package/dist/{chunk-ACQ2AIEM.js.map → chunk-XG5RRUYF.js.map} +0 -0
  88. /package/dist/{cli-AHTINAHY.js.map → cli-GGPWH4UO.js.map} +0 -0
  89. /package/dist/{client-LHENCAV3.js.map → client-YXQUTXVZ.js.map} +0 -0
  90. /package/dist/{config-XPV5GDE4.js.map → config-OMCYHG2S.js.map} +0 -0
  91. /package/dist/{executor-ACDHGTRH.js.map → executor-HWW2QNZQ.js.map} +0 -0
  92. /package/dist/{installer-ZNK4JSQA.js.map → installer-FS257JRZ.js.map} +0 -0
  93. /package/dist/{loader-H7OFASVC.js.map → loader-CQYTFHEW.js.map} +0 -0
  94. /package/dist/{loader-TSB5M7FD.js.map → loader-NOMBJUPW.js.map} +0 -0
  95. /package/dist/{open-AB5ULZIB.js.map → open-HG2DX6RN.js.map} +0 -0
  96. /package/dist/{post-compact-P2B7C7FE.js.map → post-compact-JSECI44W.js.map} +0 -0
  97. /package/dist/{post-tool-use-LXL6NXDS.js.map → post-tool-use-POGPTJBA.js.map} +0 -0
  98. /package/dist/{post-tool-use-failure-WAYVVKGR.js.map → post-tool-use-failure-OT7BFWQW.js.map} +0 -0
  99. /package/dist/{pre-compact-BCXUCF4V.js.map → pre-compact-OXVODKH4.js.map} +0 -0
  100. /package/dist/{registry-MGJSJBAS.js.map → registry-U4CHXK6R.js.map} +0 -0
  101. /package/dist/{remove-KAPX5NT2.js.map → remove-N7ZPELFU.js.map} +0 -0
  102. /package/dist/{restart-HQO36FTG.js.map → restart-ADG5GBTB.js.map} +0 -0
  103. /package/dist/{search-YOMOKAAI.js.map → search-AHZEUNRR.js.map} +0 -0
  104. /package/dist/{server-2N23P6F2.js.map → server-AGVYZVP5.js.map} +0 -0
  105. /package/dist/{session-WW2JLHPX.js.map → session-6IU4AXYP.js.map} +0 -0
  106. /package/dist/{session-end-4WRTIBVQ.js.map → session-end-FT27DWYZ.js.map} +0 -0
  107. /package/dist/{setup-llm-HFWSBUAF.js.map → setup-llm-77MP4I2G.js.map} +0 -0
  108. /package/dist/{stats-7A4CJ4MS.js.map → stats-NVPWOYTE.js.map} +0 -0
  109. /package/dist/{stop-R2GDHMRA.js.map → stop-ZPIKVLH4.js.map} +0 -0
  110. /package/dist/{stop-failure-773KR4VZ.js.map → stop-failure-2PX67YJC.js.map} +0 -0
  111. /package/dist/{subagent-start-IDECNBHW.js.map → subagent-start-UUE6EHQD.js.map} +0 -0
  112. /package/dist/{subagent-stop-3JH7DR2S.js.map → subagent-stop-KQWWWPE6.js.map} +0 -0
  113. /package/dist/{task-completed-AYVHPHDR.js.map → task-completed-WMHOFQ7B.js.map} +0 -0
  114. /package/dist/{team-3JKF7VAD.js.map → team-LRZ6GTQK.js.map} +0 -0
  115. /package/dist/{update-YWYW55JM.js.map → update-O6V4RC4W.js.map} +0 -0
  116. /package/dist/{user-prompt-submit-YELSR6XI.js.map → user-prompt-submit-N36KUPHI.js.map} +0 -0
  117. /package/dist/{version-K5NETYIL.js.map → version-XMPPJQHR.js.map} +0 -0
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../src/daemon/server.ts","../src/daemon/router.ts","../src/daemon/static.ts","../src/daemon/lifecycle.ts","../src/daemon/port.ts","../src/symbionts/adapter.ts","../src/symbionts/parsers/standard-jsonl.ts","../src/symbionts/claude-code.ts","../src/symbionts/cursor.ts","../src/symbionts/parsers/codex-jsonl.ts","../src/symbionts/codex.ts","../src/symbionts/gemini.ts","../src/symbionts/windsurf.ts","../src/symbionts/vscode-copilot.ts","../src/symbionts/registry.ts","../src/capture/transcript-miner.ts","../src/daemon/config-reactions/touched-paths.ts","../src/daemon/api/config.ts","../src/utils/parse-csv-list.ts","../src/db/queries/logs.ts","../src/daemon/api/log-explorer.ts","../src/daemon/api/restart.ts","../src/daemon/update-checker.ts","../src/daemon/update-installer.ts","../src/daemon/api/update.ts","../src/symbionts/reconcile.ts","../src/daemon/backup.ts","../src/daemon/api/backup.ts","../src/daemon/team-sync.ts","../src/daemon/api/team-connect.ts","../src/daemon/api/collective.ts","../src/daemon/api/session-lifecycle.ts","../src/daemon/api/skills.ts","../src/db/queries/skill-usage.ts","../src/daemon/team-sync-init.ts","../src/daemon/api/progress.ts","../src/daemon/api/models.ts","../src/daemon/api/stats.ts","../src/db/queries/activities.ts","../src/db/queries/attachments.ts","../src/db/queries/plans.ts","../src/daemon/jobs/session-cleanup.ts","../src/daemon/trigger-title-summary.ts","../src/daemon/api/sessions.ts","../src/daemon/api/mycelium.ts","../src/daemon/api/search.ts","../src/daemon/api/context.ts","../src/db/queries/feed.ts","../src/daemon/api/feed.ts","../src/daemon/api/symbionts.ts","../src/daemon/api/embedding.ts","../src/daemon/database/types.ts","../src/daemon/api/database.ts","../src/daemon/embedding/manager.ts","../src/daemon/embedding/sqlite-vec-store.ts","../src/intelligence/embeddings.ts","../src/daemon/embedding/provider-adapter.ts","../src/daemon/embedding/record-source.ts","../src/daemon/database/manager.ts","../src/
db/queries/database.ts","../src/notifications/domains.ts","../src/daemon/api/notifications.ts","../src/daemon/api/agent-tasks.ts","../src/daemon/api/providers.ts","../src/daemon/task-scheduling.ts","../src/daemon/task-scheduler.ts","../src/db/queries/team-members.ts","../src/daemon/api/mcp-proxy.ts","../src/daemon/api/agent-runs.ts","../src/daemon/api/attachments.ts","../src/daemon/log-reconcile.ts","../src/config/focus.ts","../src/daemon/power.ts","../src/daemon/jobs/session-maintenance.ts","../src/daemon/power-jobs.ts","../src/daemon/reconciliation.ts","../src/daemon/event-handlers.ts","../src/daemon/stop-processing.ts","../src/daemon/capture-images.ts","../src/daemon/plan-capture.ts","../src/daemon/skill-usage.ts","../src/daemon/event-dispatch.ts","../src/daemon/config-reactions/registry.ts","../src/daemon/config-reactions/context.ts","../src/daemon/plan-watch-reaction.ts","../src/daemon/main.ts"],"sourcesContent":["import http from 'node:http';\nimport fs from 'node:fs';\nimport path from 'node:path';\nimport type { DaemonLogger } from './logger.js';\nimport { getPluginVersion } from '../version.js';\nimport { Router, type RouteHandler } from './router.js';\nimport { resolveStaticFile } from './static.js';\nimport { DAEMON_EVICT_TIMEOUT_MS, DAEMON_EVICT_POLL_MS } from '../constants.js';\nimport { LOG_KINDS } from '../constants/log-kinds.js';\n\nconst DEFAULT_STATUS = 200;\n\nexport interface DaemonServerConfig {\n vaultDir: string;\n logger: DaemonLogger;\n uiDir?: string;\n onRequest?: () => void;\n}\n\nexport class DaemonServer {\n port = 0;\n readonly version: string;\n uiDir: string | null;\n private server: http.Server | null = null;\n private vaultDir: string;\n private logger: DaemonLogger;\n private router = new Router();\n private onRequest: (() => void) | null;\n\n constructor(config: DaemonServerConfig) {\n this.vaultDir = config.vaultDir;\n this.logger = config.logger;\n this.uiDir = config.uiDir ?? null;\n this.onRequest = config.onRequest ?? 
null;\n this.version = getPluginVersion();\n this.registerDefaultRoutes();\n }\n\n registerRoute(method: string, routePath: string, handler: RouteHandler): void {\n this.router.add(method, routePath, handler);\n }\n\n async start(port: number = 0): Promise<void> {\n return new Promise((resolve, reject) => {\n this.server = http.createServer((req, res) => this.handleRequest(req, res));\n this.server.on('error', reject);\n\n this.server.listen(port, '127.0.0.1', () => {\n const addr = this.server!.address() as { port: number };\n this.port = addr.port;\n this.writeDaemonJson();\n this.logger.info(LOG_KINDS.DAEMON_PORT, 'Server started', { port: this.port, dashboard: `http://localhost:${this.port}/` });\n resolve();\n });\n });\n }\n\n async stop(): Promise<void> {\n return new Promise((resolve) => {\n this.removeDaemonJson();\n if (this.server) {\n this.server.close(() => {\n this.logger.info(LOG_KINDS.DAEMON_START, 'Server stopped');\n resolve();\n });\n } else {\n resolve();\n }\n });\n }\n\n private registerDefaultRoutes(): void {\n this.registerRoute('GET', '/health', async () => ({\n body: {\n myco: true,\n version: this.version,\n pid: process.pid,\n uptime: process.uptime(),\n },\n }));\n }\n\n private async handleRequest(req: http.IncomingMessage, res: http.ServerResponse): Promise<void> {\n // API/daemon routes take priority over static files\n const match = this.router.match(req.method!, req.url!);\n\n if (match) {\n this.onRequest?.();\n try {\n const body = (req.method === 'POST' || req.method === 'PUT' || req.method === 'PATCH') ? await readBody(req) : undefined;\n const result = await match.handler({\n body,\n query: match.query,\n params: match.params,\n pathname: match.pathname,\n });\n const status = result.status ?? DEFAULT_STATUS;\n if (Buffer.isBuffer(result.body)) {\n res.writeHead(status, result.headers ?? 
{});\n res.end(result.body);\n return;\n }\n const headers = { 'Content-Type': 'application/json', ...result.headers };\n res.writeHead(status, headers);\n res.end(JSON.stringify(result.body));\n } catch (error) {\n this.logger.error(LOG_KINDS.SERVER_ERROR, 'Request handler error', {\n path: req.url,\n error: (error as Error).message,\n });\n res.writeHead(500, { 'Content-Type': 'application/json' });\n res.end(JSON.stringify({ error: (error as Error).message }));\n }\n return;\n }\n\n // No API route matched — serve static files (dashboard SPA)\n if (this.uiDir && req.method === 'GET') {\n const pathname = new URL(req.url!, 'http://localhost').pathname;\n const result = resolveStaticFile(this.uiDir, pathname);\n if (result) {\n try {\n const content = await fs.promises.readFile(result.filePath);\n res.writeHead(200, {\n 'Content-Type': result.contentType,\n 'Cache-Control': result.cacheControl,\n });\n res.end(content);\n } catch {\n res.writeHead(404, { 'Content-Type': 'application/json' });\n res.end(JSON.stringify({ error: 'not found' }));\n }\n return;\n }\n }\n\n res.writeHead(404, { 'Content-Type': 'application/json' });\n res.end(JSON.stringify({ error: 'not found' }));\n }\n\n updateDaemonJsonSessions(sessions: string[]): void {\n const jsonPath = path.join(this.vaultDir, 'daemon.json');\n try {\n const info = JSON.parse(fs.readFileSync(jsonPath, 'utf-8'));\n info.sessions = sessions;\n fs.writeFileSync(jsonPath, JSON.stringify(info, null, 2));\n } catch { /* daemon.json may not exist during shutdown */ }\n }\n\n /**\n * Kill any existing daemon for this vault before taking over.\n * Prevents orphaned daemons when spawned from worktrees or plugin upgrades.\n * Must be called BEFORE resolvePort() so the old daemon releases the port.\n */\n async evictExistingDaemon(): Promise<void> {\n const jsonPath = path.join(this.vaultDir, 'daemon.json');\n let existingPid: number | undefined;\n try {\n const content = fs.readFileSync(jsonPath, 'utf-8');\n const info = 
JSON.parse(content);\n if (typeof info.pid === 'number' && info.pid !== process.pid) {\n existingPid = info.pid;\n }\n } catch { /* no daemon.json or invalid — nothing to evict */ }\n\n if (!existingPid) return;\n\n // Check if the process is alive\n try { process.kill(existingPid, 0); } catch { return; /* already dead */ }\n\n this.logger.info(LOG_KINDS.DAEMON_START, 'Evicting existing daemon', { pid: existingPid });\n try { process.kill(existingPid, 'SIGTERM'); } catch { return; }\n\n // Give SIGTERM a grace period, then escalate to SIGKILL to guarantee port release\n const deadline = Date.now() + DAEMON_EVICT_TIMEOUT_MS;\n while (Date.now() < deadline) {\n await new Promise((r) => setTimeout(r, DAEMON_EVICT_POLL_MS));\n try { process.kill(existingPid, 0); } catch { return; /* dead */ }\n }\n\n this.logger.warn(LOG_KINDS.DAEMON_START, 'Evicted daemon did not exit in time, sending SIGKILL', { pid: existingPid });\n try { process.kill(existingPid, 'SIGKILL'); } catch { return; }\n\n // Verify SIGKILL took effect\n await new Promise((r) => setTimeout(r, DAEMON_EVICT_POLL_MS));\n try { process.kill(existingPid, 0); } catch { return; /* dead */ }\n this.logger.warn(LOG_KINDS.DAEMON_START, 'Evicted daemon still alive after SIGKILL', { pid: existingPid });\n }\n\n private writeDaemonJson(): void {\n const info = {\n pid: process.pid,\n port: this.port,\n started: new Date().toISOString(),\n sessions: [] as string[],\n };\n const jsonPath = path.join(this.vaultDir, 'daemon.json');\n fs.writeFileSync(jsonPath, JSON.stringify(info, null, 2));\n }\n\n private removeDaemonJson(): void {\n const jsonPath = path.join(this.vaultDir, 'daemon.json');\n try {\n const content = fs.readFileSync(jsonPath, 'utf-8');\n const info = JSON.parse(content);\n // Only delete if we still own the file — a successor daemon may have taken over.\n if (info.pid !== process.pid) return;\n fs.unlinkSync(jsonPath);\n } catch { /* already gone or unreadable */ }\n }\n}\n\nfunction readBody(req: 
http.IncomingMessage): Promise<unknown> {\n return new Promise((resolve, reject) => {\n let data = '';\n req.on('data', (chunk: string) => { data += chunk; });\n req.on('end', () => {\n try { resolve(data ? JSON.parse(data) : {}); }\n catch (e) { reject(e); }\n });\n req.on('error', reject);\n });\n}\n","export interface RouteRequest {\n body: unknown;\n query: Record<string, string>;\n params: Record<string, string>;\n pathname: string;\n}\n\nexport interface RouteResponse {\n status?: number;\n headers?: Record<string, string>;\n body: unknown;\n}\n\nexport type RouteHandler = (req: RouteRequest) => Promise<RouteResponse>;\n\ninterface RouteEntry {\n method: string;\n pattern: string;\n handler: RouteHandler;\n type: 'exact' | 'param' | 'prefix';\n segments?: string[];\n}\n\nexport interface RouteMatch {\n handler: RouteHandler;\n params: Record<string, string>;\n query: Record<string, string>;\n pathname: string;\n}\n\nexport class Router {\n private routes: RouteEntry[] = [];\n\n add(method: string, pattern: string, handler: RouteHandler): void {\n const type = pattern.includes(':') ? 'param'\n : pattern.endsWith('/*') ? 'prefix'\n : 'exact';\n const segments = type === 'param' ? pattern.split('/') : undefined;\n this.routes.push({ method, pattern, handler, type, segments });\n }\n\n /**\n * Match a request against registered routes.\n * Priority: exact > parameterized > prefix. 
Within parameterized routes,\n * first-registered wins if multiple patterns match at the same depth.\n */\n match(method: string, rawUrl: string): RouteMatch | undefined {\n const url = new URL(rawUrl, 'http://localhost');\n const pathname = url.pathname;\n const query: Record<string, string> = {};\n url.searchParams.forEach((v, k) => { query[k] = v; });\n\n // Priority: exact > param > prefix\n let paramMatch: RouteMatch | undefined;\n let prefixMatch: RouteMatch | undefined;\n\n for (const route of this.routes) {\n if (route.method !== method) continue;\n\n if (route.type === 'exact' && route.pattern === pathname) {\n return { handler: route.handler, params: {}, query, pathname };\n }\n\n if (route.type === 'param' && !paramMatch && route.segments) {\n const parts = pathname.split('/');\n if (parts.length === route.segments.length) {\n const params: Record<string, string> = {};\n let matched = true;\n for (let i = 0; i < route.segments.length; i++) {\n if (route.segments[i].startsWith(':')) {\n params[route.segments[i].slice(1)] = parts[i];\n } else if (route.segments[i] !== parts[i]) {\n matched = false;\n break;\n }\n }\n if (matched) {\n paramMatch = { handler: route.handler, params, query, pathname };\n }\n }\n }\n\n if (route.type === 'prefix' && !prefixMatch) {\n const prefix = route.pattern.slice(0, -1); // Remove trailing *\n if (pathname.startsWith(prefix)) {\n prefixMatch = { handler: route.handler, params: {}, query, pathname };\n }\n }\n }\n\n return paramMatch ?? 
prefixMatch;\n }\n}\n","import fs from 'node:fs';\nimport path from 'node:path';\n\nconst HASHED_ASSET_PREFIX = '/assets/';\nconst IMMUTABLE_CACHE = 'public, max-age=31536000, immutable';\nconst NO_CACHE = 'no-cache';\n\nexport const MIME_TYPES: Record<string, string> = {\n '.html': 'text/html',\n '.js': 'application/javascript',\n '.css': 'text/css',\n '.json': 'application/json',\n '.svg': 'image/svg+xml',\n '.png': 'image/png',\n '.ico': 'image/x-icon',\n '.woff': 'font/woff',\n '.woff2': 'font/woff2',\n '.ttf': 'font/ttf',\n};\n\nexport interface StaticFileResult {\n filePath: string;\n contentType: string;\n cacheControl: string;\n}\n\n/** Resolve a request to a file in the UI directory. Returns undefined if blocked (path traversal). */\nexport function resolveStaticFile(uiDir: string, pathname: string): StaticFileResult | undefined {\n // Strip leading slash to get relative path\n const relative = pathname.startsWith('/') ? pathname.slice(1) : pathname;\n\n // Resolve \"/\" to index.html\n const resolved = path.resolve(uiDir, relative || 'index.html');\n if (!resolved.startsWith(path.resolve(uiDir))) {\n return undefined;\n }\n\n // Serve the file if it exists\n if (fs.existsSync(resolved) && fs.statSync(resolved).isFile()) {\n const ext = path.extname(resolved);\n const contentType = MIME_TYPES[ext] ?? 'application/octet-stream';\n const cacheControl = pathname.startsWith(HASHED_ASSET_PREFIX) ? 
IMMUTABLE_CACHE : NO_CACHE;\n return { filePath: resolved, contentType, cacheControl };\n }\n\n // SPA fallback: serve index.html for any non-file path\n const indexPath = path.join(uiDir, 'index.html');\n if (fs.existsSync(indexPath)) {\n return { filePath: indexPath, contentType: 'text/html', cacheControl: NO_CACHE };\n }\n\n return undefined;\n}\n","export interface SessionMetadata {\n started_at: string;\n branch?: string;\n}\n\nexport interface RegisteredSession extends SessionMetadata {\n id: string;\n}\n\ninterface RegistryOptions {\n gracePeriod: number;\n onEmpty: () => void;\n}\n\nexport class SessionRegistry {\n private _sessions: Map<string, SessionMetadata> = new Map();\n private graceTimer: ReturnType<typeof setTimeout> | null = null;\n private gracePeriod: number;\n private onEmpty: () => void;\n\n constructor(options: RegistryOptions) {\n this.gracePeriod = options.gracePeriod;\n this.onEmpty = options.onEmpty;\n }\n\n get sessions(): string[] {\n return [...this._sessions.keys()];\n }\n\n register(sessionId: string, metadata?: SessionMetadata): void {\n if (!this._sessions.has(sessionId)) {\n this._sessions.set(sessionId, metadata ?? 
{ started_at: new Date().toISOString() });\n }\n this.cancelGrace();\n }\n\n getSession(sessionId: string): RegisteredSession | undefined {\n const meta = this._sessions.get(sessionId);\n if (!meta) return undefined;\n return { id: sessionId, ...meta };\n }\n\n unregister(sessionId: string): void {\n this._sessions.delete(sessionId);\n if (this._sessions.size === 0) {\n this.startGrace();\n }\n }\n\n destroy(): void {\n this.cancelGrace();\n this._sessions.clear();\n }\n\n private startGrace(): void {\n this.cancelGrace();\n this.graceTimer = setTimeout(() => {\n if (this._sessions.size === 0) {\n this.onEmpty();\n }\n }, this.gracePeriod * 1000);\n }\n\n private cancelGrace(): void {\n if (this.graceTimer) {\n clearTimeout(this.graceTimer);\n this.graceTimer = null;\n }\n }\n}\n","import { createHash } from 'node:crypto';\nimport net from 'node:net';\n\nexport const PORT_RANGE_START = 19200;\nexport const PORT_RANGE_SIZE = 10000;\nconst PORT_RETRY_COUNT = 10;\n\n/** Derive a deterministic port from a vault path. */\nexport function derivePort(vaultPath: string): number {\n const hash = createHash('md5').update(vaultPath).digest();\n const num = hash.readUInt16LE(0);\n return PORT_RANGE_START + (num % PORT_RANGE_SIZE);\n}\n\n/** Resolve the port to bind: try config port, derive from path, or fall back to ephemeral. */\nexport async function resolvePort(\n configPort: number | null,\n vaultPath: string,\n): Promise<number> {\n const basePort = configPort ?? 
derivePort(vaultPath);\n\n for (let offset = 0; offset < PORT_RETRY_COUNT; offset++) {\n const candidate = basePort + offset;\n if (candidate > 65535) break;\n if (await isPortAvailable(candidate)) return candidate;\n }\n\n // All candidates taken — fall back to ephemeral\n return 0;\n}\n\nfunction isPortAvailable(port: number): Promise<boolean> {\n return new Promise((resolve) => {\n const server = net.createServer();\n server.once('error', () => resolve(false));\n server.once('listening', () => {\n server.close(() => resolve(true));\n });\n server.listen(port, '127.0.0.1');\n });\n}\n","/**\n * Symbiont adapter interface — declares what each coding agent provides to Myco.\n *\n * Each supported symbiont (Claude Code, Cursor, Cline, etc.) has an adapter that\n * tells Myco where to find transcripts, how to parse them, and what capabilities\n * the agent supports. The daemon uses these adapters at runtime to read the\n * authoritative conversation record.\n */\nimport fs from 'node:fs';\nimport path from 'node:path';\n\n/** An image attached to a conversation turn */\nexport interface TranscriptImage {\n /** Base64-encoded image data */\n data: string;\n /** MIME type (e.g., image/png) */\n mediaType: string;\n}\n\n/** A single conversation turn extracted from an agent's transcript */\nexport interface TranscriptTurn {\n prompt: string;\n toolCount: number;\n /** Per-tool call counts (e.g., { Read: 5, Edit: 3 }). Populated from buffer events. */\n toolBreakdown?: Record<string, number>;\n /** Deduplicated file paths touched in this turn. Populated from buffer events. 
*/\n files?: string[];\n aiResponse?: string;\n timestamp: string;\n /** Images attached to this turn's user prompt */\n images?: TranscriptImage[];\n}\n\n/**\n * Maps agent-specific hook field names to normalized names.\n * Each agent's hook system uses different field names for the same data.\n */\nexport interface HookFieldNames {\n /** Field name for the session ID (e.g., 'session_id', 'sessionId', 'trajectory_id') */\n sessionId: string;\n /** Field name for the transcript file path (e.g., 'transcript_path') */\n transcriptPath: string;\n /** Field name for the last AI response text (e.g., 'last_assistant_message') */\n lastResponse: string;\n /** Field name for the user prompt (e.g., 'prompt') */\n prompt: string;\n /** Field name for the tool name (e.g., 'tool_name') */\n toolName: string;\n /** Field name for the tool input (e.g., 'tool_input'). Supports dot notation for nested objects. */\n toolInput: string;\n /** Field name for the tool output (e.g., 'tool_output'). Supports dot notation for nested objects. */\n toolOutput: string;\n /** Env var fallback for session ID (e.g., 'GEMINI_SESSION_ID'). 
*/\n sessionIdEnv?: string;\n}\n\nexport interface SymbiontAdapter {\n /** Agent identifier (matches plugin directory names) */\n readonly name: string;\n /** Human-readable display name */\n readonly displayName: string;\n /** Environment variable for the plugin root directory */\n readonly pluginRootEnvVar: string;\n /** Maps agent-specific hook body field names to normalized names */\n readonly hookFields: HookFieldNames;\n\n /**\n * Find the transcript file for a given session ID.\n * Returns the absolute path if found, null otherwise.\n */\n findTranscript(sessionId: string): string | null;\n\n /**\n * Parse a transcript file's content into normalized turns.\n * Each adapter handles its agent's specific format.\n */\n parseTurns(content: string): TranscriptTurn[];\n\n}\n\n/**\n * Scan subdirectories of baseDir for a JSONL transcript file matching sessionId.\n * Shared by claude-code, cursor, custom adapters, and tests.\n */\nexport function findJsonlInSubdirs(baseDir: string, sessionId: string): string | null {\n try {\n for (const entry of fs.readdirSync(baseDir, { withFileTypes: true })) {\n if (!entry.isDirectory()) continue;\n const candidate = path.join(baseDir, entry.name, `${sessionId}.jsonl`);\n try {\n fs.accessSync(candidate);\n return candidate;\n } catch { /* not here */ }\n }\n } catch { /* baseDir doesn't exist or unreadable */ }\n return null;\n}\n\n/**\n * Factory for creating simple per-project adapters from a base directory.\n * Used for user-configured transcript_paths and testing.\n */\nexport function createPerProjectAdapter(\n baseDir: string,\n parseTurns: SymbiontAdapter['parseTurns'],\n name?: string,\n): SymbiontAdapter {\n return {\n name: name ?? 
`custom:${path.basename(baseDir)}`,\n displayName: `Custom (${baseDir})`,\n pluginRootEnvVar: '',\n hookFields: { sessionId: 'session_id', transcriptPath: 'transcript_path', lastResponse: 'last_assistant_message', prompt: 'prompt', toolName: 'tool_name', toolInput: 'tool_input', toolOutput: 'tool_output' },\n findTranscript: (sessionId) => findJsonlInSubdirs(baseDir, sessionId),\n parseTurns,\n };\n}\n\n/** Map MIME type to file extension */\nconst MIME_TO_EXT: Record<string, string> = {\n 'image/jpeg': 'jpg',\n 'image/gif': 'gif',\n 'image/webp': 'webp',\n 'image/png': 'png',\n};\n\nexport function extensionForMimeType(mimeType: string): string {\n return MIME_TO_EXT[mimeType] ?? 'png';\n}\n\n/** Map file extension to MIME type */\nconst EXT_TO_MIME: Record<string, string> = {\n '.jpg': 'image/jpeg',\n '.jpeg': 'image/jpeg',\n '.gif': 'image/gif',\n '.webp': 'image/webp',\n '.png': 'image/png',\n};\n\nexport function mimeTypeForExtension(ext: string): string {\n return EXT_TO_MIME[ext.toLowerCase()] ?? 'image/png';\n}\n\nimport { StandardJsonlParser } from './parsers/standard-jsonl.js';\n\nexport interface ParseJsonlOptions {\n /** Field name containing the message role ('type' for Claude Code, 'role' for Cursor) */\n roleField: 'type' | 'role';\n /** Whether entries have a timestamp field to extract */\n extractTimestamp: boolean;\n /** Whether to check for text-only user messages (Claude Code has tool_result user messages to skip) */\n skipToolResultUsers: boolean;\n /** Whether to strip [Image: source: ...] 
text references from prompts (Claude Code-specific) */\n stripImageTextRefs: boolean;\n}\n\n/**\n * Shared JSONL transcript parser — used by both Claude Code and Cursor adapters.\n * Handles user/assistant role detection, text/image extraction, and tool counting.\n */\nexport function parseJsonlTurns(content: string, opts: ParseJsonlOptions): TranscriptTurn[] {\n return new StandardJsonlParser(opts).parseTurns(content);\n}\n","import type { TranscriptTurn, TranscriptImage, ParseJsonlOptions } from '../adapter.js';\nimport type { TranscriptParser } from './types.js';\nimport { PROMPT_PREVIEW_CHARS } from '../../constants.js';\n\n/** Claude Code injects [Image: source: /path] text alongside base64 image blocks. Strip these since the actual images are captured as attachments. */\nconst IMAGE_TEXT_REF_PATTERN = /\\[Image: source: [^\\]]+\\]\\n*/g;\n\n/**\n * Standard JSONL transcript parser — handles the flat JSONL format used by\n * Claude Code, Cursor, and other adapters with top-level role fields.\n *\n * Extracts user/assistant turn pairs with text, images, and tool-use counts.\n * Behavior is controlled by ParseJsonlOptions (role field name, timestamp extraction,\n * tool_result skipping, image text reference stripping).\n */\nexport class StandardJsonlParser implements TranscriptParser {\n constructor(private readonly opts: ParseJsonlOptions) {}\n\n parseTurns(content: string): TranscriptTurn[] {\n const lines = content.split('\\n').filter(Boolean);\n const turns: TranscriptTurn[] = [];\n let current: TranscriptTurn | null = null;\n\n for (const line of lines) {\n let entry: Record<string, unknown>;\n try { entry = JSON.parse(line); } catch { continue; }\n\n const role = entry[this.opts.roleField] as string;\n const timestamp = this.opts.extractTimestamp ? (entry.timestamp as string ?? '') : '';\n\n if (role === 'user') {\n // Skip meta messages (skill injections, deprecation notices, etc.) 
— they are\n // not real user prompts and should not appear as turns or influence the title.\n if (entry.isMeta === true) continue;\n\n const msg = entry.message as { content?: Array<{ type: string; text?: string; source?: { type?: string; data?: string; media_type?: string } }> } | undefined;\n const blocks = Array.isArray(msg?.content) ? msg!.content : [];\n const hasText = blocks.some((b) => b.type === 'text' && b.text?.trim());\n\n if (!hasText) continue;\n\n if (current) turns.push(current);\n\n const rawPrompt = blocks\n .filter((b) => b.type === 'text' && b.text)\n .map((b) => b.text!)\n .join('\\n');\n\n const promptText = (this.opts.stripImageTextRefs ? rawPrompt.replace(IMAGE_TEXT_REF_PATTERN, '') : rawPrompt)\n .trim()\n .slice(0, PROMPT_PREVIEW_CHARS);\n\n const images: TranscriptImage[] = blocks\n .filter((b) => b.type === 'image' && b.source?.type === 'base64' && b.source.data)\n .map((b) => ({ data: b.source!.data!, mediaType: b.source!.media_type ?? 'image/png' }));\n\n current = { prompt: promptText, toolCount: 0, timestamp, ...(images.length > 0 ? 
{ images } : {}) };\n } else if (role === 'assistant' && current) {\n const msg = entry.message as { content?: Array<{ type: string; text?: string }> } | undefined;\n if (Array.isArray(msg?.content)) {\n const textParts = msg!.content.filter((b) => b.type === 'text' && b.text).map((b) => b.text!);\n const text = textParts.join('\\n').trim();\n if (text) current.aiResponse = text;\n current.toolCount += msg!.content.filter((b) => b.type === 'tool_use').length;\n }\n }\n }\n\n if (current) turns.push(current);\n return turns;\n }\n}\n","import type { SymbiontAdapter } from './adapter.js';\nimport { findJsonlInSubdirs, parseJsonlTurns } from './adapter.js';\nimport path from 'node:path';\nimport os from 'node:os';\n\nconst TRANSCRIPT_BASE = path.join(os.homedir(), '.claude', 'projects');\n\nexport const claudeCodeAdapter: SymbiontAdapter = {\n name: 'claude-code',\n displayName: 'Claude Code',\n pluginRootEnvVar: 'CLAUDE_PLUGIN_ROOT',\n hookFields: {\n sessionId: 'session_id',\n transcriptPath: 'transcript_path',\n lastResponse: 'last_assistant_message',\n prompt: 'prompt',\n toolName: 'tool_name',\n toolInput: 'tool_input',\n toolOutput: 'tool_output',\n },\n\n findTranscript: (sessionId) => findJsonlInSubdirs(TRANSCRIPT_BASE, sessionId),\n\n parseTurns: (content) => parseJsonlTurns(content, {\n roleField: 'type',\n extractTimestamp: true,\n skipToolResultUsers: true,\n stripImageTextRefs: true,\n }),\n};\n","import type { SymbiontAdapter } from './adapter.js';\nimport type { TranscriptTurn, TranscriptImage } from './adapter.js';\nimport { mimeTypeForExtension, parseJsonlTurns } from './adapter.js';\nimport { PROMPT_PREVIEW_CHARS } from '../constants.js';\nimport fs from 'node:fs';\nimport path from 'node:path';\nimport os from 'node:os';\n\n/**\n * Cursor stores conversation transcripts in:\n * ~/.cursor/projects/<project-path>/agent-transcripts/<session-id>.txt\n *\n * Images are saved as files in:\n * ~/.cursor/projects/<project-path>/assets/<filename>.png\n *\n * 
Transcript format is plain text with role markers on their own line:\n * user: — human prompt (may contain <image_files> and <user_query> tags)\n * assistant: — assistant response (may contain [Tool call] and [Thinking] blocks)\n */\n\nconst USER_MARKER = '\\nuser:\\n';\nconst ASSISTANT_MARKER = '\\nassistant:\\n';\nconst TOOL_CALL_MARKER = '[Tool call]';\nconst TOOL_RESULT_MARKER = '[Tool result]';\nconst THINKING_MARKER = '[Thinking]';\n\nfunction getCursorProjectsBase(): string {\n return path.join(os.homedir(), '.cursor', 'projects');\n}\n\nconst CURSOR_PROJECTS = getCursorProjectsBase();\n\nexport const cursorAdapter: SymbiontAdapter = {\n name: 'cursor',\n displayName: 'Cursor',\n pluginRootEnvVar: 'CURSOR_PLUGIN_ROOT',\n hookFields: {\n sessionId: 'conversation_id',\n transcriptPath: 'transcript_path',\n lastResponse: 'last_assistant_message',\n prompt: 'prompt',\n toolName: 'tool_name',\n toolInput: 'tool_input',\n toolOutput: 'tool_output',\n },\n\n findTranscript(sessionId: string): string | null {\n try {\n for (const project of fs.readdirSync(CURSOR_PROJECTS, { withFileTypes: true })) {\n if (!project.isDirectory()) continue;\n const transcriptsDir = path.join(CURSOR_PROJECTS, project.name, 'agent-transcripts');\n // Try .txt (older Cursor) then .jsonl inside session directory (newer Cursor)\n for (const candidate of [\n path.join(transcriptsDir, `${sessionId}.txt`),\n path.join(transcriptsDir, sessionId, `${sessionId}.jsonl`),\n ]) {\n try {\n fs.accessSync(candidate);\n return candidate;\n } catch { /* not here */ }\n }\n }\n } catch { /* projects dir doesn't exist */ }\n return null;\n },\n\n parseTurns(content: string): TranscriptTurn[] {\n // Detect format: JSONL (starts with '{') or plain text (starts with 'user:')\n const trimmed = content.trimStart();\n if (trimmed.startsWith('{')) {\n return parseCursorJsonl(content);\n }\n return parseCursorText(content);\n },\n};\n\n/** Parse Cursor's newer JSONL format — same structure as Claude's but uses 
'role' field */\nfunction parseCursorJsonl(content: string): TranscriptTurn[] {\n return parseJsonlTurns(content, {\n roleField: 'role',\n extractTimestamp: false,\n skipToolResultUsers: false,\n stripImageTextRefs: false,\n });\n}\n\n/** Parse Cursor's older plain-text transcript format. */\nfunction parseCursorText(content: string): TranscriptTurn[] {\n const turns: TranscriptTurn[] = [];\n // Split on user marker — each block is a new human turn.\n const sections = ('\\n' + content).split(USER_MARKER).slice(1);\n\n for (const section of sections) {\n // Extract user query from <user_query> tags or raw text before first assistant response\n let promptText = '';\n const queryMatch = section.match(/<user_query>\\s*([\\s\\S]*?)\\s*<\\/user_query>/);\n if (queryMatch) {\n promptText = queryMatch[1].trim().slice(0, PROMPT_PREVIEW_CHARS);\n } else {\n // No tags — take text before the first assistant response.\n const beforeAssistant = section.split(ASSISTANT_MARKER)[0];\n promptText = beforeAssistant.replace(/<[^>]+>[\\s\\S]*?<\\/[^>]+>/g, '').trim().slice(0, PROMPT_PREVIEW_CHARS);\n }\n\n // Extract image references from <image_files> tags\n const images: TranscriptImage[] = [];\n const imageFilesMatch = section.match(/<image_files>([\\s\\S]*?)<\\/image_files>/);\n if (imageFilesMatch) {\n const imageBlock = imageFilesMatch[1];\n const pathMatches = imageBlock.matchAll(/^\\d+\\.\\s+(.+\\.(?:png|jpg|jpeg|gif|webp))\\s*$/gmi);\n for (const match of pathMatches) {\n const imagePath = match[1].trim();\n try {\n const data = fs.readFileSync(imagePath).toString('base64');\n const mediaType = mimeTypeForExtension(path.extname(imagePath));\n images.push({ data, mediaType });\n } catch {\n // Image file not accessible — skip\n }\n }\n }\n\n // Count tool calls in assistant sections\n const toolCallCount = section.split(TOOL_CALL_MARKER).length - 1;\n\n // Extract the last meaningful assistant text response.\n // Scan assistant blocks (split on \\nA:\\n) from the end.\n // A 
block is \"meaningful\" if it contains lines that aren't tool calls/results/thinking.\n let aiResponse: string | undefined;\n const assistantBlocks = section.split(ASSISTANT_MARKER).slice(1);\n for (let j = assistantBlocks.length - 1; j >= 0; j--) {\n const lines = assistantBlocks[j].split('\\n');\n const textLines: string[] = [];\n let skip = false;\n for (const line of lines) {\n // Skip tool calls, tool results, and thinking blocks\n if (line.startsWith(TOOL_CALL_MARKER) || line.startsWith(TOOL_RESULT_MARKER) || line.startsWith(THINKING_MARKER)) {\n skip = true;\n continue;\n }\n // Resume after a blank line following a skipped block\n if (skip && line.trim() === '') continue;\n if (skip && !line.startsWith(' ')) skip = false; // End of indented tool args\n if (skip) continue;\n textLines.push(line);\n }\n const text = textLines.join('\\n').trim();\n if (text) {\n aiResponse = text;\n break;\n }\n }\n\n if (promptText || images.length > 0) {\n turns.push({\n prompt: promptText,\n toolCount: toolCallCount,\n timestamp: '',\n ...(aiResponse ? { aiResponse } : {}),\n ...(images.length > 0 ? { images } : {}),\n });\n }\n }\n\n return turns;\n}\n","import type { TranscriptParser } from './types.js';\nimport type { TranscriptTurn, TranscriptImage } from '../adapter.js';\nimport { PROMPT_PREVIEW_CHARS } from '../../constants.js';\n\n/** Parse a data URL (data:<mime>;base64,<data>) into media type and base64 data. */\nfunction parseDataUrl(url: string): { mediaType: string; data: string } | null {\n const match = url.match(/^data:([^;]+);base64,(.+)$/);\n if (!match) return null;\n return { mediaType: match[1], data: match[2] };\n}\n\n/**\n * Codex Desktop wraps user prompts with file-mention preambles when screenshots\n * are attached, and injects <image> wrapper tags around image blocks. 
Strip both\n * so the captured prompt contains only the user's actual text.\n *\n * Preamble pattern:\n * \"# Files mentioned by the user:\\n\\n## <filename>: <path>\\n\\n## My request for Codex:\\n<actual prompt>\"\n *\n * Image wrapper tags (separate input_text blocks):\n * \"<image name=[Image #1]>\" / \"</image>\"\n */\nconst IMAGE_WRAPPER_TAG = /^<\\/?image\\b[^>]*>$/;\nconst CODEX_PROMPT_MARKER = '## My request for Codex:\\n';\n\nfunction cleanCodexPromptText(text: string): string {\n // Strip image wrapper tags\n if (IMAGE_WRAPPER_TAG.test(text.trim())) return '';\n // Extract actual prompt from file-mention preamble\n const idx = text.indexOf(CODEX_PROMPT_MARKER);\n if (idx !== -1) return text.slice(idx + CODEX_PROMPT_MARKER.length);\n return text;\n}\n\n/**\n * Parses Codex's nested-payload JSONL transcript format.\n *\n * Codex JSONL entries have the structure:\n * { type: \"response_item\", payload: { type: \"message\", role: \"user\"|\"assistant\"|\"developer\", content: [...] 
} }\n *\n * Key differences from the standard (Claude Code) format:\n * - Role is at payload.role, not top-level\n * - Content is at payload.content, not message.content\n * - User content blocks use type: \"input_text\", assistant use type: \"output_text\"\n * - Tool use is separate \"function_call\" entries, not nested blocks\n * - Images are data URLs in \"input_image\" blocks (data:<mime>;base64,<data>), not structured source objects\n * - Codex Desktop wraps prompts with file-mention preambles and <image> tags when screenshots are attached — these are stripped\n * - Non-conversation entries (event_msg, session_meta, turn_context, reasoning) are skipped\n */\nexport class CodexJsonlParser implements TranscriptParser {\n parseTurns(content: string): TranscriptTurn[] {\n const lines = content.split('\\n').filter(Boolean);\n const turns: TranscriptTurn[] = [];\n let current: TranscriptTurn | null = null;\n\n for (const line of lines) {\n let entry: Record<string, unknown>;\n try { entry = JSON.parse(line); } catch { continue; }\n\n // Only process response_item entries — skip event_msg, session_meta, turn_context\n if (entry.type !== 'response_item') continue;\n\n const payload = entry.payload as Record<string, unknown> | undefined;\n if (!payload) continue;\n\n const payloadType = payload.type as string;\n const timestamp = (entry.timestamp as string) ?? '';\n\n // Function calls are separate entries — count them as tool use\n if (payloadType === 'function_call') {\n if (current) current.toolCount++;\n continue;\n }\n\n // Only process message payloads from here\n if (payloadType !== 'message') continue;\n\n const role = payload.role as string;\n const blocks = Array.isArray(payload.content)\n ? 
(payload.content as Array<{ type: string; text?: string; image_url?: string }>)\n : [];\n\n if (role === 'user') {\n const textParts = blocks\n .filter((b) => b.type === 'input_text' && b.text?.trim())\n .map((b) => cleanCodexPromptText(b.text!))\n .filter((t) => t.trim());\n\n if (textParts.length === 0) continue;\n\n if (current) turns.push(current);\n\n const promptText = textParts.join('\\n').trim().slice(0, PROMPT_PREVIEW_CHARS);\n\n // Extract images from input_image blocks (data URL format: data:<mime>;base64,<data>)\n const images: TranscriptImage[] = [];\n for (const b of blocks) {\n if (b.type === 'input_image' && b.image_url) {\n const parsed = parseDataUrl(b.image_url);\n if (parsed) images.push(parsed);\n }\n }\n\n current = { prompt: promptText, toolCount: 0, timestamp, ...(images.length > 0 ? { images } : {}) };\n } else if (role === 'assistant' && current) {\n const textParts = blocks\n .filter((b) => b.type === 'output_text' && b.text)\n .map((b) => b.text!);\n const text = textParts.join('\\n').trim();\n if (text) current.aiResponse = text;\n }\n // role === 'developer' is silently skipped\n }\n\n if (current) turns.push(current);\n return turns;\n }\n}\n","import type { SymbiontAdapter } from './adapter.js';\nimport { CodexJsonlParser } from './parsers/codex-jsonl.js';\nimport fs from 'node:fs';\nimport path from 'node:path';\nimport os from 'node:os';\n\nconst TRANSCRIPT_BASE = path.join(os.homedir(), '.codex');\nconst codexParser = new CodexJsonlParser();\n\n/**\n * Find a Codex transcript file by session ID.\n *\n * Codex stores transcripts at:\n * <baseDir>/sessions/YYYY/MM/DD/rollout-<timestamp>-<sessionId>.jsonl\n *\n * Recursively scans the sessions directory for a JSONL file whose name\n * contains the session ID.\n */\nexport function findCodexTranscript(baseDir: string, sessionId: string): string | null {\n const sessionsDir = path.join(baseDir, 'sessions');\n try {\n return scanForSessionFile(sessionsDir, sessionId);\n } catch {\n 
return null;\n }\n}\n\nfunction scanForSessionFile(dir: string, sessionId: string): string | null {\n let entries: fs.Dirent[];\n try { entries = fs.readdirSync(dir, { withFileTypes: true }); }\n catch { return null; }\n\n for (const entry of entries) {\n const fullPath = path.join(dir, entry.name);\n if (entry.isDirectory()) {\n const found = scanForSessionFile(fullPath, sessionId);\n if (found) return found;\n } else if (entry.isFile() && entry.name.includes(sessionId) && entry.name.endsWith('.jsonl')) {\n return fullPath;\n }\n }\n return null;\n}\n\nexport const codexAdapter: SymbiontAdapter = {\n name: 'codex',\n displayName: 'Codex',\n pluginRootEnvVar: 'CODEX_PLUGIN_ROOT',\n hookFields: {\n sessionId: 'session_id',\n transcriptPath: 'transcript_path',\n lastResponse: 'last_assistant_message',\n prompt: 'prompt',\n toolName: 'tool_name',\n toolInput: 'tool_input',\n toolOutput: 'tool_output',\n },\n\n findTranscript: (sessionId) => findCodexTranscript(TRANSCRIPT_BASE, sessionId),\n\n parseTurns: (content) => codexParser.parseTurns(content),\n};\n","import type { SymbiontAdapter, TranscriptTurn } from './adapter.js';\nimport { PROMPT_PREVIEW_CHARS } from '../constants.js';\nimport fs from 'node:fs';\nimport path from 'node:path';\nimport os from 'node:os';\n\n/**\n * Gemini CLI stores transcripts as single JSON files (not JSONL) in:\n * ~/.gemini/tmp/<project-name>/chats/session-<date>-<sessionId>.json\n *\n * Each file has a messages array with type: 'user' | 'gemini'.\n * User messages have content as array of { text } blocks.\n * Gemini messages have content as a string, with optional toolCalls array.\n */\n\nconst GEMINI_TMP = path.join(os.homedir(), '.gemini', 'tmp');\n\nexport const geminiAdapter: SymbiontAdapter = {\n name: 'gemini',\n displayName: 'Gemini CLI',\n pluginRootEnvVar: 'GEMINI_PLUGIN_ROOT',\n hookFields: {\n sessionId: 'session_id',\n transcriptPath: 'transcript_path',\n lastResponse: 'last_assistant_message',\n prompt: 'prompt',\n 
toolName: 'tool_name',\n toolInput: 'tool_input',\n toolOutput: 'tool_output',\n sessionIdEnv: 'GEMINI_SESSION_ID',\n },\n\n findTranscript(sessionId: string): string | null {\n // Gemini session files are named session-<date>-<sessionId-prefix>.json\n // and stored under ~/.gemini/tmp/<project>/chats/\n // The sessionId in the filename is a prefix of the full UUID.\n try {\n for (const project of fs.readdirSync(GEMINI_TMP, { withFileTypes: true })) {\n if (!project.isDirectory()) continue;\n const chatsDir = path.join(GEMINI_TMP, project.name, 'chats');\n try {\n for (const file of fs.readdirSync(chatsDir)) {\n if (!file.endsWith('.json')) continue;\n // Match by sessionId prefix in filename (session-<date>-<prefix>.json)\n if (file.includes(sessionId.slice(0, 8))) {\n // Verify the sessionId inside the file matches\n try {\n const data = JSON.parse(fs.readFileSync(path.join(chatsDir, file), 'utf-8'));\n if (data.sessionId === sessionId) {\n return path.join(chatsDir, file);\n }\n } catch { /* malformed file */ }\n }\n }\n } catch { /* chats dir doesn't exist */ }\n }\n } catch { /* tmp dir doesn't exist */ }\n return null;\n },\n\n parseTurns(content: string): TranscriptTurn[] {\n return parseGeminiJson(content);\n },\n};\n\n/** Gemini message types. */\nconst USER_TYPE = 'user';\nconst GEMINI_TYPE = 'gemini';\n\n/**\n * Parse Gemini's single-JSON transcript format.\n * The file contains { messages: [...] 
} where each message has type, content, and optional toolCalls.\n */\nfunction parseGeminiJson(content: string): TranscriptTurn[] {\n let data: { messages?: GeminiMessage[] };\n try { data = JSON.parse(content); } catch { return []; }\n\n const messages = data.messages;\n if (!Array.isArray(messages)) return [];\n\n const turns: TranscriptTurn[] = [];\n let current: TranscriptTurn | null = null;\n\n for (const msg of messages) {\n if (msg.type === USER_TYPE) {\n if (current) turns.push(current);\n\n // User content is an array of { text } blocks\n const promptText = Array.isArray(msg.content)\n ? msg.content.map((b) => b.text ?? '').join('\\n').trim()\n : (typeof msg.content === 'string' ? msg.content : '');\n\n current = {\n prompt: promptText.slice(0, PROMPT_PREVIEW_CHARS),\n toolCount: 0,\n timestamp: msg.timestamp ?? '',\n };\n } else if (msg.type === GEMINI_TYPE && current) {\n // Gemini content is a plain string\n const text = typeof msg.content === 'string' ? msg.content.trim() : '';\n if (text) current.aiResponse = text;\n\n // Count tool calls\n if (Array.isArray(msg.toolCalls)) {\n current.toolCount += msg.toolCalls.length;\n }\n }\n }\n\n if (current) turns.push(current);\n return turns;\n}\n\n/** Shape of a message in Gemini's transcript JSON. */\ninterface GeminiMessage {\n type: string;\n content: string | Array<{ text?: string }>;\n timestamp?: string;\n toolCalls?: Array<{ name: string; args?: unknown }>;\n}\n","import type { SymbiontAdapter, TranscriptTurn } from './adapter.js';\nimport { PROMPT_PREVIEW_CHARS } from '../constants.js';\nimport fs from 'node:fs';\nimport path from 'node:path';\nimport os from 'node:os';\n\nconst TRANSCRIPT_DIR = path.join(os.homedir(), '.windsurf', 'transcripts');\n\n/** Windsurf JSONL entry type field values. 
*/\nconst USER_INPUT_TYPE = 'user_input';\nconst PLANNER_RESPONSE_TYPE = 'planner_response';\nconst CODE_ACTION_TYPE = 'code_action';\n\nexport const windsurfAdapter: SymbiontAdapter = {\n name: 'windsurf',\n displayName: 'Windsurf',\n pluginRootEnvVar: 'WINDSURF_PLUGIN_ROOT',\n hookFields: {\n sessionId: 'trajectory_id',\n transcriptPath: 'transcript_path',\n lastResponse: 'last_assistant_message',\n prompt: 'prompt',\n toolName: 'tool_name',\n toolInput: 'tool_input',\n toolOutput: 'tool_output',\n },\n\n findTranscript(sessionId: string): string | null {\n // Windsurf stores transcripts directly by trajectory ID\n const candidate = path.join(TRANSCRIPT_DIR, `${sessionId}.jsonl`);\n try {\n fs.accessSync(candidate);\n return candidate;\n } catch { return null; }\n },\n\n parseTurns(content: string): TranscriptTurn[] {\n return parseWindsurfJsonl(content);\n },\n};\n\n/**\n * Parse Windsurf's JSONL transcript format.\n *\n * Windsurf entries use a `type` field with values like 'user_input',\n * 'planner_response', 'code_action' — NOT the standard user/assistant roles.\n */\nfunction parseWindsurfJsonl(content: string): TranscriptTurn[] {\n const lines = content.split('\\n').filter(Boolean);\n const turns: TranscriptTurn[] = [];\n let current: TranscriptTurn | null = null;\n\n for (const line of lines) {\n let entry: Record<string, unknown>;\n try { entry = JSON.parse(line); } catch { continue; }\n\n const type = entry.type as string;\n\n if (type === USER_INPUT_TYPE) {\n if (current) turns.push(current);\n\n // Extract prompt text — Windsurf may store it in different fields\n const promptText = (\n (entry.user_response as string) ??\n (entry.text as string) ??\n (entry.content as string) ??\n ''\n ).trim().slice(0, PROMPT_PREVIEW_CHARS);\n\n current = { prompt: promptText, toolCount: 0, timestamp: '' };\n } else if (type === PLANNER_RESPONSE_TYPE && current) {\n const text = (\n (entry.response as string) ??\n (entry.text as string) ??\n (entry.content as string) 
??\n ''\n ).trim();\n if (text) current.aiResponse = text;\n } else if (type === CODE_ACTION_TYPE && current) {\n current.toolCount++;\n }\n }\n\n if (current) turns.push(current);\n return turns;\n}\n","import type { SymbiontAdapter, TranscriptTurn } from './adapter.js';\nimport { PROMPT_PREVIEW_CHARS } from '../constants.js';\n\n/**\n * VS Code Copilot stores chat transcripts as JSONL delta files at:\n * ~/Library/Application Support/Code/User/workspaceStorage/<hash>/chatSessions/<sessionId>.jsonl\n *\n * Format:\n * Line 0: kind=0 — full initial state { v: { sessionId, requests: [...] } }\n * Lines 1+: kind=1 (key update) or kind=2 (append to array at key path)\n *\n * Each request has:\n * - message.text — the user prompt\n * - response — array of parts accumulated via kind:2 deltas, each part is an\n * array of objects with `kind` field: 'thinking', 'toolInvocationSerialized',\n * 'markdownContent', 'progressTaskSerialized', or plain text { value: \"...\" }\n */\n\n/** Response part kinds that represent tool invocations. 
*/\nconst TOOL_PART_KINDS = new Set(['toolInvocationSerialized', 'toolConfirmation', 'toolMessage']);\n\nexport const vscodeCopilotAdapter: SymbiontAdapter = {\n name: 'vscode-copilot',\n displayName: 'VS Code Copilot',\n pluginRootEnvVar: 'VSCODE_PLUGIN_ROOT',\n hookFields: {\n sessionId: 'sessionId',\n transcriptPath: 'transcript_path',\n lastResponse: 'last_assistant_message',\n prompt: 'prompt',\n toolName: 'tool_name',\n toolInput: 'tool_input',\n toolOutput: 'tool_output',\n },\n\n // VS Code doesn't have a predictable transcript directory — hooks provide the path\n findTranscript: () => null,\n\n parseTurns: (content) => parseVsCodeDeltaJsonl(content),\n};\n\n/**\n * Parse VS Code Copilot's delta JSONL transcript format.\n * Replays kind:1 (set) and kind:2 (append) deltas onto the initial state,\n * then extracts turns from the reconstructed requests array.\n */\nfunction parseVsCodeDeltaJsonl(content: string): TranscriptTurn[] {\n const lines = content.split('\\n').filter(Boolean);\n if (lines.length === 0) return [];\n\n // Parse initial state (kind: 0)\n let initial: VsCodeState;\n try {\n const first = JSON.parse(lines[0]);\n if (first.kind !== 0 || !first.v) return [];\n initial = first.v;\n } catch { return []; }\n\n // Replay deltas to reconstruct final state\n const state = JSON.parse(JSON.stringify(initial)) as VsCodeState;\n\n for (let i = 1; i < lines.length; i++) {\n try {\n const delta = JSON.parse(lines[i]);\n if (!delta.k || !Array.isArray(delta.k)) continue;\n applyDelta(state, delta.k, delta.v, delta.kind);\n } catch { /* malformed delta line */ }\n }\n\n // Extract turns from requests\n return extractTurns(state);\n}\n\n/** Apply a single delta to the state. 
*/\nfunction applyDelta(state: Record<string, unknown>, keyPath: string[], value: unknown, kind: number): void {\n let obj: Record<string, unknown> = state;\n for (let j = 0; j < keyPath.length - 1; j++) {\n if (obj[keyPath[j]] === undefined || obj[keyPath[j]] === null) {\n obj[keyPath[j]] = {};\n }\n obj = obj[keyPath[j]] as Record<string, unknown>;\n }\n const lastKey = keyPath[keyPath.length - 1];\n\n if (kind === 1) {\n // Key update — set value\n obj[lastKey] = value;\n } else if (kind === 2) {\n // Append — push to array\n if (!Array.isArray(obj[lastKey])) obj[lastKey] = [];\n (obj[lastKey] as unknown[]).push(value);\n }\n}\n\n/** Extract transcript turns from the reconstructed state. */\nfunction extractTurns(state: VsCodeState): TranscriptTurn[] {\n if (!Array.isArray(state.requests)) return [];\n\n const turns: TranscriptTurn[] = [];\n\n for (const req of state.requests) {\n const promptText = req.message?.text?.trim() ?? '';\n if (!promptText) continue;\n\n const timestamp = req.timestamp ? new Date(req.timestamp).toISOString() : '';\n\n // Count tool invocations and extract AI response text from response parts\n let toolCount = 0;\n let aiResponse = '';\n const responseParts = normalizeResponseParts(req.response);\n\n for (const part of responseParts) {\n if (TOOL_PART_KINDS.has(part.kind ?? '')) {\n toolCount++;\n } else if (part.kind === 'markdownContent' || part.kind === 'markdownVuln') {\n // Markdown content is the AI's text response\n const text = part.content?.value ?? part.value ?? '';\n if (text) aiResponse = text;\n } else if (!part.kind && typeof part.value === 'string' && part.value.trim()) {\n // Plain text parts (no kind field) — accumulate as AI response\n aiResponse += (aiResponse ? '\\n' : '') + part.value.trim();\n }\n }\n\n turns.push({\n prompt: promptText.slice(0, PROMPT_PREVIEW_CHARS),\n toolCount,\n timestamp,\n ...(aiResponse ? 
{ aiResponse: aiResponse.trim() } : {}),\n });\n }\n\n return turns;\n}\n\n/**\n * VS Code response parts can be:\n * - An array of arrays (each delta append pushes an array of part objects)\n * - An indexed object { 0: [...], 1: [...] } from the initial state\n * Flatten to a single array of part objects.\n */\nfunction normalizeResponseParts(response: unknown): VsCodeResponsePart[] {\n if (!response) return [];\n\n // Array of arrays → flatten\n if (Array.isArray(response)) {\n return response.flat().filter((p): p is VsCodeResponsePart => p && typeof p === 'object');\n }\n\n // Indexed object → extract values and flatten\n if (typeof response === 'object') {\n return Object.values(response)\n .flat()\n .filter((p): p is VsCodeResponsePart => p && typeof p === 'object' && !Array.isArray(p));\n }\n\n return [];\n}\n\n// --- Types ---\n\ninterface VsCodeState {\n sessionId?: string;\n requests?: VsCodeRequest[];\n [key: string]: unknown;\n}\n\ninterface VsCodeRequest {\n requestId?: string;\n timestamp?: number;\n message?: { text?: string };\n response?: unknown;\n [key: string]: unknown;\n}\n\ninterface VsCodeResponsePart {\n kind?: string;\n value?: string;\n content?: { value?: string };\n [key: string]: unknown;\n}\n","import type { SymbiontAdapter, TranscriptTurn } from './adapter.js';\nimport { claudeCodeAdapter } from './claude-code.js';\nimport { cursorAdapter } from './cursor.js';\nimport { codexAdapter } from './codex.js';\nimport { geminiAdapter } from './gemini.js';\nimport { windsurfAdapter } from './windsurf.js';\nimport { vscodeCopilotAdapter } from './vscode-copilot.js';\nimport fs from 'node:fs';\n\n/**\n * All known symbiont adapters, ordered by priority.\n * When searching for a transcript, adapters are tried in order.\n * Add new adapters here as symbiont support grows.\n */\nconst ALL_ADAPTERS: SymbiontAdapter[] = [\n claudeCodeAdapter,\n cursorAdapter,\n codexAdapter,\n geminiAdapter,\n windsurfAdapter,\n vscodeCopilotAdapter,\n];\n\nexport 
class SymbiontRegistry {\n private adapters: SymbiontAdapter[];\n\n constructor(additionalAdapters: SymbiontAdapter[] = []) {\n this.adapters = [...ALL_ADAPTERS, ...additionalAdapters];\n }\n\n /**\n * Find and parse transcript turns for a session.\n * Tries each adapter in priority order. Returns the first match.\n */\n getTranscriptTurns(sessionId: string): { turns: TranscriptTurn[]; source: string } | null {\n for (const adapter of this.adapters) {\n const filePath = adapter.findTranscript(sessionId);\n if (!filePath) continue;\n\n try {\n const content = fs.readFileSync(filePath, 'utf-8');\n const turns = adapter.parseTurns(content);\n if (turns.length > 0) {\n return { turns, source: adapter.name };\n }\n } catch {\n // Adapter found a path but read/parse failed — try next\n }\n }\n return null;\n }\n\n /** List all registered adapter names */\n get adapterNames(): string[] {\n return this.adapters.map((a) => a.name);\n }\n\n /** Get a specific adapter by name */\n getAdapter(name: string): SymbiontAdapter | undefined {\n return this.adapters.find((a) => a.name === name);\n }\n\n /** Detect which symbiont is currently active based on environment variables */\n detectActiveAgent(): SymbiontAdapter | undefined {\n for (const adapter of this.adapters) {\n if (process.env[adapter.pluginRootEnvVar]) {\n return adapter;\n }\n }\n return undefined;\n }\n\n /**\n * Parse turns from a known transcript file path (provided by hook).\n * Tries each adapter's parseTurns until one produces results.\n * Skips directory scanning entirely — the path is already known.\n */\n parseTurnsFromPath(filePath: string): { turns: TranscriptTurn[]; source: string } | null {\n try {\n const content = fs.readFileSync(filePath, 'utf-8');\n // Try the active agent's parser first, then fall back to others\n const active = this.detectActiveAgent();\n const orderedAdapters = active\n ? 
[active, ...this.adapters.filter((a) => a !== active)]\n : this.adapters;\n\n for (const adapter of orderedAdapters) {\n const turns = adapter.parseTurns(content);\n if (turns.length > 0) {\n return { turns, source: `${adapter.name}:direct` };\n }\n }\n } catch {\n // File unreadable — caller will fall back to directory scanning\n }\n return null;\n }\n\n /**\n * Resolve the plugin root directory from the active agent's environment variable.\n * Returns undefined if no agent env var is set (e.g., running from CLI directly).\n */\n resolvePluginRoot(): string | undefined {\n for (const adapter of this.adapters) {\n const value = process.env[adapter.pluginRootEnvVar];\n if (value) return value;\n }\n return undefined;\n }\n}\n","import { SymbiontRegistry } from '../symbionts/registry.js';\nimport type { SymbiontAdapter } from '../symbionts/adapter.js';\nimport { PROMPT_PREVIEW_CHARS } from '../constants.js';\nimport fs from 'node:fs';\n\n// Re-export TranscriptTurn from its canonical home in symbionts/adapter.ts\nexport type { TranscriptTurn } from '../symbionts/adapter.js';\nimport type { TranscriptTurn } from '../symbionts/adapter.js';\n\ninterface TranscriptConfig {\n /** Additional symbiont adapters to register (useful for testing or custom symbionts) */\n additionalAdapters?: SymbiontAdapter[];\n}\n\nexport class TranscriptMiner {\n private registry: SymbiontRegistry;\n\n constructor(config?: TranscriptConfig) {\n this.registry = new SymbiontRegistry(config?.additionalAdapters);\n }\n\n /**\n * Extract all conversation turns for a session.\n * Convenience wrapper — delegates to getAllTurnsWithSource.\n */\n getAllTurns(sessionId: string): TranscriptTurn[] {\n return this.getAllTurnsWithSource(sessionId).turns;\n }\n\n /**\n * Extract turns using the hook-provided transcript path first (fast, no scanning),\n * then fall back to adapter registry scanning if the path isn't provided.\n */\n getAllTurnsWithSource(sessionId: string, transcriptPath?: string): { turns: 
TranscriptTurn[]; source: string } {\n // Primary: use the path provided by the hook (no directory scanning needed)\n if (transcriptPath) {\n const result = this.registry.parseTurnsFromPath(transcriptPath);\n if (result) return result;\n }\n\n // Fallback: scan known agent directories\n const result = this.registry.getTranscriptTurns(sessionId);\n if (result) return result;\n return { turns: [], source: 'none' };\n }\n}\n\n/**\n * Build turns from buffer events — the fallback when no agent transcript is available.\n * Buffer events come from hooks (user_prompt, tool_use) and lack AI responses.\n * Turns will have prompts and tool counts but no aiResponse.\n */\nexport function extractTurnsFromBuffer(events: Array<Record<string, unknown>>): TranscriptTurn[] {\n const turns: TranscriptTurn[] = [];\n let current: TranscriptTurn | null = null;\n\n for (const event of events) {\n const type = event.type as string;\n if (type === 'user_prompt') {\n if (current) turns.push(current);\n current = {\n prompt: String(event.prompt ?? '').slice(0, PROMPT_PREVIEW_CHARS),\n toolCount: 0,\n timestamp: String(event.timestamp ?? new Date().toISOString()),\n };\n } else if (type === 'tool_use') {\n if (current) current.toolCount++;\n }\n }\n if (current) turns.push(current);\n return turns;\n}\n","/**\n * Enumerate leaf paths in a plain-object patch. A \"leaf\" is any value that\n * isn't a non-array plain object — primitives, arrays, and `null` all count.\n * Used to determine which config paths a patch touches so reactions can\n * decide whether to fire.\n */\nexport function enumerateLeafPaths(obj: unknown, prefix = ''): string[] {\n if (obj === null || typeof obj !== 'object' || Array.isArray(obj)) {\n return prefix ? [prefix] : [];\n }\n const out: string[] = [];\n for (const [key, value] of Object.entries(obj as Record<string, unknown>)) {\n const next = prefix ? 
`${prefix}.${key}` : key;\n out.push(...enumerateLeafPaths(value, next));\n }\n return out;\n}\n\n/**\n * Compute the union of paths touched by a patch object and a list of\n * explicit clear-key strings, deduplicated. This is the input to\n * `ConfigReactionRegistry.fire()`.\n */\nexport function computeTouchedPaths(patch: unknown, clear: string[] | undefined): string[] {\n const patchLeaves = patch && typeof patch === 'object' ? enumerateLeafPaths(patch) : [];\n const clearList = Array.isArray(clear) ? clear : [];\n return [...new Set([...patchLeaves, ...clearList])];\n}\n","import {\n loadConfig,\n updateConfig,\n updateLocalConfig,\n loadMergedConfig,\n loadLocalConfig,\n deepMergeConfig,\n} from '../../config/loader.js';\nimport { z } from 'zod';\nimport { MycoConfigSchema, type MycoConfig } from '../../config/schema.js';\nimport { unsetAtPath } from '../../utils/dot-path.js';\nimport { enumerateLeafPaths } from '../config-reactions/touched-paths.js';\nimport type { RouteRequest, RouteResponse } from '../router.js';\n\nexport async function handleGetConfig(vaultDir: string): Promise<RouteResponse> {\n const config = loadConfig(vaultDir);\n return { body: config };\n}\n\n// ---------------------------------------------------------------------------\n// Scoped config handlers (project vs. local overlay)\n// ---------------------------------------------------------------------------\n\n/** GET /api/config/merged — project config with local overlay applied. */\nexport async function handleGetMergedConfig(vaultDir: string): Promise<RouteResponse> {\n const config = loadMergedConfig(vaultDir);\n return { body: config };\n}\n\n/** GET /api/config/local — raw local overrides (may be empty). 
*/\nexport async function handleGetLocalConfig(vaultDir: string): Promise<RouteResponse> {\n return { body: loadLocalConfig(vaultDir) };\n}\n\ninterface ScopedPutBody {\n scope?: 'project' | 'local';\n patch?: Record<string, unknown>;\n clear?: string[];\n}\n\nconst SCOPED_CONFIG_SCOPES = ['project', 'local'] as const;\n\nfunction isScopedConfigScope(value: unknown): value is ScopedPutBody['scope'] {\n return typeof value === 'string'\n && (SCOPED_CONFIG_SCOPES as readonly string[]).includes(value);\n}\n\nfunction validateClearList(clear: unknown): string[] | RouteResponse {\n if (clear === undefined) return [];\n if (!Array.isArray(clear)) {\n return { status: 400, body: { error: 'clear must be an array of dot-paths' } };\n }\n const invalidEntry = clear.find((entry) => typeof entry !== 'string' || entry.trim().length === 0);\n if (invalidEntry !== undefined) {\n return { status: 400, body: { error: 'clear entries must be non-empty strings' } };\n }\n return clear;\n}\n\nfunction pathsOverlap(a: string, b: string): boolean {\n return a === b || a.startsWith(`${b}.`) || b.startsWith(`${a}.`);\n}\n\n/**\n * PUT /api/config/scoped — atomic patch + clear against project or local config.\n *\n * Request body:\n * { scope: 'project' | 'local',\n * patch?: DeepPartial<MycoConfig>, // deep-merged into scope\n * clear?: string[] } // dot-paths removed from scope\n *\n * At least one of `patch` (non-empty object) or `clear` (non-empty array) is\n * required. If both are present, overlapping keys are rejected (400). The\n * server applies `clear` first, then merges `patch`, in a single write.\n */\nexport async function handlePutScopedConfig(vaultDir: string, body: unknown): Promise<RouteResponse> {\n const payload = (body ?? {}) as ScopedPutBody;\n if (!isScopedConfigScope(payload.scope)) {\n return { status: 400, body: { error: 'scope must be project or local' } };\n }\n const scope = payload.scope;\n const patch = payload.patch ?? 
{};\n const clearListOrError = validateClearList(payload.clear);\n if (Array.isArray(clearListOrError) === false) return clearListOrError;\n const clearList = clearListOrError;\n\n if (typeof patch !== 'object' || patch === null || Array.isArray(patch)) {\n return { status: 400, body: { error: 'patch must be an object' } };\n }\n const patchLeaves = enumerateLeafPaths(patch);\n const hasPatch = patchLeaves.length > 0;\n const hasClear = clearList.length > 0;\n if (!hasPatch && !hasClear) {\n return { status: 400, body: { error: 'patch or clear required' } };\n }\n\n const overlap = patchLeaves.filter((leaf) => clearList.some((clearPath) => pathsOverlap(leaf, clearPath)));\n if (overlap.length > 0) {\n return { status: 400, body: { error: 'patch_clear_overlap', keys: overlap } };\n }\n\n if (scope === 'local') {\n try {\n const project = loadConfig(vaultDir);\n const updated = updateLocalConfig(vaultDir, (local) => {\n const working = structuredClone(local) as Record<string, unknown>;\n for (const key of clearList) unsetAtPath(working, key);\n const nextLocal = deepMergeConfig(\n working,\n patch as Record<string, unknown>,\n ) as Partial<MycoConfig>;\n const merged = deepMergeConfig(\n project as Record<string, unknown>,\n nextLocal as Record<string, unknown>,\n );\n MycoConfigSchema.parse(merged);\n return nextLocal;\n });\n return { body: updated };\n } catch (err) {\n if (err instanceof z.ZodError) {\n return { status: 400, body: { error: 'validation_failed', issues: err.issues } };\n }\n throw err;\n }\n }\n\n try {\n // saveConfig (called by updateConfig) runs the Zod parse — the callback\n // returns the deep-merged object without validating, and any invalid\n // shape raises a ZodError that we convert to a 400 below.\n const updated = updateConfig(vaultDir, (current) => {\n const working = structuredClone(current) as Record<string, unknown>;\n for (const key of clearList) unsetAtPath(working, key);\n return deepMergeConfig(working, patch as Record<string, 
unknown>) as MycoConfig;\n });\n return { body: updated };\n } catch (err) {\n if (err instanceof z.ZodError) {\n return { status: 400, body: { error: 'validation_failed', issues: err.issues } };\n }\n throw err;\n }\n}\n\n// ---------------------------------------------------------------------------\n// Plan-dirs factory (requires mutable references to runtime state)\n// ---------------------------------------------------------------------------\n\nexport interface PlanDirDeps {\n symbiontPlanDirsByAgent: Record<string, string[]>;\n}\n\nexport function createPlanDirHandlers(deps: PlanDirDeps) {\n /**\n * GET /api/config/plan-dirs — returns the symbiont-derived plan dir\n * inventory (manifest-driven, never user-editable). Custom plan dirs\n * are read/written through /api/config/scoped like any other config\n * field.\n */\n async function handleGetPlanDirs(_req: RouteRequest): Promise<RouteResponse> {\n return { body: { symbiont: deps.symbiontPlanDirsByAgent } };\n }\n\n return { handleGetPlanDirs };\n}\n","/**\n * Parse a comma-separated query-string value into a trimmed,\n * non-empty list of tokens. 
Used by REST handlers that accept\n * `?field=a,b,c` style multi-value parameters.\n *\n * Returns an empty array when the input is undefined, empty, or\n * contains only whitespace — callers can treat an empty result as\n * \"no filter\" without a branch.\n */\nexport function parseCsvList(raw: string | undefined | null): string[] {\n if (!raw) return [];\n return raw\n .split(',')\n .map((token) => token.trim())\n .filter((token) => token.length > 0);\n}\n","/**\n * Log entry CRUD query helpers.\n *\n * All functions obtain the SQLite instance internally via `getDatabase()`.\n * Queries use positional `?` placeholders throughout (better-sqlite3).\n */\n\nimport { getDatabase } from '@myco/db/client.js';\nimport { LEVEL_ORDER, type LogLevel } from '@myco/daemon/logger.js';\nimport { parseCsvList } from '@myco/utils/parse-csv-list.js';\n\n// ---------------------------------------------------------------------------\n// Constants\n// ---------------------------------------------------------------------------\n\n/** Default number of log entries per page for search results. */\nconst DEFAULT_PAGE_SIZE = 100;\n\n/** Default number of entries returned by getLogsSince. */\nconst DEFAULT_STREAM_LIMIT = 200;\n\n// ---------------------------------------------------------------------------\n// Types\n// ---------------------------------------------------------------------------\n\n/** Fields required when inserting a log entry. */\nexport interface LogEntryInsert {\n timestamp: string;\n level: string;\n kind: string;\n component: string;\n message: string;\n data: string | null;\n session_id: string | null;\n}\n\n/** Row shape returned from log_entries queries (all columns). */\nexport interface LogEntryRow {\n id: number;\n timestamp: string;\n level: string;\n kind: string;\n component: string;\n message: string;\n data: string | null;\n session_id: string | null;\n}\n\n/** Filter options for `searchLogs`. 
*/\nexport interface LogSearchParams {\n q?: string;\n level?: string;\n component?: string;\n kind?: string;\n session_id?: string;\n from?: string;\n to?: string;\n page?: number;\n page_size?: number;\n}\n\n/** Paginated result from `searchLogs`. */\nexport interface LogSearchResult {\n entries: LogEntryRow[];\n total: number;\n page: number;\n page_size: number;\n}\n\n/** Result from `getLogsSince` for streaming/tailing. */\nexport interface LogStreamResult {\n entries: LogEntryRow[];\n cursor: number;\n}\n\n// ---------------------------------------------------------------------------\n// Helpers\n// ---------------------------------------------------------------------------\n\n/** Normalize a raw SQLite result row into a typed LogEntryRow. */\nfunction toLogEntryRow(row: Record<string, unknown>): LogEntryRow {\n return {\n id: row.id as number,\n timestamp: row.timestamp as string,\n level: row.level as string,\n kind: row.kind as string,\n component: row.component as string,\n message: row.message as string,\n data: (row.data as string) ?? null,\n session_id: (row.session_id as string) ?? null,\n };\n}\n\n/**\n * Return all level names whose numeric order is >= the given minimum level.\n *\n * Example: levelsAtOrAbove('warn') → ['warn', 'error']\n */\nfunction levelsAtOrAbove(minLevel: string): string[] {\n const minOrder = LEVEL_ORDER[minLevel as LogLevel] ?? 
0;\n return (Object.keys(LEVEL_ORDER) as LogLevel[]).filter(\n (l) => LEVEL_ORDER[l] >= minOrder,\n );\n}\n\n// ---------------------------------------------------------------------------\n// Public API\n// ---------------------------------------------------------------------------\n\n/**\n * Insert a log entry into `log_entries`.\n *\n * FTS sync is handled automatically by the `log_entries_ai` trigger.\n * Returns the new row's integer id.\n */\nexport function insertLogEntry(entry: LogEntryInsert): number {\n const db = getDatabase();\n\n const info = db.prepare(\n `INSERT INTO log_entries (timestamp, level, kind, component, message, data, session_id)\n VALUES (?, ?, ?, ?, ?, ?, ?)`,\n ).run(\n entry.timestamp,\n entry.level,\n entry.kind,\n entry.component,\n entry.message,\n entry.data,\n entry.session_id,\n );\n\n return info.lastInsertRowid as number;\n}\n\n/**\n * Search log entries with optional filters and pagination.\n *\n * Supports:\n * - Full-text search via FTS5 (q param)\n * - Level filter (returns entries at or above the specified level)\n * - Component filter (comma-separated list)\n * - Kind filter\n * - Session ID filter\n * - Time range (from / to ISO timestamps)\n * - Pagination (page / page_size, 1-based page index)\n *\n * Results are ordered by timestamp DESC, id DESC.\n */\nexport function searchLogs(params: LogSearchParams): LogSearchResult {\n const db = getDatabase();\n\n const page = params.page ?? 1;\n const pageSize = params.page_size ?? 
DEFAULT_PAGE_SIZE;\n const offset = (page - 1) * pageSize;\n\n const conditions: string[] = [];\n const queryParams: unknown[] = [];\n\n // Full-text search via FTS5 sub-select\n if (params.q !== undefined && params.q.length > 0) {\n conditions.push(`le.id IN (SELECT rowid FROM log_entries_fts WHERE log_entries_fts MATCH ?)`);\n queryParams.push(params.q);\n }\n\n // Level filter — include all levels at or above the minimum\n if (params.level !== undefined && params.level.length > 0) {\n const levels = levelsAtOrAbove(params.level);\n if (levels.length > 0) {\n conditions.push(`le.level IN (SELECT value FROM json_each(?))`);\n queryParams.push(JSON.stringify(levels));\n }\n }\n\n // Component filter — comma-separated list\n if (params.component !== undefined && params.component.length > 0) {\n const components = parseCsvList(params.component);\n if (components.length > 0) {\n conditions.push(`le.component IN (SELECT value FROM json_each(?))`);\n queryParams.push(JSON.stringify(components));\n }\n }\n\n // Kind filter\n if (params.kind !== undefined && params.kind.length > 0) {\n conditions.push(`le.kind = ?`);\n queryParams.push(params.kind);\n }\n\n // Session ID filter\n if (params.session_id !== undefined && params.session_id.length > 0) {\n conditions.push(`le.session_id = ?`);\n queryParams.push(params.session_id);\n }\n\n // Time range\n if (params.from !== undefined && params.from.length > 0) {\n conditions.push(`le.timestamp >= ?`);\n queryParams.push(params.from);\n }\n\n if (params.to !== undefined && params.to.length > 0) {\n conditions.push(`le.timestamp <= ?`);\n queryParams.push(params.to);\n }\n\n const where = conditions.length > 0 ? 
`WHERE ${conditions.join(' AND ')}` : '';\n\n const countRow = db.prepare(\n `SELECT COUNT(*) as count FROM log_entries le ${where}`,\n ).get(...queryParams) as { count: number };\n\n const rows = db.prepare(\n `SELECT le.id, le.timestamp, le.level, le.kind, le.component, le.message, le.data, le.session_id\n FROM log_entries le\n ${where}\n ORDER BY le.timestamp DESC, le.id DESC\n LIMIT ?\n OFFSET ?`,\n ).all(...queryParams, pageSize, offset) as Record<string, unknown>[];\n\n return {\n entries: rows.map(toLogEntryRow),\n total: countRow.count,\n page,\n page_size: pageSize,\n };\n}\n\n/**\n * Return log entries with id > sinceId in ascending order, for streaming/tailing.\n *\n * Returns entries and a cursor (the id of the last entry returned,\n * or sinceId if no entries were found).\n */\nexport function getLogsSince(sinceId: number, limit?: number): LogStreamResult {\n const db = getDatabase();\n const effectiveLimit = limit ?? DEFAULT_STREAM_LIMIT;\n\n const rows = db.prepare(\n `SELECT id, timestamp, level, kind, component, message, data, session_id\n FROM log_entries\n WHERE id > ?\n ORDER BY id ASC\n LIMIT ?`,\n ).all(sinceId, effectiveLimit) as Record<string, unknown>[];\n\n const entries = rows.map(toLogEntryRow);\n const cursor = entries.length > 0 ? entries[entries.length - 1].id : sinceId;\n\n return { entries, cursor };\n}\n\n/**\n * Return the most recent N log entries, sorted ASC (oldest of the tail first).\n *\n * Used to prime a live-tail stream on initial load — callers then follow the\n * stream forward with `getLogsSince(cursor)`. Cursor is the max id returned,\n * or 0 when the table is empty.\n */\nexport function getLogTail(limit?: number): LogStreamResult {\n const db = getDatabase();\n const effectiveLimit = limit ?? 
DEFAULT_STREAM_LIMIT;\n\n // Select newest first, then reverse — this uses the id index efficiently.\n const rows = db.prepare(\n `SELECT id, timestamp, level, kind, component, message, data, session_id\n FROM log_entries\n ORDER BY id DESC\n LIMIT ?`,\n ).all(effectiveLimit) as Record<string, unknown>[];\n\n const entries = rows.map(toLogEntryRow).reverse();\n const cursor = entries.length > 0 ? entries[entries.length - 1].id : 0;\n\n return { entries, cursor };\n}\n\n/**\n * Retrieve a single log entry by id.\n *\n * @returns the entry row, or null if not found.\n */\nexport function getLogEntry(id: number): LogEntryRow | null {\n const db = getDatabase();\n\n const row = db.prepare(\n `SELECT id, timestamp, level, kind, component, message, data, session_id\n FROM log_entries\n WHERE id = ?`,\n ).get(id) as Record<string, unknown> | undefined;\n\n if (!row) return null;\n return toLogEntryRow(row);\n}\n\n/**\n * Delete log entries older than `beforeTimestamp`.\n *\n * FTS cleanup is handled automatically by the `log_entries_ad` trigger.\n *\n * @returns the number of rows deleted from log_entries.\n */\nexport function deleteOldLogs(beforeTimestamp: string): number {\n const db = getDatabase();\n\n const info = db.prepare(\n `DELETE FROM log_entries WHERE timestamp < ?`,\n ).run(beforeTimestamp);\n\n return info.changes;\n}\n\n/**\n * Return the maximum timestamp in the log_entries table.\n *\n * Used for reconciliation to detect gaps between file logs and DB logs.\n * Returns null if the table is empty.\n */\nexport function getMaxTimestamp(): string | null {\n const db = getDatabase();\n\n const row = db.prepare(\n `SELECT MAX(timestamp) as max_ts FROM log_entries`,\n ).get() as { max_ts: string | null };\n\n return row.max_ts;\n}\n","/**\n * Log explorer API handlers — search, stream (polling), detail, and external ingestion.\n */\n\nimport { z } from 'zod';\nimport { searchLogs, getLogsSince, getLogTail, getLogEntry } from '@myco/db/queries/logs.js';\nimport 
type { LogEntryRow } from '@myco/db/queries/logs.js';\nimport { getSession } from '@myco/db/queries/sessions.js';\nimport { LOG_KINDS } from '@myco/constants/log-kinds.js';\nimport type { RouteRequest, RouteResponse, RouteHandler } from '../router.js';\nimport type { DaemonLogger } from '../logger.js';\n\n// ---------------------------------------------------------------------------\n// Search (historical mode)\n// ---------------------------------------------------------------------------\n\nexport async function handleLogSearch(req: RouteRequest): Promise<RouteResponse> {\n const { q, level, component, kind, session_id, from, to, page, page_size } = req.query;\n\n const result = searchLogs({\n q: q || undefined,\n level: level || undefined,\n component: component || undefined,\n kind: kind || undefined,\n session_id: session_id || undefined,\n from: from || undefined,\n to: to || undefined,\n page: page ? parseInt(page, 10) : undefined,\n page_size: page_size ? parseInt(page_size, 10) : undefined,\n });\n\n return {\n body: {\n entries: result.entries.map(formatEntry),\n total: result.total,\n page: result.page,\n page_size: result.page_size,\n },\n };\n}\n\n// ---------------------------------------------------------------------------\n// Stream (real-time polling mode)\n// ---------------------------------------------------------------------------\n\nexport async function handleLogStream(req: RouteRequest): Promise<RouteResponse> {\n const sinceStr = req.query.since;\n const limitStr = req.query.limit;\n const categoryFilter = req.query.category || undefined;\n const limit = limitStr ? parseInt(limitStr, 10) : undefined;\n\n // No `since` param → tail mode: return the latest N entries so the UI can\n // show \"now\" on initial load. Explicit `since=<id>` (including `since=0`)\n // keeps the forward-scan behavior for the follow path.\n const result = sinceStr === undefined\n ? 
getLogTail(limit)\n : getLogsSince(parseInt(sinceStr, 10), limit);\n const entries = result.entries.map(formatEntry);\n const filtered = categoryFilter\n ? entries.filter((e) => e.category === categoryFilter)\n : entries;\n\n return {\n body: {\n entries: filtered,\n cursor: result.cursor,\n },\n };\n}\n\n// ---------------------------------------------------------------------------\n// Detail (single entry with resolved references)\n// ---------------------------------------------------------------------------\n\nexport async function handleLogDetail(req: RouteRequest): Promise<RouteResponse> {\n const id = parseInt(req.params.id, 10);\n if (isNaN(id)) return { status: 400, body: { error: 'Invalid log entry ID' } };\n\n const entry = getLogEntry(id);\n if (!entry) return { status: 404, body: { error: 'Log entry not found' } };\n\n const parsed = entry.data ? JSON.parse(entry.data) : {};\n const resolved: Record<string, unknown> = {};\n\n // Resolve session_id to session title\n if (entry.session_id) {\n try {\n const session = getSession(entry.session_id);\n if (session) {\n resolved.session_title = (session as { title?: string }).title ?? 
null;\n }\n } catch { /* session may not exist */ }\n }\n\n return {\n body: {\n ...entry,\n data: parsed,\n resolved,\n },\n };\n}\n\n// ---------------------------------------------------------------------------\n// External log ingestion\n// ---------------------------------------------------------------------------\n\nconst ExternalLogBody = z.object({\n level: z.enum(['debug', 'info', 'warn', 'error']),\n component: z.string(),\n message: z.string(),\n data: z.record(z.string(), z.unknown()).optional(),\n});\n\n/**\n * POST /api/log — parse ExternalLogBody and write through the daemon logger.\n * Allows the MCP server (separate process) to log through the daemon.\n */\nexport function createLogIngestionHandler(logger: DaemonLogger): RouteHandler {\n return async (req: RouteRequest): Promise<RouteResponse> => {\n const { level, component, message, data } = ExternalLogBody.parse(req.body);\n logger.log(level, LOG_KINDS.MCP_EVENT, message, { ...data, mcp_component: component });\n return { body: { ok: true } };\n };\n}\n\n// ---------------------------------------------------------------------------\n// Helpers\n// ---------------------------------------------------------------------------\n\nfunction formatEntry(entry: LogEntryRow) {\n const dot = entry.kind.indexOf('.');\n const category = dot > 0 ? entry.kind.slice(0, dot) : entry.kind;\n return {\n ...entry,\n category,\n data: entry.data ? JSON.parse(entry.data) : null,\n };\n}\n","import { spawn } from 'node:child_process';\nimport { z } from 'zod';\nimport { resolveCliEntryPath } from '../../hooks/client.js';\nimport type { RouteResponse } from '../router.js';\nimport type { ProgressTracker } from './progress.js';\nimport { RESTART_RESPONSE_FLUSH_MS } from '../../constants.js';\n\nconst RestartBodySchema = z.object({\n force: z.boolean().optional(),\n}).optional();\n/** Delay before the child process starts — allows the parent to fully release the port. 
*/\nconst RESTART_CHILD_DELAY_SECONDS = 3;\n\nexport interface RestartHandlerDeps {\n vaultDir: string;\n progressTracker: ProgressTracker;\n}\n\nexport async function handleRestart(\n deps: RestartHandlerDeps,\n body: unknown,\n): Promise<RouteResponse> {\n const parsed = RestartBodySchema.safeParse(body);\n const force = parsed.success ? parsed.data?.force : false;\n\n // Check for active operations unless force is set\n if (!force && deps.progressTracker.hasActiveOperations()) {\n return {\n status: 409,\n body: { status: 'busy', message: 'Active operations in progress. Use force=true to override.' },\n };\n }\n\n // Schedule: respond → wait for flush → SIGTERM self → child starts after parent exits.\n // The child waits RESTART_CHILD_DELAY_SECONDS before starting to ensure the parent\n // has fully released the port and cleaned up daemon.json.\n const { execPath, cliEntry } = resolveCliEntryPath();\n const shellCmd = `sleep ${RESTART_CHILD_DELAY_SECONDS} && ${execPath} ${cliEntry} daemon --vault ${deps.vaultDir}`;\n\n const child = spawn('/bin/sh', ['-c', shellCmd], {\n detached: true,\n stdio: 'ignore',\n });\n child.unref();\n\n // Schedule self-termination after response flushes\n setTimeout(() => {\n process.kill(process.pid, 'SIGTERM');\n }, RESTART_RESPONSE_FLUSH_MS);\n\n return { body: { status: 'restarting' } };\n}\n","/**\n * Update checker — fetches the npm registry for @goondocks/myco, compares\n * versions against the current installation, caches results, and supports\n * stable/beta release channels.\n *\n * - Stable channel: compare against dist-tags.latest only.\n * - Beta channel: compare against max(dist-tags.latest, dist-tags.beta).\n * Beta users can always reach stable (no-downgrade rule).\n * - Dev mode exemption: the daemon records its own CLI entry at startup\n * via `setDevBuildCliEntry()` when `detectDevBuild()` reports the\n * running binary is outside the npm global prefix. 
When set, update\n * checks are skipped entirely and any child-spawned shell script\n * (update/restart) uses the recorded CLI entry as its restart\n * target. This replaced the previous `MYCO_CMD` env-var dispatch,\n * which was fragile because several symbionts do not propagate\n * env vars to hook or MCP child processes.\n */\n\nimport fs from 'node:fs';\nimport path from 'node:path';\nimport { execFileSync } from 'node:child_process';\nimport YAML from 'yaml';\nimport semver from 'semver';\n\nimport {\n NPM_REGISTRY_BASE_URL,\n NPM_PACKAGE_NAME,\n UPDATE_PACKAGES,\n MYCO_GLOBAL_DIR,\n UPDATE_CHECK_CACHE_PATH,\n UPDATE_CONFIG_PATH,\n UPDATE_ERROR_PATH,\n UPDATE_CHECK_INTERVAL_HOURS,\n MS_PER_HOUR,\n DEFAULT_RELEASE_CHANNEL,\n RELEASE_CHANNELS,\n type ReleaseChannel,\n type UpdatePackageId,\n} from '../constants/update.js';\n\n// ---------------------------------------------------------------------------\n// Public types\n// ---------------------------------------------------------------------------\n\n/** Persisted update configuration stored in ~/.myco/update.yaml */\nexport interface UpdateConfig {\n channel: ReleaseChannel;\n check_interval_hours: number;\n}\n\n/** Cached dist-tags for a single package. */\nexport interface CachedPackageCheck {\n package_name: string;\n latest_stable: string;\n latest_beta: string | null;\n}\n\n/** Cached result of a registry check stored in ~/.myco/last-update-check.json */\nexport interface CachedCheck {\n checked_at: string;\n channel: ReleaseChannel;\n packages: Partial<Record<UpdatePackageId, CachedPackageCheck>>;\n}\n\n/** Installed/update status for one globally installed Myco package. 
*/\nexport interface PackageCheckResult {\n id: UpdatePackageId;\n display_name: string;\n package_name: string;\n installed: boolean;\n installed_version: string | null;\n latest_version: string | null;\n latest_stable: string | null;\n latest_beta: string | null;\n update_available: boolean;\n}\n\n/** Result returned to callers of checkForUpdate / statusFromCache */\nexport interface CheckResult {\n update_available: boolean;\n running_version: string;\n latest_version: string;\n latest_stable: string;\n latest_beta: string | null;\n channel: ReleaseChannel;\n check_interval_hours: number;\n last_check: string;\n error: string | null;\n packages: PackageCheckResult[];\n}\n\n// ---------------------------------------------------------------------------\n// Constants\n// ---------------------------------------------------------------------------\n\n/** Fetch timeout for registry requests. */\nconst REGISTRY_FETCH_TIMEOUT_MS = 10_000;\n\n// ---------------------------------------------------------------------------\n// Dev-mode exemption\n// ---------------------------------------------------------------------------\n\n/**\n * Module-level state: the CLI entry path of the running daemon when it's\n * a dev build, or null when it's a proper global install.\n *\n * Set once at daemon startup from `main.ts` after `detectDevBuild()`\n * reports its finding. Read by `isUpdateExempt()` (to skip update checks)\n * and by `resolveMycoBinary()` (to choose the restart target for\n * update/restart shell scripts).\n *\n * Test code can reset this via `setDevBuildCliEntry(null)`.\n */\nlet devBuildCliEntry: string | null = null;\n\n/**\n * Record the daemon's dev-build CLI entry. 
Pass `null` to clear.\n * Called once at daemon startup after `detectDevBuild()` decides whether\n * the running binary is a dev build.\n */\nexport function setDevBuildCliEntry(cliEntry: string | null): void {\n devBuildCliEntry = cliEntry;\n}\n\n/**\n * Returns the recorded dev-build CLI entry, or null when the daemon is\n * running from a proper global install.\n */\nexport function getDevBuildCliEntry(): string | null {\n return devBuildCliEntry;\n}\n\n/**\n * Resolve the myco binary that child-spawned restart/update scripts\n * should invoke to restart the daemon.\n *\n * - Dev mode (dev build CLI entry set): use the literal CLI entry path,\n * so the restart respawns the same dev binary. After an npm update\n * this intentionally keeps running the dev build — dev mode is opaque\n * to global updates, which is the correct semantic.\n * - Prod mode (no dev build CLI entry): fall back to the bare `myco`\n * command, which PATH-resolves to the freshly-updated global install.\n */\nexport function resolveMycoBinary(): string {\n return devBuildCliEntry ?? 
'myco';\n}\n\n/**\n * Returns true when the daemon is running from a dev build — skip\n * update checks and suppress the Operations UI update banner.\n */\nexport function isUpdateExempt(): boolean {\n return devBuildCliEntry !== null;\n}\n\n/**\n * Detects whether the running daemon is a dev build by comparing the CLI\n * entry point's realpath against the npm global prefix's realpath.\n *\n * Returns the CLI entry path when a dev build is detected (so the caller\n * can record it via `setDevBuildCliEntry()`), or null when no dev build\n * applies.\n *\n * A dev build is any binary whose realpath is NOT under the npm global\n * prefix — direct `myco-dev` invocations, `npm link` installs, local\n * `node dist/cli.js` runs, etc.\n *\n * Returns null when:\n * - globalPrefix is null (npm prefix resolution failed; can't verify)\n * - cliEntry is missing\n * - realpath resolution throws\n * - the binary IS under the global prefix (proper install — normal updates)\n *\n * All inputs are passed explicitly (no defaults) so tests can control the\n * environment without inheriting from the enclosing process.\n */\nexport function detectDevBuild(\n globalPrefix: string | null,\n cliEntry: string | undefined,\n realpath: (p: string) => string,\n): string | null {\n if (!globalPrefix) return null;\n if (!cliEntry) return null;\n try {\n const resolvedEntry = realpath(cliEntry);\n const resolvedPrefix = realpath(globalPrefix);\n if (resolvedEntry.startsWith(resolvedPrefix + path.sep) || resolvedEntry === resolvedPrefix) {\n return null;\n }\n return cliEntry;\n } catch {\n return null;\n }\n}\n\n// ---------------------------------------------------------------------------\n// Config helpers\n// ---------------------------------------------------------------------------\n\n/** Default config returned when no update.yaml exists. 
*/\nfunction defaultUpdateConfig(): UpdateConfig {\n return {\n channel: DEFAULT_RELEASE_CHANNEL,\n check_interval_hours: UPDATE_CHECK_INTERVAL_HOURS,\n };\n}\n\n/**\n * Reads ~/.myco/update.yaml. Returns defaults when the file is missing or\n * unparseable.\n */\nexport function readUpdateConfig(): UpdateConfig {\n try {\n const raw = fs.readFileSync(UPDATE_CONFIG_PATH, 'utf-8');\n const parsed = YAML.parse(raw) as Partial<UpdateConfig>;\n\n const channel = RELEASE_CHANNELS.includes(parsed?.channel as ReleaseChannel)\n ? (parsed.channel as ReleaseChannel)\n : DEFAULT_RELEASE_CHANNEL;\n\n const check_interval_hours =\n typeof parsed?.check_interval_hours === 'number' && parsed.check_interval_hours > 0\n ? parsed.check_interval_hours\n : UPDATE_CHECK_INTERVAL_HOURS;\n\n return { channel, check_interval_hours };\n } catch {\n return defaultUpdateConfig();\n }\n}\n\n/**\n * Writes UpdateConfig to ~/.myco/update.yaml. Creates ~/.myco/ if needed.\n */\nexport function writeUpdateConfig(config: UpdateConfig): void {\n fs.mkdirSync(MYCO_GLOBAL_DIR, { recursive: true });\n fs.writeFileSync(UPDATE_CONFIG_PATH, YAML.stringify(config), 'utf-8');\n}\n\n// ---------------------------------------------------------------------------\n// Cache helpers\n// ---------------------------------------------------------------------------\n\n/**\n * Reads ~/.myco/last-update-check.json. 
Returns null when the file is missing\n * or unparseable.\n */\nexport function readCachedCheck(): CachedCheck | null {\n try {\n const raw = fs.readFileSync(UPDATE_CHECK_CACHE_PATH, 'utf-8');\n const parsed = JSON.parse(raw) as CachedCheck | Record<string, unknown>;\n\n if (parsed && typeof parsed === 'object' && 'packages' in parsed && parsed.packages) {\n return parsed as CachedCheck;\n }\n\n const legacy = parsed as {\n checked_at?: string;\n channel?: ReleaseChannel;\n latest_stable?: string;\n latest_beta?: string | null;\n };\n\n if (\n typeof legacy.checked_at === 'string' &&\n typeof legacy.latest_stable === 'string'\n ) {\n return {\n checked_at: legacy.checked_at,\n channel: RELEASE_CHANNELS.includes(legacy.channel as ReleaseChannel)\n ? (legacy.channel as ReleaseChannel)\n : DEFAULT_RELEASE_CHANNEL,\n packages: {\n myco: {\n package_name: NPM_PACKAGE_NAME,\n latest_stable: legacy.latest_stable,\n latest_beta: legacy.latest_beta ?? null,\n },\n },\n };\n }\n\n return null;\n } catch {\n return null;\n }\n}\n\n/**\n * Deletes the cache file. Used when switching channels so the stale cached\n * result is not returned.\n */\nexport function clearCachedCheck(): void {\n try {\n fs.unlinkSync(UPDATE_CHECK_CACHE_PATH);\n } catch {\n // File not present — that's fine.\n }\n}\n\n/**\n * Returns true when the cache is null (never checked) or older than\n * intervalHours.\n */\nexport function isCacheStale(cache: CachedCheck | null, intervalHours: number): boolean {\n if (cache === null) return true;\n\n const checkedAt = new Date(cache.checked_at).getTime();\n if (isNaN(checkedAt)) return true;\n\n const ageMs = Date.now() - checkedAt;\n return ageMs > intervalHours * MS_PER_HOUR;\n}\n\n// ---------------------------------------------------------------------------\n// Error file\n// ---------------------------------------------------------------------------\n\n/**\n * Reads ~/.myco/update-error.json. 
Returns the error string when present, null\n * otherwise.\n */\nexport function readUpdateError(): string | null {\n try {\n const raw = fs.readFileSync(UPDATE_ERROR_PATH, 'utf-8');\n const parsed = JSON.parse(raw) as { error?: string };\n return parsed?.error ?? null;\n } catch {\n return null;\n }\n}\n\n// ---------------------------------------------------------------------------\n// Registry types\n// ---------------------------------------------------------------------------\n\ninterface NpmDistTags {\n latest: string;\n beta?: string;\n [tag: string]: string | undefined;\n}\n\ninterface NpmRegistryResponse {\n 'dist-tags': NpmDistTags;\n}\n\n/** Build the npm registry URL for a specific package. */\nfunction packageRegistryUrl(packageName: string): string {\n return `${NPM_REGISTRY_BASE_URL}/${encodeURIComponent(packageName)}`;\n}\n\n// ---------------------------------------------------------------------------\n// Channel comparison logic\n// ---------------------------------------------------------------------------\n\n/**\n * Returns the target version to compare against based on channel.\n * - Stable: dist-tags.latest\n * - Beta: max(dist-tags.latest, dist-tags.beta) — no-downgrade rule\n */\nfunction resolveTargetVersion(distTags: NpmDistTags, channel: ReleaseChannel): string {\n const stable = distTags.latest;\n const beta = distTags.beta ?? null;\n\n if (channel === 'stable' || beta === null) {\n return stable;\n }\n\n // Beta channel: pick whichever is higher (stable can exceed beta tag)\n const higher = semver.gt(beta, stable) ? beta : stable;\n return higher;\n}\n\nfunction resolveTargetVersionFromCache(\n pkg: CachedPackageCheck,\n channel: ReleaseChannel,\n): string {\n return resolveTargetVersion(\n { latest: pkg.latest_stable, beta: pkg.latest_beta ?? 
undefined },\n channel,\n );\n}\n\nfunction buildInstalledPackageVersions(\n globalPrefix: string | null,\n currentVersion: string,\n): Record<UpdatePackageId, string | null> {\n const installed: Record<UpdatePackageId, string | null> = {\n myco: currentVersion,\n 'myco-team': null,\n 'myco-collective': null,\n };\n\n if (globalPrefix === null) return installed;\n\n for (const pkg of UPDATE_PACKAGES) {\n const version = getInstalledVersion(globalPrefix, pkg.packageName);\n if (pkg.id === 'myco') {\n installed.myco = version ?? currentVersion;\n continue;\n }\n installed[pkg.id] = version;\n }\n\n return installed;\n}\n\nfunction buildPackageResults(\n currentVersion: string,\n cache: CachedCheck,\n channel: ReleaseChannel,\n globalPrefix: string | null,\n): PackageCheckResult[] {\n const installedVersions = buildInstalledPackageVersions(globalPrefix, currentVersion);\n\n return UPDATE_PACKAGES.map((pkg) => {\n const cached = cache.packages[pkg.id];\n const installedVersion = installedVersions[pkg.id];\n const latestVersion = cached ? resolveTargetVersionFromCache(cached, channel) : null;\n const updateAvailable =\n installedVersion !== null &&\n latestVersion !== null &&\n semver.valid(installedVersion) !== null &&\n semver.valid(latestVersion) !== null &&\n semver.gt(latestVersion, installedVersion);\n\n return {\n id: pkg.id,\n display_name: pkg.displayName,\n package_name: pkg.packageName,\n installed: installedVersion !== null,\n installed_version: installedVersion,\n latest_version: latestVersion,\n latest_stable: cached?.latest_stable ?? null,\n latest_beta: cached?.latest_beta ?? 
null,\n update_available: updateAvailable,\n };\n });\n}\n\n// ---------------------------------------------------------------------------\n// CheckResult builder\n// ---------------------------------------------------------------------------\n\nfunction buildCheckResult(\n currentVersion: string,\n cache: CachedCheck,\n config: UpdateConfig,\n error: string | null,\n globalPrefix: string | null,\n): CheckResult {\n const packages = buildPackageResults(currentVersion, cache, cache.channel, globalPrefix);\n const primaryPackage = packages.find((pkg) => pkg.id === 'myco');\n const targetVersion = primaryPackage?.latest_version ?? currentVersion;\n const latestStable = primaryPackage?.latest_stable ?? currentVersion;\n const latestBeta = primaryPackage?.latest_beta ?? null;\n const updateAvailable = packages.some((pkg) => pkg.installed && pkg.update_available);\n\n return {\n update_available: updateAvailable,\n running_version: currentVersion,\n latest_version: targetVersion,\n latest_stable: latestStable,\n latest_beta: latestBeta,\n channel: cache.channel,\n check_interval_hours: config.check_interval_hours,\n last_check: cache.checked_at,\n error,\n packages,\n };\n}\n\n// ---------------------------------------------------------------------------\n// Installed version detection\n// ---------------------------------------------------------------------------\n\n/**\n * Resolves the npm global prefix by running `npm prefix -g`.\n * Returns the trimmed path string. Throws on failure.\n *\n * Uses execFileSync (not execSync) to avoid shell injection — consistent\n * with codebase conventions per src/utils/execFileNoThrow.ts patterns.\n */\nexport function resolveGlobalPrefix(): string {\n return execFileSync('npm', ['prefix', '-g'], { encoding: 'utf-8', timeout: 5_000 }).trim();\n}\n\n/**\n * Reads the version of the globally installed @goondocks/myco package\n * from disk. 
Returns null if the package isn't installed or unreadable.\n *\n * Uses a direct fs.readFileSync of the package.json at the expected\n * npm global path — no module resolution, no cache involvement.\n */\nexport function getInstalledVersion(\n globalPrefix: string,\n packageName = NPM_PACKAGE_NAME,\n): string | null {\n try {\n const pkgPath = path.join(\n globalPrefix, 'lib', 'node_modules', packageName, 'package.json',\n );\n const raw = fs.readFileSync(pkgPath, 'utf-8');\n const pkg = JSON.parse(raw) as { version?: string };\n return pkg.version ?? null;\n } catch {\n return null;\n }\n}\n\n// ---------------------------------------------------------------------------\n// Primary exports\n// ---------------------------------------------------------------------------\n\n/**\n * Fetches the npm registry, compares versions, and writes the result to cache.\n *\n * On network failure, returns the last cached result (with an error field) if\n * one exists. If no cache exists and the fetch fails, the error field is set\n * and update_available is false.\n */\nexport async function checkForUpdate(\n currentVersion: string,\n globalPrefix: string | null = null,\n): Promise<CheckResult> {\n const config = readUpdateConfig();\n const existingCache = readCachedCheck();\n\n const freshPackages: Partial<Record<UpdatePackageId, CachedPackageCheck>> = {};\n const fetchErrors: string[] = [];\n\n const registryChecks = await Promise.allSettled(\n UPDATE_PACKAGES.map(async (pkg) => {\n const response = await fetch(packageRegistryUrl(pkg.packageName), {\n signal: AbortSignal.timeout(REGISTRY_FETCH_TIMEOUT_MS),\n });\n\n if (!response.ok) {\n throw new Error(`${pkg.packageName}: registry responded with ${response.status}`);\n }\n\n const data = (await response.json()) as NpmRegistryResponse;\n return {\n id: pkg.id,\n package_name: pkg.packageName,\n latest_stable: data['dist-tags'].latest,\n latest_beta: data['dist-tags'].beta ?? 
null,\n };\n }),\n );\n\n for (const result of registryChecks) {\n if (result.status === 'fulfilled') {\n freshPackages[result.value.id] = {\n package_name: result.value.package_name,\n latest_stable: result.value.latest_stable,\n latest_beta: result.value.latest_beta,\n };\n continue;\n }\n\n const message = result.reason instanceof Error ? result.reason.message : String(result.reason);\n fetchErrors.push(message);\n }\n\n if (existingCache !== null) {\n for (const pkg of UPDATE_PACKAGES) {\n if (freshPackages[pkg.id] !== undefined) continue;\n const cached = existingCache.packages[pkg.id];\n if (cached) {\n freshPackages[pkg.id] = cached;\n }\n }\n }\n\n if (Object.keys(freshPackages).length === 0) {\n const fetchError = fetchErrors[0] ?? 'registry fetch failed';\n return {\n update_available: false,\n running_version: currentVersion,\n latest_version: currentVersion,\n latest_stable: currentVersion,\n latest_beta: null,\n channel: config.channel,\n check_interval_hours: config.check_interval_hours,\n last_check: new Date().toISOString(),\n error: fetchError,\n packages: buildPackageResults(\n currentVersion,\n { checked_at: new Date().toISOString(), channel: config.channel, packages: {} },\n config.channel,\n globalPrefix,\n ),\n };\n }\n\n const freshCache: CachedCheck = {\n checked_at: new Date().toISOString(),\n channel: config.channel,\n packages: freshPackages,\n };\n\n try {\n fs.mkdirSync(path.dirname(UPDATE_CHECK_CACHE_PATH), { recursive: true });\n fs.writeFileSync(UPDATE_CHECK_CACHE_PATH, JSON.stringify(freshCache, null, 2), 'utf-8');\n } catch {\n // Cache write failure is non-fatal\n }\n\n const error = fetchErrors.length > 0 ? 
fetchErrors.join('; ') : null;\n return buildCheckResult(currentVersion, freshCache, config, error, globalPrefix);\n}\n\n/**\n * Builds a CheckResult from cached data without hitting the registry.\n * Returns null when no cache exists.\n *\n * Accepts optional pre-read `cache` and `config` to avoid redundant file\n * reads when the caller has already loaded them (e.g. for a staleness check).\n */\nexport function statusFromCache(\n currentVersion: string,\n cache?: CachedCheck | null,\n config?: UpdateConfig,\n globalPrefix: string | null = null,\n): CheckResult | null {\n const resolvedCache = cache !== undefined ? cache : readCachedCheck();\n if (resolvedCache === null) return null;\n\n const resolvedConfig = config !== undefined ? config : readUpdateConfig();\n return buildCheckResult(currentVersion, resolvedCache, resolvedConfig, null, globalPrefix);\n}\n","/**\n * Update installer — generates and spawns a detached shell script that installs\n * the npm update and restarts the daemon after the current process exits.\n *\n * The script is written to a temp file with mode 0o755, spawned detached with\n * stdio ignored, and unreffed so the parent process can exit immediately.\n */\n\nimport fs from 'node:fs';\nimport os from 'node:os';\nimport path from 'node:path';\nimport { spawn } from 'node:child_process';\n\nimport {\n MYCO_GLOBAL_DIR,\n UPDATE_ERROR_PATH,\n UPDATE_SCRIPT_DELAY_SECONDS,\n RESTART_REASON_FILENAME,\n} from '../constants/update.js';\n\n// ---------------------------------------------------------------------------\n// Public types\n// ---------------------------------------------------------------------------\n\n/** Parameters required to generate and spawn an update script. */\nexport interface InstallParams {\n /** Fully-qualified npm package specs to install (e.g. [\"@goondocks/myco@0.11.0\"]). */\n packageSpecs: string[];\n /** Absolute path to the project root for `myco update --project`. 
*/\n projectRoot: string;\n /** Absolute path to the vault directory for `myco daemon --vault`. */\n vaultDir: string;\n /**\n * Literal myco binary the script should invoke for the post-install\n * `update --project` step and the final daemon respawn. Baked into the\n * script at generation time — see `resolveMycoBinary()` in update-checker\n * for how the daemon picks it (dev build CLI entry in dev mode, bare\n * `myco` in prod).\n */\n mycoBinary: string;\n}\n\n// ---------------------------------------------------------------------------\n// Script generation\n// ---------------------------------------------------------------------------\n\n/**\n * Generates a POSIX shell script string that:\n * 1. Waits UPDATE_SCRIPT_DELAY_SECONDS for the daemon to exit.\n * 2. Runs `npm install -g <package>@<version>`.\n * 3. On success: runs `myco update --project <projectRoot>` (non-fatal).\n * 4. On success: clears ~/.myco/update-error.json.\n * 5. On failure: writes error JSON to ~/.myco/update-error.json.\n * 6. Always: starts `myco daemon --vault <vaultDir>` in background.\n * 7. Cleans up the script file itself.\n */\nexport function generateUpdateScript(params: InstallParams): string {\n const { packageSpecs, projectRoot, vaultDir, mycoBinary } = params;\n\n // Use JSON.stringify for safe path quoting (handles spaces, special chars).\n const installArgs = packageSpecs.map((spec) => JSON.stringify(spec)).join(' ');\n const quotedProjectRoot = JSON.stringify(projectRoot);\n const quotedVaultDir = JSON.stringify(vaultDir);\n const quotedMycoBinary = JSON.stringify(mycoBinary);\n const quotedErrorPath = JSON.stringify(UPDATE_ERROR_PATH);\n const errorJson = JSON.stringify(\n JSON.stringify({ error: `npm install failed for ${packageSpecs.join(', ')}` }),\n );\n\n // Bake the literal myco binary into the script at generation time. Prod\n // installs get `\"myco\"` (PATH-resolves to the freshly-updated global\n // binary). 
Dev builds get the CLI entry path recorded in update-checker\n // state, so the restart respawns the same dev binary regardless of what\n // the global install looks like after the upgrade.\n return `#!/bin/sh\nset -e\nMYCO=${quotedMycoBinary}\n\n# Wait for daemon to exit cleanly\nsleep ${UPDATE_SCRIPT_DELAY_SECONDS}\n\n# Attempt the update\nif npm install -g ${installArgs} 2>&1; then\n # Sync project files (gitignore, symbiont registration)\n \"$MYCO\" update --project ${quotedProjectRoot} || true\n # Clear any previous error\n rm -f ${quotedErrorPath}\nelse\n # Write error and attempt restart with old version\n echo ${errorJson} > ${quotedErrorPath}\nfi\n\n# Restart daemon (works whether install succeeded or failed)\n\"$MYCO\" daemon --vault ${quotedVaultDir} &\n\n# Clean up this script\nrm -f \"$0\"\n`;\n}\n\n// ---------------------------------------------------------------------------\n// Script spawning\n// ---------------------------------------------------------------------------\n\n/**\n * Writes a script to a temp file, spawns it detached, and unrefs the child\n * so the parent process can exit without waiting.\n */\nfunction spawnDetachedScript(namePrefix: string, content: string): string {\n const scriptPath = path.join(os.tmpdir(), `${namePrefix}-${Date.now()}.sh`);\n fs.writeFileSync(scriptPath, content, { encoding: 'utf-8', mode: 0o755 });\n\n const child = spawn('/bin/sh', [scriptPath], {\n detached: true,\n stdio: 'ignore',\n });\n child.unref();\n\n return scriptPath;\n}\n\n/**\n * Generates and spawns the update script. 
Returns the script path.\n */\nexport function spawnUpdateScript(params: InstallParams): string {\n // Ensure ~/.myco/ exists before writing the error path or checking state.\n fs.mkdirSync(MYCO_GLOBAL_DIR, { recursive: true });\n return spawnDetachedScript('myco-update', generateUpdateScript(params));\n}\n\n// ---------------------------------------------------------------------------\n// Restart script (no npm install — just restart + conditional local update)\n// ---------------------------------------------------------------------------\n\n/** Parameters for a restart-only script (no global npm install). */\nexport interface RestartParams {\n /** Absolute path to the project root for `myco update --project`. */\n projectRoot: string;\n /** Absolute path to the vault directory for `myco daemon --vault`. */\n vaultDir: string;\n /** Whether to run `myco update --project` before restarting. */\n runLocalUpdate: boolean;\n /** The version currently running (baked into the script to avoid shell interpolation). */\n fromVersion: string;\n /** The version that will be running after restart (baked into the script). */\n toVersion: string;\n /**\n * Literal myco binary the script should invoke for the optional\n * `update --project` step and the final daemon respawn. Baked into\n * the script at generation time; see `resolveMycoBinary()` in\n * update-checker for how callers pick it.\n */\n mycoBinary: string;\n}\n\n/**\n * Generates a POSIX shell script that:\n * 1. Waits for the daemon to exit.\n * 2. Optionally runs `myco update --project` when runLocalUpdate is true.\n * 3. Writes restart-reason.json into the vault.\n * 4. Starts `myco daemon --vault` in background.\n * 5. 
Cleans up the script file.\n */\nexport function generateRestartScript(params: RestartParams): string {\n const { projectRoot, vaultDir, runLocalUpdate, fromVersion, toVersion, mycoBinary } = params;\n const quotedProjectRoot = JSON.stringify(projectRoot);\n const quotedVaultDir = JSON.stringify(vaultDir);\n const quotedMycoBinary = JSON.stringify(mycoBinary);\n const reasonFile = JSON.stringify(path.join(vaultDir, RESTART_REASON_FILENAME));\n\n // Bake version strings and reason JSON from Node to avoid shell interpolation\n // in heredocs — prevents JSON corruption from unexpected characters.\n const reasonJson = JSON.stringify(JSON.stringify({\n reason: 'version_sync',\n from_version: fromVersion,\n to_version: toVersion,\n local_update_ran: runLocalUpdate,\n }));\n\n const updateBlock = runLocalUpdate\n ? `\n# Run local project update (hooks, symbionts, gitignore)\n\"$MYCO\" update --project ${quotedProjectRoot} || true`\n : '';\n\n // MYCO is baked as a literal at generation time — see InstallParams\n // docstring for the dev vs prod binary selection rationale.\n return `#!/bin/sh\nset -e\nMYCO=${quotedMycoBinary}\n\n# Wait for daemon to exit cleanly\nsleep ${UPDATE_SCRIPT_DELAY_SECONDS}\n${updateBlock}\n\n# Write restart reason for the new daemon to pick up\necho ${reasonJson} > ${reasonFile}\n\n# Restart daemon\n\"$MYCO\" daemon --vault ${quotedVaultDir} &\n\n# Clean up this script\nrm -f \"$0\"\n`;\n}\n\n/**\n * Generates and spawns the restart script. 
Returns the script path.\n */\nexport function spawnRestartScript(params: RestartParams): string {\n return spawnDetachedScript('myco-restart', generateRestartScript(params));\n}\n","/**\n * Update API handlers — status, manual check, apply, and channel switch.\n *\n * Factory function injects vaultDir, projectRoot, currentVersion, and a\n * scheduleShutdown callback; returns handlers for:\n * GET /api/update/status\n * POST /api/update/check\n * POST /api/update/apply\n * PUT /api/update/channel\n */\n\nimport { z } from 'zod';\n\nimport {\n isUpdateExempt,\n checkForUpdate,\n statusFromCache,\n readCachedCheck,\n readUpdateConfig,\n writeUpdateConfig,\n clearCachedCheck,\n isCacheStale,\n getInstalledVersion,\n resolveMycoBinary,\n} from '../update-checker.js';\nimport { spawnUpdateScript, spawnRestartScript } from '../update-installer.js';\nimport { RELEASE_CHANNELS, UPDATE_STAMP_FILENAME } from '../../constants/update.js';\nimport semver from 'semver';\nimport fs from 'node:fs';\nimport path from 'node:path';\nimport type { RouteRequest, RouteResponse } from '../router.js';\n\n// ---------------------------------------------------------------------------\n// Types\n// ---------------------------------------------------------------------------\n\n/** Dependencies injected by the daemon when registering update routes. */\nexport interface UpdateDeps {\n /** Absolute path to the active vault directory. */\n vaultDir: string;\n /** Absolute path to the project root (used by `myco update --project`). */\n projectRoot: string;\n /** The currently running version (from package.json at startup). */\n currentVersion: string;\n /** Callback that schedules a graceful daemon shutdown after the update script spawns. */\n scheduleShutdown: () => void;\n /** npm global prefix, resolved once at daemon startup. Null if resolution failed. 
*/\n globalPrefix: string | null;\n}\n\n// ---------------------------------------------------------------------------\n// Zod schema\n// ---------------------------------------------------------------------------\n\nconst ChannelBodySchema = z.object({\n channel: z.enum(RELEASE_CHANNELS),\n});\n\n// ---------------------------------------------------------------------------\n// Handler factory\n// ---------------------------------------------------------------------------\n\n/**\n * Create update API handlers with injected dependencies.\n *\n * Returns an object with named handlers for each update endpoint.\n */\nexport function createUpdateHandlers(deps: UpdateDeps) {\n const { vaultDir, projectRoot, currentVersion, scheduleShutdown, globalPrefix } = deps;\n\n /** Prevents multiple restart scripts from racing during the shutdown window. */\n let restartInitiated = false;\n\n /** Returns true when the stamp file matches the current running version. */\n function isStampCurrent(): boolean {\n try {\n const stampPath = path.join(vaultDir, UPDATE_STAMP_FILENAME);\n const stamp = fs.readFileSync(stampPath, 'utf-8').trim();\n return stamp === currentVersion;\n } catch {\n return false;\n }\n }\n\n /**\n * GET /api/update/status — returns cached update state.\n *\n * When the cache is stale, kicks off a background registry check\n * (fire-and-forget) and immediately returns the current cached value.\n */\n async function handleUpdateStatus(_req: RouteRequest): Promise<RouteResponse> {\n if (isUpdateExempt()) {\n return { body: { exempt: true, running_version: currentVersion } };\n }\n\n // --- Installed-version check (short-circuits before registry) ---\n if (globalPrefix && !restartInitiated) {\n const installedVersion = getInstalledVersion(globalPrefix);\n if (\n installedVersion &&\n semver.valid(installedVersion) &&\n semver.valid(currentVersion) &&\n semver.gt(installedVersion, currentVersion)\n ) {\n restartInitiated = true;\n const runLocalUpdate = 
!isStampCurrent();\n spawnRestartScript({\n projectRoot, vaultDir, runLocalUpdate,\n fromVersion: currentVersion,\n toVersion: installedVersion,\n mycoBinary: resolveMycoBinary(),\n });\n scheduleShutdown();\n return {\n body: {\n restarting: true,\n reason: 'version_sync',\n running_version: currentVersion,\n installed_version: installedVersion,\n },\n };\n }\n }\n\n // --- Normal registry check flow (unchanged) ---\n const config = readUpdateConfig();\n const cache = readCachedCheck();\n\n if (isCacheStale(cache, config.check_interval_hours)) {\n // Fire-and-forget — don't block the response on the registry fetch.\n checkForUpdate(currentVersion, globalPrefix).catch(() => {});\n }\n\n // Pass pre-read config and cache to avoid reading the files a second time.\n const status = statusFromCache(currentVersion, cache, config, globalPrefix);\n if (!status) {\n // No cache yet — return minimal response; background check will populate it.\n return {\n body: {\n exempt: false,\n update_available: false,\n running_version: currentVersion,\n latest_version: currentVersion,\n latest_stable: currentVersion,\n latest_beta: null,\n channel: config.channel,\n check_interval_hours: config.check_interval_hours,\n last_check: '',\n error: null,\n },\n };\n }\n return { body: { exempt: false, ...status } };\n }\n\n /**\n * POST /api/update/check — forces an immediate registry check (blocking).\n *\n * Intended for user-initiated \"Check Now\" actions where the caller wants\n * fresh data before rendering.\n */\n async function handleUpdateCheck(_req: RouteRequest): Promise<RouteResponse> {\n if (isUpdateExempt()) {\n return {\n status: 400,\n body: { error: 'update_exempt', message: 'Updates disabled in dev mode' },\n };\n }\n\n const result = await checkForUpdate(currentVersion, globalPrefix);\n return { body: { exempt: false, ...result } };\n }\n\n /**\n * POST /api/update/apply — spawns the update script and schedules shutdown.\n *\n * Returns 400 when no update is available or 
when in dev mode.\n */\n async function handleUpdateApply(_req: RouteRequest): Promise<RouteResponse> {\n if (isUpdateExempt()) {\n return { status: 400, body: { error: 'update_exempt' } };\n }\n\n const status = statusFromCache(currentVersion, undefined, undefined, globalPrefix);\n const packageSpecs = (status?.packages ?? [])\n .filter((pkg) => pkg.installed && pkg.update_available && pkg.latest_version)\n .map((pkg) => `${pkg.package_name}@${pkg.latest_version}`);\n if (!status || packageSpecs.length === 0) {\n return { status: 400, body: { error: 'no_update_available' } };\n }\n\n spawnUpdateScript({\n packageSpecs,\n projectRoot,\n vaultDir,\n mycoBinary: resolveMycoBinary(),\n });\n scheduleShutdown();\n\n return {\n body: {\n status: 'applying',\n version: status.latest_version,\n packages: packageSpecs,\n },\n };\n }\n\n /**\n * PUT /api/update/channel — switches the release channel and clears the cache.\n *\n * Returns 400 when the channel value is not in RELEASE_CHANNELS.\n */\n async function handleUpdateChannel(req: RouteRequest): Promise<RouteResponse> {\n const parsed = ChannelBodySchema.safeParse(req.body);\n if (!parsed.success) {\n return { status: 400, body: { error: 'invalid_channel' } };\n }\n\n const { channel } = parsed.data;\n const config = readUpdateConfig();\n\n writeUpdateConfig({ ...config, channel });\n clearCachedCheck();\n\n const channelStatus = statusFromCache(currentVersion, undefined, undefined, globalPrefix);\n if (!channelStatus) {\n return {\n body: {\n exempt: false,\n update_available: false,\n running_version: currentVersion,\n latest_version: currentVersion,\n latest_stable: currentVersion,\n latest_beta: null,\n channel,\n check_interval_hours: config.check_interval_hours,\n last_check: '',\n error: null,\n },\n };\n }\n return { body: { exempt: false, ...channelStatus } };\n }\n\n return {\n handleUpdateStatus,\n handleUpdateCheck,\n handleUpdateApply,\n handleUpdateChannel,\n };\n}\n","import fs from 'node:fs';\nimport 
path from 'node:path';\nimport { getEnabledSymbiontNames, loadMergedConfig } from '../config/loader.js';\nimport type { MycoConfig } from '../config/schema.js';\nimport { loadManifests, resolvePackageRoot } from './detect.js';\nimport { SymbiontInstaller } from './installer.js';\n\nexport function getConfiguredManifests(projectRoot: string, config: MycoConfig) {\n const allManifests = loadManifests();\n const enabledNames = getEnabledSymbiontNames(config);\n if (enabledNames) {\n return allManifests.filter((manifest) => enabledNames.has(manifest.name));\n }\n\n return allManifests.filter((manifest) => fs.existsSync(path.join(projectRoot, manifest.configDir)));\n}\n\nexport function reconcileConfiguredSymbionts(\n projectRoot: string,\n vaultDir = path.join(projectRoot, '.myco'),\n preloadedConfig?: MycoConfig,\n): number {\n const config = preloadedConfig ?? loadMergedConfig(vaultDir);\n const manifests = getConfiguredManifests(projectRoot, config);\n const packageRoot = resolvePackageRoot();\n let updatedCount = 0;\n\n for (const manifest of manifests) {\n const installer = new SymbiontInstaller(manifest, projectRoot, packageRoot);\n installer.install();\n updatedCount++;\n }\n\n return updatedCount;\n}\n","/**\n * Backup engine — SQL-dump backup and restore for synced vault tables.\n *\n * Produces portable `INSERT OR IGNORE` SQL dumps scoped to a single machine.\n * Restore merges foreign machine data without overwriting local records.\n */\n\nimport fs from 'node:fs';\nimport path from 'node:path';\nimport type { Database } from 'better-sqlite3';\nimport { SYNC_PROTOCOL_VERSION, epochSeconds } from '@myco/constants.js';\n\n// ---------------------------------------------------------------------------\n// Constants\n// ---------------------------------------------------------------------------\n\n/** Tables included in backup dumps (all synced tables). 
*/\nexport const BACKUP_TABLES = [\n 'sessions',\n 'prompt_batches',\n 'spores',\n 'entities',\n 'graph_edges',\n 'entity_mentions',\n 'resolution_events',\n 'plans',\n 'artifacts',\n 'digest_extracts',\n 'team_members',\n] as const;\n\n/** File extension for backup dumps. */\nconst BACKUP_EXTENSION = '.sql';\n\n/**\n * Pattern matching a valid backup filename: `<machine_id>.sql`.\n *\n * Machine IDs follow `{github_user}_{machine_hash}` (see machine-id.ts)\n * which uses alphanumerics, underscore, and hyphen — but never dots.\n * The stem is constrained to `[A-Za-z0-9_-]` so a single literal `.sql`\n * extension is the only dot in the filename.\n *\n * Constraining the stem rejects conflict markers introduced by cloud\n * sync services when the backup directory lives inside a synced folder.\n * These typically insert spaces, parentheses, quotes, or extra dots\n * into filenames — none of which match a valid machine ID.\n */\nconst BACKUP_FILENAME_PATTERN = /^[A-Za-z0-9_-]+\\.sql$/;\n\n/** Header comment template for backup files. */\nconst BACKUP_HEADER_TEMPLATE = '-- Myco backup';\n\n// ---------------------------------------------------------------------------\n// Types\n// ---------------------------------------------------------------------------\n\n/** Metadata for a backup file on disk. */\nexport interface BackupMeta {\n machine_id: string;\n file_name: string;\n size_bytes: number;\n modified_at: string;\n}\n\n/** Per-table counts returned by restore preview/execute. */\nexport interface TableCounts {\n table: string;\n new: number;\n existing: number;\n}\n\n/** Result returned by restoreBackup. 
*/\nexport interface RestoreResult {\n tables: TableCounts[];\n total_restored: number;\n total_skipped: number;\n}\n\n// ---------------------------------------------------------------------------\n// SQL value serialization\n// ---------------------------------------------------------------------------\n\n/**\n * Escape a string value for inclusion in a SQL literal.\n * Doubles single quotes per SQL standard.\n */\nfunction escapeSql(value: string): string {\n return value.replace(/'/g, \"''\");\n}\n\n/**\n * Serialize a JavaScript value into a SQL literal.\n *\n * - null / undefined → NULL\n * - number → numeric literal\n * - Buffer → X'hex'\n * - string → 'escaped string'\n */\nfunction toSqlLiteral(value: unknown): string {\n if (value === null || value === undefined) return 'NULL';\n if (typeof value === 'number') return String(value);\n if (Buffer.isBuffer(value)) return `X'${value.toString('hex')}'`;\n return `'${escapeSql(String(value))}'`;\n}\n\n// ---------------------------------------------------------------------------\n// Backup\n// ---------------------------------------------------------------------------\n\n/**\n * Create a SQL dump backup of all synced tables.\n *\n * Writes `INSERT OR IGNORE` statements for every row in BACKUP_TABLES\n * to `{backupDir}/{machineId}.sql`. 
Idempotent — overwrites any existing\n * backup for the same machine.\n *\n * @returns the absolute path of the created backup file.\n */\nexport function createBackup(\n db: Database,\n backupDir: string,\n machineId: string,\n): string {\n fs.mkdirSync(backupDir, { recursive: true });\n\n const lines: string[] = [];\n const timestamp = epochSeconds();\n\n // Header\n lines.push(`${BACKUP_HEADER_TEMPLATE}: machine_id=${machineId}, created_at=${timestamp}`);\n lines.push(`-- Protocol version: ${SYNC_PROTOCOL_VERSION}`);\n lines.push('');\n\n for (const table of BACKUP_TABLES) {\n const rows = db.prepare(`SELECT * FROM ${table}`).all() as Record<string, unknown>[];\n if (rows.length === 0) continue;\n\n lines.push(`-- Table: ${table} (${rows.length} rows)`);\n\n // Get column names from the first row\n const columns = Object.keys(rows[0]);\n const columnList = columns.map((c) => `\"${c}\"`).join(', ');\n\n for (const row of rows) {\n const values = columns.map((c) => toSqlLiteral(row[c])).join(', ');\n lines.push(`INSERT OR IGNORE INTO ${table} (${columnList}) VALUES (${values});`);\n }\n\n lines.push('');\n }\n\n const filePath = path.join(backupDir, `${machineId}${BACKUP_EXTENSION}`);\n fs.writeFileSync(filePath, lines.join('\\n'), 'utf-8');\n\n return filePath;\n}\n\n// ---------------------------------------------------------------------------\n// List\n// ---------------------------------------------------------------------------\n\n/**\n * Scan the backup directory for `.sql` files and return metadata.\n *\n * Machine ID is derived from the filename (stripping the extension).\n */\nexport function listBackups(backupDir: string): BackupMeta[] {\n let entries: string[];\n try {\n entries = fs.readdirSync(backupDir);\n } catch {\n return [];\n }\n\n const backups: BackupMeta[] = [];\n\n for (const entry of entries) {\n if (!BACKUP_FILENAME_PATTERN.test(entry)) continue;\n\n const filePath = path.join(backupDir, entry);\n const stat = fs.statSync(filePath);\n\n 
backups.push({\n machine_id: entry.slice(0, -BACKUP_EXTENSION.length),\n file_name: entry,\n size_bytes: stat.size,\n modified_at: stat.mtime.toISOString(),\n });\n }\n\n return backups.sort((a, b) => b.modified_at.localeCompare(a.modified_at));\n}\n\n// ---------------------------------------------------------------------------\n// Restore helpers\n// ---------------------------------------------------------------------------\n\n/** Regex matching INSERT OR IGNORE statements generated by createBackup. */\nconst INSERT_REGEX = /^INSERT OR IGNORE INTO (\\w+)\\s+\\(([^)]+)\\)\\s+VALUES\\s+\\((.+)\\);$/;\n\n/** Parsed INSERT statement. */\ninterface ParsedInsert {\n table: string;\n columns: string[];\n valueSql: string;\n}\n\n/**\n * Parse all INSERT statements from a backup file.\n */\nfunction parseBackupFile(backupPath: string): ParsedInsert[] {\n const content = fs.readFileSync(backupPath, 'utf-8');\n const inserts: ParsedInsert[] = [];\n\n for (const line of content.split('\\n')) {\n const match = INSERT_REGEX.exec(line);\n if (!match) continue;\n\n inserts.push({\n table: match[1],\n columns: match[2].split(',').map((c) => c.trim().replace(/\"/g, '')),\n valueSql: match[3],\n });\n }\n\n return inserts;\n}\n\n// ---------------------------------------------------------------------------\n// Restore preview\n// ---------------------------------------------------------------------------\n\n/**\n * Preview what a restore would do without making changes.\n *\n * For each INSERT in the backup, checks if a conflicting row already exists\n * (via INSERT OR IGNORE in a savepoint that gets rolled back).\n *\n * Returns per-table counts of new vs existing records.\n */\nexport function restorePreview(\n db: Database,\n backupPath: string,\n): TableCounts[] {\n const inserts = parseBackupFile(backupPath);\n const counts = new Map<string, { new: number; existing: number }>();\n\n // Defer FK checks — backup may reference rows in non-synced tables\n db.pragma('foreign_keys 
= OFF');\n // Use a savepoint so we can test INSERTs without persisting\n db.exec('SAVEPOINT restore_preview');\n try {\n for (const insert of inserts) {\n if (!counts.has(insert.table)) {\n counts.set(insert.table, { new: 0, existing: 0 });\n }\n const tableCounts = counts.get(insert.table)!;\n\n try {\n const columnList = insert.columns.map((c) => `\"${c}\"`).join(', ');\n const stmt = `INSERT OR IGNORE INTO ${insert.table} (${columnList}) VALUES (${insert.valueSql})`;\n const result = db.prepare(stmt).run();\n\n if (result.changes > 0) {\n tableCounts.new++;\n } else {\n tableCounts.existing++;\n }\n } catch {\n tableCounts.existing++;\n }\n }\n } finally {\n db.exec('ROLLBACK TO restore_preview');\n db.exec('RELEASE restore_preview');\n db.pragma('foreign_keys = ON');\n }\n\n return Array.from(counts.entries()).map(([table, c]) => ({\n table,\n new: c.new,\n existing: c.existing,\n }));\n}\n\n// ---------------------------------------------------------------------------\n// Restore\n// ---------------------------------------------------------------------------\n\n/**\n * Restore a backup by running all INSERTs in a transaction.\n *\n * Uses `INSERT OR IGNORE` — existing records are skipped, new records\n * are inserted. Returns per-table counts.\n */\nexport function restoreBackup(\n db: Database,\n backupPath: string,\n): RestoreResult {\n const inserts = parseBackupFile(backupPath);\n const counts = new Map<string, { new: number; existing: number }>();\n\n // Defer FK checks — backup may reference rows in non-synced tables (e.g. agents)\n // that don't exist yet. 
Re-enable after the transaction.\n db.pragma('foreign_keys = OFF');\n try {\n const runRestore = db.transaction(() => {\n for (const insert of inserts) {\n if (!counts.has(insert.table)) {\n counts.set(insert.table, { new: 0, existing: 0 });\n }\n const tableCounts = counts.get(insert.table)!;\n\n const columnList = insert.columns.map((c) => `\"${c}\"`).join(', ');\n const stmt = `INSERT OR IGNORE INTO ${insert.table} (${columnList}) VALUES (${insert.valueSql})`;\n const result = db.prepare(stmt).run();\n\n if (result.changes > 0) {\n tableCounts.new++;\n } else {\n tableCounts.existing++;\n }\n }\n });\n\n runRestore();\n } finally {\n db.pragma('foreign_keys = ON');\n }\n\n const tables = Array.from(counts.entries()).map(([table, c]) => ({\n table,\n new: c.new,\n existing: c.existing,\n }));\n\n const total_restored = tables.reduce((sum, t) => sum + t.new, 0);\n const total_skipped = tables.reduce((sum, t) => sum + t.existing, 0);\n\n return { tables, total_restored, total_skipped };\n}\n","/**\n * Backup API handlers — create, list, preview, and restore backups.\n *\n * Factory function injects backupDir and machineId; returns handlers\n * for POST /api/backup, GET /api/backups, POST /api/restore/preview,\n * and POST /api/restore.\n */\n\nimport type { Database } from 'better-sqlite3';\nimport type { RouteRequest, RouteResponse } from '../router.js';\nimport type { MycoConfig } from '../../config/schema.js';\nimport {\n createBackup,\n listBackups,\n restorePreview,\n restoreBackup,\n} from '../backup.js';\nimport { loadMergedConfig, updateBackupConfig } from '../../config/loader.js';\nimport os from 'node:os';\nimport path from 'node:path';\n\n// ---------------------------------------------------------------------------\n// Types\n// ---------------------------------------------------------------------------\n\n/** Dependencies injected by the daemon when registering backup routes. 
*/\nexport interface BackupDeps {\n db: Database;\n machineId: string;\n vaultDir: string;\n // Holder so the dir is re-resolved on every request — a user can change\n // `backup.dir` in Settings (either scope) and the next backup writes to\n // the new location without a daemon restart.\n liveConfig: { current: MycoConfig };\n}\n\n/**\n * Resolve the effective backup directory from the current config. The user's\n * configured path may be relative or start with `~/`; absent, it falls back\n * to `<vaultDir>/backups`.\n */\nexport function resolveBackupDir(config: MycoConfig, vaultDir: string): string {\n const rawDir = config.backup.dir;\n if (!rawDir) return path.resolve(vaultDir, 'backups');\n const expanded = rawDir.startsWith('~/')\n ? path.join(os.homedir(), rawDir.slice(2))\n : rawDir;\n return path.resolve(expanded);\n}\n\n// ---------------------------------------------------------------------------\n// Handler factory\n// ---------------------------------------------------------------------------\n\n/**\n * Create backup API handlers with injected dependencies.\n *\n * Returns an object with named handlers for each backup endpoint.\n */\nexport function createBackupHandlers(deps: BackupDeps) {\n const currentBackupDir = () => resolveBackupDir(deps.liveConfig.current, deps.vaultDir);\n\n /** POST /api/backup — create a new backup of all synced tables. */\n async function handleCreateBackup(_req: RouteRequest): Promise<RouteResponse> {\n const backupDir = currentBackupDir();\n const filePath = createBackup(deps.db, backupDir, deps.machineId);\n const backups = listBackups(backupDir);\n const created = backups.find((b) => b.machine_id === deps.machineId);\n\n return {\n body: {\n file_path: filePath,\n machine_id: deps.machineId,\n size_bytes: created?.size_bytes ?? 0,\n },\n };\n }\n\n /** GET /api/backups — list all backup files with metadata. 
*/\n async function handleListBackups(_req: RouteRequest): Promise<RouteResponse> {\n const backups = listBackups(currentBackupDir());\n return { body: { backups } };\n }\n\n /** POST /api/restore/preview — dry-run restore to show new/existing counts. */\n async function handleRestorePreview(req: RouteRequest): Promise<RouteResponse> {\n const { machine_id } = req.body as { machine_id?: string };\n if (!machine_id) {\n return { status: 400, body: { error: 'missing_machine_id' } };\n }\n\n const backupDir = currentBackupDir();\n const backups = listBackups(backupDir);\n const backup = backups.find((b) => b.machine_id === machine_id);\n if (!backup) {\n return { status: 404, body: { error: 'backup_not_found' } };\n }\n\n const backupPath = `${backupDir}/${backup.file_name}`;\n const tables = restorePreview(deps.db, backupPath);\n const total_new = tables.reduce((sum, t) => sum + t.new, 0);\n const total_existing = tables.reduce((sum, t) => sum + t.existing, 0);\n\n return { body: { machine_id, tables, total_new, total_existing } };\n }\n\n /** POST /api/restore — execute restore from a backup file. 
*/\n async function handleRestore(req: RouteRequest): Promise<RouteResponse> {\n const { machine_id } = req.body as { machine_id?: string };\n if (!machine_id) {\n return { status: 400, body: { error: 'missing_machine_id' } };\n }\n\n const backupDir = currentBackupDir();\n const backups = listBackups(backupDir);\n const backup = backups.find((b) => b.machine_id === machine_id);\n if (!backup) {\n return { status: 404, body: { error: 'backup_not_found' } };\n }\n\n const backupPath = `${backupDir}/${backup.file_name}`;\n const result = restoreBackup(deps.db, backupPath);\n\n return { body: { machine_id, ...result } };\n }\n\n return {\n handleCreateBackup,\n handleListBackups,\n handleRestorePreview,\n handleRestore,\n };\n}\n\n// ---------------------------------------------------------------------------\n// Backup config handlers — factory\n// ---------------------------------------------------------------------------\n\nexport interface BackupConfigDeps {\n vaultDir: string;\n}\n\n/**\n * Create handlers for GET/PUT /api/backup/config.\n */\nexport function createBackupConfigHandlers(deps: BackupConfigDeps) {\n const { vaultDir } = deps;\n\n /** GET /api/backup/config — read the configured backup directory (merged). */\n async function handleGetBackupConfig(): Promise<RouteResponse> {\n const cfg = loadMergedConfig(vaultDir);\n return { body: { dir: cfg.backup.dir ?? null, default_dir: path.resolve(vaultDir, 'backups') } };\n }\n\n /** PUT /api/backup/config — update the backup directory setting. 
*/\n async function handlePutBackupConfig(req: RouteRequest): Promise<RouteResponse> {\n const { dir } = req.body as { dir?: string | null };\n updateBackupConfig(vaultDir, { dir: dir || undefined });\n return { body: { dir: dir || null } };\n }\n\n return { handleGetBackupConfig, handlePutBackupConfig };\n}\n","/**\n * Team sync HTTP client.\n *\n * Communicates with the Cloudflare Worker to push outbox records,\n * search team knowledge, and check connection health.\n */\n\nimport type { OutboxRow } from '@myco/db/queries/team-outbox.js';\nimport { TEAM_SEARCH_TIMEOUT_MS, TEAM_HEALTH_TIMEOUT_MS } from '@myco/constants.js';\n\n// ---------------------------------------------------------------------------\n// Types\n// ---------------------------------------------------------------------------\n\nexport interface TeamSyncClientOptions {\n workerUrl: string;\n apiKey: string;\n machineId: string;\n syncProtocolVersion: number;\n /** Inject custom fetch for testing. */\n fetch?: typeof globalThis.fetch;\n}\n\nexport interface TeamSearchOptions {\n limit?: number;\n tables?: string[];\n timeoutMs?: number;\n}\n\nexport interface TeamSearchResult {\n id: string;\n table_name: string;\n content: string;\n score: number;\n machine_id: string;\n metadata?: Record<string, unknown>;\n}\n\nexport interface TeamSearchResponse {\n results: TeamSearchResult[];\n machine_ids: string[];\n}\n\nexport interface TeamHealthResponse {\n status: string;\n node_count: number;\n sync_protocol_version: number;\n package_version?: string;\n schema_version?: number | null;\n mcp_token_hash?: string;\n}\n\nexport interface TeamConnectInfo {\n machine_id: string;\n vault_name?: string;\n agent?: string;\n version?: string;\n}\n\nexport interface TeamConfigResponse {\n config: Record<string, unknown>;\n sync_protocol_version: number;\n mcp_token?: string;\n mcp_endpoint?: string;\n}\n\nexport interface TeamCollectiveStatusResponse {\n connected: boolean;\n collective_url: string | null;\n 
project_id: string | null;\n last_settings_sync: number | null;\n last_heartbeat: number | null;\n capabilities: string[];\n settings: Record<string, unknown>;\n}\n\nexport interface TeamCollectiveSettingsResponse {\n collective_enabled: boolean;\n settings: Record<string, unknown>;\n last_sync: number | null;\n}\n\n// ---------------------------------------------------------------------------\n// Client\n// ---------------------------------------------------------------------------\n\nexport class TeamSyncClient {\n private readonly workerUrl: string;\n private readonly apiKey: string;\n private readonly machineId: string;\n private readonly syncProtocolVersion: number;\n private readonly fetchFn: typeof globalThis.fetch;\n private mcpToken: string | null = null;\n private mcpTokenHash: string | null = null;\n\n constructor(options: TeamSyncClientOptions) {\n this.workerUrl = options.workerUrl.replace(/\\/+$/, '');\n this.apiKey = options.apiKey;\n this.machineId = options.machineId;\n this.syncProtocolVersion = options.syncProtocolVersion;\n this.fetchFn = options.fetch ?? 
globalThis.fetch;\n }\n\n // Must match getMcpTokenHash() in src/worker/src/mcp/auth.ts\n private static hashToken(token: string): string {\n let hash = 0;\n for (let i = 0; i < token.length; i++) {\n hash = ((hash << 5) - hash + token.charCodeAt(i)) | 0;\n }\n return Math.abs(hash).toString(16).padStart(8, '0').slice(0, 8);\n }\n\n /**\n * Register this machine with the team worker.\n */\n async connect(info: TeamConnectInfo): Promise<TeamConfigResponse> {\n const res = await this.request('POST', '/connect', {\n ...info,\n machine_id: this.machineId,\n sync_protocol_version: this.syncProtocolVersion,\n });\n const response = res as TeamConfigResponse;\n if (response.mcp_token) {\n this.mcpToken = response.mcp_token;\n this.mcpTokenHash = TeamSyncClient.hashToken(response.mcp_token);\n }\n return response;\n }\n\n /**\n * Push a batch of outbox records to the team worker.\n *\n * @returns the number of records accepted by the worker.\n */\n async pushBatch(records: OutboxRow[]): Promise<{ synced: number; skipped: number; errors: Array<{ id: string; table: string; error: string }> }> {\n const res = await this.request('POST', '/sync', {\n machine_id: this.machineId,\n sync_protocol_version: this.syncProtocolVersion,\n records: records.map((r) => {\n const data = typeof r.payload === 'string' ? JSON.parse(r.payload) : r.payload;\n return {\n table: r.table_name,\n id: String(r.row_id),\n machine_id: r.machine_id,\n operation: r.operation,\n data,\n content_hash: data.content_hash ?? null,\n };\n }),\n });\n return res as { synced: number; skipped: number; errors: Array<{ id: string; table: string; error: string }> };\n }\n\n /**\n * Search team knowledge across all connected machines.\n *\n * Uses AbortController for timeout enforcement.\n */\n async search(query: string, options: TeamSearchOptions = {}): Promise<TeamSearchResponse> {\n const timeoutMs = options.timeoutMs ?? 
TEAM_SEARCH_TIMEOUT_MS;\n const controller = new AbortController();\n const timer = setTimeout(() => controller.abort(), timeoutMs);\n\n try {\n const params = new URLSearchParams({ q: query });\n if (options.limit) params.set('limit', String(options.limit));\n if (options.tables) params.set('tables', options.tables.join(','));\n\n const res = await this.fetchFn(`${this.workerUrl}/search?${params}`, {\n method: 'GET',\n headers: this.headers(),\n signal: controller.signal,\n });\n\n if (!res.ok) {\n throw new Error(`Team search failed: ${res.status} ${res.statusText}`);\n }\n\n return (await res.json()) as TeamSearchResponse;\n } finally {\n clearTimeout(timer);\n }\n }\n\n /**\n * Check worker health.\n */\n async health(): Promise<TeamHealthResponse> {\n const controller = new AbortController();\n const timer = setTimeout(() => controller.abort(), TEAM_HEALTH_TIMEOUT_MS);\n\n try {\n const res = await this.fetchFn(`${this.workerUrl}/health`, {\n method: 'GET',\n headers: this.headers(),\n signal: controller.signal,\n });\n\n if (!res.ok) {\n throw new Error(`Health check failed: ${res.status} ${res.statusText}`);\n }\n\n const data = (await res.json()) as TeamHealthResponse;\n\n // If the worker reports a different token hash than we have cached,\n // reconnect to fetch the token. This handles three cases:\n // 1. Initial hydration — worker has a token but we haven't fetched it yet\n // (e.g. daemon started before worker upgrade)\n // 2. Token rotation — worker has a new token\n // 3. 
Worker switched — hash differs from any previously known value\n if (data.mcp_token_hash && data.mcp_token_hash !== this.mcpTokenHash) {\n try {\n await this.connect({ machine_id: this.machineId });\n } catch {\n // Non-fatal: token will be picked up on next connect\n }\n }\n\n return data;\n } finally {\n clearTimeout(timer);\n }\n }\n\n /**\n * Get team configuration from the worker.\n */\n async getConfig(): Promise<TeamConfigResponse> {\n const res = await this.request('GET', '/config');\n return res as TeamConfigResponse;\n }\n\n async getCollectiveStatus(): Promise<TeamCollectiveStatusResponse> {\n const res = await this.request('GET', '/collective/status');\n return res as TeamCollectiveStatusResponse;\n }\n\n async getCollectiveSettings(): Promise<TeamCollectiveSettingsResponse> {\n const res = await this.request('GET', '/collective/settings');\n return res as TeamCollectiveSettingsResponse;\n }\n\n async collectiveQuery<T = unknown>(tool: string, args: Record<string, unknown> = {}): Promise<T> {\n const res = await this.request('POST', '/collective/query', { tool, args });\n return res as T;\n }\n\n // ---------------------------------------------------------------------------\n // MCP token accessors\n // ---------------------------------------------------------------------------\n\n getMcpToken(): string | null {\n return this.mcpToken;\n }\n\n getMcpEndpoint(): string | null {\n if (!this.mcpToken) return null;\n return `${this.workerUrl}/mcp`;\n }\n\n async rotateMcpToken(): Promise<string> {\n const result = await this.request('POST', '/mcp/rotate') as { token: string };\n this.mcpToken = result.token;\n this.mcpTokenHash = TeamSyncClient.hashToken(result.token);\n return result.token;\n }\n\n // ---------------------------------------------------------------------------\n // Internal\n // ---------------------------------------------------------------------------\n\n private headers(): Record<string, string> {\n return {\n 'Authorization': `Bearer 
${this.apiKey}`,\n 'Content-Type': 'application/json',\n };\n }\n\n private async request(method: string, path: string, body?: unknown): Promise<unknown> {\n const res = await this.fetchFn(`${this.workerUrl}${path}`, {\n method,\n headers: this.headers(),\n body: body !== undefined ? JSON.stringify(body) : undefined,\n });\n\n if (!res.ok) {\n const text = await res.text().catch(() => '');\n throw new Error(`Team sync request ${method} ${path} failed: ${res.status} ${text}`);\n }\n\n return res.json();\n }\n}\n","/**\n * Team connect/disconnect/status API handlers.\n *\n * Factory pattern: `createTeamHandlers(deps)` returns route handlers that\n * close over the daemon's shared state (vault dir, machine ID, team client).\n */\n\nimport { updateTeamConfig, loadMergedConfig } from '@myco/config/loader.js';\nimport { writeSecret, readSecrets } from '@myco/config/secrets.js';\nimport { countPending, countDeadLettered, backfillUnsynced, retryDeadLettered } from '@myco/db/queries/team-outbox.js';\nimport { readJsonConfig, resolveVaultConfigPath } from '@myco-deploy/index.js';\nimport { getTeamPackageVersion } from '@myco/cli/team.js';\nimport { TeamSyncClient } from '../team-sync.js';\nimport { SYNC_PROTOCOL_VERSION, TEAM_API_KEY_SECRET } from '@myco/constants.js';\nimport { getPluginVersion } from '@myco/version.js';\nimport { SCHEMA_VERSION } from '@myco/db/schema.js';\nimport type { RouteRequest, RouteResponse } from '../router.js';\nimport type { DaemonLogger } from '../logger.js';\n\n// ---------------------------------------------------------------------------\n// Constants\n// ---------------------------------------------------------------------------\n\nconst TEAM_CONFIG_DIR = 'team';\nconst TEAM_CONFIG_FILE = 'config.json';\n\ninterface TeamLocalConfig {\n package_version?: string;\n}\n\nfunction readCachedTeamPackageVersion(vaultDir: string): string | null {\n const config = readJsonConfig<TeamLocalConfig>(resolveVaultConfigPath(vaultDir, TEAM_CONFIG_DIR, 
TEAM_CONFIG_FILE));\n return config?.package_version?.trim() || null;\n}\n\n// ---------------------------------------------------------------------------\n// Types\n// ---------------------------------------------------------------------------\n\nexport interface TeamHandlerDeps {\n vaultDir: string;\n machineId: string;\n logger: DaemonLogger;\n getTeamClient: () => TeamSyncClient | null;\n setTeamClient: (client: TeamSyncClient | null) => void;\n}\n\n// ---------------------------------------------------------------------------\n// Factory\n// ---------------------------------------------------------------------------\n\nexport function createTeamHandlers(deps: TeamHandlerDeps) {\n const { vaultDir, machineId, logger } = deps;\n\n /**\n * POST /api/team/connect\n * Body: { url: string, api_key: string }\n *\n * Creates a TeamSyncClient, tests the connection, saves config + secrets.\n */\n async function handleConnect(req: RouteRequest): Promise<RouteResponse> {\n const { url, api_key } = req.body as { url?: string; api_key?: string };\n\n if (!url || !api_key) {\n return {\n status: 400,\n body: { error: 'missing_fields', message: 'Both url and api_key are required' },\n };\n }\n\n // Validate URL format\n try {\n new URL(url);\n } catch {\n return {\n status: 400,\n body: { error: 'invalid_url', message: 'Invalid worker URL' },\n };\n }\n\n // Create client and test connection\n const client = new TeamSyncClient({\n workerUrl: url,\n apiKey: api_key,\n machineId,\n syncProtocolVersion: SYNC_PROTOCOL_VERSION,\n });\n\n try {\n await client.health();\n } catch (err) {\n return {\n status: 502,\n body: {\n error: 'connection_failed',\n message: `Could not connect to team worker: ${(err as Error).message}`,\n },\n };\n }\n\n // Save config and secret\n updateTeamConfig(vaultDir, {\n enabled: true,\n worker_url: url,\n });\n writeSecret(vaultDir, TEAM_API_KEY_SECRET, api_key);\n\n const config = loadMergedConfig(vaultDir);\n return { body: { connected: true, team: 
config.team } };\n }\n\n /**\n * POST /api/team/disconnect\n *\n * Disables team sync and clears the live client reference.\n */\n async function handleDisconnect(_req: RouteRequest): Promise<RouteResponse> {\n updateTeamConfig(vaultDir, { enabled: false });\n\n return { body: { connected: false } };\n }\n\n /**\n * GET /api/team/status\n *\n * Returns connection status, health check result, pending sync count, and machine_id.\n */\n async function handleStatus(_req: RouteRequest): Promise<RouteResponse> {\n const config = loadMergedConfig(vaultDir);\n const client = deps.getTeamClient();\n const secrets = readSecrets(vaultDir);\n const hasApiKey = Boolean(secrets[TEAM_API_KEY_SECRET]);\n const localTeamPackageVersion = getTeamPackageVersion();\n const cachedTeamPackageVersion = readCachedTeamPackageVersion(vaultDir);\n let deployedWorkerVersion: string | null = null;\n\n let healthy = false;\n let healthError: string | undefined;\n\n if (client && config.team.enabled) {\n try {\n const health = await client.health();\n healthy = true;\n deployedWorkerVersion = health.package_version?.trim() || null;\n } catch (err) {\n healthError = (err as Error).message;\n }\n }\n\n let pendingCount = 0;\n let deadLetterCount = 0;\n try {\n pendingCount = countPending();\n deadLetterCount = countDeadLettered();\n } catch {\n // DB may not have the table yet\n }\n\n let collectiveStatus: Awaited<ReturnType<TeamSyncClient['getCollectiveStatus']>> | null = null;\n if (client && config.team.enabled) {\n try {\n collectiveStatus = await client.getCollectiveStatus();\n } catch {\n collectiveStatus = null;\n }\n }\n\n return {\n body: {\n enabled: config.team.enabled,\n worker_url: config.team.worker_url ?? null,\n has_api_key: hasApiKey,\n api_key: secrets[TEAM_API_KEY_SECRET] ?? 
null,\n healthy,\n health_error: healthError,\n pending_sync_count: pendingCount,\n dead_letter_count: deadLetterCount,\n machine_id: machineId,\n package_version: getPluginVersion(),\n local_team_package_version: localTeamPackageVersion,\n cached_team_package_version: cachedTeamPackageVersion,\n deployed_worker_version: deployedWorkerVersion,\n worker_update_available:\n config.team.enabled &&\n Boolean(localTeamPackageVersion) &&\n Boolean(deployedWorkerVersion) &&\n deployedWorkerVersion !== localTeamPackageVersion,\n collective_connected: collectiveStatus?.connected ?? false,\n collective_url: collectiveStatus?.collective_url ?? null,\n collective_project_id: collectiveStatus?.project_id ?? null,\n collective_last_settings_sync: collectiveStatus?.last_settings_sync ?? null,\n collective_last_heartbeat: collectiveStatus?.last_heartbeat ?? null,\n collective_capabilities: collectiveStatus?.capabilities ?? [],\n collective_settings: collectiveStatus?.settings ?? {},\n schema_version: SCHEMA_VERSION,\n sync_protocol_version: SYNC_PROTOCOL_VERSION,\n mcp_token: client?.getMcpToken() ?? null,\n mcp_endpoint: client?.getMcpEndpoint() ?? null,\n },\n };\n }\n\n /** POST /api/team/backfill — enqueue all unsynced rows to the outbox. */\n async function handleBackfill(_req: RouteRequest): Promise<RouteResponse> {\n const count = backfillUnsynced(machineId);\n return { body: { enqueued: count } };\n }\n\n /** POST /api/team/retry-failed — move dead-lettered outbox rows back to pending. */\n async function handleRetryFailed(_req: RouteRequest): Promise<RouteResponse> {\n const count = retryDeadLettered();\n return { body: { retried: count } };\n }\n\n /** POST /api/team/upgrade-worker — deploy latest worker and reinitialize client. 
*/\n async function handleUpgradeWorker(_req: RouteRequest): Promise<RouteResponse> {\n const { upgradeWorker } = await import('@myco/cli/team.js');\n logger.info('team-sync.upgrade.start', 'Starting worker upgrade');\n const result = upgradeWorker(vaultDir);\n if (!result.success) {\n logger.error('team-sync.upgrade.failed', 'Worker upgrade failed', { error: result.error });\n return { status: 500, body: { error: result.error } };\n }\n logger.info('team-sync.upgrade.complete', 'Worker upgrade complete', {\n worker_url: result.worker_url,\n version: result.version,\n });\n // Reinitialize team client with potentially new URL\n if (result.worker_url && deps.getTeamClient()) {\n const secrets = readSecrets(vaultDir);\n const apiKey = secrets[TEAM_API_KEY_SECRET];\n if (apiKey) {\n deps.setTeamClient(new TeamSyncClient({\n workerUrl: result.worker_url,\n apiKey,\n machineId,\n syncProtocolVersion: SYNC_PROTOCOL_VERSION,\n }));\n }\n }\n return { body: result };\n }\n\n /** POST /api/team/rotate-mcp-token — rotate the MCP bearer token. */\n async function handleRotateMcpToken(_req: RouteRequest): Promise<RouteResponse> {\n const client = deps.getTeamClient();\n if (!client) {\n return {\n status: 400,\n body: { error: 'Team sync not connected' },\n };\n }\n try {\n const token = await client.rotateMcpToken();\n logger.info('team-sync.mcp-token.rotated', 'MCP access token rotated');\n return { body: { token } };\n } catch (err) {\n const message = err instanceof Error ? 
err.message : String(err);\n logger.error('team-sync.mcp-token.rotate-failed', 'MCP token rotation failed', { error: message });\n return {\n status: 500,\n body: { error: message },\n };\n }\n }\n\n return { handleConnect, handleDisconnect, handleStatus, handleBackfill, handleRetryFailed, handleUpgradeWorker, handleRotateMcpToken };\n}\n","import type { TeamSyncClient } from '../team-sync.js';\nimport type { RouteRequest, RouteResponse } from '../router.js';\n\nexport interface CollectiveHandlerDeps {\n getTeamClient: () => TeamSyncClient | null;\n}\n\nexport function createCollectiveHandlers(deps: CollectiveHandlerDeps) {\n async function requireTeamClient(): Promise<TeamSyncClient> {\n const client = deps.getTeamClient();\n if (!client) {\n throw new Error('Team sync is not connected');\n }\n return client;\n }\n\n async function handleStatus(_req: RouteRequest): Promise<RouteResponse> {\n try {\n const client = await requireTeamClient();\n return { body: await client.getCollectiveStatus() };\n } catch (error) {\n return {\n status: 400,\n body: { error: error instanceof Error ? error.message : String(error) },\n };\n }\n }\n\n async function handleSearch(req: RouteRequest): Promise<RouteResponse> {\n try {\n const client = await requireTeamClient();\n const query = req.query.q;\n if (!query) {\n return { status: 400, body: { error: 'Missing q parameter' } };\n }\n const project = req.query.project;\n const limit = req.query.limit ? Number(req.query.limit) : undefined;\n return {\n body: await client.collectiveQuery('collective_search', { query, project, limit }),\n };\n } catch (error) {\n return {\n status: 400,\n body: { error: error instanceof Error ? 
error.message : String(error) },\n };\n }\n }\n\n async function handleProjects(_req: RouteRequest): Promise<RouteResponse> {\n try {\n const client = await requireTeamClient();\n return {\n body: await client.collectiveQuery('collective_projects', {}),\n };\n } catch (error) {\n return {\n status: 400,\n body: { error: error instanceof Error ? error.message : String(error) },\n };\n }\n }\n\n async function handleProject(req: RouteRequest): Promise<RouteResponse> {\n try {\n const client = await requireTeamClient();\n const project = req.query.project;\n if (!project) {\n return { status: 400, body: { error: 'Missing project parameter' } };\n }\n const includeDigest = req.query.include_digest === 'true';\n return {\n body: await client.collectiveQuery('collective_project', { project, include_digest: includeDigest }),\n };\n } catch (error) {\n return {\n status: 400,\n body: { error: error instanceof Error ? error.message : String(error) },\n };\n }\n }\n\n async function handleSettings(_req: RouteRequest): Promise<RouteResponse> {\n try {\n const client = await requireTeamClient();\n return { body: await client.getCollectiveSettings() };\n } catch (error) {\n return {\n status: 400,\n body: { error: error instanceof Error ? 
error.message : String(error) },\n };\n }\n }\n\n return {\n handleStatus,\n handleSearch,\n handleProjects,\n handleProject,\n handleSettings,\n };\n}\n","/**\n * Session register/unregister route handlers.\n *\n * Factory pattern: `createSessionLifecycleHandlers(deps)` returns handlers\n * that close over the daemon's shared state for session management.\n *\n * Route overview:\n * POST /sessions/register — register a new or reloaded session\n * POST /sessions/unregister — unregister a session (authoritative close)\n */\n\nimport { z } from 'zod';\nimport type { RouteResponse } from '../router.js';\nimport type { SessionRegistry } from '../lifecycle.js';\nimport type { DaemonLogger } from '../logger.js';\nimport type { DaemonServer } from '../server.js';\nimport type { PowerManager } from '../power.js';\nimport type { EventBuffer } from '@myco/capture/buffer.js';\nimport type { MycoConfig } from '@myco/config/schema.js';\nimport { cleanStaleBuffers } from '@myco/capture/buffer.js';\nimport { upsertSession, closeSession, updateSession } from '@myco/db/queries/sessions.js';\nimport { notify } from '@myco/notifications/notify.js';\nimport { epochSeconds, STALE_BUFFER_MAX_AGE_MS } from '@myco/constants.js';\nimport { LOG_KINDS } from '@myco/constants/log-kinds.js';\n\n// ---------------------------------------------------------------------------\n// Schemas\n// ---------------------------------------------------------------------------\n\nexport const RegisterBody = z.object({\n session_id: z.string(),\n agent: z.string().optional(),\n branch: z.string().optional(),\n started_at: z.string().optional(),\n});\n\nexport const UnregisterBody = z.object({ session_id: z.string() });\n\n// ---------------------------------------------------------------------------\n// Types\n// ---------------------------------------------------------------------------\n\nexport interface SessionLifecycleDeps {\n registry: SessionRegistry;\n sessionBuffers: Map<string, EventBuffer>;\n 
reconciler: { reconcileSession: (sessionId: string) => void; clearSession: (sessionId: string) => void };\n stopProcessor: { clearSession: (sessionId: string) => void };\n server: DaemonServer;\n powerManager: PowerManager;\n machineId: string;\n logger: DaemonLogger;\n // Holder so notify() consults the current merged config — a user toggling\n // notifications.enabled or a domain's enabled flag sees the next event gate\n // respect the change without a daemon restart.\n liveConfig: { current: MycoConfig };\n vaultDir: string;\n}\n\n// ---------------------------------------------------------------------------\n// Factory\n// ---------------------------------------------------------------------------\n\nexport function createSessionLifecycleHandlers(deps: SessionLifecycleDeps) {\n const {\n registry,\n sessionBuffers,\n reconciler,\n stopProcessor,\n server,\n powerManager,\n machineId,\n logger,\n liveConfig,\n vaultDir,\n } = deps;\n\n /** POST /sessions/register */\n async function handleRegister(req: { body: unknown }): Promise<RouteResponse> {\n powerManager.recordActivity();\n const { session_id, agent, branch, started_at } = RegisterBody.parse(req.body);\n const resolvedStartedAt = started_at ?? new Date().toISOString();\n registry.register(session_id, { started_at: resolvedStartedAt, branch });\n server.updateDaemonJsonSessions(registry.sessions);\n\n // Upsert session in SQLite — always reset to active on register\n const now = epochSeconds();\n const startedEpoch = Math.floor(new Date(resolvedStartedAt).getTime() / 1000);\n upsertSession({\n id: session_id,\n agent: agent ?? 'claude-code',\n user: null,\n project_root: process.cwd(),\n branch: branch ?? 
null,\n started_at: startedEpoch,\n created_at: now,\n status: 'active',\n machine_id: machineId,\n });\n // Clear ended_at if session was previously completed (reload scenario)\n updateSession(session_id, { ended_at: null, status: 'active' });\n\n // Reconcile buffer against DB — recover prompts lost if daemon was down mid-session.\n reconciler.reconcileSession(session_id);\n\n logger.info(LOG_KINDS.LIFECYCLE_REGISTER, 'Session registered', { session_id, branch, started_at: started_at ?? null });\n\n notify(vaultDir, {\n domain: 'sessions',\n type: 'session.started',\n title: 'Session started',\n message: branch ? `Branch: ${branch}` : undefined,\n link: `/sessions/${session_id}`,\n metadata: { sessionId: session_id, agent: agent ?? 'claude-code', branch },\n }, liveConfig.current);\n\n return { body: { ok: true, sessions: registry.sessions } };\n }\n\n /** POST /sessions/unregister */\n async function handleUnregister(req: { body: unknown }): Promise<RouteResponse> {\n const { session_id } = UnregisterBody.parse(req.body);\n registry.unregister(session_id);\n // Opportunistically clean stale buffers for OTHER sessions (>24h).\n // We do NOT delete THIS session's buffer — session reload reuses the same ID.\n const bufferDir = `${vaultDir}/buffer`;\n cleanStaleBuffers(bufferDir, STALE_BUFFER_MAX_AGE_MS, session_id);\n // Close the session in SQLite — this is the authoritative end-of-session.\n // The Stop hook fires per-turn and does NOT close the session.\n closeSession(session_id, epochSeconds());\n\n // Prune in-memory state\n sessionBuffers.delete(session_id);\n stopProcessor.clearSession(session_id);\n reconciler.clearSession(session_id);\n server.updateDaemonJsonSessions(registry.sessions);\n logger.info(LOG_KINDS.LIFECYCLE_UNREGISTER, 'Session unregistered', { session_id });\n\n notify(vaultDir, {\n domain: 'sessions',\n type: 'session.ended',\n title: 'Session ended',\n link: `/sessions/${session_id}`,\n metadata: { sessionId: session_id },\n }, 
liveConfig.current);\n\n return { body: { ok: true, sessions: registry.sessions } };\n }\n\n return { handleRegister, handleUnregister };\n}\n","/**\n * API route handlers for skill lifecycle endpoints.\n *\n * Provides read access to skill candidates and skill records, plus status\n * updates for candidates (the primary lifecycle transition surface).\n *\n * Route overview:\n * GET /api/skill-candidates — list candidates (filterable by status)\n * GET /api/skill-candidates/:id — get a single candidate\n * PUT /api/skill-candidates/:id — update candidate fields (status, etc.)\n * GET /api/skill-records — list promoted skills\n * GET /api/skill-records/:id — get a single skill record with lineage + usage\n */\n\nimport type { RouteRequest, RouteResponse } from '../router.js';\nimport type { DaemonLogger } from '../logger.js';\nimport { epochSeconds, DEFAULT_LIST_LIMIT } from '@myco/constants.js';\nimport { LOG_KINDS } from '@myco/constants/log-kinds.js';\nimport fs from 'node:fs';\nimport path from 'node:path';\nimport {\n listCandidatesWithCount,\n getCandidate,\n updateCandidate,\n deleteCandidate,\n} from '@myco/db/queries/skill-candidates.js';\nimport {\n listSkillRecordsWithCount,\n getSkillRecord,\n getSkillRecordByName,\n deleteSkillRecordCascade,\n} from '@myco/db/queries/skill-records.js';\nimport { listLineageForSkill } from '@myco/db/queries/skill-lineage.js';\nimport { countUsageForSkill } from '@myco/db/queries/skill-usage.js';\nimport { enqueueOutbox } from '@myco/db/queries/team-outbox.js';\nimport { isTeamSyncEnabled, getTeamMachineId } from '@myco/daemon/team-context.js';\nimport { REST_SETTABLE_STATUSES } from '@myco/constants/skill-candidate-status.js';\nimport { parseCsvList } from '@myco/utils/parse-csv-list.js';\n\n// ---------------------------------------------------------------------------\n// Constants\n// ---------------------------------------------------------------------------\n\nconst DEFAULT_LIST_OFFSET = 0;\n\n// 
---------------------------------------------------------------------------\n// Handlers\n// ---------------------------------------------------------------------------\n\n/**\n * List skill candidates with optional status filter and pagination.\n *\n * Query params:\n * - status: exact match, or a comma-separated list for multi-status\n * filtering (e.g. `status=approved,generated`)\n * - limit, offset: standard pagination\n */\nexport async function handleListCandidates(req: RouteRequest): Promise<RouteResponse> {\n const limit = req.query.limit ? Number(req.query.limit) : DEFAULT_LIST_LIMIT;\n const offset = req.query.offset ? Number(req.query.offset) : DEFAULT_LIST_OFFSET;\n\n const { items: candidates, total } = listCandidatesWithCount({\n statuses: parseCsvList(req.query.status),\n limit,\n offset,\n });\n\n return { status: 200, body: { candidates, total } };\n}\n\n/**\n * Get a single skill candidate by id.\n *\n * Returns 404 if not found.\n */\nexport async function handleGetCandidate(req: RouteRequest): Promise<RouteResponse> {\n const candidate = getCandidate(req.params.id);\n if (!candidate) {\n return { status: 404, body: { error: `Not found: ${req.params.id}` } };\n }\n return { status: 200, body: { candidate } };\n}\n\n/**\n * Status values REST callers (UI + MCP) are allowed to set.\n * 'generated' is internal — only vault_finalize_skill sets it, and that\n * path calls updateCandidate directly rather than going through REST.\n */\nconst ALLOWED_REST_STATUSES = new Set<string>(REST_SETTABLE_STATUSES);\n\n/**\n * Update a skill candidate's fields (typically used to advance its status).\n *\n * Automatically sets updated_at to the current epoch seconds.\n * Returns 400 if no body or if the status value is not in ALLOWED_REST_STATUSES,\n * 404 if candidate not found.\n */\nexport async function handleUpdateCandidate(req: RouteRequest): Promise<RouteResponse> {\n const id = req.params.id;\n const body = req.body as Record<string, unknown> | 
undefined;\n if (!body) return { status: 400, body: { error: 'Request body required' } };\n\n // Pick only allowed mutable fields — reject arbitrary body fields\n const { status, topic, rationale, confidence, source_ids, skill_id } = body as Record<string, unknown>;\n\n // Status whitelist guard — defense in depth against a compromised or\n // misconfigured MCP client reaching this endpoint with an internal\n // status. The agent-facing vault_skill_candidates tool also narrows\n // its Zod enum to the same set.\n if (status !== undefined) {\n if (typeof status !== 'string' || !ALLOWED_REST_STATUSES.has(status)) {\n return {\n status: 400,\n body: {\n error:\n `Invalid status '${String(status)}'. REST callers may only set: ` +\n `${[...ALLOWED_REST_STATUSES].join(', ')}. The 'generated' status ` +\n \"is set internally by vault_finalize_skill after validation.\",\n },\n };\n }\n }\n\n const updated = updateCandidate(id, {\n ...(status !== undefined ? { status: status as string } : {}),\n ...(topic !== undefined ? { topic: topic as string } : {}),\n ...(rationale !== undefined ? { rationale: rationale as string } : {}),\n ...(confidence !== undefined ? { confidence: confidence as number } : {}),\n ...(source_ids !== undefined ? { source_ids: source_ids as string } : {}),\n ...(skill_id !== undefined ? { skill_id: skill_id as string | null } : {}),\n updated_at: epochSeconds(),\n });\n\n if (!updated) return { status: 404, body: { error: `Candidate not found: ${id}` } };\n return { status: 200, body: { candidate: updated } };\n}\n\n/**\n * List skill records with optional status filter and pagination.\n *\n * Query params: status, limit, offset\n */\nexport async function handleListSkillRecords(req: RouteRequest): Promise<RouteResponse> {\n const status = req.query.status || undefined;\n const limit = req.query.limit ? Number(req.query.limit) : DEFAULT_LIST_LIMIT;\n const offset = req.query.offset ? 
Number(req.query.offset) : DEFAULT_LIST_OFFSET;\n\n const { items: records, total } = listSkillRecordsWithCount({ status, limit, offset });\n\n return { status: 200, body: { records, total } };\n}\n\n/**\n * Get a single skill record by id or name, including its lineage history and\n * total usage count.\n *\n * Tries id first, then falls back to name lookup. Returns 404 if not found.\n */\nexport async function handleGetSkillRecord(req: RouteRequest): Promise<RouteResponse> {\n const idOrName = req.params.id;\n\n const record = getSkillRecord(idOrName) ?? getSkillRecordByName(idOrName);\n\n if (!record) {\n return { status: 404, body: { error: `Not found: ${idOrName}` } };\n }\n\n const lineage = listLineageForSkill(record.id);\n const usage_total = countUsageForSkill(record.id);\n\n // Parse frontmatter from latest lineage snapshot so the UI avoids client-side regex\n const latestSnapshot = lineage[0]?.content_snapshot;\n const frontmatterFields: Record<string, string> = {};\n if (latestSnapshot) {\n const fmMatch = latestSnapshot.match(/^---\\n([\\s\\S]*?)\\n---/);\n if (fmMatch) {\n for (const line of fmMatch[1].split('\\n')) {\n const colonIdx = line.indexOf(':');\n if (colonIdx > 0) {\n const key = line.slice(0, colonIdx).trim();\n const val = line.slice(colonIdx + 1).trim();\n if (key && val) frontmatterFields[key] = val;\n }\n }\n }\n }\n\n return { status: 200, body: { ...record, lineage, usage_total, frontmatter: frontmatterFields } };\n}\n\n/**\n * Delete a skill candidate by id.\n */\nexport async function handleDeleteCandidate(req: RouteRequest): Promise<RouteResponse> {\n const id = req.params.id;\n const deleted = deleteCandidate(id);\n if (!deleted) return { status: 404, body: { error: `Not found: ${id}` } };\n\n return { status: 200, body: { deleted: true, id } };\n}\n\n/**\n * Delete a skill record by id or name, including lineage and usage data.\n */\nexport async function handleDeleteSkillRecord(req: RouteRequest): Promise<RouteResponse> {\n 
const idOrName = req.params.id;\n const result = deleteSkillRecordCascade(idOrName);\n if (!result) return { status: 404, body: { error: `Not found: ${idOrName}` } };\n\n // Sync deletion to team outbox (best-effort)\n if (isTeamSyncEnabled()) {\n try {\n enqueueOutbox({\n table_name: 'skill_records',\n row_id: result.id,\n operation: 'delete',\n payload: JSON.stringify({ id: result.id, name: result.name }),\n machine_id: getTeamMachineId(),\n created_at: epochSeconds(),\n });\n } catch (err) {\n // Best-effort sync — log for diagnosability\n console.warn('[team-sync] Failed to enqueue skill record deletion:', err instanceof Error ? err.message : err);\n }\n }\n\n return { status: 200, body: { deleted: true, id: result.id, name: result.name } };\n}\n\n// ---------------------------------------------------------------------------\n// Skill record delete with disk cleanup — factory\n// ---------------------------------------------------------------------------\n\nexport interface SkillDeleteDeps {\n vaultDir: string;\n logger: DaemonLogger;\n}\n\n/**\n * Creates a DELETE /api/skill-records/:id handler that wraps\n * `handleDeleteSkillRecord` with post-deletion file/symlink cleanup.\n */\nexport function createSkillRecordDeleteHandler(deps: SkillDeleteDeps) {\n const { vaultDir, logger } = deps;\n\n return async function handleDeleteSkillRecordWithCleanup(req: RouteRequest): Promise<RouteResponse> {\n const result = await handleDeleteSkillRecord(req);\n // Delete skill file and symlinks from disk if the DB delete succeeded\n if ((result.body as Record<string, unknown>)?.deleted) {\n const record = result.body as { name?: string };\n if (record.name) {\n const projectRoot = path.resolve(vaultDir, '..');\n const skillDir = path.resolve(projectRoot, '.agents', 'skills', record.name);\n try { fs.rmSync(skillDir, { recursive: true, force: true }); } catch (err) {\n logger.warn(LOG_KINDS.PROCESSOR_BATCH, 'Failed to remove skill directory', { name: record.name, error: 
String(err) });\n }\n // Remove agent-specific symlinks (e.g., .claude/skills/<name>)\n try {\n const { syncSkillSymlinks } = await import('@myco/symbionts/installer.js');\n syncSkillSymlinks(projectRoot, record.name, { remove: true });\n } catch (err) {\n logger.warn(LOG_KINDS.PROCESSOR_BATCH, 'Failed to remove skill symlinks', { name: record.name, error: String(err) });\n }\n }\n }\n return result;\n };\n}\n","/**\n * Skill usage query helpers.\n *\n * Usage is append-only — each row records a detected use of a skill within\n * a session. No update path exists.\n *\n * All functions obtain the SQLite instance internally via `getDatabase()`.\n * Queries use positional `?` placeholders throughout (better-sqlite3).\n */\n\nimport { getDatabase } from '@myco/db/client.js';\nimport { DEFAULT_LIST_LIMIT } from '@myco/constants.js';\nimport { getTeamMachineId } from '@myco/daemon/team-context.js';\n// skill_usage has no synced_at column — does not participate in team sync.\n\n\n// ---------------------------------------------------------------------------\n// Types\n// ---------------------------------------------------------------------------\n\n/** Fields required (or optional) when inserting a skill usage entry. */\nexport interface SkillUsageInsert {\n id: string;\n skill_id: string;\n session_id: string;\n machine_id?: string;\n detected_at: number;\n}\n\n/** Row shape returned from skill usage queries (all columns). */\nexport interface SkillUsageRow {\n id: string;\n skill_id: string;\n session_id: string;\n machine_id: string;\n detected_at: number;\n}\n\n/** Filter options for `listUsageForSkill`. 
*/\nexport interface ListUsageOptions {\n limit?: number;\n}\n\n// ---------------------------------------------------------------------------\n// Column list\n// ---------------------------------------------------------------------------\n\nexport const USAGE_COLUMNS = [\n 'id',\n 'skill_id',\n 'session_id',\n 'machine_id',\n 'detected_at',\n] as const;\n\nconst SELECT_COLUMNS = USAGE_COLUMNS.join(', ');\n\n// ---------------------------------------------------------------------------\n// Helpers\n// ---------------------------------------------------------------------------\n\n/** Normalize a SQLite result row into a typed SkillUsageRow. */\nfunction toUsageRow(row: Record<string, unknown>): SkillUsageRow {\n return {\n id: row.id as string,\n skill_id: row.skill_id as string,\n session_id: row.session_id as string,\n machine_id: (row.machine_id as string) ?? getTeamMachineId(),\n detected_at: row.detected_at as number,\n };\n}\n\n// ---------------------------------------------------------------------------\n// Public API\n// ---------------------------------------------------------------------------\n\n/**\n * Insert a new skill usage entry.\n *\n * Requires a valid `skill_id` (FK to skill_records) and `session_id` (FK to sessions).\n */\nexport function insertSkillUsage(data: SkillUsageInsert): SkillUsageRow {\n const db = getDatabase();\n\n db.prepare(\n `INSERT INTO skill_usage (\n id, skill_id, session_id, machine_id, detected_at\n ) VALUES (\n ?, ?, ?, ?, ?\n )`,\n ).run(\n data.id,\n data.skill_id,\n data.session_id,\n data.machine_id ?? 
getTeamMachineId(),\n data.detected_at,\n );\n\n const row = toUsageRow(\n db.prepare(`SELECT ${SELECT_COLUMNS} FROM skill_usage WHERE id = ?`).get(data.id) as Record<string, unknown>,\n );\n\n // Note: skill_usage has no synced_at column, so skip syncRow for now.\n // Usage data is derived/local — does not need team sync.\n\n return row;\n}\n\n/**\n * List usage entries for a skill, ordered by detected_at DESC.\n */\nexport function listUsageForSkill(\n skillId: string,\n options: ListUsageOptions = {},\n): SkillUsageRow[] {\n const db = getDatabase();\n const limit = options.limit ?? DEFAULT_LIST_LIMIT;\n\n const rows = db.prepare(\n `SELECT ${SELECT_COLUMNS}\n FROM skill_usage\n WHERE skill_id = ?\n ORDER BY detected_at DESC\n LIMIT ?`,\n ).all(skillId, limit) as Record<string, unknown>[];\n\n return rows.map(toUsageRow);\n}\n\n/**\n * Check whether a usage entry exists for a specific skill and session.\n *\n * Used for idempotency checks in detectSkillUsage — avoids loading all\n * usage rows for a skill just to scan for one session match.\n */\nexport function hasUsageForSkillAndSession(skillId: string, sessionId: string): boolean {\n const db = getDatabase();\n const row = db.prepare(\n `SELECT 1 FROM skill_usage WHERE skill_id = ? AND session_id = ? 
LIMIT 1`,\n ).get(skillId, sessionId);\n return row !== undefined;\n}\n\n/**\n * Count total usage events for a skill.\n */\nexport function countUsageForSkill(skillId: string): number {\n const db = getDatabase();\n\n const row = db.prepare(\n `SELECT COUNT(*) as count FROM skill_usage WHERE skill_id = ?`,\n ).get(skillId) as { count: number };\n\n return row.count;\n}\n","/**\n * Team sync initialization.\n *\n * Extracted from main.ts — creates the TeamSyncClient from saved config,\n * registers the node, backfills unsynced records, and exposes the outbox\n * flush power job.\n */\n\nimport type { DaemonLogger } from './logger.js';\nimport type { MycoConfig } from '@myco/config/schema.js';\nimport type { PowerManager } from './power.js';\nimport { TeamSyncClient } from './team-sync.js';\nimport { readSecrets } from '@myco/config/secrets.js';\nimport {\n listPending,\n markSent,\n markSourceRowsSynced,\n pruneOld,\n backfillUnsynced,\n incrementRetryCount,\n countPending,\n} from '@myco/db/queries/team-outbox.js';\nimport {\n SYNC_PROTOCOL_VERSION,\n TEAM_API_KEY_SECRET,\n epochSeconds,\n} from '@myco/constants.js';\nimport { LOG_KINDS } from '@myco/constants/log-kinds.js';\n\n// ---------------------------------------------------------------------------\n// Types\n// ---------------------------------------------------------------------------\n\nexport interface TeamSyncDeps {\n // Holder so the flush job and client reconciliation both read the current\n // value of team settings and can hot-reload team sync without a daemon\n // restart.\n liveConfig: { current: MycoConfig };\n machineId: string;\n logger: DaemonLogger;\n vaultDir: string;\n serverVersion: string;\n}\n\nexport interface TeamSyncResult {\n getTeamClient: () => TeamSyncClient | null;\n setTeamClient: (client: TeamSyncClient | null) => void;\n reconcileClient: () => Promise<void>;\n registerFlushJob: (powerManager: PowerManager) => void;\n}\n\n// 
---------------------------------------------------------------------------\n// Initialization\n// ---------------------------------------------------------------------------\n\nexport function initTeamSync(deps: TeamSyncDeps): TeamSyncResult {\n const { liveConfig, machineId, logger, vaultDir, serverVersion } = deps;\n let teamClient: TeamSyncClient | null = null;\n let clientSignature: string | null = null;\n\n async function reconcileClient(): Promise<void> {\n const config = liveConfig.current;\n const workerUrl = config.team.worker_url?.trim() || null;\n const apiKey = readSecrets(vaultDir)[TEAM_API_KEY_SECRET]?.trim() || null;\n const nextSignature = config.team.enabled && workerUrl && apiKey\n ? `${workerUrl}\\n${apiKey}`\n : null;\n\n if (!nextSignature) {\n if (teamClient) {\n logger.info(LOG_KINDS.TEAM_SYNC_START, 'Team sync client cleared', {\n enabled: config.team.enabled,\n has_worker_url: Boolean(workerUrl),\n has_api_key: Boolean(apiKey),\n });\n }\n teamClient = null;\n clientSignature = null;\n return;\n }\n\n if (teamClient && clientSignature === nextSignature) return;\n\n const activeWorkerUrl = workerUrl!;\n const activeApiKey = apiKey!;\n teamClient = new TeamSyncClient({\n workerUrl: activeWorkerUrl,\n apiKey: activeApiKey,\n machineId,\n syncProtocolVersion: SYNC_PROTOCOL_VERSION,\n });\n clientSignature = nextSignature;\n\n logger.info(LOG_KINDS.TEAM_SYNC_START, 'Team sync client initialized', { worker_url: activeWorkerUrl });\n\n try {\n await teamClient.connect({\n machine_id: machineId,\n version: serverVersion,\n });\n logger.info(LOG_KINDS.TEAM_SYNC_START, 'Node registered with team worker');\n } catch (err) {\n logger.warn(LOG_KINDS.TEAM_SYNC_ERROR, 'Node registration failed (will retry on next flush)', {\n error: (err as Error).message,\n });\n }\n\n try {\n const backfilled = backfillUnsynced(machineId);\n if (backfilled > 0) {\n logger.info(LOG_KINDS.TEAM_SYNC_START, `Backfilled ${backfilled} unsynced records into outbox`);\n }\n } 
catch (err) {\n logger.error(LOG_KINDS.TEAM_SYNC_ERROR, 'Backfill failed', { error: (err as Error).message });\n }\n }\n\n return {\n getTeamClient: () => teamClient,\n setTeamClient: (client) => { teamClient = client; },\n reconcileClient,\n registerFlushJob: (powerManager) => {\n // Always register the flush job; gate at run time so toggling\n // team.enabled in Settings takes effect without a daemon restart.\n const logDeadLettered = (ids: number[]) => {\n if (ids.length > 0) {\n logger.error(LOG_KINDS.TEAM_SYNC_DEAD_LETTER, `Dead-lettered ${ids.length} records after max retries`, { ids });\n }\n };\n\n powerManager.register({\n name: 'team-sync-flush',\n runIn: ['active', 'idle', 'sleep'],\n preventsDeepSleep: () => liveConfig.current.team.enabled && countPending() > 0,\n fn: async () => {\n if (!liveConfig.current.team.enabled) return;\n const client = teamClient;\n if (!client) return;\n\n const pending = listPending();\n if (pending.length === 0) return;\n\n try {\n logger.info(LOG_KINDS.TEAM_SYNC_START, 'Flushing outbox', { count: pending.length });\n const result = await client.pushBatch(pending);\n const now = epochSeconds();\n\n // Mark successfully synced records as sent\n const failedIds = new Set(result.errors.map((e) => e.id));\n const sentRecords = pending.filter((r) => !failedIds.has(String(r.row_id)));\n const sentIds = sentRecords.map((r) => r.id);\n if (sentIds.length > 0) {\n markSent(sentIds, now);\n markSourceRowsSynced(sentRecords, now);\n }\n\n // Increment retry count on per-record failures\n if (result.errors.length > 0) {\n const failedOutboxIds = pending\n .filter((r) => failedIds.has(String(r.row_id)))\n .map((r) => r.id);\n const deadLettered = incrementRetryCount(failedOutboxIds, now);\n\n logger.warn(LOG_KINDS.TEAM_SYNC_RETRY, `Retrying ${failedOutboxIds.length} records`, {\n errors: result.errors.slice(0, 5),\n });\n\n logDeadLettered(deadLettered);\n }\n\n pruneOld();\n logger.info(LOG_KINDS.TEAM_SYNC_COMPLETE, 'Outbox flush 
complete', {\n synced: result.synced, skipped: result.skipped, errors: result.errors.length, total: pending.length,\n });\n } catch (err) {\n // Batch-level failure: increment retry count on all records\n try {\n const now = epochSeconds();\n const allIds = pending.map((r) => r.id);\n const deadLettered = incrementRetryCount(allIds, now);\n\n logger.warn(LOG_KINDS.TEAM_SYNC_RETRY, `Batch failed, retrying ${allIds.length} records`, {\n error: (err as Error).message,\n });\n\n logDeadLettered(deadLettered);\n } catch { /* best-effort retry tracking */ }\n logger.error(LOG_KINDS.TEAM_SYNC_ERROR, 'Outbox flush failed', { error: (err as Error).message });\n }\n },\n });\n },\n };\n}\n","import { randomUUID } from 'node:crypto';\nimport type { RouteResponse } from '../router.js';\n\n/** Maximum number of concurrently tracked operations. */\nconst MAX_CONCURRENT_OPERATIONS = 10;\n\n/** Time-to-live for completed/failed entries before cleanup (ms). */\nconst PROGRESS_TTL_MS = 5 * 60 * 1000;\n\nexport type ProgressStatus = 'running' | 'completed' | 'failed';\n\nexport interface ProgressEntry {\n token: string;\n type: string;\n status: ProgressStatus;\n percent?: number;\n message?: string;\n created: number;\n updated: number;\n}\n\nexport class ProgressTracker {\n private entries = new Map<string, ProgressEntry>();\n\n /**\n * Create a new tracked operation. 
Returns the existing token if an\n * operation of the same type is already running (duplicate prevention).\n * Throws if the maximum concurrent operations limit is reached.\n */\n /**\n * Create a new tracked operation or return existing one.\n * Returns `{ token, isNew }` — if `isNew` is false, the operation\n * was already running and the caller should NOT launch it again.\n * Throws if the maximum concurrent operations limit is reached.\n */\n create(type: string): { token: string; isNew: boolean } {\n // Lazy cleanup of stale completed/failed entries before checking limits\n this.cleanup();\n\n // Duplicate prevention: if an operation of the same type is already running, return its token\n for (const entry of this.entries.values()) {\n if (entry.type === type && entry.status === 'running') {\n return { token: entry.token, isNew: false };\n }\n }\n\n // Enforce concurrency limit (count only running entries)\n const runningCount = [...this.entries.values()].filter((e) => e.status === 'running').length;\n if (runningCount >= MAX_CONCURRENT_OPERATIONS) {\n throw new Error(`Maximum concurrent operations reached (${MAX_CONCURRENT_OPERATIONS})`);\n }\n\n const token = randomUUID();\n const now = Date.now();\n this.entries.set(token, {\n token,\n type,\n status: 'running',\n created: now,\n updated: now,\n });\n return { token, isNew: true };\n }\n\n /**\n * Update progress for a tracked operation.\n */\n update(token: string, data: { percent?: number; message?: string; status?: ProgressStatus }): void {\n const entry = this.entries.get(token);\n if (!entry) return;\n\n if (data.percent !== undefined) entry.percent = data.percent;\n if (data.message !== undefined) entry.message = data.message;\n if (data.status !== undefined) entry.status = data.status;\n entry.updated = Date.now();\n }\n\n /**\n * Get the current state of a tracked operation.\n */\n get(token: string): ProgressEntry | undefined {\n return this.entries.get(token);\n }\n\n /**\n * Check whether any 
operations are currently running.\n */\n hasActiveOperations(): boolean {\n for (const entry of this.entries.values()) {\n if (entry.status === 'running') return true;\n }\n return false;\n }\n\n /**\n * Remove completed/failed entries older than PROGRESS_TTL_MS.\n */\n cleanup(): void {\n const cutoff = Date.now() - PROGRESS_TTL_MS;\n for (const [token, entry] of this.entries) {\n if (entry.status !== 'running' && entry.updated < cutoff) {\n this.entries.delete(token);\n }\n }\n }\n}\n\nexport async function handleGetProgress(\n tracker: ProgressTracker,\n token: string,\n): Promise<RouteResponse> {\n const entry = tracker.get(token);\n if (!entry) {\n return { status: 404, body: { error: 'not_found', message: 'Progress token not found' } };\n }\n return { body: entry };\n}\n","import { OllamaBackend } from '../../intelligence/ollama.js';\nimport { LmStudioBackend } from '../../intelligence/lm-studio.js';\nimport type { RouteRequest, RouteResponse } from '../router.js';\n\nconst MODEL_LIST_TIMEOUT_MS = 5000;\n\n/** Well-known Anthropic models — no list API available locally.\n * Sonnet is first because it's the recommended default for all built-in\n * tasks, and the UI auto-selects the first model when a provider is picked. */\nexport const ANTHROPIC_MODELS = [\n 'claude-sonnet-4-6',\n 'claude-opus-4-6',\n 'claude-haiku-4-5-20251001',\n];\n\n/** Patterns that indicate an embedding model (case-insensitive). */\nconst EMBEDDING_PATTERNS = [\n 'embed', 'bge-', 'nomic-embed', 'e5-', 'gte-', 'granite-embedding',\n];\n\n/** Filter models to only include embedding models. */\nfunction filterEmbeddingModels(models: string[]): string[] {\n return models.filter((m) => {\n const name = m.toLowerCase();\n return EMBEDDING_PATTERNS.some((p) => name.includes(p));\n });\n}\n\n/** Filter models to exclude embedding models (LLM-only). 
*/\nexport function filterLlmModels(models: string[]): string[] {\n return models.filter((m) => {\n const name = m.toLowerCase();\n return !EMBEDDING_PATTERNS.some((p) => name.includes(p));\n });\n}\n\nexport async function handleGetModels(req: RouteRequest): Promise<RouteResponse> {\n const provider = req.query.provider;\n const type = req.query.type; // 'llm' | 'embedding' | undefined (all)\n\n if (!provider) {\n return { status: 400, body: { error: 'provider query parameter required' } };\n }\n\n let models: string[] = [];\n\n try {\n if (provider === 'ollama') {\n const backend = new OllamaBackend({ base_url: req.query.base_url });\n models = await backend.listModels(MODEL_LIST_TIMEOUT_MS);\n } else if (provider === 'lm-studio' || provider === 'openai-compatible') {\n const backend = new LmStudioBackend({ base_url: req.query.base_url });\n models = await backend.listModels(MODEL_LIST_TIMEOUT_MS);\n } else if (provider === 'anthropic') {\n models = ANTHROPIC_MODELS;\n }\n } catch {\n // Provider unreachable — return empty list\n }\n\n // Filter by type if requested\n if (type === 'embedding') {\n models = filterEmbeddingModels(models);\n } else if (type === 'llm') {\n models = filterLlmModels(models);\n }\n\n return { body: { provider, models } };\n}\n","import { createHash } from 'node:crypto';\nimport fs from 'node:fs';\nimport path from 'node:path';\nimport { CONFIG_FILENAME } from '../../config/loader.js';\nimport { gatherStats } from '@myco/services/stats.js';\nimport type { RouteHandler, RouteResponse } from '../router.js';\n\n/** Compute config hash from the YAML file on disk. Cache this at startup and after saves. 
*/\nexport function computeConfigHash(vaultDir: string): string {\n try {\n const configPath = path.join(vaultDir, CONFIG_FILENAME);\n const raw = fs.readFileSync(configPath, 'utf-8');\n return createHash('md5').update(raw).digest('hex');\n } catch {\n return '';\n }\n}\n\n// ---------------------------------------------------------------------------\n// Live stats factory\n// ---------------------------------------------------------------------------\n\nexport interface LiveStatsDeps {\n vaultDir: string;\n registry: { sessions: string[] };\n server: { port: number; version: string };\n configHash: { get(): string };\n}\n\nexport function createLiveStatsHandler(deps: LiveStatsDeps): RouteHandler {\n return async (): Promise<RouteResponse> => {\n const stats = gatherStats(deps.vaultDir, { active_sessions: deps.registry.sessions });\n // Overlay live daemon fields from the running process (more accurate than daemon.json)\n stats.daemon.pid = process.pid;\n stats.daemon.port = deps.server.port;\n stats.daemon.version = deps.server.version;\n stats.daemon.uptime_seconds = Math.floor(process.uptime());\n return { body: { ...stats, config_hash: deps.configHash.get() } };\n };\n}\n","/**\n * Activity insert/query helpers.\n *\n * All functions obtain the SQLite instance internally via `getDatabase()`.\n * Queries use positional `?` placeholders throughout (better-sqlite3).\n */\n\nimport { getDatabase } from '@myco/db/client.js';\n\n// ---------------------------------------------------------------------------\n// Constants\n// ---------------------------------------------------------------------------\n\n/** Default number of activities returned by listActivities when no limit given. */\nconst DEFAULT_LIST_LIMIT = 100;\n\n/** Default success flag for new activities. */\nconst DEFAULT_SUCCESS = 1;\n\n/** Default processed flag for new activities. 
*/\nconst DEFAULT_PROCESSED = 0;\n\n// ---------------------------------------------------------------------------\n// Types\n// ---------------------------------------------------------------------------\n\n/** Fields required (or optional) when inserting an activity. */\nexport interface ActivityInsert {\n session_id: string;\n tool_name: string;\n timestamp: number;\n created_at: number;\n prompt_batch_id?: number | null;\n tool_input?: string | null;\n tool_output_summary?: string | null;\n file_path?: string | null;\n files_affected?: string | null;\n duration_ms?: number | null;\n success?: number;\n error_message?: string | null;\n processed?: number;\n content_hash?: string | null;\n}\n\n/** Row shape returned from activity queries. */\nexport interface ActivityRow {\n id: number;\n session_id: string;\n prompt_batch_id: number | null;\n tool_name: string;\n tool_input: string | null;\n tool_output_summary: string | null;\n file_path: string | null;\n files_affected: string | null;\n duration_ms: number | null;\n success: number;\n error_message: string | null;\n timestamp: number;\n processed: number;\n content_hash: string | null;\n created_at: number;\n}\n\n/** Filter options for `listActivities`. 
*/\nexport interface ListActivitiesOptions {\n session_id?: string;\n prompt_batch_id?: number;\n limit?: number;\n}\n\n// ---------------------------------------------------------------------------\n// Column list\n// ---------------------------------------------------------------------------\n\nconst ACTIVITY_COLUMNS = [\n 'id',\n 'session_id',\n 'prompt_batch_id',\n 'tool_name',\n 'tool_input',\n 'tool_output_summary',\n 'file_path',\n 'files_affected',\n 'duration_ms',\n 'success',\n 'error_message',\n 'timestamp',\n 'processed',\n 'content_hash',\n 'created_at',\n] as const;\n\nconst SELECT_COLUMNS = ACTIVITY_COLUMNS.join(', ');\n\n// ---------------------------------------------------------------------------\n// Helpers\n// ---------------------------------------------------------------------------\n\n/** Normalize a SQLite result row into a typed ActivityRow. */\nfunction toActivityRow(row: Record<string, unknown>): ActivityRow {\n return {\n id: row.id as number,\n session_id: row.session_id as string,\n prompt_batch_id: (row.prompt_batch_id as number) ?? null,\n tool_name: row.tool_name as string,\n tool_input: (row.tool_input as string) ?? null,\n tool_output_summary: (row.tool_output_summary as string) ?? null,\n file_path: (row.file_path as string) ?? null,\n files_affected: (row.files_affected as string) ?? null,\n duration_ms: (row.duration_ms as number) ?? null,\n success: row.success as number,\n error_message: (row.error_message as string) ?? null,\n timestamp: row.timestamp as number,\n processed: row.processed as number,\n content_hash: (row.content_hash as string) ?? 
null,\n created_at: row.created_at as number,\n };\n}\n\n// ---------------------------------------------------------------------------\n// Public API\n// ---------------------------------------------------------------------------\n\n/**\n * Insert a new activity row.\n *\n * The `id` is auto-generated by the INTEGER PRIMARY KEY (AUTOINCREMENT).\n * FTS5 index is kept in sync via a follow-up INSERT into activities_fts.\n */\nexport function insertActivity(data: ActivityInsert): ActivityRow {\n const db = getDatabase();\n\n const info = db.prepare(\n `INSERT INTO activities (\n session_id, prompt_batch_id, tool_name, tool_input,\n tool_output_summary, file_path, files_affected, duration_ms,\n success, error_message, timestamp, processed,\n content_hash, created_at\n ) VALUES (\n ?, ?, ?, ?,\n ?, ?, ?, ?,\n ?, ?, ?, ?,\n ?, ?\n )`,\n ).run(\n data.session_id,\n data.prompt_batch_id ?? null,\n data.tool_name,\n data.tool_input ?? null,\n data.tool_output_summary ?? null,\n data.file_path ?? null,\n data.files_affected ?? null,\n data.duration_ms ?? null,\n data.success ?? DEFAULT_SUCCESS,\n data.error_message ?? null,\n data.timestamp,\n data.processed ?? DEFAULT_PROCESSED,\n data.content_hash ?? null,\n data.created_at,\n );\n\n const activityId = Number(info.lastInsertRowid);\n\n // FTS5 sync\n const toolName = data.tool_name;\n const toolInput = data.tool_input ?? null;\n const filePath = data.file_path ?? null;\n if (toolName || toolInput || filePath) {\n db.prepare(\n 'INSERT INTO activities_fts(rowid, tool_name, tool_input, file_path) VALUES (?, ?, ?, ?)',\n ).run(activityId, toolName ?? '', toolInput ?? '', filePath ?? '');\n }\n\n return toActivityRow(\n db.prepare(`SELECT ${SELECT_COLUMNS} FROM activities WHERE id = ?`).get(activityId) as Record<string, unknown>,\n );\n}\n\n/** Fields required when inserting an activity with inline batch linkage. 
*/\nexport interface StatelessActivityInsert {\n session_id: string;\n tool_name: string;\n timestamp: number;\n created_at: number;\n tool_input?: string | null;\n tool_output_summary?: string | null;\n file_path?: string | null;\n files_affected?: string | null;\n duration_ms?: number | null;\n success?: number;\n error_message?: string | null;\n content_hash?: string | null;\n}\n\n/**\n * Insert an activity with batch linkage resolved via inline subquery.\n *\n * The `prompt_batch_id` is set to the latest open batch for the session\n * (i.e., `ended_at IS NULL`, ordered by `id DESC`). If no open batch exists,\n * `prompt_batch_id` will be NULL. The caller never needs a separate SELECT.\n *\n * FTS5 index is kept in sync via a follow-up INSERT into activities_fts.\n */\nexport function insertActivityWithBatch(\n data: StatelessActivityInsert,\n): ActivityRow {\n const db = getDatabase();\n\n const info = db.prepare(\n `INSERT INTO activities (\n session_id, prompt_batch_id, tool_name, tool_input,\n tool_output_summary, file_path, files_affected, duration_ms,\n success, error_message, timestamp, processed,\n content_hash, created_at\n ) VALUES (\n ?,\n (SELECT id FROM prompt_batches WHERE session_id = ? AND ended_at IS NULL ORDER BY id DESC LIMIT 1),\n ?, ?,\n ?, ?, ?, ?,\n ?, ?, ?, ?,\n ?, ?\n )`,\n ).run(\n data.session_id,\n data.session_id,\n data.tool_name,\n data.tool_input ?? null,\n data.tool_output_summary ?? null,\n data.file_path ?? null,\n data.files_affected ?? null,\n data.duration_ms ?? null,\n data.success ?? DEFAULT_SUCCESS,\n data.error_message ?? null,\n data.timestamp,\n DEFAULT_PROCESSED,\n data.content_hash ?? null,\n data.created_at,\n );\n\n const activityId = Number(info.lastInsertRowid);\n\n // FTS5 sync\n const toolName = data.tool_name;\n const toolInput = data.tool_input ?? null;\n const filePath = data.file_path ?? 
null;\n if (toolName || toolInput || filePath) {\n db.prepare(\n 'INSERT INTO activities_fts(rowid, tool_name, tool_input, file_path) VALUES (?, ?, ?, ?)',\n ).run(activityId, toolName ?? '', toolInput ?? '', filePath ?? '');\n }\n\n return toActivityRow(\n db.prepare(`SELECT ${SELECT_COLUMNS} FROM activities WHERE id = ?`).get(activityId) as Record<string, unknown>,\n );\n}\n\n/**\n * List activities with optional filters, ordered by timestamp ASC.\n *\n * At least one of `session_id` or `prompt_batch_id` should be provided\n * to avoid unbounded queries.\n */\nexport function listActivities(\n options: ListActivitiesOptions = {},\n): ActivityRow[] {\n const db = getDatabase();\n\n const conditions: string[] = [];\n const params: unknown[] = [];\n\n if (options.session_id !== undefined) {\n conditions.push(`session_id = ?`);\n params.push(options.session_id);\n }\n\n if (options.prompt_batch_id !== undefined) {\n conditions.push(`prompt_batch_id = ?`);\n params.push(options.prompt_batch_id);\n }\n\n const where = conditions.length > 0 ? `WHERE ${conditions.join(' AND ')}` : '';\n const limit = options.limit ?? 
DEFAULT_LIST_LIMIT;\n\n params.push(limit);\n\n const rows = db.prepare(\n `SELECT ${SELECT_COLUMNS}\n FROM activities\n ${where}\n ORDER BY timestamp ASC\n LIMIT ?`,\n ).all(...params) as Record<string, unknown>[];\n\n return rows.map(toActivityRow);\n}\n\n/**\n * List all activities for a specific batch, ordered by timestamp ASC.\n */\nexport function listActivitiesByBatch(\n batchId: number,\n): ActivityRow[] {\n const db = getDatabase();\n\n const rows = db.prepare(\n `SELECT ${SELECT_COLUMNS}\n FROM activities\n WHERE prompt_batch_id = ?\n ORDER BY timestamp ASC`,\n ).all(batchId) as Record<string, unknown>[];\n\n return rows.map(toActivityRow);\n}\n\n/**\n * Count total activities for a given session.\n */\nexport function countActivities(sessionId: string): number {\n const db = getDatabase();\n\n const row = db.prepare(\n `SELECT COUNT(*) AS count FROM activities WHERE session_id = ?`,\n ).get(sessionId) as Record<string, unknown>;\n\n return row.count as number;\n}\n","/**\n * Attachment CRUD query helpers.\n *\n * All functions obtain the SQLite instance internally via `getDatabase()`.\n * Queries use positional `?` placeholders throughout (better-sqlite3).\n */\n\nimport { getDatabase } from '@myco/db/client.js';\n\n// ---------------------------------------------------------------------------\n// Types\n// ---------------------------------------------------------------------------\n\n/** Fields required (or optional) when inserting an attachment. */\nexport interface AttachmentInsert {\n id: string;\n session_id: string;\n prompt_batch_id?: number;\n file_path: string;\n media_type?: string;\n description?: string;\n data?: Buffer;\n content_hash?: string;\n created_at: number;\n}\n\n/** Row shape returned from attachment queries (all columns, including BLOB). 
*/\nexport interface AttachmentRow {\n id: string;\n session_id: string;\n prompt_batch_id: number | null;\n file_path: string;\n media_type: string | null;\n description: string | null;\n data: Buffer | null;\n content_hash: string | null;\n created_at: number;\n}\n\n/**\n * Row shape returned by list queries — excludes the `data` BLOB column.\n * Use this type when you only need metadata (e.g. listing attachments for a session).\n * The full row (including binary data) is only fetched by getAttachmentByFilePath.\n */\nexport type AttachmentListRow = Omit<AttachmentRow, 'data'>;\n\n// ---------------------------------------------------------------------------\n// Column lists\n// ---------------------------------------------------------------------------\n\nconst ATTACHMENT_COLUMNS = [\n 'id',\n 'session_id',\n 'prompt_batch_id',\n 'file_path',\n 'media_type',\n 'description',\n 'data',\n 'content_hash',\n 'created_at',\n] as const;\n\n/** Column list that omits the `data` BLOB — used by list queries to avoid loading megabytes of binary data. */\nconst ATTACHMENT_LIST_COLUMNS = [\n 'id',\n 'session_id',\n 'prompt_batch_id',\n 'file_path',\n 'media_type',\n 'description',\n 'content_hash',\n 'created_at',\n] as const;\n\nconst SELECT_COLUMNS = ATTACHMENT_COLUMNS.join(', ');\nconst SELECT_LIST_COLUMNS = ATTACHMENT_LIST_COLUMNS.join(', ');\n\n// ---------------------------------------------------------------------------\n// Helpers\n// ---------------------------------------------------------------------------\n\n/** Normalize shared metadata fields from a SQLite result row. */\nfunction toAttachmentBase(row: Record<string, unknown>): AttachmentListRow {\n return {\n id: row.id as string,\n session_id: row.session_id as string,\n prompt_batch_id: (row.prompt_batch_id as number) ?? null,\n file_path: row.file_path as string,\n media_type: (row.media_type as string) ?? null,\n description: (row.description as string) ?? 
null,\n content_hash: (row.content_hash as string) ?? null,\n created_at: row.created_at as number,\n };\n}\n\n/** Normalize a SQLite result row into a typed AttachmentRow (includes BLOB). */\nfunction toAttachmentRow(row: Record<string, unknown>): AttachmentRow {\n return { ...toAttachmentBase(row), data: (row.data as Buffer) ?? null };\n}\n\n/** Normalize a SQLite result row into a typed AttachmentListRow (no BLOB). */\nfunction toAttachmentListRow(row: Record<string, unknown>): AttachmentListRow {\n return toAttachmentBase(row);\n}\n\n// ---------------------------------------------------------------------------\n// Public API\n// ---------------------------------------------------------------------------\n\n/**\n * Insert an attachment record.\n *\n * Idempotent — ON CONFLICT (id) DO NOTHING means a second insert with the\n * same id silently succeeds without duplicating the row.\n *\n * @returns the inserted row, or undefined if the id already existed.\n */\nexport function insertAttachment(data: AttachmentInsert): AttachmentRow | undefined {\n const db = getDatabase();\n\n const info = db.prepare(\n `INSERT INTO attachments (${SELECT_COLUMNS})\n VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)\n ON CONFLICT (id) DO NOTHING`,\n ).run(\n data.id,\n data.session_id,\n data.prompt_batch_id ?? null,\n data.file_path,\n data.media_type ?? null,\n data.description ?? null,\n data.data ?? null,\n data.content_hash ?? null,\n data.created_at,\n );\n\n if (info.changes === 0) return undefined;\n\n return toAttachmentRow(\n db.prepare(`SELECT ${SELECT_COLUMNS} FROM attachments WHERE id = ?`).get(data.id) as Record<string, unknown>,\n );\n}\n\n/**\n * List all attachments for a given session, ordered by created_at ASC.\n *\n * The `data` BLOB column is intentionally excluded — use getAttachmentByFilePath\n * when you need the binary content (e.g. 
for the serving route).\n *\n * @returns array of attachment metadata rows (empty array if none exist).\n */\nexport function listAttachmentsBySession(sessionId: string): AttachmentListRow[] {\n const db = getDatabase();\n\n const rows = db.prepare(\n `SELECT ${SELECT_LIST_COLUMNS} FROM attachments WHERE session_id = ? ORDER BY created_at ASC`,\n ).all(sessionId) as Record<string, unknown>[];\n\n return rows.map(toAttachmentListRow);\n}\n\n/**\n * Find an attachment by its file_path.\n *\n * @returns the first matching attachment row, or null if none exists.\n */\nexport function getAttachmentByFilePath(filePath: string): AttachmentRow | null {\n const db = getDatabase();\n\n const row = db.prepare(\n `SELECT ${SELECT_COLUMNS} FROM attachments WHERE file_path = ? LIMIT 1`,\n ).get(filePath) as Record<string, unknown> | undefined;\n\n return row ? toAttachmentRow(row) : null;\n}\n","/**\n * Plan CRUD query helpers.\n *\n * All functions obtain the SQLite instance internally via `getDatabase()`.\n * Queries use positional `?` placeholders throughout (better-sqlite3).\n */\n\nimport { getDatabase } from '@myco/db/client.js';\nimport { getTeamMachineId } from '@myco/daemon/team-context.js';\nimport { syncRow } from '@myco/db/queries/team-outbox.js';\n\n// ---------------------------------------------------------------------------\n// Constants\n// ---------------------------------------------------------------------------\n\n/** Default number of plans returned by listPlans when no limit given. */\nconst DEFAULT_LIST_LIMIT = 100;\n\n/** Default plan status for new plans. */\nconst DEFAULT_STATUS = 'active';\n\n/** Default processed flag for new plans. */\nconst DEFAULT_PROCESSED = 0;\n\n// ---------------------------------------------------------------------------\n// Types\n// ---------------------------------------------------------------------------\n\n/** Fields required (or optional) when inserting/upserting a plan. 
*/\nexport interface PlanInsert {\n id: string;\n created_at: number;\n status?: string;\n author?: string | null;\n title?: string | null;\n content?: string | null;\n source_path?: string | null;\n tags?: string | null;\n session_id?: string | null;\n prompt_batch_id?: number | null;\n content_hash?: string | null;\n processed?: number;\n updated_at?: number | null;\n machine_id?: string;\n}\n\n/** Row shape returned from plan queries. */\nexport interface PlanRow {\n id: string;\n status: string;\n author: string | null;\n title: string | null;\n content: string | null;\n source_path: string | null;\n tags: string | null;\n session_id: string | null;\n prompt_batch_id: number | null;\n content_hash: string | null;\n processed: number;\n embedded: number;\n created_at: number;\n updated_at: number | null;\n machine_id: string;\n synced_at: number | null;\n}\n\n/** Filter options for `listPlans`. */\nexport interface ListPlansOptions {\n status?: string;\n limit?: number;\n}\n\n// ---------------------------------------------------------------------------\n// Column list\n// ---------------------------------------------------------------------------\n\nconst PLAN_COLUMNS = [\n 'id',\n 'status',\n 'author',\n 'title',\n 'content',\n 'source_path',\n 'tags',\n 'session_id',\n 'prompt_batch_id',\n 'content_hash',\n 'processed',\n 'embedded',\n 'created_at',\n 'updated_at',\n 'machine_id',\n 'synced_at',\n] as const;\n\nconst SELECT_COLUMNS = PLAN_COLUMNS.join(', ');\n\n// ---------------------------------------------------------------------------\n// Helpers\n// ---------------------------------------------------------------------------\n\n/** Normalize a SQLite result row into a typed PlanRow. */\nfunction toPlanRow(row: Record<string, unknown>): PlanRow {\n return {\n id: row.id as string,\n status: row.status as string,\n author: (row.author as string) ?? null,\n title: (row.title as string) ?? null,\n content: (row.content as string) ?? 
null,\n source_path: (row.source_path as string) ?? null,\n tags: (row.tags as string) ?? null,\n session_id: (row.session_id as string) ?? null,\n prompt_batch_id: (row.prompt_batch_id as number) ?? null,\n content_hash: (row.content_hash as string) ?? null,\n processed: row.processed as number,\n embedded: (row.embedded as number) ?? 0,\n created_at: row.created_at as number,\n updated_at: (row.updated_at as number) ?? null,\n machine_id: (row.machine_id as string) ?? 'local',\n synced_at: (row.synced_at as number) ?? null,\n };\n}\n\n// ---------------------------------------------------------------------------\n// Public API\n// ---------------------------------------------------------------------------\n\n/**\n * Insert a plan or update it if the id already exists.\n *\n * On conflict the row is updated with the values from `data`.\n */\nexport function upsertPlan(data: PlanInsert): PlanRow {\n const db = getDatabase();\n\n db.prepare(\n `INSERT INTO plans (\n id, status, author, title, content,\n source_path, tags, session_id, prompt_batch_id, content_hash,\n processed, created_at, updated_at, machine_id\n ) VALUES (\n ?, ?, ?, ?, ?,\n ?, ?, ?, ?, ?,\n ?, ?, ?, ?\n )\n ON CONFLICT (id) DO UPDATE SET\n status = EXCLUDED.status,\n author = EXCLUDED.author,\n title = EXCLUDED.title,\n content = EXCLUDED.content,\n source_path = EXCLUDED.source_path,\n tags = EXCLUDED.tags,\n session_id = EXCLUDED.session_id,\n prompt_batch_id = EXCLUDED.prompt_batch_id,\n content_hash = EXCLUDED.content_hash,\n processed = EXCLUDED.processed,\n updated_at = EXCLUDED.updated_at,\n embedded = CASE\n WHEN EXCLUDED.content_hash != plans.content_hash THEN 0\n ELSE plans.embedded\n END`,\n ).run(\n data.id,\n data.status ?? DEFAULT_STATUS,\n data.author ?? null,\n data.title ?? null,\n data.content ?? null,\n data.source_path ?? null,\n data.tags ?? null,\n data.session_id ?? null,\n data.prompt_batch_id ?? null,\n data.content_hash ?? null,\n data.processed ?? 
DEFAULT_PROCESSED,\n data.created_at,\n data.updated_at ?? null,\n data.machine_id ?? getTeamMachineId(),\n );\n\n const row = toPlanRow(\n db.prepare(`SELECT ${SELECT_COLUMNS} FROM plans WHERE id = ?`).get(data.id) as Record<string, unknown>,\n );\n\n syncRow('plans', row);\n\n return row;\n}\n\n/**\n * Retrieve a single plan by id.\n *\n * @returns the plan row, or null if not found.\n */\nexport function getPlan(id: string): PlanRow | null {\n const db = getDatabase();\n\n const row = db.prepare(\n `SELECT ${SELECT_COLUMNS} FROM plans WHERE id = ?`,\n ).get(id) as Record<string, unknown> | undefined;\n\n if (!row) return null;\n return toPlanRow(row);\n}\n\n/**\n * List plans with optional filters, ordered by created_at DESC.\n */\nexport function listPlans(\n options: ListPlansOptions = {},\n): PlanRow[] {\n const db = getDatabase();\n\n const conditions: string[] = [];\n const params: unknown[] = [];\n\n if (options.status !== undefined) {\n conditions.push(`status = ?`);\n params.push(options.status);\n }\n\n const where = conditions.length > 0 ? `WHERE ${conditions.join(' AND ')}` : '';\n const limit = options.limit ?? 
DEFAULT_LIST_LIMIT;\n\n params.push(limit);\n\n const rows = db.prepare(\n `SELECT ${SELECT_COLUMNS}\n FROM plans\n ${where}\n ORDER BY created_at DESC\n LIMIT ?`,\n ).all(...params) as Record<string, unknown>[];\n\n return rows.map(toPlanRow);\n}\n\n/**\n * List all plans associated with a specific session, ordered by created_at DESC.\n */\nexport function listPlansBySession(sessionId: string): PlanRow[] {\n const db = getDatabase();\n\n const rows = db.prepare(\n `SELECT ${SELECT_COLUMNS}\n FROM plans\n WHERE session_id = ?\n ORDER BY created_at DESC`,\n ).all(sessionId) as Record<string, unknown>[];\n\n return rows.map(toPlanRow);\n}\n","/**\n * Post-transaction cleanup after a session cascade delete.\n *\n * Shared by the DELETE /api/sessions/:id route and the session-maintenance job\n * to ensure both code paths perform identical cleanup (embedding vectors,\n * vault markdown files, and attachment files on disk).\n */\n\nimport { unlink, glob } from 'node:fs/promises';\nimport type { DeleteCascadeResult } from '../../db/queries/sessions.js';\nimport type { EmbeddingManager } from '../embedding/manager.js';\n\n/**\n * Remove embedding vectors and vault files for a cascade-deleted session.\n *\n * All operations are best-effort — partial failures are swallowed so that\n * one missing file does not block cleanup of the rest.\n */\nexport async function cleanupAfterSessionCascade(\n sessionId: string,\n result: DeleteCascadeResult,\n embeddingManager: EmbeddingManager,\n vaultDir: string,\n): Promise<void> {\n // Embedding vectors\n try { embeddingManager.onRemoved('sessions', sessionId); } catch { /* best-effort */ }\n for (const sporeId of result.deletedSporeIds) {\n try { embeddingManager.onRemoved('spores', sporeId); } catch { /* best-effort */ }\n }\n\n // Session markdown\n try {\n for await (const f of glob(`sessions/**/session-${sessionId}.md`, { cwd: vaultDir })) {\n await unlink(`${vaultDir}/${f}`).catch(() => {});\n }\n } catch { /* best-effort */ }\n\n 
// Spore markdown files\n for (const sporeId of result.deletedSporeIds) {\n try {\n for await (const f of glob(`spores/**/${sporeId}*.md`, { cwd: vaultDir })) {\n await unlink(`${vaultDir}/${f}`).catch(() => {});\n }\n } catch { /* best-effort */ }\n }\n\n // Attachment files on disk\n for (const filePath of result.deletedAttachmentPaths) {\n try { await unlink(filePath); } catch { /* best-effort */ }\n }\n}\n","/**\n * Fire-and-forget trigger for the title-summary agent task.\n *\n * Shared between the Stop-hook pipeline (per-session activity) and the\n * manual \"Complete Session\" API (user-initiated regenerate). Both paths\n * need the same config gates and the same dynamic-import guard against a\n * missing agent module; sharing avoids drift when either concern changes.\n */\n\nimport type { EmbeddingManager } from './embedding/manager.js';\nimport type { DaemonLogger } from './logger.js';\nimport type { MycoConfig } from '@myco/config/schema.js';\nimport { LOG_KINDS } from '@myco/constants/log-kinds.js';\n\nexport interface TriggerTitleSummaryDeps {\n vaultDir: string;\n embeddingManager: EmbeddingManager;\n // Holder rather than snapshot so the gates below observe toggle flips\n // (agent.event_tasks_enabled, agent.summary_batch_interval) from Settings\n // without a daemon restart.\n liveConfig: { current: MycoConfig };\n logger: DaemonLogger;\n}\n\n/**\n * Trigger `title-summary` for one session.\n *\n * Returns without scheduling a run when:\n * - `agent.summary_batch_interval <= 0` (summaries disabled entirely), or\n * - `agent.event_tasks_enabled === false` (event-driven tasks globally off), or\n * - the agent module can't be loaded.\n *\n * Rejections from the executor surface via `logger.warn` — the task's own\n * per-task concurrency guard handles overlap with in-flight runs.\n */\nexport async function triggerTitleSummary(\n sessionId: string,\n deps: TriggerTitleSummaryDeps,\n): Promise<void> {\n const { vaultDir, embeddingManager, liveConfig, 
logger } = deps;\n const config = liveConfig.current;\n\n if (config.agent.summary_batch_interval <= 0) return;\n if (config.agent.event_tasks_enabled === false) return;\n\n try {\n const { runAgent } = await import('../agent/executor.js');\n runAgent(vaultDir, {\n task: 'title-summary',\n instruction: `Process session ${sessionId} only`,\n embeddingManager,\n }).catch((err) => {\n logger.warn(LOG_KINDS.AGENT_ERROR, 'Title-summary task failed', {\n session_id: sessionId,\n error: String(err),\n });\n });\n } catch {\n // agent module unavailable — silently no-op\n }\n}\n","import { getSession, listSessions, countSessions, deleteSessionCascade, getSessionImpact, updateSession } from '@myco/db/queries/sessions.js';\nimport { listBatchesBySession, countBatchesBySession } from '@myco/db/queries/batches.js';\nimport { listActivitiesByBatch, countActivities } from '@myco/db/queries/activities.js';\nimport { listAttachmentsBySession } from '@myco/db/queries/attachments.js';\nimport { listPlansBySession } from '@myco/db/queries/plans.js';\nimport { LOG_KINDS } from '@myco/constants/log-kinds.js';\nimport { epochSeconds } from '@myco/constants.js';\nimport { cleanupAfterSessionCascade } from '../jobs/session-cleanup.js';\nimport { triggerTitleSummary } from '../trigger-title-summary.js';\nimport type { RouteRequest, RouteResponse } from '../router.js';\nimport type { EmbeddingManager } from '../embedding/manager.js';\nimport type { DaemonLogger } from '../logger.js';\nimport type { MycoConfig } from '@myco/config/schema.js';\n\nconst DEFAULT_LIST_LIMIT = 50;\nconst DEFAULT_LIST_OFFSET = 0;\n\nexport async function handleListSessions(req: RouteRequest): Promise<RouteResponse> {\n const limit = req.query.limit ? Number(req.query.limit) : DEFAULT_LIST_LIMIT;\n const offset = req.query.offset ? 
Number(req.query.offset) : DEFAULT_LIST_OFFSET;\n const status = req.query.status || undefined;\n const agent = req.query.agent || undefined;\n const search = req.query.search || undefined;\n\n const filterOpts = { status, agent, search };\n\n const sessions = listSessions({ ...filterOpts, limit, offset }).map((s) => ({\n id: s.id,\n date: new Date(s.started_at * 1000).toISOString().slice(0, 10),\n title: s.title || s.id.slice(0, 8),\n status: s.status,\n agent: s.agent,\n prompt_count: s.prompt_count,\n tool_count: s.tool_count,\n started_at: s.started_at,\n ended_at: s.ended_at,\n }));\n const total = countSessions(filterOpts);\n\n return { body: { sessions, total, offset, limit } };\n}\n\nexport async function handleGetSession(req: RouteRequest): Promise<RouteResponse> {\n const session = getSession(req.params.id);\n if (!session) return { status: 404, body: { error: 'not_found' } };\n\n // Derive counts from actual rows — the database is the authority,\n // not the cached prompt_count/tool_count on the sessions row.\n const promptCount = countBatchesBySession(session.id);\n const toolCount = countActivities(session.id);\n\n return { body: { ...session, prompt_count: promptCount, tool_count: toolCount } };\n}\n\nexport async function handleGetSessionBatches(req: RouteRequest): Promise<RouteResponse> {\n const batches = listBatchesBySession(req.params.id);\n return { body: batches };\n}\n\nexport async function handleGetBatchActivities(req: RouteRequest): Promise<RouteResponse> {\n const batchId = Number(req.params.id);\n if (isNaN(batchId)) return { status: 400, body: { error: 'invalid_batch_id' } };\n const activities = listActivitiesByBatch(batchId);\n return { body: activities };\n}\n\nexport async function handleGetSessionAttachments(req: RouteRequest): Promise<RouteResponse> {\n const attachments = listAttachmentsBySession(req.params.id);\n return { body: attachments };\n}\n\nexport async function handleGetSessionPlans(req: RouteRequest): 
Promise<RouteResponse> {\n const plans = listPlansBySession(req.params.id);\n return { body: plans };\n}\n\n// ---------------------------------------------------------------------------\n// Session mutation factory (requires injected deps)\n// ---------------------------------------------------------------------------\n\nexport interface SessionMutationDeps {\n embeddingManager: EmbeddingManager;\n vaultDir: string;\n logger: DaemonLogger;\n liveConfig: { current: MycoConfig };\n}\n\nexport function createSessionMutationHandlers(deps: SessionMutationDeps) {\n const { embeddingManager, vaultDir, logger, liveConfig } = deps;\n\n /** DELETE /api/sessions/:id — cascade delete with post-transaction cleanup. */\n async function handleDeleteSession(req: RouteRequest): Promise<RouteResponse> {\n const sessionId = req.params.id;\n const result = deleteSessionCascade(sessionId);\n if (!result.deleted) return { status: 404, body: { error: 'Session not found' } };\n\n // Post-transaction cleanup (fire-and-forget)\n cleanupAfterSessionCascade(sessionId, result, embeddingManager, vaultDir).catch(() => {});\n\n logger.info(LOG_KINDS.API_SESSION_DELETE, 'Session cascade deleted', {\n session_id: sessionId,\n counts: result.counts,\n });\n return { body: { ok: true, counts: result.counts } };\n }\n\n /**\n * POST /api/sessions/:id/complete — manual mirror of the SessionEnd hook.\n *\n * Flips the session to `status = 'completed'` (if not already) and fires\n * the title-summary task so the summary regenerates against the full arc.\n * Kept deliberately forgiving: completing an already-completed session is\n * idempotent — it re-triggers the regenerate without rewriting status.\n *\n * Exists because the SessionEnd hook isn't reliably fired by every\n * symbiont, and because users sometimes know a session is done before\n * any timer-based stale-sweep would catch it.\n */\n async function handleCompleteSession(req: RouteRequest): Promise<RouteResponse> {\n const sessionId = 
req.params.id;\n const session = getSession(sessionId);\n if (!session) return { status: 404, body: { error: 'Session not found' } };\n\n const wasActive = session.status === 'active';\n if (wasActive) {\n updateSession(sessionId, {\n status: 'completed',\n ended_at: session.ended_at ?? epochSeconds(),\n });\n }\n\n await triggerTitleSummary(sessionId, { vaultDir, embeddingManager, liveConfig, logger });\n\n logger.info(LOG_KINDS.API_SESSION_COMPLETE, 'Session manually completed', {\n session_id: sessionId,\n was_active: wasActive,\n });\n\n return { body: { ok: true, was_active: wasActive } };\n }\n\n /** GET /api/sessions/:id/impact — get session impact data. */\n async function handleGetSessionImpact(req: RouteRequest): Promise<RouteResponse> {\n const sessionId = req.params.id;\n const session = getSession(sessionId);\n if (!session) return { status: 404, body: { error: 'Session not found' } };\n const impact = getSessionImpact(sessionId);\n return { body: impact };\n }\n\n return { handleDeleteSession, handleCompleteSession, handleGetSessionImpact };\n}\n","import { listSpores, countSpores, getSpore } from '@myco/db/queries/spores.js';\nimport { listEntities, getEntity } from '@myco/db/queries/entities.js';\nimport { getSession } from '@myco/db/queries/sessions.js';\nimport { listDigestExtracts } from '@myco/db/queries/digest-extracts.js';\nimport { getGraphForNode } from '@myco/db/queries/graph-edges.js';\nimport { getDatabase } from '@myco/db/client.js';\nimport { DEFAULT_AGENT_ID } from '@myco/constants.js';\nimport type { RouteRequest, RouteResponse } from '../router.js';\n\n// ---------------------------------------------------------------------------\n// Constants\n// ---------------------------------------------------------------------------\n\n/** Default number of items returned by list endpoints. */\nconst DEFAULT_LIST_LIMIT = 50;\n\n/** Default pagination offset for list endpoints. 
*/\nconst DEFAULT_LIST_OFFSET = 0;\n\n/** Default graph traversal depth. */\nconst DEFAULT_GRAPH_DEPTH = 1;\n\n/** Maximum graph traversal depth (capped for performance). */\nconst MAX_GRAPH_DEPTH = 3;\n\n/** Spore node name preview length (first N chars of content). */\nconst SPORE_NAME_PREVIEW_CHARS = 60;\n\n/** Seed counts for focused graph startup. */\nconst GRAPH_SEED_ENTITY_LIMIT = 4;\nconst GRAPH_SEED_SPORE_LIMIT = 4;\nconst GRAPH_SEED_SESSION_LIMIT = 4;\n\n/** Edge types to exclude from graph visualization (too granular). */\nconst EXCLUDED_GRAPH_EDGE_TYPES = new Set(['HAS_BATCH', 'EXTRACTED_FROM']);\n\n// ---------------------------------------------------------------------------\n// Spore handlers\n// ---------------------------------------------------------------------------\n\nexport async function handleListSpores(req: RouteRequest): Promise<RouteResponse> {\n const agentId = req.query.agent_id; // undefined = all agents\n const type = req.query.type;\n const status = req.query.status;\n const limit = req.query.limit ? Number(req.query.limit) : DEFAULT_LIST_LIMIT;\n const offset = req.query.offset ? Number(req.query.offset) : DEFAULT_LIST_OFFSET;\n const search = req.query.search || undefined;\n\n const filterOpts = {\n ...(agentId ? 
{ agent_id: agentId } : {}),\n observation_type: type,\n status,\n search,\n };\n\n const spores = listSpores({ ...filterOpts, limit, offset });\n const total = countSpores(filterOpts);\n\n return { body: { spores, total, offset, limit } };\n}\n\nexport async function handleGetSpore(req: RouteRequest): Promise<RouteResponse> {\n const spore = getSpore(req.params.id);\n if (!spore) return { status: 404, body: { error: 'not_found' } };\n return { body: spore };\n}\n\n// ---------------------------------------------------------------------------\n// Entity handlers\n// ---------------------------------------------------------------------------\n\nexport async function handleListEntities(req: RouteRequest): Promise<RouteResponse> {\n const agentId = req.query.agent_id ?? DEFAULT_AGENT_ID;\n const type = req.query.type;\n const mentioned_in = req.query.mentioned_in;\n const note_type = req.query.note_type;\n const limit = req.query.limit ? Number(req.query.limit) : DEFAULT_LIST_LIMIT;\n const offset = req.query.offset ? Number(req.query.offset) : DEFAULT_LIST_OFFSET;\n\n const entities = listEntities({\n agent_id: agentId,\n type,\n mentioned_in,\n note_type,\n limit,\n offset,\n });\n\n return { body: { entities } };\n}\n\nexport async function handleGetGraphSeeds(_req: RouteRequest): Promise<RouteResponse> {\n const db = getDatabase();\n\n const sporeRows = db.prepare(\n `SELECT id, observation_type, status, content, created_at\n FROM spores\n WHERE agent_id = ? 
AND status = 'active'\n ORDER BY created_at DESC\n LIMIT ?`,\n ).all(DEFAULT_AGENT_ID, GRAPH_SEED_SPORE_LIMIT) as Array<Record<string, unknown>>;\n\n const sessionRows = db.prepare(\n `SELECT id, title, summary, status, started_at as created_at\n FROM sessions\n WHERE status != 'active'\n ORDER BY started_at DESC\n LIMIT ?`,\n ).all(GRAPH_SEED_SESSION_LIMIT) as Array<Record<string, unknown>>;\n\n const entityRows = db.prepare(\n `SELECT e.id, e.type, e.name, e.status, e.first_seen as created_at, COUNT(em.entity_id) as mention_count\n FROM entities e\n LEFT JOIN entity_mentions em ON em.entity_id = e.id\n WHERE e.agent_id = ? AND e.status = 'active'\n GROUP BY e.id\n ORDER BY mention_count DESC, e.last_seen DESC\n LIMIT ?`,\n ).all(DEFAULT_AGENT_ID, GRAPH_SEED_ENTITY_LIMIT) as Array<Record<string, unknown>>;\n\n const sporeSeeds = sporeRows.map((row) => ({\n id: row.id as string,\n name: ((row.content as string) ?? '').slice(0, SPORE_NAME_PREVIEW_CHARS),\n type: 'spore' as const,\n status: (row.status as string) ?? undefined,\n created_at: row.created_at as number | undefined,\n content: row.content as string | undefined,\n observation_type: row.observation_type as string | undefined,\n }));\n const sessionSeeds = sessionRows.map((row) => ({\n id: row.id as string,\n name: (row.title as string) ?? `Session ${(row.id as string).slice(-6)}`,\n type: 'session' as const,\n status: (row.status as string) ?? undefined,\n created_at: row.created_at as number | undefined,\n content: (row.summary as string) ?? undefined,\n }));\n const entitySeeds = entityRows.map((row) => ({\n id: row.id as string,\n name: row.name as string,\n type: row.type as string,\n status: (row.status as string) ?? undefined,\n created_at: row.created_at as number | undefined,\n mention_count: Number(row.mention_count) || 0,\n }));\n\n const seeds = [\n ...entitySeeds,\n ...sessionSeeds,\n ...sporeSeeds,\n ];\n\n const recommendedId = entitySeeds[0]?.id\n ?? sessionSeeds[0]?.id\n ?? 
sporeSeeds[0]?.id\n ?? null;\n\n return {\n body: {\n seeds,\n recommended_id: recommendedId,\n },\n };\n}\n\n// ---------------------------------------------------------------------------\n// Graph handler\n// ---------------------------------------------------------------------------\n\nexport async function handleGetGraph(req: RouteRequest): Promise<RouteResponse> {\n const depth = Math.min(Number(req.query.depth) || DEFAULT_GRAPH_DEPTH, MAX_GRAPH_DEPTH);\n const id = req.params.id;\n\n // Verify center node exists in any of the primary tables\n let centerNode: any = null;\n let centerType: 'entity' | 'spore' | 'session' = 'entity';\n\n const entity = getEntity(id);\n if (entity) {\n centerNode = entity;\n centerType = 'entity';\n } else {\n const spore = getSpore(id);\n if (spore) {\n centerNode = spore;\n centerType = 'spore';\n } else {\n const session = getSession(id);\n if (session) {\n centerNode = session;\n centerType = 'session';\n }\n }\n }\n\n if (!centerNode) return { status: 404, body: { error: 'not_found' } };\n\n // Use graph_edges for BFS traversal\n const graph = getGraphForNode(id, centerType, { depth });\n\n // Filter out batch-related edges (too granular for visualization)\n const filteredEdges = graph.edges.filter(\n (e) => !EXCLUDED_GRAPH_EDGE_TYPES.has(e.type),\n );\n\n const graphDb = getDatabase();\n\n // Collect ALL unique node IDs from filtered edges, grouped by type\n const entityIds = new Set<string>();\n const sporeIds = new Set<string>();\n const sessionIds = new Set<string>();\n\n for (const edge of filteredEdges) {\n for (const [nodeId, type] of [\n [edge.source_id, edge.source_type],\n [edge.target_id, edge.target_type],\n ] as [string, string][]) {\n switch (type) {\n case 'entity': entityIds.add(nodeId); break;\n case 'spore': sporeIds.add(nodeId); break;\n case 'session': sessionIds.add(nodeId); break;\n // batch nodes are intentionally excluded\n }\n }\n }\n \n // Center node is always included in the appropriate set\n if 
(centerType === 'entity') entityIds.add(id);\n if (centerType === 'spore') sporeIds.add(id);\n if (centerType === 'session') sessionIds.add(id);\n\n // --- Batch-fetch entity nodes ---\n const entityIdArray = Array.from(entityIds);\n let entityNodes: Array<Record<string, unknown>> = [];\n if (entityIdArray.length > 0) {\n const placeholders = entityIdArray.map(() => '?').join(', ');\n entityNodes = graphDb.prepare(\n `SELECT id, type, name, properties, status, first_seen as created_at\n FROM entities WHERE id IN (${placeholders})`,\n ).all(...entityIdArray) as Array<Record<string, unknown>>;\n }\n\n // --- Batch-fetch spore nodes ---\n const sporeIdArray = Array.from(sporeIds);\n let sporeNodes: Array<Record<string, unknown>> = [];\n if (sporeIdArray.length > 0) {\n const placeholders = sporeIdArray.map(() => '?').join(', ');\n sporeNodes = graphDb.prepare(\n `SELECT id, observation_type, status, content, properties, created_at\n FROM spores WHERE id IN (${placeholders})`,\n ).all(...sporeIdArray) as Array<Record<string, unknown>>;\n }\n\n // --- Batch-fetch session nodes ---\n const sessionIdArray = Array.from(sessionIds);\n let sessionNodes: Array<Record<string, unknown>> = [];\n if (sessionIdArray.length > 0) {\n const placeholders = sessionIdArray.map(() => '?').join(', ');\n sessionNodes = graphDb.prepare(\n `SELECT id, title, summary, status, started_at as created_at\n FROM sessions WHERE id IN (${placeholders})`,\n ).all(...sessionIdArray) as Array<Record<string, unknown>>;\n }\n\n // --- Batch-fetch mention counts for entity nodes ---\n const mentionCounts = new Map<string, number>();\n if (entityIdArray.length > 0) {\n const placeholders = entityIdArray.map(() => '?').join(', ');\n const mentionRows = graphDb.prepare(\n `SELECT entity_id, COUNT(*) as count FROM entity_mentions\n WHERE entity_id IN (${placeholders}) GROUP BY entity_id`,\n ).all(...entityIdArray) as Array<Record<string, unknown>>;\n for (const row of mentionRows) {\n 
mentionCounts.set(row.entity_id as string, Number(row.count));\n }\n }\n\n // --- Build unified nodes array ---\n const allNodes = [\n ...entityNodes.map((n) => ({\n id: n.id as string,\n name: n.name as string,\n type: n.type as string,\n status: (n.status as string) ?? undefined,\n created_at: n.created_at as number | undefined,\n properties: (n.properties as string) ?? undefined,\n mention_count: mentionCounts.get(n.id as string) ?? 0,\n })),\n ...sporeNodes.map((n) => ({\n id: n.id as string,\n name: ((n.content as string) ?? '').slice(0, SPORE_NAME_PREVIEW_CHARS),\n type: 'spore' as const,\n status: (n.status as string) ?? undefined,\n created_at: n.created_at as number | undefined,\n content: n.content as string | undefined,\n properties: (n.properties as string) ?? undefined,\n observation_type: n.observation_type as string | undefined,\n })),\n ...sessionNodes.map((n) => ({\n id: n.id as string,\n name: (n.title as string) ?? `Session ${(n.id as string).slice(-6)}`,\n type: 'session' as const,\n status: (n.status as string) ?? undefined,\n created_at: n.created_at as number | undefined,\n content: (n.summary as string) ?? undefined,\n })),\n ];\n\n // Map edges to UI-friendly shape (label + weight instead of type + confidence)\n const uiEdges = filteredEdges.map((e) => ({\n source_id: e.source_id,\n target_id: e.target_id,\n label: e.type,\n weight: e.confidence,\n }));\n\n const centerResponseNode = allNodes.find((n) => n.id === id);\n\n return {\n body: {\n center: centerResponseNode,\n nodes: allNodes.filter((n) => n.id !== id),\n edges: uiEdges,\n depth,\n },\n };\n}\n\n// ---------------------------------------------------------------------------\n// Full graph handler\n// ---------------------------------------------------------------------------\n\n/** Maximum nodes returned in full graph view to prevent overload. 
*/\nconst FULL_GRAPH_NODE_LIMIT = 500;\n\nexport async function handleGetFullGraph(_req: RouteRequest): Promise<RouteResponse> {\n const db = getDatabase();\n\n // Fetch all entities\n const entityRows = db.prepare(\n `SELECT id, type, name, properties, status, first_seen as created_at\n FROM entities WHERE agent_id = ? LIMIT ?`,\n ).all(DEFAULT_AGENT_ID, FULL_GRAPH_NODE_LIMIT) as Array<Record<string, unknown>>;\n\n // Fetch active spores (skip superseded)\n const sporeRows = db.prepare(\n `SELECT id, observation_type, status, content, properties, created_at\n FROM spores WHERE agent_id = ? AND status = 'active' LIMIT ?`,\n ).all(DEFAULT_AGENT_ID, FULL_GRAPH_NODE_LIMIT) as Array<Record<string, unknown>>;\n\n // Fetch recent sessions\n const sessionRows = db.prepare(\n `SELECT id, title, summary, status, started_at as created_at\n FROM sessions ORDER BY created_at DESC LIMIT ?`,\n ).all(FULL_GRAPH_NODE_LIMIT) as Array<Record<string, unknown>>;\n\n // Collect all node IDs for edge filtering\n const allIds = new Set<string>();\n for (const r of [...entityRows, ...sporeRows, ...sessionRows]) {\n allIds.add(r.id as string);\n }\n\n // Fetch edges between known nodes, excluding batch-level edges\n const excludedTypes = Array.from(EXCLUDED_GRAPH_EDGE_TYPES).map(() => '?').join(', ');\n const allIdsList = Array.from(allIds);\n const idPlaceholders = allIdsList.map(() => '?').join(', ');\n const edgeRows = db.prepare(\n `SELECT source_id, source_type, target_id, target_type, type, confidence\n FROM graph_edges\n WHERE agent_id = ?\n AND type NOT IN (${excludedTypes})\n AND source_id IN (${idPlaceholders})\n AND target_id IN (${idPlaceholders})`,\n ).all(DEFAULT_AGENT_ID, ...Array.from(EXCLUDED_GRAPH_EDGE_TYPES), ...allIdsList, ...allIdsList) as Array<Record<string, unknown>>;\n\n const filteredEdges = edgeRows;\n\n // Mention counts for entity sizing\n const mentionCounts = new Map<string, number>();\n const entityIdArray = entityRows.map((r) => r.id as string);\n if 
(entityIdArray.length > 0) {\n const placeholders = entityIdArray.map(() => '?').join(', ');\n const mentionRows = db.prepare(\n `SELECT entity_id, COUNT(*) as count FROM entity_mentions\n WHERE entity_id IN (${placeholders}) GROUP BY entity_id`,\n ).all(...entityIdArray) as Array<Record<string, unknown>>;\n for (const row of mentionRows) {\n mentionCounts.set(row.entity_id as string, Number(row.count));\n }\n }\n\n // Build nodes\n const nodes = [\n ...entityRows.map((n) => ({\n id: n.id as string,\n name: n.name as string,\n type: n.type as string,\n status: (n.status as string) ?? undefined,\n created_at: n.created_at as number | undefined,\n properties: (n.properties as string) ?? undefined,\n mention_count: mentionCounts.get(n.id as string) ?? 0,\n })),\n ...sporeRows.map((n) => ({\n id: n.id as string,\n name: ((n.content as string) ?? '').slice(0, SPORE_NAME_PREVIEW_CHARS),\n type: 'spore' as const,\n status: (n.status as string) ?? undefined,\n created_at: n.created_at as number | undefined,\n content: n.content as string | undefined,\n properties: (n.properties as string) ?? undefined,\n observation_type: n.observation_type as string | undefined,\n })),\n ...sessionRows.map((n) => ({\n id: n.id as string,\n name: (n.title as string) ?? `Session ${(n.id as string).slice(-6)}`,\n type: 'session' as const,\n status: (n.status as string) ?? undefined,\n created_at: n.created_at as number | undefined,\n content: (n.summary as string) ?? 
undefined,\n })),\n ];\n\n const edges = filteredEdges.map((e) => ({\n source_id: e.source_id as string,\n target_id: e.target_id as string,\n label: e.type as string,\n weight: e.confidence as number | undefined,\n }));\n\n return { body: { nodes, edges } };\n}\n\n// ---------------------------------------------------------------------------\n// Digest handler\n// ---------------------------------------------------------------------------\n\nexport async function handleGetDigest(req: RouteRequest): Promise<RouteResponse> {\n const agentId = req.query.agent_id ?? DEFAULT_AGENT_ID;\n const extracts = listDigestExtracts(agentId);\n return { body: { tiers: extracts } };\n}\n","/**\n * Search API handler — supports FTS, semantic, and auto modes.\n *\n * - mode=fts: FTS5 full-text search (prompt_batches + activities)\n * - mode=semantic: Vector similarity search via VectorStore (sessions, spores, plans, artifacts)\n * - mode=auto (default): Tries semantic first, falls back to FTS if provider unavailable\n */\n\nimport { fullTextSearch, hydrateSearchResults } from '@myco/db/queries/search.js';\nimport {\n SEARCH_RESULTS_DEFAULT_LIMIT,\n SEARCH_SIMILARITY_THRESHOLD,\n TEAM_SOURCE_PREFIX,\n} from '@myco/constants.js';\nimport type { RouteRequest, RouteResponse } from '../router.js';\nimport type { EmbeddingManager } from '../embedding/manager.js';\nimport type { TeamSyncClient, TeamSearchResult } from '../team-sync.js';\n\n// ---------------------------------------------------------------------------\n// Types\n// ---------------------------------------------------------------------------\n\n/** Valid search modes. 
*/\ntype SearchMode = 'auto' | 'semantic' | 'fts';\n\nconst SEARCH_NAMESPACE_RULES: Array<{ key: string; value?: string }> = [\n { key: 'all', value: undefined },\n { key: 'session', value: 'sessions' },\n { key: 'sessions', value: 'sessions' },\n { key: 'spore', value: 'spores' },\n { key: 'spores', value: 'spores' },\n { key: 'plan', value: 'plans' },\n { key: 'plans', value: 'plans' },\n { key: 'artifact', value: 'artifacts' },\n { key: 'artifacts', value: 'artifacts' },\n { key: 'skill', value: 'skill_records' },\n { key: 'skill_records', value: 'skill_records' },\n];\n\nexport function normalizeSearchNamespace(value?: string): string | undefined {\n if (!value) return undefined;\n for (const rule of SEARCH_NAMESPACE_RULES) {\n if (rule.key === value) return rule.value;\n }\n return value;\n}\n\n/** Dependencies injected by the daemon when registering the route. */\nexport interface SearchDeps {\n embeddingManager: EmbeddingManager;\n getTeamClient?: () => TeamSyncClient | null;\n machineId?: string;\n}\n\n// ---------------------------------------------------------------------------\n// Handler factory\n// ---------------------------------------------------------------------------\n\n/**\n * Create a search handler with injected dependencies.\n *\n * Returns an async handler that supports `mode` query parameter:\n * - `auto` (default): tries semantic search, falls back to FTS\n * - `semantic`: vector similarity search only\n * - `fts`: FTS5 text search only\n */\nexport function createSearchHandler(deps: SearchDeps) {\n return async function handleSearch(req: RouteRequest): Promise<RouteResponse> {\n const query = req.query.q;\n if (!query) return { status: 400, body: { error: 'missing_query' } };\n\n const mode = (req.query.mode ?? 
'auto') as SearchMode;\n const type = req.query.type;\n const limit = Number(req.query.limit) || SEARCH_RESULTS_DEFAULT_LIMIT;\n const namespace = req.query.namespace;\n\n // --- FTS-only mode ---\n if (mode === 'fts') {\n const results = fullTextSearch(query, { type, limit });\n return { body: { mode: 'fts', results } };\n }\n\n // --- Semantic or auto mode: attempt vector search ---\n const queryVector = await deps.embeddingManager.embedQuery(query);\n\n // If provider unavailable, auto falls back to FTS; semantic returns empty\n if (queryVector === null) {\n if (mode === 'auto') {\n const results = fullTextSearch(query, { type, limit });\n return { body: { mode: 'fts', results, fallback: true } };\n }\n // mode === 'semantic' but no provider\n return { body: { mode: 'semantic', results: [], provider_unavailable: true } };\n }\n\n // Vector search with optional namespace/type filtering\n const searchNamespace = normalizeSearchNamespace(namespace ?? type);\n const vectorResults = deps.embeddingManager.searchVectors(queryVector, {\n namespace: searchNamespace,\n limit,\n threshold: SEARCH_SIMILARITY_THRESHOLD,\n });\n\n // Hydrate local vector results into full SearchResults\n const localResults = hydrateSearchResults(vectorResults).map((r) => ({\n ...r,\n source: 'local',\n }));\n\n // Fan out to team search in parallel (if connected)\n const teamClient = deps.getTeamClient?.();\n let teamResults: Array<TeamSearchResult & { source: string }> = [];\n if (teamClient) {\n try {\n const teamResponse = await teamClient.search(query, { limit });\n teamResults = teamResponse.results.map((r) => ({\n ...r,\n source: `${TEAM_SOURCE_PREFIX}${r.machine_id}`,\n }));\n } catch {\n // Team search failure is non-blocking — local results still returned\n }\n }\n\n // Deduplicate: skip team results from this machine (we already have them locally)\n const dedupedTeam = deps.machineId\n ? 
teamResults.filter((r) => r.machine_id !== deps.machineId)\n : teamResults;\n\n // Merge by score (highest first), slice to limit\n const merged = [...localResults, ...dedupedTeam]\n .sort((a, b) => (b.score ?? 0) - (a.score ?? 0))\n .slice(0, limit);\n\n return { body: { mode: 'semantic', results: merged } };\n };\n}\n","/**\n * Context injection API handlers — digest at session start, semantic spore search per prompt.\n *\n * - POST /context: Injects digest extract + branch/session metadata at session start\n * - POST /context/prompt: Searches spore embeddings for relevant observations per prompt\n */\n\nimport { z } from 'zod';\nimport { getDigestExtract } from '@myco/db/queries/digest-extracts.js';\nimport { hydrateSearchResults } from '@myco/db/queries/search.js';\nimport { getSession } from '@myco/db/queries/sessions.js';\nimport {\n DEFAULT_AGENT_ID,\n EXCLUDED_SPORE_STATUSES,\n PROMPT_CONTEXT_MIN_LENGTH,\n PROMPT_CONTEXT_MIN_SIMILARITY,\n PROMPT_CONTEXT_MAX_TOKENS,\n PROMPT_VECTOR_OVER_FETCH,\n estimateTokens,\n} from '@myco/constants.js';\nimport { LOG_KINDS } from '@myco/constants/log-kinds.js';\nimport type { MycoConfig } from '@myco/config/schema.js';\nimport type { RouteRequest, RouteResponse } from '../router.js';\nimport type { EmbeddingManager } from '../embedding/manager.js';\nimport type { DaemonLogger } from '../logger.js';\n\n// ---------------------------------------------------------------------------\n// Types\n// ---------------------------------------------------------------------------\n\n/** Dependencies injected by the daemon when registering context routes. 
*/\nexport interface ContextDeps {\n embeddingManager: EmbeddingManager;\n logger: DaemonLogger;\n // Holder so each request reads the current merged config — a user can\n // flip `context.prompt_search` or bump `context.digest_tier` and the\n // very next request sees the change without a daemon restart.\n liveConfig: { current: MycoConfig };\n}\n\n// ---------------------------------------------------------------------------\n// Validation schemas\n// ---------------------------------------------------------------------------\n\nconst SessionContextBody = z.object({\n session_id: z.string().optional(),\n branch: z.string().optional(),\n});\n\nconst ResumeContextBody = z.object({\n session_id: z.string(),\n parent_session_id: z.string().optional(),\n branch: z.string().optional(),\n});\n\nconst PromptContextBody = z.object({\n prompt: z.string(),\n session_id: z.string().optional(),\n});\n\n// ---------------------------------------------------------------------------\n// Session-start context handler\n// ---------------------------------------------------------------------------\n\n/**\n * Create a handler that injects digest extract + metadata at session start.\n *\n * Reads the configured digest tier from digest_extracts. If an extract exists,\n * it becomes the primary context payload. 
Branch and session ID are always included.\n */\nexport function createSessionContextHandler(deps: ContextDeps) {\n return async function handleSessionContext(req: RouteRequest): Promise<RouteResponse> {\n const { session_id, branch } = SessionContextBody.parse(req.body);\n const { logger, liveConfig } = deps;\n const config = liveConfig.current;\n\n logger.debug(LOG_KINDS.CONTEXT_QUERY, 'Session context query', { session_id });\n\n try {\n const parts: string[] = [];\n\n // Digest extract — the primary session context payload\n const tier = config.context.digest_tier;\n const extract = getDigestExtract(DEFAULT_AGENT_ID, tier);\n\n if (extract) {\n parts.push(extract.content);\n logger.info(LOG_KINDS.CONTEXT_DIGEST, 'Digest extract found', {\n session_id,\n tier,\n content_length: extract.content.length,\n generated_at: extract.generated_at,\n });\n } else {\n logger.debug(LOG_KINDS.CONTEXT_DIGEST, 'No digest extract available', { session_id, tier });\n }\n\n // Branch info\n if (branch) {\n parts.push(`Branch:: \\`${branch}\\``);\n }\n\n // Session ID — always included\n parts.push(`Session:: \\`${session_id}\\``);\n\n const source = extract ? 'digest' : 'basic';\n const contextText = parts.join('\\n\\n');\n\n const estimatedTokens = estimateTokens(contextText);\n logger.info(\n LOG_KINDS.CONTEXT_SESSION,\n `Session context: ${estimatedTokens} est. tokens, source=${source}${extract ? `, tier=${tier}` : ''}`,\n {\n session_id,\n source,\n tier: extract ? tier : undefined,\n text_length: contextText.length,\n estimated_tokens: estimatedTokens,\n generated_at: extract?.generated_at,\n injected_text: contextText,\n },\n );\n\n return {\n body: {\n text: contextText,\n source,\n ...(extract ? 
{ tier } : {}),\n },\n };\n } catch (error) {\n logger.error(LOG_KINDS.CONTEXT_SESSION, 'Session context failed', { error: (error as Error).message });\n return { body: { text: '' } };\n }\n };\n}\n\n// ---------------------------------------------------------------------------\n// Resume context handler\n// ---------------------------------------------------------------------------\n\n/**\n * Create a handler that injects a small resume-specific recap for opencode.\n *\n * Resume sessions already inherit their chat history, so this endpoint avoids\n * repeating the full digest. It returns only a terse recap from the parent\n * session when there is meaningful prior context to surface.\n */\nexport function createResumeContextHandler(deps: ContextDeps) {\n return async function handleResumeContext(req: RouteRequest): Promise<RouteResponse> {\n const { session_id, parent_session_id, branch } = ResumeContextBody.parse(req.body);\n const { logger } = deps;\n\n logger.debug(LOG_KINDS.CONTEXT_QUERY, 'Resume context query', {\n session_id,\n parent_session_id,\n });\n\n try {\n const parentSession = parent_session_id ? getSession(parent_session_id) : null;\n const resolvedBranch = branch ?? parentSession?.branch ?? 
null;\n const parts: string[] = [];\n\n if (parentSession?.title) {\n parts.push(`Resuming work from: ${parentSession.title}`);\n }\n\n if (parentSession?.summary) {\n parts.push(parentSession.summary);\n }\n\n if (resolvedBranch) {\n parts.push(`Branch:: \\`${resolvedBranch}\\``);\n }\n\n if (parentSession && parent_session_id) {\n parts.push(`Previous Session:: \\`${parent_session_id}\\``);\n }\n\n if (parts.length === 0) {\n logger.debug(LOG_KINDS.CONTEXT_SESSION, 'No resume context available', { session_id, parent_session_id });\n return { body: { text: '' } };\n }\n\n parts.push(`Session:: \\`${session_id}\\``);\n const contextText = parts.join('\\n\\n');\n const estimatedTokens = estimateTokens(contextText);\n\n logger.info(\n LOG_KINDS.CONTEXT_SESSION,\n `Resume context: ${estimatedTokens} est. tokens`,\n {\n session_id,\n parent_session_id,\n branch: resolvedBranch ?? undefined,\n text_length: contextText.length,\n estimated_tokens: estimatedTokens,\n injected_text: contextText,\n },\n );\n\n return {\n body: {\n text: contextText,\n source: 'resume',\n },\n };\n } catch (error) {\n logger.error(LOG_KINDS.CONTEXT_SESSION, 'Resume context failed', {\n session_id,\n parent_session_id,\n error: (error as Error).message,\n });\n return { body: { text: '' } };\n }\n };\n}\n\n// ---------------------------------------------------------------------------\n// Per-prompt context handler\n// ---------------------------------------------------------------------------\n\n/**\n * Create a handler that searches spore embeddings for observations relevant to the prompt.\n *\n * Embeds the prompt, searches the 'spores' namespace via vector similarity,\n * post-filters by status, and returns formatted spore context.\n */\nexport function createPromptContextHandler(deps: ContextDeps) {\n return async function handlePromptContext(req: RouteRequest): Promise<RouteResponse> {\n const { prompt, session_id } = PromptContextBody.parse(req.body);\n const { logger, liveConfig, 
embeddingManager } = deps;\n const config = liveConfig.current;\n\n // Guard: prompt search disabled\n if (!config.context.prompt_search) {\n logger.debug(LOG_KINDS.CONTEXT_PROMPT, 'Prompt search disabled by config', { session_id });\n return { body: { text: '' } };\n }\n\n // Guard: prompt too short\n if (prompt.length < PROMPT_CONTEXT_MIN_LENGTH) {\n logger.debug(LOG_KINDS.CONTEXT_PROMPT, 'Prompt too short for search', {\n session_id,\n length: prompt.length,\n min: PROMPT_CONTEXT_MIN_LENGTH,\n });\n return { body: { text: '' } };\n }\n\n // Guard: max spores is 0 (disabled)\n const maxSpores = config.context.prompt_max_spores;\n if (maxSpores === 0) {\n logger.debug(LOG_KINDS.CONTEXT_PROMPT, 'Prompt spore injection disabled (max_spores=0)', { session_id });\n return { body: { text: '' } };\n }\n\n // Embed the prompt\n const queryVector = await embeddingManager.embedQuery(prompt);\n if (!queryVector) {\n logger.debug(LOG_KINDS.CONTEXT_EMBED, 'Embedding provider unavailable for prompt search', { session_id });\n return { body: { text: '' } };\n }\n\n // Search spores namespace — over-fetch to compensate for post-filtering\n const vectorResults = embeddingManager.searchVectors(queryVector, {\n namespace: 'spores',\n limit: maxSpores * PROMPT_VECTOR_OVER_FETCH,\n threshold: PROMPT_CONTEXT_MIN_SIMILARITY,\n });\n\n logger.debug(LOG_KINDS.CONTEXT_SEARCH, 'Prompt vector search completed', {\n session_id,\n raw_results: vectorResults.length,\n top_similarity: vectorResults[0]?.similarity,\n });\n\n if (vectorResults.length === 0) {\n return { body: { text: '' } };\n }\n\n // Post-filter: exclude superseded/archived spores via domain_metadata\n const eligible = vectorResults.filter(\n (r) => !EXCLUDED_SPORE_STATUSES.has(r.metadata.status as string),\n );\n\n if (eligible.length === 0) {\n logger.debug(LOG_KINDS.CONTEXT_FILTER, 'All spore results excluded by status filter', { session_id });\n return { body: { text: '' } };\n }\n\n // Take top N and hydrate with full 
record data\n const topResults = eligible.slice(0, maxSpores);\n const hydrated = hydrateSearchResults(topResults);\n const spores = hydrated.filter((r) => r.type === 'spore');\n\n if (spores.length === 0) {\n return { body: { text: '' } };\n }\n\n // Format spore context with token budget enforcement\n const text = formatSporeContext(spores);\n\n const promptTokens = estimateTokens(text);\n const titles = spores.map((s) => s.title);\n // Single log line: summary in the message, full injected text in the data\n // blob so the log detail panel shows exactly what the model received.\n // No separate debug line — debug mode shouldn't hide information, and\n // splitting summary vs. detail across two rows just doubles clicks.\n logger.info(LOG_KINDS.CONTEXT_PROMPT, `Prompt context: ${spores.length} spores [${titles.join(', ')}] (~${promptTokens} tokens)`, {\n session_id,\n spore_count: spores.length,\n spore_titles: titles,\n scores: spores.map((s) => s.score.toFixed(3)),\n estimated_tokens: promptTokens,\n injected_text: text,\n });\n\n return { body: { text } };\n };\n}\n\n// ---------------------------------------------------------------------------\n// Helpers\n// ---------------------------------------------------------------------------\n\n/**\n * Format hydrated spore search results as markdown context for injection.\n * Respects PROMPT_CONTEXT_MAX_TOKENS budget.\n */\nfunction formatSporeContext(\n spores: Array<{ title: string; preview: string; score: number }>,\n): string {\n const header = 'Relevant vault observations:';\n let text = header;\n let tokens = estimateTokens(text);\n\n for (const spore of spores) {\n const line = `\\n- (${spore.title}) ${spore.preview}`;\n const lineTokens = estimateTokens(line);\n\n if (tokens + lineTokens > PROMPT_CONTEXT_MAX_TOKENS) break;\n\n text += line;\n tokens += lineTokens;\n }\n\n // Don't return just the header with no items\n return text === header ? 
'' : text;\n}\n","/**\n * Activity feed query — unified timeline across sessions, agent_runs, and spores.\n *\n * Uses UNION ALL to merge per-table subqueries, then a final ORDER BY + LIMIT\n * to produce a cross-table timeline ordered by timestamp descending.\n *\n * All functions obtain the SQLite instance internally via `getDatabase()`.\n * Queries use positional `?` placeholders throughout (better-sqlite3).\n */\n\nimport { getDatabase } from '@myco/db/client.js';\nimport { FEED_DEFAULT_LIMIT } from '@myco/constants.js';\n\n// ---------------------------------------------------------------------------\n// Types\n// ---------------------------------------------------------------------------\n\n/** A single entry in the unified activity feed. */\nexport interface FeedEntry {\n type: 'session' | 'agent_run' | 'spore';\n id: string;\n summary: string;\n timestamp: number;\n}\n\n// ---------------------------------------------------------------------------\n// Public API\n// ---------------------------------------------------------------------------\n\n/**\n * Return the most recent activity across sessions, agent runs, and spores,\n * merged into a single timeline sorted by timestamp descending.\n *\n * Each branch contributes up to `limit` candidates; the final result is\n * also capped at `limit`.\n *\n * SQLite does not support per-branch ORDER BY + LIMIT inside UNION ALL\n * parenthesized subqueries the way PostgreSQL does. Instead, each branch\n * is wrapped as a subquery (SELECT ... ORDER BY ... LIMIT ?) 
to achieve\n * the same effect.\n *\n * @param limit - max entries to return (defaults to FEED_DEFAULT_LIMIT)\n */\nexport function getActivityFeed(limit: number = FEED_DEFAULT_LIMIT): FeedEntry[] {\n const db = getDatabase();\n\n const rows = db.prepare(`\n SELECT * FROM (\n SELECT 'session' as type, id, COALESCE(title, 'Session ' || substr(id, 1, 8)) as summary,\n COALESCE(ended_at, started_at) as timestamp\n FROM sessions ORDER BY started_at DESC LIMIT ?\n )\n\n UNION ALL\n\n SELECT * FROM (\n SELECT 'agent_run' as type, id, task || ' — ' || status as summary,\n COALESCE(completed_at, started_at) as timestamp\n FROM agent_runs ORDER BY started_at DESC LIMIT ?\n )\n\n UNION ALL\n\n SELECT * FROM (\n SELECT 'spore' as type, id, observation_type || ': ' || substr(content, 1, 80) as summary,\n created_at as timestamp\n FROM spores WHERE status = 'active' ORDER BY created_at DESC LIMIT ?\n )\n\n ORDER BY timestamp DESC LIMIT ?\n `).all(limit, limit, limit, limit) as FeedEntry[];\n\n return rows;\n}\n","import { getActivityFeed } from '@myco/db/queries/feed.js';\nimport { FEED_DEFAULT_LIMIT } from '@myco/constants.js';\nimport type { RouteRequest, RouteResponse } from '../router.js';\n\n// ---------------------------------------------------------------------------\n// Handler\n// ---------------------------------------------------------------------------\n\nexport async function handleGetFeed(req: RouteRequest): Promise<RouteResponse> {\n const limit = Number(req.query.limit) || FEED_DEFAULT_LIMIT;\n const feed = getActivityFeed(limit);\n return { body: feed };\n}\n","import { loadManifests } from '@myco/symbionts/detect.js';\nimport { loadMergedConfig, getEnabledSymbiontNames } from '../../config/loader.js';\nimport type { RouteResponse } from '../router.js';\n\n// ---------------------------------------------------------------------------\n// Types\n// ---------------------------------------------------------------------------\n\n/** Public manifest fields exposed 
via the API (no internal hook config). */\ninterface SymbiontInfo {\n name: string;\n displayName: string;\n binary: string;\n enabled: boolean;\n resumeCommand?: string;\n}\n\n// ---------------------------------------------------------------------------\n// Handler\n// ---------------------------------------------------------------------------\n\n/**\n * List all registered symbiont manifests with their enabled state.\n *\n * Returns the public-facing subset of each manifest — enough for the UI\n * to build resume commands, display agent names, and show enabled state.\n * When the config lacks a `symbionts` section (pre-existing installs),\n * all manifests default to `enabled: true`.\n */\nexport async function handleListSymbionts(vaultDir: string): Promise<RouteResponse> {\n const manifests = loadManifests();\n\n let enabledNames: Set<string> | null = null;\n try {\n enabledNames = getEnabledSymbiontNames(loadMergedConfig(vaultDir));\n } catch { /* config not loadable */ }\n\n const symbionts: SymbiontInfo[] = manifests.map((m) => ({\n name: m.name,\n displayName: m.displayName,\n binary: m.binary,\n enabled: enabledNames ? enabledNames.has(m.name) : true,\n ...(m.resumeCommand ? { resumeCommand: m.resumeCommand } : {}),\n }));\n\n return { body: { symbionts } };\n}\n","import { getEmbeddingQueueDepth } from '@myco/db/queries/embeddings.js';\nimport { loadMergedConfig } from '../../config/loader.js';\nimport { EMBEDDING_BATCH_SIZE } from '../../constants.js';\nimport type { EmbeddingManager } from '../embedding/index.js';\nimport type { RouteResponse } from '../router.js';\n\n// ---------------------------------------------------------------------------\n// Constants\n// ---------------------------------------------------------------------------\n\n/** Status when no items are pending embedding. */\nconst EMBEDDING_STATUS_IDLE = 'idle';\n\n/** Status when items are waiting to be embedded. 
*/\nconst EMBEDDING_STATUS_PENDING = 'pending';\n\n// ---------------------------------------------------------------------------\n// Handlers\n// ---------------------------------------------------------------------------\n\nexport async function handleGetEmbeddingStatus(vaultDir: string): Promise<RouteResponse> {\n const config = loadMergedConfig(vaultDir);\n\n const { queue_depth, embedded_count } = getEmbeddingQueueDepth();\n\n return {\n body: {\n provider: config.embedding.provider,\n model: config.embedding.model,\n base_url: config.embedding.base_url ?? null,\n queue_depth,\n embedded_count,\n status: queue_depth === 0 ? EMBEDDING_STATUS_IDLE : EMBEDDING_STATUS_PENDING,\n },\n };\n}\n\nexport function handleEmbeddingDetails(manager: EmbeddingManager): RouteResponse {\n const details = manager.getDetails();\n return { body: details };\n}\n\nexport function handleEmbeddingRebuild(manager: EmbeddingManager): RouteResponse {\n const result = manager.rebuildAll();\n return { body: result };\n}\n\nexport async function handleEmbeddingReconcile(manager: EmbeddingManager): Promise<RouteResponse> {\n const result = await manager.reconcile(EMBEDDING_BATCH_SIZE);\n return { body: result };\n}\n\nexport function handleEmbeddingCleanOrphans(manager: EmbeddingManager): RouteResponse {\n const result = manager.cleanOrphans();\n return { body: result };\n}\n\nexport async function handleEmbeddingReembedStale(manager: EmbeddingManager): Promise<RouteResponse> {\n const result = await manager.reembedStale(EMBEDDING_BATCH_SIZE);\n return { body: result };\n}\n","import type { TableBreakdownRow, IndexInfo } from '@myco/db/queries/database.js';\n\nexport interface DatabaseDetails {\n file: {\n path: string;\n size_bytes: number;\n wal_size_bytes: number;\n page_size: number;\n page_count: number;\n freelist_count: number;\n fragmentation_pct: number;\n };\n schema: {\n version: number;\n journal_mode: string;\n foreign_keys: boolean;\n };\n tables: TableBreakdownRow[];\n 
indexes: IndexInfo[];\n last_optimize_at: string | null;\n last_vacuum_at: string | null;\n last_integrity_check: { at: string; status: 'ok' | 'issues' } | null;\n}\n\nexport interface OptimizeAction {\n name: string;\n duration_ms: number;\n ok: boolean;\n error?: string;\n}\n\nexport interface OptimizeResult {\n actions_completed: OptimizeAction[];\n actions_failed: OptimizeAction[];\n duration_ms: number;\n}\n\nexport interface VacuumResult {\n size_before: number;\n size_after: number;\n freed_bytes: number;\n duration_ms: number;\n}\n\nexport interface ReindexResult {\n duration_ms: number;\n}\n\nexport interface IntegrityResult {\n status: 'ok' | 'issues';\n issues: string[];\n fk_violations: number;\n duration_ms: number;\n}\n\n/** Error-code discriminant returned by the vacuum HTTP handler on 409. */\nexport const VACUUM_ERROR_CODE = 'insufficient_disk_space' as const;\n\nexport class VacuumPrecheckError extends Error {\n constructor(public required_bytes: number, public free_bytes: number) {\n super('VACUUM requires at least ' + required_bytes + ' bytes free; only ' + free_bytes + ' available');\n this.name = 'VacuumPrecheckError';\n }\n}\n","import type { DatabaseMaintenanceManager } from '../database/manager.js';\nimport { VacuumPrecheckError, VACUUM_ERROR_CODE } from '../database/types.js';\nimport type { RouteResponse } from '../router.js';\n\nexport async function handleDatabaseDetails(\n manager: DatabaseMaintenanceManager,\n): Promise<RouteResponse> {\n const details = await manager.getDetails();\n return { body: details };\n}\n\nexport async function handleDatabaseOptimize(\n manager: DatabaseMaintenanceManager,\n): Promise<RouteResponse> {\n const result = await manager.optimize();\n return { body: result };\n}\n\nexport async function handleDatabaseVacuum(\n manager: DatabaseMaintenanceManager,\n): Promise<RouteResponse> {\n try {\n const result = await manager.vacuum();\n return { body: result };\n } catch (err) {\n if (err instanceof 
VacuumPrecheckError) {\n return {\n status: 409,\n body: {\n error: VACUUM_ERROR_CODE,\n required_bytes: err.required_bytes,\n free_bytes: err.free_bytes,\n },\n };\n }\n throw err;\n }\n}\n\nexport async function handleDatabaseReindex(\n manager: DatabaseMaintenanceManager,\n): Promise<RouteResponse> {\n const result = await manager.reindex();\n return { body: result };\n}\n\nexport async function handleDatabaseIntegrityCheck(\n manager: DatabaseMaintenanceManager,\n): Promise<RouteResponse> {\n const result = await manager.integrityCheck();\n return { body: result };\n}\n","/**\n * EmbeddingManager — orchestrates the embedding lifecycle.\n *\n * Coordinates three injected dependencies:\n * - VectorStore: stores/retrieves vectors (sync, sqlite-vec)\n * - ManagerEmbeddingProvider: generates vectors from text (async)\n * - EmbeddableRecordSource: queries record store for embeddable rows (sync)\n *\n * All write-path methods (onContentWritten, onStatusChanged, onRemoved) are\n * fire-and-forget safe — they catch and log errors, never throw.\n *\n * The reconcile() method is called by the reconcile worker on a timer.\n * Operations UI calls rebuildAll(), cleanOrphans(), getDetails().\n */\n\nimport { createHash } from 'node:crypto';\nimport { CONTENT_HASH_ALGORITHM, epochSeconds } from '@myco/constants.js';\nimport { LOG_KINDS } from '@myco/constants/log-kinds.js';\nimport type { Logger } from '../logger.js';\nimport {\n EMBEDDABLE_NAMESPACES,\n type EmbeddableNamespace,\n type DomainMetadata,\n type EmbeddingDetails,\n type ReconcileResult,\n type VectorStore,\n type VectorSearchResult,\n type ManagerEmbeddingProvider,\n type EmbeddableRecordSource,\n} from './types.js';\n\n// ---------------------------------------------------------------------------\n// Constants\n// ---------------------------------------------------------------------------\n\n/** Spore status that qualifies for embedding. 
/** Spore status that qualifies for embedding. */
const ACTIVE_STATUS = 'active';

// ---------------------------------------------------------------------------
// EmbeddingManager
// ---------------------------------------------------------------------------

export class EmbeddingManager {
  /**
   * @param vectorStore       - sync vector storage (sqlite-vec)
   * @param embeddingProvider - async text → vector provider; `embed()` returns
   *                            null when the provider is unavailable
   * @param recordSource      - sync record-store queries + embedded-flag writes
   * @param logger            - daemon logger
   */
  constructor(
    private vectorStore: VectorStore,
    private embeddingProvider: ManagerEmbeddingProvider,
    private recordSource: EmbeddableRecordSource,
    private logger: Logger,
  ) {}

  // -------------------------------------------------------------------------
  // Private helpers
  // -------------------------------------------------------------------------

  /** Hex digest of the text, used to detect content drift between record and vector. */
  private contentHash(text: string): string {
    return createHash(CONTENT_HASH_ALGORITHM).update(text).digest('hex');
  }

  // -------------------------------------------------------------------------
  // Write-path event handlers
  // -------------------------------------------------------------------------

  /**
   * Called when content is written (session note, spore, plan, artifact).
   * Embeds the text and stores the vector. Fire-and-forget safe.
   */
  async onContentWritten(
    namespace: EmbeddableNamespace,
    id: string,
    text: string,
    metadata: DomainMetadata,
  ): Promise<void> {
    try {
      const embedding = await this.embeddingProvider.embed(text);
      // null signals provider unavailability — the reconcile worker will pick
      // this row up later because markEmbedded() is never reached.
      if (embedding === null) {
        this.logger.warn(LOG_KINDS.EMBEDDING_PROVIDER, 'Provider unavailable, skipping embed', {
          namespace,
          id,
        });
        return;
      }

      const hash = this.contentHash(text);

      this.vectorStore.upsert(namespace, id, embedding, {
        model: this.embeddingProvider.model,
        provider: this.embeddingProvider.providerName,
        dimensions: this.embeddingProvider.dimensions,
        content_hash: hash,
        embedded_at: epochSeconds(),
        domain_metadata: metadata,
      });

      // Flag flip happens only after a successful upsert.
      this.recordSource.markEmbedded(namespace, id);

      this.logger.debug(LOG_KINDS.EMBEDDING_EMBED, 'Vector stored', { namespace, id });
    } catch (err) {
      // Fire-and-forget contract: log and swallow, never throw to the caller.
      this.logger.warn(LOG_KINDS.EMBEDDING_EMBED, 'Failed to embed content', {
        namespace,
        id,
        error: String(err),
      });
    }
  }

  /**
   * Called when a spore's status changes (e.g., superseded, archived).
   * Removes the vector for non-active statuses.
   */
  onStatusChanged(namespace: 'spores', id: string, status: string): void {
    try {
      // Returning to 'active' requires a fresh write; nothing to do here.
      if (status === ACTIVE_STATUS) return;

      this.vectorStore.remove(namespace, id);
      this.recordSource.clearEmbedded(namespace, id);

      this.logger.debug(LOG_KINDS.EMBEDDING_CLEANUP, 'Vector removed', {
        namespace,
        id,
        reason: `status=${status}`,
      });
    } catch (err) {
      this.logger.warn(LOG_KINDS.EMBEDDING_CLEANUP, 'Failed to remove vector on status change', {
        namespace,
        id,
        status,
        error: String(err),
      });
    }
  }

  /**
   * Called when a record is deleted. Removes the vector.
   * No clearEmbedded needed — the record itself is being deleted.
   */
  onRemoved(namespace: EmbeddableNamespace, id: string): void {
    try {
      this.vectorStore.remove(namespace, id);

      this.logger.debug(LOG_KINDS.EMBEDDING_CLEANUP, 'Vector removed', {
        namespace,
        id,
        reason: 'record deleted',
      });
    } catch (err) {
      this.logger.warn(LOG_KINDS.EMBEDDING_CLEANUP, 'Failed to remove vector on delete', {
        namespace,
        id,
        error: String(err),
      });
    }
  }

  // -------------------------------------------------------------------------
  // Reconciliation
  // -------------------------------------------------------------------------

  /**
   * Embed missing rows, re-embed stale vectors, and clean orphans across all namespaces.
   * Called by the reconcile worker on a timer.
   *
   * Returns partial progress (rather than throwing) as soon as the provider
   * reports unavailability, so a flaky provider never loses completed work.
   */
  async reconcile(batchSize: number): Promise<ReconcileResult> {
    const start = Date.now();
    let embedded = 0;
    let stale_reembedded = 0;
    let orphans_cleaned = 0;
    const currentModel = this.embeddingProvider.model;

    for (const namespace of EMBEDDABLE_NAMESPACES) {
      // Phase 1: Embed missing rows
      const rows = this.recordSource.getEmbeddableRows(namespace, batchSize);

      for (const row of rows) {
        const embedding = await this.embeddingProvider.embed(row.text);
        if (embedding === null) {
          this.logger.warn(LOG_KINDS.EMBEDDING_PROVIDER, 'Provider unavailable during reconcile, returning partial progress', {
            namespace,
            embedded,
          });
          return {
            embedded,
            stale_reembedded,
            orphans_cleaned,
            duration_ms: Date.now() - start,
          };
        }

        const hash = this.contentHash(row.text);

        this.vectorStore.upsert(namespace, row.id, embedding, {
          model: currentModel,
          provider: this.embeddingProvider.providerName,
          dimensions: this.embeddingProvider.dimensions,
          content_hash: hash,
          embedded_at: epochSeconds(),
          domain_metadata: row.metadata,
        });

        this.recordSource.markEmbedded(namespace, row.id);
        embedded++;
      }

      // Phase 2: Re-embed stale vectors (model mismatch)
      const staleIds = this.vectorStore.getStaleIds(namespace, currentModel, batchSize);
      if (staleIds.length > 0) {
        const records = this.recordSource.getRecordContent(namespace, staleIds);
        // Stale IDs with no matching record are orphans — collected below.
        const foundIds = new Set(records.map((r) => r.id));

        for (const record of records) {
          const embedding = await this.embeddingProvider.embed(record.text);
          if (embedding === null) {
            this.logger.warn(LOG_KINDS.EMBEDDING_PROVIDER, 'Provider unavailable during stale re-embed, returning partial progress', {
              namespace,
              stale_reembedded,
            });
            return {
              embedded,
              stale_reembedded,
              orphans_cleaned,
              duration_ms: Date.now() - start,
            };
          }

          this.vectorStore.upsert(namespace, record.id, embedding, {
            model: currentModel,
            provider: this.embeddingProvider.providerName,
            dimensions: this.embeddingProvider.dimensions,
            content_hash: this.contentHash(record.text),
            embedded_at: epochSeconds(),
            domain_metadata: record.metadata,
          });

          stale_reembedded++;
        }

        // Clean stale vectors whose source records no longer exist
        for (const staleId of staleIds) {
          if (!foundIds.has(staleId)) {
            this.vectorStore.remove(namespace, staleId);
            this.logger.warn(LOG_KINDS.EMBEDDING_CLEANUP, 'Stale orphan vector cleaned', {
              namespace,
              id: staleId,
            });
            orphans_cleaned++;
          }
        }
      }

      // Phase 3: Orphan sweep
      orphans_cleaned += this.sweepOrphans(namespace);
    }

    const duration_ms = Date.now() - start;

    // Only log when something actually happened — keeps idle cycles quiet.
    if (embedded > 0 || stale_reembedded > 0 || orphans_cleaned > 0) {
      this.logger.info(LOG_KINDS.EMBEDDING_RECONCILE, 'Reconcile cycle completed', {
        embedded,
        stale_reembedded,
        orphans_cleaned,
        duration_ms,
      });
    }

    return { embedded, stale_reembedded, orphans_cleaned, duration_ms };
  }

  /**
   * Remove orphan vectors (vectors without corresponding active records).
   */
  cleanOrphans(): { orphans_cleaned: number } {
    let orphans_cleaned = 0;
    for (const namespace of EMBEDDABLE_NAMESPACES) {
      orphans_cleaned += this.sweepOrphans(namespace);
    }
    return { orphans_cleaned };
  }

  // -------------------------------------------------------------------------
  // Operations
  // -------------------------------------------------------------------------

  /**
   * Clear all vectors and reset embedded flags.
   * The reconcile worker picks up all rows on subsequent cycles.
   */
  rebuildAll(): { queued: number } {
    const { cleared } = this.vectorStore.clear();
    this.recordSource.clearAllEmbedded();

    this.logger.info(LOG_KINDS.EMBEDDING_REBUILD, 'Rebuild started', { cleared });

    return { queued: cleared };
  }

  /**
   * Re-embed vectors that were created with a different model.
   * Returns partial progress if the provider becomes unavailable mid-batch.
   */
  async reembedStale(batchSize: number): Promise<{ reembedded: number }> {
    let reembedded = 0;
    const currentModel = this.embeddingProvider.model;

    for (const namespace of EMBEDDABLE_NAMESPACES) {
      const staleIds = this.vectorStore.getStaleIds(namespace, currentModel, batchSize);
      if (staleIds.length === 0) continue;

      const records = this.recordSource.getRecordContent(namespace, staleIds);

      for (const record of records) {
        const embedding = await this.embeddingProvider.embed(record.text);
        if (embedding === null) {
          this.logger.warn(LOG_KINDS.EMBEDDING_PROVIDER, 'Provider unavailable during re-embed', {
            namespace,
            reembedded,
          });
          return { reembedded };
        }

        const hash = this.contentHash(record.text);

        this.vectorStore.upsert(namespace, record.id, embedding, {
          model: currentModel,
          provider: this.embeddingProvider.providerName,
          dimensions: this.embeddingProvider.dimensions,
          content_hash: hash,
          embedded_at: epochSeconds(),
          domain_metadata: record.metadata,
        });

        reembedded++;
      }
    }

    return { reembedded };
  }

  /**
   * Get details for the operations UI: vector stats, pending counts, provider info.
   */
  getDetails(): EmbeddingDetails {
    const stats = this.vectorStore.stats();

    const pending: Record<string, number> = {};
    for (const namespace of EMBEDDABLE_NAMESPACES) {
      pending[namespace] = this.recordSource.getPendingCount(namespace);
    }

    return {
      ...stats,
      pending,
      provider: {
        name: this.embeddingProvider.providerName,
        model: this.embeddingProvider.model,
        available: true, // If we got here, the manager was constructed with a provider
      },
    };
  }

  /**
   * Pass-through for search handler — embed a query string.
   */
  async embedQuery(text: string): Promise<number[] | null> {
    return this.embeddingProvider.embed(text);
  }

  /**
   * Pass-through for search handler — similarity search via the vector store.
   * Keeps the VectorStore private to the manager.
   */
  searchVectors(query: number[], options?: {
    namespace?: string;
    limit?: number;
    threshold?: number;
    filters?: Record<string, unknown>;
  }): VectorSearchResult[] {
    return this.vectorStore.search(query, options);
  }

  /**
   * Compute pairwise cosine similarity between all vectors in a namespace.
   * Used by the evolve instruction builder to find semantically overlapping skills.
   */
  pairwiseSimilarity(
    namespace: string,
    threshold?: number,
  ): Array<{ idA: string; idB: string; similarity: number }> {
    return this.vectorStore.pairwiseSimilarity(namespace, threshold);
  }

  // -------------------------------------------------------------------------
  // Private helpers
  // -------------------------------------------------------------------------

  /**
   * Sweep orphan vectors for a single namespace. Returns count removed.
   *
   * Compares vector IDs against active record IDs — vectors without a matching
   * active record are removed. Does NOT short-circuit on count equality because
   * equal counts can mask orphans (e.g., 3 orphan vectors + 3 active records
   * missing vectors = same count, zero cleanup).
   */
  private sweepOrphans(namespace: EmbeddableNamespace): number {
    const embeddedIds = this.vectorStore.getEmbeddedIds(namespace);
    if (embeddedIds.length === 0) return 0;

    const activeIds = this.recordSource.getActiveRecordIds(namespace);
    // Set gives O(1) membership checks over the active-ID list.
    const activeSet = new Set(activeIds);
    let cleaned = 0;

    for (const vecId of embeddedIds) {
      if (!activeSet.has(vecId)) {
        this.vectorStore.remove(namespace, vecId);
        this.logger.warn(LOG_KINDS.EMBEDDING_CLEANUP, 'Orphan vector cleaned', {
          namespace,
          id: vecId,
        });
        cleaned++;
      }
    }

    return cleaned;
  }
}
/** Fallback model name when metadata omits it. */
const DEFAULT_META_MODEL = 'unknown';

/** Fallback provider name when metadata omits it. */
const DEFAULT_META_PROVIDER = 'unknown';

/** Fallback content hash when metadata omits it. */
const DEFAULT_META_CONTENT_HASH = '';

/** Metadata columns safe to filter on in search queries (prevents SQL injection via key names). */
const FILTERABLE_COLUMNS = new Set(['model', 'provider', 'namespace']);

/**
 * Convert cosine *distance* (0 = identical, 2 = opposite) to a similarity
 * score in [−1, 1]. Cosine distance = 1 − cosine_similarity.
 */
function cosineDistanceToSimilarity(distance: number): number {
  return 1 - distance;
}

// ---------------------------------------------------------------------------
// Schema DDL
// ---------------------------------------------------------------------------

// Composite primary key (namespace, record_id): one metadata row per vector.
const METADATA_TABLE = `
  CREATE TABLE IF NOT EXISTS embedding_metadata (
    namespace TEXT NOT NULL,
    record_id TEXT NOT NULL,
    model TEXT NOT NULL,
    provider TEXT NOT NULL,
    dimensions INTEGER NOT NULL,
    content_hash TEXT NOT NULL,
    embedded_at INTEGER NOT NULL,
    domain_metadata TEXT,
    PRIMARY KEY (namespace, record_id)
  )`;

// Supports the (namespace, model) lookups used to find stale vectors.
const METADATA_MODEL_INDEX = `
  CREATE INDEX IF NOT EXISTS idx_emb_meta_model
  ON embedding_metadata (namespace, model)`;

/** Build the DDL for a single vec0 virtual table. */
*/\nfunction vecTableDDL(namespace: EmbeddableNamespace): string {\n return `CREATE VIRTUAL TABLE IF NOT EXISTS vec_${namespace} USING vec0(\n record_id TEXT PRIMARY KEY,\n embedding float[${EMBEDDING_DIMENSIONS}] distance_metric=cosine\n )`;\n}\n\n// ---------------------------------------------------------------------------\n// Implementation\n// ---------------------------------------------------------------------------\n\nexport class SqliteVecVectorStore implements VectorStore {\n private db: DatabaseType;\n\n // Cached prepared statements (lazy-initialized per namespace)\n private deleteVecStmts = new Map<string, Statement>();\n private insertVecStmts = new Map<string, Statement>();\n private upsertMetaStmt!: Statement;\n private deleteMetaStmt!: Statement;\n private searchStmts = new Map<string, Statement>();\n private statsCountStmt!: Statement;\n private statsModelsStmt!: Statement;\n private staleIdsStmt!: Statement;\n private embeddedIdsStmt!: Statement;\n\n constructor(dbPath?: string) {\n this.db = new Database(dbPath ?? 
':memory:');\n sqliteVec.load(this.db);\n this.db.pragma('journal_mode = WAL');\n this.createSchema();\n this.prepareStatements();\n }\n\n // -------------------------------------------------------------------------\n // Schema\n // -------------------------------------------------------------------------\n\n private createSchema(): void {\n this.db.exec(METADATA_TABLE);\n this.db.exec(METADATA_MODEL_INDEX);\n for (const ns of EMBEDDABLE_NAMESPACES) {\n this.db.exec(vecTableDDL(ns));\n }\n }\n\n private prepareStatements(): void {\n this.upsertMetaStmt = this.db.prepare(`\n INSERT INTO embedding_metadata (namespace, record_id, model, provider, dimensions, content_hash, embedded_at, domain_metadata)\n VALUES (@namespace, @record_id, @model, @provider, @dimensions, @content_hash, @embedded_at, @domain_metadata)\n ON CONFLICT (namespace, record_id) DO UPDATE SET\n model = excluded.model,\n provider = excluded.provider,\n dimensions = excluded.dimensions,\n content_hash = excluded.content_hash,\n embedded_at = excluded.embedded_at,\n domain_metadata = excluded.domain_metadata\n `);\n\n this.deleteMetaStmt = this.db.prepare(\n `DELETE FROM embedding_metadata WHERE namespace = ? AND record_id = ?`\n );\n this.statsCountStmt = this.db.prepare(\n `SELECT COUNT(*) AS cnt FROM embedding_metadata WHERE namespace = ?`\n );\n this.statsModelsStmt = this.db.prepare(\n `SELECT model, COUNT(*) AS cnt FROM embedding_metadata WHERE namespace = ? GROUP BY model`\n );\n this.staleIdsStmt = this.db.prepare(\n `SELECT record_id FROM embedding_metadata WHERE namespace = ? AND model != ? 
LIMIT ?`\n );\n this.embeddedIdsStmt = this.db.prepare(\n `SELECT record_id FROM embedding_metadata WHERE namespace = ?`\n );\n\n // Per-namespace statements\n for (const ns of EMBEDDABLE_NAMESPACES) {\n this.deleteVecStmts.set(\n ns,\n this.db.prepare(`DELETE FROM vec_${ns} WHERE record_id = ?`)\n );\n this.insertVecStmts.set(\n ns,\n this.db.prepare(`INSERT INTO vec_${ns}(record_id, embedding) VALUES (?, ?)`)\n );\n this.searchStmts.set(\n ns,\n this.db.prepare(`\n SELECT v.record_id, v.distance,\n em.model, em.provider, em.content_hash, em.embedded_at, em.domain_metadata\n FROM vec_${ns} v\n LEFT JOIN embedding_metadata em\n ON em.namespace = '${ns}' AND em.record_id = v.record_id\n WHERE v.embedding MATCH ?\n AND k = ?\n ORDER BY v.distance\n `)\n );\n }\n }\n\n // -------------------------------------------------------------------------\n // VectorStore interface\n // -------------------------------------------------------------------------\n\n upsert(\n namespace: string,\n id: string,\n embedding: number[],\n metadata?: Record<string, unknown>,\n ): void {\n this.validateNamespace(namespace);\n const ns = namespace as EmbeddableNamespace;\n\n const vec = new Float32Array(embedding);\n\n const txn = this.db.transaction(() => {\n // Delete-then-insert for vec0 (INSERT OR REPLACE not fully supported)\n this.deleteVecStmts.get(ns)!.run(id);\n this.insertVecStmts.get(ns)!.run(id, vec);\n\n // Upsert metadata\n this.upsertMetaStmt.run({\n namespace: ns,\n record_id: id,\n model: (metadata?.['model'] as string) ?? DEFAULT_META_MODEL,\n provider: (metadata?.['provider'] as string) ?? DEFAULT_META_PROVIDER,\n dimensions: embedding.length,\n content_hash: (metadata?.['content_hash'] as string) ?? DEFAULT_META_CONTENT_HASH,\n embedded_at: (metadata?.['embedded_at'] as number) ?? Date.now(),\n domain_metadata: metadata?.['domain_metadata']\n ? 
JSON.stringify(metadata['domain_metadata'])\n : null,\n });\n });\n\n txn();\n }\n\n remove(namespace: string, id: string): void {\n this.validateNamespace(namespace);\n const ns = namespace as EmbeddableNamespace;\n\n const txn = this.db.transaction(() => {\n this.deleteVecStmts.get(ns)!.run(id);\n this.deleteMetaStmt.run(ns, id);\n });\n\n txn();\n }\n\n clear(namespace?: string): { cleared: number } {\n let cleared = 0;\n\n const targets = namespace\n ? [this.validatedNamespace(namespace)]\n : [...EMBEDDABLE_NAMESPACES];\n\n const txn = this.db.transaction(() => {\n for (const ns of targets) {\n // Count rows before clearing\n const countRow = this.db\n .prepare(`SELECT COUNT(*) as cnt FROM embedding_metadata WHERE namespace = ?`)\n .get(ns) as { cnt: number };\n cleared += countRow.cnt;\n\n // Delete all vectors in this namespace's vec table\n this.db.exec(`DELETE FROM vec_${ns}`);\n\n // Delete metadata for this namespace\n this.db\n .prepare(`DELETE FROM embedding_metadata WHERE namespace = ?`)\n .run(ns);\n }\n });\n\n txn();\n return { cleared };\n }\n\n /**\n * KNN similarity search across one or all namespaces.\n *\n * Threshold filtering is applied **post-KNN**: sqlite-vec returns the top-k\n * nearest neighbors first, then results below `threshold` are discarded.\n * This means fewer than `limit` results may be returned when a threshold is set.\n * This is standard KNN behavior, not a bug.\n */\n search(\n query: number[],\n options?: {\n namespace?: string;\n limit?: number;\n threshold?: number;\n filters?: Record<string, unknown>;\n },\n ): VectorSearchResult[] {\n const limit = options?.limit ?? DEFAULT_SEARCH_LIMIT;\n const threshold = options?.threshold ?? DEFAULT_SIMILARITY_THRESHOLD;\n const queryVec = new Float32Array(query);\n\n const targets = options?.namespace\n ? 
[this.validatedNamespace(options.namespace)]\n : [...EMBEDDABLE_NAMESPACES];\n\n const hasFilters = options?.filters && Object.keys(options.filters).length > 0;\n const results: VectorSearchResult[] = [];\n\n for (const ns of targets) {\n let rows: Array<Record<string, unknown>>;\n\n if (hasFilters) {\n // Build a filtered query that JOINs with embedding_metadata\n const { sql, params } = this.buildFilteredSearchQuery(\n ns,\n options!.filters!,\n limit,\n );\n const stmt = this.db.prepare(sql);\n rows = stmt.all(queryVec, limit, ...params) as Array<Record<string, unknown>>;\n } else {\n rows = this.searchStmts.get(ns)!.all(queryVec, limit) as Array<Record<string, unknown>>;\n }\n\n for (const row of rows) {\n const similarity = cosineDistanceToSimilarity(row.distance as number);\n if (similarity >= threshold) {\n results.push({\n id: row.record_id as string,\n namespace: ns,\n similarity,\n metadata: {\n model: row.model,\n provider: row.provider,\n content_hash: row.content_hash,\n embedded_at: row.embedded_at,\n ...(row.domain_metadata ? JSON.parse(row.domain_metadata as string) : {}),\n },\n });\n }\n }\n }\n\n // Sort by similarity DESC across all namespaces, then truncate to limit\n results.sort((a, b) => b.similarity - a.similarity);\n return results.slice(0, limit);\n }\n\n stats(namespace?: string): VectorStoreStats {\n const targets = namespace\n ? 
[this.validatedNamespace(namespace)]\n : [...EMBEDDABLE_NAMESPACES];\n\n let total = 0;\n const by_namespace: Record<string, { embedded: number; stale: number }> = {};\n const models: Record<string, number> = {};\n\n for (const ns of targets) {\n const countRow = this.statsCountStmt.get(ns) as { cnt: number };\n const modelRows = this.statsModelsStmt.all(ns) as Array<{ model: string; cnt: number }>;\n\n // \"stale\" = count of rows whose model is NOT the most common model.\n // Without knowing the \"current model\" (which stats() doesn't receive),\n // we approximate by treating the majority model as current.\n let stale = 0;\n let maxModelCount = 0;\n for (const mr of modelRows) {\n models[mr.model] = (models[mr.model] ?? 0) + mr.cnt;\n if (mr.cnt > maxModelCount) maxModelCount = mr.cnt;\n }\n stale = countRow.cnt - maxModelCount;\n if (stale < 0) stale = 0;\n\n by_namespace[ns] = { embedded: countRow.cnt, stale };\n total += countRow.cnt;\n }\n\n return { total, by_namespace, models };\n }\n\n getStaleIds(namespace: string, currentModel: string, limit: number): string[] {\n this.validateNamespace(namespace);\n const rows = this.staleIdsStmt.all(namespace, currentModel, limit) as Array<{ record_id: string }>;\n return rows.map((r) => r.record_id);\n }\n\n getEmbeddedIds(namespace: string): string[] {\n this.validateNamespace(namespace);\n const rows = this.embeddedIdsStmt.all(namespace) as Array<{ record_id: string }>;\n return rows.map((r) => r.record_id);\n }\n\n /**\n * Compute pairwise cosine similarity between all vectors in a namespace.\n * Returns pairs above the threshold, sorted by similarity DESC.\n *\n * Uses sqlite-vec's KNN search: for each vector, find the top-K nearest\n * neighbors within the same namespace. 
O(n * K) where K is small.\n */\n pairwiseSimilarity(\n namespace: string,\n threshold: number = 0.5,\n ): Array<{ idA: string; idB: string; similarity: number }> {\n this.validateNamespace(namespace);\n const ns = namespace as EmbeddableNamespace;\n\n // Get all record IDs and their vectors\n const allRows = this.db.prepare(\n `SELECT record_id, embedding FROM vec_${ns}`,\n ).all() as Array<{ record_id: string; embedding: Buffer }>;\n\n if (allRows.length < 2) return [];\n\n const pairs: Array<{ idA: string; idB: string; similarity: number }> = [];\n const seen = new Set<string>();\n\n // For each vector, search for similar ones in the same namespace\n const searchStmt = this.searchStmts.get(ns)!;\n for (const row of allRows) {\n const results = searchStmt.all(\n row.embedding, // Use the raw embedding as the query vector\n allRows.length, // K = all rows to get exhaustive comparison\n ) as Array<{ record_id: string; distance: number }>;\n\n for (const match of results) {\n if (match.record_id === row.record_id) continue; // skip self\n const pairKey = [row.record_id, match.record_id].sort().join('|');\n if (seen.has(pairKey)) continue;\n seen.add(pairKey);\n\n const similarity = cosineDistanceToSimilarity(match.distance);\n if (similarity >= threshold) {\n pairs.push({\n idA: row.record_id,\n idB: match.record_id,\n similarity: Math.round(similarity * 1000) / 1000,\n });\n }\n }\n }\n\n pairs.sort((a, b) => b.similarity - a.similarity);\n return pairs;\n }\n\n // -------------------------------------------------------------------------\n // Lifecycle\n // -------------------------------------------------------------------------\n\n close(): void {\n this.db.close();\n }\n\n // -------------------------------------------------------------------------\n // Private helpers\n // -------------------------------------------------------------------------\n\n private validateNamespace(namespace: string): void {\n if (!(EMBEDDABLE_NAMESPACES as readonly 
string[]).includes(namespace)) {\n throw new Error(\n `Invalid namespace \"${namespace}\". Must be one of: ${EMBEDDABLE_NAMESPACES.join(', ')}`,\n );\n }\n }\n\n private validatedNamespace(namespace: string): EmbeddableNamespace {\n this.validateNamespace(namespace);\n return namespace as EmbeddableNamespace;\n }\n\n /**\n * Build a filtered KNN query that JOINs vec results with embedding_metadata.\n * Filters are applied as WHERE conditions on the metadata table.\n */\n private buildFilteredSearchQuery(\n namespace: EmbeddableNamespace,\n filters: Record<string, unknown>,\n _limit: number,\n ): { sql: string; params: unknown[] } {\n const conditions: string[] = [];\n const params: unknown[] = [];\n\n for (const [key, value] of Object.entries(filters)) {\n if (FILTERABLE_COLUMNS.has(key)) {\n conditions.push(`em.${key} = ?`);\n params.push(value);\n }\n }\n\n const whereClause =\n conditions.length > 0 ? `WHERE ${conditions.join(' AND ')}` : '';\n\n const sql = `\n WITH knn AS (\n SELECT record_id, distance\n FROM vec_${namespace}\n WHERE embedding MATCH ?\n AND k = ?\n ORDER BY distance\n )\n SELECT knn.record_id, knn.distance,\n em.model, em.provider, em.content_hash, em.embedded_at, em.domain_metadata\n FROM knn\n INNER JOIN embedding_metadata em\n ON em.namespace = '${namespace}' AND em.record_id = knn.record_id\n ${whereClause}\n `;\n\n return { sql, params };\n }\n}\n","import type { EmbeddingProvider, EmbeddingResponse } from './llm.js';\n\nexport async function generateEmbedding(\n backend: EmbeddingProvider,\n text: string,\n): Promise<EmbeddingResponse> {\n const raw = await backend.embed(text);\n return {\n embedding: normalize(raw.embedding),\n model: raw.model,\n dimensions: raw.dimensions,\n };\n}\n\nfunction normalize(vec: number[]): number[] {\n const magnitude = Math.sqrt(vec.reduce((sum, v) => sum + v * v, 0));\n if (magnitude === 0) return vec;\n return vec.map((v) => v / magnitude);\n}\n","/**\n * Adapter wrapping the existing EmbeddingProvider 
interface into the\n * ManagerEmbeddingProvider contract used by EmbeddingManager.\n *\n * Uses `generateEmbedding` (L2-normalised) rather than raw `provider.embed()`\n * so vectors are always unit-length before storage.\n */\n\nimport type { EmbeddingProvider } from '@myco/intelligence/llm.js';\nimport type { EmbeddingProviderConfig } from '@myco/config/schema.js';\nimport { generateEmbedding } from '@myco/intelligence/embeddings.js';\nimport { EMBEDDING_DIMENSIONS } from '@myco/db/schema.js';\nimport type { ManagerEmbeddingProvider } from './types.js';\n\n/** TTL for cached availability check (ms). Avoids HTTP probe on every embed(). */\nconst AVAILABILITY_CACHE_TTL_MS = 5_000;\n\n/** Ollama default tag — untagged model names implicitly resolve to :latest. */\nconst OLLAMA_DEFAULT_TAG = ':latest';\n\n/** Providers that use Docker-style model tags where untagged means :latest. */\nconst TAGGED_PROVIDERS = new Set(['ollama']);\n\n/**\n * Normalize model string for consistent storage. Ollama treats 'bge-m3' and\n * 'bge-m3:latest' as the same model — normalize to include the tag so vector\n * metadata comparisons don't produce false stale counts.\n */\nfunction normalizeModelName(model: string, provider: string): string {\n if (TAGGED_PROVIDERS.has(provider) && !model.includes(':')) {\n return model + OLLAMA_DEFAULT_TAG;\n }\n return model;\n}\n\nexport class EmbeddingProviderAdapter implements ManagerEmbeddingProvider {\n readonly model: string;\n readonly providerName: string;\n readonly dimensions: number;\n\n /** Cached availability state to avoid per-embed HTTP probes. 
*/\n private cachedAvailable: boolean | null = null;\n private cachedAvailableAt = 0;\n\n constructor(\n private provider: EmbeddingProvider,\n config: EmbeddingProviderConfig,\n ) {\n this.model = normalizeModelName(config.model, config.provider);\n this.providerName = config.provider;\n this.dimensions = EMBEDDING_DIMENSIONS;\n }\n\n async embed(text: string): Promise<number[] | null> {\n try {\n const isUp = await this.checkAvailability();\n if (!isUp) return null;\n const result = await generateEmbedding(this.provider, text);\n return result.embedding;\n } catch {\n // Provider went down mid-embed — invalidate cache\n this.cachedAvailable = null;\n return null;\n }\n }\n\n /** Check availability with a short TTL cache to avoid HTTP probes on every call. */\n private async checkAvailability(): Promise<boolean> {\n const now = Date.now();\n if (this.cachedAvailable !== null && (now - this.cachedAvailableAt) < AVAILABILITY_CACHE_TTL_MS) {\n return this.cachedAvailable;\n }\n this.cachedAvailable = await this.provider.isAvailable();\n this.cachedAvailableAt = now;\n return this.cachedAvailable;\n }\n}\n","/**\n * SqliteRecordSource — queries the record store for rows that need embedding.\n *\n * Delegates to existing helpers from `@myco/db/queries/embeddings.js` where\n * possible (markEmbedded, clearEmbedded, getUnembedded). 
Custom queries are\n * used for spore status filtering, metadata enrichment, and content retrieval.\n */\n\nimport { getDatabase } from '@myco/db/client.js';\nimport {\n markEmbedded as dbMarkEmbedded,\n clearEmbedded as dbClearEmbedded,\n getUnembedded,\n assertValidTable as assertValidNamespace,\n EMBEDDABLE_TABLES,\n EMBEDDABLE_TEXT_COLUMNS,\n type EmbeddableTable,\n} from '@myco/db/queries/embeddings.js';\nimport type { DomainMetadata, EmbeddableRecordSource } from '@myco/daemon/embedding/types.js';\n\n// ---------------------------------------------------------------------------\n// Constants\n// ---------------------------------------------------------------------------\n\n/** Spore status that qualifies for embedding. */\nconst ACTIVE_STATUS = 'active';\n\n/** Build metadata for a session row. */\nfunction sessionMetadata(row: Record<string, unknown>): DomainMetadata {\n return {\n ...(row.project_root != null ? { project_root: row.project_root as string } : {}),\n };\n}\n\n/** Build metadata for a spore row. */\nfunction sporeMetadata(row: Record<string, unknown>): DomainMetadata {\n return {\n ...(row.status != null ? { status: row.status as string } : {}),\n ...(row.session_id != null ? { session_id: row.session_id as string } : {}),\n ...(row.observation_type != null ? { observation_type: row.observation_type as string } : {}),\n };\n}\n\n/** Build metadata for an artifact row — empty. */\nfunction emptyMetadata(): DomainMetadata {\n return {};\n}\n\n/** Build metadata for a plan row. */\nfunction planMetadata(row: Record<string, unknown>): DomainMetadata {\n return {\n ...(row.session_id != null ? { session_id: row.session_id as string } : {}),\n ...(row.source_path != null ? { source_path: row.source_path as string } : {}),\n };\n}\n\n/** Build metadata for a skill_records row. */\nfunction skillRecordMetadata(row: Record<string, unknown>): DomainMetadata {\n return {\n ...(row.status != null ? 
{ status: row.status as string } : {}),\n ...(row.name != null ? { name: row.name as string } : {}),\n };\n}\n\n/** Get the metadata builder for a given namespace. */\nfunction metadataFor(namespace: EmbeddableTable, row: Record<string, unknown>): DomainMetadata {\n switch (namespace) {\n case 'sessions':\n return sessionMetadata(row);\n case 'spores':\n return sporeMetadata(row);\n case 'plans':\n return planMetadata(row);\n case 'artifacts':\n return emptyMetadata();\n case 'skill_records':\n return skillRecordMetadata(row);\n }\n}\n\n// ---------------------------------------------------------------------------\n// SqliteRecordSource\n// ---------------------------------------------------------------------------\n\nexport class SqliteRecordSource implements EmbeddableRecordSource {\n /**\n * Get rows that need embedding (embedded=0, content non-null).\n *\n * For spores: additionally filters WHERE status = 'active'.\n * For sessions: delegates to getUnembedded (which filters summary IS NOT NULL).\n */\n getEmbeddableRows(namespace: string, limit: number): Array<{\n id: string;\n text: string;\n metadata: DomainMetadata;\n }> {\n assertValidNamespace(namespace);\n\n if (namespace === 'spores') {\n return this.getUnembeddedActiveSpores(limit);\n }\n\n if (namespace === 'skill_records') {\n return this.getUnembeddedActiveSkillRecords(limit);\n }\n\n // For sessions/plans/artifacts: delegate to getUnembedded, then enrich with metadata\n const rows = getUnembedded(namespace, limit);\n const db = getDatabase();\n return rows.map((row) => {\n const fullRow = db.prepare(`SELECT * FROM ${namespace} WHERE id = ?`).get(row.id) as Record<string, unknown>;\n return {\n id: String(row.id),\n text: row.text,\n metadata: metadataFor(namespace as EmbeddableTable, fullRow),\n };\n });\n }\n\n /**\n * Get IDs of all records that should have embeddings.\n *\n * - sessions: those with a non-null summary\n * - spores: those with status = 'active'\n * - plans/artifacts: those with 
non-null content\n */\n getActiveRecordIds(namespace: string): string[] {\n assertValidNamespace(namespace);\n const db = getDatabase();\n\n switch (namespace) {\n case 'sessions': {\n const rows = db.prepare(\n `SELECT id FROM sessions WHERE summary IS NOT NULL`,\n ).all() as Array<{ id: string }>;\n return rows.map((r) => r.id);\n }\n case 'spores': {\n const rows = db.prepare(\n `SELECT id FROM spores WHERE status = ?`,\n ).all(ACTIVE_STATUS) as Array<{ id: string }>;\n return rows.map((r) => r.id);\n }\n case 'plans': {\n const rows = db.prepare(\n `SELECT id FROM plans WHERE content IS NOT NULL`,\n ).all() as Array<{ id: string }>;\n return rows.map((r) => r.id);\n }\n case 'artifacts': {\n const rows = db.prepare(\n `SELECT id FROM artifacts WHERE content IS NOT NULL`,\n ).all() as Array<{ id: string }>;\n return rows.map((r) => r.id);\n }\n case 'skill_records': {\n const rows = db.prepare(\n `SELECT id FROM skill_records WHERE status = ?`,\n ).all(ACTIVE_STATUS) as Array<{ id: string }>;\n return rows.map((r) => r.id);\n }\n }\n }\n\n /**\n * Fetch content + metadata for specific record IDs.\n *\n * Returns same shape as getEmbeddableRows but for specific records.\n * Empty ids array returns empty result.\n */\n getRecordContent(namespace: string, ids: string[]): Array<{\n id: string;\n text: string;\n metadata: DomainMetadata;\n }> {\n assertValidNamespace(namespace);\n\n if (ids.length === 0) return [];\n\n const db = getDatabase();\n const textCol = EMBEDDABLE_TEXT_COLUMNS[namespace as EmbeddableTable];\n const placeholders = ids.map(() => '?').join(', ');\n\n const rows = db.prepare(\n `SELECT *, ${textCol} AS text FROM ${namespace} WHERE id IN (${placeholders})`,\n ).all(...ids) as Array<Record<string, unknown>>;\n\n return rows.map((row) => ({\n id: String(row.id),\n text: row.text as string,\n metadata: metadataFor(namespace as EmbeddableTable, row),\n }));\n }\n\n /** Mark a record as embedded. Delegates to existing helper. 
*/\n markEmbedded(namespace: string, id: string): void {\n dbMarkEmbedded(namespace, id);\n }\n\n /** Clear the embedded flag on a record. Delegates to existing helper. */\n clearEmbedded(namespace: string, id: string): void {\n dbClearEmbedded(namespace, id);\n }\n\n /**\n * Clear the embedded flag on all records, optionally scoped to a namespace.\n *\n * If namespace is omitted, clears all embeddable tables.\n */\n clearAllEmbedded(namespace?: string): void {\n const db = getDatabase();\n\n if (namespace !== undefined) {\n assertValidNamespace(namespace);\n db.prepare(`UPDATE ${namespace} SET embedded = 0`).run();\n return;\n }\n\n for (const table of EMBEDDABLE_TABLES) {\n db.prepare(`UPDATE ${table} SET embedded = 0`).run();\n }\n }\n\n /**\n * Count rows that need embedding — lightweight SELECT COUNT(*), no row materialization.\n */\n getPendingCount(namespace: string): number {\n assertValidNamespace(namespace);\n const db = getDatabase();\n\n const contentFilter = namespace === 'sessions' ? ' AND summary IS NOT NULL' : '';\n const statusFilter = (namespace === 'spores' || namespace === 'skill_records') ? ` AND status = '${ACTIVE_STATUS}'` : '';\n\n const row = db.prepare(\n `SELECT COUNT(*) AS cnt FROM ${namespace} WHERE embedded = 0${contentFilter}${statusFilter}`,\n ).get() as { cnt: number };\n\n return Number(row.cnt);\n }\n\n // ---------------------------------------------------------------------------\n // Private helpers\n // ---------------------------------------------------------------------------\n\n /** Custom query for spores: embedded=0 AND status='active'. 
*/\n private getUnembeddedActiveSpores(limit: number): Array<{\n id: string;\n text: string;\n metadata: DomainMetadata;\n }> {\n const db = getDatabase();\n const rows = db.prepare(\n `SELECT id, content AS text, status, session_id, observation_type\n FROM spores\n WHERE embedded = 0 AND status = ?\n ORDER BY created_at ASC\n LIMIT ?`,\n ).all(ACTIVE_STATUS, limit) as Array<Record<string, unknown>>;\n\n return rows.map((row) => ({\n id: String(row.id),\n text: row.text as string,\n metadata: sporeMetadata(row),\n }));\n }\n\n /** Custom query for skill_records: embedded=0 AND status='active'. */\n private getUnembeddedActiveSkillRecords(limit: number): Array<{\n id: string;\n text: string;\n metadata: DomainMetadata;\n }> {\n const db = getDatabase();\n const rows = db.prepare(\n `SELECT id, description AS text, status, name\n FROM skill_records\n WHERE embedded = 0 AND status = ?\n ORDER BY created_at ASC\n LIMIT ?`,\n ).all(ACTIVE_STATUS, limit) as Array<Record<string, unknown>>;\n\n return rows.map((row) => ({\n id: String(row.id),\n text: row.text as string,\n metadata: skillRecordMetadata(row),\n }));\n }\n}\n","import fs from 'node:fs';\nimport { LOG_KINDS } from '@myco/constants/log-kinds.js';\nimport {\n getDatabaseFileStats,\n getTablesBreakdown,\n getIndexesList,\n getSchemaInfo,\n getLastDatabaseLogTimestamp,\n getLastDatabaseLogTimestamps,\n runVacuum,\n runAnalyze,\n runReindex,\n runIntegrityCheck,\n runForeignKeyCheck,\n runWalCheckpointTruncate,\n runPragmaOptimize,\n runFtsOptimize,\n listFtsTableNames,\n} from '@myco/db/queries/database.js';\nimport type { Logger } from '../logger.js';\nimport {\n type DatabaseDetails,\n type OptimizeAction,\n type OptimizeResult,\n type VacuumResult,\n type ReindexResult,\n type IntegrityResult,\n VacuumPrecheckError,\n} from './types.js';\n\nconst VACUUM_FREE_SPACE_MULTIPLIER = 2;\n\nexport class DatabaseMaintenanceManager {\n constructor(\n private dbPath: string,\n private vaultDir: string,\n private logger: 
Logger,\n ) {}\n\n async getDetails(): Promise<DatabaseDetails> {\n const file = getDatabaseFileStats(this.dbPath);\n const schema = getSchemaInfo();\n const tables = getTablesBreakdown();\n const indexes = getIndexesList();\n\n // Batch the four last-run lookups into a single log_entries query.\n const lastRuns = getLastDatabaseLogTimestamps([\n LOG_KINDS.DATABASE_OPTIMIZE,\n LOG_KINDS.DATABASE_VACUUM,\n LOG_KINDS.DATABASE_INTEGRITY_CHECK,\n LOG_KINDS.DATABASE_INTEGRITY_ISSUES,\n ]);\n const optimizeMs = lastRuns.get(LOG_KINDS.DATABASE_OPTIMIZE) ?? null;\n const vacuumMs = lastRuns.get(LOG_KINDS.DATABASE_VACUUM) ?? null;\n const integrityOkMs = lastRuns.get(LOG_KINDS.DATABASE_INTEGRITY_CHECK) ?? null;\n const integrityIssuesMs = lastRuns.get(LOG_KINDS.DATABASE_INTEGRITY_ISSUES) ?? null;\n\n let last_integrity_check: { at: string; status: 'ok' | 'issues' } | null = null;\n if (integrityOkMs !== null || integrityIssuesMs !== null) {\n const okMs = integrityOkMs ?? 0;\n const issuesMs = integrityIssuesMs ?? 0;\n if (okMs >= issuesMs) {\n last_integrity_check = { at: new Date(okMs).toISOString(), status: 'ok' };\n } else {\n last_integrity_check = { at: new Date(issuesMs).toISOString(), status: 'issues' };\n }\n }\n\n return {\n file,\n schema,\n tables,\n indexes,\n last_optimize_at: optimizeMs ? new Date(optimizeMs).toISOString() : null,\n last_vacuum_at: vacuumMs ? 
new Date(vacuumMs).toISOString() : null,\n last_integrity_check,\n };\n }\n\n async getLastOptimizeAt(): Promise<number | null> {\n return getLastDatabaseLogTimestamp(LOG_KINDS.DATABASE_OPTIMIZE);\n }\n\n async optimize(): Promise<OptimizeResult> {\n const startedAt = Date.now();\n const completed: OptimizeAction[] = [];\n const failed: OptimizeAction[] = [];\n\n const steps: Array<{ name: string; fn: () => void }> = [\n { name: 'analyze', fn: runAnalyze },\n { name: 'pragma_optimize', fn: runPragmaOptimize },\n ...listFtsTableNames().map((tbl) => ({\n name: 'fts_optimize:' + tbl,\n fn: () => runFtsOptimize(tbl),\n })),\n // wal_checkpoint_truncate returns WalCheckpointResult; if busy !== 0 another\n // reader blocked the checkpoint — log a warning but don't fail the step.\n {\n name: 'wal_checkpoint_truncate',\n fn: () => {\n const result = runWalCheckpointTruncate();\n if (result.busy !== 0) {\n this.logger.warn(LOG_KINDS.DATABASE_ERROR, 'wal_checkpoint blocked by reader', {\n busy: result.busy,\n log: result.log,\n checkpointed: result.checkpointed,\n });\n }\n },\n },\n ];\n\n for (const step of steps) {\n const stepStart = Date.now();\n try {\n step.fn();\n completed.push({ name: step.name, duration_ms: Date.now() - stepStart, ok: true });\n } catch (err) {\n const error = (err as Error).message;\n failed.push({ name: step.name, duration_ms: Date.now() - stepStart, ok: false, error });\n this.logger.warn(LOG_KINDS.DATABASE_ERROR, 'optimize step failed: ' + step.name, { error });\n }\n }\n\n const duration_ms = Date.now() - startedAt;\n this.logger.info(LOG_KINDS.DATABASE_OPTIMIZE, 'Database optimize complete', {\n completed: completed.length,\n failed: failed.length,\n duration_ms,\n });\n\n return { actions_completed: completed, actions_failed: failed, duration_ms };\n }\n\n async vacuum(): Promise<VacuumResult> {\n const size_before = this.fileSize();\n\n // Disk precheck — VACUUM rebuilds the DB into a temp file before swapping.\n // If the disk is too full 
the user can be left in a broken state, so refuse.\n const stats = await fs.promises.statfs(this.vaultDir);\n const free_bytes = Number(stats.bavail) * Number(stats.bsize);\n const required_bytes = size_before * VACUUM_FREE_SPACE_MULTIPLIER;\n if (free_bytes < required_bytes) {\n throw new VacuumPrecheckError(required_bytes, free_bytes);\n }\n\n const startedAt = Date.now();\n runVacuum();\n const duration_ms = Date.now() - startedAt;\n const size_after = this.fileSize();\n const freed_bytes = size_before - size_after;\n\n this.logger.info(LOG_KINDS.DATABASE_VACUUM, 'Database vacuum complete', {\n size_before,\n size_after,\n freed_bytes,\n duration_ms,\n });\n\n return { size_before, size_after, freed_bytes, duration_ms };\n }\n\n async reindex(): Promise<ReindexResult> {\n const startedAt = Date.now();\n runReindex();\n const duration_ms = Date.now() - startedAt;\n\n this.logger.info(LOG_KINDS.DATABASE_REINDEX, 'Database reindex complete', { duration_ms });\n\n return { duration_ms };\n }\n\n async integrityCheck(): Promise<IntegrityResult> {\n const startedAt = Date.now();\n const integrity = runIntegrityCheck();\n const fkViolations = runForeignKeyCheck();\n const duration_ms = Date.now() - startedAt;\n const status = integrity.status === 'ok' && fkViolations.length === 0 ? 'ok' : 'issues';\n\n // Use distinct kinds based on outcome so the stored history preserves\n // status info; getDetails() reads the more recent of the two kinds to\n // determine last_integrity_check.status.\n const logKind = status === 'ok'\n ? 
LOG_KINDS.DATABASE_INTEGRITY_CHECK\n : LOG_KINDS.DATABASE_INTEGRITY_ISSUES;\n this.logger.info(logKind, 'Database integrity check complete', {\n status,\n issue_count: integrity.issues.length,\n fk_violations: fkViolations.length,\n duration_ms,\n });\n\n return {\n status,\n issues: integrity.issues,\n fk_violations: fkViolations.length,\n duration_ms,\n };\n }\n\n private fileSize(): number {\n try {\n return fs.statSync(this.dbPath).size;\n } catch {\n return 0;\n }\n }\n}\n","/**\n * Database introspection and maintenance queries.\n *\n * Read side: file stats, schema/index metadata, log lookup.\n * Write side: VACUUM, ANALYZE, REINDEX, integrity_check, wal_checkpoint, FTS optimize.\n */\n\nimport fs from 'node:fs';\nimport { getDatabase } from '@myco/db/client.js';\nimport { SCHEMA_VERSION } from '@myco/db/schema.js';\n\n// ---------------------------------------------------------------------------\n// Types\n// ---------------------------------------------------------------------------\n\nexport interface DatabaseFileStats {\n path: string;\n size_bytes: number;\n wal_size_bytes: number;\n page_size: number;\n page_count: number;\n freelist_count: number;\n fragmentation_pct: number;\n}\n\nexport interface TableBreakdownRow {\n name: string;\n rows: number;\n index_count: number;\n is_fts: boolean;\n}\n\nexport interface IndexInfo {\n name: string;\n table: string;\n type: 'btree' | 'auto';\n sql: string | null;\n}\n\nexport interface SchemaInfo {\n version: number;\n journal_mode: string;\n foreign_keys: boolean;\n}\n\n// ---------------------------------------------------------------------------\n// Internal helpers\n// ---------------------------------------------------------------------------\n\nfunction pragmaScalar<T>(name: string): T {\n const db = getDatabase();\n return db.pragma(name, { simple: true }) as T;\n}\n\nfunction safeFileSize(filePath: string): number {\n try {\n return fs.statSync(filePath).size;\n } catch (err) {\n if ((err as 
NodeJS.ErrnoException).code === 'ENOENT') return 0;\n throw err;\n }\n}\n\n// ---------------------------------------------------------------------------\n// Queries\n// ---------------------------------------------------------------------------\n\nexport function getDatabaseFileStats(dbPath: string): DatabaseFileStats {\n const size_bytes = safeFileSize(dbPath);\n const wal_size_bytes = safeFileSize(dbPath + '-wal');\n\n const page_size = Number(pragmaScalar<number>('page_size'));\n const page_count = Number(pragmaScalar<number>('page_count'));\n const freelist_count = Number(pragmaScalar<number>('freelist_count'));\n const fragmentation_pct = page_count > 0 ? (freelist_count / page_count) * 100 : 0;\n\n return {\n path: dbPath,\n size_bytes,\n wal_size_bytes,\n page_size,\n page_count,\n freelist_count,\n fragmentation_pct,\n };\n}\n\n/**\n * Double-quote-escape a SQL identifier (table/column name). SQLite accepts\n * `\"\"` as an escaped double-quote inside a quoted identifier. Names that\n * come from `sqlite_master` never contain double quotes in practice, but\n * escape defensively so the helper is safe for any caller.\n */\nfunction quoteIdent(name: string): string {\n return '\"' + name.replace(/\"/g, '\"\"') + '\"';\n}\n\nexport function getTablesBreakdown(): TableBreakdownRow[] {\n const db = getDatabase();\n\n // List all user tables (exclude sqlite_* internal)\n const tableRows = db.prepare(\n \"SELECT name, sql FROM sqlite_master WHERE type='table' AND name NOT LIKE 'sqlite_%' ORDER BY name\",\n ).all() as Array<{ name: string; sql: string | null }>;\n\n // FTS5 shadow tables (e.g. sessions_fts_data, sessions_fts_idx, sessions_fts_docsize,\n // sessions_fts_config) are auto-created by SQLite with DDL that single-quotes the name\n // (CREATE TABLE 'sessions_fts_data' ...). User-created tables never quote with single\n // quotes, so filter by that marker to hide them from the UI breakdown.\n const userTableRows = tableRows.filter((row) => !(row.sql ?? 
'').startsWith(\"CREATE TABLE '\"));\n\n // Index counts grouped by table (single query covering all tables).\n const indexCountRows = db.prepare(\n \"SELECT tbl_name, COUNT(*) AS cnt FROM sqlite_master WHERE type='index' GROUP BY tbl_name\",\n ).all() as Array<{ tbl_name: string; cnt: number }>;\n const indexCountByTable = new Map(indexCountRows.map((r) => [r.tbl_name, Number(r.cnt)]));\n\n // Batch all per-table COUNT(*) into one UNION ALL query instead of N round-trips.\n // SAFE: table names come from sqlite_master, not user input. Names are\n // double-quoted for the FROM clause and positional parameters are used for\n // the table-name output column so the query can be read back by index.\n const countsByTable = new Map<string, number>();\n if (userTableRows.length > 0) {\n const unionSql = userTableRows\n .map((_, i) => `SELECT ? AS t, COUNT(*) AS c FROM ${quoteIdent(userTableRows[i].name)}`)\n .join(' UNION ALL ');\n const params = userTableRows.map((row) => row.name);\n try {\n const rows = db.prepare(unionSql).all(...params) as Array<{ t: string; c: number }>;\n for (const r of rows) {\n countsByTable.set(r.t, Number(r.c ?? 0));\n }\n } catch {\n // Fallback: if the batched query fails for any reason (e.g. one of the\n // tables was dropped mid-query), fall back to per-table counts so the\n // breakdown still reports something reasonable. Rare in practice.\n for (const row of userTableRows) {\n try {\n const r = db.prepare(`SELECT COUNT(*) AS c FROM ${quoteIdent(row.name)}`).get() as { c: number };\n countsByTable.set(row.name, Number(r.c ?? 0));\n } catch {\n countsByTable.set(row.name, 0);\n }\n }\n }\n }\n\n return userTableRows.map((row) => ({\n name: row.name,\n rows: countsByTable.get(row.name) ?? 0,\n index_count: indexCountByTable.get(row.name) ?? 0,\n is_fts: (row.sql ?? 
'').toLowerCase().includes('fts5'),\n }));\n}\n\nexport function getIndexesList(): IndexInfo[] {\n const db = getDatabase();\n const rows = db.prepare(\n \"SELECT name, tbl_name, sql FROM sqlite_master WHERE type='index' ORDER BY tbl_name, name\",\n ).all() as Array<{ name: string; tbl_name: string; sql: string | null }>;\n\n return rows.map((r) => {\n const type: 'btree' | 'auto' = r.name.startsWith('sqlite_autoindex_') ? 'auto' : 'btree';\n return {\n name: r.name,\n table: r.tbl_name,\n type,\n sql: r.sql,\n };\n });\n}\n\nexport function getSchemaInfo(): SchemaInfo {\n const journal_mode = String(pragmaScalar<string>('journal_mode'));\n const foreign_keys_raw = pragmaScalar<number | string>('foreign_keys');\n const foreign_keys = Number(foreign_keys_raw) === 1;\n return {\n version: SCHEMA_VERSION,\n journal_mode,\n foreign_keys,\n };\n}\n\nexport function getLastDatabaseLogTimestamp(kind: string): number | null {\n const db = getDatabase();\n const row = db.prepare(\n 'SELECT timestamp FROM log_entries WHERE kind = ? ORDER BY id DESC LIMIT 1',\n ).get(kind) as { timestamp: string } | undefined;\n if (!row) return null;\n const t = new Date(row.timestamp).getTime();\n return Number.isFinite(t) ? t : null;\n}\n\n/**\n * Batched variant: look up the most recent `timestamp` for each of the given\n * kinds in a single query. Missing kinds map to `null`. 
Used by\n * `DatabaseMaintenanceManager.getDetails()` to fetch optimize/vacuum/integrity\n * timestamps in one round-trip instead of N.\n */\nexport function getLastDatabaseLogTimestamps(kinds: string[]): Map<string, number | null> {\n const result = new Map<string, number | null>();\n for (const k of kinds) result.set(k, null);\n if (kinds.length === 0) return result;\n\n const db = getDatabase();\n const placeholders = kinds.map(() => '?').join(',');\n const rows = db.prepare(\n `SELECT kind, MAX(timestamp) AS latest FROM log_entries WHERE kind IN (${placeholders}) GROUP BY kind`,\n ).all(...kinds) as Array<{ kind: string; latest: string | null }>;\n\n for (const row of rows) {\n if (!row.latest) continue;\n const t = new Date(row.latest).getTime();\n result.set(row.kind, Number.isFinite(t) ? t : null);\n }\n return result;\n}\n\n// -----------------------------------------------------------------------------\n// Maintenance operations\n//\n// Convention: imperative commands use the `run*` prefix to distinguish them\n// from the CRUD-style queries above (`getX`, `listX`). 
These functions execute\n// SQL commands that modify database state or schema metadata rather than\n// returning rows from user tables.\n// -----------------------------------------------------------------------------\n\nexport interface IntegrityCheckResult {\n status: 'ok' | 'issues';\n issues: string[];\n}\n\nexport interface ForeignKeyViolation {\n table: string;\n rowid: number;\n parent: string;\n fkid: number;\n}\n\nexport interface WalCheckpointResult {\n busy: number; // 0 = succeeded, 1 = blocked by reader\n log: number; // total WAL frames at start\n checkpointed: number; // frames actually checkpointed\n}\n\nexport function runVacuum(): void {\n const db = getDatabase();\n db.exec('VACUUM');\n}\n\nexport function runAnalyze(): void {\n const db = getDatabase();\n db.exec('ANALYZE');\n}\n\nexport function runReindex(): void {\n const db = getDatabase();\n db.exec('REINDEX');\n}\n\nexport function runIntegrityCheck(): IntegrityCheckResult {\n const db = getDatabase();\n const rows = db.prepare('PRAGMA integrity_check').all() as Array<{ integrity_check: string }>;\n const messages = rows.map((r) => r.integrity_check);\n const ok = messages.length === 1 && messages[0] === 'ok';\n return {\n status: ok ? 'ok' : 'issues',\n issues: ok ? [] : messages,\n };\n}\n\nexport function runForeignKeyCheck(): ForeignKeyViolation[] {\n const db = getDatabase();\n const rows = db.prepare('PRAGMA foreign_key_check').all() as Array<{\n table: string;\n rowid: number;\n parent: string;\n fkid: number;\n }>;\n return rows;\n}\n\nexport function runWalCheckpointTruncate(): WalCheckpointResult {\n const db = getDatabase();\n const rows = db.pragma('wal_checkpoint(TRUNCATE)') as Array<{\n busy: number;\n log: number;\n checkpointed: number;\n }>;\n const row = rows[0] ?? 
{ busy: 0, log: 0, checkpointed: 0 };\n return {\n busy: Number(row.busy),\n log: Number(row.log),\n checkpointed: Number(row.checkpointed),\n };\n}\n\nexport function runPragmaOptimize(): void {\n const db = getDatabase();\n db.pragma('optimize');\n}\n\nconst FTS_TABLE_PATTERN = /^[a-z_][a-z0-9_]*_fts$/;\n\nexport function runFtsOptimize(ftsTableName: string): void {\n // Defense in depth: validate the table name against an allowlist regex AND\n // verify it actually exists as an FTS5 virtual table in sqlite_master before\n // building the SQL string. The name is never user-supplied — it comes from\n // listFtsTableNames() — but the check protects against future regressions.\n if (!FTS_TABLE_PATTERN.test(ftsTableName)) {\n throw new Error('Invalid FTS5 table name: ' + ftsTableName);\n }\n const db = getDatabase();\n const row = db.prepare(\n \"SELECT sql FROM sqlite_master WHERE type='table' AND name=?\",\n ).get(ftsTableName) as { sql: string | null } | undefined;\n if (!row || !(row.sql ?? '').toLowerCase().includes('fts5')) {\n throw new Error('Not an FTS5 table: ' + ftsTableName);\n }\n // Build the optimize statement using string concatenation; FTS5 virtual\n // tables require the table name to appear both as the target and as the\n // first column reference. Parameter binding does not work for table names.\n const quoted = '\"' + ftsTableName + '\"';\n const optimizeSql = 'INSERT INTO ' + quoted + '(' + quoted + \") VALUES ('optimize')\";\n db.prepare(optimizeSql).run();\n}\n\nexport function listFtsTableNames(): string[] {\n const db = getDatabase();\n const rows = db.prepare(\n \"SELECT name FROM sqlite_master WHERE type='table' AND sql LIKE '%fts5%' ORDER BY name\",\n ).all() as Array<{ name: string }>;\n return rows.map((r) => r.name).filter((name) => FTS_TABLE_PATTERN.test(name));\n}\n","/**\n * Built-in domain notification registrations.\n *\n * Each domain registers its notification types here. 
External/plugin\n * domains can call register() directly.\n */\n\nimport { register } from './registry.js';\n\n/** Register all built-in domain notifications. Called once at daemon startup. */\nexport function registerBuiltinDomains(): void {\n register({\n domain: 'agents',\n label: 'Agent Tasks',\n types: [\n { id: 'agent.task.success', label: 'Task completed', defaultMode: 'summary', defaultLevel: 'success' },\n { id: 'agent.task.failure', label: 'Task failed', defaultMode: 'summary', defaultLevel: 'error' },\n ],\n });\n\n register({\n domain: 'sessions',\n label: 'Sessions',\n types: [\n { id: 'session.started', label: 'Session started', defaultMode: 'summary', defaultLevel: 'info' },\n { id: 'session.ended', label: 'Session ended', defaultMode: 'summary', defaultLevel: 'info' },\n ],\n });\n\n register({\n domain: 'skills',\n label: 'Skills',\n types: [\n { id: 'skill.surveyed', label: 'Skill candidate surveyed', defaultMode: 'summary', defaultLevel: 'info' },\n { id: 'skill.created', label: 'Skill created', defaultMode: 'summary', defaultLevel: 'success' },\n { id: 'skill.evolved', label: 'Skill evolved', defaultMode: 'summary', defaultLevel: 'info' },\n ],\n });\n\n register({\n domain: 'mycelium',\n label: 'Mycelium',\n types: [\n { id: 'mycelium.digest.completed', label: 'Digest cycle completed', defaultMode: 'summary', defaultLevel: 'info' },\n { id: 'mycelium.spore.created', label: 'New spore extracted', defaultMode: 'summary', defaultLevel: 'info' },\n ],\n });\n\n register({\n domain: 'daemon',\n label: 'Daemon',\n types: [\n { id: 'daemon.version_sync', label: 'Version sync restart', defaultMode: 'summary', defaultLevel: 'info' },\n ],\n });\n\n register({\n domain: 'settings',\n label: 'Settings',\n types: [\n { id: 'settings.saved', label: 'Settings saved', defaultMode: 'banner', defaultLevel: 'success' },\n ],\n });\n}\n","/**\n * Notification API handlers.\n *\n * Thin handlers that delegate to DB queries and the notification registry.\n 
*/\n\nimport { z } from 'zod';\nimport type { RouteResponse } from '../router.js';\nimport {\n listNotifications,\n countNotifications,\n getNotification,\n updateNotificationStatus,\n dismissAllNotifications,\n markAllRead,\n} from '../../db/queries/notifications.js';\nimport { getAllDomains } from '../../notifications/registry.js';\nimport { notify } from '../../notifications/notify.js';\nimport { loadMergedConfig } from '../../config/loader.js';\nimport type { NotificationMode } from '../../notifications/types.js';\n\n// ---------------------------------------------------------------------------\n// Validation schemas\n// ---------------------------------------------------------------------------\n\nconst CreateNotificationBody = z.object({\n domain: z.string().min(1),\n type: z.string().min(1),\n level: z.enum(['info', 'success', 'warning', 'error']).optional(),\n title: z.string().min(1),\n message: z.string().optional(),\n mode: z.enum(['banner', 'summary']).optional(),\n link: z.string().optional(),\n metadata: z.record(z.string(), z.unknown()).optional(),\n});\n\nconst UpdateStatusBody = z.object({\n status: z.enum(['read', 'dismissed']),\n});\n\n// ---------------------------------------------------------------------------\n// Handlers\n// ---------------------------------------------------------------------------\n\n/** GET /api/notifications — list notifications with optional filters. */\nexport async function handleListNotifications(\n _vaultDir: string,\n query: Record<string, string>,\n): Promise<RouteResponse> {\n const status = query.status as 'unread' | 'read' | 'dismissed' | undefined;\n const domain = query.domain;\n const mode = query.mode as NotificationMode | undefined;\n const limit = query.limit ? Number(query.limit) : undefined;\n const offset = query.offset ? 
Number(query.offset) : undefined;\n\n const items = listNotifications({ status, domain, mode, limit, offset });\n const unreadCount = countNotifications('unread');\n\n return {\n body: {\n items: items.map(parseNotificationRow),\n unread_count: unreadCount,\n },\n };\n}\n\n/** POST /api/notifications — create a notification. */\nexport async function handleCreateNotification(\n vaultDir: string,\n body: unknown,\n): Promise<RouteResponse> {\n const parsed = CreateNotificationBody.safeParse(body);\n if (!parsed.success) {\n return { status: 400, body: { error: 'validation_failed', issues: parsed.error.issues } };\n }\n\n const { domain, type, title, message, link, metadata } = parsed.data;\n\n // Check config for structured HTTP responses before delegating\n const config = loadMergedConfig(vaultDir);\n if (!config.notifications.enabled) {\n return { body: { ok: true, suppressed: true, reason: 'notifications_disabled' } };\n }\n const domainConfig = config.notifications.domains[domain];\n if (domainConfig && !domainConfig.enabled) {\n return { body: { ok: true, suppressed: true, reason: 'domain_disabled' } };\n }\n\n // Delegate resolution + insertion to notify() — pass config to avoid re-reading\n const id = notify(vaultDir, {\n domain, type, title, message, link, metadata,\n level: parsed.data.level,\n mode: parsed.data.mode,\n }, config);\n\n if (!id) {\n return { body: { ok: true, suppressed: true, reason: 'unknown' } };\n }\n\n return {\n body: {\n ok: true,\n id,\n notification: parseNotificationRow(getNotification(id)!),\n },\n };\n}\n\n/** PATCH /api/notifications/:id — update status (read/dismissed). 
*/\nexport async function handleUpdateNotification(\n _vaultDir: string,\n id: string,\n body: unknown,\n): Promise<RouteResponse> {\n const parsed = UpdateStatusBody.safeParse(body);\n if (!parsed.success) {\n return { status: 400, body: { error: 'validation_failed', issues: parsed.error.issues } };\n }\n\n const updated = updateNotificationStatus(id, parsed.data.status);\n if (!updated) {\n return { status: 404, body: { error: 'not_found' } };\n }\n\n return { body: { ok: true } };\n}\n\n/** POST /api/notifications/dismiss-all — dismiss all (optionally per domain). */\nexport async function handleDismissAll(\n _vaultDir: string,\n body: unknown,\n): Promise<RouteResponse> {\n const domain = (body as Record<string, unknown>)?.domain as string | undefined;\n const count = dismissAllNotifications(domain);\n return { body: { ok: true, dismissed: count } };\n}\n\n/** POST /api/notifications/mark-all-read — mark all unread as read. */\nexport async function handleMarkAllRead(\n _vaultDir: string,\n body: unknown,\n): Promise<RouteResponse> {\n const domain = (body as Record<string, unknown>)?.domain as string | undefined;\n const count = markAllRead(domain);\n return { body: { ok: true, marked: count } };\n}\n\n/** GET /api/notifications/registry — return all registered domain descriptors. */\nexport async function handleGetRegistry(): Promise<RouteResponse> {\n return { body: { domains: getAllDomains() } };\n}\n\n/** GET /api/notifications/unread-count — lightweight unread count endpoint. */\nexport async function handleUnreadCount(): Promise<RouteResponse> {\n return { body: { count: countNotifications('unread') } };\n}\n\n// ---------------------------------------------------------------------------\n// Helpers\n// ---------------------------------------------------------------------------\n\nfunction parseNotificationRow(row: ReturnType<typeof getNotification>) {\n if (!row) return null;\n return {\n ...row,\n metadata: row.metadata ? 
JSON.parse(row.metadata) : null,\n };\n}\n","/**\n * API route handlers for agent task CRUD.\n *\n * Thin handlers that delegate to the registry and loader. Each handler\n * takes a RouteRequest and the vault directory, returning a RouteResponse.\n *\n * Route overview:\n * GET /api/agent/tasks — list all tasks (built-in + user)\n * GET /api/agent/tasks/:id — get a single task by name\n * POST /api/agent/tasks — create a new user task\n * POST /api/agent/tasks/:id/copy — copy an existing task to user dir\n * DELETE /api/agent/tasks/:id — delete a user task (built-ins blocked)\n */\n\nimport { stringify as stringifyYaml, parse as parseYaml } from 'yaml';\nimport { errorMessage as toErrorMessage } from '@myco/utils/error-message.js';\nimport { taskFromParsed } from '@myco/agent/loader.js';\nimport { AgentTaskSchema } from '@myco/agent/schemas.js';\nimport {\n loadAllTasks,\n validateTaskName,\n writeUserTask,\n deleteUserTask,\n copyTaskToUser,\n} from '@myco/agent/registry.js';\nimport { resolveDefinitionsDir } from '@myco/agent/loader.js';\nimport { USER_TASK_SOURCE } from '@myco/constants.js';\nimport { loadMergedConfig, updateConfig } from '../../config/loader.js';\nimport { withTaskConfig } from '../../config/updates.js';\nimport type { TaskConfigUpdate } from '../../config/updates.js';\nimport type { RouteRequest, RouteResponse } from '../router.js';\n\n// ---------------------------------------------------------------------------\n// Constants\n// ---------------------------------------------------------------------------\n\n/** HTTP status: 200 OK (returned as undefined — default) */\nconst HTTP_OK = 200;\n\n/** HTTP status: 201 Created */\nconst HTTP_CREATED = 201;\n\n/** HTTP status: 400 Bad Request */\nconst HTTP_BAD_REQUEST = 400;\n\n/** HTTP status: 403 Forbidden */\nconst HTTP_FORBIDDEN = 403;\n\n/** HTTP status: 404 Not Found */\nconst HTTP_NOT_FOUND = 404;\n\n/** HTTP status: 409 Conflict */\nconst HTTP_CONFLICT = 409;\n\n// 
---------------------------------------------------------------------------\n// Handlers\n// ---------------------------------------------------------------------------\n\n/**\n * List all tasks: built-in definitions merged with user-created overrides.\n *\n * Optionally filtered by `?source=user` or `?source=built-in`.\n */\nexport async function handleListTasks(\n req: RouteRequest,\n vaultDir: string,\n): Promise<RouteResponse> {\n const definitionsDir = resolveDefinitionsDir();\n const allTasks = loadAllTasks(definitionsDir, vaultDir);\n let tasks = Array.from(allTasks.values());\n\n const sourceFilter = req.query?.source as string | undefined;\n if (sourceFilter) {\n tasks = tasks.filter((t) => t.source === sourceFilter);\n }\n\n return { status: HTTP_OK, body: { tasks } };\n}\n\n/**\n * Get a single task by its name (used as the URL `:id` parameter).\n *\n * Returns 404 if the task is not found.\n */\nexport async function handleGetTask(\n req: RouteRequest,\n vaultDir: string,\n): Promise<RouteResponse> {\n const definitionsDir = resolveDefinitionsDir();\n const allTasks = loadAllTasks(definitionsDir, vaultDir);\n const task = allTasks.get(req.params.id);\n\n if (!task) {\n return { status: HTTP_NOT_FOUND, body: { error: 'task_not_found' } };\n }\n\n return { status: HTTP_OK, body: { task } };\n}\n\n/**\n * Create a new user task from the request body.\n *\n * Validates:\n * - Body must parse against AgentTaskSchema.\n * - Task name must be valid (lowercase letters, digits, hyphens only).\n * - No existing user task with the same name may exist.\n *\n * Returns 201 on success.\n */\nexport async function handleCreateTask(\n req: RouteRequest,\n vaultDir: string,\n): Promise<RouteResponse> {\n // Parse and validate body against schema\n const result = AgentTaskSchema.safeParse(req.body);\n if (!result.success) {\n return {\n status: HTTP_BAD_REQUEST,\n body: { error: 'validation_failed', issues: result.error.issues },\n };\n }\n\n const parsed = 
result.data;\n\n // Validate task name format\n if (!validateTaskName(parsed.name)) {\n return {\n status: HTTP_BAD_REQUEST,\n body: { error: 'invalid_task_name', name: parsed.name },\n };\n }\n\n // Check for existing user task with the same name (built-ins can be shadowed)\n const definitionsDir = resolveDefinitionsDir();\n const allTasks = loadAllTasks(definitionsDir, vaultDir);\n const existing = allTasks.get(parsed.name);\n if (existing && existing.source === USER_TASK_SOURCE) {\n return {\n status: HTTP_CONFLICT,\n body: { error: 'task_already_exists', name: parsed.name },\n };\n }\n\n const task = {\n ...parsed,\n isBuiltin: false,\n source: USER_TASK_SOURCE,\n };\n\n writeUserTask(vaultDir, task);\n\n return { status: HTTP_CREATED, body: { task } };\n}\n\n/**\n * Copy an existing task (built-in or user) to the user task directory.\n *\n * The source task is identified by `req.params.id`.\n * An optional new name may be provided via `req.body.name`.\n *\n * Returns 201 on success.\n */\nexport async function handleCopyTask(\n req: RouteRequest,\n vaultDir: string,\n): Promise<RouteResponse> {\n const sourceName = req.params.id;\n const newName = (req.body as Record<string, unknown> | undefined)?.name as string | undefined;\n\n const definitionsDir = resolveDefinitionsDir();\n\n // Validate the new name if provided\n if (newName !== undefined && !validateTaskName(newName)) {\n return {\n status: HTTP_BAD_REQUEST,\n body: { error: 'invalid_task_name', name: newName },\n };\n }\n\n try {\n const copy = copyTaskToUser(definitionsDir, vaultDir, sourceName, newName);\n return { status: HTTP_CREATED, body: { task: copy } };\n } catch (err) {\n const message = toErrorMessage(err);\n if (message.includes('not found')) {\n return { status: HTTP_NOT_FOUND, body: { error: 'task_not_found', name: sourceName } };\n }\n return { status: HTTP_BAD_REQUEST, body: { error: 'copy_failed', message } };\n }\n}\n\n/**\n * Get the raw YAML content of a user task file.\n *\n * 
Built-in tasks return their serialized AgentTask as YAML.\n * Returns 404 if the task doesn't exist.\n */\nexport async function handleGetTaskYaml(\n req: RouteRequest,\n vaultDir: string,\n): Promise<RouteResponse> {\n const taskName = req.params.id;\n const definitionsDir = resolveDefinitionsDir();\n const allTasks = loadAllTasks(definitionsDir, vaultDir);\n const task = allTasks.get(taskName);\n\n if (!task) {\n return { status: HTTP_NOT_FOUND, body: { error: 'task_not_found', name: taskName } };\n }\n\n // Serialize task to YAML (strip internal fields)\n const { isBuiltin: _ib, source: _src, ...serializable } = task;\n const yaml = stringifyYaml(serializable);\n\n return { status: HTTP_OK, body: { yaml, source: task.source } };\n}\n\n/**\n * Update a user task from raw YAML content.\n *\n * Parses the YAML through AgentTaskSchema for validation.\n * Built-in tasks cannot be updated (returns 403).\n * Returns the updated task on success.\n */\nexport async function handleUpdateTask(\n req: RouteRequest,\n vaultDir: string,\n): Promise<RouteResponse> {\n const taskName = req.params.id;\n const definitionsDir = resolveDefinitionsDir();\n const allTasks = loadAllTasks(definitionsDir, vaultDir);\n const existing = allTasks.get(taskName);\n\n if (!existing) {\n return { status: HTTP_NOT_FOUND, body: { error: 'task_not_found', name: taskName } };\n }\n\n if (existing.isBuiltin || existing.source !== USER_TASK_SOURCE) {\n return { status: HTTP_FORBIDDEN, body: { error: 'cannot_update_builtin', name: taskName } };\n }\n\n const body = req.body as Record<string, unknown> | undefined;\n const yamlContent = body?.yaml;\n if (typeof yamlContent !== 'string') {\n return { status: HTTP_BAD_REQUEST, body: { error: 'missing_yaml_field' } };\n }\n\n try {\n const parsed = AgentTaskSchema.parse(parseYaml(yamlContent));\n const task = { ...taskFromParsed(parsed), isBuiltin: false, source: USER_TASK_SOURCE };\n\n // Ensure the name matches the URL param (prevent renaming via 
YAML)\n if (task.name !== taskName) {\n return { status: HTTP_BAD_REQUEST, body: { error: 'name_mismatch', expected: taskName, got: task.name } };\n }\n\n writeUserTask(vaultDir, task);\n return { status: HTTP_OK, body: { task } };\n } catch (err) {\n const message = toErrorMessage(err);\n return { status: HTTP_BAD_REQUEST, body: { error: 'validation_failed', message } };\n }\n}\n\n/**\n * Delete a user task by name.\n *\n * Built-in tasks may not be deleted (returns 403).\n * Returns 404 if the task does not exist.\n */\nexport async function handleDeleteTask(\n req: RouteRequest,\n vaultDir: string,\n): Promise<RouteResponse> {\n const taskName = req.params.id;\n const definitionsDir = resolveDefinitionsDir();\n const allTasks = loadAllTasks(definitionsDir, vaultDir);\n const task = allTasks.get(taskName);\n\n // Task must exist\n if (!task) {\n return { status: HTTP_NOT_FOUND, body: { error: 'task_not_found', name: taskName } };\n }\n\n // Built-in tasks cannot be deleted\n if (task.isBuiltin || task.source !== USER_TASK_SOURCE) {\n return {\n status: HTTP_FORBIDDEN,\n body: { error: 'cannot_delete_builtin', name: taskName },\n };\n }\n\n deleteUserTask(vaultDir, taskName);\n\n return { status: HTTP_OK, body: { deleted: taskName } };\n}\n\n/**\n * Get the full config override for a specific task from myco.yaml.\n *\n * Returns: provider, model, maxTurns, timeoutSeconds, and per-phase overrides.\n */\nexport async function handleGetTaskConfig(\n req: RouteRequest,\n vaultDir: string,\n): Promise<RouteResponse> {\n const taskId = req.params.id;\n const config = loadMergedConfig(vaultDir);\n const taskConfig = config.agent.tasks?.[taskId] ?? null;\n return { status: HTTP_OK, body: { taskId, config: taskConfig } };\n}\n\n/**\n * Update config overrides for a specific task in myco.yaml.\n *\n * Accepts partial updates — only provided fields are set. Fields set to\n * `null` are removed. 
Supports: provider, model, maxTurns, timeoutSeconds, phases.\n *\n * Phase overrides are keyed by phase name and support: provider, model, maxTurns.\n */\nexport async function handleUpdateTaskConfig(\n req: RouteRequest,\n vaultDir: string,\n): Promise<RouteResponse> {\n const taskId = req.params.id;\n const body = req.body as TaskConfigUpdate | undefined;\n\n if (!body) {\n return { status: HTTP_BAD_REQUEST, body: { error: 'missing_body' } };\n }\n\n const updated = updateConfig(vaultDir, (config) =>\n withTaskConfig(config, taskId, body),\n );\n\n return {\n status: HTTP_OK,\n body: { taskId, config: updated.agent.tasks?.[taskId] ?? null },\n };\n}\n","/**\n * API route handlers for provider detection and connectivity testing.\n *\n * Route overview:\n * GET /api/providers — detect available LLM providers and their models\n * POST /api/providers/test — test connectivity to a specific provider\n */\n\nimport Anthropic from '@anthropic-ai/sdk';\nimport { OllamaBackend } from '../../intelligence/ollama.js';\nimport { LmStudioBackend } from '../../intelligence/lm-studio.js';\nimport { checkLocalProvider } from '../../intelligence/provider-check.js';\nimport { ANTHROPIC_MODELS, filterLlmModels } from './models.js';\nimport type { RouteRequest, RouteResponse } from '../router.js';\n\n/** Timeout for the live Anthropic model list query (short -- fall back fast). */\nconst ANTHROPIC_MODELS_TIMEOUT_MS = 5000;\n\n/** TTL for the cached live Anthropic model list. The list changes rarely\n * and the SDK call is the slowest part of `/providers`; cache to keep the\n * endpoint snappy under React Query's 30s stale time. */\nconst ANTHROPIC_MODELS_CACHE_TTL_MS = 10 * 60 * 1000;\nlet anthropicModelsCache: { ts: number; models: string[] } | null = null;\n\n/** HTTP status codes. 
*/\nconst HTTP_OK = 200;\nconst HTTP_BAD_REQUEST = 400;\n\n// ---------------------------------------------------------------------------\n// Types\n// ---------------------------------------------------------------------------\n\ninterface ProviderInfo {\n type: string;\n available: boolean;\n baseUrl?: string;\n models: string[];\n}\n\ninterface TestResult {\n ok: boolean;\n latency_ms?: number;\n error?: string;\n}\n\n// ---------------------------------------------------------------------------\n// Handlers\n// ---------------------------------------------------------------------------\n\n/**\n * Detect available providers (Ollama, LM Studio, Anthropic).\n *\n * Uses Promise.allSettled for parallel detection with timeouts so one\n * slow/unavailable provider doesn't block the others.\n */\nexport async function handleGetProviders(): Promise<RouteResponse> {\n // UI rendering order: Anthropic first (recommended default), then locals.\n const results = await Promise.allSettled([\n detectAnthropic(),\n detectLocalProviderInfo('ollama', OllamaBackend.DEFAULT_BASE_URL),\n detectLocalProviderInfo('lmstudio', LmStudioBackend.DEFAULT_BASE_URL),\n ]);\n\n const providers: ProviderInfo[] = results.map((r) =>\n r.status === 'fulfilled'\n ? 
r.value\n : { type: 'unknown', available: false, models: [] },\n );\n\n return { status: HTTP_OK, body: { providers } };\n}\n\n/**\n * Test connectivity to a specific provider.\n *\n * Accepts: { type: 'anthropic' | 'ollama' | 'lmstudio', baseUrl?: string, model?: string }\n * Returns: { ok: boolean, latency_ms?: number, error?: string }\n */\nexport async function handleTestProvider(req: RouteRequest): Promise<RouteResponse> {\n const body = req.body as Record<string, unknown> | undefined;\n const type = body?.type as string | undefined;\n\n if (!type || !['anthropic', 'ollama', 'lmstudio'].includes(type)) {\n return {\n status: HTTP_BAD_REQUEST,\n body: { error: 'type is required and must be one of: anthropic, ollama, lmstudio' },\n };\n }\n\n const baseUrl = body?.baseUrl as string | undefined;\n const start = performance.now();\n let result: TestResult;\n\n try {\n if (type === 'ollama') {\n result = await testLocalProvider(new OllamaBackend({ base_url: baseUrl }), 'Ollama', OllamaBackend.DEFAULT_BASE_URL, baseUrl);\n } else if (type === 'lmstudio') {\n result = await testLocalProvider(new LmStudioBackend({ base_url: baseUrl }), 'LM Studio', LmStudioBackend.DEFAULT_BASE_URL, baseUrl);\n } else {\n result = testAnthropic();\n }\n } catch (err) {\n result = { ok: false, error: String(err) };\n }\n\n if (result.ok) {\n result.latency_ms = Math.round(performance.now() - start);\n }\n\n return { status: HTTP_OK, body: result };\n}\n\n// ---------------------------------------------------------------------------\n// Detection helpers\n// ---------------------------------------------------------------------------\n\n/** Detect a local provider (Ollama or LM Studio) and wrap as ProviderInfo.\n * Filters embedding models out — the agent provider only runs LLM tasks. 
*/\nasync function detectLocalProviderInfo(\n type: 'ollama' | 'lmstudio',\n defaultBaseUrl: string,\n): Promise<ProviderInfo> {\n const status = await checkLocalProvider(type);\n // Filter out Myco-created context variants (e.g., gpt-oss-ctx32768)\n const variantFiltered = status.models.filter(m => !/-ctx\\d+/.test(m));\n // Drop embedding models -- the agent provider only runs LLM tasks\n const models = filterLlmModels(variantFiltered);\n return { type, available: status.available, baseUrl: defaultBaseUrl, models };\n}\n\nasync function detectAnthropic(): Promise<ProviderInfo> {\n // Anthropic is always available — the SDK handles auth internally via OAuth,\n // API key, Bedrock, Vertex, or Foundry. The daemon can't reliably detect\n // which method is in use since env vars aren't always inherited.\n //\n // The live model list is cached with a 10-minute TTL so we don't hit the\n // SDK on every `/providers` request. On any failure (no API key set in the\n // daemon's env, no network, OAuth-only auth) we fall back to the hardcoded\n // ANTHROPIC_MODELS constant so the dropdown is never empty.\n const now = Date.now();\n if (anthropicModelsCache && now - anthropicModelsCache.ts < ANTHROPIC_MODELS_CACHE_TTL_MS) {\n return { type: 'anthropic', available: true, models: anthropicModelsCache.models };\n }\n\n let models = ANTHROPIC_MODELS;\n try {\n const client = new Anthropic();\n const response = await client.models.list(\n { limit: 50 },\n { timeout: ANTHROPIC_MODELS_TIMEOUT_MS },\n );\n const liveModels = response.data\n .map((m) => m.id)\n .filter((id) => id.startsWith('claude-'));\n if (liveModels.length > 0) {\n models = liveModels;\n }\n } catch {\n // Fall through to hardcoded ANTHROPIC_MODELS\n }\n anthropicModelsCache = { ts: now, models };\n return { type: 'anthropic', available: true, models };\n}\n\n// ---------------------------------------------------------------------------\n// Test helpers\n// 
---------------------------------------------------------------------------\n\n/** Test a local provider's connectivity — shared pattern. */\nasync function testLocalProvider(\n backend: { isAvailable(): Promise<boolean> },\n label: string,\n defaultBaseUrl: string,\n baseUrl?: string,\n): Promise<TestResult> {\n const available = await backend.isAvailable();\n if (!available) {\n return { ok: false, error: `${label} not reachable at ${baseUrl ?? defaultBaseUrl}` };\n }\n return { ok: true };\n}\n\nfunction testAnthropic(): TestResult {\n // SDK handles auth — always report OK. Auth failures surface at runtime.\n return { ok: true };\n}\n","/**\n * Dynamic task scheduling registration.\n *\n * Extracted from main.ts — loads task definitions, seeds last-run times\n * from the database, builds the ScheduledJobContext (pre-conditions,\n * runTask with notifications), and registers scheduled jobs with the\n * PowerManager.\n */\n\nimport { resolve } from 'node:path';\nimport type { DaemonLogger } from './logger.js';\nimport type { MycoConfig } from '@myco/config/schema.js';\nimport type { PowerManager } from './power.js';\nimport type { EmbeddingManager } from './embedding/manager.js';\nimport type { ScheduledJobContext } from './task-scheduler.js';\nimport { buildScheduledJobs } from './task-scheduler.js';\nimport {\n buildTaskInstruction,\n getSkillSurveyEligibility,\n isInstructionRequiredTask,\n SKILL_SURVEY_TASK,\n} from '@myco/agent/instruction-builders.js';\nimport { countSkillRecords } from '@myco/db/queries/skill-records.js';\nimport { countCandidates } from '@myco/db/queries/skill-candidates.js';\nimport { getDatabase } from '@myco/db/client.js';\nimport { notify } from '@myco/notifications/notify.js';\nimport { LOG_KINDS } from '@myco/constants/log-kinds.js';\n\nconst SCHEDULED_JOB_PREFIX = 'scheduled:';\n\n// ---------------------------------------------------------------------------\n// Types\n// 
---------------------------------------------------------------------------\n\nexport interface TaskSchedulingDeps {\n definitionsDir: string | undefined;\n vaultDir: string;\n embeddingManager: EmbeddingManager;\n logger: DaemonLogger;\n // Holder so the run-time gate below sees toggle flips\n // (agent.scheduled_tasks_enabled) without a daemon restart.\n liveConfig: { current: MycoConfig };\n}\n\n// ---------------------------------------------------------------------------\n// Registration\n// ---------------------------------------------------------------------------\n\nexport async function registerScheduledTasks(\n powerManager: PowerManager,\n deps: TaskSchedulingDeps,\n): Promise<void> {\n const { definitionsDir, vaultDir, embeddingManager, logger, liveConfig } = deps;\n const runningTasks = new Set<string>();\n\n if (!definitionsDir) {\n logger.warn(LOG_KINDS.AGENT_ERROR, 'Skipping dynamic task scheduling — definitions directory unavailable');\n return;\n }\n\n // Jobs always register. 
The scheduled_tasks_enabled gate lives inside\n // runTask so flipping the toggle in Settings takes effect immediately —\n // registration-time gating would lock the scheduler to its startup value.\n let lastEnabled = liveConfig.current.agent.scheduled_tasks_enabled !== false;\n if (!lastEnabled) {\n logger.info(LOG_KINDS.AGENT_RUN, 'Scheduled agent tasks disabled (agent.scheduled_tasks_enabled: false) — jobs registered but will no-op until enabled');\n }\n\n const { loadAllTasks } = await import('@myco/agent/registry.js');\n const allTasks = Array.from(loadAllTasks(definitionsDir, vaultDir).values());\n\n // Map task name → agent id for instruction builders that need it\n const taskAgentMap = new Map<string, string>();\n for (const task of allTasks) {\n taskAgentMap.set(task.name, task.agent);\n }\n\n // Seed lastRun from DB: find the most recent completed/failed run per task\n const initialLastRuns: Record<string, number> = {};\n try {\n const recentRuns = getDatabase().prepare(\n `SELECT task, MAX(completed_at) as last_completed\n FROM agent_runs\n WHERE status IN ('completed', 'failed') AND completed_at IS NOT NULL\n GROUP BY task`\n ).all() as Array<{ task: string; last_completed: number }>;\n for (const row of recentRuns) {\n initialLastRuns[row.task] = row.last_completed * 1000; // epoch seconds → ms\n }\n } catch {\n // Best-effort seeding\n }\n\n const scheduledContext: ScheduledJobContext = {\n isTaskRunning: (name) => runningTasks.has(name),\n setTaskRunning: (name, running) => {\n if (running) runningTasks.add(name);\n else runningTasks.delete(name);\n },\n runTask: async (taskName) => {\n const config = liveConfig.current;\n\n // Runtime gate — honors the toggle flipped since startup. We log once\n // per transition so the log doesn't repeat on every scheduler tick.\n const enabled = config.agent.scheduled_tasks_enabled !== false;\n if (enabled !== lastEnabled) {\n logger.info(\n LOG_KINDS.AGENT_RUN,\n enabled\n ? 
'Scheduled agent tasks re-enabled — resuming'\n : 'Scheduled agent tasks disabled — skipping until re-enabled',\n );\n lastEnabled = enabled;\n }\n if (!enabled) return;\n\n const { runAgent } = await import('@myco/agent/executor.js');\n\n const taskConfig = config.agent.tasks?.[taskName];\n const projectRoot = resolve(vaultDir, '..');\n const built = buildTaskInstruction(taskName, taskConfig?.params, taskAgentMap.get(taskName), projectRoot, embeddingManager);\n\n // Short-circuit: instruction-required tasks must not dispatch\n // the agent when there's no work. For skill-generate this means\n // no approved candidates — without the guard the agent falls\n // back to its default prompt and picks whatever it finds.\n if (isInstructionRequiredTask(taskName) && !built) {\n logger.info(\n LOG_KINDS.AGENT_RUN,\n `Scheduled task ${taskName} skipped — no work to do`,\n { task: taskName, reason: 'no-work' },\n );\n return;\n }\n\n const result = await runAgent(vaultDir, {\n task: taskName,\n instruction: built?.instruction,\n runContext: built?.context,\n embeddingManager,\n });\n logger.info(LOG_KINDS.AGENT_RUN, `Scheduled task ${taskName} completed`, {\n status: result.status,\n runId: result.runId,\n });\n\n if (result.status === 'failed') {\n notify(vaultDir, {\n domain: 'agents',\n type: 'agent.task.failure',\n title: `Task failed: ${taskName}`,\n message: result.error ?? 
'Unknown error',\n link: `/agent?run=${result.runId}`,\n metadata: { taskName, runId: result.runId },\n }, config);\n } else if (result.status === 'completed') {\n notify(vaultDir, {\n domain: 'agents',\n type: 'agent.task.success',\n title: `Task completed: ${taskName}`,\n link: `/agent?run=${result.runId}`,\n metadata: { taskName, runId: result.runId },\n }, config);\n\n // Batched mycelium notifications — emit summaries instead of per-tool-call\n const { countToolCallsByRun } = await import('@myco/db/queries/turns.js');\n const counts = countToolCallsByRun(result.runId, ['vault_create_spore', 'vault_write_digest']);\n const sporeCount = counts['vault_create_spore'] ?? 0;\n const digestCount = counts['vault_write_digest'] ?? 0;\n\n if (sporeCount > 0) {\n notify(vaultDir, {\n domain: 'mycelium',\n type: 'mycelium.spore.created',\n title: sporeCount === 1 ? 'Extracted 1 observation' : `Extracted ${sporeCount} observations`,\n message: `From ${taskName} run`,\n link: '/mycelium?tab=spores',\n metadata: { count: sporeCount, taskName, runId: result.runId },\n }, config);\n }\n if (digestCount > 0) {\n notify(vaultDir, {\n domain: 'mycelium',\n type: 'mycelium.digest.completed',\n title: `Digest updated (${digestCount} ${digestCount === 1 ? 'tier' : 'tiers'})`,\n link: '/mycelium?tab=digest',\n metadata: { tierCount: digestCount, taskName, runId: result.runId },\n }, config);\n }\n }\n },\n preConditions: {\n 'has-unprocessed-batches': () => {\n // Only count unprocessed batches from sessions that have settled\n // (status != 'active'). 
Otherwise full-intelligence fires on every\n // live prompt and then filters everything out — wasted agent runs.\n const row = getDatabase().prepare(\n `SELECT 1 FROM prompt_batches pb\n WHERE pb.processed = 0\n AND EXISTS (\n SELECT 1 FROM sessions s\n WHERE s.id = pb.session_id AND s.status != 'active'\n )\n LIMIT 1`,\n ).get();\n return row !== undefined;\n },\n 'has-active-skills': () => {\n return countSkillRecords({ status: 'active' }) > 0;\n },\n 'has-approved-candidates': () => {\n return countCandidates({ status: 'approved' }) > 0;\n },\n 'has-skill-survey-evidence': () => {\n return getSkillSurveyEligibility(taskAgentMap.get(SKILL_SURVEY_TASK)).eligible;\n },\n },\n onTaskError: (taskName, err) => {\n logger.error(LOG_KINDS.AGENT_ERROR, `Detached task \"${taskName}\" threw`, {\n error: err instanceof Error ? err.message : String(err),\n });\n },\n };\n\n const scheduledJobs = buildScheduledJobs(\n allTasks,\n liveConfig.current.agent.tasks ?? {},\n scheduledContext,\n initialLastRuns,\n );\n powerManager.replaceGroup(SCHEDULED_JOB_PREFIX, scheduledJobs);\n logger.info(LOG_KINDS.DAEMON_START, `Synced ${scheduledJobs.length} scheduled task(s)`, {\n tasks: scheduledJobs.map((j) => j.name),\n });\n}\n","/**\n * Dynamic PowerManager job registration from task schedule definitions.\n *\n * Reads all task definitions, overlays user config overrides, and builds\n * PowerJob entries for tasks with enabled schedules.\n */\n\nimport type { AgentTask, TaskSchedule } from '@myco/agent/types.js';\nimport type { PowerJob } from './power.js';\n\n/** Resolve effective schedule: YAML defaults + myco.yaml overrides. */\nfunction resolveSchedule(\n yamlSchedule: TaskSchedule,\n configOverride?: { schedule?: Partial<TaskSchedule> },\n): TaskSchedule {\n if (!configOverride?.schedule) return yamlSchedule;\n return {\n enabled: configOverride.schedule.enabled ?? yamlSchedule.enabled,\n intervalSeconds: configOverride.schedule.intervalSeconds ?? 
yamlSchedule.intervalSeconds,\n runIn: configOverride.schedule.runIn ?? yamlSchedule.runIn,\n preCondition: configOverride.schedule.preCondition ?? yamlSchedule.preCondition,\n };\n}\n\nexport interface ScheduledJobContext {\n /** Check if a specific task is currently running. */\n isTaskRunning: (taskName: string) => boolean;\n /** Mark a task as running/not running. */\n setTaskRunning: (taskName: string, running: boolean) => void;\n /** Called to run the task. */\n runTask: (taskName: string) => Promise<void>;\n /** Pre-condition checkers keyed by preCondition name. */\n preConditions: Record<string, () => boolean>;\n /**\n * Optional error sink for detached task runs. Because scheduled tasks are\n * kicked off without awaiting (so the PowerManager tick loop stays\n * responsive), unhandled rejections from `runTask` land here instead of\n * propagating through the tick.\n */\n onTaskError?: (taskName: string, err: unknown) => void;\n}\n\n/**\n * Build PowerManager jobs from task definitions + config overrides.\n *\n * Returns only jobs for tasks with schedule.enabled = true (after override merge).\n * Each job respects its own interval, runIn states, and optional pre-condition.\n *\n * @param tasks — All loaded agent tasks (built-in + user).\n * @param configOverrides — Per-task config from myco.yaml `agent.tasks`.\n * @param context — Runtime context for agent execution. 
Optional for testing.\n * @param initialLastRuns — Map of task name → epoch ms of last completed run (for restart seeding).\n */\nexport function buildScheduledJobs(\n tasks: AgentTask[],\n configOverrides: Record<string, unknown>,\n context?: ScheduledJobContext,\n initialLastRuns?: Record<string, number>,\n): PowerJob[] {\n const jobs: PowerJob[] = [];\n\n for (const task of tasks) {\n if (!task.schedule) continue;\n\n const override = configOverrides[task.name] as { schedule?: Partial<TaskSchedule> } | undefined;\n const effective = resolveSchedule(task.schedule, override);\n\n if (!effective.enabled) continue;\n\n let lastRun = initialLastRuns?.[task.name] ?? 0;\n const intervalMs = effective.intervalSeconds * 1000;\n\n jobs.push({\n name: `scheduled:${task.name}`,\n runIn: effective.runIn,\n fn: async () => {\n if (!context) return;\n if (context.isTaskRunning(task.name)) return;\n if (Date.now() - lastRun < intervalMs) return;\n\n // Check pre-condition if defined\n if (effective.preCondition) {\n const check = context.preConditions[effective.preCondition];\n if (!check) return; // Unknown pre-condition — don't run\n if (!check()) return;\n }\n\n // Kick off the task detached from the PowerManager tick loop.\n // Scheduled agent runs can take 20+ minutes; awaiting them inside\n // the tick would starve every other power job (team-sync-flush,\n // embedding-reconcile, session-maintenance) for the duration.\n // Re-entry is prevented by the isTaskRunning check above, and\n // lastRun is stamped before dispatch so interval throttling stays\n // correct even if the task is still in flight on the next tick.\n const ctx = context;\n ctx.setTaskRunning(task.name, true);\n lastRun = Date.now();\n\n void ctx.runTask(task.name)\n .catch((err) => {\n ctx.onTaskError?.(task.name, err);\n })\n .finally(() => {\n ctx.setTaskRunning(task.name, false);\n });\n },\n });\n }\n\n return jobs;\n}\n","/**\n * Team member queries.\n */\n\nimport { getDatabase } from 
'../client.js';\n\nexport interface TeamMemberRow {\n id: string;\n user: string;\n role: string | null;\n joined: string | null;\n tags: string | null;\n}\n\n/** List all team members ordered by ID. */\nexport function listTeamMembers(): TeamMemberRow[] {\n return getDatabase().prepare(\n `SELECT id, \"user\", role, joined, tags\n FROM team_members\n ORDER BY id ASC`,\n ).all() as TeamMemberRow[];\n}\n","/**\n * MCP proxy API handlers — routes that the MCP server proxies through the\n * daemon instead of opening its own SQLite connection.\n *\n * Factory function injects machineId and embeddingManager; returns handlers\n * for remember, supersede, consolidate, plans, sessions, and team endpoints.\n */\n\nimport { z } from 'zod';\nimport {\n epochSeconds,\n MCP_SESSIONS_DEFAULT_LIMIT,\n SESSION_SUMMARY_PREVIEW_CHARS,\n USER_AGENT_ID,\n USER_AGENT_NAME,\n} from '@myco/constants.js';\nimport { getDatabase } from '@myco/db/client.js';\nimport { registerAgent } from '@myco/db/queries/agents.js';\nimport { insertSpore, updateSporeStatus } from '@myco/db/queries/spores.js';\nimport { getPlan, listPlans } from '@myco/db/queries/plans.js';\nimport { listSessions } from '@myco/db/queries/sessions.js';\nimport { listTeamMembers } from '@myco/db/queries/team-members.js';\nimport { insertResolutionEvent } from '@myco/db/queries/resolution-events.js';\nimport type { RouteRequest, RouteResponse } from '../router.js';\nimport type { EmbeddingManager } from '../embedding/manager.js';\n\n// ---------------------------------------------------------------------------\n// Constants\n// ---------------------------------------------------------------------------\n\nconst SPORE_ID_RANDOM_BYTES = 4;\nconst RESOLUTION_ID_RANDOM_BYTES = 8;\nconst MIN_CONSOLIDATE_SOURCES = 2;\n\n// ---------------------------------------------------------------------------\n// Schemas\n// ---------------------------------------------------------------------------\n\nconst RememberBody = z.object({\n content: 
z.string(),\n type: z.string().optional(),\n tags: z.array(z.string()).optional(),\n});\n\nconst SupersedeBody = z.object({\n old_spore_id: z.string(),\n new_spore_id: z.string(),\n reason: z.string().optional(),\n});\n\n/**\n * Convert an ISO-8601 string to epoch seconds.\n * Returns undefined if parsing fails (silently — callers treat undefined as \"no filter\").\n */\nfunction isoToEpochSeconds(iso: string): number | undefined {\n const ms = Date.parse(iso);\n return Number.isNaN(ms) ? undefined : Math.floor(ms / 1000);\n}\n\nfunction registerMcpUserAgent(createdAt: number): void {\n registerAgent({\n id: USER_AGENT_ID,\n name: USER_AGENT_NAME,\n created_at: createdAt,\n });\n}\n\nfunction toPlanProgress(content: string | null): string {\n const planContent = content ?? '';\n const checked = (planContent.match(/- \\[x\\]/gi) ?? []).length;\n const unchecked = (planContent.match(/- \\[ \\]/g) ?? []).length;\n const total = checked + unchecked;\n return total === 0 ? 'N/A' : `${checked}/${total}`;\n}\n\nfunction toPlanTags(tags: string | null): string[] {\n return tags ? 
tags.split(',').map((tag) => tag.trim()) : [];\n}\n\nconst ConsolidateBody = z.object({\n source_spore_ids: z.array(z.string()).min(MIN_CONSOLIDATE_SOURCES),\n consolidated_content: z.string().min(1),\n observation_type: z.string(),\n tags: z.array(z.string()).optional(),\n reason: z.string().optional(),\n});\n\n// ---------------------------------------------------------------------------\n// Types\n// ---------------------------------------------------------------------------\n\nexport interface McpProxyDeps {\n machineId: string;\n embeddingManager: EmbeddingManager;\n}\n\n// ---------------------------------------------------------------------------\n// Factory\n// ---------------------------------------------------------------------------\n\nexport function createMcpProxyHandlers(deps: McpProxyDeps) {\n const { machineId, embeddingManager } = deps;\n\n function toPlanSummary(row: {\n id: string;\n title: string | null;\n status: string;\n content: string | null;\n tags: string | null;\n created_at: number;\n }) {\n return {\n id: row.id,\n title: row.title,\n status: row.status,\n progress: toPlanProgress(row.content),\n tags: toPlanTags(row.tags),\n created_at: row.created_at,\n };\n }\n\n /** POST /api/mcp/remember — create a spore and trigger embedding. */\n async function handleRemember(req: RouteRequest): Promise<RouteResponse> {\n const { content, type, tags } = RememberBody.parse(req.body);\n const { randomBytes } = await import('node:crypto');\n\n const observationType = type ?? 'discovery';\n const id = `${observationType}-${randomBytes(SPORE_ID_RANDOM_BYTES).toString('hex')}`;\n const now = epochSeconds();\n\n registerMcpUserAgent(now);\n\n const spore = insertSpore({\n id,\n agent_id: USER_AGENT_ID,\n machine_id: machineId,\n observation_type: observationType,\n content,\n tags: tags ? 
tags.join(', ') : null,\n created_at: now,\n });\n\n embeddingManager.onContentWritten('spores', spore.id, content, {\n status: 'active',\n observation_type: observationType,\n }).catch(() => {});\n\n return {\n body: {\n id: spore.id,\n observation_type: spore.observation_type,\n status: spore.status,\n created_at: spore.created_at,\n },\n };\n }\n\n /** POST /api/mcp/supersede — mark spore as superseded and record resolution event. */\n async function handleSupersede(req: RouteRequest): Promise<RouteResponse> {\n const { old_spore_id, new_spore_id, reason } = SupersedeBody.parse(req.body);\n const { randomBytes } = await import('node:crypto');\n const now = epochSeconds();\n\n // Update status to superseded\n updateSporeStatus(old_spore_id, 'superseded', now);\n try { embeddingManager.onStatusChanged('spores', old_spore_id, 'superseded'); } catch { /* best-effort */ }\n\n registerMcpUserAgent(now);\n\n // Record resolution event for audit trail\n const resolutionId = `res-${randomBytes(RESOLUTION_ID_RANDOM_BYTES).toString('hex')}`;\n\n insertResolutionEvent({\n id: resolutionId,\n agent_id: USER_AGENT_ID,\n machine_id: machineId,\n spore_id: old_spore_id,\n action: 'supersede',\n new_spore_id,\n reason: reason ?? 
null,\n created_at: now,\n });\n\n return {\n body: {\n old_spore: old_spore_id,\n new_spore: new_spore_id,\n status: 'superseded' as const,\n },\n };\n }\n\n /**\n * POST /api/mcp/consolidate — merge source spores into a single wisdom spore.\n *\n * Inserts a new spore with the consolidated content, then for each source:\n * - marks its status as 'superseded'\n * - records a resolution_events row (action='consolidate', new_spore_id=wisdom)\n *\n * Returns { new_spore_id, sources_superseded, status: 'consolidated' }.\n */\n async function handleConsolidate(req: RouteRequest): Promise<RouteResponse> {\n const { source_spore_ids, consolidated_content, observation_type, tags, reason } = ConsolidateBody.parse(req.body);\n const { randomBytes } = await import('node:crypto');\n const now = epochSeconds();\n const newSporeId = `${observation_type}-${randomBytes(SPORE_ID_RANDOM_BYTES).toString('hex')}`;\n const db = getDatabase();\n\n registerMcpUserAgent(now);\n\n const { wisdom, sourcesSuperseded } = db.transaction(() => {\n const insertedWisdom = insertSpore({\n id: newSporeId,\n agent_id: USER_AGENT_ID,\n machine_id: machineId,\n observation_type,\n content: consolidated_content,\n tags: tags ? tags.join(', ') : null,\n created_at: now,\n });\n\n const supersededSourceIds: string[] = [];\n for (const sourceId of source_spore_ids) {\n updateSporeStatus(sourceId, 'superseded', now);\n insertResolutionEvent({\n id: `res-${randomBytes(RESOLUTION_ID_RANDOM_BYTES).toString('hex')}`,\n agent_id: USER_AGENT_ID,\n machine_id: machineId,\n spore_id: sourceId,\n action: 'consolidate',\n new_spore_id: newSporeId,\n reason: reason ?? 
null,\n created_at: now,\n });\n supersededSourceIds.push(sourceId);\n }\n\n return { wisdom: insertedWisdom, sourcesSuperseded: supersededSourceIds };\n })();\n\n embeddingManager.onContentWritten('spores', wisdom.id, consolidated_content, {\n status: 'active',\n observation_type,\n }).catch(() => {});\n for (const sourceId of sourcesSuperseded) {\n try { embeddingManager.onStatusChanged('spores', sourceId, 'superseded'); } catch { /* best-effort */ }\n }\n\n return {\n body: {\n new_spore_id: newSporeId,\n sources_superseded: sourcesSuperseded,\n status: 'consolidated' as const,\n created_at: now,\n },\n };\n }\n\n /** GET /api/mcp/plans — list plans, or return a single plan with content when id is set. */\n async function handlePlans(req: RouteRequest): Promise<RouteResponse> {\n const id = typeof req.query.id === 'string' ? req.query.id : undefined;\n\n if (id) {\n const row = getPlan(id);\n if (!row) return { body: { plans: [] } };\n return {\n body: {\n plans: [{\n ...toPlanSummary(row),\n content: row.content,\n }],\n },\n };\n }\n\n const statusFilter = req.query.status === 'all' ? undefined : req.query.status;\n const limit = req.query.limit ? Number(req.query.limit) : undefined;\n\n const rows = listPlans({ status: statusFilter, limit });\n const plans = rows.map(toPlanSummary);\n\n return { body: { plans } };\n }\n\n /**\n * GET /api/mcp/sessions — list sessions with optional filters.\n *\n * Supports query params: limit, status, branch, user, since (ISO string), plan.\n * `plan` resolves to the session recorded for that plan via `getPlan().session_id`.\n */\n async function handleSessions(req: RouteRequest): Promise<RouteResponse> {\n const limit = req.query.limit ? Number(req.query.limit) : MCP_SESSIONS_DEFAULT_LIMIT;\n const status = typeof req.query.status === 'string' ? req.query.status : undefined;\n const branch = typeof req.query.branch === 'string' ? req.query.branch : undefined;\n const user = typeof req.query.user === 'string' ? 
req.query.user : undefined;\n const plan = typeof req.query.plan === 'string' ? req.query.plan : undefined;\n const sinceRaw = typeof req.query.since === 'string' ? req.query.since : undefined;\n\n const since = sinceRaw ? isoToEpochSeconds(sinceRaw) : undefined;\n\n let id: string | undefined;\n if (plan) {\n const planRow = getPlan(plan);\n if (!planRow || !planRow.session_id) return { body: { sessions: [] } };\n id = planRow.session_id;\n }\n\n const rows = listSessions({ limit, status, branch, user, since, id });\n const sessions = rows.map((row) => ({\n id: row.id,\n agent: row.agent,\n user: row.user,\n branch: row.branch,\n started_at: row.started_at,\n ended_at: row.ended_at,\n status: row.status,\n title: row.title,\n summary: (row.summary ?? '').slice(0, SESSION_SUMMARY_PREVIEW_CHARS),\n prompt_count: row.prompt_count,\n tool_count: row.tool_count,\n parent_session_id: row.parent_session_id,\n }));\n\n return { body: { sessions } };\n }\n\n /** GET /api/mcp/team — list team members from DB. */\n async function handleTeam(_req: RouteRequest): Promise<RouteResponse> {\n const rows = listTeamMembers();\n const members = rows.map((row) => ({\n id: row.id,\n user: row.user,\n role: row.role,\n joined: row.joined,\n tags: row.tags ? 
row.tags.split(',').map((t) => t.trim()) : [],\n }));\n\n return { body: { members } };\n }\n\n return {\n handleRemember,\n handleSupersede,\n handleConsolidate,\n handlePlans,\n handleSessions,\n handleTeam,\n };\n}\n","/**\n * Agent run API handlers — trigger runs, list runs, and fetch run details.\n *\n * Factory function injects vaultDir and embeddingManager; returns handlers\n * for the /api/agent/run and /api/agent/runs/* endpoints.\n */\n\nimport { resolve } from 'node:path';\nimport { z } from 'zod';\nimport { listRuns, countRuns, getRun, getLatestRunId } from '@myco/db/queries/runs.js';\nimport { listReports } from '@myco/db/queries/reports.js';\nimport { listTurnsByRun } from '@myco/db/queries/turns.js';\nimport { buildTaskInstruction, isInstructionRequiredTask } from '@myco/agent/instruction-builders.js';\nimport { hasConfiguredProvider } from '@myco/agent/config-resolver.js';\nimport { loadMergedConfig } from '@myco/config/loader.js';\nimport { LOG_KINDS } from '@myco/constants/log-kinds.js';\nimport { notify } from '@myco/notifications/notify.js';\nimport type { RouteRequest, RouteResponse } from '../router.js';\nimport type { EmbeddingManager } from '../embedding/manager.js';\nimport type { DaemonLogger } from '../logger.js';\n\n// ---------------------------------------------------------------------------\n// Constants\n// ---------------------------------------------------------------------------\n\n/** Default limit for listing agent runs in the API. 
*/\nexport const AGENT_RUNS_DEFAULT_LIMIT = 50;\n\n// ---------------------------------------------------------------------------\n// Schemas\n// ---------------------------------------------------------------------------\n\nconst AgentRunBody = z.object({\n task: z.string().optional(),\n instruction: z.string().optional(),\n agentId: z.string().optional(),\n});\n\n// Re-export for backward compatibility\nexport { buildTaskInstruction, SKILL_GENERATE_TASK, SKILL_EVOLVE_TASK, SKILL_SURVEY_TASK } from '@myco/agent/instruction-builders.js';\n\n// ---------------------------------------------------------------------------\n// Types\n// ---------------------------------------------------------------------------\n\nexport interface AgentRunDeps {\n vaultDir: string;\n embeddingManager: EmbeddingManager;\n logger: DaemonLogger;\n}\n\n// ---------------------------------------------------------------------------\n// Factory\n// ---------------------------------------------------------------------------\n\nexport function createAgentRunHandlers(deps: AgentRunDeps) {\n const { vaultDir, embeddingManager, logger } = deps;\n\n /** POST /api/agent/run — trigger an agent run. */\n async function handleRun(req: RouteRequest): Promise<RouteResponse> {\n const { task, instruction: rawInstruction, agentId } = AgentRunBody.parse(req.body);\n\n // Guard: ensure a provider is configured before allowing a run.\n // Uses the same per-task-over-global precedence as the executor's resolver.\n const mycoConfig = loadMergedConfig(vaultDir);\n if (!hasConfiguredProvider(mycoConfig, task)) {\n return {\n status: 400,\n body: {\n ok: false,\n error: 'No agent provider configured. 
Configure one in Settings.',\n },\n };\n }\n\n let instruction = rawInstruction;\n let runContext: { candidate_id?: string } | undefined;\n if (task && !instruction) {\n let built;\n try {\n const taskParams = mycoConfig.agent.tasks?.[task]?.params;\n const projectRoot = resolve(vaultDir, '..');\n built = buildTaskInstruction(task, taskParams, agentId, projectRoot, embeddingManager);\n } catch {\n const projectRoot = resolve(vaultDir, '..');\n built = buildTaskInstruction(task, undefined, agentId, projectRoot, embeddingManager);\n }\n instruction = built?.instruction;\n runContext = built?.context;\n\n // Short-circuit: instruction-required tasks (skill-generate,\n // skill-evolve) must not run when there's no work to do. For a\n // manual trigger via the API, surface this as a 200 with a\n // skipped status rather than a failed run row — the caller\n // should see \"nothing to do\" as a valid outcome.\n if (task && isInstructionRequiredTask(task) && !built) {\n return {\n body: {\n ok: true,\n message: `Task ${task} skipped — no work to do`,\n status: 'skipped',\n reason: 'no-work',\n },\n };\n }\n }\n\n const { runAgent } = await import('@myco/agent/executor.js');\n const resultPromise = runAgent(vaultDir, {\n task,\n instruction,\n agentId,\n embeddingManager,\n runContext,\n });\n\n // runAgent inserts the run row synchronously before the first await.\n // Query for the most recently created run matching this task to get\n // the correct ID — not getRunningRun which may return a different task.\n const effectiveAgentId = agentId ?? 'myco-agent';\n const runId = getLatestRunId(effectiveAgentId, task);\n\n resultPromise\n .then((result) => {\n const taskName = task ?? 'agent run';\n if (result.status === 'failed') {\n notify(vaultDir, {\n domain: 'agents',\n type: 'agent.task.failure',\n title: `Task failed: ${taskName}`,\n message: result.error ?? 'Unknown error',\n link: `/agent?run=${result.runId}`,\n metadata: { taskName: task ?? 
null, runId: result.runId },\n }, mycoConfig);\n logger.error(LOG_KINDS.AGENT_ERROR, 'Agent run failed', {\n runId: result.runId,\n error: result.error ?? 'No error message',\n phases: result.phases?.map(p => `${p.name}:${p.status}`) ?? [],\n });\n } else {\n notify(vaultDir, {\n domain: 'agents',\n type: 'agent.task.success',\n title: `Task completed: ${taskName}`,\n link: `/agent?run=${result.runId}`,\n metadata: { taskName: task ?? null, runId: result.runId },\n }, mycoConfig);\n logger.info(LOG_KINDS.AGENT_RUN, 'Agent run completed', {\n runId: result.runId,\n status: result.status,\n phases: result.phases?.map(p => `${p.name}:${p.status}`) ?? [],\n });\n }\n })\n .catch((err) => {\n logger.error(LOG_KINDS.AGENT_ERROR, 'Agent run threw unhandled error', {\n error: (err as Error).message ?? String(err),\n stack: (err as Error).stack?.split('\\n').slice(0, 3).join(' | '),\n });\n });\n\n return { body: { ok: true, message: 'Agent started', runId } };\n }\n\n /** GET /api/agent/runs — list runs with filtering. */\n async function handleListRuns(req: RouteRequest): Promise<RouteResponse> {\n const limit = req.query.limit ? Number(req.query.limit) : AGENT_RUNS_DEFAULT_LIMIT;\n const offset = req.query.offset ? Number(req.query.offset) : 0;\n const agentId = req.query.agentId || undefined;\n const status = req.query.status || undefined;\n const task = req.query.task || undefined;\n const search = req.query.search || undefined;\n\n const filterOpts = { agent_id: agentId, status, task, search };\n const runs = listRuns({ ...filterOpts, limit, offset });\n const total = countRuns(filterOpts);\n\n return { body: { runs, total, offset, limit } };\n }\n\n /** GET /api/agent/runs/:id — get a single run. 
*/\n async function handleGetRun(req: RouteRequest): Promise<RouteResponse> {\n const run = getRun(req.params.id);\n if (!run) {\n return { status: 404, body: { error: 'Run not found' } };\n }\n return { body: { run } };\n }\n\n /** GET /api/agent/runs/:id/reports — list reports for a run. */\n async function handleGetRunReports(req: RouteRequest): Promise<RouteResponse> {\n const reports = listReports(req.params.id);\n return { body: { reports } };\n }\n\n /** GET /api/agent/runs/:id/turns — list turns for a run. */\n async function handleGetRunTurns(req: RouteRequest): Promise<RouteResponse> {\n const turns = listTurnsByRun(req.params.id);\n return { body: turns };\n }\n\n return {\n handleRun,\n handleListRuns,\n handleGetRun,\n handleGetRunReports,\n handleGetRunTurns,\n };\n}\n","/**\n * Attachment serving handler — serves attachment files from DB or disk fallback.\n *\n * Factory function injects vaultDir; returns a single route handler for\n * GET /api/attachments/:filename.\n */\n\nimport fs from 'node:fs';\nimport path from 'node:path';\nimport { getAttachmentByFilePath } from '@myco/db/queries/attachments.js';\nimport type { RouteRequest, RouteResponse } from '../router.js';\n\n// ---------------------------------------------------------------------------\n// Constants\n// ---------------------------------------------------------------------------\n\n/** Media type lookup for attachment file serving. 
*/\nconst ATTACHMENT_MEDIA_TYPES: Record<string, string> = {\n png: 'image/png',\n jpg: 'image/jpeg',\n jpeg: 'image/jpeg',\n gif: 'image/gif',\n webp: 'image/webp',\n};\n\n// ---------------------------------------------------------------------------\n// Types\n// ---------------------------------------------------------------------------\n\nexport interface AttachmentDeps {\n vaultDir: string;\n}\n\n// ---------------------------------------------------------------------------\n// Factory\n// ---------------------------------------------------------------------------\n\nexport function createAttachmentHandler(deps: AttachmentDeps) {\n const { vaultDir } = deps;\n\n /** GET /api/attachments/:filename — serve attachment from DB or disk fallback. */\n async function handleGetAttachment(req: RouteRequest): Promise<RouteResponse> {\n const filename = req.params.filename;\n // Prevent path traversal\n if (filename.includes('..') || filename.includes('/')) {\n return { status: 400, body: { error: 'invalid_filename' } };\n }\n\n // Try DB first (new path)\n const att = getAttachmentByFilePath(filename);\n if (att?.data) {\n const contentType = att.media_type ?? 'application/octet-stream';\n return { status: 200, headers: { 'Content-Type': contentType }, body: att.data };\n }\n\n // Fallback to disk for pre-migration attachments\n const filePath = path.join(vaultDir, 'attachments', filename);\n let diskData: Buffer;\n try {\n diskData = fs.readFileSync(filePath);\n } catch {\n return { status: 404, body: { error: 'not_found' } };\n }\n const ext = path.extname(filename).slice(1).toLowerCase();\n const contentType = ATTACHMENT_MEDIA_TYPES[ext] ?? 
'application/octet-stream';\n return { status: 200, headers: { 'Content-Type': contentType }, body: diskData };\n }\n\n return { handleGetAttachment };\n}\n","import fs from 'node:fs';\nimport path from 'node:path';\nimport { insertLogEntry } from '@myco/db/queries/logs.js';\nimport { kindToComponent } from '@myco/constants/log-kinds.js';\n\n/**\n * Replay JSONL log entries that are newer than the last entry in SQLite.\n * Returns the number of entries replayed.\n */\nexport function reconcileLogBuffer(logDir: string, sinceTimestamp: string): number {\n let replayed = 0;\n\n // Read log files in order: rotated files first (oldest), then current\n const files: string[] = [];\n for (let i = 3; i >= 1; i--) {\n const rotated = path.join(logDir, `daemon.${i}.log`);\n if (fs.existsSync(rotated)) files.push(rotated);\n }\n const current = path.join(logDir, 'daemon.log');\n if (fs.existsSync(current)) files.push(current);\n\n for (const file of files) {\n const content = fs.readFileSync(file, 'utf-8');\n for (const line of content.split('\\n')) {\n if (!line.trim()) continue;\n try {\n const entry = JSON.parse(line);\n if (entry.timestamp > sinceTimestamp) {\n const { timestamp, level, kind, component, message, ...rest } = entry;\n insertLogEntry({\n timestamp,\n level,\n kind: kind ?? `${component ?? 'unknown'}.unknown`,\n component: component ?? kindToComponent(kind ?? 'unknown'),\n message,\n data: Object.keys(rest).length > 0 ? JSON.stringify(rest) : null,\n session_id: rest.session_id ?? 
null,\n });\n replayed++;\n }\n } catch {\n // Skip malformed lines\n }\n }\n }\n\n return replayed;\n}\n","export const CONFIG_FOCUS_SECTION_PARAM = 'configSection';\nexport const CONFIG_FOCUS_FIELD_PARAM = 'configField';\n\nconst FIELD_ID_PREFIX = 'config-field-';\n\nexport const CONFIG_SECTION_IDS = {\n appearance: 'config-section-appearance',\n settingsAgent: 'config-section-settings-agent',\n settingsEmbedding: 'config-section-settings-embedding',\n settingsContextInjection: 'config-section-settings-context-injection',\n settingsNotifications: 'config-section-settings-notifications',\n settingsPlanCapture: 'config-section-settings-plan-capture',\n settingsProject: 'config-section-settings-project',\n agentOperations: 'config-section-agent-operations',\n operationsMaintenance: 'config-section-operations-maintenance',\n operationsBackup: 'config-section-operations-backup',\n} as const;\n\ntype ConfigPagePath = '/settings' | '/agent' | '/operations';\n\ninterface ConfigSectionTarget {\n page: ConfigPagePath;\n sectionId: string;\n sectionLabel: string;\n searchParams?: Record<string, string>;\n}\n\nexport interface ConfigFocusTarget extends ConfigSectionTarget {\n fieldPath: string;\n fieldLabel: string;\n}\n\ninterface PrefixRule extends ConfigSectionTarget {\n prefix: string;\n}\n\nconst SECTION_RULES: PrefixRule[] = [\n {\n prefix: 'appearance',\n page: '/settings',\n sectionId: CONFIG_SECTION_IDS.appearance,\n sectionLabel: 'Appearance',\n },\n {\n prefix: 'agent.provider',\n page: '/settings',\n sectionId: CONFIG_SECTION_IDS.settingsAgent,\n sectionLabel: 'Myco Agent',\n },\n {\n prefix: 'embedding',\n page: '/settings',\n sectionId: CONFIG_SECTION_IDS.settingsEmbedding,\n sectionLabel: 'Embedding',\n },\n {\n prefix: 'context',\n page: '/settings',\n sectionId: CONFIG_SECTION_IDS.settingsContextInjection,\n sectionLabel: 'Context Injection',\n },\n {\n prefix: 'notifications',\n page: '/settings',\n sectionId: CONFIG_SECTION_IDS.settingsNotifications,\n 
sectionLabel: 'Notifications',\n },\n {\n prefix: 'capture',\n page: '/settings',\n sectionId: CONFIG_SECTION_IDS.settingsPlanCapture,\n sectionLabel: 'Plan Capture',\n },\n {\n prefix: 'daemon',\n page: '/settings',\n sectionId: CONFIG_SECTION_IDS.settingsProject,\n sectionLabel: 'Project',\n },\n {\n prefix: 'agent.scheduled_tasks_enabled',\n page: '/agent',\n sectionId: CONFIG_SECTION_IDS.agentOperations,\n sectionLabel: 'Agent Operations',\n searchParams: { tab: 'config' },\n },\n {\n prefix: 'agent.event_tasks_enabled',\n page: '/agent',\n sectionId: CONFIG_SECTION_IDS.agentOperations,\n sectionLabel: 'Agent Operations',\n searchParams: { tab: 'config' },\n },\n {\n prefix: 'agent.summary_batch_interval',\n page: '/agent',\n sectionId: CONFIG_SECTION_IDS.agentOperations,\n sectionLabel: 'Agent Operations',\n searchParams: { tab: 'config' },\n },\n {\n prefix: 'maintenance',\n page: '/operations',\n sectionId: CONFIG_SECTION_IDS.operationsMaintenance,\n sectionLabel: 'Scheduled Maintenance',\n },\n {\n prefix: 'backup',\n page: '/operations',\n sectionId: CONFIG_SECTION_IDS.operationsBackup,\n sectionLabel: 'Backup & Restore',\n },\n];\n\nconst EXACT_FIELD_LABELS: Record<string, string> = {\n 'appearance.theme': 'Color Theme',\n 'appearance.mode': 'Mode',\n 'appearance.font': 'Font',\n 'appearance.density': 'Density',\n 'agent.provider': 'Provider',\n 'agent.provider.type': 'Provider',\n 'agent.provider.model': 'Model',\n 'agent.provider.base_url': 'Base URL',\n 'agent.provider.context_length': 'Context Length',\n 'embedding.provider': 'Provider',\n 'embedding.model': 'Model',\n 'embedding.base_url': 'Base URL',\n 'context.digest_tier': 'Digest Tier',\n 'context.prompt_search': 'Prompt Search',\n 'context.prompt_max_spores': 'Max Spores per Prompt',\n 'notifications.enabled': 'Notifications',\n 'notifications.default_mode': 'Default Display',\n 'notifications.system_notifications': 'Browser Notifications',\n 'capture.ignore_plan_dirs_in_git': 'Ignore Custom 
Plan Dirs In Git',\n 'capture.plan_dirs': 'Custom Directories',\n 'daemon.port': 'Daemon Port',\n 'daemon.log_level': 'Log Level',\n 'daemon.log_retention_days': 'Log Retention (days)',\n 'agent.scheduled_tasks_enabled': 'Scheduled Tasks',\n 'agent.event_tasks_enabled': 'Event-Driven Tasks',\n 'agent.summary_batch_interval': 'Title & Summary Batch Interval',\n 'maintenance.auto_optimize': 'Auto-optimize',\n 'maintenance.auto_optimize_interval_hours': 'Auto-optimize Interval',\n 'backup.dir': 'Backup Directory',\n};\n\nconst DYNAMIC_FIELD_LABEL_RULES: Array<{\n prefix: string;\n format: (path: string) => string | null;\n}> = [\n {\n prefix: 'notifications.domains.',\n format: (path) => {\n const match = /^notifications\\.domains\\.([^.]+)\\.(enabled|mode)$/.exec(path);\n if (!match) return null;\n const [, domain, leaf] = match;\n const domainLabel = humanizeToken(domain);\n return leaf === 'mode' ? `${domainLabel} Display` : `${domainLabel} Notifications`;\n },\n },\n];\n\nconst SAVE_MESSAGE_LABEL_LIMIT = 3;\n\nexport function configFieldId(path: string): string {\n return `${FIELD_ID_PREFIX}${path.replace(/[^a-zA-Z0-9]+/g, '-')}`;\n}\n\nexport function resolveConfigFocusTarget(path: string): ConfigFocusTarget | null {\n const section = findSectionRule(path);\n if (!section) return null;\n return {\n ...section,\n fieldPath: path,\n fieldLabel: resolveFieldLabel(path),\n };\n}\n\nexport function buildConfigFocusLink(target: ConfigFocusTarget): string {\n const params = new URLSearchParams(target.searchParams);\n params.set(CONFIG_FOCUS_SECTION_PARAM, target.sectionId);\n params.set(CONFIG_FOCUS_FIELD_PARAM, target.fieldPath);\n return `${target.page}?${params.toString()}`;\n}\n\nexport function buildScopedConfigSaveNotification(scope: 'project' | 'local', touchedPaths: string[]) {\n const uniquePaths = [...new Set(touchedPaths)];\n const scopeLabel = scope === 'local' ? 
'Personal' : 'Project';\n const focusTarget = uniquePaths.map(resolveConfigFocusTarget).find((target) => target !== null) ?? null;\n const fieldLabels = uniquePaths.map(resolveFieldLabel);\n const primaryLabel = fieldLabels[0] ?? 'Setting';\n const labelList = fieldLabels.slice(0, SAVE_MESSAGE_LABEL_LIMIT).join(', ');\n const remainingCount = Math.max(0, fieldLabels.length - SAVE_MESSAGE_LABEL_LIMIT);\n const messageLabel = remainingCount > 0 ? `${labelList}, +${remainingCount} more` : labelList;\n\n return {\n title: uniquePaths.length === 1 ? `${primaryLabel} saved` : `${uniquePaths.length} settings saved`,\n message: focusTarget\n ? `${focusTarget.sectionLabel} · ${messageLabel} · ${scopeLabel}`\n : `${messageLabel} · ${scopeLabel}`,\n link: focusTarget ? buildConfigFocusLink(focusTarget) : null,\n metadata: {\n scope,\n touched_paths: uniquePaths,\n field_labels: fieldLabels,\n focus_target: focusTarget\n ? {\n page: focusTarget.page,\n section_id: focusTarget.sectionId,\n field_path: focusTarget.fieldPath,\n field_label: focusTarget.fieldLabel,\n }\n : null,\n },\n };\n}\n\nfunction findSectionRule(path: string): ConfigSectionTarget | null {\n for (const rule of SECTION_RULES) {\n if (path === rule.prefix || path.startsWith(`${rule.prefix}.`)) {\n const { page, sectionId, sectionLabel, searchParams } = rule;\n return { page, sectionId, sectionLabel, searchParams };\n }\n }\n return null;\n}\n\nfunction resolveFieldLabel(path: string): string {\n const exact = EXACT_FIELD_LABELS[path];\n if (exact) return exact;\n\n for (const rule of DYNAMIC_FIELD_LABEL_RULES) {\n if (path === rule.prefix || path.startsWith(rule.prefix)) {\n const label = rule.format(path);\n if (label) return label;\n }\n }\n\n return humanizeToken(path.split('.').pop() ?? 
'setting');\n}\n\nfunction humanizeToken(value: string): string {\n return value\n .split(/[-_]/g)\n .filter(Boolean)\n .map((part) => part.charAt(0).toUpperCase() + part.slice(1))\n .join(' ');\n}\n","import type { DaemonLogger } from './logger.js';\nimport { LOG_KINDS } from '../constants/log-kinds.js';\n\nexport type PowerState = 'active' | 'idle' | 'sleep' | 'deep_sleep';\n\nexport interface PowerJob {\n name: string;\n runIn: PowerState[];\n fn: () => Promise<void>;\n /** When true, prevents transition from sleep → deep_sleep. */\n preventsDeepSleep?: () => boolean;\n}\n\nexport interface PowerManagerConfig {\n idleThresholdMs: number;\n sleepThresholdMs: number;\n deepSleepThresholdMs: number;\n activeIntervalMs: number;\n sleepIntervalMs: number;\n logger: DaemonLogger;\n}\n\nexport class PowerManager {\n private state: PowerState = 'active';\n private lastActivity: number = Date.now();\n private jobs: PowerJob[] = [];\n private timer: ReturnType<typeof setTimeout> | null = null;\n private running = false;\n private config: PowerManagerConfig;\n private logger: DaemonLogger;\n private deepSleepHeld = false;\n\n constructor(config: PowerManagerConfig) {\n this.config = config;\n this.logger = config.logger;\n }\n\n register(job: PowerJob): void {\n this.jobs.push(job);\n }\n\n replaceGroup(prefix: string, jobs: PowerJob[]): void {\n this.jobs = this.jobs.filter((job) => !job.name.startsWith(prefix));\n this.jobs.push(...jobs);\n }\n\n recordActivity(): void {\n this.lastActivity = Date.now();\n this.deepSleepHeld = false;\n\n if (this.state === 'deep_sleep') {\n this.logger.info(LOG_KINDS.POWER_STATE, 'Waking from deep sleep');\n this.state = 'active';\n this.scheduleNextTick();\n }\n }\n\n start(): void {\n this.lastActivity = Date.now();\n this.state = 'active';\n this.running = true;\n this.scheduleNextTick();\n this.logger.info(LOG_KINDS.POWER_STATE, 'PowerManager started', {\n jobs: this.jobs.map((j) => j.name),\n });\n }\n\n stop(): void {\n 
this.running = false;\n if (this.timer) {\n clearTimeout(this.timer);\n this.timer = null;\n }\n this.logger.info(LOG_KINDS.POWER_STATE, 'PowerManager stopped');\n }\n\n getState(): PowerState {\n this.evaluateState();\n return this.state;\n }\n\n private evaluateState(): void {\n const idleMs = Date.now() - this.lastActivity;\n let target: PowerState;\n\n if (idleMs >= this.config.deepSleepThresholdMs) {\n const blocker = this.jobs.find((j) => j.preventsDeepSleep?.());\n if (blocker) {\n target = 'sleep';\n if (!this.deepSleepHeld) {\n this.deepSleepHeld = true;\n this.logger.info(LOG_KINDS.POWER_STATE, 'Deep sleep held', { by: blocker.name });\n }\n } else {\n target = 'deep_sleep';\n this.deepSleepHeld = false;\n }\n } else if (idleMs >= this.config.sleepThresholdMs) {\n target = 'sleep';\n } else if (idleMs >= this.config.idleThresholdMs) {\n target = 'idle';\n } else {\n target = 'active';\n }\n\n if (target !== this.state) {\n this.logger.info(LOG_KINDS.POWER_STATE, 'Power state transition', {\n from: this.state,\n to: target,\n idle_ms: idleMs,\n });\n this.state = target;\n }\n }\n\n private scheduleNextTick(): void {\n if (!this.running) return;\n if (this.timer) clearTimeout(this.timer);\n\n const interval =\n this.state === 'sleep'\n ? 
this.config.sleepIntervalMs\n : this.config.activeIntervalMs;\n\n this.timer = setTimeout(() => this.tick(), interval);\n }\n\n private async tick(): Promise<void> {\n if (!this.running) return;\n\n this.evaluateState();\n\n if (this.state === 'deep_sleep') {\n this.logger.info(LOG_KINDS.POWER_STATE, 'Entering deep sleep — timer stopped');\n this.timer = null;\n return;\n }\n\n // Run eligible jobs\n const eligible = this.jobs.filter((j) => j.runIn.includes(this.state));\n this.logger.debug(LOG_KINDS.POWER_TICK, 'Tick', {\n state: this.state,\n jobs: eligible.map((j) => j.name),\n });\n\n for (const job of eligible) {\n try {\n await job.fn();\n } catch (err) {\n this.logger.error(LOG_KINDS.POWER_JOB_ERROR, `Job \"${job.name}\" failed`, {\n error: (err as Error).message,\n });\n }\n }\n\n this.scheduleNextTick();\n }\n}\n","/**\n * Session maintenance job.\n *\n * Two tasks run in sequence:\n * 1. Complete stale active sessions — active sessions with no new prompts\n * in more than the configured stale threshold are marked completed.\n * 2. 
Delete dead sessions — sessions with ≤ DEAD_SESSION_MAX_PROMPTS prompts\n * are deleted via cascade, including vault file and embedding vector cleanup.\n */\n\nimport { getDatabase } from '@myco/db/client.js';\nimport { deleteSessionCascade } from '@myco/db/queries/sessions.js';\nimport {\n epochSeconds,\n MS_PER_SECOND,\n STALE_SESSION_THRESHOLD_MS,\n DEAD_SESSION_MAX_PROMPTS,\n} from '../../constants.js';\nimport type { DaemonLogger } from '../logger.js';\nimport type { EmbeddingManager } from '../embedding/manager.js';\nimport { cleanupAfterSessionCascade } from './session-cleanup.js';\nimport { LOG_KINDS } from '../../constants/log-kinds.js';\n\n/**\n * Complete active sessions whose last prompt is older than the stale threshold.\n *\n * Uses COALESCE to fall back to the session's started_at when no prompt\n * batches exist (session was registered but never received a prompt).\n *\n * The activity-timestamp predicate itself is the only protection: sessions\n * with recent work fall outside the stale window and won't be swept. 
A\n * previously-registered session that's been idle past the threshold is\n * swept normally — if it later receives a new event, `event-dispatch.ts`\n * upserts it back to `status='active'`, so marking completed is reversible.\n *\n * @param thresholdSeconds window of inactivity before a session is stale\n * @returns number of sessions completed\n */\nexport function completeStaleActiveSessions(\n thresholdSeconds: number = STALE_SESSION_THRESHOLD_MS / MS_PER_SECOND,\n): number {\n const db = getDatabase();\n const cutoff = epochSeconds() - thresholdSeconds;\n\n const info = db.prepare(\n `UPDATE sessions\n SET status = 'completed', ended_at = COALESCE(ended_at, ?)\n WHERE status = 'active'\n AND COALESCE(\n (SELECT MAX(pb.started_at) FROM prompt_batches pb WHERE pb.session_id = sessions.id),\n sessions.started_at\n ) < ?`,\n ).run(epochSeconds(), cutoff);\n\n return info.changes;\n}\n\n/**\n * Find session IDs eligible for dead-session cleanup.\n *\n * A session is \"dead\" only if BOTH:\n * 1. Its status is NOT 'active' (prevents racing with a session that's\n * currently running — active sessions get swept by the stale-session\n * step first when truly idle).\n * 2. Its prompt_count is at most DEAD_SESSION_MAX_PROMPTS (default 0,\n * meaning only empty \"registered but never used\" sessions qualify).\n *\n * Also excludes currently-registered in-memory sessions as a defense-in-depth\n * guard against TOCTOU between the status check and the delete.\n */\nexport function findDeadSessionIds(registeredSessionIds: string[]): string[] {\n const db = getDatabase();\n\n const excludePlaceholders = registeredSessionIds.length > 0\n ? 
`AND id NOT IN (${registeredSessionIds.map(() => '?').join(', ')})`\n : '';\n\n const params: unknown[] = [DEAD_SESSION_MAX_PROMPTS, ...registeredSessionIds];\n\n const rows = db.prepare(\n `SELECT id FROM sessions\n WHERE prompt_count <= ?\n AND status != 'active'\n ${excludePlaceholders}`,\n ).all(...params) as { id: string }[];\n\n return rows.map((r) => r.id);\n}\n\nexport interface SessionMaintenanceDeps {\n logger: DaemonLogger;\n registeredSessionIds: () => string[];\n embeddingManager: EmbeddingManager;\n vaultDir: string;\n /**\n * Inactivity window (ms) after which an active session is marked completed.\n * When omitted, falls back to `STALE_SESSION_THRESHOLD_MS`.\n */\n staleThresholdMs?: number;\n}\n\n/**\n * Run both maintenance tasks in sequence:\n * 1. Complete stale active sessions\n * 2. Delete dead sessions (cascade)\n */\nexport async function runSessionMaintenance(deps: SessionMaintenanceDeps): Promise<void> {\n const { logger, registeredSessionIds, embeddingManager, vaultDir, staleThresholdMs } = deps;\n const registered = registeredSessionIds();\n\n // Task 1: Complete stale sessions\n const thresholdSeconds = (staleThresholdMs ?? 
STALE_SESSION_THRESHOLD_MS) / MS_PER_SECOND;\n const completed = completeStaleActiveSessions(thresholdSeconds);\n if (completed > 0) {\n logger.info(LOG_KINDS.MAINTENANCE_SESSION, 'Completed stale sessions', { count: completed });\n }\n\n // Task 2: Delete dead sessions\n const deadIds = findDeadSessionIds(registered);\n if (deadIds.length === 0) return;\n\n let deletedCount = 0;\n for (const sessionId of deadIds) {\n const result = deleteSessionCascade(sessionId);\n if (!result.deleted) continue;\n\n await cleanupAfterSessionCascade(sessionId, result, embeddingManager, vaultDir);\n\n deletedCount++;\n logger.info(LOG_KINDS.MAINTENANCE_SESSION, 'Deleted dead session', {\n session_id: sessionId,\n counts: result.counts,\n });\n }\n\n if (deletedCount > 0) {\n logger.info(LOG_KINDS.MAINTENANCE_SESSION, 'Dead session cleanup complete', { deleted: deletedCount });\n }\n}\n","/**\n * Power-managed job registrations.\n *\n * Extracted from main.ts — registers the 4 core housekeeping jobs\n * with the PowerManager: embedding reconciliation, session maintenance,\n * log retention, and auto-backup.\n */\n\nimport type { Database } from 'better-sqlite3';\nimport type { DaemonLogger } from './logger.js';\nimport type { PowerManager } from './power.js';\nimport type { EmbeddingManager } from './embedding/manager.js';\nimport type { SessionRegistry } from './lifecycle.js';\nimport type { MycoConfig } from '@myco/config/schema.js';\nimport type { DatabaseMaintenanceManager } from './database/manager.js';\nimport { runSessionMaintenance } from './jobs/session-maintenance.js';\nimport { createBackup } from './backup.js';\nimport { resolveBackupDir } from './api/backup.js';\nimport { deleteOldLogs } from '@myco/db/queries/logs.js';\nimport {\n listStaleStagingDirs,\n cleanupStagedSkill,\n} from '@myco/agent/tools/skill-staging.js';\nimport { EMBEDDING_BATCH_SIZE, MS_PER_DAY, MS_PER_HOUR } from '@myco/constants.js';\nimport { LOG_KINDS } from '@myco/constants/log-kinds.js';\n\n/**\n 
* Maximum age for a staging directory before the sweep reclaims it.\n * 24 hours is well beyond any legitimate skill-generate run — a task\n * that failed to clean up via the executor hook has long since gone.\n */\nconst STAGING_MAX_AGE_MS = 24 * 60 * 60 * 1000;\n\n// ---------------------------------------------------------------------------\n// Types\n// ---------------------------------------------------------------------------\n\nexport interface PowerJobDeps {\n embeddingManager: EmbeddingManager;\n registry: SessionRegistry;\n logger: DaemonLogger;\n // Holder so each job observes the current merged config at run time and\n // picks up setting flips without a daemon restart.\n liveConfig: { current: MycoConfig };\n db: Database;\n machineId: string;\n vaultDir: string;\n databaseManager: DatabaseMaintenanceManager;\n}\n\n// ---------------------------------------------------------------------------\n// Registration\n// ---------------------------------------------------------------------------\n\nexport function registerPowerJobs(powerManager: PowerManager, deps: PowerJobDeps): void {\n const { embeddingManager, registry, logger, liveConfig, db, machineId, vaultDir, databaseManager } = deps;\n\n let reconcileRunning = false;\n powerManager.register({\n name: 'embedding-reconcile',\n runIn: ['active', 'idle'],\n fn: async () => {\n if (reconcileRunning) return;\n reconcileRunning = true;\n try {\n await embeddingManager.reconcile(EMBEDDING_BATCH_SIZE);\n } finally {\n reconcileRunning = false;\n }\n },\n });\n\n powerManager.register({\n name: 'session-maintenance',\n runIn: ['active', 'idle', 'sleep'],\n fn: () => runSessionMaintenance({\n logger,\n registeredSessionIds: () => registry.sessions,\n embeddingManager,\n vaultDir,\n staleThresholdMs: liveConfig.current.daemon.stale_session_threshold_ms,\n }),\n });\n\n powerManager.register({\n name: 'log-retention',\n runIn: ['idle', 'sleep'],\n fn: async () => {\n const retentionDays = 
liveConfig.current.daemon.log_retention_days;\n const cutoff = new Date(Date.now() - retentionDays * MS_PER_DAY).toISOString();\n const deleted = deleteOldLogs(cutoff);\n if (deleted > 0) {\n logger.info(LOG_KINDS.LOG_RETENTION, `Deleted ${deleted} log entries older than ${retentionDays} days`, { deleted, retention_days: retentionDays });\n }\n },\n });\n\n // Auto-backup: create a local SQL dump during idle/sleep cycles\n powerManager.register({\n name: 'auto-backup',\n runIn: ['idle', 'sleep'],\n fn: async () => {\n try {\n const backupDir = resolveBackupDir(liveConfig.current, vaultDir);\n logger.info(LOG_KINDS.BACKUP_START, 'Auto-backup starting');\n const filePath = createBackup(db, backupDir, machineId);\n logger.info(LOG_KINDS.BACKUP_COMPLETE, 'Auto-backup complete', { file_path: filePath });\n } catch (err) {\n logger.error(LOG_KINDS.BACKUP_ERROR, 'Auto-backup failed', { error: (err as Error).message });\n }\n },\n });\n\n // Database optimize: run VACUUM + WAL checkpoint + ANALYZE during idle/sleep cycles\n powerManager.register({\n name: 'database-optimize',\n runIn: ['idle', 'sleep'],\n fn: async () => {\n const config = liveConfig.current;\n if (!config.maintenance?.auto_optimize) return;\n const intervalMs = (config.maintenance.auto_optimize_interval_hours ?? 24) * MS_PER_HOUR;\n const lastRun = await databaseManager.getLastOptimizeAt();\n if (lastRun !== null && Date.now() - lastRun < intervalMs) return;\n try {\n await databaseManager.optimize();\n } catch (err) {\n logger.error(LOG_KINDS.DATABASE_ERROR, 'Auto-optimize failed', {\n error: (err as Error).message,\n });\n }\n },\n });\n\n // Staging GC: belt-and-suspenders cleanup for skill-generate staging\n // dirs that escaped the executor's per-run failure hook — e.g., a\n // daemon crash between draft stage and the failure handler. 
Running\n // on every idle tick is cheap because the happy path has zero stale\n // entries and the check is a single readdir on a typically-empty\n // directory.\n powerManager.register({\n name: 'staging-gc',\n runIn: ['idle', 'sleep'],\n fn: async () => {\n const stale = listStaleStagingDirs(vaultDir, STAGING_MAX_AGE_MS);\n if (stale.length === 0) return;\n for (const candidateId of stale) {\n cleanupStagedSkill(vaultDir, candidateId);\n }\n logger.info(LOG_KINDS.MAINTENANCE_STAGING_GC, 'Staging GC swept stale skill drafts', {\n count: stale.length,\n candidate_ids: stale,\n });\n },\n });\n}\n","/**\n * Buffer reconciliation factory for the Myco daemon.\n *\n * The buffer is the authoritative event log — JSONL files on disk. The DB\n * (prompt_batches + activities) is a derived view. After a daemon restart,\n * reconciliation replays missed events from buffer files to keep the DB in sync.\n */\n\nimport fs from 'node:fs';\nimport path from 'node:path';\nimport { listBufferSessionIds, cleanStaleBuffers } from '@myco/capture/buffer.js';\nimport { listBatchesBySession } from '@myco/db/queries/batches.js';\nimport { getSession } from '@myco/db/queries/sessions.js';\nimport { STALE_BUFFER_MAX_AGE_MS } from '@myco/constants.js';\nimport { LOG_KINDS } from '@myco/constants/log-kinds.js';\nimport type { DaemonLogger } from './logger.js';\nimport { isSystemMessage, handleUserPrompt, handleToolUse, handleToolFailure } from './event-handlers.js';\n\n// ---------------------------------------------------------------------------\n// Constants\n// ---------------------------------------------------------------------------\n\n/** Event types replayed during buffer reconciliation. 
*/\nconst REPLAYABLE_EVENT_TYPES: ReadonlySet<string> = new Set(['user_prompt', 'tool_use', 'tool_failure']);\n\n// ---------------------------------------------------------------------------\n// Types\n// ---------------------------------------------------------------------------\n\nexport interface ReconcilerDeps {\n bufferDir: string;\n logger: DaemonLogger;\n}\n\nexport interface Reconciler {\n reconcileSession(sessionId: string): void;\n replayEvent(sessionId: string, event: Record<string, unknown>): 'prompt' | 'activity' | null;\n runStartupReconciliation(): void;\n /** Clear reconciliation state for a session (call on unregister). */\n clearSession(sessionId: string): void;\n}\n\n// ---------------------------------------------------------------------------\n// Factory\n// ---------------------------------------------------------------------------\n\n/**\n * Create a reconciler instance bound to the given buffer directory and logger.\n *\n * The returned object exposes `reconcileSession`, `replayEvent`, and\n * `runStartupReconciliation` — all of which share the internal\n * `reconciledSessions` set so that each session is only reconciled once\n * per daemon lifetime.\n */\nexport function createReconciler({ bufferDir, logger }: ReconcilerDeps): Reconciler {\n // Track sessions already reconciled this daemon lifetime to avoid\n // redundant file reads (startup scan + register + event can all fire).\n const reconciledSessions = new Set<string>();\n\n /**\n * Replay a single buffer event into the DB via the appropriate handler.\n *\n * Shared between reconcileSession (buffer replay) and the live /events\n * route to eliminate dispatch duplication.\n *\n * @returns 'prompt' | 'activity' | null indicating what was created.\n */\n function replayEvent(sessionId: string, event: Record<string, unknown>): 'prompt' | 'activity' | null {\n if (event.type === 'user_prompt') {\n if (isSystemMessage(String(event.prompt ?? 
''))) return null;\n handleUserPrompt(sessionId, String(event.prompt ?? ''));\n return 'prompt';\n }\n if (event.type === 'tool_use') {\n handleToolUse(\n sessionId,\n String(event.tool_name ?? ''),\n event.tool_input,\n typeof event.output_preview === 'string' ? event.output_preview : undefined,\n );\n return 'activity';\n }\n if (event.type === 'tool_failure') {\n handleToolFailure(\n sessionId,\n String(event.tool_name ?? ''),\n event.tool_input,\n typeof event.error === 'string' ? event.error : undefined,\n !!event.is_interrupt,\n );\n return 'activity';\n }\n return null;\n }\n\n /**\n * Reconcile buffer events against DB state for a session.\n *\n * The buffer is the authoritative event log. The DB (prompt_batches +\n * activities) is a derived view. After a daemon restart, the DB may be\n * missing events the daemon didn't process while it was down.\n *\n * Activities belong to batches — they're linked via the latest open batch\n * at insertion time. So we can't reconcile them separately. Instead, we\n * find where the DB diverges from the buffer (by prompt count) and replay\n * the FULL event stream from that point: prompts open batches, tool events\n * attach to the open batch — exactly the normal flow.\n */\n function reconcileSession(sessionId: string): void {\n if (reconciledSessions.has(sessionId)) return;\n reconciledSessions.add(sessionId);\n\n // Read buffer file directly — avoid EventBuffer constructor which reads\n // the file to compute a count we don't need.\n const bufferPath = path.join(bufferDir, `${sessionId}.jsonl`);\n let content: string;\n try {\n content = fs.readFileSync(bufferPath, 'utf-8').trim();\n } catch {\n return; // Buffer file doesn't exist or is unreadable\n }\n if (!content) return;\n\n // Buffer files outlive session rows — sessions may have been manually\n // deleted or cleaned up by the session cleanup job. 
Skip reconciliation\n // for sessions that no longer exist rather than resurrecting them.\n if (!getSession(sessionId)) {\n logger.debug(LOG_KINDS.LIFECYCLE_RECONCILE, 'Skipping reconciliation for deleted session', { session_id: sessionId });\n return;\n }\n\n const allEvents: Array<Record<string, unknown>> = content.split('\\n').map((line) => JSON.parse(line));\n\n // Find the divergence point: how many real prompts does the DB have?\n const existingBatchCount = listBatchesBySession(sessionId).length;\n\n let promptsSeen = 0;\n let replayStartIndex = -1;\n\n for (let i = 0; i < allEvents.length; i++) {\n const e = allEvents[i];\n if (e.type === 'user_prompt' && !isSystemMessage(String(e.prompt ?? ''))) {\n promptsSeen++;\n if (promptsSeen === existingBatchCount + 1) {\n replayStartIndex = i;\n break;\n }\n }\n }\n\n if (replayStartIndex === -1) return;\n\n // Replay full event stream from the divergence point\n const eventsToReplay = allEvents.slice(replayStartIndex).filter(\n (e) => REPLAYABLE_EVENT_TYPES.has(String(e.type)),\n );\n\n let promptsRecovered = 0;\n let activitiesRecovered = 0;\n\n for (const event of eventsToReplay) {\n try {\n const result = replayEvent(sessionId, event);\n if (result === 'prompt') promptsRecovered++;\n else if (result === 'activity') activitiesRecovered++;\n } catch (err) {\n logger.warn(LOG_KINDS.LIFECYCLE_RECONCILE, 'Reconciliation: failed to replay event', {\n type: String(event.type),\n error: String(err),\n });\n }\n }\n\n if (promptsRecovered > 0 || activitiesRecovered > 0) {\n logger.info(LOG_KINDS.LIFECYCLE_RECONCILE, 'Buffer reconciliation complete', {\n session_id: sessionId,\n prompts_recovered: promptsRecovered,\n activities_recovered: activitiesRecovered,\n });\n }\n }\n\n /**\n * Run startup reconciliation: clean stale buffers, then reconcile all\n * buffer sessions found on disk.\n */\n function runStartupReconciliation(): void {\n // Clean up stale buffer files (>24h) on startup\n const startupCleanedCount = 
cleanStaleBuffers(bufferDir, STALE_BUFFER_MAX_AGE_MS);\n if (startupCleanedCount > 0) {\n logger.info(LOG_KINDS.CAPTURE_BUFFER, 'Buffer cleanup complete', { stale_removed: startupCleanedCount });\n }\n\n // Reconcile all remaining buffer files — recover events from sessions\n // that had activity while the daemon was down.\n for (const sessionId of listBufferSessionIds(bufferDir)) {\n try {\n reconcileSession(sessionId);\n } catch (err) {\n logger.warn(LOG_KINDS.LIFECYCLE_RECONCILE, 'Startup reconciliation failed', { session_id: sessionId, error: String(err) });\n }\n }\n }\n\n function clearSession(sessionId: string): void {\n reconciledSessions.delete(sessionId);\n }\n\n return { reconcileSession, replayEvent, runStartupReconciliation, clearSession };\n}\n","/**\n * Stateless event handler functions for the Myco daemon capture layer.\n *\n * These handlers are pure/stateless — they have no closure dependencies on\n * main() and are extracted here for testability and modularity.\n */\n\nimport { epochSeconds, DEFAULT_AGENT_ID } from '@myco/constants.js';\nimport { getTeamMachineId } from './team-context.js';\nimport { closeOpenBatches, insertBatchStateless, incrementActivityCount } from '@myco/db/queries/batches.js';\nimport { insertActivityWithBatch } from '@myco/db/queries/activities.js';\nimport { updateSession, incrementSessionToolCount } from '@myco/db/queries/sessions.js';\nimport { createBatchLineage } from '@myco/db/queries/lineage.js';\n\n// ---------------------------------------------------------------------------\n// Constants\n// ---------------------------------------------------------------------------\n\n/** Max chars of tool input stored in the activity row. */\nexport const TOOL_INPUT_STORE_LIMIT = 4000;\n\n/** Max chars of tool output summary stored in the activity row. */\nexport const TOOL_OUTPUT_STORE_LIMIT = 2000;\n\n/** Max chars for deriving a title from the first user prompt. 
*/\nexport const TITLE_PREVIEW_CHARS = 80;\n\n/** Prefixes that identify system-injected messages (not real user prompts). */\nexport const SYSTEM_MESSAGE_PREFIXES = [\n '<task-notification>',\n '<system-reminder>',\n] as const;\n\n/** Returns true if the prompt is a system-injected message, not a real user prompt. */\nexport function isSystemMessage(prompt: string): boolean {\n const trimmed = prompt.trimStart();\n return SYSTEM_MESSAGE_PREFIXES.some((prefix) => trimmed.startsWith(prefix));\n}\n\n/** Extract a file path from tool input across snake_case and camelCase conventions. */\nfunction extractToolFilePath(toolInput: unknown): string | null {\n const inputObj = toolInput as Record<string, unknown> | undefined;\n if (!inputObj) return null;\n\n const filePath = inputObj.file_path;\n if (typeof filePath === 'string') return filePath;\n\n const camelFilePath = inputObj.filePath;\n if (typeof camelFilePath === 'string') return camelFilePath;\n\n return null;\n}\n\n// ---------------------------------------------------------------------------\n// Event handling helpers (exported for testing)\n// ---------------------------------------------------------------------------\n\n/**\n * Handle a UserPromptSubmit event: close previous batch, open new one.\n *\n * Fully stateless — prompt_number is derived from an inline DB subquery,\n * and open batches are closed with a blind UPDATE (no prior SELECT).\n *\n * @returns the new batch ID and prompt number\n */\nexport function handleUserPrompt(\n sessionId: string,\n prompt: string | undefined,\n): { batchId: number; promptNumber: number } {\n const now = epochSeconds();\n\n // Close any open batches for this session — blind UPDATE, no prior read\n closeOpenBatches(sessionId, now);\n\n // Insert new batch with prompt_number derived from DB\n const batch = insertBatchStateless({\n session_id: sessionId,\n user_prompt: prompt ?? 
null,\n started_at: now,\n created_at: now,\n machine_id: getTeamMachineId(),\n });\n\n // insertBatchStateless guarantees non-null prompt_number via COALESCE subquery\n const promptNumber = batch.prompt_number!;\n\n // Create HAS_BATCH lineage edge (best-effort)\n try { createBatchLineage(DEFAULT_AGENT_ID, sessionId, batch.id, now); } catch { /* lineage best-effort */ }\n\n // Update session prompt count\n updateSession(sessionId, { prompt_count: promptNumber });\n\n return { batchId: batch.id, promptNumber };\n}\n\n/**\n * Handle a PostToolUse event: insert activity with inline batch linkage.\n *\n * Fully stateless — the batch ID is resolved via an inline subquery in\n * `insertActivityWithBatch`, so no in-memory state is needed.\n */\nexport function handleToolUse(\n sessionId: string,\n toolName: string,\n toolInput: unknown,\n toolOutput: string | undefined,\n): void {\n const now = epochSeconds();\n\n const filePath = extractToolFilePath(toolInput);\n\n const activity = insertActivityWithBatch({\n session_id: sessionId,\n tool_name: toolName,\n tool_input: toolInput ? JSON.stringify(toolInput).slice(0, TOOL_INPUT_STORE_LIMIT) : null,\n tool_output_summary: toolOutput?.slice(0, TOOL_OUTPUT_STORE_LIMIT) ?? null,\n file_path: filePath,\n timestamp: now,\n created_at: now,\n });\n\n // Increment batch activity count if linked to a batch\n if (activity.prompt_batch_id !== null) {\n incrementActivityCount(activity.prompt_batch_id);\n }\n\n // Increment session-level tool_count atomically.\n incrementSessionToolCount(sessionId);\n}\n\n/**\n * Handle stop event: close all open batches for this session.\n *\n * Does NOT close the session — the Stop hook fires after every assistant\n * turn, not just session end. 
Session closure happens in /sessions/unregister\n * (SessionEnd hook).\n *\n * Fully stateless — uses `closeOpenBatches` (blind UPDATE) instead of\n * reading from an in-memory map.\n */\nexport function handleStopBatches(\n sessionId: string,\n): void {\n closeOpenBatches(sessionId, epochSeconds());\n}\n\n/**\n * Handle a tool failure event: insert activity with success=0.\n */\nexport function handleToolFailure(\n sessionId: string,\n toolName: string,\n toolInput: unknown,\n error: string | undefined,\n isInterrupt: boolean | undefined,\n): void {\n const now = epochSeconds();\n const filePath = extractToolFilePath(toolInput);\n\n const activity = insertActivityWithBatch({\n session_id: sessionId,\n tool_name: toolName,\n tool_input: toolInput ? JSON.stringify(toolInput).slice(0, TOOL_INPUT_STORE_LIMIT) : null,\n tool_output_summary: error?.slice(0, TOOL_OUTPUT_STORE_LIMIT) ?? null,\n file_path: filePath,\n success: 0,\n error_message: error?.slice(0, TOOL_OUTPUT_STORE_LIMIT) ?? (isInterrupt ? 
'interrupted' : null),\n timestamp: now,\n created_at: now,\n });\n\n if (activity.prompt_batch_id !== null) {\n incrementActivityCount(activity.prompt_batch_id);\n }\n}\n\n/**\n * Handle a subagent start event: record that a subagent was spawned.\n */\nexport function handleSubagentStart(\n sessionId: string,\n agentId: string | undefined,\n agentType: string | undefined,\n): void {\n const now = epochSeconds();\n insertActivityWithBatch({\n session_id: sessionId,\n tool_name: 'subagent_start',\n tool_input: JSON.stringify({ agent_id: agentId, agent_type: agentType }).slice(0, TOOL_INPUT_STORE_LIMIT),\n timestamp: now,\n created_at: now,\n });\n}\n\n/**\n * Handle a subagent stop event: record that a subagent completed.\n */\nexport function handleSubagentStop(\n sessionId: string,\n agentId: string | undefined,\n agentType: string | undefined,\n lastAssistantMessage: string | undefined,\n): void {\n const now = epochSeconds();\n insertActivityWithBatch({\n session_id: sessionId,\n tool_name: 'subagent_stop',\n tool_input: JSON.stringify({ agent_id: agentId, agent_type: agentType }).slice(0, TOOL_INPUT_STORE_LIMIT),\n tool_output_summary: lastAssistantMessage?.slice(0, TOOL_OUTPUT_STORE_LIMIT) ?? null,\n timestamp: now,\n created_at: now,\n });\n}\n\n/**\n * Handle a stop failure event: record that the stop hook encountered an error.\n */\nexport function handleStopFailure(\n sessionId: string,\n error: string | undefined,\n errorDetails: string | undefined,\n): void {\n const now = epochSeconds();\n insertActivityWithBatch({\n session_id: sessionId,\n tool_name: 'stop_failure',\n tool_output_summary: errorDetails?.slice(0, TOOL_OUTPUT_STORE_LIMIT) ?? null,\n success: 0,\n error_message: error?.slice(0, TOOL_OUTPUT_STORE_LIMIT) ?? 
null,\n timestamp: now,\n created_at: now,\n });\n}\n\n/**\n * Handle a task completed event: record task completion as an activity.\n */\nexport function handleTaskCompleted(\n sessionId: string,\n taskId: string | undefined,\n taskSubject: string | undefined,\n taskDescription: string | undefined,\n): void {\n const now = epochSeconds();\n insertActivityWithBatch({\n session_id: sessionId,\n tool_name: 'task_completed',\n tool_input: JSON.stringify({ task_id: taskId, task_subject: taskSubject, task_description: taskDescription }).slice(0, TOOL_INPUT_STORE_LIMIT),\n tool_output_summary: taskSubject?.slice(0, TOOL_OUTPUT_STORE_LIMIT) ?? null,\n timestamp: now,\n created_at: now,\n });\n}\n\n/**\n * Handle a compact event (pre or post): record compaction in the activity stream.\n */\nexport function handleCompact(\n sessionId: string,\n phase: 'pre' | 'post',\n trigger: string | undefined,\n compactSummary: string | undefined,\n): void {\n const now = epochSeconds();\n insertActivityWithBatch({\n session_id: sessionId,\n tool_name: `${phase}_compact`,\n tool_input: trigger ? JSON.stringify({ trigger }).slice(0, TOOL_INPUT_STORE_LIMIT) : null,\n tool_output_summary: compactSummary?.slice(0, TOOL_OUTPUT_STORE_LIMIT) ?? null,\n timestamp: now,\n created_at: now,\n });\n}\n","/**\n * Stop-event processing pipeline.\n *\n * Extracted from daemon/main.ts. 
All logic for handling POST /events/stop lives\n * here: session auto-registration, transcript mining, batch reconciliation,\n * attachment capture, and title/summary agent task triggering.\n */\n\nimport { z } from 'zod';\nimport fs from 'node:fs';\nimport { TranscriptMiner, extractTurnsFromBuffer } from '@myco/capture/transcript-miner.js';\nimport type { TranscriptTurn } from '@myco/symbionts/adapter.js';\nimport { loadManifests } from '@myco/symbionts/detect.js';\nimport { captureBatchImages } from './capture-images.js';\nimport { extractTaggedPlans, capturePlan, TRANSCRIPT_SOURCE_PREFIX } from './plan-capture.js';\nimport {\n getLatestBatch,\n setResponseSummary,\n populateBatchResponses,\n closeOpenBatches,\n listBatchesBySession,\n findBatchByPromptPrefix,\n} from '@myco/db/queries/batches.js';\nimport { deleteSessionCascade, getSession, updateSession } from '@myco/db/queries/sessions.js';\nimport { detectSkillUsage, SKILL_USAGE_DETECTION_ENABLED } from './skill-usage.js';\nimport { epochSeconds, LOG_MESSAGE_PREVIEW_CHARS } from '@myco/constants.js';\nimport { TITLE_PREVIEW_CHARS } from './event-handlers.js';\nimport { SessionRegistry } from './lifecycle.js';\nimport { EventBuffer } from '@myco/capture/buffer.js';\nimport { DaemonLogger } from './logger.js';\nimport type { MycoConfig } from '@myco/config/schema.js';\nimport { EmbeddingManager } from './embedding/index.js';\nimport { LOG_KINDS } from '@myco/constants/log-kinds.js';\nimport { triggerTitleSummary as sharedTriggerTitleSummary } from './trigger-title-summary.js';\nimport type { RouteHandler } from './router.js';\nimport type { RegisteredSession } from './lifecycle.js';\nimport { evaluateSessionCaptureRules } from '@myco/hooks/capture-rules.js';\nimport { readTranscriptMeta } from '@myco/hooks/transcript-meta.js';\nimport { cleanupAfterSessionCascade } from './jobs/session-cleanup.js';\n\n// ---------------------------------------------------------------------------\n// Types\n// 
---------------------------------------------------------------------------\n\nexport interface StopProcessorDeps {\n registry: SessionRegistry;\n sessionBuffers: Map<string, EventBuffer>;\n transcriptMiner: TranscriptMiner;\n embeddingManager: EmbeddingManager;\n logger: DaemonLogger;\n liveConfig: { current: MycoConfig };\n vaultDir: string;\n /** Plan tag names to extract from transcript responses. Merged from all symbiont manifests. */\n planTags: string[];\n}\n\n// ---------------------------------------------------------------------------\n// Exported pure utility\n// ---------------------------------------------------------------------------\n\n/** Correlate buffer tool_use events with transcript turns by timestamp to populate toolBreakdown and files. */\nexport function enrichTurnsWithToolMetadata(turns: TranscriptTurn[], events: Array<Record<string, unknown>>): void {\n if (events.length === 0 || turns.length === 0) return;\n\n const toolEvents = events.filter((e) => e.type === 'tool_use');\n if (toolEvents.length === 0) return;\n\n let cursor = 0;\n for (let i = 0; i < turns.length; i++) {\n const turnEnd = i + 1 < turns.length ? turns[i + 1].timestamp : null;\n const breakdown: Record<string, number> = {};\n const files = new Set<string>();\n\n while (cursor < toolEvents.length) {\n const ts = String(toolEvents[cursor].timestamp ?? '');\n if (turnEnd !== null && ts >= turnEnd) break;\n const evt = toolEvents[cursor];\n const toolName = String(evt.tool_name ?? evt.tool ?? 'unknown');\n breakdown[toolName] = (breakdown[toolName] ?? 0) + 1;\n const input = evt.tool_input as Record<string, unknown> | undefined;\n const filePath = input?.file_path ?? 
input?.path;\n if (typeof filePath === 'string') files.add(filePath);\n cursor++;\n }\n\n if (Object.keys(breakdown).length > 0) {\n turns[i].toolBreakdown = breakdown;\n if (files.size > 0) turns[i].files = [...files];\n }\n }\n}\n\n// ---------------------------------------------------------------------------\n// Factory\n// ---------------------------------------------------------------------------\n\nexport function createStopProcessor(deps: StopProcessorDeps): {\n handleStopRoute: RouteHandler;\n clearSession: (sessionId: string) => void;\n getActiveProcessing: () => Promise<void> | null;\n triggerTitleSummary: (sessionId: string) => Promise<void>;\n} {\n const { registry, sessionBuffers, transcriptMiner, embeddingManager, logger, liveConfig, vaultDir } = deps;\n\n // Internal state\n let activeStopProcessing: Promise<void> | null = null;\n const sessionTitleCache = new Map<string, string>();\n\n // Route body schema.\n //\n // `transcript_path` is nullish (missing, undefined, or literal `null`)\n // because some symbionts fire Stop hooks for internal sub-invocations\n // that never write a transcript — notably Codex's title-generation\n // ephemeral session. 
The SessionStart capture-rule filter already\n // skips registering such sessions, so a null transcript_path here\n // means \"Stop for a session we never captured.\" The handler treats\n // that case as a silent no-op rather than erroring.\n const StopBody = z.object({\n session_id: z.string(),\n agent: z.string().optional(),\n user: z.string().optional(),\n transcript_path: z.string().nullish(),\n last_assistant_message: z.string().nullish(),\n });\n\n const triggerTitleSummary = (sessionId: string) =>\n sharedTriggerTitleSummary(sessionId, { vaultDir, embeddingManager, liveConfig, logger });\n\n function cleanupInvalidCapturedSession(sessionId: string): boolean {\n registry.unregister(sessionId);\n sessionBuffers.delete(sessionId);\n sessionTitleCache.delete(sessionId);\n\n const result = deleteSessionCascade(sessionId);\n if (!result.deleted) return false;\n\n cleanupAfterSessionCascade(sessionId, result, embeddingManager, vaultDir).catch(() => {});\n return true;\n }\n\n async function processStopEvent(\n sessionId: string,\n user: string | undefined,\n sessionMeta: RegisteredSession | undefined,\n hookTranscriptPath?: string,\n lastAssistantMessage?: string,\n ): Promise<void> {\n\n // --- Phase 1: Gather transcript data ---\n\n const transcriptResult = transcriptMiner.getAllTurnsWithSource(sessionId, hookTranscriptPath);\n let allTurns = transcriptResult.turns;\n let turnSource = transcriptResult.source;\n\n const bufferEvents = sessionBuffers.get(sessionId)?.readAll() ?? [];\n\n if (allTurns.length === 0) {\n allTurns = extractTurnsFromBuffer(bufferEvents);\n turnSource = 'buffer';\n } else if (bufferEvents.length > 0) {\n const lastTranscriptTs = allTurns[allTurns.length - 1].timestamp;\n if (lastTranscriptTs) {\n const newerEvents = bufferEvents.filter((e) =>\n String(e.timestamp ?? 
'') > lastTranscriptTs,\n );\n if (newerEvents.length > 0) {\n const bufferTurns = extractTurnsFromBuffer(newerEvents);\n allTurns = [...allTurns, ...bufferTurns];\n turnSource = `${transcriptResult.source}+buffer`;\n logger.info(LOG_KINDS.PROCESSOR_TRANSCRIPT, 'Appended buffer turns missing from transcript', {\n session_id: sessionId, transcriptTurns: transcriptResult.turns.length, bufferTurns: bufferTurns.length,\n });\n }\n }\n }\n\n // Attach the last assistant message from the hook to the most recent turn\n if (lastAssistantMessage && allTurns.length > 0) {\n const lastTurn = allTurns[allTurns.length - 1];\n if (!lastTurn.aiResponse) {\n lastTurn.aiResponse = lastAssistantMessage;\n }\n }\n\n enrichTurnsWithToolMetadata(allTurns, bufferEvents);\n\n const imageCount = allTurns.reduce((sum, t) => sum + (t.images?.length ?? 0), 0);\n logger.debug(LOG_KINDS.PROCESSOR_TRANSCRIPT, 'Transcript parsed', {\n session_id: sessionId,\n turn_count: allTurns.length,\n image_count: imageCount,\n });\n\n // --- Phase 2: Capture response + close session ---\n\n // Get the latest batch BEFORE closing — this is the batch for the current turn.\n const latestBatch = getLatestBatch(sessionId);\n\n // Primary capture: put last_assistant_message directly on the latest batch.\n // No positional mapping needed — the hook gives us the response directly.\n if (lastAssistantMessage && latestBatch && !latestBatch.response_summary) {\n try { setResponseSummary(latestBatch.id, lastAssistantMessage); }\n catch (err) { logger.warn(LOG_KINDS.PROCESSOR_BATCH, 'Failed to set response_summary on latest batch', { error: String(err) }); }\n }\n\n // Close open batches but do NOT close the session — the Stop hook fires\n // after every assistant turn, not just session end. 
The session is closed\n // when the SessionEnd hook fires (via /sessions/unregister).\n closeOpenBatches(sessionId, epochSeconds());\n\n // Derive a simple title from the first user prompt — but only if the\n // session has no title yet. Once the LLM (or anything else) sets a title,\n // stop overwriting it with the fallback.\n const existingSession = getSession(sessionId);\n const hasTitle = existingSession?.title !== null && existingSession?.title !== undefined;\n\n if (!hasTitle) {\n let title = sessionTitleCache.get(sessionId) ?? null;\n if (!title) {\n const firstBatch = listBatchesBySession(sessionId, { limit: 1 })[0];\n if (firstBatch?.user_prompt) {\n title = firstBatch.user_prompt.slice(0, TITLE_PREVIEW_CHARS);\n if (firstBatch.user_prompt.length > TITLE_PREVIEW_CHARS) {\n title += '...';\n }\n sessionTitleCache.set(sessionId, title);\n }\n }\n }\n\n // Update session with transcript metadata (no LLM calls).\n // Use MAX of current DB count vs transcript-derived count — the incremental\n // count from handleUserPrompt is authoritative during active sessions; the\n // transcript parse may see fewer turns if the file is incomplete.\n const currentSession = getSession(sessionId);\n const transcriptPromptCount = allTurns.length;\n const transcriptToolCount = allTurns.reduce((sum, t) => sum + t.toolCount, 0);\n\n const updateFields: Record<string, unknown> = {\n transcript_path: hookTranscriptPath ?? null,\n prompt_count: Math.max(transcriptPromptCount, currentSession?.prompt_count ?? 0),\n tool_count: Math.max(transcriptToolCount, currentSession?.tool_count ?? 
0),\n };\n if (user) updateFields.user = user;\n if (!hasTitle && sessionTitleCache.has(sessionId)) {\n updateFields.title = sessionTitleCache.get(sessionId);\n }\n\n updateSession(sessionId, updateFields as Parameters<typeof updateSession>[1]);\n\n // Detect skill usage from transcript content (best-effort, non-blocking).\n // Skip transcript I/O entirely when detection is disabled.\n if (SKILL_USAGE_DETECTION_ENABLED) {\n try {\n let transcriptText: string | null = null;\n if (hookTranscriptPath) {\n try { transcriptText = fs.readFileSync(hookTranscriptPath, 'utf-8'); }\n catch { /* file may not exist yet — fall through */ }\n }\n if (!transcriptText && allTurns.length > 0) {\n transcriptText = allTurns\n .map((t) => [t.prompt ?? '', t.aiResponse ?? ''].join(' '))\n .join('\\n');\n }\n if (transcriptText) {\n detectSkillUsage(sessionId, transcriptText);\n }\n } catch {\n // Best-effort — don't block reconciliation\n }\n }\n\n // Enhanced capture: populate response_summary on earlier batches from transcript.\n // Maps by batch insertion order (id ASC) to transcript turn position.\n // This is best-effort — the parser may skip empty-text turns, causing misalignment.\n // The primary capture (above) handles the current turn reliably.\n const responses: Array<{ turnIndex: number; response: string }> = [];\n for (let i = 0; i < allTurns.length; i++) {\n if (allTurns[i].aiResponse) {\n responses.push({ turnIndex: i + 1, response: allTurns[i].aiResponse! 
});\n }\n }\n if (responses.length > 0) {\n try { populateBatchResponses(sessionId, responses); }\n catch (err) { logger.warn(LOG_KINDS.PROCESSOR_BATCH, 'Failed to populate batch responses', { error: String(err) }); }\n }\n\n // --- Plan tag extraction from transcript responses ---\n if (deps.planTags.length > 0) {\n for (const turn of allTurns) {\n if (!turn.aiResponse) continue;\n const taggedPlans = extractTaggedPlans(turn.aiResponse, deps.planTags);\n for (const { tag, content } of taggedPlans) {\n try {\n capturePlan({\n sourcePath: `${TRANSCRIPT_SOURCE_PREFIX}${tag}`,\n content,\n sessionId,\n promptBatchId: latestBatch?.id ?? null,\n });\n logger.info(LOG_KINDS.CAPTURE_PLAN, 'Plan captured from transcript tag', {\n session_id: sessionId,\n tag,\n content_length: content.length,\n });\n } catch (err) {\n logger.warn(LOG_KINDS.CAPTURE_PLAN, 'Failed to capture plan from transcript tag', {\n session_id: sessionId,\n tag,\n error: (err as Error).message,\n });\n }\n }\n }\n }\n\n // Trigger title/summary if the session still needs one.\n if (!hasTitle) {\n triggerTitleSummary(sessionId);\n }\n\n // Write images to attachments — decoupled from transcript turn indices.\n // After context compaction, transcript turn indices no longer match batch prompt_numbers.\n // Instead, match each turn to its batch by prompt text (content-based, not position-based).\n // Binary data is stored in the DB BLOB column; DB uses ON CONFLICT DO NOTHING → idempotent.\n for (let i = 0; i < allTurns.length; i++) {\n const turn = allTurns[i];\n if (!turn.images?.length) continue;\n\n // Resolve which batch this turn belongs to:\n // 1. Last turn → use latestBatch (always correct, comes from the current stop event)\n // 2. Earlier turns → match by prompt text prefix against DB\n // 3. 
Fallback → null batch_id (still saved, UI matches by filename pattern)\n const isLastTurn = i === allTurns.length - 1;\n let resolvedBatchId: number | null = null;\n let resolvedPromptNumber: number = i + 1; // default to turn index (pre-compaction compatible)\n\n if (isLastTurn && latestBatch) {\n resolvedBatchId = latestBatch.id;\n resolvedPromptNumber = latestBatch.prompt_number ?? resolvedPromptNumber;\n } else if (turn.prompt) {\n try {\n const match = findBatchByPromptPrefix(sessionId, turn.prompt);\n if (match) {\n resolvedBatchId = match.id;\n resolvedPromptNumber = match.prompt_number;\n }\n } catch { /* fallback to index-based */ }\n }\n\n captureBatchImages({\n sessionId,\n promptBatchId: resolvedBatchId,\n promptNumber: resolvedPromptNumber,\n images: turn.images,\n logger,\n });\n }\n\n logger.info(LOG_KINDS.PROCESSOR_SESSION, 'Session captured', {\n session_id: sessionId,\n turns: allTurns.length,\n source: turnSource,\n title: existingSession?.title ?? sessionTitleCache.get(sessionId) ?? '(untitled)',\n });\n }\n\n const handleStopRoute: RouteHandler = async (req) => {\n const {\n session_id: sessionId,\n agent,\n user,\n transcript_path: hookTranscriptPath,\n last_assistant_message: lastAssistantMessage,\n } = StopBody.parse(req.body);\n\n if (hookTranscriptPath) {\n const transcriptMeta = readTranscriptMeta(hookTranscriptPath) ?? undefined;\n const detectedAgent = agent ?? getSession(sessionId)?.agent ?? 'claude-code';\n const decision = evaluateSessionCaptureRules(loadManifests(), detectedAgent, {\n transcriptPath: hookTranscriptPath,\n transcriptMeta,\n });\n if (decision.action === 'drop') {\n const deleted = cleanupInvalidCapturedSession(sessionId);\n logger.info(LOG_KINDS.HOOKS_STOP, 'Stop ignored — invalid captured session', {\n session_id: sessionId,\n reason: decision.reason ?? 'rule',\n deleted_existing_session: deleted,\n });\n return { body: { ok: true, ignored: decision.reason ?? 
'rule' } };\n }\n }\n\n // Ephemeral sub-invocation guard.\n //\n // When Codex (or a similar agent) spawns an internal sub-invocation\n // that never writes a transcript — e.g., its title-generation call —\n // the sub-invocation's Stop hook fires with transcript_path=null.\n // The SessionStart capture-rule filter already skips registering\n // that session_id, so at this point we have no session row and no\n // meaningful Stop to process. Silently no-op rather than auto-\n // registering a row we then have nothing to update.\n const existingSessionMeta = registry.getSession(sessionId);\n if (!hookTranscriptPath && !existingSessionMeta) {\n // Info level so `grep hooks.stop` in the default daemon log confirms\n // the ephemeral-sub-invocation drop pattern is firing without\n // needing to crank the log level. Codex's sub-invocation behavior\n // is experimental upstream and may change over time — this log is\n // the signal we'd watch if the pattern needed revisiting.\n logger.info(LOG_KINDS.HOOKS_STOP, 'Stop ignored — ephemeral sub-invocation', {\n session_id: sessionId,\n });\n return { body: { ok: true, ignored: 'ephemeral-sub-invocation' } };\n }\n\n // Ensure session is registered (handles daemon restarts mid-session)\n if (!existingSessionMeta) {\n registry.register(sessionId, { started_at: new Date().toISOString() });\n logger.debug(LOG_KINDS.LIFECYCLE_AUTO_REGISTER, 'Auto-registered session from stop event', { session_id: sessionId });\n }\n const sessionMeta = existingSessionMeta ?? registry.getSession(sessionId);\n logger.info(LOG_KINDS.HOOKS_STOP, 'Stop received', {\n session_id: sessionId,\n has_transcript_path: !!hookTranscriptPath,\n has_response: !!lastAssistantMessage,\n });\n logger.debug(LOG_KINDS.HOOKS_STOP, 'Stop event detail', {\n session_id: sessionId,\n transcript_path: hookTranscriptPath ?? null,\n last_message_preview: lastAssistantMessage?.slice(0, LOG_MESSAGE_PREVIEW_CHARS) ?? 
null,\n });\n\n // Respond immediately — the hook should not block on processing.\n // Normalize nullish hook fields to undefined so downstream processStopEvent\n // keeps its existing `string | undefined` contract (the schema accepts\n // `nullish()` for robustness against ephemeral sub-invocation Stop events).\n const normalizedTranscriptPath = hookTranscriptPath ?? undefined;\n const normalizedAssistantMessage = lastAssistantMessage ?? undefined;\n const run = () => processStopEvent(sessionId, user, sessionMeta, normalizedTranscriptPath, normalizedAssistantMessage).catch((err) => {\n logger.error(LOG_KINDS.PROCESSOR_SESSION, 'Stop processing failed', { session_id: sessionId, error: (err as Error).message });\n });\n\n const prev = activeStopProcessing ?? Promise.resolve();\n activeStopProcessing = prev.then(run).finally(() => { activeStopProcessing = null; });\n\n return { body: { ok: true } };\n };\n\n return {\n handleStopRoute,\n clearSession: (sessionId: string) => { sessionTitleCache.delete(sessionId); },\n getActiveProcessing: () => activeStopProcessing,\n triggerTitleSummary,\n };\n}\n","/**\n * Shared image attachment persistence.\n *\n * Called from two paths that both produce the same attachment row shape:\n *\n * 1. `stop-processing.ts` — for claude-code / cursor, where images come from\n * transcript-mined `TranscriptTurn.images`. The transcript miner has\n * already decoded base64 blocks and produced a canonical `TranscriptImage`\n * per image per turn.\n *\n * 2. `event-dispatch.ts` — for opencode, where images come from the plugin\n * in the `user_prompt` event payload. 
The opencode plugin extracts each\n * image from a `FilePart.url` data URL and ships it straight to the\n * daemon, since opencode has no on-disk transcript to mine.\n *\n * Deterministic IDs (`${sessionShort}-b${promptNumber}-${index}`) keep the\n * insert idempotent under replay: `insertAttachment` uses `ON CONFLICT DO NOTHING`.\n */\n\nimport { insertAttachment } from '@myco/db/queries/attachments.js';\nimport { extensionForMimeType } from '@myco/symbionts/adapter.js';\nimport { epochSeconds } from '@myco/constants.js';\nimport { LOG_KINDS } from '@myco/constants/log-kinds.js';\nimport type { DaemonLogger } from './logger.js';\n\n/** Short session-id suffix used in attachment filenames and IDs. */\nconst SESSION_SHORT_LEN = 6;\n\n/** Image attachment in canonical base64 form. Matches `TranscriptImage`. */\nexport interface CapturedImage {\n /** Base64-encoded image bytes. */\n data: string;\n /** MIME type (e.g. `image/png`). */\n mediaType: string;\n}\n\nexport interface CaptureBatchImagesInput {\n sessionId: string;\n promptBatchId: number | null | undefined;\n promptNumber: number;\n images: CapturedImage[];\n logger: DaemonLogger;\n}\n\n/**\n * Persist a batch of images as attachment rows linked to a specific prompt batch.\n */\nexport function captureBatchImages(input: CaptureBatchImagesInput): void {\n const { sessionId, promptBatchId, promptNumber, images, logger } = input;\n if (images.length === 0) return;\n\n const sessionShort = sessionId.slice(-SESSION_SHORT_LEN);\n for (let j = 0; j < images.length; j++) {\n const img = images[j];\n if (!img?.data || !img?.mediaType) continue;\n try {\n const ext = extensionForMimeType(img.mediaType);\n const filename = `${sessionShort}-t${promptNumber}-${j + 1}.${ext}`;\n const inserted = insertAttachment({\n id: `${sessionShort}-b${promptNumber}-${j + 1}`,\n session_id: sessionId,\n prompt_batch_id: promptBatchId ?? 
undefined,\n file_path: filename,\n media_type: img.mediaType,\n data: Buffer.from(img.data, 'base64'),\n created_at: epochSeconds(),\n });\n // insertAttachment returns undefined on ON CONFLICT DO NOTHING — only\n // log when a row was actually inserted, otherwise stop-event replays\n // (or plugin retries) produce phantom \"Image stored in DB\" lines for\n // attachments that were already persisted.\n if (inserted) {\n logger.debug(LOG_KINDS.CAPTURE_ATTACHMENT, 'Image stored in DB', {\n filename,\n batch: promptNumber,\n });\n }\n } catch (err) {\n logger.warn(LOG_KINDS.CAPTURE_ATTACHMENT, 'Failed to record attachment', {\n error: String(err),\n });\n }\n }\n}\n","/**\n * Event-driven plan capture module.\n *\n * Provides pure detection and storage functions for capturing plan files\n * written to watched directories. Called by the daemon's event handler\n * (Task 6) when a tool event targets a plan directory.\n *\n * All functions are stateless — no file I/O, no event handling.\n */\n\nimport { createHash } from 'node:crypto';\nimport os from 'node:os';\nimport path from 'node:path';\nimport { CONTENT_HASH_ALGORITHM } from '@myco/constants.js';\nimport { upsertPlan } from '@myco/db/queries/plans.js';\nimport type { PlanRow } from '@myco/db/queries/plans.js';\n\n// ---------------------------------------------------------------------------\n// Transcript-based plan extraction\n// ---------------------------------------------------------------------------\n\n/**\n * Extract plan content from XML-style tags in transcript text.\n *\n * Scans the input text for each declared tag name and returns all matches.\n * Tags are exact names (e.g., 'proposed_plan' matches `<proposed_plan>...</proposed_plan>`).\n * Returns all occurrences — the caller decides upsert semantics (e.g., last one wins).\n */\nexport function extractTaggedPlans(\n text: string,\n tags: string[],\n): Array<{ tag: string; content: string }> {\n const results: Array<{ tag: string; content: string }> = 
[];\n for (const tag of tags) {\n const regex = new RegExp(`<${tag}>\\\\n?([\\\\s\\\\S]*?)\\\\n?</${tag}>`, 'g');\n let match;\n while ((match = regex.exec(text)) !== null) {\n const content = match[1].trim();\n if (content) results.push({ tag, content });\n }\n }\n return results;\n}\n\n// ---------------------------------------------------------------------------\n// Constants\n// ---------------------------------------------------------------------------\n\n/**\n * Source path prefix for plans extracted from transcript tags.\n * Used to build deterministic plan IDs: `transcript:<tagName>`.\n */\nexport const TRANSCRIPT_SOURCE_PREFIX = 'transcript:';\n\n/**\n * Tool names that constitute a file write operation.\n * Includes both PascalCase (Claude Code, Cursor, Codex, Windsurf, Gemini) and\n * lowercase (opencode) variants. `patch` is opencode's unified-diff tool.\n */\nconst FILE_WRITE_TOOLS = new Set([\n 'Write', 'Edit', 'Create',\n 'write', 'edit', 'patch', 'create',\n]);\n\n/** Regex matching a top-level markdown heading (# Title). */\nconst HEADING_REGEX = /^#\\s+(.+)$/m;\n\n/** Number of hex chars to use from the MD5 hash for plan IDs. */\nconst PLAN_ID_HASH_LENGTH = 16;\n\n// ---------------------------------------------------------------------------\n// Public API\n// ---------------------------------------------------------------------------\n\n/**\n * Check if a file path falls inside any watched plan directory.\n *\n * Both the file path and watch directories are resolved against projectRoot\n * before comparison, so relative and absolute paths both work correctly.\n */\nexport function isInPlanDirectory(\n filePath: string,\n watchDirs: string[],\n projectRoot: string,\n): boolean {\n const abs = path.isAbsolute(filePath) ? filePath : path.resolve(projectRoot, filePath);\n return watchDirs.some((dir) => {\n // Expand ~ to home directory (manifests use ~/... for global plan dirs)\n const expanded = dir.startsWith('~/') ? 
path.join(os.homedir(), dir.slice(2)) : dir;\n const absDir = path.isAbsolute(expanded) ? expanded : path.resolve(projectRoot, expanded);\n // Ensure we match a directory boundary, not a prefix of a sibling dir name.\n // e.g. absDir = /foo/plans must NOT match /foo/plans-extra\n const prefix = absDir.endsWith(path.sep) ? absDir : absDir + path.sep;\n return abs === absDir || abs.startsWith(prefix);\n });\n}\n\n/** Configuration for plan directory matching. */\nexport interface PlanWatchConfig {\n watchDirs: string[];\n projectRoot: string;\n extensions?: string[];\n}\n\n/**\n * Check if a tool event is a file write to a plan directory.\n *\n * Returns the file path if it matches, null otherwise. Only Write, Edit,\n * and Create tools are considered. Extension filtering enforces the\n * `artifact_extensions` config setting (e.g. ['.md']).\n */\nexport function isPlanWriteEvent(\n toolName: string,\n toolInput: Record<string, unknown> | undefined,\n config: PlanWatchConfig,\n): string | null {\n if (!FILE_WRITE_TOOLS.has(toolName)) return null;\n // `filePath` (camelCase) is opencode's convention — the plugin template ships\n // `input.args` through as `tool_input`, and opencode tools use camelCase args.\n const filePath = toolInput?.file_path ?? toolInput?.path ?? toolInput?.filePath;\n if (typeof filePath !== 'string') return null;\n if (!isInPlanDirectory(filePath, config.watchDirs, config.projectRoot)) return null;\n if (config.extensions?.length) {\n const ext = path.extname(filePath).toLowerCase();\n if (!config.extensions.includes(ext)) return null;\n }\n return filePath;\n}\n\n/**\n * Extract a plan title from markdown content.\n *\n * Looks for the first top-level heading (# Title). If none is found,\n * falls back to the provided filename. 
Returns null if neither is available.\n */\nexport function parsePlanTitle(content: string, filename?: string): string | null {\n const match = HEADING_REGEX.exec(content);\n if (match) return match[1].trim();\n return filename ?? null;\n}\n\n/** Input to capturePlan. */\nexport interface CapturePlanInput {\n /** Absolute or relative path to the source plan file. */\n sourcePath: string;\n /** Full markdown content of the plan file. */\n content: string;\n /** Session ID that triggered the write event. */\n sessionId: string;\n /** Optional prompt batch ID at the time of capture. */\n promptBatchId?: number | null;\n}\n\n/**\n * Store a plan in the database.\n *\n * The plan ID is derived deterministically from sourcePath (MD5 hash,\n * first 16 chars), so repeated writes to the same file upsert rather than\n * insert duplicate rows.\n *\n * The content hash (SHA256) is used by upsertPlan to decide whether to\n * reset the embedded flag — if the content is unchanged the flag is\n * preserved.\n */\nexport function capturePlan(input: CapturePlanInput): PlanRow {\n const now = Math.floor(Date.now() / 1000);\n const contentHash = createHash(CONTENT_HASH_ALGORITHM).update(input.content).digest('hex');\n const id = createHash('md5').update(input.sourcePath).digest('hex').slice(0, PLAN_ID_HASH_LENGTH);\n const title = parsePlanTitle(input.content, path.basename(input.sourcePath));\n\n return upsertPlan({\n id,\n title,\n content: input.content,\n source_path: input.sourcePath,\n session_id: input.sessionId,\n prompt_batch_id: input.promptBatchId ?? null,\n content_hash: contentHash,\n status: 'active',\n created_at: now,\n updated_at: now,\n });\n}\n","/**\n * Skill usage detection for session reconciliation.\n *\n * Scans session transcript content for Myco-managed skill activations and\n * records them in the skill_usage table. 
Idempotent — skips skills already\n * recorded for this session.\n */\n\nimport { listSkillRecords, incrementSkillUsageCount } from '@myco/db/queries/skill-records.js';\nimport { insertSkillUsage, hasUsageForSkillAndSession } from '@myco/db/queries/skill-usage.js';\nimport { epochSeconds } from '@myco/constants.js';\nimport crypto from 'node:crypto';\n\n/** Set to true to enable automatic skill usage detection from transcripts. */\nexport const SKILL_USAGE_DETECTION_ENABLED = false;\n\n/** Maximum number of active skills to check in a single detection pass. */\nconst MAX_ACTIVE_SKILLS_CHECK = 1000;\n\n/**\n * Scan a session transcript for Myco-managed skill activations.\n * Idempotent — skips skills already recorded for this session.\n */\nexport function detectSkillUsage(sessionId: string, transcriptContent: string): void {\n // Skip transcripts that contain vault_write_skill calls — these are\n // agent sessions generating/evolving skills, not developer sessions using them.\n if (transcriptContent.includes('vault_write_skill')) return;\n\n // Automatic detection is gated: the regex-based approach produces false\n // positives when a skill name is merely *discussed* in a session (not actually\n // loaded). 
Re-enable once a reliable activation signal is available (e.g., a\n // specific tag that Claude Code emits when loading a skill file).\n if (!SKILL_USAGE_DETECTION_ENABLED) return;\n\n const activeSkills = listSkillRecords({ status: 'active', limit: MAX_ACTIVE_SKILLS_CHECK });\n if (activeSkills.length === 0) return;\n\n // Pre-compile patterns for all skills outside the loop\n const skillPatterns = activeSkills.map((skill) => ({\n skill,\n pattern: new RegExp(\n `skills/${escapeRegex(skill.name)}/SKILL\\\\.md|` +\n `<skill[^>]*name=[\"']${escapeRegex(skill.name)}[\"']`,\n ),\n }));\n\n const now = epochSeconds();\n\n for (const { skill, pattern } of skillPatterns) {\n try {\n if (!pattern.test(transcriptContent)) continue;\n\n // Idempotent: skip if already recorded for this session\n if (hasUsageForSkillAndSession(skill.id, sessionId)) continue;\n\n // Record usage\n insertSkillUsage({\n id: crypto.randomUUID(),\n skill_id: skill.id,\n session_id: sessionId,\n detected_at: now,\n });\n\n // Atomically increment usage_count\n incrementSkillUsageCount(skill.id, now);\n } catch {\n // Best-effort per skill — don't let one broken skill stop detection\n }\n }\n}\n\nfunction escapeRegex(s: string): string {\n return s.replace(/[.*+?^${}()|[\\]\\\\]/g, '\\\\$&');\n}\n","/**\n * Event dispatch factory for the Myco daemon.\n *\n * Extracted from daemon/main.ts. 
All logic for handling POST /events lives\n * here: session auto-registration, buffer persistence, and the full\n * if/else dispatch chain for all event types.\n */\n\nimport { z } from 'zod';\nimport fs from 'node:fs';\nimport path from 'node:path';\nimport type { RouteHandler } from './router.js';\nimport { SessionRegistry } from './lifecycle.js';\nimport { EventBuffer } from '@myco/capture/buffer.js';\nimport { PowerManager } from './power.js';\nimport { DaemonLogger } from './logger.js';\nimport type { MycoConfig } from '@myco/config/schema.js';\nimport type { PlanWatchConfig } from './plan-capture.js';\nimport { isPlanWriteEvent, capturePlan } from './plan-capture.js';\nimport {\n isSystemMessage,\n handleUserPrompt,\n handleToolUse,\n handleToolFailure,\n handleSubagentStart,\n handleSubagentStop,\n handleStopFailure,\n handleTaskCompleted,\n handleCompact,\n} from './event-handlers.js';\nimport { getLatestBatch } from '@myco/db/queries/batches.js';\nimport { upsertSession, reactivateSessionIfCompleted } from '@myco/db/queries/sessions.js';\nimport { captureBatchImages, type CapturedImage } from './capture-images.js';\nimport { epochSeconds, LOG_PROMPT_PREVIEW_CHARS } from '@myco/constants.js';\nimport { LOG_KINDS } from '@myco/constants/log-kinds.js';\n\n// ---------------------------------------------------------------------------\n// Schema\n// ---------------------------------------------------------------------------\n\nconst EventBody = z.object({ type: z.string(), session_id: z.string() }).passthrough();\n\n// ---------------------------------------------------------------------------\n// Deps\n// ---------------------------------------------------------------------------\n\nexport interface EventDispatchDeps {\n registry: SessionRegistry;\n sessionBuffers: Map<string, EventBuffer>;\n powerManager: PowerManager;\n logger: DaemonLogger;\n machineId: string;\n // Holder so summary_batch_interval is read fresh on each user_prompt event —\n // changing the 
interval in Settings takes effect on the very next prompt.\n liveConfig: { current: MycoConfig };\n vaultDir: string;\n reconcileSession: (sessionId: string) => void;\n planWatchConfig: PlanWatchConfig; // object reference — mutated in place for hot-reload\n triggerTitleSummary: (sessionId: string) => Promise<void>;\n}\n\n// ---------------------------------------------------------------------------\n// Factory\n// ---------------------------------------------------------------------------\n\nexport function createEventDispatcher(deps: EventDispatchDeps): RouteHandler {\n const {\n registry,\n sessionBuffers,\n powerManager,\n logger,\n machineId,\n liveConfig,\n vaultDir: vaultDir,\n reconcileSession,\n planWatchConfig,\n triggerTitleSummary,\n } = deps;\n\n const projectRoot = process.cwd();\n\n return async (req) => {\n const validated = EventBody.parse(req.body);\n const event = {\n ...validated,\n timestamp: (validated as Record<string, unknown>).timestamp ?? new Date().toISOString(),\n } as Record<string, unknown> & { type: string; session_id: string; timestamp: string };\n\n logger.debug(LOG_KINDS.HOOKS_EVENT, 'Event received', { type: event.type, session_id: event.session_id });\n\n // Ensure session is registered (idempotent — handles daemon restarts mid-session)\n if (!registry.getSession(event.session_id)) {\n registry.register(event.session_id, { started_at: event.timestamp });\n logger.debug(LOG_KINDS.LIFECYCLE_AUTO_REGISTER, 'Auto-registered session from event', { session_id: event.session_id });\n\n // Ensure SQLite session exists — explicitly set status='active' so\n // resumed sessions (previously 'completed') get reopened.\n const now = epochSeconds();\n const startedEpoch = Math.floor(new Date(event.timestamp).getTime() / 1000);\n upsertSession({\n id: event.session_id,\n agent: (event as Record<string, unknown>).agent as string ?? 
'claude-code',\n status: 'active',\n started_at: startedEpoch,\n created_at: now,\n machine_id: machineId,\n });\n\n // Reconcile buffer against DB — recover any prompts lost during downtime.\n reconcileSession(event.session_id);\n }\n\n // Persist to disk so events survive daemon restarts\n if (!sessionBuffers.has(event.session_id)) {\n const bufferDir = path.join(vaultDir, 'buffer');\n sessionBuffers.set(event.session_id, new EventBuffer(bufferDir, event.session_id));\n }\n sessionBuffers.get(event.session_id)!.append(event);\n\n // --- Prompt batch tracking ---\n if (event.type === 'user_prompt') {\n powerManager.recordActivity();\n const promptText = String(event.prompt ?? '');\n\n // Skip system-injected messages (task notifications, system reminders) —\n // they trigger UserPromptSubmit but are not real user prompts.\n if (isSystemMessage(promptText)) {\n logger.debug(LOG_KINDS.HOOKS_PROMPT, 'Skipped system-injected message', {\n session_id: event.session_id,\n prefix: promptText.trimStart().slice(0, LOG_PROMPT_PREVIEW_CHARS),\n });\n } else {\n logger.info(LOG_KINDS.HOOKS_PROMPT, 'User prompt received', {\n session_id: event.session_id,\n prompt_preview: promptText.slice(0, LOG_PROMPT_PREVIEW_CHARS),\n prompt_length: promptText.length,\n });\n // Flip a completed session back to active on genuine user activity.\n // The auto-register branch above only reactivates when the session\n // isn't in the in-memory registry (e.g., after daemon restart) —\n // without this, a manually-completed or stale-swept session stays\n // hidden from intelligence-task queries even after the user resumes.\n if (reactivateSessionIfCompleted(event.session_id)) {\n logger.info(LOG_KINDS.LIFECYCLE_AUTO_REGISTER, 'Reactivated completed session on new activity', {\n session_id: event.session_id,\n });\n }\n try {\n const { batchId, promptNumber } = handleUserPrompt(event.session_id, promptText || undefined);\n logger.debug(LOG_KINDS.CAPTURE_BATCH, 'Batch opened', { session_id: 
event.session_id, batch_id: batchId, prompt_number: promptNumber });\n\n // Plugin-based symbionts (opencode) ship image attachments in the\n // user_prompt event payload rather than in an on-disk transcript.\n // The stop-event transcript-mining path handles claude-code/cursor;\n // the persistence logic is shared between both paths via\n // captureBatchImages.\n const eventImages = event.images as CapturedImage[] | undefined;\n if (Array.isArray(eventImages) && eventImages.length > 0) {\n captureBatchImages({\n sessionId: event.session_id,\n promptBatchId: batchId,\n promptNumber,\n images: eventImages,\n logger,\n });\n }\n\n // Batch-threshold summary trigger\n const batchCount = promptNumber;\n const summaryInterval = liveConfig.current.agent.summary_batch_interval;\n if (summaryInterval > 0 && batchCount > 0 && batchCount % summaryInterval === 0) {\n triggerTitleSummary(event.session_id);\n }\n } catch (err) {\n logger.warn(LOG_KINDS.CAPTURE_BATCH, 'Failed to open batch', { session_id: event.session_id, error: (err as Error).message });\n }\n }\n }\n\n if (event.type === 'tool_use') {\n const toolName = String(event.tool_name ?? '');\n logger.debug(LOG_KINDS.HOOKS_TOOL, 'Tool use event', {\n session_id: event.session_id,\n tool_name: toolName,\n });\n // Plan capture — detect writes to watched directories (async, non-blocking)\n const planFilePath = isPlanWriteEvent(\n toolName,\n event.tool_input as Record<string, unknown> | undefined,\n planWatchConfig,\n );\n if (planFilePath) {\n const captureSessionId = event.session_id;\n fs.promises.readFile(planFilePath, 'utf-8').then((planContent) => {\n const latestBatch = getLatestBatch(captureSessionId);\n capturePlan({\n sourcePath: path.relative(projectRoot, planFilePath),\n content: planContent,\n sessionId: captureSessionId,\n promptBatchId: latestBatch?.id ?? 
null,\n });\n logger.info(LOG_KINDS.CAPTURE_PLAN, 'Plan captured', {\n session_id: captureSessionId,\n source_path: planFilePath,\n });\n }).catch((err) => {\n logger.warn(LOG_KINDS.CAPTURE_PLAN, 'Failed to capture plan', {\n error: (err as Error).message,\n path: planFilePath,\n });\n });\n }\n try {\n handleToolUse(\n event.session_id,\n toolName,\n event.tool_input,\n typeof event.output_preview === 'string' ? event.output_preview : undefined,\n );\n } catch (err) {\n logger.warn(LOG_KINDS.CAPTURE_ACTIVITY, 'Failed to record activity', { session_id: event.session_id, error: (err as Error).message });\n }\n }\n\n if (event.type === 'tool_failure') {\n const toolName = String(event.tool_name ?? '');\n logger.info(LOG_KINDS.HOOKS_TOOL, 'Tool failure event', {\n session_id: event.session_id,\n tool_name: toolName,\n is_interrupt: !!event.is_interrupt,\n });\n try {\n handleToolFailure(\n event.session_id,\n toolName,\n event.tool_input,\n typeof event.error === 'string' ? event.error : undefined,\n !!event.is_interrupt,\n );\n } catch (err) {\n logger.warn(LOG_KINDS.CAPTURE_ACTIVITY, 'Failed to record tool failure', { session_id: event.session_id, error: (err as Error).message });\n }\n }\n\n if (event.type === 'subagent_start') {\n logger.info(LOG_KINDS.HOOKS_SUBAGENT, 'Subagent start event', {\n session_id: event.session_id,\n agent_id: event.agent_id,\n agent_type: event.agent_type,\n });\n try {\n handleSubagentStart(\n event.session_id,\n typeof event.agent_id === 'string' ? event.agent_id : undefined,\n typeof event.agent_type === 'string' ? 
event.agent_type : undefined,\n );\n } catch (err) {\n logger.warn(LOG_KINDS.CAPTURE_ACTIVITY, 'Failed to record subagent start', { session_id: event.session_id, error: (err as Error).message });\n }\n }\n\n if (event.type === 'subagent_stop') {\n logger.info(LOG_KINDS.HOOKS_SUBAGENT, 'Subagent stop event', {\n session_id: event.session_id,\n agent_id: event.agent_id,\n agent_type: event.agent_type,\n });\n try {\n handleSubagentStop(\n event.session_id,\n typeof event.agent_id === 'string' ? event.agent_id : undefined,\n typeof event.agent_type === 'string' ? event.agent_type : undefined,\n typeof event.last_assistant_message === 'string' ? event.last_assistant_message : undefined,\n );\n } catch (err) {\n logger.warn(LOG_KINDS.CAPTURE_ACTIVITY, 'Failed to record subagent stop', { session_id: event.session_id, error: (err as Error).message });\n }\n }\n\n if (event.type === 'stop_failure') {\n logger.warn(LOG_KINDS.HOOKS_STOP, 'Stop failure event', {\n session_id: event.session_id,\n error: event.error,\n });\n try {\n handleStopFailure(\n event.session_id,\n typeof event.error === 'string' ? event.error : undefined,\n typeof event.error_details === 'string' ? event.error_details : undefined,\n );\n } catch (err) {\n logger.warn(LOG_KINDS.CAPTURE_ACTIVITY, 'Failed to record stop failure', { session_id: event.session_id, error: (err as Error).message });\n }\n }\n\n if (event.type === 'task_completed') {\n logger.info(LOG_KINDS.HOOKS_EVENT, 'Task completed event', {\n session_id: event.session_id,\n task_id: event.task_id,\n task_subject: event.task_subject,\n });\n try {\n handleTaskCompleted(\n event.session_id,\n typeof event.task_id === 'string' ? event.task_id : undefined,\n typeof event.task_subject === 'string' ? event.task_subject : undefined,\n typeof event.task_description === 'string' ? 
event.task_description : undefined,\n );\n } catch (err) {\n logger.warn(LOG_KINDS.CAPTURE_ACTIVITY, 'Failed to record task completion', { session_id: event.session_id, error: (err as Error).message });\n }\n }\n\n if (event.type === 'pre_compact') {\n logger.info(LOG_KINDS.HOOKS_EVENT, 'Pre-compact event', { session_id: event.session_id });\n try {\n handleCompact(\n event.session_id,\n 'pre',\n typeof event.trigger === 'string' ? event.trigger : undefined,\n undefined,\n );\n } catch (err) {\n logger.warn(LOG_KINDS.CAPTURE_ACTIVITY, 'Failed to record pre-compact', { session_id: event.session_id, error: (err as Error).message });\n }\n }\n\n if (event.type === 'post_compact') {\n logger.info(LOG_KINDS.HOOKS_EVENT, 'Post-compact event', { session_id: event.session_id });\n try {\n handleCompact(\n event.session_id,\n 'post',\n typeof event.trigger === 'string' ? event.trigger : undefined,\n typeof event.compact_summary === 'string' ? event.compact_summary : undefined,\n );\n } catch (err) {\n logger.warn(LOG_KINDS.CAPTURE_ACTIVITY, 'Failed to record post-compact', { session_id: event.session_id, error: (err as Error).message });\n }\n }\n\n return { body: { ok: true } };\n };\n}\n","/**\n * In-process registry for \"things that should happen after any successful\n * scoped-config write.\" Reactions register once at daemon startup with a\n * list of path prefixes; `fire(touchedPaths)` runs every reaction whose\n * registered prefixes match.\n *\n * Contract (documented, not enforced):\n * - A reaction MUST be idempotent: firing twice with the same touched paths\n * produces the same observable state.\n * - A reaction MUST NOT issue a scoped-config write itself (would recurse).\n */\n\nimport type { Logger } from '../logger.js';\nimport type { MycoConfig } from '../../config/schema.js';\n\nexport type ConfigReaction = (ctx: MycoConfig) => void | Promise<void>;\n\nexport interface ConfigReactionRegistry {\n /**\n * Register a reaction. 
`paths` is a list of dot-path prefixes. The reaction\n * fires when any touched path matches any listed prefix. Pass `[]` to fire\n * on every write.\n *\n * A prefix `p` matches a touched path `t` when `t === p` or\n * `t.startsWith(p + '.')`. So `'capture'` matches `'capture.plan_dirs'`\n * but not `'captures.x'` or `'capture_mode'`.\n *\n * The reaction receives the post-write merged config (project + local\n * overlay). Use this instead of reloading — the registry has already paid\n * the YAML + schema parse cost once.\n */\n on(paths: string[], reaction: ConfigReaction): void;\n\n /**\n * Fire every matching reaction in registration order. Awaits each in turn.\n * If a reaction throws, the error is logged and subsequent reactions still\n * run — the scoped write itself has already succeeded by this point.\n */\n fire(touchedPaths: string[], ctx: MycoConfig): Promise<void>;\n}\n\ninterface Entry {\n paths: string[];\n fn: ConfigReaction;\n}\n\nexport function createConfigReactionRegistry(logger: Logger): ConfigReactionRegistry {\n const entries: Entry[] = [];\n\n return {\n on(paths, fn) {\n entries.push({ paths, fn });\n },\n async fire(touchedPaths, ctx) {\n for (const entry of entries) {\n if (!shouldFire(entry.paths, touchedPaths)) continue;\n try {\n await entry.fn(ctx);\n } catch (err) {\n logger.error('config-reactions', 'reaction threw', { error: String(err) });\n }\n }\n },\n };\n}\n\nfunction shouldFire(registeredPaths: string[], touched: string[]): boolean {\n if (registeredPaths.length === 0) return true;\n for (const prefix of registeredPaths) {\n for (const path of touched) {\n if (path === prefix || path.startsWith(`${prefix}.`)) return true;\n }\n }\n return false;\n}\n","import { z } from 'zod';\nimport { loadMergedConfig } from '../../config/loader.js';\nimport type { MycoConfig } from '../../config/schema.js';\nimport type { Logger } from '../logger.js';\n\n/**\n * Best-effort merged config load for post-write reactions. 
A stale invalid\n * local overlay should not turn an already-persisted scoped write into a 500.\n */\nexport function loadReactionContext(vaultDir: string, logger: Logger): MycoConfig | null {\n try {\n return loadMergedConfig(vaultDir);\n } catch (err) {\n if (err instanceof z.ZodError) {\n logger.warn('config-reactions', 'skipping reactions because merged config is invalid', {\n issues: err.issues.map((issue) => ({\n path: issue.path.join('.'),\n message: issue.message,\n })),\n });\n return null;\n }\n throw err;\n }\n}\n","import type { ConfigReaction } from './config-reactions/registry.js';\nimport type { PlanWatchConfig } from './plan-capture.js';\n\nexport interface PlanWatchReactionDeps {\n symbiontPlanDirs: string[];\n /**\n * The live PlanWatchConfig object. This factory mutates `.watchDirs` in\n * place — do NOT pass a fresh copy. Downstream consumers (event dispatcher)\n * close over this same object reference for hot-reload to work.\n */\n planWatchConfig: PlanWatchConfig;\n}\n\n/**\n * Returns a reaction that refreshes `planWatchConfig.watchDirs` from the\n * merged `capture.plan_dirs` passed through by the registry. Mutates the\n * config object in place so consumers that closed over the reference see\n * the update.\n */\nexport function createPlanWatchReaction(deps: PlanWatchReactionDeps): ConfigReaction {\n return (ctx) => {\n const customDirs = ctx.capture.plan_dirs ?? [];\n deps.planWatchConfig.watchDirs = [...new Set([...deps.symbiontPlanDirs, ...customDirs])];\n deps.planWatchConfig.extensions = ctx.capture.artifact_extensions;\n };\n}\n","/**\n * Myco daemon — SQLite capture engine.\n *\n * All data goes to a local SQLite database (better-sqlite3). The intelligence\n * pipeline (extraction, embedding, consolidation, digest) is removed — it\n * moves to Phase 2 Agent SDK. 
What remains is the capture layer: session\n * lifecycle, prompt batch tracking, activity recording, and transcript mining.\n */\n\nimport { DaemonServer } from './server.js';\nimport { SessionRegistry } from './lifecycle.js';\nimport { DaemonLogger } from './logger.js';\nimport { loadMergedConfig, updateConfig } from '../config/loader.js';\nimport { resolvePort } from './port.js';\nimport { TranscriptMiner } from '../capture/transcript-miner.js';\nimport { createPerProjectAdapter } from '../symbionts/adapter.js';\nimport { claudeCodeAdapter } from '../symbionts/claude-code.js';\nimport { findPackageRoot } from '../utils/find-package-root.js';\nimport { EventBuffer } from '../capture/buffer.js';\nimport { loadManifests } from '../symbionts/detect.js';\nimport type { PlanWatchConfig } from './plan-capture.js';\nimport {\n handleGetConfig,\n handleGetMergedConfig,\n handleGetLocalConfig,\n handlePutScopedConfig,\n createPlanDirHandlers,\n} from './api/config.js';\nimport { handleLogSearch, handleLogStream, handleLogDetail, createLogIngestionHandler } from './api/log-explorer.js';\nimport { handleRestart } from './api/restart.js';\nimport { createUpdateHandlers } from './api/update.js';\nimport { reconcileConfiguredSymbionts } from '../symbionts/reconcile.js';\nimport { resolveGlobalPrefix, detectDevBuild, setDevBuildCliEntry } from './update-checker.js';\nimport { getMachineId } from './machine-id.js';\nimport { createBackupHandlers, createBackupConfigHandlers } from './api/backup.js';\nimport { createTeamHandlers } from './api/team-connect.js';\nimport { createCollectiveHandlers } from './api/collective.js';\nimport { createSessionLifecycleHandlers } from './api/session-lifecycle.js';\nimport {\n handleListCandidates,\n handleGetCandidate,\n handleUpdateCandidate,\n handleListSkillRecords,\n handleGetSkillRecord,\n handleDeleteCandidate,\n createSkillRecordDeleteHandler,\n} from './api/skills.js';\nimport { initTeamContext } from './team-context.js';\nimport { 
initTeamSync } from './team-sync-init.js';\nimport { ProgressTracker, handleGetProgress } from './api/progress.js';\nimport { handleGetModels } from './api/models.js';\nimport { computeConfigHash, createLiveStatsHandler } from './api/stats.js';\nimport {\n handleListSessions,\n handleGetSession,\n handleGetSessionBatches,\n handleGetBatchActivities,\n handleGetSessionAttachments,\n handleGetSessionPlans,\n createSessionMutationHandlers,\n} from './api/sessions.js';\nimport {\n handleListSpores,\n handleGetSpore,\n handleListEntities,\n handleGetGraphSeeds,\n handleGetGraph,\n handleGetFullGraph,\n handleGetDigest,\n} from './api/mycelium.js';\nimport { createSearchHandler } from './api/search.js';\nimport { createSessionContextHandler, createPromptContextHandler, createResumeContextHandler } from './api/context.js';\nimport { handleGetFeed } from './api/feed.js';\nimport { handleListSymbionts } from './api/symbionts.js';\nimport {\n handleGetEmbeddingStatus,\n handleEmbeddingDetails,\n handleEmbeddingRebuild,\n handleEmbeddingReconcile,\n handleEmbeddingCleanOrphans,\n handleEmbeddingReembedStale,\n} from './api/embedding.js';\nimport {\n handleDatabaseDetails,\n handleDatabaseOptimize,\n handleDatabaseVacuum,\n handleDatabaseReindex,\n handleDatabaseIntegrityCheck,\n} from './api/database.js';\nimport { EmbeddingManager, SqliteVecVectorStore, EmbeddingProviderAdapter, SqliteRecordSource } from './embedding/index.js';\nimport { DatabaseMaintenanceManager } from './database/manager.js';\nimport { registerBuiltinDomains } from '../notifications/domains.js';\nimport {\n handleListNotifications,\n handleCreateNotification,\n handleUpdateNotification,\n handleDismissAll,\n handleMarkAllRead,\n handleGetRegistry,\n handleUnreadCount,\n} from './api/notifications.js';\nimport { createEmbeddingProvider } from '../intelligence/llm.js';\nimport {\n handleListTasks,\n handleGetTask,\n handleGetTaskYaml,\n handleUpdateTask,\n handleCreateTask,\n handleCopyTask,\n 
handleDeleteTask,\n handleGetTaskConfig,\n handleUpdateTaskConfig,\n} from './api/agent-tasks.js';\nimport { handleGetProviders, handleTestProvider } from './api/providers.js';\nimport { registerScheduledTasks } from './task-scheduling.js';\nimport { initDatabase, vaultDbPath, closeDatabase, getDatabase } from '../db/client.js';\nimport { createSchema } from '../db/schema.js';\nimport { insertLogEntry, getMaxTimestamp } from '../db/queries/logs.js';\nimport { createMcpProxyHandlers } from './api/mcp-proxy.js';\nimport { createAgentRunHandlers } from './api/agent-runs.js';\nimport { createAttachmentHandler } from './api/attachments.js';\nimport { reconcileLogBuffer } from './log-reconcile.js';\nimport {\n POWER_IDLE_THRESHOLD_MS,\n POWER_SLEEP_THRESHOLD_MS,\n POWER_DEEP_SLEEP_THRESHOLD_MS,\n POWER_ACTIVE_INTERVAL_MS,\n POWER_SLEEP_INTERVAL_MS,\n RESTART_RESPONSE_FLUSH_MS,\n epochSeconds,\n} from '../constants.js';\nimport { RESTART_REASON_FILENAME } from '../constants/update.js';\nimport { buildScopedConfigSaveNotification } from '../config/focus.js';\nimport { notify } from '../notifications/notify.js';\nimport { PowerManager } from './power.js';\nimport { registerPowerJobs } from './power-jobs.js';\nimport {\n handleUserPrompt, handleToolUse, handleStopBatches, handleToolFailure,\n handleSubagentStart, handleSubagentStop, handleStopFailure,\n handleTaskCompleted, handleCompact,\n} from './event-handlers.js';\nimport { createReconciler } from './reconciliation.js';\nimport { createStopProcessor } from './stop-processing.js';\nimport { createEventDispatcher } from './event-dispatch.js';\nimport { createConfigReactionRegistry, computeTouchedPaths, loadReactionContext } from './config-reactions/index.js';\nimport { createPlanWatchReaction } from './plan-watch-reaction.js';\nexport {\n handleUserPrompt, handleToolUse, handleStopBatches, handleToolFailure,\n handleSubagentStart, handleSubagentStop, handleStopFailure,\n handleTaskCompleted, handleCompact,\n} from 
'./event-handlers.js';\nimport { loadSecrets } from '../config/secrets.js';\nimport { LOG_KINDS } from '../constants/log-kinds.js';\nimport fs from 'node:fs';\nimport os from 'node:os';\nimport path from 'node:path';\n\n// ---------------------------------------------------------------------------\n// Stale daemon cleanup\n// ---------------------------------------------------------------------------\n\n/**\n * Kill any stale daemon process for this vault before starting a new one.\n * Reads daemon.json — if a live process exists with that PID, kill it.\n * This prevents orphaned daemons from accumulating across restarts.\n */\nfunction killStaleDaemon(vaultDir: string, logger: DaemonLogger): void {\n const daemonJsonPath = path.join(vaultDir, 'daemon.json');\n try {\n if (!fs.existsSync(daemonJsonPath)) return;\n const info = JSON.parse(fs.readFileSync(daemonJsonPath, 'utf-8')) as { pid?: number };\n if (!info.pid) return;\n\n // Don't kill ourselves\n if (info.pid === process.pid) return;\n\n try {\n process.kill(info.pid, 0);\n process.kill(info.pid, 'SIGTERM');\n logger.info(LOG_KINDS.DAEMON_START, 'Killed stale daemon', { pid: info.pid });\n } catch { /* already dead */ }\n\n fs.unlinkSync(daemonJsonPath);\n } catch { /* daemon.json unreadable — ignore */ }\n}\n\n// ---------------------------------------------------------------------------\n// Main\n// ---------------------------------------------------------------------------\n\nexport async function main(): Promise<void> {\n const vaultArg = process.argv.find((_, i) => process.argv[i - 1] === '--vault');\n if (!vaultArg) {\n process.stderr.write('Usage: mycod --vault <path>\\n');\n process.exit(1);\n }\n\n const vaultDir = path.resolve(vaultArg);\n\n // Load API keys from secrets.env into process.env before any provider init\n loadSecrets(vaultDir);\n\n // Merged = project (myco.yaml) + personal overlay (local.yaml). 
Any gate\n // downstream of this needs to see personal overrides, so the daemon loads\n // the merged view and never the raw project config.\n const config = loadMergedConfig(vaultDir);\n // Mutable holder that reactions update after each scoped-config write, so\n // runtime gates (scheduled-task registration, event triggers) observe the\n // flipped value without a daemon restart.\n const liveConfig: { current: typeof config } = { current: config };\n\n const manifests = loadManifests();\n const symbiontPlanDirs = manifests.flatMap((m) => m.capture?.planDirs ?? []);\n const symbiontPlanTags = [...new Set(manifests.flatMap((m) => m.capture?.planTags ?? []))];\n const projectRoot = process.cwd();\n const planWatchConfig: PlanWatchConfig = {\n watchDirs: [...new Set([...symbiontPlanDirs, ...(config.capture.plan_dirs ?? [])])],\n projectRoot,\n extensions: config.capture.artifact_extensions,\n };\n\n const logger = new DaemonLogger(path.join(vaultDir, 'logs'), {\n level: config.daemon.log_level,\n });\n\n // When debug logging is on, surface per-turn tool_use / tool_result detail\n // from the agent executor. The executor reads this env var directly because\n // it has no logger handle. 
Used to diagnose turn-budget exhaustion (e.g.\n // local-model rejection loops in skill-generate).\n if (config.daemon.log_level === 'debug') {\n process.env.MYCO_AGENT_DEBUG = '1';\n }\n\n // Kill any stale daemon for this vault before starting\n killStaleDaemon(vaultDir, logger);\n\n logger.info(LOG_KINDS.DAEMON_CONFIG, 'Config loaded', {\n vault: vaultDir,\n embedding_provider: config.embedding.provider,\n });\n logger.info(LOG_KINDS.CAPTURE_PLAN, 'Plan watch directories', { dirs: planWatchConfig.watchDirs });\n if (symbiontPlanTags.length > 0) {\n logger.info(LOG_KINDS.CAPTURE_PLAN, 'Plan transcript tags', { tags: symbiontPlanTags });\n }\n\n // --- Machine identity ---\n const machineId = getMachineId(vaultDir);\n logger.info(LOG_KINDS.DAEMON_START, 'Machine ID resolved', { machine_id: machineId });\n\n // --- Resolve npm global prefix + detect dev build ---\n // globalPrefix is used both for installed-version detection (in the status\n // handler) and for dev-build auto-detection via detectDevBuild().\n let globalPrefix: string | null = null;\n try {\n globalPrefix = resolveGlobalPrefix();\n logger.debug(LOG_KINDS.DAEMON_START, 'npm global prefix resolved', { prefix: globalPrefix });\n } catch (err) {\n logger.warn(LOG_KINDS.DAEMON_START, 'Failed to resolve npm global prefix', {\n error: (err as Error).message,\n });\n }\n\n // Auto-detect dev builds: if the running binary isn't under the global\n // prefix, record the CLI entry via setDevBuildCliEntry() so update checks\n // are exempted and any restart/update shell script uses the dev binary\n // as its restart target (baked in at script-generation time — no env var\n // propagation required).\n const devCliEntry = detectDevBuild(\n globalPrefix,\n process.argv[1],\n fs.realpathSync,\n );\n if (devCliEntry) {\n setDevBuildCliEntry(devCliEntry);\n globalPrefix = null;\n logger.info(LOG_KINDS.DAEMON_START, 'Dev build detected; update checks exempted', {\n cli_entry: devCliEntry,\n });\n }\n\n // --- SQLite 
initialization ---\n const db = initDatabase(vaultDbPath(vaultDir));\n createSchema(db, machineId);\n registerBuiltinDomains();\n\n logger.info(LOG_KINDS.DAEMON_START, 'SQLite initialized', { vault: vaultDir });\n\n // --- Check for restart-reason signal file (left by version sync restart script) ---\n {\n const reasonPath = path.join(vaultDir, RESTART_REASON_FILENAME);\n try {\n if (fs.existsSync(reasonPath)) {\n const raw = JSON.parse(fs.readFileSync(reasonPath, 'utf-8')) as {\n reason?: string;\n from_version?: string;\n to_version?: string;\n local_update_ran?: boolean;\n };\n fs.unlinkSync(reasonPath);\n\n if (raw.reason === 'version_sync' && raw.to_version) {\n const message = raw.local_update_ran\n ? 'Restarted and updated local project hooks.'\n : 'Restarted to pick up the latest version.';\n\n notify(vaultDir, {\n domain: 'daemon',\n type: 'daemon.version_sync',\n title: `Updated to v${raw.to_version}`,\n message,\n metadata: {\n from_version: raw.from_version ?? 'unknown',\n to_version: raw.to_version,\n local_update_ran: raw.local_update_ran ?? false,\n },\n });\n\n logger.info(LOG_KINDS.DAEMON_START, 'Version sync restart detected', {\n from: raw.from_version,\n to: raw.to_version,\n local_update: raw.local_update_ran,\n });\n }\n }\n } catch (err) {\n logger.warn(LOG_KINDS.DAEMON_START, 'Failed to read restart-reason file', {\n error: (err as Error).message,\n });\n }\n }\n\n // --- Team context ---\n initTeamContext(config.team.enabled, machineId);\n\n // Wire logger to SQLite persistence\n logger.setPersistFn((entry) => {\n const { timestamp, level, kind, component, message, ...rest } = entry;\n insertLogEntry({\n timestamp,\n level,\n kind,\n component,\n message,\n data: Object.keys(rest).length > 0 ? JSON.stringify(rest) : null,\n session_id: (rest.session_id as string) ?? 
null,\n });\n });\n\n // Reconcile log entries missed while daemon was down\n const lastLogTimestamp = getMaxTimestamp();\n if (lastLogTimestamp) {\n const logDir = path.join(vaultDir, 'logs');\n const replayedCount = reconcileLogBuffer(logDir, lastLogTimestamp);\n if (replayedCount > 0) {\n logger.info(LOG_KINDS.DAEMON_RECONCILE, `Replayed ${replayedCount} log entries from buffer`, { replayed: replayedCount });\n }\n }\n\n // --- Embedding lifecycle manager ---\n const vectorsDbPath = path.join(vaultDir, 'vectors.db');\n const vectorStore = new SqliteVecVectorStore(vectorsDbPath);\n const llmProvider = createEmbeddingProvider(config.embedding);\n const embeddingProvider = new EmbeddingProviderAdapter(llmProvider, config.embedding);\n const recordSource = new SqliteRecordSource();\n const embeddingManager = new EmbeddingManager(vectorStore, embeddingProvider, recordSource, logger);\n logger.info(LOG_KINDS.EMBEDDING_EMBED, 'EmbeddingManager initialized', { vectors_db: vectorsDbPath });\n const databaseManager = new DatabaseMaintenanceManager(vaultDbPath(vaultDir), vaultDir, logger);\n\n // --- Register built-in agents and tasks ---\n let definitionsDir: string | undefined;\n try {\n const { registerBuiltInAgentsAndTasks, resolveDefinitionsDir } = await import('../agent/loader.js');\n definitionsDir = resolveDefinitionsDir();\n await registerBuiltInAgentsAndTasks(definitionsDir, vaultDir);\n logger.info(LOG_KINDS.AGENT_TASK, 'Built-in agents and tasks registered');\n } catch (err) {\n logger.warn(LOG_KINDS.AGENT_ERROR, 'Failed to register built-in agents/tasks', { error: (err as Error).message });\n }\n\n // Clean up stale \"running\" agent runs from previous daemon — they'll never complete\n try {\n const staleDb = getDatabase();\n // SQLite doesn't support RETURNING — query first, then update\n const staleRows = staleDb.prepare(\n `SELECT id, task FROM agent_runs WHERE status = 'running'`,\n ).all() as Array<{ id: string; task: string | null }>;\n\n if 
(staleRows.length > 0) {\n const completedAt = epochSeconds();\n staleDb.prepare(\n `UPDATE agent_runs SET status = 'failed', completed_at = ?, error = 'Daemon restarted while run was in progress' WHERE status = 'running'`,\n ).run(completedAt);\n for (const row of staleRows) {\n notify(vaultDir, {\n domain: 'agents',\n type: 'agent.task.failure',\n title: `Task failed: ${row.task ?? 'agent run'}`,\n message: 'Daemon restarted while run was in progress',\n link: `/agent?run=${row.id}`,\n metadata: { taskName: row.task, runId: row.id, reason: 'daemon_restart' },\n }, liveConfig.current);\n }\n logger.info(LOG_KINDS.AGENT_RUN, 'Cleaned stale running agent runs', {\n count: staleRows.length,\n ids: staleRows.map((r) => r.id),\n });\n }\n } catch (err) {\n logger.warn(LOG_KINDS.AGENT_ERROR, 'Failed to clean stale runs', { error: (err as Error).message });\n }\n\n // Resolve dist/ui/ from the package root\n let uiDir: string | null = null;\n {\n const root = findPackageRoot(path.dirname(new URL(import.meta.url).pathname));\n if (root) {\n const candidate = path.join(root, 'dist', 'ui');\n if (fs.existsSync(candidate)) uiDir = candidate;\n }\n }\n if (uiDir) {\n logger.debug(LOG_KINDS.DAEMON_START, 'Static UI directory found', { path: uiDir });\n }\n\n const powerManager = new PowerManager({\n idleThresholdMs: POWER_IDLE_THRESHOLD_MS,\n sleepThresholdMs: POWER_SLEEP_THRESHOLD_MS,\n deepSleepThresholdMs: POWER_DEEP_SLEEP_THRESHOLD_MS,\n activeIntervalMs: POWER_ACTIVE_INTERVAL_MS,\n sleepIntervalMs: POWER_SLEEP_INTERVAL_MS,\n logger,\n });\n\n const server = new DaemonServer({\n vaultDir,\n logger,\n uiDir: uiDir ?? 
undefined,\n // Don't record activity on every HTTP request — UI polling (every 3-10s)\n // would prevent the PowerManager from ever reaching 'idle' state, blocking\n // all idle-only scheduled tasks (skill-survey, skill-generate, skill-evolve).\n // Activity is recorded on meaningful events below (session register, prompt capture, etc.).\n });\n\n // The daemon serves the dashboard UI and must stay running regardless of\n // active sessions. No auto-shutdown — runs until explicitly killed.\n const registry = new SessionRegistry({\n gracePeriod: 0,\n onEmpty: () => {},\n });\n\n const transcriptMiner = new TranscriptMiner({\n additionalAdapters: config.capture.transcript_paths.map((p) =>\n createPerProjectAdapter(p, claudeCodeAdapter.parseTurns),\n ),\n });\n\n const bufferDir = path.join(vaultDir, 'buffer');\n const sessionBuffers = new Map<string, EventBuffer>();\n\n const reconciler = createReconciler({ bufferDir, logger });\n reconciler.runStartupReconciliation();\n\n // --- Stop processor (created early so triggerTitleSummary is available to /events route) ---\n const stopProcessor = createStopProcessor({\n registry,\n sessionBuffers,\n transcriptMiner,\n embeddingManager,\n logger,\n liveConfig,\n vaultDir,\n planTags: symbiontPlanTags,\n });\n\n // --- Session routes ---\n const sessionLifecycle = createSessionLifecycleHandlers({\n registry, sessionBuffers, reconciler, stopProcessor,\n server, powerManager, machineId, logger, liveConfig, vaultDir,\n });\n server.registerRoute('POST', '/sessions/register', sessionLifecycle.handleRegister);\n server.registerRoute('POST', '/sessions/unregister', sessionLifecycle.handleUnregister);\n\n // --- Event routes ---\n\n const eventDispatcher = createEventDispatcher({\n registry,\n sessionBuffers,\n powerManager,\n logger,\n machineId,\n liveConfig,\n vaultDir,\n reconcileSession: reconciler.reconcileSession,\n planWatchConfig,\n triggerTitleSummary: stopProcessor.triggerTitleSummary,\n });\n 
server.registerRoute('POST', '/events', eventDispatcher);\n\n // --- Stop route ---\n\n server.registerRoute('POST', '/events/stop', stopProcessor.handleStopRoute);\n\n // --- Context injection (digest + semantic spore search) ---\n const contextDeps = { embeddingManager, liveConfig, logger };\n server.registerRoute('POST', '/context', createSessionContextHandler(contextDeps));\n server.registerRoute('POST', '/context/resume', createResumeContextHandler(contextDeps));\n server.registerRoute('POST', '/context/prompt', createPromptContextHandler(contextDeps));\n\n // --- Dashboard API routes ---\n const progressTracker = new ProgressTracker();\n let configHash = computeConfigHash(vaultDir);\n\n server.registerRoute('GET', '/api/config', async () => handleGetConfig(vaultDir));\n server.registerRoute('GET', '/api/symbionts', async () => handleListSymbionts(vaultDir));\n\n server.registerRoute('GET', '/api/config/merged', async () => handleGetMergedConfig(vaultDir));\n server.registerRoute('GET', '/api/config/local', async () => handleGetLocalConfig(vaultDir));\n\n // Pre-compute symbiont plan dirs for the config endpoint (manifests don't change at runtime)\n const symbiontPlanDirsByAgent: Record<string, string[]> = {};\n for (const m of manifests) {\n const dirs = m.capture?.planDirs ?? [];\n if (dirs.length > 0) symbiontPlanDirsByAgent[m.displayName] = dirs;\n }\n\n // --- Config-change reaction registry ---\n // Reactions register once at daemon startup. `fire(touchedPaths, ctx)` runs\n // every matching reaction after a successful scoped-config write, passing\n // the freshly merged config so reactions don't reload it themselves. 
See\n // packages/myco/src/daemon/config-reactions/registry.ts for the contract.\n const reactions = createConfigReactionRegistry(logger);\n\n // Refresh the live-stats configHash on every write.\n reactions.on([], () => { configHash = computeConfigHash(vaultDir); });\n\n // Keep liveConfig pointed at the latest merged config so runtime gates\n // (agent.scheduled_tasks_enabled, agent.event_tasks_enabled) pick up\n // toggle flips immediately.\n reactions.on([], (ctx) => { liveConfig.current = ctx; });\n\n // Reinstall symbiont artefacts (agent hooks, .gitignore) when capture dirs\n // or symbiont enablement change. The reconcile has no other config inputs.\n reactions.on(['capture', 'symbionts'], (ctx) => {\n reconcileConfiguredSymbionts(path.dirname(vaultDir), vaultDir, ctx);\n });\n\n // Refresh the in-memory plan-watch list on capture changes.\n reactions.on(['capture'], createPlanWatchReaction({\n symbiontPlanDirs,\n planWatchConfig,\n }));\n\n // Live-reconfigure the logger on daemon.log_level change.\n reactions.on(['daemon.log_level'], (ctx) => {\n logger.setLevel(ctx.daemon.log_level);\n if (ctx.daemon.log_level === 'debug') {\n process.env.MYCO_AGENT_DEBUG = '1';\n } else {\n delete process.env.MYCO_AGENT_DEBUG;\n }\n });\n\n async function syncScheduledTasks() {\n await registerScheduledTasks(powerManager, { definitionsDir, vaultDir, embeddingManager, logger, liveConfig });\n }\n\n reactions.on(['agent.tasks'], async () => {\n await syncScheduledTasks();\n });\n\n async function applyConfigWriteReactions(touchedPaths: string[]) {\n const reactionContext = loadReactionContext(vaultDir, logger);\n if (!reactionContext) {\n configHash = computeConfigHash(vaultDir);\n return null;\n }\n await reactions.fire(touchedPaths, reactionContext);\n return reactionContext;\n }\n\n server.registerRoute('PUT', '/api/config/scoped', async (req) => {\n const result = await handlePutScopedConfig(vaultDir, req.body);\n if (!result.status || result.status < 400) {\n const 
body = req.body as { scope: 'project' | 'local'; patch?: unknown; clear?: string[] };\n const touchedPaths = computeTouchedPaths(body.patch, body.clear);\n const reactionContext = await applyConfigWriteReactions(touchedPaths);\n if (reactionContext) {\n const summary = buildScopedConfigSaveNotification(body.scope, touchedPaths);\n notify(vaultDir, {\n domain: 'settings',\n type: 'settings.saved',\n title: summary.title,\n message: summary.message,\n link: summary.link ?? undefined,\n metadata: summary.metadata,\n }, reactionContext);\n } else {\n configHash = computeConfigHash(vaultDir);\n }\n }\n return result;\n });\n\n const planDirHandlers = createPlanDirHandlers({\n symbiontPlanDirsByAgent,\n });\n server.registerRoute('GET', '/api/config/plan-dirs', planDirHandlers.handleGetPlanDirs);\n\n // V2 stats — vault counts, embedding coverage, agent status, digest freshness\n const configHashRef = { get: () => configHash };\n server.registerRoute('GET', '/api/stats', createLiveStatsHandler({\n vaultDir,\n registry,\n server,\n configHash: configHashRef,\n }));\n\n server.registerRoute('GET', '/api/logs', handleLogStream);\n server.registerRoute('GET', '/api/logs/search', handleLogSearch);\n server.registerRoute('GET', '/api/logs/stream', handleLogStream);\n server.registerRoute('GET', '/api/logs/:id', handleLogDetail);\n\n // External log ingestion: allows MCP server (separate process) to write through the daemon logger\n server.registerRoute('POST', '/api/log', createLogIngestionHandler(logger));\n\n server.registerRoute('GET', '/api/models', async (req) => handleGetModels(req));\n server.registerRoute('POST', '/api/restart', async (req) => handleRestart({ vaultDir, progressTracker }, req.body));\n\n // --- Update routes ---\n const updateProjectRoot = path.dirname(vaultDir);\n const updateHandlers = createUpdateHandlers({\n vaultDir,\n projectRoot: updateProjectRoot,\n currentVersion: server.version,\n globalPrefix,\n scheduleShutdown: () => {\n setTimeout(() => 
{\n process.kill(process.pid, 'SIGTERM');\n }, RESTART_RESPONSE_FLUSH_MS);\n },\n });\n\n server.registerRoute('GET', '/api/update/status', async (req) => updateHandlers.handleUpdateStatus(req));\n server.registerRoute('POST', '/api/update/check', async (req) => updateHandlers.handleUpdateCheck(req));\n server.registerRoute('POST', '/api/update/apply', async (req) => updateHandlers.handleUpdateApply(req));\n server.registerRoute('PUT', '/api/update/channel', async (req) => updateHandlers.handleUpdateChannel(req));\n\n server.registerRoute('GET', '/api/progress/:token', async (req) => handleGetProgress(progressTracker, req.params.token));\n\n server.registerRoute('GET', '/api/sessions', handleListSessions);\n\n server.registerRoute('GET', '/api/sessions/:id', handleGetSession);\n const sessionMutations = createSessionMutationHandlers({ embeddingManager, vaultDir, logger, liveConfig });\n server.registerRoute('GET', '/api/sessions/:id/impact', sessionMutations.handleGetSessionImpact);\n server.registerRoute('POST', '/api/sessions/:id/complete', sessionMutations.handleCompleteSession);\n server.registerRoute('DELETE', '/api/sessions/:id', sessionMutations.handleDeleteSession);\n server.registerRoute('GET', '/api/sessions/:id/batches', handleGetSessionBatches);\n server.registerRoute('GET', '/api/batches/:id/activities', handleGetBatchActivities);\n server.registerRoute('GET', '/api/sessions/:id/attachments', handleGetSessionAttachments);\n server.registerRoute('GET', '/api/sessions/:id/plans', handleGetSessionPlans);\n\n // --- Skill lifecycle API routes ---\n server.registerRoute('GET', '/api/skill-candidates', handleListCandidates);\n server.registerRoute('GET', '/api/skill-candidates/:id', handleGetCandidate);\n server.registerRoute('PUT', '/api/skill-candidates/:id', handleUpdateCandidate);\n server.registerRoute('GET', '/api/skill-records', handleListSkillRecords);\n server.registerRoute('GET', '/api/skill-records/:id', handleGetSkillRecord);\n 
server.registerRoute('DELETE', '/api/skill-candidates/:id', handleDeleteCandidate);\n server.registerRoute('DELETE', '/api/skill-records/:id', createSkillRecordDeleteHandler({ vaultDir, logger }));\n\n // --- Mycelium API routes ---\n server.registerRoute('GET', '/api/spores', handleListSpores);\n server.registerRoute('GET', '/api/spores/:id', handleGetSpore);\n server.registerRoute('GET', '/api/entities', handleListEntities);\n server.registerRoute('GET', '/api/graph/seeds', handleGetGraphSeeds);\n server.registerRoute('GET', '/api/graph', handleGetFullGraph);\n server.registerRoute('GET', '/api/graph/:id', handleGetGraph);\n server.registerRoute('GET', '/api/digest', handleGetDigest);\n\n const attachments = createAttachmentHandler({ vaultDir });\n server.registerRoute('GET', '/api/attachments/:filename', attachments.handleGetAttachment);\n\n // --- Agent API routes ---\n const agentRunHandlers = createAgentRunHandlers({ vaultDir, embeddingManager, logger });\n server.registerRoute('POST', '/api/agent/run', agentRunHandlers.handleRun);\n server.registerRoute('GET', '/api/agent/runs', agentRunHandlers.handleListRuns);\n server.registerRoute('GET', '/api/agent/runs/:id', agentRunHandlers.handleGetRun);\n server.registerRoute('GET', '/api/agent/runs/:id/reports', agentRunHandlers.handleGetRunReports);\n server.registerRoute('GET', '/api/agent/runs/:id/turns', agentRunHandlers.handleGetRunTurns);\n\n server.registerRoute('GET', '/api/agent/tasks', async (req) => handleListTasks(req, vaultDir));\n server.registerRoute('GET', '/api/agent/tasks/:id', async (req) => handleGetTask(req, vaultDir));\n server.registerRoute('GET', '/api/agent/tasks/:id/yaml', async (req) => handleGetTaskYaml(req, vaultDir));\n server.registerRoute('PUT', '/api/agent/tasks/:id', async (req) => {\n const result = await handleUpdateTask(req, vaultDir);\n if (!result.status || result.status < 400) {\n await syncScheduledTasks();\n }\n return result;\n });\n server.registerRoute('POST', 
'/api/agent/tasks', async (req) => {\n const result = await handleCreateTask(req, vaultDir);\n if (!result.status || result.status < 400) {\n await syncScheduledTasks();\n }\n return result;\n });\n server.registerRoute('POST', '/api/agent/tasks/:id/copy', async (req) => {\n const result = await handleCopyTask(req, vaultDir);\n if (!result.status || result.status < 400) {\n await syncScheduledTasks();\n }\n return result;\n });\n server.registerRoute('DELETE', '/api/agent/tasks/:id', async (req) => {\n const result = await handleDeleteTask(req, vaultDir);\n if (!result.status || result.status < 400) {\n await syncScheduledTasks();\n }\n return result;\n });\n server.registerRoute('GET', '/api/agent/tasks/:id/config', async (req) => handleGetTaskConfig(req, vaultDir));\n server.registerRoute('PUT', '/api/agent/tasks/:id/config', async (req) => {\n const result = await handleUpdateTaskConfig(req, vaultDir);\n if (!result.status || result.status < 400) {\n await applyConfigWriteReactions([`agent.tasks.${req.params.id}`]);\n }\n return result;\n });\n\n // --- Provider detection & testing ---\n server.registerRoute('GET', '/api/providers', async () => handleGetProviders());\n server.registerRoute('POST', '/api/providers/test', async (req) => handleTestProvider(req));\n\n // --- MCP proxy routes ---\n // These routes exist so the MCP server can proxy tool calls through the\n // daemon instead of opening its own SQLite connection.\n const mcpProxy = createMcpProxyHandlers({ machineId, embeddingManager });\n server.registerRoute('POST', '/api/mcp/remember', mcpProxy.handleRemember);\n server.registerRoute('POST', '/api/mcp/supersede', mcpProxy.handleSupersede);\n server.registerRoute('POST', '/api/mcp/consolidate', mcpProxy.handleConsolidate);\n server.registerRoute('GET', '/api/mcp/plans', mcpProxy.handlePlans);\n server.registerRoute('GET', '/api/mcp/sessions', mcpProxy.handleSessions);\n server.registerRoute('GET', '/api/mcp/team', mcpProxy.handleTeam);\n\n // --- 
Backup routes ---\n const backupHandlers = createBackupHandlers({ db, machineId, vaultDir, liveConfig });\n server.registerRoute('POST', '/api/backup', backupHandlers.handleCreateBackup);\n server.registerRoute('GET', '/api/backups', backupHandlers.handleListBackups);\n server.registerRoute('POST', '/api/restore/preview', backupHandlers.handleRestorePreview);\n server.registerRoute('POST', '/api/restore', backupHandlers.handleRestore);\n\n const backupConfigHandlers = createBackupConfigHandlers({ vaultDir });\n server.registerRoute('GET', '/api/backup/config', backupConfigHandlers.handleGetBackupConfig);\n server.registerRoute('PUT', '/api/backup/config', async (req) => {\n const result = await backupConfigHandlers.handlePutBackupConfig(req);\n if (!result.status || result.status < 400) {\n await applyConfigWriteReactions(['backup.dir']);\n }\n return result;\n });\n\n // --- Team sync ---\n const teamSync = initTeamSync({ liveConfig, machineId, logger, vaultDir, serverVersion: server.version });\n reactions.on(['team'], async () => {\n await teamSync.reconcileClient();\n });\n await teamSync.reconcileClient();\n\n const teamHandlers = createTeamHandlers({\n vaultDir,\n machineId,\n logger,\n getTeamClient: teamSync.getTeamClient,\n setTeamClient: teamSync.setTeamClient,\n });\n server.registerRoute('POST', '/api/team/connect', async (req) => {\n const result = await teamHandlers.handleConnect(req);\n if (!result.status || result.status < 400) {\n await applyConfigWriteReactions(['team.enabled', 'team.worker_url']);\n }\n return result;\n });\n server.registerRoute('POST', '/api/team/disconnect', async (req) => {\n const result = await teamHandlers.handleDisconnect(req);\n if (!result.status || result.status < 400) {\n await applyConfigWriteReactions(['team.enabled', 'team.worker_url']);\n }\n return result;\n });\n server.registerRoute('GET', '/api/team/status', teamHandlers.handleStatus);\n server.registerRoute('POST', '/api/team/backfill', 
teamHandlers.handleBackfill);\n server.registerRoute('POST', '/api/team/retry-failed', teamHandlers.handleRetryFailed);\n server.registerRoute('POST', '/api/team/upgrade-worker', teamHandlers.handleUpgradeWorker);\n server.registerRoute('POST', '/api/team/rotate-mcp-token', teamHandlers.handleRotateMcpToken);\n\n const collectiveHandlers = createCollectiveHandlers({\n getTeamClient: teamSync.getTeamClient,\n });\n server.registerRoute('GET', '/api/collective/status', collectiveHandlers.handleStatus);\n server.registerRoute('GET', '/api/collective/search', collectiveHandlers.handleSearch);\n server.registerRoute('GET', '/api/collective/projects', collectiveHandlers.handleProjects);\n server.registerRoute('GET', '/api/collective/project', collectiveHandlers.handleProject);\n server.registerRoute('GET', '/api/collective/settings', collectiveHandlers.handleSettings);\n\n // --- Search, activity feed, and embedding status ---\n\n server.registerRoute('GET', '/api/search', createSearchHandler({ embeddingManager, getTeamClient: teamSync.getTeamClient, machineId }));\n server.registerRoute('GET', '/api/activity', handleGetFeed);\n server.registerRoute('GET', '/api/embedding/status', async () => handleGetEmbeddingStatus(vaultDir));\n server.registerRoute('GET', '/api/embedding/details', async () => handleEmbeddingDetails(embeddingManager));\n server.registerRoute('POST', '/api/embedding/rebuild', async () => handleEmbeddingRebuild(embeddingManager));\n server.registerRoute('POST', '/api/embedding/reconcile', async () => handleEmbeddingReconcile(embeddingManager));\n server.registerRoute('POST', '/api/embedding/clean-orphans', async () => handleEmbeddingCleanOrphans(embeddingManager));\n server.registerRoute('POST', '/api/embedding/reembed-stale', async () => handleEmbeddingReembedStale(embeddingManager));\n server.registerRoute('GET', '/api/database/details', async () => handleDatabaseDetails(databaseManager));\n server.registerRoute('POST', '/api/database/optimize', async 
() => handleDatabaseOptimize(databaseManager));\n server.registerRoute('POST', '/api/database/vacuum', async () => handleDatabaseVacuum(databaseManager));\n server.registerRoute('POST', '/api/database/reindex', async () => handleDatabaseReindex(databaseManager));\n server.registerRoute('POST', '/api/database/integrity-check', async () => handleDatabaseIntegrityCheck(databaseManager));\n\n // --- Notification API routes ---\n server.registerRoute('GET', '/api/notifications', async (req) => handleListNotifications(vaultDir, req.query));\n server.registerRoute('POST', '/api/notifications', async (req) => handleCreateNotification(vaultDir, req.body));\n server.registerRoute('PATCH', '/api/notifications/:id', async (req) => handleUpdateNotification(vaultDir, req.params.id, req.body));\n server.registerRoute('POST', '/api/notifications/dismiss-all', async (req) => handleDismissAll(vaultDir, req.body));\n server.registerRoute('POST', '/api/notifications/mark-all-read', async (req) => handleMarkAllRead(vaultDir, req.body));\n server.registerRoute('GET', '/api/notifications/registry', async () => handleGetRegistry());\n server.registerRoute('GET', '/api/notifications/unread-count', async () => handleUnreadCount());\n\n // --- Start server ---\n\n await server.evictExistingDaemon();\n const resolvedPort = await resolvePort(config.daemon.port, vaultDir);\n if (resolvedPort === 0) {\n logger.warn(LOG_KINDS.DAEMON_PORT, 'All preferred ports occupied, using ephemeral port');\n }\n await server.start(resolvedPort);\n logger.info(LOG_KINDS.DAEMON_READY, 'Daemon ready', { vault: vaultDir, port: server.port });\n\n // Persist the resolved port to config if it was auto-derived\n if (config.daemon.port === null && resolvedPort !== 0) {\n try {\n updateConfig(vaultDir, (c) => ({\n ...c,\n daemon: { ...c.daemon, port: resolvedPort },\n }));\n logger.info(LOG_KINDS.DAEMON_CONFIG, 'Persisted auto-derived port to myco.yaml', { port: resolvedPort });\n } catch (err) {\n 
logger.warn(LOG_KINDS.DAEMON_CONFIG, 'Failed to persist auto-derived port', { error: (err as Error).message });\n }\n }\n\n // --- Register power-managed jobs ---\n registerPowerJobs(powerManager, { embeddingManager, registry, logger, liveConfig, db, machineId, vaultDir, databaseManager });\n teamSync.registerFlushJob(powerManager);\n\n // -- Dynamic task scheduling --\n await registerScheduledTasks(powerManager, { definitionsDir, vaultDir, embeddingManager, logger, liveConfig });\n\n powerManager.start();\n\n // --- Shutdown ---\n\n const shutdown = async (signal: string) => {\n logger.info(LOG_KINDS.DAEMON_START, `${signal} received`);\n powerManager.stop();\n // Wait for any active stop processing to finish before shutting down\n const activeStopProcessing = stopProcessor.getActiveProcessing();\n if (activeStopProcessing) {\n logger.info(LOG_KINDS.DAEMON_START, 'Waiting for active stop processing to complete...');\n await activeStopProcessing;\n }\n registry.destroy();\n await server.stop();\n vectorStore.close();\n closeDatabase();\n logger.close();\n process.exit(0);\n };\n\n process.on('SIGTERM', () => shutdown('SIGTERM'));\n process.on('SIGINT', () => 
shutdown('SIGINT'));\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,OAAO,UAAU;AACjB,OAAOA,SAAQ;AACf,OAAOC,WAAU;;;AC4BV,IAAM,SAAN,MAAa;AAAA,EACV,SAAuB,CAAC;AAAA,EAEhC,IAAI,QAAgB,SAAiB,SAA6B;AAChE,UAAM,OAAO,QAAQ,SAAS,GAAG,IAAI,UACxB,QAAQ,SAAS,IAAI,IAAI,WACzB;AACb,UAAM,WAAW,SAAS,UAAU,QAAQ,MAAM,GAAG,IAAI;AACzD,SAAK,OAAO,KAAK,EAAE,QAAQ,SAAS,SAAS,MAAM,SAAS,CAAC;AAAA,EAC/D;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,MAAM,QAAgB,QAAwC;AAC5D,UAAM,MAAM,IAAI,IAAI,QAAQ,kBAAkB;AAC9C,UAAM,WAAW,IAAI;AACrB,UAAM,QAAgC,CAAC;AACvC,QAAI,aAAa,QAAQ,CAAC,GAAG,MAAM;AAAE,YAAM,CAAC,IAAI;AAAA,IAAG,CAAC;AAGpD,QAAI;AACJ,QAAI;AAEJ,eAAW,SAAS,KAAK,QAAQ;AAC/B,UAAI,MAAM,WAAW,OAAQ;AAE7B,UAAI,MAAM,SAAS,WAAW,MAAM,YAAY,UAAU;AACxD,eAAO,EAAE,SAAS,MAAM,SAAS,QAAQ,CAAC,GAAG,OAAO,SAAS;AAAA,MAC/D;AAEA,UAAI,MAAM,SAAS,WAAW,CAAC,cAAc,MAAM,UAAU;AAC3D,cAAM,QAAQ,SAAS,MAAM,GAAG;AAChC,YAAI,MAAM,WAAW,MAAM,SAAS,QAAQ;AAC1C,gBAAM,SAAiC,CAAC;AACxC,cAAI,UAAU;AACd,mBAAS,IAAI,GAAG,IAAI,MAAM,SAAS,QAAQ,KAAK;AAC9C,gBAAI,MAAM,SAAS,CAAC,EAAE,WAAW,GAAG,GAAG;AACrC,qBAAO,MAAM,SAAS,CAAC,EAAE,MAAM,CAAC,CAAC,IAAI,MAAM,CAAC;AAAA,YAC9C,WAAW,MAAM,SAAS,CAAC,MAAM,MAAM,CAAC,GAAG;AACzC,wBAAU;AACV;AAAA,YACF;AAAA,UACF;AACA,cAAI,SAAS;AACX,yBAAa,EAAE,SAAS,MAAM,SAAS,QAAQ,OAAO,SAAS;AAAA,UACjE;AAAA,QACF;AAAA,MACF;AAEA,UAAI,MAAM,SAAS,YAAY,CAAC,aAAa;AAC3C,cAAM,SAAS,MAAM,QAAQ,MAAM,GAAG,EAAE;AACxC,YAAI,SAAS,WAAW,MAAM,GAAG;AAC/B,wBAAc,EAAE,SAAS,MAAM,SAAS,QAAQ,CAAC,GAAG,OAAO,SAAS;AAAA,QACtE;AAAA,MACF;AAAA,IACF;AAEA,WAAO,cAAc;AAAA,EACvB;AACF;;;AC5FA,OAAO,QAAQ;AACf,OAAO,UAAU;AAEjB,IAAM,sBAAsB;AAC5B,IAAM,kBAAkB;AACxB,IAAM,WAAW;AAEV,IAAM,aAAqC;AAAA,EAChD,SAAS;AAAA,EACT,OAAO;AAAA,EACP,QAAQ;AAAA,EACR,SAAS;AAAA,EACT,QAAQ;AAAA,EACR,QAAQ;AAAA,EACR,QAAQ;AAAA,EACR,SAAS;AAAA,EACT,UAAU;AAAA,EACV,QAAQ;AACV;AASO,SAAS,kBAAkB,OAAe,UAA
gD;AAE/F,QAAM,WAAW,SAAS,WAAW,GAAG,IAAI,SAAS,MAAM,CAAC,IAAI;AAGhE,QAAM,WAAW,KAAK,QAAQ,OAAO,YAAY,YAAY;AAC7D,MAAI,CAAC,SAAS,WAAW,KAAK,QAAQ,KAAK,CAAC,GAAG;AAC7C,WAAO;AAAA,EACT;AAGA,MAAI,GAAG,WAAW,QAAQ,KAAK,GAAG,SAAS,QAAQ,EAAE,OAAO,GAAG;AAC7D,UAAM,MAAM,KAAK,QAAQ,QAAQ;AACjC,UAAM,cAAc,WAAW,GAAG,KAAK;AACvC,UAAM,eAAe,SAAS,WAAW,mBAAmB,IAAI,kBAAkB;AAClF,WAAO,EAAE,UAAU,UAAU,aAAa,aAAa;AAAA,EACzD;AAGA,QAAM,YAAY,KAAK,KAAK,OAAO,YAAY;AAC/C,MAAI,GAAG,WAAW,SAAS,GAAG;AAC5B,WAAO,EAAE,UAAU,WAAW,aAAa,aAAa,cAAc,SAAS;AAAA,EACjF;AAEA,SAAO;AACT;;;AF1CA,IAAM,iBAAiB;AAShB,IAAM,eAAN,MAAmB;AAAA,EACxB,OAAO;AAAA,EACE;AAAA,EACT;AAAA,EACQ,SAA6B;AAAA,EAC7B;AAAA,EACA;AAAA,EACA,SAAS,IAAI,OAAO;AAAA,EACpB;AAAA,EAER,YAAY,QAA4B;AACtC,SAAK,WAAW,OAAO;AACvB,SAAK,SAAS,OAAO;AACrB,SAAK,QAAQ,OAAO,SAAS;AAC7B,SAAK,YAAY,OAAO,aAAa;AACrC,SAAK,UAAU,iBAAiB;AAChC,SAAK,sBAAsB;AAAA,EAC7B;AAAA,EAEA,cAAc,QAAgB,WAAmB,SAA6B;AAC5E,SAAK,OAAO,IAAI,QAAQ,WAAW,OAAO;AAAA,EAC5C;AAAA,EAEA,MAAM,MAAM,OAAe,GAAkB;AAC3C,WAAO,IAAI,QAAQ,CAACC,UAAS,WAAW;AACtC,WAAK,SAAS,KAAK,aAAa,CAAC,KAAK,QAAQ,KAAK,cAAc,KAAK,GAAG,CAAC;AAC1E,WAAK,OAAO,GAAG,SAAS,MAAM;AAE9B,WAAK,OAAO,OAAO,MAAM,aAAa,MAAM;AAC1C,cAAM,OAAO,KAAK,OAAQ,QAAQ;AAClC,aAAK,OAAO,KAAK;AACjB,aAAK,gBAAgB;AACrB,aAAK,OAAO,KAAK,UAAU,aAAa,kBAAkB,EAAE,MAAM,KAAK,MAAM,WAAW,oBAAoB,KAAK,IAAI,IAAI,CAAC;AAC1H,QAAAA,SAAQ;AAAA,MACV,CAAC;AAAA,IACH,CAAC;AAAA,EACH;AAAA,EAEA,MAAM,OAAsB;AAC1B,WAAO,IAAI,QAAQ,CAACA,aAAY;AAC9B,WAAK,iBAAiB;AACtB,UAAI,KAAK,QAAQ;AACf,aAAK,OAAO,MAAM,MAAM;AACtB,eAAK,OAAO,KAAK,UAAU,cAAc,gBAAgB;AACzD,UAAAA,SAAQ;AAAA,QACV,CAAC;AAAA,MACH,OAAO;AACL,QAAAA,SAAQ;AAAA,MACV;AAAA,IACF,CAAC;AAAA,EACH;AAAA,EAEQ,wBAA8B;AACpC,SAAK,cAAc,OAAO,WAAW,aAAa;AAAA,MAChD,MAAM;AAAA,QACJ,MAAM;AAAA,QACN,SAAS,KAAK;AAAA,QACd,KAAK,QAAQ;AAAA,QACb,QAAQ,QAAQ,OAAO;AAAA,MACzB;AAAA,IACF,EAAE;AAAA,EACJ;AAAA,EAEA,MAAc,cAAc,KAA2B,KAAyC;AAE9F,UAAM,QAAQ,KAAK,OAAO,MAAM,IAAI,QAAS,IAAI,GAAI;AAErD,QAAI,OAAO;AACT,WAAK,YAAY;AACjB,UAAI;AACF,cAAM,OAAQ,IAAI,WAAW,UAAU,IAAI,WAAW,SAAS,IAAI,WAAW,UAAW,MAAM,SAAS,GAAG,IAAI;AAC/G,cAAM,SAAS,MAAM,M
AAM,QAAQ;AAAA,UACjC;AAAA,UACA,OAAO,MAAM;AAAA,UACb,QAAQ,MAAM;AAAA,UACd,UAAU,MAAM;AAAA,QAClB,CAAC;AACD,cAAM,SAAS,OAAO,UAAU;AAChC,YAAI,OAAO,SAAS,OAAO,IAAI,GAAG;AAChC,cAAI,UAAU,QAAQ,OAAO,WAAW,CAAC,CAAC;AAC1C,cAAI,IAAI,OAAO,IAAI;AACnB;AAAA,QACF;AACA,cAAM,UAAU,EAAE,gBAAgB,oBAAoB,GAAG,OAAO,QAAQ;AACxE,YAAI,UAAU,QAAQ,OAAO;AAC7B,YAAI,IAAI,KAAK,UAAU,OAAO,IAAI,CAAC;AAAA,MACrC,SAAS,OAAO;AACd,aAAK,OAAO,MAAM,UAAU,cAAc,yBAAyB;AAAA,UACjE,MAAM,IAAI;AAAA,UACV,OAAQ,MAAgB;AAAA,QAC1B,CAAC;AACD,YAAI,UAAU,KAAK,EAAE,gBAAgB,mBAAmB,CAAC;AACzD,YAAI,IAAI,KAAK,UAAU,EAAE,OAAQ,MAAgB,QAAQ,CAAC,CAAC;AAAA,MAC7D;AACA;AAAA,IACF;AAGA,QAAI,KAAK,SAAS,IAAI,WAAW,OAAO;AACtC,YAAM,WAAW,IAAI,IAAI,IAAI,KAAM,kBAAkB,EAAE;AACvD,YAAM,SAAS,kBAAkB,KAAK,OAAO,QAAQ;AACrD,UAAI,QAAQ;AACV,YAAI;AACF,gBAAM,UAAU,MAAMC,IAAG,SAAS,SAAS,OAAO,QAAQ;AAC1D,cAAI,UAAU,KAAK;AAAA,YACjB,gBAAgB,OAAO;AAAA,YACvB,iBAAiB,OAAO;AAAA,UAC1B,CAAC;AACD,cAAI,IAAI,OAAO;AAAA,QACjB,QAAQ;AACN,cAAI,UAAU,KAAK,EAAE,gBAAgB,mBAAmB,CAAC;AACzD,cAAI,IAAI,KAAK,UAAU,EAAE,OAAO,YAAY,CAAC,CAAC;AAAA,QAChD;AACA;AAAA,MACF;AAAA,IACF;AAEA,QAAI,UAAU,KAAK,EAAE,gBAAgB,mBAAmB,CAAC;AACzD,QAAI,IAAI,KAAK,UAAU,EAAE,OAAO,YAAY,CAAC,CAAC;AAAA,EAChD;AAAA,EAEA,yBAAyB,UAA0B;AACjD,UAAM,WAAWC,MAAK,KAAK,KAAK,UAAU,aAAa;AACvD,QAAI;AACF,YAAM,OAAO,KAAK,MAAMD,IAAG,aAAa,UAAU,OAAO,CAAC;AAC1D,WAAK,WAAW;AAChB,MAAAA,IAAG,cAAc,UAAU,KAAK,UAAU,MAAM,MAAM,CAAC,CAAC;AAAA,IAC1D,QAAQ;AAAA,IAAkD;AAAA,EAC5D;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,MAAM,sBAAqC;AACzC,UAAM,WAAWC,MAAK,KAAK,KAAK,UAAU,aAAa;AACvD,QAAI;AACJ,QAAI;AACF,YAAM,UAAUD,IAAG,aAAa,UAAU,OAAO;AACjD,YAAM,OAAO,KAAK,MAAM,OAAO;AAC/B,UAAI,OAAO,KAAK,QAAQ,YAAY,KAAK,QAAQ,QAAQ,KAAK;AAC5D,sBAAc,KAAK;AAAA,MACrB;AAAA,IACF,QAAQ;AAAA,IAAqD;AAE7D,QAAI,CAAC,YAAa;AAGlB,QAAI;AAAE,cAAQ,KAAK,aAAa,CAAC;AAAA,IAAG,QAAQ;AAAE;AAAA,IAA2B;AAEzE,SAAK,OAAO,KAAK,UAAU,cAAc,4BAA4B,EAAE,KAAK,YAAY,CAAC;AACzF,QAAI;AAAE,cAAQ,KAAK,aAAa,SAAS;AAAA,IAAG,QAAQ;AAAE;AAAA,IAAQ;AAG9D,UAAM,WAAW,KAAK,IAAI,IAAI;AAC9B,WAAO,KAAK,IAAI,IAAI,UAAU;AAC5B,YAAM,IAAI,QAAQ,CAAC,MAAM,WAAW,GAAG,oBAAoB,CAAC;AAC5
D,UAAI;AAAE,gBAAQ,KAAK,aAAa,CAAC;AAAA,MAAG,QAAQ;AAAE;AAAA,MAAmB;AAAA,IACnE;AAEA,SAAK,OAAO,KAAK,UAAU,cAAc,wDAAwD,EAAE,KAAK,YAAY,CAAC;AACrH,QAAI;AAAE,cAAQ,KAAK,aAAa,SAAS;AAAA,IAAG,QAAQ;AAAE;AAAA,IAAQ;AAG9D,UAAM,IAAI,QAAQ,CAAC,MAAM,WAAW,GAAG,oBAAoB,CAAC;AAC5D,QAAI;AAAE,cAAQ,KAAK,aAAa,CAAC;AAAA,IAAG,QAAQ;AAAE;AAAA,IAAmB;AACjE,SAAK,OAAO,KAAK,UAAU,cAAc,4CAA4C,EAAE,KAAK,YAAY,CAAC;AAAA,EAC3G;AAAA,EAEQ,kBAAwB;AAC9B,UAAM,OAAO;AAAA,MACX,KAAK,QAAQ;AAAA,MACb,MAAM,KAAK;AAAA,MACX,UAAS,oBAAI,KAAK,GAAE,YAAY;AAAA,MAChC,UAAU,CAAC;AAAA,IACb;AACA,UAAM,WAAWC,MAAK,KAAK,KAAK,UAAU,aAAa;AACvD,IAAAD,IAAG,cAAc,UAAU,KAAK,UAAU,MAAM,MAAM,CAAC,CAAC;AAAA,EAC1D;AAAA,EAEQ,mBAAyB;AAC/B,UAAM,WAAWC,MAAK,KAAK,KAAK,UAAU,aAAa;AACvD,QAAI;AACF,YAAM,UAAUD,IAAG,aAAa,UAAU,OAAO;AACjD,YAAM,OAAO,KAAK,MAAM,OAAO;AAE/B,UAAI,KAAK,QAAQ,QAAQ,IAAK;AAC9B,MAAAA,IAAG,WAAW,QAAQ;AAAA,IACxB,QAAQ;AAAA,IAAmC;AAAA,EAC7C;AACF;AAEA,SAAS,SAAS,KAA6C;AAC7D,SAAO,IAAI,QAAQ,CAACD,UAAS,WAAW;AACtC,QAAI,OAAO;AACX,QAAI,GAAG,QAAQ,CAAC,UAAkB;AAAE,cAAQ;AAAA,IAAO,CAAC;AACpD,QAAI,GAAG,OAAO,MAAM;AAClB,UAAI;AAAE,QAAAA,SAAQ,OAAO,KAAK,MAAM,IAAI,IAAI,CAAC,CAAC;AAAA,MAAG,SACtC,GAAG;AAAE,eAAO,CAAC;AAAA,MAAG;AAAA,IACzB,CAAC;AACD,QAAI,GAAG,SAAS,MAAM;AAAA,EACxB,CAAC;AACH;;;AGhNO,IAAM,kBAAN,MAAsB;AAAA,EACnB,YAA0C,oBAAI,IAAI;AAAA,EAClD,aAAmD;AAAA,EACnD;AAAA,EACA;AAAA,EAER,YAAY,SAA0B;AACpC,SAAK,cAAc,QAAQ;AAC3B,SAAK,UAAU,QAAQ;AAAA,EACzB;AAAA,EAEA,IAAI,WAAqB;AACvB,WAAO,CAAC,GAAG,KAAK,UAAU,KAAK,CAAC;AAAA,EAClC;AAAA,EAEA,SAAS,WAAmB,UAAkC;AAC5D,QAAI,CAAC,KAAK,UAAU,IAAI,SAAS,GAAG;AAClC,WAAK,UAAU,IAAI,WAAW,YAAY,EAAE,aAAY,oBAAI,KAAK,GAAE,YAAY,EAAE,CAAC;AAAA,IACpF;AACA,SAAK,YAAY;AAAA,EACnB;AAAA,EAEA,WAAW,WAAkD;AAC3D,UAAM,OAAO,KAAK,UAAU,IAAI,SAAS;AACzC,QAAI,CAAC,KAAM,QAAO;AAClB,WAAO,EAAE,IAAI,WAAW,GAAG,KAAK;AAAA,EAClC;AAAA,EAEA,WAAW,WAAyB;AAClC,SAAK,UAAU,OAAO,SAAS;AAC/B,QAAI,KAAK,UAAU,SAAS,GAAG;AAC7B,WAAK,WAAW;AAAA,IAClB;AAAA,EACF;AAAA,EAEA,UAAgB;AACd,SAAK,YAAY;AACjB,SAAK,UAAU,MAAM;AAAA,EACvB;AAAA,EAEQ,aAAmB;AACzB,SAAK,YAAY;AACjB,SAAK,aAAa,WAAW,MAAM;AACjC,UAAI,KAAK,UAA
U,SAAS,GAAG;AAC7B,aAAK,QAAQ;AAAA,MACf;AAAA,IACF,GAAG,KAAK,cAAc,GAAI;AAAA,EAC5B;AAAA,EAEQ,cAAoB;AAC1B,QAAI,KAAK,YAAY;AACnB,mBAAa,KAAK,UAAU;AAC5B,WAAK,aAAa;AAAA,IACpB;AAAA,EACF;AACF;;;ACrEA,SAAS,kBAAkB;AAC3B,OAAO,SAAS;AAET,IAAM,mBAAmB;AACzB,IAAM,kBAAkB;AAC/B,IAAM,mBAAmB;AAGlB,SAAS,WAAW,WAA2B;AACpD,QAAM,OAAO,WAAW,KAAK,EAAE,OAAO,SAAS,EAAE,OAAO;AACxD,QAAM,MAAM,KAAK,aAAa,CAAC;AAC/B,SAAO,mBAAoB,MAAM;AACnC;AAGA,eAAsB,YACpB,YACA,WACiB;AACjB,QAAM,WAAW,cAAc,WAAW,SAAS;AAEnD,WAAS,SAAS,GAAG,SAAS,kBAAkB,UAAU;AACxD,UAAM,YAAY,WAAW;AAC7B,QAAI,YAAY,MAAO;AACvB,QAAI,MAAM,gBAAgB,SAAS,EAAG,QAAO;AAAA,EAC/C;AAGA,SAAO;AACT;AAEA,SAAS,gBAAgB,MAAgC;AACvD,SAAO,IAAI,QAAQ,CAACG,aAAY;AAC9B,UAAM,SAAS,IAAI,aAAa;AAChC,WAAO,KAAK,SAAS,MAAMA,SAAQ,KAAK,CAAC;AACzC,WAAO,KAAK,aAAa,MAAM;AAC7B,aAAO,MAAM,MAAMA,SAAQ,IAAI,CAAC;AAAA,IAClC,CAAC;AACD,WAAO,OAAO,MAAM,WAAW;AAAA,EACjC,CAAC;AACH;;;AChCA,OAAOC,SAAQ;AACf,OAAOC,WAAU;;;ACJjB,IAAM,yBAAyB;AAUxB,IAAM,sBAAN,MAAsD;AAAA,EAC3D,YAA6B,MAAyB;AAAzB;AAAA,EAA0B;AAAA,EAEvD,WAAW,SAAmC;AAC5C,UAAM,QAAQ,QAAQ,MAAM,IAAI,EAAE,OAAO,OAAO;AAChD,UAAM,QAA0B,CAAC;AACjC,QAAI,UAAiC;AAErC,eAAW,QAAQ,OAAO;AACxB,UAAI;AACJ,UAAI;AAAE,gBAAQ,KAAK,MAAM,IAAI;AAAA,MAAG,QAAQ;AAAE;AAAA,MAAU;AAEpD,YAAM,OAAO,MAAM,KAAK,KAAK,SAAS;AACtC,YAAM,YAAY,KAAK,KAAK,mBAAoB,MAAM,aAAuB,KAAM;AAEnF,UAAI,SAAS,QAAQ;AAGnB,YAAI,MAAM,WAAW,KAAM;AAE3B,cAAM,MAAM,MAAM;AAClB,cAAM,SAAS,MAAM,QAAQ,KAAK,OAAO,IAAI,IAAK,UAAU,CAAC;AAC7D,cAAM,UAAU,OAAO,KAAK,CAAC,MAAM,EAAE,SAAS,UAAU,EAAE,MAAM,KAAK,CAAC;AAEtE,YAAI,CAAC,QAAS;AAEd,YAAI,QAAS,OAAM,KAAK,OAAO;AAE/B,cAAM,YAAY,OACf,OAAO,CAAC,MAAM,EAAE,SAAS,UAAU,EAAE,IAAI,EACzC,IAAI,CAAC,MAAM,EAAE,IAAK,EAClB,KAAK,IAAI;AAEZ,cAAM,cAAc,KAAK,KAAK,qBAAqB,UAAU,QAAQ,wBAAwB,EAAE,IAAI,WAChG,KAAK,EACL,MAAM,GAAG,oBAAoB;AAEhC,cAAM,SAA4B,OAC/B,OAAO,CAAC,MAAM,EAAE,SAAS,WAAW,EAAE,QAAQ,SAAS,YAAY,EAAE,OAAO,IAAI,EAChF,IAAI,CAAC,OAAO,EAAE,MAAM,EAAE,OAAQ,MAAO,WAAW,EAAE,OAAQ,cAAc,YAAY,EAAE;AAEzF,kBAAU,EAAE,QAAQ,YAAY,WAAW,GAAG,WAAW,GAAI,OAAO,SAAS,IAAI,EAAE,OAAO,IAAI,CAAC,EAAG;AAAA,MACpG,WAAW,SAAS,eAAe,SAAS;AAC1C,cAAM,
MAAM,MAAM;AAClB,YAAI,MAAM,QAAQ,KAAK,OAAO,GAAG;AAC/B,gBAAM,YAAY,IAAK,QAAQ,OAAO,CAAC,MAAM,EAAE,SAAS,UAAU,EAAE,IAAI,EAAE,IAAI,CAAC,MAAM,EAAE,IAAK;AAC5F,gBAAM,OAAO,UAAU,KAAK,IAAI,EAAE,KAAK;AACvC,cAAI,KAAM,SAAQ,aAAa;AAC/B,kBAAQ,aAAa,IAAK,QAAQ,OAAO,CAAC,MAAM,EAAE,SAAS,UAAU,EAAE;AAAA,QACzE;AAAA,MACF;AAAA,IACF;AAEA,QAAI,QAAS,OAAM,KAAK,OAAO;AAC/B,WAAO;AAAA,EACT;AACF;;;ADaO,SAAS,mBAAmB,SAAiB,WAAkC;AACpF,MAAI;AACF,eAAW,SAASC,IAAG,YAAY,SAAS,EAAE,eAAe,KAAK,CAAC,GAAG;AACpE,UAAI,CAAC,MAAM,YAAY,EAAG;AAC1B,YAAM,YAAYC,MAAK,KAAK,SAAS,MAAM,MAAM,GAAG,SAAS,QAAQ;AACrE,UAAI;AACF,QAAAD,IAAG,WAAW,SAAS;AACvB,eAAO;AAAA,MACT,QAAQ;AAAA,MAAiB;AAAA,IAC3B;AAAA,EACF,QAAQ;AAAA,EAA4C;AACpD,SAAO;AACT;AAMO,SAAS,wBACd,SACA,YACA,MACiB;AACjB,SAAO;AAAA,IACL,MAAM,QAAQ,UAAUC,MAAK,SAAS,OAAO,CAAC;AAAA,IAC9C,aAAa,WAAW,OAAO;AAAA,IAC/B,kBAAkB;AAAA,IAClB,YAAY,EAAE,WAAW,cAAc,gBAAgB,mBAAmB,cAAc,0BAA0B,QAAQ,UAAU,UAAU,aAAa,WAAW,cAAc,YAAY,cAAc;AAAA,IAC9M,gBAAgB,CAAC,cAAc,mBAAmB,SAAS,SAAS;AAAA,IACpE;AAAA,EACF;AACF;AAGA,IAAM,cAAsC;AAAA,EAC1C,cAAc;AAAA,EACd,aAAa;AAAA,EACb,cAAc;AAAA,EACd,aAAa;AACf;AAEO,SAAS,qBAAqB,UAA0B;AAC7D,SAAO,YAAY,QAAQ,KAAK;AAClC;AAGA,IAAM,cAAsC;AAAA,EAC1C,QAAQ;AAAA,EACR,SAAS;AAAA,EACT,QAAQ;AAAA,EACR,SAAS;AAAA,EACT,QAAQ;AACV;AAEO,SAAS,qBAAqB,KAAqB;AACxD,SAAO,YAAY,IAAI,YAAY,CAAC,KAAK;AAC3C;AAmBO,SAAS,gBAAgB,SAAiB,MAA2C;AAC1F,SAAO,IAAI,oBAAoB,IAAI,EAAE,WAAW,OAAO;AACzD;;;AE/JA,OAAOC,WAAU;AACjB,OAAO,QAAQ;AAEf,IAAM,kBAAkBA,MAAK,KAAK,GAAG,QAAQ,GAAG,WAAW,UAAU;AAE9D,IAAM,oBAAqC;AAAA,EAChD,MAAM;AAAA,EACN,aAAa;AAAA,EACb,kBAAkB;AAAA,EAClB,YAAY;AAAA,IACV,WAAW;AAAA,IACX,gBAAgB;AAAA,IAChB,cAAc;AAAA,IACd,QAAQ;AAAA,IACR,UAAU;AAAA,IACV,WAAW;AAAA,IACX,YAAY;AAAA,EACd;AAAA,EAEA,gBAAgB,CAAC,cAAc,mBAAmB,iBAAiB,SAAS;AAAA,EAE5E,YAAY,CAAC,YAAY,gBAAgB,SAAS;AAAA,IAChD,WAAW;AAAA,IACX,kBAAkB;AAAA,IAClB,qBAAqB;AAAA,IACrB,oBAAoB;AAAA,EACtB,CAAC;AACH;;;ACzBA,OAAOC,SAAQ;AACf,OAAOC,WAAU;AACjB,OAAOC,SAAQ;AAcf,IAAM,cAAc;AACpB,IAAM,mBAAmB;AACzB,IAAM,mBAAmB;AACzB,IAAM,qBAAqB;AAC3B,IAAM,kBAAkB;AAExB,SAAS,wBAAgC;AACvC,SAAOD,MAAK,KAAKC,IAAG,Q
AAQ,GAAG,WAAW,UAAU;AACtD;AAEA,IAAM,kBAAkB,sBAAsB;AAEvC,IAAM,gBAAiC;AAAA,EAC5C,MAAM;AAAA,EACN,aAAa;AAAA,EACb,kBAAkB;AAAA,EAClB,YAAY;AAAA,IACV,WAAW;AAAA,IACX,gBAAgB;AAAA,IAChB,cAAc;AAAA,IACd,QAAQ;AAAA,IACR,UAAU;AAAA,IACV,WAAW;AAAA,IACX,YAAY;AAAA,EACd;AAAA,EAEA,eAAe,WAAkC;AAC/C,QAAI;AACF,iBAAW,WAAWF,IAAG,YAAY,iBAAiB,EAAE,eAAe,KAAK,CAAC,GAAG;AAC9E,YAAI,CAAC,QAAQ,YAAY,EAAG;AAC5B,cAAM,iBAAiBC,MAAK,KAAK,iBAAiB,QAAQ,MAAM,mBAAmB;AAEnF,mBAAW,aAAa;AAAA,UACtBA,MAAK,KAAK,gBAAgB,GAAG,SAAS,MAAM;AAAA,UAC5CA,MAAK,KAAK,gBAAgB,WAAW,GAAG,SAAS,QAAQ;AAAA,QAC3D,GAAG;AACD,cAAI;AACF,YAAAD,IAAG,WAAW,SAAS;AACvB,mBAAO;AAAA,UACT,QAAQ;AAAA,UAAiB;AAAA,QAC3B;AAAA,MACF;AAAA,IACF,QAAQ;AAAA,IAAmC;AAC3C,WAAO;AAAA,EACT;AAAA,EAEA,WAAW,SAAmC;AAE5C,UAAM,UAAU,QAAQ,UAAU;AAClC,QAAI,QAAQ,WAAW,GAAG,GAAG;AAC3B,aAAO,iBAAiB,OAAO;AAAA,IACjC;AACA,WAAO,gBAAgB,OAAO;AAAA,EAChC;AACF;AAGA,SAAS,iBAAiB,SAAmC;AAC3D,SAAO,gBAAgB,SAAS;AAAA,IAC9B,WAAW;AAAA,IACX,kBAAkB;AAAA,IAClB,qBAAqB;AAAA,IACrB,oBAAoB;AAAA,EACtB,CAAC;AACH;AAGA,SAAS,gBAAgB,SAAmC;AACxD,QAAM,QAA0B,CAAC;AAEjC,QAAM,YAAY,OAAO,SAAS,MAAM,WAAW,EAAE,MAAM,CAAC;AAE5D,aAAW,WAAW,UAAU;AAE9B,QAAI,aAAa;AACjB,UAAM,aAAa,QAAQ,MAAM,4CAA4C;AAC7E,QAAI,YAAY;AACd,mBAAa,WAAW,CAAC,EAAE,KAAK,EAAE,MAAM,GAAG,oBAAoB;AAAA,IACjE,OAAO;AAEL,YAAM,kBAAkB,QAAQ,MAAM,gBAAgB,EAAE,CAAC;AACzD,mBAAa,gBAAgB,QAAQ,6BAA6B,EAAE,EAAE,KAAK,EAAE,MAAM,GAAG,oBAAoB;AAAA,IAC5G;AAGA,UAAM,SAA4B,CAAC;AACnC,UAAM,kBAAkB,QAAQ,MAAM,wCAAwC;AAC9E,QAAI,iBAAiB;AACnB,YAAM,aAAa,gBAAgB,CAAC;AACpC,YAAM,cAAc,WAAW,SAAS,iDAAiD;AACzF,iBAAW,SAAS,aAAa;AAC/B,cAAM,YAAY,MAAM,CAAC,EAAE,KAAK;AAChC,YAAI;AACF,gBAAM,OAAOA,IAAG,aAAa,SAAS,EAAE,SAAS,QAAQ;AACzD,gBAAM,YAAY,qBAAqBC,MAAK,QAAQ,SAAS,CAAC;AAC9D,iBAAO,KAAK,EAAE,MAAM,UAAU,CAAC;AAAA,QACjC,QAAQ;AAAA,QAER;AAAA,MACF;AAAA,IACF;AAGA,UAAM,gBAAgB,QAAQ,MAAM,gBAAgB,EAAE,SAAS;AAK/D,QAAI;AACJ,UAAM,kBAAkB,QAAQ,MAAM,gBAAgB,EAAE,MAAM,CAAC;AAC/D,aAAS,IAAI,gBAAgB,SAAS,GAAG,KAAK,GAAG,KAAK;AACpD,YAAM,QAAQ,gBAAgB,CAAC,EAAE,MAAM,IAAI;AAC3C,YAAM,YAAsB,CAAC;AAC7B,UAAI,OAAO;AACX,iBAAW,QAAQ,OAAO;AAExB,YAAI,KAAK
,WAAW,gBAAgB,KAAK,KAAK,WAAW,kBAAkB,KAAK,KAAK,WAAW,eAAe,GAAG;AAChH,iBAAO;AACP;AAAA,QACF;AAEA,YAAI,QAAQ,KAAK,KAAK,MAAM,GAAI;AAChC,YAAI,QAAQ,CAAC,KAAK,WAAW,IAAI,EAAG,QAAO;AAC3C,YAAI,KAAM;AACV,kBAAU,KAAK,IAAI;AAAA,MACrB;AACA,YAAM,OAAO,UAAU,KAAK,IAAI,EAAE,KAAK;AACvC,UAAI,MAAM;AACR,qBAAa;AACb;AAAA,MACF;AAAA,IACF;AAEA,QAAI,cAAc,OAAO,SAAS,GAAG;AACnC,YAAM,KAAK;AAAA,QACT,QAAQ;AAAA,QACR,WAAW;AAAA,QACX,WAAW;AAAA,QACX,GAAI,aAAa,EAAE,WAAW,IAAI,CAAC;AAAA,QACnC,GAAI,OAAO,SAAS,IAAI,EAAE,OAAO,IAAI,CAAC;AAAA,MACxC,CAAC;AAAA,IACH;AAAA,EACF;AAEA,SAAO;AACX;;;AChKA,SAAS,aAAa,KAAyD;AAC7E,QAAM,QAAQ,IAAI,MAAM,4BAA4B;AACpD,MAAI,CAAC,MAAO,QAAO;AACnB,SAAO,EAAE,WAAW,MAAM,CAAC,GAAG,MAAM,MAAM,CAAC,EAAE;AAC/C;AAaA,IAAM,oBAAoB;AAC1B,IAAM,sBAAsB;AAE5B,SAAS,qBAAqB,MAAsB;AAElD,MAAI,kBAAkB,KAAK,KAAK,KAAK,CAAC,EAAG,QAAO;AAEhD,QAAM,MAAM,KAAK,QAAQ,mBAAmB;AAC5C,MAAI,QAAQ,GAAI,QAAO,KAAK,MAAM,MAAM,oBAAoB,MAAM;AAClE,SAAO;AACT;AAiBO,IAAM,mBAAN,MAAmD;AAAA,EACxD,WAAW,SAAmC;AAC5C,UAAM,QAAQ,QAAQ,MAAM,IAAI,EAAE,OAAO,OAAO;AAChD,UAAM,QAA0B,CAAC;AACjC,QAAI,UAAiC;AAErC,eAAW,QAAQ,OAAO;AACxB,UAAI;AACJ,UAAI;AAAE,gBAAQ,KAAK,MAAM,IAAI;AAAA,MAAG,QAAQ;AAAE;AAAA,MAAU;AAGpD,UAAI,MAAM,SAAS,gBAAiB;AAEpC,YAAM,UAAU,MAAM;AACtB,UAAI,CAAC,QAAS;AAEd,YAAM,cAAc,QAAQ;AAC5B,YAAM,YAAa,MAAM,aAAwB;AAGjD,UAAI,gBAAgB,iBAAiB;AACnC,YAAI,QAAS,SAAQ;AACrB;AAAA,MACF;AAGA,UAAI,gBAAgB,UAAW;AAE/B,YAAM,OAAO,QAAQ;AACrB,YAAM,SAAS,MAAM,QAAQ,QAAQ,OAAO,IACvC,QAAQ,UACT,CAAC;AAEL,UAAI,SAAS,QAAQ;AACnB,cAAM,YAAY,OACf,OAAO,CAAC,MAAM,EAAE,SAAS,gBAAgB,EAAE,MAAM,KAAK,CAAC,EACvD,IAAI,CAAC,MAAM,qBAAqB,EAAE,IAAK,CAAC,EACxC,OAAO,CAAC,MAAM,EAAE,KAAK,CAAC;AAEzB,YAAI,UAAU,WAAW,EAAG;AAE5B,YAAI,QAAS,OAAM,KAAK,OAAO;AAE/B,cAAM,aAAa,UAAU,KAAK,IAAI,EAAE,KAAK,EAAE,MAAM,GAAG,oBAAoB;AAG5E,cAAM,SAA4B,CAAC;AACnC,mBAAW,KAAK,QAAQ;AACtB,cAAI,EAAE,SAAS,iBAAiB,EAAE,WAAW;AAC3C,kBAAM,SAAS,aAAa,EAAE,SAAS;AACvC,gBAAI,OAAQ,QAAO,KAAK,MAAM;AAAA,UAChC;AAAA,QACF;AAEA,kBAAU,EAAE,QAAQ,YAAY,WAAW,GAAG,WAAW,GAAI,OAAO,SAAS,IAAI,EAAE,OAAO,IAAI,CAAC,EAAG;AAAA,MACpG,WAAW,SAAS,eAAe,SAAS;AAC1C,cAAM,YAAY,OACf,O
AAO,CAAC,MAAM,EAAE,SAAS,iBAAiB,EAAE,IAAI,EAChD,IAAI,CAAC,MAAM,EAAE,IAAK;AACrB,cAAM,OAAO,UAAU,KAAK,IAAI,EAAE,KAAK;AACvC,YAAI,KAAM,SAAQ,aAAa;AAAA,MACjC;AAAA,IAEF;AAEA,QAAI,QAAS,OAAM,KAAK,OAAO;AAC/B,WAAO;AAAA,EACT;AACF;;;ACnHA,OAAOE,SAAQ;AACf,OAAOC,WAAU;AACjB,OAAOC,SAAQ;AAEf,IAAMC,mBAAkBF,MAAK,KAAKC,IAAG,QAAQ,GAAG,QAAQ;AACxD,IAAM,cAAc,IAAI,iBAAiB;AAWlC,SAAS,oBAAoB,SAAiB,WAAkC;AACrF,QAAM,cAAcD,MAAK,KAAK,SAAS,UAAU;AACjD,MAAI;AACF,WAAO,mBAAmB,aAAa,SAAS;AAAA,EAClD,QAAQ;AACN,WAAO;AAAA,EACT;AACF;AAEA,SAAS,mBAAmB,KAAa,WAAkC;AACzE,MAAI;AACJ,MAAI;AAAE,cAAUD,IAAG,YAAY,KAAK,EAAE,eAAe,KAAK,CAAC;AAAA,EAAG,QACxD;AAAE,WAAO;AAAA,EAAM;AAErB,aAAW,SAAS,SAAS;AAC3B,UAAM,WAAWC,MAAK,KAAK,KAAK,MAAM,IAAI;AAC1C,QAAI,MAAM,YAAY,GAAG;AACvB,YAAM,QAAQ,mBAAmB,UAAU,SAAS;AACpD,UAAI,MAAO,QAAO;AAAA,IACpB,WAAW,MAAM,OAAO,KAAK,MAAM,KAAK,SAAS,SAAS,KAAK,MAAM,KAAK,SAAS,QAAQ,GAAG;AAC5F,aAAO;AAAA,IACT;AAAA,EACF;AACA,SAAO;AACT;AAEO,IAAM,eAAgC;AAAA,EAC3C,MAAM;AAAA,EACN,aAAa;AAAA,EACb,kBAAkB;AAAA,EAClB,YAAY;AAAA,IACV,WAAW;AAAA,IACX,gBAAgB;AAAA,IAChB,cAAc;AAAA,IACd,QAAQ;AAAA,IACR,UAAU;AAAA,IACV,WAAW;AAAA,IACX,YAAY;AAAA,EACd;AAAA,EAEA,gBAAgB,CAAC,cAAc,oBAAoBE,kBAAiB,SAAS;AAAA,EAE7E,YAAY,CAAC,YAAY,YAAY,WAAW,OAAO;AACzD;;;AC3DA,OAAOC,SAAQ;AACf,OAAOC,WAAU;AACjB,OAAOC,SAAQ;AAWf,IAAM,aAAaD,MAAK,KAAKC,IAAG,QAAQ,GAAG,WAAW,KAAK;AAEpD,IAAM,gBAAiC;AAAA,EAC5C,MAAM;AAAA,EACN,aAAa;AAAA,EACb,kBAAkB;AAAA,EAClB,YAAY;AAAA,IACV,WAAW;AAAA,IACX,gBAAgB;AAAA,IAChB,cAAc;AAAA,IACd,QAAQ;AAAA,IACR,UAAU;AAAA,IACV,WAAW;AAAA,IACX,YAAY;AAAA,IACZ,cAAc;AAAA,EAChB;AAAA,EAEA,eAAe,WAAkC;AAI/C,QAAI;AACF,iBAAW,WAAWF,IAAG,YAAY,YAAY,EAAE,eAAe,KAAK,CAAC,GAAG;AACzE,YAAI,CAAC,QAAQ,YAAY,EAAG;AAC5B,cAAM,WAAWC,MAAK,KAAK,YAAY,QAAQ,MAAM,OAAO;AAC5D,YAAI;AACF,qBAAW,QAAQD,IAAG,YAAY,QAAQ,GAAG;AAC3C,gBAAI,CAAC,KAAK,SAAS,OAAO,EAAG;AAE7B,gBAAI,KAAK,SAAS,UAAU,MAAM,GAAG,CAAC,CAAC,GAAG;AAExC,kBAAI;AACF,sBAAM,OAAO,KAAK,MAAMA,IAAG,aAAaC,MAAK,KAAK,UAAU,IAAI,GAAG,OAAO,CAAC;AAC3E,oBAAI,KAAK,cAAc,WAAW;AAChC,yBAAOA,MAAK,KAAK,UAAU,IAAI;AAAA,gBACjC;AAAA,cACF,QAAQ;AAAA,cAAuB;AAAA,YAC
jC;AAAA,UACF;AAAA,QACF,QAAQ;AAAA,QAAgC;AAAA,MAC1C;AAAA,IACF,QAAQ;AAAA,IAA8B;AACtC,WAAO;AAAA,EACT;AAAA,EAEA,WAAW,SAAmC;AAC5C,WAAO,gBAAgB,OAAO;AAAA,EAChC;AACF;AAGA,IAAM,YAAY;AAClB,IAAM,cAAc;AAMpB,SAAS,gBAAgB,SAAmC;AAC1D,MAAI;AACJ,MAAI;AAAE,WAAO,KAAK,MAAM,OAAO;AAAA,EAAG,QAAQ;AAAE,WAAO,CAAC;AAAA,EAAG;AAEvD,QAAM,WAAW,KAAK;AACtB,MAAI,CAAC,MAAM,QAAQ,QAAQ,EAAG,QAAO,CAAC;AAEtC,QAAM,QAA0B,CAAC;AACjC,MAAI,UAAiC;AAErC,aAAW,OAAO,UAAU;AAC1B,QAAI,IAAI,SAAS,WAAW;AAC1B,UAAI,QAAS,OAAM,KAAK,OAAO;AAG/B,YAAM,aAAa,MAAM,QAAQ,IAAI,OAAO,IACxC,IAAI,QAAQ,IAAI,CAAC,MAAM,EAAE,QAAQ,EAAE,EAAE,KAAK,IAAI,EAAE,KAAK,IACpD,OAAO,IAAI,YAAY,WAAW,IAAI,UAAU;AAErD,gBAAU;AAAA,QACR,QAAQ,WAAW,MAAM,GAAG,oBAAoB;AAAA,QAChD,WAAW;AAAA,QACX,WAAW,IAAI,aAAa;AAAA,MAC9B;AAAA,IACF,WAAW,IAAI,SAAS,eAAe,SAAS;AAE9C,YAAM,OAAO,OAAO,IAAI,YAAY,WAAW,IAAI,QAAQ,KAAK,IAAI;AACpE,UAAI,KAAM,SAAQ,aAAa;AAG/B,UAAI,MAAM,QAAQ,IAAI,SAAS,GAAG;AAChC,gBAAQ,aAAa,IAAI,UAAU;AAAA,MACrC;AAAA,IACF;AAAA,EACF;AAEA,MAAI,QAAS,OAAM,KAAK,OAAO;AAC/B,SAAO;AACT;;;AC7GA,OAAOE,SAAQ;AACf,OAAOC,WAAU;AACjB,OAAOC,SAAQ;AAEf,IAAM,iBAAiBD,MAAK,KAAKC,IAAG,QAAQ,GAAG,aAAa,aAAa;AAGzE,IAAM,kBAAkB;AACxB,IAAM,wBAAwB;AAC9B,IAAM,mBAAmB;AAElB,IAAM,kBAAmC;AAAA,EAC9C,MAAM;AAAA,EACN,aAAa;AAAA,EACb,kBAAkB;AAAA,EAClB,YAAY;AAAA,IACV,WAAW;AAAA,IACX,gBAAgB;AAAA,IAChB,cAAc;AAAA,IACd,QAAQ;AAAA,IACR,UAAU;AAAA,IACV,WAAW;AAAA,IACX,YAAY;AAAA,EACd;AAAA,EAEA,eAAe,WAAkC;AAE/C,UAAM,YAAYD,MAAK,KAAK,gBAAgB,GAAG,SAAS,QAAQ;AAChE,QAAI;AACF,MAAAD,IAAG,WAAW,SAAS;AACvB,aAAO;AAAA,IACT,QAAQ;AAAE,aAAO;AAAA,IAAM;AAAA,EACzB;AAAA,EAEA,WAAW,SAAmC;AAC5C,WAAO,mBAAmB,OAAO;AAAA,EACnC;AACF;AAQA,SAAS,mBAAmB,SAAmC;AAC7D,QAAM,QAAQ,QAAQ,MAAM,IAAI,EAAE,OAAO,OAAO;AAChD,QAAM,QAA0B,CAAC;AACjC,MAAI,UAAiC;AAErC,aAAW,QAAQ,OAAO;AACxB,QAAI;AACJ,QAAI;AAAE,cAAQ,KAAK,MAAM,IAAI;AAAA,IAAG,QAAQ;AAAE;AAAA,IAAU;AAEpD,UAAM,OAAO,MAAM;AAEnB,QAAI,SAAS,iBAAiB;AAC5B,UAAI,QAAS,OAAM,KAAK,OAAO;AAG/B,YAAM,cACH,MAAM,iBACN,MAAM,QACN,MAAM,WACP,IACA,KAAK,EAAE,MAAM,GAAG,oBAAoB;AAEtC,gBAAU,EAAE,QAAQ,YAAY,WAAW,GAAG,WAAW,GAAG;AAAA,IAC9D,WAAW,SAAS,yBAAyB
,SAAS;AACpD,YAAM,QACH,MAAM,YACN,MAAM,QACN,MAAM,WACP,IACA,KAAK;AACP,UAAI,KAAM,SAAQ,aAAa;AAAA,IACjC,WAAW,SAAS,oBAAoB,SAAS;AAC/C,cAAQ;AAAA,IACV;AAAA,EACF;AAEA,MAAI,QAAS,OAAM,KAAK,OAAO;AAC/B,SAAO;AACT;;;AClEA,IAAM,kBAAkB,oBAAI,IAAI,CAAC,4BAA4B,oBAAoB,aAAa,CAAC;AAExF,IAAM,uBAAwC;AAAA,EACnD,MAAM;AAAA,EACN,aAAa;AAAA,EACb,kBAAkB;AAAA,EAClB,YAAY;AAAA,IACV,WAAW;AAAA,IACX,gBAAgB;AAAA,IAChB,cAAc;AAAA,IACd,QAAQ;AAAA,IACR,UAAU;AAAA,IACV,WAAW;AAAA,IACX,YAAY;AAAA,EACd;AAAA;AAAA,EAGA,gBAAgB,MAAM;AAAA,EAEtB,YAAY,CAAC,YAAY,sBAAsB,OAAO;AACxD;AAOA,SAAS,sBAAsB,SAAmC;AAChE,QAAM,QAAQ,QAAQ,MAAM,IAAI,EAAE,OAAO,OAAO;AAChD,MAAI,MAAM,WAAW,EAAG,QAAO,CAAC;AAGhC,MAAI;AACJ,MAAI;AACF,UAAM,QAAQ,KAAK,MAAM,MAAM,CAAC,CAAC;AACjC,QAAI,MAAM,SAAS,KAAK,CAAC,MAAM,EAAG,QAAO,CAAC;AAC1C,cAAU,MAAM;AAAA,EAClB,QAAQ;AAAE,WAAO,CAAC;AAAA,EAAG;AAGrB,QAAM,QAAQ,KAAK,MAAM,KAAK,UAAU,OAAO,CAAC;AAEhD,WAAS,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK;AACrC,QAAI;AACF,YAAM,QAAQ,KAAK,MAAM,MAAM,CAAC,CAAC;AACjC,UAAI,CAAC,MAAM,KAAK,CAAC,MAAM,QAAQ,MAAM,CAAC,EAAG;AACzC,iBAAW,OAAO,MAAM,GAAG,MAAM,GAAG,MAAM,IAAI;AAAA,IAChD,QAAQ;AAAA,IAA6B;AAAA,EACvC;AAGA,SAAO,aAAa,KAAK;AAC3B;AAGA,SAAS,WAAW,OAAgC,SAAmB,OAAgB,MAAoB;AACzG,MAAI,MAA+B;AACnC,WAAS,IAAI,GAAG,IAAI,QAAQ,SAAS,GAAG,KAAK;AAC3C,QAAI,IAAI,QAAQ,CAAC,CAAC,MAAM,UAAa,IAAI,QAAQ,CAAC,CAAC,MAAM,MAAM;AAC7D,UAAI,QAAQ,CAAC,CAAC,IAAI,CAAC;AAAA,IACrB;AACA,UAAM,IAAI,QAAQ,CAAC,CAAC;AAAA,EACtB;AACA,QAAM,UAAU,QAAQ,QAAQ,SAAS,CAAC;AAE1C,MAAI,SAAS,GAAG;AAEd,QAAI,OAAO,IAAI;AAAA,EACjB,WAAW,SAAS,GAAG;AAErB,QAAI,CAAC,MAAM,QAAQ,IAAI,OAAO,CAAC,EAAG,KAAI,OAAO,IAAI,CAAC;AAClD,IAAC,IAAI,OAAO,EAAgB,KAAK,KAAK;AAAA,EACxC;AACF;AAGA,SAAS,aAAa,OAAsC;AAC1D,MAAI,CAAC,MAAM,QAAQ,MAAM,QAAQ,EAAG,QAAO,CAAC;AAE5C,QAAM,QAA0B,CAAC;AAEjC,aAAW,OAAO,MAAM,UAAU;AAChC,UAAM,aAAa,IAAI,SAAS,MAAM,KAAK,KAAK;AAChD,QAAI,CAAC,WAAY;AAEjB,UAAM,YAAY,IAAI,YAAY,IAAI,KAAK,IAAI,SAAS,EAAE,YAAY,IAAI;AAG1E,QAAI,YAAY;AAChB,QAAI,aAAa;AACjB,UAAM,gBAAgB,uBAAuB,IAAI,QAAQ;AAEzD,eAAW,QAAQ,eAAe;AAChC,UAAI,gBAAgB,IAAI,KAAK,QAAQ,EAAE,GAAG;AACxC;AAAA,MACF,WAAW,KAAK,SAAS,qBAAqB
,KAAK,SAAS,gBAAgB;AAE1E,cAAM,OAAO,KAAK,SAAS,SAAS,KAAK,SAAS;AAClD,YAAI,KAAM,cAAa;AAAA,MACzB,WAAW,CAAC,KAAK,QAAQ,OAAO,KAAK,UAAU,YAAY,KAAK,MAAM,KAAK,GAAG;AAE5E,uBAAe,aAAa,OAAO,MAAM,KAAK,MAAM,KAAK;AAAA,MAC3D;AAAA,IACF;AAEA,UAAM,KAAK;AAAA,MACT,QAAQ,WAAW,MAAM,GAAG,oBAAoB;AAAA,MAChD;AAAA,MACA;AAAA,MACA,GAAI,aAAa,EAAE,YAAY,WAAW,KAAK,EAAE,IAAI,CAAC;AAAA,IACxD,CAAC;AAAA,EACH;AAEA,SAAO;AACT;AAQA,SAAS,uBAAuB,UAAyC;AACvE,MAAI,CAAC,SAAU,QAAO,CAAC;AAGvB,MAAI,MAAM,QAAQ,QAAQ,GAAG;AAC3B,WAAO,SAAS,KAAK,EAAE,OAAO,CAAC,MAA+B,KAAK,OAAO,MAAM,QAAQ;AAAA,EAC1F;AAGA,MAAI,OAAO,aAAa,UAAU;AAChC,WAAO,OAAO,OAAO,QAAQ,EAC1B,KAAK,EACL,OAAO,CAAC,MAA+B,KAAK,OAAO,MAAM,YAAY,CAAC,MAAM,QAAQ,CAAC,CAAC;AAAA,EAC3F;AAEA,SAAO,CAAC;AACV;;;ACtJA,OAAOG,SAAQ;AAOf,IAAM,eAAkC;AAAA,EACtC;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF;AAEO,IAAM,mBAAN,MAAuB;AAAA,EACpB;AAAA,EAER,YAAY,qBAAwC,CAAC,GAAG;AACtD,SAAK,WAAW,CAAC,GAAG,cAAc,GAAG,kBAAkB;AAAA,EACzD;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,mBAAmB,WAAuE;AACxF,eAAW,WAAW,KAAK,UAAU;AACnC,YAAM,WAAW,QAAQ,eAAe,SAAS;AACjD,UAAI,CAAC,SAAU;AAEf,UAAI;AACF,cAAM,UAAUA,IAAG,aAAa,UAAU,OAAO;AACjD,cAAM,QAAQ,QAAQ,WAAW,OAAO;AACxC,YAAI,MAAM,SAAS,GAAG;AACpB,iBAAO,EAAE,OAAO,QAAQ,QAAQ,KAAK;AAAA,QACvC;AAAA,MACF,QAAQ;AAAA,MAER;AAAA,IACF;AACA,WAAO;AAAA,EACT;AAAA;AAAA,EAGA,IAAI,eAAyB;AAC3B,WAAO,KAAK,SAAS,IAAI,CAAC,MAAM,EAAE,IAAI;AAAA,EACxC;AAAA;AAAA,EAGA,WAAW,MAA2C;AACpD,WAAO,KAAK,SAAS,KAAK,CAAC,MAAM,EAAE,SAAS,IAAI;AAAA,EAClD;AAAA;AAAA,EAGA,oBAAiD;AAC/C,eAAW,WAAW,KAAK,UAAU;AACnC,UAAI,QAAQ,IAAI,QAAQ,gBAAgB,GAAG;AACzC,eAAO;AAAA,MACT;AAAA,IACF;AACA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,mBAAmB,UAAsE;AACvF,QAAI;AACF,YAAM,UAAUA,IAAG,aAAa,UAAU,OAAO;AAEjD,YAAM,SAAS,KAAK,kBAAkB;AACtC,YAAM,kBAAkB,SACpB,CAAC,QAAQ,GAAG,KAAK,SAAS,OAAO,CAAC,MAAM,MAAM,MAAM,CAAC,IACrD,KAAK;AAET,iBAAW,WAAW,iBAAiB;AACrC,cAAM,QAAQ,QAAQ,WAAW,OAAO;AACxC,YAAI,MAAM,SAAS,GAAG;AACpB,iBAAO,EAAE,OAAO,QAAQ,GAAG,QAAQ,IAAI,UAAU;AAAA,QACnD;AAAA,MACF;AAAA,IACF,QAAQ;AAAA,IAER;AACA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,oBAAwC;AA
CtC,eAAW,WAAW,KAAK,UAAU;AACnC,YAAM,QAAQ,QAAQ,IAAI,QAAQ,gBAAgB;AAClD,UAAI,MAAO,QAAO;AAAA,IACpB;AACA,WAAO;AAAA,EACT;AACF;;;AC/FO,IAAM,kBAAN,MAAsB;AAAA,EACnB;AAAA,EAER,YAAY,QAA2B;AACrC,SAAK,WAAW,IAAI,iBAAiB,QAAQ,kBAAkB;AAAA,EACjE;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,YAAY,WAAqC;AAC/C,WAAO,KAAK,sBAAsB,SAAS,EAAE;AAAA,EAC/C;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,sBAAsB,WAAmB,gBAAsE;AAE7G,QAAI,gBAAgB;AAClB,YAAMC,UAAS,KAAK,SAAS,mBAAmB,cAAc;AAC9D,UAAIA,QAAQ,QAAOA;AAAA,IACrB;AAGA,UAAM,SAAS,KAAK,SAAS,mBAAmB,SAAS;AACzD,QAAI,OAAQ,QAAO;AACnB,WAAO,EAAE,OAAO,CAAC,GAAG,QAAQ,OAAO;AAAA,EACrC;AACF;AAOO,SAAS,uBAAuB,QAA0D;AAC/F,QAAM,QAA0B,CAAC;AACjC,MAAI,UAAiC;AAErC,aAAW,SAAS,QAAQ;AAC1B,UAAM,OAAO,MAAM;AACnB,QAAI,SAAS,eAAe;AAC1B,UAAI,QAAS,OAAM,KAAK,OAAO;AAC/B,gBAAU;AAAA,QACR,QAAQ,OAAO,MAAM,UAAU,EAAE,EAAE,MAAM,GAAG,oBAAoB;AAAA,QAChE,WAAW;AAAA,QACX,WAAW,OAAO,MAAM,cAAa,oBAAI,KAAK,GAAE,YAAY,CAAC;AAAA,MAC/D;AAAA,IACF,WAAW,SAAS,YAAY;AAC9B,UAAI,QAAS,SAAQ;AAAA,IACvB;AAAA,EACF;AACA,MAAI,QAAS,OAAM,KAAK,OAAO;AAC/B,SAAO;AACT;;;ACjEO,SAAS,mBAAmB,KAAc,SAAS,IAAc;AACtE,MAAI,QAAQ,QAAQ,OAAO,QAAQ,YAAY,MAAM,QAAQ,GAAG,GAAG;AACjE,WAAO,SAAS,CAAC,MAAM,IAAI,CAAC;AAAA,EAC9B;AACA,QAAM,MAAgB,CAAC;AACvB,aAAW,CAAC,KAAK,KAAK,KAAK,OAAO,QAAQ,GAA8B,GAAG;AACzE,UAAM,OAAO,SAAS,GAAG,MAAM,IAAI,GAAG,KAAK;AAC3C,QAAI,KAAK,GAAG,mBAAmB,OAAO,IAAI,CAAC;AAAA,EAC7C;AACA,SAAO;AACT;AAOO,SAAS,oBAAoB,OAAgB,OAAuC;AACzF,QAAM,cAAc,SAAS,OAAO,UAAU,WAAW,mBAAmB,KAAK,IAAI,CAAC;AACtF,QAAM,YAAY,MAAM,QAAQ,KAAK,IAAI,QAAQ,CAAC;AAClD,SAAO,CAAC,GAAG,oBAAI,IAAI,CAAC,GAAG,aAAa,GAAG,SAAS,CAAC,CAAC;AACpD;;;ACbA,eAAsB,gBAAgB,UAA0C;AAC9E,QAAM,SAAS,WAAW,QAAQ;AAClC,SAAO,EAAE,MAAM,OAAO;AACxB;AAOA,eAAsB,sBAAsB,UAA0C;AACpF,QAAM,SAAS,iBAAiB,QAAQ;AACxC,SAAO,EAAE,MAAM,OAAO;AACxB;AAGA,eAAsB,qBAAqB,UAA0C;AACnF,SAAO,EAAE,MAAM,gBAAgB,QAAQ,EAAE;AAC3C;AAQA,IAAM,uBAAuB,CAAC,WAAW,OAAO;AAEhD,SAAS,oBAAoB,OAAiD;AAC5E,SAAO,OAAO,UAAU,YAClB,qBAA2C,SAAS,KAAK;AACjE;AAEA,SAAS,kBAAkB,OAA0C;AACnE,MAAI,UAAU,OAAW,QAAO,CAAC;AACjC,MAAI,CAAC,MAAM,QAAQ,KAAK,GAAG;AACzB,WAAO,EAAE,QAAQ,KAAK,MAAM,EAAE,OAAO,sCAAsC
,EAAE;AAAA,EAC/E;AACA,QAAM,eAAe,MAAM,KAAK,CAAC,UAAU,OAAO,UAAU,YAAY,MAAM,KAAK,EAAE,WAAW,CAAC;AACjG,MAAI,iBAAiB,QAAW;AAC9B,WAAO,EAAE,QAAQ,KAAK,MAAM,EAAE,OAAO,0CAA0C,EAAE;AAAA,EACnF;AACA,SAAO;AACT;AAEA,SAAS,aAAa,GAAW,GAAoB;AACnD,SAAO,MAAM,KAAK,EAAE,WAAW,GAAG,CAAC,GAAG,KAAK,EAAE,WAAW,GAAG,CAAC,GAAG;AACjE;AAcA,eAAsB,sBAAsB,UAAkB,MAAuC;AACnG,QAAM,UAAW,QAAQ,CAAC;AAC1B,MAAI,CAAC,oBAAoB,QAAQ,KAAK,GAAG;AACvC,WAAO,EAAE,QAAQ,KAAK,MAAM,EAAE,OAAO,iCAAiC,EAAE;AAAA,EAC1E;AACA,QAAM,QAAQ,QAAQ;AACtB,QAAM,QAAQ,QAAQ,SAAS,CAAC;AAChC,QAAM,mBAAmB,kBAAkB,QAAQ,KAAK;AACxD,MAAI,MAAM,QAAQ,gBAAgB,MAAM,MAAO,QAAO;AACtD,QAAM,YAAY;AAElB,MAAI,OAAO,UAAU,YAAY,UAAU,QAAQ,MAAM,QAAQ,KAAK,GAAG;AACvE,WAAO,EAAE,QAAQ,KAAK,MAAM,EAAE,OAAO,0BAA0B,EAAE;AAAA,EACnE;AACA,QAAM,cAAc,mBAAmB,KAAK;AAC5C,QAAM,WAAW,YAAY,SAAS;AACtC,QAAM,WAAW,UAAU,SAAS;AACpC,MAAI,CAAC,YAAY,CAAC,UAAU;AAC1B,WAAO,EAAE,QAAQ,KAAK,MAAM,EAAE,OAAO,0BAA0B,EAAE;AAAA,EACnE;AAEA,QAAM,UAAU,YAAY,OAAO,CAAC,SAAS,UAAU,KAAK,CAAC,cAAc,aAAa,MAAM,SAAS,CAAC,CAAC;AACzG,MAAI,QAAQ,SAAS,GAAG;AACtB,WAAO,EAAE,QAAQ,KAAK,MAAM,EAAE,OAAO,uBAAuB,MAAM,QAAQ,EAAE;AAAA,EAC9E;AAEA,MAAI,UAAU,SAAS;AACrB,QAAI;AACF,YAAM,UAAU,WAAW,QAAQ;AACnC,YAAM,UAAU,kBAAkB,UAAU,CAAC,UAAU;AACrD,cAAM,UAAU,gBAAgB,KAAK;AACrC,mBAAW,OAAO,UAAW,aAAY,SAAS,GAAG;AACrD,cAAM,YAAY;AAAA,UAChB;AAAA,UACA;AAAA,QACF;AACA,cAAM,SAAS;AAAA,UACb;AAAA,UACA;AAAA,QACF;AACA,yBAAiB,MAAM,MAAM;AAC7B,eAAO;AAAA,MACT,CAAC;AACD,aAAO,EAAE,MAAM,QAAQ;AAAA,IACzB,SAAS,KAAK;AACZ,UAAI,eAAe,iBAAE,UAAU;AAC7B,eAAO,EAAE,QAAQ,KAAK,MAAM,EAAE,OAAO,qBAAqB,QAAQ,IAAI,OAAO,EAAE;AAAA,MACjF;AACA,YAAM;AAAA,IACR;AAAA,EACF;AAEA,MAAI;AAIF,UAAM,UAAU,aAAa,UAAU,CAAC,YAAY;AAClD,YAAM,UAAU,gBAAgB,OAAO;AACvC,iBAAW,OAAO,UAAW,aAAY,SAAS,GAAG;AACrD,aAAO,gBAAgB,SAAS,KAAgC;AAAA,IAClE,CAAC;AACD,WAAO,EAAE,MAAM,QAAQ;AAAA,EACzB,SAAS,KAAK;AACZ,QAAI,eAAe,iBAAE,UAAU;AAC7B,aAAO,EAAE,QAAQ,KAAK,MAAM,EAAE,OAAO,qBAAqB,QAAQ,IAAI,OAAO,EAAE;AAAA,IACjF;AACA,UAAM;AAAA,EACR;AACF;AAUO,SAAS,sBAAsB,MAAmB;AAOvD,iBAAe,kBAAkB,MAA4C;AAC3E,WAAO,EAAE,MAAM,EAAE,UAAU,KAAK,wBAAwB,EAAE;AAAA,EAC5D;AAE
A,SAAO,EAAE,kBAAkB;AAC7B;;;AC5JO,SAAS,aAAa,KAA0C;AACrE,MAAI,CAAC,IAAK,QAAO,CAAC;AAClB,SAAO,IACJ,MAAM,GAAG,EACT,IAAI,CAAC,UAAU,MAAM,KAAK,CAAC,EAC3B,OAAO,CAAC,UAAU,MAAM,SAAS,CAAC;AACvC;;;ACCA,IAAM,oBAAoB;AAG1B,IAAM,uBAAuB;AA6D7B,SAAS,cAAc,KAA2C;AAChE,SAAO;AAAA,IACL,IAAI,IAAI;AAAA,IACR,WAAW,IAAI;AAAA,IACf,OAAO,IAAI;AAAA,IACX,MAAM,IAAI;AAAA,IACV,WAAW,IAAI;AAAA,IACf,SAAS,IAAI;AAAA,IACb,MAAO,IAAI,QAAmB;AAAA,IAC9B,YAAa,IAAI,cAAyB;AAAA,EAC5C;AACF;AAOA,SAAS,gBAAgB,UAA4B;AACnD,QAAM,WAAW,YAAY,QAAoB,KAAK;AACtD,SAAQ,OAAO,KAAK,WAAW,EAAiB;AAAA,IAC9C,CAAC,MAAM,YAAY,CAAC,KAAK;AAAA,EAC3B;AACF;AAYO,SAAS,eAAe,OAA+B;AAC5D,QAAM,KAAK,YAAY;AAEvB,QAAM,OAAO,GAAG;AAAA,IACd;AAAA;AAAA,EAEF,EAAE;AAAA,IACA,MAAM;AAAA,IACN,MAAM;AAAA,IACN,MAAM;AAAA,IACN,MAAM;AAAA,IACN,MAAM;AAAA,IACN,MAAM;AAAA,IACN,MAAM;AAAA,EACR;AAEA,SAAO,KAAK;AACd;AAgBO,SAAS,WAAW,QAA0C;AACnE,QAAM,KAAK,YAAY;AAEvB,QAAM,OAAO,OAAO,QAAQ;AAC5B,QAAM,WAAW,OAAO,aAAa;AACrC,QAAM,UAAU,OAAO,KAAK;AAE5B,QAAM,aAAuB,CAAC;AAC9B,QAAM,cAAyB,CAAC;AAGhC,MAAI,OAAO,MAAM,UAAa,OAAO,EAAE,SAAS,GAAG;AACjD,eAAW,KAAK,4EAA4E;AAC5F,gBAAY,KAAK,OAAO,CAAC;AAAA,EAC3B;AAGA,MAAI,OAAO,UAAU,UAAa,OAAO,MAAM,SAAS,GAAG;AACzD,UAAM,SAAS,gBAAgB,OAAO,KAAK;AAC3C,QAAI,OAAO,SAAS,GAAG;AACrB,iBAAW,KAAK,8CAA8C;AAC9D,kBAAY,KAAK,KAAK,UAAU,MAAM,CAAC;AAAA,IACzC;AAAA,EACF;AAGA,MAAI,OAAO,cAAc,UAAa,OAAO,UAAU,SAAS,GAAG;AACjE,UAAM,aAAa,aAAa,OAAO,SAAS;AAChD,QAAI,WAAW,SAAS,GAAG;AACzB,iBAAW,KAAK,kDAAkD;AAClE,kBAAY,KAAK,KAAK,UAAU,UAAU,CAAC;AAAA,IAC7C;AAAA,EACF;AAGA,MAAI,OAAO,SAAS,UAAa,OAAO,KAAK,SAAS,GAAG;AACvD,eAAW,KAAK,aAAa;AAC7B,gBAAY,KAAK,OAAO,IAAI;AAAA,EAC9B;AAGA,MAAI,OAAO,eAAe,UAAa,OAAO,WAAW,SAAS,GAAG;AACnE,eAAW,KAAK,mBAAmB;AACnC,gBAAY,KAAK,OAAO,UAAU;AAAA,EACpC;AAGA,MAAI,OAAO,SAAS,UAAa,OAAO,KAAK,SAAS,GAAG;AACvD,eAAW,KAAK,mBAAmB;AACnC,gBAAY,KAAK,OAAO,IAAI;AAAA,EAC9B;AAEA,MAAI,OAAO,OAAO,UAAa,OAAO,GAAG,SAAS,GAAG;AACnD,eAAW,KAAK,mBAAmB;AACnC,gBAAY,KAAK,OAAO,EAAE;AAAA,EAC5B;AAEA,QAAM,QAAQ,WAAW,SAAS,IAAI,SAAS,WAAW,KAAK,OAAO,CAAC,KAAK;AAE5E,QAAM,WAAW,GAAG;AAAA,IAClB,gDAAgD,KAAK;AAAA,EACvD,EAAE,IAAI,G
AAG,WAAW;AAEpB,QAAM,OAAO,GAAG;AAAA,IACd;AAAA;AAAA,OAEG,KAAK;AAAA;AAAA;AAAA;AAAA,EAIV,EAAE,IAAI,GAAG,aAAa,UAAU,MAAM;AAEtC,SAAO;AAAA,IACL,SAAS,KAAK,IAAI,aAAa;AAAA,IAC/B,OAAO,SAAS;AAAA,IAChB;AAAA,IACA,WAAW;AAAA,EACb;AACF;AAQO,SAAS,aAAa,SAAiB,OAAiC;AAC7E,QAAM,KAAK,YAAY;AACvB,QAAM,iBAAiB,SAAS;AAEhC,QAAM,OAAO,GAAG;AAAA,IACd;AAAA;AAAA;AAAA;AAAA;AAAA,EAKF,EAAE,IAAI,SAAS,cAAc;AAE7B,QAAM,UAAU,KAAK,IAAI,aAAa;AACtC,QAAM,SAAS,QAAQ,SAAS,IAAI,QAAQ,QAAQ,SAAS,CAAC,EAAE,KAAK;AAErE,SAAO,EAAE,SAAS,OAAO;AAC3B;AASO,SAAS,WAAW,OAAiC;AAC1D,QAAM,KAAK,YAAY;AACvB,QAAM,iBAAiB,SAAS;AAGhC,QAAM,OAAO,GAAG;AAAA,IACd;AAAA;AAAA;AAAA;AAAA,EAIF,EAAE,IAAI,cAAc;AAEpB,QAAM,UAAU,KAAK,IAAI,aAAa,EAAE,QAAQ;AAChD,QAAM,SAAS,QAAQ,SAAS,IAAI,QAAQ,QAAQ,SAAS,CAAC,EAAE,KAAK;AAErE,SAAO,EAAE,SAAS,OAAO;AAC3B;AAOO,SAAS,YAAY,IAAgC;AAC1D,QAAM,KAAK,YAAY;AAEvB,QAAM,MAAM,GAAG;AAAA,IACb;AAAA;AAAA;AAAA,EAGF,EAAE,IAAI,EAAE;AAER,MAAI,CAAC,IAAK,QAAO;AACjB,SAAO,cAAc,GAAG;AAC1B;AASO,SAAS,cAAc,iBAAiC;AAC7D,QAAM,KAAK,YAAY;AAEvB,QAAM,OAAO,GAAG;AAAA,IACd;AAAA,EACF,EAAE,IAAI,eAAe;AAErB,SAAO,KAAK;AACd;AAQO,SAAS,kBAAiC;AAC/C,QAAM,KAAK,YAAY;AAEvB,QAAM,MAAM,GAAG;AAAA,IACb;AAAA,EACF,EAAE,IAAI;AAEN,SAAO,IAAI;AACb;;;ACtTA,eAAsB,gBAAgB,KAA2C;AAC/E,QAAM,EAAE,GAAG,OAAO,WAAW,MAAM,YAAY,MAAM,IAAI,MAAM,UAAU,IAAI,IAAI;AAEjF,QAAM,SAAS,WAAW;AAAA,IACxB,GAAG,KAAK;AAAA,IACR,OAAO,SAAS;AAAA,IAChB,WAAW,aAAa;AAAA,IACxB,MAAM,QAAQ;AAAA,IACd,YAAY,cAAc;AAAA,IAC1B,MAAM,QAAQ;AAAA,IACd,IAAI,MAAM;AAAA,IACV,MAAM,OAAO,SAAS,MAAM,EAAE,IAAI;AAAA,IAClC,WAAW,YAAY,SAAS,WAAW,EAAE,IAAI;AAAA,EACnD,CAAC;AAED,SAAO;AAAA,IACL,MAAM;AAAA,MACJ,SAAS,OAAO,QAAQ,IAAI,WAAW;AAAA,MACvC,OAAO,OAAO;AAAA,MACd,MAAM,OAAO;AAAA,MACb,WAAW,OAAO;AAAA,IACpB;AAAA,EACF;AACF;AAMA,eAAsB,gBAAgB,KAA2C;AAC/E,QAAM,WAAW,IAAI,MAAM;AAC3B,QAAM,WAAW,IAAI,MAAM;AAC3B,QAAM,iBAAiB,IAAI,MAAM,YAAY;AAC7C,QAAM,QAAQ,WAAW,SAAS,UAAU,EAAE,IAAI;AAKlD,QAAM,SAAS,aAAa,SACxB,WAAW,KAAK,IAChB,aAAa,SAAS,UAAU,EAAE,GAAG,KAAK;AAC9C,QAAM,UAAU,OAAO,QAAQ,IAAI,WAAW;AAC9C,QAAM,WAAW,iBACb,QAAQ,OAAO,CAAC,MAAM,EAAE,aAAa,cAAc,IACnD;AAEJ,SAAO;AAAA,IAC
L,MAAM;AAAA,MACJ,SAAS;AAAA,MACT,QAAQ,OAAO;AAAA,IACjB;AAAA,EACF;AACF;AAMA,eAAsB,gBAAgB,KAA2C;AAC/E,QAAM,KAAK,SAAS,IAAI,OAAO,IAAI,EAAE;AACrC,MAAI,MAAM,EAAE,EAAG,QAAO,EAAE,QAAQ,KAAK,MAAM,EAAE,OAAO,uBAAuB,EAAE;AAE7E,QAAM,QAAQ,YAAY,EAAE;AAC5B,MAAI,CAAC,MAAO,QAAO,EAAE,QAAQ,KAAK,MAAM,EAAE,OAAO,sBAAsB,EAAE;AAEzE,QAAM,SAAS,MAAM,OAAO,KAAK,MAAM,MAAM,IAAI,IAAI,CAAC;AACtD,QAAM,WAAoC,CAAC;AAG3C,MAAI,MAAM,YAAY;AACpB,QAAI;AACF,YAAM,UAAU,WAAW,MAAM,UAAU;AAC3C,UAAI,SAAS;AACX,iBAAS,gBAAiB,QAA+B,SAAS;AAAA,MACpE;AAAA,IACF,QAAQ;AAAA,IAA8B;AAAA,EACxC;AAEA,SAAO;AAAA,IACL,MAAM;AAAA,MACJ,GAAG;AAAA,MACH,MAAM;AAAA,MACN;AAAA,IACF;AAAA,EACF;AACF;AAMA,IAAM,kBAAkB,iBAAE,OAAO;AAAA,EAC/B,OAAO,iBAAE,KAAK,CAAC,SAAS,QAAQ,QAAQ,OAAO,CAAC;AAAA,EAChD,WAAW,iBAAE,OAAO;AAAA,EACpB,SAAS,iBAAE,OAAO;AAAA,EAClB,MAAM,iBAAE,OAAO,iBAAE,OAAO,GAAG,iBAAE,QAAQ,CAAC,EAAE,SAAS;AACnD,CAAC;AAMM,SAAS,0BAA0B,QAAoC;AAC5E,SAAO,OAAO,QAA8C;AAC1D,UAAM,EAAE,OAAO,WAAW,SAAS,KAAK,IAAI,gBAAgB,MAAM,IAAI,IAAI;AAC1E,WAAO,IAAI,OAAO,UAAU,WAAW,SAAS,EAAE,GAAG,MAAM,eAAe,UAAU,CAAC;AACrF,WAAO,EAAE,MAAM,EAAE,IAAI,KAAK,EAAE;AAAA,EAC9B;AACF;AAMA,SAAS,YAAY,OAAoB;AACvC,QAAM,MAAM,MAAM,KAAK,QAAQ,GAAG;AAClC,QAAM,WAAW,MAAM,IAAI,MAAM,KAAK,MAAM,GAAG,GAAG,IAAI,MAAM;AAC5D,SAAO;AAAA,IACL,GAAG;AAAA,IACH;AAAA,IACA,MAAM,MAAM,OAAO,KAAK,MAAM,MAAM,IAAI,IAAI;AAAA,EAC9C;AACF;;;AC1IA,SAAS,aAAa;AAOtB,IAAM,oBAAoB,iBAAE,OAAO;AAAA,EACjC,OAAO,iBAAE,QAAQ,EAAE,SAAS;AAC9B,CAAC,EAAE,SAAS;AAEZ,IAAM,8BAA8B;AAOpC,eAAsB,cACpB,MACA,MACwB;AACxB,QAAM,SAAS,kBAAkB,UAAU,IAAI;AAC/C,QAAM,QAAQ,OAAO,UAAU,OAAO,MAAM,QAAQ;AAGpD,MAAI,CAAC,SAAS,KAAK,gBAAgB,oBAAoB,GAAG;AACxD,WAAO;AAAA,MACL,QAAQ;AAAA,MACR,MAAM,EAAE,QAAQ,QAAQ,SAAS,6DAA6D;AAAA,IAChG;AAAA,EACF;AAKA,QAAM,EAAE,UAAU,SAAS,IAAI,oBAAoB;AACnD,QAAM,WAAW,SAAS,2BAA2B,OAAO,QAAQ,IAAI,QAAQ,mBAAmB,KAAK,QAAQ;AAEhH,QAAM,QAAQ,MAAM,WAAW,CAAC,MAAM,QAAQ,GAAG;AAAA,IAC/C,UAAU;AAAA,IACV,OAAO;AAAA,EACT,CAAC;AACD,QAAM,MAAM;AAGZ,aAAW,MAAM;AACf,YAAQ,KAAK,QAAQ,KAAK,SAAS;AAAA,EACrC,GAAG,yBAAyB;AAE5B,SAAO,EAAE,MAAM,EAAE,QAAQ,aAAa,EAAE;AAC1C;;;AC9BA,kBAAiB;AAHjB,OAA
OC,SAAQ;AACf,OAAOC,WAAU;AACjB,SAAS,oBAAoB;AAE7B,OAAO,YAAY;AA0EnB,IAAM,4BAA4B;AAiBlC,IAAI,mBAAkC;AAO/B,SAAS,oBAAoB,UAA+B;AACjE,qBAAmB;AACrB;AAqBO,SAAS,oBAA4B;AAC1C,SAAO,oBAAoB;AAC7B;AAMO,SAAS,iBAA0B;AACxC,SAAO,qBAAqB;AAC9B;AAuBO,SAAS,eACd,cACA,UACA,UACe;AACf,MAAI,CAAC,aAAc,QAAO;AAC1B,MAAI,CAAC,SAAU,QAAO;AACtB,MAAI;AACF,UAAM,gBAAgB,SAAS,QAAQ;AACvC,UAAM,iBAAiB,SAAS,YAAY;AAC5C,QAAI,cAAc,WAAW,iBAAiBC,MAAK,GAAG,KAAK,kBAAkB,gBAAgB;AAC3F,aAAO;AAAA,IACT;AACA,WAAO;AAAA,EACT,QAAQ;AACN,WAAO;AAAA,EACT;AACF;AAOA,SAAS,sBAAoC;AAC3C,SAAO;AAAA,IACL,SAAS;AAAA,IACT,sBAAsB;AAAA,EACxB;AACF;AAMO,SAAS,mBAAiC;AAC/C,MAAI;AACF,UAAM,MAAMC,IAAG,aAAa,oBAAoB,OAAO;AACvD,UAAM,SAAS,YAAAC,QAAK,MAAM,GAAG;AAE7B,UAAM,UAAU,iBAAiB,SAAS,QAAQ,OAAyB,IACtE,OAAO,UACR;AAEJ,UAAM,uBACJ,OAAO,QAAQ,yBAAyB,YAAY,OAAO,uBAAuB,IAC9E,OAAO,uBACP;AAEN,WAAO,EAAE,SAAS,qBAAqB;AAAA,EACzC,QAAQ;AACN,WAAO,oBAAoB;AAAA,EAC7B;AACF;AAKO,SAAS,kBAAkB,QAA4B;AAC5D,EAAAD,IAAG,UAAU,iBAAiB,EAAE,WAAW,KAAK,CAAC;AACjD,EAAAA,IAAG,cAAc,oBAAoB,YAAAC,QAAK,UAAU,MAAM,GAAG,OAAO;AACtE;AAUO,SAAS,kBAAsC;AACpD,MAAI;AACF,UAAM,MAAMD,IAAG,aAAa,yBAAyB,OAAO;AAC5D,UAAM,SAAS,KAAK,MAAM,GAAG;AAE7B,QAAI,UAAU,OAAO,WAAW,YAAY,cAAc,UAAU,OAAO,UAAU;AACnF,aAAO;AAAA,IACT;AAEA,UAAM,SAAS;AAOf,QACE,OAAO,OAAO,eAAe,YAC7B,OAAO,OAAO,kBAAkB,UAChC;AACA,aAAO;AAAA,QACL,YAAY,OAAO;AAAA,QACnB,SAAS,iBAAiB,SAAS,OAAO,OAAyB,IAC9D,OAAO,UACR;AAAA,QACJ,UAAU;AAAA,UACR,MAAM;AAAA,YACJ,cAAc;AAAA,YACd,eAAe,OAAO;AAAA,YACtB,aAAa,OAAO,eAAe;AAAA,UACrC;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAEA,WAAO;AAAA,EACT,QAAQ;AACN,WAAO;AAAA,EACT;AACF;AAMO,SAAS,mBAAyB;AACvC,MAAI;AACF,IAAAA,IAAG,WAAW,uBAAuB;AAAA,EACvC,QAAQ;AAAA,EAER;AACF;AAMO,SAAS,aAAa,OAA2B,eAAgC;AACtF,MAAI,UAAU,KAAM,QAAO;AAE3B,QAAM,YAAY,IAAI,KAAK,MAAM,UAAU,EAAE,QAAQ;AACrD,MAAI,MAAM,SAAS,EAAG,QAAO;AAE7B,QAAM,QAAQ,KAAK,IAAI,IAAI;AAC3B,SAAO,QAAQ,gBAAgB;AACjC;AAmCA,SAAS,mBAAmB,aAA6B;AACvD,SAAO,GAAG,qBAAqB,IAAI,mBAAmB,WAAW,CAAC;AACpE;AAWA,SAAS,qBAAqB,UAAuB,SAAiC;AACpF,QAAM,SAAS,SAAS;AACxB,QAAM,OAAO,SAAS,QAAQ;AAE9B,MAAI,YAAY,YAAY,SAAS,MAAM;AACzC,WAAO;AAAA,EACT;A
AGA,QAAM,SAAS,OAAO,GAAG,MAAM,MAAM,IAAI,OAAO;AAChD,SAAO;AACT;AAEA,SAAS,8BACP,KACA,SACQ;AACR,SAAO;AAAA,IACL,EAAE,QAAQ,IAAI,eAAe,MAAM,IAAI,eAAe,OAAU;AAAA,IAChE;AAAA,EACF;AACF;AAEA,SAAS,8BACP,cACA,gBACwC;AACxC,QAAM,YAAoD;AAAA,IACxD,MAAM;AAAA,IACN,aAAa;AAAA,IACb,mBAAmB;AAAA,EACrB;AAEA,MAAI,iBAAiB,KAAM,QAAO;AAElC,aAAW,OAAO,iBAAiB;AACjC,UAAM,UAAU,oBAAoB,cAAc,IAAI,WAAW;AACjE,QAAI,IAAI,OAAO,QAAQ;AACrB,gBAAU,OAAO,WAAW;AAC5B;AAAA,IACF;AACA,cAAU,IAAI,EAAE,IAAI;AAAA,EACtB;AAEA,SAAO;AACT;AAEA,SAAS,oBACP,gBACA,OACA,SACA,cACsB;AACtB,QAAM,oBAAoB,8BAA8B,cAAc,cAAc;AAEpF,SAAO,gBAAgB,IAAI,CAAC,QAAQ;AAClC,UAAM,SAAS,MAAM,SAAS,IAAI,EAAE;AACpC,UAAM,mBAAmB,kBAAkB,IAAI,EAAE;AACjD,UAAM,gBAAgB,SAAS,8BAA8B,QAAQ,OAAO,IAAI;AAChF,UAAM,kBACJ,qBAAqB,QACrB,kBAAkB,QAClB,OAAO,MAAM,gBAAgB,MAAM,QACnC,OAAO,MAAM,aAAa,MAAM,QAChC,OAAO,GAAG,eAAe,gBAAgB;AAE3C,WAAO;AAAA,MACL,IAAI,IAAI;AAAA,MACR,cAAc,IAAI;AAAA,MAClB,cAAc,IAAI;AAAA,MAClB,WAAW,qBAAqB;AAAA,MAChC,mBAAmB;AAAA,MACnB,gBAAgB;AAAA,MAChB,eAAe,QAAQ,iBAAiB;AAAA,MACxC,aAAa,QAAQ,eAAe;AAAA,MACpC,kBAAkB;AAAA,IACpB;AAAA,EACF,CAAC;AACH;AAMA,SAAS,iBACP,gBACA,OACA,QACA,OACA,cACa;AACb,QAAM,WAAW,oBAAoB,gBAAgB,OAAO,MAAM,SAAS,YAAY;AACvF,QAAM,iBAAiB,SAAS,KAAK,CAAC,QAAQ,IAAI,OAAO,MAAM;AAC/D,QAAM,gBAAgB,gBAAgB,kBAAkB;AACxD,QAAM,eAAe,gBAAgB,iBAAiB;AACtD,QAAM,aAAa,gBAAgB,eAAe;AAClD,QAAM,kBAAkB,SAAS,KAAK,CAAC,QAAQ,IAAI,aAAa,IAAI,gBAAgB;AAEpF,SAAO;AAAA,IACL,kBAAkB;AAAA,IAClB,iBAAiB;AAAA,IACjB,gBAAgB;AAAA,IAChB,eAAe;AAAA,IACf,aAAa;AAAA,IACb,SAAS,MAAM;AAAA,IACf,sBAAsB,OAAO;AAAA,IAC7B,YAAY,MAAM;AAAA,IAClB;AAAA,IACA;AAAA,EACF;AACF;AAaO,SAAS,sBAA8B;AAC5C,SAAO,aAAa,OAAO,CAAC,UAAU,IAAI,GAAG,EAAE,UAAU,SAAS,SAAS,IAAM,CAAC,EAAE,KAAK;AAC3F;AASO,SAAS,oBACd,cACA,cAAc,kBACC;AACf,MAAI;AACF,UAAM,UAAUE,MAAK;AAAA,MACnB;AAAA,MAAc;AAAA,MAAO;AAAA,MAAgB;AAAA,MAAa;AAAA,IACpD;AACA,UAAM,MAAMC,IAAG,aAAa,SAAS,OAAO;AAC5C,UAAM,MAAM,KAAK,MAAM,GAAG;AAC1B,WAAO,IAAI,WAAW;AAAA,EACxB,QAAQ;AACN,WAAO;AAAA,EACT;AACF;AAaA,eAAsB,eACpB,gBACA,eAA8B,MACR;AACtB,QAAM,SAAS,iBAAiB;AAChC,QAAM,gBAAgB,gBAAgB;AAEtC,QAAM,gBAAsE
,CAAC;AAC7E,QAAM,cAAwB,CAAC;AAE/B,QAAM,iBAAiB,MAAM,QAAQ;AAAA,IACnC,gBAAgB,IAAI,OAAO,QAAQ;AACjC,YAAM,WAAW,MAAM,MAAM,mBAAmB,IAAI,WAAW,GAAG;AAAA,QAChE,QAAQ,YAAY,QAAQ,yBAAyB;AAAA,MACvD,CAAC;AAED,UAAI,CAAC,SAAS,IAAI;AAChB,cAAM,IAAI,MAAM,GAAG,IAAI,WAAW,6BAA6B,SAAS,MAAM,EAAE;AAAA,MAClF;AAEA,YAAM,OAAQ,MAAM,SAAS,KAAK;AAClC,aAAO;AAAA,QACL,IAAI,IAAI;AAAA,QACR,cAAc,IAAI;AAAA,QAClB,eAAe,KAAK,WAAW,EAAE;AAAA,QACjC,aAAa,KAAK,WAAW,EAAE,QAAQ;AAAA,MACzC;AAAA,IACF,CAAC;AAAA,EACH;AAEA,aAAW,UAAU,gBAAgB;AACnC,QAAI,OAAO,WAAW,aAAa;AACjC,oBAAc,OAAO,MAAM,EAAE,IAAI;AAAA,QAC/B,cAAc,OAAO,MAAM;AAAA,QAC3B,eAAe,OAAO,MAAM;AAAA,QAC5B,aAAa,OAAO,MAAM;AAAA,MAC5B;AACA;AAAA,IACF;AAEA,UAAM,UAAU,OAAO,kBAAkB,QAAQ,OAAO,OAAO,UAAU,OAAO,OAAO,MAAM;AAC7F,gBAAY,KAAK,OAAO;AAAA,EAC1B;AAEA,MAAI,kBAAkB,MAAM;AAC1B,eAAW,OAAO,iBAAiB;AACjC,UAAI,cAAc,IAAI,EAAE,MAAM,OAAW;AACzC,YAAM,SAAS,cAAc,SAAS,IAAI,EAAE;AAC5C,UAAI,QAAQ;AACV,sBAAc,IAAI,EAAE,IAAI;AAAA,MAC1B;AAAA,IACF;AAAA,EACF;AAEA,MAAI,OAAO,KAAK,aAAa,EAAE,WAAW,GAAG;AAC3C,UAAM,aAAa,YAAY,CAAC,KAAK;AACrC,WAAO;AAAA,MACL,kBAAkB;AAAA,MAClB,iBAAiB;AAAA,MACjB,gBAAgB;AAAA,MAChB,eAAe;AAAA,MACf,aAAa;AAAA,MACb,SAAS,OAAO;AAAA,MAChB,sBAAsB,OAAO;AAAA,MAC7B,aAAY,oBAAI,KAAK,GAAE,YAAY;AAAA,MACnC,OAAO;AAAA,MACP,UAAU;AAAA,QACR;AAAA,QACA,EAAE,aAAY,oBAAI,KAAK,GAAE,YAAY,GAAG,SAAS,OAAO,SAAS,UAAU,CAAC,EAAE;AAAA,QAC9E,OAAO;AAAA,QACP;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAEA,QAAM,aAA0B;AAAA,IAC9B,aAAY,oBAAI,KAAK,GAAE,YAAY;AAAA,IACnC,SAAS,OAAO;AAAA,IAChB,UAAU;AAAA,EACZ;AAEA,MAAI;AACF,IAAAA,IAAG,UAAUD,MAAK,QAAQ,uBAAuB,GAAG,EAAE,WAAW,KAAK,CAAC;AACvE,IAAAC,IAAG,cAAc,yBAAyB,KAAK,UAAU,YAAY,MAAM,CAAC,GAAG,OAAO;AAAA,EACxF,QAAQ;AAAA,EAER;AAEA,QAAM,QAAQ,YAAY,SAAS,IAAI,YAAY,KAAK,IAAI,IAAI;AAChE,SAAO,iBAAiB,gBAAgB,YAAY,QAAQ,OAAO,YAAY;AACjF;AASO,SAAS,gBACd,gBACA,OACA,QACA,eAA8B,MACV;AACpB,QAAM,gBAAgB,UAAU,SAAY,QAAQ,gBAAgB;AACpE,MAAI,kBAAkB,KAAM,QAAO;AAEnC,QAAM,iBAAiB,WAAW,SAAY,SAAS,iBAAiB;AACxE,SAAO,iBAAiB,gBAAgB,eAAe,gBAAgB,MAAM,YAAY;AAC3F;;;AC/mBA,OAAOC,UAAQ;AACf,OAAOC,SAAQ;AACf,OAAOC,YAAU;AACjB,SAAS,SAAAC,cAAa;AA6Cf
,SAAS,qBAAqB,QAA+B;AAClE,QAAM,EAAE,cAAc,aAAa,UAAU,WAAW,IAAI;AAG5D,QAAM,cAAc,aAAa,IAAI,CAAC,SAAS,KAAK,UAAU,IAAI,CAAC,EAAE,KAAK,GAAG;AAC7E,QAAM,oBAAoB,KAAK,UAAU,WAAW;AACpD,QAAM,iBAAiB,KAAK,UAAU,QAAQ;AAC9C,QAAM,mBAAmB,KAAK,UAAU,UAAU;AAClD,QAAM,kBAAkB,KAAK,UAAU,iBAAiB;AACxD,QAAM,YAAY,KAAK;AAAA,IACrB,KAAK,UAAU,EAAE,OAAO,0BAA0B,aAAa,KAAK,IAAI,CAAC,GAAG,CAAC;AAAA,EAC/E;AAOA,SAAO;AAAA;AAAA,OAEF,gBAAgB;AAAA;AAAA;AAAA,QAGf,2BAA2B;AAAA;AAAA;AAAA,oBAGf,WAAW;AAAA;AAAA,6BAEF,iBAAiB;AAAA;AAAA,UAEpC,eAAe;AAAA;AAAA;AAAA,SAGhB,SAAS,MAAM,eAAe;AAAA;AAAA;AAAA;AAAA,yBAId,cAAc;AAAA;AAAA;AAAA;AAAA;AAKvC;AAUA,SAAS,oBAAoB,YAAoB,SAAyB;AACxE,QAAM,aAAaC,OAAK,KAAKC,IAAG,OAAO,GAAG,GAAG,UAAU,IAAI,KAAK,IAAI,CAAC,KAAK;AAC1E,EAAAC,KAAG,cAAc,YAAY,SAAS,EAAE,UAAU,SAAS,MAAM,IAAM,CAAC;AAExE,QAAM,QAAQC,OAAM,WAAW,CAAC,UAAU,GAAG;AAAA,IAC3C,UAAU;AAAA,IACV,OAAO;AAAA,EACT,CAAC;AACD,QAAM,MAAM;AAEZ,SAAO;AACT;AAKO,SAAS,kBAAkB,QAA+B;AAE/D,EAAAD,KAAG,UAAU,iBAAiB,EAAE,WAAW,KAAK,CAAC;AACjD,SAAO,oBAAoB,eAAe,qBAAqB,MAAM,CAAC;AACxE;AAmCO,SAAS,sBAAsB,QAA+B;AACnE,QAAM,EAAE,aAAa,UAAU,gBAAgB,aAAa,WAAW,WAAW,IAAI;AACtF,QAAM,oBAAoB,KAAK,UAAU,WAAW;AACpD,QAAM,iBAAiB,KAAK,UAAU,QAAQ;AAC9C,QAAM,mBAAmB,KAAK,UAAU,UAAU;AAClD,QAAM,aAAa,KAAK,UAAUF,OAAK,KAAK,UAAU,uBAAuB,CAAC;AAI9E,QAAM,aAAa,KAAK,UAAU,KAAK,UAAU;AAAA,IAC/C,QAAQ;AAAA,IACR,cAAc;AAAA,IACd,YAAY;AAAA,IACZ,kBAAkB;AAAA,EACpB,CAAC,CAAC;AAEF,QAAM,cAAc,iBAChB;AAAA;AAAA,2BAEqB,iBAAiB,aACtC;AAIJ,SAAO;AAAA;AAAA,OAEF,gBAAgB;AAAA;AAAA;AAAA,QAGf,2BAA2B;AAAA,EACjC,WAAW;AAAA;AAAA;AAAA,OAGN,UAAU,MAAM,UAAU;AAAA;AAAA;AAAA,yBAGR,cAAc;AAAA;AAAA;AAAA;AAAA;AAKvC;AAKO,SAAS,mBAAmB,QAA+B;AAChE,SAAO,oBAAoB,gBAAgB,sBAAsB,MAAM,CAAC;AAC1E;;;ACxLA,OAAOI,aAAY;AACnB,OAAOC,UAAQ;AACf,OAAOC,YAAU;AAyBjB,IAAM,oBAAoB,iBAAE,OAAO;AAAA,EACjC,SAAS,iBAAE,KAAK,gBAAgB;AAClC,CAAC;AAWM,SAAS,qBAAqB,MAAkB;AACrD,QAAM,EAAE,UAAU,aAAa,gBAAgB,kBAAkB,aAAa,IAAI;AAGlF,MAAI,mBAAmB;AAGvB,WAAS,iBAA0B;AACjC,QAAI;AACF,YAAM,YAAYA,OAAK,KAAK,UAAU,qBAAqB;AAC3D,YAAM,QAAQD,KAAG,aAAa,WAAW,OAAO,EAAE,KAAK;AACvD,aAAO,UAAU;AAAA,IACnB,QAAQ;AACN,aAAO
;AAAA,IACT;AAAA,EACF;AAQA,iBAAe,mBAAmB,MAA4C;AAC5E,QAAI,eAAe,GAAG;AACpB,aAAO,EAAE,MAAM,EAAE,QAAQ,MAAM,iBAAiB,eAAe,EAAE;AAAA,IACnE;AAGA,QAAI,gBAAgB,CAAC,kBAAkB;AACrC,YAAM,mBAAmB,oBAAoB,YAAY;AACzD,UACE,oBACAD,QAAO,MAAM,gBAAgB,KAC7BA,QAAO,MAAM,cAAc,KAC3BA,QAAO,GAAG,kBAAkB,cAAc,GAC1C;AACA,2BAAmB;AACnB,cAAM,iBAAiB,CAAC,eAAe;AACvC,2BAAmB;AAAA,UACjB;AAAA,UAAa;AAAA,UAAU;AAAA,UACvB,aAAa;AAAA,UACb,WAAW;AAAA,UACX,YAAY,kBAAkB;AAAA,QAChC,CAAC;AACD,yBAAiB;AACjB,eAAO;AAAA,UACL,MAAM;AAAA,YACJ,YAAY;AAAA,YACZ,QAAQ;AAAA,YACR,iBAAiB;AAAA,YACjB,mBAAmB;AAAA,UACrB;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAGA,UAAM,SAAS,iBAAiB;AAChC,UAAM,QAAQ,gBAAgB;AAE9B,QAAI,aAAa,OAAO,OAAO,oBAAoB,GAAG;AAEpD,qBAAe,gBAAgB,YAAY,EAAE,MAAM,MAAM;AAAA,MAAC,CAAC;AAAA,IAC7D;AAGA,UAAM,SAAS,gBAAgB,gBAAgB,OAAO,QAAQ,YAAY;AAC1E,QAAI,CAAC,QAAQ;AAEX,aAAO;AAAA,QACL,MAAM;AAAA,UACJ,QAAQ;AAAA,UACR,kBAAkB;AAAA,UAClB,iBAAiB;AAAA,UACjB,gBAAgB;AAAA,UAChB,eAAe;AAAA,UACf,aAAa;AAAA,UACb,SAAS,OAAO;AAAA,UAChB,sBAAsB,OAAO;AAAA,UAC7B,YAAY;AAAA,UACZ,OAAO;AAAA,QACT;AAAA,MACF;AAAA,IACF;AACA,WAAO,EAAE,MAAM,EAAE,QAAQ,OAAO,GAAG,OAAO,EAAE;AAAA,EAC9C;AAQA,iBAAe,kBAAkB,MAA4C;AAC3E,QAAI,eAAe,GAAG;AACpB,aAAO;AAAA,QACL,QAAQ;AAAA,QACR,MAAM,EAAE,OAAO,iBAAiB,SAAS,+BAA+B;AAAA,MAC1E;AAAA,IACF;AAEA,UAAM,SAAS,MAAM,eAAe,gBAAgB,YAAY;AAChE,WAAO,EAAE,MAAM,EAAE,QAAQ,OAAO,GAAG,OAAO,EAAE;AAAA,EAC9C;AAOA,iBAAe,kBAAkB,MAA4C;AAC3E,QAAI,eAAe,GAAG;AACpB,aAAO,EAAE,QAAQ,KAAK,MAAM,EAAE,OAAO,gBAAgB,EAAE;AAAA,IACzD;AAEA,UAAM,SAAS,gBAAgB,gBAAgB,QAAW,QAAW,YAAY;AACjF,UAAM,gBAAgB,QAAQ,YAAY,CAAC,GACxC,OAAO,CAAC,QAAQ,IAAI,aAAa,IAAI,oBAAoB,IAAI,cAAc,EAC3E,IAAI,CAAC,QAAQ,GAAG,IAAI,YAAY,IAAI,IAAI,cAAc,EAAE;AAC3D,QAAI,CAAC,UAAU,aAAa,WAAW,GAAG;AACxC,aAAO,EAAE,QAAQ,KAAK,MAAM,EAAE,OAAO,sBAAsB,EAAE;AAAA,IAC/D;AAEA,sBAAkB;AAAA,MAChB;AAAA,MACA;AAAA,MACA;AAAA,MACA,YAAY,kBAAkB;AAAA,IAChC,CAAC;AACD,qBAAiB;AAEjB,WAAO;AAAA,MACL,MAAM;AAAA,QACJ,QAAQ;AAAA,QACR,SAAS,OAAO;AAAA,QAChB,UAAU;AAAA,MACZ;AAAA,IACF;AAAA,EACF;AAOA,iBAAe,oBAAoB,KAA2C;AAC5E,UAAM,SAAS,kBAAkB,UAAU,IAAI,IAAI;AACnD,QAAI,CAAC,OAAO,SAAS;AA
CnB,aAAO,EAAE,QAAQ,KAAK,MAAM,EAAE,OAAO,kBAAkB,EAAE;AAAA,IAC3D;AAEA,UAAM,EAAE,QAAQ,IAAI,OAAO;AAC3B,UAAM,SAAS,iBAAiB;AAEhC,sBAAkB,EAAE,GAAG,QAAQ,QAAQ,CAAC;AACxC,qBAAiB;AAEjB,UAAM,gBAAgB,gBAAgB,gBAAgB,QAAW,QAAW,YAAY;AACxF,QAAI,CAAC,eAAe;AAClB,aAAO;AAAA,QACL,MAAM;AAAA,UACJ,QAAQ;AAAA,UACR,kBAAkB;AAAA,UAClB,iBAAiB;AAAA,UACjB,gBAAgB;AAAA,UAChB,eAAe;AAAA,UACf,aAAa;AAAA,UACb;AAAA,UACA,sBAAsB,OAAO;AAAA,UAC7B,YAAY;AAAA,UACZ,OAAO;AAAA,QACT;AAAA,MACF;AAAA,IACF;AACA,WAAO,EAAE,MAAM,EAAE,QAAQ,OAAO,GAAG,cAAc,EAAE;AAAA,EACrD;AAEA,SAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF;AACF;;;AC3PA,OAAOG,UAAQ;AACf,OAAOC,YAAU;AAMV,SAAS,uBAAuB,aAAqB,QAAoB;AAC9E,QAAM,eAAe,cAAc;AACnC,QAAM,eAAe,wBAAwB,MAAM;AACnD,MAAI,cAAc;AAChB,WAAO,aAAa,OAAO,CAAC,aAAa,aAAa,IAAI,SAAS,IAAI,CAAC;AAAA,EAC1E;AAEA,SAAO,aAAa,OAAO,CAAC,aAAaC,KAAG,WAAWC,OAAK,KAAK,aAAa,SAAS,SAAS,CAAC,CAAC;AACpG;AAEO,SAAS,6BACd,aACA,WAAWA,OAAK,KAAK,aAAa,OAAO,GACzC,iBACQ;AACR,QAAM,SAAS,mBAAmB,iBAAiB,QAAQ;AAC3D,QAAM,YAAY,uBAAuB,aAAa,MAAM;AAC5D,QAAM,cAAc,mBAAmB;AACvC,MAAI,eAAe;AAEnB,aAAW,YAAY,WAAW;AAChC,UAAM,YAAY,IAAI,kBAAkB,UAAU,aAAa,WAAW;AAC1E,cAAU,QAAQ;AAClB;AAAA,EACF;AAEA,SAAO;AACT;;;AC3BA,OAAOC,UAAQ;AACf,OAAOC,YAAU;AASV,IAAM,gBAAgB;AAAA,EAC3B;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF;AAGA,IAAM,mBAAmB;AAezB,IAAM,0BAA0B;AAGhC,IAAM,yBAAyB;AAoC/B,SAAS,UAAU,OAAuB;AACxC,SAAO,MAAM,QAAQ,MAAM,IAAI;AACjC;AAUA,SAAS,aAAa,OAAwB;AAC5C,MAAI,UAAU,QAAQ,UAAU,OAAW,QAAO;AAClD,MAAI,OAAO,UAAU,SAAU,QAAO,OAAO,KAAK;AAClD,MAAI,OAAO,SAAS,KAAK,EAAG,QAAO,KAAK,MAAM,SAAS,KAAK,CAAC;AAC7D,SAAO,IAAI,UAAU,OAAO,KAAK,CAAC,CAAC;AACrC;AAeO,SAAS,aACd,IACA,WACA,WACQ;AACR,EAAAC,KAAG,UAAU,WAAW,EAAE,WAAW,KAAK,CAAC;AAE3C,QAAM,QAAkB,CAAC;AACzB,QAAM,YAAY,aAAa;AAG/B,QAAM,KAAK,GAAG,sBAAsB,gBAAgB,SAAS,gBAAgB,SAAS,EAAE;AACxF,QAAM,KAAK,wBAAwB,qBAAqB,EAAE;AAC1D,QAAM,KAAK,EAAE;AAEb,aAAW,SAAS,eAAe;AACjC,UAAM,OAAO,GAAG,QAAQ,iBAAiB,KAAK,EAAE,EAAE,IAAI;AACtD,QAAI,KAAK,WAAW,EAAG;AAEvB,UAAM,KAAK,aAAa,KAAK,KAAK,KAAK,MAAM,QAAQ;AAG
rD,UAAM,UAAU,OAAO,KAAK,KAAK,CAAC,CAAC;AACnC,UAAM,aAAa,QAAQ,IAAI,CAAC,MAAM,IAAI,CAAC,GAAG,EAAE,KAAK,IAAI;AAEzD,eAAW,OAAO,MAAM;AACtB,YAAM,SAAS,QAAQ,IAAI,CAAC,MAAM,aAAa,IAAI,CAAC,CAAC,CAAC,EAAE,KAAK,IAAI;AACjE,YAAM,KAAK,yBAAyB,KAAK,KAAK,UAAU,aAAa,MAAM,IAAI;AAAA,IACjF;AAEA,UAAM,KAAK,EAAE;AAAA,EACf;AAEA,QAAM,WAAWC,OAAK,KAAK,WAAW,GAAG,SAAS,GAAG,gBAAgB,EAAE;AACvE,EAAAD,KAAG,cAAc,UAAU,MAAM,KAAK,IAAI,GAAG,OAAO;AAEpD,SAAO;AACT;AAWO,SAAS,YAAY,WAAiC;AAC3D,MAAI;AACJ,MAAI;AACF,cAAUA,KAAG,YAAY,SAAS;AAAA,EACpC,QAAQ;AACN,WAAO,CAAC;AAAA,EACV;AAEA,QAAM,UAAwB,CAAC;AAE/B,aAAW,SAAS,SAAS;AAC3B,QAAI,CAAC,wBAAwB,KAAK,KAAK,EAAG;AAE1C,UAAM,WAAWC,OAAK,KAAK,WAAW,KAAK;AAC3C,UAAM,OAAOD,KAAG,SAAS,QAAQ;AAEjC,YAAQ,KAAK;AAAA,MACX,YAAY,MAAM,MAAM,GAAG,CAAC,iBAAiB,MAAM;AAAA,MACnD,WAAW;AAAA,MACX,YAAY,KAAK;AAAA,MACjB,aAAa,KAAK,MAAM,YAAY;AAAA,IACtC,CAAC;AAAA,EACH;AAEA,SAAO,QAAQ,KAAK,CAAC,GAAG,MAAM,EAAE,YAAY,cAAc,EAAE,WAAW,CAAC;AAC1E;AAOA,IAAM,eAAe;AAYrB,SAAS,gBAAgB,YAAoC;AAC3D,QAAM,UAAUA,KAAG,aAAa,YAAY,OAAO;AACnD,QAAM,UAA0B,CAAC;AAEjC,aAAW,QAAQ,QAAQ,MAAM,IAAI,GAAG;AACtC,UAAM,QAAQ,aAAa,KAAK,IAAI;AACpC,QAAI,CAAC,MAAO;AAEZ,YAAQ,KAAK;AAAA,MACX,OAAO,MAAM,CAAC;AAAA,MACd,SAAS,MAAM,CAAC,EAAE,MAAM,GAAG,EAAE,IAAI,CAAC,MAAM,EAAE,KAAK,EAAE,QAAQ,MAAM,EAAE,CAAC;AAAA,MAClE,UAAU,MAAM,CAAC;AAAA,IACnB,CAAC;AAAA,EACH;AAEA,SAAO;AACT;AAcO,SAAS,eACd,IACA,YACe;AACf,QAAM,UAAU,gBAAgB,UAAU;AAC1C,QAAM,SAAS,oBAAI,IAA+C;AAGlE,KAAG,OAAO,oBAAoB;AAE9B,KAAG,KAAK,2BAA2B;AACnC,MAAI;AACF,eAAW,UAAU,SAAS;AAC5B,UAAI,CAAC,OAAO,IAAI,OAAO,KAAK,GAAG;AAC7B,eAAO,IAAI,OAAO,OAAO,EAAE,KAAK,GAAG,UAAU,EAAE,CAAC;AAAA,MAClD;AACA,YAAM,cAAc,OAAO,IAAI,OAAO,KAAK;AAE3C,UAAI;AACF,cAAM,aAAa,OAAO,QAAQ,IAAI,CAAC,MAAM,IAAI,CAAC,GAAG,EAAE,KAAK,IAAI;AAChE,cAAM,OAAO,yBAAyB,OAAO,KAAK,KAAK,UAAU,aAAa,OAAO,QAAQ;AAC7F,cAAM,SAAS,GAAG,QAAQ,IAAI,EAAE,IAAI;AAEpC,YAAI,OAAO,UAAU,GAAG;AACtB,sBAAY;AAAA,QACd,OAAO;AACL,sBAAY;AAAA,QACd;AAAA,MACF,QAAQ;AACN,oBAAY;AAAA,MACd;AAAA,IACF;AAAA,EACF,UAAE;AACA,OAAG,KAAK,6BAA6B;AACrC,OAAG,KAAK,yBAAyB;AACjC,OAAG,OAAO,mBAAmB;AAAA,EAC/B;AAEA,SAAO,MA
AM,KAAK,OAAO,QAAQ,CAAC,EAAE,IAAI,CAAC,CAAC,OAAO,CAAC,OAAO;AAAA,IACvD;AAAA,IACA,KAAK,EAAE;AAAA,IACP,UAAU,EAAE;AAAA,EACd,EAAE;AACJ;AAYO,SAAS,cACd,IACA,YACe;AACf,QAAM,UAAU,gBAAgB,UAAU;AAC1C,QAAM,SAAS,oBAAI,IAA+C;AAIlE,KAAG,OAAO,oBAAoB;AAC9B,MAAI;AACF,UAAM,aAAa,GAAG,YAAY,MAAM;AACtC,iBAAW,UAAU,SAAS;AAC5B,YAAI,CAAC,OAAO,IAAI,OAAO,KAAK,GAAG;AAC7B,iBAAO,IAAI,OAAO,OAAO,EAAE,KAAK,GAAG,UAAU,EAAE,CAAC;AAAA,QAClD;AACA,cAAM,cAAc,OAAO,IAAI,OAAO,KAAK;AAE3C,cAAM,aAAa,OAAO,QAAQ,IAAI,CAAC,MAAM,IAAI,CAAC,GAAG,EAAE,KAAK,IAAI;AAChE,cAAM,OAAO,yBAAyB,OAAO,KAAK,KAAK,UAAU,aAAa,OAAO,QAAQ;AAC7F,cAAM,SAAS,GAAG,QAAQ,IAAI,EAAE,IAAI;AAEpC,YAAI,OAAO,UAAU,GAAG;AACtB,sBAAY;AAAA,QACd,OAAO;AACL,sBAAY;AAAA,QACd;AAAA,MACF;AAAA,IACF,CAAC;AAED,eAAW;AAAA,EACb,UAAE;AACA,OAAG,OAAO,mBAAmB;AAAA,EAC/B;AAEA,QAAM,SAAS,MAAM,KAAK,OAAO,QAAQ,CAAC,EAAE,IAAI,CAAC,CAAC,OAAO,CAAC,OAAO;AAAA,IAC/D;AAAA,IACA,KAAK,EAAE;AAAA,IACP,UAAU,EAAE;AAAA,EACd,EAAE;AAEF,QAAM,iBAAiB,OAAO,OAAO,CAAC,KAAK,MAAM,MAAM,EAAE,KAAK,CAAC;AAC/D,QAAM,gBAAgB,OAAO,OAAO,CAAC,KAAK,MAAM,MAAM,EAAE,UAAU,CAAC;AAEnE,SAAO,EAAE,QAAQ,gBAAgB,cAAc;AACjD;;;AClUA,OAAOE,SAAQ;AACf,OAAOC,YAAU;AAsBV,SAAS,iBAAiB,QAAoB,UAA0B;AAC7E,QAAM,SAAS,OAAO,OAAO;AAC7B,MAAI,CAAC,OAAQ,QAAOA,OAAK,QAAQ,UAAU,SAAS;AACpD,QAAM,WAAW,OAAO,WAAW,IAAI,IACnCA,OAAK,KAAKD,IAAG,QAAQ,GAAG,OAAO,MAAM,CAAC,CAAC,IACvC;AACJ,SAAOC,OAAK,QAAQ,QAAQ;AAC9B;AAWO,SAAS,qBAAqB,MAAkB;AACrD,QAAM,mBAAmB,MAAM,iBAAiB,KAAK,WAAW,SAAS,KAAK,QAAQ;AAGtF,iBAAe,mBAAmB,MAA4C;AAC5E,UAAM,YAAY,iBAAiB;AACnC,UAAM,WAAW,aAAa,KAAK,IAAI,WAAW,KAAK,SAAS;AAChE,UAAM,UAAU,YAAY,SAAS;AACrC,UAAM,UAAU,QAAQ,KAAK,CAAC,MAAM,EAAE,eAAe,KAAK,SAAS;AAEnE,WAAO;AAAA,MACL,MAAM;AAAA,QACJ,WAAW;AAAA,QACX,YAAY,KAAK;AAAA,QACjB,YAAY,SAAS,cAAc;AAAA,MACrC;AAAA,IACF;AAAA,EACF;AAGA,iBAAe,kBAAkB,MAA4C;AAC3E,UAAM,UAAU,YAAY,iBAAiB,CAAC;AAC9C,WAAO,EAAE,MAAM,EAAE,QAAQ,EAAE;AAAA,EAC7B;AAGA,iBAAe,qBAAqB,KAA2C;AAC7E,UAAM,EAAE,WAAW,IAAI,IAAI;AAC3B,QAAI,CAAC,YAAY;AACf,aAAO,EAAE,QAAQ,KAAK,MAAM,EAAE,OAAO,qBAAqB,EAAE;AAAA,IAC9D;AAEA,UAAM,YAAY,iBAAiB;AACnC,UAAM,UAAU,YAAY,SAAS;AACrC,
UAAM,SAAS,QAAQ,KAAK,CAAC,MAAM,EAAE,eAAe,UAAU;AAC9D,QAAI,CAAC,QAAQ;AACX,aAAO,EAAE,QAAQ,KAAK,MAAM,EAAE,OAAO,mBAAmB,EAAE;AAAA,IAC5D;AAEA,UAAM,aAAa,GAAG,SAAS,IAAI,OAAO,SAAS;AACnD,UAAM,SAAS,eAAe,KAAK,IAAI,UAAU;AACjD,UAAM,YAAY,OAAO,OAAO,CAAC,KAAK,MAAM,MAAM,EAAE,KAAK,CAAC;AAC1D,UAAM,iBAAiB,OAAO,OAAO,CAAC,KAAK,MAAM,MAAM,EAAE,UAAU,CAAC;AAEpE,WAAO,EAAE,MAAM,EAAE,YAAY,QAAQ,WAAW,eAAe,EAAE;AAAA,EACnE;AAGA,iBAAe,cAAc,KAA2C;AACtE,UAAM,EAAE,WAAW,IAAI,IAAI;AAC3B,QAAI,CAAC,YAAY;AACf,aAAO,EAAE,QAAQ,KAAK,MAAM,EAAE,OAAO,qBAAqB,EAAE;AAAA,IAC9D;AAEA,UAAM,YAAY,iBAAiB;AACnC,UAAM,UAAU,YAAY,SAAS;AACrC,UAAM,SAAS,QAAQ,KAAK,CAAC,MAAM,EAAE,eAAe,UAAU;AAC9D,QAAI,CAAC,QAAQ;AACX,aAAO,EAAE,QAAQ,KAAK,MAAM,EAAE,OAAO,mBAAmB,EAAE;AAAA,IAC5D;AAEA,UAAM,aAAa,GAAG,SAAS,IAAI,OAAO,SAAS;AACnD,UAAM,SAAS,cAAc,KAAK,IAAI,UAAU;AAEhD,WAAO,EAAE,MAAM,EAAE,YAAY,GAAG,OAAO,EAAE;AAAA,EAC3C;AAEA,SAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF;AACF;AAaO,SAAS,2BAA2B,MAAwB;AACjE,QAAM,EAAE,SAAS,IAAI;AAGrB,iBAAe,wBAAgD;AAC7D,UAAM,MAAM,iBAAiB,QAAQ;AACrC,WAAO,EAAE,MAAM,EAAE,KAAK,IAAI,OAAO,OAAO,MAAM,aAAaA,OAAK,QAAQ,UAAU,SAAS,EAAE,EAAE;AAAA,EACjG;AAGA,iBAAe,sBAAsB,KAA2C;AAC9E,UAAM,EAAE,IAAI,IAAI,IAAI;AACpB,uBAAmB,UAAU,EAAE,KAAK,OAAO,OAAU,CAAC;AACtD,WAAO,EAAE,MAAM,EAAE,KAAK,OAAO,KAAK,EAAE;AAAA,EACtC;AAEA,SAAO,EAAE,uBAAuB,sBAAsB;AACxD;;;AC5EO,IAAM,iBAAN,MAAM,gBAAe;AAAA,EACT;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACT,WAA0B;AAAA,EAC1B,eAA8B;AAAA,EAEtC,YAAY,SAAgC;AAC1C,SAAK,YAAY,QAAQ,UAAU,QAAQ,QAAQ,EAAE;AACrD,SAAK,SAAS,QAAQ;AACtB,SAAK,YAAY,QAAQ;AACzB,SAAK,sBAAsB,QAAQ;AACnC,SAAK,UAAU,QAAQ,SAAS,WAAW;AAAA,EAC7C;AAAA;AAAA,EAGA,OAAe,UAAU,OAAuB;AAC9C,QAAI,OAAO;AACX,aAAS,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK;AACrC,cAAS,QAAQ,KAAK,OAAO,MAAM,WAAW,CAAC,IAAK;AAAA,IACtD;AACA,WAAO,KAAK,IAAI,IAAI,EAAE,SAAS,EAAE,EAAE,SAAS,GAAG,GAAG,EAAE,MAAM,GAAG,CAAC;AAAA,EAChE;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,QAAQ,MAAoD;AAChE,UAAM,MAAM,MAAM,KAAK,QAAQ,QAAQ,YAAY;AAAA,MACjD,GAAG;AAAA,MACH,YAAY,KAAK;AAAA,MACjB,uBAAuB,KAAK;AAAA,IAC9B,CAAC;AACD,UAAM,WAAW;AACjB,QAAI,SAAS,W
AAW;AACtB,WAAK,WAAW,SAAS;AACzB,WAAK,eAAe,gBAAe,UAAU,SAAS,SAAS;AAAA,IACjE;AACA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,MAAM,UAAU,SAAiI;AAC/I,UAAM,MAAM,MAAM,KAAK,QAAQ,QAAQ,SAAS;AAAA,MAC9C,YAAY,KAAK;AAAA,MACjB,uBAAuB,KAAK;AAAA,MAC5B,SAAS,QAAQ,IAAI,CAAC,MAAM;AAC1B,cAAM,OAAO,OAAO,EAAE,YAAY,WAAW,KAAK,MAAM,EAAE,OAAO,IAAI,EAAE;AACvE,eAAO;AAAA,UACL,OAAO,EAAE;AAAA,UACT,IAAI,OAAO,EAAE,MAAM;AAAA,UACnB,YAAY,EAAE;AAAA,UACd,WAAW,EAAE;AAAA,UACb;AAAA,UACA,cAAc,KAAK,gBAAgB;AAAA,QACrC;AAAA,MACF,CAAC;AAAA,IACH,CAAC;AACD,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,MAAM,OAAO,OAAe,UAA6B,CAAC,GAAgC;AACxF,UAAM,YAAY,QAAQ,aAAa;AACvC,UAAM,aAAa,IAAI,gBAAgB;AACvC,UAAM,QAAQ,WAAW,MAAM,WAAW,MAAM,GAAG,SAAS;AAE5D,QAAI;AACF,YAAM,SAAS,IAAI,gBAAgB,EAAE,GAAG,MAAM,CAAC;AAC/C,UAAI,QAAQ,MAAO,QAAO,IAAI,SAAS,OAAO,QAAQ,KAAK,CAAC;AAC5D,UAAI,QAAQ,OAAQ,QAAO,IAAI,UAAU,QAAQ,OAAO,KAAK,GAAG,CAAC;AAEjE,YAAM,MAAM,MAAM,KAAK,QAAQ,GAAG,KAAK,SAAS,WAAW,MAAM,IAAI;AAAA,QACnE,QAAQ;AAAA,QACR,SAAS,KAAK,QAAQ;AAAA,QACtB,QAAQ,WAAW;AAAA,MACrB,CAAC;AAED,UAAI,CAAC,IAAI,IAAI;AACX,cAAM,IAAI,MAAM,uBAAuB,IAAI,MAAM,IAAI,IAAI,UAAU,EAAE;AAAA,MACvE;AAEA,aAAQ,MAAM,IAAI,KAAK;AAAA,IACzB,UAAE;AACA,mBAAa,KAAK;AAAA,IACpB;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,SAAsC;AAC1C,UAAM,aAAa,IAAI,gBAAgB;AACvC,UAAM,QAAQ,WAAW,MAAM,WAAW,MAAM,GAAG,sBAAsB;AAEzE,QAAI;AACF,YAAM,MAAM,MAAM,KAAK,QAAQ,GAAG,KAAK,SAAS,WAAW;AAAA,QACzD,QAAQ;AAAA,QACR,SAAS,KAAK,QAAQ;AAAA,QACtB,QAAQ,WAAW;AAAA,MACrB,CAAC;AAED,UAAI,CAAC,IAAI,IAAI;AACX,cAAM,IAAI,MAAM,wBAAwB,IAAI,MAAM,IAAI,IAAI,UAAU,EAAE;AAAA,MACxE;AAEA,YAAM,OAAQ,MAAM,IAAI,KAAK;AAQ7B,UAAI,KAAK,kBAAkB,KAAK,mBAAmB,KAAK,cAAc;AACpE,YAAI;AACF,gBAAM,KAAK,QAAQ,EAAE,YAAY,KAAK,UAAU,CAAC;AAAA,QACnD,QAAQ;AAAA,QAER;AAAA,MACF;AAEA,aAAO;AAAA,IACT,UAAE;AACA,mBAAa,KAAK;AAAA,IACpB;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,YAAyC;AAC7C,UAAM,MAAM,MAAM,KAAK,QAAQ,OAAO,SAAS;AAC/C,WAAO;AAAA,EACT;AAAA,EAEA,MAAM,sBAA6D;AACjE,UAAM,MAAM,MAAM,KAAK,QAAQ,OAAO,oBAAoB;AAC1D,WAAO;AAAA,EACT;AAAA,EAEA,MAAM,wBAAiE;AACrE,UAAM,MAAM,MAAM,KAAK,QAAQ,OAAO,s
BAAsB;AAC5D,WAAO;AAAA,EACT;AAAA,EAEA,MAAM,gBAA6B,MAAc,OAAgC,CAAC,GAAe;AAC/F,UAAM,MAAM,MAAM,KAAK,QAAQ,QAAQ,qBAAqB,EAAE,MAAM,KAAK,CAAC;AAC1E,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAMA,cAA6B;AAC3B,WAAO,KAAK;AAAA,EACd;AAAA,EAEA,iBAAgC;AAC9B,QAAI,CAAC,KAAK,SAAU,QAAO;AAC3B,WAAO,GAAG,KAAK,SAAS;AAAA,EAC1B;AAAA,EAEA,MAAM,iBAAkC;AACtC,UAAM,SAAS,MAAM,KAAK,QAAQ,QAAQ,aAAa;AACvD,SAAK,WAAW,OAAO;AACvB,SAAK,eAAe,gBAAe,UAAU,OAAO,KAAK;AACzD,WAAO,OAAO;AAAA,EAChB;AAAA;AAAA;AAAA;AAAA,EAMQ,UAAkC;AACxC,WAAO;AAAA,MACL,iBAAiB,UAAU,KAAK,MAAM;AAAA,MACtC,gBAAgB;AAAA,IAClB;AAAA,EACF;AAAA,EAEA,MAAc,QAAQ,QAAgBC,QAAc,MAAkC;AACpF,UAAM,MAAM,MAAM,KAAK,QAAQ,GAAG,KAAK,SAAS,GAAGA,MAAI,IAAI;AAAA,MACzD;AAAA,MACA,SAAS,KAAK,QAAQ;AAAA,MACtB,MAAM,SAAS,SAAY,KAAK,UAAU,IAAI,IAAI;AAAA,IACpD,CAAC;AAED,QAAI,CAAC,IAAI,IAAI;AACX,YAAM,OAAO,MAAM,IAAI,KAAK,EAAE,MAAM,MAAM,EAAE;AAC5C,YAAM,IAAI,MAAM,qBAAqB,MAAM,IAAIA,MAAI,YAAY,IAAI,MAAM,IAAI,IAAI,EAAE;AAAA,IACrF;AAEA,WAAO,IAAI,KAAK;AAAA,EAClB;AACF;;;AC7QA,IAAM,kBAAkB;AACxB,IAAM,mBAAmB;AAMzB,SAAS,6BAA6B,UAAiC;AACrE,QAAM,SAAS,eAAgC,uBAAuB,UAAU,iBAAiB,gBAAgB,CAAC;AAClH,SAAO,QAAQ,iBAAiB,KAAK,KAAK;AAC5C;AAkBO,SAAS,mBAAmB,MAAuB;AACxD,QAAM,EAAE,UAAU,WAAW,OAAO,IAAI;AAQxC,iBAAe,cAAc,KAA2C;AACtE,UAAM,EAAE,KAAK,QAAQ,IAAI,IAAI;AAE7B,QAAI,CAAC,OAAO,CAAC,SAAS;AACpB,aAAO;AAAA,QACL,QAAQ;AAAA,QACR,MAAM,EAAE,OAAO,kBAAkB,SAAS,oCAAoC;AAAA,MAChF;AAAA,IACF;AAGA,QAAI;AACF,UAAI,IAAI,GAAG;AAAA,IACb,QAAQ;AACN,aAAO;AAAA,QACL,QAAQ;AAAA,QACR,MAAM,EAAE,OAAO,eAAe,SAAS,qBAAqB;AAAA,MAC9D;AAAA,IACF;AAGA,UAAM,SAAS,IAAI,eAAe;AAAA,MAChC,WAAW;AAAA,MACX,QAAQ;AAAA,MACR;AAAA,MACA,qBAAqB;AAAA,IACvB,CAAC;AAED,QAAI;AACF,YAAM,OAAO,OAAO;AAAA,IACtB,SAAS,KAAK;AACZ,aAAO;AAAA,QACL,QAAQ;AAAA,QACR,MAAM;AAAA,UACJ,OAAO;AAAA,UACP,SAAS,qCAAsC,IAAc,OAAO;AAAA,QACtE;AAAA,MACF;AAAA,IACF;AAGA,qBAAiB,UAAU;AAAA,MACzB,SAAS;AAAA,MACT,YAAY;AAAA,IACd,CAAC;AACD,gBAAY,UAAU,qBAAqB,OAAO;AAElD,UAAM,SAAS,iBAAiB,QAAQ;AACxC,WAAO,EAAE,MAAM,EAAE,WAAW,MAAM,MAAM,OAAO,KAAK,EAAE;AAAA,EACxD;AAOA,iBAAe,iBAAiB,MAA4C;AAC1E,qBAAiB,UAAU,EAAE,SAAS,MAAM,CAAC;AAE7C
,WAAO,EAAE,MAAM,EAAE,WAAW,MAAM,EAAE;AAAA,EACtC;AAOA,iBAAe,aAAa,MAA4C;AACtE,UAAM,SAAS,iBAAiB,QAAQ;AACxC,UAAM,SAAS,KAAK,cAAc;AAClC,UAAM,UAAU,YAAY,QAAQ;AACpC,UAAM,YAAY,QAAQ,QAAQ,mBAAmB,CAAC;AACtD,UAAM,0BAA0B,sBAAsB;AACtD,UAAM,2BAA2B,6BAA6B,QAAQ;AACtE,QAAI,wBAAuC;AAE3C,QAAI,UAAU;AACd,QAAI;AAEJ,QAAI,UAAU,OAAO,KAAK,SAAS;AACjC,UAAI;AACF,cAAM,SAAS,MAAM,OAAO,OAAO;AACnC,kBAAU;AACV,gCAAwB,OAAO,iBAAiB,KAAK,KAAK;AAAA,MAC5D,SAAS,KAAK;AACZ,sBAAe,IAAc;AAAA,MAC/B;AAAA,IACF;AAEA,QAAI,eAAe;AACnB,QAAI,kBAAkB;AACtB,QAAI;AACF,qBAAe,aAAa;AAC5B,wBAAkB,kBAAkB;AAAA,IACtC,QAAQ;AAAA,IAER;AAEA,QAAI,mBAAsF;AAC1F,QAAI,UAAU,OAAO,KAAK,SAAS;AACjC,UAAI;AACF,2BAAmB,MAAM,OAAO,oBAAoB;AAAA,MACtD,QAAQ;AACN,2BAAmB;AAAA,MACrB;AAAA,IACF;AAEA,WAAO;AAAA,MACL,MAAM;AAAA,QACJ,SAAS,OAAO,KAAK;AAAA,QACrB,YAAY,OAAO,KAAK,cAAc;AAAA,QACtC,aAAa;AAAA,QACb,SAAS,QAAQ,mBAAmB,KAAK;AAAA,QACzC;AAAA,QACA,cAAc;AAAA,QACd,oBAAoB;AAAA,QACpB,mBAAmB;AAAA,QACnB,YAAY;AAAA,QACZ,iBAAiB,iBAAiB;AAAA,QAClC,4BAA4B;AAAA,QAC5B,6BAA6B;AAAA,QAC7B,yBAAyB;AAAA,QACzB,yBACE,OAAO,KAAK,WACZ,QAAQ,uBAAuB,KAC/B,QAAQ,qBAAqB,KAC7B,0BAA0B;AAAA,QAC5B,sBAAsB,kBAAkB,aAAa;AAAA,QACrD,gBAAgB,kBAAkB,kBAAkB;AAAA,QACpD,uBAAuB,kBAAkB,cAAc;AAAA,QACvD,+BAA+B,kBAAkB,sBAAsB;AAAA,QACvE,2BAA2B,kBAAkB,kBAAkB;AAAA,QAC/D,yBAAyB,kBAAkB,gBAAgB,CAAC;AAAA,QAC5D,qBAAqB,kBAAkB,YAAY,CAAC;AAAA,QACpD,gBAAgB;AAAA,QAChB,uBAAuB;AAAA,QACvB,WAAW,QAAQ,YAAY,KAAK;AAAA,QACpC,cAAc,QAAQ,eAAe,KAAK;AAAA,MAC5C;AAAA,IACF;AAAA,EACF;AAGA,iBAAe,eAAe,MAA4C;AACxE,UAAM,QAAQ,iBAAiB,SAAS;AACxC,WAAO,EAAE,MAAM,EAAE,UAAU,MAAM,EAAE;AAAA,EACrC;AAGA,iBAAe,kBAAkB,MAA4C;AAC3E,UAAM,QAAQ,kBAAkB;AAChC,WAAO,EAAE,MAAM,EAAE,SAAS,MAAM,EAAE;AAAA,EACpC;AAGA,iBAAe,oBAAoB,MAA4C;AAC7E,UAAM,EAAE,cAAc,IAAI,MAAM,OAAO,oBAAmB;AAC1D,WAAO,KAAK,2BAA2B,yBAAyB;AAChE,UAAM,SAAS,cAAc,QAAQ;AACrC,QAAI,CAAC,OAAO,SAAS;AACnB,aAAO,MAAM,4BAA4B,yBAAyB,EAAE,OAAO,OAAO,MAAM,CAAC;AACzF,aAAO,EAAE,QAAQ,KAAK,MAAM,EAAE,OAAO,OAAO,MAAM,EAAE;AAAA,IACtD;AACA,WAAO,KAAK,8BAA8B,2BAA2B;AAAA,MACnE,YAAY,OAAO;AAAA,MACnB,SAAS,OAAO;AAAA,IAClB,CAAC;AAED,QAAI,OAAO,cAAc,K
AAK,cAAc,GAAG;AAC7C,YAAM,UAAU,YAAY,QAAQ;AACpC,YAAM,SAAS,QAAQ,mBAAmB;AAC1C,UAAI,QAAQ;AACV,aAAK,cAAc,IAAI,eAAe;AAAA,UACpC,WAAW,OAAO;AAAA,UAClB;AAAA,UACA;AAAA,UACA,qBAAqB;AAAA,QACvB,CAAC,CAAC;AAAA,MACJ;AAAA,IACF;AACA,WAAO,EAAE,MAAM,OAAO;AAAA,EACxB;AAGA,iBAAe,qBAAqB,MAA4C;AAC9E,UAAM,SAAS,KAAK,cAAc;AAClC,QAAI,CAAC,QAAQ;AACX,aAAO;AAAA,QACL,QAAQ;AAAA,QACR,MAAM,EAAE,OAAO,0BAA0B;AAAA,MAC3C;AAAA,IACF;AACA,QAAI;AACF,YAAM,QAAQ,MAAM,OAAO,eAAe;AAC1C,aAAO,KAAK,+BAA+B,0BAA0B;AACrE,aAAO,EAAE,MAAM,EAAE,MAAM,EAAE;AAAA,IAC3B,SAAS,KAAK;AACZ,YAAM,UAAU,eAAe,QAAQ,IAAI,UAAU,OAAO,GAAG;AAC/D,aAAO,MAAM,qCAAqC,6BAA6B,EAAE,OAAO,QAAQ,CAAC;AACjG,aAAO;AAAA,QACL,QAAQ;AAAA,QACR,MAAM,EAAE,OAAO,QAAQ;AAAA,MACzB;AAAA,IACF;AAAA,EACF;AAEA,SAAO,EAAE,eAAe,kBAAkB,cAAc,gBAAgB,mBAAmB,qBAAqB,qBAAqB;AACvI;;;ACpQO,SAAS,yBAAyB,MAA6B;AACpE,iBAAe,oBAA6C;AAC1D,UAAM,SAAS,KAAK,cAAc;AAClC,QAAI,CAAC,QAAQ;AACX,YAAM,IAAI,MAAM,4BAA4B;AAAA,IAC9C;AACA,WAAO;AAAA,EACT;AAEA,iBAAe,aAAa,MAA4C;AACtE,QAAI;AACF,YAAM,SAAS,MAAM,kBAAkB;AACvC,aAAO,EAAE,MAAM,MAAM,OAAO,oBAAoB,EAAE;AAAA,IACpD,SAAS,OAAO;AACd,aAAO;AAAA,QACL,QAAQ;AAAA,QACR,MAAM,EAAE,OAAO,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK,EAAE;AAAA,MACxE;AAAA,IACF;AAAA,EACF;AAEA,iBAAe,aAAa,KAA2C;AACrE,QAAI;AACF,YAAM,SAAS,MAAM,kBAAkB;AACvC,YAAM,QAAQ,IAAI,MAAM;AACxB,UAAI,CAAC,OAAO;AACV,eAAO,EAAE,QAAQ,KAAK,MAAM,EAAE,OAAO,sBAAsB,EAAE;AAAA,MAC/D;AACA,YAAM,UAAU,IAAI,MAAM;AAC1B,YAAM,QAAQ,IAAI,MAAM,QAAQ,OAAO,IAAI,MAAM,KAAK,IAAI;AAC1D,aAAO;AAAA,QACL,MAAM,MAAM,OAAO,gBAAgB,qBAAqB,EAAE,OAAO,SAAS,MAAM,CAAC;AAAA,MACnF;AAAA,IACF,SAAS,OAAO;AACd,aAAO;AAAA,QACL,QAAQ;AAAA,QACR,MAAM,EAAE,OAAO,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK,EAAE;AAAA,MACxE;AAAA,IACF;AAAA,EACF;AAEA,iBAAe,eAAe,MAA4C;AACxE,QAAI;AACF,YAAM,SAAS,MAAM,kBAAkB;AACvC,aAAO;AAAA,QACL,MAAM,MAAM,OAAO,gBAAgB,uBAAuB,CAAC,CAAC;AAAA,MAC9D;AAAA,IACF,SAAS,OAAO;AACd,aAAO;AAAA,QACL,QAAQ;AAAA,QACR,MAAM,EAAE,OAAO,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK,EAAE;AAAA,MACxE;AAAA,IACF;AAAA,EACF;AAEA,iBAAe,cAAc,KAA2C;AACtE,QAAI;AACF,YAAM,SAAS,MAAM,kBAAkB;AACvC,YAAM,UAAU,IAAI,MAAM;AAC1B,UAA
I,CAAC,SAAS;AACZ,eAAO,EAAE,QAAQ,KAAK,MAAM,EAAE,OAAO,4BAA4B,EAAE;AAAA,MACrE;AACA,YAAM,gBAAgB,IAAI,MAAM,mBAAmB;AACnD,aAAO;AAAA,QACL,MAAM,MAAM,OAAO,gBAAgB,sBAAsB,EAAE,SAAS,gBAAgB,cAAc,CAAC;AAAA,MACrG;AAAA,IACF,SAAS,OAAO;AACd,aAAO;AAAA,QACL,QAAQ;AAAA,QACR,MAAM,EAAE,OAAO,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK,EAAE;AAAA,MACxE;AAAA,IACF;AAAA,EACF;AAEA,iBAAe,eAAe,MAA4C;AACxE,QAAI;AACF,YAAM,SAAS,MAAM,kBAAkB;AACvC,aAAO,EAAE,MAAM,MAAM,OAAO,sBAAsB,EAAE;AAAA,IACtD,SAAS,OAAO;AACd,aAAO;AAAA,QACL,QAAQ;AAAA,QACR,MAAM,EAAE,OAAO,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK,EAAE;AAAA,MACxE;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF;AACF;;;ACvEO,IAAM,eAAe,iBAAE,OAAO;AAAA,EACnC,YAAY,iBAAE,OAAO;AAAA,EACrB,OAAO,iBAAE,OAAO,EAAE,SAAS;AAAA,EAC3B,QAAQ,iBAAE,OAAO,EAAE,SAAS;AAAA,EAC5B,YAAY,iBAAE,OAAO,EAAE,SAAS;AAClC,CAAC;AAEM,IAAM,iBAAiB,iBAAE,OAAO,EAAE,YAAY,iBAAE,OAAO,EAAE,CAAC;AA0B1D,SAAS,+BAA+B,MAA4B;AACzE,QAAM;AAAA,IACJ;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,IAAI;AAGJ,iBAAe,eAAe,KAAgD;AAC5E,iBAAa,eAAe;AAC5B,UAAM,EAAE,YAAY,OAAO,QAAQ,WAAW,IAAI,aAAa,MAAM,IAAI,IAAI;AAC7E,UAAM,oBAAoB,eAAc,oBAAI,KAAK,GAAE,YAAY;AAC/D,aAAS,SAAS,YAAY,EAAE,YAAY,mBAAmB,OAAO,CAAC;AACvE,WAAO,yBAAyB,SAAS,QAAQ;AAGjD,UAAM,MAAM,aAAa;AACzB,UAAM,eAAe,KAAK,MAAM,IAAI,KAAK,iBAAiB,EAAE,QAAQ,IAAI,GAAI;AAC5E,kBAAc;AAAA,MACZ,IAAI;AAAA,MACJ,OAAO,SAAS;AAAA,MAChB,MAAM;AAAA,MACN,cAAc,QAAQ,IAAI;AAAA,MAC1B,QAAQ,UAAU;AAAA,MAClB,YAAY;AAAA,MACZ,YAAY;AAAA,MACZ,QAAQ;AAAA,MACR,YAAY;AAAA,IACd,CAAC;AAED,kBAAc,YAAY,EAAE,UAAU,MAAM,QAAQ,SAAS,CAAC;AAG9D,eAAW,iBAAiB,UAAU;AAEtC,WAAO,KAAK,UAAU,oBAAoB,sBAAsB,EAAE,YAAY,QAAQ,YAAY,cAAc,KAAK,CAAC;AAEtH,WAAO,UAAU;AAAA,MACf,QAAQ;AAAA,MACR,MAAM;AAAA,MACN,OAAO;AAAA,MACP,SAAS,SAAS,WAAW,MAAM,KAAK;AAAA,MACxC,MAAM,aAAa,UAAU;AAAA,MAC7B,UAAU,EAAE,WAAW,YAAY,OAAO,SAAS,eAAe,OAAO;AAAA,IAC3E,GAAG,WAAW,OAAO;AAErB,WAAO,EAAE,MAAM,EAAE,IAAI,MAAM,UAAU,SAAS,SAAS,EAAE;AAAA,EAC3D;AAGA,iBAAe,iBAAiB,KAAgD;AAC9E,UAAM,EAAE,WAAW,IAAI,eAAe,MAAM,IAAI,I
AAI;AACpD,aAAS,WAAW,UAAU;AAG9B,UAAM,YAAY,GAAG,QAAQ;AAC7B,sBAAkB,WAAW,yBAAyB,UAAU;AAGhE,iBAAa,YAAY,aAAa,CAAC;AAGvC,mBAAe,OAAO,UAAU;AAChC,kBAAc,aAAa,UAAU;AACrC,eAAW,aAAa,UAAU;AAClC,WAAO,yBAAyB,SAAS,QAAQ;AACjD,WAAO,KAAK,UAAU,sBAAsB,wBAAwB,EAAE,WAAW,CAAC;AAElF,WAAO,UAAU;AAAA,MACf,QAAQ;AAAA,MACR,MAAM;AAAA,MACN,OAAO;AAAA,MACP,MAAM,aAAa,UAAU;AAAA,MAC7B,UAAU,EAAE,WAAW,WAAW;AAAA,IACpC,GAAG,WAAW,OAAO;AAErB,WAAO,EAAE,MAAM,EAAE,IAAI,MAAM,UAAU,SAAS,SAAS,EAAE;AAAA,EAC3D;AAEA,SAAO,EAAE,gBAAgB,iBAAiB;AAC5C;;;ACnIA,OAAOC,UAAQ;AACf,OAAOC,YAAU;;;AC4BV,IAAM,gBAAgB;AAAA,EAC3B;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF;AAEA,IAAM,iBAAiB,cAAc,KAAK,IAAI;AAO9C,SAAS,WAAW,KAA6C;AAC/D,SAAO;AAAA,IACL,IAAI,IAAI;AAAA,IACR,UAAU,IAAI;AAAA,IACd,YAAY,IAAI;AAAA,IAChB,YAAa,IAAI,cAAyB,iBAAiB;AAAA,IAC3D,aAAa,IAAI;AAAA,EACnB;AACF;AAWO,SAAS,iBAAiB,MAAuC;AACtE,QAAM,KAAK,YAAY;AAEvB,KAAG;AAAA,IACD;AAAA;AAAA;AAAA;AAAA;AAAA,EAKF,EAAE;AAAA,IACA,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK,cAAc,iBAAiB;AAAA,IACpC,KAAK;AAAA,EACP;AAEA,QAAM,MAAM;AAAA,IACV,GAAG,QAAQ,UAAU,cAAc,gCAAgC,EAAE,IAAI,KAAK,EAAE;AAAA,EAClF;AAKA,SAAO;AACT;AA6BO,SAAS,2BAA2B,SAAiB,WAA4B;AACtF,QAAM,KAAK,YAAY;AACvB,QAAM,MAAM,GAAG;AAAA,IACb;AAAA,EACF,EAAE,IAAI,SAAS,SAAS;AACxB,SAAO,QAAQ;AACjB;AAKO,SAAS,mBAAmB,SAAyB;AAC1D,QAAM,KAAK,YAAY;AAEvB,QAAM,MAAM,GAAG;AAAA,IACb;AAAA,EACF,EAAE,IAAI,OAAO;AAEb,SAAO,IAAI;AACb;;;AD/GA,IAAM,sBAAsB;AAc5B,eAAsB,qBAAqB,KAA2C;AACpF,QAAM,QAAQ,IAAI,MAAM,QAAQ,OAAO,IAAI,MAAM,KAAK,IAAI;AAC1D,QAAM,SAAS,IAAI,MAAM,SAAS,OAAO,IAAI,MAAM,MAAM,IAAI;AAE7D,QAAM,EAAE,OAAO,YAAY,MAAM,IAAI,wBAAwB;AAAA,IAC3D,UAAU,aAAa,IAAI,MAAM,MAAM;AAAA,IACvC;AAAA,IACA;AAAA,EACF,CAAC;AAED,SAAO,EAAE,QAAQ,KAAK,MAAM,EAAE,YAAY,MAAM,EAAE;AACpD;AAOA,eAAsB,mBAAmB,KAA2C;AAClF,QAAM,YAAY,aAAa,IAAI,OAAO,EAAE;AAC5C,MAAI,CAAC,WAAW;AACd,WAAO,EAAE,QAAQ,KAAK,MAAM,EAAE,OAAO,cAAc,IAAI,OAAO,EAAE,GAAG,EAAE;AAAA,EACvE;AACA,SAAO,EAAE,QAAQ,KAAK,MAAM,EAAE,UAAU,EAAE;AAC5C;AAOA,IAAM,wBAAwB,IAAI,IAAY,sBAAsB;AASpE,eAAsB,sBAAsB,KAA2C;AACrF,QAAM,KAAK,IAAI,OAAO;AACtB,QAAM,OAAO,IAAI;
AACjB,MAAI,CAAC,KAAM,QAAO,EAAE,QAAQ,KAAK,MAAM,EAAE,OAAO,wBAAwB,EAAE;AAG1E,QAAM,EAAE,QAAQ,OAAO,WAAW,YAAY,YAAY,SAAS,IAAI;AAMvE,MAAI,WAAW,QAAW;AACxB,QAAI,OAAO,WAAW,YAAY,CAAC,sBAAsB,IAAI,MAAM,GAAG;AACpE,aAAO;AAAA,QACL,QAAQ;AAAA,QACR,MAAM;AAAA,UACJ,OACE,mBAAmB,OAAO,MAAM,CAAC,iCAC9B,CAAC,GAAG,qBAAqB,EAAE,KAAK,IAAI,CAAC;AAAA,QAE5C;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAEA,QAAM,UAAU,gBAAgB,IAAI;AAAA,IAClC,GAAI,WAAW,SAAY,EAAE,OAAyB,IAAI,CAAC;AAAA,IAC3D,GAAI,UAAU,SAAY,EAAE,MAAuB,IAAI,CAAC;AAAA,IACxD,GAAI,cAAc,SAAY,EAAE,UAA+B,IAAI,CAAC;AAAA,IACpE,GAAI,eAAe,SAAY,EAAE,WAAiC,IAAI,CAAC;AAAA,IACvE,GAAI,eAAe,SAAY,EAAE,WAAiC,IAAI,CAAC;AAAA,IACvE,GAAI,aAAa,SAAY,EAAE,SAAoC,IAAI,CAAC;AAAA,IACxE,YAAY,aAAa;AAAA,EAC3B,CAAC;AAED,MAAI,CAAC,QAAS,QAAO,EAAE,QAAQ,KAAK,MAAM,EAAE,OAAO,wBAAwB,EAAE,GAAG,EAAE;AAClF,SAAO,EAAE,QAAQ,KAAK,MAAM,EAAE,WAAW,QAAQ,EAAE;AACrD;AAOA,eAAsB,uBAAuB,KAA2C;AACtF,QAAM,SAAS,IAAI,MAAM,UAAU;AACnC,QAAM,QAAQ,IAAI,MAAM,QAAQ,OAAO,IAAI,MAAM,KAAK,IAAI;AAC1D,QAAM,SAAS,IAAI,MAAM,SAAS,OAAO,IAAI,MAAM,MAAM,IAAI;AAE7D,QAAM,EAAE,OAAO,SAAS,MAAM,IAAI,0BAA0B,EAAE,QAAQ,OAAO,OAAO,CAAC;AAErF,SAAO,EAAE,QAAQ,KAAK,MAAM,EAAE,SAAS,MAAM,EAAE;AACjD;AAQA,eAAsB,qBAAqB,KAA2C;AACpF,QAAM,WAAW,IAAI,OAAO;AAE5B,QAAM,SAAS,eAAe,QAAQ,KAAK,qBAAqB,QAAQ;AAExE,MAAI,CAAC,QAAQ;AACX,WAAO,EAAE,QAAQ,KAAK,MAAM,EAAE,OAAO,cAAc,QAAQ,GAAG,EAAE;AAAA,EAClE;AAEA,QAAM,UAAU,oBAAoB,OAAO,EAAE;AAC7C,QAAM,cAAc,mBAAmB,OAAO,EAAE;AAGhD,QAAM,iBAAiB,QAAQ,CAAC,GAAG;AACnC,QAAM,oBAA4C,CAAC;AACnD,MAAI,gBAAgB;AAClB,UAAM,UAAU,eAAe,MAAM,uBAAuB;AAC5D,QAAI,SAAS;AACX,iBAAW,QAAQ,QAAQ,CAAC,EAAE,MAAM,IAAI,GAAG;AACzC,cAAM,WAAW,KAAK,QAAQ,GAAG;AACjC,YAAI,WAAW,GAAG;AAChB,gBAAM,MAAM,KAAK,MAAM,GAAG,QAAQ,EAAE,KAAK;AACzC,gBAAM,MAAM,KAAK,MAAM,WAAW,CAAC,EAAE,KAAK;AAC1C,cAAI,OAAO,IAAK,mBAAkB,GAAG,IAAI;AAAA,QAC3C;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAEA,SAAO,EAAE,QAAQ,KAAK,MAAM,EAAE,GAAG,QAAQ,SAAS,aAAa,aAAa,kBAAkB,EAAE;AAClG;AAKA,eAAsB,sBAAsB,KAA2C;AACrF,QAAM,KAAK,IAAI,OAAO;AACtB,QAAM,UAAU,gBAAgB,EAAE;AAClC,MAAI,CAAC,QAAS,QAAO,EAAE,QAAQ,KAAK,MAAM,EAAE,OAAO,cAAc,EAAE,GAAG,E
AAE;AAExE,SAAO,EAAE,QAAQ,KAAK,MAAM,EAAE,SAAS,MAAM,GAAG,EAAE;AACpD;AAKA,eAAsB,wBAAwB,KAA2C;AACvF,QAAM,WAAW,IAAI,OAAO;AAC5B,QAAM,SAAS,yBAAyB,QAAQ;AAChD,MAAI,CAAC,OAAQ,QAAO,EAAE,QAAQ,KAAK,MAAM,EAAE,OAAO,cAAc,QAAQ,GAAG,EAAE;AAG7E,MAAI,kBAAkB,GAAG;AACvB,QAAI;AACF,oBAAc;AAAA,QACZ,YAAY;AAAA,QACZ,QAAQ,OAAO;AAAA,QACf,WAAW;AAAA,QACX,SAAS,KAAK,UAAU,EAAE,IAAI,OAAO,IAAI,MAAM,OAAO,KAAK,CAAC;AAAA,QAC5D,YAAY,iBAAiB;AAAA,QAC7B,YAAY,aAAa;AAAA,MAC3B,CAAC;AAAA,IACH,SAAS,KAAK;AAEZ,cAAQ,KAAK,wDAAwD,eAAe,QAAQ,IAAI,UAAU,GAAG;AAAA,IAC/G;AAAA,EACF;AAEA,SAAO,EAAE,QAAQ,KAAK,MAAM,EAAE,SAAS,MAAM,IAAI,OAAO,IAAI,MAAM,OAAO,KAAK,EAAE;AAClF;AAeO,SAAS,+BAA+B,MAAuB;AACpE,QAAM,EAAE,UAAU,OAAO,IAAI;AAE7B,SAAO,eAAe,mCAAmC,KAA2C;AAClG,UAAM,SAAS,MAAM,wBAAwB,GAAG;AAEhD,QAAK,OAAO,MAAkC,SAAS;AACrD,YAAM,SAAS,OAAO;AACtB,UAAI,OAAO,MAAM;AACf,cAAM,cAAcC,OAAK,QAAQ,UAAU,IAAI;AAC/C,cAAM,WAAWA,OAAK,QAAQ,aAAa,WAAW,UAAU,OAAO,IAAI;AAC3E,YAAI;AAAE,UAAAC,KAAG,OAAO,UAAU,EAAE,WAAW,MAAM,OAAO,KAAK,CAAC;AAAA,QAAG,SAAS,KAAK;AACzE,iBAAO,KAAK,UAAU,iBAAiB,oCAAoC,EAAE,MAAM,OAAO,MAAM,OAAO,OAAO,GAAG,EAAE,CAAC;AAAA,QACtH;AAEA,YAAI;AACF,gBAAM,EAAE,kBAAkB,IAAI,MAAM,OAAO,yBAA8B;AACzE,4BAAkB,aAAa,OAAO,MAAM,EAAE,QAAQ,KAAK,CAAC;AAAA,QAC9D,SAAS,KAAK;AACZ,iBAAO,KAAK,UAAU,iBAAiB,mCAAmC,EAAE,MAAM,OAAO,MAAM,OAAO,OAAO,GAAG,EAAE,CAAC;AAAA,QACrH;AAAA,MACF;AAAA,IACF;AACA,WAAO;AAAA,EACT;AACF;;;AEpNO,SAAS,aAAa,MAAoC;AAC/D,QAAM,EAAE,YAAY,WAAW,QAAQ,UAAU,cAAc,IAAI;AACnE,MAAI,aAAoC;AACxC,MAAI,kBAAiC;AAErC,iBAAe,kBAAiC;AAC9C,UAAM,SAAS,WAAW;AAC1B,UAAM,YAAY,OAAO,KAAK,YAAY,KAAK,KAAK;AACpD,UAAM,SAAS,YAAY,QAAQ,EAAE,mBAAmB,GAAG,KAAK,KAAK;AACrE,UAAM,gBAAgB,OAAO,KAAK,WAAW,aAAa,SACtD,GAAG,SAAS;AAAA,EAAK,MAAM,KACvB;AAEJ,QAAI,CAAC,eAAe;AAClB,UAAI,YAAY;AACd,eAAO,KAAK,UAAU,iBAAiB,4BAA4B;AAAA,UACjE,SAAS,OAAO,KAAK;AAAA,UACrB,gBAAgB,QAAQ,SAAS;AAAA,UACjC,aAAa,QAAQ,MAAM;AAAA,QAC7B,CAAC;AAAA,MACH;AACA,mBAAa;AACb,wBAAkB;AAClB;AAAA,IACF;AAEA,QAAI,cAAc,oBAAoB,cAAe;AAErD,UAAM,kBAAkB;AACxB,UAAM,eAAe;AACrB,iBAAa,IAAI,eAAe;AAAA,MAC9B,WAAW;AAAA,MACX,QAAQ;AAAA,MACR;AAAA,MACA,qBAAqB;AAAA
,IACvB,CAAC;AACD,sBAAkB;AAElB,WAAO,KAAK,UAAU,iBAAiB,gCAAgC,EAAE,YAAY,gBAAgB,CAAC;AAEtG,QAAI;AACF,YAAM,WAAW,QAAQ;AAAA,QACvB,YAAY;AAAA,QACZ,SAAS;AAAA,MACX,CAAC;AACD,aAAO,KAAK,UAAU,iBAAiB,kCAAkC;AAAA,IAC3E,SAAS,KAAK;AACZ,aAAO,KAAK,UAAU,iBAAiB,uDAAuD;AAAA,QAC5F,OAAQ,IAAc;AAAA,MACxB,CAAC;AAAA,IACH;AAEA,QAAI;AACF,YAAM,aAAa,iBAAiB,SAAS;AAC7C,UAAI,aAAa,GAAG;AAClB,eAAO,KAAK,UAAU,iBAAiB,cAAc,UAAU,+BAA+B;AAAA,MAChG;AAAA,IACF,SAAS,KAAK;AACZ,aAAO,MAAM,UAAU,iBAAiB,mBAAmB,EAAE,OAAQ,IAAc,QAAQ,CAAC;AAAA,IAC9F;AAAA,EACF;AAEA,SAAO;AAAA,IACL,eAAe,MAAM;AAAA,IACrB,eAAe,CAAC,WAAW;AAAE,mBAAa;AAAA,IAAQ;AAAA,IAClD;AAAA,IACA,kBAAkB,CAAC,iBAAiB;AAGlC,YAAM,kBAAkB,CAAC,QAAkB;AACzC,YAAI,IAAI,SAAS,GAAG;AAClB,iBAAO,MAAM,UAAU,uBAAuB,iBAAiB,IAAI,MAAM,8BAA8B,EAAE,IAAI,CAAC;AAAA,QAChH;AAAA,MACF;AAEA,mBAAa,SAAS;AAAA,QACpB,MAAM;AAAA,QACN,OAAO,CAAC,UAAU,QAAQ,OAAO;AAAA,QACjC,mBAAmB,MAAM,WAAW,QAAQ,KAAK,WAAW,aAAa,IAAI;AAAA,QAC7E,IAAI,YAAY;AACd,cAAI,CAAC,WAAW,QAAQ,KAAK,QAAS;AACtC,gBAAM,SAAS;AACf,cAAI,CAAC,OAAQ;AAEb,gBAAM,UAAU,YAAY;AAC5B,cAAI,QAAQ,WAAW,EAAG;AAE1B,cAAI;AACF,mBAAO,KAAK,UAAU,iBAAiB,mBAAmB,EAAE,OAAO,QAAQ,OAAO,CAAC;AACnF,kBAAM,SAAS,MAAM,OAAO,UAAU,OAAO;AAC7C,kBAAM,MAAM,aAAa;AAGzB,kBAAM,YAAY,IAAI,IAAI,OAAO,OAAO,IAAI,CAAC,MAAM,EAAE,EAAE,CAAC;AACxD,kBAAM,cAAc,QAAQ,OAAO,CAAC,MAAM,CAAC,UAAU,IAAI,OAAO,EAAE,MAAM,CAAC,CAAC;AAC1E,kBAAM,UAAU,YAAY,IAAI,CAAC,MAAM,EAAE,EAAE;AAC3C,gBAAI,QAAQ,SAAS,GAAG;AACtB,uBAAS,SAAS,GAAG;AACrB,mCAAqB,aAAa,GAAG;AAAA,YACvC;AAGA,gBAAI,OAAO,OAAO,SAAS,GAAG;AAC5B,oBAAM,kBAAkB,QACrB,OAAO,CAAC,MAAM,UAAU,IAAI,OAAO,EAAE,MAAM,CAAC,CAAC,EAC7C,IAAI,CAAC,MAAM,EAAE,EAAE;AAClB,oBAAM,eAAe,oBAAoB,iBAAiB,GAAG;AAE7D,qBAAO,KAAK,UAAU,iBAAiB,YAAY,gBAAgB,MAAM,YAAY;AAAA,gBACnF,QAAQ,OAAO,OAAO,MAAM,GAAG,CAAC;AAAA,cAClC,CAAC;AAED,8BAAgB,YAAY;AAAA,YAC9B;AAEA,qBAAS;AACT,mBAAO,KAAK,UAAU,oBAAoB,yBAAyB;AAAA,cACjE,QAAQ,OAAO;AAAA,cAAQ,SAAS,OAAO;AAAA,cAAS,QAAQ,OAAO,OAAO;AAAA,cAAQ,OAAO,QAAQ;AAAA,YAC/F,CAAC;AAAA,UACH,SAAS,KAAK;AAEZ,gBAAI;AACF,oBAAM,MAAM,aAAa;AACzB,oBAAM,SAAS,QAAQ,IAAI,CAAC,MAAM,EAAE,EAAE;AACtC,oBAA
M,eAAe,oBAAoB,QAAQ,GAAG;AAEpD,qBAAO,KAAK,UAAU,iBAAiB,0BAA0B,OAAO,MAAM,YAAY;AAAA,gBACxF,OAAQ,IAAc;AAAA,cACxB,CAAC;AAED,8BAAgB,YAAY;AAAA,YAC9B,QAAQ;AAAA,YAAmC;AAC3C,mBAAO,MAAM,UAAU,iBAAiB,uBAAuB,EAAE,OAAQ,IAAc,QAAQ,CAAC;AAAA,UAClG;AAAA,QACF;AAAA,MACF,CAAC;AAAA,IACH;AAAA,EACF;AACF;;;ACjMA,SAAS,kBAAkB;AAI3B,IAAM,4BAA4B;AAGlC,IAAM,kBAAkB,IAAI,KAAK;AAc1B,IAAM,kBAAN,MAAsB;AAAA,EACnB,UAAU,oBAAI,IAA2B;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAajD,OAAO,MAAiD;AAEtD,SAAK,QAAQ;AAGb,eAAW,SAAS,KAAK,QAAQ,OAAO,GAAG;AACzC,UAAI,MAAM,SAAS,QAAQ,MAAM,WAAW,WAAW;AACrD,eAAO,EAAE,OAAO,MAAM,OAAO,OAAO,MAAM;AAAA,MAC5C;AAAA,IACF;AAGA,UAAM,eAAe,CAAC,GAAG,KAAK,QAAQ,OAAO,CAAC,EAAE,OAAO,CAAC,MAAM,EAAE,WAAW,SAAS,EAAE;AACtF,QAAI,gBAAgB,2BAA2B;AAC7C,YAAM,IAAI,MAAM,0CAA0C,yBAAyB,GAAG;AAAA,IACxF;AAEA,UAAM,QAAQ,WAAW;AACzB,UAAM,MAAM,KAAK,IAAI;AACrB,SAAK,QAAQ,IAAI,OAAO;AAAA,MACtB;AAAA,MACA;AAAA,MACA,QAAQ;AAAA,MACR,SAAS;AAAA,MACT,SAAS;AAAA,IACX,CAAC;AACD,WAAO,EAAE,OAAO,OAAO,KAAK;AAAA,EAC9B;AAAA;AAAA;AAAA;AAAA,EAKA,OAAO,OAAe,MAA6E;AACjG,UAAM,QAAQ,KAAK,QAAQ,IAAI,KAAK;AACpC,QAAI,CAAC,MAAO;AAEZ,QAAI,KAAK,YAAY,OAAW,OAAM,UAAU,KAAK;AACrD,QAAI,KAAK,YAAY,OAAW,OAAM,UAAU,KAAK;AACrD,QAAI,KAAK,WAAW,OAAW,OAAM,SAAS,KAAK;AACnD,UAAM,UAAU,KAAK,IAAI;AAAA,EAC3B;AAAA;AAAA;AAAA;AAAA,EAKA,IAAI,OAA0C;AAC5C,WAAO,KAAK,QAAQ,IAAI,KAAK;AAAA,EAC/B;AAAA;AAAA;AAAA;AAAA,EAKA,sBAA+B;AAC7B,eAAW,SAAS,KAAK,QAAQ,OAAO,GAAG;AACzC,UAAI,MAAM,WAAW,UAAW,QAAO;AAAA,IACzC;AACA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,UAAgB;AACd,UAAM,SAAS,KAAK,IAAI,IAAI;AAC5B,eAAW,CAAC,OAAO,KAAK,KAAK,KAAK,SAAS;AACzC,UAAI,MAAM,WAAW,aAAa,MAAM,UAAU,QAAQ;AACxD,aAAK,QAAQ,OAAO,KAAK;AAAA,MAC3B;AAAA,IACF;AAAA,EACF;AACF;AAEA,eAAsB,kBACpB,SACA,OACwB;AACxB,QAAM,QAAQ,QAAQ,IAAI,KAAK;AAC/B,MAAI,CAAC,OAAO;AACV,WAAO,EAAE,QAAQ,KAAK,MAAM,EAAE,OAAO,aAAa,SAAS,2BAA2B,EAAE;AAAA,EAC1F;AACA,SAAO,EAAE,MAAM,MAAM;AACvB;;;AChHA,IAAM,wBAAwB;AAKvB,IAAM,mBAAmB;AAAA,EAC9B;AAAA,EACA;AAAA,EACA;AACF;AAGA,IAAM,qBAAqB;AAAA,EACzB;AAAA,EAAS;AAAA,EAAQ;AAAA,EAAe;AAAA,EAAO;AAAA,EAAQ;AACjD;AAGA,
SAAS,sBAAsB,QAA4B;AACzD,SAAO,OAAO,OAAO,CAAC,MAAM;AAC1B,UAAM,OAAO,EAAE,YAAY;AAC3B,WAAO,mBAAmB,KAAK,CAAC,MAAM,KAAK,SAAS,CAAC,CAAC;AAAA,EACxD,CAAC;AACH;AAGO,SAAS,gBAAgB,QAA4B;AAC1D,SAAO,OAAO,OAAO,CAAC,MAAM;AAC1B,UAAM,OAAO,EAAE,YAAY;AAC3B,WAAO,CAAC,mBAAmB,KAAK,CAAC,MAAM,KAAK,SAAS,CAAC,CAAC;AAAA,EACzD,CAAC;AACH;AAEA,eAAsB,gBAAgB,KAA2C;AAC/E,QAAM,WAAW,IAAI,MAAM;AAC3B,QAAM,OAAO,IAAI,MAAM;AAEvB,MAAI,CAAC,UAAU;AACb,WAAO,EAAE,QAAQ,KAAK,MAAM,EAAE,OAAO,oCAAoC,EAAE;AAAA,EAC7E;AAEA,MAAI,SAAmB,CAAC;AAExB,MAAI;AACF,QAAI,aAAa,UAAU;AACzB,YAAM,UAAU,IAAI,cAAc,EAAE,UAAU,IAAI,MAAM,SAAS,CAAC;AAClE,eAAS,MAAM,QAAQ,WAAW,qBAAqB;AAAA,IACzD,WAAW,aAAa,eAAe,aAAa,qBAAqB;AACvE,YAAM,UAAU,IAAI,gBAAgB,EAAE,UAAU,IAAI,MAAM,SAAS,CAAC;AACpE,eAAS,MAAM,QAAQ,WAAW,qBAAqB;AAAA,IACzD,WAAW,aAAa,aAAa;AACnC,eAAS;AAAA,IACX;AAAA,EACF,QAAQ;AAAA,EAER;AAGA,MAAI,SAAS,aAAa;AACxB,aAAS,sBAAsB,MAAM;AAAA,EACvC,WAAW,SAAS,OAAO;AACzB,aAAS,gBAAgB,MAAM;AAAA,EACjC;AAEA,SAAO,EAAE,MAAM,EAAE,UAAU,OAAO,EAAE;AACtC;;;ACpEA,SAAS,cAAAC,mBAAkB;AAC3B,OAAOC,UAAQ;AACf,OAAOC,YAAU;AAMV,SAAS,kBAAkB,UAA0B;AAC1D,MAAI;AACF,UAAM,aAAaC,OAAK,KAAK,UAAU,eAAe;AACtD,UAAM,MAAMC,KAAG,aAAa,YAAY,OAAO;AAC/C,WAAOC,YAAW,KAAK,EAAE,OAAO,GAAG,EAAE,OAAO,KAAK;AAAA,EACnD,QAAQ;AACN,WAAO;AAAA,EACT;AACF;AAaO,SAAS,uBAAuB,MAAmC;AACxE,SAAO,YAAoC;AACzC,UAAM,QAAQ,YAAY,KAAK,UAAU,EAAE,iBAAiB,KAAK,SAAS,SAAS,CAAC;AAEpF,UAAM,OAAO,MAAM,QAAQ;AAC3B,UAAM,OAAO,OAAO,KAAK,OAAO;AAChC,UAAM,OAAO,UAAU,KAAK,OAAO;AACnC,UAAM,OAAO,iBAAiB,KAAK,MAAM,QAAQ,OAAO,CAAC;AACzD,WAAO,EAAE,MAAM,EAAE,GAAG,OAAO,aAAa,KAAK,WAAW,IAAI,EAAE,EAAE;AAAA,EAClE;AACF;;;ACtBA,IAAM,kBAAkB;AAGxB,IAAM,oBAAoB;AAsD1B,IAAM,mBAAmB;AAAA,EACvB;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF;AAEA,IAAMC,kBAAiB,iBAAiB,KAAK,IAAI;AAOjD,SAAS,cAAc,KAA2C;AAChE,SAAO;AAAA,IACL,IAAI,IAAI;AAAA,IACR,YAAY,IAAI;AAAA,IAChB,iBAAkB,IAAI,mBAA8B;AAAA,IACpD,WAAW,IAAI;AAAA,IACf,YAAa,IAAI,cAAyB;AAAA,IAC1C,qBAAsB,IAAI,uBAAkC;AAAA,IAC5D,WAAY,IAAI,aAAwB;AAAA,I
ACxC,gBAAiB,IAAI,kBAA6B;AAAA,IAClD,aAAc,IAAI,eAA0B;AAAA,IAC5C,SAAS,IAAI;AAAA,IACb,eAAgB,IAAI,iBAA4B;AAAA,IAChD,WAAW,IAAI;AAAA,IACf,WAAW,IAAI;AAAA,IACf,cAAe,IAAI,gBAA2B;AAAA,IAC9C,YAAY,IAAI;AAAA,EAClB;AACF;AAsFO,SAAS,wBACd,MACa;AACb,QAAM,KAAK,YAAY;AAEvB,QAAM,OAAO,GAAG;AAAA,IACd;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAaF,EAAE;AAAA,IACA,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK,cAAc;AAAA,IACnB,KAAK,uBAAuB;AAAA,IAC5B,KAAK,aAAa;AAAA,IAClB,KAAK,kBAAkB;AAAA,IACvB,KAAK,eAAe;AAAA,IACpB,KAAK,WAAW;AAAA,IAChB,KAAK,iBAAiB;AAAA,IACtB,KAAK;AAAA,IACL;AAAA,IACA,KAAK,gBAAgB;AAAA,IACrB,KAAK;AAAA,EACP;AAEA,QAAM,aAAa,OAAO,KAAK,eAAe;AAG9C,QAAM,WAAW,KAAK;AACtB,QAAM,YAAY,KAAK,cAAc;AACrC,QAAM,WAAW,KAAK,aAAa;AACnC,MAAI,YAAY,aAAa,UAAU;AACrC,OAAG;AAAA,MACD;AAAA,IACF,EAAE,IAAI,YAAY,YAAY,IAAI,aAAa,IAAI,YAAY,EAAE;AAAA,EACnE;AAEA,SAAO;AAAA,IACL,GAAG,QAAQ,UAAUC,eAAc,+BAA+B,EAAE,IAAI,UAAU;AAAA,EACpF;AACF;AA6CO,SAAS,sBACd,SACe;AACf,QAAM,KAAK,YAAY;AAEvB,QAAM,OAAO,GAAG;AAAA,IACd,UAAUC,eAAc;AAAA;AAAA;AAAA;AAAA,EAI1B,EAAE,IAAI,OAAO;AAEb,SAAO,KAAK,IAAI,aAAa;AAC/B;AAKO,SAAS,gBAAgB,WAA2B;AACzD,QAAM,KAAK,YAAY;AAEvB,QAAM,MAAM,GAAG;AAAA,IACb;AAAA,EACF,EAAE,IAAI,SAAS;AAEf,SAAO,IAAI;AACb;;;ACnRA,IAAM,qBAAqB;AAAA,EACzB;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF;AAGA,IAAM,0BAA0B;AAAA,EAC9B;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF;AAEA,IAAMC,kBAAiB,mBAAmB,KAAK,IAAI;AACnD,IAAM,sBAAsB,wBAAwB,KAAK,IAAI;AAO7D,SAAS,iBAAiB,KAAiD;AACzE,SAAO;AAAA,IACL,IAAI,IAAI;AAAA,IACR,YAAY,IAAI;AAAA,IAChB,iBAAkB,IAAI,mBAA8B;AAAA,IACpD,WAAW,IAAI;AAAA,IACf,YAAa,IAAI,cAAyB;AAAA,IAC1C,aAAc,IAAI,eAA0B;AAAA,IAC5C,cAAe,IAAI,gBAA2B;AAAA,IAC9C,YAAY,IAAI;AAAA,EAClB;AACF;AAGA,SAAS,gBAAgB,KAA6C;AACpE,SAAO,EAAE,GAAG,iBAAiB,GAAG,GAAG,MAAO,IAAI,QAAmB,KAAK;AACxE;AAGA,SAAS,oBAAoB,KAAiD;AAC5E,SAAO,iBAAiB,GAAG;AAC7B;AAcO,SAAS,iBAAiB,MAAmD;AAClF,QAAM,KAAK,YAAY;AAEvB,QAAM,OAAO,GAAG;AAAA,IACd,4BAA4BA,eAAc;AAAA;AAAA;AAAA,EAG5C,EAAE;AAAA,IACA,KAAK;AAA
A,IACL,KAAK;AAAA,IACL,KAAK,mBAAmB;AAAA,IACxB,KAAK;AAAA,IACL,KAAK,cAAc;AAAA,IACnB,KAAK,eAAe;AAAA,IACpB,KAAK,QAAQ;AAAA,IACb,KAAK,gBAAgB;AAAA,IACrB,KAAK;AAAA,EACP;AAEA,MAAI,KAAK,YAAY,EAAG,QAAO;AAE/B,SAAO;AAAA,IACL,GAAG,QAAQ,UAAUA,eAAc,gCAAgC,EAAE,IAAI,KAAK,EAAE;AAAA,EAClF;AACF;AAUO,SAAS,yBAAyB,WAAwC;AAC/E,QAAM,KAAK,YAAY;AAEvB,QAAM,OAAO,GAAG;AAAA,IACd,UAAU,mBAAmB;AAAA,EAC/B,EAAE,IAAI,SAAS;AAEf,SAAO,KAAK,IAAI,mBAAmB;AACrC;AAOO,SAAS,wBAAwB,UAAwC;AAC9E,QAAM,KAAK,YAAY;AAEvB,QAAM,MAAM,GAAG;AAAA,IACb,UAAUA,eAAc;AAAA,EAC1B,EAAE,IAAI,QAAQ;AAEd,SAAO,MAAM,gBAAgB,GAAG,IAAI;AACtC;;;AC9JA,IAAMC,sBAAqB;AAG3B,IAAMC,kBAAiB;AAGvB,IAAMC,qBAAoB;AAsD1B,IAAM,eAAe;AAAA,EACnB;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF;AAEA,IAAMC,kBAAiB,aAAa,KAAK,IAAI;AAO7C,SAAS,UAAU,KAAuC;AACxD,SAAO;AAAA,IACL,IAAI,IAAI;AAAA,IACR,QAAQ,IAAI;AAAA,IACZ,QAAS,IAAI,UAAqB;AAAA,IAClC,OAAQ,IAAI,SAAoB;AAAA,IAChC,SAAU,IAAI,WAAsB;AAAA,IACpC,aAAc,IAAI,eAA0B;AAAA,IAC5C,MAAO,IAAI,QAAmB;AAAA,IAC9B,YAAa,IAAI,cAAyB;AAAA,IAC1C,iBAAkB,IAAI,mBAA8B;AAAA,IACpD,cAAe,IAAI,gBAA2B;AAAA,IAC9C,WAAW,IAAI;AAAA,IACf,UAAW,IAAI,YAAuB;AAAA,IACtC,YAAY,IAAI;AAAA,IAChB,YAAa,IAAI,cAAyB;AAAA,IAC1C,YAAa,IAAI,cAAyB;AAAA,IAC1C,WAAY,IAAI,aAAwB;AAAA,EAC1C;AACF;AAWO,SAAS,WAAW,MAA2B;AACpD,QAAM,KAAK,YAAY;AAEvB,KAAG;AAAA,IACD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAyBF,EAAE;AAAA,IACA,KAAK;AAAA,IACL,KAAK,UAAUF;AAAA,IACf,KAAK,UAAU;AAAA,IACf,KAAK,SAAS;AAAA,IACd,KAAK,WAAW;AAAA,IAChB,KAAK,eAAe;AAAA,IACpB,KAAK,QAAQ;AAAA,IACb,KAAK,cAAc;AAAA,IACnB,KAAK,mBAAmB;AAAA,IACxB,KAAK,gBAAgB;AAAA,IACrB,KAAK,aAAaC;AAAA,IAClB,KAAK;AAAA,IACL,KAAK,cAAc;AAAA,IACnB,KAAK,cAAc,iBAAiB;AAAA,EACtC;AAEA,QAAM,MAAM;AAAA,IACV,GAAG,QAAQ,UAAUC,eAAc,0BAA0B,EAAE,IAAI,KAAK,EAAE;AAAA,EAC5E;AAEA,UAAQ,SAAS,GAAG;AAEpB,SAAO;AACT;AAOO,SAAS,QAAQ,IAA4B;AAClD,QAAM,KAAK,YAAY;AAEvB,QAAM,MAAM,GAAG;AAAA,IACb,UAAU
A,eAAc;AAAA,EAC1B,EAAE,IAAI,EAAE;AAER,MAAI,CAAC,IAAK,QAAO;AACjB,SAAO,UAAU,GAAG;AACtB;AAKO,SAAS,UACd,UAA4B,CAAC,GAClB;AACX,QAAM,KAAK,YAAY;AAEvB,QAAM,aAAuB,CAAC;AAC9B,QAAM,SAAoB,CAAC;AAE3B,MAAI,QAAQ,WAAW,QAAW;AAChC,eAAW,KAAK,YAAY;AAC5B,WAAO,KAAK,QAAQ,MAAM;AAAA,EAC5B;AAEA,QAAM,QAAQ,WAAW,SAAS,IAAI,SAAS,WAAW,KAAK,OAAO,CAAC,KAAK;AAC5E,QAAM,QAAQ,QAAQ,SAASH;AAE/B,SAAO,KAAK,KAAK;AAEjB,QAAM,OAAO,GAAG;AAAA,IACd,UAAUG,eAAc;AAAA;AAAA,OAErB,KAAK;AAAA;AAAA;AAAA,EAGV,EAAE,IAAI,GAAG,MAAM;AAEf,SAAO,KAAK,IAAI,SAAS;AAC3B;AAKO,SAAS,mBAAmB,WAA8B;AAC/D,QAAM,KAAK,YAAY;AAEvB,QAAM,OAAO,GAAG;AAAA,IACd,UAAUA,eAAc;AAAA;AAAA;AAAA;AAAA,EAI1B,EAAE,IAAI,SAAS;AAEf,SAAO,KAAK,IAAI,SAAS;AAC3B;;;ACjPA,SAAS,QAAQ,YAAY;AAU7B,eAAsB,2BACpB,WACA,QACA,kBACA,UACe;AAEf,MAAI;AAAE,qBAAiB,UAAU,YAAY,SAAS;AAAA,EAAG,QAAQ;AAAA,EAAoB;AACrF,aAAW,WAAW,OAAO,iBAAiB;AAC5C,QAAI;AAAE,uBAAiB,UAAU,UAAU,OAAO;AAAA,IAAG,QAAQ;AAAA,IAAoB;AAAA,EACnF;AAGA,MAAI;AACF,qBAAiB,KAAK,KAAK,uBAAuB,SAAS,OAAO,EAAE,KAAK,SAAS,CAAC,GAAG;AACpF,YAAM,OAAO,GAAG,QAAQ,IAAI,CAAC,EAAE,EAAE,MAAM,MAAM;AAAA,MAAC,CAAC;AAAA,IACjD;AAAA,EACF,QAAQ;AAAA,EAAoB;AAG5B,aAAW,WAAW,OAAO,iBAAiB;AAC5C,QAAI;AACF,uBAAiB,KAAK,KAAK,aAAa,OAAO,QAAQ,EAAE,KAAK,SAAS,CAAC,GAAG;AACzE,cAAM,OAAO,GAAG,QAAQ,IAAI,CAAC,EAAE,EAAE,MAAM,MAAM;AAAA,QAAC,CAAC;AAAA,MACjD;AAAA,IACF,QAAQ;AAAA,IAAoB;AAAA,EAC9B;AAGA,aAAW,YAAY,OAAO,wBAAwB;AACpD,QAAI;AAAE,YAAM,OAAO,QAAQ;AAAA,IAAG,QAAQ;AAAA,IAAoB;AAAA,EAC5D;AACF;;;ACfA,eAAsB,oBACpB,WACA,MACe;AACf,QAAM,EAAE,UAAU,kBAAkB,YAAY,OAAO,IAAI;AAC3D,QAAM,SAAS,WAAW;AAE1B,MAAI,OAAO,MAAM,0BAA0B,EAAG;AAC9C,MAAI,OAAO,MAAM,wBAAwB,MAAO;AAEhD,MAAI;AACF,UAAM,EAAE,SAAS,IAAI,MAAM,OAAO,wBAAsB;AACxD,aAAS,UAAU;AAAA,MACjB,MAAM;AAAA,MACN,aAAa,mBAAmB,SAAS;AAAA,MACzC;AAAA,IACF,CAAC,EAAE,MAAM,CAAC,QAAQ;AAChB,aAAO,KAAK,UAAU,aAAa,6BAA6B;AAAA,QAC9D,YAAY;AAAA,QACZ,OAAO,OAAO,GAAG;AAAA,MACnB,CAAC;AAAA,IACH,CAAC;AAAA,EACH,QAAQ;AAAA,EAER;AACF;;;AC9CA,IAAMC,sBAAqB;AAC3B,IAAMC,uBAAsB;AAE5B,eAAsB,mBAAmB,KAA2C;AAClF,QAAM,QAAQ,IAAI,MAAM,QAAQ,OAAO,IAAI,MAAM,KAAK,IAAID;AAC1D,QAAM,SAAS,IAAI,MAAM,SAAS
,OAAO,IAAI,MAAM,MAAM,IAAIC;AAC7D,QAAM,SAAS,IAAI,MAAM,UAAU;AACnC,QAAM,QAAQ,IAAI,MAAM,SAAS;AACjC,QAAM,SAAS,IAAI,MAAM,UAAU;AAEnC,QAAM,aAAa,EAAE,QAAQ,OAAO,OAAO;AAE3C,QAAM,WAAW,aAAa,EAAE,GAAG,YAAY,OAAO,OAAO,CAAC,EAAE,IAAI,CAAC,OAAO;AAAA,IAC1E,IAAI,EAAE;AAAA,IACN,MAAM,IAAI,KAAK,EAAE,aAAa,GAAI,EAAE,YAAY,EAAE,MAAM,GAAG,EAAE;AAAA,IAC7D,OAAO,EAAE,SAAS,EAAE,GAAG,MAAM,GAAG,CAAC;AAAA,IACjC,QAAQ,EAAE;AAAA,IACV,OAAO,EAAE;AAAA,IACT,cAAc,EAAE;AAAA,IAChB,YAAY,EAAE;AAAA,IACd,YAAY,EAAE;AAAA,IACd,UAAU,EAAE;AAAA,EACd,EAAE;AACF,QAAM,QAAQ,cAAc,UAAU;AAEtC,SAAO,EAAE,MAAM,EAAE,UAAU,OAAO,QAAQ,MAAM,EAAE;AACpD;AAEA,eAAsB,iBAAiB,KAA2C;AAChF,QAAM,UAAU,WAAW,IAAI,OAAO,EAAE;AACxC,MAAI,CAAC,QAAS,QAAO,EAAE,QAAQ,KAAK,MAAM,EAAE,OAAO,YAAY,EAAE;AAIjE,QAAM,cAAc,sBAAsB,QAAQ,EAAE;AACpD,QAAM,YAAY,gBAAgB,QAAQ,EAAE;AAE5C,SAAO,EAAE,MAAM,EAAE,GAAG,SAAS,cAAc,aAAa,YAAY,UAAU,EAAE;AAClF;AAEA,eAAsB,wBAAwB,KAA2C;AACvF,QAAM,UAAU,qBAAqB,IAAI,OAAO,EAAE;AAClD,SAAO,EAAE,MAAM,QAAQ;AACzB;AAEA,eAAsB,yBAAyB,KAA2C;AACxF,QAAM,UAAU,OAAO,IAAI,OAAO,EAAE;AACpC,MAAI,MAAM,OAAO,EAAG,QAAO,EAAE,QAAQ,KAAK,MAAM,EAAE,OAAO,mBAAmB,EAAE;AAC9E,QAAM,aAAa,sBAAsB,OAAO;AAChD,SAAO,EAAE,MAAM,WAAW;AAC5B;AAEA,eAAsB,4BAA4B,KAA2C;AAC3F,QAAM,cAAc,yBAAyB,IAAI,OAAO,EAAE;AAC1D,SAAO,EAAE,MAAM,YAAY;AAC7B;AAEA,eAAsB,sBAAsB,KAA2C;AACrF,QAAM,QAAQ,mBAAmB,IAAI,OAAO,EAAE;AAC9C,SAAO,EAAE,MAAM,MAAM;AACvB;AAaO,SAAS,8BAA8B,MAA2B;AACvE,QAAM,EAAE,kBAAkB,UAAU,QAAQ,WAAW,IAAI;AAG3D,iBAAe,oBAAoB,KAA2C;AAC5E,UAAM,YAAY,IAAI,OAAO;AAC7B,UAAM,SAAS,qBAAqB,SAAS;AAC7C,QAAI,CAAC,OAAO,QAAS,QAAO,EAAE,QAAQ,KAAK,MAAM,EAAE,OAAO,oBAAoB,EAAE;AAGhF,+BAA2B,WAAW,QAAQ,kBAAkB,QAAQ,EAAE,MAAM,MAAM;AAAA,IAAC,CAAC;AAExF,WAAO,KAAK,UAAU,oBAAoB,2BAA2B;AAAA,MACnE,YAAY;AAAA,MACZ,QAAQ,OAAO;AAAA,IACjB,CAAC;AACD,WAAO,EAAE,MAAM,EAAE,IAAI,MAAM,QAAQ,OAAO,OAAO,EAAE;AAAA,EACrD;AAcA,iBAAe,sBAAsB,KAA2C;AAC9E,UAAM,YAAY,IAAI,OAAO;AAC7B,UAAM,UAAU,WAAW,SAAS;AACpC,QAAI,CAAC,QAAS,QAAO,EAAE,QAAQ,KAAK,MAAM,EAAE,OAAO,oBAAoB,EAAE;AAEzE,UAAM,YAAY,QAAQ,WAAW;AACrC,QAAI,WAAW;AACb,oBAAc,WAAW;AAAA,QACvB,QAAQ;AAAA,QACR,UAAU,QAAQ,YAAY
,aAAa;AAAA,MAC7C,CAAC;AAAA,IACH;AAEA,UAAM,oBAAoB,WAAW,EAAE,UAAU,kBAAkB,YAAY,OAAO,CAAC;AAEvF,WAAO,KAAK,UAAU,sBAAsB,8BAA8B;AAAA,MACxE,YAAY;AAAA,MACZ,YAAY;AAAA,IACd,CAAC;AAED,WAAO,EAAE,MAAM,EAAE,IAAI,MAAM,YAAY,UAAU,EAAE;AAAA,EACrD;AAGA,iBAAe,uBAAuB,KAA2C;AAC/E,UAAM,YAAY,IAAI,OAAO;AAC7B,UAAM,UAAU,WAAW,SAAS;AACpC,QAAI,CAAC,QAAS,QAAO,EAAE,QAAQ,KAAK,MAAM,EAAE,OAAO,oBAAoB,EAAE;AACzE,UAAM,SAAS,iBAAiB,SAAS;AACzC,WAAO,EAAE,MAAM,OAAO;AAAA,EACxB;AAEA,SAAO,EAAE,qBAAqB,uBAAuB,uBAAuB;AAC9E;;;ACzIA,IAAMC,sBAAqB;AAG3B,IAAMC,uBAAsB;AAG5B,IAAM,sBAAsB;AAG5B,IAAM,kBAAkB;AAGxB,IAAM,2BAA2B;AAGjC,IAAM,0BAA0B;AAChC,IAAM,yBAAyB;AAC/B,IAAM,2BAA2B;AAGjC,IAAM,4BAA4B,oBAAI,IAAI,CAAC,aAAa,gBAAgB,CAAC;AAMzE,eAAsB,iBAAiB,KAA2C;AAChF,QAAM,UAAU,IAAI,MAAM;AAC1B,QAAM,OAAO,IAAI,MAAM;AACvB,QAAM,SAAS,IAAI,MAAM;AACzB,QAAM,QAAQ,IAAI,MAAM,QAAQ,OAAO,IAAI,MAAM,KAAK,IAAID;AAC1D,QAAM,SAAS,IAAI,MAAM,SAAS,OAAO,IAAI,MAAM,MAAM,IAAIC;AAC7D,QAAM,SAAS,IAAI,MAAM,UAAU;AAEnC,QAAM,aAAa;AAAA,IACjB,GAAI,UAAU,EAAE,UAAU,QAAQ,IAAI,CAAC;AAAA,IACvC,kBAAkB;AAAA,IAClB;AAAA,IACA;AAAA,EACF;AAEA,QAAM,SAAS,WAAW,EAAE,GAAG,YAAY,OAAO,OAAO,CAAC;AAC1D,QAAM,QAAQ,YAAY,UAAU;AAEpC,SAAO,EAAE,MAAM,EAAE,QAAQ,OAAO,QAAQ,MAAM,EAAE;AAClD;AAEA,eAAsB,eAAe,KAA2C;AAC9E,QAAM,QAAQ,SAAS,IAAI,OAAO,EAAE;AACpC,MAAI,CAAC,MAAO,QAAO,EAAE,QAAQ,KAAK,MAAM,EAAE,OAAO,YAAY,EAAE;AAC/D,SAAO,EAAE,MAAM,MAAM;AACvB;AAMA,eAAsB,mBAAmB,KAA2C;AAClF,QAAM,UAAU,IAAI,MAAM,YAAY;AACtC,QAAM,OAAO,IAAI,MAAM;AACvB,QAAM,eAAe,IAAI,MAAM;AAC/B,QAAM,YAAY,IAAI,MAAM;AAC5B,QAAM,QAAQ,IAAI,MAAM,QAAQ,OAAO,IAAI,MAAM,KAAK,IAAID;AAC1D,QAAM,SAAS,IAAI,MAAM,SAAS,OAAO,IAAI,MAAM,MAAM,IAAIC;AAE7D,QAAM,WAAW,aAAa;AAAA,IAC5B,UAAU;AAAA,IACV;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,CAAC;AAED,SAAO,EAAE,MAAM,EAAE,SAAS,EAAE;AAC9B;AAEA,eAAsB,oBAAoB,MAA4C;AACpF,QAAM,KAAK,YAAY;AAEvB,QAAM,YAAY,GAAG;AAAA,IACnB;AAAA;AAAA;AAAA;AAAA;AAAA,EAKF,EAAE,IAAI,kBAAkB,sBAAsB;AAE9C,QAAM,cAAc,GAAG;AAAA,IACrB;AAAA;AAAA;AAAA;AAAA;AAAA,EAKF,EAAE,IAAI,wBAAwB;AAE9B,QAAM,aAAa,GAAG;AAAA,IACpB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOF,EA
AE,IAAI,kBAAkB,uBAAuB;AAE/C,QAAM,aAAa,UAAU,IAAI,CAAC,SAAS;AAAA,IACvC,IAAI,IAAI;AAAA,IACR,OAAQ,IAAI,WAAsB,IAAI,MAAM,GAAG,wBAAwB;AAAA,IACvE,MAAM;AAAA,IACN,QAAS,IAAI,UAAqB;AAAA,IAClC,YAAY,IAAI;AAAA,IAChB,SAAS,IAAI;AAAA,IACb,kBAAkB,IAAI;AAAA,EACxB,EAAE;AACJ,QAAM,eAAe,YAAY,IAAI,CAAC,SAAS;AAAA,IAC3C,IAAI,IAAI;AAAA,IACR,MAAO,IAAI,SAAoB,WAAY,IAAI,GAAc,MAAM,EAAE,CAAC;AAAA,IACtE,MAAM;AAAA,IACN,QAAS,IAAI,UAAqB;AAAA,IAClC,YAAY,IAAI;AAAA,IAChB,SAAU,IAAI,WAAsB;AAAA,EACtC,EAAE;AACJ,QAAM,cAAc,WAAW,IAAI,CAAC,SAAS;AAAA,IACzC,IAAI,IAAI;AAAA,IACR,MAAM,IAAI;AAAA,IACV,MAAM,IAAI;AAAA,IACV,QAAS,IAAI,UAAqB;AAAA,IAClC,YAAY,IAAI;AAAA,IAChB,eAAe,OAAO,IAAI,aAAa,KAAK;AAAA,EAC9C,EAAE;AAEJ,QAAM,QAAQ;AAAA,IACZ,GAAG;AAAA,IACH,GAAG;AAAA,IACH,GAAG;AAAA,EACL;AAEA,QAAM,gBAAgB,YAAY,CAAC,GAAG,MACjC,aAAa,CAAC,GAAG,MACjB,WAAW,CAAC,GAAG,MACf;AAEL,SAAO;AAAA,IACL,MAAM;AAAA,MACJ;AAAA,MACA,gBAAgB;AAAA,IAClB;AAAA,EACF;AACF;AAMA,eAAsB,eAAe,KAA2C;AAC9E,QAAM,QAAQ,KAAK,IAAI,OAAO,IAAI,MAAM,KAAK,KAAK,qBAAqB,eAAe;AACtF,QAAM,KAAK,IAAI,OAAO;AAGtB,MAAI,aAAkB;AACtB,MAAI,aAA6C;AAEjD,QAAM,SAAS,UAAU,EAAE;AAC3B,MAAI,QAAQ;AACV,iBAAa;AACb,iBAAa;AAAA,EACf,OAAO;AACL,UAAM,QAAQ,SAAS,EAAE;AACzB,QAAI,OAAO;AACT,mBAAa;AACb,mBAAa;AAAA,IACf,OAAO;AACL,YAAM,UAAU,WAAW,EAAE;AAC7B,UAAI,SAAS;AACX,qBAAa;AACb,qBAAa;AAAA,MACf;AAAA,IACF;AAAA,EACF;AAEA,MAAI,CAAC,WAAY,QAAO,EAAE,QAAQ,KAAK,MAAM,EAAE,OAAO,YAAY,EAAE;AAGpE,QAAM,QAAQ,gBAAgB,IAAI,YAAY,EAAE,MAAM,CAAC;AAGvD,QAAM,gBAAgB,MAAM,MAAM;AAAA,IAChC,CAAC,MAAM,CAAC,0BAA0B,IAAI,EAAE,IAAI;AAAA,EAC9C;AAEA,QAAM,UAAU,YAAY;AAG5B,QAAM,YAAY,oBAAI,IAAY;AAClC,QAAM,WAAW,oBAAI,IAAY;AACjC,QAAM,aAAa,oBAAI,IAAY;AAEnC,aAAW,QAAQ,eAAe;AAChC,eAAW,CAAC,QAAQ,IAAI,KAAK;AAAA,MAC3B,CAAC,KAAK,WAAW,KAAK,WAAW;AAAA,MACjC,CAAC,KAAK,WAAW,KAAK,WAAW;AAAA,IACnC,GAAyB;AACvB,cAAQ,MAAM;AAAA,QACZ,KAAK;AAAU,oBAAU,IAAI,MAAM;AAAG;AAAA,QACtC,KAAK;AAAS,mBAAS,IAAI,MAAM;AAAG;AAAA,QACpC,KAAK;AAAW,qBAAW,IAAI,MAAM;AAAG;AAAA,MAE1C;AAAA,IACF;AAAA,EACF;AAGA,MAAI,eAAe,SAAU,WAAU,IAAI,EAAE;AAC7C,MAAI,eAAe,QAAS,UAAS,IAAI,EAAE;AAC3C,MAAI,eAAe,UAAW,YAAW,IAAI,
EAAE;AAG/C,QAAM,gBAAgB,MAAM,KAAK,SAAS;AAC1C,MAAI,cAA8C,CAAC;AACnD,MAAI,cAAc,SAAS,GAAG;AAC5B,UAAM,eAAe,cAAc,IAAI,MAAM,GAAG,EAAE,KAAK,IAAI;AAC3D,kBAAc,QAAQ;AAAA,MACpB;AAAA,oCAC8B,YAAY;AAAA,IAC5C,EAAE,IAAI,GAAG,aAAa;AAAA,EACxB;AAGA,QAAM,eAAe,MAAM,KAAK,QAAQ;AACxC,MAAI,aAA6C,CAAC;AAClD,MAAI,aAAa,SAAS,GAAG;AAC3B,UAAM,eAAe,aAAa,IAAI,MAAM,GAAG,EAAE,KAAK,IAAI;AAC1D,iBAAa,QAAQ;AAAA,MACnB;AAAA,kCAC4B,YAAY;AAAA,IAC1C,EAAE,IAAI,GAAG,YAAY;AAAA,EACvB;AAGA,QAAM,iBAAiB,MAAM,KAAK,UAAU;AAC5C,MAAI,eAA+C,CAAC;AACpD,MAAI,eAAe,SAAS,GAAG;AAC7B,UAAM,eAAe,eAAe,IAAI,MAAM,GAAG,EAAE,KAAK,IAAI;AAC5D,mBAAe,QAAQ;AAAA,MACrB;AAAA,oCAC8B,YAAY;AAAA,IAC5C,EAAE,IAAI,GAAG,cAAc;AAAA,EACzB;AAGA,QAAM,gBAAgB,oBAAI,IAAoB;AAC9C,MAAI,cAAc,SAAS,GAAG;AAC5B,UAAM,eAAe,cAAc,IAAI,MAAM,GAAG,EAAE,KAAK,IAAI;AAC3D,UAAM,cAAc,QAAQ;AAAA,MAC1B;AAAA,6BACuB,YAAY;AAAA,IACrC,EAAE,IAAI,GAAG,aAAa;AACtB,eAAW,OAAO,aAAa;AAC7B,oBAAc,IAAI,IAAI,WAAqB,OAAO,IAAI,KAAK,CAAC;AAAA,IAC9D;AAAA,EACF;AAGA,QAAM,WAAW;AAAA,IACf,GAAG,YAAY,IAAI,CAAC,OAAO;AAAA,MACzB,IAAI,EAAE;AAAA,MACN,MAAM,EAAE;AAAA,MACR,MAAM,EAAE;AAAA,MACR,QAAS,EAAE,UAAqB;AAAA,MAChC,YAAY,EAAE;AAAA,MACd,YAAa,EAAE,cAAyB;AAAA,MACxC,eAAe,cAAc,IAAI,EAAE,EAAY,KAAK;AAAA,IACtD,EAAE;AAAA,IACF,GAAG,WAAW,IAAI,CAAC,OAAO;AAAA,MACxB,IAAI,EAAE;AAAA,MACN,OAAQ,EAAE,WAAsB,IAAI,MAAM,GAAG,wBAAwB;AAAA,MACrE,MAAM;AAAA,MACN,QAAS,EAAE,UAAqB;AAAA,MAChC,YAAY,EAAE;AAAA,MACd,SAAS,EAAE;AAAA,MACX,YAAa,EAAE,cAAyB;AAAA,MACxC,kBAAkB,EAAE;AAAA,IACtB,EAAE;AAAA,IACF,GAAG,aAAa,IAAI,CAAC,OAAO;AAAA,MAC1B,IAAI,EAAE;AAAA,MACN,MAAO,EAAE,SAAoB,WAAY,EAAE,GAAc,MAAM,EAAE,CAAC;AAAA,MAClE,MAAM;AAAA,MACN,QAAS,EAAE,UAAqB;AAAA,MAChC,YAAY,EAAE;AAAA,MACd,SAAU,EAAE,WAAsB;AAAA,IACpC,EAAE;AAAA,EACJ;AAGA,QAAM,UAAU,cAAc,IAAI,CAAC,OAAO;AAAA,IACxC,WAAW,EAAE;AAAA,IACb,WAAW,EAAE;AAAA,IACb,OAAO,EAAE;AAAA,IACT,QAAQ,EAAE;AAAA,EACZ,EAAE;AAEF,QAAM,qBAAqB,SAAS,KAAK,CAAC,MAAM,EAAE,OAAO,EAAE;AAE3D,SAAO;AAAA,IACL,MAAM;AAAA,MACJ,QAAQ;AAAA,MACR,OAAO,SAAS,OAAO,CAAC,MAAM,EAAE,OAAO,EAAE;AAAA,MACzC,OAAO;AAAA,MACP;AAAA,IACF;AAAA,EACF;AACF;AAOA,IAAM,wBAAwB;AAE9
B,eAAsB,mBAAmB,MAA4C;AACnF,QAAM,KAAK,YAAY;AAGvB,QAAM,aAAa,GAAG;AAAA,IACpB;AAAA;AAAA,EAEF,EAAE,IAAI,kBAAkB,qBAAqB;AAG7C,QAAM,YAAY,GAAG;AAAA,IACnB;AAAA;AAAA,EAEF,EAAE,IAAI,kBAAkB,qBAAqB;AAG7C,QAAM,cAAc,GAAG;AAAA,IACrB;AAAA;AAAA,EAEF,EAAE,IAAI,qBAAqB;AAG3B,QAAM,SAAS,oBAAI,IAAY;AAC/B,aAAW,KAAK,CAAC,GAAG,YAAY,GAAG,WAAW,GAAG,WAAW,GAAG;AAC7D,WAAO,IAAI,EAAE,EAAY;AAAA,EAC3B;AAGA,QAAM,gBAAgB,MAAM,KAAK,yBAAyB,EAAE,IAAI,MAAM,GAAG,EAAE,KAAK,IAAI;AACpF,QAAM,aAAa,MAAM,KAAK,MAAM;AACpC,QAAM,iBAAiB,WAAW,IAAI,MAAM,GAAG,EAAE,KAAK,IAAI;AAC1D,QAAM,WAAW,GAAG;AAAA,IAClB;AAAA;AAAA;AAAA,0BAGsB,aAAa;AAAA,2BACZ,cAAc;AAAA,2BACd,cAAc;AAAA,EACvC,EAAE,IAAI,kBAAkB,GAAG,MAAM,KAAK,yBAAyB,GAAG,GAAG,YAAY,GAAG,UAAU;AAE9F,QAAM,gBAAgB;AAGtB,QAAM,gBAAgB,oBAAI,IAAoB;AAC9C,QAAM,gBAAgB,WAAW,IAAI,CAAC,MAAM,EAAE,EAAY;AAC1D,MAAI,cAAc,SAAS,GAAG;AAC5B,UAAM,eAAe,cAAc,IAAI,MAAM,GAAG,EAAE,KAAK,IAAI;AAC3D,UAAM,cAAc,GAAG;AAAA,MACrB;AAAA,6BACuB,YAAY;AAAA,IACrC,EAAE,IAAI,GAAG,aAAa;AACtB,eAAW,OAAO,aAAa;AAC7B,oBAAc,IAAI,IAAI,WAAqB,OAAO,IAAI,KAAK,CAAC;AAAA,IAC9D;AAAA,EACF;AAGA,QAAM,QAAQ;AAAA,IACZ,GAAG,WAAW,IAAI,CAAC,OAAO;AAAA,MACxB,IAAI,EAAE;AAAA,MACN,MAAM,EAAE;AAAA,MACR,MAAM,EAAE;AAAA,MACR,QAAS,EAAE,UAAqB;AAAA,MAChC,YAAY,EAAE;AAAA,MACd,YAAa,EAAE,cAAyB;AAAA,MACxC,eAAe,cAAc,IAAI,EAAE,EAAY,KAAK;AAAA,IACtD,EAAE;AAAA,IACF,GAAG,UAAU,IAAI,CAAC,OAAO;AAAA,MACvB,IAAI,EAAE;AAAA,MACN,OAAQ,EAAE,WAAsB,IAAI,MAAM,GAAG,wBAAwB;AAAA,MACrE,MAAM;AAAA,MACN,QAAS,EAAE,UAAqB;AAAA,MAChC,YAAY,EAAE;AAAA,MACd,SAAS,EAAE;AAAA,MACX,YAAa,EAAE,cAAyB;AAAA,MACxC,kBAAkB,EAAE;AAAA,IACtB,EAAE;AAAA,IACF,GAAG,YAAY,IAAI,CAAC,OAAO;AAAA,MACzB,IAAI,EAAE;AAAA,MACN,MAAO,EAAE,SAAoB,WAAY,EAAE,GAAc,MAAM,EAAE,CAAC;AAAA,MAClE,MAAM;AAAA,MACN,QAAS,EAAE,UAAqB;AAAA,MAChC,YAAY,EAAE;AAAA,MACd,SAAU,EAAE,WAAsB;AAAA,IACpC,EAAE;AAAA,EACJ;AAEA,QAAM,QAAQ,cAAc,IAAI,CAAC,OAAO;AAAA,IACtC,WAAW,EAAE;AAAA,IACb,WAAW,EAAE;AAAA,IACb,OAAO,EAAE;AAAA,IACT,QAAQ,EAAE;AAAA,EACZ,EAAE;AAEF,SAAO,EAAE,MAAM,EAAE,OAAO,MAAM,EAAE;AAClC;AAMA,eAAsB,gBAAgB,KAA2C;AAC/E,QAAM,UAAU,IAAI,MAAM,YAAY;AACtC,QAAM,WAAW,mB
AAmB,OAAO;AAC3C,SAAO,EAAE,MAAM,EAAE,OAAO,SAAS,EAAE;AACrC;;;AC/ZA,IAAM,yBAAiE;AAAA,EACrE,EAAE,KAAK,OAAO,OAAO,OAAU;AAAA,EAC/B,EAAE,KAAK,WAAW,OAAO,WAAW;AAAA,EACpC,EAAE,KAAK,YAAY,OAAO,WAAW;AAAA,EACrC,EAAE,KAAK,SAAS,OAAO,SAAS;AAAA,EAChC,EAAE,KAAK,UAAU,OAAO,SAAS;AAAA,EACjC,EAAE,KAAK,QAAQ,OAAO,QAAQ;AAAA,EAC9B,EAAE,KAAK,SAAS,OAAO,QAAQ;AAAA,EAC/B,EAAE,KAAK,YAAY,OAAO,YAAY;AAAA,EACtC,EAAE,KAAK,aAAa,OAAO,YAAY;AAAA,EACvC,EAAE,KAAK,SAAS,OAAO,gBAAgB;AAAA,EACvC,EAAE,KAAK,iBAAiB,OAAO,gBAAgB;AACjD;AAEO,SAAS,yBAAyB,OAAoC;AAC3E,MAAI,CAAC,MAAO,QAAO;AACnB,aAAW,QAAQ,wBAAwB;AACzC,QAAI,KAAK,QAAQ,MAAO,QAAO,KAAK;AAAA,EACtC;AACA,SAAO;AACT;AAqBO,SAAS,oBAAoB,MAAkB;AACpD,SAAO,eAAe,aAAa,KAA2C;AAC5E,UAAM,QAAQ,IAAI,MAAM;AACxB,QAAI,CAAC,MAAO,QAAO,EAAE,QAAQ,KAAK,MAAM,EAAE,OAAO,gBAAgB,EAAE;AAEnE,UAAM,OAAQ,IAAI,MAAM,QAAQ;AAChC,UAAM,OAAO,IAAI,MAAM;AACvB,UAAM,QAAQ,OAAO,IAAI,MAAM,KAAK,KAAK;AACzC,UAAM,YAAY,IAAI,MAAM;AAG5B,QAAI,SAAS,OAAO;AAClB,YAAM,UAAU,eAAe,OAAO,EAAE,MAAM,MAAM,CAAC;AACrD,aAAO,EAAE,MAAM,EAAE,MAAM,OAAO,QAAQ,EAAE;AAAA,IAC1C;AAGA,UAAM,cAAc,MAAM,KAAK,iBAAiB,WAAW,KAAK;AAGhE,QAAI,gBAAgB,MAAM;AACxB,UAAI,SAAS,QAAQ;AACnB,cAAM,UAAU,eAAe,OAAO,EAAE,MAAM,MAAM,CAAC;AACrD,eAAO,EAAE,MAAM,EAAE,MAAM,OAAO,SAAS,UAAU,KAAK,EAAE;AAAA,MAC1D;AAEA,aAAO,EAAE,MAAM,EAAE,MAAM,YAAY,SAAS,CAAC,GAAG,sBAAsB,KAAK,EAAE;AAAA,IAC/E;AAGA,UAAM,kBAAkB,yBAAyB,aAAa,IAAI;AAClE,UAAM,gBAAgB,KAAK,iBAAiB,cAAc,aAAa;AAAA,MACrE,WAAW;AAAA,MACX;AAAA,MACA,WAAW;AAAA,IACb,CAAC;AAGD,UAAM,eAAe,qBAAqB,aAAa,EAAE,IAAI,CAAC,OAAO;AAAA,MACnE,GAAG;AAAA,MACH,QAAQ;AAAA,IACV,EAAE;AAGF,UAAM,aAAa,KAAK,gBAAgB;AACxC,QAAI,cAA4D,CAAC;AACjE,QAAI,YAAY;AACd,UAAI;AACF,cAAM,eAAe,MAAM,WAAW,OAAO,OAAO,EAAE,MAAM,CAAC;AAC7D,sBAAc,aAAa,QAAQ,IAAI,CAAC,OAAO;AAAA,UAC7C,GAAG;AAAA,UACH,QAAQ,GAAG,kBAAkB,GAAG,EAAE,UAAU;AAAA,QAC9C,EAAE;AAAA,MACJ,QAAQ;AAAA,MAER;AAAA,IACF;AAGA,UAAM,cAAc,KAAK,YACrB,YAAY,OAAO,CAAC,MAAM,EAAE,eAAe,KAAK,SAAS,IACzD;AAGJ,UAAM,SAAS,CAAC,GAAG,cAAc,GAAG,WAAW,EAC5C,KAAK,CAAC,GAAG,OAAO,EAAE,SAAS,MAAM,EAAE,SAAS,EAAE,EAC9C,MAAM,GAAG,KAAK;AAEjB,WAAO,EAAE,MAAM,EAAE,MA
AM,YAAY,SAAS,OAAO,EAAE;AAAA,EACvD;AACF;;;AC5FA,IAAM,qBAAqB,iBAAE,OAAO;AAAA,EAClC,YAAY,iBAAE,OAAO,EAAE,SAAS;AAAA,EAChC,QAAQ,iBAAE,OAAO,EAAE,SAAS;AAC9B,CAAC;AAED,IAAM,oBAAoB,iBAAE,OAAO;AAAA,EACjC,YAAY,iBAAE,OAAO;AAAA,EACrB,mBAAmB,iBAAE,OAAO,EAAE,SAAS;AAAA,EACvC,QAAQ,iBAAE,OAAO,EAAE,SAAS;AAC9B,CAAC;AAED,IAAM,oBAAoB,iBAAE,OAAO;AAAA,EACjC,QAAQ,iBAAE,OAAO;AAAA,EACjB,YAAY,iBAAE,OAAO,EAAE,SAAS;AAClC,CAAC;AAYM,SAAS,4BAA4B,MAAmB;AAC7D,SAAO,eAAe,qBAAqB,KAA2C;AACpF,UAAM,EAAE,YAAY,OAAO,IAAI,mBAAmB,MAAM,IAAI,IAAI;AAChE,UAAM,EAAE,QAAQ,WAAW,IAAI;AAC/B,UAAM,SAAS,WAAW;AAE1B,WAAO,MAAM,UAAU,eAAe,yBAAyB,EAAE,WAAW,CAAC;AAE7E,QAAI;AACF,YAAM,QAAkB,CAAC;AAGzB,YAAM,OAAO,OAAO,QAAQ;AAC5B,YAAM,UAAU,iBAAiB,kBAAkB,IAAI;AAEvD,UAAI,SAAS;AACX,cAAM,KAAK,QAAQ,OAAO;AAC1B,eAAO,KAAK,UAAU,gBAAgB,wBAAwB;AAAA,UAC5D;AAAA,UACA;AAAA,UACA,gBAAgB,QAAQ,QAAQ;AAAA,UAChC,cAAc,QAAQ;AAAA,QACxB,CAAC;AAAA,MACH,OAAO;AACL,eAAO,MAAM,UAAU,gBAAgB,+BAA+B,EAAE,YAAY,KAAK,CAAC;AAAA,MAC5F;AAGA,UAAI,QAAQ;AACV,cAAM,KAAK,cAAc,MAAM,IAAI;AAAA,MACrC;AAGA,YAAM,KAAK,eAAe,UAAU,IAAI;AAExC,YAAM,SAAS,UAAU,WAAW;AACpC,YAAM,cAAc,MAAM,KAAK,MAAM;AAErC,YAAM,kBAAkB,eAAe,WAAW;AAClD,aAAO;AAAA,QACL,UAAU;AAAA,QACV,oBAAoB,eAAe,wBAAwB,MAAM,GAAG,UAAU,UAAU,IAAI,KAAK,EAAE;AAAA,QACnG;AAAA,UACE;AAAA,UACA;AAAA,UACA,MAAM,UAAU,OAAO;AAAA,UACvB,aAAa,YAAY;AAAA,UACzB,kBAAkB;AAAA,UAClB,cAAc,SAAS;AAAA,UACvB,eAAe;AAAA,QACjB;AAAA,MACF;AAEA,aAAO;AAAA,QACL,MAAM;AAAA,UACJ,MAAM;AAAA,UACN;AAAA,UACA,GAAI,UAAU,EAAE,KAAK,IAAI,CAAC;AAAA,QAC5B;AAAA,MACF;AAAA,IACF,SAAS,OAAO;AACd,aAAO,MAAM,UAAU,iBAAiB,0BAA0B,EAAE,OAAQ,MAAgB,QAAQ,CAAC;AACrG,aAAO,EAAE,MAAM,EAAE,MAAM,GAAG,EAAE;AAAA,IAC9B;AAAA,EACF;AACF;AAaO,SAAS,2BAA2B,MAAmB;AAC5D,SAAO,eAAe,oBAAoB,KAA2C;AACnF,UAAM,EAAE,YAAY,mBAAmB,OAAO,IAAI,kBAAkB,MAAM,IAAI,IAAI;AAClF,UAAM,EAAE,OAAO,IAAI;AAEnB,WAAO,MAAM,UAAU,eAAe,wBAAwB;AAAA,MAC5D;AAAA,MACA;AAAA,IACF,CAAC;AAED,QAAI;AACF,YAAM,gBAAgB,oBAAoB,WAAW,iBAAiB,IAAI;AAC1E,YAAM,iBAAiB,UAAU,eAAe,UAAU;AAC1D,YAAM,QAAkB,CAAC;AAEzB,UAAI,eAAe,OAAO;AACxB,cAAM,KAAK,uBAAuB,cAAc,KAAK,EAAE;AAAA,MACzD;AAEA,U
AAI,eAAe,SAAS;AAC1B,cAAM,KAAK,cAAc,OAAO;AAAA,MAClC;AAEA,UAAI,gBAAgB;AAClB,cAAM,KAAK,cAAc,cAAc,IAAI;AAAA,MAC7C;AAEA,UAAI,iBAAiB,mBAAmB;AACtC,cAAM,KAAK,wBAAwB,iBAAiB,IAAI;AAAA,MAC1D;AAEA,UAAI,MAAM,WAAW,GAAG;AACtB,eAAO,MAAM,UAAU,iBAAiB,+BAA+B,EAAE,YAAY,kBAAkB,CAAC;AACxG,eAAO,EAAE,MAAM,EAAE,MAAM,GAAG,EAAE;AAAA,MAC9B;AAEA,YAAM,KAAK,eAAe,UAAU,IAAI;AACxC,YAAM,cAAc,MAAM,KAAK,MAAM;AACrC,YAAM,kBAAkB,eAAe,WAAW;AAElD,aAAO;AAAA,QACL,UAAU;AAAA,QACV,mBAAmB,eAAe;AAAA,QAClC;AAAA,UACE;AAAA,UACA;AAAA,UACA,QAAQ,kBAAkB;AAAA,UAC1B,aAAa,YAAY;AAAA,UACzB,kBAAkB;AAAA,UAClB,eAAe;AAAA,QACjB;AAAA,MACF;AAEA,aAAO;AAAA,QACL,MAAM;AAAA,UACJ,MAAM;AAAA,UACN,QAAQ;AAAA,QACV;AAAA,MACF;AAAA,IACF,SAAS,OAAO;AACd,aAAO,MAAM,UAAU,iBAAiB,yBAAyB;AAAA,QAC/D;AAAA,QACA;AAAA,QACA,OAAQ,MAAgB;AAAA,MAC1B,CAAC;AACD,aAAO,EAAE,MAAM,EAAE,MAAM,GAAG,EAAE;AAAA,IAC9B;AAAA,EACF;AACF;AAYO,SAAS,2BAA2B,MAAmB;AAC5D,SAAO,eAAe,oBAAoB,KAA2C;AACnF,UAAM,EAAE,QAAQ,WAAW,IAAI,kBAAkB,MAAM,IAAI,IAAI;AAC/D,UAAM,EAAE,QAAQ,YAAY,iBAAiB,IAAI;AACjD,UAAM,SAAS,WAAW;AAG1B,QAAI,CAAC,OAAO,QAAQ,eAAe;AACjC,aAAO,MAAM,UAAU,gBAAgB,oCAAoC,EAAE,WAAW,CAAC;AACzF,aAAO,EAAE,MAAM,EAAE,MAAM,GAAG,EAAE;AAAA,IAC9B;AAGA,QAAI,OAAO,SAAS,2BAA2B;AAC7C,aAAO,MAAM,UAAU,gBAAgB,+BAA+B;AAAA,QACpE;AAAA,QACA,QAAQ,OAAO;AAAA,QACf,KAAK;AAAA,MACP,CAAC;AACD,aAAO,EAAE,MAAM,EAAE,MAAM,GAAG,EAAE;AAAA,IAC9B;AAGA,UAAM,YAAY,OAAO,QAAQ;AACjC,QAAI,cAAc,GAAG;AACnB,aAAO,MAAM,UAAU,gBAAgB,kDAAkD,EAAE,WAAW,CAAC;AACvG,aAAO,EAAE,MAAM,EAAE,MAAM,GAAG,EAAE;AAAA,IAC9B;AAGA,UAAM,cAAc,MAAM,iBAAiB,WAAW,MAAM;AAC5D,QAAI,CAAC,aAAa;AAChB,aAAO,MAAM,UAAU,eAAe,oDAAoD,EAAE,WAAW,CAAC;AACxG,aAAO,EAAE,MAAM,EAAE,MAAM,GAAG,EAAE;AAAA,IAC9B;AAGA,UAAM,gBAAgB,iBAAiB,cAAc,aAAa;AAAA,MAChE,WAAW;AAAA,MACX,OAAO,YAAY;AAAA,MACnB,WAAW;AAAA,IACb,CAAC;AAED,WAAO,MAAM,UAAU,gBAAgB,kCAAkC;AAAA,MACvE;AAAA,MACA,aAAa,cAAc;AAAA,MAC3B,gBAAgB,cAAc,CAAC,GAAG;AAAA,IACpC,CAAC;AAED,QAAI,cAAc,WAAW,GAAG;AAC9B,aAAO,EAAE,MAAM,EAAE,MAAM,GAAG,EAAE;AAAA,IAC9B;AAGA,UAAM,WAAW,cAAc;AAAA,MAC7B,CAAC,MAAM,CAAC,wBAAwB,IAAI,EAAE,SAAS,MAAgB;AAAA,IACjE;AAEA,QAAI,SAA
S,WAAW,GAAG;AACzB,aAAO,MAAM,UAAU,gBAAgB,+CAA+C,EAAE,WAAW,CAAC;AACpG,aAAO,EAAE,MAAM,EAAE,MAAM,GAAG,EAAE;AAAA,IAC9B;AAGA,UAAM,aAAa,SAAS,MAAM,GAAG,SAAS;AAC9C,UAAM,WAAW,qBAAqB,UAAU;AAChD,UAAM,SAAS,SAAS,OAAO,CAAC,MAAM,EAAE,SAAS,OAAO;AAExD,QAAI,OAAO,WAAW,GAAG;AACvB,aAAO,EAAE,MAAM,EAAE,MAAM,GAAG,EAAE;AAAA,IAC9B;AAGA,UAAM,OAAO,mBAAmB,MAAM;AAEtC,UAAM,eAAe,eAAe,IAAI;AACxC,UAAM,SAAS,OAAO,IAAI,CAAC,MAAM,EAAE,KAAK;AAKxC,WAAO,KAAK,UAAU,gBAAgB,mBAAmB,OAAO,MAAM,YAAY,OAAO,KAAK,IAAI,CAAC,OAAO,YAAY,YAAY;AAAA,MAChI;AAAA,MACA,aAAa,OAAO;AAAA,MACpB,cAAc;AAAA,MACd,QAAQ,OAAO,IAAI,CAAC,MAAM,EAAE,MAAM,QAAQ,CAAC,CAAC;AAAA,MAC5C,kBAAkB;AAAA,MAClB,eAAe;AAAA,IACjB,CAAC;AAED,WAAO,EAAE,MAAM,EAAE,KAAK,EAAE;AAAA,EAC1B;AACF;AAUA,SAAS,mBACP,QACQ;AACR,QAAM,SAAS;AACf,MAAI,OAAO;AACX,MAAI,SAAS,eAAe,IAAI;AAEhC,aAAW,SAAS,QAAQ;AAC1B,UAAM,OAAO;AAAA,KAAQ,MAAM,KAAK,KAAK,MAAM,OAAO;AAClD,UAAM,aAAa,eAAe,IAAI;AAEtC,QAAI,SAAS,aAAa,0BAA2B;AAErD,YAAQ;AACR,cAAU;AAAA,EACZ;AAGA,SAAO,SAAS,SAAS,KAAK;AAChC;;;AClTO,SAAS,gBAAgB,QAAgB,oBAAiC;AAC/E,QAAM,KAAK,YAAY;AAEvB,QAAM,OAAO,GAAG,QAAQ;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,GAwBvB,EAAE,IAAI,OAAO,OAAO,OAAO,KAAK;AAEjC,SAAO;AACT;;;ACjEA,eAAsB,cAAc,KAA2C;AAC7E,QAAM,QAAQ,OAAO,IAAI,MAAM,KAAK,KAAK;AACzC,QAAM,OAAO,gBAAgB,KAAK;AAClC,SAAO,EAAE,MAAM,KAAK;AACtB;;;ACiBA,eAAsB,oBAAoB,UAA0C;AAClF,QAAM,YAAY,cAAc;AAEhC,MAAI,eAAmC;AACvC,MAAI;AACF,mBAAe,wBAAwB,iBAAiB,QAAQ,CAAC;AAAA,EACnE,QAAQ;AAAA,EAA4B;AAEpC,QAAM,YAA4B,UAAU,IAAI,CAAC,OAAO;AAAA,IACtD,MAAM,EAAE;AAAA,IACR,aAAa,EAAE;AAAA,IACf,QAAQ,EAAE;AAAA,IACV,SAAS,eAAe,aAAa,IAAI,EAAE,IAAI,IAAI;AAAA,IACnD,GAAI,EAAE,gBAAgB,EAAE,eAAe,EAAE,cAAc,IAAI,CAAC;AAAA,EAC9D,EAAE;AAEF,SAAO,EAAE,MAAM,EAAE,UAAU,EAAE;AAC/B;;;ACnCA,IAAM,wBAAwB;AAG9B,IAAM,2BAA2B;AAMjC,eAAsB,yBAAyB,UAA0C;AACvF,QAAM,SAAS,iBAAiB,QAAQ;AAExC,QAAM,EAAE,aAAa,eAAe,IAAI,uBAAuB;AAE/D,SAAO;AAAA,IACL,MAAM;AAAA,MACJ,UAAU,OAAO,UAAU;AAAA,MAC3B,OAAO,OAAO,UAAU;AAAA,MACxB,UAAU,OAAO,UAAU,YAAY;AAAA,MACvC;AAAA,MACA;AAAA,
MACA,QAAQ,gBAAgB,IAAI,wBAAwB;AAAA,IACtD;AAAA,EACF;AACF;AAEO,SAAS,uBAAuB,SAA0C;AAC/E,QAAM,UAAU,QAAQ,WAAW;AACnC,SAAO,EAAE,MAAM,QAAQ;AACzB;AAEO,SAAS,uBAAuB,SAA0C;AAC/E,QAAM,SAAS,QAAQ,WAAW;AAClC,SAAO,EAAE,MAAM,OAAO;AACxB;AAEA,eAAsB,yBAAyB,SAAmD;AAChG,QAAM,SAAS,MAAM,QAAQ,UAAU,oBAAoB;AAC3D,SAAO,EAAE,MAAM,OAAO;AACxB;AAEO,SAAS,4BAA4B,SAA0C;AACpF,QAAM,SAAS,QAAQ,aAAa;AACpC,SAAO,EAAE,MAAM,OAAO;AACxB;AAEA,eAAsB,4BAA4B,SAAmD;AACnG,QAAM,SAAS,MAAM,QAAQ,aAAa,oBAAoB;AAC9D,SAAO,EAAE,MAAM,OAAO;AACxB;;;ACJO,IAAM,oBAAoB;AAE1B,IAAM,sBAAN,cAAkC,MAAM;AAAA,EAC7C,YAAmB,gBAA+B,YAAoB;AACpE,UAAM,8BAA8B,iBAAiB,uBAAuB,aAAa,YAAY;AADpF;AAA+B;AAEhD,SAAK,OAAO;AAAA,EACd;AACF;;;AC3DA,eAAsB,sBACpB,SACwB;AACxB,QAAM,UAAU,MAAM,QAAQ,WAAW;AACzC,SAAO,EAAE,MAAM,QAAQ;AACzB;AAEA,eAAsB,uBACpB,SACwB;AACxB,QAAM,SAAS,MAAM,QAAQ,SAAS;AACtC,SAAO,EAAE,MAAM,OAAO;AACxB;AAEA,eAAsB,qBACpB,SACwB;AACxB,MAAI;AACF,UAAM,SAAS,MAAM,QAAQ,OAAO;AACpC,WAAO,EAAE,MAAM,OAAO;AAAA,EACxB,SAAS,KAAK;AACZ,QAAI,eAAe,qBAAqB;AACtC,aAAO;AAAA,QACL,QAAQ;AAAA,QACR,MAAM;AAAA,UACJ,OAAO;AAAA,UACP,gBAAgB,IAAI;AAAA,UACpB,YAAY,IAAI;AAAA,QAClB;AAAA,MACF;AAAA,IACF;AACA,UAAM;AAAA,EACR;AACF;AAEA,eAAsB,sBACpB,SACwB;AACxB,QAAM,SAAS,MAAM,QAAQ,QAAQ;AACrC,SAAO,EAAE,MAAM,OAAO;AACxB;AAEA,eAAsB,6BACpB,SACwB;AACxB,QAAM,SAAS,MAAM,QAAQ,eAAe;AAC5C,SAAO,EAAE,MAAM,OAAO;AACxB;;;ACpCA,SAAS,cAAAC,mBAAkB;AAqB3B,IAAM,gBAAgB;AAMf,IAAM,mBAAN,MAAuB;AAAA,EAC5B,YACU,aACA,mBACA,cACA,QACR;AAJQ;AACA;AACA;AACA;AAAA,EACP;AAAA;AAAA;AAAA;AAAA,EAMK,YAAY,MAAsB;AACxC,WAAOC,YAAW,sBAAsB,EAAE,OAAO,IAAI,EAAE,OAAO,KAAK;AAAA,EACrE;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAUA,MAAM,iBACJ,WACA,IACA,MACA,UACe;AACf,QAAI;AACF,YAAM,YAAY,MAAM,KAAK,kBAAkB,MAAM,IAAI;AACzD,UAAI,cAAc,MAAM;AACtB,aAAK,OAAO,KAAK,UAAU,oBAAoB,wCAAwC;AAAA,UACrF;AAAA,UACA;AAAA,QACF,CAAC;AACD;AAAA,MACF;AAEA,YAAM,OAAO,KAAK,YAAY,IAAI;AAElC,WAAK,YAAY,OAAO,WAAW,IAAI,WAAW;AAAA,QAChD,OAAO,KAAK,kBAAkB;AAAA,QAC9B,UAAU,KAAK,kBAAkB;AAAA,QACjC,YAAY,KAAK,kBAAkB;AAAA,QACnC,cAAc;AAAA,QACd,aAAa,aAAa;AAAA,QAC1B,iBAAiB;AAAA,MACnB,CAAC;AAED,WAAK,aAAa,aA
Aa,WAAW,EAAE;AAE5C,WAAK,OAAO,MAAM,UAAU,iBAAiB,iBAAiB,EAAE,WAAW,GAAG,CAAC;AAAA,IACjF,SAAS,KAAK;AACZ,WAAK,OAAO,KAAK,UAAU,iBAAiB,2BAA2B;AAAA,QACrE;AAAA,QACA;AAAA,QACA,OAAO,OAAO,GAAG;AAAA,MACnB,CAAC;AAAA,IACH;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,gBAAgB,WAAqB,IAAY,QAAsB;AACrE,QAAI;AACF,UAAI,WAAW,cAAe;AAE9B,WAAK,YAAY,OAAO,WAAW,EAAE;AACrC,WAAK,aAAa,cAAc,WAAW,EAAE;AAE7C,WAAK,OAAO,MAAM,UAAU,mBAAmB,kBAAkB;AAAA,QAC/D;AAAA,QACA;AAAA,QACA,QAAQ,UAAU,MAAM;AAAA,MAC1B,CAAC;AAAA,IACH,SAAS,KAAK;AACZ,WAAK,OAAO,KAAK,UAAU,mBAAmB,4CAA4C;AAAA,QACxF;AAAA,QACA;AAAA,QACA;AAAA,QACA,OAAO,OAAO,GAAG;AAAA,MACnB,CAAC;AAAA,IACH;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,UAAU,WAAgC,IAAkB;AAC1D,QAAI;AACF,WAAK,YAAY,OAAO,WAAW,EAAE;AAErC,WAAK,OAAO,MAAM,UAAU,mBAAmB,kBAAkB;AAAA,QAC/D;AAAA,QACA;AAAA,QACA,QAAQ;AAAA,MACV,CAAC;AAAA,IACH,SAAS,KAAK;AACZ,WAAK,OAAO,KAAK,UAAU,mBAAmB,qCAAqC;AAAA,QACjF;AAAA,QACA;AAAA,QACA,OAAO,OAAO,GAAG;AAAA,MACnB,CAAC;AAAA,IACH;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAUA,MAAM,UAAU,WAA6C;AAC3D,UAAM,QAAQ,KAAK,IAAI;AACvB,QAAI,WAAW;AACf,QAAI,mBAAmB;AACvB,QAAI,kBAAkB;AACtB,UAAM,eAAe,KAAK,kBAAkB;AAE5C,eAAW,aAAa,mBAAuB;AAE7C,YAAM,OAAO,KAAK,aAAa,kBAAkB,WAAW,SAAS;AAErE,iBAAW,OAAO,MAAM;AACtB,cAAM,YAAY,MAAM,KAAK,kBAAkB,MAAM,IAAI,IAAI;AAC7D,YAAI,cAAc,MAAM;AACtB,eAAK,OAAO,KAAK,UAAU,oBAAoB,qEAAqE;AAAA,YAClH;AAAA,YACA;AAAA,UACF,CAAC;AACD,iBAAO;AAAA,YACL;AAAA,YACA;AAAA,YACA;AAAA,YACA,aAAa,KAAK,IAAI,IAAI;AAAA,UAC5B;AAAA,QACF;AAEA,cAAM,OAAO,KAAK,YAAY,IAAI,IAAI;AAEtC,aAAK,YAAY,OAAO,WAAW,IAAI,IAAI,WAAW;AAAA,UACpD,OAAO;AAAA,UACP,UAAU,KAAK,kBAAkB;AAAA,UACjC,YAAY,KAAK,kBAAkB;AAAA,UACnC,cAAc;AAAA,UACd,aAAa,aAAa;AAAA,UAC1B,iBAAiB,IAAI;AAAA,QACvB,CAAC;AAED,aAAK,aAAa,aAAa,WAAW,IAAI,EAAE;AAChD;AAAA,MACF;AAGA,YAAM,WAAW,KAAK,YAAY,YAAY,WAAW,cAAc,SAAS;AAChF,UAAI,SAAS,SAAS,GAAG;AACvB,cAAM,UAAU,KAAK,aAAa,iBAAiB,WAAW,QAAQ;AACtE,cAAM,WAAW,IAAI,IAAI,QAAQ,IAAI,CAAC,MAAM,EAAE,EAAE,CAAC;AAEjD,mBAAW,UAAU,SAAS;AAC5B,gBAAM,YAAY,MAAM,KAAK,kBAAkB,MAAM,OAAO,IAAI;AAChE,cAAI,cAAc,MAAM;AACtB,iBAAK,OAAO,KAAK,UAAU,oBAAoB,
0EAA0E;AAAA,cACvH;AAAA,cACA;AAAA,YACF,CAAC;AACD,mBAAO;AAAA,cACL;AAAA,cACA;AAAA,cACA;AAAA,cACA,aAAa,KAAK,IAAI,IAAI;AAAA,YAC5B;AAAA,UACF;AAEA,eAAK,YAAY,OAAO,WAAW,OAAO,IAAI,WAAW;AAAA,YACvD,OAAO;AAAA,YACP,UAAU,KAAK,kBAAkB;AAAA,YACjC,YAAY,KAAK,kBAAkB;AAAA,YACnC,cAAc,KAAK,YAAY,OAAO,IAAI;AAAA,YAC1C,aAAa,aAAa;AAAA,YAC1B,iBAAiB,OAAO;AAAA,UAC1B,CAAC;AAED;AAAA,QACF;AAGA,mBAAW,WAAW,UAAU;AAC9B,cAAI,CAAC,SAAS,IAAI,OAAO,GAAG;AAC1B,iBAAK,YAAY,OAAO,WAAW,OAAO;AAC1C,iBAAK,OAAO,KAAK,UAAU,mBAAmB,+BAA+B;AAAA,cAC3E;AAAA,cACA,IAAI;AAAA,YACN,CAAC;AACD;AAAA,UACF;AAAA,QACF;AAAA,MACF;AAGA,yBAAmB,KAAK,aAAa,SAAS;AAAA,IAChD;AAEA,UAAM,cAAc,KAAK,IAAI,IAAI;AAEjC,QAAI,WAAW,KAAK,mBAAmB,KAAK,kBAAkB,GAAG;AAC/D,WAAK,OAAO,KAAK,UAAU,qBAAqB,6BAA6B;AAAA,QAC3E;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,MACF,CAAC;AAAA,IACH;AAEA,WAAO,EAAE,UAAU,kBAAkB,iBAAiB,YAAY;AAAA,EACpE;AAAA;AAAA;AAAA;AAAA,EAKA,eAA4C;AAC1C,QAAI,kBAAkB;AACtB,eAAW,aAAa,mBAAuB;AAC7C,yBAAmB,KAAK,aAAa,SAAS;AAAA,IAChD;AACA,WAAO,EAAE,gBAAgB;AAAA,EAC3B;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAUA,aAAiC;AAC/B,UAAM,EAAE,QAAQ,IAAI,KAAK,YAAY,MAAM;AAC3C,SAAK,aAAa,iBAAiB;AAEnC,SAAK,OAAO,KAAK,UAAU,mBAAmB,mBAAmB,EAAE,QAAQ,CAAC;AAE5E,WAAO,EAAE,QAAQ,QAAQ;AAAA,EAC3B;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,aAAa,WAAoD;AACrE,QAAI,aAAa;AACjB,UAAM,eAAe,KAAK,kBAAkB;AAE5C,eAAW,aAAa,mBAAuB;AAC7C,YAAM,WAAW,KAAK,YAAY,YAAY,WAAW,cAAc,SAAS;AAChF,UAAI,SAAS,WAAW,EAAG;AAE3B,YAAM,UAAU,KAAK,aAAa,iBAAiB,WAAW,QAAQ;AAEtE,iBAAW,UAAU,SAAS;AAC5B,cAAM,YAAY,MAAM,KAAK,kBAAkB,MAAM,OAAO,IAAI;AAChE,YAAI,cAAc,MAAM;AACtB,eAAK,OAAO,KAAK,UAAU,oBAAoB,wCAAwC;AAAA,YACrF;AAAA,YACA;AAAA,UACF,CAAC;AACD,iBAAO,EAAE,WAAW;AAAA,QACtB;AAEA,cAAM,OAAO,KAAK,YAAY,OAAO,IAAI;AAEzC,aAAK,YAAY,OAAO,WAAW,OAAO,IAAI,WAAW;AAAA,UACvD,OAAO;AAAA,UACP,UAAU,KAAK,kBAAkB;AAAA,UACjC,YAAY,KAAK,kBAAkB;AAAA,UACnC,cAAc;AAAA,UACd,aAAa,aAAa;AAAA,UAC1B,iBAAiB,OAAO;AAAA,QAC1B,CAAC;AAED;AAAA,MACF;AAAA,IACF;AAEA,WAAO,EAAE,WAAW;AAAA,EACtB;AAAA;AAAA;AAAA;AAAA,EAKA,aAA+B;AAC7B,UAAM,QAAQ,KAAK,YAAY,MAAM;AAErC,UAAM,UAAkC,CAAC;AACzC,eAAW,aAAa,mBAAuB;AAC7C,cAAQ
,SAAS,IAAI,KAAK,aAAa,gBAAgB,SAAS;AAAA,IAClE;AAEA,WAAO;AAAA,MACL,GAAG;AAAA,MACH;AAAA,MACA,UAAU;AAAA,QACR,MAAM,KAAK,kBAAkB;AAAA,QAC7B,OAAO,KAAK,kBAAkB;AAAA,QAC9B,WAAW;AAAA;AAAA,MACb;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,WAAW,MAAwC;AACvD,WAAO,KAAK,kBAAkB,MAAM,IAAI;AAAA,EAC1C;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,cAAc,OAAiB,SAKN;AACvB,WAAO,KAAK,YAAY,OAAO,OAAO,OAAO;AAAA,EAC/C;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,mBACE,WACA,WACyD;AACzD,WAAO,KAAK,YAAY,mBAAmB,WAAW,SAAS;AAAA,EACjE;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAcQ,aAAa,WAAwC;AAC3D,UAAM,cAAc,KAAK,YAAY,eAAe,SAAS;AAC7D,QAAI,YAAY,WAAW,EAAG,QAAO;AAErC,UAAM,YAAY,KAAK,aAAa,mBAAmB,SAAS;AAChE,UAAM,YAAY,IAAI,IAAI,SAAS;AACnC,QAAI,UAAU;AAEd,eAAW,SAAS,aAAa;AAC/B,UAAI,CAAC,UAAU,IAAI,KAAK,GAAG;AACzB,aAAK,YAAY,OAAO,WAAW,KAAK;AACxC,aAAK,OAAO,KAAK,UAAU,mBAAmB,yBAAyB;AAAA,UACrE;AAAA,UACA,IAAI;AAAA,QACN,CAAC;AACD;AAAA,MACF;AAAA,IACF;AAEA,WAAO;AAAA,EACT;AACF;;;AC3ZA,OAAO,cAAc;AAErB,YAAY,eAAe;AAe3B,IAAM,uBAAuB;AAG7B,IAAM,+BAA+B;AAGrC,IAAM,qBAAqB;AAG3B,IAAM,wBAAwB;AAG9B,IAAM,4BAA4B;AAGlC,IAAM,qBAAqB,oBAAI,IAAI,CAAC,SAAS,YAAY,WAAW,CAAC;AAMrE,SAAS,2BAA2B,UAA0B;AAC5D,SAAO,IAAI;AACb;AAMA,IAAM,iBAAiB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAavB,IAAM,uBAAuB;AAAA;AAAA;AAK7B,SAAS,YAAY,WAAwC;AAC3D,SAAO,0CAA0C,SAAS;AAAA;AAAA,sBAEtC,oBAAoB;AAAA;AAE1C;AAMO,IAAM,uBAAN,MAAkD;AAAA,EAC/C;AAAA;AAAA,EAGA,iBAAiB,oBAAI,IAAuB;AAAA,EAC5C,iBAAiB,oBAAI,IAAuB;AAAA,EAC5C;AAAA,EACA;AAAA,EACA,cAAc,oBAAI,IAAuB;AAAA,EACzC;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EAER,YAAY,QAAiB;AAC3B,SAAK,KAAK,IAAI,SAAS,UAAU,UAAU;AAC3C,IAAU,eAAK,KAAK,EAAE;AACtB,SAAK,GAAG,OAAO,oBAAoB;AACnC,SAAK,aAAa;AAClB,SAAK,kBAAkB;AAAA,EACzB;AAAA;AAAA;AAAA;AAAA,EAMQ,eAAqB;AAC3B,SAAK,GAAG,KAAK,cAAc;AAC3B,SAAK,GAAG,KAAK,oBAAoB;AACjC,eAAW,MAAM,mBAAuB;AACtC,WAAK,GAAG,KAAK,YAAY,EAAE,CAAC;AAAA,IAC9B;AAAA,EACF;AAAA,EAEQ,oBAA0B;AAChC,SAAK,iBAAiB,KAAK,GAAG,QAAQ;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,KAUrC;AAED,SAAK,iBAAiB,KAAK,GAAG;AAAA,MAC5B;AAAA,IACF;AACA,SAAK,iBAAiB
,KAAK,GAAG;AAAA,MAC5B;AAAA,IACF;AACA,SAAK,kBAAkB,KAAK,GAAG;AAAA,MAC7B;AAAA,IACF;AACA,SAAK,eAAe,KAAK,GAAG;AAAA,MAC1B;AAAA,IACF;AACA,SAAK,kBAAkB,KAAK,GAAG;AAAA,MAC7B;AAAA,IACF;AAGA,eAAW,MAAM,mBAAuB;AACtC,WAAK,eAAe;AAAA,QAClB;AAAA,QACA,KAAK,GAAG,QAAQ,mBAAmB,EAAE,sBAAsB;AAAA,MAC7D;AACA,WAAK,eAAe;AAAA,QAClB;AAAA,QACA,KAAK,GAAG,QAAQ,mBAAmB,EAAE,sCAAsC;AAAA,MAC7E;AACA,WAAK,YAAY;AAAA,QACf;AAAA,QACA,KAAK,GAAG,QAAQ;AAAA;AAAA;AAAA,qBAGH,EAAE;AAAA;AAAA,iCAEU,EAAE;AAAA;AAAA;AAAA;AAAA,SAI1B;AAAA,MACH;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAMA,OACE,WACA,IACA,WACA,UACM;AACN,SAAK,kBAAkB,SAAS;AAChC,UAAM,KAAK;AAEX,UAAM,MAAM,IAAI,aAAa,SAAS;AAEtC,UAAM,MAAM,KAAK,GAAG,YAAY,MAAM;AAEpC,WAAK,eAAe,IAAI,EAAE,EAAG,IAAI,EAAE;AACnC,WAAK,eAAe,IAAI,EAAE,EAAG,IAAI,IAAI,GAAG;AAGxC,WAAK,eAAe,IAAI;AAAA,QACtB,WAAW;AAAA,QACX,WAAW;AAAA,QACX,OAAQ,WAAW,OAAO,KAAgB;AAAA,QAC1C,UAAW,WAAW,UAAU,KAAgB;AAAA,QAChD,YAAY,UAAU;AAAA,QACtB,cAAe,WAAW,cAAc,KAAgB;AAAA,QACxD,aAAc,WAAW,aAAa,KAAgB,KAAK,IAAI;AAAA,QAC/D,iBAAiB,WAAW,iBAAiB,IACzC,KAAK,UAAU,SAAS,iBAAiB,CAAC,IAC1C;AAAA,MACN,CAAC;AAAA,IACH,CAAC;AAED,QAAI;AAAA,EACN;AAAA,EAEA,OAAO,WAAmB,IAAkB;AAC1C,SAAK,kBAAkB,SAAS;AAChC,UAAM,KAAK;AAEX,UAAM,MAAM,KAAK,GAAG,YAAY,MAAM;AACpC,WAAK,eAAe,IAAI,EAAE,EAAG,IAAI,EAAE;AACnC,WAAK,eAAe,IAAI,IAAI,EAAE;AAAA,IAChC,CAAC;AAED,QAAI;AAAA,EACN;AAAA,EAEA,MAAM,WAAyC;AAC7C,QAAI,UAAU;AAEd,UAAM,UAAU,YACZ,CAAC,KAAK,mBAAmB,SAAS,CAAC,IACnC,CAAC,GAAG,iBAAqB;AAE7B,UAAM,MAAM,KAAK,GAAG,YAAY,MAAM;AACpC,iBAAW,MAAM,SAAS;AAExB,cAAM,WAAW,KAAK,GACnB,QAAQ,oEAAoE,EAC5E,IAAI,EAAE;AACT,mBAAW,SAAS;AAGpB,aAAK,GAAG,KAAK,mBAAmB,EAAE,EAAE;AAGpC,aAAK,GACF,QAAQ,oDAAoD,EAC5D,IAAI,EAAE;AAAA,MACX;AAAA,IACF,CAAC;AAED,QAAI;AACJ,WAAO,EAAE,QAAQ;AAAA,EACnB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAUA,OACE,OACA,SAMsB;AACtB,UAAM,QAAQ,SAAS,SAAS;AAChC,UAAM,YAAY,SAAS,aAAa;AACxC,UAAM,WAAW,IAAI,aAAa,KAAK;AAEvC,UAAM,UAAU,SAAS,YACrB,CAAC,KAAK,mBAAmB,QAAQ,SAAS,CAAC,IAC3C,CAAC,GAAG,iBAAqB;AAE7B,UAAM,aAAa,SAAS,WAAW,OAAO,KAAK,QAAQ,OAAO,EAAE,SAAS;AAC7E,UAAM,UAAgC,CAAC;AAEvC,eAAW,MAAM,
SAAS;AACxB,UAAI;AAEJ,UAAI,YAAY;AAEd,cAAM,EAAE,KAAK,OAAO,IAAI,KAAK;AAAA,UAC3B;AAAA,UACA,QAAS;AAAA,UACT;AAAA,QACF;AACA,cAAM,OAAO,KAAK,GAAG,QAAQ,GAAG;AAChC,eAAO,KAAK,IAAI,UAAU,OAAO,GAAG,MAAM;AAAA,MAC5C,OAAO;AACL,eAAO,KAAK,YAAY,IAAI,EAAE,EAAG,IAAI,UAAU,KAAK;AAAA,MACtD;AAEA,iBAAW,OAAO,MAAM;AACtB,cAAM,aAAa,2BAA2B,IAAI,QAAkB;AACpE,YAAI,cAAc,WAAW;AAC3B,kBAAQ,KAAK;AAAA,YACX,IAAI,IAAI;AAAA,YACR,WAAW;AAAA,YACX;AAAA,YACA,UAAU;AAAA,cACR,OAAO,IAAI;AAAA,cACX,UAAU,IAAI;AAAA,cACd,cAAc,IAAI;AAAA,cAClB,aAAa,IAAI;AAAA,cACjB,GAAI,IAAI,kBAAkB,KAAK,MAAM,IAAI,eAAyB,IAAI,CAAC;AAAA,YACzE;AAAA,UACF,CAAC;AAAA,QACH;AAAA,MACF;AAAA,IACF;AAGA,YAAQ,KAAK,CAAC,GAAG,MAAM,EAAE,aAAa,EAAE,UAAU;AAClD,WAAO,QAAQ,MAAM,GAAG,KAAK;AAAA,EAC/B;AAAA,EAEA,MAAM,WAAsC;AAC1C,UAAM,UAAU,YACZ,CAAC,KAAK,mBAAmB,SAAS,CAAC,IACnC,CAAC,GAAG,iBAAqB;AAE7B,QAAI,QAAQ;AACZ,UAAM,eAAoE,CAAC;AAC3E,UAAM,SAAiC,CAAC;AAExC,eAAW,MAAM,SAAS;AACxB,YAAM,WAAW,KAAK,eAAe,IAAI,EAAE;AAC3C,YAAM,YAAY,KAAK,gBAAgB,IAAI,EAAE;AAK7C,UAAI,QAAQ;AACZ,UAAI,gBAAgB;AACpB,iBAAW,MAAM,WAAW;AAC1B,eAAO,GAAG,KAAK,KAAK,OAAO,GAAG,KAAK,KAAK,KAAK,GAAG;AAChD,YAAI,GAAG,MAAM,cAAe,iBAAgB,GAAG;AAAA,MACjD;AACA,cAAQ,SAAS,MAAM;AACvB,UAAI,QAAQ,EAAG,SAAQ;AAEvB,mBAAa,EAAE,IAAI,EAAE,UAAU,SAAS,KAAK,MAAM;AACnD,eAAS,SAAS;AAAA,IACpB;AAEA,WAAO,EAAE,OAAO,cAAc,OAAO;AAAA,EACvC;AAAA,EAEA,YAAY,WAAmB,cAAsB,OAAyB;AAC5E,SAAK,kBAAkB,SAAS;AAChC,UAAM,OAAO,KAAK,aAAa,IAAI,WAAW,cAAc,KAAK;AACjE,WAAO,KAAK,IAAI,CAAC,MAAM,EAAE,SAAS;AAAA,EACpC;AAAA,EAEA,eAAe,WAA6B;AAC1C,SAAK,kBAAkB,SAAS;AAChC,UAAM,OAAO,KAAK,gBAAgB,IAAI,SAAS;AAC/C,WAAO,KAAK,IAAI,CAAC,MAAM,EAAE,SAAS;AAAA,EACpC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASA,mBACE,WACA,YAAoB,KACqC;AACzD,SAAK,kBAAkB,SAAS;AAChC,UAAM,KAAK;AAGX,UAAM,UAAU,KAAK,GAAG;AAAA,MACtB,wCAAwC,EAAE;AAAA,IAC5C,EAAE,IAAI;AAEN,QAAI,QAAQ,SAAS,EAAG,QAAO,CAAC;AAEhC,UAAM,QAAiE,CAAC;AACxE,UAAM,OAAO,oBAAI,IAAY;AAG7B,UAAM,aAAa,KAAK,YAAY,IAAI,EAAE;AAC1C,eAAW,OAAO,SAAS;AACzB,YAAM,UAAU,WAAW;AAAA,QACzB,IAAI;AAAA;AAAA,QACJ,QAAQ;AAAA;AAAA,MACV;AAEA,iBAAW,SAAS,SAAS;AAC3B,YAAI,MAAM,cAAc,IAAI,UAA
W;AACvC,cAAM,UAAU,CAAC,IAAI,WAAW,MAAM,SAAS,EAAE,KAAK,EAAE,KAAK,GAAG;AAChE,YAAI,KAAK,IAAI,OAAO,EAAG;AACvB,aAAK,IAAI,OAAO;AAEhB,cAAM,aAAa,2BAA2B,MAAM,QAAQ;AAC5D,YAAI,cAAc,WAAW;AAC3B,gBAAM,KAAK;AAAA,YACT,KAAK,IAAI;AAAA,YACT,KAAK,MAAM;AAAA,YACX,YAAY,KAAK,MAAM,aAAa,GAAI,IAAI;AAAA,UAC9C,CAAC;AAAA,QACH;AAAA,MACF;AAAA,IACF;AAEA,UAAM,KAAK,CAAC,GAAG,MAAM,EAAE,aAAa,EAAE,UAAU;AAChD,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAMA,QAAc;AACZ,SAAK,GAAG,MAAM;AAAA,EAChB;AAAA;AAAA;AAAA;AAAA,EAMQ,kBAAkB,WAAyB;AACjD,QAAI,CAAE,kBAA4C,SAAS,SAAS,GAAG;AACrE,YAAM,IAAI;AAAA,QACR,sBAAsB,SAAS,sBAAsB,kBAAsB,KAAK,IAAI,CAAC;AAAA,MACvF;AAAA,IACF;AAAA,EACF;AAAA,EAEQ,mBAAmB,WAAwC;AACjE,SAAK,kBAAkB,SAAS;AAChC,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA,EAMQ,yBACN,WACA,SACA,QACoC;AACpC,UAAM,aAAuB,CAAC;AAC9B,UAAM,SAAoB,CAAC;AAE3B,eAAW,CAAC,KAAK,KAAK,KAAK,OAAO,QAAQ,OAAO,GAAG;AAClD,UAAI,mBAAmB,IAAI,GAAG,GAAG;AAC/B,mBAAW,KAAK,MAAM,GAAG,MAAM;AAC/B,eAAO,KAAK,KAAK;AAAA,MACnB;AAAA,IACF;AAEA,UAAM,cACJ,WAAW,SAAS,IAAI,SAAS,WAAW,KAAK,OAAO,CAAC,KAAK;AAEhE,UAAM,MAAM;AAAA;AAAA;AAAA,mBAGG,SAAS;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,6BASC,SAAS;AAAA,QAC9B,WAAW;AAAA;AAGf,WAAO,EAAE,KAAK,OAAO;AAAA,EACvB;AACF;;;ACjeA,eAAsB,kBACpB,SACA,MAC4B;AAC5B,QAAM,MAAM,MAAM,QAAQ,MAAM,IAAI;AACpC,SAAO;AAAA,IACL,WAAW,UAAU,IAAI,SAAS;AAAA,IAClC,OAAO,IAAI;AAAA,IACX,YAAY,IAAI;AAAA,EAClB;AACF;AAEA,SAAS,UAAU,KAAyB;AAC1C,QAAM,YAAY,KAAK,KAAK,IAAI,OAAO,CAAC,KAAK,MAAM,MAAM,IAAI,GAAG,CAAC,CAAC;AAClE,MAAI,cAAc,EAAG,QAAO;AAC5B,SAAO,IAAI,IAAI,CAAC,MAAM,IAAI,SAAS;AACrC;;;ACHA,IAAM,4BAA4B;AAGlC,IAAM,qBAAqB;AAG3B,IAAM,mBAAmB,oBAAI,IAAI,CAAC,QAAQ,CAAC;AAO3C,SAAS,mBAAmB,OAAe,UAA0B;AACnE,MAAI,iBAAiB,IAAI,QAAQ,KAAK,CAAC,MAAM,SAAS,GAAG,GAAG;AAC1D,WAAO,QAAQ;AAAA,EACjB;AACA,SAAO;AACT;AAEO,IAAM,2BAAN,MAAmE;AAAA,EASxE,YACU,UACR,QACA;AAFQ;AAGR,SAAK,QAAQ,mBAAmB,OAAO,OAAO,OAAO,QAAQ;AAC7D,SAAK,eAAe,OAAO;AAC3B,SAAK,aAAa;AAAA,EACpB;AAAA,EAfS;AAAA,EACA;AAAA,EACA;AAAA;AAAA,EAGD,kBAAkC;AAAA,EAClC,oBAAoB;AAAA,EAW5B,MAAM,MAAM,MAAwC;AAClD,QAAI;AACF,YAAM,OAAO,MAAM,KAAK,kBAAkB;AAC1C,U
AAI,CAAC,KAAM,QAAO;AAClB,YAAM,SAAS,MAAM,kBAAkB,KAAK,UAAU,IAAI;AAC1D,aAAO,OAAO;AAAA,IAChB,QAAQ;AAEN,WAAK,kBAAkB;AACvB,aAAO;AAAA,IACT;AAAA,EACF;AAAA;AAAA,EAGA,MAAc,oBAAsC;AAClD,UAAM,MAAM,KAAK,IAAI;AACrB,QAAI,KAAK,oBAAoB,QAAS,MAAM,KAAK,oBAAqB,2BAA2B;AAC/F,aAAO,KAAK;AAAA,IACd;AACA,SAAK,kBAAkB,MAAM,KAAK,SAAS,YAAY;AACvD,SAAK,oBAAoB;AACzB,WAAO,KAAK;AAAA,EACd;AACF;;;ACnDA,IAAMC,iBAAgB;AAGtB,SAAS,gBAAgB,KAA8C;AACrE,SAAO;AAAA,IACL,GAAI,IAAI,gBAAgB,OAAO,EAAE,cAAc,IAAI,aAAuB,IAAI,CAAC;AAAA,EACjF;AACF;AAGA,SAAS,cAAc,KAA8C;AACnE,SAAO;AAAA,IACL,GAAI,IAAI,UAAU,OAAO,EAAE,QAAQ,IAAI,OAAiB,IAAI,CAAC;AAAA,IAC7D,GAAI,IAAI,cAAc,OAAO,EAAE,YAAY,IAAI,WAAqB,IAAI,CAAC;AAAA,IACzE,GAAI,IAAI,oBAAoB,OAAO,EAAE,kBAAkB,IAAI,iBAA2B,IAAI,CAAC;AAAA,EAC7F;AACF;AAGA,SAAS,gBAAgC;AACvC,SAAO,CAAC;AACV;AAGA,SAAS,aAAa,KAA8C;AAClE,SAAO;AAAA,IACL,GAAI,IAAI,cAAc,OAAO,EAAE,YAAY,IAAI,WAAqB,IAAI,CAAC;AAAA,IACzE,GAAI,IAAI,eAAe,OAAO,EAAE,aAAa,IAAI,YAAsB,IAAI,CAAC;AAAA,EAC9E;AACF;AAGA,SAAS,oBAAoB,KAA8C;AACzE,SAAO;AAAA,IACL,GAAI,IAAI,UAAU,OAAO,EAAE,QAAQ,IAAI,OAAiB,IAAI,CAAC;AAAA,IAC7D,GAAI,IAAI,QAAQ,OAAO,EAAE,MAAM,IAAI,KAAe,IAAI,CAAC;AAAA,EACzD;AACF;AAGA,SAAS,YAAY,WAA4B,KAA8C;AAC7F,UAAQ,WAAW;AAAA,IACjB,KAAK;AACH,aAAO,gBAAgB,GAAG;AAAA,IAC5B,KAAK;AACH,aAAO,cAAc,GAAG;AAAA,IAC1B,KAAK;AACH,aAAO,aAAa,GAAG;AAAA,IACzB,KAAK;AACH,aAAO,cAAc;AAAA,IACvB,KAAK;AACH,aAAO,oBAAoB,GAAG;AAAA,EAClC;AACF;AAMO,IAAM,qBAAN,MAA2D;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOhE,kBAAkB,WAAmB,OAIlC;AACD,qBAAqB,SAAS;AAE9B,QAAI,cAAc,UAAU;AAC1B,aAAO,KAAK,0BAA0B,KAAK;AAAA,IAC7C;AAEA,QAAI,cAAc,iBAAiB;AACjC,aAAO,KAAK,gCAAgC,KAAK;AAAA,IACnD;AAGA,UAAM,OAAO,cAAc,WAAW,KAAK;AAC3C,UAAM,KAAK,YAAY;AACvB,WAAO,KAAK,IAAI,CAAC,QAAQ;AACvB,YAAM,UAAU,GAAG,QAAQ,iBAAiB,SAAS,eAAe,EAAE,IAAI,IAAI,EAAE;AAChF,aAAO;AAAA,QACL,IAAI,OAAO,IAAI,EAAE;AAAA,QACjB,MAAM,IAAI;AAAA,QACV,UAAU,YAAY,WAA8B,OAAO;AAAA,MAC7D;AAAA,IACF,CAAC;AAAA,EACH;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASA,mBAAmB,WAA6B;AAC9C,qBAAqB,SAAS;AAC9B,UAAM,KAAK,YAAY;AAEvB,YAAQ,WAAW;AAAA,MACjB,KAAK,YAAY;AACf,cAAM,OAAO,GAAG;A
AAA,UACd;AAAA,QACF,EAAE,IAAI;AACN,eAAO,KAAK,IAAI,CAAC,MAAM,EAAE,EAAE;AAAA,MAC7B;AAAA,MACA,KAAK,UAAU;AACb,cAAM,OAAO,GAAG;AAAA,UACd;AAAA,QACF,EAAE,IAAIA,cAAa;AACnB,eAAO,KAAK,IAAI,CAAC,MAAM,EAAE,EAAE;AAAA,MAC7B;AAAA,MACA,KAAK,SAAS;AACZ,cAAM,OAAO,GAAG;AAAA,UACd;AAAA,QACF,EAAE,IAAI;AACN,eAAO,KAAK,IAAI,CAAC,MAAM,EAAE,EAAE;AAAA,MAC7B;AAAA,MACA,KAAK,aAAa;AAChB,cAAM,OAAO,GAAG;AAAA,UACd;AAAA,QACF,EAAE,IAAI;AACN,eAAO,KAAK,IAAI,CAAC,MAAM,EAAE,EAAE;AAAA,MAC7B;AAAA,MACA,KAAK,iBAAiB;AACpB,cAAM,OAAO,GAAG;AAAA,UACd;AAAA,QACF,EAAE,IAAIA,cAAa;AACnB,eAAO,KAAK,IAAI,CAAC,MAAM,EAAE,EAAE;AAAA,MAC7B;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,iBAAiB,WAAmB,KAIjC;AACD,qBAAqB,SAAS;AAE9B,QAAI,IAAI,WAAW,EAAG,QAAO,CAAC;AAE9B,UAAM,KAAK,YAAY;AACvB,UAAM,UAAU,wBAAwB,SAA4B;AACpE,UAAM,eAAe,IAAI,IAAI,MAAM,GAAG,EAAE,KAAK,IAAI;AAEjD,UAAM,OAAO,GAAG;AAAA,MACd,aAAa,OAAO,iBAAiB,SAAS,iBAAiB,YAAY;AAAA,IAC7E,EAAE,IAAI,GAAG,GAAG;AAEZ,WAAO,KAAK,IAAI,CAAC,SAAS;AAAA,MACxB,IAAI,OAAO,IAAI,EAAE;AAAA,MACjB,MAAM,IAAI;AAAA,MACV,UAAU,YAAY,WAA8B,GAAG;AAAA,IACzD,EAAE;AAAA,EACJ;AAAA;AAAA,EAGA,aAAa,WAAmB,IAAkB;AAChD,iBAAe,WAAW,EAAE;AAAA,EAC9B;AAAA;AAAA,EAGA,cAAc,WAAmB,IAAkB;AACjD,kBAAgB,WAAW,EAAE;AAAA,EAC/B;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,iBAAiB,WAA0B;AACzC,UAAM,KAAK,YAAY;AAEvB,QAAI,cAAc,QAAW;AAC3B,uBAAqB,SAAS;AAC9B,SAAG,QAAQ,UAAU,SAAS,mBAAmB,EAAE,IAAI;AACvD;AAAA,IACF;AAEA,eAAW,SAAS,mBAAmB;AACrC,SAAG,QAAQ,UAAU,KAAK,mBAAmB,EAAE,IAAI;AAAA,IACrD;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,gBAAgB,WAA2B;AACzC,qBAAqB,SAAS;AAC9B,UAAM,KAAK,YAAY;AAEvB,UAAM,gBAAgB,cAAc,aAAa,6BAA6B;AAC9E,UAAM,eAAgB,cAAc,YAAY,cAAc,kBAAmB,kBAAkBA,cAAa,MAAM;AAEtH,UAAM,MAAM,GAAG;AAAA,MACb,+BAA+B,SAAS,sBAAsB,aAAa,GAAG,YAAY;AAAA,IAC5F,EAAE,IAAI;AAEN,WAAO,OAAO,IAAI,GAAG;AAAA,EACvB;AAAA;AAAA;AAAA;AAAA;AAAA,EAOQ,0BAA0B,OAI/B;AACD,UAAM,KAAK,YAAY;AACvB,UAAM,OAAO,GAAG;AAAA,MACd;AAAA;AAAA;AAAA;AAAA;AAAA,IAKF,EAAE,IAAIA,gBAAe,KAAK;AAE1B,WAAO,KAAK,IAAI,CAAC,SAAS;AAAA,MACxB,IAAI,OAAO,IAAI,EAAE;AAAA,MACjB,MAAM,IAAI;AAAA,MACV,UAAU,cAAc,GAAG;AAAA,IAC7B,EAAE;AAAA,
EACJ;AAAA;AAAA,EAGQ,gCAAgC,OAIrC;AACD,UAAM,KAAK,YAAY;AACvB,UAAM,OAAO,GAAG;AAAA,MACd;AAAA;AAAA;AAAA;AAAA;AAAA,IAKF,EAAE,IAAIA,gBAAe,KAAK;AAE1B,WAAO,KAAK,IAAI,CAAC,SAAS;AAAA,MACxB,IAAI,OAAO,IAAI,EAAE;AAAA,MACjB,MAAM,IAAI;AAAA,MACV,UAAU,oBAAoB,GAAG;AAAA,IACnC,EAAE;AAAA,EACJ;AACF;;;AC/RA,OAAOC,UAAQ;;;ACOf,OAAOC,UAAQ;AA0Cf,SAAS,aAAgB,MAAiB;AACxC,QAAM,KAAK,YAAY;AACvB,SAAO,GAAG,OAAO,MAAM,EAAE,QAAQ,KAAK,CAAC;AACzC;AAEA,SAAS,aAAa,UAA0B;AAC9C,MAAI;AACF,WAAOC,KAAG,SAAS,QAAQ,EAAE;AAAA,EAC/B,SAAS,KAAK;AACZ,QAAK,IAA8B,SAAS,SAAU,QAAO;AAC7D,UAAM;AAAA,EACR;AACF;AAMO,SAAS,qBAAqB,QAAmC;AACtE,QAAM,aAAa,aAAa,MAAM;AACtC,QAAM,iBAAiB,aAAa,SAAS,MAAM;AAEnD,QAAM,YAAY,OAAO,aAAqB,WAAW,CAAC;AAC1D,QAAM,aAAa,OAAO,aAAqB,YAAY,CAAC;AAC5D,QAAM,iBAAiB,OAAO,aAAqB,gBAAgB,CAAC;AACpE,QAAM,oBAAoB,aAAa,IAAK,iBAAiB,aAAc,MAAM;AAEjF,SAAO;AAAA,IACL,MAAM;AAAA,IACN;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF;AACF;AAQA,SAAS,WAAW,MAAsB;AACxC,SAAO,MAAM,KAAK,QAAQ,MAAM,IAAI,IAAI;AAC1C;AAEO,SAAS,qBAA0C;AACxD,QAAM,KAAK,YAAY;AAGvB,QAAM,YAAY,GAAG;AAAA,IACnB;AAAA,EACF,EAAE,IAAI;AAMN,QAAM,gBAAgB,UAAU,OAAO,CAAC,QAAQ,EAAE,IAAI,OAAO,IAAI,WAAW,gBAAgB,CAAC;AAG7F,QAAM,iBAAiB,GAAG;AAAA,IACxB;AAAA,EACF,EAAE,IAAI;AACN,QAAM,oBAAoB,IAAI,IAAI,eAAe,IAAI,CAAC,MAAM,CAAC,EAAE,UAAU,OAAO,EAAE,GAAG,CAAC,CAAC,CAAC;AAMxF,QAAM,gBAAgB,oBAAI,IAAoB;AAC9C,MAAI,cAAc,SAAS,GAAG;AAC5B,UAAM,WAAW,cACd,IAAI,CAAC,GAAG,MAAM,qCAAqC,WAAW,cAAc,CAAC,EAAE,IAAI,CAAC,EAAE,EACtF,KAAK,aAAa;AACrB,UAAM,SAAS,cAAc,IAAI,CAAC,QAAQ,IAAI,IAAI;AAClD,QAAI;AACF,YAAM,OAAO,GAAG,QAAQ,QAAQ,EAAE,IAAI,GAAG,MAAM;AAC/C,iBAAW,KAAK,MAAM;AACpB,sBAAc,IAAI,EAAE,GAAG,OAAO,EAAE,KAAK,CAAC,CAAC;AAAA,MACzC;AAAA,IACF,QAAQ;AAIN,iBAAW,OAAO,eAAe;AAC/B,YAAI;AACF,gBAAM,IAAI,GAAG,QAAQ,6BAA6B,WAAW,IAAI,IAAI,CAAC,EAAE,EAAE,IAAI;AAC9E,wBAAc,IAAI,IAAI,MAAM,OAAO,EAAE,KAAK,CAAC,CAAC;AAAA,QAC9C,QAAQ;AACN,wBAAc,IAAI,IAAI,MAAM,CAAC;AAAA,QAC/B;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAEA,SAAO,cAAc,IAAI,CAAC,SAAS;AAAA,IACjC,MAAM,IAAI;AAAA,IACV,MAAM,cAAc,IAAI,IAAI,IAAI,KAAK;AAAA,IACrC,aAAa,kBAAkB,IAAI,IAAI,IA
AI,KAAK;AAAA,IAChD,SAAS,IAAI,OAAO,IAAI,YAAY,EAAE,SAAS,MAAM;AAAA,EACvD,EAAE;AACJ;AAEO,SAAS,iBAA8B;AAC5C,QAAM,KAAK,YAAY;AACvB,QAAM,OAAO,GAAG;AAAA,IACd;AAAA,EACF,EAAE,IAAI;AAEN,SAAO,KAAK,IAAI,CAAC,MAAM;AACrB,UAAM,OAAyB,EAAE,KAAK,WAAW,mBAAmB,IAAI,SAAS;AACjF,WAAO;AAAA,MACL,MAAM,EAAE;AAAA,MACR,OAAO,EAAE;AAAA,MACT;AAAA,MACA,KAAK,EAAE;AAAA,IACT;AAAA,EACF,CAAC;AACH;AAEO,SAAS,gBAA4B;AAC1C,QAAM,eAAe,OAAO,aAAqB,cAAc,CAAC;AAChE,QAAM,mBAAmB,aAA8B,cAAc;AACrE,QAAM,eAAe,OAAO,gBAAgB,MAAM;AAClD,SAAO;AAAA,IACL,SAAS;AAAA,IACT;AAAA,IACA;AAAA,EACF;AACF;AAEO,SAAS,4BAA4B,MAA6B;AACvE,QAAM,KAAK,YAAY;AACvB,QAAM,MAAM,GAAG;AAAA,IACb;AAAA,EACF,EAAE,IAAI,IAAI;AACV,MAAI,CAAC,IAAK,QAAO;AACjB,QAAM,IAAI,IAAI,KAAK,IAAI,SAAS,EAAE,QAAQ;AAC1C,SAAO,OAAO,SAAS,CAAC,IAAI,IAAI;AAClC;AAQO,SAAS,6BAA6B,OAA6C;AACxF,QAAM,SAAS,oBAAI,IAA2B;AAC9C,aAAW,KAAK,MAAO,QAAO,IAAI,GAAG,IAAI;AACzC,MAAI,MAAM,WAAW,EAAG,QAAO;AAE/B,QAAM,KAAK,YAAY;AACvB,QAAM,eAAe,MAAM,IAAI,MAAM,GAAG,EAAE,KAAK,GAAG;AAClD,QAAM,OAAO,GAAG;AAAA,IACd,yEAAyE,YAAY;AAAA,EACvF,EAAE,IAAI,GAAG,KAAK;AAEd,aAAW,OAAO,MAAM;AACtB,QAAI,CAAC,IAAI,OAAQ;AACjB,UAAM,IAAI,IAAI,KAAK,IAAI,MAAM,EAAE,QAAQ;AACvC,WAAO,IAAI,IAAI,MAAM,OAAO,SAAS,CAAC,IAAI,IAAI,IAAI;AAAA,EACpD;AACA,SAAO;AACT;AA6BO,SAAS,YAAkB;AAChC,QAAM,KAAK,YAAY;AACvB,KAAG,KAAK,QAAQ;AAClB;AAEO,SAAS,aAAmB;AACjC,QAAM,KAAK,YAAY;AACvB,KAAG,KAAK,SAAS;AACnB;AAEO,SAAS,aAAmB;AACjC,QAAM,KAAK,YAAY;AACvB,KAAG,KAAK,SAAS;AACnB;AAEO,SAAS,oBAA0C;AACxD,QAAM,KAAK,YAAY;AACvB,QAAM,OAAO,GAAG,QAAQ,wBAAwB,EAAE,IAAI;AACtD,QAAM,WAAW,KAAK,IAAI,CAAC,MAAM,EAAE,eAAe;AAClD,QAAM,KAAK,SAAS,WAAW,KAAK,SAAS,CAAC,MAAM;AACpD,SAAO;AAAA,IACL,QAAQ,KAAK,OAAO;AAAA,IACpB,QAAQ,KAAK,CAAC,IAAI;AAAA,EACpB;AACF;AAEO,SAAS,qBAA4C;AAC1D,QAAM,KAAK,YAAY;AACvB,QAAM,OAAO,GAAG,QAAQ,0BAA0B,EAAE,IAAI;AAMxD,SAAO;AACT;AAEO,SAAS,2BAAgD;AAC9D,QAAM,KAAK,YAAY;AACvB,QAAM,OAAO,GAAG,OAAO,0BAA0B;AAKjD,QAAM,MAAM,KAAK,CAAC,KAAK,EAAE,MAAM,GAAG,KAAK,GAAG,cAAc,EAAE;AAC1D,SAAO;AAAA,IACL,MAAM,OAAO,IAAI,IAAI;AAAA,IACrB,KAAK,OAAO,IAAI,GAAG;AAAA,IACnB,cAAc,OAAO,IAAI,YAAY;AAAA,EACvC;AACF;AAEO,S
AAS,oBAA0B;AACxC,QAAM,KAAK,YAAY;AACvB,KAAG,OAAO,UAAU;AACtB;AAEA,IAAM,oBAAoB;AAEnB,SAAS,eAAe,cAA4B;AAKzD,MAAI,CAAC,kBAAkB,KAAK,YAAY,GAAG;AACzC,UAAM,IAAI,MAAM,8BAA8B,YAAY;AAAA,EAC5D;AACA,QAAM,KAAK,YAAY;AACvB,QAAM,MAAM,GAAG;AAAA,IACb;AAAA,EACF,EAAE,IAAI,YAAY;AAClB,MAAI,CAAC,OAAO,EAAE,IAAI,OAAO,IAAI,YAAY,EAAE,SAAS,MAAM,GAAG;AAC3D,UAAM,IAAI,MAAM,wBAAwB,YAAY;AAAA,EACtD;AAIA,QAAM,SAAS,MAAM,eAAe;AACpC,QAAM,cAAc,iBAAiB,SAAS,MAAM,SAAS;AAC7D,KAAG,QAAQ,WAAW,EAAE,IAAI;AAC9B;AAEO,SAAS,oBAA8B;AAC5C,QAAM,KAAK,YAAY;AACvB,QAAM,OAAO,GAAG;AAAA,IACd;AAAA,EACF,EAAE,IAAI;AACN,SAAO,KAAK,IAAI,CAAC,MAAM,EAAE,IAAI,EAAE,OAAO,CAAC,SAAS,kBAAkB,KAAK,IAAI,CAAC;AAC9E;;;AD/SA,IAAM,+BAA+B;AAE9B,IAAM,6BAAN,MAAiC;AAAA,EACtC,YACU,QACA,UACA,QACR;AAHQ;AACA;AACA;AAAA,EACP;AAAA,EAEH,MAAM,aAAuC;AAC3C,UAAM,OAAO,qBAAqB,KAAK,MAAM;AAC7C,UAAM,SAAS,cAAc;AAC7B,UAAM,SAAS,mBAAmB;AAClC,UAAM,UAAU,eAAe;AAG/B,UAAM,WAAW,6BAA6B;AAAA,MAC5C,UAAU;AAAA,MACV,UAAU;AAAA,MACV,UAAU;AAAA,MACV,UAAU;AAAA,IACZ,CAAC;AACD,UAAM,aAAa,SAAS,IAAI,UAAU,iBAAiB,KAAK;AAChE,UAAM,WAAW,SAAS,IAAI,UAAU,eAAe,KAAK;AAC5D,UAAM,gBAAgB,SAAS,IAAI,UAAU,wBAAwB,KAAK;AAC1E,UAAM,oBAAoB,SAAS,IAAI,UAAU,yBAAyB,KAAK;AAE/E,QAAI,uBAAuE;AAC3E,QAAI,kBAAkB,QAAQ,sBAAsB,MAAM;AACxD,YAAM,OAAO,iBAAiB;AAC9B,YAAM,WAAW,qBAAqB;AACtC,UAAI,QAAQ,UAAU;AACpB,+BAAuB,EAAE,IAAI,IAAI,KAAK,IAAI,EAAE,YAAY,GAAG,QAAQ,KAAK;AAAA,MAC1E,OAAO;AACL,+BAAuB,EAAE,IAAI,IAAI,KAAK,QAAQ,EAAE,YAAY,GAAG,QAAQ,SAAS;AAAA,MAClF;AAAA,IACF;AAEA,WAAO;AAAA,MACL;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA,kBAAkB,aAAa,IAAI,KAAK,UAAU,EAAE,YAAY,IAAI;AAAA,MACpE,gBAAgB,WAAW,IAAI,KAAK,QAAQ,EAAE,YAAY,IAAI;AAAA,MAC9D;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAM,oBAA4C;AAChD,WAAO,4BAA4B,UAAU,iBAAiB;AAAA,EAChE;AAAA,EAEA,MAAM,WAAoC;AACxC,UAAM,YAAY,KAAK,IAAI;AAC3B,UAAM,YAA8B,CAAC;AACrC,UAAM,SAA2B,CAAC;AAElC,UAAM,QAAiD;AAAA,MACrD,EAAE,MAAM,WAAW,IAAI,WAAW;AAAA,MAClC,EAAE,MAAM,mBAAmB,IAAI,kBAAkB;AAAA,MACjD,GAAG,kBAAkB,EAAE,IAAI,CAAC,SAAS;AAAA,QACnC,MAAM,kBAAkB;AAAA,QACxB,IAAI,MAAM,eAAe,GAAG;AAAA,MAC9B,EAAE;AAAA;AAAA;AAAA,MAGF;AAAA,QACE,MAAM;AAAA
,QACN,IAAI,MAAM;AACR,gBAAM,SAAS,yBAAyB;AACxC,cAAI,OAAO,SAAS,GAAG;AACrB,iBAAK,OAAO,KAAK,UAAU,gBAAgB,oCAAoC;AAAA,cAC7E,MAAM,OAAO;AAAA,cACb,KAAK,OAAO;AAAA,cACZ,cAAc,OAAO;AAAA,YACvB,CAAC;AAAA,UACH;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAEA,eAAW,QAAQ,OAAO;AACxB,YAAM,YAAY,KAAK,IAAI;AAC3B,UAAI;AACF,aAAK,GAAG;AACR,kBAAU,KAAK,EAAE,MAAM,KAAK,MAAM,aAAa,KAAK,IAAI,IAAI,WAAW,IAAI,KAAK,CAAC;AAAA,MACnF,SAAS,KAAK;AACZ,cAAM,QAAS,IAAc;AAC7B,eAAO,KAAK,EAAE,MAAM,KAAK,MAAM,aAAa,KAAK,IAAI,IAAI,WAAW,IAAI,OAAO,MAAM,CAAC;AACtF,aAAK,OAAO,KAAK,UAAU,gBAAgB,2BAA2B,KAAK,MAAM,EAAE,MAAM,CAAC;AAAA,MAC5F;AAAA,IACF;AAEA,UAAM,cAAc,KAAK,IAAI,IAAI;AACjC,SAAK,OAAO,KAAK,UAAU,mBAAmB,8BAA8B;AAAA,MAC1E,WAAW,UAAU;AAAA,MACrB,QAAQ,OAAO;AAAA,MACf;AAAA,IACF,CAAC;AAED,WAAO,EAAE,mBAAmB,WAAW,gBAAgB,QAAQ,YAAY;AAAA,EAC7E;AAAA,EAEA,MAAM,SAAgC;AACpC,UAAM,cAAc,KAAK,SAAS;AAIlC,UAAM,QAAQ,MAAMC,KAAG,SAAS,OAAO,KAAK,QAAQ;AACpD,UAAM,aAAa,OAAO,MAAM,MAAM,IAAI,OAAO,MAAM,KAAK;AAC5D,UAAM,iBAAiB,cAAc;AACrC,QAAI,aAAa,gBAAgB;AAC/B,YAAM,IAAI,oBAAoB,gBAAgB,UAAU;AAAA,IAC1D;AAEA,UAAM,YAAY,KAAK,IAAI;AAC3B,cAAU;AACV,UAAM,cAAc,KAAK,IAAI,IAAI;AACjC,UAAM,aAAa,KAAK,SAAS;AACjC,UAAM,cAAc,cAAc;AAElC,SAAK,OAAO,KAAK,UAAU,iBAAiB,4BAA4B;AAAA,MACtE;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF,CAAC;AAED,WAAO,EAAE,aAAa,YAAY,aAAa,YAAY;AAAA,EAC7D;AAAA,EAEA,MAAM,UAAkC;AACtC,UAAM,YAAY,KAAK,IAAI;AAC3B,eAAW;AACX,UAAM,cAAc,KAAK,IAAI,IAAI;AAEjC,SAAK,OAAO,KAAK,UAAU,kBAAkB,6BAA6B,EAAE,YAAY,CAAC;AAEzF,WAAO,EAAE,YAAY;AAAA,EACvB;AAAA,EAEA,MAAM,iBAA2C;AAC/C,UAAM,YAAY,KAAK,IAAI;AAC3B,UAAM,YAAY,kBAAkB;AACpC,UAAM,eAAe,mBAAmB;AACxC,UAAM,cAAc,KAAK,IAAI,IAAI;AACjC,UAAM,SAAS,UAAU,WAAW,QAAQ,aAAa,WAAW,IAAI,OAAO;AAK/E,UAAM,UAAU,WAAW,OACvB,UAAU,2BACV,UAAU;AACd,SAAK,OAAO,KAAK,SAAS,qCAAqC;AAAA,MAC7D;AAAA,MACA,aAAa,UAAU,OAAO;AAAA,MAC9B,eAAe,aAAa;AAAA,MAC5B;AAAA,IACF,CAAC;AAED,WAAO;AAAA,MACL;AAAA,MACA,QAAQ,UAAU;AAAA,MAClB,eAAe,aAAa;AAAA,MAC5B;AAAA,IACF;AAAA,EACF;AAAA,EAEQ,WAAmB;AACzB,QAAI;AACF,aAAOA,KAAG,SAAS,KAAK,MAAM,EAAE;AAAA,IAClC,QAAQ;AACN,aAAO;AAAA,IACT;AAAA,EACF;AACF;;;AErMO,SAAS,yBAA+B;AA
C7C,WAAS;AAAA,IACP,QAAQ;AAAA,IACR,OAAO;AAAA,IACP,OAAO;AAAA,MACL,EAAE,IAAI,sBAAsB,OAAO,kBAAkB,aAAa,WAAW,cAAc,UAAU;AAAA,MACrG,EAAE,IAAI,sBAAsB,OAAO,eAAe,aAAa,WAAW,cAAc,QAAQ;AAAA,IAClG;AAAA,EACF,CAAC;AAED,WAAS;AAAA,IACP,QAAQ;AAAA,IACR,OAAO;AAAA,IACP,OAAO;AAAA,MACL,EAAE,IAAI,mBAAmB,OAAO,mBAAmB,aAAa,WAAW,cAAc,OAAO;AAAA,MAChG,EAAE,IAAI,iBAAiB,OAAO,iBAAiB,aAAa,WAAW,cAAc,OAAO;AAAA,IAC9F;AAAA,EACF,CAAC;AAED,WAAS;AAAA,IACP,QAAQ;AAAA,IACR,OAAO;AAAA,IACP,OAAO;AAAA,MACL,EAAE,IAAI,kBAAkB,OAAO,4BAA4B,aAAa,WAAW,cAAc,OAAO;AAAA,MACxG,EAAE,IAAI,iBAAiB,OAAO,iBAAiB,aAAa,WAAW,cAAc,UAAU;AAAA,MAC/F,EAAE,IAAI,iBAAiB,OAAO,iBAAiB,aAAa,WAAW,cAAc,OAAO;AAAA,IAC9F;AAAA,EACF,CAAC;AAED,WAAS;AAAA,IACP,QAAQ;AAAA,IACR,OAAO;AAAA,IACP,OAAO;AAAA,MACL,EAAE,IAAI,6BAA6B,OAAO,0BAA0B,aAAa,WAAW,cAAc,OAAO;AAAA,MACjH,EAAE,IAAI,0BAA0B,OAAO,uBAAuB,aAAa,WAAW,cAAc,OAAO;AAAA,IAC7G;AAAA,EACF,CAAC;AAED,WAAS;AAAA,IACP,QAAQ;AAAA,IACR,OAAO;AAAA,IACP,OAAO;AAAA,MACL,EAAE,IAAI,uBAAuB,OAAO,wBAAwB,aAAa,WAAW,cAAc,OAAO;AAAA,IAC3G;AAAA,EACF,CAAC;AAED,WAAS;AAAA,IACP,QAAQ;AAAA,IACR,OAAO;AAAA,IACP,OAAO;AAAA,MACL,EAAE,IAAI,kBAAkB,OAAO,kBAAkB,aAAa,UAAU,cAAc,UAAU;AAAA,IAClG;AAAA,EACF,CAAC;AACH;;;ACtCA,IAAM,yBAAyB,iBAAE,OAAO;AAAA,EACtC,QAAQ,iBAAE,OAAO,EAAE,IAAI,CAAC;AAAA,EACxB,MAAM,iBAAE,OAAO,EAAE,IAAI,CAAC;AAAA,EACtB,OAAO,iBAAE,KAAK,CAAC,QAAQ,WAAW,WAAW,OAAO,CAAC,EAAE,SAAS;AAAA,EAChE,OAAO,iBAAE,OAAO,EAAE,IAAI,CAAC;AAAA,EACvB,SAAS,iBAAE,OAAO,EAAE,SAAS;AAAA,EAC7B,MAAM,iBAAE,KAAK,CAAC,UAAU,SAAS,CAAC,EAAE,SAAS;AAAA,EAC7C,MAAM,iBAAE,OAAO,EAAE,SAAS;AAAA,EAC1B,UAAU,iBAAE,OAAO,iBAAE,OAAO,GAAG,iBAAE,QAAQ,CAAC,EAAE,SAAS;AACvD,CAAC;AAED,IAAM,mBAAmB,iBAAE,OAAO;AAAA,EAChC,QAAQ,iBAAE,KAAK,CAAC,QAAQ,WAAW,CAAC;AACtC,CAAC;AAOD,eAAsB,wBACpB,WACA,OACwB;AACxB,QAAM,SAAS,MAAM;AACrB,QAAM,SAAS,MAAM;AACrB,QAAM,OAAO,MAAM;AACnB,QAAM,QAAQ,MAAM,QAAQ,OAAO,MAAM,KAAK,IAAI;AAClD,QAAM,SAAS,MAAM,SAAS,OAAO,MAAM,MAAM,IAAI;AAErD,QAAM,QAAQ,kBAAkB,EAAE,QAAQ,QAAQ,MAAM,OAAO,OAAO,CAAC;AACvE,QAAM,cAAc,mBAAmB,QAAQ;AAE/C,SAAO;AAAA,IACL,MAAM;AAAA,MACJ,OAAO,MAAM,IAAI,oBAAoB;AAAA,MACr
C,cAAc;AAAA,IAChB;AAAA,EACF;AACF;AAGA,eAAsB,yBACpB,UACA,MACwB;AACxB,QAAM,SAAS,uBAAuB,UAAU,IAAI;AACpD,MAAI,CAAC,OAAO,SAAS;AACnB,WAAO,EAAE,QAAQ,KAAK,MAAM,EAAE,OAAO,qBAAqB,QAAQ,OAAO,MAAM,OAAO,EAAE;AAAA,EAC1F;AAEA,QAAM,EAAE,QAAQ,MAAM,OAAO,SAAS,MAAM,SAAS,IAAI,OAAO;AAGhE,QAAM,SAAS,iBAAiB,QAAQ;AACxC,MAAI,CAAC,OAAO,cAAc,SAAS;AACjC,WAAO,EAAE,MAAM,EAAE,IAAI,MAAM,YAAY,MAAM,QAAQ,yBAAyB,EAAE;AAAA,EAClF;AACA,QAAM,eAAe,OAAO,cAAc,QAAQ,MAAM;AACxD,MAAI,gBAAgB,CAAC,aAAa,SAAS;AACzC,WAAO,EAAE,MAAM,EAAE,IAAI,MAAM,YAAY,MAAM,QAAQ,kBAAkB,EAAE;AAAA,EAC3E;AAGA,QAAM,KAAK,OAAO,UAAU;AAAA,IAC1B;AAAA,IAAQ;AAAA,IAAM;AAAA,IAAO;AAAA,IAAS;AAAA,IAAM;AAAA,IACpC,OAAO,OAAO,KAAK;AAAA,IACnB,MAAM,OAAO,KAAK;AAAA,EACpB,GAAG,MAAM;AAET,MAAI,CAAC,IAAI;AACP,WAAO,EAAE,MAAM,EAAE,IAAI,MAAM,YAAY,MAAM,QAAQ,UAAU,EAAE;AAAA,EACnE;AAEA,SAAO;AAAA,IACL,MAAM;AAAA,MACJ,IAAI;AAAA,MACJ;AAAA,MACA,cAAc,qBAAqB,gBAAgB,EAAE,CAAE;AAAA,IACzD;AAAA,EACF;AACF;AAGA,eAAsB,yBACpB,WACA,IACA,MACwB;AACxB,QAAM,SAAS,iBAAiB,UAAU,IAAI;AAC9C,MAAI,CAAC,OAAO,SAAS;AACnB,WAAO,EAAE,QAAQ,KAAK,MAAM,EAAE,OAAO,qBAAqB,QAAQ,OAAO,MAAM,OAAO,EAAE;AAAA,EAC1F;AAEA,QAAM,UAAU,yBAAyB,IAAI,OAAO,KAAK,MAAM;AAC/D,MAAI,CAAC,SAAS;AACZ,WAAO,EAAE,QAAQ,KAAK,MAAM,EAAE,OAAO,YAAY,EAAE;AAAA,EACrD;AAEA,SAAO,EAAE,MAAM,EAAE,IAAI,KAAK,EAAE;AAC9B;AAGA,eAAsB,iBACpB,WACA,MACwB;AACxB,QAAM,SAAU,MAAkC;AAClD,QAAM,QAAQ,wBAAwB,MAAM;AAC5C,SAAO,EAAE,MAAM,EAAE,IAAI,MAAM,WAAW,MAAM,EAAE;AAChD;AAGA,eAAsB,kBACpB,WACA,MACwB;AACxB,QAAM,SAAU,MAAkC;AAClD,QAAM,QAAQ,YAAY,MAAM;AAChC,SAAO,EAAE,MAAM,EAAE,IAAI,MAAM,QAAQ,MAAM,EAAE;AAC7C;AAGA,eAAsB,oBAA4C;AAChE,SAAO,EAAE,MAAM,EAAE,SAAS,cAAc,EAAE,EAAE;AAC9C;AAGA,eAAsB,oBAA4C;AAChE,SAAO,EAAE,MAAM,EAAE,OAAO,mBAAmB,QAAQ,EAAE,EAAE;AACzD;AAMA,SAAS,qBAAqB,KAAyC;AACrE,MAAI,CAAC,IAAK,QAAO;AACjB,SAAO;AAAA,IACL,GAAG;AAAA,IACH,UAAU,IAAI,WAAW,KAAK,MAAM,IAAI,QAAQ,IAAI;AAAA,EACtD;AACF;;;ACzJA,IAAAC,eAA+D;AAuB/D,IAAM,UAAU;AAGhB,IAAM,eAAe;AAGrB,IAAM,mBAAmB;AAGzB,IAAM,iBAAiB;AAGvB,IAAM,iBAAiB;AAGvB,IAAM,gBAAgB;AAWtB,eAAsB,gBACpB,KACA,UACwB;AACxB,QAAM,iBAAiB,sBAAsB;AAC7C,QAAM,WAAW
,aAAa,gBAAgB,QAAQ;AACtD,MAAI,QAAQ,MAAM,KAAK,SAAS,OAAO,CAAC;AAExC,QAAM,eAAe,IAAI,OAAO;AAChC,MAAI,cAAc;AAChB,YAAQ,MAAM,OAAO,CAAC,MAAM,EAAE,WAAW,YAAY;AAAA,EACvD;AAEA,SAAO,EAAE,QAAQ,SAAS,MAAM,EAAE,MAAM,EAAE;AAC5C;AAOA,eAAsB,cACpB,KACA,UACwB;AACxB,QAAM,iBAAiB,sBAAsB;AAC7C,QAAM,WAAW,aAAa,gBAAgB,QAAQ;AACtD,QAAM,OAAO,SAAS,IAAI,IAAI,OAAO,EAAE;AAEvC,MAAI,CAAC,MAAM;AACT,WAAO,EAAE,QAAQ,gBAAgB,MAAM,EAAE,OAAO,iBAAiB,EAAE;AAAA,EACrE;AAEA,SAAO,EAAE,QAAQ,SAAS,MAAM,EAAE,KAAK,EAAE;AAC3C;AAYA,eAAsB,iBACpB,KACA,UACwB;AAExB,QAAM,SAAS,gBAAgB,UAAU,IAAI,IAAI;AACjD,MAAI,CAAC,OAAO,SAAS;AACnB,WAAO;AAAA,MACL,QAAQ;AAAA,MACR,MAAM,EAAE,OAAO,qBAAqB,QAAQ,OAAO,MAAM,OAAO;AAAA,IAClE;AAAA,EACF;AAEA,QAAM,SAAS,OAAO;AAGtB,MAAI,CAAC,iBAAiB,OAAO,IAAI,GAAG;AAClC,WAAO;AAAA,MACL,QAAQ;AAAA,MACR,MAAM,EAAE,OAAO,qBAAqB,MAAM,OAAO,KAAK;AAAA,IACxD;AAAA,EACF;AAGA,QAAM,iBAAiB,sBAAsB;AAC7C,QAAM,WAAW,aAAa,gBAAgB,QAAQ;AACtD,QAAM,WAAW,SAAS,IAAI,OAAO,IAAI;AACzC,MAAI,YAAY,SAAS,WAAW,kBAAkB;AACpD,WAAO;AAAA,MACL,QAAQ;AAAA,MACR,MAAM,EAAE,OAAO,uBAAuB,MAAM,OAAO,KAAK;AAAA,IAC1D;AAAA,EACF;AAEA,QAAM,OAAO;AAAA,IACX,GAAG;AAAA,IACH,WAAW;AAAA,IACX,QAAQ;AAAA,EACV;AAEA,gBAAc,UAAU,IAAI;AAE5B,SAAO,EAAE,QAAQ,cAAc,MAAM,EAAE,KAAK,EAAE;AAChD;AAUA,eAAsB,eACpB,KACA,UACwB;AACxB,QAAM,aAAa,IAAI,OAAO;AAC9B,QAAM,UAAW,IAAI,MAA8C;AAEnE,QAAM,iBAAiB,sBAAsB;AAG7C,MAAI,YAAY,UAAa,CAAC,iBAAiB,OAAO,GAAG;AACvD,WAAO;AAAA,MACL,QAAQ;AAAA,MACR,MAAM,EAAE,OAAO,qBAAqB,MAAM,QAAQ;AAAA,IACpD;AAAA,EACF;AAEA,MAAI;AACF,UAAM,OAAO,eAAe,gBAAgB,UAAU,YAAY,OAAO;AACzE,WAAO,EAAE,QAAQ,cAAc,MAAM,EAAE,MAAM,KAAK,EAAE;AAAA,EACtD,SAAS,KAAK;AACZ,UAAM,UAAU,aAAe,GAAG;AAClC,QAAI,QAAQ,SAAS,WAAW,GAAG;AACjC,aAAO,EAAE,QAAQ,gBAAgB,MAAM,EAAE,OAAO,kBAAkB,MAAM,WAAW,EAAE;AAAA,IACvF;AACA,WAAO,EAAE,QAAQ,kBAAkB,MAAM,EAAE,OAAO,eAAe,QAAQ,EAAE;AAAA,EAC7E;AACF;AAQA,eAAsB,kBACpB,KACA,UACwB;AACxB,QAAM,WAAW,IAAI,OAAO;AAC5B,QAAM,iBAAiB,sBAAsB;AAC7C,QAAM,WAAW,aAAa,gBAAgB,QAAQ;AACtD,QAAM,OAAO,SAAS,IAAI,QAAQ;AAElC,MAAI,CAAC,MAAM;AACT,WAAO,EAAE,QAAQ,gBAAgB,MAAM,EAAE,OAAO,kBAAkB,MAAM,SAAS,EAAE;AAAA,EACrF;AAGA,QAAM,EAA
E,WAAW,KAAK,QAAQ,MAAM,GAAG,aAAa,IAAI;AAC1D,QAAM,WAAO,aAAAC,WAAc,YAAY;AAEvC,SAAO,EAAE,QAAQ,SAAS,MAAM,EAAE,MAAM,QAAQ,KAAK,OAAO,EAAE;AAChE;AASA,eAAsB,iBACpB,KACA,UACwB;AACxB,QAAM,WAAW,IAAI,OAAO;AAC5B,QAAM,iBAAiB,sBAAsB;AAC7C,QAAM,WAAW,aAAa,gBAAgB,QAAQ;AACtD,QAAM,WAAW,SAAS,IAAI,QAAQ;AAEtC,MAAI,CAAC,UAAU;AACb,WAAO,EAAE,QAAQ,gBAAgB,MAAM,EAAE,OAAO,kBAAkB,MAAM,SAAS,EAAE;AAAA,EACrF;AAEA,MAAI,SAAS,aAAa,SAAS,WAAW,kBAAkB;AAC9D,WAAO,EAAE,QAAQ,gBAAgB,MAAM,EAAE,OAAO,yBAAyB,MAAM,SAAS,EAAE;AAAA,EAC5F;AAEA,QAAM,OAAO,IAAI;AACjB,QAAM,cAAc,MAAM;AAC1B,MAAI,OAAO,gBAAgB,UAAU;AACnC,WAAO,EAAE,QAAQ,kBAAkB,MAAM,EAAE,OAAO,qBAAqB,EAAE;AAAA,EAC3E;AAEA,MAAI;AACF,UAAM,SAAS,gBAAgB,UAAM,aAAAC,OAAU,WAAW,CAAC;AAC3D,UAAM,OAAO,EAAE,GAAG,eAAe,MAAM,GAAG,WAAW,OAAO,QAAQ,iBAAiB;AAGrF,QAAI,KAAK,SAAS,UAAU;AAC1B,aAAO,EAAE,QAAQ,kBAAkB,MAAM,EAAE,OAAO,iBAAiB,UAAU,UAAU,KAAK,KAAK,KAAK,EAAE;AAAA,IAC1G;AAEA,kBAAc,UAAU,IAAI;AAC5B,WAAO,EAAE,QAAQ,SAAS,MAAM,EAAE,KAAK,EAAE;AAAA,EAC3C,SAAS,KAAK;AACZ,UAAM,UAAU,aAAe,GAAG;AAClC,WAAO,EAAE,QAAQ,kBAAkB,MAAM,EAAE,OAAO,qBAAqB,QAAQ,EAAE;AAAA,EACnF;AACF;AAQA,eAAsB,iBACpB,KACA,UACwB;AACxB,QAAM,WAAW,IAAI,OAAO;AAC5B,QAAM,iBAAiB,sBAAsB;AAC7C,QAAM,WAAW,aAAa,gBAAgB,QAAQ;AACtD,QAAM,OAAO,SAAS,IAAI,QAAQ;AAGlC,MAAI,CAAC,MAAM;AACT,WAAO,EAAE,QAAQ,gBAAgB,MAAM,EAAE,OAAO,kBAAkB,MAAM,SAAS,EAAE;AAAA,EACrF;AAGA,MAAI,KAAK,aAAa,KAAK,WAAW,kBAAkB;AACtD,WAAO;AAAA,MACL,QAAQ;AAAA,MACR,MAAM,EAAE,OAAO,yBAAyB,MAAM,SAAS;AAAA,IACzD;AAAA,EACF;AAEA,iBAAe,UAAU,QAAQ;AAEjC,SAAO,EAAE,QAAQ,SAAS,MAAM,EAAE,SAAS,SAAS,EAAE;AACxD;AAOA,eAAsB,oBACpB,KACA,UACwB;AACxB,QAAM,SAAS,IAAI,OAAO;AAC1B,QAAM,SAAS,iBAAiB,QAAQ;AACxC,QAAM,aAAa,OAAO,MAAM,QAAQ,MAAM,KAAK;AACnD,SAAO,EAAE,QAAQ,SAAS,MAAM,EAAE,QAAQ,QAAQ,WAAW,EAAE;AACjE;AAUA,eAAsB,uBACpB,KACA,UACwB;AACxB,QAAM,SAAS,IAAI,OAAO;AAC1B,QAAM,OAAO,IAAI;AAEjB,MAAI,CAAC,MAAM;AACT,WAAO,EAAE,QAAQ,kBAAkB,MAAM,EAAE,OAAO,eAAe,EAAE;AAAA,EACrE;AAEA,QAAM,UAAU;AAAA,IAAa;AAAA,IAAU,CAAC,WACtC,eAAe,QAAQ,QAAQ,IAAI;AAAA,EACrC;AAEA,SAAO;AAAA,IACL,QAAQ;AAAA,IACR,MAAM,EAAE,QAAQ,QAAQ,QAAQ,MAAM,QAAQ,MAAM,KAAK,KA
AK;AAAA,EAChE;AACF;;;ACnUA,IAAM,8BAA8B;AAKpC,IAAM,gCAAgC,KAAK,KAAK;AAChD,IAAI,uBAAgE;AAGpE,IAAMC,WAAU;AAChB,IAAMC,oBAAmB;AA6BzB,eAAsB,qBAA6C;AAEjE,QAAM,UAAU,MAAM,QAAQ,WAAW;AAAA,IACvC,gBAAgB;AAAA,IAChB,wBAAwB,UAAU,cAAc,gBAAgB;AAAA,IAChE,wBAAwB,YAAY,gBAAgB,gBAAgB;AAAA,EACtE,CAAC;AAED,QAAM,YAA4B,QAAQ;AAAA,IAAI,CAAC,MAC7C,EAAE,WAAW,cACT,EAAE,QACF,EAAE,MAAM,WAAW,WAAW,OAAO,QAAQ,CAAC,EAAE;AAAA,EACtD;AAEA,SAAO,EAAE,QAAQD,UAAS,MAAM,EAAE,UAAU,EAAE;AAChD;AAQA,eAAsB,mBAAmB,KAA2C;AAClF,QAAM,OAAO,IAAI;AACjB,QAAM,OAAO,MAAM;AAEnB,MAAI,CAAC,QAAQ,CAAC,CAAC,aAAa,UAAU,UAAU,EAAE,SAAS,IAAI,GAAG;AAChE,WAAO;AAAA,MACL,QAAQC;AAAA,MACR,MAAM,EAAE,OAAO,mEAAmE;AAAA,IACpF;AAAA,EACF;AAEA,QAAM,UAAU,MAAM;AACtB,QAAM,QAAQ,YAAY,IAAI;AAC9B,MAAI;AAEJ,MAAI;AACF,QAAI,SAAS,UAAU;AACrB,eAAS,MAAM,kBAAkB,IAAI,cAAc,EAAE,UAAU,QAAQ,CAAC,GAAG,UAAU,cAAc,kBAAkB,OAAO;AAAA,IAC9H,WAAW,SAAS,YAAY;AAC9B,eAAS,MAAM,kBAAkB,IAAI,gBAAgB,EAAE,UAAU,QAAQ,CAAC,GAAG,aAAa,gBAAgB,kBAAkB,OAAO;AAAA,IACrI,OAAO;AACL,eAAS,cAAc;AAAA,IACzB;AAAA,EACF,SAAS,KAAK;AACZ,aAAS,EAAE,IAAI,OAAO,OAAO,OAAO,GAAG,EAAE;AAAA,EAC3C;AAEA,MAAI,OAAO,IAAI;AACb,WAAO,aAAa,KAAK,MAAM,YAAY,IAAI,IAAI,KAAK;AAAA,EAC1D;AAEA,SAAO,EAAE,QAAQD,UAAS,MAAM,OAAO;AACzC;AAQA,eAAe,wBACb,MACA,gBACuB;AACvB,QAAM,SAAS,MAAM,mBAAmB,IAAI;AAE5C,QAAM,kBAAkB,OAAO,OAAO,OAAO,OAAK,CAAC,UAAU,KAAK,CAAC,CAAC;AAEpE,QAAM,SAAS,gBAAgB,eAAe;AAC9C,SAAO,EAAE,MAAM,WAAW,OAAO,WAAW,SAAS,gBAAgB,OAAO;AAC9E;AAEA,eAAe,kBAAyC;AAStD,QAAM,MAAM,KAAK,IAAI;AACrB,MAAI,wBAAwB,MAAM,qBAAqB,KAAK,+BAA+B;AACzF,WAAO,EAAE,MAAM,aAAa,WAAW,MAAM,QAAQ,qBAAqB,OAAO;AAAA,EACnF;AAEA,MAAI,SAAS;AACb,MAAI;AACF,UAAM,SAAS,IAAI,UAAU;AAC7B,UAAM,WAAW,MAAM,OAAO,OAAO;AAAA,MACnC,EAAE,OAAO,GAAG;AAAA,MACZ,EAAE,SAAS,4BAA4B;AAAA,IACzC;AACA,UAAM,aAAa,SAAS,KACzB,IAAI,CAAC,MAAM,EAAE,EAAE,EACf,OAAO,CAAC,OAAO,GAAG,WAAW,SAAS,CAAC;AAC1C,QAAI,WAAW,SAAS,GAAG;AACzB,eAAS;AAAA,IACX;AAAA,EACF,QAAQ;AAAA,EAER;AACA,yBAAuB,EAAE,IAAI,KAAK,OAAO;AACzC,SAAO,EAAE,MAAM,aAAa,WAAW,MAAM,OAAO;AACtD;AAOA,eAAe,kBACb,SACA,OACA,gBACA,SACqB;AACrB,QAAM,YAAY,MAAM,QAAQ,YAAY;AAC5C,MAAI,
CAAC,WAAW;AACd,WAAO,EAAE,IAAI,OAAO,OAAO,GAAG,KAAK,qBAAqB,WAAW,cAAc,GAAG;AAAA,EACtF;AACA,SAAO,EAAE,IAAI,KAAK;AACpB;AAEA,SAAS,gBAA4B;AAEnC,SAAO,EAAE,IAAI,KAAK;AACpB;;;AChLA,SAAS,eAAe;;;ACExB,SAAS,gBACP,cACA,gBACc;AACd,MAAI,CAAC,gBAAgB,SAAU,QAAO;AACtC,SAAO;AAAA,IACL,SAAS,eAAe,SAAS,WAAW,aAAa;AAAA,IACzD,iBAAiB,eAAe,SAAS,mBAAmB,aAAa;AAAA,IACzE,OAAO,eAAe,SAAS,SAAS,aAAa;AAAA,IACrD,cAAc,eAAe,SAAS,gBAAgB,aAAa;AAAA,EACrE;AACF;AA+BO,SAAS,mBACd,OACA,iBACA,SACA,iBACY;AACZ,QAAM,OAAmB,CAAC;AAE1B,aAAW,QAAQ,OAAO;AACxB,QAAI,CAAC,KAAK,SAAU;AAEpB,UAAM,WAAW,gBAAgB,KAAK,IAAI;AAC1C,UAAM,YAAY,gBAAgB,KAAK,UAAU,QAAQ;AAEzD,QAAI,CAAC,UAAU,QAAS;AAExB,QAAI,UAAU,kBAAkB,KAAK,IAAI,KAAK;AAC9C,UAAM,aAAa,UAAU,kBAAkB;AAE/C,SAAK,KAAK;AAAA,MACR,MAAM,aAAa,KAAK,IAAI;AAAA,MAC5B,OAAO,UAAU;AAAA,MACjB,IAAI,YAAY;AACd,YAAI,CAAC,QAAS;AACd,YAAI,QAAQ,cAAc,KAAK,IAAI,EAAG;AACtC,YAAI,KAAK,IAAI,IAAI,UAAU,WAAY;AAGvC,YAAI,UAAU,cAAc;AAC1B,gBAAM,QAAQ,QAAQ,cAAc,UAAU,YAAY;AAC1D,cAAI,CAAC,MAAO;AACZ,cAAI,CAAC,MAAM,EAAG;AAAA,QAChB;AASA,cAAM,MAAM;AACZ,YAAI,eAAe,KAAK,MAAM,IAAI;AAClC,kBAAU,KAAK,IAAI;AAEnB,aAAK,IAAI,QAAQ,KAAK,IAAI,EACvB,MAAM,CAAC,QAAQ;AACd,cAAI,cAAc,KAAK,MAAM,GAAG;AAAA,QAClC,CAAC,EACA,QAAQ,MAAM;AACb,cAAI,eAAe,KAAK,MAAM,KAAK;AAAA,QACrC,CAAC;AAAA,MACL;AAAA,IACF,CAAC;AAAA,EACH;AAEA,SAAO;AACT;;;ADlFA,IAAM,uBAAuB;AAoB7B,eAAsB,uBACpB,cACA,MACe;AACf,QAAM,EAAE,gBAAgB,UAAU,kBAAkB,QAAQ,WAAW,IAAI;AAC3E,QAAM,eAAe,oBAAI,IAAY;AAErC,MAAI,CAAC,gBAAgB;AACnB,WAAO,KAAK,UAAU,aAAa,2EAAsE;AACzG;AAAA,EACF;AAKA,MAAI,cAAc,WAAW,QAAQ,MAAM,4BAA4B;AACvE,MAAI,CAAC,aAAa;AAChB,WAAO,KAAK,UAAU,WAAW,2HAAsH;AAAA,EACzJ;AAEA,QAAM,EAAE,cAAAE,cAAa,IAAI,MAAM,OAAO,wBAAyB;AAC/D,QAAM,WAAW,MAAM,KAAKA,cAAa,gBAAgB,QAAQ,EAAE,OAAO,CAAC;AAG3E,QAAM,eAAe,oBAAI,IAAoB;AAC7C,aAAW,QAAQ,UAAU;AAC3B,iBAAa,IAAI,KAAK,MAAM,KAAK,KAAK;AAAA,EACxC;AAGA,QAAM,kBAA0C,CAAC;AACjD,MAAI;AACF,UAAM,aAAa,YAAY,EAAE;AAAA,MAC/B;AAAA;AAAA;AAAA;AAAA,IAIF,EAAE,IAAI;AACN,eAAW,OAAO,YAAY;AAC5B,sBAAgB,IAAI,IAAI,IAAI,IAAI,iBAAiB;AAAA,IACnD;AAAA,EACF,QAAQ;AAAA,EAER;AAEA,QAAM,mBAAwC;AAAA,IAC5C,eAAe,CAAC
,SAAS,aAAa,IAAI,IAAI;AAAA,IAC9C,gBAAgB,CAAC,MAAM,YAAY;AACjC,UAAI,QAAS,cAAa,IAAI,IAAI;AAAA,UAC7B,cAAa,OAAO,IAAI;AAAA,IAC/B;AAAA,IACA,SAAS,OAAO,aAAa;AAC3B,YAAM,SAAS,WAAW;AAI1B,YAAM,UAAU,OAAO,MAAM,4BAA4B;AACzD,UAAI,YAAY,aAAa;AAC3B,eAAO;AAAA,UACL,UAAU;AAAA,UACV,UACI,qDACA;AAAA,QACN;AACA,sBAAc;AAAA,MAChB;AACA,UAAI,CAAC,QAAS;AAEd,YAAM,EAAE,SAAS,IAAI,MAAM,OAAO,wBAAyB;AAE3D,YAAM,aAAa,OAAO,MAAM,QAAQ,QAAQ;AAChD,YAAM,cAAc,QAAQ,UAAU,IAAI;AAC1C,YAAM,QAAQ,qBAAqB,UAAU,YAAY,QAAQ,aAAa,IAAI,QAAQ,GAAG,aAAa,gBAAgB;AAM1H,UAAI,0BAA0B,QAAQ,KAAK,CAAC,OAAO;AACjD,eAAO;AAAA,UACL,UAAU;AAAA,UACV,kBAAkB,QAAQ;AAAA,UAC1B,EAAE,MAAM,UAAU,QAAQ,UAAU;AAAA,QACtC;AACA;AAAA,MACF;AAEA,YAAM,SAAS,MAAM,SAAS,UAAU;AAAA,QACtC,MAAM;AAAA,QACN,aAAa,OAAO;AAAA,QACpB,YAAY,OAAO;AAAA,QACnB;AAAA,MACF,CAAC;AACD,aAAO,KAAK,UAAU,WAAW,kBAAkB,QAAQ,cAAc;AAAA,QACvE,QAAQ,OAAO;AAAA,QACf,OAAO,OAAO;AAAA,MAChB,CAAC;AAED,UAAI,OAAO,WAAW,UAAU;AAC9B,eAAO,UAAU;AAAA,UACf,QAAQ;AAAA,UACR,MAAM;AAAA,UACN,OAAO,gBAAgB,QAAQ;AAAA,UAC/B,SAAS,OAAO,SAAS;AAAA,UACzB,MAAM,cAAc,OAAO,KAAK;AAAA,UAChC,UAAU,EAAE,UAAU,OAAO,OAAO,MAAM;AAAA,QAC5C,GAAG,MAAM;AAAA,MACX,WAAW,OAAO,WAAW,aAAa;AACxC,eAAO,UAAU;AAAA,UACf,QAAQ;AAAA,UACR,MAAM;AAAA,UACN,OAAO,mBAAmB,QAAQ;AAAA,UAClC,MAAM,cAAc,OAAO,KAAK;AAAA,UAChC,UAAU,EAAE,UAAU,OAAO,OAAO,MAAM;AAAA,QAC5C,GAAG,MAAM;AAGT,cAAM,EAAE,oBAAoB,IAAI,MAAM,OAAO,qBAA2B;AACxE,cAAM,SAAS,oBAAoB,OAAO,OAAO,CAAC,sBAAsB,oBAAoB,CAAC;AAC7F,cAAM,aAAa,OAAO,oBAAoB,KAAK;AACnD,cAAM,cAAc,OAAO,oBAAoB,KAAK;AAEpD,YAAI,aAAa,GAAG;AAClB,iBAAO,UAAU;AAAA,YACf,QAAQ;AAAA,YACR,MAAM;AAAA,YACN,OAAO,eAAe,IAAI,4BAA4B,aAAa,UAAU;AAAA,YAC7E,SAAS,QAAQ,QAAQ;AAAA,YACzB,MAAM;AAAA,YACN,UAAU,EAAE,OAAO,YAAY,UAAU,OAAO,OAAO,MAAM;AAAA,UAC/D,GAAG,MAAM;AAAA,QACX;AACA,YAAI,cAAc,GAAG;AACnB,iBAAO,UAAU;AAAA,YACf,QAAQ;AAAA,YACR,MAAM;AAAA,YACN,OAAO,mBAAmB,WAAW,IAAI,gBAAgB,IAAI,SAAS,OAAO;AAAA,YAC7E,MAAM;AAAA,YACN,UAAU,EAAE,WAAW,aAAa,UAAU,OAAO,OAAO,MAAM;AAAA,UACpE,GAAG,MAAM;AAAA,QACX;AAAA,MACF;AAAA,IACF;AAAA,IACA,eAAe;AAAA,MACb,2BAA2B,MAAM;AAI/B,cAAM,MAAM,YAAY,EAAE;AAAA,UACxB;AAAA;AAAA;AAAA;
AAAA;AAAA;AAAA;AAAA,QAOF,EAAE,IAAI;AACN,eAAO,QAAQ;AAAA,MACjB;AAAA,MACA,qBAAqB,MAAM;AACzB,eAAO,kBAAkB,EAAE,QAAQ,SAAS,CAAC,IAAI;AAAA,MACnD;AAAA,MACA,2BAA2B,MAAM;AAC/B,eAAO,gBAAgB,EAAE,QAAQ,WAAW,CAAC,IAAI;AAAA,MACnD;AAAA,MACA,6BAA6B,MAAM;AACjC,eAAO,0BAA0B,aAAa,IAAI,iBAAiB,CAAC,EAAE;AAAA,MACxE;AAAA,IACF;AAAA,IACA,aAAa,CAAC,UAAU,QAAQ;AAC9B,aAAO,MAAM,UAAU,aAAa,kBAAkB,QAAQ,WAAW;AAAA,QACvE,OAAO,eAAe,QAAQ,IAAI,UAAU,OAAO,GAAG;AAAA,MACxD,CAAC;AAAA,IACH;AAAA,EACF;AAEA,QAAM,gBAAgB;AAAA,IACpB;AAAA,IACA,WAAW,QAAQ,MAAM,SAAS,CAAC;AAAA,IACnC;AAAA,IACA;AAAA,EACF;AACA,eAAa,aAAa,sBAAsB,aAAa;AAC7D,SAAO,KAAK,UAAU,cAAc,UAAU,cAAc,MAAM,sBAAsB;AAAA,IACtF,OAAO,cAAc,IAAI,CAAC,MAAM,EAAE,IAAI;AAAA,EACxC,CAAC;AACH;;;AE3NO,SAAS,kBAAmC;AACjD,SAAO,YAAY,EAAE;AAAA,IACnB;AAAA;AAAA;AAAA,EAGF,EAAE,IAAI;AACR;;;ACSA,IAAM,wBAAwB;AAC9B,IAAM,6BAA6B;AACnC,IAAM,0BAA0B;AAMhC,IAAM,eAAe,iBAAE,OAAO;AAAA,EAC5B,SAAS,iBAAE,OAAO;AAAA,EAClB,MAAM,iBAAE,OAAO,EAAE,SAAS;AAAA,EAC1B,MAAM,iBAAE,MAAM,iBAAE,OAAO,CAAC,EAAE,SAAS;AACrC,CAAC;AAED,IAAM,gBAAgB,iBAAE,OAAO;AAAA,EAC7B,cAAc,iBAAE,OAAO;AAAA,EACvB,cAAc,iBAAE,OAAO;AAAA,EACvB,QAAQ,iBAAE,OAAO,EAAE,SAAS;AAC9B,CAAC;AAMD,SAAS,kBAAkB,KAAiC;AAC1D,QAAM,KAAK,KAAK,MAAM,GAAG;AACzB,SAAO,OAAO,MAAM,EAAE,IAAI,SAAY,KAAK,MAAM,KAAK,GAAI;AAC5D;AAEA,SAAS,qBAAqB,WAAyB;AACrD,gBAAc;AAAA,IACZ,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,YAAY;AAAA,EACd,CAAC;AACH;AAEA,SAAS,eAAe,SAAgC;AACtD,QAAM,cAAc,WAAW;AAC/B,QAAM,WAAW,YAAY,MAAM,WAAW,KAAK,CAAC,GAAG;AACvD,QAAM,aAAa,YAAY,MAAM,UAAU,KAAK,CAAC,GAAG;AACxD,QAAM,QAAQ,UAAU;AACxB,SAAO,UAAU,IAAI,QAAQ,GAAG,OAAO,IAAI,KAAK;AAClD;AAEA,SAAS,WAAW,MAA+B;AACjD,SAAO,OAAO,KAAK,MAAM,GAAG,EAAE,IAAI,CAAC,QAAQ,IAAI,KAAK,CAAC,IAAI,CAAC;AAC5D;AAEA,IAAM,kBAAkB,iBAAE,OAAO;AAAA,EAC/B,kBAAkB,iBAAE,MAAM,iBAAE,OAAO,CAAC,EAAE,IAAI,uBAAuB;AAAA,EACjE,sBAAsB,iBAAE,OAAO,EAAE,IAAI,CAAC;AAAA,EACtC,kBAAkB,iBAAE,OAAO;AAAA,EAC3B,MAAM,iBAAE,MAAM,iBAAE,OAAO,CAAC,EAAE,SAAS;AAAA,EACnC,QAAQ,iBAAE,OAAO,EAAE,SAAS;AAC9B,CAAC;AAeM,SAAS,uBAAuB,MAAoB;AACzD,QAAM,EAAE,WAAW,iBAAiB,IAAI;AAExC,WAAS,cAAc,KAOpB;AACD,WAAO;AAA
A,MACL,IAAI,IAAI;AAAA,MACR,OAAO,IAAI;AAAA,MACX,QAAQ,IAAI;AAAA,MACZ,UAAU,eAAe,IAAI,OAAO;AAAA,MACpC,MAAM,WAAW,IAAI,IAAI;AAAA,MACzB,YAAY,IAAI;AAAA,IAClB;AAAA,EACF;AAGA,iBAAe,eAAe,KAA2C;AACvE,UAAM,EAAE,SAAS,MAAM,KAAK,IAAI,aAAa,MAAM,IAAI,IAAI;AAC3D,UAAM,EAAE,YAAY,IAAI,MAAM,OAAO,QAAa;AAElD,UAAM,kBAAkB,QAAQ;AAChC,UAAM,KAAK,GAAG,eAAe,IAAI,YAAY,qBAAqB,EAAE,SAAS,KAAK,CAAC;AACnF,UAAM,MAAM,aAAa;AAEzB,yBAAqB,GAAG;AAExB,UAAM,QAAQ,YAAY;AAAA,MACxB;AAAA,MACA,UAAU;AAAA,MACV,YAAY;AAAA,MACZ,kBAAkB;AAAA,MAClB;AAAA,MACA,MAAM,OAAO,KAAK,KAAK,IAAI,IAAI;AAAA,MAC/B,YAAY;AAAA,IACd,CAAC;AAED,qBAAiB,iBAAiB,UAAU,MAAM,IAAI,SAAS;AAAA,MAC7D,QAAQ;AAAA,MACR,kBAAkB;AAAA,IACpB,CAAC,EAAE,MAAM,MAAM;AAAA,IAAC,CAAC;AAEjB,WAAO;AAAA,MACL,MAAM;AAAA,QACJ,IAAI,MAAM;AAAA,QACV,kBAAkB,MAAM;AAAA,QACxB,QAAQ,MAAM;AAAA,QACd,YAAY,MAAM;AAAA,MACpB;AAAA,IACF;AAAA,EACF;AAGA,iBAAe,gBAAgB,KAA2C;AACxE,UAAM,EAAE,cAAc,cAAc,OAAO,IAAI,cAAc,MAAM,IAAI,IAAI;AAC3E,UAAM,EAAE,YAAY,IAAI,MAAM,OAAO,QAAa;AAClD,UAAM,MAAM,aAAa;AAGzB,sBAAkB,cAAc,cAAc,GAAG;AACjD,QAAI;AAAE,uBAAiB,gBAAgB,UAAU,cAAc,YAAY;AAAA,IAAG,QAAQ;AAAA,IAAoB;AAE1G,yBAAqB,GAAG;AAGxB,UAAM,eAAe,OAAO,YAAY,0BAA0B,EAAE,SAAS,KAAK,CAAC;AAEnF,0BAAsB;AAAA,MACpB,IAAI;AAAA,MACJ,UAAU;AAAA,MACV,YAAY;AAAA,MACZ,UAAU;AAAA,MACV,QAAQ;AAAA,MACR;AAAA,MACA,QAAQ,UAAU;AAAA,MAClB,YAAY;AAAA,IACd,CAAC;AAED,WAAO;AAAA,MACL,MAAM;AAAA,QACJ,WAAW;AAAA,QACX,WAAW;AAAA,QACX,QAAQ;AAAA,MACV;AAAA,IACF;AAAA,EACF;AAWA,iBAAe,kBAAkB,KAA2C;AAC1E,UAAM,EAAE,kBAAkB,sBAAsB,kBAAkB,MAAM,OAAO,IAAI,gBAAgB,MAAM,IAAI,IAAI;AACjH,UAAM,EAAE,YAAY,IAAI,MAAM,OAAO,QAAa;AAClD,UAAM,MAAM,aAAa;AACzB,UAAM,aAAa,GAAG,gBAAgB,IAAI,YAAY,qBAAqB,EAAE,SAAS,KAAK,CAAC;AAC5F,UAAM,KAAK,YAAY;AAEvB,yBAAqB,GAAG;AAExB,UAAM,EAAE,QAAQ,kBAAkB,IAAI,GAAG,YAAY,MAAM;AACzD,YAAM,iBAAiB,YAAY;AAAA,QACjC,IAAI;AAAA,QACJ,UAAU;AAAA,QACV,YAAY;AAAA,QACZ;AAAA,QACA,SAAS;AAAA,QACT,MAAM,OAAO,KAAK,KAAK,IAAI,IAAI;AAAA,QAC/B,YAAY;AAAA,MACd,CAAC;AAED,YAAM,sBAAgC,CAAC;AACvC,iBAAW,YAAY,kBAAkB;AACvC,0BAAkB,UAAU,cAAc,GAAG;AAC7C,8BAAsB;AAAA,UACpB,IAAI,OAAO,YAAY,0BAA0B,EAAE,SAAS,KAAK,CAA
C;AAAA,UAClE,UAAU;AAAA,UACV,YAAY;AAAA,UACZ,UAAU;AAAA,UACV,QAAQ;AAAA,UACR,cAAc;AAAA,UACd,QAAQ,UAAU;AAAA,UAClB,YAAY;AAAA,QACd,CAAC;AACD,4BAAoB,KAAK,QAAQ;AAAA,MACnC;AAEA,aAAO,EAAE,QAAQ,gBAAgB,mBAAmB,oBAAoB;AAAA,IAC1E,CAAC,EAAE;AAEH,qBAAiB,iBAAiB,UAAU,OAAO,IAAI,sBAAsB;AAAA,MAC3E,QAAQ;AAAA,MACR;AAAA,IACF,CAAC,EAAE,MAAM,MAAM;AAAA,IAAC,CAAC;AACjB,eAAW,YAAY,mBAAmB;AACxC,UAAI;AAAE,yBAAiB,gBAAgB,UAAU,UAAU,YAAY;AAAA,MAAG,QAAQ;AAAA,MAAoB;AAAA,IACxG;AAEA,WAAO;AAAA,MACL,MAAM;AAAA,QACJ,cAAc;AAAA,QACd,oBAAoB;AAAA,QACpB,QAAQ;AAAA,QACR,YAAY;AAAA,MACd;AAAA,IACF;AAAA,EACF;AAGA,iBAAe,YAAY,KAA2C;AACpE,UAAM,KAAK,OAAO,IAAI,MAAM,OAAO,WAAW,IAAI,MAAM,KAAK;AAE7D,QAAI,IAAI;AACN,YAAM,MAAM,QAAQ,EAAE;AACtB,UAAI,CAAC,IAAK,QAAO,EAAE,MAAM,EAAE,OAAO,CAAC,EAAE,EAAE;AACvC,aAAO;AAAA,QACL,MAAM;AAAA,UACJ,OAAO,CAAC;AAAA,YACN,GAAG,cAAc,GAAG;AAAA,YACpB,SAAS,IAAI;AAAA,UACf,CAAC;AAAA,QACH;AAAA,MACF;AAAA,IACF;AAEA,UAAM,eAAe,IAAI,MAAM,WAAW,QAAQ,SAAY,IAAI,MAAM;AACxE,UAAM,QAAQ,IAAI,MAAM,QAAQ,OAAO,IAAI,MAAM,KAAK,IAAI;AAE1D,UAAM,OAAO,UAAU,EAAE,QAAQ,cAAc,MAAM,CAAC;AACtD,UAAM,QAAQ,KAAK,IAAI,aAAa;AAEpC,WAAO,EAAE,MAAM,EAAE,MAAM,EAAE;AAAA,EAC3B;AAQA,iBAAe,eAAe,KAA2C;AACvE,UAAM,QAAQ,IAAI,MAAM,QAAQ,OAAO,IAAI,MAAM,KAAK,IAAI;AAC1D,UAAM,SAAS,OAAO,IAAI,MAAM,WAAW,WAAW,IAAI,MAAM,SAAS;AACzE,UAAM,SAAS,OAAO,IAAI,MAAM,WAAW,WAAW,IAAI,MAAM,SAAS;AACzE,UAAM,OAAO,OAAO,IAAI,MAAM,SAAS,WAAW,IAAI,MAAM,OAAO;AACnE,UAAM,OAAO,OAAO,IAAI,MAAM,SAAS,WAAW,IAAI,MAAM,OAAO;AACnE,UAAM,WAAW,OAAO,IAAI,MAAM,UAAU,WAAW,IAAI,MAAM,QAAQ;AAEzE,UAAM,QAAQ,WAAW,kBAAkB,QAAQ,IAAI;AAEvD,QAAI;AACJ,QAAI,MAAM;AACR,YAAM,UAAU,QAAQ,IAAI;AAC5B,UAAI,CAAC,WAAW,CAAC,QAAQ,WAAY,QAAO,EAAE,MAAM,EAAE,UAAU,CAAC,EAAE,EAAE;AACrE,WAAK,QAAQ;AAAA,IACf;AAEA,UAAM,OAAO,aAAa,EAAE,OAAO,QAAQ,QAAQ,MAAM,OAAO,GAAG,CAAC;AACpE,UAAM,WAAW,KAAK,IAAI,CAAC,SAAS;AAAA,MAClC,IAAI,IAAI;AAAA,MACR,OAAO,IAAI;AAAA,MACX,MAAM,IAAI;AAAA,MACV,QAAQ,IAAI;AAAA,MACZ,YAAY,IAAI;AAAA,MAChB,UAAU,IAAI;AAAA,MACd,QAAQ,IAAI;AAAA,MACZ,OAAO,IAAI;AAAA,MACX,UAAU,IAAI,WAAW,IAAI,MAAM,GAAG,6BAA6B;AAAA,MACnE,cAAc,IAAI;AAAA,MAClB,YAAY,IAA
I;AAAA,MAChB,mBAAmB,IAAI;AAAA,IACzB,EAAE;AAEF,WAAO,EAAE,MAAM,EAAE,SAAS,EAAE;AAAA,EAC9B;AAGA,iBAAe,WAAW,MAA4C;AACpE,UAAM,OAAO,gBAAgB;AAC7B,UAAM,UAAU,KAAK,IAAI,CAAC,SAAS;AAAA,MACjC,IAAI,IAAI;AAAA,MACR,MAAM,IAAI;AAAA,MACV,MAAM,IAAI;AAAA,MACV,QAAQ,IAAI;AAAA,MACZ,MAAM,IAAI,OAAO,IAAI,KAAK,MAAM,GAAG,EAAE,IAAI,CAAC,MAAM,EAAE,KAAK,CAAC,IAAI,CAAC;AAAA,IAC/D,EAAE;AAEF,WAAO,EAAE,MAAM,EAAE,QAAQ,EAAE;AAAA,EAC7B;AAEA,SAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF;AACF;;;ACrVA,SAAS,WAAAC,gBAAe;AAmBjB,IAAM,2BAA2B;AAMxC,IAAM,eAAe,iBAAE,OAAO;AAAA,EAC5B,MAAM,iBAAE,OAAO,EAAE,SAAS;AAAA,EAC1B,aAAa,iBAAE,OAAO,EAAE,SAAS;AAAA,EACjC,SAAS,iBAAE,OAAO,EAAE,SAAS;AAC/B,CAAC;AAmBM,SAAS,uBAAuB,MAAoB;AACzD,QAAM,EAAE,UAAU,kBAAkB,OAAO,IAAI;AAG/C,iBAAe,UAAU,KAA2C;AAClE,UAAM,EAAE,MAAM,aAAa,gBAAgB,QAAQ,IAAI,aAAa,MAAM,IAAI,IAAI;AAIlF,UAAM,aAAa,iBAAiB,QAAQ;AAC5C,QAAI,CAAC,sBAAsB,YAAY,IAAI,GAAG;AAC5C,aAAO;AAAA,QACL,QAAQ;AAAA,QACR,MAAM;AAAA,UACJ,IAAI;AAAA,UACJ,OAAO;AAAA,QACT;AAAA,MACF;AAAA,IACF;AAEA,QAAI,cAAc;AAClB,QAAI;AACJ,QAAI,QAAQ,CAAC,aAAa;AACxB,UAAI;AACJ,UAAI;AACF,cAAM,aAAa,WAAW,MAAM,QAAQ,IAAI,GAAG;AACnD,cAAM,cAAcC,SAAQ,UAAU,IAAI;AAC1C,gBAAQ,qBAAqB,MAAM,YAAY,SAAS,aAAa,gBAAgB;AAAA,MACvF,QAAQ;AACN,cAAM,cAAcA,SAAQ,UAAU,IAAI;AAC1C,gBAAQ,qBAAqB,MAAM,QAAW,SAAS,aAAa,gBAAgB;AAAA,MACtF;AACA,oBAAc,OAAO;AACrB,mBAAa,OAAO;AAOpB,UAAI,QAAQ,0BAA0B,IAAI,KAAK,CAAC,OAAO;AACrD,eAAO;AAAA,UACL,MAAM;AAAA,YACJ,IAAI;AAAA,YACJ,SAAS,QAAQ,IAAI;AAAA,YACrB,QAAQ;AAAA,YACR,QAAQ;AAAA,UACV;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAEA,UAAM,EAAE,SAAS,IAAI,MAAM,OAAO,wBAAyB;AAC3D,UAAM,gBAAgB,SAAS,UAAU;AAAA,MACvC;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF,CAAC;AAKD,UAAM,mBAAmB,WAAW;AACpC,UAAM,QAAQ,eAAe,kBAAkB,IAAI;AAEnD,kBACG,KAAK,CAAC,WAAW;AAChB,YAAM,WAAW,QAAQ;AACzB,UAAI,OAAO,WAAW,UAAU;AAC9B,eAAO,UAAU;AAAA,UACf,QAAQ;AAAA,UACR,MAAM;AAAA,UACN,OAAO,gBAAgB,QAAQ;AAAA,UAC/B,SAAS,OAAO,SAAS;AAAA,UACzB,MAAM,cAAc,OAAO,KAAK;AAAA,UAChC,UAAU,EAAE,UAAU,QAAQ,MAAM,OAAO,OAAO,MAAM;AAAA,QAC1D,GAAG,UAAU;AACb,eAAO,MAAM,UAAU,aAAa,oBAAoB;
AAAA,UACtD,OAAO,OAAO;AAAA,UACd,OAAO,OAAO,SAAS;AAAA,UACvB,QAAQ,OAAO,QAAQ,IAAI,OAAK,GAAG,EAAE,IAAI,IAAI,EAAE,MAAM,EAAE,KAAK,CAAC;AAAA,QAC/D,CAAC;AAAA,MACH,OAAO;AACL,eAAO,UAAU;AAAA,UACf,QAAQ;AAAA,UACR,MAAM;AAAA,UACN,OAAO,mBAAmB,QAAQ;AAAA,UAClC,MAAM,cAAc,OAAO,KAAK;AAAA,UAChC,UAAU,EAAE,UAAU,QAAQ,MAAM,OAAO,OAAO,MAAM;AAAA,QAC1D,GAAG,UAAU;AACb,eAAO,KAAK,UAAU,WAAW,uBAAuB;AAAA,UACtD,OAAO,OAAO;AAAA,UACd,QAAQ,OAAO;AAAA,UACf,QAAQ,OAAO,QAAQ,IAAI,OAAK,GAAG,EAAE,IAAI,IAAI,EAAE,MAAM,EAAE,KAAK,CAAC;AAAA,QAC/D,CAAC;AAAA,MACH;AAAA,IACF,CAAC,EACA,MAAM,CAAC,QAAQ;AACd,aAAO,MAAM,UAAU,aAAa,mCAAmC;AAAA,QACrE,OAAQ,IAAc,WAAW,OAAO,GAAG;AAAA,QAC3C,OAAQ,IAAc,OAAO,MAAM,IAAI,EAAE,MAAM,GAAG,CAAC,EAAE,KAAK,KAAK;AAAA,MACjE,CAAC;AAAA,IACH,CAAC;AAEH,WAAO,EAAE,MAAM,EAAE,IAAI,MAAM,SAAS,iBAAiB,MAAM,EAAE;AAAA,EAC/D;AAGA,iBAAe,eAAe,KAA2C;AACvE,UAAM,QAAQ,IAAI,MAAM,QAAQ,OAAO,IAAI,MAAM,KAAK,IAAI;AAC1D,UAAM,SAAS,IAAI,MAAM,SAAS,OAAO,IAAI,MAAM,MAAM,IAAI;AAC7D,UAAM,UAAU,IAAI,MAAM,WAAW;AACrC,UAAM,SAAS,IAAI,MAAM,UAAU;AACnC,UAAM,OAAO,IAAI,MAAM,QAAQ;AAC/B,UAAM,SAAS,IAAI,MAAM,UAAU;AAEnC,UAAM,aAAa,EAAE,UAAU,SAAS,QAAQ,MAAM,OAAO;AAC7D,UAAM,OAAO,SAAS,EAAE,GAAG,YAAY,OAAO,OAAO,CAAC;AACtD,UAAM,QAAQ,UAAU,UAAU;AAElC,WAAO,EAAE,MAAM,EAAE,MAAM,OAAO,QAAQ,MAAM,EAAE;AAAA,EAChD;AAGA,iBAAe,aAAa,KAA2C;AACrE,UAAM,MAAM,OAAO,IAAI,OAAO,EAAE;AAChC,QAAI,CAAC,KAAK;AACR,aAAO,EAAE,QAAQ,KAAK,MAAM,EAAE,OAAO,gBAAgB,EAAE;AAAA,IACzD;AACA,WAAO,EAAE,MAAM,EAAE,IAAI,EAAE;AAAA,EACzB;AAGA,iBAAe,oBAAoB,KAA2C;AAC5E,UAAM,UAAU,YAAY,IAAI,OAAO,EAAE;AACzC,WAAO,EAAE,MAAM,EAAE,QAAQ,EAAE;AAAA,EAC7B;AAGA,iBAAe,kBAAkB,KAA2C;AAC1E,UAAM,QAAQ,eAAe,IAAI,OAAO,EAAE;AAC1C,WAAO,EAAE,MAAM,MAAM;AAAA,EACvB;AAEA,SAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF;AACF;;;ACzMA,OAAOC,UAAQ;AACf,OAAOC,YAAU;AASjB,IAAM,yBAAiD;AAAA,EACrD,KAAK;AAAA,EACL,KAAK;AAAA,EACL,MAAM;AAAA,EACN,KAAK;AAAA,EACL,MAAM;AACR;AAcO,SAAS,wBAAwB,MAAsB;AAC5D,QAAM,EAAE,SAAS,IAAI;AAGrB,iBAAe,oBAAoB,KAA2C;AAC5E,UAAM,WAAW,IAAI,OAAO;AAE5B,QAAI,SAAS,SAAS,IAAI,KAAK,SAAS,SAAS,GAAG,GAAG;AACrD,aAAO,EAAE,QAAQ
,KAAK,MAAM,EAAE,OAAO,mBAAmB,EAAE;AAAA,IAC5D;AAGA,UAAM,MAAM,wBAAwB,QAAQ;AAC5C,QAAI,KAAK,MAAM;AACb,YAAMC,eAAc,IAAI,cAAc;AACtC,aAAO,EAAE,QAAQ,KAAK,SAAS,EAAE,gBAAgBA,aAAY,GAAG,MAAM,IAAI,KAAK;AAAA,IACjF;AAGA,UAAM,WAAWC,OAAK,KAAK,UAAU,eAAe,QAAQ;AAC5D,QAAI;AACJ,QAAI;AACF,iBAAWC,KAAG,aAAa,QAAQ;AAAA,IACrC,QAAQ;AACN,aAAO,EAAE,QAAQ,KAAK,MAAM,EAAE,OAAO,YAAY,EAAE;AAAA,IACrD;AACA,UAAM,MAAMD,OAAK,QAAQ,QAAQ,EAAE,MAAM,CAAC,EAAE,YAAY;AACxD,UAAM,cAAc,uBAAuB,GAAG,KAAK;AACnD,WAAO,EAAE,QAAQ,KAAK,SAAS,EAAE,gBAAgB,YAAY,GAAG,MAAM,SAAS;AAAA,EACjF;AAEA,SAAO,EAAE,oBAAoB;AAC/B;;;ACrEA,OAAOE,UAAQ;AACf,OAAOC,YAAU;AAQV,SAAS,mBAAmB,QAAgB,gBAAgC;AACjF,MAAI,WAAW;AAGf,QAAM,QAAkB,CAAC;AACzB,WAAS,IAAI,GAAG,KAAK,GAAG,KAAK;AAC3B,UAAM,UAAUC,OAAK,KAAK,QAAQ,UAAU,CAAC,MAAM;AACnD,QAAIC,KAAG,WAAW,OAAO,EAAG,OAAM,KAAK,OAAO;AAAA,EAChD;AACA,QAAM,UAAUD,OAAK,KAAK,QAAQ,YAAY;AAC9C,MAAIC,KAAG,WAAW,OAAO,EAAG,OAAM,KAAK,OAAO;AAE9C,aAAW,QAAQ,OAAO;AACxB,UAAM,UAAUA,KAAG,aAAa,MAAM,OAAO;AAC7C,eAAW,QAAQ,QAAQ,MAAM,IAAI,GAAG;AACtC,UAAI,CAAC,KAAK,KAAK,EAAG;AAClB,UAAI;AACF,cAAM,QAAQ,KAAK,MAAM,IAAI;AAC7B,YAAI,MAAM,YAAY,gBAAgB;AACpC,gBAAM,EAAE,WAAW,OAAO,MAAM,WAAW,SAAS,GAAG,KAAK,IAAI;AAChE,yBAAe;AAAA,YACb;AAAA,YACA;AAAA,YACA,MAAM,QAAQ,GAAG,aAAa,SAAS;AAAA,YACvC,WAAW,aAAa,gBAAgB,QAAQ,SAAS;AAAA,YACzD;AAAA,YACA,MAAM,OAAO,KAAK,IAAI,EAAE,SAAS,IAAI,KAAK,UAAU,IAAI,IAAI;AAAA,YAC5D,YAAY,KAAK,cAAc;AAAA,UACjC,CAAC;AACD;AAAA,QACF;AAAA,MACF,QAAQ;AAAA,MAER;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AACT;;;AC/CO,IAAM,6BAA6B;AACnC,IAAM,2BAA2B;AAIjC,IAAM,qBAAqB;AAAA,EAChC,YAAY;AAAA,EACZ,eAAe;AAAA,EACf,mBAAmB;AAAA,EACnB,0BAA0B;AAAA,EAC1B,uBAAuB;AAAA,EACvB,qBAAqB;AAAA,EACrB,iBAAiB;AAAA,EACjB,iBAAiB;AAAA,EACjB,uBAAuB;AAAA,EACvB,kBAAkB;AACpB;AAoBA,IAAM,gBAA8B;AAAA,EAClC;AAAA,IACE,QAAQ;AAAA,IACR,MAAM;AAAA,IACN,WAAW,mBAAmB;AAAA,IAC9B,cAAc;AAAA,EAChB;AAAA,EACA;AAAA,IACE,QAAQ;AAAA,IACR,MAAM;AAAA,IACN,WAAW,mBAAmB;AAAA,IAC9B,cAAc;AAAA,EAChB;AAAA,EACA;AAAA,IACE,QAAQ;AAAA,IACR,MAAM;AAAA,IACN,WAAW,mBAAmB;AAAA,IAC9B,cAAc;AAAA,EAChB;AAAA,EACA;AAAA,IACE,QAAQ;AAAA,IACR,MAAM;AAAA,IAC
N,WAAW,mBAAmB;AAAA,IAC9B,cAAc;AAAA,EAChB;AAAA,EACA;AAAA,IACE,QAAQ;AAAA,IACR,MAAM;AAAA,IACN,WAAW,mBAAmB;AAAA,IAC9B,cAAc;AAAA,EAChB;AAAA,EACA;AAAA,IACE,QAAQ;AAAA,IACR,MAAM;AAAA,IACN,WAAW,mBAAmB;AAAA,IAC9B,cAAc;AAAA,EAChB;AAAA,EACA;AAAA,IACE,QAAQ;AAAA,IACR,MAAM;AAAA,IACN,WAAW,mBAAmB;AAAA,IAC9B,cAAc;AAAA,EAChB;AAAA,EACA;AAAA,IACE,QAAQ;AAAA,IACR,MAAM;AAAA,IACN,WAAW,mBAAmB;AAAA,IAC9B,cAAc;AAAA,IACd,cAAc,EAAE,KAAK,SAAS;AAAA,EAChC;AAAA,EACA;AAAA,IACE,QAAQ;AAAA,IACR,MAAM;AAAA,IACN,WAAW,mBAAmB;AAAA,IAC9B,cAAc;AAAA,IACd,cAAc,EAAE,KAAK,SAAS;AAAA,EAChC;AAAA,EACA;AAAA,IACE,QAAQ;AAAA,IACR,MAAM;AAAA,IACN,WAAW,mBAAmB;AAAA,IAC9B,cAAc;AAAA,IACd,cAAc,EAAE,KAAK,SAAS;AAAA,EAChC;AAAA,EACA;AAAA,IACE,QAAQ;AAAA,IACR,MAAM;AAAA,IACN,WAAW,mBAAmB;AAAA,IAC9B,cAAc;AAAA,EAChB;AAAA,EACA;AAAA,IACE,QAAQ;AAAA,IACR,MAAM;AAAA,IACN,WAAW,mBAAmB;AAAA,IAC9B,cAAc;AAAA,EAChB;AACF;AAEA,IAAM,qBAA6C;AAAA,EACjD,oBAAoB;AAAA,EACpB,mBAAmB;AAAA,EACnB,mBAAmB;AAAA,EACnB,sBAAsB;AAAA,EACtB,kBAAkB;AAAA,EAClB,uBAAuB;AAAA,EACvB,wBAAwB;AAAA,EACxB,2BAA2B;AAAA,EAC3B,iCAAiC;AAAA,EACjC,sBAAsB;AAAA,EACtB,mBAAmB;AAAA,EACnB,sBAAsB;AAAA,EACtB,uBAAuB;AAAA,EACvB,yBAAyB;AAAA,EACzB,6BAA6B;AAAA,EAC7B,yBAAyB;AAAA,EACzB,8BAA8B;AAAA,EAC9B,sCAAsC;AAAA,EACtC,mCAAmC;AAAA,EACnC,qBAAqB;AAAA,EACrB,eAAe;AAAA,EACf,oBAAoB;AAAA,EACpB,6BAA6B;AAAA,EAC7B,iCAAiC;AAAA,EACjC,6BAA6B;AAAA,EAC7B,gCAAgC;AAAA,EAChC,6BAA6B;AAAA,EAC7B,4CAA4C;AAAA,EAC5C,cAAc;AAChB;AAEA,IAAM,4BAGD;AAAA,EACH;AAAA,IACE,QAAQ;AAAA,IACR,QAAQ,CAACC,WAAS;AAChB,YAAM,QAAQ,oDAAoD,KAAKA,MAAI;AAC3E,UAAI,CAAC,MAAO,QAAO;AACnB,YAAM,CAAC,EAAE,QAAQ,IAAI,IAAI;AACzB,YAAM,cAAc,cAAc,MAAM;AACxC,aAAO,SAAS,SAAS,GAAG,WAAW,aAAa,GAAG,WAAW;AAAA,IACpE;AAAA,EACF;AACF;AAEA,IAAM,2BAA2B;AAM1B,SAAS,yBAAyBC,QAAwC;AAC/E,QAAM,UAAU,gBAAgBA,MAAI;AACpC,MAAI,CAAC,QAAS,QAAO;AACrB,SAAO;AAAA,IACL,GAAG;AAAA,IACH,WAAWA;AAAA,IACX,YAAY,kBAAkBA,MAAI;AAAA,EACpC;AACF;AAEO,SAAS,qBAAqB,QAAmC;AACtE,QAAM,SAAS,IAAI,gBAAgB,OAAO,YAAY;AACtD,SAAO,IAAI,4BAA4B,OAAO,SAAS;AACvD,SAAO,IAAI,0BAA0B,OAAO,SAAS;AACrD,SAAO,GAAG,OAAO,IAAI,IAAI,OAAO,SAAS,
CAAC;AAC5C;AAEO,SAAS,kCAAkC,OAA4B,cAAwB;AACpG,QAAM,cAAc,CAAC,GAAG,IAAI,IAAI,YAAY,CAAC;AAC7C,QAAM,aAAa,UAAU,UAAU,aAAa;AACpD,QAAM,cAAc,YAAY,IAAI,wBAAwB,EAAE,KAAK,CAAC,WAAW,WAAW,IAAI,KAAK;AACnG,QAAM,cAAc,YAAY,IAAI,iBAAiB;AACrD,QAAM,eAAe,YAAY,CAAC,KAAK;AACvC,QAAM,YAAY,YAAY,MAAM,GAAG,wBAAwB,EAAE,KAAK,IAAI;AAC1E,QAAM,iBAAiB,KAAK,IAAI,GAAG,YAAY,SAAS,wBAAwB;AAChF,QAAM,eAAe,iBAAiB,IAAI,GAAG,SAAS,MAAM,cAAc,UAAU;AAEpF,SAAO;AAAA,IACL,OAAO,YAAY,WAAW,IAAI,GAAG,YAAY,WAAW,GAAG,YAAY,MAAM;AAAA,IACjF,SAAS,cACL,GAAG,YAAY,YAAY,SAAM,YAAY,SAAM,UAAU,KAC7D,GAAG,YAAY,SAAM,UAAU;AAAA,IACnC,MAAM,cAAc,qBAAqB,WAAW,IAAI;AAAA,IACxD,UAAU;AAAA,MACR;AAAA,MACA,eAAe;AAAA,MACf,cAAc;AAAA,MACd,cAAc,cACV;AAAA,QACE,MAAM,YAAY;AAAA,QAClB,YAAY,YAAY;AAAA,QACxB,YAAY,YAAY;AAAA,QACxB,aAAa,YAAY;AAAA,MAC3B,IACA;AAAA,IACN;AAAA,EACF;AACF;AAEA,SAAS,gBAAgBA,QAA0C;AACjE,aAAW,QAAQ,eAAe;AAChC,QAAIA,WAAS,KAAK,UAAUA,OAAK,WAAW,GAAG,KAAK,MAAM,GAAG,GAAG;AAC9D,YAAM,EAAE,MAAM,WAAW,cAAc,aAAa,IAAI;AACxD,aAAO,EAAE,MAAM,WAAW,cAAc,aAAa;AAAA,IACvD;AAAA,EACF;AACA,SAAO;AACT;AAEA,SAAS,kBAAkBA,QAAsB;AAC/C,QAAM,QAAQ,mBAAmBA,MAAI;AACrC,MAAI,MAAO,QAAO;AAElB,aAAW,QAAQ,2BAA2B;AAC5C,QAAIA,WAAS,KAAK,UAAUA,OAAK,WAAW,KAAK,MAAM,GAAG;AACxD,YAAM,QAAQ,KAAK,OAAOA,MAAI;AAC9B,UAAI,MAAO,QAAO;AAAA,IACpB;AAAA,EACF;AAEA,SAAO,cAAcA,OAAK,MAAM,GAAG,EAAE,IAAI,KAAK,SAAS;AACzD;AAEA,SAAS,cAAc,OAAuB;AAC5C,SAAO,MACJ,MAAM,OAAO,EACb,OAAO,OAAO,EACd,IAAI,CAAC,SAAS,KAAK,OAAO,CAAC,EAAE,YAAY,IAAI,KAAK,MAAM,CAAC,CAAC,EAC1D,KAAK,GAAG;AACb;;;ACjOO,IAAM,eAAN,MAAmB;AAAA,EAChB,QAAoB;AAAA,EACpB,eAAuB,KAAK,IAAI;AAAA,EAChC,OAAmB,CAAC;AAAA,EACpB,QAA8C;AAAA,EAC9C,UAAU;AAAA,EACV;AAAA,EACA;AAAA,EACA,gBAAgB;AAAA,EAExB,YAAY,QAA4B;AACtC,SAAK,SAAS;AACd,SAAK,SAAS,OAAO;AAAA,EACvB;AAAA,EAEA,SAAS,KAAqB;AAC5B,SAAK,KAAK,KAAK,GAAG;AAAA,EACpB;AAAA,EAEA,aAAa,QAAgB,MAAwB;AACnD,SAAK,OAAO,KAAK,KAAK,OAAO,CAAC,QAAQ,CAAC,IAAI,KAAK,WAAW,MAAM,CAAC;AAClE,SAAK,KAAK,KAAK,GAAG,IAAI;AAAA,EACxB;AAAA,EAEA,iBAAuB;AACrB,SAAK,eAAe,KAAK,IAAI;AAC7B,SAAK,gBAAgB;AAErB,QAAI,KAAK,UAAU,cAAc;AAC/B,WAAK,OAAO,KAAK,UAAU,aAAa,wBAA
wB;AAChE,WAAK,QAAQ;AACb,WAAK,iBAAiB;AAAA,IACxB;AAAA,EACF;AAAA,EAEA,QAAc;AACZ,SAAK,eAAe,KAAK,IAAI;AAC7B,SAAK,QAAQ;AACb,SAAK,UAAU;AACf,SAAK,iBAAiB;AACtB,SAAK,OAAO,KAAK,UAAU,aAAa,wBAAwB;AAAA,MAC9D,MAAM,KAAK,KAAK,IAAI,CAAC,MAAM,EAAE,IAAI;AAAA,IACnC,CAAC;AAAA,EACH;AAAA,EAEA,OAAa;AACX,SAAK,UAAU;AACf,QAAI,KAAK,OAAO;AACd,mBAAa,KAAK,KAAK;AACvB,WAAK,QAAQ;AAAA,IACf;AACA,SAAK,OAAO,KAAK,UAAU,aAAa,sBAAsB;AAAA,EAChE;AAAA,EAEA,WAAuB;AACrB,SAAK,cAAc;AACnB,WAAO,KAAK;AAAA,EACd;AAAA,EAEQ,gBAAsB;AAC5B,UAAM,SAAS,KAAK,IAAI,IAAI,KAAK;AACjC,QAAI;AAEJ,QAAI,UAAU,KAAK,OAAO,sBAAsB;AAC9C,YAAM,UAAU,KAAK,KAAK,KAAK,CAAC,MAAM,EAAE,oBAAoB,CAAC;AAC7D,UAAI,SAAS;AACX,iBAAS;AACT,YAAI,CAAC,KAAK,eAAe;AACvB,eAAK,gBAAgB;AACrB,eAAK,OAAO,KAAK,UAAU,aAAa,mBAAmB,EAAE,IAAI,QAAQ,KAAK,CAAC;AAAA,QACjF;AAAA,MACF,OAAO;AACL,iBAAS;AACT,aAAK,gBAAgB;AAAA,MACvB;AAAA,IACF,WAAW,UAAU,KAAK,OAAO,kBAAkB;AACjD,eAAS;AAAA,IACX,WAAW,UAAU,KAAK,OAAO,iBAAiB;AAChD,eAAS;AAAA,IACX,OAAO;AACL,eAAS;AAAA,IACX;AAEA,QAAI,WAAW,KAAK,OAAO;AACzB,WAAK,OAAO,KAAK,UAAU,aAAa,0BAA0B;AAAA,QAChE,MAAM,KAAK;AAAA,QACX,IAAI;AAAA,QACJ,SAAS;AAAA,MACX,CAAC;AACD,WAAK,QAAQ;AAAA,IACf;AAAA,EACF;AAAA,EAEQ,mBAAyB;AAC/B,QAAI,CAAC,KAAK,QAAS;AACnB,QAAI,KAAK,MAAO,cAAa,KAAK,KAAK;AAEvC,UAAM,WACJ,KAAK,UAAU,UACX,KAAK,OAAO,kBACZ,KAAK,OAAO;AAElB,SAAK,QAAQ,WAAW,MAAM,KAAK,KAAK,GAAG,QAAQ;AAAA,EACrD;AAAA,EAEA,MAAc,OAAsB;AAClC,QAAI,CAAC,KAAK,QAAS;AAEnB,SAAK,cAAc;AAEnB,QAAI,KAAK,UAAU,cAAc;AAC/B,WAAK,OAAO,KAAK,UAAU,aAAa,0CAAqC;AAC7E,WAAK,QAAQ;AACb;AAAA,IACF;AAGA,UAAM,WAAW,KAAK,KAAK,OAAO,CAAC,MAAM,EAAE,MAAM,SAAS,KAAK,KAAK,CAAC;AACrE,SAAK,OAAO,MAAM,UAAU,YAAY,QAAQ;AAAA,MAC9C,OAAO,KAAK;AAAA,MACZ,MAAM,SAAS,IAAI,CAAC,MAAM,EAAE,IAAI;AAAA,IAClC,CAAC;AAED,eAAW,OAAO,UAAU;AAC1B,UAAI;AACF,cAAM,IAAI,GAAG;AAAA,MACf,SAAS,KAAK;AACZ,aAAK,OAAO,MAAM,UAAU,iBAAiB,QAAQ,IAAI,IAAI,YAAY;AAAA,UACvE,OAAQ,IAAc;AAAA,QACxB,CAAC;AAAA,MACH;AAAA,IACF;AAEA,SAAK,iBAAiB;AAAA,EACxB;AACF;;;ACvHO,SAAS,4BACd,mBAA2B,6BAA6B,eAChD;AACR,QAAM,KAAK,YAAY;AACvB,QAAM,SAAS,aAAa,IAAI;AAEhC,QAAM,OAAO,GAAG;AAAA,IACd;AAAA;AAAA;AAAA;AAAA;AA
AA;AAAA;AAAA,EAOF,EAAE,IAAI,aAAa,GAAG,MAAM;AAE5B,SAAO,KAAK;AACd;AAeO,SAAS,mBAAmB,sBAA0C;AAC3E,QAAM,KAAK,YAAY;AAEvB,QAAM,sBAAsB,qBAAqB,SAAS,IACtD,kBAAkB,qBAAqB,IAAI,MAAM,GAAG,EAAE,KAAK,IAAI,CAAC,MAChE;AAEJ,QAAM,SAAoB,CAAC,0BAA0B,GAAG,oBAAoB;AAE5E,QAAM,OAAO,GAAG;AAAA,IACd;AAAA;AAAA;AAAA,SAGK,mBAAmB;AAAA,EAC1B,EAAE,IAAI,GAAG,MAAM;AAEf,SAAO,KAAK,IAAI,CAAC,MAAM,EAAE,EAAE;AAC7B;AAmBA,eAAsB,sBAAsB,MAA6C;AACvF,QAAM,EAAE,QAAQ,sBAAsB,kBAAkB,UAAU,iBAAiB,IAAI;AACvF,QAAM,aAAa,qBAAqB;AAGxC,QAAM,oBAAoB,oBAAoB,8BAA8B;AAC5E,QAAM,YAAY,4BAA4B,gBAAgB;AAC9D,MAAI,YAAY,GAAG;AACjB,WAAO,KAAK,UAAU,qBAAqB,4BAA4B,EAAE,OAAO,UAAU,CAAC;AAAA,EAC7F;AAGA,QAAM,UAAU,mBAAmB,UAAU;AAC7C,MAAI,QAAQ,WAAW,EAAG;AAE1B,MAAI,eAAe;AACnB,aAAW,aAAa,SAAS;AAC/B,UAAM,SAAS,qBAAqB,SAAS;AAC7C,QAAI,CAAC,OAAO,QAAS;AAErB,UAAM,2BAA2B,WAAW,QAAQ,kBAAkB,QAAQ;AAE9E;AACA,WAAO,KAAK,UAAU,qBAAqB,wBAAwB;AAAA,MACjE,YAAY;AAAA,MACZ,QAAQ,OAAO;AAAA,IACjB,CAAC;AAAA,EACH;AAEA,MAAI,eAAe,GAAG;AACpB,WAAO,KAAK,UAAU,qBAAqB,iCAAiC,EAAE,SAAS,aAAa,CAAC;AAAA,EACvG;AACF;;;AC3GA,IAAM,qBAAqB,KAAK,KAAK,KAAK;AAuBnC,SAAS,kBAAkB,cAA4B,MAA0B;AACtF,QAAM,EAAE,kBAAkB,UAAU,QAAQ,YAAY,IAAI,WAAW,UAAU,gBAAgB,IAAI;AAErG,MAAI,mBAAmB;AACvB,eAAa,SAAS;AAAA,IACpB,MAAM;AAAA,IACN,OAAO,CAAC,UAAU,MAAM;AAAA,IACxB,IAAI,YAAY;AACd,UAAI,iBAAkB;AACtB,yBAAmB;AACnB,UAAI;AACF,cAAM,iBAAiB,UAAU,oBAAoB;AAAA,MACvD,UAAE;AACA,2BAAmB;AAAA,MACrB;AAAA,IACF;AAAA,EACF,CAAC;AAED,eAAa,SAAS;AAAA,IACpB,MAAM;AAAA,IACN,OAAO,CAAC,UAAU,QAAQ,OAAO;AAAA,IACjC,IAAI,MAAM,sBAAsB;AAAA,MAC9B;AAAA,MACA,sBAAsB,MAAM,SAAS;AAAA,MACrC;AAAA,MACA;AAAA,MACA,kBAAkB,WAAW,QAAQ,OAAO;AAAA,IAC9C,CAAC;AAAA,EACH,CAAC;AAED,eAAa,SAAS;AAAA,IACpB,MAAM;AAAA,IACN,OAAO,CAAC,QAAQ,OAAO;AAAA,IACvB,IAAI,YAAY;AACd,YAAM,gBAAgB,WAAW,QAAQ,OAAO;AAChD,YAAM,SAAS,IAAI,KAAK,KAAK,IAAI,IAAI,gBAAgB,UAAU,EAAE,YAAY;AAC7E,YAAM,UAAU,cAAc,MAAM;AACpC,UAAI,UAAU,GAAG;AACf,eAAO,KAAK,UAAU,eAAe,WAAW,OAAO,2BAA2B,aAAa,SAAS,EAAE,SAAS,gBAAgB,cAAc,CAAC;AAAA,MACpJ;AAAA,IACF;AAAA,EACF,CAAC;AAGD,eAAa,SAAS;AAAA,IACpB,MAAM;AAAA,IACN,OAAO,CAAC,QAAQ,OAAO;AAAA,IACvB,IAAI,Y
AAY;AACd,UAAI;AACF,cAAM,YAAY,iBAAiB,WAAW,SAAS,QAAQ;AAC/D,eAAO,KAAK,UAAU,cAAc,sBAAsB;AAC1D,cAAM,WAAW,aAAa,IAAI,WAAW,SAAS;AACtD,eAAO,KAAK,UAAU,iBAAiB,wBAAwB,EAAE,WAAW,SAAS,CAAC;AAAA,MACxF,SAAS,KAAK;AACZ,eAAO,MAAM,UAAU,cAAc,sBAAsB,EAAE,OAAQ,IAAc,QAAQ,CAAC;AAAA,MAC9F;AAAA,IACF;AAAA,EACF,CAAC;AAGD,eAAa,SAAS;AAAA,IACpB,MAAM;AAAA,IACN,OAAO,CAAC,QAAQ,OAAO;AAAA,IACvB,IAAI,YAAY;AACd,YAAM,SAAS,WAAW;AAC1B,UAAI,CAAC,OAAO,aAAa,cAAe;AACxC,YAAM,cAAc,OAAO,YAAY,gCAAgC,MAAM;AAC7E,YAAM,UAAU,MAAM,gBAAgB,kBAAkB;AACxD,UAAI,YAAY,QAAQ,KAAK,IAAI,IAAI,UAAU,WAAY;AAC3D,UAAI;AACF,cAAM,gBAAgB,SAAS;AAAA,MACjC,SAAS,KAAK;AACZ,eAAO,MAAM,UAAU,gBAAgB,wBAAwB;AAAA,UAC7D,OAAQ,IAAc;AAAA,QACxB,CAAC;AAAA,MACH;AAAA,IACF;AAAA,EACF,CAAC;AAQD,eAAa,SAAS;AAAA,IACpB,MAAM;AAAA,IACN,OAAO,CAAC,QAAQ,OAAO;AAAA,IACvB,IAAI,YAAY;AACd,YAAM,QAAQ,qBAAqB,UAAU,kBAAkB;AAC/D,UAAI,MAAM,WAAW,EAAG;AACxB,iBAAW,eAAe,OAAO;AAC/B,2BAAmB,UAAU,WAAW;AAAA,MAC1C;AACA,aAAO,KAAK,UAAU,wBAAwB,uCAAuC;AAAA,QACnF,OAAO,MAAM;AAAA,QACb,eAAe;AAAA,MACjB,CAAC;AAAA,IACH;AAAA,EACF,CAAC;AACH;;;AClJA,OAAOC,UAAQ;AACf,OAAOC,YAAU;;;ACUV,IAAM,yBAAyB;AAG/B,IAAM,0BAA0B;AAGhC,IAAM,sBAAsB;AAG5B,IAAM,0BAA0B;AAAA,EACrC;AAAA,EACA;AACF;AAGO,SAAS,gBAAgB,QAAyB;AACvD,QAAM,UAAU,OAAO,UAAU;AACjC,SAAO,wBAAwB,KAAK,CAAC,WAAW,QAAQ,WAAW,MAAM,CAAC;AAC5E;AAGA,SAAS,oBAAoB,WAAmC;AAC9D,QAAM,WAAW;AACjB,MAAI,CAAC,SAAU,QAAO;AAEtB,QAAM,WAAW,SAAS;AAC1B,MAAI,OAAO,aAAa,SAAU,QAAO;AAEzC,QAAM,gBAAgB,SAAS;AAC/B,MAAI,OAAO,kBAAkB,SAAU,QAAO;AAE9C,SAAO;AACT;AAcO,SAAS,iBACd,WACA,QAC2C;AAC3C,QAAM,MAAM,aAAa;AAGzB,mBAAiB,WAAW,GAAG;AAG/B,QAAM,QAAQ,qBAAqB;AAAA,IACjC,YAAY;AAAA,IACZ,aAAa,UAAU;AAAA,IACvB,YAAY;AAAA,IACZ,YAAY;AAAA,IACZ,YAAY,iBAAiB;AAAA,EAC/B,CAAC;AAGD,QAAM,eAAe,MAAM;AAG3B,MAAI;AAAE,uBAAmB,kBAAkB,WAAW,MAAM,IAAI,GAAG;AAAA,EAAG,QAAQ;AAAA,EAA4B;AAG1G,gBAAc,WAAW,EAAE,cAAc,aAAa,CAAC;AAEvD,SAAO,EAAE,SAAS,MAAM,IAAI,aAAa;AAC3C;AAQO,SAAS,cACd,WACA,UACA,WACA,YACM;AACN,QAAM,MAAM,aAAa;AAEzB,QAAM,WAAW,oBAAoB,SAAS;AAE9C,QAAM,WAAW,wBAAwB;AAAA,IACvC,YAAY;AAAA,IACZ,WAAW;AAAA,IACX,YAAY,YAAY,KAAK,UAAU,SAAS,EAAE,MAA
M,GAAG,sBAAsB,IAAI;AAAA,IACrF,qBAAqB,YAAY,MAAM,GAAG,uBAAuB,KAAK;AAAA,IACtE,WAAW;AAAA,IACX,WAAW;AAAA,IACX,YAAY;AAAA,EACd,CAAC;AAGD,MAAI,SAAS,oBAAoB,MAAM;AACrC,2BAAuB,SAAS,eAAe;AAAA,EACjD;AAGA,4BAA0B,SAAS;AACrC;AAYO,SAAS,kBACd,WACM;AACN,mBAAiB,WAAW,aAAa,CAAC;AAC5C;AAKO,SAAS,kBACd,WACA,UACA,WACA,OACA,aACM;AACN,QAAM,MAAM,aAAa;AACzB,QAAM,WAAW,oBAAoB,SAAS;AAE9C,QAAM,WAAW,wBAAwB;AAAA,IACvC,YAAY;AAAA,IACZ,WAAW;AAAA,IACX,YAAY,YAAY,KAAK,UAAU,SAAS,EAAE,MAAM,GAAG,sBAAsB,IAAI;AAAA,IACrF,qBAAqB,OAAO,MAAM,GAAG,uBAAuB,KAAK;AAAA,IACjE,WAAW;AAAA,IACX,SAAS;AAAA,IACT,eAAe,OAAO,MAAM,GAAG,uBAAuB,MAAM,cAAc,gBAAgB;AAAA,IAC1F,WAAW;AAAA,IACX,YAAY;AAAA,EACd,CAAC;AAED,MAAI,SAAS,oBAAoB,MAAM;AACrC,2BAAuB,SAAS,eAAe;AAAA,EACjD;AACF;AAKO,SAAS,oBACd,WACA,SACA,WACM;AACN,QAAM,MAAM,aAAa;AACzB,0BAAwB;AAAA,IACtB,YAAY;AAAA,IACZ,WAAW;AAAA,IACX,YAAY,KAAK,UAAU,EAAE,UAAU,SAAS,YAAY,UAAU,CAAC,EAAE,MAAM,GAAG,sBAAsB;AAAA,IACxG,WAAW;AAAA,IACX,YAAY;AAAA,EACd,CAAC;AACH;AAKO,SAAS,mBACd,WACA,SACA,WACA,sBACM;AACN,QAAM,MAAM,aAAa;AACzB,0BAAwB;AAAA,IACtB,YAAY;AAAA,IACZ,WAAW;AAAA,IACX,YAAY,KAAK,UAAU,EAAE,UAAU,SAAS,YAAY,UAAU,CAAC,EAAE,MAAM,GAAG,sBAAsB;AAAA,IACxG,qBAAqB,sBAAsB,MAAM,GAAG,uBAAuB,KAAK;AAAA,IAChF,WAAW;AAAA,IACX,YAAY;AAAA,EACd,CAAC;AACH;AAKO,SAAS,kBACd,WACA,OACA,cACM;AACN,QAAM,MAAM,aAAa;AACzB,0BAAwB;AAAA,IACtB,YAAY;AAAA,IACZ,WAAW;AAAA,IACX,qBAAqB,cAAc,MAAM,GAAG,uBAAuB,KAAK;AAAA,IACxE,SAAS;AAAA,IACT,eAAe,OAAO,MAAM,GAAG,uBAAuB,KAAK;AAAA,IAC3D,WAAW;AAAA,IACX,YAAY;AAAA,EACd,CAAC;AACH;AAKO,SAAS,oBACd,WACA,QACA,aACA,iBACM;AACN,QAAM,MAAM,aAAa;AACzB,0BAAwB;AAAA,IACtB,YAAY;AAAA,IACZ,WAAW;AAAA,IACX,YAAY,KAAK,UAAU,EAAE,SAAS,QAAQ,cAAc,aAAa,kBAAkB,gBAAgB,CAAC,EAAE,MAAM,GAAG,sBAAsB;AAAA,IAC7I,qBAAqB,aAAa,MAAM,GAAG,uBAAuB,KAAK;AAAA,IACvE,WAAW;AAAA,IACX,YAAY;AAAA,EACd,CAAC;AACH;AAKO,SAAS,cACd,WACA,OACA,SACA,gBACM;AACN,QAAM,MAAM,aAAa;AACzB,0BAAwB;AAAA,IACtB,YAAY;AAAA,IACZ,WAAW,GAAG,KAAK;AAAA,IACnB,YAAY,UAAU,KAAK,UAAU,EAAE,QAAQ,CAAC,EAAE,MAAM,GAAG,sBAAsB,IAAI;AAAA,IACrF,qBAAqB,gBAAgB,MAAM,GAAG,uBAAuB,KAAK;AAAA,IAC1E,WAAW;AAAA,IACX
,YAAY;AAAA,EACd,CAAC;AACH;;;ADzPA,IAAM,yBAA8C,oBAAI,IAAI,CAAC,eAAe,YAAY,cAAc,CAAC;AA+BhG,SAAS,iBAAiB,EAAE,WAAW,OAAO,GAA+B;AAGlF,QAAM,qBAAqB,oBAAI,IAAY;AAU3C,WAAS,YAAY,WAAmB,OAA8D;AACpG,QAAI,MAAM,SAAS,eAAe;AAChC,UAAI,gBAAgB,OAAO,MAAM,UAAU,EAAE,CAAC,EAAG,QAAO;AACxD,uBAAiB,WAAW,OAAO,MAAM,UAAU,EAAE,CAAC;AACtD,aAAO;AAAA,IACT;AACA,QAAI,MAAM,SAAS,YAAY;AAC7B;AAAA,QACE;AAAA,QACA,OAAO,MAAM,aAAa,EAAE;AAAA,QAC5B,MAAM;AAAA,QACN,OAAO,MAAM,mBAAmB,WAAW,MAAM,iBAAiB;AAAA,MACpE;AACA,aAAO;AAAA,IACT;AACA,QAAI,MAAM,SAAS,gBAAgB;AACjC;AAAA,QACE;AAAA,QACA,OAAO,MAAM,aAAa,EAAE;AAAA,QAC5B,MAAM;AAAA,QACN,OAAO,MAAM,UAAU,WAAW,MAAM,QAAQ;AAAA,QAChD,CAAC,CAAC,MAAM;AAAA,MACV;AACA,aAAO;AAAA,IACT;AACA,WAAO;AAAA,EACT;AAeA,WAAS,iBAAiB,WAAyB;AACjD,QAAI,mBAAmB,IAAI,SAAS,EAAG;AACvC,uBAAmB,IAAI,SAAS;AAIhC,UAAM,aAAaC,OAAK,KAAK,WAAW,GAAG,SAAS,QAAQ;AAC5D,QAAI;AACJ,QAAI;AACF,gBAAUC,KAAG,aAAa,YAAY,OAAO,EAAE,KAAK;AAAA,IACtD,QAAQ;AACN;AAAA,IACF;AACA,QAAI,CAAC,QAAS;AAKd,QAAI,CAAC,WAAW,SAAS,GAAG;AAC1B,aAAO,MAAM,UAAU,qBAAqB,+CAA+C,EAAE,YAAY,UAAU,CAAC;AACpH;AAAA,IACF;AAEA,UAAM,YAA4C,QAAQ,MAAM,IAAI,EAAE,IAAI,CAAC,SAAS,KAAK,MAAM,IAAI,CAAC;AAGpG,UAAM,qBAAqB,qBAAqB,SAAS,EAAE;AAE3D,QAAI,cAAc;AAClB,QAAI,mBAAmB;AAEvB,aAAS,IAAI,GAAG,IAAI,UAAU,QAAQ,KAAK;AACzC,YAAM,IAAI,UAAU,CAAC;AACrB,UAAI,EAAE,SAAS,iBAAiB,CAAC,gBAAgB,OAAO,EAAE,UAAU,EAAE,CAAC,GAAG;AACxE;AACA,YAAI,gBAAgB,qBAAqB,GAAG;AAC1C,6BAAmB;AACnB;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAEA,QAAI,qBAAqB,GAAI;AAG7B,UAAM,iBAAiB,UAAU,MAAM,gBAAgB,EAAE;AAAA,MACvD,CAAC,MAAM,uBAAuB,IAAI,OAAO,EAAE,IAAI,CAAC;AAAA,IAClD;AAEA,QAAI,mBAAmB;AACvB,QAAI,sBAAsB;AAE1B,eAAW,SAAS,gBAAgB;AAClC,UAAI;AACF,cAAM,SAAS,YAAY,WAAW,KAAK;AAC3C,YAAI,WAAW,SAAU;AAAA,iBAChB,WAAW,WAAY;AAAA,MAClC,SAAS,KAAK;AACZ,eAAO,KAAK,UAAU,qBAAqB,0CAA0C;AAAA,UACnF,MAAM,OAAO,MAAM,IAAI;AAAA,UACvB,OAAO,OAAO,GAAG;AAAA,QACnB,CAAC;AAAA,MACH;AAAA,IACF;AAEA,QAAI,mBAAmB,KAAK,sBAAsB,GAAG;AACnD,aAAO,KAAK,UAAU,qBAAqB,kCAAkC;AAAA,QAC3E,YAAY;AAAA,QACZ,mBAAmB;AAAA,QACnB,sBAAsB;AAAA,MACxB,CAAC;AAAA,IACH;AAAA,EACF;AAMA,WAAS,2BAAiC;AAExC,UAAM,sBAAsB,kB
AAkB,WAAW,uBAAuB;AAChF,QAAI,sBAAsB,GAAG;AAC3B,aAAO,KAAK,UAAU,gBAAgB,2BAA2B,EAAE,eAAe,oBAAoB,CAAC;AAAA,IACzG;AAIA,eAAW,aAAa,qBAAqB,SAAS,GAAG;AACvD,UAAI;AACF,yBAAiB,SAAS;AAAA,MAC5B,SAAS,KAAK;AACZ,eAAO,KAAK,UAAU,qBAAqB,iCAAiC,EAAE,YAAY,WAAW,OAAO,OAAO,GAAG,EAAE,CAAC;AAAA,MAC3H;AAAA,IACF;AAAA,EACF;AAEA,WAAS,aAAa,WAAyB;AAC7C,uBAAmB,OAAO,SAAS;AAAA,EACrC;AAEA,SAAO,EAAE,kBAAkB,aAAa,0BAA0B,aAAa;AACjF;;;AExMA,OAAOC,UAAQ;;;ACiBf,IAAM,oBAAoB;AAqBnB,SAAS,mBAAmB,OAAsC;AACvE,QAAM,EAAE,WAAW,eAAe,cAAc,QAAQ,OAAO,IAAI;AACnE,MAAI,OAAO,WAAW,EAAG;AAEzB,QAAM,eAAe,UAAU,MAAM,CAAC,iBAAiB;AACvD,WAAS,IAAI,GAAG,IAAI,OAAO,QAAQ,KAAK;AACtC,UAAM,MAAM,OAAO,CAAC;AACpB,QAAI,CAAC,KAAK,QAAQ,CAAC,KAAK,UAAW;AACnC,QAAI;AACF,YAAM,MAAM,qBAAqB,IAAI,SAAS;AAC9C,YAAM,WAAW,GAAG,YAAY,KAAK,YAAY,IAAI,IAAI,CAAC,IAAI,GAAG;AACjE,YAAM,WAAW,iBAAiB;AAAA,QAChC,IAAI,GAAG,YAAY,KAAK,YAAY,IAAI,IAAI,CAAC;AAAA,QAC7C,YAAY;AAAA,QACZ,iBAAiB,iBAAiB;AAAA,QAClC,WAAW;AAAA,QACX,YAAY,IAAI;AAAA,QAChB,MAAM,OAAO,KAAK,IAAI,MAAM,QAAQ;AAAA,QACpC,YAAY,aAAa;AAAA,MAC3B,CAAC;AAKD,UAAI,UAAU;AACZ,eAAO,MAAM,UAAU,oBAAoB,sBAAsB;AAAA,UAC/D;AAAA,UACA,OAAO;AAAA,QACT,CAAC;AAAA,MACH;AAAA,IACF,SAAS,KAAK;AACZ,aAAO,KAAK,UAAU,oBAAoB,+BAA+B;AAAA,QACvE,OAAO,OAAO,GAAG;AAAA,MACnB,CAAC;AAAA,IACH;AAAA,EACF;AACF;;;ACzEA,SAAS,cAAAC,mBAAkB;AAC3B,OAAOC,SAAQ;AACf,OAAOC,YAAU;AAgBV,SAAS,mBACd,MACA,MACyC;AACzC,QAAM,UAAmD,CAAC;AAC1D,aAAW,OAAO,MAAM;AACtB,UAAM,QAAQ,IAAI,OAAO,IAAI,GAAG,0BAA0B,GAAG,KAAK,GAAG;AACrE,QAAI;AACJ,YAAQ,QAAQ,MAAM,KAAK,IAAI,OAAO,MAAM;AAC1C,YAAM,UAAU,MAAM,CAAC,EAAE,KAAK;AAC9B,UAAI,QAAS,SAAQ,KAAK,EAAE,KAAK,QAAQ,CAAC;AAAA,IAC5C;AAAA,EACF;AACA,SAAO;AACT;AAUO,IAAM,2BAA2B;AAOxC,IAAM,mBAAmB,oBAAI,IAAI;AAAA,EAC/B;AAAA,EAAS;AAAA,EAAQ;AAAA,EACjB;AAAA,EAAS;AAAA,EAAQ;AAAA,EAAS;AAC5B,CAAC;AAGD,IAAM,gBAAgB;AAGtB,IAAM,sBAAsB;AAYrB,SAAS,kBACd,UACA,WACA,aACS;AACT,QAAM,MAAMC,OAAK,WAAW,QAAQ,IAAI,WAAWA,OAAK,QAAQ,aAAa,QAAQ;AACrF,SAAO,UAAU,KAAK,CAAC,QAAQ;AAE7B,UAAM,WAAW,IAAI,WAAW,IAAI,IAAIA,OAAK,KAAKC,IAAG,QAAQ,GAAG,IAAI,MAAM,CAAC,CAAC,IAAI;AAChF,UAAM,SAASD,OAAK,WAAW,QAAQ,IAAI,
WAAWA,OAAK,QAAQ,aAAa,QAAQ;AAGxF,UAAM,SAAS,OAAO,SAASA,OAAK,GAAG,IAAI,SAAS,SAASA,OAAK;AAClE,WAAO,QAAQ,UAAU,IAAI,WAAW,MAAM;AAAA,EAChD,CAAC;AACH;AAgBO,SAAS,iBACd,UACA,WACA,QACe;AACf,MAAI,CAAC,iBAAiB,IAAI,QAAQ,EAAG,QAAO;AAG5C,QAAM,WAAW,WAAW,aAAa,WAAW,QAAQ,WAAW;AACvE,MAAI,OAAO,aAAa,SAAU,QAAO;AACzC,MAAI,CAAC,kBAAkB,UAAU,OAAO,WAAW,OAAO,WAAW,EAAG,QAAO;AAC/E,MAAI,OAAO,YAAY,QAAQ;AAC7B,UAAM,MAAMA,OAAK,QAAQ,QAAQ,EAAE,YAAY;AAC/C,QAAI,CAAC,OAAO,WAAW,SAAS,GAAG,EAAG,QAAO;AAAA,EAC/C;AACA,SAAO;AACT;AAQO,SAAS,eAAe,SAAiB,UAAkC;AAChF,QAAM,QAAQ,cAAc,KAAK,OAAO;AACxC,MAAI,MAAO,QAAO,MAAM,CAAC,EAAE,KAAK;AAChC,SAAO,YAAY;AACrB;AAyBO,SAAS,YAAY,OAAkC;AAC5D,QAAM,MAAM,KAAK,MAAM,KAAK,IAAI,IAAI,GAAI;AACxC,QAAM,cAAcE,YAAW,sBAAsB,EAAE,OAAO,MAAM,OAAO,EAAE,OAAO,KAAK;AACzF,QAAM,KAAKA,YAAW,KAAK,EAAE,OAAO,MAAM,UAAU,EAAE,OAAO,KAAK,EAAE,MAAM,GAAG,mBAAmB;AAChG,QAAM,QAAQ,eAAe,MAAM,SAASF,OAAK,SAAS,MAAM,UAAU,CAAC;AAE3E,SAAO,WAAW;AAAA,IAChB;AAAA,IACA;AAAA,IACA,SAAS,MAAM;AAAA,IACf,aAAa,MAAM;AAAA,IACnB,YAAY,MAAM;AAAA,IAClB,iBAAiB,MAAM,iBAAiB;AAAA,IACxC,cAAc;AAAA,IACd,QAAQ;AAAA,IACR,YAAY;AAAA,IACZ,YAAY;AAAA,EACd,CAAC;AACH;;;AC3KA,OAAO,YAAY;AAGZ,IAAM,gCAAgC;AAG7C,IAAM,0BAA0B;AAMzB,SAAS,iBAAiB,WAAmB,mBAAiC;AAGnF,MAAI,kBAAkB,SAAS,mBAAmB,EAAG;AAMrD,MAAI,CAAC,8BAA+B;AAEpC,QAAM,eAAe,iBAAiB,EAAE,QAAQ,UAAU,OAAO,wBAAwB,CAAC;AAC1F,MAAI,aAAa,WAAW,EAAG;AAG/B,QAAM,gBAAgB,aAAa,IAAI,CAAC,WAAW;AAAA,IACjD;AAAA,IACA,SAAS,IAAI;AAAA,MACX,UAAU,YAAY,MAAM,IAAI,CAAC,mCACV,YAAY,MAAM,IAAI,CAAC;AAAA,IAChD;AAAA,EACF,EAAE;AAEF,QAAM,MAAM,aAAa;AAEzB,aAAW,EAAE,OAAO,QAAQ,KAAK,eAAe;AAC9C,QAAI;AACF,UAAI,CAAC,QAAQ,KAAK,iBAAiB,EAAG;AAGtC,UAAI,2BAA2B,MAAM,IAAI,SAAS,EAAG;AAGrD,uBAAiB;AAAA,QACf,IAAI,OAAO,WAAW;AAAA,QACtB,UAAU,MAAM;AAAA,QAChB,YAAY;AAAA,QACZ,aAAa;AAAA,MACf,CAAC;AAGD,+BAAyB,MAAM,IAAI,GAAG;AAAA,IACxC,QAAQ;AAAA,IAER;AAAA,EACF;AACF;AAEA,SAAS,YAAY,GAAmB;AACtC,SAAO,EAAE,QAAQ,uBAAuB,MAAM;AAChD;;;AHZO,SAAS,4BAA4B,OAAyB,QAA8C;AACjH,MAAI,OAAO,WAAW,KAAK,MAAM,WAAW,EAAG;AAE/C,QAAM,aAAa,OAAO,OAAO,CAAC,MAAM,EAAE,SAAS,UAAU;AAC7D,MAAI,WAAW,WAAW,EAAG;AAE7B,MAA
I,SAAS;AACb,WAAS,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK;AACrC,UAAM,UAAU,IAAI,IAAI,MAAM,SAAS,MAAM,IAAI,CAAC,EAAE,YAAY;AAChE,UAAM,YAAoC,CAAC;AAC3C,UAAM,QAAQ,oBAAI,IAAY;AAE9B,WAAO,SAAS,WAAW,QAAQ;AACjC,YAAM,KAAK,OAAO,WAAW,MAAM,EAAE,aAAa,EAAE;AACpD,UAAI,YAAY,QAAQ,MAAM,QAAS;AACvC,YAAM,MAAM,WAAW,MAAM;AAC7B,YAAM,WAAW,OAAO,IAAI,aAAa,IAAI,QAAQ,SAAS;AAC9D,gBAAU,QAAQ,KAAK,UAAU,QAAQ,KAAK,KAAK;AACnD,YAAM,QAAQ,IAAI;AAClB,YAAM,WAAW,OAAO,aAAa,OAAO;AAC5C,UAAI,OAAO,aAAa,SAAU,OAAM,IAAI,QAAQ;AACpD;AAAA,IACF;AAEA,QAAI,OAAO,KAAK,SAAS,EAAE,SAAS,GAAG;AACrC,YAAM,CAAC,EAAE,gBAAgB;AACzB,UAAI,MAAM,OAAO,EAAG,OAAM,CAAC,EAAE,QAAQ,CAAC,GAAG,KAAK;AAAA,IAChD;AAAA,EACF;AACF;AAMO,SAAS,oBAAoB,MAKlC;AACA,QAAM,EAAE,UAAU,gBAAgB,iBAAiB,kBAAkB,QAAQ,YAAY,SAAS,IAAI;AAGtG,MAAI,uBAA6C;AACjD,QAAM,oBAAoB,oBAAI,IAAoB;AAWlD,QAAM,WAAW,iBAAE,OAAO;AAAA,IACxB,YAAY,iBAAE,OAAO;AAAA,IACrB,OAAO,iBAAE,OAAO,EAAE,SAAS;AAAA,IAC3B,MAAM,iBAAE,OAAO,EAAE,SAAS;AAAA,IAC1B,iBAAiB,iBAAE,OAAO,EAAE,QAAQ;AAAA,IACpC,wBAAwB,iBAAE,OAAO,EAAE,QAAQ;AAAA,EAC7C,CAAC;AAED,QAAMG,uBAAsB,CAAC,cAC3B,oBAA0B,WAAW,EAAE,UAAU,kBAAkB,YAAY,OAAO,CAAC;AAEzF,WAAS,8BAA8B,WAA4B;AACjE,aAAS,WAAW,SAAS;AAC7B,mBAAe,OAAO,SAAS;AAC/B,sBAAkB,OAAO,SAAS;AAElC,UAAM,SAAS,qBAAqB,SAAS;AAC7C,QAAI,CAAC,OAAO,QAAS,QAAO;AAE5B,+BAA2B,WAAW,QAAQ,kBAAkB,QAAQ,EAAE,MAAM,MAAM;AAAA,IAAC,CAAC;AACxF,WAAO;AAAA,EACT;AAEA,iBAAe,iBACb,WACA,MACA,aACA,oBACA,sBACe;AAIf,UAAM,mBAAmB,gBAAgB,sBAAsB,WAAW,kBAAkB;AAC5F,QAAI,WAAW,iBAAiB;AAChC,QAAI,aAAa,iBAAiB;AAElC,UAAM,eAAe,eAAe,IAAI,SAAS,GAAG,QAAQ,KAAK,CAAC;AAElE,QAAI,SAAS,WAAW,GAAG;AACzB,iBAAW,uBAAuB,YAAY;AAC9C,mBAAa;AAAA,IACf,WAAW,aAAa,SAAS,GAAG;AAClC,YAAM,mBAAmB,SAAS,SAAS,SAAS,CAAC,EAAE;AACvD,UAAI,kBAAkB;AACpB,cAAM,cAAc,aAAa;AAAA,UAAO,CAAC,MACvC,OAAO,EAAE,aAAa,EAAE,IAAI;AAAA,QAC9B;AACA,YAAI,YAAY,SAAS,GAAG;AAC1B,gBAAM,cAAc,uBAAuB,WAAW;AACtD,qBAAW,CAAC,GAAG,UAAU,GAAG,WAAW;AACvC,uBAAa,GAAG,iBAAiB,MAAM;AACvC,iBAAO,KAAK,UAAU,sBAAsB,iDAAiD;AAAA,YAC3F,YAAY;AAAA,YAAW,iBAAiB,iBAAiB,MAAM;AAAA,YAAQ,aAAa,YAAY;AAAA,UAClG,CAAC;AAAA,QACH;AAAA,MACF;AAAA,IACF;AAGA,QAAI,wBAAwB,S
AAS,SAAS,GAAG;AAC/C,YAAM,WAAW,SAAS,SAAS,SAAS,CAAC;AAC7C,UAAI,CAAC,SAAS,YAAY;AACxB,iBAAS,aAAa;AAAA,MACxB;AAAA,IACF;AAEA,gCAA4B,UAAU,YAAY;AAElD,UAAM,aAAa,SAAS,OAAO,CAAC,KAAK,MAAM,OAAO,EAAE,QAAQ,UAAU,IAAI,CAAC;AAC/E,WAAO,MAAM,UAAU,sBAAsB,qBAAqB;AAAA,MAChE,YAAY;AAAA,MACZ,YAAY,SAAS;AAAA,MACrB,aAAa;AAAA,IACf,CAAC;AAKD,UAAM,cAAc,eAAe,SAAS;AAI5C,QAAI,wBAAwB,eAAe,CAAC,YAAY,kBAAkB;AACxE,UAAI;AAAE,2BAAmB,YAAY,IAAI,oBAAoB;AAAA,MAAG,SACzD,KAAK;AAAE,eAAO,KAAK,UAAU,iBAAiB,kDAAkD,EAAE,OAAO,OAAO,GAAG,EAAE,CAAC;AAAA,MAAG;AAAA,IAClI;AAKA,qBAAiB,WAAW,aAAa,CAAC;AAK1C,UAAM,kBAAkB,WAAW,SAAS;AAC5C,UAAM,WAAW,iBAAiB,UAAU,QAAQ,iBAAiB,UAAU;AAE/E,QAAI,CAAC,UAAU;AACb,UAAI,QAAQ,kBAAkB,IAAI,SAAS,KAAK;AAChD,UAAI,CAAC,OAAO;AACV,cAAM,aAAa,qBAAqB,WAAW,EAAE,OAAO,EAAE,CAAC,EAAE,CAAC;AAClE,YAAI,YAAY,aAAa;AAC3B,kBAAQ,WAAW,YAAY,MAAM,GAAG,mBAAmB;AAC3D,cAAI,WAAW,YAAY,SAAS,qBAAqB;AACvD,qBAAS;AAAA,UACX;AACA,4BAAkB,IAAI,WAAW,KAAK;AAAA,QACxC;AAAA,MACF;AAAA,IACF;AAMA,UAAM,iBAAiB,WAAW,SAAS;AAC3C,UAAM,wBAAwB,SAAS;AACvC,UAAM,sBAAsB,SAAS,OAAO,CAAC,KAAK,MAAM,MAAM,EAAE,WAAW,CAAC;AAE5E,UAAM,eAAwC;AAAA,MAC5C,iBAAiB,sBAAsB;AAAA,MACvC,cAAc,KAAK,IAAI,uBAAuB,gBAAgB,gBAAgB,CAAC;AAAA,MAC/E,YAAY,KAAK,IAAI,qBAAqB,gBAAgB,cAAc,CAAC;AAAA,IAC3E;AACA,QAAI,KAAM,cAAa,OAAO;AAC9B,QAAI,CAAC,YAAY,kBAAkB,IAAI,SAAS,GAAG;AACjD,mBAAa,QAAQ,kBAAkB,IAAI,SAAS;AAAA,IACtD;AAEA,kBAAc,WAAW,YAAmD;AAI5E,QAAI,+BAA+B;AACjC,UAAI;AACF,YAAI,iBAAgC;AACpC,YAAI,oBAAoB;AACtB,cAAI;AAAE,6BAAiBC,KAAG,aAAa,oBAAoB,OAAO;AAAA,UAAG,QAC/D;AAAA,UAA8C;AAAA,QACtD;AACA,YAAI,CAAC,kBAAkB,SAAS,SAAS,GAAG;AAC1C,2BAAiB,SACd,IAAI,CAAC,MAAM,CAAC,EAAE,UAAU,IAAI,EAAE,cAAc,EAAE,EAAE,KAAK,GAAG,CAAC,EACzD,KAAK,IAAI;AAAA,QACd;AACA,YAAI,gBAAgB;AAClB,2BAAiB,WAAW,cAAc;AAAA,QAC5C;AAAA,MACF,QAAQ;AAAA,MAER;AAAA,IACF;AAMA,UAAM,YAA4D,CAAC;AACnE,aAAS,IAAI,GAAG,IAAI,SAAS,QAAQ,KAAK;AACxC,UAAI,SAAS,CAAC,EAAE,YAAY;AAC1B,kBAAU,KAAK,EAAE,WAAW,IAAI,GAAG,UAAU,SAAS,CAAC,EAAE,WAAY,CAAC;AAAA,MACxE;AAAA,IACF;AACA,QAAI,UAAU,SAAS,GAAG;AACxB,UAAI;AAAE,+BAAuB,WAAW,SAAS;AAAA,MAAG,SAC7C,KAAK;AAAE,eAAO,KAAK,UAAU,iBA
AiB,sCAAsC,EAAE,OAAO,OAAO,GAAG,EAAE,CAAC;AAAA,MAAG;AAAA,IACtH;AAGA,QAAI,KAAK,SAAS,SAAS,GAAG;AAC5B,iBAAW,QAAQ,UAAU;AAC3B,YAAI,CAAC,KAAK,WAAY;AACtB,cAAM,cAAc,mBAAmB,KAAK,YAAY,KAAK,QAAQ;AACrE,mBAAW,EAAE,KAAK,QAAQ,KAAK,aAAa;AAC1C,cAAI;AACF,wBAAY;AAAA,cACV,YAAY,GAAG,wBAAwB,GAAG,GAAG;AAAA,cAC7C;AAAA,cACA;AAAA,cACA,eAAe,aAAa,MAAM;AAAA,YACpC,CAAC;AACD,mBAAO,KAAK,UAAU,cAAc,qCAAqC;AAAA,cACvE,YAAY;AAAA,cACZ;AAAA,cACA,gBAAgB,QAAQ;AAAA,YAC1B,CAAC;AAAA,UACH,SAAS,KAAK;AACZ,mBAAO,KAAK,UAAU,cAAc,8CAA8C;AAAA,cAChF,YAAY;AAAA,cACZ;AAAA,cACA,OAAQ,IAAc;AAAA,YACxB,CAAC;AAAA,UACH;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAGA,QAAI,CAAC,UAAU;AACb,MAAAD,qBAAoB,SAAS;AAAA,IAC/B;AAMA,aAAS,IAAI,GAAG,IAAI,SAAS,QAAQ,KAAK;AACxC,YAAM,OAAO,SAAS,CAAC;AACvB,UAAI,CAAC,KAAK,QAAQ,OAAQ;AAM1B,YAAM,aAAa,MAAM,SAAS,SAAS;AAC3C,UAAI,kBAAiC;AACrC,UAAI,uBAA+B,IAAI;AAEvC,UAAI,cAAc,aAAa;AAC7B,0BAAkB,YAAY;AAC9B,+BAAuB,YAAY,iBAAiB;AAAA,MACtD,WAAW,KAAK,QAAQ;AACtB,YAAI;AACF,gBAAM,QAAQ,wBAAwB,WAAW,KAAK,MAAM;AAC5D,cAAI,OAAO;AACT,8BAAkB,MAAM;AACxB,mCAAuB,MAAM;AAAA,UAC/B;AAAA,QACF,QAAQ;AAAA,QAAgC;AAAA,MAC1C;AAEA,yBAAmB;AAAA,QACjB;AAAA,QACA,eAAe;AAAA,QACf,cAAc;AAAA,QACd,QAAQ,KAAK;AAAA,QACb;AAAA,MACF,CAAC;AAAA,IACH;AAEA,WAAO,KAAK,UAAU,mBAAmB,oBAAoB;AAAA,MAC3D,YAAY;AAAA,MACZ,OAAO,SAAS;AAAA,MAChB,QAAQ;AAAA,MACR,OAAO,iBAAiB,SAAS,kBAAkB,IAAI,SAAS,KAAK;AAAA,IACvE,CAAC;AAAA,EACH;AAEA,QAAM,kBAAgC,OAAO,QAAQ;AACnD,UAAM;AAAA,MACJ,YAAY;AAAA,MACZ;AAAA,MACA;AAAA,MACA,iBAAiB;AAAA,MACjB,wBAAwB;AAAA,IAC1B,IAAI,SAAS,MAAM,IAAI,IAAI;AAE3B,QAAI,oBAAoB;AACtB,YAAM,iBAAiB,mBAAmB,kBAAkB,KAAK;AACjE,YAAM,gBAAgB,SAAS,WAAW,SAAS,GAAG,SAAS;AAC/D,YAAM,WAAW,4BAA4B,cAAc,GAAG,eAAe;AAAA,QAC3E,gBAAgB;AAAA,QAChB;AAAA,MACF,CAAC;AACD,UAAI,SAAS,WAAW,QAAQ;AAC9B,cAAM,UAAU,8BAA8B,SAAS;AACvD,eAAO,KAAK,UAAU,YAAY,gDAA2C;AAAA,UAC3E,YAAY;AAAA,UACZ,QAAQ,SAAS,UAAU;AAAA,UAC3B,0BAA0B;AAAA,QAC5B,CAAC;AACD,eAAO,EAAE,MAAM,EAAE,IAAI,MAAM,SAAS,SAAS,UAAU,OAAO,EAAE;AAAA,MAClE;AAAA,IACF;AAWA,UAAM,sBAAsB,SAAS,WAAW,SAAS;AACzD,QAAI,CAAC,sBAAsB,CAAC,qBAAqB;AAM/C,aAAO,KAAK,UAAU,YAAY,gDAA2C;AAAA,QAC3E,
YAAY;AAAA,MACd,CAAC;AACD,aAAO,EAAE,MAAM,EAAE,IAAI,MAAM,SAAS,2BAA2B,EAAE;AAAA,IACnE;AAGA,QAAI,CAAC,qBAAqB;AACxB,eAAS,SAAS,WAAW,EAAE,aAAY,oBAAI,KAAK,GAAE,YAAY,EAAE,CAAC;AACrE,aAAO,MAAM,UAAU,yBAAyB,2CAA2C,EAAE,YAAY,UAAU,CAAC;AAAA,IACtH;AACA,UAAM,cAAc,uBAAuB,SAAS,WAAW,SAAS;AACxE,WAAO,KAAK,UAAU,YAAY,iBAAiB;AAAA,MACjD,YAAY;AAAA,MACZ,qBAAqB,CAAC,CAAC;AAAA,MACvB,cAAc,CAAC,CAAC;AAAA,IAClB,CAAC;AACD,WAAO,MAAM,UAAU,YAAY,qBAAqB;AAAA,MACtD,YAAY;AAAA,MACZ,iBAAiB,sBAAsB;AAAA,MACvC,sBAAsB,sBAAsB,MAAM,GAAG,yBAAyB,KAAK;AAAA,IACrF,CAAC;AAMD,UAAM,2BAA2B,sBAAsB;AACvD,UAAM,6BAA6B,wBAAwB;AAC3D,UAAM,MAAM,MAAM,iBAAiB,WAAW,MAAM,aAAa,0BAA0B,0BAA0B,EAAE,MAAM,CAAC,QAAQ;AACpI,aAAO,MAAM,UAAU,mBAAmB,0BAA0B,EAAE,YAAY,WAAW,OAAQ,IAAc,QAAQ,CAAC;AAAA,IAC9H,CAAC;AAED,UAAM,OAAO,wBAAwB,QAAQ,QAAQ;AACrD,2BAAuB,KAAK,KAAK,GAAG,EAAE,QAAQ,MAAM;AAAE,6BAAuB;AAAA,IAAM,CAAC;AAEpF,WAAO,EAAE,MAAM,EAAE,IAAI,KAAK,EAAE;AAAA,EAC9B;AAEA,SAAO;AAAA,IACL;AAAA,IACA,cAAc,CAAC,cAAsB;AAAE,wBAAkB,OAAO,SAAS;AAAA,IAAG;AAAA,IAC5E,qBAAqB,MAAM;AAAA,IAC3B,qBAAAA;AAAA,EACF;AACF;;;AI9bA,OAAOE,UAAQ;AACf,OAAOC,YAAU;AA8BjB,IAAM,YAAY,iBAAE,OAAO,EAAE,MAAM,iBAAE,OAAO,GAAG,YAAY,iBAAE,OAAO,EAAE,CAAC,EAAE,YAAY;AAyB9E,SAAS,sBAAsB,MAAuC;AAC3E,QAAM;AAAA,IACJ;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA,qBAAAC;AAAA,EACF,IAAI;AAEJ,QAAM,cAAc,QAAQ,IAAI;AAEhC,SAAO,OAAO,QAAQ;AACpB,UAAM,YAAY,UAAU,MAAM,IAAI,IAAI;AAC1C,UAAM,QAAQ;AAAA,MACZ,GAAG;AAAA,MACH,WAAY,UAAsC,cAAa,oBAAI,KAAK,GAAE,YAAY;AAAA,IACxF;AAEA,WAAO,MAAM,UAAU,aAAa,kBAAkB,EAAE,MAAM,MAAM,MAAM,YAAY,MAAM,WAAW,CAAC;AAGxG,QAAI,CAAC,SAAS,WAAW,MAAM,UAAU,GAAG;AAC1C,eAAS,SAAS,MAAM,YAAY,EAAE,YAAY,MAAM,UAAU,CAAC;AACnE,aAAO,MAAM,UAAU,yBAAyB,sCAAsC,EAAE,YAAY,MAAM,WAAW,CAAC;AAItH,YAAM,MAAM,aAAa;AACzB,YAAM,eAAe,KAAK,MAAM,IAAI,KAAK,MAAM,SAAS,EAAE,QAAQ,IAAI,GAAI;AAC1E,oBAAc;AAAA,QACZ,IAAI,MAAM;AAAA,QACV,OAAQ,MAAkC,SAAmB;AAAA,QAC7D,QAAQ;AAAA,QACR,YAAY;AAAA,QACZ,YAAY;AAAA,QACZ,YAAY;AAAA,MACd,CAAC;AAGD,uBAAiB,MAAM,UAAU;AAAA,IACnC;AAGA,QAAI,CAAC,eAAe,IAAI,MAAM,UAAU,GAAG;AACz
C,YAAM,YAAYC,OAAK,KAAK,UAAU,QAAQ;AAC9C,qBAAe,IAAI,MAAM,YAAY,IAAI,YAAY,WAAW,MAAM,UAAU,CAAC;AAAA,IACnF;AACA,mBAAe,IAAI,MAAM,UAAU,EAAG,OAAO,KAAK;AAGlD,QAAI,MAAM,SAAS,eAAe;AAChC,mBAAa,eAAe;AAC5B,YAAM,aAAa,OAAO,MAAM,UAAU,EAAE;AAI5C,UAAI,gBAAgB,UAAU,GAAG;AAC/B,eAAO,MAAM,UAAU,cAAc,mCAAmC;AAAA,UACtE,YAAY,MAAM;AAAA,UAClB,QAAQ,WAAW,UAAU,EAAE,MAAM,GAAG,wBAAwB;AAAA,QAClE,CAAC;AAAA,MACH,OAAO;AACL,eAAO,KAAK,UAAU,cAAc,wBAAwB;AAAA,UAC1D,YAAY,MAAM;AAAA,UAClB,gBAAgB,WAAW,MAAM,GAAG,wBAAwB;AAAA,UAC5D,eAAe,WAAW;AAAA,QAC5B,CAAC;AAMD,YAAI,6BAA6B,MAAM,UAAU,GAAG;AAClD,iBAAO,KAAK,UAAU,yBAAyB,iDAAiD;AAAA,YAC9F,YAAY,MAAM;AAAA,UACpB,CAAC;AAAA,QACH;AACA,YAAI;AACF,gBAAM,EAAE,SAAS,aAAa,IAAI,iBAAiB,MAAM,YAAY,cAAc,MAAS;AAC5F,iBAAO,MAAM,UAAU,eAAe,gBAAgB,EAAE,YAAY,MAAM,YAAY,UAAU,SAAS,eAAe,aAAa,CAAC;AAOtI,gBAAM,cAAc,MAAM;AAC1B,cAAI,MAAM,QAAQ,WAAW,KAAK,YAAY,SAAS,GAAG;AACxD,+BAAmB;AAAA,cACjB,WAAW,MAAM;AAAA,cACjB,eAAe;AAAA,cACf;AAAA,cACA,QAAQ;AAAA,cACR;AAAA,YACF,CAAC;AAAA,UACH;AAGA,gBAAM,aAAa;AACnB,gBAAM,kBAAkB,WAAW,QAAQ,MAAM;AACjD,cAAI,kBAAkB,KAAK,aAAa,KAAK,aAAa,oBAAoB,GAAG;AAC/E,YAAAD,qBAAoB,MAAM,UAAU;AAAA,UACtC;AAAA,QACF,SAAS,KAAK;AACZ,iBAAO,KAAK,UAAU,eAAe,wBAAwB,EAAE,YAAY,MAAM,YAAY,OAAQ,IAAc,QAAQ,CAAC;AAAA,QAC9H;AAAA,MACF;AAAA,IACF;AAEA,QAAI,MAAM,SAAS,YAAY;AAC7B,YAAM,WAAW,OAAO,MAAM,aAAa,EAAE;AAC7C,aAAO,MAAM,UAAU,YAAY,kBAAkB;AAAA,QACnD,YAAY,MAAM;AAAA,QAClB,WAAW;AAAA,MACb,CAAC;AAED,YAAM,eAAe;AAAA,QACnB;AAAA,QACA,MAAM;AAAA,QACN;AAAA,MACF;AACA,UAAI,cAAc;AAChB,cAAM,mBAAmB,MAAM;AAC/B,QAAAE,KAAG,SAAS,SAAS,cAAc,OAAO,EAAE,KAAK,CAAC,gBAAgB;AAChE,gBAAM,cAAc,eAAe,gBAAgB;AACnD,sBAAY;AAAA,YACV,YAAYD,OAAK,SAAS,aAAa,YAAY;AAAA,YACnD,SAAS;AAAA,YACT,WAAW;AAAA,YACX,eAAe,aAAa,MAAM;AAAA,UACpC,CAAC;AACD,iBAAO,KAAK,UAAU,cAAc,iBAAiB;AAAA,YACnD,YAAY;AAAA,YACZ,aAAa;AAAA,UACf,CAAC;AAAA,QACH,CAAC,EAAE,MAAM,CAAC,QAAQ;AAChB,iBAAO,KAAK,UAAU,cAAc,0BAA0B;AAAA,YAC5D,OAAQ,IAAc;AAAA,YACtB,MAAM;AAAA,UACR,CAAC;AAAA,QACH,CAAC;AAAA,MACH;AACA,UAAI;AACF;AAAA,UACE,MAAM;AAAA,UACN;AAAA,UACA,MAAM;AAAA,UACN,OAAO,MAAM,mBAAmB,WAAW,MAAM,iBAAiB;AAAA,QA
CpE;AAAA,MACF,SAAS,KAAK;AACZ,eAAO,KAAK,UAAU,kBAAkB,6BAA6B,EAAE,YAAY,MAAM,YAAY,OAAQ,IAAc,QAAQ,CAAC;AAAA,MACtI;AAAA,IACF;AAEA,QAAI,MAAM,SAAS,gBAAgB;AACjC,YAAM,WAAW,OAAO,MAAM,aAAa,EAAE;AAC7C,aAAO,KAAK,UAAU,YAAY,sBAAsB;AAAA,QACtD,YAAY,MAAM;AAAA,QAClB,WAAW;AAAA,QACX,cAAc,CAAC,CAAC,MAAM;AAAA,MACxB,CAAC;AACD,UAAI;AACF;AAAA,UACE,MAAM;AAAA,UACN;AAAA,UACA,MAAM;AAAA,UACN,OAAO,MAAM,UAAU,WAAW,MAAM,QAAQ;AAAA,UAChD,CAAC,CAAC,MAAM;AAAA,QACV;AAAA,MACF,SAAS,KAAK;AACZ,eAAO,KAAK,UAAU,kBAAkB,iCAAiC,EAAE,YAAY,MAAM,YAAY,OAAQ,IAAc,QAAQ,CAAC;AAAA,MAC1I;AAAA,IACF;AAEA,QAAI,MAAM,SAAS,kBAAkB;AACnC,aAAO,KAAK,UAAU,gBAAgB,wBAAwB;AAAA,QAC5D,YAAY,MAAM;AAAA,QAClB,UAAU,MAAM;AAAA,QAChB,YAAY,MAAM;AAAA,MACpB,CAAC;AACD,UAAI;AACF;AAAA,UACE,MAAM;AAAA,UACN,OAAO,MAAM,aAAa,WAAW,MAAM,WAAW;AAAA,UACtD,OAAO,MAAM,eAAe,WAAW,MAAM,aAAa;AAAA,QAC5D;AAAA,MACF,SAAS,KAAK;AACZ,eAAO,KAAK,UAAU,kBAAkB,mCAAmC,EAAE,YAAY,MAAM,YAAY,OAAQ,IAAc,QAAQ,CAAC;AAAA,MAC5I;AAAA,IACF;AAEA,QAAI,MAAM,SAAS,iBAAiB;AAClC,aAAO,KAAK,UAAU,gBAAgB,uBAAuB;AAAA,QAC3D,YAAY,MAAM;AAAA,QAClB,UAAU,MAAM;AAAA,QAChB,YAAY,MAAM;AAAA,MACpB,CAAC;AACD,UAAI;AACF;AAAA,UACE,MAAM;AAAA,UACN,OAAO,MAAM,aAAa,WAAW,MAAM,WAAW;AAAA,UACtD,OAAO,MAAM,eAAe,WAAW,MAAM,aAAa;AAAA,UAC1D,OAAO,MAAM,2BAA2B,WAAW,MAAM,yBAAyB;AAAA,QACpF;AAAA,MACF,SAAS,KAAK;AACZ,eAAO,KAAK,UAAU,kBAAkB,kCAAkC,EAAE,YAAY,MAAM,YAAY,OAAQ,IAAc,QAAQ,CAAC;AAAA,MAC3I;AAAA,IACF;AAEA,QAAI,MAAM,SAAS,gBAAgB;AACjC,aAAO,KAAK,UAAU,YAAY,sBAAsB;AAAA,QACtD,YAAY,MAAM;AAAA,QAClB,OAAO,MAAM;AAAA,MACf,CAAC;AACD,UAAI;AACF;AAAA,UACE,MAAM;AAAA,UACN,OAAO,MAAM,UAAU,WAAW,MAAM,QAAQ;AAAA,UAChD,OAAO,MAAM,kBAAkB,WAAW,MAAM,gBAAgB;AAAA,QAClE;AAAA,MACF,SAAS,KAAK;AACZ,eAAO,KAAK,UAAU,kBAAkB,iCAAiC,EAAE,YAAY,MAAM,YAAY,OAAQ,IAAc,QAAQ,CAAC;AAAA,MAC1I;AAAA,IACF;AAEA,QAAI,MAAM,SAAS,kBAAkB;AACnC,aAAO,KAAK,UAAU,aAAa,wBAAwB;AAAA,QACzD,YAAY,MAAM;AAAA,QAClB,SAAS,MAAM;AAAA,QACf,cAAc,MAAM;AAAA,MACtB,CAAC;AACD,UAAI;AACF;AAAA,UACE,MAAM;AAAA,UACN,OAAO,MAAM,YAAY,WAAW,MAAM,UAAU;AAAA,UACpD,OAAO,MAAM,iBAAiB,WAAW,MAAM,eAAe;AAAA,UAC9D,OAAO,MAAM,qBAAqB,WAAW,MAAM,mBAAmB;A
AAA,QACxE;AAAA,MACF,SAAS,KAAK;AACZ,eAAO,KAAK,UAAU,kBAAkB,oCAAoC,EAAE,YAAY,MAAM,YAAY,OAAQ,IAAc,QAAQ,CAAC;AAAA,MAC7I;AAAA,IACF;AAEA,QAAI,MAAM,SAAS,eAAe;AAChC,aAAO,KAAK,UAAU,aAAa,qBAAqB,EAAE,YAAY,MAAM,WAAW,CAAC;AACxF,UAAI;AACF;AAAA,UACE,MAAM;AAAA,UACN;AAAA,UACA,OAAO,MAAM,YAAY,WAAW,MAAM,UAAU;AAAA,UACpD;AAAA,QACF;AAAA,MACF,SAAS,KAAK;AACZ,eAAO,KAAK,UAAU,kBAAkB,gCAAgC,EAAE,YAAY,MAAM,YAAY,OAAQ,IAAc,QAAQ,CAAC;AAAA,MACzI;AAAA,IACF;AAEA,QAAI,MAAM,SAAS,gBAAgB;AACjC,aAAO,KAAK,UAAU,aAAa,sBAAsB,EAAE,YAAY,MAAM,WAAW,CAAC;AACzF,UAAI;AACF;AAAA,UACE,MAAM;AAAA,UACN;AAAA,UACA,OAAO,MAAM,YAAY,WAAW,MAAM,UAAU;AAAA,UACpD,OAAO,MAAM,oBAAoB,WAAW,MAAM,kBAAkB;AAAA,QACtE;AAAA,MACF,SAAS,KAAK;AACZ,eAAO,KAAK,UAAU,kBAAkB,iCAAiC,EAAE,YAAY,MAAM,YAAY,OAAQ,IAAc,QAAQ,CAAC;AAAA,MAC1I;AAAA,IACF;AAEA,WAAO,EAAE,MAAM,EAAE,IAAI,KAAK,EAAE;AAAA,EAC9B;AACF;;;ACzSO,SAAS,6BAA6B,QAAwC;AACnF,QAAM,UAAmB,CAAC;AAE1B,SAAO;AAAA,IACL,GAAG,OAAO,IAAI;AACZ,cAAQ,KAAK,EAAE,OAAO,GAAG,CAAC;AAAA,IAC5B;AAAA,IACA,MAAM,KAAK,cAAc,KAAK;AAC5B,iBAAW,SAAS,SAAS;AAC3B,YAAI,CAAC,WAAW,MAAM,OAAO,YAAY,EAAG;AAC5C,YAAI;AACF,gBAAM,MAAM,GAAG,GAAG;AAAA,QACpB,SAAS,KAAK;AACZ,iBAAO,MAAM,oBAAoB,kBAAkB,EAAE,OAAO,OAAO,GAAG,EAAE,CAAC;AAAA,QAC3E;AAAA,MACF;AAAA,IACF;AAAA,EACF;AACF;AAEA,SAAS,WAAW,iBAA2B,SAA4B;AACzE,MAAI,gBAAgB,WAAW,EAAG,QAAO;AACzC,aAAW,UAAU,iBAAiB;AACpC,eAAWE,UAAQ,SAAS;AAC1B,UAAIA,WAAS,UAAUA,OAAK,WAAW,GAAG,MAAM,GAAG,EAAG,QAAO;AAAA,IAC/D;AAAA,EACF;AACA,SAAO;AACT;;;ACjEO,SAAS,oBAAoB,UAAkB,QAAmC;AACvF,MAAI;AACF,WAAO,iBAAiB,QAAQ;AAAA,EAClC,SAAS,KAAK;AACZ,QAAI,eAAe,iBAAE,UAAU;AAC7B,aAAO,KAAK,oBAAoB,uDAAuD;AAAA,QACrF,QAAQ,IAAI,OAAO,IAAI,CAAC,WAAW;AAAA,UACjC,MAAM,MAAM,KAAK,KAAK,GAAG;AAAA,UACzB,SAAS,MAAM;AAAA,QACjB,EAAE;AAAA,MACJ,CAAC;AACD,aAAO;AAAA,IACT;AACA,UAAM;AAAA,EACR;AACF;;;ACLO,SAAS,wBAAwB,MAA6C;AACnF,SAAO,CAAC,QAAQ;AACd,UAAM,aAAa,IAAI,QAAQ,aAAa,CAAC;AAC7C,SAAK,gBAAgB,YAAY,CAAC,GAAG,oBAAI,IAAI,CAAC,GAAG,KAAK,kBAAkB,GAAG,UAAU,CAAC,CAAC;AACvF,SAAK,gBAAgB,aAAa,IAAI,QAAQ;AAAA,EAChD;AACF;;;ACgIA,OAAOC,UAAQ;AAEf,OAAOC,YAAU;AAWjB,SAAS,gBAAgB,UAAkB,QAA4
B;AACrE,QAAM,iBAAiBA,OAAK,KAAK,UAAU,aAAa;AACxD,MAAI;AACF,QAAI,CAACD,KAAG,WAAW,cAAc,EAAG;AACpC,UAAM,OAAO,KAAK,MAAMA,KAAG,aAAa,gBAAgB,OAAO,CAAC;AAChE,QAAI,CAAC,KAAK,IAAK;AAGf,QAAI,KAAK,QAAQ,QAAQ,IAAK;AAE9B,QAAI;AACF,cAAQ,KAAK,KAAK,KAAK,CAAC;AACxB,cAAQ,KAAK,KAAK,KAAK,SAAS;AAChC,aAAO,KAAK,UAAU,cAAc,uBAAuB,EAAE,KAAK,KAAK,IAAI,CAAC;AAAA,IAC9E,QAAQ;AAAA,IAAqB;AAE7B,IAAAA,KAAG,WAAW,cAAc;AAAA,EAC9B,QAAQ;AAAA,EAAwC;AAClD;AAMA,eAAsB,OAAsB;AAC1C,QAAM,WAAW,QAAQ,KAAK,KAAK,CAAC,GAAG,MAAM,QAAQ,KAAK,IAAI,CAAC,MAAM,SAAS;AAC9E,MAAI,CAAC,UAAU;AACb,YAAQ,OAAO,MAAM,+BAA+B;AACpD,YAAQ,KAAK,CAAC;AAAA,EAChB;AAEA,QAAM,WAAWC,OAAK,QAAQ,QAAQ;AAGtC,cAAY,QAAQ;AAKpB,QAAM,SAAS,iBAAiB,QAAQ;AAIxC,QAAM,aAAyC,EAAE,SAAS,OAAO;AAEjE,QAAM,YAAY,cAAc;AAChC,QAAM,mBAAmB,UAAU,QAAQ,CAAC,MAAM,EAAE,SAAS,YAAY,CAAC,CAAC;AAC3E,QAAM,mBAAmB,CAAC,GAAG,IAAI,IAAI,UAAU,QAAQ,CAAC,MAAM,EAAE,SAAS,YAAY,CAAC,CAAC,CAAC,CAAC;AACzF,QAAM,cAAc,QAAQ,IAAI;AAChC,QAAM,kBAAmC;AAAA,IACvC,WAAW,CAAC,GAAG,oBAAI,IAAI,CAAC,GAAG,kBAAkB,GAAI,OAAO,QAAQ,aAAa,CAAC,CAAE,CAAC,CAAC;AAAA,IAClF;AAAA,IACA,YAAY,OAAO,QAAQ;AAAA,EAC7B;AAEA,QAAM,SAAS,IAAI,aAAaA,OAAK,KAAK,UAAU,MAAM,GAAG;AAAA,IAC3D,OAAO,OAAO,OAAO;AAAA,EACvB,CAAC;AAMD,MAAI,OAAO,OAAO,cAAc,SAAS;AACvC,YAAQ,IAAI,mBAAmB;AAAA,EACjC;AAGA,kBAAgB,UAAU,MAAM;AAEhC,SAAO,KAAK,UAAU,eAAe,iBAAiB;AAAA,IACpD,OAAO;AAAA,IACP,oBAAoB,OAAO,UAAU;AAAA,EACvC,CAAC;AACD,SAAO,KAAK,UAAU,cAAc,0BAA0B,EAAE,MAAM,gBAAgB,UAAU,CAAC;AACjG,MAAI,iBAAiB,SAAS,GAAG;AAC/B,WAAO,KAAK,UAAU,cAAc,wBAAwB,EAAE,MAAM,iBAAiB,CAAC;AAAA,EACxF;AAGA,QAAM,YAAY,aAAa,QAAQ;AACvC,SAAO,KAAK,UAAU,cAAc,uBAAuB,EAAE,YAAY,UAAU,CAAC;AAKpF,MAAI,eAA8B;AAClC,MAAI;AACF,mBAAe,oBAAoB;AACnC,WAAO,MAAM,UAAU,cAAc,8BAA8B,EAAE,QAAQ,aAAa,CAAC;AAAA,EAC7F,SAAS,KAAK;AACZ,WAAO,KAAK,UAAU,cAAc,uCAAuC;AAAA,MACzE,OAAQ,IAAc;AAAA,IACxB,CAAC;AAAA,EACH;AAOA,QAAM,cAAc;AAAA,IAClB;AAAA,IACA,QAAQ,KAAK,CAAC;AAAA,IACdD,KAAG;AAAA,EACL;AACA,MAAI,aAAa;AACf,wBAAoB,WAAW;AAC/B,mBAAe;AACf,WAAO,KAAK,UAAU,cAAc,8CAA8C;AAAA,MAChF,WAAW;AAAA,IACb,CAAC;AAAA,EACH;AAGA,QAAM,KAAK,aAAa,YAAY,QAAQ,CAAC;AAC7C,eAAa,IA
AI,SAAS;AAC1B,yBAAuB;AAEvB,SAAO,KAAK,UAAU,cAAc,sBAAsB,EAAE,OAAO,SAAS,CAAC;AAG7E;AACE,UAAM,aAAaC,OAAK,KAAK,UAAU,uBAAuB;AAC9D,QAAI;AACF,UAAID,KAAG,WAAW,UAAU,GAAG;AAC7B,cAAM,MAAM,KAAK,MAAMA,KAAG,aAAa,YAAY,OAAO,CAAC;AAM3D,QAAAA,KAAG,WAAW,UAAU;AAExB,YAAI,IAAI,WAAW,kBAAkB,IAAI,YAAY;AACnD,gBAAM,UAAU,IAAI,mBAChB,+CACA;AAEJ,iBAAO,UAAU;AAAA,YACf,QAAQ;AAAA,YACR,MAAM;AAAA,YACN,OAAO,eAAe,IAAI,UAAU;AAAA,YACpC;AAAA,YACA,UAAU;AAAA,cACR,cAAc,IAAI,gBAAgB;AAAA,cAClC,YAAY,IAAI;AAAA,cAChB,kBAAkB,IAAI,oBAAoB;AAAA,YAC5C;AAAA,UACF,CAAC;AAED,iBAAO,KAAK,UAAU,cAAc,iCAAiC;AAAA,YACnE,MAAM,IAAI;AAAA,YACV,IAAI,IAAI;AAAA,YACR,cAAc,IAAI;AAAA,UACpB,CAAC;AAAA,QACH;AAAA,MACF;AAAA,IACF,SAAS,KAAK;AACZ,aAAO,KAAK,UAAU,cAAc,sCAAsC;AAAA,QACxE,OAAQ,IAAc;AAAA,MACxB,CAAC;AAAA,IACH;AAAA,EACF;AAGA,kBAAgB,OAAO,KAAK,SAAS,SAAS;AAG9C,SAAO,aAAa,CAAC,UAAU;AAC7B,UAAM,EAAE,WAAW,OAAO,MAAM,WAAW,SAAS,GAAG,KAAK,IAAI;AAChE,mBAAe;AAAA,MACb;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA,MAAM,OAAO,KAAK,IAAI,EAAE,SAAS,IAAI,KAAK,UAAU,IAAI,IAAI;AAAA,MAC5D,YAAa,KAAK,cAAyB;AAAA,IAC7C,CAAC;AAAA,EACH,CAAC;AAGD,QAAM,mBAAmB,gBAAgB;AACzC,MAAI,kBAAkB;AACpB,UAAM,SAASC,OAAK,KAAK,UAAU,MAAM;AACzC,UAAM,gBAAgB,mBAAmB,QAAQ,gBAAgB;AACjE,QAAI,gBAAgB,GAAG;AACrB,aAAO,KAAK,UAAU,kBAAkB,YAAY,aAAa,4BAA4B,EAAE,UAAU,cAAc,CAAC;AAAA,IAC1H;AAAA,EACF;AAGA,QAAM,gBAAgBA,OAAK,KAAK,UAAU,YAAY;AACtD,QAAM,cAAc,IAAI,qBAAqB,aAAa;AAC1D,QAAM,cAAc,wBAAwB,OAAO,SAAS;AAC5D,QAAM,oBAAoB,IAAI,yBAAyB,aAAa,OAAO,SAAS;AACpF,QAAM,eAAe,IAAI,mBAAmB;AAC5C,QAAM,mBAAmB,IAAI,iBAAiB,aAAa,mBAAmB,cAAc,MAAM;AAClG,SAAO,KAAK,UAAU,iBAAiB,gCAAgC,EAAE,YAAY,cAAc,CAAC;AACpG,QAAM,kBAAkB,IAAI,2BAA2B,YAAY,QAAQ,GAAG,UAAU,MAAM;AAG9F,MAAI;AACJ,MAAI;AACF,UAAM,EAAE,+BAA+B,uBAAAC,uBAAsB,IAAI,MAAM,OAAO,sBAAoB;AAClG,qBAAiBA,uBAAsB;AACvC,UAAM,8BAA8B,gBAAgB,QAAQ;AAC5D,WAAO,KAAK,UAAU,YAAY,sCAAsC;AAAA,EAC1E,SAAS,KAAK;AACZ,WAAO,KAAK,UAAU,aAAa,4CAA4C,EAAE,OAAQ,IAAc,QAAQ,CAAC;AAAA,EAClH;AAGA,MAAI;AACF,UAAM,UAAU,YAAY;AAE5B,UAAM,YAAY,QAAQ;AAAA,MACxB;AAAA,IACF,EAAE,IAAI;AAEN,QAAI,UAAU,SAAS,GAAG;AACxB,YAAM,cAAc,aAAa;
AACjC,cAAQ;AAAA,QACN;AAAA,MACF,EAAE,IAAI,WAAW;AACjB,iBAAW,OAAO,WAAW;AAC3B,eAAO,UAAU;AAAA,UACf,QAAQ;AAAA,UACR,MAAM;AAAA,UACN,OAAO,gBAAgB,IAAI,QAAQ,WAAW;AAAA,UAC9C,SAAS;AAAA,UACT,MAAM,cAAc,IAAI,EAAE;AAAA,UAC1B,UAAU,EAAE,UAAU,IAAI,MAAM,OAAO,IAAI,IAAI,QAAQ,iBAAiB;AAAA,QAC1E,GAAG,WAAW,OAAO;AAAA,MACvB;AACA,aAAO,KAAK,UAAU,WAAW,oCAAoC;AAAA,QACnE,OAAO,UAAU;AAAA,QACjB,KAAK,UAAU,IAAI,CAAC,MAAM,EAAE,EAAE;AAAA,MAChC,CAAC;AAAA,IACH;AAAA,EACF,SAAS,KAAK;AACZ,WAAO,KAAK,UAAU,aAAa,8BAA8B,EAAE,OAAQ,IAAc,QAAQ,CAAC;AAAA,EACpG;AAGA,MAAI,QAAuB;AAC3B;AACE,UAAM,OAAO,gBAAgBD,OAAK,QAAQ,IAAI,IAAI,YAAY,GAAG,EAAE,QAAQ,CAAC;AAC5E,QAAI,MAAM;AACR,YAAM,YAAYA,OAAK,KAAK,MAAM,QAAQ,IAAI;AAC9C,UAAID,KAAG,WAAW,SAAS,EAAG,SAAQ;AAAA,IACxC;AAAA,EACF;AACA,MAAI,OAAO;AACT,WAAO,MAAM,UAAU,cAAc,6BAA6B,EAAE,MAAM,MAAM,CAAC;AAAA,EACnF;AAEA,QAAM,eAAe,IAAI,aAAa;AAAA,IACpC,iBAAiB;AAAA,IACjB,kBAAkB;AAAA,IAClB,sBAAsB;AAAA,IACtB,kBAAkB;AAAA,IAClB,iBAAiB;AAAA,IACjB;AAAA,EACF,CAAC;AAED,QAAM,SAAS,IAAI,aAAa;AAAA,IAC9B;AAAA,IACA;AAAA,IACA,OAAO,SAAS;AAAA;AAAA;AAAA;AAAA;AAAA,EAKlB,CAAC;AAID,QAAM,WAAW,IAAI,gBAAgB;AAAA,IACnC,aAAa;AAAA,IACb,SAAS,MAAM;AAAA,IAAC;AAAA,EAClB,CAAC;AAED,QAAM,kBAAkB,IAAI,gBAAgB;AAAA,IAC1C,oBAAoB,OAAO,QAAQ,iBAAiB;AAAA,MAAI,CAAC,MACvD,wBAAwB,GAAG,kBAAkB,UAAU;AAAA,IACzD;AAAA,EACF,CAAC;AAED,QAAM,YAAYC,OAAK,KAAK,UAAU,QAAQ;AAC9C,QAAM,iBAAiB,oBAAI,IAAyB;AAEpD,QAAM,aAAa,iBAAiB,EAAE,WAAW,OAAO,CAAC;AACzD,aAAW,yBAAyB;AAGpC,QAAM,gBAAgB,oBAAoB;AAAA,IACxC;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA,UAAU;AAAA,EACZ,CAAC;AAGD,QAAM,mBAAmB,+BAA+B;AAAA,IACtD;AAAA,IAAU;AAAA,IAAgB;AAAA,IAAY;AAAA,IACtC;AAAA,IAAQ;AAAA,IAAc;AAAA,IAAW;AAAA,IAAQ;AAAA,IAAY;AAAA,EACvD,CAAC;AACD,SAAO,cAAc,QAAQ,sBAAsB,iBAAiB,cAAc;AAClF,SAAO,cAAc,QAAQ,wBAAwB,iBAAiB,gBAAgB;AAItF,QAAM,kBAAkB,sBAAsB;AAAA,IAC5C;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA,kBAAkB,WAAW;AAAA,IAC7B;AAAA,IACA,qBAAqB,cAAc;AAAA,EACrC,CAAC;AACD,SAAO,cAAc,QAAQ,WAAW,eAAe;AAIvD,SAAO,cAAc,QAAQ,gBAAgB,cAAc,eAAe;AAG1E,QAAM,cAAc,EAAE,kBAAkB,
YAAY,OAAO;AAC3D,SAAO,cAAc,QAAQ,YAAY,4BAA4B,WAAW,CAAC;AACjF,SAAO,cAAc,QAAQ,mBAAmB,2BAA2B,WAAW,CAAC;AACvF,SAAO,cAAc,QAAQ,mBAAmB,2BAA2B,WAAW,CAAC;AAGvF,QAAM,kBAAkB,IAAI,gBAAgB;AAC5C,MAAI,aAAa,kBAAkB,QAAQ;AAE3C,SAAO,cAAc,OAAO,eAAe,YAAY,gBAAgB,QAAQ,CAAC;AAChF,SAAO,cAAc,OAAO,kBAAkB,YAAY,oBAAoB,QAAQ,CAAC;AAEvF,SAAO,cAAc,OAAO,sBAAsB,YAAY,sBAAsB,QAAQ,CAAC;AAC7F,SAAO,cAAc,OAAO,qBAAqB,YAAY,qBAAqB,QAAQ,CAAC;AAG3F,QAAM,0BAAoD,CAAC;AAC3D,aAAW,KAAK,WAAW;AACzB,UAAM,OAAO,EAAE,SAAS,YAAY,CAAC;AACrC,QAAI,KAAK,SAAS,EAAG,yBAAwB,EAAE,WAAW,IAAI;AAAA,EAChE;AAOA,QAAM,YAAY,6BAA6B,MAAM;AAGrD,YAAU,GAAG,CAAC,GAAG,MAAM;AAAE,iBAAa,kBAAkB,QAAQ;AAAA,EAAG,CAAC;AAKpE,YAAU,GAAG,CAAC,GAAG,CAAC,QAAQ;AAAE,eAAW,UAAU;AAAA,EAAK,CAAC;AAIvD,YAAU,GAAG,CAAC,WAAW,WAAW,GAAG,CAAC,QAAQ;AAC9C,iCAA6BA,OAAK,QAAQ,QAAQ,GAAG,UAAU,GAAG;AAAA,EACpE,CAAC;AAGD,YAAU,GAAG,CAAC,SAAS,GAAG,wBAAwB;AAAA,IAChD;AAAA,IACA;AAAA,EACF,CAAC,CAAC;AAGF,YAAU,GAAG,CAAC,kBAAkB,GAAG,CAAC,QAAQ;AAC1C,WAAO,SAAS,IAAI,OAAO,SAAS;AACpC,QAAI,IAAI,OAAO,cAAc,SAAS;AACpC,cAAQ,IAAI,mBAAmB;AAAA,IACjC,OAAO;AACL,aAAO,QAAQ,IAAI;AAAA,IACrB;AAAA,EACF,CAAC;AAED,iBAAe,qBAAqB;AAClC,UAAM,uBAAuB,cAAc,EAAE,gBAAgB,UAAU,kBAAkB,QAAQ,WAAW,CAAC;AAAA,EAC/G;AAEA,YAAU,GAAG,CAAC,aAAa,GAAG,YAAY;AACxC,UAAM,mBAAmB;AAAA,EAC3B,CAAC;AAED,iBAAe,0BAA0B,cAAwB;AAC/D,UAAM,kBAAkB,oBAAoB,UAAU,MAAM;AAC5D,QAAI,CAAC,iBAAiB;AACpB,mBAAa,kBAAkB,QAAQ;AACvC,aAAO;AAAA,IACT;AACA,UAAM,UAAU,KAAK,cAAc,eAAe;AAClD,WAAO;AAAA,EACT;AAEA,SAAO,cAAc,OAAO,sBAAsB,OAAO,QAAQ;AAC/D,UAAM,SAAS,MAAM,sBAAsB,UAAU,IAAI,IAAI;AAC7D,QAAI,CAAC,OAAO,UAAU,OAAO,SAAS,KAAK;AACzC,YAAM,OAAO,IAAI;AACjB,YAAM,eAAe,oBAAoB,KAAK,OAAO,KAAK,KAAK;AAC/D,YAAM,kBAAkB,MAAM,0BAA0B,YAAY;AACpE,UAAI,iBAAiB;AACnB,cAAM,UAAU,kCAAkC,KAAK,OAAO,YAAY;AAC1E,eAAO,UAAU;AAAA,UACf,QAAQ;AAAA,UACR,MAAM;AAAA,UACN,OAAO,QAAQ;AAAA,UACf,SAAS,QAAQ;AAAA,UACjB,MAAM,QAAQ,QAAQ;AAAA,UACtB,UAAU,QAAQ;AAAA,QACpB,GAAG,eAAe;AAAA,MACpB,OAAO;AACL,qBAAa,kBAAkB,QAAQ;AAAA,MACzC;AAAA,IACF;AACA,WAAO;AAAA,EACT,CAAC;AAED,QAAM,kBAAkB,sBAAsB;AAAA,IAC5C;AAAA,EACF,CAAC;AACD,SAAO,cAAc,OAAO,yBAA
yB,gBAAgB,iBAAiB;AAGtF,QAAM,gBAAgB,EAAE,KAAK,MAAM,WAAW;AAC9C,SAAO,cAAc,OAAO,cAAc,uBAAuB;AAAA,IAC/D;AAAA,IACA;AAAA,IACA;AAAA,IACA,YAAY;AAAA,EACd,CAAC,CAAC;AAEF,SAAO,cAAc,OAAO,aAAa,eAAe;AACxD,SAAO,cAAc,OAAO,oBAAoB,eAAe;AAC/D,SAAO,cAAc,OAAO,oBAAoB,eAAe;AAC/D,SAAO,cAAc,OAAO,iBAAiB,eAAe;AAG5D,SAAO,cAAc,QAAQ,YAAY,0BAA0B,MAAM,CAAC;AAE1E,SAAO,cAAc,OAAO,eAAe,OAAO,QAAQ,gBAAgB,GAAG,CAAC;AAC9E,SAAO,cAAc,QAAQ,gBAAgB,OAAO,QAAQ,cAAc,EAAE,UAAU,gBAAgB,GAAG,IAAI,IAAI,CAAC;AAGlH,QAAM,oBAAoBA,OAAK,QAAQ,QAAQ;AAC/C,QAAM,iBAAiB,qBAAqB;AAAA,IAC1C;AAAA,IACA,aAAa;AAAA,IACb,gBAAgB,OAAO;AAAA,IACvB;AAAA,IACA,kBAAkB,MAAM;AACtB,iBAAW,MAAM;AACf,gBAAQ,KAAK,QAAQ,KAAK,SAAS;AAAA,MACrC,GAAG,yBAAyB;AAAA,IAC9B;AAAA,EACF,CAAC;AAED,SAAO,cAAc,OAAO,sBAAsB,OAAO,QAAQ,eAAe,mBAAmB,GAAG,CAAC;AACvG,SAAO,cAAc,QAAQ,qBAAqB,OAAO,QAAQ,eAAe,kBAAkB,GAAG,CAAC;AACtG,SAAO,cAAc,QAAQ,qBAAqB,OAAO,QAAQ,eAAe,kBAAkB,GAAG,CAAC;AACtG,SAAO,cAAc,OAAO,uBAAuB,OAAO,QAAQ,eAAe,oBAAoB,GAAG,CAAC;AAEzG,SAAO,cAAc,OAAO,wBAAwB,OAAO,QAAQ,kBAAkB,iBAAiB,IAAI,OAAO,KAAK,CAAC;AAEvH,SAAO,cAAc,OAAO,iBAAiB,kBAAkB;AAE/D,SAAO,cAAc,OAAO,qBAAqB,gBAAgB;AACjE,QAAM,mBAAmB,8BAA8B,EAAE,kBAAkB,UAAU,QAAQ,WAAW,CAAC;AACzG,SAAO,cAAc,OAAO,4BAA4B,iBAAiB,sBAAsB;AAC/F,SAAO,cAAc,QAAQ,8BAA8B,iBAAiB,qBAAqB;AACjG,SAAO,cAAc,UAAU,qBAAqB,iBAAiB,mBAAmB;AACxF,SAAO,cAAc,OAAO,6BAA6B,uBAAuB;AAChF,SAAO,cAAc,OAAO,+BAA+B,wBAAwB;AACnF,SAAO,cAAc,OAAO,iCAAiC,2BAA2B;AACxF,SAAO,cAAc,OAAO,2BAA2B,qBAAqB;AAG5E,SAAO,cAAc,OAAO,yBAAyB,oBAAoB;AACzE,SAAO,cAAc,OAAO,6BAA6B,kBAAkB;AAC3E,SAAO,cAAc,OAAO,6BAA6B,qBAAqB;AAC9E,SAAO,cAAc,OAAO,sBAAsB,sBAAsB;AACxE,SAAO,cAAc,OAAO,0BAA0B,oBAAoB;AAC1E,SAAO,cAAc,UAAU,6BAA6B,qBAAqB;AACjF,SAAO,cAAc,UAAU,0BAA0B,+BAA+B,EAAE,UAAU,OAAO,CAAC,CAAC;AAG7G,SAAO,cAAc,OAAO,eAAe,gBAAgB;AAC3D,SAAO,cAAc,OAAO,mBAAmB,cAAc;AAC7D,SAAO,cAAc,OAAO,iBAAiB,kBAAkB;AAC/D,SAAO,cAAc,OAAO,oBAAoB,mBAAmB;AACnE,SAAO,cAAc,OAAO,cAAc,kBAAkB;AAC5D,SAAO,cAAc,OAAO,kBAAkB,cAAc;AAC5D,SAAO,cAAc,OAAO,eAAe,eAAe;AAE1D,QAAM,cAAc,wBAAwB,EAAE,SAAS,CAAC;AACxD,SAAO,cAAc,OAAO,8BAA8B,YAAY,mBAAmB;AAGzF,QAAM,mBAAmB,u
BAAuB,EAAE,UAAU,kBAAkB,OAAO,CAAC;AACtF,SAAO,cAAc,QAAQ,kBAAkB,iBAAiB,SAAS;AACzE,SAAO,cAAc,OAAO,mBAAmB,iBAAiB,cAAc;AAC9E,SAAO,cAAc,OAAO,uBAAuB,iBAAiB,YAAY;AAChF,SAAO,cAAc,OAAO,+BAA+B,iBAAiB,mBAAmB;AAC/F,SAAO,cAAc,OAAO,6BAA6B,iBAAiB,iBAAiB;AAE3F,SAAO,cAAc,OAAO,oBAAoB,OAAO,QAAQ,gBAAgB,KAAK,QAAQ,CAAC;AAC7F,SAAO,cAAc,OAAO,wBAAwB,OAAO,QAAQ,cAAc,KAAK,QAAQ,CAAC;AAC/F,SAAO,cAAc,OAAO,6BAA6B,OAAO,QAAQ,kBAAkB,KAAK,QAAQ,CAAC;AACxG,SAAO,cAAc,OAAO,wBAAwB,OAAO,QAAQ;AACjE,UAAM,SAAS,MAAM,iBAAiB,KAAK,QAAQ;AACnD,QAAI,CAAC,OAAO,UAAU,OAAO,SAAS,KAAK;AACzC,YAAM,mBAAmB;AAAA,IAC3B;AACA,WAAO;AAAA,EACT,CAAC;AACD,SAAO,cAAc,QAAQ,oBAAoB,OAAO,QAAQ;AAC9D,UAAM,SAAS,MAAM,iBAAiB,KAAK,QAAQ;AACnD,QAAI,CAAC,OAAO,UAAU,OAAO,SAAS,KAAK;AACzC,YAAM,mBAAmB;AAAA,IAC3B;AACA,WAAO;AAAA,EACT,CAAC;AACD,SAAO,cAAc,QAAQ,6BAA6B,OAAO,QAAQ;AACvE,UAAM,SAAS,MAAM,eAAe,KAAK,QAAQ;AACjD,QAAI,CAAC,OAAO,UAAU,OAAO,SAAS,KAAK;AACzC,YAAM,mBAAmB;AAAA,IAC3B;AACA,WAAO;AAAA,EACT,CAAC;AACD,SAAO,cAAc,UAAU,wBAAwB,OAAO,QAAQ;AACpE,UAAM,SAAS,MAAM,iBAAiB,KAAK,QAAQ;AACnD,QAAI,CAAC,OAAO,UAAU,OAAO,SAAS,KAAK;AACzC,YAAM,mBAAmB;AAAA,IAC3B;AACA,WAAO;AAAA,EACT,CAAC;AACD,SAAO,cAAc,OAAO,+BAA+B,OAAO,QAAQ,oBAAoB,KAAK,QAAQ,CAAC;AAC5G,SAAO,cAAc,OAAO,+BAA+B,OAAO,QAAQ;AACxE,UAAM,SAAS,MAAM,uBAAuB,KAAK,QAAQ;AACzD,QAAI,CAAC,OAAO,UAAU,OAAO,SAAS,KAAK;AACzC,YAAM,0BAA0B,CAAC,eAAe,IAAI,OAAO,EAAE,EAAE,CAAC;AAAA,IAClE;AACA,WAAO;AAAA,EACT,CAAC;AAGD,SAAO,cAAc,OAAO,kBAAkB,YAAY,mBAAmB,CAAC;AAC9E,SAAO,cAAc,QAAQ,uBAAuB,OAAO,QAAQ,mBAAmB,GAAG,CAAC;AAK1F,QAAM,WAAW,uBAAuB,EAAE,WAAW,iBAAiB,CAAC;AACvE,SAAO,cAAc,QAAQ,qBAAqB,SAAS,cAAc;AACzE,SAAO,cAAc,QAAQ,sBAAsB,SAAS,eAAe;AAC3E,SAAO,cAAc,QAAQ,wBAAwB,SAAS,iBAAiB;AAC/E,SAAO,cAAc,OAAO,kBAAkB,SAAS,WAAW;AAClE,SAAO,cAAc,OAAO,qBAAqB,SAAS,cAAc;AACxE,SAAO,cAAc,OAAO,iBAAiB,SAAS,UAAU;AAGhE,QAAM,iBAAiB,qBAAqB,EAAE,IAAI,WAAW,UAAU,WAAW,CAAC;AACnF,SAAO,cAAc,QAAQ,eAAe,eAAe,kBAAkB;AAC7E,SAAO,cAAc,OAAO,gBAAgB,eAAe,iBAAiB;AAC5E,SAAO,cAAc,QAAQ,wBAAwB,eAAe,oBAAoB;AACxF,SAAO,cAAc,QAAQ,gBAAgB,eAAe,aAAa;AAEzE,QAAM,uBAAuB,2BAA2B,EAAE,SAAS,CAAC;AACpE,SAAO,cAAc,
OAAO,sBAAsB,qBAAqB,qBAAqB;AAC5F,SAAO,cAAc,OAAO,sBAAsB,OAAO,QAAQ;AAC/D,UAAM,SAAS,MAAM,qBAAqB,sBAAsB,GAAG;AACnE,QAAI,CAAC,OAAO,UAAU,OAAO,SAAS,KAAK;AACzC,YAAM,0BAA0B,CAAC,YAAY,CAAC;AAAA,IAChD;AACA,WAAO;AAAA,EACT,CAAC;AAGD,QAAM,WAAW,aAAa,EAAE,YAAY,WAAW,QAAQ,UAAU,eAAe,OAAO,QAAQ,CAAC;AACxG,YAAU,GAAG,CAAC,MAAM,GAAG,YAAY;AACjC,UAAM,SAAS,gBAAgB;AAAA,EACjC,CAAC;AACD,QAAM,SAAS,gBAAgB;AAE/B,QAAM,eAAe,mBAAmB;AAAA,IACtC;AAAA,IACA;AAAA,IACA;AAAA,IACA,eAAe,SAAS;AAAA,IACxB,eAAe,SAAS;AAAA,EAC1B,CAAC;AACD,SAAO,cAAc,QAAQ,qBAAqB,OAAO,QAAQ;AAC/D,UAAM,SAAS,MAAM,aAAa,cAAc,GAAG;AACnD,QAAI,CAAC,OAAO,UAAU,OAAO,SAAS,KAAK;AACzC,YAAM,0BAA0B,CAAC,gBAAgB,iBAAiB,CAAC;AAAA,IACrE;AACA,WAAO;AAAA,EACT,CAAC;AACD,SAAO,cAAc,QAAQ,wBAAwB,OAAO,QAAQ;AAClE,UAAM,SAAS,MAAM,aAAa,iBAAiB,GAAG;AACtD,QAAI,CAAC,OAAO,UAAU,OAAO,SAAS,KAAK;AACzC,YAAM,0BAA0B,CAAC,gBAAgB,iBAAiB,CAAC;AAAA,IACrE;AACA,WAAO;AAAA,EACT,CAAC;AACD,SAAO,cAAc,OAAO,oBAAoB,aAAa,YAAY;AACzE,SAAO,cAAc,QAAQ,sBAAsB,aAAa,cAAc;AAC9E,SAAO,cAAc,QAAQ,0BAA0B,aAAa,iBAAiB;AACrF,SAAO,cAAc,QAAQ,4BAA4B,aAAa,mBAAmB;AACzF,SAAO,cAAc,QAAQ,8BAA8B,aAAa,oBAAoB;AAE5F,QAAM,qBAAqB,yBAAyB;AAAA,IAClD,eAAe,SAAS;AAAA,EAC1B,CAAC;AACD,SAAO,cAAc,OAAO,0BAA0B,mBAAmB,YAAY;AACrF,SAAO,cAAc,OAAO,0BAA0B,mBAAmB,YAAY;AACrF,SAAO,cAAc,OAAO,4BAA4B,mBAAmB,cAAc;AACzF,SAAO,cAAc,OAAO,2BAA2B,mBAAmB,aAAa;AACvF,SAAO,cAAc,OAAO,4BAA4B,mBAAmB,cAAc;AAIzF,SAAO,cAAc,OAAO,eAAe,oBAAoB,EAAE,kBAAkB,eAAe,SAAS,eAAe,UAAU,CAAC,CAAC;AACtI,SAAO,cAAc,OAAO,iBAAiB,aAAa;AAC1D,SAAO,cAAc,OAAO,yBAAyB,YAAY,yBAAyB,QAAQ,CAAC;AACnG,SAAO,cAAc,OAAO,0BAA0B,YAAY,uBAAuB,gBAAgB,CAAC;AAC1G,SAAO,cAAc,QAAQ,0BAA0B,YAAY,uBAAuB,gBAAgB,CAAC;AAC3G,SAAO,cAAc,QAAQ,4BAA4B,YAAY,yBAAyB,gBAAgB,CAAC;AAC/G,SAAO,cAAc,QAAQ,gCAAgC,YAAY,4BAA4B,gBAAgB,CAAC;AACtH,SAAO,cAAc,QAAQ,gCAAgC,YAAY,4BAA4B,gBAAgB,CAAC;AACtH,SAAO,cAAc,OAAO,yBAAyB,YAAY,sBAAsB,eAAe,CAAC;AACvG,SAAO,cAAc,QAAQ,0BAA0B,YAAY,uBAAuB,eAAe,CAAC;AAC1G,SAAO,cAAc,QAAQ,wBAAwB,YAAY,qBAAqB,eAAe,CAAC;AACtG,SAAO,cAAc,QAAQ,yBAAyB,YAAY,sBAAsB,eAAe,CAAC;AACxG,SAAO,cAAc,QAAQ,iCAAiC,YAAY,6BAA6B,eAAe,CAAC;AAG
vH,SAAO,cAAc,OAAO,sBAAsB,OAAO,QAAQ,wBAAwB,UAAU,IAAI,KAAK,CAAC;AAC7G,SAAO,cAAc,QAAQ,sBAAsB,OAAO,QAAQ,yBAAyB,UAAU,IAAI,IAAI,CAAC;AAC9G,SAAO,cAAc,SAAS,0BAA0B,OAAO,QAAQ,yBAAyB,UAAU,IAAI,OAAO,IAAI,IAAI,IAAI,CAAC;AAClI,SAAO,cAAc,QAAQ,kCAAkC,OAAO,QAAQ,iBAAiB,UAAU,IAAI,IAAI,CAAC;AAClH,SAAO,cAAc,QAAQ,oCAAoC,OAAO,QAAQ,kBAAkB,UAAU,IAAI,IAAI,CAAC;AACrH,SAAO,cAAc,OAAO,+BAA+B,YAAY,kBAAkB,CAAC;AAC1F,SAAO,cAAc,OAAO,mCAAmC,YAAY,kBAAkB,CAAC;AAI9F,QAAM,OAAO,oBAAoB;AACjC,QAAM,eAAe,MAAM,YAAY,OAAO,OAAO,MAAM,QAAQ;AACnE,MAAI,iBAAiB,GAAG;AACtB,WAAO,KAAK,UAAU,aAAa,oDAAoD;AAAA,EACzF;AACA,QAAM,OAAO,MAAM,YAAY;AAC/B,SAAO,KAAK,UAAU,cAAc,gBAAgB,EAAE,OAAO,UAAU,MAAM,OAAO,KAAK,CAAC;AAG1F,MAAI,OAAO,OAAO,SAAS,QAAQ,iBAAiB,GAAG;AACrD,QAAI;AACF,mBAAa,UAAU,CAAC,OAAO;AAAA,QAC7B,GAAG;AAAA,QACH,QAAQ,EAAE,GAAG,EAAE,QAAQ,MAAM,aAAa;AAAA,MAC5C,EAAE;AACF,aAAO,KAAK,UAAU,eAAe,4CAA4C,EAAE,MAAM,aAAa,CAAC;AAAA,IACzG,SAAS,KAAK;AACZ,aAAO,KAAK,UAAU,eAAe,uCAAuC,EAAE,OAAQ,IAAc,QAAQ,CAAC;AAAA,IAC/G;AAAA,EACF;AAGA,oBAAkB,cAAc,EAAE,kBAAkB,UAAU,QAAQ,YAAY,IAAI,WAAW,UAAU,gBAAgB,CAAC;AAC5H,WAAS,iBAAiB,YAAY;AAGtC,QAAM,uBAAuB,cAAc,EAAE,gBAAgB,UAAU,kBAAkB,QAAQ,WAAW,CAAC;AAE7G,eAAa,MAAM;AAInB,QAAM,WAAW,OAAO,WAAmB;AACzC,WAAO,KAAK,UAAU,cAAc,GAAG,MAAM,WAAW;AACxD,iBAAa,KAAK;AAElB,UAAM,uBAAuB,cAAc,oBAAoB;AAC/D,QAAI,sBAAsB;AACxB,aAAO,KAAK,UAAU,cAAc,mDAAmD;AACvF,YAAM;AAAA,IACR;AACA,aAAS,QAAQ;AACjB,UAAM,OAAO,KAAK;AAClB,gBAAY,MAAM;AAClB,kBAAc;AACd,WAAO,MAAM;AACb,YAAQ,KAAK,CAAC;AAAA,EAChB;AAEA,UAAQ,GAAG,WAAW,MAAM,SAAS,SAAS,CAAC;AAC/C,UAAQ,GAAG,UAAU,MAAM,SAAS,QAAQ,CAAC;AAC/C;","names":["fs","path","resolve","fs","path","resolve","fs","path","fs","path","path","fs","path","os","fs","path","os","TRANSCRIPT_BASE","fs","path","os","fs","path","os","fs","result","fs","path","path","fs","YAML","path","fs","fs","os","path","spawn","path","os","fs","spawn","semver","fs","path","fs","path","fs","path","fs","path","fs","path","os","path","path","fs","path","path","fs","createHash","fs","path","path","fs","createHash","SELECT_COLUMNS","SELECT_COLUMNS","SEL
ECT_COLUMNS","SELECT_COLUMNS","DEFAULT_LIST_LIMIT","DEFAULT_STATUS","DEFAULT_PROCESSED","SELECT_COLUMNS","DEFAULT_LIST_LIMIT","DEFAULT_LIST_OFFSET","DEFAULT_LIST_LIMIT","DEFAULT_LIST_OFFSET","createHash","createHash","ACTIVE_STATUS","fs","fs","fs","fs","import_yaml","stringifyYaml","parseYaml","HTTP_OK","HTTP_BAD_REQUEST","loadAllTasks","resolve","resolve","fs","path","contentType","path","fs","fs","path","path","fs","path","path","fs","path","path","fs","fs","createHash","os","path","path","os","createHash","triggerTitleSummary","fs","fs","path","triggerTitleSummary","path","fs","path","fs","path","resolveDefinitionsDir"]}