@fml-inc/panopticon 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (124)
  1. package/.claude-plugin/plugin.json +10 -0
  2. package/LICENSE +5 -0
  3. package/README.md +363 -0
  4. package/bin/hook-handler +3 -0
  5. package/bin/mcp-server +3 -0
  6. package/bin/panopticon +3 -0
  7. package/bin/proxy +3 -0
  8. package/bin/server +3 -0
  9. package/dist/api/client.d.ts +67 -0
  10. package/dist/api/client.js +48 -0
  11. package/dist/api/client.js.map +1 -0
  12. package/dist/chunk-3BUJ7URA.js +387 -0
  13. package/dist/chunk-3BUJ7URA.js.map +1 -0
  14. package/dist/chunk-3TZAKV3M.js +158 -0
  15. package/dist/chunk-3TZAKV3M.js.map +1 -0
  16. package/dist/chunk-4SM2H22C.js +169 -0
  17. package/dist/chunk-4SM2H22C.js.map +1 -0
  18. package/dist/chunk-7Q3BJMLG.js +62 -0
  19. package/dist/chunk-7Q3BJMLG.js.map +1 -0
  20. package/dist/chunk-BVOE7A2Z.js +412 -0
  21. package/dist/chunk-BVOE7A2Z.js.map +1 -0
  22. package/dist/chunk-CF4GPWLI.js +170 -0
  23. package/dist/chunk-CF4GPWLI.js.map +1 -0
  24. package/dist/chunk-DZ5HJFB4.js +467 -0
  25. package/dist/chunk-DZ5HJFB4.js.map +1 -0
  26. package/dist/chunk-HQCY722C.js +428 -0
  27. package/dist/chunk-HQCY722C.js.map +1 -0
  28. package/dist/chunk-HRCEIYKU.js +134 -0
  29. package/dist/chunk-HRCEIYKU.js.map +1 -0
  30. package/dist/chunk-K7YUPLES.js +76 -0
  31. package/dist/chunk-K7YUPLES.js.map +1 -0
  32. package/dist/chunk-L7G27XWF.js +130 -0
  33. package/dist/chunk-L7G27XWF.js.map +1 -0
  34. package/dist/chunk-LWXF7YRG.js +626 -0
  35. package/dist/chunk-LWXF7YRG.js.map +1 -0
  36. package/dist/chunk-NXH7AONS.js +1120 -0
  37. package/dist/chunk-NXH7AONS.js.map +1 -0
  38. package/dist/chunk-QK5442ZP.js +55 -0
  39. package/dist/chunk-QK5442ZP.js.map +1 -0
  40. package/dist/chunk-QVK6VGCV.js +1703 -0
  41. package/dist/chunk-QVK6VGCV.js.map +1 -0
  42. package/dist/chunk-RX2RXHBH.js +1699 -0
  43. package/dist/chunk-RX2RXHBH.js.map +1 -0
  44. package/dist/chunk-SEXU2WYG.js +788 -0
  45. package/dist/chunk-SEXU2WYG.js.map +1 -0
  46. package/dist/chunk-SUGSQ4YI.js +264 -0
  47. package/dist/chunk-SUGSQ4YI.js.map +1 -0
  48. package/dist/chunk-TGXFVAID.js +138 -0
  49. package/dist/chunk-TGXFVAID.js.map +1 -0
  50. package/dist/chunk-WLBNFVIG.js +447 -0
  51. package/dist/chunk-WLBNFVIG.js.map +1 -0
  52. package/dist/chunk-XLTCUH5A.js +1072 -0
  53. package/dist/chunk-XLTCUH5A.js.map +1 -0
  54. package/dist/chunk-YVRWVDIA.js +146 -0
  55. package/dist/chunk-YVRWVDIA.js.map +1 -0
  56. package/dist/chunk-ZEC4LRKS.js +176 -0
  57. package/dist/chunk-ZEC4LRKS.js.map +1 -0
  58. package/dist/cli.d.ts +1 -0
  59. package/dist/cli.js +1084 -0
  60. package/dist/cli.js.map +1 -0
  61. package/dist/config-NwoZC-GM.d.ts +20 -0
  62. package/dist/db.d.ts +46 -0
  63. package/dist/db.js +15 -0
  64. package/dist/db.js.map +1 -0
  65. package/dist/doctor.d.ts +37 -0
  66. package/dist/doctor.js +14 -0
  67. package/dist/doctor.js.map +1 -0
  68. package/dist/hooks/handler.d.ts +23 -0
  69. package/dist/hooks/handler.js +295 -0
  70. package/dist/hooks/handler.js.map +1 -0
  71. package/dist/index.d.ts +57 -0
  72. package/dist/index.js +101 -0
  73. package/dist/index.js.map +1 -0
  74. package/dist/mcp/server.d.ts +1 -0
  75. package/dist/mcp/server.js +243 -0
  76. package/dist/mcp/server.js.map +1 -0
  77. package/dist/otlp/server.d.ts +7 -0
  78. package/dist/otlp/server.js +17 -0
  79. package/dist/otlp/server.js.map +1 -0
  80. package/dist/permissions.d.ts +33 -0
  81. package/dist/permissions.js +14 -0
  82. package/dist/permissions.js.map +1 -0
  83. package/dist/pricing.d.ts +29 -0
  84. package/dist/pricing.js +13 -0
  85. package/dist/pricing.js.map +1 -0
  86. package/dist/proxy/server.d.ts +10 -0
  87. package/dist/proxy/server.js +20 -0
  88. package/dist/proxy/server.js.map +1 -0
  89. package/dist/prune.d.ts +18 -0
  90. package/dist/prune.js +13 -0
  91. package/dist/prune.js.map +1 -0
  92. package/dist/query.d.ts +56 -0
  93. package/dist/query.js +27 -0
  94. package/dist/query.js.map +1 -0
  95. package/dist/reparse-636YZCE3.js +14 -0
  96. package/dist/reparse-636YZCE3.js.map +1 -0
  97. package/dist/repo.d.ts +17 -0
  98. package/dist/repo.js +9 -0
  99. package/dist/repo.js.map +1 -0
  100. package/dist/scanner.d.ts +73 -0
  101. package/dist/scanner.js +15 -0
  102. package/dist/scanner.js.map +1 -0
  103. package/dist/sdk.d.ts +82 -0
  104. package/dist/sdk.js +208 -0
  105. package/dist/sdk.js.map +1 -0
  106. package/dist/server.d.ts +5 -0
  107. package/dist/server.js +25 -0
  108. package/dist/server.js.map +1 -0
  109. package/dist/setup.d.ts +35 -0
  110. package/dist/setup.js +19 -0
  111. package/dist/setup.js.map +1 -0
  112. package/dist/sync/index.d.ts +29 -0
  113. package/dist/sync/index.js +32 -0
  114. package/dist/sync/index.js.map +1 -0
  115. package/dist/targets.d.ts +279 -0
  116. package/dist/targets.js +20 -0
  117. package/dist/targets.js.map +1 -0
  118. package/dist/types-D-MYCBol.d.ts +128 -0
  119. package/dist/types.d.ts +164 -0
  120. package/dist/types.js +1 -0
  121. package/dist/types.js.map +1 -0
  122. package/hooks/hooks.json +274 -0
  123. package/package.json +124 -0
  124. package/skills/panopticon-optimize/SKILL.md +222 -0
@@ -0,0 +1 @@
+ {"version":3,"sources":["../src/scanner/reparse.ts","../src/scanner/loop.ts","../src/archive/local.ts","../src/archive/index.ts","../src/summary/llm.ts","../src/summary/loop.ts","../src/scanner/store.ts"],"sourcesContent":[…],"mappings":"…"}

The single added line is a generated source map whose "sourcesContent" array embeds the full TypeScript of the seven files listed in "sources" (the "mappings" field is machine-generated VLQ data and is elided). The recoverable content, unescaped and abridged to doc comments, exports, and key constants:

src/scanner/reparse.ts: atomic DB reparse. Builds a fresh database from scratch, copies non-scanner metadata from the old DB, then swaps files atomically. This avoids the cost of row-by-row deletes on large databases and ensures that parser changes (tracked via SCANNER_DATA_VERSION) are applied cleanly to all existing session data. sync_id values are preserved for rows that match on natural keys so that remote sync targets can deduplicate correctly.

- PRESERVED_TABLES (scanner-independent, copied row-by-row from old to new): hook_events, otel_logs, otel_metrics, otel_spans, watermarks, target_session_sync, model_pricing, user_config_snapshots, repo_config_snapshots.
- SESSION_MERGE_COLUMNS (sourced from hooks/OTLP, merged back after the scanner rebuilds sessions): has_hooks, has_otel, otel_input_tokens, otel_output_tokens, otel_cache_read_tokens, otel_cache_creation_tokens, summary, summary_version, permission_mode, is_automated, created_at.
- reparseAll(log): ReparseResult runs eight steps:
  1. Remove stale temp files from any prior crash and close the current DB.
  2. Create a fresh temp DB at <dbPath>-reparse with the current schema, WAL mode, and a custom decompress SQL function (gunzip).
  3. Redirect config.dbPath to the temp path and run a full scanOnce() into it.
  4. ATTACH the old DB and copy PRESERVED_TABLES in one transaction (INSERT OR IGNORE), rebuilding hook_events_fts from the copied, decompressed payloads.
  5. Merge SESSION_MERGE_COLUMNS into the scanner-created sessions rows (UPDATE ... FROM old_db.sessions matched on session_id), and copy session_repositories and session_cwds.
  6. Restore sync_id values by natural key: scanner_turns on (session_id, source, turn_index); scanner_events on (session_id, source, event_type, timestamp_ms, tool_name); tool_calls via the message natural key (session_id, ordinal) plus tool_use_id and tool_name. Then DETACH and stamp user_version with SCANNER_DATA_VERSION.
  7. Swap files atomically with fs.renameSync after deleting the -wal/-shm sidecars.
  8. Reopen the main DB handle.
- Safety: the swap is aborted if the temp DB ends up with 0 sessions while the old DB had data, and every failure path deletes the temp files and reopens the original DB, so a failed reparse leaves the old database untouched.
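The core of steps 4-7 is a copy-then-swap that never mutates the live database in place. A minimal sketch of that pattern with better-sqlite3, assuming tempPath already holds the freshly rebuilt and rescanned database; the session-count safety check and sync_id restoration are omitted, and the table list is the caller's:

```ts
import fs from "node:fs";
import Database from "better-sqlite3";

// Copy whole tables from the live DB into the rebuilt temp DB, then
// atomically replace the live file. INSERT OR IGNORE keeps whatever
// rows the rebuild scan already produced.
function copyAndSwap(livePath: string, tempPath: string, tables: string[]): void {
  const tempDb = new Database(tempPath);
  tempDb.pragma("journal_mode = WAL");
  // Single quotes in the path must be doubled inside the SQL literal.
  tempDb.exec(`ATTACH DATABASE '${livePath.replace(/'/g, "''")}' AS old_db`);
  tempDb.transaction(() => {
    for (const t of tables) {
      tempDb.exec(`INSERT OR IGNORE INTO main.${t} SELECT * FROM old_db.${t}`);
    }
  })();
  tempDb.exec("DETACH DATABASE old_db");
  tempDb.close(); // checkpoints and removes the temp DB's WAL sidecars

  // Drop the live DB's sidecars so the renamed file opens clean, then swap.
  for (const suffix of ["-wal", "-shm"]) {
    try {
      fs.unlinkSync(livePath + suffix);
    } catch {}
  }
  fs.renameSync(tempPath, livePath); // atomic on the same filesystem
}
```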
src/scanner/loop.ts: the scan loop. scanOnce() walks every registered target (the claude, codex, and gemini target modules self-register on import), discovers session files, and parses each file from its stored byte watermark.

- Fork handling: if an incremental parse reports needsFullReparse (a DAG fork detected mid-file), resetFileForReparse() clears the watermark and the session's rows while snapshotting previously assigned sync_ids, the file is reparsed from byte 0 so fork detection sees the whole file, and restoreSyncIds() reapplies the snapshot once the data is back.
- Re-indexing: a full-file parse starts turn indices at 0, so INSERT OR IGNORE deduplicates; an incremental parse (offset > 0) continues turn indices from getTurnCount() and message ordinals from getMaxOrdinal() + 1, unless the parser produces absolute indices (e.g. Gemini re-reads the full JSON file).
- Crash safety: all per-file writes (upsertSession, insertTurns, insertScannerEvents, insertMessages, writeFileWatermark) run in a single transaction, so a crash cannot leave messages inserted without watermark advancement, which would duplicate tool_calls on retry. Fork sessions found by DAG analysis are written the same way and share the file's single watermark. The watermark pattern is sketched after this list.
- Archiving: whenever a file has grown past its last archived size, the raw content is stored through the archive backend for 100% recall; archive failures are logged as warnings and are non-fatal.
- After all files are processed, linkSubagentSessions() attaches subagent sessions to their parents.

createScannerLoop(opts) wraps scanOnce() in a timer loop: a 5 s catch-up interval after a tick that found work, 60 s when idle, with the timer unref'd unless opts.keepAlive. On the first tick it checks needsResync(); if the stored data version is outdated it dynamically imports ./reparse.js, runs reparseAll(), and calls markResyncComplete() on success. generateSummariesOnce() runs only on idle ticks after the scanner has signalled readiness via opts.onReady.
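A minimal sketch of that watermark pattern, assuming the package's scanner_file_watermarks table and a caller-supplied line handler. Real code advances the offset only to the end of the last complete line (the parsers return a newByteOffset for exactly this reason); this sketch skips that detail:

```ts
import fs from "node:fs";
import Database from "better-sqlite3";

// Parse only the bytes appended since the last scan, and advance the
// watermark inside the same transaction as the inserts, so a crash can
// never record progress without the rows or insert the rows twice.
function scanTail(
  db: Database.Database,
  filePath: string,
  insertLines: (lines: string[]) => void,
): void {
  const row = db
    .prepare("SELECT byte_offset FROM scanner_file_watermarks WHERE file_path = ?")
    .get(filePath) as { byte_offset: number } | undefined;
  const offset = row?.byte_offset ?? 0;

  const size = fs.statSync(filePath).size;
  if (size <= offset) return; // nothing new since the last scan

  const buf = Buffer.alloc(size - offset);
  const fd = fs.openSync(filePath, "r");
  fs.readSync(fd, buf, 0, buf.length, offset);
  fs.closeSync(fd);

  db.transaction(() => {
    insertLines(buf.toString("utf-8").split("\n").filter(Boolean));
    db.prepare(
      `INSERT INTO scanner_file_watermarks (file_path, byte_offset, last_scanned_ms)
       VALUES (?, ?, ?)
       ON CONFLICT(file_path) DO UPDATE SET
         byte_offset = excluded.byte_offset,
         last_scanned_ms = excluded.last_scanned_ms`,
    ).run(filePath, size, Date.now());
  })();
}
```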
src/archive/local.ts: LocalArchiveBackend implements ArchiveBackend with one gzipped transcript per (sessionId, source) at <baseDir>/<sessionId>/<source>.jsonl.gz. putSync() creates the directory and writes gzipSync(content); getSync() returns the gunzipped Buffer, or null if missing; hasSync() checks existence; list() walks the tree; stats() totals file count and bytes.

src/archive/index.ts: getArchiveBackend() lazily constructs a single process-wide LocalArchiveBackend rooted at <dataDir>/archive.

src/summary/llm.ts: Claude CLI integration.

- detectAgent() resolves the claude binary via `which` (3 s timeout) and caches the result, a path string or null, for the lifetime of the process.
- cleanEnv() copies process.env minus CLAUDECODE, ANTHROPIC_BASE_URL, and anything starting with CLAUDE_CODE_, so the child CLI cannot trigger recursive hooks or proxy loops.
- invokeLlm(prompt, opts) runs the CLI one-shot with -p <prompt> --output-format text --model <model, default haiku> --no-session-persistence --permission-mode auto, adding --append-system-prompt when a system prompt is given. With opts.withMcp it passes --strict-mcp-config plus an inline --mcp-config pointing a "panopticon" MCP server at dist/mcp/server.js and allows only the timeline, get, query, search, and status tools; without it, --tools "" disables tools entirely. The call goes through spawnSync with a 180 s default timeout and a 4 MB output buffer, and stdout is accepted even on non-zero exit because hooks may force exit code 1 after a successful response.
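A trimmed sketch of that invocation path, with the flag set reduced and names as in the embedded source; treat the exact CLI behavior as an assumption carried over from that source, not verified here:

```ts
import { spawnSync } from "node:child_process";

// Strip variables that would make the child Claude process re-enter
// panopticon's own hooks or API proxy.
function cleanEnv(): Record<string, string> {
  const env: Record<string, string> = {};
  for (const [k, v] of Object.entries(process.env)) {
    if (v === undefined) continue;
    if (k === "CLAUDECODE" || k === "ANTHROPIC_BASE_URL") continue;
    if (k.startsWith("CLAUDE_CODE_")) continue;
    env[k] = v;
  }
  return env;
}

// One-shot prompt. Output is accepted even on a non-zero exit, since
// hooks can force exit code 1 after a successful response.
function runClaude(claudePath: string, prompt: string): string | null {
  const result = spawnSync(
    claudePath,
    ["-p", prompt, "--output-format", "text", "--model", "haiku"],
    {
      env: cleanEnv(),
      stdio: ["ignore", "pipe", "pipe"],
      timeout: 180_000,
      maxBuffer: 4 * 1024 * 1024,
    },
  );
  if (result.signal) return null; // killed by timeout or signal
  const text = result.stdout?.toString().trim();
  return text || null;
}
```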
src/summary/loop.ts: idle-time session summaries. Constants: MIN_MESSAGES = 3 messages before a session is worth summarizing, SUMMARY_THRESHOLD = 20 messages of growth before re-summarizing, MAX_PER_CYCLE = 50 sessions per idle cycle, AGENT_TIMEOUT_MS = 120 s.

- The agent path prompts Claude (model sonnet, with the panopticon MCP server attached) under a system prompt that instructs it to read the session via the timeline, get, and query tools and emit 2-4 concise, search-optimized sentences naming concrete files, functions, and packages ("added FTS5 index on messages table", not "improved search"), outputting only the summary text.
- buildDeterministicSummary() is the no-LLM fallback: the first user prompt (first 200 characters), message and user counts, the top 5 tools by call count, and up to 10 distinct file paths from Write/Edit inputs, joined into one line.
- generateSummariesOnce() selects up to MAX_PER_CYCLE sessions that were never summarized, have grown by SUMMARY_THRESHOLD messages since the last summary, or ended after the last summary delta, then writes each summary with sync_dirty = 1 and an incremented sync_seq. A TODO disables the LLM path until backfill completes, so the deterministic builder currently handles every session.
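A sketch of the deterministic fallback built from two of the embedded queries (better-sqlite3; the full version also counts messages and extracts edited file paths):

```ts
import Database from "better-sqlite3";

// Assemble a searchable one-line summary from aggregates alone,
// with no LLM involved. Returns null when there is nothing to say.
function deterministicSummary(db: Database.Database, sessionId: string): string | null {
  const first = db
    .prepare(
      `SELECT SUBSTR(content, 1, 200) AS content FROM messages
        WHERE session_id = ? AND role = 'user' AND is_system = 0
        ORDER BY ordinal ASC LIMIT 1`,
    )
    .get(sessionId) as { content: string } | undefined;

  const tools = db
    .prepare(
      `SELECT tool_name, COUNT(*) AS cnt FROM tool_calls
        WHERE session_id = ? GROUP BY tool_name ORDER BY cnt DESC LIMIT 5`,
    )
    .all(sessionId) as Array<{ tool_name: string; cnt: number }>;

  const parts: string[] = [];
  if (first) parts.push(`Prompt: "${first.content}"`);
  if (tools.length > 0) {
    parts.push(`Tools: ${tools.map((t) => `${t.tool_name}(${t.cnt})`).join(", ")}`);
  }
  return parts.length > 0 ? parts.join(". ") : null;
}
```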
src/scanner/store.ts: persistence for parsed sessions, plus sync_id bookkeeping.

- SavedSyncIds carries sync_id snapshots keyed by natural keys (turns, events, and tool calls) so that a single-file reparse can restore upstream sync identity afterwards.
- upsertSession() derives project from the enclosing repository via resolveRepoFromCwd() (e.g. "fml-inc/panopticon") or falls back to the cwd basename, writes the unified sessions row (relationship_type defaults to "subagent" when a parent_session_id is present), and records session_cwds and session_repositories.
- insertTurns() and insertScannerEvents() are INSERT OR IGNORE batches, each wrapped in a transaction. After inserting events, every tool_call input is checked for an absolute file_path or path value, and each newly seen directory is resolved to a repository for greedy session-to-repo attribution (sketched below).
- updateSessionTotals() recomputes per-session token sums and turn_count from scanner_turns, plus tool_counts from tool_calls and event_type_counts from scanner_events (with the "progress:" prefix stripped), bumping sync_seq; it then kicks refreshIfStale() so pricing data is fresh for cost queries.
- readFileWatermark() and writeFileWatermark() read and upsert the per-file byte offset in scanner_file_watermarks (ON CONFLICT(file_path) DO UPDATE).
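The attribution pass as a standalone sketch; resolveRepoFromCwd and the recording callback stand in for the package's own helpers (resolveRepoFromCwd walks up from a directory to the enclosing git repo and branch):

```ts
import path from "node:path";

// Greedy repo attribution: any absolute file_path/path in a tool_call
// input ties the session to that file's repository. Directories are
// deduplicated so each one is resolved at most once per batch.
function attributeRepos(
  events: Array<{
    sessionId: string;
    eventType: string;
    timestampMs: number;
    toolInput?: string;
  }>,
  resolveRepoFromCwd: (dir: string) => { repo: string; branch?: string } | null,
  record: (sessionId: string, repo: string, tsMs: number, branch?: string) => void,
): void {
  const seen = new Set<string>();
  for (const e of events) {
    if (e.eventType !== "tool_call" || !e.toolInput) continue;
    try {
      const input = JSON.parse(e.toolInput);
      const fp = input.file_path ?? input.path;
      if (typeof fp !== "string" || !path.isAbsolute(fp)) continue;
      const dir = path.dirname(fp);
      if (seen.has(dir)) continue;
      seen.add(dir);
      const info = resolveRepoFromCwd(dir);
      if (info) record(e.sessionId, info.repo, e.timestampMs, info.branch);
    } catch {
      // malformed tool_input JSON: skip this event
    }
  }
}
```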
- snapshotSyncIds() collects sync_id rows for a session and its fork children. resetFileForReparse() clears the file's watermark and deletes the session's turns, events, tool_calls, and messages (including their messages_fts rows), along with any previously detected fork sessions from the same file, returning the snapshot. restoreSyncIds() reapplies it using the same natural keys as reparseAll().
- getTurnCount() and getMaxOrdinal() supply the starting turn index and message ordinal for incremental parses (getMaxOrdinal() returns -1 when a session has no messages). readArchivedSize() and writeArchivedSize() track how much of each raw file has been archived.
- insertMessages() writes messages (INSERT OR IGNORE, skipping the tool-call inserts when the message row already existed) and their tool calls in one transaction, mirroring each message's content into messages_fts. Empty assistant messages that only carry tool calls get a synthesized content line per call, such as "[Bash: npm test]", labeled from the input's description, command, pattern, file_path, query, prompt, or skill. Tool results found in user messages are matched back to tool calls by tool_use_id, which also yields duration_ms when both sides carry timestamps; results that arrive in a later batch backfill earlier tool_calls rows whose result_content is still NULL (sketched below).
- linkSubagentSessions() sets parent_session_id and relationship_type = 'subagent' on sessions whose ID appears in tool_calls.subagent_session_id, restricted to sessions with no relationship recorded yet; it returns the number of rows changed.
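The backfill is one guarded UPDATE per result; a sketch under the same schema:

```ts
import Database from "better-sqlite3";

// Tool results can land in a later scan batch than the tool_use that
// produced them. Fill in any earlier tool_calls rows whose result is
// still NULL, matching on tool_use_id.
function backfillToolResults(
  db: Database.Database,
  results: Map<string, { contentRaw: string; contentLength: number }>,
): void {
  const stmt = db.prepare(
    `UPDATE tool_calls
        SET result_content = ?, result_content_length = ?
      WHERE tool_use_id = ? AND result_content IS NULL`,
  );
  db.transaction(() => {
    for (const [toolUseId, r] of results) {
      stmt.run(r.contentRaw, r.contentLength, toolUseId);
    }
  })();
}
```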
,GAAG;AAAA,QACb,QAAQ,GAAG;AAAA,MACb,CAAC;AAAA,IACH;AAAA,EACF;AAEA,SAAO;AACT;AAUO,SAAS,oBACd,UACA,WACc;AACd,QAAM,KAAK,MAAM;AACjB,QAAM,QAAsB,YACxB,gBAAgB,SAAS,IACzB,EAAE,OAAO,CAAC,GAAG,QAAQ,CAAC,GAAG,WAAW,CAAC,EAAE;AAE3C,KAAG,QAAQ,yDAAyD,EAAE;AAAA,IACpE;AAAA,EACF;AACA,MAAI,WAAW;AACb,OAAG,QAAQ,gDAAgD,EAAE,IAAI,SAAS;AAC1E,OAAG,QAAQ,iDAAiD,EAAE;AAAA,MAC5D;AAAA,IACF;AACA,OAAG,QAAQ,6CAA6C,EAAE,IAAI,SAAS;AACvE,OAAG;AAAA,MACD;AAAA,IACF,EAAE,IAAI,SAAS;AACf,OAAG,QAAQ,2CAA2C,EAAE,IAAI,SAAS;AAErE,UAAM,oBACJ;AACF,OAAG;AAAA,MACD,kDAAkD,iBAAiB;AAAA,IACrE,EAAE,IAAI,SAAS;AACf,OAAG;AAAA,MACD,mDAAmD,iBAAiB;AAAA,IACtE,EAAE,IAAI,SAAS;AACf,OAAG;AAAA,MACD,+CAA+C,iBAAiB;AAAA,IAClE,EAAE,IAAI,SAAS;AACf,OAAG;AAAA,MACD,yFAAyF,iBAAiB;AAAA,IAC5G,EAAE,IAAI,SAAS;AACf,OAAG;AAAA,MACD,6CAA6C,iBAAiB;AAAA,IAChE,EAAE,IAAI,SAAS;AACf,OAAG;AAAA,MACD;AAAA,IACF,EAAE,IAAI,SAAS;AAAA,EACjB;AAEA,SAAO;AACT;AAMO,SAAS,eAAe,OAA2B;AACxD,MAAI,CAAC,MAAM,MAAM,UAAU,CAAC,MAAM,OAAO,UAAU,CAAC,MAAM,UAAU;AAClE;AAEF,QAAM,KAAK,MAAM;AACjB,QAAM,KAAK,GAAG,YAAY,MAAM;AAC9B,QAAI,MAAM,MAAM,SAAS,GAAG;AAC1B,YAAM,OAAO,GAAG;AAAA,QACd;AAAA,MACF;AACA,iBAAW,KAAK,MAAM,OAAO;AAC3B,aAAK,IAAI,EAAE,QAAQ,EAAE,WAAW,EAAE,QAAQ,EAAE,SAAS;AAAA,MACvD;AAAA,IACF;AAEA,QAAI,MAAM,OAAO,SAAS,GAAG;AAC3B,YAAM,OAAO,GAAG;AAAA,QACd;AAAA;AAAA;AAAA,MAGF;AACA,iBAAW,KAAK,MAAM,QAAQ;AAC5B,aAAK;AAAA,UACH,EAAE;AAAA,UACF,EAAE;AAAA,UACF,EAAE;AAAA,UACF,EAAE;AAAA,UACF,EAAE;AAAA,UACF,EAAE;AAAA,QACJ;AAAA,MACF;AAAA,IACF;AAEA,QAAI,MAAM,UAAU,SAAS,GAAG;AAC9B,YAAM,OAAO,GAAG;AAAA,QACd;AAAA;AAAA;AAAA;AAAA,MAIF;AACA,iBAAW,MAAM,MAAM,WAAW;AAChC,aAAK;AAAA,UACH,GAAG;AAAA,UACH,GAAG;AAAA,UACH,GAAG;AAAA,UACH,GAAG;AAAA,UACH,GAAG;AAAA,QACL;AAAA,MACF;AAAA,IACF;AAAA,EACF,CAAC;AACD,KAAG;AACL;AAIO,SAAS,aAAa,WAAmB,QAAwB;AACtE,QAAM,KAAK,MAAM;AACjB,QAAM,MAAM,GACT;AAAA,IACC;AAAA,EACF,EACC,IAAI,WAAW,MAAM;AACxB,SAAO,IAAI;AACb;AAIO,SAAS,iBAAiB,UAA0B;AACzD,QAAM,KAAK,MAAM;AACjB,QAAM,MAAM,GACT;AAAA,IACC;AAAA,EACF,EACC,IAAI,QAAQ;AACf,SAAO,KAAK,iBAAiB;AAC/B;AAEO,SAAS,kBAAkB,UAAkB,MAAoB;AACtE,QAAM,KAAK,MAAM;AACjB,KAAG;AAAA,IACD;AAAA,EACF,EAAE,IAAI,MAAM,QAAQ;AACtB;AAOA,SAAS,eAAe,WAAqC;AAC3D,SAAO,UACJ,IAAI,CAAC,OAAO;AACX,QAAI,QAAQ;AACZ,QAAI,GAAG,WAAW;AAChB,UAAI;AACF,cAAM,QAAQ,KAAK,MAAM,GAAG,SAAS;AACrC,gBACE,MAAM,eACN,MAAM,WACN,MAAM,WACN,MAAM,aACN,MAAM,SACN,MAAM,UACN,MAAM,SACN;AAAA,MACJ,QAAQ;AAAA,MAAC;AAAA,IACX;AACA,WAAO,QAAQ,IAAI,GAAG,QAAQ,KAAK,KAAK,MAAM,IAAI,GAAG,QAAQ;AAAA,EAC/D,CAAC,EACA,KAAK,IAAI;AACd;AAEA,IAAM,qBAAqB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAS3B,IAAM,uBAAuB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAgBtB,SAAS,eACd,UACA,qBAIM;AACN,MAAI,SAAS,WAAW,KAAK,CAAC,qBAAqB,KAAM;AACzD,QAAM,KAAK,MAAM;AAGjB,QAAM,gBAAgB,oBAAI,IAGxB;AAEF,MAAI,qBAAqB;AACvB,eAAW,CAAC,IAAI,MAAM,KAAK,qBAAqB;AAC9C,oBAAc,IAAI,IAAI,MAAM;AAAA,IAC9B;AAAA,EACF;AACA,aAAW,OAAO,UAAU;AAC1B,eAAW,CAAC,IAAI,MAAM,KAAK,IAAI,aAAa;AAC1C,oBAAc,IAAI,IAAI,MAAM;AAAA,IAC9B;AAAA,EACF;AAEA,QAAM,UAAU,GAAG,QAAQ,kBAAkB;AAC7C,QAAM,SAAS,GAAG,QAAQ,oBAAoB;AAC9C,QAAM,UAAU,GAAG;AAAA,IACjB;AAAA,EACF;AAEA,QAAM,KAAK,GAAG,YAAY,MAAM;AAC9B,eAAW,OAAO,UAAU;AAE1B,UAAI,UAAU,IAAI;AAClB,UAAI,CAAC,WAAW,IAAI,SAAS,eAAe,IAAI,UAAU,SAAS,GAAG;AACpE,kBAAU,eAAe,IAAI,SAAS;AAAA,MACxC;AAEA,YAAM,SAAS,QAAQ;AAAA,QACrB,IAAI;AAAA,QACJ,IAAI;AAAA,QACJ,IAAI;AAAA,QACJ;AAAA,QACA,IAAI,eAAe;AAAA,QACnB,IAAI,cAAc,IAAI;AAAA,QACtB,IAAI,aAAa,IAAI;AAAA,QACrB,IAAI;AAAA,QACJ,IAAI,WAAW,IAAI;AAAA,QACnB,IAAI,SAAS;AAAA,QACb,IAAI,cAAc;AAAA,QAClB,IAAI,iBAAiB;AAAA,QACrB,IAAI,gBAAgB;AAAA,QACpB,IAAI,mBAAmB,IAAI;AAAA,QAC3B,IAAI,kBAAkB,IAAI;AAAA,QAC1B,IAAI,QAAQ;AAAA,QACZ,IAAI,cAAc;AAAA,MACpB;AAGA,UAAI,OAAO,YAAY,EAAG;AAE1B,YAAM,YAAY,OAAO;AACzB,cAAQ,IAAI,WAAW,O
AAO;AAE9B,iBAAW,MAAM,IAAI,WAAW;AAE9B,cAAM,aAAa,cAAc,IAAI,GAAG,SAAS;AACjD,cAAM,aACJ,GAAG,eAAe,YAAY,cAC1B,WAAW,cAAc,GAAG,cAC5B;AACN,eAAO;AAAA,UACL;AAAA,UACA,IAAI;AAAA,UACJ,GAAG;AAAA,UACH,GAAG;AAAA,UACH,GAAG;AAAA,UACH,GAAG,aAAa;AAAA,UAChB,GAAG,aAAa;AAAA,UAChB,YAAY,iBAAiB;AAAA,UAC7B,YAAY,cAAc;AAAA,UAC1B,GAAG,qBAAqB;AAAA,UACxB,cAAc,QAAQ,cAAc,IAAI,aAAa;AAAA,QACvD;AAAA,MACF;AAAA,IACF;AAGA,QAAI,cAAc,OAAO,GAAG;AAC1B,YAAM,eAAe,GAAG;AAAA,QACtB;AAAA;AAAA;AAAA,MAGF;AACA,iBAAW,CAAC,WAAW,MAAM,KAAK,eAAe;AAC/C,qBAAa,IAAI,OAAO,YAAY,OAAO,eAAe,SAAS;AAAA,MACrE;AAAA,IACF;AAAA,EACF,CAAC;AACD,KAAG;AACL;AAOO,SAAS,uBAA+B;AAC7C,QAAM,KAAK,MAAM;AAGjB,QAAM,SAAS,GACZ;AAAA,IACC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAeF,EACC,IAAI;AACP,SAAO,OAAO;AAChB;AAKO,SAAS,cAAc,WAA2B;AACvD,QAAM,KAAK,MAAM;AACjB,QAAM,MAAM,GACT;AAAA,IACC;AAAA,EACF,EACC,IAAI,SAAS;AAChB,SAAO,IAAI,WAAW;AACxB;;;ALjqBA,IAAM,kBAAkB;AACxB,IAAM,qBAAqB;AAEpB,SAAS,WAGd;AACA,QAAM;AAEN,MAAI,eAAe;AACnB,MAAI,WAAW;AAEf,aAAW,UAAU,WAAW,GAAG;AACjC,QAAI,CAAC,OAAO,QAAS;AACrB,UAAM,SAAS,OAAO;AAEtB,eAAW,EAAE,SAAS,KAAK,OAAO,QAAQ,SAAS,GAAG;AACpD,UAAI,SAAS,kBAAkB,QAAQ;AACvC,UAAI,SAAS,OAAO,QAAQ,UAAU,UAAU,MAAM;AACtD,UAAI,CAAC,OAAQ;AAIb,UAAI;AACJ,UAAI,OAAO,oBAAoB,SAAS,GAAG;AACzC,uBAAe,oBAAoB,UAAU,OAAO,MAAM,SAAS;AACnE,iBAAS;AACT,iBAAS,OAAO,QAAQ,UAAU,UAAU,CAAC;AAC7C,YAAI,CAAC,OAAQ;AACb,YAAI,QAAQ,KAAK,aAAa,QAAQ,6BAA6B;AAAA,MACrE;AAEA;AAMA,UAAI,SAAS,KAAK,OAAO,MAAM,aAAa,CAAC,OAAO,iBAAiB;AACnE,cAAM,gBAAgB,aAAa,OAAO,KAAK,WAAW,MAAM;AAChE,YAAI,gBAAgB,GAAG;AACrB,uBAAa,QAAQ,aAAa;AAAA,QACpC;AAEA,YAAI,OAAO,SAAS,SAAS,GAAG;AAC9B,gBAAM,SAAS,cAAc,OAAO,KAAK,SAAS;AAClD,0BAAgB,QAAQ,SAAS,CAAC;AAAA,QACpC;AAAA,MACF;AAEA,UAAI,CAAC,OAAO,MAAM,WAAW;AAC3B,2BAAmB,UAAU,OAAO,aAAa;AACjD;AAAA,MACF;AAKA,YAAM,YAAY,OAAO,KAAK;AAC9B,YAAM,WAAW,OAAO;AACxB,YAAM,aAAa;AACnB,YAAM,KAAK,MAAM;AACjB,MACE,GAAG,YAAY,MAAM;AACnB,QAAAC,eAAc,UAAU,UAAU,MAAM;AAExC,YAAI,WAAW,MAAM,SAAS,GAAG;AAC/B,sBAAY,WAAW,OAAO,MAAM;AACpC,8BAAoB,SAAS;AAAA,QAC/B;AAEA,YAAI,WAAW,OAAO,SAAS,GAAG;AAChC,8BAAoB,WAAW,QAAQ,MAAM;AAAA,QAC/C;AAEA,YACE,WAAW,SAAS,SAAS,KAC7B,WAAW,qBAAqB,MAChC;AACA,yBAAe,WAAW,UAAU,WAAW,mBAAmB;AAClE,qCAA2B,SAAS;AAAA,QACtC;AAEA,2BAAmB,UAAU,WAAW,aAAa;AAAA,MACvD,CAAC,EACD;AAEF,kBAAY,OAAO,MAAM;AAGzB,UAAI,OAAO,OAAO;AAChB,mBAAW,QAAQ,OAAO,OAAO;AAC/B,cAAI,CAAC,KAAK,MAAM,UAAW;AAC3B,gBAAM,gBAAgB,KAAK,KAAK;AAChC,gBAAM,WAAW,KAAK;AACtB,UACE,GAAG,YAAY,MAAM;AACnB,YAAAA,eAAc,UAAU,UAAU,MAAM;AACxC,gBAAI,KAAK,MAAM,SAAS,GAAG;AACzB,0BAAY,KAAK,OAAO,MAAM;AAC9B,kCAAoB,aAAa;AAAA,YACnC;AACA,gBAAI,KAAK,OAAO,SAAS,GAAG;AAC1B,kCAAoB,KAAK,QAAQ,MAAM;AAAA,YACzC;AACA,gBAAI,KAAK,SAAS,SAAS,KAAK,KAAK,qBAAqB,MAAM;AAC9D,6BAAe,KAAK,UAAU,KAAK,mBAAmB;AACtD,yCAA2B,aAAa;AAAA,YAC1C;AAAA,UAEF,CAAC,EACD;AACF,sBAAY,KAAK,MAAM;AAAA,QACzB;AAAA,MACF;AAGA,UAAI,cAAc;AAChB,uBAAe,YAAY;AAAA,MAC7B;AAGA,UAAI;AACF,cAAM,WAAWC,IAAG,SAAS,QAAQ,EAAE;AACvC,cAAM,eAAe,iBAAiB,QAAQ;AAC9C,YAAI,WAAW,cAAc;AAC3B,gBAAM,aAAaA,IAAG,aAAa,QAAQ;AAC3C,4BAAkB,EAAE;AAAA,YAClB,OAAO,KAAK;AAAA,YACZ;AAAA,YACA;AAAA,UACF;AACA,4BAAkB,UAAU,QAAQ;AAAA,QACtC;AAAA,MACF,SAAS,YAAY;AAEnB,YAAI,QAAQ;AAAA,UACV,qBAAqB,QAAQ,KAAK,sBAAsB,QAAQ,WAAW,UAAU,UAAU;AAAA,QACjG;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAGA,MAAI,eAAe,GAAG;AACpB,UAAM,SAAS,qBAAqB;AACpC,QAAI,SAAS,GAAG;AACd,UAAI,QAAQ;AAAA,QACV,UAAU,MAAM,oBAAoB,SAAS,IAAI,MAAM,EAAE;AAAA,MAC3D;AAAA,IACF;AACA,QAAI,QAAQ,KAAK,WAAW,YAAY,WAAW,QAAQ,YAAY;AAAA,EACzE;AAEA,SAAO,EAAE,cAAc,SAAS;AAClC;AAEA,SAAS,aAAa,QAAqB,YAA0B;AACnE,WAAS,IAAI,GAAG,IAAI,OAAO,MAAM,QAAQ,KAAK;AAC5C,WAAO,MAAM,CAAC,EAAE,YAAY,aAAa;AAAA,EAC3C;AACF;AAEA,SAAS,gBAAgB,QAAqB,cAA4B;AACxE,WAAS,IAAI,GAAG,IAAI,OA
AO,SAAS,QAAQ,KAAK;AAC/C,WAAO,SAAS,CAAC,EAAE,UAAU,eAAe;AAAA,EAC9C;AACF;AAEO,SAAS,kBAAkB,MAAqC;AACrE,QAAM,SAAS,KAAK,kBAAkB;AACtC,QAAM,YAAY,KAAK,qBAAqB;AAE5C,MAAI,QAA8C;AAClD,MAAI,WAAW;AACf,MAAI,iBAAiB;AACrB,MAAI,QAAQ;AAEZ,WAAS,aAAa,SAAwB;AAC5C,QAAI,SAAU;AACd,UAAM,QAAQ,UAAU,YAAY;AACpC,YAAQ,WAAW,MAAM,KAAK,GAAG,KAAK;AACtC,QAAI,CAAC,KAAK,aAAa,MAAM,OAAO;AAClC,YAAM,MAAM;AAAA,IACd;AAAA,EACF;AAEA,WAAS,OAAa;AACpB,QAAI,SAAU;AAGd,QAAI,CAAC,gBAAgB;AACnB,uBAAiB;AACjB,UAAI,YAAY,GAAG;AACjB,YAAI,QAAQ,KAAK,wDAAmD;AACpE,eAAO,uBAAc,EAClB,KAAK,CAAC,EAAE,YAAAC,YAAW,MAAM;AACxB,cAAI;AACF,kBAAM,SAASA,YAAW,CAAC,QAAQ,IAAI,QAAQ,KAAK,GAAG,CAAC;AACxD,gBAAI,OAAO,SAAS;AAClB,iCAAmB;AAAA,YACrB,OAAO;AACL,kBAAI,QAAQ;AAAA,gBACV,mBAAmB,OAAO,SAAS,SAAS;AAAA,cAC9C;AAAA,YACF;AAAA,UACF,SAAS,KAAK;AACZ,gBAAI,QAAQ;AAAA,cACV,kBAAkB,eAAe,QAAQ,IAAI,UAAU,GAAG;AAAA,YAC5D;AAAA,UACF;AACA,uBAAa,IAAI;AAAA,QACnB,CAAC,EACA,MAAM,CAAC,QAAQ;AACd,cAAI,QAAQ;AAAA,YACV,yBAAyB,eAAe,QAAQ,IAAI,UAAU,GAAG;AAAA,UACnE;AACA,uBAAa,KAAK;AAAA,QACpB,CAAC;AACH;AAAA,MACF;AAEA,yBAAmB;AAAA,IACrB;AAEA,QAAI,UAAU;AACd,QAAI;AACF,YAAM,EAAE,SAAS,IAAI,SAAS;AAC9B,gBAAU,WAAW;AAErB,UAAI,CAAC,OAAO;AACV,gBAAQ;AACR,aAAK,UAAU;AAAA,MACjB;AAGA,UAAI,CAAC,WAAW,OAAO;AACrB,YAAI;AACF,gCAAsB,CAAC,QAAQ,IAAI,QAAQ,KAAK,GAAG,CAAC;AAAA,QACtD,SAAS,KAAK;AACZ,cAAI,QAAQ;AAAA,YACV,0BAA0B,eAAe,QAAQ,IAAI,UAAU,GAAG;AAAA,UACpE;AAAA,QACF;AAAA,MACF;AAAA,IACF,SAAS,KAAK;AACZ,UAAI,QAAQ;AAAA,QACV,eAAe,eAAe,QAAQ,IAAI,UAAU,GAAG;AAAA,MACzD;AAAA,IACF;AACA,QAAI,CAAC,UAAU;AACb,mBAAa,OAAO;AAAA,IACtB;AAAA,EACF;AAEA,SAAO;AAAA,IACL,QAAQ;AACN,UAAI,MAAO;AACX,iBAAW;AACX,UAAI,QAAQ,KAAK,kBAAkB;AACnC,WAAK;AAAA,IACP;AAAA,IACA,OAAO;AACL,iBAAW;AACX,UAAI,OAAO;AACT,qBAAa,KAAK;AAClB,gBAAQ;AACR,YAAI,QAAQ,KAAK,iBAAiB;AAAA,MACpC;AAAA,IACF;AAAA,EACF;AACF;;;ADpRA,IAAM,mBAAmB;AAAA,EACvB;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF;AAMA,IAAM,wBAAwB;AAAA,EAC5B;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF;AASA,SAAS,gBAAgB,UAAwB;AAC/C,aAAW,UAAU,CAAC,IAAI,QAAQ,MAAM,GAAG;AACzC,QAAI;AACF,MAAAC,IAAG,WAAW,WAAW,MAAM;AAAA,IACjC,QAAQ;AAAA,IAAC;AAAA,EACX;AACF;AAEA,SAAS,UAAU,QAAsB;AACvC,aAAW,UAAU,CAAC,QAAQ,MAAM,GAAG;AACrC,QAAI;AACF,MAAAA,IAAG,WAAW,SAAS,MAAM;AAAA,IAC/B,QAAQ;AAAA,IAAC;AAAA,EACX;AACF;AAEA,SAAS,WAAW,UAAqC;AACvD,QAAM,KAAK,IAAI,SAAS,QAAQ;AAChC,KAAG,OAAO,oBAAoB;AAC9B,KAAG,OAAO,qBAAqB;AAC/B,KAAG;AAAA,IAAS;AAAA,IAAc,CAAC,SACzB,OAAOC,YAAW,IAAI,EAAE,SAAS,IAAI;AAAA,EACvC;AACA,KAAG,KAAK,UAAU;AAClB,SAAO;AACT;AAaO,SAAS,WACdC,OAA6B,MAAM;AAAC,GACrB;AACf,QAAM,WAAW,OAAO;AACxB,QAAM,WAAW,GAAG,QAAQ;AAG5B,kBAAgB,QAAQ;AAExB,EAAAA,KAAI,4BAA4B;AAGhC,MAAI;AACJ,MAAI;AACF,aAAS,WAAW,QAAQ;AAC5B,WAAO,MAAM;AAAA,EACf,SAAS,KAAK;AACZ,oBAAgB,QAAQ;AACxB,WAAO;AAAA,MACL,SAAS;AAAA,MACT,cAAc;AAAA,MACd,UAAU;AAAA,MACV,OAAO,6BAA6B,GAAG;AAAA,IACzC;AAAA,EACF;AAGA,MAAI,kBAAkB;AACtB,MAAI;AACF,UAAM,QAAQ,IAAI,SAAS,QAAQ;AACnC,sBACE,MAAM,QAAQ,oCAAoC,EAAE,IAAI,EAGxD;AACF,UAAM,MAAM;AAAA,EACd,QAAQ;AAAA,EAAC;AAGT,UAAQ;AACR,QAAM,cAAc,OAAO;AAC3B,EAAC,OAA8B,SAAS;AAExC,MAAI,eAAe;AACnB,MAAI,WAAW;AACf,MAAI;AACF,UAAM,SAAS,SAAS;AACxB,mBAAe,OAAO;AACtB,eAAW,OAAO;AAAA,EACpB,SAAS,KAAK;AACZ,IAAC,OAA8B,SAAS;AACxC,YAAQ;AACR,UAAM;AACN,oBAAgB,QAAQ;AACxB,WAAO;AAAA,MACL,SAAS;AAAA,MACT,cAAc;AAAA,MACd,UAAU;AAAA,MACV,OAAO,6BAA6B,GAAG;AAAA,IACzC;AAAA,EACF;AAGA,QAAM,KAAK,MAAM;AACjB,QAAM,mBACJ,GAAG,QAAQ,oCAAoC,EAAE,IAAI,EACrD;AACF,UAAQ;AAGR,EAAC,OAA8B,SAAS;AAGxC,MAAI,qBAAqB,KAAK,kBAAkB,GAAG;AACjD,IAAAA;AAAA,MACE,0DAA0D,eAAe;AAAA,IAC3E;AACA,UAAM;AACN,oBAAgB,QAAQ;AACxB,WAAO;AAAA,MACL,SAAS;AAAA,MACT;AAAA,MACA;AAAA,MACA,O
AAO,qCAAqC,eAAe;AAAA,IAC7D;AAAA,EACF;AAGA,EAAAA,KAAI,6CAA6C;AACjD,MAAI;AACF,aAAS,IAAI,SAAS,QAAQ;AAC9B,WAAO,OAAO,oBAAoB;AAClC,WAAO;AAAA,MAAS;AAAA,MAAc,CAAC,SAC7B,OAAOD,YAAW,IAAI,EAAE,SAAS,IAAI;AAAA,IACvC;AACA,UAAM,cAAc,SAAS,QAAQ,MAAM,IAAI;AAC/C,WAAO,KAAK,oBAAoB,WAAW,aAAa;AAExD,UAAM,KAAK,OAAO,YAAY,MAAM;AAClC,iBAAW,SAAS,kBAAkB;AACpC,YAAI;AACF,iBAAO;AAAA,YACL,8BAA8B,KAAK,yBAAyB,KAAK;AAAA,UACnE;AAAA,QACF,SAAS,GAAG;AACV,UAAAC,KAAI,cAAc,KAAK,KAAK,aAAa,QAAQ,EAAE,UAAU,CAAC,EAAE;AAAA,QAClE;AAAA,MACF;AAGA,UAAI;AACF,eAAO;AAAA,UACL;AAAA,QACF;AAAA,MACF,SAAS,GAAG;AACV,QAAAA,KAAI,8BAA8B,aAAa,QAAQ,EAAE,UAAU,CAAC,EAAE;AAAA,MACxE;AAGA,YAAM,aAAa,sBAAsB;AAAA,QACvC,CAAC,QAAQ,GAAG,GAAG,sBAAsB,GAAG;AAAA,MAC1C,EAAE,KAAK,IAAI;AACX,UAAI;AACF,eAAO,KAAK;AAAA,qCACiB,UAAU;AAAA;AAAA;AAAA,SAGtC;AAAA,MACH,SAAS,GAAG;AACV,QAAAA,KAAI,oBAAoB,aAAa,QAAQ,EAAE,UAAU,CAAC,EAAE;AAAA,MAC9D;AAGA,UAAI;AACF,eAAO;AAAA,UACL;AAAA,QACF;AAAA,MACF,SAAS,GAAG;AACV,QAAAA,KAAI,2BAA2B,aAAa,QAAQ,EAAE,UAAU,CAAC,EAAE;AAAA,MACrE;AACA,UAAI;AACF,eAAO;AAAA,UACL;AAAA,QACF;AAAA,MACF,SAAS,GAAG;AACV,QAAAA,KAAI,mBAAmB,aAAa,QAAQ,EAAE,UAAU,CAAC,EAAE;AAAA,MAC7D;AAIA,UAAI;AACF,eAAO,KAAK;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,SAMX;AAAA,MACH,SAAS,GAAG;AACV,QAAAA,KAAI,4BAA4B,aAAa,QAAQ,EAAE,UAAU,CAAC,EAAE;AAAA,MACtE;AAEA,UAAI;AACF,eAAO,KAAK;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,SAQX;AAAA,MACH,SAAS,GAAG;AACV,QAAAA,KAAI,6BAA6B,aAAa,QAAQ,EAAE,UAAU,CAAC,EAAE;AAAA,MACvE;AAGA,UAAI;AACF,eAAO,KAAK;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,SAQX;AAAA,MACH,SAAS,GAAG;AACV,QAAAA,KAAI,yBAAyB,aAAa,QAAQ,EAAE,UAAU,CAAC,EAAE;AAAA,MACnE;AAAA,IACF,CAAC;AACD,OAAG;AAEH,WAAO,KAAK,wBAAwB;AACpC,WAAO,OAAO,kBAAkB,oBAAoB,EAAE;AACtD,WAAO,MAAM;AAAA,EACf,SAAS,KAAK;AACZ,IAAAA,KAAI,kCAAkC,GAAG,EAAE;AAC3C,UAAM;AACN,oBAAgB,QAAQ;AACxB,WAAO;AAAA,MACL,SAAS;AAAA,MACT;AAAA,MACA;AAAA,MACA,OAAO,+BAA+B,GAAG;AAAA,IAC3C;AAAA,EACF;AAGA,EAAAA,KAAI,4BAA4B;AAChC,MAAI;AACF,cAAU,QAAQ;AAClB,IAAAF,IAAG,WAAW,UAAU,QAAQ;AAChC,cAAU,QAAQ;AAAA,EACpB,SAAS,KAAK;AACZ,IAAAE,KAAI,qBAAqB,GAAG,EAAE;AAC9B,UAAM;AACN,oBAAgB,QAAQ;AACxB,WAAO;AAAA,MACL,SAAS;AAAA,MACT;AAAA,MACA;AAAA,MACA,OAAO,uBAAuB,GAAG;AAAA,IACnC;AAAA,EACF;AAGA,QAAM;AAEN,EAAAA;AAAA,IACE,qBAAqB,YAAY,WAAW,QAAQ,WAAW,gBAAgB;AAAA,EACjF;AAEA,SAAO,EAAE,SAAS,MAAM,cAAc,SAAS;AACjD;","names":["fs","gunzipSync","fs","path","path","path","log","path","upsertSession","path","upsertSession","fs","reparseAll","fs","gunzipSync","log"]}
@@ -0,0 +1,55 @@
+ import {
+   config,
+   ensureDataDir
+ } from "./chunk-K7YUPLES.js";
+
+ // src/unified-config.ts
+ import fs from "fs";
+ import path from "path";
+ var DEFAULT_RETENTION = {
+   maxAgeDays: 90,
+   maxSizeMb: 1e3
+ };
+ function defaultConfig() {
+   return { sync: { targets: [] }, retention: { ...DEFAULT_RETENTION } };
+ }
+ var CONFIG_FILE = "config.json";
+ function configPath() {
+   return path.join(config.dataDir, CONFIG_FILE);
+ }
+ function mergeDefaults(raw) {
+   const retention = {
+     ...DEFAULT_RETENTION,
+     ...raw.retention ?? {}
+   };
+   return {
+     hooksInstalled: raw.hooksInstalled,
+     sentryDsn: raw.sentryDsn,
+     sync: raw.sync ?? { targets: [] },
+     retention
+   };
+ }
+ function loadUnifiedConfig() {
+   try {
+     const raw = JSON.parse(fs.readFileSync(configPath(), "utf-8"));
+     return mergeDefaults(raw);
+   } catch {
+     return defaultConfig();
+   }
+ }
+ function saveUnifiedConfig(cfg) {
+   ensureDataDir();
+   const p = configPath();
+   fs.writeFileSync(p, `${JSON.stringify(cfg, null, 2)}
+ `, { mode: 384 });
+ }
+ function loadRetentionConfig() {
+   return loadUnifiedConfig().retention;
+ }
+
+ export {
+   loadUnifiedConfig,
+   saveUnifiedConfig,
+   loadRetentionConfig
+ };
+ //# sourceMappingURL=chunk-QK5442ZP.js.map
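
To make the exported surface concrete, a minimal usage sketch follows. It is not part of the published diff; it assumes the importing file sits beside the built chunk (so the relative specifier resolves as above) and that config.dataDir from chunk-K7YUPLES.js points at a writable directory.

// Hypothetical usage sketch -- not shipped with the package.
import {
  loadUnifiedConfig,
  saveUnifiedConfig,
  loadRetentionConfig
} from "./chunk-QK5442ZP.js";

// A missing or unparseable config.json falls back to the defaults:
// { sync: { targets: [] }, retention: { maxAgeDays: 90, maxSizeMb: 1000 } }.
const cfg = loadUnifiedConfig();

// saveUnifiedConfig writes pretty-printed JSON with a trailing newline and
// file mode 384 (octal 0o600, owner read/write only), as in the chunk above.
cfg.retention.maxAgeDays = 30;
saveUnifiedConfig(cfg);

console.log(loadRetentionConfig()); // { maxAgeDays: 30, maxSizeMb: 1000 }
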
@@ -0,0 +1 @@
+ {"version":3,"sources":["../src/unified-config.ts"],"sourcesContent":["import fs from \"node:fs\";\nimport path from \"node:path\";\nimport { config, ensureDataDir } from \"./config.js\";\nimport type { SyncFilter, SyncTarget } from \"./sync/types.js\";\n\n// ── Types ────────────────────────────────────────────────────────────────────\n\nexport interface RetentionConfig {\n maxAgeDays: number;\n maxSizeMb: number;\n /** Delete synced rows older than this many days. Undefined = disabled. */\n syncedMaxAgeDays?: number;\n}\n\nexport interface UnifiedConfig {\n /** Whether panopticon hooks are installed (plugin mode). When true, OTLP sync\n * filters out body types that hooks already cover to avoid double-counting. */\n hooksInstalled?: boolean;\n /** Sentry DSN for error reporting. When set, uncaught exceptions and key\n * error paths are reported to Sentry. */\n sentryDsn?: string;\n sync: {\n targets: SyncTarget[];\n filter?: SyncFilter;\n };\n retention: RetentionConfig;\n}\n\n// ── Defaults ─────────────────────────────────────────────────────────────────\n\nconst DEFAULT_RETENTION: RetentionConfig = {\n maxAgeDays: 90,\n maxSizeMb: 1000,\n};\n\nfunction defaultConfig(): UnifiedConfig {\n return { sync: { targets: [] }, retention: { ...DEFAULT_RETENTION } };\n}\n\n// ── Paths ────────────────────────────────────────────────────────────────────\n\nconst CONFIG_FILE = \"config.json\";\n\nfunction configPath(): string {\n return path.join(config.dataDir, CONFIG_FILE);\n}\n\n// ── Load / Save ──────────────────────────────────────────────────────────────\n\nfunction mergeDefaults(raw: Partial<UnifiedConfig>): UnifiedConfig {\n const retention: RetentionConfig = {\n ...DEFAULT_RETENTION,\n ...(raw.retention ?? {}),\n };\n return {\n hooksInstalled: raw.hooksInstalled,\n sentryDsn: raw.sentryDsn,\n sync: raw.sync ?? { targets: [] },\n retention,\n };\n}\n\nexport function loadUnifiedConfig(): UnifiedConfig {\n try {\n const raw = JSON.parse(fs.readFileSync(configPath(), \"utf-8\"));\n return mergeDefaults(raw);\n } catch {\n return defaultConfig();\n }\n}\n\nexport function saveUnifiedConfig(cfg: UnifiedConfig): void {\n ensureDataDir();\n const p = configPath();\n fs.writeFileSync(p, `${JSON.stringify(cfg, null, 2)}\\n`, { mode: 0o600 });\n}\n\n// ── Convenience ──────────────────────────────────────────────────────────────\n\nexport function loadRetentionConfig(): RetentionConfig {\n return loadUnifiedConfig().retention;\n}\n"],"mappings":";;;;;;AAAA,OAAO,QAAQ;AACf,OAAO,UAAU;AA6BjB,IAAM,oBAAqC;AAAA,EACzC,YAAY;AAAA,EACZ,WAAW;AACb;AAEA,SAAS,gBAA+B;AACtC,SAAO,EAAE,MAAM,EAAE,SAAS,CAAC,EAAE,GAAG,WAAW,EAAE,GAAG,kBAAkB,EAAE;AACtE;AAIA,IAAM,cAAc;AAEpB,SAAS,aAAqB;AAC5B,SAAO,KAAK,KAAK,OAAO,SAAS,WAAW;AAC9C;AAIA,SAAS,cAAc,KAA4C;AACjE,QAAM,YAA6B;AAAA,IACjC,GAAG;AAAA,IACH,GAAI,IAAI,aAAa,CAAC;AAAA,EACxB;AACA,SAAO;AAAA,IACL,gBAAgB,IAAI;AAAA,IACpB,WAAW,IAAI;AAAA,IACf,MAAM,IAAI,QAAQ,EAAE,SAAS,CAAC,EAAE;AAAA,IAChC;AAAA,EACF;AACF;AAEO,SAAS,oBAAmC;AACjD,MAAI;AACF,UAAM,MAAM,KAAK,MAAM,GAAG,aAAa,WAAW,GAAG,OAAO,CAAC;AAC7D,WAAO,cAAc,GAAG;AAAA,EAC1B,QAAQ;AACN,WAAO,cAAc;AAAA,EACvB;AACF;AAEO,SAAS,kBAAkB,KAA0B;AAC1D,gBAAc;AACd,QAAM,IAAI,WAAW;AACrB,KAAG,cAAc,GAAG,GAAG,KAAK,UAAU,KAAK,MAAM,CAAC,CAAC;AAAA,GAAM,EAAE,MAAM,IAAM,CAAC;AAC1E;AAIO,SAAS,sBAAuC;AACrD,SAAO,kBAAkB,EAAE;AAC7B;","names":[]}