@goondocks/myco 0.6.4 → 0.6.5

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (88)
  1. package/.claude-plugin/marketplace.json +1 -1
  2. package/.claude-plugin/plugin.json +1 -1
  3. package/dist/{chunk-YIQLYIHW.js → chunk-4B5RO2YV.js} +4 -4
  4. package/dist/{chunk-7WHF2OIZ.js → chunk-4DYD4HHG.js} +25 -7
  5. package/dist/chunk-4DYD4HHG.js.map +1 -0
  6. package/dist/{chunk-NLUE6CYG.js → chunk-54WVLTKD.js} +3 -3
  7. package/dist/{chunk-NL6WQO56.js → chunk-5LMRZDH3.js} +2 -2
  8. package/dist/{chunk-O6PERU7U.js → chunk-AHZN4Z34.js} +2 -2
  9. package/dist/{chunk-J4D4CROB.js → chunk-DYDBF5W6.js} +5 -1
  10. package/dist/chunk-DYDBF5W6.js.map +1 -0
  11. package/dist/{chunk-Z74SDEKE.js → chunk-F7GAYVWF.js} +2 -2
  12. package/dist/{chunk-H7PRCVGQ.js → chunk-F7PGDD2X.js} +2 -2
  13. package/dist/{chunk-2ZIBCEYO.js → chunk-LEK6DEAE.js} +3 -3
  14. package/dist/{chunk-QLUE3BUL.js → chunk-O6TBHGVO.js} +9 -2
  15. package/dist/chunk-O6TBHGVO.js.map +1 -0
  16. package/dist/{chunk-FPRXMJLT.js → chunk-OEGZ5YTJ.js} +2 -2
  17. package/dist/{chunk-4XVKZ3WA.js → chunk-TK7A4RX7.js} +10 -3
  18. package/dist/chunk-TK7A4RX7.js.map +1 -0
  19. package/dist/{chunk-UP4P4OAA.js → chunk-V6BJVYNH.js} +2 -2
  20. package/dist/{chunk-QN4W3JUA.js → chunk-XH34FX4C.js} +2 -2
  21. package/dist/{chunk-YTFXA4RX.js → chunk-YRIIBPJD.js} +3 -3
  22. package/dist/{cli-IHILSS6N.js → cli-OJYHLO4Y.js} +20 -20
  23. package/dist/{client-AGFNR2S4.js → client-SS3C5MF6.js} +5 -5
  24. package/dist/{curate-3D4GHKJH.js → curate-4CKEMOPV.js} +6 -6
  25. package/dist/{detect-providers-XEP4QA3R.js → detect-providers-LFIVJYQO.js} +3 -3
  26. package/dist/{digest-7HLJXL77.js → digest-ZLARHLLY.js} +8 -8
  27. package/dist/{init-ARQ53JOR.js → init-3LVKVQ4L.js} +5 -5
  28. package/dist/{logs-IENORIYR.js → logs-6CWVP574.js} +3 -3
  29. package/dist/{main-6AGPIMH2.js → main-RB727YRP.js} +149 -28
  30. package/dist/main-RB727YRP.js.map +1 -0
  31. package/dist/{rebuild-Q2ACEB6F.js → rebuild-QWVVCBCZ.js} +6 -6
  32. package/dist/{reprocess-CDEFGQOV.js → reprocess-YG3WLUI2.js} +8 -8
  33. package/dist/{restart-XCMILOL5.js → restart-UIP7US4U.js} +6 -6
  34. package/dist/{search-7W25SKCB.js → search-BQLBW5CS.js} +4 -4
  35. package/dist/{server-6UDN35QN.js → server-43KSJ65Q.js} +77 -29
  36. package/dist/{server-6UDN35QN.js.map → server-43KSJ65Q.js.map} +1 -1
  37. package/dist/{session-start-K6IGAC7H.js → session-start-6SHGT2AW.js} +6 -6
  38. package/dist/{setup-digest-X5PN27F4.js → setup-digest-X735EZSD.js} +5 -5
  39. package/dist/{setup-llm-S5OHQJXK.js → setup-llm-QBSTQO7N.js} +5 -5
  40. package/dist/src/cli.js +4 -4
  41. package/dist/src/daemon/main.js +4 -4
  42. package/dist/src/hooks/post-tool-use.js +5 -5
  43. package/dist/src/hooks/session-end.js +5 -5
  44. package/dist/src/hooks/session-start.js +4 -4
  45. package/dist/src/hooks/stop.js +5 -5
  46. package/dist/src/hooks/user-prompt-submit.js +5 -5
  47. package/dist/src/mcp/server.js +4 -4
  48. package/dist/{stats-TTSDXGJV.js → stats-QBLIEFWL.js} +6 -6
  49. package/dist/ui/assets/index-CjWGVHhF.css +1 -0
  50. package/dist/ui/assets/{index-CMSMi4Jb.js → index-Cq-H7wgE.js} +20 -20
  51. package/dist/ui/index.html +2 -2
  52. package/dist/{verify-TOWQHPBX.js → verify-X272WGBD.js} +4 -4
  53. package/dist/{version-36RVCQA6.js → version-XE4GYTBV.js} +4 -4
  54. package/package.json +1 -1
  55. package/dist/chunk-4XVKZ3WA.js.map +0 -1
  56. package/dist/chunk-7WHF2OIZ.js.map +0 -1
  57. package/dist/chunk-J4D4CROB.js.map +0 -1
  58. package/dist/chunk-QLUE3BUL.js.map +0 -1
  59. package/dist/main-6AGPIMH2.js.map +0 -1
  60. package/dist/ui/assets/index-08wKT7wS.css +0 -1
  61. /package/dist/{chunk-YIQLYIHW.js.map → chunk-4B5RO2YV.js.map} +0 -0
  62. /package/dist/{chunk-NLUE6CYG.js.map → chunk-54WVLTKD.js.map} +0 -0
  63. /package/dist/{chunk-NL6WQO56.js.map → chunk-5LMRZDH3.js.map} +0 -0
  64. /package/dist/{chunk-O6PERU7U.js.map → chunk-AHZN4Z34.js.map} +0 -0
  65. /package/dist/{chunk-Z74SDEKE.js.map → chunk-F7GAYVWF.js.map} +0 -0
  66. /package/dist/{chunk-H7PRCVGQ.js.map → chunk-F7PGDD2X.js.map} +0 -0
  67. /package/dist/{chunk-2ZIBCEYO.js.map → chunk-LEK6DEAE.js.map} +0 -0
  68. /package/dist/{chunk-FPRXMJLT.js.map → chunk-OEGZ5YTJ.js.map} +0 -0
  69. /package/dist/{chunk-UP4P4OAA.js.map → chunk-V6BJVYNH.js.map} +0 -0
  70. /package/dist/{chunk-QN4W3JUA.js.map → chunk-XH34FX4C.js.map} +0 -0
  71. /package/dist/{chunk-YTFXA4RX.js.map → chunk-YRIIBPJD.js.map} +0 -0
  72. /package/dist/{cli-IHILSS6N.js.map → cli-OJYHLO4Y.js.map} +0 -0
  73. /package/dist/{client-AGFNR2S4.js.map → client-SS3C5MF6.js.map} +0 -0
  74. /package/dist/{curate-3D4GHKJH.js.map → curate-4CKEMOPV.js.map} +0 -0
  75. /package/dist/{detect-providers-XEP4QA3R.js.map → detect-providers-LFIVJYQO.js.map} +0 -0
  76. /package/dist/{digest-7HLJXL77.js.map → digest-ZLARHLLY.js.map} +0 -0
  77. /package/dist/{init-ARQ53JOR.js.map → init-3LVKVQ4L.js.map} +0 -0
  78. /package/dist/{logs-IENORIYR.js.map → logs-6CWVP574.js.map} +0 -0
  79. /package/dist/{rebuild-Q2ACEB6F.js.map → rebuild-QWVVCBCZ.js.map} +0 -0
  80. /package/dist/{reprocess-CDEFGQOV.js.map → reprocess-YG3WLUI2.js.map} +0 -0
  81. /package/dist/{restart-XCMILOL5.js.map → restart-UIP7US4U.js.map} +0 -0
  82. /package/dist/{search-7W25SKCB.js.map → search-BQLBW5CS.js.map} +0 -0
  83. /package/dist/{session-start-K6IGAC7H.js.map → session-start-6SHGT2AW.js.map} +0 -0
  84. /package/dist/{setup-digest-X5PN27F4.js.map → setup-digest-X735EZSD.js.map} +0 -0
  85. /package/dist/{setup-llm-S5OHQJXK.js.map → setup-llm-QBSTQO7N.js.map} +0 -0
  86. /package/dist/{stats-TTSDXGJV.js.map → stats-QBLIEFWL.js.map} +0 -0
  87. /package/dist/{verify-TOWQHPBX.js.map → verify-X272WGBD.js.map} +0 -0
  88. /package/dist/{version-36RVCQA6.js.map → version-XE4GYTBV.js.map} +0 -0
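All 88 files above sit under dist/ (plus plugin metadata), and most entries are pure renames where only the bundler's content-hash suffix changed (e.g. chunk-7WHF2OIZ → chunk-4DYD4HHG); a hash change in one chunk cascades into every chunk and entry point that imports it. To reproduce this listing locally, npm's built-in diff command can be used (requires npm 7 or later; it prints a unified diff of the two published tarballs):

  npm diff --diff=@goondocks/myco@0.6.4 --diff=@goondocks/myco@0.6.5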
package/dist/ui/index.html CHANGED
@@ -5,8 +5,8 @@
  <link rel="icon" type="image/svg+xml" href="/favicon.svg" />
  <meta name="viewport" content="width=device-width, initial-scale=1.0" />
  <title>Myco</title>
- <script type="module" crossorigin src="/assets/index-CMSMi4Jb.js"></script>
- <link rel="stylesheet" crossorigin href="/assets/index-08wKT7wS.css">
+ <script type="module" crossorigin src="/assets/index-Cq-H7wgE.js"></script>
+ <link rel="stylesheet" crossorigin href="/assets/index-CjWGVHhF.css">
  </head>
  <body>
  <div id="root"></div>
package/dist/{verify-TOWQHPBX.js → verify-X272WGBD.js} RENAMED
@@ -2,13 +2,13 @@ import { createRequire as __cr } from 'node:module'; const require = __cr(import
  import {
  createEmbeddingProvider,
  createLlmProvider
- } from "./chunk-NLUE6CYG.js";
- import "./chunk-7WHF2OIZ.js";
+ } from "./chunk-54WVLTKD.js";
+ import "./chunk-4DYD4HHG.js";
  import {
  loadConfig
  } from "./chunk-HYVT345Y.js";
  import "./chunk-ERG2IEWX.js";
- import "./chunk-J4D4CROB.js";
+ import "./chunk-DYDBF5W6.js";
  import "./chunk-PZUWP5VK.js";

  // src/cli/verify.ts
@@ -47,4 +47,4 @@ async function run(_args, vaultDir) {
  export {
  run
  };
- //# sourceMappingURL=verify-TOWQHPBX.js.map
+ //# sourceMappingURL=verify-X272WGBD.js.map
package/dist/{version-36RVCQA6.js → version-XE4GYTBV.js} RENAMED
@@ -1,11 +1,11 @@
  import { createRequire as __cr } from 'node:module'; const require = __cr(import.meta.url);
  import {
  getPluginVersion
- } from "./chunk-QN4W3JUA.js";
- import "./chunk-Z74SDEKE.js";
- import "./chunk-J4D4CROB.js";
+ } from "./chunk-XH34FX4C.js";
+ import "./chunk-F7GAYVWF.js";
+ import "./chunk-DYDBF5W6.js";
  import "./chunk-PZUWP5VK.js";
  export {
  getPluginVersion
  };
- //# sourceMappingURL=version-36RVCQA6.js.map
+ //# sourceMappingURL=version-XE4GYTBV.js.map
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@goondocks/myco",
- "version": "0.6.4",
+ "version": "0.6.5",
  "description": "Collective agent intelligence — Claude Code plugin",
  "type": "module",
  "main": "dist/index.js",
@@ -1 +0,0 @@
- {"version":3,"sources":["../src/daemon/digest.ts","../src/daemon/trace.ts","../src/daemon/processor.ts","../src/vault/observations.ts","../src/services/vault-ops.ts","../src/intelligence/batch.ts"],"sourcesContent":["/**\n * DigestEngine — synthesizes vault knowledge into tiered context extracts.\n * Metabolism — adaptive timer that throttles digest cycles based on activity.\n */\n\nimport fs from 'node:fs';\nimport path from 'node:path';\nimport crypto from 'node:crypto';\nimport YAML from 'yaml';\n\nimport type { MycoIndex, IndexedNote } from '@myco/index/sqlite.js';\nimport type { LlmProvider, LlmRequestOptions } from '@myco/intelligence/llm.js';\nimport type { MycoConfig } from '@myco/config/schema.js';\nimport { loadPrompt } from '@myco/prompts/index.js';\nimport { stripReasoningTokens } from '@myco/intelligence/response.js';\nimport { stripFrontmatter } from '@myco/vault/frontmatter.js';\nimport { readLastTimestamp, appendTraceRecord } from './trace.js';\nimport {\n estimateTokens,\n CHARS_PER_TOKEN,\n DIGEST_TIERS,\n DIGEST_TIER_MIN_CONTEXT,\n DIGEST_SUBSTRATE_TYPE_WEIGHTS,\n DIGEST_LLM_REQUEST_TIMEOUT_MS,\n LLM_REASONING_MODE,\n} from '@myco/constants.js';\n\n// --- Interfaces ---\n\nexport interface DigestCycleResult {\n cycleId: string;\n timestamp: string;\n substrate: {\n sessions: string[];\n spores: string[];\n plans: string[];\n artifacts: string[];\n team: string[];\n };\n tiersGenerated: number[];\n model: string;\n durationMs: number;\n tokensUsed: number;\n}\n\n/** Simple log function signature for digest progress reporting. */\nexport type DigestLogFn = (level: 'debug' | 'info' | 'warn', message: string, data?: Record<string, unknown>) => void;\n\nexport interface DigestCycleOptions {\n /** Process all substrate regardless of last cycle timestamp. */\n fullReprocess?: boolean;\n /** Only generate these tiers (default: all eligible). */\n tiers?: number[];\n /** Skip previous extract — start from clean slate. */\n cleanSlate?: boolean;\n}\n\nexport interface DigestEngineConfig {\n vaultDir: string;\n index: MycoIndex;\n llmProvider: LlmProvider;\n config: MycoConfig;\n log?: DigestLogFn;\n}\n\n// --- Constants ---\n\n/** Token overhead estimate for previous extract section wrapper. */\nconst PREVIOUS_EXTRACT_OVERHEAD_TOKENS = 50;\n\n/** Safety margin for context window — our CHARS_PER_TOKEN=4 heuristic significantly\n * underestimates real token counts (observed ~3.2 chars/token for mixed content).\n * 0.70 provides a safe buffer: 32K * 0.70 = 22.4K usable tokens. */\nconst CONTEXT_SAFETY_MARGIN = 0.70;\n\n/** Types that are digest output — excluded from substrate to avoid self-digestion. */\nconst EXTRACT_TYPE = 'extract';\n\n// --- DigestEngine ---\n\nexport class DigestEngine {\n private vaultDir: string;\n private index: MycoIndex;\n private llm: LlmProvider;\n private config: MycoConfig;\n private log: DigestLogFn;\n private lastCycleTimestampCache: string | null | undefined = undefined;\n private cycleInProgress = false;\n\n /** Whether a digest cycle is currently running. */\n get isCycleInProgress(): boolean {\n return this.cycleInProgress;\n }\n\n /** Hooks that run before each digest cycle (e.g., consolidation). */\n private prePassHooks: Array<{ name: string; fn: () => Promise<void> }> = [];\n\n /** Hooks that run after each successful digest cycle. 
*/\n private postPassHooks: Array<{ name: string; fn: (result: DigestCycleResult) => Promise<void> }> = [];\n\n constructor(engineConfig: DigestEngineConfig) {\n this.vaultDir = engineConfig.vaultDir;\n this.index = engineConfig.index;\n this.llm = engineConfig.llmProvider;\n this.config = engineConfig.config;\n this.log = engineConfig.log ?? (() => {});\n }\n\n /** Register a hook that runs before each digest cycle. Best-effort — errors are logged, not thrown. */\n registerPrePass(name: string, fn: () => Promise<void>): void {\n this.prePassHooks.push({ name, fn });\n }\n\n /** Register a hook that runs after each successful digest cycle. Best-effort — errors are logged, not thrown. */\n registerPostPass(name: string, fn: (result: DigestCycleResult) => Promise<void>): void {\n this.postPassHooks.push({ name, fn });\n }\n\n /**\n * Query index for recent vault notes to feed into the digest.\n * Filters out extract notes (our own output) and caps at max_notes_per_cycle.\n */\n discoverSubstrate(lastCycleTimestamp: string | null): IndexedNote[] {\n const maxNotes = this.config.digest.substrate.max_notes_per_cycle;\n\n const notes = lastCycleTimestamp\n ? this.index.query({ updatedSince: lastCycleTimestamp, limit: maxNotes })\n : this.index.query({ limit: maxNotes });\n\n // Guard against self-digestion: extract files are not currently indexed,\n // but this filter prevents feedback loops if they ever are (e.g., via rebuild)\n const filtered = notes\n .filter((n) => n.type !== EXTRACT_TYPE)\n .filter((n) => {\n if (n.type !== 'spore') return true;\n const status = n.frontmatter.status as string | undefined;\n return !status || status === 'active';\n });\n\n // Sort by type weight (descending) then by recency (descending)\n filtered.sort((a, b) => {\n const weightA = DIGEST_SUBSTRATE_TYPE_WEIGHTS[a.type] ?? 0;\n const weightB = DIGEST_SUBSTRATE_TYPE_WEIGHTS[b.type] ?? 
0;\n if (weightB !== weightA) return weightB - weightA;\n // More recent first — created is ISO string, lexicographic sort works\n return b.created.localeCompare(a.created);\n });\n\n return filtered.slice(0, maxNotes);\n }\n\n /**\n * Filter configured tiers by the context window available.\n * Only tiers whose minimum context requirement is met are eligible.\n */\n getEligibleTiers(): number[] {\n const contextWindow = this.config.digest.intelligence.context_window;\n return DIGEST_TIERS.filter((tier) => {\n const minContext = DIGEST_TIER_MIN_CONTEXT[tier];\n return minContext !== undefined && minContext <= contextWindow;\n });\n }\n\n /**\n * Format notes compactly for inclusion in the digest prompt.\n * Stops adding notes once the token budget is exceeded.\n */\n formatSubstrate(notes: IndexedNote[], tokenBudget: number): string {\n const charBudget = tokenBudget * CHARS_PER_TOKEN;\n const parts: string[] = [];\n let usedChars = 0;\n\n for (const note of notes) {\n const entry = `### [${note.type}] ${note.id} — \"${note.title}\"\\n${note.content}`;\n if (usedChars + entry.length > charBudget && parts.length > 0) break;\n parts.push(entry);\n usedChars += entry.length;\n }\n\n return parts.join('\\n\\n');\n }\n\n /**\n * Read a previously generated extract for a given tier.\n * Returns the body (stripped of YAML frontmatter), or null if not found.\n */\n readPreviousExtract(tier: number): string | null {\n const extractPath = path.join(this.vaultDir, 'digest', `extract-${tier}.md`);\n let content: string;\n try {\n content = fs.readFileSync(extractPath, 'utf-8');\n } catch {\n return null;\n }\n\n return stripFrontmatter(content).body;\n }\n\n /**\n * Write a digest extract to the vault with YAML frontmatter.\n * Uses atomic write pattern (temp file + rename).\n */\n writeExtract(\n tier: number,\n body: string,\n cycleId: string,\n model: string,\n substrateCount: number,\n substrateNotes?: string[],\n tokensUsed?: number,\n ): void {\n const digestDir = path.join(this.vaultDir, 'digest');\n fs.mkdirSync(digestDir, { recursive: true });\n\n const frontmatter: Record<string, unknown> = {\n type: EXTRACT_TYPE,\n tier,\n generated: new Date().toISOString(),\n cycle_id: cycleId,\n substrate_count: substrateCount,\n model,\n };\n if (substrateNotes && substrateNotes.length > 0) frontmatter.substrate_notes = substrateNotes;\n if (tokensUsed !== undefined) frontmatter.tokens_used = tokensUsed;\n\n const fmYaml = YAML.stringify(frontmatter, {\n defaultStringType: 'QUOTE_DOUBLE',\n defaultKeyType: 'PLAIN',\n }).trim();\n const file = `---\\n${fmYaml}\\n---\\n\\n${body}\\n`;\n\n const fullPath = path.join(digestDir, `extract-${tier}.md`);\n const tmpPath = `${fullPath}.tmp`;\n fs.writeFileSync(tmpPath, file, 'utf-8');\n fs.renameSync(tmpPath, fullPath);\n }\n\n /**\n * Append a digest cycle result as a JSON line to trace.jsonl.\n */\n appendTrace(record: DigestCycleResult): void {\n const tracePath = path.join(this.vaultDir, 'digest', 'trace.jsonl');\n appendTraceRecord(tracePath, record as unknown as Record<string, unknown>);\n this.lastCycleTimestampCache = record.timestamp;\n }\n\n /**\n * Read the last cycle timestamp from trace.jsonl.\n * Cached in memory after first read — subsequent calls are O(1).\n */\n getLastCycleTimestamp(): string | null {\n if (this.lastCycleTimestampCache !== undefined) return this.lastCycleTimestampCache;\n\n const tracePath = path.join(this.vaultDir, 'digest', 'trace.jsonl');\n this.lastCycleTimestampCache = readLastTimestamp(tracePath);\n return 
this.lastCycleTimestampCache;\n }\n\n /**\n * Run a full digest cycle: discover substrate, generate extracts for each tier.\n * Returns the cycle result, or null if no substrate was found.\n */\n async runCycle(opts?: DigestCycleOptions): Promise<DigestCycleResult | null> {\n if (this.cycleInProgress) {\n this.log('debug', 'Cycle already in progress — skipping');\n return null;\n }\n this.cycleInProgress = true;\n\n try {\n // Ensure model is loaded BEFORE pre-pass hooks (e.g., consolidation).\n // Pre-pass hooks share the digest LLM provider — without ensureLoaded first,\n // requests go with just the model name (no instance ID), causing LM Studio\n // to spawn duplicate instances for each concurrent request.\n if (this.llm.ensureLoaded) {\n const { context_window: contextWindow, gpu_kv_cache: gpuKvCache } = this.config.digest.intelligence;\n this.log('debug', 'Verifying digest model', { contextWindow, gpuKvCache });\n await this.llm.ensureLoaded(contextWindow, gpuKvCache);\n }\n\n // Run pre-pass hooks (e.g., consolidation) before discovering substrate\n for (const hook of this.prePassHooks) {\n try {\n await hook.fn();\n } catch (err) {\n this.log('warn', `Pre-pass hook \"${hook.name}\" failed`, { error: (err as Error).message });\n }\n }\n\n return await this.runCycleInternal(opts);\n } finally {\n this.cycleInProgress = false;\n }\n }\n\n private async runCycleInternal(opts?: DigestCycleOptions): Promise<DigestCycleResult | null> {\n\n const startTime = Date.now();\n const fullReprocess = opts?.fullReprocess ?? false;\n const lastTimestamp = fullReprocess ? null : this.getLastCycleTimestamp();\n const substrate = this.discoverSubstrate(lastTimestamp);\n\n this.log('debug', 'Discovering substrate', { lastTimestamp: lastTimestamp ?? 'full reprocess', substrateCount: substrate.length });\n if (substrate.length === 0) {\n this.log('debug', 'No substrate found — skipping cycle');\n return null;\n }\n\n this.log('info', `Starting digest cycle`, { substrateCount: substrate.length, fullReprocess });\n const cycleId = crypto.randomUUID();\n const allEligible = this.getEligibleTiers();\n const eligibleTiers = opts?.tiers\n ? allEligible.filter((t) => opts.tiers!.includes(t))\n : allEligible;\n this.log('debug', `Eligible tiers: [${eligibleTiers.join(', ')}]`);\n const tiersGenerated: number[] = [];\n let totalTokensUsed = 0;\n let model = '';\n\n // Categorize substrate by type for the result\n const typeToKey: Record<string, keyof DigestCycleResult['substrate']> = {\n session: 'sessions',\n spore: 'spores',\n plan: 'plans',\n artifact: 'artifacts',\n 'team-member': 'team',\n };\n const substrateIndex: DigestCycleResult['substrate'] = {\n sessions: [],\n spores: [],\n plans: [],\n artifacts: [],\n team: [],\n };\n for (const note of substrate) {\n const key = typeToKey[note.type];\n if (key) {\n substrateIndex[key].push(note.id);\n }\n }\n\n // Record the cycle timestamp NOW, before tier processing. This ensures the\n // timestamp advances even if LLM calls fail, preventing the same substrate\n // from being rediscovered on every subsequent timer fire.\n const cycleTimestamp = new Date().toISOString();\n\n const systemPrompt = loadPrompt('digest-system');\n const allSubstrateIds = substrate.map((note) => note.id);\n\n for (const tier of eligibleTiers) {\n const tierPrompt = loadPrompt(`digest-${tier}`);\n const previousExtract = opts?.cleanSlate ? 
null : this.readPreviousExtract(tier);\n\n // Calculate token budget for substrate:\n // (context_window * safety_margin) - output - system_prompt - tier_prompt - previous_extract\n const contextWindow = this.config.digest.intelligence.context_window;\n const systemPromptTokens = estimateTokens(systemPrompt);\n const tierPromptTokens = estimateTokens(tierPrompt);\n const previousExtractTokens = previousExtract\n ? estimateTokens(previousExtract) + PREVIOUS_EXTRACT_OVERHEAD_TOKENS\n : 0;\n const availableTokens = Math.floor(contextWindow * CONTEXT_SAFETY_MARGIN);\n const substrateBudget = availableTokens - tier - systemPromptTokens - tierPromptTokens - previousExtractTokens;\n\n if (substrateBudget <= 0) continue;\n\n const formattedSubstrate = this.formatSubstrate(substrate, substrateBudget);\n\n // Build user prompt (system prompt sent separately via LlmRequestOptions)\n const promptParts = [tierPrompt];\n\n if (previousExtract) {\n promptParts.push('', '## Previous Synthesis', '', previousExtract);\n }\n\n promptParts.push('', '## New Substrate', '', formattedSubstrate);\n promptParts.push(\n '',\n '---',\n 'Produce your updated synthesis now. Stay within the token budget specified above.',\n );\n\n const userPrompt = promptParts.join('\\n');\n const promptTokens = estimateTokens(systemPrompt + userPrompt);\n this.log('debug', `Tier ${tier}: sending LLM request`, { promptTokens, maxTokens: tier, substrateBudget });\n\n try {\n const tierStart = Date.now();\n const digestConfig = this.config.digest.intelligence;\n const opts: LlmRequestOptions = {\n maxTokens: tier,\n timeoutMs: DIGEST_LLM_REQUEST_TIMEOUT_MS,\n contextLength: contextWindow,\n reasoning: LLM_REASONING_MODE,\n systemPrompt,\n keepAlive: digestConfig.keep_alive ?? undefined,\n };\n const response = await this.llm.summarize(userPrompt, opts);\n const tierDuration = Date.now() - tierStart;\n\n // Strip reasoning tokens if present (some models output chain-of-thought)\n const extractText = stripReasoningTokens(response.text);\n model = response.model;\n const responseTokens = estimateTokens(extractText);\n totalTokensUsed += promptTokens + responseTokens;\n\n this.log('info', `Tier ${tier}: completed`, { durationMs: tierDuration, responseTokens, model: response.model });\n this.writeExtract(tier, extractText, cycleId, response.model, substrate.length, allSubstrateIds, promptTokens + responseTokens);\n tiersGenerated.push(tier);\n } catch (err) {\n this.log('warn', `Tier ${tier}: failed`, { error: (err as Error).message });\n }\n }\n\n // Patch tiers_generated into each extract's frontmatter (not known until all tiers complete)\n if (tiersGenerated.length > 0) {\n const digestDir = path.join(this.vaultDir, 'digest');\n for (const tier of tiersGenerated) {\n const extractPath = path.join(digestDir, `extract-${tier}.md`);\n try {\n const content = fs.readFileSync(extractPath, 'utf-8');\n const fmMatch = content.match(/^---\\n([\\s\\S]*?)\\n---/);\n if (fmMatch) {\n const parsed = YAML.parse(fmMatch[1]) as Record<string, unknown>;\n parsed.tiers_generated = tiersGenerated;\n const fmYaml = YAML.stringify(parsed, { defaultStringType: 'QUOTE_DOUBLE', defaultKeyType: 'PLAIN' }).trim();\n const extractBody = content.slice(fmMatch[0].length);\n const tmpPath = `${extractPath}.tmp`;\n fs.writeFileSync(tmpPath, `---\\n${fmYaml}\\n---${extractBody}`, 'utf-8');\n fs.renameSync(tmpPath, extractPath);\n }\n } catch {\n // Extract file may not exist if that tier failed\n }\n }\n }\n\n const result: DigestCycleResult = {\n cycleId,\n timestamp: 
cycleTimestamp,\n substrate: substrateIndex,\n tiersGenerated,\n model,\n durationMs: Date.now() - startTime,\n tokensUsed: totalTokensUsed,\n };\n\n this.appendTrace(result);\n\n // Run post-pass hooks after successful digest\n for (const hook of this.postPassHooks) {\n try {\n await hook.fn(result);\n } catch (err) {\n this.log('warn', `Post-pass hook \"${hook.name}\" failed`, { error: (err as Error).message });\n }\n }\n\n return result;\n }\n}\n\n// --- Metabolism (Adaptive Timer) ---\n\nexport type MetabolismState = 'active' | 'cooling' | 'dormant';\n\n/** Milliseconds per second for config conversion. */\nconst MS_PER_SECOND = 1000;\n\nexport class Metabolism {\n state: MetabolismState = 'active';\n currentIntervalMs: number;\n\n private cooldownStep = 0;\n private lastSubstrateTime: number;\n private timer: ReturnType<typeof setTimeout> | null = null;\n private activeIntervalMs: number;\n private cooldownIntervalsMs: number[];\n private dormancyThresholdMs: number;\n\n constructor(config: MycoConfig['digest']['metabolism']) {\n this.activeIntervalMs = config.active_interval * MS_PER_SECOND;\n this.cooldownIntervalsMs = config.cooldown_intervals.map((s) => s * MS_PER_SECOND);\n this.dormancyThresholdMs = config.dormancy_threshold * MS_PER_SECOND;\n this.currentIntervalMs = this.activeIntervalMs;\n this.lastSubstrateTime = Date.now();\n }\n\n /** Reset to active state when new substrate is found. */\n onSubstrateFound(): void {\n this.state = 'active';\n this.cooldownStep = 0;\n this.currentIntervalMs = this.activeIntervalMs;\n this.lastSubstrateTime = Date.now();\n }\n\n /** Advance cooldown when a cycle finds no new substrate. */\n onEmptyCycle(): void {\n if (this.state === 'dormant') return;\n\n this.state = 'cooling';\n if (this.cooldownStep < this.cooldownIntervalsMs.length) {\n this.currentIntervalMs = this.cooldownIntervalsMs[this.cooldownStep];\n this.cooldownStep++;\n }\n\n this.checkDormancy();\n }\n\n /** Enter dormant state if enough time has elapsed since last substrate. */\n checkDormancy(): void {\n const elapsed = Date.now() - this.lastSubstrateTime;\n if (elapsed >= this.dormancyThresholdMs) {\n this.state = 'dormant';\n // Keep the last cooldown interval as the dormant polling rate\n }\n }\n\n /** Return to active from any state, resetting timers and rescheduling immediately. */\n activate(): void {\n this.onSubstrateFound();\n // Reschedule with the new active interval — without this, the old\n // (possibly dormant) timer continues ticking at the wrong rate\n if (this.callback) {\n this.reschedule();\n }\n }\n\n /** Set lastSubstrateTime explicitly (for testing). */\n markLastSubstrate(time: number): void {\n this.lastSubstrateTime = time;\n }\n\n /** Begin scheduling digest cycles with adaptive intervals. */\n start(callback: () => Promise<void>): void {\n this.callback = callback;\n this.reschedule();\n }\n\n /** Stop the timer. 
*/\n stop(): void {\n if (this.timer) {\n clearTimeout(this.timer);\n this.timer = null;\n }\n }\n\n private callback: (() => Promise<void>) | null = null;\n\n private reschedule(): void {\n this.stop();\n if (!this.callback) return;\n const cb = this.callback;\n const schedule = (): void => {\n this.timer = setTimeout(async () => {\n await cb();\n schedule();\n }, this.currentIntervalMs);\n this.timer.unref();\n };\n schedule();\n }\n}\n","/**\n * Shared JSONL trace file utilities.\n *\n * Used by DigestEngine and ConsolidationEngine to read/write append-only\n * trace records with timestamp caching.\n */\n\nimport fs from 'node:fs';\nimport path from 'node:path';\n\n/** Read the last JSON record from a JSONL file. Returns null if file is missing or empty. */\nexport function readLastRecord<T = Record<string, unknown>>(filePath: string): T | null {\n let content: string;\n try {\n content = fs.readFileSync(filePath, 'utf-8').trim();\n } catch {\n return null;\n }\n if (!content) return null;\n const lines = content.split('\\n');\n const lastLine = lines[lines.length - 1];\n try {\n return JSON.parse(lastLine) as T;\n } catch {\n return null;\n }\n}\n\n/** Read the last JSON record's `timestamp` field from a JSONL file. Returns null if file is missing or empty. */\nexport function readLastTimestamp(filePath: string): string | null {\n return readLastRecord<{ timestamp: string }>(filePath)?.timestamp ?? null;\n}\n\n/** Append a JSON record to a JSONL file, creating parent directories if needed. */\nexport function appendTraceRecord(filePath: string, record: Record<string, unknown>): void {\n fs.mkdirSync(path.dirname(filePath), { recursive: true });\n fs.appendFileSync(filePath, JSON.stringify(record) + '\\n', 'utf-8');\n}\n","import { z } from 'zod';\nimport type { LlmProvider } from '../intelligence/llm.js';\nimport { ARTIFACT_TYPES } from '../vault/types.js';\nimport { estimateTokens, CHARS_PER_TOKEN, LLM_REASONING_MODE } from '../constants.js';\nimport type { MycoConfig } from '../config/schema.js';\nimport type { ObservationType, ArtifactType } from '../vault/types.js';\nimport { buildExtractionPrompt, buildSummaryPrompt, buildTitlePrompt, buildClassificationPrompt } from '../prompts/index.js';\nimport { extractJson, stripReasoningTokens } from '../intelligence/response.js';\nimport { TURN_HEADING_PREFIX } from '../obsidian/formatter.js';\n\n/** Estimated token overhead for the extraction prompt template (excluding conversation content). */\nconst EXTRACTION_PROMPT_OVERHEAD_TOKENS = 500;\n\n/** Marker substring in failed summary text. Used by reprocess --failed to detect failures. 
*/\nexport const SUMMARIZATION_FAILED_MARKER = 'summarization failed';\n\nexport interface Observation {\n type: ObservationType;\n title: string;\n content: string;\n tags: string[];\n root_cause?: string;\n fix?: string;\n rationale?: string;\n alternatives_rejected?: string;\n gained?: string;\n sacrificed?: string;\n}\n\nexport interface ProcessorResult {\n summary: string;\n observations: Observation[];\n degraded: boolean;\n}\n\nexport interface ClassifiedArtifact {\n source_path: string;\n artifact_type: ArtifactType;\n title: string;\n tags: string[];\n}\n\nconst ClassificationResponseSchema = z.object({\n artifacts: z.array(z.object({\n source_path: z.string(),\n artifact_type: z.enum(ARTIFACT_TYPES),\n title: z.string(),\n tags: z.array(z.string()).default([]),\n })).default([]),\n});\n\nexport class BufferProcessor {\n private extractionMaxTokens: number;\n private summaryMaxTokens: number;\n private titleMaxTokens: number;\n private classificationMaxTokens: number;\n\n constructor(private backend: LlmProvider, private contextWindow: number = 8192, captureConfig?: MycoConfig['capture']) {\n this.extractionMaxTokens = captureConfig?.extraction_max_tokens ?? 2048;\n this.summaryMaxTokens = captureConfig?.summary_max_tokens ?? 512;\n this.titleMaxTokens = captureConfig?.title_max_tokens ?? 32;\n this.classificationMaxTokens = captureConfig?.classification_max_tokens ?? 1024;\n }\n\n private truncateForContext(data: string, maxTokens: number): string {\n const available = this.contextWindow - maxTokens;\n const dataTokens = estimateTokens(data);\n if (dataTokens <= available) return data;\n const charBudget = available * CHARS_PER_TOKEN;\n return data.slice(0, charBudget);\n }\n\n async process(conversationMarkdown: string, sessionId: string): Promise<ProcessorResult> {\n if (!conversationMarkdown.trim()) {\n return { summary: '', observations: [], degraded: false };\n }\n\n // Truncate from the beginning (keep recent turns) if conversation exceeds budget\n const availableTokens = this.contextWindow - EXTRACTION_PROMPT_OVERHEAD_TOKENS - this.extractionMaxTokens;\n const availableChars = availableTokens * CHARS_PER_TOKEN;\n let truncated = conversationMarkdown;\n if (conversationMarkdown.length > availableChars) {\n truncated = conversationMarkdown.slice(-availableChars);\n // Find first turn boundary to avoid cutting mid-turn\n const turnBoundary = truncated.indexOf(TURN_HEADING_PREFIX);\n if (turnBoundary > 0) {\n truncated = truncated.slice(turnBoundary);\n }\n }\n\n const prompt = buildExtractionPrompt(sessionId, truncated, this.extractionMaxTokens);\n\n try {\n const response = await this.backend.summarize(prompt, {\n maxTokens: this.extractionMaxTokens,\n reasoning: LLM_REASONING_MODE,\n });\n const parsed = extractJson(response.text) as {\n summary: string;\n observations: Observation[];\n };\n return {\n summary: parsed.summary,\n observations: parsed.observations ?? [],\n degraded: false,\n };\n } catch (error) {\n return {\n summary: `LLM processing failed for session ${sessionId}. Error: ${(error as Error).message}`,\n observations: [],\n degraded: true,\n };\n }\n }\n\n async summarizeSession(\n conversationMarkdown: string,\n sessionId: string,\n user?: string,\n ): Promise<{ summary: string; title: string }> {\n const truncatedContent = this.truncateForContext(conversationMarkdown, this.summaryMaxTokens);\n const summaryPrompt = buildSummaryPrompt(sessionId, user ?? 
'unknown', truncatedContent, this.summaryMaxTokens);\n\n let summaryText: string;\n try {\n const response = await this.backend.summarize(summaryPrompt, { maxTokens: this.summaryMaxTokens, reasoning: LLM_REASONING_MODE });\n summaryText = stripReasoningTokens(response.text);\n } catch (error) {\n summaryText = `Session ${sessionId} — ${SUMMARIZATION_FAILED_MARKER}: ${(error as Error).message}`;\n }\n\n const titlePrompt = buildTitlePrompt(summaryText, sessionId);\n let title: string;\n try {\n const response = await this.backend.summarize(titlePrompt, { maxTokens: this.titleMaxTokens, reasoning: LLM_REASONING_MODE });\n title = stripReasoningTokens(response.text).trim();\n } catch {\n title = `Session ${sessionId}`;\n }\n\n return { summary: summaryText, title };\n }\n\n async classifyArtifacts(\n candidates: Array<{ path: string; content: string }>,\n sessionId: string,\n ): Promise<ClassifiedArtifact[]> {\n if (candidates.length === 0) return [];\n\n const prompt = this.buildPromptForClassification(candidates, sessionId);\n const response = await this.backend.summarize(prompt, { maxTokens: this.classificationMaxTokens, reasoning: LLM_REASONING_MODE });\n const raw = extractJson(response.text);\n const parsed = ClassificationResponseSchema.parse(raw);\n return parsed.artifacts;\n }\n\n private buildPromptForClassification(\n candidates: Array<{ path: string; content: string }>,\n sessionId: string,\n ): string {\n return buildClassificationPrompt(sessionId, candidates, this.classificationMaxTokens);\n }\n\n}\n","import { formatSporeBody } from '../obsidian/formatter.js';\nimport { sessionNoteId } from './session-id.js';\nimport { indexNote } from '../index/rebuild.js';\nimport type { Observation } from '../daemon/processor.js';\nimport type { VaultWriter } from './writer.js';\nimport type { MycoIndex } from '../index/sqlite.js';\n\nexport interface WrittenNote {\n id: string;\n path: string;\n observation: Observation;\n}\n\nexport function writeObservationNotes(\n observations: Observation[],\n sessionId: string,\n writer: VaultWriter,\n index: MycoIndex,\n vaultDir: string,\n): WrittenNote[] {\n const results: WrittenNote[] = [];\n\n for (const obs of observations) {\n const obsId = `${obs.type}-${sessionId.slice(-6)}-${Date.now()}`;\n const body = formatSporeBody({\n title: obs.title,\n observationType: obs.type,\n content: obs.content,\n sessionId,\n root_cause: obs.root_cause,\n fix: obs.fix,\n rationale: obs.rationale,\n alternatives_rejected: obs.alternatives_rejected,\n gained: obs.gained,\n sacrificed: obs.sacrificed,\n tags: obs.tags,\n });\n const relativePath = writer.writeSpore({\n id: obsId,\n observation_type: obs.type,\n session: sessionNoteId(sessionId),\n tags: obs.tags,\n content: body,\n });\n indexNote(index, vaultDir, relativePath);\n results.push({ id: obsId, path: relativePath, observation: obs });\n }\n\n return results;\n}\n","import fs from 'node:fs';\nimport path from 'node:path';\nimport type { MycoIndex } from '../index/sqlite.js';\nimport type { VectorIndex } from '../index/vectors.js';\nimport type { MycoConfig } from '../config/schema.js';\nimport type { LlmProvider, EmbeddingProvider } from '../intelligence/llm.js';\nimport type { DigestCycleResult, DigestLogFn } from '../daemon/digest.js';\nimport { rebuildIndex, indexNote } from '../index/rebuild.js';\nimport { initFts } from '../index/fts.js';\nimport { generateEmbedding } from '../intelligence/embeddings.js';\nimport { batchExecute, LLM_BATCH_CONCURRENCY, EMBEDDING_BATCH_CONCURRENCY } from 
'../intelligence/batch.js';\nimport { DigestEngine } from '../daemon/digest.js';\nimport type { DigestCycleOptions } from '../daemon/digest.js';\nimport { BufferProcessor, SUMMARIZATION_FAILED_MARKER } from '../daemon/processor.js';\nimport { VaultWriter } from '../vault/writer.js';\nimport { writeObservationNotes } from '../vault/observations.js';\nimport { sessionNoteId, bareSessionId } from '../vault/session-id.js';\nimport { callout, extractSection, CONVERSATION_HEADING } from '../obsidian/formatter.js';\nimport matter from 'gray-matter';\nimport {\n EMBEDDING_INPUT_LIMIT,\n CURATION_CLUSTER_SIMILARITY,\n SUPERSESSION_MAX_TOKENS,\n LLM_REASONING_MODE,\n} from '../constants.js';\nimport { stripReasoningTokens } from '../intelligence/response.js';\nimport { loadPrompt, formatNoteForPrompt, formatNotesForPrompt } from '../prompts/index.js';\nimport { supersedeSpore, supersededIdsSchema, isActiveSpore } from '../vault/curation.js';\nimport type { PipelineManager } from '../daemon/pipeline.js';\n\nexport interface OperationContext {\n vaultDir: string;\n config: MycoConfig;\n index: MycoIndex;\n vectorIndex?: VectorIndex;\n pipeline?: PipelineManager;\n log?: (level: string, message: string, data?: Record<string, unknown>) => void;\n}\n\n// --- Rebuild ---\n\nexport interface RebuildResult {\n ftsCount: number;\n embeddedCount: number;\n failedCount: number;\n skippedCount: number;\n}\n\n/**\n * Rebuild FTS index and re-embed all active notes.\n * Core logic shared between CLI (`myco rebuild`) and daemon API (`POST /api/rebuild`).\n */\nexport async function runRebuild(\n ctx: OperationContext,\n embeddingProvider: EmbeddingProvider,\n onProgress?: (done: number, total: number) => void,\n): Promise<RebuildResult> {\n const { index, vaultDir } = ctx;\n\n // Phase 1: FTS rebuild\n initFts(index);\n const ftsCount = rebuildIndex(index, vaultDir);\n\n // Phase 2: Vector embeddings\n if (!ctx.vectorIndex) {\n return { ftsCount, embeddedCount: 0, failedCount: 0, skippedCount: 0 };\n }\n\n const allNotes = index.query({});\n // Skip superseded/archived spores\n const activeNotes = allNotes.filter((n) => {\n const status = (n.frontmatter as Record<string, unknown>)?.status as string | undefined;\n return status !== 'superseded' && status !== 'archived';\n });\n const skippedCount = allNotes.length - activeNotes.length;\n\n const vec = ctx.vectorIndex;\n const result = await batchExecute(\n activeNotes,\n async (note) => {\n const text = `${note.title}\\n${note.content}`.slice(0, EMBEDDING_INPUT_LIMIT);\n const emb = await generateEmbedding(embeddingProvider, text);\n vec.upsert(note.id, emb.embedding, {\n type: note.type,\n session_id: (note.frontmatter as Record<string, unknown>)?.session as string ?? 
'',\n });\n },\n {\n concurrency: EMBEDDING_BATCH_CONCURRENCY,\n onProgress,\n },\n );\n\n // Register embedding results in the pipeline when available\n if (ctx.pipeline) {\n for (const note of activeNotes) {\n ctx.pipeline.register(note.id, note.type, note.path);\n ctx.pipeline.advance(note.id, note.type, 'capture', 'succeeded');\n ctx.pipeline.advance(note.id, note.type, 'embedding', 'succeeded');\n }\n }\n\n return {\n ftsCount,\n embeddedCount: result.succeeded,\n failedCount: result.failed,\n skippedCount,\n };\n}\n\n// --- Digest ---\n\nexport interface DigestOptions {\n tier?: number;\n full?: boolean;\n}\n\n/**\n * Run a single digest cycle.\n * Core logic shared between CLI (`myco digest`) and daemon API (`POST /api/digest`).\n *\n * When a PipelineManager is available in ctx.pipeline and `--full` is set,\n * all pipeline items are reset to digest:pending so the metabolism timer\n * picks them up. Regular (non-full) digest runs the engine directly.\n */\nexport async function runDigest(\n ctx: OperationContext,\n llmProvider: LlmProvider,\n options?: DigestOptions,\n): Promise<DigestCycleResult | null> {\n const { config, vaultDir, index } = ctx;\n const log: DigestLogFn = ctx.log\n ? (level, message, data) => ctx.log!(level, message, data)\n : () => {};\n\n // Pipeline mode for --full: reset digest stages to pending and let\n // the metabolism timer handle the actual digest cycle.\n if (ctx.pipeline && options?.full) {\n const items = ctx.pipeline.listItems({ stage: 'digest', status: 'succeeded' });\n let reset = 0;\n for (const item of items.items) {\n ctx.pipeline.advance(item.id, item.item_type, 'digest', 'pending');\n reset++;\n }\n log('info', `Reset ${reset} item(s) to digest:pending for full reprocessing`);\n // Return null to indicate no immediate cycle was run; the metabolism\n // timer will pick up the pending items on its next tick.\n return null;\n }\n\n const engine = new DigestEngine({\n vaultDir,\n index,\n llmProvider,\n config,\n log,\n });\n\n const opts: DigestCycleOptions = {};\n const isReprocess = options?.full || options?.tier !== undefined;\n\n if (isReprocess) {\n opts.fullReprocess = true;\n opts.cleanSlate = true;\n }\n if (options?.tier !== undefined) {\n const eligible = engine.getEligibleTiers();\n if (!eligible.includes(options.tier)) {\n throw new Error(`Tier ${options.tier} is not eligible. Eligible tiers: [${eligible.join(', ')}]`);\n }\n opts.tiers = [options.tier];\n }\n\n return engine.runCycle(opts);\n}\n\n// --- Curation ---\n\nexport interface CurationDeps {\n vaultDir: string;\n config: MycoConfig;\n index: MycoIndex;\n vectorIndex: VectorIndex;\n llmProvider: LlmProvider;\n embeddingProvider: EmbeddingProvider;\n log?: (level: string, message: string, data?: Record<string, unknown>) => void;\n pipeline?: PipelineManager;\n}\n\n// --- Curation internals ---\n\n/** Max concurrent embedding requests to avoid overwhelming the provider. 
*/\nconst CURATION_EMBEDDING_BATCH_SIZE = 10;\n\ninterface SporeWithEmbedding {\n id: string;\n path: string;\n title: string;\n content: string;\n created: string;\n frontmatter: Record<string, unknown>;\n embedding: number[];\n}\n\ninterface Cluster {\n spores: SporeWithEmbedding[];\n centroid: number[];\n}\n\nfunction cosineSimilarity(a: number[], b: number[]): number {\n let dot = 0, normA = 0, normB = 0;\n for (let i = 0; i < a.length; i++) {\n dot += a[i] * b[i];\n normA += a[i] * a[i];\n normB += b[i] * b[i];\n }\n return dot / (Math.sqrt(normA) * Math.sqrt(normB));\n}\n\nfunction updateCentroid(spores: SporeWithEmbedding[]): number[] {\n if (spores.length === 0) return [];\n const dim = spores[0].embedding.length;\n const centroid = new Array<number>(dim).fill(0);\n for (const s of spores) {\n for (let i = 0; i < dim; i++) {\n centroid[i] += s.embedding[i];\n }\n }\n for (let i = 0; i < dim; i++) {\n centroid[i] /= spores.length;\n }\n return centroid;\n}\n\nfunction clusterSpores(spores: SporeWithEmbedding[]): Cluster[] {\n const clusters: Cluster[] = [];\n for (const spore of spores) {\n let bestCluster: Cluster | null = null;\n let bestSimilarity = -1;\n for (const cluster of clusters) {\n const sim = cosineSimilarity(spore.embedding, cluster.centroid);\n if (sim > bestSimilarity) {\n bestSimilarity = sim;\n bestCluster = cluster;\n }\n }\n if (bestCluster !== null && bestSimilarity >= CURATION_CLUSTER_SIMILARITY) {\n bestCluster.spores.push(spore);\n bestCluster.centroid = updateCentroid(bestCluster.spores);\n } else {\n clusters.push({ spores: [spore], centroid: [...spore.embedding] });\n }\n }\n return clusters;\n}\n\nexport interface CurationResult {\n scanned: number;\n clustersEvaluated: number;\n superseded: number;\n /** True when spores were enqueued via pipeline instead of curated inline. */\n enqueued?: boolean;\n}\n\n/**\n * Run vault curation: embed spores, cluster, ask LLM which are outdated, supersede.\n * Core logic shared between CLI (`myco curate`) and daemon API (`POST /api/curate`).\n *\n * When a PipelineManager is available in deps.pipeline, active spores are\n * registered at consolidation:pending for the pipeline tick to handle.\n * When no pipeline is available, falls back to inline curation.\n */\nexport async function runCuration(\n deps: CurationDeps,\n dryRun: boolean,\n): Promise<CurationResult> {\n const { index, vectorIndex, llmProvider, embeddingProvider, vaultDir } = deps;\n const log = deps.log ?? (() => {});\n\n // 1. Query all spores and filter for active ones\n const allSpores = index.query({ type: 'spore' });\n const activeSpores = allSpores.filter((n) => isActiveSpore(n.frontmatter));\n\n // --- Pipeline mode: enqueue spores for consolidation via pipeline tick ---\n if (deps.pipeline && !dryRun) {\n let enqueued = 0;\n for (const spore of activeSpores) {\n deps.pipeline.register(spore.id, 'spore', spore.path);\n deps.pipeline.advance(spore.id, 'spore', 'capture', 'succeeded');\n deps.pipeline.advance(spore.id, 'spore', 'consolidation', 'pending');\n enqueued++;\n }\n log('info', `Enqueued ${enqueued} spore(s) for pipeline consolidation`);\n return { scanned: activeSpores.length, clustersEvaluated: 0, superseded: 0, enqueued: true };\n }\n\n if (activeSpores.length === 0) {\n return { scanned: 0, clustersEvaluated: 0, superseded: 0 };\n }\n\n // 2. 
Embed all active spores (batched for concurrency)\n const sporesWithEmbeddings: SporeWithEmbedding[] = [];\n let embedFailures = 0;\n\n for (let i = 0; i < activeSpores.length; i += CURATION_EMBEDDING_BATCH_SIZE) {\n const batch = activeSpores.slice(i, i + CURATION_EMBEDDING_BATCH_SIZE);\n const results = await Promise.allSettled(\n batch.map(async (spore) => {\n const text = spore.content.slice(0, EMBEDDING_INPUT_LIMIT);\n const result = await generateEmbedding(embeddingProvider, text);\n return { spore, embedding: result.embedding };\n }),\n );\n\n for (const result of results) {\n if (result.status === 'fulfilled') {\n const { spore, embedding } = result.value;\n sporesWithEmbeddings.push({\n id: spore.id,\n path: spore.path,\n title: spore.title,\n content: spore.content,\n created: spore.created,\n frontmatter: spore.frontmatter,\n embedding,\n });\n } else {\n embedFailures++;\n }\n }\n }\n\n if (embedFailures > 0) {\n log('warn', `${embedFailures} spore(s) could not be embedded and were skipped`);\n }\n\n // 3. Group by observation_type\n const byType = new Map<string, SporeWithEmbedding[]>();\n for (const spore of sporesWithEmbeddings) {\n const obsType = (spore.frontmatter['observation_type'] as string | undefined) ?? 'unknown';\n if (!byType.has(obsType)) byType.set(obsType, []);\n byType.get(obsType)!.push(spore);\n }\n\n // 4. Cluster within each type group\n const template = loadPrompt('supersession');\n let totalClusters = 0;\n let totalSuperseded = 0;\n\n for (const [obsType, typeSpores] of byType) {\n const clusters = clusterSpores(typeSpores);\n const multiSpore = clusters.filter((c) => c.spores.length >= 2);\n if (multiSpore.length === 0) continue;\n\n log('info', `Type: ${obsType} — ${typeSpores.length} spores, ${multiSpore.length} cluster(s) to evaluate`);\n totalClusters += multiSpore.length;\n\n for (const cluster of multiSpore) {\n const sorted = [...cluster.spores].sort((a, b) => a.created.localeCompare(b.created));\n const newest = sorted[sorted.length - 1];\n const candidates = sorted.slice(0, sorted.length - 1);\n\n // Build supersession prompt\n const newSporeText = formatNoteForPrompt(newest);\n const candidatesText = formatNotesForPrompt(candidates);\n\n const prompt = template\n .replace('{{new_spore}}', newSporeText)\n .replace('{{candidates}}', candidatesText);\n\n // Ask LLM which candidates are outdated\n let responseText: string;\n try {\n const response = await llmProvider.summarize(prompt, {\n maxTokens: SUPERSESSION_MAX_TOKENS,\n reasoning: LLM_REASONING_MODE,\n });\n responseText = stripReasoningTokens(response.text);\n } catch (err) {\n log('warn', `LLM call failed for cluster in ${obsType}: ${String(err)}`);\n continue;\n }\n\n // Parse response\n let rawIds: unknown;\n try {\n rawIds = JSON.parse(responseText);\n } catch {\n log('warn', `Could not parse LLM response for cluster in ${obsType}`);\n continue;\n }\n\n const parsed = supersededIdsSchema.safeParse(rawIds);\n if (!parsed.success) {\n log('warn', `LLM response schema invalid for cluster in ${obsType}`);\n continue;\n }\n\n // Validate IDs against actual candidates\n const candidateMap = new Map(candidates.map((c) => [c.id, c]));\n const validIds = parsed.data.filter((id) => candidateMap.has(id));\n if (validIds.length === 0) continue;\n\n for (const id of validIds) {\n const candidate = candidateMap.get(id)!;\n\n if (dryRun) {\n log('info', `[dry-run] Would supersede: ${candidate.title} (${id}) by ${newest.title} (${newest.id})`);\n totalSuperseded++;\n continue;\n }\n\n const wrote = 
supersedeSpore(id, newest.id, candidate.path, { index, vectorIndex, vaultDir });\n if (!wrote) {\n log('warn', `File not found for ${id}, skipping write`);\n continue;\n }\n\n log('info', `Superseded: ${candidate.title} (${id}) by ${newest.title} (${newest.id})`);\n totalSuperseded++;\n }\n }\n }\n\n return {\n scanned: activeSpores.length,\n clustersEvaluated: totalClusters,\n superseded: totalSuperseded,\n };\n}\n\n// --- Reprocess ---\n\nexport interface ReprocessOptions {\n /** Filter to sessions matching this substring. */\n session?: string;\n /** Filter to sessions from a specific date (YYYY-MM-DD). */\n date?: string;\n /** Only reprocess sessions with failed summaries. */\n failed?: boolean;\n /** Skip LLM calls — re-index and re-embed only. */\n indexOnly?: boolean;\n}\n\nexport interface ReprocessResult {\n sessionsFound: number;\n sessionsProcessed: number;\n observationsExtracted: number;\n summariesRegenerated: number;\n embeddingsQueued: number;\n /** True when sessions were enqueued via pipeline instead of processed inline. */\n enqueued?: boolean;\n}\n\n\n/**\n * Replace the title (first `# ` line) and summary callout in a session note body.\n * Handles both cases: existing callout (replace) and no callout (insert after title).\n */\nexport function updateTitleAndSummary(body: string, newTitle: string, newNarrative: string): string {\n let updated = body.replace(/^# .*/m, `# ${newTitle}`);\n const summaryCallout = callout('abstract', 'Summary', newNarrative);\n const hasExistingCallout = /> \\[!abstract\\] Summary/.test(updated);\n if (hasExistingCallout) {\n updated = updated.replace(/> \\[!abstract\\] Summary\\n(?:> .*\\n?)*/m, summaryCallout + '\\n');\n } else {\n // Insert callout after the title line\n updated = updated.replace(/^(# .*\\n)/m, `$1\\n${summaryCallout}\\n`);\n }\n return updated;\n}\n\n/**\n * Reprocess sessions: re-extract observations, regenerate summaries, re-index.\n * Core logic shared between CLI (`myco reprocess`) and daemon API (`POST /api/reprocess`).\n *\n * When a PipelineManager is available in ctx.pipeline, sessions are registered\n * at extraction:pending for the pipeline tick to handle. When no pipeline is\n * available (standalone CLI), falls back to inline processing.\n */\nexport async function runReprocess(\n ctx: OperationContext,\n llmProvider: LlmProvider | null,\n embeddingProvider: EmbeddingProvider,\n options?: ReprocessOptions,\n onProgress?: (phase: string, done: number, total: number) => void,\n): Promise<ReprocessResult> {\n const { vaultDir, config, index } = ctx;\n const log = ctx.log ?? (() => {});\n\n const sessionFilter = options?.session;\n const dateFilter = options?.date;\n const failedOnly = options?.failed ?? false;\n const skipLlm = options?.indexOnly ?? 
false;\n\n // Find sessions (shared between pipeline and legacy modes)\n const sessionsDir = path.join(vaultDir, 'sessions');\n if (!fs.existsSync(sessionsDir)) {\n return { sessionsFound: 0, sessionsProcessed: 0, observationsExtracted: 0, summariesRegenerated: 0, embeddingsQueued: 0 };\n }\n\n const sessionFiles: Array<{ relativePath: string; sessionId: string; dateDir: string }> = [];\n for (const dateDir of fs.readdirSync(sessionsDir)) {\n if (dateFilter && dateDir !== dateFilter) continue;\n const datePath = path.join(sessionsDir, dateDir);\n if (!fs.statSync(datePath).isDirectory()) continue;\n for (const file of fs.readdirSync(datePath)) {\n if (!file.startsWith('session-') || !file.endsWith('.md')) continue;\n const sessionId = file.replace('session-', '').replace('.md', '');\n if (sessionFilter && !sessionId.includes(sessionFilter)) continue;\n sessionFiles.push({ relativePath: path.join('sessions', dateDir, file), sessionId, dateDir });\n }\n }\n\n if (sessionFiles.length === 0) {\n return { sessionsFound: 0, sessionsProcessed: 0, observationsExtracted: 0, summariesRegenerated: 0, embeddingsQueued: 0 };\n }\n\n // --- Pipeline mode: enqueue sessions for extraction via pipeline tick ---\n if (ctx.pipeline && !skipLlm) {\n let enqueued = 0;\n\n // Pre-filter for failed-only if requested\n let eligibleFiles = sessionFiles;\n if (failedOnly) {\n eligibleFiles = sessionFiles.filter(({ relativePath }) => {\n const rawContent = fs.readFileSync(path.join(vaultDir, relativePath), 'utf-8');\n return rawContent.includes(SUMMARIZATION_FAILED_MARKER);\n });\n }\n\n for (const { relativePath, sessionId } of eligibleFiles) {\n // Register (idempotent) and reset extraction to pending\n ctx.pipeline.register(sessionId, 'session', relativePath);\n ctx.pipeline.advance(sessionId, 'session', 'capture', 'succeeded');\n ctx.pipeline.advance(sessionId, 'session', 'extraction', 'pending');\n enqueued++;\n }\n\n log('info', `Enqueued ${enqueued} session(s) for pipeline reprocessing`, {\n filters: { session: sessionFilter, date: dateFilter, failed: failedOnly },\n });\n\n return {\n sessionsFound: sessionFiles.length,\n sessionsProcessed: enqueued,\n observationsExtracted: 0,\n summariesRegenerated: 0,\n embeddingsQueued: 0,\n enqueued: true,\n };\n }\n\n // --- Legacy inline mode (no pipeline) ---\n const effectiveLlm = skipLlm ? null : llmProvider;\n const processor = effectiveLlm\n ? 
new BufferProcessor(effectiveLlm, config.intelligence.llm.context_window, config.capture)\n : null;\n const writer = new VaultWriter(vaultDir);\n\n // Prepare tasks\n interface SessionTask {\n relativePath: string;\n sessionId: string;\n bare: string;\n frontmatter: Record<string, unknown>;\n frontmatterBlock: string;\n body: string;\n conversationSection: string;\n hasFailed: boolean;\n }\n\n const tasks: SessionTask[] = [];\n for (const { relativePath, sessionId } of sessionFiles) {\n const rawContent = fs.readFileSync(path.join(vaultDir, relativePath), 'utf-8');\n\n // Quick pre-screen before expensive parsing when filtering to failed sessions only\n const hasFailed = rawContent.includes(SUMMARIZATION_FAILED_MARKER);\n if (failedOnly && !hasFailed) continue;\n\n const { data: frontmatter, content: body } = matter(rawContent);\n const bare = bareSessionId(sessionId);\n const conversationSection = extractSection(body, CONVERSATION_HEADING);\n const fmEnd = rawContent.indexOf('---', 4);\n const frontmatterBlock = rawContent.slice(0, fmEnd + 3);\n\n tasks.push({ relativePath, sessionId, bare, frontmatter, frontmatterBlock, body, conversationSection, hasFailed });\n }\n\n if (tasks.length === 0) {\n return { sessionsFound: sessionFiles.length, sessionsProcessed: 0, observationsExtracted: 0, summariesRegenerated: 0, embeddingsQueued: 0 };\n }\n\n log('info', `Reprocessing ${tasks.length} session(s)`, { filters: { session: sessionFilter, date: dateFilter, failed: failedOnly, indexOnly: skipLlm } });\n\n // Fire-and-forget embedding helper — pipelines embeddings as data is produced\n // instead of accumulating all jobs in memory for a separate phase.\n let embeddingsQueued = 0;\n const embedPending: Promise<void>[] = [];\n const fireEmbed = (id: string, text: string, metadata: Record<string, string>) => {\n if (!ctx.vectorIndex) return;\n embeddingsQueued++;\n const vec = ctx.vectorIndex;\n const p = generateEmbedding(embeddingProvider, text)\n .then((emb) => { vec.upsert(id, emb.embedding, metadata); })\n .catch((err) => { log('warn', `Embedding failed for ${id}`, { error: (err as Error).message }); });\n embedPending.push(p);\n };\n\n // Phase 1: Extraction + FTS re-indexing + inline embeddings\n let totalObservations = 0;\n\n const extractionResult = await batchExecute(\n tasks,\n async (task) => {\n let obs = 0;\n\n if (processor && task.conversationSection.trim()) {\n const result = await processor.process(task.conversationSection, task.bare);\n if (result.observations.length > 0) {\n writeObservationNotes(result.observations, task.bare, writer, index, vaultDir);\n obs = result.observations.length;\n for (const o of result.observations) {\n fireEmbed(\n `${o.type}-${task.bare.slice(-6)}-${Date.now()}`,\n `${o.title}\\n${o.content}`.slice(0, EMBEDDING_INPUT_LIMIT),\n { type: 'spore', session_id: task.bare },\n );\n }\n }\n }\n\n indexNote(index, vaultDir, task.relativePath);\n\n const embText = `${task.frontmatter.title ?? ''}\\n${task.frontmatter.summary ?? 
''}`.slice(0, EMBEDDING_INPUT_LIMIT);\n if (embText.trim()) {\n fireEmbed(sessionNoteId(task.bare), embText, { type: 'session', session_id: task.bare });\n }\n\n return obs;\n },\n {\n concurrency: LLM_BATCH_CONCURRENCY,\n onProgress: (done, total) => onProgress?.('extraction', done, total),\n },\n );\n\n for (const r of extractionResult.results) {\n if (r.status === 'fulfilled') totalObservations += r.value;\n }\n\n // Phase 2: Resummarize\n let summarized = 0;\n if (processor) {\n const summarizableTasks = tasks.filter((t) => t.conversationSection);\n if (summarizableTasks.length > 0) {\n const summaryResult = await batchExecute(\n summarizableTasks,\n async (task) => {\n const user = typeof task.frontmatter.user === 'string' ? task.frontmatter.user : undefined;\n const result = await processor.summarizeSession(task.conversationSection, task.bare, user);\n\n if (result.summary.includes(SUMMARIZATION_FAILED_MARKER)) {\n log('warn', `Summarization failed for ${task.sessionId.slice(0, 12)}`);\n return false;\n }\n\n const updatedBody = updateTitleAndSummary(task.body, result.title, result.summary);\n fs.writeFileSync(path.join(vaultDir, task.relativePath), task.frontmatterBlock + updatedBody);\n indexNote(index, vaultDir, task.relativePath);\n return true;\n },\n {\n concurrency: LLM_BATCH_CONCURRENCY,\n onProgress: (done, total) => onProgress?.('summarization', done, total),\n },\n );\n\n for (const r of summaryResult.results) {\n if (r.status === 'fulfilled' && r.value) summarized++;\n }\n }\n }\n\n // Wait for all pipelined embeddings to settle\n await Promise.allSettled(embedPending);\n\n log('info', 'Reprocess completed', {\n sessions: tasks.length,\n observations: totalObservations,\n summaries: summarized,\n embeddings: embeddingsQueued,\n });\n\n return {\n sessionsFound: sessionFiles.length,\n sessionsProcessed: tasks.length,\n observationsExtracted: totalObservations,\n summariesRegenerated: summarized,\n embeddingsQueued,\n };\n}\n","/**\n * Batch execution utilities for LLM and embedding operations.\n *\n * Provides concurrency-limited parallel execution for bulk operations\n * like reprocessing, rebuilding, and any future batch pipeline.\n */\n\n/** Default concurrency for LLM calls (heavier, single-threaded backends). */\nexport const LLM_BATCH_CONCURRENCY = 3;\n/** Default concurrency for embedding calls (lighter, can run more in parallel). */\nexport const EMBEDDING_BATCH_CONCURRENCY = 4;\n\nexport interface BatchResult<T> {\n succeeded: number;\n failed: number;\n results: Array<{ status: 'fulfilled'; value: T } | { status: 'rejected'; reason: string }>;\n}\n\n/**\n * Execute async tasks with a concurrency limit.\n * Reports progress via an optional callback.\n */\nexport async function batchExecute<I, O>(\n items: I[],\n fn: (item: I) => Promise<O>,\n options: {\n concurrency: number;\n onProgress?: (completed: number, total: number) => void;\n },\n): Promise<BatchResult<O>> {\n const { concurrency, onProgress } = options;\n let succeeded = 0;\n let failed = 0;\n const results: BatchResult<O>['results'] = [];\n\n for (let i = 0; i < items.length; i += concurrency) {\n const batch = items.slice(i, i + concurrency);\n const settled = await Promise.allSettled(batch.map(fn));\n\n for (const result of settled) {\n if (result.status === 'fulfilled') {\n succeeded++;\n results.push({ status: 'fulfilled', value: result.value });\n } else {\n failed++;\n results.push({ status: 'rejected', reason: (result.reason as Error)?.message ?? 
String(result.reason) });\n }\n }\n\n onProgress?.(succeeded + failed, items.length);\n }\n\n return { succeeded, failed, results };\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAQA,kBAAiB;AAHjB,OAAOA,SAAQ;AACf,OAAOC,WAAU;AACjB,OAAO,YAAY;;;ACAnB,OAAO,QAAQ;AACf,OAAO,UAAU;AAGV,SAAS,eAA4C,UAA4B;AACtF,MAAI;AACJ,MAAI;AACF,cAAU,GAAG,aAAa,UAAU,OAAO,EAAE,KAAK;AAAA,EACpD,QAAQ;AACN,WAAO;AAAA,EACT;AACA,MAAI,CAAC,QAAS,QAAO;AACrB,QAAM,QAAQ,QAAQ,MAAM,IAAI;AAChC,QAAM,WAAW,MAAM,MAAM,SAAS,CAAC;AACvC,MAAI;AACF,WAAO,KAAK,MAAM,QAAQ;AAAA,EAC5B,QAAQ;AACN,WAAO;AAAA,EACT;AACF;AAGO,SAAS,kBAAkB,UAAiC;AACjE,SAAO,eAAsC,QAAQ,GAAG,aAAa;AACvE;AAGO,SAAS,kBAAkB,UAAkB,QAAuC;AACzF,KAAG,UAAU,KAAK,QAAQ,QAAQ,GAAG,EAAE,WAAW,KAAK,CAAC;AACxD,KAAG,eAAe,UAAU,KAAK,UAAU,MAAM,IAAI,MAAM,OAAO;AACpE;;;AD+BA,IAAM,mCAAmC;AAKzC,IAAM,wBAAwB;AAG9B,IAAM,eAAe;AAId,IAAM,eAAN,MAAmB;AAAA,EAChB;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA,0BAAqD;AAAA,EACrD,kBAAkB;AAAA;AAAA,EAG1B,IAAI,oBAA6B;AAC/B,WAAO,KAAK;AAAA,EACd;AAAA;AAAA,EAGQ,eAAiE,CAAC;AAAA;AAAA,EAGlE,gBAA2F,CAAC;AAAA,EAEpG,YAAY,cAAkC;AAC5C,SAAK,WAAW,aAAa;AAC7B,SAAK,QAAQ,aAAa;AAC1B,SAAK,MAAM,aAAa;AACxB,SAAK,SAAS,aAAa;AAC3B,SAAK,MAAM,aAAa,QAAQ,MAAM;AAAA,IAAC;AAAA,EACzC;AAAA;AAAA,EAGA,gBAAgB,MAAc,IAA+B;AAC3D,SAAK,aAAa,KAAK,EAAE,MAAM,GAAG,CAAC;AAAA,EACrC;AAAA;AAAA,EAGA,iBAAiB,MAAc,IAAwD;AACrF,SAAK,cAAc,KAAK,EAAE,MAAM,GAAG,CAAC;AAAA,EACtC;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,kBAAkB,oBAAkD;AAClE,UAAM,WAAW,KAAK,OAAO,OAAO,UAAU;AAE9C,UAAM,QAAQ,qBACV,KAAK,MAAM,MAAM,EAAE,cAAc,oBAAoB,OAAO,SAAS,CAAC,IACtE,KAAK,MAAM,MAAM,EAAE,OAAO,SAAS,CAAC;AAIxC,UAAM,WAAW,MACd,OAAO,CAAC,MAAM,EAAE,SAAS,YAAY,EACrC,OAAO,CAAC,MAAM;AACb,UAAI,EAAE,SAAS,QAAS,QAAO;AAC/B,YAAM,SAAS,EAAE,YAAY;AAC7B,aAAO,CAAC,UAAU,WAAW;AAAA,IAC/B,CAAC;AAGH,aAAS,KAAK,CAAC,GAAG,MAAM;AACtB,YAAM,UAAU,8BAA8B,EAAE,IAAI,KAAK;AACzD,YAAM,UAAU,8BAA8B,EAAE,IAAI,KAAK;AACzD,UAAI,YAAY,QAAS,QAAO,UAAU;AAE1C,aAAO,EAAE,QAAQ,cAAc,EAAE,OAAO;AAAA,IAC1C,CAAC;AAED,WAAO,SAAS,MAAM,GAAG,QAAQ;AAAA,EACnC;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,mBAA6B;AAC3B,UAAM,gBAAgB,KAAK,OAAO,OAAO,aAAa;AACtD,WAAO,aAAa,OAAO,CAAC,SAAS;AACnC,YAAM,aAAa,wBAAwB,IAAI;AAC/C,aAAO,eAAe,UAAa,cAAc;AAAA,IACnD,CAAC;AAAA,EACH;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,gBAAgB,OAAsB,aAA6B;AACjE,UAAM,aAAa,cAAc;AACjC,UAAM,QAAkB,CAAC;AACzB,QAAI,YAAY;AAEhB,eAAW,QAAQ,OAAO;AACxB,YAAM,QAAQ,QAAQ,KAAK,IAAI,KAAK,KAAK,EAAE,YAAO,KAAK,KAAK;AAAA,EAAM,KAAK,OAAO;AAC9E,UAAI,YAAY,MAAM,SAAS,cAAc,MAAM,SAAS,EAAG;AAC/D,YAAM,KAAK,KAAK;AAChB,mBAAa,MAAM;AAAA,IACrB;AAEA,WAAO,MAAM,KAAK,MAAM;AAAA,EAC1B;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,oBAAoB,MAA6B;AAC/C,UAAM,cAAcC,MAAK,KAAK,KAAK,UAAU,UAAU,WAAW,IAAI,KAAK;AAC3E,QAAI;AACJ,QAAI;AACF,gBAAUC,IAAG,aAAa,aAAa,OAAO;AAAA,IAChD,QAAQ;AACN,aAAO;AAAA,IACT;AAEA,WAAO,iBAAiB,OAAO,EAAE;AAAA,EACnC;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,aACE,MACA,MACA,SACA,OACA,gBACA,gBACA,YACM;AACN,UAAM,YAAYD,MAAK,KAAK,KAAK,UAAU,QAAQ;AACnD,IAAAC,IAAG,UAAU,WAAW,EAAE,WAAW,KAAK,CAAC;AAE3C,UAAM,cAAuC;AAAA,MAC3C,MAAM;AAAA,MACN;AAAA,MACA,YAAW,oBAAI,KAAK,GAAE,YAAY;AAAA,MAClC,UAAU;AAAA,MACV,iBAAiB;AAAA,MACjB;AAAA,IACF;AACA,QAAI,kBAAkB,eAAe,SAAS,EAAG,aAAY,kBAAkB;AAC/E,QAAI,eAAe,OAAW,aAAY,cAAc;AAExD,UAAM,SAAS,YAAAC,QAAK,UAAU,aAAa;AAAA,MACzC,mBAAmB;AAAA,MACnB,gBAAgB;AAAA,IAClB,CAAC,EAAE,KAAK;AACR,UAAM,OAAO;AAAA,EAAQ,MAAM;AAAA;AAAA;AAAA,EAAY,IAAI;AAAA;AAE3C,UAAM,WAAWF,MAAK,KAAK,WAAW,WAAW,IAAI,KAAK;AAC1D,UAAM,UAAU,GAAG,QAAQ;AAC3B,IAAAC,IAAG,cAAc,SAAS,MAAM,OAAO;AACvC,IAAAA,IAAG,WAAW,SAAS,QAAQ;AAAA,EACjC;AAAA;AAAA;AAAA;AAAA,EAKA,YAAY,QAAiC;AAC3C,UAAM,YAAYD,MAAK,KAAK,KAAK,UAAU,UAAU,aAAa;AAClE,sBAAkB,WAAW,MAA4C;AACzE,SAAK,0BAA0B,OAAO;AAAA,EACxC
;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,wBAAuC;AACrC,QAAI,KAAK,4BAA4B,OAAW,QAAO,KAAK;AAE5D,UAAM,YAAYA,MAAK,KAAK,KAAK,UAAU,UAAU,aAAa;AAClE,SAAK,0BAA0B,kBAAkB,SAAS;AAC1D,WAAO,KAAK;AAAA,EACd;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,MAAM,SAAS,MAA8D;AAC3E,QAAI,KAAK,iBAAiB;AACxB,WAAK,IAAI,SAAS,2CAAsC;AACxD,aAAO;AAAA,IACT;AACA,SAAK,kBAAkB;AAEvB,QAAI;AAKF,UAAI,KAAK,IAAI,cAAc;AACzB,cAAM,EAAE,gBAAgB,eAAe,cAAc,WAAW,IAAI,KAAK,OAAO,OAAO;AACvF,aAAK,IAAI,SAAS,0BAA0B,EAAE,eAAe,WAAW,CAAC;AACzE,cAAM,KAAK,IAAI,aAAa,eAAe,UAAU;AAAA,MACvD;AAGA,iBAAW,QAAQ,KAAK,cAAc;AACpC,YAAI;AACF,gBAAM,KAAK,GAAG;AAAA,QAChB,SAAS,KAAK;AACZ,eAAK,IAAI,QAAQ,kBAAkB,KAAK,IAAI,YAAY,EAAE,OAAQ,IAAc,QAAQ,CAAC;AAAA,QAC3F;AAAA,MACF;AAEA,aAAO,MAAM,KAAK,iBAAiB,IAAI;AAAA,IACzC,UAAE;AACA,WAAK,kBAAkB;AAAA,IACzB;AAAA,EACF;AAAA,EAEA,MAAc,iBAAiB,MAA8D;AAE3F,UAAM,YAAY,KAAK,IAAI;AAC3B,UAAM,gBAAgB,MAAM,iBAAiB;AAC7C,UAAM,gBAAgB,gBAAgB,OAAO,KAAK,sBAAsB;AACxE,UAAM,YAAY,KAAK,kBAAkB,aAAa;AAEtD,SAAK,IAAI,SAAS,yBAAyB,EAAE,eAAe,iBAAiB,kBAAkB,gBAAgB,UAAU,OAAO,CAAC;AACjI,QAAI,UAAU,WAAW,GAAG;AAC1B,WAAK,IAAI,SAAS,0CAAqC;AACvD,aAAO;AAAA,IACT;AAEA,SAAK,IAAI,QAAQ,yBAAyB,EAAE,gBAAgB,UAAU,QAAQ,cAAc,CAAC;AAC7F,UAAM,UAAU,OAAO,WAAW;AAClC,UAAM,cAAc,KAAK,iBAAiB;AAC1C,UAAM,gBAAgB,MAAM,QACxB,YAAY,OAAO,CAAC,MAAM,KAAK,MAAO,SAAS,CAAC,CAAC,IACjD;AACJ,SAAK,IAAI,SAAS,oBAAoB,cAAc,KAAK,IAAI,CAAC,GAAG;AACjE,UAAM,iBAA2B,CAAC;AAClC,QAAI,kBAAkB;AACtB,QAAI,QAAQ;AAGZ,UAAM,YAAkE;AAAA,MACtE,SAAS;AAAA,MACT,OAAO;AAAA,MACP,MAAM;AAAA,MACN,UAAU;AAAA,MACV,eAAe;AAAA,IACjB;AACA,UAAM,iBAAiD;AAAA,MACrD,UAAU,CAAC;AAAA,MACX,QAAQ,CAAC;AAAA,MACT,OAAO,CAAC;AAAA,MACR,WAAW,CAAC;AAAA,MACZ,MAAM,CAAC;AAAA,IACT;AACA,eAAW,QAAQ,WAAW;AAC5B,YAAM,MAAM,UAAU,KAAK,IAAI;AAC/B,UAAI,KAAK;AACP,uBAAe,GAAG,EAAE,KAAK,KAAK,EAAE;AAAA,MAClC;AAAA,IACF;AAKA,UAAM,kBAAiB,oBAAI,KAAK,GAAE,YAAY;AAE9C,UAAM,eAAe,WAAW,eAAe;AAC/C,UAAM,kBAAkB,UAAU,IAAI,CAAC,SAAS,KAAK,EAAE;AAEvD,eAAW,QAAQ,eAAe;AAChC,YAAM,aAAa,WAAW,UAAU,IAAI,EAAE;AAC9C,YAAM,kBAAkB,MAAM,aAAa,OAAO,KAAK,oBAAoB,IAAI;AAI/E,YAAM,gBAAgB,KAAK,OAAO,OAAO,aAAa;AACtD,YAAM,qBAAqB,eAAe,YAAY;AACtD,YAAM,mBAAmB,eAAe,UAAU;AAClD,YAAM,wBAAwB,kBAC1B,eAAe,eAAe,IAAI,mCAClC;AACJ,YAAM,kBAAkB,KAAK,MAAM,gBAAgB,qBAAqB;AACxE,YAAM,kBAAkB,kBAAkB,OAAO,qBAAqB,mBAAmB;AAEzF,UAAI,mBAAmB,EAAG;AAE1B,YAAM,qBAAqB,KAAK,gBAAgB,WAAW,eAAe;AAG1E,YAAM,cAAc,CAAC,UAAU;AAE/B,UAAI,iBAAiB;AACnB,oBAAY,KAAK,IAAI,yBAAyB,IAAI,eAAe;AAAA,MACnE;AAEA,kBAAY,KAAK,IAAI,oBAAoB,IAAI,kBAAkB;AAC/D,kBAAY;AAAA,QACV;AAAA,QACA;AAAA,QACA;AAAA,MACF;AAEA,YAAM,aAAa,YAAY,KAAK,IAAI;AACxC,YAAM,eAAe,eAAe,eAAe,UAAU;AAC7D,WAAK,IAAI,SAAS,QAAQ,IAAI,yBAAyB,EAAE,cAAc,WAAW,MAAM,gBAAgB,CAAC;AAEzG,UAAI;AACF,cAAM,YAAY,KAAK,IAAI;AAC3B,cAAM,eAAe,KAAK,OAAO,OAAO;AACxC,cAAMG,QAA0B;AAAA,UAC9B,WAAW;AAAA,UACX,WAAW;AAAA,UACX,eAAe;AAAA,UACf,WAAW;AAAA,UACX;AAAA,UACA,WAAW,aAAa,cAAc;AAAA,QACxC;AACA,cAAM,WAAW,MAAM,KAAK,IAAI,UAAU,YAAYA,KAAI;AAC1D,cAAM,eAAe,KAAK,IAAI,IAAI;AAGlC,cAAM,cAAc,qBAAqB,SAAS,IAAI;AACtD,gBAAQ,SAAS;AACjB,cAAM,iBAAiB,eAAe,WAAW;AACjD,2BAAmB,eAAe;AAElC,aAAK,IAAI,QAAQ,QAAQ,IAAI,eAAe,EAAE,YAAY,cAAc,gBAAgB,OAAO,SAAS,MAAM,CAAC;AAC/G,aAAK,aAAa,MAAM,aAAa,SAAS,SAAS,OAAO,UAAU,QAAQ,iBAAiB,eAAe,cAAc;AAC9H,uBAAe,KAAK,IAAI;AAAA,MAC1B,SAAS,KAAK;AACZ,aAAK,IAAI,QAAQ,QAAQ,IAAI,YAAY,EAAE,OAAQ,IAAc,QAAQ,CAAC;AAAA,MAC5E;AAAA,IACF;AAGA,QAAI,eAAe,SAAS,GAAG;AAC7B,YAAM,YAAYH,MAAK,KAAK,KAAK,UAAU,QAAQ;AACnD,iBAAW,QAAQ,gBAAgB;AACjC,cAAM,cAAcA,MAAK,KAAK,WAAW,WAAW,IAAI,KAAK;AAC7D,YAAI;AACF,gBAAM,UAAUC,IAAG,aAAa,aAAa,OAAO;AACpD,gBAAM,UAAU,QAAQ,MAAM,uBAAuB;AACrD,cAAI,SAAS;AACX,kBAAM,SAAS,YAAAC,QAAK,MAAM,QAAQ,CAAC,CAAC;AACpC,mBAAO,kBAAkB;AACzB,kBAAM,SAAS,YAAAA,QAAK,UAAU,QAAQ,EAAE,mBAAmB,gBAAgB,gBAAgB,QAAQ,CAA
C,EAAE,KAAK;AAC3G,kBAAM,cAAc,QAAQ,MAAM,QAAQ,CAAC,EAAE,MAAM;AACnD,kBAAM,UAAU,GAAG,WAAW;AAC9B,YAAAD,IAAG,cAAc,SAAS;AAAA,EAAQ,MAAM;AAAA,KAAQ,WAAW,IAAI,OAAO;AACtE,YAAAA,IAAG,WAAW,SAAS,WAAW;AAAA,UACpC;AAAA,QACF,QAAQ;AAAA,QAER;AAAA,MACF;AAAA,IACF;AAEA,UAAM,SAA4B;AAAA,MAChC;AAAA,MACA,WAAW;AAAA,MACX,WAAW;AAAA,MACX;AAAA,MACA;AAAA,MACA,YAAY,KAAK,IAAI,IAAI;AAAA,MACzB,YAAY;AAAA,IACd;AAEA,SAAK,YAAY,MAAM;AAGvB,eAAW,QAAQ,KAAK,eAAe;AACrC,UAAI;AACF,cAAM,KAAK,GAAG,MAAM;AAAA,MACtB,SAAS,KAAK;AACZ,aAAK,IAAI,QAAQ,mBAAmB,KAAK,IAAI,YAAY,EAAE,OAAQ,IAAc,QAAQ,CAAC;AAAA,MAC5F;AAAA,IACF;AAEA,WAAO;AAAA,EACT;AACF;AAOA,IAAM,gBAAgB;AAEf,IAAM,aAAN,MAAiB;AAAA,EACtB,QAAyB;AAAA,EACzB;AAAA,EAEQ,eAAe;AAAA,EACf;AAAA,EACA,QAA8C;AAAA,EAC9C;AAAA,EACA;AAAA,EACA;AAAA,EAER,YAAY,QAA4C;AACtD,SAAK,mBAAmB,OAAO,kBAAkB;AACjD,SAAK,sBAAsB,OAAO,mBAAmB,IAAI,CAAC,MAAM,IAAI,aAAa;AACjF,SAAK,sBAAsB,OAAO,qBAAqB;AACvD,SAAK,oBAAoB,KAAK;AAC9B,SAAK,oBAAoB,KAAK,IAAI;AAAA,EACpC;AAAA;AAAA,EAGA,mBAAyB;AACvB,SAAK,QAAQ;AACb,SAAK,eAAe;AACpB,SAAK,oBAAoB,KAAK;AAC9B,SAAK,oBAAoB,KAAK,IAAI;AAAA,EACpC;AAAA;AAAA,EAGA,eAAqB;AACnB,QAAI,KAAK,UAAU,UAAW;AAE9B,SAAK,QAAQ;AACb,QAAI,KAAK,eAAe,KAAK,oBAAoB,QAAQ;AACvD,WAAK,oBAAoB,KAAK,oBAAoB,KAAK,YAAY;AACnE,WAAK;AAAA,IACP;AAEA,SAAK,cAAc;AAAA,EACrB;AAAA;AAAA,EAGA,gBAAsB;AACpB,UAAM,UAAU,KAAK,IAAI,IAAI,KAAK;AAClC,QAAI,WAAW,KAAK,qBAAqB;AACvC,WAAK,QAAQ;AAAA,IAEf;AAAA,EACF;AAAA;AAAA,EAGA,WAAiB;AACf,SAAK,iBAAiB;AAGtB,QAAI,KAAK,UAAU;AACjB,WAAK,WAAW;AAAA,IAClB;AAAA,EACF;AAAA;AAAA,EAGA,kBAAkB,MAAoB;AACpC,SAAK,oBAAoB;AAAA,EAC3B;AAAA;AAAA,EAGA,MAAM,UAAqC;AACzC,SAAK,WAAW;AAChB,SAAK,WAAW;AAAA,EAClB;AAAA;AAAA,EAGA,OAAa;AACX,QAAI,KAAK,OAAO;AACd,mBAAa,KAAK,KAAK;AACvB,WAAK,QAAQ;AAAA,IACf;AAAA,EACF;AAAA,EAEQ,WAAyC;AAAA,EAEzC,aAAmB;AACzB,SAAK,KAAK;AACV,QAAI,CAAC,KAAK,SAAU;AACpB,UAAM,KAAK,KAAK;AAChB,UAAM,WAAW,MAAY;AAC3B,WAAK,QAAQ,WAAW,YAAY;AAClC,cAAM,GAAG;AACT,iBAAS;AAAA,MACX,GAAG,KAAK,iBAAiB;AACzB,WAAK,MAAM,MAAM;AAAA,IACnB;AACA,aAAS;AAAA,EACX;AACF;;;AEviBA,IAAM,oCAAoC;AAGnC,IAAM,8BAA8B;AA4B3C,IAAM,+BAA+B,iBAAE,OAAO;AAAA,EAC5C,WAAW,iBAAE,MAAM,iBAAE,OAAO;AAAA,IAC1B,aAAa,iBAAE,OAAO;AAAA,IACtB,eAAe,iBAAE,KAAK,cAAc;AAAA,IACpC,OAAO,iBAAE,OAAO;AAAA,IAChB,MAAM,iBAAE,MAAM,iBAAE,OAAO,CAAC,EAAE,QAAQ,CAAC,CAAC;AAAA,EACtC,CAAC,CAAC,EAAE,QAAQ,CAAC,CAAC;AAChB,CAAC;AAEM,IAAM,kBAAN,MAAsB;AAAA,EAM3B,YAAoB,SAA8B,gBAAwB,MAAM,eAAuC;AAAnG;AAA8B;AAChD,SAAK,sBAAsB,eAAe,yBAAyB;AACnE,SAAK,mBAAmB,eAAe,sBAAsB;AAC7D,SAAK,iBAAiB,eAAe,oBAAoB;AACzD,SAAK,0BAA0B,eAAe,6BAA6B;AAAA,EAC7E;AAAA,EAVQ;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EASA,mBAAmB,MAAc,WAA2B;AAClE,UAAM,YAAY,KAAK,gBAAgB;AACvC,UAAM,aAAa,eAAe,IAAI;AACtC,QAAI,cAAc,UAAW,QAAO;AACpC,UAAM,aAAa,YAAY;AAC/B,WAAO,KAAK,MAAM,GAAG,UAAU;AAAA,EACjC;AAAA,EAEA,MAAM,QAAQ,sBAA8B,WAA6C;AACvF,QAAI,CAAC,qBAAqB,KAAK,GAAG;AAChC,aAAO,EAAE,SAAS,IAAI,cAAc,CAAC,GAAG,UAAU,MAAM;AAAA,IAC1D;AAGA,UAAM,kBAAkB,KAAK,gBAAgB,oCAAoC,KAAK;AACtF,UAAM,iBAAiB,kBAAkB;AACzC,QAAI,YAAY;AAChB,QAAI,qBAAqB,SAAS,gBAAgB;AAChD,kBAAY,qBAAqB,MAAM,CAAC,cAAc;AAEtD,YAAM,eAAe,UAAU,QAAQ,mBAAmB;AAC1D,UAAI,eAAe,GAAG;AACpB,oBAAY,UAAU,MAAM,YAAY;AAAA,MAC1C;AAAA,IACF;AAEA,UAAM,SAAS,sBAAsB,WAAW,WAAW,KAAK,mBAAmB;AAEnF,QAAI;AACF,YAAM,WAAW,MAAM,KAAK,QAAQ,UAAU,QAAQ;AAAA,QACpD,WAAW,KAAK;AAAA,QAChB,WAAW;AAAA,MACb,CAAC;AACD,YAAM,SAAS,YAAY,SAAS,IAAI;AAIxC,aAAO;AAAA,QACL,SAAS,OAAO;AAAA,QAChB,cAAc,OAAO,gBAAgB,CAAC;AAAA,QACtC,UAAU;AAAA,MACZ;AAAA,IACF,SAAS,OAAO;AACd,aAAO;AAAA,QACL,SAAS,qCAAqC,SAAS,YAAa,MAAgB,OAAO;AAAA,QAC3F,cAAc,CAAC;AAAA,QACf,UAAU;AAAA,MACZ;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAM,iBACJ,sBACA,WACA,MAC6C;AAC7C,UAAM,mBAAmB,KAAK,mBAAmB,sBAAsB,KAAK,gBAAgB;AAC5F,UAAM,gBAAgB,mBAAmB,WAAW,QAAQ,WAAW
,kBAAkB,KAAK,gBAAgB;AAE9G,QAAI;AACJ,QAAI;AACF,YAAM,WAAW,MAAM,KAAK,QAAQ,UAAU,eAAe,EAAE,WAAW,KAAK,kBAAkB,WAAW,mBAAmB,CAAC;AAChI,oBAAc,qBAAqB,SAAS,IAAI;AAAA,IAClD,SAAS,OAAO;AACd,oBAAc,WAAW,SAAS,WAAM,2BAA2B,KAAM,MAAgB,OAAO;AAAA,IAClG;AAEA,UAAM,cAAc,iBAAiB,aAAa,SAAS;AAC3D,QAAI;AACJ,QAAI;AACF,YAAM,WAAW,MAAM,KAAK,QAAQ,UAAU,aAAa,EAAE,WAAW,KAAK,gBAAgB,WAAW,mBAAmB,CAAC;AAC5H,cAAQ,qBAAqB,SAAS,IAAI,EAAE,KAAK;AAAA,IACnD,QAAQ;AACN,cAAQ,WAAW,SAAS;AAAA,IAC9B;AAEA,WAAO,EAAE,SAAS,aAAa,MAAM;AAAA,EACvC;AAAA,EAEA,MAAM,kBACJ,YACA,WAC+B;AAC/B,QAAI,WAAW,WAAW,EAAG,QAAO,CAAC;AAErC,UAAM,SAAS,KAAK,6BAA6B,YAAY,SAAS;AACtE,UAAM,WAAW,MAAM,KAAK,QAAQ,UAAU,QAAQ,EAAE,WAAW,KAAK,yBAAyB,WAAW,mBAAmB,CAAC;AAChI,UAAM,MAAM,YAAY,SAAS,IAAI;AACrC,UAAM,SAAS,6BAA6B,MAAM,GAAG;AACrD,WAAO,OAAO;AAAA,EAChB;AAAA,EAEQ,6BACN,YACA,WACQ;AACR,WAAO,0BAA0B,WAAW,YAAY,KAAK,uBAAuB;AAAA,EACtF;AAEF;;;ACtJO,SAAS,sBACd,cACA,WACA,QACA,OACA,UACe;AACf,QAAM,UAAyB,CAAC;AAEhC,aAAW,OAAO,cAAc;AAC9B,UAAM,QAAQ,GAAG,IAAI,IAAI,IAAI,UAAU,MAAM,EAAE,CAAC,IAAI,KAAK,IAAI,CAAC;AAC9D,UAAM,OAAO,gBAAgB;AAAA,MAC3B,OAAO,IAAI;AAAA,MACX,iBAAiB,IAAI;AAAA,MACrB,SAAS,IAAI;AAAA,MACb;AAAA,MACA,YAAY,IAAI;AAAA,MAChB,KAAK,IAAI;AAAA,MACT,WAAW,IAAI;AAAA,MACf,uBAAuB,IAAI;AAAA,MAC3B,QAAQ,IAAI;AAAA,MACZ,YAAY,IAAI;AAAA,MAChB,MAAM,IAAI;AAAA,IACZ,CAAC;AACD,UAAM,eAAe,OAAO,WAAW;AAAA,MACrC,IAAI;AAAA,MACJ,kBAAkB,IAAI;AAAA,MACtB,SAAS,cAAc,SAAS;AAAA,MAChC,MAAM,IAAI;AAAA,MACV,SAAS;AAAA,IACX,CAAC;AACD,cAAU,OAAO,UAAU,YAAY;AACvC,YAAQ,KAAK,EAAE,IAAI,OAAO,MAAM,cAAc,aAAa,IAAI,CAAC;AAAA,EAClE;AAEA,SAAO;AACT;;;ACjDA,OAAOG,SAAQ;AACf,OAAOC,WAAU;;;ACOV,IAAM,wBAAwB;AAE9B,IAAM,8BAA8B;AAY3C,eAAsB,aACpB,OACA,IACA,SAIyB;AACzB,QAAM,EAAE,aAAa,WAAW,IAAI;AACpC,MAAI,YAAY;AAChB,MAAI,SAAS;AACb,QAAM,UAAqC,CAAC;AAE5C,WAAS,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK,aAAa;AAClD,UAAM,QAAQ,MAAM,MAAM,GAAG,IAAI,WAAW;AAC5C,UAAM,UAAU,MAAM,QAAQ,WAAW,MAAM,IAAI,EAAE,CAAC;AAEtD,eAAW,UAAU,SAAS;AAC5B,UAAI,OAAO,WAAW,aAAa;AACjC;AACA,gBAAQ,KAAK,EAAE,QAAQ,aAAa,OAAO,OAAO,MAAM,CAAC;AAAA,MAC3D,OAAO;AACL;AACA,gBAAQ,KAAK,EAAE,QAAQ,YAAY,QAAS,OAAO,QAAkB,WAAW,OAAO,OAAO,MAAM,EAAE,CAAC;AAAA,MACzG;AAAA,IACF;AAEA,iBAAa,YAAY,QAAQ,MAAM,MAAM;AAAA,EAC/C;AAEA,SAAO,EAAE,WAAW,QAAQ,QAAQ;AACtC;;;ADnCA,yBAAmB;AAkCnB,eAAsB,WACpB,KACA,mBACA,YACwB;AACxB,QAAM,EAAE,OAAO,SAAS,IAAI;AAG5B,UAAQ,KAAK;AACb,QAAM,WAAW,aAAa,OAAO,QAAQ;AAG7C,MAAI,CAAC,IAAI,aAAa;AACpB,WAAO,EAAE,UAAU,eAAe,GAAG,aAAa,GAAG,cAAc,EAAE;AAAA,EACvE;AAEA,QAAM,WAAW,MAAM,MAAM,CAAC,CAAC;AAE/B,QAAM,cAAc,SAAS,OAAO,CAAC,MAAM;AACzC,UAAM,SAAU,EAAE,aAAyC;AAC3D,WAAO,WAAW,gBAAgB,WAAW;AAAA,EAC/C,CAAC;AACD,QAAM,eAAe,SAAS,SAAS,YAAY;AAEnD,QAAM,MAAM,IAAI;AAChB,QAAM,SAAS,MAAM;AAAA,IACnB;AAAA,IACA,OAAO,SAAS;AACd,YAAM,OAAO,GAAG,KAAK,KAAK;AAAA,EAAK,KAAK,OAAO,GAAG,MAAM,GAAG,qBAAqB;AAC5E,YAAM,MAAM,MAAM,kBAAkB,mBAAmB,IAAI;AAC3D,UAAI,OAAO,KAAK,IAAI,IAAI,WAAW;AAAA,QACjC,MAAM,KAAK;AAAA,QACX,YAAa,KAAK,aAAyC,WAAqB;AAAA,MAClF,CAAC;AAAA,IACH;AAAA,IACA;AAAA,MACE,aAAa;AAAA,MACb;AAAA,IACF;AAAA,EACF;AAGA,MAAI,IAAI,UAAU;AAChB,eAAW,QAAQ,aAAa;AAC9B,UAAI,SAAS,SAAS,KAAK,IAAI,KAAK,MAAM,KAAK,IAAI;AACnD,UAAI,SAAS,QAAQ,KAAK,IAAI,KAAK,MAAM,WAAW,WAAW;AAC/D,UAAI,SAAS,QAAQ,KAAK,IAAI,KAAK,MAAM,aAAa,WAAW;AAAA,IACnE;AAAA,EACF;AAEA,SAAO;AAAA,IACL;AAAA,IACA,eAAe,OAAO;AAAA,IACtB,aAAa,OAAO;AAAA,IACpB;AAAA,EACF;AACF;AAiBA,eAAsB,UACpB,KACA,aACA,SACmC;AACnC,QAAM,EAAE,QAAQ,UAAU,MAAM,IAAI;AACpC,QAAM,MAAmB,IAAI,MACzB,CAAC,OAAO,SAAS,SAAS,IAAI,IAAK,OAAO,SAAS,IAAI,IACvD,MAAM;AAAA,EAAC;AAIX,MAAI,IAAI,YAAY,SAAS,MAAM;AACjC,UAAM,QAAQ,IAAI,SAAS,UAAU,EAAE,OAAO,UAAU,QAAQ,YAAY,CAAC;AAC7E,QAAI,QAAQ;AACZ,eAAW,QAAQ,MAAM,OAAO;AAC9B,UAAI,SAAS,QAAQ,KAAK,IAAI,KAAK,WAAW,UAAU,SAAS;AACjE;AAAA,IACF;AACA,Q
AAI,QAAQ,SAAS,KAAK,kDAAkD;AAG5E,WAAO;AAAA,EACT;AAEA,QAAM,SAAS,IAAI,aAAa;AAAA,IAC9B;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,CAAC;AAED,QAAM,OAA2B,CAAC;AAClC,QAAM,cAAc,SAAS,QAAQ,SAAS,SAAS;AAEvD,MAAI,aAAa;AACf,SAAK,gBAAgB;AACrB,SAAK,aAAa;AAAA,EACpB;AACA,MAAI,SAAS,SAAS,QAAW;AAC/B,UAAM,WAAW,OAAO,iBAAiB;AACzC,QAAI,CAAC,SAAS,SAAS,QAAQ,IAAI,GAAG;AACpC,YAAM,IAAI,MAAM,QAAQ,QAAQ,IAAI,sCAAsC,SAAS,KAAK,IAAI,CAAC,GAAG;AAAA,IAClG;AACA,SAAK,QAAQ,CAAC,QAAQ,IAAI;AAAA,EAC5B;AAEA,SAAO,OAAO,SAAS,IAAI;AAC7B;AAkBA,IAAM,gCAAgC;AAiBtC,SAAS,iBAAiB,GAAa,GAAqB;AAC1D,MAAI,MAAM,GAAG,QAAQ,GAAG,QAAQ;AAChC,WAAS,IAAI,GAAG,IAAI,EAAE,QAAQ,KAAK;AACjC,WAAO,EAAE,CAAC,IAAI,EAAE,CAAC;AACjB,aAAS,EAAE,CAAC,IAAI,EAAE,CAAC;AACnB,aAAS,EAAE,CAAC,IAAI,EAAE,CAAC;AAAA,EACrB;AACA,SAAO,OAAO,KAAK,KAAK,KAAK,IAAI,KAAK,KAAK,KAAK;AAClD;AAEA,SAAS,eAAe,QAAwC;AAC9D,MAAI,OAAO,WAAW,EAAG,QAAO,CAAC;AACjC,QAAM,MAAM,OAAO,CAAC,EAAE,UAAU;AAChC,QAAM,WAAW,IAAI,MAAc,GAAG,EAAE,KAAK,CAAC;AAC9C,aAAW,KAAK,QAAQ;AACtB,aAAS,IAAI,GAAG,IAAI,KAAK,KAAK;AAC5B,eAAS,CAAC,KAAK,EAAE,UAAU,CAAC;AAAA,IAC9B;AAAA,EACF;AACA,WAAS,IAAI,GAAG,IAAI,KAAK,KAAK;AAC5B,aAAS,CAAC,KAAK,OAAO;AAAA,EACxB;AACA,SAAO;AACT;AAEA,SAAS,cAAc,QAAyC;AAC9D,QAAM,WAAsB,CAAC;AAC7B,aAAW,SAAS,QAAQ;AAC1B,QAAI,cAA8B;AAClC,QAAI,iBAAiB;AACrB,eAAW,WAAW,UAAU;AAC9B,YAAM,MAAM,iBAAiB,MAAM,WAAW,QAAQ,QAAQ;AAC9D,UAAI,MAAM,gBAAgB;AACxB,yBAAiB;AACjB,sBAAc;AAAA,MAChB;AAAA,IACF;AACA,QAAI,gBAAgB,QAAQ,kBAAkB,6BAA6B;AACzE,kBAAY,OAAO,KAAK,KAAK;AAC7B,kBAAY,WAAW,eAAe,YAAY,MAAM;AAAA,IAC1D,OAAO;AACL,eAAS,KAAK,EAAE,QAAQ,CAAC,KAAK,GAAG,UAAU,CAAC,GAAG,MAAM,SAAS,EAAE,CAAC;AAAA,IACnE;AAAA,EACF;AACA,SAAO;AACT;AAkBA,eAAsB,YACpB,MACA,QACyB;AACzB,QAAM,EAAE,OAAO,aAAa,aAAa,mBAAmB,SAAS,IAAI;AACzE,QAAM,MAAM,KAAK,QAAQ,MAAM;AAAA,EAAC;AAGhC,QAAM,YAAY,MAAM,MAAM,EAAE,MAAM,QAAQ,CAAC;AAC/C,QAAM,eAAe,UAAU,OAAO,CAAC,MAAM,cAAc,EAAE,WAAW,CAAC;AAGzE,MAAI,KAAK,YAAY,CAAC,QAAQ;AAC5B,QAAI,WAAW;AACf,eAAW,SAAS,cAAc;AAChC,WAAK,SAAS,SAAS,MAAM,IAAI,SAAS,MAAM,IAAI;AACpD,WAAK,SAAS,QAAQ,MAAM,IAAI,SAAS,WAAW,WAAW;AAC/D,WAAK,SAAS,QAAQ,MAAM,IAAI,SAAS,iBAAiB,SAAS;AACnE;AAAA,IACF;AACA,QAAI,QAAQ,YAAY,QAAQ,sCAAsC;AACtE,WAAO,EAAE,SAAS,aAAa,QAAQ,mBAAmB,GAAG,YAAY,GAAG,UAAU,KAAK;AAAA,EAC7F;AAEA,MAAI,aAAa,WAAW,GAAG;AAC7B,WAAO,EAAE,SAAS,GAAG,mBAAmB,GAAG,YAAY,EAAE;AAAA,EAC3D;AAGA,QAAM,uBAA6C,CAAC;AACpD,MAAI,gBAAgB;AAEpB,WAAS,IAAI,GAAG,IAAI,aAAa,QAAQ,KAAK,+BAA+B;AAC3E,UAAM,QAAQ,aAAa,MAAM,GAAG,IAAI,6BAA6B;AACrE,UAAM,UAAU,MAAM,QAAQ;AAAA,MAC5B,MAAM,IAAI,OAAO,UAAU;AACzB,cAAM,OAAO,MAAM,QAAQ,MAAM,GAAG,qBAAqB;AACzD,cAAM,SAAS,MAAM,kBAAkB,mBAAmB,IAAI;AAC9D,eAAO,EAAE,OAAO,WAAW,OAAO,UAAU;AAAA,MAC9C,CAAC;AAAA,IACH;AAEA,eAAW,UAAU,SAAS;AAC5B,UAAI,OAAO,WAAW,aAAa;AACjC,cAAM,EAAE,OAAO,UAAU,IAAI,OAAO;AACpC,6BAAqB,KAAK;AAAA,UACxB,IAAI,MAAM;AAAA,UACV,MAAM,MAAM;AAAA,UACZ,OAAO,MAAM;AAAA,UACb,SAAS,MAAM;AAAA,UACf,SAAS,MAAM;AAAA,UACf,aAAa,MAAM;AAAA,UACnB;AAAA,QACF,CAAC;AAAA,MACH,OAAO;AACL;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAEA,MAAI,gBAAgB,GAAG;AACrB,QAAI,QAAQ,GAAG,aAAa,kDAAkD;AAAA,EAChF;AAGA,QAAM,SAAS,oBAAI,IAAkC;AACrD,aAAW,SAAS,sBAAsB;AACxC,UAAM,UAAW,MAAM,YAAY,kBAAkB,KAA4B;AACjF,QAAI,CAAC,OAAO,IAAI,OAAO,EAAG,QAAO,IAAI,SAAS,CAAC,CAAC;AAChD,WAAO,IAAI,OAAO,EAAG,KAAK,KAAK;AAAA,EACjC;AAGA,QAAM,WAAW,WAAW,cAAc;AAC1C,MAAI,gBAAgB;AACpB,MAAI,kBAAkB;AAEtB,aAAW,CAAC,SAAS,UAAU,KAAK,QAAQ;AAC1C,UAAM,WAAW,cAAc,UAAU;AACzC,UAAM,aAAa,SAAS,OAAO,CAAC,MAAM,EAAE,OAAO,UAAU,CAAC;AAC9D,QAAI,WAAW,WAAW,EAAG;AAE7B,QAAI,QAAQ,SAAS,OAAO,WAAM,WAAW,MAAM,YAAY,WAAW,MAAM,yBAAyB;AACzG,qBAAiB,WAAW;AAE5B,eAAW,WAAW,YAAY;AAChC,YAAM,SAAS,CAAC,GAAG,QAAQ,MAAM,EAAE,KAAK,CAAC,GAAG,MAAM,EAAE,QAAQ,cAAc,EAAE,OAAO,CAAC;AACpF,YAAM,SAAS,OAAO,OAAO,SAAS,CAAC;A
ACvC,YAAM,aAAa,OAAO,MAAM,GAAG,OAAO,SAAS,CAAC;AAGpD,YAAM,eAAe,oBAAoB,MAAM;AAC/C,YAAM,iBAAiB,qBAAqB,UAAU;AAEtD,YAAM,SAAS,SACZ,QAAQ,iBAAiB,YAAY,EACrC,QAAQ,kBAAkB,cAAc;AAG3C,UAAI;AACJ,UAAI;AACF,cAAM,WAAW,MAAM,YAAY,UAAU,QAAQ;AAAA,UACnD,WAAW;AAAA,UACX,WAAW;AAAA,QACb,CAAC;AACD,uBAAe,qBAAqB,SAAS,IAAI;AAAA,MACnD,SAAS,KAAK;AACZ,YAAI,QAAQ,kCAAkC,OAAO,KAAK,OAAO,GAAG,CAAC,EAAE;AACvE;AAAA,MACF;AAGA,UAAI;AACJ,UAAI;AACF,iBAAS,KAAK,MAAM,YAAY;AAAA,MAClC,QAAQ;AACN,YAAI,QAAQ,+CAA+C,OAAO,EAAE;AACpE;AAAA,MACF;AAEA,YAAM,SAAS,oBAAoB,UAAU,MAAM;AACnD,UAAI,CAAC,OAAO,SAAS;AACnB,YAAI,QAAQ,8CAA8C,OAAO,EAAE;AACnE;AAAA,MACF;AAGA,YAAM,eAAe,IAAI,IAAI,WAAW,IAAI,CAAC,MAAM,CAAC,EAAE,IAAI,CAAC,CAAC,CAAC;AAC7D,YAAM,WAAW,OAAO,KAAK,OAAO,CAAC,OAAO,aAAa,IAAI,EAAE,CAAC;AAChE,UAAI,SAAS,WAAW,EAAG;AAE3B,iBAAW,MAAM,UAAU;AACzB,cAAM,YAAY,aAAa,IAAI,EAAE;AAErC,YAAI,QAAQ;AACV,cAAI,QAAQ,8BAA8B,UAAU,KAAK,KAAK,EAAE,QAAQ,OAAO,KAAK,KAAK,OAAO,EAAE,GAAG;AACrG;AACA;AAAA,QACF;AAEA,cAAM,QAAQ,eAAe,IAAI,OAAO,IAAI,UAAU,MAAM,EAAE,OAAO,aAAa,SAAS,CAAC;AAC5F,YAAI,CAAC,OAAO;AACV,cAAI,QAAQ,sBAAsB,EAAE,kBAAkB;AACtD;AAAA,QACF;AAEA,YAAI,QAAQ,eAAe,UAAU,KAAK,KAAK,EAAE,QAAQ,OAAO,KAAK,KAAK,OAAO,EAAE,GAAG;AACtF;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AAAA,IACL,SAAS,aAAa;AAAA,IACtB,mBAAmB;AAAA,IACnB,YAAY;AAAA,EACd;AACF;AA8BO,SAAS,sBAAsB,MAAc,UAAkB,cAA8B;AAClG,MAAI,UAAU,KAAK,QAAQ,UAAU,KAAK,QAAQ,EAAE;AACpD,QAAM,iBAAiB,QAAQ,YAAY,WAAW,YAAY;AAClE,QAAM,qBAAqB,0BAA0B,KAAK,OAAO;AACjE,MAAI,oBAAoB;AACtB,cAAU,QAAQ,QAAQ,0CAA0C,iBAAiB,IAAI;AAAA,EAC3F,OAAO;AAEL,cAAU,QAAQ,QAAQ,cAAc;AAAA,EAAO,cAAc;AAAA,CAAI;AAAA,EACnE;AACA,SAAO;AACT;AAUA,eAAsB,aACpB,KACA,aACA,mBACA,SACA,YAC0B;AAC1B,QAAM,EAAE,UAAU,QAAQ,MAAM,IAAI;AACpC,QAAM,MAAM,IAAI,QAAQ,MAAM;AAAA,EAAC;AAE/B,QAAM,gBAAgB,SAAS;AAC/B,QAAM,aAAa,SAAS;AAC5B,QAAM,aAAa,SAAS,UAAU;AACtC,QAAM,UAAU,SAAS,aAAa;AAGtC,QAAM,cAAcC,MAAK,KAAK,UAAU,UAAU;AAClD,MAAI,CAACC,IAAG,WAAW,WAAW,GAAG;AAC/B,WAAO,EAAE,eAAe,GAAG,mBAAmB,GAAG,uBAAuB,GAAG,sBAAsB,GAAG,kBAAkB,EAAE;AAAA,EAC1H;AAEA,QAAM,eAAoF,CAAC;AAC3F,aAAW,WAAWA,IAAG,YAAY,WAAW,GAAG;AACjD,QAAI,cAAc,YAAY,WAAY;AAC1C,UAAM,WAAWD,MAAK,KAAK,aAAa,OAAO;AAC/C,QAAI,CAACC,IAAG,SAAS,QAAQ,EAAE,YAAY,EAAG;AAC1C,eAAW,QAAQA,IAAG,YAAY,QAAQ,GAAG;AAC3C,UAAI,CAAC,KAAK,WAAW,UAAU,KAAK,CAAC,KAAK,SAAS,KAAK,EAAG;AAC3D,YAAM,YAAY,KAAK,QAAQ,YAAY,EAAE,EAAE,QAAQ,OAAO,EAAE;AAChE,UAAI,iBAAiB,CAAC,UAAU,SAAS,aAAa,EAAG;AACzD,mBAAa,KAAK,EAAE,cAAcD,MAAK,KAAK,YAAY,SAAS,IAAI,GAAG,WAAW,QAAQ,CAAC;AAAA,IAC9F;AAAA,EACF;AAEA,MAAI,aAAa,WAAW,GAAG;AAC7B,WAAO,EAAE,eAAe,GAAG,mBAAmB,GAAG,uBAAuB,GAAG,sBAAsB,GAAG,kBAAkB,EAAE;AAAA,EAC1H;AAGA,MAAI,IAAI,YAAY,CAAC,SAAS;AAC5B,QAAI,WAAW;AAGf,QAAI,gBAAgB;AACpB,QAAI,YAAY;AACd,sBAAgB,aAAa,OAAO,CAAC,EAAE,aAAa,MAAM;AACxD,cAAM,aAAaC,IAAG,aAAaD,MAAK,KAAK,UAAU,YAAY,GAAG,OAAO;AAC7E,eAAO,WAAW,SAAS,2BAA2B;AAAA,MACxD,CAAC;AAAA,IACH;AAEA,eAAW,EAAE,cAAc,UAAU,KAAK,eAAe;AAEvD,UAAI,SAAS,SAAS,WAAW,WAAW,YAAY;AACxD,UAAI,SAAS,QAAQ,WAAW,WAAW,WAAW,WAAW;AACjE,UAAI,SAAS,QAAQ,WAAW,WAAW,cAAc,SAAS;AAClE;AAAA,IACF;AAEA,QAAI,QAAQ,YAAY,QAAQ,yCAAyC;AAAA,MACvE,SAAS,EAAE,SAAS,eAAe,MAAM,YAAY,QAAQ,WAAW;AAAA,IAC1E,CAAC;AAED,WAAO;AAAA,MACL,eAAe,aAAa;AAAA,MAC5B,mBAAmB;AAAA,MACnB,uBAAuB;AAAA,MACvB,sBAAsB;AAAA,MACtB,kBAAkB;AAAA,MAClB,UAAU;AAAA,IACZ;AAAA,EACF;AAGA,QAAM,eAAe,UAAU,OAAO;AACtC,QAAM,YAAY,eACd,IAAI,gBAAgB,cAAc,OAAO,aAAa,IAAI,gBAAgB,OAAO,OAAO,IACxF;AACJ,QAAM,SAAS,IAAI,YAAY,QAAQ;AAcvC,QAAM,QAAuB,CAAC;AAC9B,aAAW,EAAE,cAAc,UAAU,KAAK,cAAc;AACtD,UAAM,aAAaC,IAAG,aAAaD,MAAK,KAAK,UAAU,YAAY,GAAG,OAAO;AAG7E,UAAM,YAAY,WAAW,SAAS,2BAA2B;AACjE,QAAI,cAAc,CAAC,UAAW;AAE9B,UAAM,EAAE,MAAM,aAAa,SAAS,KAAK,QAAI,mBAAAE,SAAO,UAAU;AAC9D,UAAM,OAAO,cAAc,SAAS;AACpC,UAAM,sBAAsB
,eAAe,MAAM,oBAAoB;AACrE,UAAM,QAAQ,WAAW,QAAQ,OAAO,CAAC;AACzC,UAAM,mBAAmB,WAAW,MAAM,GAAG,QAAQ,CAAC;AAEtD,UAAM,KAAK,EAAE,cAAc,WAAW,MAAM,aAAa,kBAAkB,MAAM,qBAAqB,UAAU,CAAC;AAAA,EACnH;AAEA,MAAI,MAAM,WAAW,GAAG;AACtB,WAAO,EAAE,eAAe,aAAa,QAAQ,mBAAmB,GAAG,uBAAuB,GAAG,sBAAsB,GAAG,kBAAkB,EAAE;AAAA,EAC5I;AAEA,MAAI,QAAQ,gBAAgB,MAAM,MAAM,eAAe,EAAE,SAAS,EAAE,SAAS,eAAe,MAAM,YAAY,QAAQ,YAAY,WAAW,QAAQ,EAAE,CAAC;AAIxJ,MAAI,mBAAmB;AACvB,QAAM,eAAgC,CAAC;AACvC,QAAM,YAAY,CAAC,IAAY,MAAc,aAAqC;AAChF,QAAI,CAAC,IAAI,YAAa;AACtB;AACA,UAAM,MAAM,IAAI;AAChB,UAAM,IAAI,kBAAkB,mBAAmB,IAAI,EAChD,KAAK,CAAC,QAAQ;AAAE,UAAI,OAAO,IAAI,IAAI,WAAW,QAAQ;AAAA,IAAG,CAAC,EAC1D,MAAM,CAAC,QAAQ;AAAE,UAAI,QAAQ,wBAAwB,EAAE,IAAI,EAAE,OAAQ,IAAc,QAAQ,CAAC;AAAA,IAAG,CAAC;AACnG,iBAAa,KAAK,CAAC;AAAA,EACrB;AAGA,MAAI,oBAAoB;AAExB,QAAM,mBAAmB,MAAM;AAAA,IAC7B;AAAA,IACA,OAAO,SAAS;AACd,UAAI,MAAM;AAEV,UAAI,aAAa,KAAK,oBAAoB,KAAK,GAAG;AAChD,cAAM,SAAS,MAAM,UAAU,QAAQ,KAAK,qBAAqB,KAAK,IAAI;AAC1E,YAAI,OAAO,aAAa,SAAS,GAAG;AAClC,gCAAsB,OAAO,cAAc,KAAK,MAAM,QAAQ,OAAO,QAAQ;AAC7E,gBAAM,OAAO,aAAa;AAC1B,qBAAW,KAAK,OAAO,cAAc;AACnC;AAAA,cACE,GAAG,EAAE,IAAI,IAAI,KAAK,KAAK,MAAM,EAAE,CAAC,IAAI,KAAK,IAAI,CAAC;AAAA,cAC9C,GAAG,EAAE,KAAK;AAAA,EAAK,EAAE,OAAO,GAAG,MAAM,GAAG,qBAAqB;AAAA,cACzD,EAAE,MAAM,SAAS,YAAY,KAAK,KAAK;AAAA,YACzC;AAAA,UACF;AAAA,QACF;AAAA,MACF;AAEA,gBAAU,OAAO,UAAU,KAAK,YAAY;AAE5C,YAAM,UAAU,GAAG,KAAK,YAAY,SAAS,EAAE;AAAA,EAAK,KAAK,YAAY,WAAW,EAAE,GAAG,MAAM,GAAG,qBAAqB;AACnH,UAAI,QAAQ,KAAK,GAAG;AAClB,kBAAU,cAAc,KAAK,IAAI,GAAG,SAAS,EAAE,MAAM,WAAW,YAAY,KAAK,KAAK,CAAC;AAAA,MACzF;AAEA,aAAO;AAAA,IACT;AAAA,IACA;AAAA,MACE,aAAa;AAAA,MACb,YAAY,CAAC,MAAM,UAAU,aAAa,cAAc,MAAM,KAAK;AAAA,IACrE;AAAA,EACF;AAEA,aAAW,KAAK,iBAAiB,SAAS;AACxC,QAAI,EAAE,WAAW,YAAa,sBAAqB,EAAE;AAAA,EACvD;AAGA,MAAI,aAAa;AACjB,MAAI,WAAW;AACb,UAAM,oBAAoB,MAAM,OAAO,CAAC,MAAM,EAAE,mBAAmB;AACnE,QAAI,kBAAkB,SAAS,GAAG;AAChC,YAAM,gBAAgB,MAAM;AAAA,QAC1B;AAAA,QACA,OAAO,SAAS;AACd,gBAAM,OAAO,OAAO,KAAK,YAAY,SAAS,WAAW,KAAK,YAAY,OAAO;AACjF,gBAAM,SAAS,MAAM,UAAU,iBAAiB,KAAK,qBAAqB,KAAK,MAAM,IAAI;AAEzF,cAAI,OAAO,QAAQ,SAAS,2BAA2B,GAAG;AACxD,gBAAI,QAAQ,4BAA4B,KAAK,UAAU,MAAM,GAAG,EAAE,CAAC,EAAE;AACrE,mBAAO;AAAA,UACT;AAEA,gBAAM,cAAc,sBAAsB,KAAK,MAAM,OAAO,OAAO,OAAO,OAAO;AACjF,UAAAD,IAAG,cAAcD,MAAK,KAAK,UAAU,KAAK,YAAY,GAAG,KAAK,mBAAmB,WAAW;AAC5F,oBAAU,OAAO,UAAU,KAAK,YAAY;AAC5C,iBAAO;AAAA,QACT;AAAA,QACA;AAAA,UACE,aAAa;AAAA,UACb,YAAY,CAAC,MAAM,UAAU,aAAa,iBAAiB,MAAM,KAAK;AAAA,QACxE;AAAA,MACF;AAEA,iBAAW,KAAK,cAAc,SAAS;AACrC,YAAI,EAAE,WAAW,eAAe,EAAE,MAAO;AAAA,MAC3C;AAAA,IACF;AAAA,EACF;AAGA,QAAM,QAAQ,WAAW,YAAY;AAErC,MAAI,QAAQ,uBAAuB;AAAA,IACjC,UAAU,MAAM;AAAA,IAChB,cAAc;AAAA,IACd,WAAW;AAAA,IACX,YAAY;AAAA,EACd,CAAC;AAED,SAAO;AAAA,IACL,eAAe,aAAa;AAAA,IAC5B,mBAAmB,MAAM;AAAA,IACzB,uBAAuB;AAAA,IACvB,sBAAsB;AAAA,IACtB;AAAA,EACF;AACF;","names":["fs","path","path","fs","YAML","opts","fs","path","path","fs","matter"]}
@@ -1 +0,0 @@
- {"version":3,"sources":["../src/intelligence/ollama.ts","../src/intelligence/lm-studio.ts"],"sourcesContent":["import type { LlmProvider, EmbeddingProvider, LlmResponse, EmbeddingResponse, LlmRequestOptions } from './llm.js';\nimport { LLM_REQUEST_TIMEOUT_MS, EMBEDDING_REQUEST_TIMEOUT_MS, DAEMON_CLIENT_TIMEOUT_MS } from '../constants.js';\n\ninterface OllamaConfig {\n model?: string;\n base_url?: string;\n context_window?: number;\n max_tokens?: number;\n // Legacy fields (ignored, kept for backward compat during migration)\n embedding_model?: string;\n summary_model?: string;\n}\n\n// Ollama API endpoints\nconst ENDPOINT_GENERATE = '/api/generate';\nconst ENDPOINT_EMBED = '/api/embed';\nconst ENDPOINT_TAGS = '/api/tags';\n\nexport class OllamaBackend implements LlmProvider, EmbeddingProvider {\n static readonly DEFAULT_BASE_URL = 'http://localhost:11434';\n readonly name = 'ollama';\n private baseUrl: string;\n private model: string;\n private defaultMaxTokens: number;\n private contextWindow: number | undefined;\n\n constructor(config?: OllamaConfig) {\n this.baseUrl = config?.base_url ?? OllamaBackend.DEFAULT_BASE_URL;\n this.model = config?.model ?? config?.summary_model ?? 'llama3.2';\n this.defaultMaxTokens = config?.max_tokens ?? 1024;\n this.contextWindow = config?.context_window;\n }\n\n async summarize(prompt: string, opts?: LlmRequestOptions): Promise<LlmResponse> {\n const maxTokens = opts?.maxTokens ?? this.defaultMaxTokens;\n\n // Send num_ctx from config or per-call override. Ollama reloads the model\n // on num_ctx changes, but consistent values (same num_ctx every call)\n // only cause one reload on first use. Without this, Ollama falls back to\n // its model default (often 2048), ignoring the user's configured context.\n const contextLength = opts?.contextLength ?? this.contextWindow;\n const options: Record<string, unknown> = { num_predict: maxTokens };\n if (contextLength) {\n options.num_ctx = contextLength;\n }\n\n const body: Record<string, unknown> = {\n model: this.model,\n prompt,\n stream: true,\n options,\n };\n\n // System prompt — sent as a separate field instead of concatenated into prompt\n if (opts?.systemPrompt) {\n body.system = opts.systemPrompt;\n }\n\n // Thinking control — false suppresses chain-of-thought for reasoning models\n if (opts?.reasoning) {\n body.think = opts.reasoning === 'off' ? false : opts.reasoning;\n }\n\n // Keep model loaded between requests (useful for digest cycles)\n if (opts?.keepAlive) {\n body.keep_alive = opts.keepAlive;\n }\n\n const response = await fetch(`${this.baseUrl}${ENDPOINT_GENERATE}`, {\n method: 'POST',\n headers: { 'Content-Type': 'application/json' },\n body: JSON.stringify(body),\n signal: AbortSignal.timeout(opts?.timeoutMs ?? LLM_REQUEST_TIMEOUT_MS),\n });\n\n if (!response.ok) {\n const errorBody = await response.text().catch(() => '');\n throw new Error(`Ollama summarize failed: ${response.status} ${errorBody.slice(0, 500)}`);\n }\n\n return this.readStream(response);\n }\n\n /** Read an Ollama streaming response (newline-delimited JSON) and accumulate the result. */\n private async readStream(response: Response): Promise<LlmResponse> {\n const reader = response.body!.getReader();\n const decoder = new TextDecoder();\n let text = '';\n let model = this.model;\n let buffer = '';\n\n try {\n for (;;) {\n const { done, value } = await reader.read();\n if (done) break;\n\n buffer += decoder.decode(value, { stream: true });\n const lines = buffer.split('\\n');\n buffer = lines.pop() ?? 
'';\n\n for (const line of lines) {\n if (!line.trim()) continue;\n const chunk = JSON.parse(line) as { response?: string; model?: string; error?: string };\n if (chunk.error) throw new Error(`Ollama stream error: ${chunk.error}`);\n text += chunk.response ?? '';\n if (chunk.model) model = chunk.model;\n }\n }\n\n // Process remaining buffer\n if (buffer.trim()) {\n const chunk = JSON.parse(buffer) as { response?: string; model?: string; error?: string };\n if (chunk.error) throw new Error(`Ollama stream error: ${chunk.error}`);\n text += chunk.response ?? '';\n if (chunk.model) model = chunk.model;\n }\n } finally {\n reader.releaseLock();\n }\n\n return { text, model };\n }\n\n async embed(text: string): Promise<EmbeddingResponse> {\n const response = await fetch(`${this.baseUrl}${ENDPOINT_EMBED}`, {\n method: 'POST',\n headers: { 'Content-Type': 'application/json' },\n body: JSON.stringify({\n model: this.model,\n input: text,\n }),\n signal: AbortSignal.timeout(EMBEDDING_REQUEST_TIMEOUT_MS),\n });\n\n if (!response.ok) {\n throw new Error(`Ollama embed failed: ${response.status} ${response.statusText}`);\n }\n\n const data = await response.json() as { embeddings: number[][]; model: string };\n const embedding = data.embeddings[0];\n return { embedding, model: data.model, dimensions: embedding.length };\n }\n\n async isAvailable(): Promise<boolean> {\n try {\n const response = await fetch(`${this.baseUrl}${ENDPOINT_TAGS}`, {\n signal: AbortSignal.timeout(DAEMON_CLIENT_TIMEOUT_MS),\n });\n return response.ok;\n } catch {\n return false;\n }\n }\n\n /** List available models on this Ollama instance. */\n async listModels(timeoutMs?: number): Promise<string[]> {\n try {\n const response = await fetch(`${this.baseUrl}${ENDPOINT_TAGS}`, {\n signal: AbortSignal.timeout(timeoutMs ?? DAEMON_CLIENT_TIMEOUT_MS),\n });\n const data = await response.json() as { models: Array<{ name: string }> };\n return data.models.map((m) => m.name);\n } catch {\n return [];\n }\n }\n}\n","import type { LlmProvider, EmbeddingProvider, LlmResponse, EmbeddingResponse, LlmRequestOptions } from './llm.js';\nimport { LLM_REQUEST_TIMEOUT_MS, EMBEDDING_REQUEST_TIMEOUT_MS, DAEMON_CLIENT_TIMEOUT_MS } from '../constants.js';\n\ninterface LmStudioConfig {\n model?: string;\n base_url?: string;\n context_window?: number;\n max_tokens?: number;\n // Legacy fields\n embedding_model?: string;\n summary_model?: string;\n}\n\n// LM Studio API endpoints\nconst ENDPOINT_CHAT = '/api/v1/chat';\nconst ENDPOINT_MODELS_LOAD = '/api/v1/models/load';\nconst ENDPOINT_MODELS_LIST = '/v1/models';\nconst ENDPOINT_MODELS_NATIVE = '/api/v1/models';\nconst ENDPOINT_EMBEDDINGS = '/v1/embeddings';\n\n/** Shape of a loaded instance from the LM Studio native models API.\n * Config fields vary by engine — llama.cpp models include flash_attention\n * and offload_kv_cache_to_gpu, but other engines (MLX, etc.) may omit them. */\ninterface NativeLoadedInstance {\n id: string;\n config: {\n context_length: number;\n flash_attention?: boolean;\n offload_kv_cache_to_gpu?: boolean;\n };\n}\n\n/** Shape of a model entry from the LM Studio native models API. 
*/\ninterface NativeModelEntry {\n type: string;\n key: string;\n loaded_instances: NativeLoadedInstance[];\n}\n\nexport class LmStudioBackend implements LlmProvider, EmbeddingProvider {\n static readonly DEFAULT_BASE_URL = 'http://localhost:1234';\n readonly name = 'lm-studio';\n private baseUrl: string;\n private model: string;\n private instanceId: string | null = null;\n private contextWindow: number | undefined;\n private defaultMaxTokens: number;\n\n constructor(config?: LmStudioConfig) {\n this.baseUrl = config?.base_url ?? LmStudioBackend.DEFAULT_BASE_URL;\n this.model = config?.model ?? config?.summary_model ?? 'llama3.2';\n this.contextWindow = config?.context_window;\n this.defaultMaxTokens = config?.max_tokens ?? 1024;\n }\n\n /**\n * Generate text using LM Studio's native REST API (/api/v1/chat).\n * Routes to our specific instance by ID when available, with model name +\n * context_length as fallback. This ensures correct routing when multiple\n * daemons share the same LM Studio, and graceful degradation when our\n * instance is evicted by idle TTL.\n */\n async summarize(prompt: string, opts?: LlmRequestOptions): Promise<LlmResponse> {\n const maxTokens = opts?.maxTokens ?? this.defaultMaxTokens;\n const contextLength = opts?.contextLength ?? this.contextWindow;\n\n const body: Record<string, unknown> = {\n model: this.instanceId ?? this.model,\n input: prompt,\n max_output_tokens: maxTokens,\n store: false,\n };\n\n // Always send context_length — even when routing by instance ID.\n // If our instance was evicted and LM Studio auto-loads, this ensures\n // the replacement gets the correct context window.\n if (contextLength) {\n body.context_length = contextLength;\n }\n\n // System prompt — sent separately from user content\n if (opts?.systemPrompt) {\n body.system_prompt = opts.systemPrompt;\n }\n\n // Reasoning control — 'off' suppresses chain-of-thought for reasoning models\n if (opts?.reasoning) {\n body.reasoning = opts.reasoning;\n }\n\n const response = await fetch(`${this.baseUrl}${ENDPOINT_CHAT}`, {\n method: 'POST',\n headers: { 'Content-Type': 'application/json' },\n body: JSON.stringify(body),\n signal: AbortSignal.timeout(opts?.timeoutMs ?? LLM_REQUEST_TIMEOUT_MS),\n });\n\n if (!response.ok) {\n const errorBody = await response.text().catch(() => '');\n // If our instance was evicted, clear the ID so ensureLoaded\n // reloads on the next cycle instead of hitting a stale ID repeatedly\n if (response.status === 404 && this.instanceId) {\n this.instanceId = null;\n }\n throw new Error(`LM Studio summarize failed: ${response.status} ${errorBody.slice(0, 500)}`);\n }\n\n const data = await response.json() as {\n model_instance_id: string;\n output: Array<{ type: string; content: string }>;\n };\n const messageOutput = data.output.find((o) => o.type === 'message');\n const text = messageOutput?.content ?? 
'';\n return { text, model: data.model_instance_id };\n }\n\n /**\n * Generate embeddings using LM Studio's OpenAI-compatible endpoint.\n * (The native API doesn't have an embedding endpoint — OpenAI-compat is fine here.)\n */\n async embed(text: string): Promise<EmbeddingResponse> {\n const response = await fetch(`${this.baseUrl}${ENDPOINT_EMBEDDINGS}`, {\n method: 'POST',\n headers: { 'Content-Type': 'application/json' },\n body: JSON.stringify({\n model: this.model,\n input: text,\n }),\n signal: AbortSignal.timeout(EMBEDDING_REQUEST_TIMEOUT_MS),\n });\n\n if (!response.ok) {\n throw new Error(`LM Studio embed failed: ${response.status}`);\n }\n\n const data = await response.json() as {\n data: Array<{ embedding: number[] }>;\n model: string;\n };\n const embedding = data.data[0].embedding;\n return { embedding, model: data.model, dimensions: embedding.length };\n }\n\n /**\n * Ensure a model instance is loaded and capture its ID for routing.\n * Called every digest cycle so it recovers from idle TTL eviction.\n *\n * Strategy: reuse ANY loaded instance of this model. Only load a new one\n * when zero instances exist. This avoids the previous bug where strict\n * config matching (context_length, offload_kv_cache_to_gpu) caused new\n * instances to spawn every cycle — exhausting system resources.\n *\n * context_length is set per-request on /api/v1/chat, so we don't need\n * to match it at load time. Load-time-only params like\n * offload_kv_cache_to_gpu are llama.cpp-specific and may not apply to\n * all models (e.g., glm-4.7-flash has no KV cache setting).\n */\n async ensureLoaded(contextLength?: number, gpuKvCache?: boolean): Promise<void> {\n // Query native API for existing loaded instances of this model\n const instances = await this.getLoadedInstances();\n\n if (instances.length > 0) {\n // Reuse the first available instance — don't reject over config differences.\n // context_length is set per-request; load-time params like kv_cache are\n // model-dependent and may not even appear in the instance config.\n this.instanceId = instances[0].id;\n return;\n }\n\n // No instances loaded — load one with our preferred settings.\n // These are hints; LM Studio silently ignores params that don't apply to the model's engine.\n const ctx = contextLength ?? this.contextWindow;\n const body: Record<string, unknown> = {\n model: this.model,\n // llama.cpp-specific — ignored by other engines (MLX, etc.)\n flash_attention: true,\n };\n if (ctx) {\n body.context_length = ctx;\n }\n if (gpuKvCache) {\n body.offload_kv_cache_to_gpu = true;\n }\n\n const response = await fetch(`${this.baseUrl}${ENDPOINT_MODELS_LOAD}`, {\n method: 'POST',\n headers: { 'Content-Type': 'application/json' },\n body: JSON.stringify(body),\n signal: AbortSignal.timeout(LLM_REQUEST_TIMEOUT_MS),\n });\n\n if (!response.ok) {\n const errorBody = await response.text().catch(() => '');\n throw new Error(`LM Studio model load failed: ${response.status} ${errorBody.slice(0, 200)}`);\n }\n\n const loadResult = await response.json() as Record<string, unknown>;\n const id = (loadResult.instance_id ?? loadResult.id ?? 
loadResult.model_instance_id) as string | undefined;\n if (id) {\n this.instanceId = id;\n }\n }\n\n /**\n * Query the LM Studio native API for loaded instances of this model.\n * Returns an empty array if the API is unavailable or the model has no loaded instances.\n */\n private async getLoadedInstances(): Promise<NativeLoadedInstance[]> {\n try {\n const response = await fetch(`${this.baseUrl}${ENDPOINT_MODELS_NATIVE}`, {\n signal: AbortSignal.timeout(DAEMON_CLIENT_TIMEOUT_MS),\n });\n if (!response.ok) return [];\n\n const data = await response.json() as { models: NativeModelEntry[] };\n const entry = data.models.find((m) => m.key === this.model);\n return entry?.loaded_instances ?? [];\n } catch {\n return [];\n }\n }\n\n async isAvailable(): Promise<boolean> {\n try {\n const response = await fetch(`${this.baseUrl}${ENDPOINT_MODELS_LIST}`, {\n signal: AbortSignal.timeout(DAEMON_CLIENT_TIMEOUT_MS),\n });\n return response.ok;\n } catch {\n return false;\n }\n }\n\n /** List available models on this LM Studio instance. */\n async listModels(timeoutMs?: number): Promise<string[]> {\n try {\n const response = await fetch(`${this.baseUrl}${ENDPOINT_MODELS_LIST}`, {\n signal: AbortSignal.timeout(timeoutMs ?? DAEMON_CLIENT_TIMEOUT_MS),\n });\n const data = await response.json() as { data: Array<{ id: string }> };\n return data.data.map((m) => m.id);\n } catch {\n return [];\n }\n }\n}\n"],"mappings":";;;;;;;;AAcA,IAAM,oBAAoB;AAC1B,IAAM,iBAAiB;AACvB,IAAM,gBAAgB;AAEf,IAAM,gBAAN,MAAM,eAAwD;AAAA,EACnE,OAAgB,mBAAmB;AAAA,EAC1B,OAAO;AAAA,EACR;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EAER,YAAY,QAAuB;AACjC,SAAK,UAAU,QAAQ,YAAY,eAAc;AACjD,SAAK,QAAQ,QAAQ,SAAS,QAAQ,iBAAiB;AACvD,SAAK,mBAAmB,QAAQ,cAAc;AAC9C,SAAK,gBAAgB,QAAQ;AAAA,EAC/B;AAAA,EAEA,MAAM,UAAU,QAAgB,MAAgD;AAC9E,UAAM,YAAY,MAAM,aAAa,KAAK;AAM1C,UAAM,gBAAgB,MAAM,iBAAiB,KAAK;AAClD,UAAM,UAAmC,EAAE,aAAa,UAAU;AAClE,QAAI,eAAe;AACjB,cAAQ,UAAU;AAAA,IACpB;AAEA,UAAM,OAAgC;AAAA,MACpC,OAAO,KAAK;AAAA,MACZ;AAAA,MACA,QAAQ;AAAA,MACR;AAAA,IACF;AAGA,QAAI,MAAM,cAAc;AACtB,WAAK,SAAS,KAAK;AAAA,IACrB;AAGA,QAAI,MAAM,WAAW;AACnB,WAAK,QAAQ,KAAK,cAAc,QAAQ,QAAQ,KAAK;AAAA,IACvD;AAGA,QAAI,MAAM,WAAW;AACnB,WAAK,aAAa,KAAK;AAAA,IACzB;AAEA,UAAM,WAAW,MAAM,MAAM,GAAG,KAAK,OAAO,GAAG,iBAAiB,IAAI;AAAA,MAClE,QAAQ;AAAA,MACR,SAAS,EAAE,gBAAgB,mBAAmB;AAAA,MAC9C,MAAM,KAAK,UAAU,IAAI;AAAA,MACzB,QAAQ,YAAY,QAAQ,MAAM,aAAa,sBAAsB;AAAA,IACvE,CAAC;AAED,QAAI,CAAC,SAAS,IAAI;AAChB,YAAM,YAAY,MAAM,SAAS,KAAK,EAAE,MAAM,MAAM,EAAE;AACtD,YAAM,IAAI,MAAM,4BAA4B,SAAS,MAAM,IAAI,UAAU,MAAM,GAAG,GAAG,CAAC,EAAE;AAAA,IAC1F;AAEA,WAAO,KAAK,WAAW,QAAQ;AAAA,EACjC;AAAA;AAAA,EAGA,MAAc,WAAW,UAA0C;AACjE,UAAM,SAAS,SAAS,KAAM,UAAU;AACxC,UAAM,UAAU,IAAI,YAAY;AAChC,QAAI,OAAO;AACX,QAAI,QAAQ,KAAK;AACjB,QAAI,SAAS;AAEb,QAAI;AACF,iBAAS;AACP,cAAM,EAAE,MAAM,MAAM,IAAI,MAAM,OAAO,KAAK;AAC1C,YAAI,KAAM;AAEV,kBAAU,QAAQ,OAAO,OAAO,EAAE,QAAQ,KAAK,CAAC;AAChD,cAAM,QAAQ,OAAO,MAAM,IAAI;AAC/B,iBAAS,MAAM,IAAI,KAAK;AAExB,mBAAW,QAAQ,OAAO;AACxB,cAAI,CAAC,KAAK,KAAK,EAAG;AAClB,gBAAM,QAAQ,KAAK,MAAM,IAAI;AAC7B,cAAI,MAAM,MAAO,OAAM,IAAI,MAAM,wBAAwB,MAAM,KAAK,EAAE;AACtE,kBAAQ,MAAM,YAAY;AAC1B,cAAI,MAAM,MAAO,SAAQ,MAAM;AAAA,QACjC;AAAA,MACF;AAGA,UAAI,OAAO,KAAK,GAAG;AACjB,cAAM,QAAQ,KAAK,MAAM,MAAM;AAC/B,YAAI,MAAM,MAAO,OAAM,IAAI,MAAM,wBAAwB,MAAM,KAAK,EAAE;AACtE,gBAAQ,MAAM,YAAY;AAC1B,YAAI,MAAM,MAAO,SAAQ,MAAM;AAAA,MACjC;AAAA,IACF,UAAE;AACA,aAAO,YAAY;AAAA,IACrB;AAEA,WAAO,EAAE,MAAM,MAAM;AAAA,EACvB;AAAA,EAEA,MAAM,MAAM,MAA0C;AACpD,UAAM,WAAW,MAAM,MAAM,GAAG,KAAK,OAAO,GAAG,cAAc,IAAI;AAAA,MAC/D,QAAQ;AAAA,MACR,SAAS,EAAE,gBAAgB,mBAAmB;AAAA,MAC9C,MAAM,KAAK,UAAU;AAAA,QACnB,OAAO,KAAK;AAAA,QACZ,O
AAO;AAAA,MACT,CAAC;AAAA,MACD,QAAQ,YAAY,QAAQ,4BAA4B;AAAA,IAC1D,CAAC;AAED,QAAI,CAAC,SAAS,IAAI;AAChB,YAAM,IAAI,MAAM,wBAAwB,SAAS,MAAM,IAAI,SAAS,UAAU,EAAE;AAAA,IAClF;AAEA,UAAM,OAAO,MAAM,SAAS,KAAK;AACjC,UAAM,YAAY,KAAK,WAAW,CAAC;AACnC,WAAO,EAAE,WAAW,OAAO,KAAK,OAAO,YAAY,UAAU,OAAO;AAAA,EACtE;AAAA,EAEA,MAAM,cAAgC;AACpC,QAAI;AACF,YAAM,WAAW,MAAM,MAAM,GAAG,KAAK,OAAO,GAAG,aAAa,IAAI;AAAA,QAC9D,QAAQ,YAAY,QAAQ,wBAAwB;AAAA,MACtD,CAAC;AACD,aAAO,SAAS;AAAA,IAClB,QAAQ;AACN,aAAO;AAAA,IACT;AAAA,EACF;AAAA;AAAA,EAGA,MAAM,WAAW,WAAuC;AACtD,QAAI;AACF,YAAM,WAAW,MAAM,MAAM,GAAG,KAAK,OAAO,GAAG,aAAa,IAAI;AAAA,QAC9D,QAAQ,YAAY,QAAQ,aAAa,wBAAwB;AAAA,MACnE,CAAC;AACD,YAAM,OAAO,MAAM,SAAS,KAAK;AACjC,aAAO,KAAK,OAAO,IAAI,CAAC,MAAM,EAAE,IAAI;AAAA,IACtC,QAAQ;AACN,aAAO,CAAC;AAAA,IACV;AAAA,EACF;AACF;;;ACxJA,IAAM,gBAAgB;AACtB,IAAM,uBAAuB;AAC7B,IAAM,uBAAuB;AAC7B,IAAM,yBAAyB;AAC/B,IAAM,sBAAsB;AAqBrB,IAAM,kBAAN,MAAM,iBAA0D;AAAA,EACrE,OAAgB,mBAAmB;AAAA,EAC1B,OAAO;AAAA,EACR;AAAA,EACA;AAAA,EACA,aAA4B;AAAA,EAC5B;AAAA,EACA;AAAA,EAER,YAAY,QAAyB;AACnC,SAAK,UAAU,QAAQ,YAAY,iBAAgB;AACnD,SAAK,QAAQ,QAAQ,SAAS,QAAQ,iBAAiB;AACvD,SAAK,gBAAgB,QAAQ;AAC7B,SAAK,mBAAmB,QAAQ,cAAc;AAAA,EAChD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASA,MAAM,UAAU,QAAgB,MAAgD;AAC9E,UAAM,YAAY,MAAM,aAAa,KAAK;AAC1C,UAAM,gBAAgB,MAAM,iBAAiB,KAAK;AAElD,UAAM,OAAgC;AAAA,MACpC,OAAO,KAAK,cAAc,KAAK;AAAA,MAC/B,OAAO;AAAA,MACP,mBAAmB;AAAA,MACnB,OAAO;AAAA,IACT;AAKA,QAAI,eAAe;AACjB,WAAK,iBAAiB;AAAA,IACxB;AAGA,QAAI,MAAM,cAAc;AACtB,WAAK,gBAAgB,KAAK;AAAA,IAC5B;AAGA,QAAI,MAAM,WAAW;AACnB,WAAK,YAAY,KAAK;AAAA,IACxB;AAEA,UAAM,WAAW,MAAM,MAAM,GAAG,KAAK,OAAO,GAAG,aAAa,IAAI;AAAA,MAC9D,QAAQ;AAAA,MACR,SAAS,EAAE,gBAAgB,mBAAmB;AAAA,MAC9C,MAAM,KAAK,UAAU,IAAI;AAAA,MACzB,QAAQ,YAAY,QAAQ,MAAM,aAAa,sBAAsB;AAAA,IACvE,CAAC;AAED,QAAI,CAAC,SAAS,IAAI;AAChB,YAAM,YAAY,MAAM,SAAS,KAAK,EAAE,MAAM,MAAM,EAAE;AAGtD,UAAI,SAAS,WAAW,OAAO,KAAK,YAAY;AAC9C,aAAK,aAAa;AAAA,MACpB;AACA,YAAM,IAAI,MAAM,+BAA+B,SAAS,MAAM,IAAI,UAAU,MAAM,GAAG,GAAG,CAAC,EAAE;AAAA,IAC7F;AAEA,UAAM,OAAO,MAAM,SAAS,KAAK;AAIjC,UAAM,gBAAgB,KAAK,OAAO,KAAK,CAAC,MAAM,EAAE,SAAS,SAAS;AAClE,UAAM,OAAO,eAAe,WAAW;AACvC,WAAO,EAAE,MAAM,OAAO,KAAK,kBAAkB;AAAA,EAC/C;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,MAAM,MAAM,MAA0C;AACpD,UAAM,WAAW,MAAM,MAAM,GAAG,KAAK,OAAO,GAAG,mBAAmB,IAAI;AAAA,MACpE,QAAQ;AAAA,MACR,SAAS,EAAE,gBAAgB,mBAAmB;AAAA,MAC9C,MAAM,KAAK,UAAU;AAAA,QACnB,OAAO,KAAK;AAAA,QACZ,OAAO;AAAA,MACT,CAAC;AAAA,MACD,QAAQ,YAAY,QAAQ,4BAA4B;AAAA,IAC1D,CAAC;AAED,QAAI,CAAC,SAAS,IAAI;AAChB,YAAM,IAAI,MAAM,2BAA2B,SAAS,MAAM,EAAE;AAAA,IAC9D;AAEA,UAAM,OAAO,MAAM,SAAS,KAAK;AAIjC,UAAM,YAAY,KAAK,KAAK,CAAC,EAAE;AAC/B,WAAO,EAAE,WAAW,OAAO,KAAK,OAAO,YAAY,UAAU,OAAO;AAAA,EACtE;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAgBA,MAAM,aAAa,eAAwB,YAAqC;AAE9E,UAAM,YAAY,MAAM,KAAK,mBAAmB;AAEhD,QAAI,UAAU,SAAS,GAAG;AAIxB,WAAK,aAAa,UAAU,CAAC,EAAE;AAC/B;AAAA,IACF;AAIA,UAAM,MAAM,iBAAiB,KAAK;AAClC,UAAM,OAAgC;AAAA,MACpC,OAAO,KAAK;AAAA;AAAA,MAEZ,iBAAiB;AAAA,IACnB;AACA,QAAI,KAAK;AACP,WAAK,iBAAiB;AAAA,IACxB;AACA,QAAI,YAAY;AACd,WAAK,0BAA0B;AAAA,IACjC;AAEA,UAAM,WAAW,MAAM,MAAM,GAAG,KAAK,OAAO,GAAG,oBAAoB,IAAI;AAAA,MACrE,QAAQ;AAAA,MACR,SAAS,EAAE,gBAAgB,mBAAmB;AAAA,MAC9C,MAAM,KAAK,UAAU,IAAI;AAAA,MACzB,QAAQ,YAAY,QAAQ,sBAAsB;AAAA,IACpD,CAAC;AAED,QAAI,CAAC,SAAS,IAAI;AAChB,YAAM,YAAY,MAAM,SAAS,KAAK,EAAE,MAAM,MAAM,EAAE;AACtD,YAAM,IAAI,MAAM,gCAAgC,SAAS,MAAM,IAAI,UAAU,MAAM,GAAG,GAAG,CAAC,EAAE;AAAA,IAC9F;AAEA,UAAM,aAAa,MAAM,SAAS,KAAK;AACvC,UAAM,KAAM,WAAW,eAAe,WAAW,MAAM,WAAW;AAClE,QAAI,IAAI;AACN,WAAK,aAAa;AAAA,IACpB;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,MAAc,qBAAsD;AAClE,QAAI;AACF,YAAM,WAAW,MAAM,MAAM,GAAG,KAAK,OAAO,GAAG,s
BAAsB,IAAI;AAAA,QACvE,QAAQ,YAAY,QAAQ,wBAAwB;AAAA,MACtD,CAAC;AACD,UAAI,CAAC,SAAS,GAAI,QAAO,CAAC;AAE1B,YAAM,OAAO,MAAM,SAAS,KAAK;AACjC,YAAM,QAAQ,KAAK,OAAO,KAAK,CAAC,MAAM,EAAE,QAAQ,KAAK,KAAK;AAC1D,aAAO,OAAO,oBAAoB,CAAC;AAAA,IACrC,QAAQ;AACN,aAAO,CAAC;AAAA,IACV;AAAA,EACF;AAAA,EAEA,MAAM,cAAgC;AACpC,QAAI;AACF,YAAM,WAAW,MAAM,MAAM,GAAG,KAAK,OAAO,GAAG,oBAAoB,IAAI;AAAA,QACrE,QAAQ,YAAY,QAAQ,wBAAwB;AAAA,MACtD,CAAC;AACD,aAAO,SAAS;AAAA,IAClB,QAAQ;AACN,aAAO;AAAA,IACT;AAAA,EACF;AAAA;AAAA,EAGA,MAAM,WAAW,WAAuC;AACtD,QAAI;AACF,YAAM,WAAW,MAAM,MAAM,GAAG,KAAK,OAAO,GAAG,oBAAoB,IAAI;AAAA,QACrE,QAAQ,YAAY,QAAQ,aAAa,wBAAwB;AAAA,MACnE,CAAC;AACD,YAAM,OAAO,MAAM,SAAS,KAAK;AACjC,aAAO,KAAK,KAAK,IAAI,CAAC,MAAM,EAAE,EAAE;AAAA,IAClC,QAAQ;AACN,aAAO,CAAC;AAAA,IACV;AAAA,EACF;AACF;","names":[]}
@@ -1 +0,0 @@
- {"version":3,"sources":["../src/constants.ts"],"sourcesContent":["/**\n * Shared constants for the Myco codebase.\n * Per CLAUDE.md: \"No Magic Literals — Numeric and string constants\n * MUST NOT appear inline in logic.\"\n */\n\n// --- Token estimation ---\n/** Approximate characters per token for the chars/4 heuristic. */\nexport const CHARS_PER_TOKEN = 4;\n\n/** Estimate token count from character length using the CHARS_PER_TOKEN heuristic. */\nexport function estimateTokens(text: string): number {\n return Math.ceil(text.length / CHARS_PER_TOKEN);\n}\n\n// --- Embedding ---\n/** Max characters of text sent to the embedding model. */\nexport const EMBEDDING_INPUT_LIMIT = 8000;\n\n// --- Truncation limits (display/preview) ---\n/** Max chars for a user prompt preview in event summaries. */\nexport const PROMPT_PREVIEW_CHARS = 300;\n/** Max chars for an AI response preview in event summaries. */\nexport const AI_RESPONSE_PREVIEW_CHARS = 500;\n/** Max chars for a command string preview. */\nexport const COMMAND_PREVIEW_CHARS = 80;\n/** Max chars for a content snippet in search results. */\nexport const CONTENT_SNIPPET_CHARS = 120;\n/** Max chars for a tool output preview in hooks. */\nexport const TOOL_OUTPUT_PREVIEW_CHARS = 200;\n/** Max chars for a session summary preview in MCP tools. */\nexport const SESSION_SUMMARY_PREVIEW_CHARS = 300;\n/** Max chars for a recall summary preview. */\nexport const RECALL_SUMMARY_PREVIEW_CHARS = 200;\n\n// --- Context injection layer budgets (chars, not tokens — used with .slice()) ---\nexport const CONTEXT_PLAN_PREVIEW_CHARS = 100;\nexport const CONTEXT_SESSION_PREVIEW_CHARS = 80;\nexport const CONTEXT_SPORE_PREVIEW_CHARS = 80;\n\n// --- Processor maxTokens budgets ---\n/** Response token budget for observation extraction. */\nexport const EXTRACTION_MAX_TOKENS = 2048;\n/** Response token budget for session summary. */\nexport const SUMMARY_MAX_TOKENS = 512;\n/** Response token budget for session title generation. */\nexport const TITLE_MAX_TOKENS = 32;\n/** Response token budget for artifact classification. */\nexport const CLASSIFICATION_MAX_TOKENS = 1024;\n\n// --- Timeouts ---\n/** Daemon client HTTP request timeout (ms). */\nexport const DAEMON_CLIENT_TIMEOUT_MS = 2000;\n/** Health check timeout (ms) — fail fast if daemon isn't responding. */\nexport const DAEMON_HEALTH_CHECK_TIMEOUT_MS = 500;\n/** LLM request timeout (ms). All LLM calls are background daemon work — no need to be aggressive. */\nexport const LLM_REQUEST_TIMEOUT_MS = 180_000;\n/** Embedding request timeout (ms). Embeddings run in background batch processing — generous timeout. */\nexport const EMBEDDING_REQUEST_TIMEOUT_MS = 60_000;\n/** Digest LLM request timeout (ms). Digest cycles use large context windows and may need model loading time. */\nexport const DIGEST_LLM_REQUEST_TIMEOUT_MS = 600_000;\n/** Stdin read timeout for hooks (ms). */\nexport const STDIN_TIMEOUT_MS = 100;\n/** Chokidar write stability threshold (ms). */\nexport const FILE_WATCH_STABILITY_MS = 1000;\n/** Provider detection timeout for detect-providers CLI command (ms). */\nexport const PROVIDER_DETECT_TIMEOUT_MS = 3000;\n\n// --- Time ---\n/** Milliseconds in one day. */\nexport const MS_PER_DAY = 24 * 60 * 60 * 1000;\n\n// --- Buffer cleanup ---\n/** Max age for stale buffer files before cleanup (ms). */\nexport const STALE_BUFFER_MAX_AGE_MS = 1 * MS_PER_DAY;\n\n// --- Retry backoff ---\n/** Retry delays for daemon health check (ms). 
*/\nexport const DAEMON_HEALTH_RETRY_DELAYS = [100, 200, 400, 800, 1500];\n\n/** Grace period after daemon.json is written before stale checks can trigger a restart (ms).\n * Prevents rapid restart loops from concurrent hooks or session reloads. */\nexport const DAEMON_STALE_GRACE_PERIOD_MS = 60_000;\n\n/** Grace period for SIGTERM before escalating to SIGKILL (ms).\n * Gives the old daemon a chance to shut down cleanly, but force-kills\n * to guarantee the configured port is reclaimed. */\nexport const DAEMON_EVICT_TIMEOUT_MS = 3000;\n/** Poll interval when waiting for an evicted daemon to die (ms). */\nexport const DAEMON_EVICT_POLL_MS = 100;\n\n// --- Slug limits ---\n/** Max length for slugified artifact IDs. */\nexport const MAX_SLUG_LENGTH = 100;\n\n// --- Content preview for classification prompt ---\n/** Max chars of file content per candidate in classification prompt. */\nexport const CANDIDATE_CONTENT_PREVIEW = 2000;\n\n// --- Turn rendering ---\n/** Max file paths displayed per turn in session notes. */\nexport const TURN_MAX_FILES_DISPLAYED = 10;\n\n// --- Transcript mining ---\n/** Minimum content length to consider a transcript entry meaningful. */\nexport const MIN_TRANSCRIPT_CONTENT_LENGTH = 10;\n\n// --- Query limits ---\n/** Max recent sessions to check for lineage heuristics. */\nexport const LINEAGE_RECENT_SESSIONS_LIMIT = 5;\n/** Max related spores to query for session notes. */\nexport const RELATED_SPORES_LIMIT = 50;\n\n// --- Context injection ---\n/** Max active plans to inject at session start. */\nexport const SESSION_CONTEXT_MAX_PLANS = 3;\n/** Max spores to inject per prompt. */\nexport const PROMPT_CONTEXT_MAX_SPORES = 3;\n/** Minimum similarity score for prompt context injection (0-1). */\nexport const PROMPT_CONTEXT_MIN_SIMILARITY = 0.3;\n/** Max token budget for session-start context injection. */\nexport const SESSION_CONTEXT_MAX_TOKENS = 500;\n/** Max token budget for per-prompt context injection. */\nexport const PROMPT_CONTEXT_MAX_TOKENS = 300;\n/** Minimum prompt length to trigger context search. */\nexport const PROMPT_CONTEXT_MIN_LENGTH = 10;\n\n// --- MCP tool defaults ---\n/** Default result limit for myco_search. */\nexport const MCP_SEARCH_DEFAULT_LIMIT = 10;\n/** Default result limit for myco_sessions. */\nexport const MCP_SESSIONS_DEFAULT_LIMIT = 20;\n/** Default result limit for myco_logs. */\nexport const MCP_LOGS_DEFAULT_LIMIT = 50;\n\n// --- Digest — Tiers ---\n/** Available token-budget tiers for digest synthesis. */\nexport const DIGEST_TIERS = [1500, 3000, 5000, 7500, 10000] as const;\nexport type DigestTier = (typeof DIGEST_TIERS)[number];\n\n// --- Digest — Context window minimums per tier ---\n/** Minimum context window (tokens) required to run a digest at a given tier. */\nexport const DIGEST_TIER_MIN_CONTEXT: Record<number, number> = {\n 1500: 6500,\n 3000: 11500,\n 5000: 18500,\n 7500: 24500,\n 10000: 30500,\n};\n\n// --- Digest — Substrate ---\n/** Default minimum substrate notes required before a digest cycle runs. */\nexport const DIGEST_MIN_NOTES_FOR_CYCLE = 10;\n\n/** Scoring weights by note type when selecting substrate for synthesis. */\nexport const DIGEST_SUBSTRATE_TYPE_WEIGHTS: Record<string, number> = {\n session: 3,\n spore: 3,\n plan: 2,\n artifact: 1,\n team: 1,\n};\n\n// --- LLM reasoning control ---\n/** Reasoning mode for all Myco LLM calls. Suppresses chain-of-thought tokens from reasoning models. 
*/\nexport const LLM_REASONING_MODE = 'off' as const;\n\n// --- Digest — System prompt overhead estimate ---\n\n// --- Vault curation ---\n/** Max candidate spores after post-filtering for supersession check. */\nexport const SUPERSESSION_CANDIDATE_LIMIT = 5;\n\n/** Over-fetch from vector index before post-filtering by status/type. */\nexport const SUPERSESSION_VECTOR_FETCH_LIMIT = 20;\n\n/** Max output tokens for supersession LLM evaluation. */\nexport const SUPERSESSION_MAX_TOKENS = 256;\n\n/** Similarity threshold for clustering related spores in batch curation. */\nexport const CURATION_CLUSTER_SIMILARITY = 0.75;\n\n// --- Pipeline processing ---\n/** Default page size for pipeline items API listing. */\nexport const PIPELINE_ITEMS_DEFAULT_LIMIT = 50;\n\n// --- Pipeline retry ---\n/** Max retries for parse (structural) pipeline failures — fail fast. */\nexport const PIPELINE_PARSE_MAX_RETRIES = 1;\n/** Exponential backoff multiplier for successive pipeline retries. */\nexport const PIPELINE_BACKOFF_MULTIPLIER = 4;\n\n// --- Pipeline stages (ordered) ---\nexport const PIPELINE_STAGES = ['capture', 'extraction', 'embedding', 'consolidation', 'digest'] as const;\nexport type PipelineStage = typeof PIPELINE_STAGES[number];\n\n// --- Pipeline statuses ---\nexport const PIPELINE_STATUSES = ['pending', 'processing', 'succeeded', 'failed', 'blocked', 'skipped', 'poisoned'] as const;\nexport type PipelineStatus = typeof PIPELINE_STATUSES[number];\n\n// --- Provider roles for circuit breakers ---\nexport const PIPELINE_PROVIDER_ROLES = ['llm', 'embedding', 'digest-llm'] as const;\nexport type PipelineProviderRole = typeof PIPELINE_PROVIDER_ROLES[number];\n\n// --- Stage to provider role mapping ---\nexport const STAGE_PROVIDER_MAP: Record<PipelineStage, PipelineProviderRole | null> = {\n capture: null,\n extraction: 'llm',\n embedding: 'embedding',\n consolidation: 'digest-llm',\n digest: 'digest-llm',\n};\n\n/**\n * Stages processed by the pipeline tick timer.\n * Capture is handled at registration time, digest is gated by the metabolism timer.\n */\nexport const PIPELINE_TICK_STAGES: PipelineStage[] = ['extraction', 'embedding', 'consolidation'];\n\n// --- Item type to applicable stages ---\n// Sessions skip consolidation — consolidation applies to the spores\n// extracted FROM sessions, not the session work item itself.\n// Lineage detection stays outside the pipeline (fire-and-forget, non-critical).\nexport const ITEM_STAGE_MAP: Record<string, PipelineStage[]> = {\n session: ['capture', 'extraction', 'embedding', 'digest'],\n spore: ['capture', 'embedding', 'consolidation', 'digest'],\n artifact: ['capture', 'embedding', 'digest'],\n};\n\n// --- Automatic consolidation ---\n/** Minimum cluster size required before asking LLM to consolidate. */\nexport const CONSOLIDATION_MIN_CLUSTER_SIZE = 3;\n\n/** Over-fetch from vector index before post-filtering by status/type. */\nexport const CONSOLIDATION_VECTOR_FETCH_LIMIT = 20;\n\n/** Max output tokens for consolidation LLM synthesis.\n * Must be large enough for the full JSON response including content field. 
"],"mappings":"…","names":[]}
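// [Editor's illustrative sketch: not part of the package.] Given the constants
// above, ITEM_STAGE_MAP could drive per-item progression like this: find the
// first applicable stage that has not yet succeeded. Helper name and shape are
// assumptions.
function nextStage(itemType: string, succeeded: Set<PipelineStage>): PipelineStage | null {
  const stages = ITEM_STAGE_MAP[itemType] ?? [];
  for (const stage of stages) {
    if (!succeeded.has(stage)) return stage;
  }
  return null; // all applicable stages are done
}
// e.g. nextStage('artifact', new Set(['capture'])) === 'embedding'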
@@ -1 +0,0 @@
- {"version":3,"sources":["../src/daemon/logger.ts","../src/daemon/log-buffer.ts"],"sourcesContent":["import fs from 'node:fs';\nimport path from 'node:path';\nimport { LogRingBuffer } from './log-buffer.js';\n\nexport interface LogEntry {\n timestamp: string;\n level: string;\n component: string;\n message: string;\n [key: string]: unknown;\n}\n\nexport type LogLevel = 'debug' | 'info' | 'warn' | 'error';\n\nexport const LEVEL_ORDER: Record<LogLevel, number> = {\n debug: 0, info: 1, warn: 2, error: 3,\n};\n\ninterface LoggerOptions {\n level?: LogLevel;\n maxSize?: number;\n maxFiles?: number;\n}\n\nexport class DaemonLogger {\n private logPath: string;\n private fd: number | null = null;\n private currentSize = 0;\n private level: LogLevel;\n private maxSize: number;\n private maxFiles: number;\n private logDir: string;\n private ringBuffer: LogRingBuffer;\n\n constructor(logDir: string, options: LoggerOptions = {}) {\n this.logDir = logDir;\n this.logPath = path.join(logDir, 'daemon.log');\n this.level = options.level ?? 'info';\n this.maxSize = options.maxSize ?? 5_242_880;\n this.maxFiles = options.maxFiles ?? 3;\n this.ringBuffer = new LogRingBuffer();\n\n fs.mkdirSync(logDir, { recursive: true });\n this.fd = fs.openSync(this.logPath, 'a');\n try {\n this.currentSize = fs.fstatSync(this.fd).size;\n } catch {\n this.currentSize = 0;\n }\n }\n\n debug(component: string, message: string, data?: Record<string, unknown>): void {\n this.write('debug', component, message, data);\n }\n\n info(component: string, message: string, data?: Record<string, unknown>): void {\n this.write('info', component, message, data);\n }\n\n warn(component: string, message: string, data?: Record<string, unknown>): void {\n this.write('warn', component, message, data);\n }\n\n error(component: string, message: string, data?: Record<string, unknown>): void {\n this.write('error', component, message, data);\n }\n\n close(): void {\n if (this.fd !== null) {\n fs.closeSync(this.fd);\n this.fd = null;\n }\n }\n\n getRingBuffer(): LogRingBuffer {\n return this.ringBuffer;\n }\n\n private write(level: LogLevel, component: string, message: string, data?: Record<string, unknown>): void {\n if (LEVEL_ORDER[level] < LEVEL_ORDER[this.level]) return;\n\n const entry: LogEntry = {\n timestamp: new Date().toISOString(),\n level,\n component,\n message,\n ...data,\n };\n\n this.ringBuffer.push(entry);\n\n const line = JSON.stringify(entry) + '\\n';\n const bytes = Buffer.byteLength(line);\n\n if (this.currentSize + bytes > this.maxSize) {\n this.rotate();\n }\n\n if (this.fd !== null) {\n fs.writeSync(this.fd, line);\n this.currentSize += bytes;\n }\n }\n\n private rotate(): void {\n this.close();\n\n for (let i = this.maxFiles - 1; i >= 1; i--) {\n const from = path.join(this.logDir, `daemon.${i}.log`);\n const to = path.join(this.logDir, `daemon.${i + 1}.log`);\n if (fs.existsSync(from)) {\n if (i + 1 > this.maxFiles) {\n fs.unlinkSync(from);\n } else {\n fs.renameSync(from, to);\n }\n }\n }\n\n if (fs.existsSync(this.logPath)) {\n fs.renameSync(this.logPath, path.join(this.logDir, 'daemon.1.log'));\n }\n\n this.fd = fs.openSync(this.logPath, 'a');\n this.currentSize = 0;\n }\n}\n","import type { LogEntry, LogLevel } from './logger.js';\nimport { LEVEL_ORDER } from './logger.js';\n\nconst LOG_RING_BUFFER_CAPACITY = 1000;\nconst LOG_QUERY_DEFAULT_LIMIT = 100;\n\ninterface LogQueryResult {\n entries: LogEntry[];\n cursor: string;\n cursor_reset?: boolean;\n}\n\ninterface LogQueryOptions {\n level?: LogLevel;\n limit?: 
import type { LogEntry, LogLevel } from './logger.js';
import { LEVEL_ORDER } from './logger.js';

const LOG_RING_BUFFER_CAPACITY = 1000;
const LOG_QUERY_DEFAULT_LIMIT = 100;

interface LogQueryResult {
  entries: LogEntry[];
  cursor: string;
  cursor_reset?: boolean;
}

interface LogQueryOptions {
  level?: LogLevel;
  limit?: number;
}

export class LogRingBuffer {
  private buffer: LogEntry[];
  private head = 0;
  private count = 0;
  private sequence = 0;
  private startSequence = 0;
  private readonly capacity: number;

  constructor(capacity = LOG_RING_BUFFER_CAPACITY) {
    this.capacity = capacity;
    this.buffer = new Array(capacity);
  }

  push(entry: LogEntry): void {
    this.buffer[this.head] = entry;
    this.head = (this.head + 1) % this.capacity;
    if (this.count < this.capacity) {
      this.count++;
    } else {
      this.startSequence++;
    }
    this.sequence++;
  }

  since(cursor: string | null, options?: LogQueryOptions): LogQueryResult {
    const limit = options?.limit ?? LOG_QUERY_DEFAULT_LIMIT;
    const minLevel = options?.level ? LEVEL_ORDER[options.level] : 0;

    let startIdx = 0;
    let cursorReset = false;

    if (cursor !== null) {
      const seq = parseInt(cursor, 10);
      if (isNaN(seq) || seq < this.startSequence) {
        cursorReset = true;
        startIdx = 0;
      } else {
        startIdx = seq - this.startSequence;
      }
    } else {
      // No cursor: return last `limit` entries
      startIdx = Math.max(0, this.count - limit);
    }

    const entries: LogEntry[] = [];
    for (let i = startIdx; i < this.count && entries.length < limit; i++) {
      const bufIdx = (this.head - this.count + i + this.capacity) % this.capacity;
      const entry = this.buffer[bufIdx];
      if (entry && LEVEL_ORDER[entry.level as LogLevel] >= minLevel) {
        entries.push(entry);
      }
    }

    const result: LogQueryResult = {
      entries,
      cursor: String(this.sequence),
    };
    if (cursorReset) result.cursor_reset = true;
    return result;
  }
}
"],"mappings":"…","names":[]}
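// [Editor's illustrative sketch: not part of the published map.] Cursor-based
// polling against the LogRingBuffer above: a null cursor returns the newest
// entries, and feeding the returned cursor back yields only entries pushed in
// between. If the buffer has wrapped past the cursor, cursor_reset is set and
// the client starts over from the oldest retained entry.
const buf = new LogRingBuffer(3);
buf.push({ timestamp: 't1', level: 'info', component: 'a', message: 'one' });
const first = buf.since(null);        // { entries: [one], cursor: '1' }
buf.push({ timestamp: 't2', level: 'warn', component: 'a', message: 'two' });
const next = buf.since(first.cursor); // { entries: [two], cursor: '2' }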